==> pegasus-wms_4.0.1+dfsg/bin/pegasus-run <==

#!/usr/bin/env perl # # Wrapper around pegasus-submit-dag to run a workflow # # Usage: pegasus-run rundir ## # Copyright 2007-2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # # Author: Jens-S. Vöckler voeckler at isi dot edu # Author: Gaurang Mehta gmehta at isi dot edu # Revision : $Revision: 4507 $ # use 5.006; use strict; use Carp; use Cwd; use File::Spec; use File::Basename qw(basename dirname); use Getopt::Long qw(:config bundling no_ignore_case); # path to load our libs.. BEGIN { my $pegasus_config = File::Spec->catfile( dirname($0), 'pegasus-config' ); eval `$pegasus_config --perl-dump`; die("Unable to eval pegasus-config output. $@") if $@; } # load our own local modules use Pegasus::Common; use Pegasus::Properties qw(%initial); # parses -Dprop=val from @ARGV sub make_path($) { my $p = File::Spec->catfile( Cwd::realpath( dirname($0) ), shift() ); warn "# make_path = $p\n" if $main::DEBUG; $p; } sub usage(;$); # { } # constants $main::DEBUG = 0; # for now my $condor; # if set, use Condor for pegasus-monitord my $grid=0; # if set, enable grid checks, disabled by default my $default_tsd = make_path( 'pegasus-monitord' ); my $dagman = make_path( 'pegasus-dagman' ); my ($tsd,$monitor,$conffile); GetOptions( "help|h" => \&usage , "monitor|m=s" => \$tsd , "condor" => \$condor , "debug|d=o" => \$main::DEBUG , "verbose|v+" => \$main::DEBUG , "grid!" => \$grid , 'conf|c=s' => \$conffile , "M" => \$monitor ); my $run = shift; # NEW: Default to cwd if nothing was specified unless ( defined $run ) { $run = getcwd(); my $braindb = File::Spec->catfile( $run, $Pegasus::Common::brainbase ); usage("You need to provide a valid run directory. Cannot find pegasus brain file") unless -r $braindb; } # extra sanity usage( "$run is not a directory." ) unless -d $run; usage( "$run is not accessible." ) unless -r _; my %config = slurp_braindb( $run ) or die "ERROR: open braindb: $!\n"; # pre-condition: The planner writes all properties per workflow into the DAG dir. my $props = Pegasus::Properties->new( $conffile, File::Spec->catfile($run,$config{properties}) ); # if TSD is empty, fill in from default locations $tsd ||= $props->property('pegasus.monitord') || $default_tsd; # # --- functions ------------------------------------------------- # sub usage(;$) { my $msg = shift; my $flag = ( defined $msg && lc($msg) ne 'help' ); if ( $flag ) { my $tty = -t STDOUT; print "\033[1m" if $tty; print "ERROR: $msg\n"; print "\033[0m" if $tty; } my $basename = basename($0,'.pl'); print << "EOF"; Usage: $basename [options] [rundir] Semi-mandatory arguments: rundir is the directory where the workflow resides as well as ancillary files related to the workflow. 
Defaults to current working directory if not specified. Optional arguments: -Dprop=val Explicit settings of a property (multi-option) (only use if really required) -c|--conf fn Use file fn as pegasus properties file. (only use for debugging purposes. pegasus-run will pick the correct planned properties by default.) -h|--help Print this help message and exit. -m|--monitor l Uses the workflow monitor daemon installed as l, default is $default_tsd. -d|--debug lvl Sets the debug level (verbosity), default is $main::DEBUG. -v|--verbose Raises debug level by 1, see --debug. --condor Uses Condor to submit daemons (prototype). --nogrid Disable checking for grids (default). --grid Enable checking for grids. EOF exit( $flag ? 1 : 0 ); } sub create_args { # purpose: create either a shell daemon, or condor-submitted job # warning: It is assumed that condor-submitted jobs require -N to stay in the foreground # globals: $condor (IN): use Condor (true) or daemon (false) mode # paramtr: $daemon (IN): executable location of daemon # @args (IN): other arguments to daemon # returns: argument vector ready to execute my $daemon = shift || croak "Need a daemon name"; my @result; if ( $condor ) { # use Condor to submit a daemon -- makes it restartable on SH death # however, insert -N into the daemon's argument list for foregrounding my $condor_submit = find_exec('condor_submit') || die "ERROR: Unable to find condor_submit\n"; my $base = 'pegasus-run-' . basename($daemon); my $fn = File::Spec->catfile( $run, "$base.sub" ); local(*SUB); open( SUB, ">$fn" ) || die "open $fn: $!\n"; # only arg @result = ( ); foreach my $k ( keys %initial ) { push( @result, "-D$k=$initial{$k}" ) } push( @result, '-N' ); push( @result, @_ ) if ( @_ > 0 ); # write submit file for (restartable) pegasus-monitord print SUB "initialdir = $run\n"; print SUB 'executable = ', File::Spec->rel2abs($daemon), "\n"; print SUB 'arguments = ', join(' ',@result), "\n"; print SUB "universe = local\n"; print SUB "notification = NEVER\n"; print SUB "getenv = True\n"; print SUB "output = $base.out.txt\n"; print SUB "error = $base.err.txt\n"; print SUB "log = $base.log.txt\n"; print SUB "queue\n"; close SUB; @result = ( $condor_submit, $fn ); } else { # run daemon directly @result = ( $daemon ); foreach my $k ( keys %initial ) { push( @result, "-D$k=$initial{$k}" ) } push( @result, @_ ) if ( @_ > 0 ); } @result; } # # --- main ------------------------------------------------------ # # sanity check: lower umask umask 0002; # where were we... my $here = File::Spec->curdir(); $SIG{'__DIE__'} = sub { chdir($here) if defined $here; }; chdir($run) || die "ERROR: chdir $run: $!\n"; # sanity check: find the pegasus-monitord daemon warn "WARN: Either $tsd does not exist or is not executable\n" unless -x $tsd; print STDERR "# found $tsd\n" if $main::DEBUG; # Do GRID check if $grid enabled if ( $grid ) { # sanity check: Is there a GLOBUS_LOCATION? die ( "ERROR: Your environment setup misses GLOBUS_LOCATION.\n", "Please check carefully that you have sourced the correct setup files!\n" ) unless exists $ENV{'GLOBUS_LOCATION'}; # sanity check: find grid-proxy-info from GLOBUS_LOCATION my $g_l = $ENV{'GLOBUS_LOCATION'}; print STDERR "# GLOBUS_LOCATION=$g_l\n" if $main::DEBUG; # sanity check: Is G_L part of L_L_P? my @llp = grep { /^$g_l/ } split /:/, $ENV{'LD_LIBRARY_PATH'}; $ENV{'LD_LIBRARY_PATH'}=File::Spec->catfile($ENV{'GLOBUS_LOCATION'},"/lib") if @llp == 0; # Find grid-proxy-info (should we use openssl instead??) 
my $gpi = File::Spec->catfile( $g_l, 'bin', 'grid-proxy-info' ); die "ERROR: Unable to find $gpi\n" unless -x $gpi; print STDERR "# found $gpi\n" if $main::DEBUG; # common user error # sanity check: Sufficient time left on grid proxy certificate open( GPI, "$gpi -timeleft 2>&1|" ) || die "open $gpi: $!\n"; my $timeleft = <GPI>; chomp($timeleft); $timeleft += 0; # make numeric close GPI; die( "ERROR: $gpi died on signal ", ($? & 127) ) if ( ($? & 127) > 0 ); die( "ERROR: Grid proxy not initialized, Please generate a new proxy\n" ) if $timeleft == -1; die( "ERROR: Grid proxy expired, please refresh\n" ) if $timeleft == 0; die( "ERROR: $gpi exited with status ", $?>>8 ) if ( $? != 0 ); warn( "ERROR: Too little time left ($timeleft s) on grid proxy. Please refresh your proxy\n" ) if $timeleft < 7200; print STDERR "# grid proxy has $timeleft s left\n" if $main::DEBUG; } # end if($grid) checks only if grid option is enabled. if ( $config{dag} ) { # find pegasus-submit-dag my $psd = File::Spec->catfile( $config{bindir}, 'pegasus-submit-dag' ); die "ERROR: Unable to access $psd\n" unless -x $psd; print STDERR "# found $psd\n" if $main::DEBUG; # sanity check: Is the DAG file there? die "ERROR: Unable to locate $config{dag}\n" unless -r $config{dag}; print STDERR "# found $config{dag}\n" if $main::DEBUG; # NEW: is there a rescue file, or multiple rescue levels? my $original; my @rescue = check_rescue($run,$config{dag}); if ( @rescue > 0 ) { my (@stat,%rescue,$maxsize); foreach my $fn ( @rescue ) { if ( (@stat = stat($fn)) > 0 ) { $rescue{$fn} = [ @stat ]; $maxsize = $stat[7] if $maxsize < $stat[7]; } } print "\n\nDetected the presence of Rescue DAGs:\n"; my $width = log10($maxsize); foreach my $fn ( @rescue ) { printf( " %s %*u %s\n", isodate($rescue{$fn}[9]), $width, $rescue{$fn}[7], basename($fn) ); } # overwrite with "latest" (read: longest basename) rescue DAG $original = $config{dag}; $config{dag} = $rescue[$#rescue]; print "\nWILL USE ", $config{dag}, "\n\n"; } # find the workflow name and timestamp for pegasus-status my $workflow=$config{'pegasus_wf_name'}; my $time=$config{timestamp}; # start DAGMan with default throttles my @extra = (); foreach my $k ( keys %initial ) { push( @extra, "-D$k=$initial{$k}" ); } my @args = ( $psd ); push( @args, @extra ) if @extra > 0; push( @args, '-d', 0+$main::DEBUG); push( @args, '--grid' ) if $grid; push( @args, '--dagman', $dagman, $config{dag} ); print STDERR "# @args\n" if $main::DEBUG; system(@args) == 0 or die( "ERROR: Running pegasus-submit-dag failed with ", parse_exit($?)); print STDERR "# dagman is running\n" if $main::DEBUG; if ( $monitor ) { if ( -x $tsd ) { # run pegasus-monitord to update job stats until DAGMan finishes my @tsdargs = ($tsd); push( @tsdargs, $config{dag} . '.dagman.out' ); print STDERR "# @tsdargs\n" if $main::DEBUG; if ( system(@tsdargs) == 0 ) { print STDERR "# $tsd is running\n" if $main::DEBUG; } else { warn( "WARN: Running $tsd failed with ", parse_exit($?) ); } } } # next step if ( @rescue > 0 ) { my $n = @rescue + 0; print( "\nI found $n rescue DAG", ( $n>1 ? 's' : '' ), ". I submitted the rescue DAG\n", $config{dag}, "\ninstead of\n", $original, "\n" ); } my $did=undef; my $daglogfile=$config{dag}.".dagman.out"; if ( open( DID, "<$daglogfile" ) ) { while ( <DID> ) { if ( /condor_scheduniv_exec/ ) { # this part was written by a python programmer? $did=(split /\./, (split)[3],2)[1]; last; } } close(DID); } print << "EOF"; Your workflow has been started and runs in the base directory given below. cd $run *** To monitor the workflow you can run *** pegasus-status -l $run *** To remove your workflow run *** EOF print "pegasus-remove -d $did\nor\n" if defined $did; print "pegasus-remove $run\n\n"; } elsif ( $config{type} eq "shell" ) { # sanity check: Is the SCRIPT file there? die "ERROR: Unable to execute $config{script}\n" unless -x $config{script}; print STDERR "# found $config{script}\n" if $main::DEBUG; my @args=( "/bin/bash", $config{script} ); system(@args) == 0 or die( "ERROR: Running $config{script} failed with ", parse_exit($?) ); } chdir($here); exit 0; 

==> pegasus-wms_4.0.1+dfsg/bin/pegasus-status <==

#!/usr/bin/env perl # # collect information about workflows and display their states. # ## # Copyright 2007-2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # # Author: Jens-S. Vöckler voeckler at isi dot edu # Revision: $Revision: 4486 $ # use v5.8.8; # unbroken unicode requires perl >= 5.8.8 use strict; BEGIN { # use very early - before loading most modules! $main::isutf8 = ( exists $ENV{LANG} && $ENV{LANG} =~ m{utf-?8}i ); delete $ENV{LANG}; $ENV{LANG} = 'C'; } use utf8; # tell Perl "this script contains UTF-8" use Carp; use Cwd qw(getcwd abs_path); use File::Spec; use File::Basename qw(basename dirname); use Getopt::Long qw(:config bundling no_ignore_case); use Data::Dumper; # Path to load Pegasus Perl modules BEGIN { my $pegasus_config = File::Spec->catfile( dirname($0), 'pegasus-config' ); eval `$pegasus_config --perl-dump`; die("Unable to eval pegasus-config output. $@") if $@; } use Pegasus::Common; use Pegasus::Properties qw(%initial); # parses -Dprop=val from @ARGV # set function TIOCGWINSZ to return ioctl() argument if ( $^O eq 'darwin' ) { # h2ph is broken on Darwin *TIOCGWINSZ = sub { 0x40087468; }; } elsif ( $^O eq 'linux' ) { # not broken, but may not be installed *TIOCGWINSZ = sub { 0x5413; }; } elsif ( lc $^O eq 'sunos' ) { # may be easier than getting the headers right *TIOCGWINSZ = sub { 0x5468; }; } else { # Neither linux nor macosx eval { require "sys/ioctl.ph" }; } # # --- globals ---------------------------------------------- # $main::debug = 0; # debug output $main::color = 0; # default: no color (black bg terminal) $main::user = $ENV{USER} || $ENV{LOGNAME} || scalar getpwuid($>); $_ = '$Revision: 4486 $'; # don't edit, automatically updated by CVS $main::revision=$1 if /Revision:\s+([0-9.]+)/o; $main::onatty = -t STDOUT; # are we connected to a terminal? 
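# A minimal sketch of how the TIOCGWINSZ constant defined above is used
# (it mirrors initialize_winch() further below; shown here as a comment only):
#
#   my $ws = pack( 'S!4', () );                    # buffer for struct winsize
#   if ( ioctl( STDOUT, &TIOCGWINSZ, $ws ) ) {
#       my ($rows, $cols) = unpack( 'S!4', $ws );  # ws_row, ws_col come first
#   }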
$main::dirsep = File::Spec->catdir( '', '' ); $main::space = ' '; # 2 spaces in basic mode @main::clong = # Condor job states (basic mode) ( 'Unsub' # U , 'Idle' # I , 'Run' # R , 'Del' # X , 'Done' # C , 'Held' # H ); @main::cstat = qw(U I R X C H); # Condor job states (expert mode) @main::ccolor = ( "\033[0;37m" # unsubmitted (gray) , "\033[0;34m" # idle (blue) , "\033[0;32m" # running (green) , "\033[0;35m" # removing (magenta) , "\033[0;36m" # completed (cyan) , "\033[0;31m" # held (red) ); $main::bold = "\033[1m"; # start real xterm as "xterm -bdc" $main::reset = "\033[0m"; # reset all color/bold/etc. %main::ccolor = map { $main::cstat[$_] => $main::ccolor[$_] } 0 .. $#main::cstat; @main::dstat = qw(? I R S F); # Workflow states @main::dlong = ( 'Unknown' , 'Unknown' # actually indeterminable , 'Running' , 'Success' , 'Failure' ); @main::dcolor = ( "\033[0;37m" # unknown (gray) , "\033[0;36m" # indeterminable (cyan) , "\033[0;34m" # running (blue) , "\033[0;30m" # success (black) , "\033[0;31m" # failure (red) ); %main::gstat = # Globus job states (expert mode) ( 0 => '?' # unknown , 1 => 'P' # pending , 2 => 'A' # active , 4 => 'F' # failed , 8 => 'D' # done , 16 => 'S' # suspend , 32 => 'U' # unsuspend , 64 => 'I' # stage-in , 128 => 'O' # stage-out ); @main::jobclass = ( 'unknown' # 0 , 'compute' # 1 , 'stage-in' # 2 , 'stage-out' # 3 , 'register' # 4 , 'xsite-xfer' # 5 , 'createdir' # 6 , 's-compute' # 7 -- deprecated , 'clean-up' # 8 , 'chmod' # 9 , 'subdax' # 10 , 'subdag' # 11 ); @main::jobclass_desc = ( 'unknown (do not use)' , 'regular computation job' , 'auxilliary stage-in transfer job' , 'auxilliary stage-out transfer job' , 'auxilliary replica registration job' , 'auxilliary inter-site transfer job' , 'auxilliary createdir job' , 'remote compute job (do not use)' , 'auxilliary clean-up job' , 'auxilliary chmod job' , 'unplanned DAX sub-workflow job' , 'planned DAG sub-workflow job' ); @main::jobshort = # short job class (job type) ( '-', 'job', 'si', 'so' , 'rr', 'isx', 'cd', 'stc' , 'clu', 'chm', 'dax', 'dag' ); $main::time = $^T; # initialization only %main::dagman_p = # predicate to determine variations on DAGMan map { $_ => 1 } qw(pegasus-dagman condor_dagman); %main::width = # width selection (0=unlimited) ( dagnodename => 30 # width of concrete dag node identifiers , pegasus_wf_name => 24 # width of abstract workflow identifiers , cmd => 20 # width of executable that is actually run ); $main::cache = undef; # debug my ($rows,$cols) = &initialize_winch; # %qtitle describes any head we would want to show, # indexed by a short internal key: # {header} is what to put into the title of the output # {function} is an fptr, being called with current row (q) job classads # {minwidth} is an minimal width, with negative width meaning left adjustment # {legend} is what to display for this column in the legend # my %qtitle = ( 'STAT' => { header => 'STAT' , function => \&x_jobstatus , minwidth => -4 , legend => 'Condor job status' }, 'S' => { header => 'S' , function => \&x_cstat , minwidth => 1 , legend => 'Condor job status' }, 'IN_STATE' => { header => 'IN_STATE' , function => \&x_in_state , minwidth => 8 , legend => 'Time job spent in current Condor status' }, 'JPRIO' => { header => 'PRI' , minwidth => 3 , function => \&x_jobpriority , legend => 'Condor job priority' }, 'PJC' => { header => 'CLASS' , minwidth => -5 , function => \&x_pegasus_jobtype , legend => 'Pegasus job type' }, 'PJCN' => { header => 'C' , minwidth => 1 , function => \&x_pegasus_jobtypenum , legend => 
'Pegasus job type' }, 'PJCS' => { header => 'PJC' , minwidth => -3 , function => \&x_pegasus_jobtypeshort , legend => 'Pegasus job type' }, 'JOB1' => { header => 'JOB' , minwidth => -50 , function => \&x_job1 , legend => 'Workflow- or DAG-Node ID' }, 'JOB2' => { header => 'JOB' , minwidth => -32 , function => \&x_job2 , legend => 'DAG-Node ID, command, and workflow-ID' }, 'CONDORID' => { header => 'ID' , minwidth => 2 , function => \&x_condorid , legend => 'Condor cluster ID' }, 'SITE' => { header => 'SITE' , minwidth => -5 , function => \&x_site , legend => 'Job site' }, 'C/G' => { header => 'C/G' , function => \&x_cgstatus , minwidth => 3 , legend => 'Condor- and Globus job status' } ); # @main::qtitle is an array of default outputs, indexed by 'expert # level'. Each entry is a key into %qtitle. (Eventually, this can # be overwritten by a CLI option for your own mix-n-match (TBD).) @main::qtitle = ( # first level is the novice state, leave out distractive information [qw(STAT IN_STATE JOB1) ], # some more complex information in first expert level [qw(CONDORID S IN_STATE JPRIO JOB2) ], # even more complex information in next expert level [qw(CONDORID C/G IN_STATE JPRIO PJCS SITE JOB2) ] ); # %dtitle describes any head we would want to show, # indexed by a short internal key: # {header} is what to put into the title of the output # {function} is an fptr, being called with current row (dag) workflow entry # {minwidth} is an minimal width, with negative width meaning left adjustment # {legend} is what to display for this column in the legend # my %dtitle = ( 'DONE' => { header => 'DONE' , function => \&y_dag_done , minwidth => 5 , legend => 'Job completed with success' }, 'PRE' => { header => 'PRE', , function => \&y_dag_pre , minwidth => 5 , legend => 'PRE-Scripts running' }, 'QUEUED' => { header => 'IN_Q', , function => \&y_dag_queued , minwidth => 5 , legend => 'Submitted jobs' }, 'POST' => { header => 'POST', , function => \&y_dag_post , minwidth => 5 , legend => 'POST-Scripts running' }, 'READY' => { header => 'READY', , function => \&y_dag_ready , minwidth => 5 , legend => 'Jobs ready for submission' }, 'UNREADY' => { header => 'UNRDY', , function => \&y_dag_unready , minwidth => 5 , legend => 'Jobs blocked by dependencies' }, 'FAILED' => { header => 'FAIL', , function => \&y_dag_failed , minwidth => 5 , legend => 'Jobs completed with failure' }, 'TOTAL' => { header => 'TOTAL' , function => \&y_dag_total , minwidth => 5 , legend => 'Jobs in workflow' }, 'S_DONE' => { header => 'SUCCESS' , function => \&y_dag_done , minwidth => 7 , legend => 'Job completed with success' }, 'S_PRE' => { header => 'PRE', , function => \&y_dag_pre , minwidth => 7 , legend => 'PRE-Scripts running' }, 'S_QUEUED' => { header => 'QUEUED', , function => \&y_dag_queued , minwidth => 7 , legend => 'Submitted jobs' }, 'S_POST' => { header => 'POST', , function => \&y_dag_post , minwidth => 7 , legend => 'POST-Scripts running' }, 'S_READY' => { header => 'READY', , function => \&y_dag_ready , minwidth => 7 , legend => 'Jobs ready for submission' }, 'S_UNREADY' => { header => 'UNREADY', , function => \&y_dag_unready , minwidth => 7 , legend => 'Jobs blocked by dependencies' }, 'S_FAILED' => { header => 'FAILURE', , function => \&y_dag_failed , minwidth => 7 , legend => 'Jobs completed with failure' }, 'S_TOTAL' => { header => 'TOTAL' , function => \&y_dag_total , minwidth => 7 , legend => 'Total of jobs' }, 'S' => { header => 'S' , function => \&y_dstat , minwidth => 1 , legend => 'Workflow state' }, 'DOFT' => { 
header => 'D/T' , function => \&y_done_total , minwidth => 3 , legend => 'Jobs done of total' }, 'PERCENT' => { header => '%DONE' , function => \&y_percent , minwidth => 5 , legend => 'Success percentage' }, 'STATE' => { header => 'STATE' , function => \&y_dlong , minwidth => -7 , legend => 'Workflow state' }, 'EC' => { header => 'EC' , function => \&y_status , minwidth => 1 , legend => 'Workflow exit status' }, 'WORKFLOW' => { header => 'DAGNAME' , function => \&y_name , minwidth => -40 , legend => 'Name of workflow' } ); # @main::dtitle is an array of default outputs, currently fixed. Each # entry is a key into %qtitle. (Eventually, this can be overwritten by a # CLI option for your own mix-n-match (TBD).) @main::dtitle = ( # summary mode [ qw(S_UNREADY S_READY S_PRE S_QUEUED S_POST S_DONE S_FAILED PERCENT) ] # show subdag mode , [ qw(UNREADY READY PRE QUEUED POST DONE FAILED PERCENT STATE WORKFLOW) ], ); # # --- functions -------------------------------------------- # sub usage(;$) { my $msg = shift; my $flag = defined $msg && lc($msg) ne 'help'; if ( $flag ) { print $main::bold if $main::color; print "ERROR: $msg\n"; print $main::reset if $main::color; } my $app = basename($0); print << "EOF"; Usage: $app [options ] [dagdir] $app helps monitor a workflow by querying Condor and directories. Optional arguments: -h|--help print this help and exit. -V|--version print version information and exit. -w|--watch [s] repeatedly print output every 's' seconds, default 60. -L|--[no]legend Enable or disable showing of the legends, default off. -c|--[no]color Enable or disable ANSI colors, default off. -U|--[no]utf8 Enable or disable UTF-8 graphics, default from \$LANG. Optional arguments affecting Condor Q output: -Q|--[no]queue Disable or enable Condor Q output, default is on. -v|--verbose increase expert level. -d|--debug increase debug level (Pegasus debugging only). -u|--user name monitor jobs for user 'name', default is $main::user. -i|--[no]idle Omit jobs in state 'idle' from output. --[no]held Disable or enable showing HoldReason, default on. --[no]heavy Disable or enable heavy Unicode lines, default on. -j|--jobtype jt *Only show jobs of type 'jt', default is all jobs. (run with 'jt' of 'help' to see available job types.) -s|--site sid *Only show jobs running on site 'sid', default is all sites. Optional arguments affecting DAG output: rundir directory to monitor, default is CWD. -l|--long Show all DAG states, including sub-DAGs, default only totals. -r|--rows Show summary in rows, not columns. Mutually-exclusive wrt -l. -S|--[no]success Omit 'Success' workflows from --long output, default show. (*) denotes a multi-option, which may be given multiple times or comma lists. EOF exit ($flag ? 
1 : 0); } sub myversion() { my $version = version(); print "Pegasus $version, @{[basename($0)]} $main::revision\n"; exit 0; } sub profile_start($) { my $fn = shift; if ( CORE::open( $main::profile, ">$fn" ) ) { profile_log('start'); } } sub profile_log { if ( defined $main::profile ) { printf { $main::profile } "%s\n", join(' ', isomsdate(), @_ ); } } sub profile_done { profile_log('final'); close $main::profile; undef $main::profile; } END { profile_done if defined $main::profile } sub trim($) { # purpose: remove leading and trailing whitespace, quotes around # paramtr: $s (IN): a string # returns: possibly shortened string # my $s = shift; $s =~ s/^\s+//; $s =~ s/\s+$//; $s = substr($s,1,-1) if substr($s,0,1) =~ /[""'']/; $s; } sub commas($) { # purpose: put commas to separate engineering dimensions # paramtr: $x (IN): numerical string # returns: string with commas inserted as necessary # warning: assumes english locale # my $text = reverse shift(); $text =~ s/(\d{3})(?=\d)(?!\d*\.)/$1,/g; return scalar reverse $text; } sub plural($$) { # purpose: print number space item. Add plural-s if number != 1 # paramtr: $n (IN): count # $s (IN): item string # returns: constructed string with proper plural # my $n = shift; my $s = shift; return "$n $s" if $n == 1; my $last = substr($s,-1); if ( $last eq 'y' ) { commas($n) . ' ' . substr($s,0,-1) . 'ies'; } elsif ( $last eq 's' ) { commas($n) . " ${s}es"; } else { commas($n) . " ${s}s"; } } sub initialize_winch { # purpose: determine rows and columns of current window # returns: ($rows,$cols) # warning: Make sure that this function stays POSIX signal safe! # my $r = $ENV{LINES} || 25; my $c = $ENV{COLUMNS} || 80; if ( $main::onatty ) { my $ws = pack('S!4',()); if ( defined &TIOCGWINSZ && ioctl( STDOUT, &TIOCGWINSZ, $ws ) ) { ($r,$c) = unpack("S!4",$ws); } } else { $r = $c = 1E10; # virtually unlimited } ($r,$c); } sub sigwinch { # purpose: adjust global $rows and $cols when window size changes # globals: $rows (OUT): new row count # $cols (OUT): new column count # warning: Make sure that this function stays POSIX signal safe! # warning: Make sure the handler is only installed for ttys! # my $ws = pack('S!4',()); if ( defined &TIOCGWINSZ && ioctl( STDOUT, &TIOCGWINSZ, $ws ) ) { ($rows,$cols) = unpack("S!4",$ws); } } sub interval($) { # purpose: convert a number of seconds into days, hours, mins, secs # paramtr: $s (IN): total number of seconds # returns: formatted string with or without days # use integer; my $total = int( shift() ); my $s = $total % 60; my $m = ($total % 3600) / 60; if ( $total < 3600 ) { # no days or hours sprintf "%02d:%02d", $m, $s; } elsif ( $total < 86400 ) { # no days, don't show days sprintf "%02d:%02d:%02d", ($total / 3600), $m, $s; } else { my $h = ($total % 86400) / 3600; my $d = $total / 86400; sprintf "%d+%02d:%02d:%02d", $d, $h, $m, $s; } } sub fit($$) { # purpose: fit a string into a given width, truncate start or end # paramtr: $width (IN): maximum space # $s (IN): input string to format # returns: formatted string # my $width = shift; my $s = shift; my $len = length($s); if ( $width != 0 && $len > abs($width) ) { if ( $width < 0 ) { # fit from back '..' . substr($s,($len+$width)+2); } else { # forward fit substr($s,0,$width-2) . 
'..'; } } else { $s; } } sub cfit($$) { # purpose: fit a string into a given width, truncate center # paramtr: $width (IN): maximum space # $s (IN): string to fit # returns: fitted string # my $width = abs( shift() ); my $s = shift; my $len = length($s); if ( $width == 0 || $len <= $width ) { # string fits $s; } else { use integer; my $diff = $len - $width + 2; substr( $s, 0, ($len-$diff)/2 ) . '..' . substr( $s, ($len+$diff)/2 ); } } sub headline($$$;$) { # purpose: format header from 3 strings # paramtr: $left (IN): what to put on left side # $center (IN): what to put into center # $right (IN): what to put on right side # $width (opt. IN): total width (typically terminal) # globals: $cols (IN): terminal width default # returns: formatted string # my $l = shift || ''; my $c = shift || ''; my $r = shift || ''; my $width = shift || $cols; my $llen = length($l); my $clen = length($c); my $rlen = length($r); if ( $llen+$clen+$rlen > $width ) { # FIXME: fit strings $l . $c . $r; } else { # adjust strings use integer; my $room = $width - $llen - $clen - $rlen; my $x = ' ' x ( $room / 2 ); if ( ( $room & 1 ) == 1 ) { # odd $l . $x . ' ' . $c . $x . $r; } else { # even $l . $x . $c . $x . $r; } } } sub whittle_down(\%\@\@) { # purpose: taken a full input set (Q or DAG), and reduce to only # columns that we'll show # paramtr: %title (IN): hash of all available title definitions # @title (IN): current set of columns to show # @input (IN): array of job classads or workflows # returns: array of columns to show for the given input # my $avail = shift; # %[qd]title my $title = shift; # @[qd]title my $input = shift; # @q or @dags my @result = (); foreach my $row ( @{$input} ) { my @y = (); # all columns go into @y foreach my $k ( @{$title} ) { confess "FATAL: title \"$k\" does not exist" unless exists $avail->{$k}; push( @y, &{$avail->{$k}->{function}}( $row ) ); } push( @result, [ @y ] ); # a row goes into @result } @result; } sub signum($) { # purpose: sign (lat.: signum) function # paramtr: $x (IN) number # returns: -1 for negative $x, +1 for positive $x and 0 for $x==0 # warning: comparison with 0 should use |x| < epsilon for floats. # my $x = shift; ( $x < 0 ? -1 : ( $x > 0 ? 1 : 0 ) ); } sub column_widths(\%\@\@) { # purpose: compute width of output column from data requirements # paramtr: %title (IN): minimum column width comes from this # @title (IN): currently selected set of columns # @input (IN): whittled down input rows (array of arrays) # globals: $main::space (IN): current column spacing # $cols (IN): terminal width # returns: array of column widths. # warning: The last column is (attempted to) adjust to the screen width # my $avail = shift; my $title = shift; my $input = shift; my @max = (); my @sgn = (); # start with title minimum width, separating sign and magnitude foreach my $k ( @{$title} ) { my $x = $avail->{$k}{minwidth}; push( @max, abs($x) ); push( @sgn, signum($x) ); } # determine the width of each column, but no smaller than the title width foreach my $row ( @{$input} ) { for ( my $i=0; $i < @{$row}; ++$i ) { my $len = length( $row->[$i] ); $max[$i] = $len if $len > $max[$i]; } } # fix last column to match maximum terminal width my $s = 0; my $l = length( $main::space ); for ( my $i=0; $i < @max; ++$i ) { $s += $max[$i] + $l; } if ( $s > $cols ) { $s -= $max[$#max]; $max[$#max] = $cols - $s; $max[$#max] = 0 if $max[$#max] < 0; } # return results map { $max[$_] * $sgn[$_] } 0 .. 
$#max; } sub create_legend(\%\@) { # purpose: show the legend based on a title (both: Q and DAG) # paramtr: %title (IN): hash of all available title defs # @title (IN): select titles # globals: $cols (IN): current terminal width # $main::color (IN): whether to use ANSI colors # $main::bold (IN): turn on bold # $main::reset (IN): turn off bold # returns: scalar: string containing the legend # array: [0] string containing legend # [1] rows required to show legend # my $avail = shift; # %[qd]title ref my $title = shift; # @[qd]title ref my $result = ''; my $cursor = 0; my $p = 0; for ( my $i=0; $i<@{$title}; ++$i ) { my $k = $avail->{ $title->[$i] }{header}; my $v = $avail->{ $title->[$i] }{legend}; my $l = length($k) + length($v); my $s = ''; $s .= $main::bold if $main::color; $s .= $k; $s .= $main::reset if $main::color; $s .= ': ' . $v; if ( $p + $l + 2 > $cols ) { $result .= "\n$s"; ++$cursor; $p = $l + 2; } else { $result .= ' ' if $i; $result .= $s; $p += $l + 3; } } $result .= "\n\n"; $cursor += 2; wantarray ? ( $result, $cursor ) : $result; } sub kickstart($) { # purpose: remove kickstart arguments from commandline # paramtr: job classad 'Arguments' value # returns: remaining commandline, with kickstart removed # warning: also applies 'basename' to all absolute filenames # my @arg = split /\s+/, shift(); # FIXME: deal with quoting properly! my @result = (); my $state = 0; for ( my $i=0; $i<@arg; ++$i ) { if ( $state == 0 ) { if ( substr($arg[$i],0,1) eq '-' ) { my $opt = substr($arg[$i],1,1); if ( index('ioelnNRBLTIwWSs',$opt) >= 0 ) { # skip argument ++$i; } elsif ( index('HVX',$opt) >= 0 ) { # do nothing } else { warn "Warning: Unknown kickstart argument $arg[$i]\n"; } } else { # this better be the application that we are starting $state = 1; push( @result, basename($arg[$i]) ); } } else { # we can only apply basename to absolute filenames, because # those are the only element we can recognize as such. if ( substr($arg[$i],0,1) eq $main::dirsep ) { push( @result, basename($arg[$i]) ); } else { push( @result, $arg[$i] ); } } } wantarray ? @result : join( ' ', @result ); } sub seqexec(\%) { # purpose: count number of jobs in seqexec input file # paramtr: %r (IN): job class ad representation # returns: number of seqexec sub-jobs # my $r = shift; my $result = 0; local(*S); my $fn = File::Spec->rel2abs( $r->{in}, $r->{iwd} ); profile_log( "open $fn" ) if defined $main::profile; if ( open( S, "<$fn" ) ) { my @ok = (); while ( <S> ) { s/[ \r\n]+$//; s/\#.*//; next if length($_) < 3; push( @ok, $_ ); } close S; $result = @ok+0; } else { warn "Warning: open $fn: $!, skipping\n" if $main::debug > 3; } $result; } sub cstat($) { # purpose: parse condor job state into string. # paramtr: $s (IN): job classad 'JobStatus' # returns: string representing Condor job state # my $s = shift; $s < @main::cstat ? $main::cstat[$s] : "$s"; } sub gstat($) { # purpose: parse condor job globus state into string. # paramtr: $s (IN): job classad 'GlobusStatus' (may be undef) # returns: string representing Globus job state # my $s = shift; if ( defined $s ) { exists $main::gstat{$s} ? 
$main::gstat{$s} : "$s"; } else { '-'; } } sub parsersl($) { # purpose: Parse a Globus RSL string into hash # paramtr: $rsl (IN): RSL string # returns: hash representing RSL values # warning: all keys will be canonicalized # my %result = (); local $_ = shift; while ( /\(([^)]+)\)/g ) { my ($k,$v) = split /=/, $1, 2; $k =~ s/[-_]//g; $result{lc $k} = $v; } %result; } sub condor_q(\%\%\@;%) { # purpose: Parse entire Condor-Q into hash of job classad hashes # paramtr: %jobs (OUT): parsed job classads indexed by 'clusterid' # %dags (OUT): maps dagmanjobid to array of clusterids # @t (IN): ask condor_q only for these keys # %flag (IN): key value pairs controlling behavior # globals: $main::user (IN): which user to limit output to # %qtitle (IN): determines which keys to ask for # my $jobref = shift; my $dagref = shift; my $t = shift; my %flags = ( @_ ); my $constraint = ''; # determine root wf uuid from workdir if ( exists $flags{rootuuid} ) { my $s = ''; foreach my $n ( @{ $flags{rootuuid} } ) { $s .= ' || ' if $s; $s .= "(pegasus_root_wf_uuid==\\\"$n\\\")"; } if ( $s ) { if ( $constraint ) { $constraint .= " && ( $s )"; } else { $constraint = "( $s )"; } } } # determine extra jobclass constraints if ( exists $flags{jobtypes} ) { my $s = ''; foreach my $n ( @{ $flags{jobtypes} } ) { $s .= ' || ' if $s; $s .= "(pegasus_job_class==$n)"; } if ( $s ) { if ( $constraint ) { $constraint .= " && ( $s )"; } else { $constraint = "( $s )"; } } } # determine extra jobsites constraints if ( exists $flags{jobsites} ) { my $s = ''; foreach my $site ( @{ $flags{jobsites} } ) { $s .= ' || ' if $s; $s .= "(pegasus_site==\\\"$site\\\")"; } if ( $s ) { if ( $constraint ) { $constraint .= " && ( $s )"; } else { $constraint = "( $s )"; } } } # finalize constraints if ( $constraint ) { $constraint = "-constraint \"$constraint\""; } local(*Q); my $condor_q = find_exec('condor_q') || die "FATAL: Unable to find 'condor_q' in your PATH.\n"; if ( defined $main::cache && $main::cache ) { # see --cache flag -- THIS IS ONLY FOR DEBUGGING open( Q, $main::cache ) || die "open $main::cache: $!\n"; } else { # FIXME: 'condor_q' is expensive. Find better restrictions! warn "# $condor_q -l $main::user $constraint\n" if $main::debug; open( Q, "$condor_q -l $main::user $constraint|" ) || die "FATAL: Unable to execute $condor_q -l $main::user $constraint: $!\n"; } # skip intro while ( <Q> ) { last if /^--/; } my (%db,@x); while ( <Q> ) { s/[\r\n]+$//; # safe chomp if ( length($_) > 2 ) { # regular job classad @x = split /\s+=\s+/, $_, 2; die "this must not happen!" if exists $db{lc($x[0])}; $db{lc($x[0])} = trim($x[1]); } else { my $id = $db{clusterid}; # extra sanity? die "nothing in queue?" unless scalar keys %db; die "nothing in cluster" unless defined $id; # noidle for Mats unless ( $flags{noidle} && $db{jobstatus} == 1 ) { # add parsed job classads to %job $jobref->{$id} = { %db }; # Add job belonging to a dagman to %dag # A dagman job will always be started prior to the job # it starts, thus the (condor) job for the DAGMan exists # for alive DAGMans. if ( exists $db{dagmanjobid} && exists $jobref->{ $db{dagmanjobid} } ) { push( @{ $dagref->{ $db{dagmanjobid} } }, $id ); } else { # we need this branch for Condor jobs not managed by # DAGMan, or for Condor jobs whose parent DAGMan died. 
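# To illustrate the two hashes built here (the cluster IDs are hypothetical):
# a DAGMan running as Condor cluster 100 that manages jobs 101 and 102 yields
#   %jobs: ( 100 => {..classads..}, 101 => {..classads..}, 102 => {..classads..} )
#   %dags: ( 100 => [ 101, 102 ] )
# and assemble_dag() below walks %dags to indent each job under its DAGMan.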
$dagref->{$id} = [] unless exists $dagref->{$id}; } } # bookeeping if ( exists $flags{count} ) { $flags{count}{condor}{ $db{jobstatus} }++; $flags{count}{all}{condor}++; if ( exists $db{globusstatus} ) { $flags{count}{globus}{ $db{globusstatus} }++; $flags{count}{all}{globus}++; } } # prepare for next round %db = (); } } # extra sanity? warn "Warning: Maybe condor_q output formatting changed?" if scalar keys %db; close Q; if ( $main::debug ) { warn "Warning: condor_q returned ", parse_exit($?), "\n" if $?; } } sub find_leaves(\%) { # purpose: determine which are the top-level jobs to show # paramtr: %dag (IN): dag dependencies # returns: hash of leave jobs in queue # my $dagref = shift; # find children and parents that are dags my (%parent,%leaves); foreach my $d ( keys %{$dagref} ) { foreach my $v ( @{$dagref->{$d}} ) { $parent{$v}{$d} = 1 if exists $dagref->{$v}; } } # find leaves my @fifo = keys %{$dagref}; while ( @fifo ) { my $d = pop(@fifo); if ( exists $parent{$d} ) { push( @fifo, keys %{$parent{$d}} ); } else { $leaves{$d} = 1; } } %leaves; } sub assemble_job($;$) { # purpose: create the data columns for a given job # paramtr: $r (IN): job classad hashref # $indent (IN): what to use for indentation # returns: updated job classad hashref # my $r = shift; # job classad my $indent = shift || ''; # extra sanity? confess "no job?" unless scalar keys %{$r}; $r->{_indent} = $indent; $r; } sub assemble_dag($$$;$$); # { } sub assemble_dag($$$;$$) { # purpose: create the data rows for a given dag job # paramtr: %job (IN): see condor_q # %dag (IN): see condor_q # $dagid (IN): which workflow to assemble # $lastp (opt. IN): last job in parent workflow # $indent (opt. IN): what to use for indentation # returns: ordered list (rows) of job classad refs (cols) # my $jobref = shift; my $dagref = shift; my $dagid = shift; my $lastp = shift; my $indent = shift || ''; my @result = (); # show dagman itself push( @result, assemble_job( $jobref->{$dagid}, $indent ) ); delete $main::seen{$dagid}; # show dependent jobs for dagman # $indent = $main::graph[2] x ( length($indent) / length($main::graph[0]) ); substr( $indent, -3 ) = $main::graph[ 2 + $lastp ] if $indent; my @x = sort { $a <=> $b } @{ $dagref->{$dagid} }; for ( my $j=0; $j < @x; ++$j ) { # extra sanity? die "unknown job" unless scalar keys %{$jobref->{$x[$j]}}; my $conn = $main::graph[ $j == $#x ]; if ( exists $dagref->{$x[$j]} ) { push( @result, assemble_dag( $jobref, $dagref, $x[$j], ($j==$#x), "$indent$conn" ) ); } else { push( @result, assemble_job( $jobref->{$x[$j]}, "$indent$conn" ) ); } delete $main::seen{$x[$j]}; } @result; } sub x_site { my $row = shift; $row->{'pegasus_site'} || '-'; } sub x_pegasus_jobtype { my $row = shift; my $c = $row->{'pegasus_job_class'} + 0; $c < @main::jobclass ? $main::jobclass[$c] : "$c"; } sub x_pegasus_jobtypeshort { my $row = shift; my $c = $row->{'pegasus_job_class'} + 0; $c < @main::jobshort ? $main::jobshort[$c] : "$c"; } sub x_pegasus_jobtypenum { my $row = shift; $row->{'pegasus_job_class'} || '-'; } sub x_in_state { my $row = shift; interval( $main::time - $row->{enteredcurrentstatus} ); } sub x_jobpriority { my $row = shift; $row->{jobprio}; } sub x_jobstatus { my $row = shift; my $s = $row->{jobstatus}; $s < @main::clong ? $main::clong[$s] : "$s"; } sub x_cstat { my $row = shift; cstat( $row->{jobstatus} ); } sub x_cgstatus { my $row = shift; cstat( $row->{jobstatus} ) . '/' . 
gstat( $row->{globusstatus} ); } sub x_condorid { my $row = shift; $row->{clusterid}; } sub x_job1 { my $row = shift; my $result = ''; if ( exists $row->{dagnodename} ) { $result = $row->{dagnodename}; } elsif ( exists $row->{'pegasus_wf_name'} ) { $result = $row->{'pegasus_wf_name'}; } else { my $cmd = basename( $row->{cmd} || '' ); if ( $cmd eq 'kickstart' ) { my @x = kickstart($row->{arguments}); $result = '*' . $x[0]; } else { $result = $cmd; } } if ( $main::color ) { if ( $row->{'pegasus_job_class'} > 9 ) { "\033[0;37m" . $row->{_indent} . $main::reset . $result . $main::ccolor[ $row->{jobstatus} ]; } else { "\033[0;37m" . $row->{_indent} . $main::ccolor[ $row->{jobstatus} ] . $result; } } else { $row->{_indent} . $result; } } sub x_job2 { my $row = shift; my $result = ''; # show dagnodename first if ( exists $row->{dagnodename} ) { $result .= cfit( $main::width{dagnodename}, $row->{dagnodename} ); } elsif ( exists $row->{'pegasus_wf_name'} ) { $result .= cfit( $main::width{'pegasus_wf_name'}, $row->{'pegasus_wf_name'} ); } # replace commandline ('cmd' and 'arguments') my $cmd = basename( $row->{cmd} || '' ); if ( $cmd eq 'kickstart' ) { $cmd = (kickstart($row->{arguments}))[0]; $result .= ' [*' . cfit( $main::width{cmd}, $cmd ) . ']'; } else { $result .= ' [' . cfit( $main::width{cmd}, $cmd ); if ( exists $row->{'pegasus_cluster_size'} ) { my $n = $row->{'pegasus_cluster_size'} + 0; if ( $cmd eq 'seqexec' || $n > 1 ) { $result .= ": $n"; } } $result .= ']'; } if ( $main::color ) { if ( $row->{'pegasus_job_class'} > 9 ) { "\033[0;37m" . $row->{_indent} . $main::reset . $result . $main::ccolor[ $row->{jobstatus} ]; } else { "\033[0;37m" . $row->{_indent} . $main::ccolor[ $row->{jobstatus} ] . $result; } } else { $row->{_indent} . $result; } } sub q_print_debug($$$$\%) { # purpose: show job classads of certain matches for current job # paramtr: $cursor (IN): current row # $reserve (IN): how much space to reserve # $watch (IN): are we in watch mode? # $match (IN): regular expression of classads to match # %q[i] (IN): current job class ads # globals: $main::color (IN): whether to use ANSI colors # $cols (IN): current terminal width # $rows (IN): current terminal height # returns: new cursor position # my $cursor = shift; # current row my $reserve = shift; # current $reserve my $watch = shift; # current $watch my $match = shift; # what classads to match my $qi = shift; # $q[$i] ref my $p = $cols + $cols; my ($s); foreach my $k ( sort keys %{$qi} ) { if ( $watch && $cursor > $rows-$reserve-1 ) { print " .."; last; } if ( $k =~ /$match/o ) { my $v = $qi->{$k}; my $l = length($k) + length($v); $s = ''; $s .= "\033[1;30m" if $main::color; $s .= $k; $s .= $main::reset if $main::color; $s .= '=' . $v; if ( $p+$l+2 > $cols ) { print "\n\t$s"; ++$cursor; $p = $l+9; } else { print " $s"; $p += $l+2; } } } $cursor; } sub q_print_summary($\%) { # purpose: print summary line adding stats of Condor and Condor-G # paramtr: $cursor (IN): current row # %count (IN): queue statistics # globals: $main::color (IN): whether to use ANSI colors # @main::ccolor (IN): color settings # $main::reset (IN): undo colorings # returns: new cursor position # my $cursor = shift; # current row my $cref = shift; # %count ref # create Condor job summary print "Summary: ", plural($cref->{all}{condor},'Condor job'), " total"; my $f = 0; foreach my $c ( sort { $a <=> $b } keys %{$cref->{condor}} ) { print( $f++ ? 
' ' : ' (' ); print $main::ccolor[$c] if $main::color; print $main::cstat[$c], ':', commas($cref->{condor}{$c}); print $main::reset if $main::color; } print ')' if $f; # Globus job summary if ( exists $cref->{all}{globus} && $cref->{all}{globus} > 0 ) { print ", ", plural( $cref->{all}{globus}, 'Condor-G job' ); $f = 0; foreach my $g ( sort { $a <=> $b } keys %{$cref->{globus}} ) { print( $f++ ? ' ' : ' (' ); print $main::gstat{$g}, ':', commas($cref->{globus}{$g}); } print ')' if $f; } print "\n"; ++$cursor; } sub dag_recurse(\@$$); # { } sub dag_recurse(\@$$) { local(*DIR); my $dirsref = shift; my $dir = shift; my $level = shift; profile_log( "opendir $dir" ) if defined $main::profile; if ( opendir( DIR, $dir ) ) { my ($file,$full); while ( defined ($file = readdir(DIR)) ) { next if ( $file eq '.' || $file eq '..' ); next if ( $file =~ /\.\d{3}$/ ); # NEW $full = File::Spec->catfile( $dir, $file ); if ( -d $full ) { dag_recurse( @{$dirsref}, $full, $level+1 ); } elsif ( $file =~ /\.dag\.dagman\.out$/ ) { push( @{ $dirsref->[$level] }, $full ); } } closedir DIR; } else { warn "Warning: Unable to open $dir: $!, ignoring\n"; } } sub dag_get_subdag(\@$$) { my $dirsref = shift; my $dag = shift; my $level = shift; local(*F); profile_log( "open $dag" ) if defined $main::profile; if ( open( F, "<$dag" ) ) { my @subdags = (); while ( <F> ) { push( @subdags, $1 ) if /^SUBDAG EXTERNAL \S+ (\S+\.dag)($| DIR)/; } close F; foreach my $s ( @subdags ) { my $dagman = $s . '.dagman.out'; push( @{$dirsref->[$level]}, $dagman ) if ( -e $dagman && -f _ && -r _ && ! -z _ ); } } else { warn "Warning: open $dag: $!, ignoring\n"; } } sub dag_process_tab(@) { my @keys = split /\s+/, shift(); shift; # unused my @vals = split /\s+/, shift(); my @result = (); my $state = 0; my $total = 0; for ( my $i=0; $i < @keys; ++$i ) { # find where keys start ++$state if lc($keys[$i]) eq 'done'; next unless $state; # keep ordering by using an array (that is convertible into a # hash). However, remove any punctuation stuff from keys $keys[$i] =~ s/[^[:alnum:]]//g; push( @result, lc($keys[$i]) => $vals[$i] ); $total += $vals[$i]; } ( @result, 'total', $total ); } my $re1 = qr{\*\scondor_scheduniv_exec\.([0-9.]+)\s\(CONDOR_DAGMAN\)\sSTARTING\sUP}; my $re2 = qr{\*\scondor_scheduniv_exec\.([0-9.]+)\s\(condor_DAGMAN\)\spid\s\d+\sEXITING\sWITH\sSTATUS\s(\S+)}; sub dag_status($$;%) { my $run = shift; my $dagfn = shift; my %flags = ( @_ ); # optional my @dirs = (); dag_recurse(@dirs,$run,0); dag_get_subdag( @dirs, File::Spec->catfile($run,$dagfn), 1 ); my $dolen = length( '.dagman.out' ); my @result = (); my $lastfn = $dirs[0][0]; # master workflow foreach my $d ( reverse @dirs ) { next unless defined $d; foreach my $fn ( @{$d} ) { my @tab = (); my ($start,$final,$pid,$status); local(*F); profile_log( "open $fn" ) if defined $main::profile; if ( open( F, "<$fn" ) ) { while ( <F> ) { # none of these will be in the same line. order by frequency if ( index($_,'Done') > 0 ) { $tab[0] = $_; $tab[1] = <F>; $tab[2] = <F>; } elsif ( /$re1/o ) { ($start,$final) = ($1,''); } elsif ( /\*\*\s+PID\s+=\s+(\d+)/ ) { $pid=$1; } elsif ( /$re2/o ) { ($final,$status) = ($1,$2); } } close F; } else { warn "Warning: open $fn: $!\n"; } my $short = ( $fn =~ /^$run/o ? substr( $fn, length($run)+1, -$dolen ) : substr( $fn, 0, -$dolen ) ); my $state = 0; # unknown if ( $start ne $final ) { if ( kill( 0, $pid ) ) { $state = 2; # running } else { $state = 1; # undeterminable } } else { # finished: success (3) or failure (4) $state = ( $status == 0 ? 
3 : 4 ); } # I need this separately for bookeeping my %detail = dag_process_tab(@tab); # tinker with the job count. The "master" workflow dagman is # not counted in any of job counts, yet it does appear in # the Condor Q. if ( $fn eq $lastfn ) { if ( $state == 0 ) { # map unknown to unready $detail{unready}++; } elsif ( $state == 1 || $state == 2 ) { # map interdeterminate and running to queued $detail{queued}++; } elsif ( $state == 3 ) { # map success to done $detail{done}++; } elsif ( $state == 4 ) { # map failure to failed $detail{failed}++; } $detail{total}++; # tag root workflow that we included itself by asterisk $short = '*' . $short; } # nosuccess for my own sanity -- and maybe Mats? unless ( $state == 3 && $flags{nosuccess} ) { push( @result, { name => $short , state => $state , status => $status # may be undef , detail => { %detail } } ); } # bookeeping if ( exists $flags{count} ) { $flags{count}{'_state'}[$state]++; $flags{count}{'_total'}++; while ( my ($k,$v) = each %detail ) { $flags{count}{$k} += $v; } } } } @result; } sub y_dag_done { my $dag = shift; commas( $dag->{detail}->{done} || 0 ); } sub y_dag_pre { my $dag = shift; commas( $dag->{detail}->{pre} || 0 ); } sub y_dag_queued { my $dag = shift; commas( $dag->{detail}->{queued} || 0 ); } sub y_dag_post { my $dag = shift; commas( $dag->{detail}->{post} || 0 ); } sub y_dag_ready { my $dag = shift; commas( $dag->{detail}->{ready} || 0 ); } sub y_dag_unready { my $dag = shift; commas( $dag->{detail}->{unready} || 0 ); } sub y_dag_failed { my $dag = shift; commas( $dag->{detail}->{failed} || 0 ); } sub y_dag_total { my $dag = shift; commas( $dag->{detail}->{total} || 0 ); } sub y_percent { my $dag = shift; my $done = $dag->{detail}->{done}+0; my $total = $dag->{detail}->{total}+0; my $percent = ( $total == 0 ) ? 0 : ( 100.0 * $done / $total ); sprintf "%.1f", $percent; } sub y_done_total { my $dag = shift; commas( $dag->{detail}->{done} || 0 ) . '/' . commas( $dag->{detail}->{total} || 0 ); } sub y_dstat { my $dag = shift; my $s = $dag->{state}+0; $s < @main::dstat ? $main::dstat[$s] : ''; } sub y_dlong { my $dag = shift; my $s = $dag->{state}+0; $s < @main::dlong ? $main::dlong[$s] : ''; } sub y_status { my $dag = shift; my $x = $dag->{status}; defined $x ? parse_exit($x) : 'n.a'; } sub y_name { my $dag = shift; my $result = $dag->{name} || ''; $result; } sub dag_print_summary($\%) { # purpose: print summary line adding stats of workflows # paramtr: $cursor (IN): current row # %count (IN): queue statistics # globals: $main::color (IN): whether to use ANSI colors # @main::dcolor (IN): color settings # $main::reset (IN): undo colorings # returns: new cursor position # my $cursor = shift; # current row my $totals = shift; # %totals ref # create workflow summary print( "Summary: ", plural( $totals->{'_total'}, 'DAG' ), " total" ); if ( $totals->{'_total'} > 0 ) { my $f = 0; for ( my $i=0; $i < @main::dcolor; ++$i ) { my $x = $totals->{_state}; if ( $x->[$i] > 0 ) { print( $f++ ? 
' ' : ' (' ); print $main::dcolor[$i] if $main::color; print $main::dlong[$i], ':', commas($x->[$i]); print $main::reset if $main::color; } } print ")" if $f; } print "\n"; ++$cursor; } # # --- main ------------------------------------------------- # binmode( STDOUT, ':utf8' ) if $main::isutf8; # parse CLI options my $heldinfo = 1; my $heavy = 1; my $queue = 1; my $showidle = 1; my $success = 1; $main::expert = 0; my $legend = 0; my $show_subdag = 0; my $classads = 0; my $vertical = 0; my ($watch,@jobtypes,@jobsites); GetOptions( 'help|h' => \&usage , 'user|u=s' => \$main::user , 'debug|d+' => \$main::debug , 'verbose|v+' => \$main::expert , 'color|c!' => \$main::color , 'utf8|U!' => \$main::isutf8 , 'version|V' => \&myversion , 'classad|ca+' => \$classads , 'jobtype|jobclass|j=s' => \@jobtypes , 'site|s=s' => \@jobsites , 'idle|i!' => \$showidle , 'success|S!' => \$success , 'legend|L!' => \$legend , 'queue|Q!' => \$queue , 'hold|held!' => \$heldinfo , 'heavy!' => \$heavy , 'profile=s' => sub { profile_start($_[1]) }, # the next two options are mutually exclusive , 'long|l!' => \$show_subdag , 'rows|row|r!' => \$vertical , 'watch|w:i' => sub { if ( ! $main::onatty ) { warn "FATAL: --watch requires a terminal for output\n"; exit 42; } else { unless ( defined &TIOCGWINSZ ) { warn( "Info: Your Perl installation is incomplete. Your sysadmin could\n", "run h2ph with proper args to create sys/ioctl.ph and friends.\n" ); sleep(3); } } $watch = $_[1] || 60; # once a minute is almost too often } # the next option is for internal debugging only , 'cache=s' => \$main::cache ); binmode( STDOUT, ':utf8' ) if $main::isutf8; # if both are (mistakenly) specified, --long wins over --rows $vertical=0 if ( $show_subdag && $vertical ); # # If the user specified any form of job type/class limitations... # if ( @jobtypes > 0 ) { # make keys unique, merge comma lists my %temp = map { lc($_) => 1 } split( /,/, join(',', @jobtypes) ); # determine valid inputs from @main::job{class,short} and numerical my %valid = ( ( map { $main::jobclass[$_] => $_ } 0..$#main::jobclass ), ( map { $main::jobshort[$_] => $_ } 0..$#main::jobshort ), ( map { $_ => $_ } 0..$#main::jobclass ) ); # determine, if there were any invalid job classes my @invalid = (); foreach my $k ( keys %temp ) { push( @invalid, $k ) unless ( $k eq 'help' || exists $valid{$k} ); } if ( exists $temp{help} || @invalid ) { # this path if 'help' was specified or invalid class specs found print "\n"; # deal with, if any, invalid job class specs if ( @invalid ) { print 'ERROR: ', plural(@invalid,'unrecognized job class'); print ': ', join(', ',@invalid), "\n\n"; } # print list of supported job class specs (omit unknown) print "List of recognized job classes:\n\n"; printf "%2s %-5s %-10s %s\n", 'NR', 'SHORT', 'LONG', 'DESCRIPTION'; for ( my $i=1; $i<@main::jobclass; ++$i ) { printf( "%2d %-5s %-10s %s\n", $i, $main::jobshort[$i], $main::jobclass[$i], $main::jobclass_desc[$i] ); } print "\n"; # in case of invalid spec, exit with an error. 'help' is not an error. exit ( @invalid ? 
1 : 0 ); } else { # all keys look kosher, translate into numbers @jobtypes = sort { $a <=> $b } map { $valid{$_} } keys %temp; } } # # If the user specified site limitations, unique specs # if ( @jobsites ) { @jobsites = sort keys %{{ map { $_ => 1 } split( /,/, join(',', @jobsites) ) }}; } # react to changes in terminal size $SIG{WINCH} = \&sigwinch if ( defined &TIOCGWINSZ && $main::onatty ); # mess with verbosity (expert level) on SIGUSR $SIG{USR1} = sub { ++$main::expert }; $SIG{USR2} = sub { $main::expert-- }; # experts don't need spaces :-P $main::space = ' ' if $main::expert; # determine UTF-8 capabilities if ( $main::isutf8 ) { # Draw UTF-8 graphics # Warning: These are the unicode strings that require 'use utf8;' if ( $heavy ) { @main::graph = ( " ┣━", " ┗━", " ┃ ", " " ); } else { @main::graph = ( " ├─", " └─", " │ ", " " ); } } else { # Assume ASCII graphics @main::graph = ( ' |-', ' \_', ' | ', ' ' ); } # Default $rundir to cwd if nothing was specified my $run = @ARGV ? abs_path(shift()) : getcwd(); my %braindb = slurp_braindb($run); my @rootuuid = (); if ( scalar keys %braindb ) { # we have a rundir push( @rootuuid, $braindb{'root_wf_uuid'} ) if exists $braindb{'root_wf_uuid'}; } else { # no valid rundir undef $run; } # POST-condition: $run is defined if it is a valid rundir # FIXME: Extend to permit multiple rundirs a la @ARGV for ( my $cursor=1; ; $cursor=1 ) { my (%ccount,%job,%dag,@result,@dags) = (); my %dcount = ( _state => [ map { 0 } @main::dstat ] , _total => 0 ); # what level of expertise (output, verbose mode). Eventually a CLI # option will permit to use your own mix-n-match output (TBD). my @qtitle = @{ $main::qtitle[$main::expert] }; my @dtitle = @{ $main::dtitle[$show_subdag] }; my @q = (); if ( $queue ) { # collect information from condor_q profile_log( 'start condor_q' ) if defined $main::profile; condor_q( %job, %dag, @qtitle , noidle => ! $showidle , count => \%ccount , ( @jobtypes ? ( jobtypes => \@jobtypes ) : () ) , ( @jobsites ? ( jobsites => \@jobsites ) : () ) , ( @rootuuid ? ( rootuuid => \@rootuuid ) : () ) ); profile_log( "final condor_q ($?)" ) if defined $main::profile; # %main::seen is for sanity checks %main::seen = map { $_ => 1 } keys %job; # collect data to show into @q profile_log( "start Q sorting" ) if defined $main::profile; my %leaves = find_leaves(%dag); foreach my $id ( sort { $a <=> $b } keys %leaves ) { push( @q, assemble_dag( \%job, \%dag, $id ) ); } profile_log( "final Q sorting" ) if defined $main::profile; } # collect data from $rundir (if applicable) if ( defined $run ) { profile_log( "start dag dir traversal" ) if defined $main::profile; @dags = dag_status( $run, $braindb{dag}, nosuccess => ! 
$success, count => \%dcount ); profile_log( "final dag dir traversal" ) if defined $main::profile; } # construct legends and legend sizes according to terminal my ($qlegend,$ql_size,$dlegend,$dl_size) = ('',0,'',0); if ( $legend ) { ($qlegend,$ql_size) = create_legend( %qtitle, @qtitle ); ($dlegend,$dl_size) = create_legend( %dtitle, @dtitle ); } # empty screen and print "Ctrl+C" and date header my $reserve = 5; if ( defined $watch ) { ($rows,$cols) = initialize_winch(); print "\033[2J\033[H"; $main::time = CORE::time(); my $now = scalar localtime($main::time); my $msg = "Press Ctrl+C to exit"; print headline($msg,"(pid=$$)",$now,$cols), "\n\n"; $cursor += 2; if ( $show_subdag ) { my $nd = @dags; $reserve += $dl_size + $nd + 2; } elsif ( $vertical ) { $reserve += $dl_size + 9; } else { $reserve += $dl_size + 3; } } # Are there are Condor jobs in the Q if ( @q > 0 ) { profile_log( "start Q printing" ) if defined $main::profile; # create data to actually show from potentially larger set @result = whittle_down( %qtitle, @qtitle, @q ); # determine dynamic column widths my @max = column_widths( %qtitle, @qtitle, @result ); # print legend (requested by Ewa) if ( $legend ) { print $qlegend; $cursor += $ql_size; } # print headers print $main::bold if $main::color; for ( my $i=0; $i<@max; ++$i ) { print $main::space if $i; printf "%*s", $max[$i], $qtitle{ $qtitle[$i] }{header}; } print $main::reset if $main::color; print "\n"; ++$cursor; # print each row of results for ( my $i=0; $i<@result; ++$i ) { my $jobstatus = $q[$i]->{jobstatus}; # decide on color for row and print data columns print $main::ccolor[$jobstatus] if $main::color; for ( my $j=0; $j<@{$result[$i]}; ++$j ) { print $main::space if $j; printf "%*s", $max[$j], $result[$i][$j]; } # HELD jobs get a separate line with the hold reason if ( $heldinfo && $jobstatus == 5 ) { my $tile = $main::graph[1]; print "\n", $tile, fit( $cols-length($tile), $q[$i]{holdreason} ); ++$cursor; } # reset color after this print $main::reset if $main::color; # transient trickery for classads mode if ( $classads ) { my $m = $classads == 1 ? qr{^(?:pegasus|wf)_} : qr{^(?:(?:pegasus|wf)_|job|globus)} ; $cursor = q_print_debug( $cursor, $reserve, $watch, $m, %{$q[$i]} ); } # terminate current line print "\n"; ++$cursor; # skip rest of output if reaching bottom of current terminal if ( $watch && @result > $rows-$reserve && $cursor > $rows-$reserve ) { print "(", plural( @result-$i, 'additional job' ); print " omitted.)\n"; ++$cursor; last; } } # create summaries from %ccount $cursor = q_print_summary( $cursor, %ccount ); profile_log( "final Q printing" ) if defined $main::profile; } else { # nothing in Q if ( $queue ) { print "(no matching jobs found in Condor Q)\n"; ++$cursor; } } # are we sane? warn "\n(Debug: I appear to be missing some jobs)\n" if ( scalar %main::seen ); if ( @q > 0 && $dcount{'_total'} > 0 ) { # separate the two sections print "\n"; ++$cursor; } # Is there state in the rundir (is there a rundir)? $reserve = 5; if ( $dcount{'_total'} ) { local $main::space = ' '; # temporarily scoped overwrite profile_log( "start DAG printing" ) if defined $main::profile; # create pseudo-row (last row) with totals push( @dags, { name => "TOTALS (" . plural($dcount{'total'},'job') . 
')', state => 42, status => undef, detail => \%dcount } ); # create data to actually show from larger set @result = whittle_down( %dtitle, @dtitle, @dags ); # determine dynamic column widths my @max = column_widths( %dtitle, @dtitle, @result ); # print legend (requested by Ewa) if ( $legend ) { print $dlegend; $cursor += $dl_size; } # print headers unless ( $vertical ) { print $main::bold if $main::color; for ( my $i=0; $i<@max; ++$i ) { print ' ' if $i; printf "%*s", $max[$i], $dtitle{ $dtitle[$i] }{header}; } print $main::reset if $main::color; print "\n"; ++$cursor; } # print each row of results if ( $show_subdag ) { # exclude pseudo-row with total from this part for ( my $i=0; $i<$#result; ++$i ) { my $dagstate = $dags[$i]->{state}; # decide on a color for row and print data columns print $main::dcolor[$dagstate] if $main::color; for ( my $j=0; $j<@{$result[$i]}; ++$j ) { print ' ' if $j; if ( $j == $#max && length($result[$i][$j]) > abs($max[$j]) ) { $result[$i][$j] = fit( -abs($max[$j]), $result[$i][$j] ); } printf "%*s", $max[$j], $result[$i][$j]; } # reset colors print $main::reset if $main::color; # terminate current line print "\n"; ++$cursor; # skip rest of output if reaching bottom of current terminal my $diff = $rows - $reserve; if ( $watch && $#result > $diff && $cursor > $diff ) { print '(', plural( $#result-$i, 'additional workflow' ); print " omitted.)\n"; ++$cursor; last; } } } # print totals here if ( ! $show_subdag || $dcount{'_total'} > 1 ) { if ( $vertical ) { my $i = $#result; # my $mk = (sort { $b <=> $a } map { length($dtitle{$_}{header}) } @dtitle)[0]; for ( my $j=0; $j<@max; ++$j ) { print $main::bold if $main::color; # printf "%*s: ", $mk, $dtitle{ $dtitle[$j] }{header}; printf "%s: ", $dtitle{ $dtitle[$j] }{header}; print $main::reset if $main::color; printf "%s\n", $result[$i][$j]; ++$cursor; } } else { # decide on a color for row and print data columns print $main::bold if ( $show_subdag && $main::color ); my $i = $#result; for ( my $j=0; $j<@{$result[$i]}; ++$j ) { print ' ' if $j; if ( $j == $#max && length($result[$i][$j]) > abs($max[$j]) ) { $result[$i][$j] = fit( -abs($max[$j]), $result[$i][$j] ); } printf "%*s", $max[$j], $result[$i][$j]; } # reset colors print $main::reset if $main::color; # terminate current line print "\n"; ++$cursor; } } # print summary $cursor = dag_print_summary( $cursor, %dcount ); profile_log( "final DAG printing" ) if defined $main::profile; } else { # no valid rundir -- do nothing } # are we in 'watch' mode, or is this it? if ( defined $watch ) { sleep($watch); } else { last; } } exit 0; pegasus-wms_4.0.1+dfsg/bin/pegasus-plan0000755000175000017500000000126711757531136017136 0ustar ryngerynge#!/bin/bash # # generate a Concrete Dag by providing a DAX # # $Id: pegasus-plan 4507 2011-08-29 16:13:32Z rynge $ set -e PEGASUS_CONFIG="`dirname $0`/pegasus-config" eval `$PEGASUS_CONFIG --sh-dump` . 
$PEGASUS_SHARE_DIR/common.sh # PEGASUS_HOME should not be set - this is so we can find all the # places in the planner which still depend on PEGASUS_HOME unset PEGASUS_HOME # run java program nice ${JAVA} \ "-Dpegasus.home.sysconfdir=$PEGASUS_CONF_DIR" \ "-Dpegasus.home.bindir=$PEGASUS_BIN_DIR" \ "-Dpegasus.home.sharedstatedir=$PEGASUS_SHARE_DIR" \ "-Dpegasus.home.schemadir=$PEGASUS_SCHEMA_DIR" \ $addon edu.isi.pegasus.planner.client.CPlanner "$@" pegasus-wms_4.0.1+dfsg/bin/pegasus-analyzer0000755000175000017500000014135011757531136020027 0ustar ryngerynge#!/usr/bin/env python """ Pegasus utility for parsing jobstate.log and reporting successful and failed jobs Usage: pegasus-analyzer [options] """ ## # Copyright 2007-2012 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision: 2012 $ # Import Python modules import os import re import sys import time import errno import logging import commands import optparse import traceback import subprocess import tempfile # Initialize logging object logger = logging.getLogger() # Set default level to WARNING logger.setLevel(logging.WARNING) # Use pegasus-config to find our lib path bin_dir = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0]))) pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --noeoln --python" lib_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --noeoln --python-externals" lib_ext_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] # Insert this directory in our search path os.sys.path.insert(0, lib_ext_dir) os.sys.path.insert(0, lib_dir) # Import our modules import Pegasus.common from Pegasus.tools import utils from Pegasus.tools import db_utils from Pegasus.tools import kickstart_parser from netlogger.analysis.workflow import stampede_statistics from netlogger.analysis.schema.schema_check import SchemaVersionError # --- regular expressions ------------------------------------------------------------- re_parse_property = re.compile(r'([^:= \t]+)\s*[:=]?\s*(.*)') re_parse_script_pre = re.compile(r'^SCRIPT PRE (\S+) (.*)') re_parse_condor_subs = re.compile(r'(\S+)="([^"]+)"') re_collapse_condor_subs = re.compile(r'\$\([^\)]*\)') # --- classes ------------------------------------------------------------------------- class Job: def __init__(self, job_name, job_state=""): """ Initializes the Job class, setting job name and state, if provided """ self.name = job_name # Job name self.state = job_state # Job state self.sub_file = "" # Submit file for this job self.out_file = "" # Output file for this job self.err_file = "" # Error file for this job self.cluster = "" # Cluster id for this job (from Condor) self.process = "" # Process id for this job (from Condor) self.sub_file_parsed = False # Flag to tell if we were able to parse this job's submit file self.site = "" # Site where the job ran self.executable = "" # Job's executable 
self.arguments = "" # Job's arguments self.initial_dir = "" # Job's initial dir (from submit file) self.transfer_input_files = "" # Files to transfer when debugging a job self.retries = None # Keep track of how many times a job is submitted self.is_subdax = False # Flag to tell if job is a SUBDAX/pegasus-plan job self.is_subdag = False # Flag to tell if job is a SUBDAG job in the dag file self.subdag_dir = "" # Subdag directory from a SUBDAG job in the dag file self.dag_path = "" # Full path to the dag file from a SUBDAG job self.dagman_out = "" # dagman.out file for this job (only for clustered jobs) self.pre_script = "" # SCRIPT PRE line from the dag file self.condor_subs = {} # Lits of condor substitutions rom DAG VARS line def set_state(self, new_state): """ This function updates a job state """ self.state = new_state # --- constants ----------------------------------------------------------------------- MAXLOGFILE = 1000 # For log file rotation, check files .000 to .999 debug_level = logging.WARNING # For now # --- global variables ---------------------------------------------------------------- prog_base = os.path.split(sys.argv[0])[1] # Name of this program input_dir = None # Directory given in -i command line option dag_path = None # Path of the dag file tsdl_path = None # Path to monitord's log file temp_dir = None # Temporary log file created debug_job = None # Path of a submit file to debug debug_dir = None # Temp directory to use while debugging a job workflow_type = None # Type of the workflow being debugged workflow_base_dir = "" # Workflow submit_dir or dirname(jsd) from braindump file run_monitord = 0 # Run monitord before trying to analyze the output output_dir = None # Output_dir for all files written by monitord top_dir = None # Top_dir of the main workflow, for obtaining the db location use_files = False # Flag for using files in the workflow dir instead of the db quiet_mode = 0 # Prints out/err filenames instead of dumping their contents strict_mode = 0 # Gets out/err filenames from submit file summary_mode = 0 # Print just the summary output debug_mode = 0 # Mode that enables debugging a single job print_invocation = 0 # Prints invocation command for failed jobs print_pre_script = 0 # Prints the SCRIPT PRE line for failed jobs, if present jsdl_filename = "jobstate.log" # Default name of the log file to use jobs = {} # List of jobs found in the jobstate.log file total = 0 # Number of total jobs success = 0 # Number of successful jobs failed = 0 # Number of failed jobs unsubmitted = 0 # Number of unsubmitted jobs unknown = 0 # Number of jobs in an unknown state failed_jobs = [] # List of jobs that failed unknown_jobs = [] # List of jobs that neither succeeded nor failed # --- functions ----------------------------------------------------------------------- def get_jsdl_filename(input_dir): """ This function parses the braindump file in the input_dir, retrieving the wf_uuid and assembling the filename for the jobstate.log file. """ try: my_wf_params = utils.slurp_braindb(input_dir) except: logger.error("cannot read braindump.txt file... exiting...") sys.exit(1) if "wf_uuid" in my_wf_params: return my_wf_params["wf_uuid"]+"-"+jsdl_filename logger.error("braindump.txt does not contain wf_uuid... 
exiting...") sys.exit(1) def create_temp_logfile(name): """ This function uses tempfile.mkstemp to create a temporary log filename in the /tmp directory """ try: tmp_file = tempfile.mkstemp(prefix="%s-" % (name), suffix=".log", dir="/tmp") except: return None # Close file, we will use it later os.close(tmp_file[0]) # Return filename return tmp_file[1] def has_seen(job_name): """ This function returns true if we are already tracking job_name """ if job_name in jobs: return True return False def add_job(job_name, job_state=""): """ This function adds a job to our list """ # Don't add the same job twice if job_name in jobs: return newjob = Job(job_name, job_state) jobs[job_name] = newjob def update_job_state(job_name, job_state=""): """ This function updates the job state of a given job """ # Make sure we have this job if not job_name in jobs: # Print a warning message logger.error("could not find job %s" % (job_name)) return jobs[job_name].set_state(job_state) def update_job_condor_info(job_name, condor_id="-"): """ This function updates a job's condor_id (it splits it into process and cluster) """ # Make sure we have this job if not job_name in jobs: # Print a warning message logger.error("could not find job %s" % (job_name)) return # Nothing to do if condor_id is not defined if condor_id == "-": return my_split = condor_id.split(".") # First part is cluster id jobs[job_name].cluster = my_split[0] # If we have two pieces, second piece is process if len(my_split) >= 2: jobs[job_name].process = my_split[1] def analyze(): """ This function processes all currently known jobs, generating some statistics """ global total, success, failed, unsubmitted, unknown for my_job in jobs: total = total + 1 if (jobs[my_job].state == "POST_SCRIPT_SUCCESS" or jobs[my_job].state == "JOB_SUCCESS"): success = success + 1 elif (jobs[my_job].state == "POST_SCRIPT_FAILURE" or jobs[my_job].state == "JOB_FAILURE"): failed_jobs.append(my_job) failed = failed + 1 elif (jobs[my_job].state == "UNSUBMITTED"): unsubmitted = unsubmitted + 1 else: # It seems we don't have a final result for this job unknown_jobs.append(my_job) unknown = unknown + 1 def parse_submit_file(my_job): """ This function opens a submit file and reads site and condor dagman log information """ # First we check if this is a SUBDAG job from the dag file if my_job.is_subdag: # Nothing to do here return if my_job.sub_file == "": # Create full path for the submit file if we already don't have the sub file set up my_job.sub_file = os.path.join(input_dir, my_job.name + ".sub") my_job.out_file = os.path.join(input_dir, my_job.name + ".out") my_job.err_file = os.path.join(input_dir, my_job.name + ".err") # Try to access submit file if os.access(my_job.sub_file, os.R_OK): # Open submit file try: SUB = open(my_job.sub_file, "r") except: # print "error opening submit file: %s" % (my_job.sub_file) # fail silently for now... return # submit file found my_job.sub_file_parsed = True # Check if this job includes sub workflows if my_job.is_subdax: has_sub_workflow = True else: has_sub_workflow = False # Parse submit file for line in SUB: # First we need to do some trimming... 
line = line.strip(" \t") # Remove leading and trailing spaces if line.startswith('#'): # Skip comments continue line = line.rstrip("\n\r") # Remove new lines, if any line = line.split('#')[0] # Remove inline comments too line = line.strip() # Remove any remaining spaces at both ends if len(line) == 0: # Skip empty lines continue prop = re_parse_property.search(line) if prop: # Parse successful k = prop.group(1) v = prop.group(2) # See if it is one of the properties we are looking for... if k == "+pegasus_site": my_job.site = v.strip('"') continue if k == "arguments": my_job.arguments = v.strip('"') if k == "executable": my_job.executable = v if k == "environment" and has_sub_workflow: # Ok, we need to find the CONDOR_DAGMAN_LOG entry now... sub_props = v.split(';') for sub_prop_line in sub_props: sub_prop_line = sub_prop_line.strip() # Remove any spaces if len(sub_prop_line) == 0: continue sub_prop = re_parse_property.search(sub_prop_line) if sub_prop: if sub_prop.group(1) == "_CONDOR_DAGMAN_LOG": my_job.dagman_out = sub_prop.group(2) my_job.dagman_out = os.path.normpath(my_job.dagman_out) if my_job.dagman_out.find(workflow_base_dir) >= 0: # Path to dagman_out file includes original submit_dir, let's try to change it my_job.dagman_out = os.path.normpath(my_job.dagman_out.replace((workflow_base_dir + os.sep), '', 1)) # Join with current input_dir my_job.dagman_out = os.path.join(input_dir, my_job.dagman_out) # Now, figure out the correct directory, accounting for # replanning and rescue modes # Split filename into dir and base names my_dagman_dir = os.path.dirname(my_job.dagman_out) my_dagman_file = os.path.basename(my_job.dagman_out) my_retry = my_job.retries if my_retry is None: logger.warning("sub-workflow retry counter not initialized... continuing...") continue # Compose directory... assuming replanning mode my_retry_dir = my_dagman_dir + ".%03d" % (my_retry) # If directory doesn't exist, let's change to rescue mode if not os.path.isdir(my_retry_dir): logger.debug("sub-workflow directory %s does not exist, shifting to rescue mode..." % (my_retry_dir)) my_retry_dir = my_dagman_dir + ".000" if not os.path.isdir(my_retry_dir): # Still not able to find it, output warning message logger.warning("sub-workflow directory %s does not exist!" % (my_retry_dir)) continue # Found sub-workflow directory, let's compose the final path to the new dagman.out file... my_job.dagman_out = os.path.join(my_retry_dir, my_dagman_file) # Only parse following keys if we are running in strict mode if strict_mode: # Get initial dir if k == "initialdir": my_job.initial_dir = v # Parse error and output keys if k == "output" or k == "error": # Take care of basic substitutions first v = v.replace("$(cluster)", my_job.cluster) v = v.replace("$(process)", my_job.process) # Now we do any substitutions from the DAG's VAR line (if any) for my_key in my_job.condor_subs: v = v.replace("$(%s)" % (my_key), my_job.condor_subs[my_key]) # Now, we collapse any remaining substitutions (not found in the VAR line) v = re_collapse_condor_subs.sub('', v) # Make sure we have an absolute path if not os.path.isabs(v): v = os.path.join(input_dir, v) # Done! 
Replace out/err filenames with what we have if k == "output": my_job.out_file = v else: my_job.err_file = v # Only parse following keys if we are debugging a job if debug_mode: # Get transfer input files if k == "transfer_input_files": my_job.transfer_input_files = v SUB.close() # If initialdir was specified, we need to make both error and output files relative to that if len(my_job.initial_dir): my_job.out_file = os.path.join(my_job.initial_dir, my_job.out_file) my_job.err_file = os.path.join(my_job.initial_dir, my_job.err_file) else: # Was not able to access submit file # fail silently for now... # print "cannot access submit file: %s" % (my_job.sub_file) pass def find_file(input_dir, file_type): """ This function finds a file with the suffix file_type in the input directory. We assume there is just one file of the requested type in the directory (otherwise the function will return the first file matching the type). """ try: file_list = os.listdir(input_dir) except: logger.error("cannot read directory: %s" % (input_dir)) sys.exit(1) for file in file_list: if file.endswith(file_type): return os.path.join(input_dir, file) logger.error("could not find any %s file in %s" % (file_type, input_dir)) sys.exit(1) def parse_dag_file(dag_fn): """ This function walks through the dag file, learning about all jobs beforehand. """ # Open dag file try: DAG = open(dag_fn, "r") except: logger.error("could not open dag file %s: exiting..." % (dag_fn)) sys.exit(1) # Loop through the dag file for line in DAG: line = line.strip(" \t") if line.startswith("#"): # Skip comments continue line = line.rstrip("\n\r") # Remove new lines, if any line = line.split('#')[0] # Remove inline comments too line = line.strip() # Remove any remaining spaces at both ends if len(line) == 0: # Skip empty lines continue if line.startswith("JOB"): # This is a job line, let's parse it my_job = line.split() if len(my_job) != 3: logger.warn("confused parsing dag line: %s" % (line)) continue if not has_seen(my_job[1]): add_job(my_job[1], "UNSUBMITTED") # Get submit file information from dag file jobs[my_job[1]].sub_file = os.path.join(input_dir, my_job[2]) if my_job[1].startswith("pegasus-plan") or my_job[1].startswith("subdax_"): # Mark job as subdax jobs[my_job[1]].is_subdax = True else: logger.warn("job appears twice in dag file: %s" % (my_job[1])) if line.startswith("SUBDAG EXTERNAL"): # This is a subdag line, parse it to get job name and directory my_job = line.split() if len(my_job) != 6: logger.warn("confused parsing dag line: %s" % (line)) continue if not has_seen(my_job[2]): add_job(my_job[2], "UNSUBMITTED") jobs[my_job[2]].is_subdag = True jobs[my_job[2]].dag_path = my_job[3] jobs[my_job[2]].subdag_dir = my_job[5] else: logger.warn("job appears twice in dag file: %s" % (my_job[2])) if line.startswith("SCRIPT PRE"): # This is a SCRIPT PRE line, parse it to get the script for the job my_script = re_parse_script_pre.search(line) if my_script is None: # Couldn't parse line logger.warn("confused parsing dag line: %s" % (line)) continue # Get job name, and check if we have it my_job = my_script.group(1) if not has_seen(my_job): # Cannot find this job, ignore this line logger.warn("couldn't find job: %s for PRE SCRIPT line in dag file" % (my_job)) continue # Good, copy PRE script line to our job structure jobs[my_job].pre_script = my_script.group(2) if line.startswith("VARS"): # This is a VARS line, parse it to get the condor substitutions if len(line.split()) > 2: # Line looks promising... 
my_job = line.split()[1] if not has_seen(my_job): # Cannot find this job, ignore this line logger.warn("couldn't find job: %s for VARS line in dag file" % (my_job)) continue # Good, parse the condor substitutions, and create substitution dictionary for my_key, my_val in re_parse_condor_subs.findall(line): jobs[my_job].condor_subs[my_key] = my_val def parse_jobstate_log(jobstate_fn): """ This function parses the jobstate.log file, loading all job information """ # Open log file try: JSDL = open(jobstate_fn, "r") except: logger.error("could not open file %s: exiting..." % (jobstate_fn)) sys.exit(1) # Loop through the log file for line in JSDL: sp = line.split() # Skip lines that don't have enough items if len(sp) < 6: continue # Skip monitord comments if sp[1] == "INTERNAL": continue # Ok, we have a valid job jobname = sp[1] jobstate = sp[2] condor_id = sp[3] # Add to job list if we have never seen this job before if not has_seen(jobname): logger.warn("job %s not present in dag file" % (jobname)) add_job(jobname, jobstate) if jobname.startswith("pegasus-plan") or jobname.startswith("subdax_"): # Mark job as subdax jobs[jobname].is_subdax = True else: # Update job state update_job_state(jobname, jobstate) # Update condor id if we reached the SUBMIT state if jobstate == "SUBMIT": update_job_condor_info(jobname, condor_id) # Keep track of retries if jobs[jobname].retries is None: jobs[jobname].retries = 0 else: jobs[jobname].retries = jobs[jobname].retries + 1 # Close log file JSDL.close() def find_latest_log(log_file_base): """ This function tries to locate the latest log file """ last_log = None curr_log = None if os.access(log_file_base, os.F_OK): last_log = log_file_base # Starts from .000 sf = 0 while (sf < MAXLOGFILE): curr_log = log_file_base + ".%03d" % (sf) if os.access(curr_log, os.F_OK): last_log = curr_log sf = sf + 1 else: break return last_log def invoke_monitord(dagman_out_file, output_dir): """ This function runs monitord on the given dagman_out_file. """ monitord_cmd = "pegasus-monitord -r --no-events" if output_dir is not None: # Add output_dir, if given monitord_cmd = monitord_cmd + " --output-dir " + output_dir monitord_cmd = monitord_cmd + " " + dagman_out_file logger.info("running: %s" % (monitord_cmd)) try: status, output = commands.getstatusoutput(monitord_cmd) except: logger.error("could not invoke monitord, exiting...") sys.exit(1) def dump_file(file): """ This function dumps a file to our stdout """ if file is not None: try: OUT = open(file, 'r') except: logger.warn("*** Cannot access: %s" % (file)) print else: print os.path.split(file)[1].center(80, '-') print # Dump file contents to terminal line = OUT.readline() while line: line = line.strip() print line line = OUT.readline() OUT.close() print def print_output_error(job): """ This function outputs both output and error files for a given job. 
""" out_file = find_latest_log(job.out_file) err_file = find_latest_log(job.err_file) my_parser = kickstart_parser.Parser(out_file) my_output = my_parser.parse_stdout_stderr() my_task_id = 0 if len(my_output) > 0: # Ok, we got valid kickstart records, output stdout and stderr for tasks that failed for entry in my_output: # Count tasks, the same way as pegasus-monitord for Stampede my_task_id = my_task_id + 1 if not "derivation" in entry or not "transformation" in entry: continue if not "exitcode" in entry and not "error" in entry: continue if "exitcode" in entry: try: if int(entry["exitcode"]) == 0: # Skip tasks with exitcode equals to zero continue except: logger.warn("couldn't convert exitcode to integer!") continue else: # We must have "error" in entry pass # Got a task with a non-zero exitcode print ("Task #" + str(my_task_id) + " - Summary").center(80, '-') print if "resource" in entry: print "site : %s" % (entry["resource"]) if "hostname" in entry: print "hostname : %s" % (entry["hostname"]) if "name" in entry: print "executable : %s" % (entry["name"]) if "argument-vector" in entry: print "arguments : %s" % (entry["argument-vector"]) if "exitcode" in entry: print "exitcode : %s" % (entry["exitcode"]) else: if "error" in entry: print "error : %s" % (entry["error"]) if "cwd" in entry: print "working dir : %s" % (entry["cwd"]) print # Now let's display stdout and stderr if "stdout" in entry: if len(entry["stdout"]) > 0: # Something to display print ("Task #" + str(my_task_id) + " - " + entry["transformation"] + " - " + entry["derivation"] + " - stdout").center(80, '-') print print entry["stdout"] print if "stderr" in entry: if len(entry["stderr"]) > 0: # Something to display print ("Task #" + str(my_task_id) + " - " + entry["transformation"] + " - " + entry["derivation"] + " - stderr").center(80, '-') print print entry["stderr"] print else: # Not able to parse the kickstart output file, let's just dump the out and err files # Print outfile to screen dump_file(out_file) # Print errfile to screen dump_file(err_file) def print_job_info(job): """ This function prints the information about a particular job """ print print job.center(80, '=') print print " last state: %s" % (jobs[job].state) parse_submit_file(jobs[job]) # Handle subdag jobs from the dag file if jobs[job].is_subdag == True: print " This is a SUBDAG job:" print " For more information, please run the following command:" user_cmd = " %s -s " % (prog_base) if output_dir is not None: user_cmd = user_cmd + " --output-dir %s" % (output_dir) print "%s -f %s" % (user_cmd, jobs[job].dag_path) print return if jobs[job].sub_file_parsed == False: print " site: submit file not available" else: print " site: %s" % (jobs[job].site or '-') print "submit file: %s" % (jobs[job].sub_file) print "output file: %s" % (find_latest_log(jobs[job].out_file)) print " error file: %s" % (find_latest_log(jobs[job].err_file)) if print_invocation: print print "To re-run this job, use: %s %s" % (jobs[job].executable, jobs[job].arguments) print if print_pre_script and len(jobs[job].pre_script) > 0: print print "SCRIPT PRE:" print jobs[job].pre_script print if len(jobs[job].dagman_out) > 0: # This job has a sub workflow print " This job contains sub workflows!" 
print " Please run the command below for more information:" user_cmd = " %s" % (prog_base) if output_dir is not None: user_cmd = user_cmd + " --output-dir %s" % (output_dir) print "%s -d %s" % (user_cmd, os.path.split(jobs[job].dagman_out)[0]) print print # Now dump file contents to screen if we are not in quiet mode if not quiet_mode: print_output_error(jobs[job]) def print_top_summary(): """ This function prints the summary for the analyzer report, which is the same for the long and short output versions """ print print "Summary".center(80, '*') print print " Total jobs : % 6d (%3.2f%%)" % (total, 100 * (1.0 * total/(total or 1))) print " # jobs succeeded : % 6d (%3.2f%%)" % (success, 100 * (1.0 * success/(total or 1))) print " # jobs failed : % 6d (%3.2f%%)" % (failed, 100 * (1.0 * failed/(total or 1))) print " # jobs unsubmitted : % 6d (%3.2f%%)" % (unsubmitted, 100 * (1.0 * unsubmitted/(total or 1))) if unknown > 0: print " # jobs unknown : % 6d (%3.2f%%)" % (unknown, 100 * (1.0 * unknown/(total or 1))) print def print_summary(): """ This function prints the analyzer report summary """ # First print the summary section print_top_summary() # Print information about failed jobs if len(failed_jobs): print "Failed jobs' details".center(80, '*') for job in failed_jobs: print_job_info(job) # Print information about unknown jobs if len(unknown_jobs): print "Unknown jobs' details".center(80, '*') for job in unknown_jobs: print_job_info(job) def analyze_files(): """ This function runs the analyzer using the files in the workflow directory as the data source. """ jsdl_path = None # Path of the jobstate.log file run_directory_writable = False # Flag to indicate if we can write to the run directory dagman_out_path = None # Path to the dagman.out file global workflow_base_dir, dag_path # Get the dag file if it was not specified by the user if dag_path is None: dag_path = find_file(input_dir, ".dag") logger.info("using %s, use the --dag option to override" % (dag_path)) # Build dagman.out path dagman_out_path = dag_path + ".dagman.out" # Check if we can write to the run directory run_directory_writable = os.access(input_dir, os.W_OK) # Invoke monitord if requested if run_monitord: if output_dir is not None: # If output_dir is specified, invoke monitord with that path invoke_monitord("%s.dagman.out" % (dag_path), output_dir) # jobstate.log file uses wf_uuid as prefix jsdl_path = os.path.join(output_dir, get_jsdl_filename(input_dir)) else: if run_directory_writable: # Run directory is writable, write monitord output to jobstate.log file jsdl_path = os.path.join(input_dir, jsdl_filename) # Invoke monitord invoke_monitord("%s.dagman.out" % (dag_path), None) else: # User must provide the --output-dir option logger.error("%s is not writable" % (input_dir)) logger.error("user must specify directory for new monitord logs with the --output-dir option") logger.error("exiting...") sys.exit(1) else: if output_dir is not None: # jobstate.log file uses wf_uuid as prefix and is inside output_dir jsdl_path = os.path.join(output_dir, get_jsdl_filename(input_dir)) else: jsdl_path = os.path.join(input_dir, jsdl_filename) # Compare timestamps of jsdl_path with dagman_out_path try: jsdl_stat = os.stat(jsdl_path) except: logger.error("could not access %s, exiting..." % (jsdl_path)) sys.exit(1) try: dagman_out_stat = os.stat(dagman_out_path) except: logger.error("could not access %s, exiting..." 
% (dagman_out_path)) sys.exit(1) # Compare mtime for both files if dagman_out_stat[8] > jsdl_stat[8]: logger.warning("jobstate.log older than the dagman.out file, workflow logs may not be up to date...") # Try to parse workflow parameters from braindump.txt file wfparams = utils.slurp_braindb(input_dir) if "submit_dir" in wfparams: workflow_base_dir = os.path.normpath(wfparams["submit_dir"]) elif "jsd" in wfparams: workflow_base_dir = os.path.dirname(os.path.normpath(wfparams["jsd"])) # First we learn about jobs by going through the dag file parse_dag_file(dag_path) # Read logfile parse_jobstate_log(jsdl_path) # Process our jobs analyze() # Print summary of our analysis if summary_mode : print_top_summary() else : # This is the non-summary mode, despite the name (go figure) print_summary() if failed > 0: # Workflow has failures, exit with exitcode 2 sys.exit(2) # Workflow has no failures, exit with exitcode 0 sys.exit(0) def analyze_db(config_properties): """ This function runs the analyzer using data from the database. """ global total, success, failed, unsubmitted, unknown # Get the database URL output_db_url, wf_uuid = db_utils.get_db_url_wf_uuid(input_dir, config_properties, top_dir) # Nothing to do if we cannot resolve the database URL if output_db_url is None: logger.error("cannot find database URL, exiting...") sys.exit(1) # Now, let's try to access the database try: workflow_stats = stampede_statistics.StampedeStatistics(output_db_url, False) workflow_stats.initialize(wf_uuid) except SchemaVersionError: logger.error("------------------------------------------------------") logger.error("Database schema mismatch! Please run the upgrade tool") logger.error("to upgrade the database to the latest schema version.") sys.exit(1) except: logger.error("Failed to load the database: " + output_db_url) logger.warning(traceback.format_exc()) sys.exit(1) total = workflow_stats.get_total_jobs_status() success = workflow_stats.get_total_succeeded_jobs_status() failed = workflow_stats.get_total_failed_jobs_status() unsubmitted = total - success - failed # Let's print the results print_top_summary() # Exit if summary mode is on if summary_mode : if failed > 0: # Workflow has failures, exit with exitcode 2 sys.exit(2) # Workflow has no failures, exit with exitcode 0 sys.exit(0) # Now, print information about jobs that failed... if failed > 0: # Get list of failed jobs from database my_failed_jobs = workflow_stats.get_failed_job_instances(final=True, all_jobs=True) # Print header print "Failed jobs' details".center(80, '*') # Now process one by one... 
for my_job in my_failed_jobs: my_info = workflow_stats.get_job_instance_info(my_job[0])[0] my_tasks = workflow_stats.get_invocation_info(my_job[0]) # Unquote stdout and stderr my_info.stdout_text = utils.unquote(my_info.stdout_text or "") my_info.stderr_text = utils.unquote(my_info.stderr_text or "") my_info.stdout_text = my_info.stdout_text.strip(" \n\r\t") my_info.stderr_text = my_info.stderr_text.strip(" \n\r\t") if my_job[0] == my_info.job_instance_id: print print my_info.job_name.center(80, '=') print print " last state: %s" % (my_info.state or '-') print " site: %s" % (my_info.site or '-') print "submit file: %s" % (my_info.submit_file or '-') print "output file: %s" % (my_info.stdout_file or '-') print " error file: %s" % (my_info.stderr_file or '-') if print_invocation: print print "To re-run this job, use: %s %s" % ((my_info.executable or '-'), (my_info.argv or '-')) print if print_pre_script and len((my_info.pre_executable or "")) > 0: print print "SCRIPT PRE:" print "%s %s" % ((my_info.pre_executable or ""), (my_info.pre_argv or "")) print if my_info.subwf_dir is not None: # This job has a sub workflow print " This job contains sub workflows!" print " Please run the command below for more information:" user_cmd = " %s" % (prog_base) my_wfdir = os.path.normpath(my_info.subwf_dir) if my_wfdir.find(my_info.submit_dir) >= 0: # Path to dagman_out file includes original submit_dir, let's try to change it... my_wfdir = os.path.normpath(my_wfdir.replace((my_info.submit_dir + os.sep), '', 1)) my_wfdir = os.path.join(input_dir, my_wfdir) print "%s -d %s --top-dir %s" % (user_cmd, my_wfdir, (top_dir or input_dir)) print print # Now, print task information for my_task in my_tasks: if my_task[0] < 1: # Skip pre, and post script tasks continue if my_task[1] == 0: # Skip tasks that succeeded continue # Got a task with a non-zero exitcode my_exitcode = utils.raw_to_regular(my_task[1]) # Print task summary print ("Task #" + str(my_task[0]) + " - Summary").center(80, '-') print print "site : %s" % (my_info.site or '-') print "hostname : %s" % (my_info.hostname or '-') print "executable : %s" % (str(my_task[2] or '-')) print "arguments : %s" % (str(my_task[3] or '-')) print "exitcode : %s" % (str(my_exitcode)) print "working dir : %s" % (my_info.work_dir or '-') print if quiet_mode: continue # Now, print task stdout and stderr, if anything is there my_stdout_str = "#@ %d stdout" % (my_task[0]) my_stderr_str = "#@ %d stderr" % (my_task[0]) # Start with stdout my_stdout_start = my_info.stdout_text.find(my_stdout_str) if my_stdout_start >= 0: my_stdout_start = my_stdout_start + len(my_stdout_str) + 1 my_stdout_end = my_info.stdout_text.find("#@", my_stdout_start) if my_stdout_end < 0: # Next comment not found, possibly the last entry my_stdout_end = len(my_info.stdout_text) else: my_stdout_end = my_stdout_end - 1 if my_stdout_end - my_stdout_start > 0: # Something to display print ("Task #" + str(my_task[0]) + " - " + str(my_task[4]) + " - " + str(my_task[5]) + " - stdout").center(80, '-') print print my_info.stdout_text[my_stdout_start:my_stdout_end] print # Now print stderr (from the kickstart output file) my_stderr_start = my_info.stdout_text.find(my_stderr_str) if my_stderr_start >= 0: my_stderr_start = my_stderr_start + len(my_stderr_str) + 1 my_stderr_end = my_info.stdout_text.find("#@", my_stderr_start) if my_stderr_end < 0: # Next comment not found, possibly the last entry my_stderr_end = len(my_info.stdout_text) else: my_stderr_end = my_stderr_end - 1 if my_stderr_end - my_stderr_start > 0: 
# Something to display print ("Task #" + str(my_task[0]) + " - " + str(my_task[4]) + " - " + str(my_task[5]) + " - Kickstart stderr").center(80, '-') print print my_info.stdout_text[my_stderr_start:my_stderr_end] print # Now print the stderr output from the .err file if my_info.stderr_text.strip("\n\t \r") != "": # Something to display print ("Task #" + str(my_task[0]) + " - " + str(my_task[4]) + " - " + str(my_task[5]) + " - stderr").center(80, '-') print print my_info.stderr_text print else: log.error("unexpected job instance returned by database!") log.error("returned: %d - expected: %d" % (my_info[0], my_job[0])) continue # Done with the database workflow_stats.close() if failed > 0: # Workflow has failures, exit with exitcode 2 sys.exit(2) # Workflow has no failures, exit with exitcode 0 sys.exit(0) def debug_condor(my_job): """ This function is used to debug a condor job. It creates a shell script in the debug_dir directory that is used to copy all necessary files to the (local) debug_dir directory and then execute the job locally. """ global strict_mode # Set strict mode in order to parse everything in the submit file strict_mode = 1 # Parse submit file parse_submit_file(my_job) # Create script name debug_script_basename = "debug_" + my_job.name + ".sh" debug_script_name = os.path.join(debug_dir, debug_script_basename) job_executable = os.path.join(debug_dir, my_job.executable) + my_job.arguments try: debug_script = open(debug_script_name, "w") except: logger.error("cannot create debug script %s" % (debug_script)) sys.exit(1) try: # Start with the bash line debug_script.write("#!/bin/bash\n") debug_script.write("\n") debug_script.write("set -e\n") debug_script.write("\n") debug_script.write("# Copy any files that are needed\n") debug_script.write('echo "copying input files..."\n') debug_script.write("\n") # Copy all files that we need for my_file in my_job.transfer_input_files.split(","): if len(my_file): if len(my_job.initial_dir): # Add the initial dir to all files to be copied my_file = os.path.join(my_job.initial_dir, my_file) debug_script.write("cp %s %s\n" % (my_file, debug_dir)) # Extra newline before executing the job debug_script.write("\n") debug_script.write('echo "copying input files completed."\n') debug_script.write("\n") debug_script.write("# Set the execute bit on the executable\n") debug_script.write("chmod +x %s\n" % (os.path.join(debug_dir, my_job.executable))) debug_script.write("\n") debug_script.write('echo "executing job: %s"\n' % (job_executable)) debug_script.write("\n") debug_script.write("# Now, execute the job\n") debug_script.write("%s\n" % (job_executable)) debug_script.write("\n") # Remember not to put anything between running the executable # and checking the exit code, otherwise $? will break... debug_script.write("# Check error code\n") debug_script.write("if [ $? -eq 0 ]; then\n") debug_script.write(' echo "executable ran successfully"\n') debug_script.write("else\n") debug_script.write(' echo "executable failed with error $?"\n') except: logger.error("cannot write to file %s" % (debug_script)) sys.exit(1) # We are done writing the file! debug_script.close() try: # Make our debug script executable os.chmod(debug_script_name, 0755) except: logger.error("cannot change permissions for the debug script %s" % (debug_script)) sys.exit(1) # Print next step print print "%s: finished generating job debug script!" 
% (prog_base) print print "To run it, you need to type:" print " $ cd %s" % (debug_dir) print " $ ./%s" % (debug_script_basename) print def debug_workflow(): """ This function handles the mode where the analyzer is used to debug a job in a workflow """ global debug_job, debug_dir # Check if we can find this job's submit file if not debug_job.endswith(".sub"): debug_job = debug_job + ".sub" # Figure out job name jobname = os.path.basename(debug_job) jobname = jobname[0:jobname.find(".sub")] # Create job class my_job = Job(jobname) my_job.sub_file = debug_job if not os.access(debug_job, os.R_OK): logger.error("cannot access job submit file: %s" % (debug_job)) sys.exit(1) # Handle the temporary directory option if debug_dir is None: # Create temporary directory try: debug_dir = tempfile.mkdtemp() except: logger.error("could not create temporary directory!") sys.exit(1) else: # Make sure directory specified is writable debug_dir = os.path.abspath(debug_dir) if not os.access(debug_dir, os.F_OK): # Create directory if it does not exist try: os.mkdir(debug_dir) except: logger.error("cannot create debug directory: %s" % (debug_dir)) # Check if we can write to the debug directory if not os.access(debug_dir, os.W_OK): logger.error("not able to write to temporary directory: %s" % (debug_dir)) sys.exit(1) # Handle workflow type if workflow_type is not None: if workflow_type.lower() == "condor": logger.info("debugging condor type workflow") debug_condor(my_job) else: logger.error("workflow type %s not supported!" % (workflow_type)) sys.exit(1) else: logger.info("debugging condor type workflow") debug_condor(my_job) # All done, in case we are back here! sys.exit(0) # --- main ---------------------------------------------------------------------------- # Configure command line option parser prog_usage = "usage: %s [options] workflow_directory" % (prog_base) parser = optparse.OptionParser(usage=prog_usage) parser.add_option("-v", "--verbose", action="count", default=0, dest="vb", help="Increase verbosity, repeatable") parser.add_option("-i", "-d", "--dir", action = "store", type = "string", dest = "input_dir", help = "input directory where the jobstate.log file is located, default is the current directory") parser.add_option("--dag", action = "store", type = "string", dest = "dag_filename", help = "full path to the dag file to use -- this option overrides the -d option") parser.add_option("-f", "--files", action = "store_const", const = 1, dest = "use_files", help = "disables the database mode and forces the use of workflow directory files") parser.add_option("-m", "-t", "--monitord", action = "store_const", const = 1, dest = "run_monitord", help = "run pegasus-monitord before analyzing the output") parser.add_option("-o", "--output-dir", action = "store", type = "string", dest = "output_dir", help = "provides an output directory for all monitord log files") parser.add_option("--top-dir", action = "store", type = "string", dest = "top_dir", help = "provides the location of the top-level workflow directory, needed to analyze sub-workflows") parser.add_option("-c","--conf", action = "store", type = "string", dest = "config_properties", help = "Specifies the properties file to use. 
This overrides all other property files.") parser.add_option("-q", "--quiet", action = "store_const", const = 1, dest = "quiet_mode", help = "output out/err filenames instead of their contents") parser.add_option("-p", "--print", action = "store", type = "string" , dest = "print_options", help = "specifies print options from pre,invocation") parser.add_option("-s", "--strict", action = "store_const", const = 1, dest = "strict_mode", help = "gets a job's out and err files from the submit file") parser.add_option("-S", "--summary", action = "store_const", const = 1, dest = "summary_mode", help = "Just print the summary and exit") parser.add_option("--debug-job", action = "store", type = "string", dest = "debug_job", help = "specifies a job to debug (can be either the job base name or the submit file name) -- this option enables debugging a single job") parser.add_option("--debug-dir", action = "store", type = "string", dest = "debug_dir", help = "specifies the directory to use as debug directory (default is to create a random directory in /tmp)") parser.add_option("--type", action = "store", type = "string", dest = "workflow_type", help = "specifies what type of workflow we are debugging (available types: condor)") # Parse command line options (options, args) = parser.parse_args() print "%s: initializing..." % (prog_base) # Copy options from the command line parser if options.vb == 0: lvl = logging.WARN elif options.vb == 1: lvl = logging.INFO else: lvl = logging.DEBUG # Set logging level logger.setLevel(lvl) if options.run_monitord is not None: run_monitord = options.run_monitord if options.strict_mode is not None: strict_mode = options.strict_mode if options.summary_mode is not None: summary_mode = options.summary_mode if options.quiet_mode is not None: quiet_mode = options.quiet_mode if options.use_files is not None: use_files = True if options.print_options is not None: my_options = options.print_options.split(",") if "pre" in my_options or "all" in my_options: print_pre_script = 1 if "invocation" in my_options or "all" in my_options: print_invocation = 1 if options.output_dir is not None: output_dir = options.output_dir if options.top_dir is not None: top_dir = os.path.abspath(options.top_dir) if options.debug_job is not None: debug_job = options.debug_job # Enables the debugging mode debug_mode = 1 if options.debug_dir is not None: debug_dir = options.debug_dir if options.workflow_type is not None: workflow_type = options.workflow_type if options.dag_filename is not None: dag_path = options.dag_filename input_dir = os.path.abspath(os.path.split(dag_path)[0]) # Assume current directory if input dir is empty if len(input_dir) == 0: input_dir = os.getcwd() else: # Select directory where jobstate.log is located if options.input_dir is not None: input_dir = os.path.abspath(options.input_dir) else: if len(args) > 1: parser.print_help() sys.exit(1) elif len(args) == 1: input_dir = args[0] else: input_dir = os.getcwd() if debug_mode == 1: # Enter debug mode if job name given # This function does not return debug_workflow() # Run the analyzer if use_files: analyze_files() else: analyze_db(options.config_properties) # Done! print "Done".center(80, '*') print print "%s: end of status report" % (prog_base) print pegasus-wms_4.0.1+dfsg/bin/pegasus-monitord0000755000175000017500000020037511757531136020040 0ustar ryngerynge#!/usr/bin/env python """ Logging daemon process to update the jobstate.log file from DAGMan logs. This program is to be run automatically by the pegasus-run command. 
Usage: pegasus-monitord [options] dagoutfile """ ## # Copyright 2007-2012 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision: 2012 $ # Import Python modules import os import re import sys import time import errno import atexit import shelve import signal import logging import calendar import datetime import optparse import traceback import subprocess # Initialize logging object logger = logging.getLogger() # Set default level to WARNING logger.setLevel(logging.WARNING) #logger.setLevel(logging.DEBUG) # Don't send events further up logger.propagate = 0 # Cached debugging state g_isdbg = 0 # Ordered logging levels _LEVELS = [logging.ERROR, logging.WARN, logging.INFO, logging.DEBUG] # Save our own basename prog_base = os.path.split(sys.argv[0])[1] # Import our modules # Use pegasus-config to find our lib path bin_dir = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0]))) pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --python-dump" config = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] exec config # also need the python externals dir os.sys.path.insert(0, pegasus_python_externals_dir) import Pegasus.common from Pegasus.tools import utils from Pegasus.tools import properties from Pegasus.monitoring.workflow import Workflow, MONITORD_RECOVER_FILE from Pegasus.monitoring import notifications from Pegasus.monitoring import event_output as eo from Pegasus.monitoring import socket_interface # Add SEEK_CUR to os if Python version < 2.5 if sys.version_info < (2, 5): os.SEEK_CUR = 1 # set up the environment - this is to control and provide a sane environment # when calling out to sub programs - for example notification scripts os.environ['PEGASUS_BIN_DIR'] = pegasus_bin_dir os.environ['PEGASUS_CONF_DIR'] = pegasus_conf_dir os.environ['PEGASUS_JAVA_DIR'] = pegasus_java_dir os.environ['PEGASUS_PERL_DIR'] = pegasus_perl_dir os.environ['PEGASUS_PYTHON_DIR'] = pegasus_python_dir os.environ['PEGASUS_SHARE_DIR'] = pegasus_share_dir os.environ['PEGASUS_SCHEMA_DIR'] = pegasus_schema_dir # Compile our regular expressions # Used in process re_parse_dag_name = re.compile(r"Parsing (.+) ...$") re_parse_timestamp = re.compile(r"^\s*(\d{1,2})\/(\d{1,2})(\/(\d{1,2}))?\s+(\d{1,2}):(\d{2}):(\d{2})") re_parse_iso_stamp = re.compile(r"^\s*(\d{4}).?(\d{2}).?(\d{2}).(\d{2}).?(\d{2}).?(\d{2})([.,]\d+)?([Zz]|[-+](\d{2}).?(\d{2}))") re_parse_event = re.compile(r"Event:\s+ULOG_(\S+) for Condor (?:Job|Node) (\S+)\s+\((-?[0-9]+\.[0-9]+)(\.[0-9]+)?\)$") re_parse_script_running = re.compile(r"\d{2}\sRunning (PRE|POST) script of (?:Job|Node) (.+)\.{3}") re_parse_script_done = re.compile(r"\d{2}\s(PRE|POST) Script of (?:Job|Node) (\S+)") re_parse_script_successful = re.compile(r"completed successfully\.$") re_parse_script_failed = re.compile(r"failed with status\s+(-?\d+)\.?$") re_parse_job_submit = re.compile(r"Submitting Condor Node (.+) job") re_parse_job_submit_error = re.compile(r"ERROR: submit attempt failed") 
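# For illustration, a hypothetical dagman.out line (made up for this comment,
# not taken from a real log) and how re_parse_event above would pick it apart:
#
#   sample = "05/24 10:15:32 Event: ULOG_EXECUTE for Condor Node merge_1 (1234.0)"
#   m = re_parse_event.search(sample)
#   if m:
#       event, node, condor_id = m.group(1), m.group(2), m.group(3)
#       # event == "EXECUTE", node == "merge_1", condor_id == "1234.0"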
re_parse_job_failed = re.compile(r"\d{2}\sNode (\S+) job proc \(([0-9\.]+)\) failed with status\s+(-?\d+)\.$") re_parse_job_successful = re.compile(r"\d{2}\sNode (\S+) job proc \(([0-9\.]+)\) completed successfully\.$") re_parse_retry = re.compile(r"Retrying node (\S+) \(retry \#(\d+) of (\d+)\)") re_parse_dagman_condor_id = re.compile(r"\*\* condor_scheduniv_exec\.([0-9\.]+) \(CONDOR_DAGMAN\) STARTING UP") re_parse_dagman_finished = re.compile(r"\(condor_DAGMAN\)[\w\s]+EXITING WITH STATUS (\d+)$") re_parse_dagman_pid = re.compile(r"\*\* PID = (\d+)$") re_parse_condor_version = re.compile(r"\*\* \$CondorVersion: ((\d+\.\d+)\.\d+)") re_parse_condor_logfile = re.compile(r"Condor log will be written to ([^,]+)") re_parse_condor_logfile_insane = re.compile(r"\d{2}\s{3,}(\S+)") re_parse_multiline_files = re.compile(r"All DAG node user log files:") # Constants logbase = "monitord.log" # Basename of daemon logfile speak = "PMD/1.0" # Protocol version for our socket command-line interface MONITORD_WF_RETRY_FILE = "monitord.subwf" # filename for writing persistent sub-workflow retry information MAX_SLEEP_TIME = 10 # in seconds SLEEP_WAIT_NOTIFICATION = 5 # in seconds unsubmitted_events = {"UN_READY": 1, "PRE_SCRIPT_STARTED": 1, "PRE_SCRIPT_SUCCESS": 1, "PRE_SCRIPT_FAILURE": 1} # Global variables wfs = [] # list of workflow entries monitord is tracking tracked_workflows = [] # list of workflows we have started tracking wf_retry_dict = None # File-based dictionary keeping track of sub-workflows retries, opened later... follow_subworkflows = True # Flag for tracking sub-workflows root_wf_id = None # Workflow id of the root workflow replay_mode = 0 # disable checking if DAGMan's pid is gone keep_state = 0 # Flag for keeping a Workflow's state across several DAGMan start/stop cycles db_stats = 'no' # collect and print database stats at the end of execution no_events = False # Flag for disabling event output altogether event_dest = None # URL containing the destination of the events encoding = None # Way to encode the data monitord_exit_code = 0 # Exit code for pegasus-monitord socket_enabled = False # Enable socket for real-time debugging start_server = False # Keep track if socket server needs to be started do_notifications = True # Flag to enable notifications skip_pid_check = False # Flag to skip checking if a previous monitord is still running using the pid file monitord_notifications = None # Notifications' manager class max_parallel_notifications = 10 # Maximum number of notifications we can do in parallel notifications_timeout = 0 # Time to wait for notification scripts to finish (0 means wait forever) store_stdout_stderr = True # Flag for storing jobs' stdout and stderr in our output wf_event_sink = None # Where wf events go # Revision handling revision = "$Revision: 2012 $" # Let cvs handle this, do not edit manually # Remaining variables out = None # .dag.dagman.out file from command-line run = None # run directory from command-line dagman.out file server = None # server socket sockfn = None # socket filename # # --- at exit handlers ------------------------------------------------------------------- # def delete_pid_file(): """ This function deletes the pid file when exiting. """ try: os.unlink(pid_filename) except OSError: logger.error("cannot delete pid file %s" % (pid_filename)) def socket_exit_handler(): """ This function closes the socket server, and removes the sockfn file. 
""" if server is not None: server.close() try: os.unlink(sockfn) except OSError: # Just be silent pass def close_wf_retry_file(): """ This function closes the persistent storage file containing sub-workflow retry information. """ if wf_retry_dict is not None: wf_retry_dict.close() def finish_notifications(): """ This function flushes all notifications, and closes the notifications' log file. It also logs all pending (but not yet issued) notifications. """ if monitord_notifications is not None: monitord_notifications.finish_notifications() def finish_stampede_loader(): """ This function is called by the atexit module when monitord exits. It is used to make sure the loader has finished loading all data into the database. It will also produce stats for benchmarking. """ if wf_event_sink is not None: print (utils.isodate(time.time()) + " - pegasus-monitord - DB flushing beginning ").ljust(80, "-") try: if db_stats == 'yes' and logger.getEffectiveLevel() > logging.INFO: # Make sure log level is enough to display database # benchmarking information logger.setLevel(logging.INFO) wf_event_sink.close() except: logger.warning("could not call the finish method "+\ "in the nl loader class... exiting anyway") print (utils.isodate(time.time()) + " - pegasus-monitord - DB flushing ended ").ljust(80, "-") # Workflow Entry Class class WorkflowEntry: """ Class used to store one workflow entry """ run_dir = None # Run directory for the workflow dagman_out = None # Location of the dagman.out file n_retries = 0 # Number of retries for looking for the dagman.out file wf = None # Pointer to the Workflow class for this Workflow DMOF = None # File pointer once we open the dagman.out file ml_buffer = '' # Buffer for reading the dagman.out file ml_retries = 0 # Keep track of how many times we have looked for new content ml_current = 0 # Keep track of where we are in the dagman.out file delete_workflow = False # Flag for dropping this workflow sleep_time = None # Time to sleep for this workflow output_dir = None # output_dir for all files written by monitord jsd = None # location of jobstate.log file nodaemon = 0 # foreground mode logfile = None # location of monitord.log file millisleep = None # emulated run mode delay adjustment = 0 # time zone adjustment (@#~! 
Condor) # Parse command line options prog_usage = "usage: %s [options] workflow.dag.dagman.out" % (prog_base) prog_desc = """Mandatory arguments: outfile is the log file produced by Condor DAGMan, usually ending in the suffix ".dag.dagman.out".""" parser = optparse.OptionParser(usage=prog_usage, description=prog_desc) parser.add_option("-a", "--adjust", action = "store", type = "int", dest = "adjustment", help = "adjust for time zone differences by i seconds, default 0") parser.add_option("-N", "--foreground", action = "store_const", const = 2, dest = "nodaemon", help = "(Condor) don't daemonize %s; go through motions as if" % (prog_base)) parser.add_option("-n", "--no-daemon", action = "store_const", const = 1, dest = "nodaemon", help = "(debug) don't daemonize %s; keep it in the foreground" % (prog_base)) parser.add_option("-j", "--job", action = "store", type = "string", dest = "jsd", help = "alternative job state file to write, default is %s in the workflow's directory" % (utils.jobbase)) parser.add_option("-l", "--log", action = "store", type = "string", dest = "logfile", help = "alternative %s log file, default is %s in the workflow's directory" % (prog_base, logbase)) parser.add_option("-o", "--output-dir", action = "store", type = "string", dest = "output_dir", help = "provides an output directory for all monitord log files") parser.add_option("--conf", action = "store", type = "string", dest = "config_properties", help = "specifies the properties file to use. This option overrides all other property files.") parser.add_option("--no-recursive", action = "store_const", const = 1, dest = "disable_subworkflows", help = "prevents pegasus-monitord from automatically following any sub-workflows that are found") parser.add_option("--no-database", "--nodatabase", "--no-events", action = "store_const", const = 0, dest = "no_events", help = "turn off event generation completely, and overrides the URL in the -d option") parser.add_option("--no-notifications", "--no-notification", action = "store_const", const = 0, dest = "no_notify", help = "turn off notifications completely") parser.add_option("--notifications-max", action = "store", type = "int", dest = "notifications_max", help = "maximum number of concurrent notification scripts, 0 disables notifications, default is %d" % (max_parallel_notifications)) parser.add_option("--notifications-timeout", action = "store", type = "int", dest = "notifications_timeout", help = "time to wait for notification scripts to finish before terminating them, 0 allows scripts to run indefinitely") parser.add_option("-S", "--sim", action = "store", type = "int", dest = "millisleep", help = "Developer: simulate delays between reads by sleeping ms milliseconds") parser.add_option("-r", "--replay", action = "store_const", const = 1, dest = "replay_mode", help = "disables checking for DAGMan's pid while running %s" % (prog_base)) parser.add_option("--db-stats", action = "store_const", const = "yes", dest = "db_stats", help = "collect and print database stats at the end") parser.add_option("--keep-state", action = "store_const", const = 1, dest = "keep_state", help = "keep state across several DAGMan start/stop cycles (development option)") parser.add_option("--socket", action = "store_const", const = "yes", dest = "socket_enabled", help = "enables a socket interface for debugging") parser.add_option("--skip-stdout", action = "store_const", const = 0, dest = "skip_stdout", help = "disables storing both stdout and stderr in our output") 
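# For reference, a typical replay-style invocation combining the options
# above (the run directory path is illustrative only); pegasus-analyzer's
# invoke_monitord() drives this tool in much the same way:
#
#   pegasus-monitord -r --no-events --output-dir /tmp/monitord-logs \
#       /path/to/run0001/workflow.dag.dagman.out
#
# -r (replay) keeps the process in the foreground and disables notifications;
# --no-events turns off all event/database output.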
parser.add_option("-f", "--force", action = "store_const", const = 1, dest = "skip_pid_check", help = "runs pegasus-monitord even if it detects a previous instance running") parser.add_option("-v", "--verbose", action="count", default=0, dest="vb", help="Increase verbosity, repeatable") grp = optparse.OptionGroup(parser, "Output options") grp.add_option("-d", "--dest", action="store", dest="event_dest", metavar="PATH or URL", help="Output destination URL [], where " "scheme = [empty] | x-tcp:// | DB-dialect+driver://. " "For empty scheme, params are a file path with '-' meaning standard output. " "For x-tcp scheme, params are TCP host[:port=14380]. " "For DB, use SQLAlchemy engine URL. " "(default=sqlite:///.stampede.db)", default=None) grp.add_option("-e", "--encoding", action='store', dest='enc', default="bp", metavar='FORMAT', help="How to encode log events: bson | bp (default=%default)") parser.add_option_group(grp) # Parse command-line options (options, args) = parser.parse_args() # Remaining argument is .dag.dagman.out file if len(args) != 1: parser.print_help() sys.exit(1) out = args[0] if not out.endswith(".dagman.out"): parser.print_help() sys.exit(1) # Turn into absolute filename out = os.path.abspath(out) # Infer run directory run = os.path.dirname(out) # Resolve command-line options conflicts if options.event_dest is not None and options.no_events is not None: logger.warning("the --no-events and --dest options conflict, please use only one of them") sys.exit(1) # Check if user wants to override pid checking if options.skip_pid_check is not None: skip_pid_check = True # Make sure no other pegasus-monitord instances are running... pid_filename = os.path.join(run, "monitord.pid") if not skip_pid_check and utils.pid_running(pid_filename): logger.critical("it appears that pegasus-monitord is still running on this workflow... 
exiting") # Exit with exitcode 43 sys.exit(43) # Create pid file utils.write_pid_file(pid_filename) # Make sure we delete it when we are done atexit.register(delete_pid_file) # Get the location of the properties file from braindump top_level_wf_params = utils.slurp_braindb(run) top_level_prop_file = None # Get properties tag from braindump if "properties" in top_level_wf_params: top_level_prop_file = top_level_wf_params["properties"] # Create the full path by using the submit_dir key from braindump if "submit_dir" in top_level_wf_params: top_level_prop_file = os.path.join(top_level_wf_params["submit_dir"], top_level_prop_file) # Parse, and process properties props = properties.Properties() props.new(config_file=options.config_properties, rundir_propfile=top_level_prop_file) # Parse notification-related properties if int(props.property("pegasus.monitord.notifications.timeout") or -1) >= 0: notifications_timeout = int(props.property("pegasus.monitord.notifications.timeout")) if int(props.property("pegasus.monitord.notifications.max") or -1) >= 0: max_parallel_notifications = int(props.property("pegasus.monitord.notifications.max")) if max_parallel_notifications == 0: logger.warning("maximum parallel notifications set to 0, disabling notifications...") do_notifications = False if not utils.make_boolean(props.property("pegasus.monitord.notifications") or 'true'): do_notifications = False # Parse stdout/stderr disable parsing property # Copy command line options into our variables if utils.make_boolean(props.property("pegasus.monitord.stdout.disable.parsing") or 'false'): store_stdout_stderr = False if options.vb == 0: lvl = logging.WARN elif options.vb == 1: lvl = logging.INFO else: lvl = logging.DEBUG # Set logging level logger.setLevel(lvl) # Cache whether debugging g_isdbg = logger.isEnabledFor(logging.DEBUG) if options.adjustment is not None: adjustment = options.adjustment if options.nodaemon is not None: nodaemon = options.nodaemon if options.jsd is not None: jsd = options.jsd if options.logfile is not None: logfile = options.logfile if options.millisleep is not None: millisleep = options.millisleep if options.replay_mode is not None: replay_mode = options.replay_mode # Replay mode always runs in foreground nodaemon = 1 # No notifications in replay mode do_notifications = False if options.no_notify is not None: do_notifications = False if options.notifications_max is not None: max_parallel_notifications = options.notifications_max if max_parallel_notifications == 0: do_notifications = False if max_parallel_notifications < 0: logger.critical("notifications-max must be integet >= 0") sys.exit(1) if options.notifications_timeout is not None: notifications_timeout = options.notifications_timeout if notifications_timeout < 0: logger.critical("notifications-timeout must be integet >= 0") sys.exit(1) if notifications_timeout > 0 and notifications_timeout < 5: logger.warning("notifications-timeout set too low... notification scripts may not have enough time to complete... continuing anyway...") if options.disable_subworkflows is not None: follow_subworkflows = False if options.db_stats is not None: db_stats = options.db_stats if options.keep_state is not None: keep_state = options.keep_state if options.skip_stdout is not None: store_stdout_stderr = False if options.output_dir is not None: output_dir = options.output_dir try: if not os.path.exists(output_dir): os.makedirs(output_dir) except OSError: logger.critical("cannot create directory %s. exiting..." 
if options.socket_enabled is not None: socket_enabled = options.socket_enabled if options.event_dest is None: if options.no_events is not None: # Turn off event generation no_events = True else: if props.property("pegasus.monitord.events") is not None: # Set event generation according to properties (default is True) no_events = not utils.make_boolean(props.property("pegasus.monitord.events")) else: # Default is to generate events no_events = False if props.property("pegasus.monitord.output") is None: # No command-line or property specified, use default event_dest = "sqlite:///" + out[:out.find(".dag.dagman.out")] + ".stampede.db" else: # Ok, get it from the properties file event_dest = props.property("pegasus.monitord.output") else: # Use command-line option event_dest = options.event_dest if options.enc is not None: # Get encoding from command-line options encoding = options.enc else: if props.property("pegasus.monitord.encoding") is not None: # Get encoding from property encoding = props.property("pegasus.monitord.encoding") # Use default monitord logfile if user hasn't specified another file if logfile is None: if output_dir is None: logfile = os.path.join(run, logbase) else: logfile = os.path.join(run, output_dir, logbase) logfile = os.path.abspath(logfile) # Check if the user-provided jsd file is an absolute path, if so, we # disable recursive mode if jsd is not None: if os.path.isabs(jsd): # Yes, this is an absolute path follow_subworkflows = False logger.warning("jsd file is an absolute filename, disabling sub-workflow tracking") # # --- functions --------------------------------------------------------------------------- # def systell(fh): """ purpose: make things symmetric, have a systell for sysseek paramtr: fh (IO): filehandle returns: current file position """ return os.lseek(fh, 0, os.SEEK_CUR) def add(wf, jobid, event, sched_id=None, status=None): """ This function processes events related to jobs' state changes. It creates a new job, when needed, and by calling the workflow's update_job_state method, it causes output to be generated (both to jobstate.log and to the backend configured to receive events). wf is the workflow object for this operation, jobid is the id for the job (job_name), event is the actual state associated with this event (SUBMIT, EXECUTE, etc). sched_id is the scheduler's id for this particular job instance, and status is the exitcode for the job. This function returns the job_submit_seq for the corresponding jobid. """ my_site = None my_time = None my_job_submit_seq = None # Remove existing site info during replanning if event in unsubmitted_events: if jobid in wf._job_site: del wf._job_site[jobid] if jobid in wf._walltime: del wf._walltime[jobid] # Variables originally from submit file information if jobid in wf._job_site: my_site = wf._job_site[jobid] if jobid in wf._walltime: my_time = wf._walltime[jobid] # A PRE_SCRIPT_STARTED event always means a new job if event == "PRE_SCRIPT_STARTED": # This is a new job, we need to add it to the workflow my_job_submit_seq = wf.add_job(jobid, event) # A DAGMAN_SUBMIT event requires a new job (unless this was # already done by a PRE_SCRIPT_STARTED event, but we let the # add_job function figure this out). if event == "DAGMAN_SUBMIT": wf._last_submitted_job = jobid my_job_submit_seq = wf.add_job(jobid, event) # Nothing else to do... we should stop here... return my_job_submit_seq
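# Summary of how add() maps events onto job records (derived from the
# branches above and below, for orientation only):
#
#   PRE_SCRIPT_STARTED -> wf.add_job() creates a fresh job entry
#   DAGMAN_SUBMIT      -> wf.add_job(), remembers wf._last_submitted_job,
#                         then returns early (no state update yet)
#   SUBMIT             -> wf.add_job(..., sched_id=...), then walltime and
#                         site are read back from the Condor submit file
#   anything else      -> wf.find_jobid() looks up the existing entry
#
# Every path except DAGMAN_SUBMIT ends in wf.update_job_state(...), which
# writes the jobstate.log line and emits the corresponding backend event.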
# A SUBMIT event brings sched id and job type information (it can also be # a new job for us when there is no PRE_SCRIPT) if event == "SUBMIT": # Add job to our workflow (if not already there), will update sched_id in both cases my_job_submit_seq = wf.add_job(jobid, event, sched_id=sched_id) # Obtain planning information from the submit file when entering Condor; # figure out the maximum time the job _intends_ to run my_time, my_site = wf.parse_job_sub_file(jobid, my_job_submit_seq) if my_site == "!!SITE!!": my_site = None # If not None, convert into seconds if my_time is not None: my_time = my_time * 60 logger.info("job %s requests %d s walltime" % (jobid, my_time)) wf._walltime[jobid] = my_time else: logger.info("job %s does not request a walltime" % (jobid)) # Remember the run-site if my_site is not None: logger.info("job %s is planned for site %s" % (jobid, my_site)) wf._job_site[jobid] = my_site else: logger.info("job %s does not have site information!" % (jobid)) # Get job_submit_seq if we don't already have it if my_job_submit_seq is None: my_job_submit_seq = wf.find_jobid(jobid) if my_job_submit_seq is None: logger.warning("cannot find job_submit_seq for job: %s" % (jobid)) # Nothing else to do... return None # Make sure job has the updated state wf.update_job_state(jobid, sched_id, my_job_submit_seq, event, status, my_time) return my_job_submit_seq def process_dagman_out(wf, log_line): """ This function processes a log line from the dagman.out file and calls either the add function to generate a jobstate.log output line, or calls the corresponding workflow class method in order to track the various events that happen during the life of a workflow. It returns a tuple containing the new DAGMan output file, with the parent jobid and sequence number if we need to follow a sub-workflow. """ # Keep track of line count wf._line = wf._line + 1 # Make sure we have not already seen this line # This is used in the case of rescue dags, for skipping # what we have already seen in the dagman.out file if wf._line <= wf._last_processed_line: return # Strip trailing spaces, tabs, and CR and/or LF log_line = log_line.rstrip() # Check log_line for timestamp at the beginning timestamp_found = False my_expr = re_parse_timestamp.search(log_line) if my_expr is not None: # Found time stamp, let's assume valid log line curr_time = time.localtime() adj_time = list(curr_time) adj_time[1] = int(my_expr.group(1)) # Month adj_time[2] = int(my_expr.group(2)) # Day adj_time[3] = int(my_expr.group(5)) # Hours adj_time[4] = int(my_expr.group(6)) # Minutes adj_time[5] = int(my_expr.group(7)) # Seconds adj_time[8] = -1 # DST, let Python figure it out if my_expr.group(3) is not None: # New timestamp format adj_time[0] = int(my_expr.group(4)) + 2000 # Year wf._current_timestamp = time.mktime(adj_time) + adjustment timestamp_found = True else: # FIXME: Use method from utils.py, do not re-invent the wheel!
# FIXME: Slated for 3.1 my_expr = re_parse_iso_stamp.search(log_line) if my_expr is not None: # /^\s*(\d{4}).?(\d{2}).?(\d{2}).(\d{2}).?(\d{2}).?(\d{2})([.,]\d+)?([Zz]|[-+](\d{2}).?(\d{2}))/ dt = "%04d-%02d-%02d %02d:%02d:%02d" % (int(my_expr.group(1)), int(my_expr.group(2)), int(my_expr.group(3)), int(my_expr.group(4)), int(my_expr.group(5)), int(my_expr.group(6))) my_time = datetime.datetime(*(time.strptime(dt, "%Y-%m-%d %H:%M:%S")[0:6])) tz = my_expr.group(8) if tz.upper() != 'Z': # no zulu time, has zone offset my_offset = datetime.timedelta(hours=int(my_expr.group(9)), minutes=int(my_expr.group(10))) # adjust for time zone offset if tz[0] == '-': my_time = my_time + my_offset else: my_time = my_time - my_offset # Turn my_time into Epoch format wf._current_timestamp = int(calendar.timegm(my_time.timetuple())) + adjustment timestamp_found = True if timestamp_found: split_log_line = log_line.split(None, 3) if len(split_log_line) >= 3: logger.debug("debug: ## %d: %s" % (wf._line, split_log_line[2][:64])) # If in recovery mode, check if we reached the end of it if wf._skipping_recovery_lines: if log_line.find("...done with RECOVERY mode") >= 0: wf._skipping_recovery_lines = False return # Search for more content if re_parse_event.search(log_line) is not None: # Found ULOG Event my_expr = re_parse_event.search(log_line) # groups = jobid, event, sched_id my_event = my_expr.group(1) my_jobid = my_expr.group(2) my_sched_id = my_expr.group(3) my_job_submit_seq = add(wf, my_jobid, my_event, sched_id=my_sched_id) if my_event == "SUBMIT" and follow_subworkflows == True: # For SUBMIT ULOG events, check if this is a sub-workflow my_new_dagman_out = wf.has_subworkflow(my_jobid, wf_retry_dict) # Ok, return result to main loop return (my_new_dagman_out, my_jobid, my_job_submit_seq) elif re_parse_job_submit.search(log_line) is not None: # Found a DAGMan job submit event my_expr = re_parse_job_submit.search(log_line) # groups = jobid add(wf, my_expr.group(1), "DAGMAN_SUBMIT") elif re_parse_job_submit_error.search(log_line) is not None: # Found a DAGMan job submit error event if wf._last_submitted_job is not None: add(wf, wf._last_submitted_job, "SUBMIT_FAILED") else: logger.warning("found submit error in dagman.out, but last job is not set") elif re_parse_script_running.search(log_line) is not None: # Pre scripts are not regular Condor event # Starting of scripts is not a regular Condor event my_expr = re_parse_script_running.search(log_line) # groups = script, jobid my_script = my_expr.group(1).upper() my_jobid = my_expr.group(2) add(wf, my_jobid, "%s_SCRIPT_STARTED" % (my_script)) elif re_parse_script_done.search(log_line) is not None: my_expr = re_parse_script_done.search(log_line) # groups = script, jobid my_script = my_expr.group(1).upper() my_jobid = my_expr.group(2) if my_script == "PRE": # Special case for PRE_SCRIPT_TERMINATED, as Condor # does not generate a PRE_SCRIPT_TERMINATED ULOG event add(wf, my_jobid, "PRE_SCRIPT_TERMINATED") if re_parse_script_successful.search(log_line) is not None: # Remember success with artificial jobstate add(wf, my_jobid, "%s_SCRIPT_SUCCESS" % (my_script), status=0) elif re_parse_script_failed.search(log_line) is not None: # Remember failure with artificial jobstate my_expr = re_parse_script_failed.search(log_line) # groups = exit code (error status) try: my_exit_code = int(my_expr.group(1)) except ValueError: # Unable to convert exit code to integer -- should not happen logger.warning("unable to convert exit code to integer!") my_exit_code = 1 add(wf, my_jobid, 
"%s_SCRIPT_FAILURE" % (my_script), status=my_exit_code) else: # Ignore logger.warning("unknown pscript state: %s" % (log_line[-14:])) elif re_parse_job_failed.search(log_line) is not None: # Job has failed my_expr = re_parse_job_failed.search(log_line) # groups = jobid, schedid, jobstatus my_jobid = my_expr.group(1) my_sched_id = my_expr.group(2) try: my_jobstatus = int(my_expr.group(3)) except ValueError: # Unable to convert exit code to integet -- should not happen logger.warning("unable to convert exit code to integer!") my_jobstatus = 1 # remember failure with artificial jobstate add(wf, my_jobid, "JOB_FAILURE", sched_id=my_sched_id, status=my_jobstatus) elif re_parse_job_successful.search(log_line) is not None: # Job succeeded my_expr = re_parse_job_successful.search(log_line) my_jobid = my_expr.group(1) my_sched_id = my_expr.group(2) # remember success with artificial jobstate add(wf, my_jobid, "JOB_SUCCESS", sched_id=my_sched_id, status=0) elif re_parse_dagman_finished.search(log_line) is not None: # DAG finished -- done parsing my_expr = re_parse_dagman_finished.search(log_line) # groups = exit code try: wf._dagman_exit_code = int(my_expr.group(1)) except ValueError: # Cannot convert exit code to integer! logger.warning("cannot convert DAGMan's exit code to integer!") wf._dagman_exit_code = 0 wf._monitord_exit_code = 1 logger.info("DAGMan finished with exit code %s" % (wf._dagman_exit_code)) # Send info to database wf.change_wf_state("end") elif re_parse_dagman_condor_id.search(log_line) is not None: # DAGMan starting, capture its condor id my_expr = re_parse_dagman_condor_id.search(log_line) wf._dagman_condor_id = my_expr.group(1) if not keep_state: # Initialize workflow parameters wf.start_wf() elif re_parse_dagman_pid.search(log_line) is not None and not replay_mode: # DAGMan's pid, but only set pid if not running in replay mode # (otherwise pid may belong to another process) my_expr = re_parse_dagman_pid.search(log_line) # groups = DAGMan's pid try: wf._dagman_pid = int(my_expr.group(1)) except ValueError: logger.critical("cannot set pid: %s" % (my_expr.group(1))) sys.exit(42) logger.info("DAGMan runs at pid %d" % (wf._dagman_pid)) elif re_parse_dag_name.search(log_line) is not None: # Found the dag filename, read dag, and generate start event for the database my_expr = re_parse_dag_name.search(log_line) my_dag = my_expr.group(1) # Parse dag file logger.info("using dag %s" % (my_dag)) wf.parse_dag_file(my_dag) # Send the delayed workflow start event to database wf.change_wf_state("start") elif re_parse_condor_version.search(log_line) is not None: # Version of this logfile format my_expr = re_parse_condor_version.search(log_line) # groups = condor version, condor major my_condor_version = my_expr.group(1) my_condor_major = my_expr.group(2) logger.info("Using DAGMan version %s" % (my_condor_version)) elif (re_parse_condor_logfile.search(log_line) is not None or wf._multiline_file_flag == True and re_parse_condor_logfile_insane.search(log_line) is not None): # Condor common log file location, DAGMan 6.6 if re_parse_condor_logfile.search(log_line) is not None: my_expr = re_parse_condor_logfile.search(log_line) else: my_expr = re_parse_condor_logfile_insane.search(log_line) wf._condorlog = my_expr.group(1) logger.info("Condor writes its logfile to %s" % (wf._condorlog)) # Make a symlink for NFS-secured files my_log, my_base = utils.out2log(wf._run_dir, wf._out_file) if os.path.islink(my_log): logger.info("symlink %s already exists" % (my_log)) elif os.access(my_log, os.R_OK): 
logger.info("%s is a regular file, not touching" % (my_base)) else: logger.info("trying to create local symlink to common log") if os.access(wf._condorlog, os.R_OK) or not os.access(wf._condorlog, os.F_OK): if os.access(my_log, os.R_OK): try: os.rename(my_log, "%s.bak" % (my_log)) except OSError: logger.warning("error renaming %s to %s.bak" % (my_log, my_log)) try: os.symlink(wf._condorlog, my_log) except OSError: logger.info("unable to symlink %s" % (wf._condorlog)) else: logger.info("symlink %s -> %s" % (wf._condorlog, my_log)) else: logger.info("%s exists but is not readable!" % (wf._condorlog)) # We only expect one of such files wf._multiline_file_flag = False elif re_parse_multiline_files.search(log_line) is not None: # Multiline user log files, DAGMan > 6.6 wf._multiline_file_flag = True elif log_line.find("Running in RECOVERY mode...") >= 0: # Entering recovery mode, skip lines until we reach the end wf._skipping_recovery_lines = True return else: # Could not parse timestamp logger.info( "time stamp format not recognized" ) def sleeptime(retries): """ purpose: compute suggested sleep time as a function of retries paramtr: $retries (IN): number of retries so far returns: recommended sleep time in seconds """ if retries < 5: my_y = 1 elif retries < 50: my_y = 5 elif retries < 500: my_y = 30 else: my_y = 60 return my_y # # --- signal handlers ------------------------------------------------------------------- # def prog_sighup_handler(signum, frame): """ This function catches SIGHUP. """ logger.info("ignoring signal %d" % (signum)) def prog_sigint_handler(signum, frame): """ This function catches SIGINT. """ logger.warning("graceful exit on signal %d" % (signum)) # Go through all workflows we are tracking for my_wf in wfs: if my_wf.wf is not None: # Update monitord exit code if my_wf.wf._monitord_exit_code == 0: my_wf.wf._monitord_exit_code = 1 # Close open files my_wf.wf.end_workflow() # All done! sys.exit(1) def prog_sigusr1_handler(signum, frame): """ This function increases the log level to the next one. """ global g_isdbg global start_server cur_level = logger.getEffectiveLevel() try: idx = _LEVELS.index(cur_level) if idx + 1 < len(_LEVELS): logger.setLevel(_LEVELS[idx + 1]) except ValueError: logger.setLevel(logging.INFO) logger.error("Unknown current level = %s, setting to INFO" % (cur_level)) g_isdbg = logger.isEnabledFor(logging.DEBUG) # Check debugging socket if not socket_enabled: start_server = True def prog_sigusr2_handler(signum, frame): """ This function decreases the log level to the previous one. """ global g_isdbg cur_level = logger.getEffectiveLevel() try: idx = _LEVELS.index(cur_level) if idx > 0: logger.setLevel(_LEVELS[idx - 1]) except ValueError: logger.setLevel(logging.WARN) logger.error("Unknown current level = %s, setting to WARN" % (cur_level)) g_isdbg = logger.isEnabledFor(logging.DEBUG) # # --- main ------------------------------------------------------------------------------ # # Rotate log file, if it exists utils.rotate_log_file(logfile) # Turn into daemon process if nodaemon == 0: utils.daemonize() # Open logfile as stdout try: sys.stdout = open(logfile, "a", 0) except IOError: logger.critical("could not open %s!" % (logfile)) sys.exit(1) elif nodaemon == 2: utils.keep_foreground() # Open logfile as stdout try: sys.stdout = open(logfile, "a", 0) except IOError: logger.critical("could not open %s!" 
% (logfile)) sys.exit(1) else: # Hack to make stdout unbuffered sys.stdout = os.fdopen(sys.stdout.fileno(), "w", 0) # Close stdin sys.stdin.close() # dup stderr onto stdout sys.stderr = sys.stdout # Touch logfile with start event print print (utils.isodate(time.time()) + " - pegasus-monitord starting - pid %d " % (os.getpid())).ljust(80, "-") print # Ignore dying shells signal.signal(signal.SIGHUP, prog_sighup_handler) # Die nicely when asked to (Ctrl+C, system shutdown) signal.signal(signal.SIGINT, prog_sigint_handler) # Permit dynamic changes of debug level signal.signal(signal.SIGUSR1, prog_sigusr1_handler) signal.signal(signal.SIGUSR2, prog_sigusr2_handler) # Log recover mode if os.access(os.path.join(run, MONITORD_RECOVER_FILE), os.F_OK): logger.warning("monitord entering recover mode...") # Create wf_event_sink object if no_events: wf_event_sink = None # Avoid parsing kickstart output if not # generating bp file or database events else: restart_logging = False if replay_mode or os.access(os.path.join(run, MONITORD_RECOVER_FILE), os.F_OK): restart_logging = True try: wf_event_sink = eo.create_wf_event_sink(event_dest, db_stats=db_stats, restart=restart_logging, enc=encoding) atexit.register(finish_stampede_loader) except eo.SchemaVersionError: logger.warning("****************************************************") logger.warning("Detected database schema version mismatch!") logger.warning("cannot create events output... disabling event output!") logger.warning("****************************************************") wf_event_sink = None except: logger.error(traceback.format_exc()) logger.error("cannot create events output... disabling event output!") wf_event_sink = None else: try: if restart_logging and isinstance(wf_event_sink, eo.DBEventSink): # If in replay mode or recovery mode and it is a DB, # attempt to purge wf_uuid_first eo.purge_wf_uuid_from_database(run, event_dest) except: logger.error(traceback.format_exc()) logger.error("error flushing previous wf_uuid from database... continuing...") logger.error("cannot create events output... disabling event output!") wf_event_sink = None # Say hello logger.info("starting [%s], using pid %d" % (revision, os.getpid())) if millisleep is not None: logger.info("using simulation delay of %d ms" % (millisleep)) # Only create server socket if asked... if output_dir is None: sockfn = os.path.join(os.path.dirname(out), "monitord.sock") else: sockfn = os.path.join(os.path.dirname(out), output_dir, "monitord.sock") if socket_enabled: # Create server socket for communication with site selector server = socket_interface.server_socket(49152, 65536) # Take care of closing socket when we exit atexit.register(socket_exit_handler) # Save our address so that site selectors know where to connect if server is not None: my_host, my_port = server.getsockname() try: OUT = open(sockfn, "w") OUT.write("%s %d\n" % (my_host, my_port)) except IOError: logger.warning("unable to write %s!" 
% (sockfn)) else: OUT.close() # For future reference plus = '' if "LD_LIBRARY_PATH" in os.environ: for my_path in os.environ["LD_LIBRARY_PATH"].split(':'): logger.info("env: LD_LIBRARY_PATH%s=%s" % (plus, my_path)) plus = '+' if "GLOBUS_TCP_PORT_RANGE" in os.environ: logger.info("env: GLOBUS_TCP_PORT_RANGE=%s" % (os.environ["GLOBUS_TCP_PORT_RANGE"])) else: logger.info("env: GLOBUS_TCP_PORT_RANGE=") if "GLOBUS_TCP_SOURCE_RANGE" in os.environ: logger.info("env: GLOBUS_TCP_SOURCE_RANGE=%s" % (os.environ["GLOBUS_TCP_SOURCE_RANGE"])) else: logger.info("env: GLOBUS_TCP_SOURCE_RANGE=") if "GLOBUS_LOCATION" in os.environ: logger.info("env: GLOBUS_LOCATION=%s" % (os.environ["GLOBUS_LOCATION"])) else: logger.info("env: GLOBUS_LOCATION=") # Build sub-workflow retry filename if output_dir is None: wf_retry_fn = os.path.join(run, MONITORD_WF_RETRY_FILE) wf_notification_fn_prefix = run else: wf_retry_fn = os.path.join(run, output_dir, MONITORD_WF_RETRY_FILE) wf_notification_fn_prefix = os.path.join(run, output_dir) # Empty sub-workflow retry information if in replay mode if replay_mode: try: os.unlink(wf_retry_fn) except OSError: # Nothing to do... pass # Link wf_retry_dict to persistent storage try: wf_retry_dict = shelve.open(wf_retry_fn) atexit.register(close_wf_retry_file) except: logger.critical("cannot create persistent storage file for sub-workflow retry information... exiting...") sys.exit(1) # Open notifications' log file if do_notifications == True: monitord_notifications = notifications.Notifications(wf_notification_fn_prefix, max_parallel_notifications=max_parallel_notifications, notifications_timeout=notifications_timeout) atexit.register(finish_notifications) # Ok! Let's start now... # Instantiate workflow class wf = Workflow(run, out, database=wf_event_sink, jsd=jsd, enable_notifications=do_notifications, replay_mode=replay_mode, output_dir=output_dir, store_stdout_stderr=store_stdout_stderr, notifications_manager=monitord_notifications) # If everything went well, create a workflow entry for this workflow if wf._monitord_exit_code == 0: workflow_entry = WorkflowEntry() workflow_entry.run_dir = run workflow_entry.dagman_out = out workflow_entry.wf = wf # And add it to our list of workflows wfs.append(workflow_entry) if replay_mode: tracked_workflows.append(out) # Also set the root workflow id root_wf_id = wf._wf_uuid # # --- main loop begin -------------------------------------------------------------------- # # Loop while we have workflows to follow... while (len(wfs) > 0): # Go through each of our workflows for workflow_entry in wfs: # Check if we are waiting for the dagman.out file to appear... if workflow_entry.DMOF is None: # Yes... check if it has shown up... # First, we test if the file is already there, in case we are running in replay mode if replay_mode: try: f_stat = os.stat(workflow_entry.dagman_out) except OSError: logger.critical("error: workflow not started, %s does not exist, dropping this workflow..." 
% (workflow_entry.dagman_out)) workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue try: f_stat = os.stat(workflow_entry.dagman_out) except OSError, e: if errno.errorcode[e.errno] == 'ENOENT': # File doesn't exist yet, keep looking workflow_entry.n_retries = workflow_entry.n_retries + 1 if workflow_entry.n_retries > 100: # We tried too long, just exit logger.critical("%s never made an appearance" % (workflow_entry.dagman_out)) workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue # Continue waiting logger.info("waiting for dagman.out file, retry %d" % (workflow_entry.n_retries)) workflow_entry.sleep_time = time.time() + sleeptime(workflow_entry.n_retries) else: # Another error logger.critical("stat %s" % (workflow_entry.dagman_out)) workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue except: # Another exception logger.critical("stat %s" % (workflow_entry.dagman_out)) workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue else: # Found it, open dagman.out file try: workflow_entry.DMOF = open(workflow_entry.dagman_out, "r") except IOError: logger.critical("opening %s" % (workflow_entry.dagman_out)) workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue if workflow_entry.DMOF is not None: # Say Hello logger.debug("wake up and smell the silicon") try: f_stat = os.stat(workflow_entry.dagman_out) logger.debug("stating file: %s" % (workflow_entry.dagman_out)) except OSError: # stat error logger.critical("stat %s" % (workflow_entry.dagman_out)) workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue # f_stat[6] is the file size if f_stat[6] == workflow_entry.ml_current: # Death by natural causes if workflow_entry.wf._dagman_exit_code is not None and not replay_mode: logger.info("workflow %s ended" % (workflow_entry.dagman_out)) workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue # Check if DAGMan is alive -- if we know where it lives if workflow_entry.ml_retries > 10 and workflow_entry.wf._dagman_pid > 0: # Just send signal 0 to check if the pid is ours try: os.kill(int(workflow_entry.wf._dagman_pid), 0) except OSError: logger.critical("DAGMan is gone! 
Sudden death syndrome detected!") workflow_entry.wf._monitord_exit_code = 42 workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue # No change, wait a while workflow_entry.ml_retries = workflow_entry.ml_retries + 1 if workflow_entry.ml_retries > 17280: # Too long without change logger.critical("too long without action, stopping workflow %s" % (workflow_entry.dagman_out)) workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue # In replay mode, we can be a little more aggresive if replay_mode and workflow_entry.ml_retries > 5: # We are in replay mode, so we should have everything here logger.info("no more action, stopping workflow %s" % (workflow_entry.dagman_out)) workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue elif f_stat[6] < workflow_entry.ml_current: # Truncated file, booh! logger.critical("%s file truncated, time to exit" % (workflow_entry.dagman_out)) workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue elif f_stat[6] > workflow_entry.ml_current: # We have something to read! try: ml_rbuffer = workflow_entry.DMOF.read(32768) except: # Error while reading logger.critical("while reading %s" % (workflow_entry.dagman_out)) workflow_entry.wf._monitord_exit_code = 42 workflow_entry.delete_workflow = True # Close jobstate.log, if any if workflow_entry.wf is not None: workflow_entry.wf.end_workflow() # Go to the next workflow_entry in the for loop continue if len(ml_rbuffer) == 0: # Detected EOF logger.critical("detected EOF, resetting position to %d" % (workflow_entry.ml_current)) workflow_entry.DMOF.seek(workflow_entry.ml_current) else: # Something in the read buffer, merge it with our buffer workflow_entry.ml_buffer = workflow_entry.ml_buffer + ml_rbuffer # Look for end of line ml_pos = workflow_entry.ml_buffer.find('\n') while (ml_pos >= 0): # Take out 1 line, and adjust buffer process_output = process_dagman_out(workflow_entry.wf, workflow_entry.ml_buffer[0:ml_pos]) workflow_entry.ml_buffer = workflow_entry.ml_buffer[ml_pos+1:] ml_pos = workflow_entry.ml_buffer.find('\n') # Do we need to start following another workflow? if type(process_output) is tuple and len(process_output) == 3 and process_output[0] is not None: # Unpack the output tuple new_dagman_out = process_output[0] parent_jobid = process_output[1] parent_jobseq = process_output[2] # Only if we are not already tracking it... tracking_already = False new_dagman_out = os.path.abspath(new_dagman_out) # Add the current run directory in case this is a relative path new_dagman_out = os.path.join(workflow_entry.run_dir, new_dagman_out) if replay_mode: # Check if we started tracking this subworkflow in the past if new_dagman_out in tracked_workflows: # Yes, no need to do it again... 
logger.info("already tracking workflow: %s, not adding" % (new_dagman_out)) tracking_already = True else: # Not in replay mode, let's check if we are currently tracking this subworkflow for my_wf in wfs: if my_wf.dagman_out == new_dagman_out and not my_wf.delete_workflow: # Found it, exit loop tracking_already = True logger.info("already tracking workflow: %s, not adding" % (new_dagman_out)) break if not tracking_already: logger.info("found new workflow to track: %s" % (new_dagman_out)) # Not tracking this workflow, let's try to add it to our list new_run_dir = os.path.dirname(new_dagman_out) parent_wf_id = workflow_entry.wf._wf_uuid new_wf = Workflow(new_run_dir, new_dagman_out, database=wf_event_sink, parent_id=parent_wf_id, parent_jobid=parent_jobid, parent_jobseq=parent_jobseq, root_id=root_wf_id, jsd=jsd, replay_mode=replay_mode, enable_notifications=do_notifications, output_dir=output_dir, store_stdout_stderr=store_stdout_stderr, notifications_manager=monitord_notifications) if new_wf._monitord_exit_code == 0: new_workflow_entry = WorkflowEntry() new_workflow_entry.run_dir = new_run_dir new_workflow_entry.dagman_out = new_dagman_out new_workflow_entry.wf = new_wf # And add it to our list of workflows wfs.append(new_workflow_entry) # Don't forget to add it to our list, so we don't do it again in replay mode if replay_mode: tracked_workflows.append(new_dagman_out) else: # Just make sure we link the workflow to its parent job, # which in this case is a job retry... if os.path.dirname(new_dagman_out) in Workflow.wf_list: workflow_entry.wf.map_subwf(parent_jobid, parent_jobseq, Workflow.wf_list[os.path.dirname(new_dagman_out)]) else: logger.warning("cannot link job %s:%s to its subwf because we don't have info for dir: %s" % (parent_jobid, parent_jobseq, os.path.dirname(new_dagman_out))) if millisleep is not None: if server is not None: socket_interface.check_request(server, wfs, millisleep / 1000.0) else: time.sleep(millisleep / 1000.0) ml_pos = workflow_entry.DMOF.tell() logger.info("processed chunk of %d byte" % (ml_pos - workflow_entry.ml_current -len(workflow_entry.ml_buffer))) workflow_entry.ml_current = ml_pos workflow_entry.ml_retries = 0 # Write workflow progress for recovery mode workflow_entry.wf.write_workflow_progress() workflow_entry.sleep_time = time.time() + sleeptime(workflow_entry.ml_retries) # End of main for loop, still in the while loop... # Print number of workflows we currently have logger.info("currently tracking %d workflow(s)..." 
% (len(wfs))) # Go through the workflows again, and finish any marked ones wf_index = 0 while wf_index < len(wfs): workflow_entry = wfs[wf_index] if workflow_entry.delete_workflow == True: logger.info("finishing workflow: %s" % (workflow_entry.dagman_out)) # Close dagman.out file, if any if workflow_entry.DMOF is not None: workflow_entry.DMOF.close() # # Close jobstate.log, if any # if workflow_entry.wf is not None: # workflow_entry.wf.end_workflow() # Delete this workflow from our list deleted_entry = wfs.pop(wf_index) # Don't move index to next one else: # Mode index to next workflow wf_index = wf_index + 1 # Check if we need to start the socket server if not socket_enabled and start_server: # Reset flag start_server = False # Create server socket for communication with site selector server = socket_interface.server_socket(49152, 65536) # Take care of closing socket when we exit atexit.register(socket_exit_handler) # Save our address so that site selectors know where to connect if server is not None: # Socket open, make sure we set out socket_enabled flag socket_enabled = True my_host, my_port = server.getsockname() try: OUT = open(sockfn, "w") OUT.write("%s %d\n" % (my_host, my_port)) except IOError: logger.warning("unable to write %s!" % (sockfn)) else: OUT.close() # Periodically check for service requests if server is not None: socket_interface.check_request(server, wfs) # Service notifications once per while loop, in the future we can # move this into the for loop and service notifications more often if do_notifications == True and monitord_notifications is not None: logger.info("servicing notifications...") monitord_notifications.service_notifications() # Skip sleeping, if we have no more workflow to track... if len(wfs) == 0: continue # All done... let's figure out how long to sleep... time_to_sleep = time.time() + MAX_SLEEP_TIME for workflow_entry in wfs: # Figure out if we have anything more urgent to do if workflow_entry.sleep_time < time_to_sleep: time_to_sleep = workflow_entry.sleep_time # Sleep if not replay_mode: time_to_sleep = time_to_sleep - time.time() if time_to_sleep < 0: time_to_sleep = 0 time.sleep(time_to_sleep) # # --- main loop end ----------------------------------------------------------------------- # if socket_enabled and server is not None: # Finish trailing connection requests while (socket_interface.check_request(server, wfs)): pass server.close() server = None try: os.unlink(sockfn) except OSError: # Just be silent pass if do_notifications == True and monitord_notifications is not None: # Finish pending notifications logger.info("finishing notifications...") while monitord_notifications.has_active_notifications() or monitord_notifications.has_pending_notifications(): monitord_notifications.service_notifications() time.sleep(SLEEP_WAIT_NOTIFICATION) logger.info("finishing notifications... done!") # done logger.info("finishing, exit with 0") # Touch logfile with end event print print (utils.isodate(time.time()) + " - pegasus-monitord ending - pid %d " % (os.getpid())).ljust(80, "-") print sys.exit(0) pegasus-wms_4.0.1+dfsg/bin/pegasus-tc-client0000755000175000017500000000102211757531136020053 0ustar ryngerynge#!/bin/bash # # Query, Add, modify delete a TC. # # $Id: pegasus-tc-client 5014 2012-02-24 22:17:17Z vahi $ PEGASUS_CONFIG="`dirname $0`/pegasus-config" eval `$PEGASUS_CONFIG --sh-dump` . 
$PEGASUS_SHARE_DIR/common.sh # run java program ${JAVA} \ "-Dpegasus.home.sysconfdir=$PEGASUS_CONF_DIR" \ "-Dpegasus.home.bindir=$PEGASUS_BIN_DIR" \ "-Dpegasus.home.sharedstatedir=$PEGASUS_SHARE_DIR" \ "-Dpegasus.home.schemadir=$PEGASUS_SCHEMA_DIR" \ $addon edu.isi.pegasus.planner.client.TCClient "$@" pegasus-wms_4.0.1+dfsg/bin/pegasus-remove0000755000175000017500000001172011757531136017474 0ustar ryngerynge#!/usr/bin/env perl # # This file or a portion of this file is licensed under the terms of # the Globus Toolkit Public License, found in file GTPL, or at # http://www.globus.org/toolkit/download/license.html. This notice must # appear in redistributions of this file, with or without modification. # # Redistributions of this Software, with or without modification, must # reproduce the GTPL in: (1) the Software, or (2) the Documentation or # some other similar material which is provided with the Software (if # any). # # Copyright 1999-2004 University of Chicago and The University of # Southern California. All rights reserved. # # Author Gaurang Mehta gmehta@isi.edu # Revision : $Revision: 4486 $ use 5.006; use strict; use Carp; use Cwd; # standard module since 5.6.0 or so use File::Basename; # standard module since 5.005 use File::Spec; # standard module since 5.005 or 5.6.0 use Getopt::Long qw(:config bundling no_ignore_case); BEGIN { my $pegasus_config = File::Spec->catfile( dirname($0), 'pegasus-config' ); eval `$pegasus_config --perl-dump`; die("Unable to eval pegasus-config output. $@") if $@; } use Pegasus::Common; # debug off $main::debug = 0; $_ = '$Revision: 4486 $'; # don't edit, automatically updated by CVS $main::revision=$1 if /Revision:\s+([0-9.]+)/o; sub myversion() { my $version = version(); print "Pegasus $version, @{[basename($0)]} $main::revision\n"; exit 0; } sub usage(;$) { my $msg = shift; my $flag = defined $msg && lc($msg) ne 'help'; if ( $flag ) { my $tty = -t STDOUT; print "\033[1m" if $tty; print "ERROR: $msg\n"; print "\033[0m" if $tty; } print << "EOF"; Usage: @{[basename($0)]} -d | dagdir pegasus_remove helps you remove an entire workflow. Optional arguments: -d|--dagid N The id of the dag to be removed. -v|--verbose Enter verbose mode, default is not. -V|--version Print version number and exit. Mandatory arguments: dagdir The directory for the dag that you want removed. You may use period (.) for the current working directory. EOF exit( $flag ? 1 : 0 ); } sub handler { # purpose: generic signal handler # paramtr: whatever the OS sends a signal handler and Perl makes of it # returns: dies my $sig = shift; # you should not do this in signal handler, but what the heck warn "# Signal $sig found\n" if $main::debug; die "ERROR: Killed by SIG$sig\n"; } # # --- main ------------------------------------------------- # # FIXME: Why do you need signal handlers at all for this? $SIG{HUP} = \&handler; $SIG{INT} = \&handler; $SIG{TERM} = \&handler; $SIG{QUIT} = \&handler; my ($dagid); my $condor_rm=find_exec('condor_rm'); GetOptions( "dagid|d=s" => \$dagid, "version|V" => \&myversion, "verbose|v" => \$main::debug, "help|h|?" => \&usage ); my $run = shift; $run = getcwd() unless ( defined $run || defined $dagid ); # 20110519 (jsv): partial relative path may break the chdir stuff below $run = Cwd::abs_path($run) if defined $run; if ( defined $run ) { # extra sanity usage( "$run is not a directory." ) unless -d $run; usage( "$run is not accessible." ) unless -r _; # where were we... 
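# Note on the dagid extraction further below: DAGMan announces itself in
# the dagman.out file with a line of roughly this shape (an illustrative
# example; the cluster id 123 is hypothetical):
#
#   12/31 10:00:00 ** condor_scheduniv_exec.123.0 (CONDOR_DAGMAN) STARTING UP
#
# so (split)[3] picks out "condor_scheduniv_exec.123.0", and the nested
# split on "." with a limit of 2 keeps "123.0" -- the Condor id that is
# later handed to condor_rm.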
my $here = File::Spec->curdir(); $SIG{'__DIE__'} = sub { chdir($here) if defined $here; }; chdir($run) || die "ERROR: Cannot change to directory $run: $!\n"; my %config = slurp_braindb( $run ) or die "ERROR: Please ensure that either the run directory is provided as an argument or the dagid.\n Alternatively run the pegasus-remove command from within the run directory without any arguments\n"; my @rescue = check_rescue($run,$config{dag}); if ( @rescue > 0 ) { my (@stat,%rescue,$maxsize); foreach my $fn ( @rescue ) { if ( (@stat = stat($fn)) > 0 ) { $rescue{$fn} = [ @stat ]; $maxsize = $stat[7] if $maxsize < $stat[7]; } } print "\n\nDetected the presence of Rescue DAGs:\n"; my $width = log10($maxsize); foreach my $fn ( @rescue ) { printf( " %s %*u %s\n", isodate($rescue{$fn}[9]), $width, $rescue{$fn}[7], basename($fn) ); } # overwrite with "latest" (read: longest basename) rescue DAG $config{dag} = $rescue[$#rescue]; print "\nWILL USE ", $config{dag}, "\n\n"; } my $daglogfile = $config{dag} . ".dagman.out"; open( DID, "<$daglogfile" ) || die "Error: Cannot open file $daglogfile: $!\n"; while (<DID>) { if ( /\(CONDOR_DAGMAN\) STARTING UP/ ) { # this was written by a Python programmer: $dagid = (split /\./, (split)[3], 2)[1]; } } # return to where we were chdir($here); } if ( defined $dagid ) { # construct the command line string my @arg = ( $condor_rm, $dagid ); warn "# run @arg\n" if $main::debug; # DO NOT call pipe_out_cmd, if you don't need popen() system { $arg[0] } @arg; print "\nResult: ", parse_exit($?), "\n"; exit( $? == 0 ? 0 : 42 ); } else { usage("You must provide either a dagid or dag directory to remove a workflow."); } pegasus-wms_4.0.1+dfsg/bin/pegasus-version0000755000175000017500000000055711757531136017672 0ustar ryngerynge#!/bin/bash # # Show the version number. May be used to show even more (e.g. build) # $Id: pegasus-version 4494 2011-08-26 00:27:39Z rynge $ # PEGASUS_CONFIG="`dirname $0`/pegasus-config" eval `$PEGASUS_CONFIG --sh-dump` . 
$PEGASUS_SHARE_DIR/common.sh # run java program ${JAVA} -Dpegasus.home=$PEGASUS_HOME $addon edu.isi.pegasus.planner.client.VersionNumber "$@" pegasus-wms_4.0.1+dfsg/bin/pegasus-plots0000755000175000017500000005306411757531136017347 0ustar ryngerynge#!/usr/bin/env python import os import re import sys import logging import optparse import commands import subprocess import shutil from datetime import timedelta import traceback # Initialize logging object logger = logging.getLogger() # Set default level to INFO logger.setLevel(logging.INFO) # Use pegasus-config to find our Pegasus environment bin_dir = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0]))) pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --python-dump" config = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] exec config # We also need the externals lib pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --noeoln --python-externals" lib_ext_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] # Insert the lib directories in our search path os.sys.path.insert(0, lib_ext_dir) import Pegasus.common from Pegasus.plots_stats import utils as plot_utils from Pegasus.plots_stats.plots import populate from Pegasus.plots_stats.plots import pegasus_gantt from Pegasus.plots_stats.plots import pegasus_host_over_time from Pegasus.plots_stats.plots import pegasus_breakdown from Pegasus.plots_stats.plots import pegasus_time from Pegasus.tools import utils # Regular expressions re_parse_property = re.compile(r'([^:= \t]+)\s*[:=]?\s*(.*)') # Global variables---- prog_base = os.path.split(sys.argv[0])[1] # Name of this program NEW_LINE_STR ="\n" MAX_GRAPH_NODES = 100 calc_dax_graph = False calc_dag_graph = False calc_gantt_chart = False calc_host_chart = False calc_time_chart = False calc_breakdown_chart = False max_graph_nodes = MAX_GRAPH_NODES DEFAULT_OUTPUT_DIR = "plots" pegasus_env_path = { 'pegasus_bin_dir' : pegasus_bin_dir, 'pegasus_conf_dir' : pegasus_conf_dir, 'pegasus_java_dir' : pegasus_java_dir, 'pegasus_perl_dir' : pegasus_perl_dir, 'pegasus_python_dir' : pegasus_python_dir, 'pegasus_php_dir' : pegasus_php_dir, 'pegasus_javascript_dir': pegasus_javascript_dir, 'pegasus_share_dir' : pegasus_share_dir } def setup_logger(level_str): """ Sets the logging level @param level_str logging level """ level_str = level_str.lower() if level_str == "debug": logger.setLevel(logging.DEBUG) if level_str == "warning": logger.setLevel(logging.WARNING) if level_str == "error": logger.setLevel(logging.ERROR) if level_str == "info": logger.setLevel(logging.INFO) populate.setup_logger(level_str) return def setup(output_dir): """ Set up the pegaus plots. @param output_dir output directory path """ src_img_path = os.path.join(pegasus_share_dir, "plots/images/common/") dest_img_path = os.path.join(output_dir, "images/") if os.path.isdir(dest_img_path): logger.warning("Image directory exists. Deleting... " + dest_img_path) try: shutil.rmtree(dest_img_path) except: logger.error("Unable to remove image directory." 
+ dest_img_path) sys.exit(1) # Need to create the path in case it doesn't exist yet when running under Python < 2.5 if sys.version_info < (2, 5): if not os.path.isdir(output_dir): os.makedirs(output_dir) shutil.copytree (src_img_path, dest_img_path) def generate_dag_graph(wf_info, output_dir): """ Generates the DAG graph @wf_info WorkflowInfo object reference @output_dir the output directory path """ dag_file_path = wf_info.dag_file_path title = str(wf_info.wf_uuid) + " (" + str(wf_info.dax_label) +")" if dag_file_path is not None: dag2dot_file_path = os.path.join(pegasus_share_dir, "visualize/dag2dot") dot_file_path = os.path.join(output_dir, wf_info.wf_uuid + ".dot") dag_cmd = dag2dot_file_path dag_cmd += " --output " + dot_file_path dag_cmd += " " + dag_file_path logger.debug("Executing command :\n" + dag_cmd) status, output = commands.getstatusoutput(dag_cmd) if status == 0: logger.info("Finished executing command." ) else: logger.warn("Failed to generate dag graph for workflow " + title) logger.debug("%s: %d:%s" % (dag_cmd, status, output)) return None png_file_path = os.path.join(output_dir, wf_info.wf_uuid + ".png") dot_png_cmd = utils.find_exec("dot") if dot_png_cmd is None: logger.warn("dot is not present . Unable to create chart in png format. ") return None dot_png_cmd += " -Tpng -o" + png_file_path dot_png_cmd += " " + dot_file_path logger.debug("Executing command :\n" + dot_png_cmd) status, output = commands.getstatusoutput(dot_png_cmd) if status == 0: logger.debug("Finished executing command." ) return status else: logger.warn("%s: %d:%s" % (dot_png_cmd, status, output)) else: logger.warn("Unable to find the dag file for workflow " + title) return None def generate_dax_graph(wf_info, output_dir): """ Generates the DAX graph @wf_info WorkflowInfo object reference @output_dir the output directory path """ dax_file_path = wf_info.dax_file_path title = str(wf_info.wf_uuid) + " (" + str(wf_info.dax_label) +")" if dax_file_path is not None: dax2dot_file_path = os.path.join(pegasus_share_dir, "visualize/dax2dot") dot_file_path = os.path.join(output_dir, wf_info.wf_uuid + ".dot") dax_cmd = dax2dot_file_path dax_cmd +=" --output "+ dot_file_path dax_cmd += " "+ dax_file_path logger.debug("Executing command :\n" + dax_cmd) status, output = commands.getstatusoutput(dax_cmd) if status == 0: logger.debug("Finished executing command." ) else: logger.warn("Failed to generate dax graph for workflow " + title) logger.debug("%s: %d:%s" % (dax_cmd, status, output)) return None # Find dot command dot_png_cmd = utils.find_exec("dot") if dot_png_cmd is None: logger.warn("dot is not present . Unable to create chart in png format. ") return None png_file_path = os.path.join(output_dir, wf_info.wf_uuid + ".png") dot_png_cmd +=" -Tpng -o" + png_file_path dot_png_cmd += " "+ dot_file_path logger.debug("Executing command :\n" + dot_png_cmd) status, output = commands.getstatusoutput(dot_png_cmd) if status == 0: logger.debug("Finished executing command." ) return status else: logger.warn("Failed to generate dax graph in png format for workflow " + wf_info.wf_uuid) logger.debug("%s: %d:%s" % (dot_png_cmd, status, output)) else: logger.warn("Unable to find the dax file for workflow " + title) return None def create_image_gallery(file_name, wf_uuid_list, wf_parent_uuid_list, uuid_image_map, wf_uuid_label_map, isDax): """ Creates the image gallery. 
@param file_name the output file name @param wf_uuid_list the list of all workflow id's @param wf_parent_uuid_list the list of parent workflow id's corresponding to each workflow @param uuid_image_map uuid and image file mapping @param wf_uuid_label_map uuid and label mapping @param isDax boolean indicating whether it is a dax file or not. """ wf_uuid_parent_ref = None try: fh = open(file_name, "w") content = """ """ + plot_utils.create_home_button() + """ """ if isDax: content += " DAX Graph " + NEW_LINE_STR else: content += " DAG Graph " + NEW_LINE_STR for index in range(len(wf_uuid_list)): uuid = wf_uuid_list[index] image = uuid_image_map[index] label = wf_uuid_label_map[index] parent_uuid = wf_parent_uuid_list[index] if parent_uuid is None: content += " Top level workflow (" + uuid + ") " else: if parent_uuid != wf_uuid_parent_ref: wf_uuid_parent_ref = parent_uuid content += " Sub workflow's of workflow (" + parent_uuid + ") " content += " " if image is None: content += "\n" content += "\n" content += "\n" content += " \n" content += "wf_uuid :" + uuid + " " if isDax: content += "dax label :" + label else: if image is not None: content += "dag label :" + image content += " " else: content += "" content += "" content += "\n\n" content += " \n" content += "wf_uuid :" + uuid + " " if isDax: content += "dax label :" + label else: if label is not None: content += "dag label :" + label content += " " content += " " content += """ """ fh.write(content) except IOError: logger.error("Unable to write to file " + file_name) sys.exit(1) else: fh.close() def createOuterhtml(wf_stats, wf_info, submit_dir, output_dir, log_level): """ Generates the outer html file which links to all the charts and graphs @param wf_info WorkflowInfo object reference @param wf_stats StampedeStatistics reference @param submit_dir submit directory @param output_dir the output directory path @param log_level logging level """ wf_uuid = wf_info.wf_uuid title = str(wf_info.wf_uuid) + " (" + str(wf_info.dax_label) + ")" data_file = os.path.join(output_dir, "index.html") gantt_chart_parent_file = os.path.join("gantt_chart/" + wf_uuid + ".html") dag_graph_parent_file = os.path.join("dag_graph/" + wf_uuid + ".html") dax_graph_parent_file = os.path.join("dax_graph/" + wf_uuid + ".html") host_chart_parent_file = os.path.join("host_chart/" + wf_uuid + ".html") breakdown_chart_parent_file = os.path.join("breakdown_chart/" + wf_uuid + ".html") time_summary_output_dir = None breakdown_summary_output_dir = None if calc_time_chart: time_summary_output_dir = output_dir pegasus_time.setup(submit_dir, time_summary_output_dir, pegasus_env_path, log_level) if calc_breakdown_chart: breakdown_summary_output_dir = output_dir pegasus_breakdown.setup(submit_dir, breakdown_summary_output_dir, pegasus_env_path, log_level) try: fh = open(data_file, "w") content = """ Pegasus plots """ if calc_time_chart or calc_breakdown_chart: content += """ """ if calc_gantt_chart: content += "Workflow Execution Gantt Chart (Per workflow) " + NEW_LINE_STR if calc_host_chart: content += "Host Over Time Chart (Per workflow) " + NEW_LINE_STR if calc_breakdown_chart: content += """Invocation Breakdown Chart (Across workflows) """ + NEW_LINE_STR content += "Invocation Breakdown Chart (Per workflow) " + NEW_LINE_STR if calc_time_chart: content += """Time Chart (Across workflows) """ + NEW_LINE_STR if calc_dax_graph: content += "DAX graph " + NEW_LINE_STR if calc_dag_graph: content += "DAG graph " + NEW_LINE_STR content += """ Workflow environment """ + NEW_LINE_STR content += """ Workflow environment """ content += plot_utils.print_property_table(wf_info.wf_env, False, ":") if calc_breakdown_chart: content += """ Invocation breakdown chart (Across workflows) """ populate.populate_transformation_details(wf_stats, wf_info) logger.debug("Generating breakdown chart for the workflow " + title + " ... ") content += pegasus_breakdown.create_breakdown_plot(wf_info, breakdown_summary_output_dir) if calc_time_chart: populate.populate_time_details(wf_stats, wf_info) logger.debug("Generating time chart for the workflow " + title + " ... ") content += """ Time chart (Across workflows) """ content += pegasus_time.create_time_plot(wf_info, time_summary_output_dir) content += """
""" fh.write(content) plots_output = NEW_LINE_STR + "SUMMARY".center(100, '*') plots_output += NEW_LINE_STR plots_output += NEW_LINE_STR plots_output += "Graphs and charts generated by pegasus-plots can be viewed by opening the generated html file in the web browser : \n" + data_file plots_output += NEW_LINE_STR plots_output += NEW_LINE_STR plots_output += "".center(100, '*') print plots_output except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() def create_charts(submit_dir, output_dir, config_properties, log_level): """ Generates all the graphs and charts @submit_dir submit directory pathd @output_dir the output directory path @config_properties path to the pegasus property file @log_level logging level """ wf_uuid_dax_image = [] wf_uuid_dax_label = [] wf_uuid_dag_image = [] wf_uuid_dag_label = [] wf_uuid_parent = [] populate.setup(submit_dir, config_properties) wf_uuid_list = populate.get_workflows_uuid() if len(wf_uuid_list) == 0: logger.error("Unable to populate workflow information.") if calc_gantt_chart: gantt_chart_output_dir = os.path.join(output_dir, "gantt_chart") pegasus_gantt.setup(submit_dir, gantt_chart_output_dir, pegasus_env_path, log_level) if calc_host_chart: host_chart_output_dir = os.path.join(output_dir, "host_chart") pegasus_host_over_time.setup(submit_dir, host_chart_output_dir, pegasus_env_path, log_level) if calc_breakdown_chart: breakdown_chart_output_dir = os.path.join(output_dir, "breakdown_chart") pegasus_breakdown.setup(submit_dir, breakdown_chart_output_dir, pegasus_env_path, log_level) if calc_dag_graph: dag_graph_output_dir = os.path.join(output_dir, "dag_graph") setup(dag_graph_output_dir) if calc_dax_graph : dax_graph_output_dir = os.path.join(output_dir, "dax_graph") setup(dax_graph_output_dir) for wf_uuid in wf_uuid_list: wf_stats, wf_info = populate.populate_chart(wf_uuid) title = str(wf_uuid) + " (" + str(wf_info.dax_label) +")" logger.info("Generating graphs/charts for the workflow " + title +" ... ") if calc_gantt_chart or calc_host_chart: populate.populate_job_instance_details(wf_stats, wf_info) wf_uuid_parent.append(wf_info.parent_wf_uuid) if calc_gantt_chart : logger.debug("Generating gantt chart for the workflow " + title +" ... ") pegasus_gantt.generate_chart(wf_info) if calc_host_chart : logger.debug("Generating host chart for the workflow " + title +" ... ") pegasus_host_over_time.generate_chart(wf_info) if calc_breakdown_chart: populate.populate_transformation_details(wf_stats, wf_info) logger.debug("Generating breakdown chart for the workflow " + title +" ... ") pegasus_breakdown.generate_chart(wf_info) if calc_dag_graph : populate.populate_job_details(wf_stats, wf_info) if wf_info.total_jobs <= max_graph_nodes: logger.debug("Generating dag graph for the workflow " + title +" ... ") if generate_dag_graph(wf_info, dag_graph_output_dir) is None: wf_uuid_dag_image.append(None) else: wf_uuid_dag_image.append(wf_info.wf_uuid+".png") else: wf_uuid_dag_image.append(None) logger.info("Dag graph for workflow '" + title + "' was not created because number of jobs is more than the maximum graph node generation limit ") wf_uuid_dag_label.append( wf_info.dag_label) if calc_dax_graph : populate.populate_task_details(wf_stats, wf_info) if wf_info.total_tasks <= max_graph_nodes: logger.debug("Generating dax graph for the workflow " + title +" ... 
") if generate_dax_graph(wf_info, dax_graph_output_dir) is None: wf_uuid_dax_image.append(None) else: wf_uuid_dax_image.append(wf_info.wf_uuid+".png") else: wf_uuid_dax_image.append(None) logger.info("Dax graph for workflow '" + title +"' was not created because number of tasks is more than the maximum graph node generation limit ") wf_uuid_dax_label.append(wf_info.dax_label) wf_stats.close() wf_stats = None wf_info = None root_wf_stats, root_wf_info = populate.populate_chart(wf_uuid_list[0], True) if calc_dax_graph : data_file = os.path.join(dax_graph_output_dir, wf_uuid_list[0] + ".html") create_image_gallery(data_file, wf_uuid_list, wf_uuid_parent, wf_uuid_dax_image, wf_uuid_dax_label, True ) if calc_dag_graph : data_file = os.path.join(dag_graph_output_dir, wf_uuid_list[0] + ".html") create_image_gallery(data_file, wf_uuid_list, wf_uuid_parent, wf_uuid_dag_image, wf_uuid_dag_label, False ) createOuterhtml(root_wf_stats, root_wf_info, submit_dir, output_dir, log_level) root_wf_stats.close() return def set_plotting_level(plots_level): """ Sets the plotting level @param plot_level """ global calc_dax_graph global calc_dag_graph global calc_gantt_chart global calc_host_chart global calc_time_chart global calc_breakdown_chart if plots_level =='all': calc_dax_graph = True calc_dag_graph = True calc_gantt_chart = True calc_host_chart = True calc_time_chart = True calc_breakdown_chart = True elif plots_level =='all_charts': calc_gantt_chart = True calc_host_chart = True calc_time_chart = True calc_breakdown_chart = True elif plots_level =='all_graphs': calc_dax_graph = True calc_dag_graph = True elif plots_level == 'dax_graph': calc_dax_graph = True elif plots_level == 'dag_graph': calc_dag_graph = True elif plots_level == 'gantt_chart': calc_gantt_chart = True elif plots_level == 'host_chart': calc_host_chart = True elif plots_level == 'time_chart': calc_time_chart = True else: calc_breakdown_chart = True # ---------main---------------------------------------------------------------------------- def main(): # Configure command line option parser prog_usage = prog_base +" [options] SUBMIT DIRECTORY" parser = optparse.OptionParser(usage=prog_usage) parser.add_option("-o", "--output", action = "store", dest = "output_dir", help = "writes the output to given directory.") parser.add_option("-c", "--conf", action = "store", type = "string", dest = "config_properties", help = "specifies the properties file to use. This option overrides all other property files.") parser.add_option("-m", "--max-graph-nodes", action = "store", type = "int", dest = "max_graph_nodes", help = "Maximum limit on the number of tasks/jobs in the dax/dag upto which the graph should be generated; Default value is 100") parser.add_option("-p", "--plotting-level", action = "store", dest = "plotting_level", choices=['all', 'all_charts', 'all_graphs', 'dax_graph', 'dag_graph', 'gantt_chart', 'host_chart', 'time_chart', 'breakdown_chart'], help = "specifies the chart and graphs to generate. 
# ---------main---------------------------------------------------------------------------- def main(): # Configure command line option parser prog_usage = prog_base +" [options] SUBMIT DIRECTORY" parser = optparse.OptionParser(usage=prog_usage) parser.add_option("-o", "--output", action = "store", dest = "output_dir", help = "writes the output to given directory.") parser.add_option("-c", "--conf", action = "store", type = "string", dest = "config_properties", help = "specifies the properties file to use. This option overrides all other property files.") parser.add_option("-m", "--max-graph-nodes", action = "store", type = "int", dest = "max_graph_nodes", help = "Maximum limit on the number of tasks/jobs in the dax/dag up to which the graph should be generated; Default value is 100") parser.add_option("-p", "--plotting-level", action = "store", dest = "plotting_level", choices=['all', 'all_charts', 'all_graphs', 'dax_graph', 'dag_graph', 'gantt_chart', 'host_chart', 'time_chart', 'breakdown_chart'], help = "specifies the charts and graphs to generate. Valid levels are: all, all_charts, all_graphs, dax_graph, dag_graph, gantt_chart, host_chart, time_chart, breakdown_chart; Default is all_charts.") parser.add_option("-i", "--ignore-db-inconsistency", action = "store_const", const = 0, dest = "ignore_db_inconsistency", help = "turn off the check for db consistency") parser.add_option("-v", "--verbose", action="count", default=0, dest="verbose", help="Increase verbosity, repeatable") parser.add_option("-q", "--quiet", action="count", default=0, dest="quiet", help="Decrease verbosity, repeatable") # Parse command line options (options, args) = parser.parse_args() if len(args) > 1: parser.error("Invalid argument") sys.exit(1) if len(args) < 1: submit_dir = os.getcwd() else: submit_dir = os.path.abspath(args[0]) # Copy options from the command line parser log_level = 1 log_level_str = "info" log_level += (options.verbose - options.quiet) if log_level <= 0: log_level_str = "error" elif log_level == 1: log_level_str = "warning" elif log_level == 2: log_level_str = "info" elif log_level >= 3: log_level_str = "debug" setup_logger(log_level_str) logger.info(prog_base + " : initializing...") if options.ignore_db_inconsistency is None: if not utils.loading_completed(submit_dir): if utils.monitoring_running(submit_dir): logger.warning("pegasus-monitord is still running. Please wait for it to complete. ") else: logger.warning("Please run pegasus-monitord in replay mode. ") sys.exit(1) else: logger.warning("The tool is meant to be run after the completion of the workflow run.") if options.plotting_level is not None: plotting_level = options.plotting_level else: plotting_level = 'all_charts' set_plotting_level(plotting_level) if options.output_dir is not None: output_dir = options.output_dir else : output_dir = os.path.join(submit_dir, DEFAULT_OUTPUT_DIR) utils.create_directory(output_dir, True) if options.max_graph_nodes is not None: global max_graph_nodes max_graph_nodes = options.max_graph_nodes try: create_charts(submit_dir, output_dir, options.config_properties, log_level_str) except SystemExit: sys.exit(1) except: logger.warning(traceback.format_exc()) sys.exit(1) sys.exit(0) if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/bin/pegasus-tc-converter0000755000175000017500000000077311757531137020611 0ustar ryngerynge#!/bin/bash # # Converts transformation catalog from one format to another # # $Id$ PEGASUS_CONFIG="`dirname $0`/pegasus-config" eval `$PEGASUS_CONFIG --sh-dump` . $PEGASUS_SHARE_DIR/common.sh # run java program ${JAVA} \ "-Dpegasus.home.sysconfdir=$PEGASUS_CONF_DIR" \ "-Dpegasus.home.bindir=$PEGASUS_BIN_DIR" \ "-Dpegasus.home.sharedstatedir=$PEGASUS_SHARE_DIR" \ "-Dpegasus.home.schemadir=$PEGASUS_SCHEMA_DIR" \ $addon edu.isi.pegasus.planner.client.TCConverter "$@" pegasus-wms_4.0.1+dfsg/bin/pegasus-config0000755000175000017500000002761111757531137017453 0ustar ryngerynge#!/usr/bin/env perl # # prototype for a tool like pkg-config that tells us all kinds of # interesting things about Pegasus. This is mainly intended to be # used inside scripts to determine various configuration options, # locations and directories.
# # $Id: pegasus-config 5055 2012-02-29 17:25:57Z voeckler $ # use 5.006; use strict; use Cwd; use File::Spec; use File::Basename; use Getopt::Long qw(:config bundling no_ignore_case); use POSIX (); use Sys::Hostname; my $tmpdir = $ENV{'MY_TMP'} || # MY_TMP to override standard settings $ENV{TMP} || # standard $ENV{TEMP} || # windows standard $ENV{TMPDIR} || # also somewhat used File::Spec->tmpdir() || # OK, this gets used if all above fail ( -d '/scratch' ? '/scratch' : '/tmp' ); # last resort my $userdir = $ENV{HOME} || (getpwuid($>))[7] || $tmpdir; # user $HOME my $bin_dir = Cwd::abs_path(dirname($0)); # basically PEGASUS_HOME - but let's not expose that anymore my $base_dir = dirname($bin_dir); my $lib = "lib"; # lib64 for 64bit RPMS my $conf_dir = File::Spec->catdir( $base_dir, 'etc' ); my $share_dir = File::Spec->catdir( $base_dir, 'share', 'pegasus' ); my $java_dir = File::Spec->catdir( $share_dir, 'java' ); my $perl_dir = File::Spec->catdir( $base_dir, $lib, 'pegasus', 'perl' ); my $python_dir = File::Spec->catdir( $base_dir, $lib, 'pegasus', 'python' ); my $python_externals_dir = File::Spec->catdir( $base_dir, $lib, 'pegasus', 'externals', 'python' ); my $php_dir = File::Spec->catdir( $base_dir, $lib, 'pegasus', 'php' ); my $js_dir = File::Spec->catdir( $base_dir, $lib, 'pegasus', 'javascript' ); my $schema_dir = File::Spec->catdir( $share_dir, 'schema' ); my $extra_classpath = ''; # for development - running out of a svn checkout my $test = File::Spec->catdir( $base_dir, 'build', 'classes' ); $extra_classpath = $test if -e $test; # in native packaging mode, some directories move if ($base_dir eq "/usr") { $conf_dir = "/etc/pegasus"; } # classpath my @jars = sort(<$java_dir/*.jar>); if ($extra_classpath ne "") { unshift(@jars, $extra_classpath); } my $classpath = join(":", @jars); if ($ENV{"CLASSPATH"} ne "") { $classpath = $classpath . ":" . $ENV{"CLASSPATH"}; } sub usage { my $app = basename($0); print << "EOF"; Usage: $app [argument] This is NOT an application to configure Pegasus, but an application to query the current Pegasus installation. Arguments: -h|--help Print this help and exit. -V|--version Print Pegasus version information and exit. --perl-dump Dumps all settings in perl format as separate variables. --perl-hash Dumps all settings in perl format as single perl hash. --python-dump Dumps all settings in python format. --sh-dump Dumps all settings in shell format. --bin Print the directory containing Pegasus binaries. --conf Print the directory containing configuration files. --java Print the directory containing the jars. --perl Print the directory to include into your PERL5LIB. --python Print the directory to include into your PYTHONLIB. --python-externals Print the directory to the external Python libraries. --schema Print the directory containing schemas. --classpath Builds a classpath containing the Pegasus jars. --noeoln Do not produce a end-of-line after output. This is useful when being called from non-shell backticks in scripts. Order is important for this option; specify first. --local-site [d] Create a site catalog entry for site "local". This is only an XML snippet without root element nor XML headers. The optional argument "d" points to the mount point to use. If not specified, defaults to the user\'s \$HOME directory. --full-local [d] Create a complete site catalog with only site "local". The an XML snippet without root element nor XML headers. The optional argument "d" points to the mount point to use. 
If not specified, defaults to the user\'s \$HOME directory. EOF exit 1; } sub find_exec($;@) { # purpose: determine location of given binary in $PATH # paramtr: $program (IN): executable basename to look for # @extra (opt. IN): additional directories to search # returns: fully qualified path to binary, undef if not found my $program = shift; foreach my $dir ( ( File::Spec->path, @_ ) ) { my $fs = File::Spec->catfile( $dir, $program ); return $fs if -x $fs; } undef; } sub site_snippet($) { my $home = shift; my @u = POSIX::uname(); $u[2] =~ s/^(\d+(\.\d+(\.\d+)?)?).*/$1/; $u[4] =~ s/i.86/x86/; # check for presence of Globus my $g_l = $ENV{'GLOBUS_LOCATION'}; unless ( defined $g_l ) { my $gr = find_exec( 'globusrun' ); if ( defined $gr && -x $gr ) { # OK, so we got Globus. Guess the globus location $g_l = dirname( Cwd::abs_path(dirname($gr)) ); } undef $g_l unless -d $g_l; } print " \n"; print " \n"; # so we got Globus installed. Let's check, if there are any # services associated with this Globus on this site if ( defined $g_l ) { my $g_v = `globus-version`; chomp($g_v); my @g_v = split /\./, $g_v; my $type = $g_v[0] >= 5 ? 'gt5' : 'gt2'; my $dir = File::Spec->catdir( $g_l, 'etc', 'grid-services' ); my $glob = File::Spec->catfile( $dir, 'jobmanager-*' ); my $fqdn = Sys::Hostname::hostname(); foreach my $jm ( CORE::glob($glob) ) { if ( open( J, "<$jm" ) ) { my %grmblftz = ( 'fork' => 'Fork', 'condor' => 'Condor', 'pbs' => 'PBS', 'lsf' => 'LSF' ); chomp( $_ = ); close J; my $basejm = basename($jm); my $jobtype = ( $jm =~ /-fork$/ ? 'auxillary' : 'compute' ); if ( /-type (\S+)/ ) { my $t = lc($1); print( ' \n" ); } } } } print " \n"; print " \n"; print "\t\n"; print "\t \n"; print "\t \n"; print "\t\n"; print " \n"; print " \n"; print "\t\n"; print "\t \n"; print "\t \n"; print "\t\n"; print " \n"; print " \n"; print " \n"; print " $userdir\n"; if ( defined $g_l ) { print " $g_l\n"; } if ( exists $ENV{'LD_LIBRARY_PATH'} ) { print( " ", $ENV{LD_LIBRARY_PATH}, "\n" ); } print " \n"; } # Parse command-line options. usage() unless @ARGV; my $eoln = 1; GetOptions( "help|h" => \&usage , 'eoln|crlf!' => \$eoln , 'version|V' => sub { print "#PEGASUS_VERSION#"; print "\n" if $eoln; exit 0; } , 'perl-hash' => sub { print << "EOF"; use vars qw(\%pegasus); \%pegasus = ( bin => \"$bin_dir\" , conf => \"$conf_dir\" , java => \"$java_dir\" , perl => \"$perl_dir\" , python => \"$python_dir\" , pyexts => \"$python_externals_dir\" , php => \"$php_dir\" , js => \"$js_dir\" , share => \"$share_dir\" , schema => \"$schema_dir\" ); unshift( \@INC, \$pegasus{perl} ); EOF exit 0; } , 'perl-dump' => sub { # This won't work, because the "my" variables inside a BEGIN/eval block # won't make it outside the BEGIN (i.e. not available to main program). 
print "my \$pegasus_bin_dir = \"$bin_dir\";\n"; print "my \$pegasus_conf_dir = \"$conf_dir\";\n"; print "my \$pegasus_java_dir = \"$java_dir\";\n"; print "my \$pegasus_perl_dir = \"$perl_dir\";\n"; print "my \$pegasus_python_dir = \"$python_dir\";\n"; print "my \$pegasus_python_externals_dir = \"$python_externals_dir\";\n"; print "my \$pegasus_php_dir = \"$php_dir\";\n"; print "my \$pegasus_javascript_dir = \"$js_dir\";\n"; print "my \$pegasus_share_dir = \"$share_dir\";\n"; print "my \$pegasus_schema_dir = \"$schema_dir\";\n"; print "unshift(\@INC, \$pegasus_perl_dir);\n"; exit 0; } , 'python-dump' => sub { print "pegasus_bin_dir = \"$bin_dir\"\n"; print "pegasus_conf_dir = \"$conf_dir\"\n"; print "pegasus_java_dir = \"$java_dir\"\n"; print "pegasus_perl_dir = \"$perl_dir\"\n"; print "pegasus_python_dir = \"$python_dir\"\n"; print "pegasus_python_externals_dir = \"$python_externals_dir\"\n"; print "pegasus_php_dir = \"$php_dir\"\n"; print "pegasus_javascript_dir = \"$js_dir\"\n"; print "pegasus_share_dir = \"$share_dir\"\n"; print "pegasus_schema_dir = \"$schema_dir\"\n"; print "os.sys.path.insert(0, pegasus_python_dir)\n"; exit 0; } , 'sh-dump' => sub { print "PEGASUS_BIN_DIR=\"$bin_dir\"\n"; print "export PEGASUS_BIN_DIR\n"; print "PEGASUS_CONF_DIR=\"$conf_dir\"\n"; print "export PEGASUS_CONF_DIR\n"; print "PEGASUS_JAVA_DIR=\"$java_dir\"\n"; print "export PEGASUS_JAVA_DIR\n"; print "PEGASUS_PERL_DIR=\"$perl_dir\"\n"; print "export PEGASUS_PERL_DIR\n"; print "PEGASUS_PYTHON_DIR=\"$python_dir\"\n"; print "export PEGASUS_PYTHON_DIR\n"; print "PEGASUS_PYTHON_EXTERNALS_DIR=\"$python_externals_dir\"\n"; print "export PEGASUS_PYTHON_EXTERNALS_DIR\n"; print "PEGASUS_SHARE_DIR=\"$share_dir\"\n"; print "export PEGASUS_SHARE_DIR\n"; print "PEGASUS_SCHEMA_DIR=\"$schema_dir\"\n"; print "export PEGASUS_SCHEMA_DIR\n"; print "CLASSPATH=\"$classpath\"\n"; print "export CLASSPATH\n"; exit 0; } , 'bin' => sub { print $bin_dir; print "\n" if $eoln; exit 0; } , 'conf' => sub { print $conf_dir; print "\n" if $eoln; exit 0; } , 'classpath' => sub { print $classpath; print "\n" if $eoln; exit 0; } , 'java' => sub { print $java_dir; print "\n" if $eoln; exit 0; } , 'perl' => sub { print $perl_dir; print "\n" if $eoln; exit 0; } , 'python' => sub { print $python_dir; print "\n" if $eoln; exit 0; } , 'python-externals' => sub { print $python_externals_dir; print "\n" if $eoln; exit 0; } , 'schema' => sub { print $schema_dir; print "\n" if $eoln; exit 0; } , 'local-site:s' => sub { site_snippet( ($_[1] || $userdir) ); exit 0; } , 'full-local:s' => sub { print "\n"; #print '\n"; print('', "\n" ); site_snippet( ($_[1] || $userdir) ); print "\n"; exit 0; } ); usage(); pegasus-wms_4.0.1+dfsg/bin/pegasus-dax-validator0000755000175000017500000000157111757531137020742 0ustar ryngerynge#!/bin/bash # # verify that a given DAX-3.2 is sane and parsable. # Warning: This requires Xerces-J 2.10 or greater. # # $Id: pegasus-dax-validator 4494 2011-08-26 00:27:39Z rynge $ PEGASUS_CONFIG="`dirname $0`/pegasus-config" eval `$PEGASUS_CONFIG --sh-dump` . $PEGASUS_SHARE_DIR/common.sh # check version of Xerces-J xerces=`${JAVA} org.apache.xerces.impl.Version` version=`echo $xerces | \ perl -alne '@x=split/\./,$F[1]; print $x[0]*1000000+$x[1]*1000+$x[2]'` if [ "X$version" = 'X' ]; then echo "ERROR: Unable to determine version of your Xerces-J" 1>&2 exit 1 elif [ $version -lt 2010000 ]; then echo "ERROR: Sorry, your version of Xerces ($xerces) is too old." 1>&2 echo "At least version 2.10.0 is required for the DAX validator." 
1>&2 exit 1 else ${JAVA} "-Dpegasus.home=${PEGASUS_HOME}" $addon edu.isi.pegasus.planner.client.DAXValidator "$@" fi pegasus-wms_4.0.1+dfsg/bin/pegasus-transfer0000755000175000017500000013060211757531137020025 0ustar ryngerynge#!/usr/bin/env python """ Pegasus utility for transfer of files during workflow enactment Usage: pegasus-transfer [options] If the runtime environment sets a variable PEGASUS_POLICY_CHECKS to a true value (e.g. True, enabled, on, yes, 1, etc.), this utility will submit the list of transfers to a Policy Web Service, which will optionally return the list or some subset of the list after applying policy restrictions to it. The returned list is then processed normally. Communication with the Policy Web Service is optionally controlled either by setting additional environment variables or command line options. (The additional command line options are only available if PEGASUS_POLICY_CHECKS is defined and True.) The environment variables are: PEGASUS_POLICY_HOST PEGASUS_POLICY_PORT PEGASUS_POLICY_URL If neither environment variables nor command line options are used, the defaults are, respectively, localhost, 80, and /policy/transfer/. """ ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## import os import re import sys import errno import logging import optparse import tempfile import subprocess import signal import string import stat import time from collections import deque __author__ = "Mats Rynge " # --- regular expressions ------------------------------------------------------------- re_parse_url = re.compile(r'([\w]+)://([\w\.\-:@]*)(/[\S]*)') # --- classes ------------------------------------------------------------------------- class Transfer: pair_id = 0 # the id of the pair in the input, the nth pair in the input src_proto = "" # src_host = "" # src_path = "" # dst_proto = "" # dst_host = "" # dst_path = "" # allow_grouping = True # can this transfer be grouped with others? policy_id = "" # ID assigned by Policy Web Service (when used) def __init__(self, pair_id): """ Initializes the transfer class """ self.pair_id = pair_id def set_src(self, url): self.src_proto, self.src_host, self.src_path = self.parse_url(url) def set_dst(self, url): self.dst_proto, self.dst_host, self.dst_path = self.parse_url(url) def set_policy_id(self, id): """ Used only when a policy service is queried about the transfer list. 
""" self.policy_id = id def parse_url(self, url): proto = "" host = "" path = "" # default protocol is file:// if string.find(url, ":") == -1: logger.debug("URL without protocol (" + url + ") - assuming file://") url = "file://" + url # file url is a special cases as it can contain relative paths and env vars if string.find(url, "file:") == 0: proto = "file" # file urls can either start with file://[\w]*/ or file: (no //) path = re.sub("^file:(//[\w\.\-:@]*)?", "", url) path = expand_env_vars(path) return proto, host, path # symlink url is a special cases as it can contain relative paths and env vars if string.find(url, "symlink:") == 0: proto = "symlink" # symlink urls can either start with symlink://[\w]*/ or symlink: (no //) path = re.sub("^symlink:(//[\w\.\-:@]*)?", "", url) path = expand_env_vars(path) return proto, host, path # other than file/symlink urls r = re_parse_url.search(url) if not r: raise RuntimeError("Unable to parse URL: %s" % (url)) # Parse successful proto = r.group(1) host = r.group(2) path = r.group(3) # no double slashes in urls path = re.sub('//+', '/', path) return proto, host, path def src_url(self): return "%s://%s%s" % (self.src_proto, self.src_host, self.src_path) def src_url_srm(self): """ srm-copy is using broken urls - wants an extra / """ if self.src_proto != "srm": return "%s://%s/%s" % (self.src_proto, self.src_host, self.src_path) return self.src_url() def dst_url(self): return "%s://%s%s" % (self.dst_proto, self.dst_host, self.dst_path) def dst_url_srm(self): """ srm-copy is using broken urls - wants an extra / """ if self.dst_proto != "srm": return "%s://%s/%s" % (self.dst_proto, self.dst_host, self.dst_path) return self.dst_url() def dst_url_dirname(self): dn = os.path.dirname(self.dst_path) return "%s://%s%s" % (self.dst_proto, self.dst_host, dn) def groupable(self): """ currently only gridftp allows for grouping """ return self.allow_grouping and (self.src_proto == "gsiftp" or self.dst_proto == "gsiftp") def __cmp__(self, other): """ compares first on protos, then on hosts, then on paths - useful for grouping similar types of transfers """ if cmp(self.src_proto, other.src_proto) != 0: return cmp(self.src_proto, other.src_proto) if cmp(self.dst_proto, other.dst_proto) != 0: return cmp(self.dst_proto, other.dst_proto) if cmp(self.src_host, other.src_host) != 0: return cmp(self.src_host, other.src_host) if cmp(self.dst_host, other.dst_host) != 0: return cmp(self.dst_host, other.dst_host) if cmp(self.src_path, other.src_path) != 0: return cmp(self.src_path, other.src_path) if cmp(self.dst_path, other.dst_path) != 0: return cmp(self.dst_path, other.dst_path) return 0 class Alarm(Exception): pass # --- global variables ---------------------------------------------------------------- prog_dir = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0]))) prog_base = os.path.split(sys.argv[0])[1] # Name of this program logger = logging.getLogger("my_logger") # timeout for when shelling out default_subshell_timeout = 6 * 60 * 60; # this is the map of what tool to use for a given protocol pair (src, dest) tool_map = {} tool_map[('fdt' , 'file' )] = 'fdt' tool_map[('file' , 'fdt' )] = 'fdt' tool_map[('file' , 'file' )] = 'cp' tool_map[('file' , 'gsiftp' )] = 'gsiftp' tool_map[('file' , 'irods' )] = 'irods' tool_map[('file' , 'scp' )] = 'scp' tool_map[('file' , 's3' )] = 's3' tool_map[('file' , 's3s' )] = 's3' tool_map[('file' , 'srm' )] = 'srm' tool_map[('file' , 'symlink' )] = 'symlink' tool_map[('ftp' , 'ftp' )] = 'gsiftp' tool_map[('ftp' , 'gsiftp' )] = 
tool_info = {} # track remote directories created so that we don't have to # try to create them over and over again remote_dirs_created = {} # stats stats_start = 0 stats_end = 0 stats_total_bytes = 0 # This flag is used to control calls to code that uses an external Policy Service. # If the runtime environment sets a variable PEGASUS_POLICY_CHECKS, the flag is # toggled and functions to submit the transfer list to the policy service are # used. Otherwise, the list is just sorted lexically. using_policy_service = False # --- functions ----------------------------------------------------------------------- def setup_logger(level_str): # log to the console console = logging.StreamHandler() # default log level - make logger/console match logger.setLevel(logging.INFO) console.setLevel(logging.INFO) # level - from the command line level_str = level_str.lower() if level_str == "debug": logger.setLevel(logging.DEBUG) console.setLevel(logging.DEBUG) if level_str == "warning": logger.setLevel(logging.WARNING) console.setLevel(logging.WARNING) if level_str == "error": logger.setLevel(logging.ERROR) console.setLevel(logging.ERROR) # formatter formatter = logging.Formatter("%(asctime)s %(levelname)7s: %(message)s") console.setFormatter(formatter) logger.addHandler(console) logger.debug("Logger has been configured") def prog_sigint_handler(signum, frame): logger.warn("Exiting due to signal %d" % (signum)) myexit(1) def alarm_handler(signum, frame): raise Alarm def expand_env_vars(s): re_env_var = re.compile(r'\${?([a-zA-Z0-9_]+)}?') s = re.sub(re_env_var, get_env_var, s) return s def get_env_var(match): name = match.group(1) value = "" logger.debug("Looking up " + name) if name in os.environ: value = os.environ[name] return value def myexec(cmd_line, timeout_secs, should_log): """ executes shell commands with the ability to time out if the command hangs """ global delay_exit_code if should_log or logger.isEnabledFor(logging.DEBUG): logger.info(cmd_line) sys.stdout.flush() # set up signal handler for timeout signal.signal(signal.SIGALRM, alarm_handler) signal.alarm(timeout_secs) p = subprocess.Popen(cmd_line, shell=True) try: stdoutdata, stderrdata = p.communicate() except Alarm: if sys.version_info >= (2, 6): p.terminate() raise RuntimeError("Command '%s' timed out after %s seconds" % (cmd_line, timeout_secs)) rc = p.returncode if rc != 0: raise RuntimeError("Command '%s' failed with error code %s" % (cmd_line, rc)) def backticks(cmd_line): """ what would a python program be without some perl love?
""" return subprocess.Popen(cmd_line, shell=True, stdout=subprocess.PIPE).communicate()[0] def check_tool(executable, version_arg, version_regex): # initialize the global tool info for this executable tool_info[executable] = {} tool_info[executable]['full_path'] = None tool_info[executable]['version'] = None tool_info[executable]['version_major'] = None tool_info[executable]['version_minor'] = None tool_info[executable]['version_patch'] = None # figure out the full path to the executable full_path = backticks("which " + executable + " 2>/dev/null") full_path = full_path.rstrip('\n') if full_path == "": logger.info("Command '%s' not found in the current environment" %(executable)) return tool_info[executable]['full_path'] = full_path # version if version_regex == None: version = "N/A" else: version = backticks(executable + " " + version_arg + " 2>&1") version = version.replace('\n', "") re_version = re.compile(version_regex) result = re_version.search(version) if result: version = result.group(1) tool_info[executable]['version'] = version # if possible, break up version into major, minor, patch re_version = re.compile("([0-9]+)\.([0-9]+)(\.([0-9]+)){0,1}") result = re_version.search(version) if result: tool_info[executable]['version_major'] = int(result.group(1)) tool_info[executable]['version_minor'] = int(result.group(2)) tool_info[executable]['version_patch'] = result.group(4) if tool_info[executable]['version_patch'] == None or tool_info[executable]['version_patch'] == "": tool_info[executable]['version_patch'] = None else: tool_info[executable]['version_patch'] = int(tool_info[executable]['version_patch']) logger.info(" %-18s Version: %-7s Path: %s" % (executable, version, full_path)) def check_env_and_tools(): # PATH setup path = "/usr/bin:/bin" if "PATH" in os.environ: path = os.environ['PATH'] path_entries = path.split(':') # is /usr/bin in the path? 
if not("/usr/bin" in path_entries): path_entries.append("/usr/bin") path_entries.append("/bin") # fink on macos x if os.path.exists("/sw/bin") and not("/sw/bin" in path_entries): path_entries.append("/sw/bin") # need LD_LIBRARY_PATH for Globus tools ld_library_path = "" if "LD_LIBRARY_PATH" in os.environ: ld_library_path = os.environ['LD_LIBRARY_PATH'] ld_library_path_entries = ld_library_path.split(':') # if PEGASUS_HOME is set, prepend it to the PATH (we want it early to override other cruft) if "PEGASUS_HOME" in os.environ: try: path_entries.remove(os.environ['PEGASUS_HOME'] + "/bin") except Exception: pass path_entries.insert(0, os.environ['PEGASUS_HOME'] + "/bin") # if GLOBUS_LOCATION is set, prepend it to the PATH and LD_LIBRARY_PATH # (we want it early to override other cruft) if "GLOBUS_LOCATION" in os.environ: try: path_entries.remove(os.environ['GLOBUS_LOCATION'] + "/bin") except Exception: pass path_entries.insert(0, os.environ['GLOBUS_LOCATION'] + "/bin") try: ld_library_path_entries.remove(os.environ['GLOBUS_LOCATION'] + "/lib") except Exception: pass ld_library_path_entries.insert(0, os.environ['GLOBUS_LOCATION'] + "/lib") os.environ['PATH'] = ":".join(path_entries) os.environ['LD_LIBRARY_PATH'] = ":".join(ld_library_path_entries) os.environ['DYLD_LIBRARY_PATH'] = ":".join(ld_library_path_entries) logger.info("PATH=" + os.environ['PATH']) logger.info("LD_LIBRARY_PATH=" + os.environ['LD_LIBRARY_PATH']) # irods requires a password hash file os.environ['irodsAuthFileName'] = os.getcwd() + "/.irodsA" # tools we might need later check_tool("wget", "--version", "([0-9]+\.[0-9]+)") check_tool("globus-version", "--full", "([0-9]+\.[0-9]+\.[0-9]+)") check_tool("globus-url-copy", "-version", "([0-9]+\.[0-9]+)") check_tool("srm-copy", "-version", "srm-copy[ \t]+([\.0-9a-zA-Z]+)") check_tool("iget", "-h", "Version[ \t]+([\.0-9a-zA-Z]+)") check_tool("pegasus-s3", "help", None) def prepare_local_dir(path): """ makes sure a local path exists before putting files into it """ if not(os.path.exists(path)): logger.debug("Creating local directory " + path) try: os.makedirs(path, 0755) except os.error, err: # if dir already exists, ignore the error if not(os.path.isdir(path)): raise RuntimeError(err) def cp(transfers, failed_q): """ copies locally using /bin/cp """ for i, transfer in enumerate(transfers): prepare_local_dir(os.path.dirname(transfer.dst_path)) cmd = "/bin/cp -f -L \"%s\" \"%s\"" % (transfer.src_path, transfer.dst_path) try: myexec(cmd, default_subshell_timeout, True) except RuntimeError, err: logger.error(err) failed_q.append(transfer) stats_add(transfer.dst_path) def symlink(transfers, failed_q): """ symlinks locally using ln """ for i, transfer in enumerate(transfers): prepare_local_dir(os.path.dirname(transfer.dst_path)) # we do not allow dangling symlinks if not os.path.exists(transfer.src_path): logger.warning("Symlink source (%s) does not exist" % (transfer.src_path)) failed_q.append(transfer) continue if os.path.exists(transfer.src_path) and os.path.exists(transfer.dst_path): # make sure src and target are not the same file - have to compare at the # inode level as paths can differ src_inode = os.stat(transfer.src_path)[stat.ST_INO] dst_inode = os.stat(transfer.dst_path)[stat.ST_INO] if src_inode == dst_inode: logger.warning("symlink: src (%s) and dst (%s) already exists" % (transfer.src_path, transfer.dst_path)) continue cmd = "ln -f -s %s %s" % (transfer.src_path, transfer.dst_path) try: myexec(cmd, 60, True) except RuntimeError, err: logger.error(err) 
failed_q.append(transfer) def prepare_scp_dir(rhost, rdir): """ makes sure a remote path exists before putting files into it """ cmd = "/usr/bin/ssh" if "SSH_PRIVATE_KEY" in os.environ: cmd += " -i " + os.environ['SSH_PRIVATE_KEY'] cmd += " -q -o StrictHostKeyChecking=no" cmd += " " + rhost + " '/bin/mkdir -p " + rdir + "'" myexec(cmd, default_subshell_timeout, True) def scp(transfers, failed_q): """ copies using scp """ for i, transfer in enumerate(transfers): cmd = "/usr/bin/scp" if "SSH_PRIVATE_KEY" in os.environ: cmd += " -i " + os.environ['SSH_PRIVATE_KEY'] cmd += " -q -B -o StrictHostKeyChecking=no" try: if transfer.dst_proto == "file": prepare_local_dir(os.path.dirname(transfer.dst_path)) cmd += " " + transfer.src_host + ":" + transfer.src_path cmd += " " + transfer.dst_path else: mkdir_key = "scp://" + transfer.dst_host + ":" + os.path.dirname(transfer.dst_path) if not mkdir_key in remote_dirs_created: prepare_scp_dir(transfer.dst_host, os.path.dirname(transfer.dst_path)) remote_dirs_created[mkdir_key] = True cmd += " " + transfer.src_path cmd += " " + transfer.dst_host + ":" + transfer.dst_path stats_add(transfer.src_path) myexec(cmd, default_subshell_timeout, True) if transfer.dst_proto == "file": stats_add(transfer.dst_path) except RuntimeError, err: logger.error(err) failed_q.append(transfer) def fdt(transfers, failed_q): """ copies using FDT - http://monalisa.cern.ch/FDT/license.html """ # download fdt.jar on demand - it cannot be shipped with Pegasus due to licensing if not os.path.exists("fdt.jar"): cmd = "wget -nv -O fdt.jar http://monalisa.cern.ch/FDT/lib/fdt.jar" try: myexec(cmd, 10*60, True) except RuntimeError, err: logger.error(err) for i, transfer in enumerate(transfers): cmd = "echo | java -jar fdt.jar" if transfer.dst_proto == "file": prepare_local_dir(os.path.dirname(transfer.dst_path)) cmd += " " + transfer.src_host + ":" + transfer.src_path cmd += " " + transfer.dst_path else: cmd += " " + transfer.src_path cmd += " " + transfer.dst_host + ":" + transfer.dst_path stats_add(transfer.src_path) try: myexec(cmd, default_subshell_timeout, True) if transfer.dst_proto == "file": stats_add(transfer.dst_path) except RuntimeError, err: logger.error(err) failed_q.append(transfer) def webget(transfers, failed_q): """ pulls http/https using wget """ if len(transfers) == 0: return if len(transfers) > 0 and tool_info['wget']['full_path'] == None: raise RuntimeError("Unable to do http/https transfers because wget could not be found") for i, transfer in enumerate(transfers): prepare_local_dir(os.path.dirname(transfer.dst_path)) cmd = tool_info['wget']['full_path'] if logger.isEnabledFor(logging.DEBUG): cmd += " -v" else: cmd += " -q" cmd += " --no-check-certificate -O \"" + transfer.dst_path + "\" \"" + transfer.src_url() + "\"" try: myexec(cmd, default_subshell_timeout, True) stats_add(transfer.dst_path) except RuntimeError, err: logger.error(err) failed_q.append(transfer) def transfers_groupable(a, b): """ compares two url_pairs, and determines if they are similar enough to be grouped together for one tool """ if not a.groupable() or not b.groupable(): return False if a.src_proto != b.src_proto: return False if a.dst_proto != b.dst_proto: return False return True def gsiftp_similar(a, b): """ compares two url_pairs, and determines if they are similar enough to be grouped together in one transfer input file """ if a.src_host != b.src_host: return False if a.dst_host != b.dst_host: return False if os.path.dirname(a.src_path) != os.path.dirname(b.src_path): return False if os.path.dirname(a.dst_path) != os.path.dirname(b.dst_path): return False return True
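# Editorial note: gsiftp_similar() effectively defines an equivalence key --
# (source host, destination host, source dir, destination dir). Once the inputs
# are sorted (main() sorts via Transfer.__cmp__), the same batching can be
# expressed with itertools.groupby. An illustrative sketch, not used by the tool:
import itertools

def similarity_key(t):
    # The fields gsiftp_similar() compares, as one sortable tuple.
    return (t.src_host, t.dst_host,
            os.path.dirname(t.src_path), os.path.dirname(t.dst_path))

def group_similar(transfers):
    # Assumes transfers are already sorted so that equal keys are adjacent.
    for key, group in itertools.groupby(transfers, key=similarity_key):
        yield list(group)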
def gsiftp(full_list, failed_q, attempt): """ gsiftp - globus-url-copy for now, maybe uberftp in the future """ if len(full_list) == 0: return if tool_info['globus-url-copy']['full_path'] == None: raise RuntimeError("Unable to do gsiftp transfers because globus-url-copy could not be found") # create lists with similar (same src host/path, same dst host/path) url pairs while len(full_list) > 0: similar_list = [] curr = full_list.pop() prev = curr third_party = curr.src_proto == "gsiftp" and curr.dst_proto == "gsiftp" while gsiftp_similar(curr, prev): similar_list.append(curr) if len(full_list) == 0: break else: prev = curr curr = full_list.pop() if not gsiftp_similar(curr, prev): # the last pair is not part of the set and needs to be added back to the # beginning of the list full_list.append(curr) if len(similar_list) == 0: break # we now have a list of similar transfers - break up and send the first one with create dir # and the rest with no create dir options first_list = [] first_list.append(similar_list.pop()) gsiftp_do_transfers(first_list, failed_q, True, third_party) if len(similar_list) > 0: gsiftp_do_transfers(similar_list, failed_q, False, third_party)
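# Editorial note: the flag selection in gsiftp_do_transfers() below compares the
# major and minor version fields separately, which is easy to get wrong at
# boundaries (note the special case needed for major == 5). Comparing
# (major, minor) tuples avoids that. A sketch against the tool_info structure
# filled in by check_tool(); the helper name is ours, not part of the tool:
def tool_at_least(tool, major, minor):
    info = tool_info.get(tool, {})
    have = (info.get('version_major') or 0, info.get('version_minor') or 0)
    # Tuple comparison is lexicographic, so (5, 0) >= (4, 2) holds.
    return have >= (major, minor)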
def gsiftp_do_transfers(transfers, failed_q, create_dest, third_party): """ sub to gsiftp() - transfers a list of urls """ # keep track of what transfer we attempted so we can add to fail q in case of failures attempted_transfers = transfers[:] delayed_file_stat = [] # create tmp file with transfer src/dst pairs num_pairs = 0 try: tmp_fd, tmp_name = tempfile.mkstemp(prefix="pegasus-transfer-", suffix=".lst", dir="/tmp") tmp_file = os.fdopen(tmp_fd, "w+b") except: raise RuntimeError("Unable to create tmp file for globus-url-copy transfers") for i, t in enumerate(transfers): num_pairs += 1 logger.debug(" adding %s %s" % (t.src_url(), t.dst_url())) # delay stat() calls until we have finished the transfers if t.src_proto == "file": delayed_file_stat.append(t.src_path) elif t.dst_proto == "file": delayed_file_stat.append(t.dst_path) tmp_file.write("%s %s\n" % (t.src_url(), t.dst_url())) tmp_file.close() logger.info("Grouped %d similar gsiftp transfers together in temporary file %s" %(num_pairs, tmp_name)) # build command line for globus-url-copy cmd = tool_info['globus-url-copy']['full_path'] # make output from guc match our current log level if logger.isEnabledFor(logging.DEBUG): cmd += " -dbg" elif num_pairs < 10: cmd += " -verbose" # should we try to create directories? if create_dest: cmd += " -create-dest" # Only do third party transfers for gsiftp->gsiftp. For other combinations, fall # back to settings which will work well over, for example, NAT if third_party: cmd += " -parallel 4" # -fast only for Globus 4 and above if tool_info['globus-version']['version_major'] >= 4: cmd += " -fast" # -pipeline only for Globus 4.2 and above if (tool_info['globus-version']['version_major'] == 5 \ or (tool_info['globus-version']['version_major'] >= 4 \ and tool_info['globus-version']['version_minor'] >= 2)): cmd += " -pipeline" else: cmd += " -no-third-party-transfers -no-data-channel-authentication" cmd += " -f " + tmp_name try: myexec(cmd, default_subshell_timeout, True) # stat the files for i, filename in enumerate(delayed_file_stat): stats_add(filename) except Exception, err: logger.error(err) for i, t in enumerate(attempted_transfers): failed_q.append(t) os.unlink(tmp_name) def irods_login(): """ log in to irods by using the iinit command - if the file already exists, we are already logged in """ f = os.environ['irodsAuthFileName'] if os.path.exists(f): return # read password from env file if not "irodsEnvFile" in os.environ: raise RuntimeError("Missing irodsEnvFile - unable to do irods transfers") password = None h = open(os.environ['irodsEnvFile'], 'r') for line in h: items = line.split(" ", 2) if items[0].lower() == "irodspassword": password = items[1].strip(" \t'\"\r\n") h.close() if password == None: raise RuntimeError("No irodsPassword specified in irods env file") h = open(".irodsAc", "w") h.write(password + "\n") h.close() cmd = "cat .irodsAc | iinit" myexec(cmd, 5*60, True) os.unlink(".irodsAc") def irods(transfers, failed_q): """ irods - use the icommands to interact with irods """ if len(transfers) == 0: return if tool_info['iget']['full_path'] == None: raise RuntimeError("Unable to do irods transfers because iget could not be found in the current path") # log in to irods try: irods_login() except Exception, loginErr: logger.error(loginErr) raise RuntimeError("Unable to log into irods") for i, url_pair in enumerate(transfers): if url_pair.dst_proto == "file": # irods->file prepare_local_dir(os.path.dirname(url_pair.dst_path)) cmd = "iget -f " + url_pair.src_path + " " + url_pair.dst_path else: # file->irods cmd = "imkdir -p " + os.path.dirname(url_pair.dst_path) try: myexec(cmd, 60*60, True) except: # ignore errors from the mkdir command pass cmd = "iput -f " + url_pair.src_path + " " + url_pair.dst_path try: myexec(cmd, default_subshell_timeout, True) # stats if url_pair.dst_proto == "file": stats_add(url_pair.dst_path) else: stats_add(url_pair.src_path) except Exception, err: logger.error(err) failed_q.append(url_pair)
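# Editorial note: every handler above and below shells out through myexec(),
# which bounds a hung transfer with SIGALRM. A condensed, self-contained sketch
# of that pattern (names are ours; the real implementation is myexec() earlier
# in this file):
#
#   import signal, subprocess
#
#   class Timeout(Exception):
#       pass
#
#   def run_with_timeout(cmd_line, timeout_secs):
#       def handler(signum, frame):
#           raise Timeout()
#       signal.signal(signal.SIGALRM, handler)
#       signal.alarm(timeout_secs)           # SIGALRM fires after the timeout
#       p = subprocess.Popen(cmd_line, shell=True)
#       try:
#           p.communicate()                  # interrupted by the handler
#       except Timeout:
#           p.terminate()
#           raise RuntimeError("'%s' timed out" % cmd_line)
#       finally:
#           signal.alarm(0)                  # cancel any pending alarm
#       if p.returncode != 0:
#           raise RuntimeError("'%s' exited %d" % (cmd_line, p.returncode))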
""" if len(transfers) == 0: return if tool_info['srm-copy']['full_path'] == None: raise RuntimeError("Unable to do srm transfers becuase srm-copy could not be found") for i, url_pair in enumerate(transfers): if url_pair.dst_proto == "file": prepare_local_dir(os.path.dirname(url_pair.dst_path)) #elif url_pair.dst_proto == "gsiftp" or url_pair.dst_proto == "srm": # srm_mkdir(os.path.dirname(url_pair.dst_url_srm())) third_party = (url_pair.src_proto == "gsiftp" or url_pair.src_proto == "srm") and \ (url_pair.dst_proto == "gsiftp" or url_pair.dst_proto == "srm") cmd = "srm-copy %s %s -mkdir" % (url_pair.src_url_srm(), url_pair.dst_url_srm()) if third_party: cmd = cmd + " -parallelism 4 -3partycopy" if not logger.isEnabledFor(logging.DEBUG): cmd = cmd + " >/dev/null" try: myexec(cmd, 6*60*60, True) except Exception, err: logger.error(err) failed_q.append(url_pair) def srm_mkdir(url): """ implements recursive mkdir as srm-mkdir can not handle it """ # end condition if url == "/" or url == "": return True # does the url exist? cmd = "srm-ls %s >/dev/null" %(url) try: myexec(cmd, 10*60, True) return True except Exception, err: logger.error(err) # if we get here, the directory does not exist # create the parent first one_up = os.path.dirname(url) srm_mkdir(one_up) cmd = "srm-mkdir %s >/dev/null" %(url) try: myexec(cmd, 10*60, True) except Exception, err: logger.error(err) return False return True def s3(transfers, failed_q): """ s3 - uses pegasus-s3 to interact with Amazon S3 """ if len(transfers) == 0: return if tool_info['pegasus-s3']['full_path'] == None: raise RuntimeError("Unable to do S3 transfers becuase pegasus-s3 could not be found") buckets_created = {} for i, url_pair in enumerate(transfers): # get/put? if url_pair.dst_proto == "file": # this is a 'get' local_filename = url_pair.dst_path prepare_local_dir(os.path.dirname(url_pair.dst_path)) cmd = "pegasus-s3 get %s %s" % (url_pair.src_url(), url_pair.dst_path) else: # this is a 'put' local_filename = url_pair.src_path # extract the bucket part re_bucket = re.compile(r'(s3(s){0,1}://\w+@\w+/+[\w]+)') bucket = url_pair.dst_url_dirname() r = re_bucket.search(bucket) if r: bucket = r.group(1) else: raise RuntimeError("Unable to parse bucket: %s" % (bucket)) # first ensure that the bucket exists if not bucket in buckets_created: buckets_created[bucket] = True cmd = "pegasus-s3 mkdir %s" %(bucket) try: myexec(cmd, 5*60, True) except Exception, err: logger.error("mkdir failed - possibly due to the bucket already existing, so continuing...") cmd = "pegasus-s3 put %s %s" % (url_pair.src_path, url_pair.dst_url()) try: myexec(cmd, default_subshell_timeout, True) stats_add(local_filename) except Exception, err: logger.error(err) failed_q.append(url_pair) def handle_transfers(transfers, failed_q, attempt): """ handles a list of transfers - failed ones are added to the failed queue """ try: if tool_map.has_key((t_main.src_proto, t_main.dst_proto)): tool = tool_map[(t_main.src_proto, t_main.dst_proto)] if tool == "cp": cp(transfers, failed_q) elif tool == "fdt": fdt(transfers, failed_q) elif tool == "symlink": symlink(transfers, failed_q) elif tool == "scp": scp(transfers, failed_q) elif tool == "webget": webget(transfers, failed_q) elif tool == "gsiftp": gsiftp(transfers, failed_q, attempt) elif tool == "irods": irods(transfers, failed_q) elif tool == "srm": srm(transfers, failed_q) elif tool == "s3": s3(transfers, failed_q) else: logger.critical("Error: No mapping for the tool '%s'" %(tool)) myexit(1) else: logger.critical("Error: This tool does not 
def handle_transfers(transfers, failed_q, attempt): """ handles a list of transfers - failed ones are added to the failed queue """ # t_main is the representative transfer of the group, set in the main loop below try: if tool_map.has_key((t_main.src_proto, t_main.dst_proto)): tool = tool_map[(t_main.src_proto, t_main.dst_proto)] if tool == "cp": cp(transfers, failed_q) elif tool == "fdt": fdt(transfers, failed_q) elif tool == "symlink": symlink(transfers, failed_q) elif tool == "scp": scp(transfers, failed_q) elif tool == "webget": webget(transfers, failed_q) elif tool == "gsiftp": gsiftp(transfers, failed_q, attempt) elif tool == "irods": irods(transfers, failed_q) elif tool == "srm": srm(transfers, failed_q) elif tool == "s3": s3(transfers, failed_q) else: logger.critical("Error: No mapping for the tool '%s'" %(tool)) myexit(1) else: logger.critical("Error: This tool does not know how to transfer from %s:// to %s://" \ % (t_main.src_proto, t_main.dst_proto)) myexit(1) except RuntimeError, err: logger.critical(err) myexit(1) def stats_add(filename): global stats_total_bytes try: s = os.stat(filename) stats_total_bytes = stats_total_bytes + s[stat.ST_SIZE] except BaseException, err: pass # ignore def stats_summarize(): if stats_total_bytes == 0: logger.info("Stats: no local files in the transfer set") return total_secs = stats_end - stats_start Bps = stats_total_bytes / total_secs logger.info("Stats: %sB transferred in %.0f seconds. Rate: %sB/s (%sb/s)" % ( iso_prefix_formatted(stats_total_bytes), total_secs, iso_prefix_formatted(Bps), iso_prefix_formatted(Bps*8))) logger.info("NOTE: stats do not include third party gsiftp/srm transfers") def iso_prefix_formatted(n): prefix = "" n = float(n) if n > (1024*1024*1024*1024): prefix = "T" n = n / (1024*1024*1024*1024) elif n > (1024*1024*1024): prefix = "G" n = n / (1024*1024*1024) elif n > (1024*1024): prefix = "M" n = n / (1024*1024) elif n > (1024): prefix = "K" n = n / (1024) return "%.1f %s" % (n, prefix) def myexit(rc): """ system exit without a stack trace - silly python """ try: sys.exit(rc) except SystemExit: sys.exit(rc) # --- policy functions ------------------------------------------------------- # # Functions post_policy_requests and delete_policy_requests are only used if # PEGASUS_POLICY_CHECKS is set in the runtime environment. # # ---------------------------------------------------------------------------- def post_policy_requests(inputs): """ Format a request to the policy web service with the input Transfer objects. @param inputs: List of transfers to process @type inputs: list @return: List of Transfer objects as modified by the web service. @rtype: list """ logger.debug("post_policy_requests entered; input list has %d Transfers", len(inputs)) transfers = [] for pt in inputs: transfer = {'source' : pt.src_url(), 'destination' : pt.dst_url(), 'properties' : None } transfers.append(transfer) policy_requests = json.dumps(transfers) policy_headers = {'Content-type' : 'application/json', 'Encoding' : 'latin-1'} try: # TODO Change to variables instead of hardcoding ws = HTTPConnection(policy_host, policy_port) ws.request('POST', policy_url, policy_requests, policy_headers) resp = ws.getresponse(); logger.info("policy web service status: %d", resp.status) resp_string = resp.read() ws.close() except HTTPException, e: logger.critical("Exception communicating with policy web service: %s", str(e)) raise RuntimeError("Exception communicating with policy web service: %s" %(str(e))) except SocketError, (value, message): logger.critical("socket exception: [ERRNO%s] %s", value, message) raise RuntimeError("socket exception: [ERRNO%s] %s" %(value, message)) logger.debug("policy web service response: %s", resp_string) posted_transfers = json.loads(resp_string) inputs = []; # wipe out old list pair_nr = 0 for pt in posted_transfers: logger.debug("source = %s destination = %s properties = {%s} id = %s", pt['source'], pt['destination'], pt['properties'], pt['id']) pair_nr +=1 policy_transfer = Transfer(pair_nr) policy_transfer.set_src(pt['source']); policy_transfer.set_dst(pt['destination']) policy_transfer.set_policy_id(pt['id']) logger.debug("appending Transfer %d, id=%s", pair_nr, policy_transfer.policy_id ) inputs.append(policy_transfer) logger.debug("post_policy_requests return") return inputs
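# Editorial note: for reference, the JSON exchanged with the (site-specific)
# policy web service looks roughly like this, as reconstructed from the code
# above; the URLs and the id value are illustrative only:
#
#   request body (built by post_policy_requests):
#     [{"source": "gsiftp://host.example.edu/data/f.a",
#       "destination": "file:///scratch/f.a",
#       "properties": null}]
#
#   response body (same records, plus a service-assigned id that
#   delete_policy_requests later appends to the DELETE URL):
#     [{"source": "gsiftp://host.example.edu/data/f.a",
#       "destination": "file:///scratch/f.a",
#       "properties": null, "id": "42"}]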
def delete_policy_requests(transfers): """ Send a DELETE request to the policy web service. Uses the 'policy_id' attribute of each Transfer object to remove that transfer from the web service database. @param transfers: A list of transfers to process. @type transfers: list @return: Nothing. """ logger.debug("delete_policy_requests enter; input list has %d Transfers", len(transfers)) try: ws = HTTPConnection(policy_host, policy_port) for transfer in transfers: delete_request = policy_url + transfer.policy_id logger.debug("Sending DELETE request: %s", delete_request) ws.request('DELETE', delete_request) resp = ws.getresponse(); logger.debug("status for %s: %d", delete_request, resp.status) ws.close() except HTTPException, e: logger.critical("Exception communicating with policy web service: %s", str(e)) raise RuntimeError("Exception communicating with policy web service: %s" %(str(e))) except SocketError, (value, message): logger.critical("socket exception: [ERRNO%s] %s", value, message) raise RuntimeError("socket exception: [ERRNO%s] %s" %(value, message)) logger.debug("delete_policy_requests exit") # --- main ---------------------------------------------------------------------------- # dup stderr onto stdout sys.stderr = sys.stdout # Configure command line option parser prog_usage = "usage: %s [options]" % (prog_base) parser = optparse.OptionParser(usage=prog_usage) parser.add_option("-l", "--loglevel", action = "store", dest = "log_level", help = "Log level. Valid levels are: debug,info,warning,error. Default is info.") parser.add_option("-f", "--file", action = "store", dest = "file", help = "File containing URL pairs to be transferred. If not given, list is read from stdin.") parser.add_option("", "--max-attempts", action = "store", type="int", dest = "max_attempts", default = 2, help = "Number of attempts allowed for each transfer. Default is 2.") # Check environment to decide whether to use a Policy Service. if 'PEGASUS_POLICY_CHECKS' in os.environ: true_synonyms = ('true', '1', 't', 'y', 'yes', 'on', 'enabled', 'enable') using_policy_service = os.environ['PEGASUS_POLICY_CHECKS'].lower() in true_synonyms if using_policy_service: # Extra imports to make policy service work. If not available, fail now try: import json except ImportError: print >>sys.stderr, "Cannot use Policy Check: Failed to import JSON library" sys.exit(1) try: from httplib import HTTPConnection, HTTPException except ImportError: print >>sys.stderr, "Cannot use Policy Check: Failed to import HTTP library" sys.exit(1) from socket import error as SocketError policy_host = "localhost" policy_port = 80 policy_url = "/policy/transfer/" parser.add_option("", "--policy-host", action = "store", dest = "policy_host", help = "hostname for the Policy web service; default is '%s'" %(policy_host)) parser.add_option("", "--policy-port", type="int", dest = "policy_port", help = ("Port used by the policy web service; default is %d" %(policy_port))) parser.add_option("", "--policy-url", dest = "policy_url", help = ("URL of the policy web service; default is '%s'" %(policy_url))) if 'PEGASUS_POLICY_HOST' in os.environ: policy_host = os.environ['PEGASUS_POLICY_HOST'] if 'PEGASUS_POLICY_PORT' in os.environ: policy_port = int(os.environ['PEGASUS_POLICY_PORT']) if 'PEGASUS_POLICY_URL' in os.environ: policy_url = os.environ['PEGASUS_POLICY_URL'] # Parse command line options (options, args) = parser.parse_args() if options.log_level == None: options.log_level = "info" setup_logger(options.log_level)
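# Editorial note: the input format is alternating source/destination lines;
# comment lines and very short lines are skipped (see the url_first toggle in
# the main section below). The same pairing can be written with slicing. An
# illustrative sketch, not used by the tool:
def read_pairs(lines):
    urls = [l.rstrip('\n') for l in lines if l[0] != '#' and len(l) > 4]
    if len(urls) % 2 != 0:
        raise RuntimeError("odd number of URLs - a destination is missing")
    # urls[0::2] are the sources, urls[1::2] the matching destinations
    return zip(urls[0::2], urls[1::2])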
# If we're using a Policy Service, check the command line for environment # overrides. if using_policy_service: if options.policy_host != None: policy_host = options.policy_host if options.policy_port != None: policy_port = options.policy_port if options.policy_url != None: policy_url = options.policy_url # Die nicely when asked to (Ctrl+C, system shutdown) signal.signal(signal.SIGINT, prog_sigint_handler) attempts_max = options.max_attempts # stdin or file input? if options.file == None: logger.info("Reading URL pairs from stdin") input_file = sys.stdin else: logger.info("Reading URL pairs from %s" % (options.file)) try: input_file = open(options.file, 'r') except Exception, err: logger.critical('Error reading url pair list: %s' % (err)) myexit(1) # check environment and tools try: check_env_and_tools() except Exception, err: logger.critical(err) myexit(1) # queues to track the work transfer_q = deque() failed_q = deque() # fill the transfer queue with user provided entries line_nr = 0 pair_nr = 0 inputs = [] url_first = True try: for line in input_file.readlines(): line_nr += 1 if line[0] != '#' and len(line) > 4: line = line.rstrip('\n') if url_first: pair_nr += 1 url_pair = Transfer(pair_nr) url_pair.set_src(line) url_first = False else: url_pair.set_dst(line) inputs.append(url_pair) url_first = True except Exception, err: logger.critical('Error reading url pair list: %s' % (err)) myexit(1) # Check our policy service flag and branch appropriately. if using_policy_service: # Send the input list to the policy server before continuing logger.info("Using policy web service at %s on port %d with URL %s", policy_host, policy_port, policy_url) inputs = post_policy_requests(inputs) else: # we will now sort the list as some tools (gridftp) can optimize when # given a group of similar transfers logger.info("Sorting the transfers based on transfer type and source/destination") inputs.sort() transfer_q = deque(inputs) # start the stats timer stats_start = time.time() # attempt transfers until the queue is empty done = False attempt_current = 0 while not done: attempt_current = attempt_current + 1 logger.info("----------------------------------------------------------------------") logger.info("Starting transfers - attempt %d" % (attempt_current)) # do the transfers while transfer_q: t_main = transfer_q.popleft() # create a list of transfers to pass to underlying tool t_list = [] t_list.append(t_main) try: t_next = transfer_q[0] except IndexError, err: t_next = False while t_next and transfers_groupable(t_main, t_next): t_list.append(t_next) transfer_q.popleft() try: t_next = transfer_q[0] except IndexError, err: t_next = False # magic! handle_transfers(t_list, failed_q, attempt_current) logger.debug("%d items in failed_q" %(len(failed_q))) if using_policy_service: # Remove these from policy web service delete_policy_requests(t_list) # are we done? if attempt_current == attempts_max or not failed_q: done = True break # retry failed transfers with a delay if failed_q and attempt_current < attempts_max: time.sleep(10) # do not sleep too long - we want to give quick feedback on failures to the workflow while failed_q: t = failed_q.popleft() t.allow_grouping = False # only allow grouping on the first try transfer_q.append(t) # end the stats timer and show summary stats_end = time.time() stats_summarize() if failed_q: logger.critical("Some transfers failed!
See above, and possibly stderr.") myexit(1) logger.info("All transfers completed successfully.") myexit(0) pegasus-wms_4.0.1+dfsg/bin/pegasus-s30000755000175000017500000007151311757531137016533 0ustar ryngerynge#!/usr/bin/env python # # Copyright 2010-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import sys import os import math import stat import subprocess import threading import Queue import mmap import fnmatch import re from optparse import OptionParser from urlparse import urlsplit from ConfigParser import ConfigParser # Use pegasus-config to find our lib path bin_dir = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0]))) pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --noeoln --python" lib_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --noeoln --python-externals" lib_ext_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] # Insert this directory in our search path os.sys.path.insert(0, lib_ext_dir) os.sys.path.insert(0, lib_dir) try: import boto import boto.exception import boto.s3.connection from boto.s3.bucket import Bucket from boto.s3.key import Key from boto.s3.multipart import MultiPartUpload except ImportError, e: sys.stderr.write("ERROR: Unable to load boto library: %s\n" % e) exit(1) COMMANDS = { 'ls': 'List the contents of a bucket', 'mkdir': 'Create a bucket in S3', 'rmdir': 'Delete a bucket from S3', 'rm': 'Delete a file from S3', 'put': 'Upload a file to S3', 'get': 'Download a file from S3', 'lsup': 'List multipart uploads', 'rmup': 'Cancel multipart uploads', 'help': 'Print this message' } KB = 1024 MB = 1024*KB GB = 1024*MB TB = 1024*GB DEFAULT_CONFIG = { "max_object_size": str(5), "multipart_uploads": str(False), "ranged_downloads": str(False) } DEBUG = False VERBOSE = False class FilePart: """ This is a file-like object that can be used to access a given range of bytes in a file. It is used for chunked uploads and downloads. 
""" def __init__(self, mm, start, length): self.mm = mm self.start = start self.offset = start self.length = length def seek(self, offset, whence=os.SEEK_SET): if whence==os.SEEK_SET: self.offset = self.start + offset elif whence==os.SEEK_CUR: self.offset += offset elif whence==os.SEEK_END: self.offset = self.start + self.length + offset else: raise Exception("Invalid seek") def tell(self): return self.offset - self.start def read(self, size=None): # Attempt to read before start if self.offset < self.start: return '' # Attempt to read past end if self.offset - self.start >= self.length: return '' remaining = self.start + self.length - self.offset if size > remaining or size is None: size = remaining start = self.offset end = self.offset + size data = self.mm[start:end] self.offset += size return data def write(self, data): size = len(data) start = self.offset end = self.offset + size # Attempt to write before start if start < self.start: raise Exception("Invalid offset") # Attempt to write past end if end > self.start + self.length + 1: raise Exception("Invalid offset") self.mm[start:end] = data self.offset += size class WorkThread(threading.Thread): def __init__(self, queue): threading.Thread.__init__(self) self.queue = queue self.exception = None self.daemon = True def run(self): try: # Just keep grabbing work units and # executing them until there are no # more to process, then exit while True: fn = self.queue.get(False) fn() except Queue.Empty: return except Exception, e: self.exception = e def debug(message): if DEBUG: sys.stderr.write("%s\n" % message) def info(message): if VERBOSE: sys.stdout.write("%s\n" % message) def warn(message): sys.stderr.write("WARNING: %s\n" % message) def fix_file(url): if url.startswith("file://"): url = os.path.abspath(url.replace("file:","")) return url def has_wildcards(string): if string is None: return False wildcards = "*?[]" for c in wildcards: if c in string: return True return False def help(args): sys.stderr.write("Usage: %s COMMAND\n\n" % os.path.basename(sys.argv[0])) sys.stderr.write("Commands:\n") for cmd in COMMANDS: sys.stderr.write(" %-8s%s\n" % (cmd, COMMANDS[cmd])) def option_parser(usage): command = os.path.basename(sys.argv[0]) parser = OptionParser(usage="usage: %s %s" % (command, usage)) parser.add_option("-d", "--debug", dest="debug", action="store_true", default=False, help="Turn on debugging") parser.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False, help="Show progress messages") parser.add_option("-C", "--conf", dest="config", metavar="FILE", default=None, help="Path to configuration file") # Add a hook so we can handle global arguments fn = parser.parse_args def parse(*args, **kwargs): options, args = fn(*args, **kwargs) if options.debug: boto.set_stream_logger("boto") global DEBUG DEBUG = True if options.verbose: global VERBOSE VERBOSE = True return options, args parser.parse_args = parse return parser def get_config(options): if options.config: cfg = options.config else: cfg = os.getenv("S3CFG", None) if cfg is None: cfg = os.path.expanduser("~/.s3cfg") if not os.path.isfile(cfg): raise Exception("Unable to locate config file: %s" % cfg) # Make sure nobody else can read the file mode = os.stat(cfg).st_mode if mode & (stat.S_IRWXG | stat.S_IRWXO): raise Exception("Permissions of config file %s are too liberal" % cfg) config = ConfigParser(DEFAULT_CONFIG) config.read(cfg) return config def parse_endpoint(uri): result = urlsplit(uri) return { 'is_secure': result.scheme=='https', 'host': 
result.hostname, 'port': result.port, 'path': result.path } def get_connection(config, uri): if not config.has_section(uri.site): raise Exception("Config file has no section for site '%s'" % uri.site) if not config.has_section(uri.ident): raise Exception("Config file has no section for identity '%s'" % uri.ident) endpoint = config.get(uri.site,"endpoint") kwargs = parse_endpoint(endpoint) kwargs['aws_access_key_id'] = config.get(uri.ident,"access_key") kwargs['aws_secret_access_key'] = config.get(uri.ident,"secret_key") kwargs['calling_format'] = boto.s3.connection.OrdinaryCallingFormat() # If the URI is s3s, then override the config if uri.secure: kwargs['is_secure'] = True return boto.s3.connection.S3Connection(**kwargs) def read_command_file(path): tokenizer = re.compile(r"\s+") f = open(path, "r") try: for line in f: line = line.strip() if len(line) == 0: continue if line.startswith("#"): continue yield tokenizer.split(line) finally: f.close() class S3URI: def __init__(self, user, site, bucket=None, key=None, secure=False): self.user = user self.site = site self.ident = "%s@%s" % (user, site) self.bucket = bucket self.key = key self.secure = secure def __repr__(self): if self.secure: uri = "s3s://%s" % self.ident else: uri = "s3://%s" % self.ident if self.bucket is not None: uri += "/%s" % self.bucket if self.key is not None: uri += "/%s" % self.key return uri def parse_uri(uri): "Parse S3 uri into an S3URI object" # The only valid schemes are s3s:// and s3:// if uri.startswith("s3s://"): secure = True elif uri.startswith("s3://"): secure = False else: raise Exception("Invalid URL scheme: %s" % (uri)) # Need to do this because urlparse does not recognize # custom URI schemes. Replace our scheme with http. # The actual scheme used isn't important as long as # urlsplit recognizes it. if secure: http = uri.replace("s3s://","http://") else: http = uri.replace("s3://","http://") result = urlsplit(http) # The user should not be specifying a query part unless # they are trying to use the ? wildcard. If they do use # the ? wildcard, then urlsplit thinks it is the query # separator. In that case, we put the path and query # back together. if '?' 
in uri: path = "?".join([result.path, result.query]).strip() else: path = result.path.strip() # The path should be empty, /BUCKET or /BUCKET/KEY if path.startswith("/"): path = path[1:] if len(path) == 0: bucket = None key = None else: comp = path.split('/',1) bucket = comp[0] if len(comp) == 1: key = None elif comp[1] == '': key = None else: key = comp[1] # We require the username part user = result.username if user is None: raise Exception("User missing from URL: %s" % uri) if result.port is None: site = result.hostname else: site = "%s:%s" % (result.hostname, result.port) return S3URI(user, site, bucket, key, secure) def ls(args): parser = option_parser("ls URL...") options, args = parser.parse_args(args) if len(args) == 0: parser.error("Specify a URL") config = get_config(options) items = [] for uri in args: items.append(parse_uri(uri)) for uri in items: conn = get_connection(config, uri) bucket = uri.bucket key = uri.key sys.stdout.write("%s\n" % uri) if bucket is None: buckets = conn.get_all_buckets() for bucket in buckets: sys.stdout.write("\t%s\n" % bucket.name) else: b = conn.get_bucket(uri.bucket) if has_wildcards(uri.key): for o in b.list(): if fnmatch.fnmatch(o.name, uri.key): sys.stdout.write("\t%s\n" % o.name) else: for o in b.list(prefix=uri.key): # For some reason, Walrus sometimes returns a Prefix object if isinstance(o, boto.s3.prefix.Prefix): continue sys.stdout.write("\t%s\n" % o.name) def mkdir(args): parser = option_parser("mkdir URL...") options, args = parser.parse_args(args) if len(args) == 0: parser.error("Specify URL") buckets = [] for arg in args: uri = parse_uri(arg) if uri.bucket is None: raise Exception("URL for mkdir must contain a bucket: %s" % arg) if uri.key is not None: raise Exception("URL for mkdir cannot contain a key: %s" % arg) buckets.append(uri) config = get_config(options) for uri in buckets: info("Creating %s" % uri) conn = get_connection(config, uri) conn.create_bucket(uri.bucket) def rmdir(args): parser = option_parser("rmdir URL...") options, args = parser.parse_args(args) if len(args) == 0: parser.error("Specify URL") buckets = [] for arg in args: uri = parse_uri(arg) if uri.bucket is None: raise Exception("URL for rmdir must contain a bucket: %s" % arg) if uri.key is not None: raise Exception("URL for rmdir cannot contain a key: %s" % arg) buckets.append(uri) config = get_config(options) for uri in buckets: info("Removing bucket %s" % uri) conn = get_connection(config, uri) conn.delete_bucket(uri.bucket) def rm(args): parser = option_parser("rm URL...") parser.add_option("-f", "--force", dest="force", action="store_true", default=False, help="Ignore nonexistent keys") parser.add_option("-F", "--file", dest="file", action="store", default=None, help="File containing a list of URLs to delete") options, args = parser.parse_args(args) if len(args) == 0 and not options.file: parser.error("Specify URL") if options.file: for rec in read_command_file(options.file): if len(rec) != 1: raise Exception("Invalid record: %s" % rec) args.append(rec[0]) buckets = {} for arg in args: uri = parse_uri(arg) if uri.bucket is None: raise Exception("URL for rm must contain a bucket: %s" % arg) if uri.key is None: raise Exception("URL for rm must contain a key: %s" % arg) bid = "%s/%s" % (uri.ident, uri.bucket) buri = S3URI(uri.user, uri.site, uri.bucket, uri.secure) if bid not in buckets: buckets[bid] = (buri, []) buckets[bid][1].append(uri) config = get_config(options) for bucket in buckets: uri, keys = buckets[bucket] conn = get_connection(config, uri) b = 
Bucket(connection=conn, name=uri.bucket) for key in keys: key_name = key.key if has_wildcards(key_name): for k in b.list(): if fnmatch.fnmatch(k.name, key_name): info("Removing key %s" % k.name) k.delete() else: info("Removing key %s" % key.key) b.delete_key(key_name=key.key) def PartialUpload(up, part, parts, mm, offset, length): def upload(): info("Uploading part %d of %d" % (part, parts)) f = FilePart(mm, offset, length) up.upload_part_from_file(f, part) info("Finished uploading part %d (%s bytes)" % (part, length)) return upload def put(args): parser = option_parser("put FILE URL") parser.add_option("-c", "--chunksize", dest="chunksize", action="store", type="int", metavar="X", default=10, help="Set the chunk size for multipart uploads to X MB. " "A value of 0 disables multipart uploads. The default is 10MB, the min is 5MB " "and the max is 1024MB. This parameter only applies for sites that support " "multipart uploads (see multipart_uploads configuration parameter). The maximum " "number of chunks is 10,000, so if you are uploading a large file, then the " "chunksize is automatically increased to enable the upload. Choose smaller values " "to reduce the impact of transient failures.") parser.add_option("-p", "--parallel", dest="parallel", action="store", type="int", metavar="N", default=0, help="Use N threads to upload FILE in parallel. " "The default value is 0, which disables parallel uploads. This parameter " "is only valid if the site supports multipart uploads and the --chunksize " "parameter is not 0.") parser.add_option("-b", "--create-bucket", dest="create_bucket", action="store_true", default=False, help="Create the destination bucket if it does not already exist") options, args = parser.parse_args(args) if options.chunksize != 0 and (options.chunksize < 5 or options.chunksize > 1024): parser.error("Invalid chunksize") if options.parallel < 0: parser.error("Invalid value for --parallel") if len(args) != 2: parser.error("Specify FILE and URL") infile = fix_file(args[0]) url = args[1] if not os.path.isfile(infile): raise Exception("No such file: %s" % infile) # Validate URL uri = parse_uri(url) if uri.bucket is None: raise Exception("URL for put must have a bucket: %s" % url) if uri.key is None: uri.key = os.path.basename(infile) config = get_config(options) # Make sure file is not too large for the service size = os.stat(infile).st_size max_object_size = config.getint(uri.site, "max_object_size") if size > (max_object_size*GB): raise Exception("File %s exceeds object size limit" " (%sGB) of service" % (infile, max_object_size)) info("Uploading %s" % uri) # Does the site support multipart uploads? multipart_uploads = config.getboolean(uri.site, "multipart_uploads") # Warn the user if options.parallel > 0: if not multipart_uploads: warn("Multipart uploads disabled, ignoring --parallel") elif options.chunksize == 0: warn("--chunksize set to 0, ignoring --parallel") conn = get_connection(config, uri) # Create the bucket if the user requested it and it does not exist if options.create_bucket: b = conn.lookup(uri.bucket) if b is None: conn.create_bucket(uri.bucket) b = Bucket(connection=conn, name=uri.bucket) if (not multipart_uploads) or (options.chunksize == 0): # no multipart, or chunks disabled, just do it the simple way k = Key(bucket=b, name=uri.key) k.set_contents_from_filename(infile) else: # Multipart supported, chunking requested # The target chunk size is user-defined, but we may need # to go larger if the file is big because the maximum number # of chunks is 10,000. 
So the actual size of a chunk # will range from 5MB to ~525MB if the maximum object size # is 5 TB. part_size = max(options.chunksize*MB, size/9999) num_parts = int(math.ceil(size / float(part_size))) if num_parts <= 1: # Serial k = Key(bucket=b, name=uri.key) k.set_contents_from_filename(infile) else: # Parallel # Map the file f = open(infile, "r+b") mm = mmap.mmap(f.fileno(), 0, mmap.MAP_SHARED) # Request upload info("Creating multipart upload") upload = b.initiate_multipart_upload(uri.key) try: # Create all uploads uploads = [] for i in range(0, num_parts): length = min(size-(i*part_size), part_size) up = PartialUpload(upload, i+1, num_parts, mm, i*part_size, length) uploads.append(up) if options.parallel <= 1: # Serial for up in uploads: up() else: # Parallel # Queue up requests queue = Queue.Queue() for up in uploads: queue.put(up) # No sense forking more threads than there are chunks nthreads = min(options.parallel, num_parts) # Fork threads threads = [] for i in range(0, nthreads): t = WorkThread(queue) threads.append(t) t.start() # Wait for the threads for t in threads: t.join() # If any of the threads encountered # an error, then we fail here if t.exception is not None: raise t.exception info("Completing upload") upload.complete_upload() mm.close() f.close() except Exception, e: # If there is an error, then we need to try and abort # the multipart upload so that it doesn't hang around # forever on the server. try: info("Aborting multipart upload") upload.cancel_upload() except Exception, f: sys.stderr.write("ERROR: Unable to abort multipart" " upload (use lsup/rmup): %s\n" % f) raise e info("Upload complete") def lsup(args): parser = option_parser("lsup URL") options, args = parser.parse_args(args) if len(args) == 0: parser.error("Specify URL") uri = parse_uri(args[0]) if uri.bucket is None: raise Exception("URL must contain a bucket: %s" % args[0]) if uri.key is not None: raise Exception("URL cannot contain a key: %s" % args[0]) config = get_config(options) conn = get_connection(config, uri) b = conn.get_bucket(uri.bucket) for up in b.list_multipart_uploads(): uri.key = up.key_name sys.stdout.write("%s %s\n" % (uri, up.id)) def rmup(args): parser = option_parser("rmup URL [UPLOAD]") parser.add_option("-a", "--all", dest="all", action="store_true", default=False, help="Cancel all uploads for the specified bucket") options, args = parser.parse_args(args) if options.all: if len(args) < 1: parser.error("Specify bucket URL") else: if len(args) != 2: parser.error("Specify bucket URL and UPLOAD") upload = args[1] uri = parse_uri(args[0]) if uri.bucket is None: raise Exception("URL must contain a bucket: %s" % args[0]) if uri.key is not None: raise Exception("URL cannot contain a key: %s" % args[0]) config = get_config(options) conn = get_connection(config, uri) # There is no easy way to do this with boto b = Bucket(connection=conn, name=uri.bucket) for up in b.list_multipart_uploads(): if options.all or up.id == upload: info("Removing upload %s" % up.id) up.cancel_upload() def PartialDownload(key, mm, part, parts, start, end): def download(): info("Downloading part %d of %d" % (part, parts)) length = end - start f = FilePart(mm, start, length) key.get_file(f, headers={"Range": "bytes=%d-%d" % (start, end)}) info("Part %d finished (%s bytes)" % (part, key.size)) return download def get(args): parser = option_parser("get URL [FILE]") parser.add_option("-c", "--chunksize", dest="chunksize", action="store", type="int", metavar="X", default=10, help="Set the chunk size for parallel downloads 
to X " "megabytes. A value of 0 will avoid chunked reads. This option only applies for " "sites that support ranged downloads (see ranged_downloads configuration " "parameter). The default chunk size is 10MB, the min is 1MB and the max is " "1024MB. Choose smaller values to reduce the impact of transient failures.") parser.add_option("-p", "--parallel", dest="parallel", action="store", type="int", metavar="N", default=0, help="Use N threads to upload FILE in parallel. The " "default value is 0, which disables parallel downloads. This parameter is " "only valid if the site supports ranged downloads and the --chunksize " "parameter is not 0.") options, args = parser.parse_args(args) if options.chunksize < 0 or options.chunksize > 1024: parser.error("Invalid chunksize") if options.parallel < 0: parser.error("Invalid value for --parallel") if len(args) == 0: parser.error("Specify URL") uri = parse_uri(args[0]) if uri.bucket is None: raise Exception("URL must contain a bucket: %s" % args[0]) if uri.key is None: raise Exception("URL must contain a key: %s" % args[0]) if len(args) > 1: outfile = fix_file(args[1]) else: outfile = uri.key info("Downloading %s" % uri) # Does the site support ranged downloads properly? config = get_config(options) ranged_downloads = config.getboolean(uri.site, "ranged_downloads") # Warn the user if options.parallel > 1: if not ranged_downloads: warn("ranged downloads not supported, ignoring --parallel") elif options.chunksize == 0: warn("--chunksize set to 0, ignoring --parallel") conn = get_connection(config, uri) b = Bucket(connection=conn, name=uri.bucket) if (not ranged_downloads) or (options.chunksize == 0): # Ranged downloads not supported, or chunking disabled k = Key(bucket=b, name=uri.key) k.get_contents_to_filename(outfile) else: # Ranged downloads and chunking requested # Get the size of the key k = b.get_key(uri.key) if k is None: raise Exception("No such key: %s" % uri) size = k.size # Compute chunks part_size = options.chunksize*MB num_parts = int(math.ceil(size / float(part_size))) if num_parts <= 1: # No point if there is only one chunk k.get_contents_to_filename(outfile) else: # Create the file and mmap it. We have to pre-create # the file, otherwise the mmaping won't work properly. 
f = open(outfile, "w+b") f.seek(size-1) f.write('\0') f.flush() mm = mmap.mmap(f.fileno(), 0, mmap.MAP_SHARED) # Create all the downloads downloads = [] for i in range(0, num_parts): start = i*part_size end = min(size, start+part_size-1) # Need to pass a different key object to each thread because # the Key object in boto is not thread-safe ki = Key(bucket=b, name=uri.key) down = PartialDownload(ki, mm, i+1, num_parts, start, end) downloads.append(down) if options.parallel <= 1: # Serial for down in downloads: down() else: # Parallel # No sense forking more threads than there are chunks nthreads = min(options.parallel, num_parts) info("Starting parallel download with %d threads" % nthreads) # Queue up requests queue = Queue.Queue() for down in downloads: queue.put(down) # Fork threads threads = [] for i in range(0, nthreads): t = WorkThread(queue) threads.append(t) t.start() # Wait for the threads for t in threads: t.join() # If any of the threads encountered # an error, then we fail here if t.exception is not None: raise t.exception # Close the mmap()ed file mm.close() f.close() info("Download complete") def main(): if len(sys.argv) < 2: help(sys.argv) exit(1) command = sys.argv[1].lower() args = sys.argv[2:] if command in COMMANDS: fn = globals()[command] try: fn(args) except boto.exception.S3ResponseError, e: if sys.stderr.isatty() and not DEBUG: sys.stderr.write("ERROR: %s\n" % e.error_message) exit(1) else: raise except Exception, e: if sys.stderr.isatty() and not DEBUG: sys.stderr.write("ERROR: %s\n" % e) exit(1) else: raise else: sys.stderr.write("ERROR: Unknown command: %s\n" % command) exit(1) if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/bin/pegasus-submit-dag0000755000175000017500000002126711757531137020243 0ustar ryngerynge#!/usr/bin/env perl # # wrapper around condor_submit_dag, activates basic throttles # # This file or a portion of this file is licensed under the terms of # the Globus Toolkit Public License, found in file GTPL, or at # http://www.globus.org/toolkit/download/license.html. This notice must # appear in redistributions of this file, with or without modification. # # Redistributions of this Software, with or without modification, must # reproduce the GTPL in: (1) the Software, or (2) the Documentation or # some other similar material which is provided with the Software (if # any). # # Copyright 1999-2004 University of Chicago and The University of # Southern California. All rights reserved. # # Author: Jens-S. Vöckler voeckler@cs.uchicago.edu # Author: Gaurang Mehta gmehta at isi dot edu # Revision : $Revision: 4900 $ # use 5.006; use strict; use File::Spec; use File::Copy; use File::Basename qw(basename dirname); use Getopt::Long qw(:config bundling no_ignore_case); use Data::Dumper; BEGIN { my $pegasus_config = File::Spec->catfile( dirname($0), 'pegasus-config' ); eval `$pegasus_config --perl-dump`; die("Unable to eval pegasus-config output. 
$@") if $@; } # load our own local modules, see PERL5LIB settings use Pegasus::Properties; # parses -Dprop=val from @ARGV use Pegasus::Common; # some reasonable defaults my $maxpre = 20; my $maxpost = 20; my $maxjobs = 0; my $maxidle = 0; my $dagman; my $submit=1; my $notify = 'NEVER'; my $verbose; my $conffile; my %props = (); my $grid=0; #Grid checks enabled $main::DEBUG = 0; # for now $main::revision = 'unknown'; $_ = '$Revision: 4900 $'; # don't edit, automatically updated by CVS $main::revision=$1 if /Revision:\s+([0-9.]+)/o; sub myversion() { my $version = version(); print "Pegasus $version, @{[basename($0)]} $main::revision\n"; exit 0; } sub usage(;$) { my $msg = shift; print "ERROR: $msg\n" if defined $msg && lc($msg) ne 'help'; print << "EOF"; Usage: @{[basename($0)]} [-Dprops] [options] dagfile -Dprop=val Commandline overwrite for properties, must be initial args! -c|--conf fn Read properties from given filename instead of rundir. -d|--debug lv Initializes the level lv of verbosity, default $main::DEBUG -e|--dagman fn Specify an alternative dagman binary to use. -P|--maxpre N Maximum number of pre-scripts to run, default $maxpre -p|--maxpost N Maximum number of post-scripts to run, default $maxpost -j|--maxjobs N Maximum number of jobs to submit to Condor, default $maxjobs -i|--maxidle N Maximum number of idle jobs, default $maxidle -n|--notify x When to notify: Never, Error, Complete; default $notify -v|--verbose Enter DAGMan verbose mode, default is not -V|--version Print version number and exit. --grid | --nogrid Enable checks for grid proxy and GLOBUS LOCATION (Default is enabled) A maximum number of 0 means unlimited. EOF exit(1); } sub proxy_duration { # purpose: determine remaining time on grid user proxy # returns: undef in case of error, or remaining time. # my $gpi = File::Spec->catfile( $ENV{'GLOBUS_LOCATION'}, 'bin', 'grid-proxy-info' ); die "ERROR: Unable to find GLOBUS_LOCATION, please check your setup\n" unless exists $ENV{'GLOBUS_LOCATION'} && $ENV{'GLOBUS_LOCATION'}; die "ERROR: Unable to execute grid-proxy-info\n" unless -x $gpi; my $left = 0; chomp($left=`$gpi -timeleft`); $? == 0 ? $left + 0 : undef; } sub salvage_logfile($) { # purpose: salvage Condor common log file from truncation # paramtr: $dagfn (IN): Name of dag filename # returns: - # my $dagfn = shift; my $result = undef; local(*DAG,*SUB,*LOG); if ( open( DAG, "<$dagfn" ) ) { # read to to figure out submit files my @x; my %submit = (); while ( ) { next unless /^\s*job/i; s/[\r\n]+$//; # safe chomp @x = split; $submit{$x[1]} = $x[2]; # dagjobid -> subfn } close DAG; if ( $main::DEBUG > 2 ) { print STDERR "# found the following associations:\n"; local $Data::Dumper::Indent = 1; local $Data::Dumper::Pad = "# "; print STDERR Data::Dumper->Dump( [\%submit], [qw(config)] ); } # read two submit files to figure out condor common log file foreach my $subfn ( values %submit ) { if ( open( SUB, "<$subfn" ) ) { my $logfile = undef; while ( ) { next unless /^log(=|\s)/i; s/[\r\n]+$//; # safe chomp @x = split /\s*=\s*/, $_, 2; $logfile = ( substr( $x[1], 0, 1 ) =~ /[''""]/ ? substr( $x[1], 1, -1 ) : $x[1] ); last; } close SUB; print STDERR "# $subfn points to $logfile\n" if ( $main::DEBUG > 1 ); if ( ! 
defined $result ) { $result = $logfile; } else { last if $result eq $logfile; warn "# Using distinct, different log files, skipping preservation.\n"; return undef; } } else { warn "Unable to read sub file $subfn: $!\n"; } } # try to preserve log file if ( defined $result && -s $result ) { my $newfn; print STDERR "# log $result exists, rescuing from DAGMan.\n" if $main::DEBUG; for ( my $i=0; $i<1000; ++$i ) { $newfn = sprintf "%s.%03d", $result, $i; if ( open( LOG, "<$newfn" ) ) { # file exists close LOG; } else { # file does not exist, use that my $newresult=$result; # check if the file is a symlink, then dereference it. if ( -l $result ) { $newresult=readlink($result); } print STDOUT "Rescued $result as $newfn\n" if copy( $newresult, $newfn ) or warn "Could not rescue the log file $newresult to $newfn\n $! \nTrying to continue\n"; last; } } } else { print STDERR "# log $result does not yet exist (good)\n" if ( $main::DEBUG ); } } else { die "ERROR: Unable to read dag file $dagfn: $!\n"; } $result; } GetOptions( "debug|d=i" => \$main::DEBUG, "maxpre|P=i" => \$maxpre, "maxpost|p=i" => \$maxpost, 'dagman|e=s' => \$dagman, "submit!" => \$submit, 'conf|c=s' => \$conffile, "maxjob|maxjobs|j=i" => \$maxjobs, "maxidle|i=i" => \$maxidle, "notify|n=s" => \$notify, "version|V" => \&myversion, "verbose|v" => \$verbose, "grid!" => \$grid, "help|h|?" => \&usage ); # check grid stuff only if $grid is enabled if ($grid) { # check Globus proxy lifetime? my $left = proxy_duration() || die "ERROR: Problems with grid-proxy-info. Check your user proxy\n"; if ( $left <= 0 ) { die "ERROR: Your grid user proxy has expired, please refresh now.\n"; } elsif ( $left < 7200 ) { warn "Warning: There is little time remaining on your proxy. You need to refresh soon!\n"; } } my $dag = shift || usage("Need the name of a .dag file\n"); my $c_s_d = find_exec( 'condor_submit_dag' ) || die "Unable to locate condor_submit_dag\n"; my $c_s = find_exec('condor_submit') || die "Unable to locate condor_submit\n"; salvage_logfile($dag); my $run=dirname($dag); my %config = slurp_braindb( $run ) or die "ERROR: open braindb: $!\n"; # pre-condition: The planner writes all properties per workflow into the DAG dir. 
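# For example (hypothetical values), if the planned properties file
# contains
#
#   dagman.maxjobs = 50
#   dagman.maxidle = 20
#
# these override the built-in defaults above and end up on the generated
# condor_submit_dag command line as "-maxjobs 50 -maxidle 20".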
my $props = Pegasus::Properties->new( $conffile, File::Spec->catfile($run,$config{properties} )); # set true defaults from properties $maxpre = $props->property('dagman.maxpre') || $maxpre; $maxpost = $props->property('dagman.maxpost') || $maxpost; $maxjobs = $props->property('dagman.maxjobs') || $maxjobs; $maxidle = $props->property('dagman.maxidle') || $maxidle; $notify = $props->property('dagman.notify') || $notify; $verbose = 1 if lc($props->property('dagman.verbose')) =~ /(true|on|1)/ || $verbose; # construct commandline my @arg = ( $c_s_d ); #push( @arg, '-dagman', $dagman ) if $dagman; push( @arg, '-MaxPre', $maxpre ) if $maxpre > 0; push( @arg, '-MaxPost', $maxpost ) if $maxpost > 0; push( @arg, '-maxjobs', $maxjobs ) if $maxjobs > 0; push( @arg, '-maxidle', $maxidle ) if $maxidle > 0; push( @arg, '-notification', $notify ); push( @arg, '-verbose' ) if $verbose; push( @arg, '-append', 'executable='.$dagman ) if $dagman; push( @arg, '-append', '+pegasus_wf_uuid="'.$config{'wf_uuid'}.'"' ); push( @arg, '-append', '+pegasus_root_wf_uuid="'.$config{'root_wf_uuid'}.'"' ); push( @arg, '-append', '+pegasus_wf_name="'.$config{'pegasus_wf_name'}.'"' ); push( @arg, '-append', '+pegasus_wf_time="'.$config{timestamp}.'"' ); push( @arg, '-append', '+pegasus_version="'.$config{'planner_version'}.'"' ); push( @arg, '-append', '+pegasus_job_class=11' ); push( @arg, '-append', '+pegasus_cluster_size=1' ); push( @arg, '-append', '+pegasus_site="local"' ); push( @arg, '-append', '+pegasus_wf_xformation="pegasus::dagman"' ); #push( @arg, '-no_submit') if $submit==0; push( @arg, $dag ); print STDERR "# @arg\n" if $main::DEBUG; #my $csdresult=`@arg`; exec { $arg[0] } @arg or die "Cannot execute @arg: $! \n"; exit 127; pegasus-wms_4.0.1+dfsg/bin/pegasus-statistics0000755000175000017500000020130711757531137020374 0ustar ryngerynge#!/usr/bin/env python import os import re import sys import logging import optparse import subprocess import traceback # Initialize logging object logger = logging.getLogger() # Use pegasus-config to find our lib path bin_dir = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0]))) pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --noeoln --python" lib_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --noeoln --python-externals" lib_ext_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] # Insert this directory in our search path os.sys.path.insert(0, lib_ext_dir) os.sys.path.insert(0, lib_dir) import Pegasus.common from Pegasus.tools import utils from Pegasus.tools import db_utils from Pegasus.plots_stats import utils as stats_utils from netlogger.analysis.workflow.stampede_statistics import StampedeStatistics from netlogger.analysis.schema.schema_check import SchemaVersionError # Regular expressions re_parse_property = re.compile(r'([^:= \t]+)\s*[:=]?\s*(.*)') # Global variables prog_base = os.path.split(sys.argv[0])[1] # Name of this program workflow_summary_file_name = "summary" workflow_summary_time_file_name = "summary-time" workflow_statistics_file_name = "workflow" job_statistics_file_name = "jobs" logical_transformation_statistics_file_name = "breakdown" time_statistics_file_name = "time" time_statistics_per_host_file_name = "time-per-host" text_file_extension = ".txt" csv_file_extension = ".csv" calc_wf_stats = False calc_wf_summary = False calc_jb_stats = False calc_tf_stats = False calc_ti_stats = False 
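# Each calc_* flag above enables one family of report files in the output
# directory: calc_wf_summary -> summary, calc_wf_stats -> workflow,
# calc_jb_stats -> jobs, calc_tf_stats -> breakdown, and calc_ti_stats ->
# time, each written in text (.txt) and CSV (.csv) form.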
time_filter = None NEW_LINE_STR = "\n" DEFAULT_OUTPUT_DIR = "statistics" # Transformations file column names transformation_stats_col_name_text = ["Transformation", "Count", "Succeeded", "Failed", "Min", "Max", "Mean", "Total"] transformation_stats_col_name_csv = ["Workflow_Id", "Dax_Label", "Transformation", "Count", "Succeeded", "Failed", "Min", "Max", "Mean", "Total"] transformation_stats_col_size = [60, 12, 12, 12, 20, 20, 20, 12] # Jobs file column names job_stats_col_name_text = ['#Job', 'Try', 'Site', 'Kickstart', 'Mult', 'Kickstart-Mult', 'CPU-Time', 'Post', 'CondorQTime', 'Resource', 'Runtime', 'Seqexec', 'Seqexec-Delay', 'Exitcode', 'Hostname'] job_stats_col_name_csv = ['Workflow_Id', 'Dax_Label', 'Job', 'Try', 'Site', 'Kickstart', 'Mult', 'Kickstart-Mult', 'CPU-Time', 'Post', 'CondorQTime', 'Resource', 'Runtime', 'Seqexec', 'Seqexec-Delay', 'Exitcode', 'Hostname'] job_stats_col_size = [60, 4, 15, 12, 6, 16, 12, 12, 12, 12, 12, 12, 15, 10, 30] # Summary file column names workflow_summary_col_name_csv = ["Type", "Succeeded", "Failed", "Incomplete", "Total", "Retries", "Total_Run"] workflow_summary_col_name_text = ["Type", "Succeeded", "Failed", "Incomplete", "Total", " ", "Retries", "Total Run (Retries Included)"] workflow_summary_col_size = [20, 20, 20, 20, 20, 5, 20, 20] workflow_time_summary_col_name_csv = ["Stat_Type", "time_seconds"] # Workflow file column names workflow_status_col_name_text = ["#", "Type", "Succeeded", "Failed", "Incomplete", "Total", " ", "Retries", "Total Run (Retries Included)", "Workflow Retries"] workflow_status_col_name_csv = ["Workflow_Id", "Dax_Label", "Type", "Succeeded", "Failed", "Incomplete", "Total", "Retries", "Total_Run", "Workflow_Retries"] workflow_status_col_size = [40, 15, 12, 12, 12, 12, 5, 12, 30, 18] # Time file column names time_stats_col_name_csv = ["Stat_Type", "Date", "Count", "Runtime"] time_stats_col_name_text = ["Date", "Count", "Runtime"] time_stats_col_size = [30, 20, 20] time_host_stats_col_name_csv = ["Stat_Type", "Date", "Host", "Count", "Runtime(sec)"] time_host_stats_col_name_text = ["Date", "Host", "Count", "Runtime (sec)"] time_host_stats_col_size = [30, 80, 20, 20] class JobStatistics: def __init__(self): self.name = None self.site = None self.kickstart = None self.multiplier_factor = None self.kickstart_mult = None self.remote_cpu_time = None self.post = None self.condor_delay = None self.resource = None self.runtime = None self.condorQlen = None self.seqexec = None self.seqexec_delay = None self.retry_count = 0 self.exitcode = None self.hostname = None def getFormattedJobStatistics(self, output_format): """ Returns the formatted job statistics information @return: formatted job statistics information """ formatted_job_stats = [self.name] if output_format == "text": formatted_job_stats.append(" " + str(self.retry_count)) else: formatted_job_stats.append(str(self.retry_count)) if self.site is None: formatted_job_stats.append('-') else: formatted_job_stats.append(self.site) formatted_job_stats.append(round_to_str(self.kickstart)) formatted_job_stats.append(str(self.multiplier_factor)) formatted_job_stats.append(round_to_str(self.kickstart_mult)) if self.remote_cpu_time is None: formatted_job_stats.append('-') else: formatted_job_stats.append(round_to_str(self.remote_cpu_time)) formatted_job_stats.append(round_to_str(self.post)) formatted_job_stats.append(round_to_str(self.condor_delay)) formatted_job_stats.append(round_to_str(self.resource)) formatted_job_stats.append(round_to_str(self.runtime)) 
formatted_job_stats.append(round_to_str(self.seqexec)) formatted_job_stats.append(round_to_str(self.seqexec_delay)) formatted_job_stats.append(str(self.exitcode)) formatted_job_stats.append(self.hostname) return formatted_job_stats def setup_logger(level_str): """ Sets the logging level @param level_str: logging level """ level_str = level_str.lower() if level_str == "debug": logger.setLevel(logging.DEBUG) if level_str == "warning": logger.setLevel(logging.WARNING) if level_str == "error": logger.setLevel(logging.ERROR) if level_str == "info": logger.setLevel(logging.INFO) return def formatted_wf_summary_legends_part1(): """ Returns the first part of the workflow summary legend @return : workflow summary legend """ formatted_wf_statistics_legend = "" formatted_wf_statistics_legend += """ # Workflow summary: # Summary of the workflow execution. It shows total # tasks/jobs/sub workflows run, how many succeeded/failed etc. # In case of a hierarchical workflow the calculation shows the # statistics across all the sub workflows. It shows the following # statistics about tasks, jobs and sub workflows. # * Succeeded - total count of succeeded tasks/jobs/sub workflows. # * Failed - total count of failed tasks/jobs/sub workflows. # * Incomplete - total count of tasks/jobs/sub workflows that are # not in succeeded or failed state. This includes all the jobs # that are not submitted, submitted but not completed etc. This # is calculated as the difference between the 'total' count and the # sum of the 'succeeded' and 'failed' counts. # * Total - total count of tasks/jobs/sub workflows. # * Retries - total retry count of tasks/jobs/sub workflows. # * Total Run - total count of tasks/jobs/sub workflows executed # during the workflow run. This is the cumulative of the retries, # succeeded and failed counts. """ return formatted_wf_statistics_legend def formatted_wf_summary_legends_part2(): """ Returns the second part of the workflow summary legend @return : workflow summary legend """ formatted_wf_statistics_legend = "" formatted_wf_statistics_legend += """ # Workflow wall time: # The walltime from the start of the workflow execution # to the end as reported by DAGMan. In case of a rescue DAG the value # is the cumulative of all retries. """ formatted_wf_statistics_legend += """ # Workflow cumulative job wall time: # The sum of the walltime of all jobs as reported by kickstart. # In case of job retries the value is the cumulative of all retries. # For workflows having sub workflow jobs (i.e. SUBDAG and SUBDAX jobs), # the walltime value includes jobs from the sub workflows as well. """ formatted_wf_statistics_legend += """ # Cumulative job walltime as seen from submit side: # The sum of the walltime of all jobs as reported by DAGMan. # This is similar to the regular cumulative job walltime, but includes # job management overhead and delays. In case of job retries the value is # the cumulative of all retries. For workflows having sub workflow jobs # (i.e. SUBDAG and SUBDAX jobs), the walltime value includes jobs # from the sub workflows as well. 
""" return formatted_wf_statistics_legend def formatted_wf_summary_legends_txt(): """ Returns the complete workflow summary legend @return : workflow summary legend """ formatted_wf_statistics_legend ="# legends\n" formatted_wf_statistics_legend += formatted_wf_summary_legends_part1() formatted_wf_statistics_legend += formatted_wf_summary_legends_part2() return formatted_wf_statistics_legend def formatted_wf_summary_legends_csv1(): """ Returns the workflow summary legend for the first summary csv file @return : workflow summary legend """ formatted_wf_statistics_legend ="# legends\n" formatted_wf_statistics_legend += formatted_wf_summary_legends_part1() return formatted_wf_statistics_legend def formatted_wf_summary_legends_csv2(): """ Returns the workflow summary legend for the second summary csv file @return : workflow summary legend """ formatted_wf_statistics_legend ="# legends\n" formatted_wf_statistics_legend += formatted_wf_summary_legends_part2() return formatted_wf_statistics_legend def formatted_wf_status_legends(): """ Returns the workflow table legend @return : workflow table legend """ formatted_wf_statistics_legend ="# legends\n" formatted_wf_statistics_legend +=""" #Workflow summary - Summary of the workflow execution. It shows total # tasks/jobs/sub workflows run, how many succeeded/failed etc. # In case of hierarchical workflow the calculation shows the # statistics of each individual sub workflow.The file also # contains a 'Total' table at the bottom which is the cummulative # of all the individual statistics details.t shows the following # statistics about tasks, jobs and sub workflows. # # * Workflow Retries - number of times a workflow was retried. # * Succeeded - total count of succeeded tasks/jobs/sub workflows. # * Failed - total count of failed tasks/jobs/sub workflows. # * Incomplete - total count of tasks/jobs/sub workflows that are # not in succeeded or failed state. This includes all the jobs # that are not submitted, submitted but not completed etc. This # is calculated as difference between 'total' count and sum of # 'succeeded' and 'failed' count. # * Total - total count of tasks/jobs/sub workflows. # * Retries - total retry count of tasks/jobs/sub workflows. # * Total Run - total count of tasks/jobs/sub workflows executed # during workflow run. This is the cumulative of retries, # succeeded and failed count. # """ return formatted_wf_statistics_legend def formatted_job_stats_legends(): """ Returns the job table legend @return : job table legend """ formatted_job_stats_legend = "# legends\n" formatted_job_stats_legend += "# Job - name of the job\n" formatted_job_stats_legend += "# Try - number representing the job instance run count\n" formatted_job_stats_legend += "# Site - site where the job ran\n" formatted_job_stats_legend += "# Kickstart - actual duration of the job instance in seconds on the remote compute node\n" formatted_job_stats_legend += "# Mult - multiplier factor specified by the user\n" formatted_job_stats_legend += "# Kickstart-Mult - Kickstart time multiplied by the multiplier factor\n" formatted_job_stats_legend += "# CPU-Time - remote cpu time computed as the stime + utime\n" formatted_job_stats_legend += "# Post - postscript time as reported by DAGMan\n" formatted_job_stats_legend += "# CondorQTime - time between submission by DAGMan and the remote Grid submission. 
It is an estimate of the time spent in the condor q on the submit node\n" formatted_job_stats_legend += "# Resource - time between the remote Grid submission and start of remote execution. It is an estimate of the time job spent in the remote queue\n" formatted_job_stats_legend += "# Runtime - time spent on the resource as seen by Condor DAGMan. Is always >=kickstart\n" formatted_job_stats_legend += "# Seqexec - time taken for the completion of a clustered job\n" formatted_job_stats_legend += "# Seqexec-Delay - time difference between the time for the completion of a clustered job and sum of all the individual tasks kickstart time\n" formatted_job_stats_legend += "# Exitcode - exitcode for this job\n" formatted_job_stats_legend += "# Hostname - name of the host where the job ran, as reported by kickstart\n" return formatted_job_stats_legend def formatted_transformation_stats_legends(): """ Returns the transformation table legend @return : transformation table legend """ formatted_transformation_stats_legend="# legends\n" formatted_transformation_stats_legend +="# Transformation - name of the transformation.\n" formatted_transformation_stats_legend +="# Count - the number of times the invocations corresponding to the transformation was executed.\n" formatted_transformation_stats_legend +="# Succeeded - the count of the succeeded invocations corresponding to the transformation.\n" formatted_transformation_stats_legend +="# Failed - the count of the failed invocations corresponding to the transformation.\n" formatted_transformation_stats_legend +="# Min(sec) - the minimum invocation runtime value corresponding to the transformation.\n" formatted_transformation_stats_legend +="# Max(sec) - the maximum invocation runtime value corresponding to the transformation.\n" formatted_transformation_stats_legend +="# Mean(sec) - the mean of the invocation runtime corresponding to the transformation.\n" formatted_transformation_stats_legend +="# Total(sec) - the cumulative of invocation runtime corresponding to the transformation.\n" return formatted_transformation_stats_legend def formatted_time_stats_legends_text(): """ Returns the time table legend @return : time table legend """ filter = str(time_filter) formatted_time_stats_legend = "# legends" + NEW_LINE_STR formatted_time_stats_legend += "# Job instance statistics per " + filter + " : the number of job instances run, total runtime sorted by " + filter+ NEW_LINE_STR formatted_time_stats_legend += "# Invocation statistics per " + filter + " : the number of invocations , total runtime sorted by " + filter+ NEW_LINE_STR formatted_time_stats_legend += "# Job instance statistics by host per " + filter + " : the number of job instance run, total runtime on each host sorted by " + filter+ NEW_LINE_STR formatted_time_stats_legend += "# Invocation by host per " + filter + " : the number of invocations, total runtime on each host sorted by " + filter + NEW_LINE_STR return formatted_time_stats_legend def formatted_time_stats_legends_csv(): """ Returns the time table legend @return : time table legend """ filter = str(time_filter) formatted_time_stats_legend = "# legends" + NEW_LINE_STR formatted_time_stats_legend += "# Job instance statistics per " + filter + " : the number of job instances run, total runtime sorted by " + filter+ NEW_LINE_STR formatted_time_stats_legend += "# Invocation statistics per " + filter + " : the number of invocations , total runtime sorted by " + filter+ NEW_LINE_STR return formatted_time_stats_legend def 
formatted_time_host_stats_legends_csv(): """ Returns the time table legend @return : time table legend """ filter = str(time_filter) formatted_time_stats_legend = "# legends" + NEW_LINE_STR formatted_time_stats_legend += "# Job instance statistics by host per " + filter + " : the number of job instances run, total runtime on each host sorted by " + filter + NEW_LINE_STR formatted_time_stats_legend += "# Invocation by host per " + filter + " : the number of invocations, total runtime on each host sorted by " + filter + NEW_LINE_STR return formatted_time_stats_legend def write_to_file(file_path, mode, content): """ Utility method for writing content to a given file @param file_path : file path @param mode : file writing mode 'a' append, 'w' write @param content : content to write to the file """ try: fh = open(file_path, mode) fh.write(content) except IOError: logger.error("Unable to write to file " + file_path) sys.exit(1) else: fh.close() def format_seconds(duration): """ Utility for converting time to a readable format @param duration : time in seconds and milliseconds @return time in the format day, hour, min, sec """ return stats_utils.format_seconds(duration) def convert_to_str(value): """ Utility for returning a str representation of the given value. Returns '-' if value is None @param value : the given value that needs to be converted to a string """ if value is None: return '-' return str(value) def print_row(content, column_format, output_format): """ Utility method for generating a formatted row based on the column format given @param content : list of column values @param column_format : column size of each column """ row_str = "" if output_format == "text": for index in range(len(content)): row_str += (content[index].ljust(column_format[index])) elif output_format == "csv": for word in content: if row_str != "": row_str += "," row_str += word else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) return row_str def print_workflow_details(output_db_url, wf_uuid, output_dir): """ Prints the workflow statistics information of all workflows @param output_db_url : URL of the database to pull statistics from @param wf_uuid : uuid of the top level workflow """ try: expanded_workflow_stats = StampedeStatistics(output_db_url) expanded_workflow_stats.initialize(wf_uuid) except SchemaVersionError: logger.error("------------------------------------------------------") logger.error("Database schema mismatch! Please run the upgrade tool") logger.error("to upgrade the database to the latest schema version.") sys.exit(1) except: logger.error("Failed to load the database: " 
+ output_db_url ) logger.warning(traceback.format_exc()) sys.exit(1) # print workflow statistics wf_uuid_list = [wf_uuid] desc_wf_uuid_list = expanded_workflow_stats.get_descendant_workflow_ids() for wf_det in desc_wf_uuid_list: wf_uuid_list.append(wf_det.wf_uuid) if calc_wf_stats: # Do it for the text file wf_stats_file_txt = os.path.join(output_dir, workflow_statistics_file_name + text_file_extension) write_to_file(wf_stats_file_txt, "w", formatted_wf_status_legends()) workflow_status_table_header_str = print_row(workflow_status_col_name_text, workflow_status_col_size, "text") workflow_status_table_header_str += NEW_LINE_STR write_to_file(wf_stats_file_txt, "a", workflow_status_table_header_str) # Now output the csv file too wf_stats_file_csv = os.path.join(output_dir, workflow_statistics_file_name + csv_file_extension) write_to_file(wf_stats_file_csv, "w", formatted_wf_status_legends()) workflow_status_table_header_str = print_row(workflow_status_col_name_csv, workflow_status_col_size, "csv") workflow_status_table_header_str += NEW_LINE_STR write_to_file(wf_stats_file_csv, "a", workflow_status_table_header_str) if calc_jb_stats: # Write the text file jobs_stats_file_txt = os.path.join(output_dir, job_statistics_file_name + text_file_extension) write_to_file(jobs_stats_file_txt, "w", formatted_job_stats_legends()) # Now write the csv file jobs_stats_file_csv = os.path.join(output_dir, job_statistics_file_name + csv_file_extension) write_to_file(jobs_stats_file_csv, "w", formatted_job_stats_legends()) if calc_tf_stats: # Write the text file transformation_stats_file_txt = os.path.join(output_dir, logical_transformation_statistics_file_name + text_file_extension) write_to_file(transformation_stats_file_txt, "w", formatted_transformation_stats_legends()) # Now write the csv file transformation_stats_file_csv = os.path.join(output_dir, logical_transformation_statistics_file_name + csv_file_extension) write_to_file(transformation_stats_file_csv, "w", formatted_transformation_stats_legends()) if calc_ti_stats: # Create the text file time_stats_file_txt = os.path.join(output_dir, time_statistics_file_name + text_file_extension) write_to_file(time_stats_file_txt, "w", formatted_time_stats_legends_text()) content = print_statistics_by_time_and_host(expanded_workflow_stats, "text", combined=True, per_host=True) write_to_file(time_stats_file_txt, "a", content) # Now create the csv file time_stats_file_csv = os.path.join(output_dir, time_statistics_file_name + csv_file_extension) write_to_file(time_stats_file_csv, "w", formatted_time_stats_legends_csv()) content = print_statistics_by_time_and_host(expanded_workflow_stats, "csv", combined=True, per_host=False) write_to_file(time_stats_file_csv, "a", content) # Now create the second, per-host csv file time_stats_file2_csv = os.path.join(output_dir, time_statistics_per_host_file_name + csv_file_extension) write_to_file(time_stats_file2_csv, "w", formatted_time_host_stats_legends_csv()) content = print_statistics_by_time_and_host(expanded_workflow_stats, "csv", combined=False, per_host=True) write_to_file(time_stats_file2_csv, "a", content) if calc_jb_stats or calc_tf_stats or calc_wf_stats: for sub_wf_uuid in wf_uuid_list: try: individual_workflow_stats = StampedeStatistics(output_db_url, False) individual_workflow_stats.initialize(sub_wf_uuid) except SchemaVersionError: logger.error("------------------------------------------------------") logger.error("Database schema mismatch! 
Please run the upgrade tool") logger.error("to upgrade the database to the latest schema version.") sys.exit(1) except: logger.error("Failed to load the database." + output_db_url ) logger.warning(traceback.format_exc()) sys.exit(1) wf_det = individual_workflow_stats.get_workflow_details()[0] workflow_id = str(sub_wf_uuid) dax_label = str(wf_det.dax_label) logger.info("Generating statistics information about the workflow " + workflow_id + " ... ") if calc_jb_stats: logger.debug("Generating job instance statistics information for workflow " + workflow_id + " ... ") individual_workflow_stats.set_job_filter('all') # Write the text file content = print_individual_wf_job_stats(individual_workflow_stats, workflow_id, dax_label, "text") write_to_file(jobs_stats_file_txt, "a", content) # Now write the csv file content = print_individual_wf_job_stats(individual_workflow_stats, workflow_id, dax_label, "csv") write_to_file(jobs_stats_file_csv, "a", content) if calc_tf_stats: logger.debug("Generating invocation statistics information for workflow " + workflow_id + " ... ") individual_workflow_stats.set_job_filter('all') # Write the text file content = print_wf_transformation_stats(individual_workflow_stats, workflow_id, dax_label, "text") write_to_file(transformation_stats_file_txt, "a", content) # Now write the csv file content = print_wf_transformation_stats(individual_workflow_stats, workflow_id, dax_label, "csv") write_to_file(transformation_stats_file_csv, "a", content) if calc_wf_stats: logger.debug("Generating workflow statistics information for workflow " + workflow_id + " ... ") individual_workflow_stats.set_job_filter('all') # Write text file content = print_individual_workflow_stats(individual_workflow_stats, workflow_id, dax_label, "text") write_to_file(wf_stats_file_txt, "a", content) # Write csv file content = print_individual_workflow_stats(individual_workflow_stats, workflow_id, dax_label, "csv") write_to_file(wf_stats_file_csv, "a", content) individual_workflow_stats.close() stats_output = NEW_LINE_STR + "SUMMARY".center(100, '*') stats_output += NEW_LINE_STR if calc_wf_summary: # First we generate the txt file summary_output = formatted_wf_summary_legends_txt() summary_output += NEW_LINE_STR logger.info("Generating workflow summary ... 
") summary_output += print_workflow_summary(expanded_workflow_stats, "text", wf_summary=True, time_summary=True) wf_summary_file_txt = os.path.join(output_dir, workflow_summary_file_name + text_file_extension) write_to_file(wf_summary_file_txt, "w", summary_output) stats_output += summary_output stats_output += NEW_LINE_STR stats_output += "Summary : " stats_output += wf_summary_file_txt + "\n" # Now we generate the first csv summary file summary_output = formatted_wf_summary_legends_csv1() summary_output += NEW_LINE_STR summary_output += print_workflow_summary(expanded_workflow_stats, "csv", wf_summary=True, time_summary=False) wf_summary_file_csv = os.path.join(output_dir, workflow_summary_file_name + csv_file_extension) write_to_file(wf_summary_file_csv, "w", summary_output) # Now we generate the second csv summary file summary_output = formatted_wf_summary_legends_csv2() summary_output += NEW_LINE_STR summary_output += print_workflow_summary(expanded_workflow_stats, "csv", wf_summary=False, time_summary=True) wf_summary_file2_csv = os.path.join(output_dir, workflow_summary_time_file_name + csv_file_extension) write_to_file(wf_summary_file2_csv, "w", summary_output) if calc_wf_stats: stats_output += NEW_LINE_STR # Write text file content = print_individual_workflow_stats(expanded_workflow_stats , "Total", "", "text") write_to_file(wf_stats_file_txt, "a" , content) stats_output += "Workflow execution statistics : " stats_output += wf_stats_file_txt +"\n" # Now write the csv file content = print_individual_workflow_stats(expanded_workflow_stats , "TOTAL", "", "csv") write_to_file(wf_stats_file_csv, "a" , content) if calc_jb_stats: stats_output += NEW_LINE_STR stats_output += "Job instance statistics : " stats_output += jobs_stats_file_txt +"\n" if calc_tf_stats: stats_output += NEW_LINE_STR expanded_workflow_stats.set_job_filter('all') # Write the text file content = print_wf_transformation_stats(expanded_workflow_stats , "All", "", "text") write_to_file(transformation_stats_file_txt, "a" , content) stats_output += "Transformation statistics : " stats_output += transformation_stats_file_txt +"\n" # Now write the csv file content = print_wf_transformation_stats(expanded_workflow_stats , "ALL", "", "csv") write_to_file(transformation_stats_file_csv, "a" , content) if calc_ti_stats: stats_output += NEW_LINE_STR stats_output += "Time statistics : " stats_output += time_stats_file_txt +"\n" expanded_workflow_stats.close() stats_output += NEW_LINE_STR stats_output += "".center(100, '*') print stats_output return def print_workflow_summary(workflow_stats, output_format, wf_summary=True, time_summary=True): """ Prints the workflow statistics summary of an top level workflow @param workflow_stats : workflow statistics object reference """ summary_str = "" if wf_summary == True: # status workflow_stats.set_job_filter('nonsub') # Tasks total_tasks = workflow_stats.get_total_tasks_status() total_succeeded_tasks = workflow_stats.get_total_succeeded_tasks_status() total_failed_tasks = workflow_stats.get_total_failed_tasks_status() total_unsubmitted_tasks = total_tasks - (total_succeeded_tasks + total_failed_tasks) total_task_retries = workflow_stats.get_total_tasks_retries() total_invocations = total_succeeded_tasks + total_failed_tasks + total_task_retries # Jobs total_jobs = workflow_stats.get_total_jobs_status() total_succeeded_jobs = workflow_stats.get_total_succeeded_jobs_status() total_failed_jobs = workflow_stats.get_total_failed_jobs_status() total_unsubmitted_jobs = total_jobs - 
(total_succeeded_jobs + total_failed_jobs) total_job_retries = workflow_stats.get_total_jobs_retries() total_job_instance_retries = total_succeeded_jobs + total_failed_jobs + total_job_retries # Sub workflows workflow_stats.set_job_filter('subwf') total_sub_wfs = workflow_stats.get_total_jobs_status() total_succeeded_sub_wfs = workflow_stats.get_total_succeeded_jobs_status() total_failed_sub_wfs = workflow_stats.get_total_failed_jobs_status() total_unsubmitted_sub_wfs = total_sub_wfs - (total_succeeded_sub_wfs + total_failed_sub_wfs) total_sub_wfs_retries = workflow_stats.get_total_jobs_retries() total_sub_wfs_tries = total_succeeded_sub_wfs + total_failed_sub_wfs + total_sub_wfs_retries # Format the output if output_format == "text": summary_str += "".center(sum(workflow_summary_col_size), '-') summary_str += NEW_LINE_STR if output_format == "text": summary_str += print_row(workflow_summary_col_name_text, workflow_summary_col_size, output_format) content = ["Tasks", convert_to_str(total_succeeded_tasks), convert_to_str(total_failed_tasks), convert_to_str(total_unsubmitted_tasks), convert_to_str(total_tasks), "||", convert_to_str(total_task_retries), convert_to_str(total_invocations)] elif output_format == "csv": summary_str += print_row(workflow_summary_col_name_csv, workflow_summary_col_size, output_format) content = ["Tasks", convert_to_str(total_succeeded_tasks), convert_to_str(total_failed_tasks), convert_to_str(total_unsubmitted_tasks), convert_to_str(total_tasks), convert_to_str(total_task_retries), convert_to_str(total_invocations)] else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) summary_str += NEW_LINE_STR summary_str += print_row(content, workflow_summary_col_size, output_format) if output_format == "text": content = ["Jobs", convert_to_str(total_succeeded_jobs), convert_to_str(total_failed_jobs), convert_to_str(total_unsubmitted_jobs), convert_to_str(total_jobs), "||", str(total_job_retries), convert_to_str(total_job_instance_retries)] elif output_format == "csv": content = ["Jobs", convert_to_str(total_succeeded_jobs), convert_to_str(total_failed_jobs), convert_to_str(total_unsubmitted_jobs), convert_to_str(total_jobs), str(total_job_retries), convert_to_str(total_job_instance_retries)] else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) summary_str += NEW_LINE_STR summary_str += print_row(content, workflow_summary_col_size, output_format) if output_format == "text": content = ["Sub Workflows", convert_to_str(total_succeeded_sub_wfs), convert_to_str(total_failed_sub_wfs), convert_to_str(total_unsubmitted_sub_wfs), convert_to_str(total_sub_wfs), "||", str(total_sub_wfs_retries), convert_to_str(total_sub_wfs_tries)] elif output_format == "csv": content = ["Sub_Workflows", convert_to_str(total_succeeded_sub_wfs), convert_to_str(total_failed_sub_wfs), convert_to_str(total_unsubmitted_sub_wfs), convert_to_str(total_sub_wfs), str(total_sub_wfs_retries), convert_to_str(total_sub_wfs_tries)] else: print "%s: error: output format %s not recognized!" 
% (prog_base, output_format) sys.exit(1) summary_str += NEW_LINE_STR summary_str += print_row(content, workflow_summary_col_size, output_format) summary_str += NEW_LINE_STR if output_format == "text": summary_str += "".center(sum(workflow_summary_col_size), '-') summary_str += NEW_LINE_STR if time_summary == True: workflow_states_list = workflow_stats.get_workflow_states() workflow_wall_time = stats_utils.get_workflow_wall_time(workflow_states_list) workflow_cum_job_wall_time = workflow_stats.get_workflow_cum_job_wall_time() submit_side_job_wall_time = workflow_stats.get_submit_side_job_wall_time() summary_str += NEW_LINE_STR if output_format == "text": if workflow_wall_time is None: summary_str += "Workflow wall time : -\n" else: summary_str += "Workflow wall time : %-20s (total %d seconds)\n" % \ (format_seconds(workflow_wall_time), (workflow_wall_time)) summary_str += NEW_LINE_STR if workflow_cum_job_wall_time is None: summary_str += "Workflow cumulative job wall time : -\n" else: summary_str += "Workflow cumulative job wall time : %-20s (total %d seconds)\n" % \ (format_seconds(workflow_cum_job_wall_time), workflow_cum_job_wall_time) summary_str += NEW_LINE_STR if submit_side_job_wall_time is None: summary_str += "Cumulative job walltime as seen from submit side : -\n" else: summary_str += "Cumulative job walltime as seen from submit side : %-20s (total %d seconds)\n" % \ (format_seconds(submit_side_job_wall_time), submit_side_job_wall_time) elif output_format == "csv": # Print header line summary_str += print_row(workflow_time_summary_col_name_csv, None, output_format) summary_str += NEW_LINE_STR if workflow_wall_time is None: summary_str += "Workflow_wall_time," else: summary_str += ('Workflow_wall_time,%s' % workflow_wall_time) summary_str += NEW_LINE_STR if workflow_cum_job_wall_time is None: summary_str += "Workflow_cumulative_job_wall_time," else: summary_str += ('Workflow_cumulative_job_wall_time,%s' % workflow_cum_job_wall_time) summary_str += NEW_LINE_STR if submit_side_job_wall_time is None: summary_str += "Cumulative_job_walltime_from_submit_side," else: summary_str += ('Cumulative_job_walltime_from_submit_side,%s' % submit_side_job_wall_time) summary_str += NEW_LINE_STR else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) return summary_str def print_individual_workflow_stats(workflow_stats, workflow_id, dax_label, output_format): """ Prints the workflow statistics of workflow @param workflow_stats : workflow statistics object reference @param workflow_id : workflow_id (title of the workflow table) """ content_str = "\n" # individual workflow status # Add dax_label to workflow_id if writing text file if output_format == "text" and dax_label != "": workflow_id = workflow_id + " (" + dax_label +")" # workflow status workflow_stats.set_job_filter('all') total_wf_retries = workflow_stats.get_workflow_retries() # only used for the text output... 
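# print_row renders a content list like the one below either as fixed-width
# text (each value left-justified to its column width) or as a comma-separated
# record. For example (hypothetical values), ["wf-1", "2"] with widths [10, 4]
# becomes "wf-1      2   " in text mode and "wf-1,2" in csv mode.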
content = [workflow_id, convert_to_str(total_wf_retries)] retry_col_size = workflow_status_col_size[len(workflow_status_col_size) - 1] wf_status_str = print_row(content, [sum(workflow_status_col_size) - retry_col_size, retry_col_size], output_format) # tasks workflow_stats.set_job_filter('nonsub') total_tasks = workflow_stats.get_total_tasks_status() total_succeeded_tasks = workflow_stats.get_total_succeeded_tasks_status() total_failed_tasks = workflow_stats.get_total_failed_tasks_status() total_unsubmitted_tasks = total_tasks - (total_succeeded_tasks + total_failed_tasks) total_task_retries = workflow_stats.get_total_tasks_retries() total_task_invocations = total_succeeded_tasks + total_failed_tasks + total_task_retries if output_format == "text": content = ["", "Tasks", convert_to_str(total_succeeded_tasks), convert_to_str(total_failed_tasks), convert_to_str(total_unsubmitted_tasks), convert_to_str(total_tasks), "||", convert_to_str(total_task_retries), convert_to_str(total_task_invocations), ""] elif output_format == "csv": content = [workflow_id, dax_label, "Tasks", convert_to_str(total_succeeded_tasks), convert_to_str(total_failed_tasks), convert_to_str(total_unsubmitted_tasks), convert_to_str(total_tasks), convert_to_str(total_task_retries), convert_to_str(total_task_invocations), convert_to_str(total_wf_retries)] else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) tasks_status_str = print_row(content, workflow_status_col_size, output_format) # job status workflow_stats.set_job_filter('nonsub') total_jobs = workflow_stats.get_total_jobs_status() total_succeeded_jobs = workflow_stats.get_total_succeeded_jobs_status() total_failed_jobs = workflow_stats.get_total_failed_jobs_status() total_unsubmitted_jobs = total_jobs - (total_succeeded_jobs + total_failed_jobs) total_job_retries = workflow_stats.get_total_jobs_retries() total_job_invocations = total_succeeded_jobs + total_failed_jobs + total_job_retries if output_format == "text": content = ["", "Jobs", convert_to_str(total_succeeded_jobs), convert_to_str(total_failed_jobs), convert_to_str(total_unsubmitted_jobs), convert_to_str(total_jobs), "||", convert_to_str(total_job_retries), convert_to_str(total_job_invocations), ""] elif output_format == "csv": content = [workflow_id, dax_label, "Jobs", convert_to_str(total_succeeded_jobs), convert_to_str(total_failed_jobs), convert_to_str(total_unsubmitted_jobs), convert_to_str(total_jobs), convert_to_str(total_job_retries), convert_to_str(total_job_invocations), convert_to_str(total_wf_retries)] else: print "%s: error: output format %s not recognized!" 
% (prog_base, output_format) jobs_status_str = print_row(content, workflow_status_col_size, output_format) # sub workflow workflow_stats.set_job_filter('subwf') total_sub_wfs = workflow_stats.get_total_jobs_status() total_succeeded_sub_wfs = workflow_stats.get_total_succeeded_jobs_status() total_failed_sub_wfs = workflow_stats.get_total_failed_jobs_status() total_unsubmitted_sub_wfs = total_sub_wfs - (total_succeeded_sub_wfs + total_failed_sub_wfs) total_sub_wfs_retries = workflow_stats.get_total_jobs_retries() total_sub_wfs_invocations = total_succeeded_sub_wfs + total_failed_sub_wfs + total_sub_wfs_retries if output_format == "text": content = ["", "Sub Workflows", convert_to_str(total_succeeded_sub_wfs), convert_to_str(total_failed_sub_wfs), convert_to_str(total_unsubmitted_sub_wfs), convert_to_str(total_sub_wfs), "||", convert_to_str(total_sub_wfs_retries), convert_to_str(total_sub_wfs_invocations), ""] elif output_format == "csv": content = [workflow_id, dax_label, "Sub_Workflows", convert_to_str(total_succeeded_sub_wfs), convert_to_str(total_failed_sub_wfs), convert_to_str(total_unsubmitted_sub_wfs), convert_to_str(total_sub_wfs), convert_to_str(total_sub_wfs_retries), convert_to_str(total_sub_wfs_invocations), convert_to_str(total_wf_retries)] else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sub_wf_status_str = print_row(content, workflow_status_col_size, output_format) if output_format == "text": # Only print these in the text format output content_str += "".center(sum(workflow_status_col_size), '-') + "\n" content_str += wf_status_str + "\n" content_str += tasks_status_str + "\n" content_str += jobs_status_str + "\n" content_str += sub_wf_status_str + "\n" return content_str def print_individual_wf_job_stats(workflow_stats, workflow_id, dax_label, output_format): """ Prints the job statistics of workflow @param workflow_stats : workflow statistics object reference @param workflow_id : workflow_id (title for the table) """ job_stats_dict = {} job_stats_list = [] job_retry_count_dict = {} # Add dax_label to workflow_id if writing text file if output_format == "text": workflow_id = workflow_id + " (" + dax_label +")" if output_format == "text": job_status_str = "\n# " + workflow_id + "\n" else: job_status_str = "\n" # Print header if output_format == "text": job_status_str += print_row(job_stats_col_name_text, job_stats_col_size, output_format) elif output_format == "csv": job_status_str += print_row(job_stats_col_name_csv, job_stats_col_size, output_format) else: print "%s: error: output format %s not recognized!" 
% (prog_base, output_format) sys.exit(1) job_status_str += "\n" wf_job_stats_list = workflow_stats.get_job_statistics() # Go through each job in the workflow for job in wf_job_stats_list: job_stats = JobStatistics() job_stats.name = job.job_name job_stats.site = job.site job_stats.kickstart = job.kickstart job_stats.multiplier_factor = job.multiplier_factor job_stats.kickstart_mult = job.kickstart_multi job_stats.remote_cpu_time = job.remote_cpu_time job_stats.post = job.post_time job_stats.runtime = job.runtime job_stats.condor_delay = job.condor_q_time job_stats.resource = job.resource_delay job_stats.seqexec = job.seqexec job_stats.exitcode = utils.raw_to_regular(job.exit_code) job_stats.hostname = job.host_name if job_stats.seqexec is not None and job_stats.kickstart is not None: job_stats.seqexec_delay = (float(job_stats.seqexec) - float(job_stats.kickstart)) if job_retry_count_dict.has_key(job.job_name): job_retry_count_dict[job.job_name] += 1 else: job_retry_count_dict[job.job_name] = 1 job_stats.retry_count = job_retry_count_dict[job.job_name] job_stats_list.append(job_stats) # printing content_list = [] # find the pretty print length for job_stat in job_stats_list: job_det = job_stat.getFormattedJobStatistics(output_format) if output_format == "text": index = 0 for content in job_det: job_status_str += str(content).ljust(job_stats_col_size[index]) index = index + 1 elif output_format == "csv": job_status_str += workflow_id job_status_str += "," job_status_str += dax_label for content in job_det: job_status_str += "," + str(content) else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) job_status_str += NEW_LINE_STR return job_status_str def round_to_str(value , to=3): """ Utility method for rounding the float value to rounded string @param value : value to round @param to : how many decimal points to round to """ return stats_utils.round_decimal_to_str(value,to) def print_wf_transformation_stats(workflow_stats, workflow_id, dax_label, output_format): """ Prints the transformation statistics of workflow @param workflow_stats : workflow statistics object reference @param workflow_id : workflow_id (title of the transformation statistics) """ transformation_status_str = "\n" # Add dax_label to workflow_id if writing text file if output_format == "text" and dax_label != "": workflow_id = workflow_id + " (" + dax_label +")" # In text file, we need a line with the workflow id first if output_format == "text": transformation_status_str = "\n# " + workflow_id + "\n" if output_format == "text": transformation_status_str += print_row(transformation_stats_col_name_text, transformation_stats_col_size, output_format) elif output_format == "csv": transformation_status_str += print_row(transformation_stats_col_name_csv, transformation_stats_col_size, output_format) else: print "%s: error: output format %s not recognized!" 
% (prog_base, output_format) sys.exit(1) transformation_status_str += NEW_LINE_STR for transformation in workflow_stats.get_transformation_statistics(): if output_format == "text": content = [transformation.transformation, str(transformation.count), str(transformation.success), str(transformation.failure), round_to_str(transformation.min), round_to_str(transformation.max), round_to_str(transformation.avg), round_to_str(transformation.sum)] elif output_format == "csv": content = [workflow_id, dax_label, transformation.transformation, str(transformation.count), str(transformation.success), str(transformation.failure), round_to_str(transformation.min), round_to_str(transformation.max), round_to_str(transformation.avg), round_to_str(transformation.sum)] else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) transformation_status_str += print_row(content, transformation_stats_col_size, output_format) transformation_status_str += NEW_LINE_STR return transformation_status_str def print_statistics_by_time_and_host(workflow_stats, output_format, combined=True, per_host=True): """ Prints the job instance and invocation statistics sorted by time @param workflow_stats : workflow statistics object reference @param output_format : indicates how to format the output, currently supported "text" and "csv" @param combined : print combined output (all hosts consolidated) @param per_host : print per-host totals """ statistics_by_time_str = NEW_LINE_STR workflow_stats.set_job_filter('nonsub') workflow_stats.set_time_filter('hour') workflow_stats.set_transformation_filter(exclude=['condor::dagman']) if combined == True: statistics_by_time_str +="# Job instances statistics per " + time_filter statistics_by_time_str += NEW_LINE_STR if output_format == "text": statistics_by_time_str += print_row(time_stats_col_name_text, time_stats_col_size, output_format) elif output_format == "csv": statistics_by_time_str += print_row(time_stats_col_name_csv, time_stats_col_size, output_format) else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) statistics_by_time_str += NEW_LINE_STR stats_by_time = workflow_stats.get_jobs_run_by_time() formatted_stats_list = stats_utils.convert_stats_to_base_time(stats_by_time, time_filter) for stats in formatted_stats_list: if output_format == "text": content = [stats['date_format'], str(stats['count']), round_to_str(stats['runtime'])] elif output_format == "csv": content = ["Job instances/" + time_filter, stats['date_format'], str(stats['count']), round_to_str(stats['runtime'])] else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) statistics_by_time_str += print_row(content, time_stats_col_size, output_format) statistics_by_time_str += NEW_LINE_STR if combined == True: statistics_by_time_str += NEW_LINE_STR statistics_by_time_str += "# Invocation statistics run per " + time_filter statistics_by_time_str += NEW_LINE_STR if output_format == "text": statistics_by_time_str += print_row(time_stats_col_name_text, time_stats_col_size, output_format) elif output_format == "csv": statistics_by_time_str += print_row(time_stats_col_name_csv, time_stats_col_size, output_format) else: print "%s: error: output format %s not recognized!" 
% (prog_base, output_format) sys.exit(1) statistics_by_time_str += NEW_LINE_STR stats_by_time = workflow_stats.get_invocation_by_time() formatted_stats_list = stats_utils.convert_stats_to_base_time(stats_by_time, time_filter) for stats in formatted_stats_list: if output_format == "text": content = [stats['date_format'], str(stats['count']), round_to_str(stats['runtime'])] elif output_format == "csv": content = ["Invocations/" + time_filter, stats['date_format'], str(stats['count']), round_to_str(stats['runtime'])] else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) statistics_by_time_str += print_row(content, time_stats_col_size, output_format) statistics_by_time_str += NEW_LINE_STR if per_host == True: statistics_by_time_str += NEW_LINE_STR statistics_by_time_str += "# Job instances statistics on host per " + time_filter statistics_by_time_str += NEW_LINE_STR if output_format == "text": statistics_by_time_str += print_row(time_host_stats_col_name_text, time_host_stats_col_size, output_format) elif output_format == "csv": statistics_by_time_str += print_row(time_host_stats_col_name_csv, time_host_stats_col_size, output_format) else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) statistics_by_time_str += NEW_LINE_STR stats_by_time = workflow_stats.get_jobs_run_by_time_per_host() formatted_stats_list = stats_utils.convert_stats_to_base_time(stats_by_time, time_filter, True) for stats in formatted_stats_list: if output_format == "text": content = [stats['date_format'], str(stats['host']), str(stats['count']), round_to_str(stats['runtime'])] elif output_format == "csv": content = ["Job_instances/host/" + time_filter, stats['date_format'], str(stats['host']), str(stats['count']), round_to_str(stats['runtime'])] else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) statistics_by_time_str += print_row(content, time_host_stats_col_size, output_format) statistics_by_time_str += NEW_LINE_STR if per_host == True: statistics_by_time_str += NEW_LINE_STR statistics_by_time_str += "# Invocation statistics on host per " + time_filter statistics_by_time_str += NEW_LINE_STR if output_format == "text": statistics_by_time_str += print_row(time_host_stats_col_name_text, time_host_stats_col_size, output_format) elif output_format == "csv": statistics_by_time_str += print_row(time_host_stats_col_name_csv, time_host_stats_col_size, output_format) else: print "%s: error: output format %s not recognized!" % (prog_base, output_format) sys.exit(1) statistics_by_time_str += NEW_LINE_STR stats_by_time = workflow_stats.get_invocation_by_time_per_host() formatted_stats_list = stats_utils.convert_stats_to_base_time(stats_by_time, time_filter, True) for stats in formatted_stats_list: if output_format == "text": content = [stats['date_format'], str(stats['host']), str(stats['count']), round_to_str(stats['runtime'])] elif output_format == "csv": content = ["Invocations/host/" + time_filter, stats['date_format'], str(stats['host']), str(stats['count']), round_to_str(stats['runtime'])] else: print "%s: error: output format %s not recognized!" 
% (prog_base, output_format) sys.exit(1) statistics_by_time_str += print_row(content, time_host_stats_col_size, output_format) statistics_by_time_str += NEW_LINE_STR return statistics_by_time_str def set_statistics_level(stats_level): """ Sets the statistics level @param stats_level """ global calc_wf_stats global calc_wf_summary global calc_jb_stats global calc_tf_stats global calc_ti_stats if stats_level =='all': calc_wf_stats = True calc_wf_summary = True calc_jb_stats = True calc_tf_stats = True calc_ti_stats = True elif stats_level =='summary': calc_wf_summary = True elif stats_level =='wf_stats': calc_wf_stats = True elif stats_level == 'jb_stats': calc_jb_stats = True elif stats_level == 'tf_stats': calc_tf_stats = True else: calc_ti_stats = True # ---------main---------------------------------------------------------------------------- def main(): # Configure command line option parser prog_usage = prog_base +" [options] [SUBMIT_DIRECTORY]" parser = optparse.OptionParser(usage=prog_usage) parser.add_option("-o", "--output", action = "store", dest = "output_dir", help = "Writes the output to given directory.") parser.add_option("-c","--conf", action = "store", type = "string", dest = "config_properties", help = "Specifies the properties file to use. This option overrides all other property files.") parser.add_option("-s", "--statistics-level", action = "store", dest = "statistics_level", choices=['all', 'summary', 'wf_stats', 'jb_stats', 'tf_stats', 'ti_stats'], help = "Valid levels are: all,summary,wf_stats,jb_stats,tf_stats,ti_stats; Default is summary.") parser.add_option("-t", "--time-filter", action = "store", dest = "time_filter", choices=['day', 'hour'], help = "Valid levels are: day,hour; Default is day.") parser.add_option("-i", "--ignore-db-inconsistency", action = "store_const", const = 0, dest = "ignore_db_inconsistency", help = "turn off the check for db consistency") parser.add_option("-v", "--verbose", action="count", default=0, dest="verbose", help="Increase verbosity, repeatable") parser.add_option("-q", "--quiet", action="count", default=0, dest="quiet", help="Decrease verbosity, repeatable") # Parse command line options (options, args) = parser.parse_args() if len(args) > 1: parser.error("Invalid argument") sys.exit(1) if len(args) < 1: submit_dir = os.getcwd() else: submit_dir = os.path.abspath(args[0]) # Copy options from the command line parser # default is info log_level = 1 log_level_str = "info" log_level += (options.verbose - options.quiet) if log_level <= 0: log_level_str = "error" elif log_level == 1: log_level_str = "warning" elif log_level == 2: log_level_str = "info" elif log_level >= 3: log_level_str = "debug" setup_logger(log_level_str) logger.info(prog_base +" : initializing...") if options.ignore_db_inconsistency is None: if not utils.loading_completed(submit_dir): if utils.monitoring_running(submit_dir): logger.warning("pegasus-monitord still running. Please wait for it to complete. ") else: logger.warning("Please run pegasus monitord in replay mode. 
") sys.exit(1) else: logger.warning("The tool is meant to be run after the completion of workflow run.") # Figure out what statistics we need to calculate if options.statistics_level is not None: statistics_level = options.statistics_level else: statistics_level = 'summary' set_statistics_level(statistics_level) global time_filter if options.time_filter is not None: time_filter = options.time_filter else: time_filter = 'day' # Change the legend to show the time filter format time_stats_col_name_text[0] += str(stats_utils.get_date_print_format(time_filter)) time_stats_col_name_csv[1] += str(stats_utils.get_date_print_format(time_filter)) time_host_stats_col_name_text[0] += str(stats_utils.get_date_print_format(time_filter)) time_host_stats_col_name_csv[1] += str(stats_utils.get_date_print_format(time_filter)) if options.output_dir is not None: output_dir = options.output_dir if not os.path.isdir(output_dir): logger.warning("Output directory doesn't exists. Creating directory... ") try: os.mkdir(output_dir) except: logger.error("Unable to create output directory." + output_dir) sys.exit(1) else: output_dir = os.path.join(submit_dir, DEFAULT_OUTPUT_DIR) utils.create_directory(output_dir, True) output_db_url, wf_uuid = db_utils.get_db_url_wf_uuid(submit_dir, options.config_properties) if output_db_url is not None: print_workflow_details(output_db_url, wf_uuid, output_dir) sys.exit(0) if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/bin/pegasus-create-dir0000755000175000017500000004042111757531137020217 0ustar ryngerynge#!/usr/bin/env python """ Pegasus utility for creating directories for a set of protocols Usage: pegasus-create-dir [options] """ ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
## import os import re import sys import errno import logging import optparse import tempfile import subprocess import signal import string import stat import time from collections import deque __author__ = "Mats Rynge " # --- regular expressions ------------------------------------------------------------- re_parse_url = re.compile(r'([\w]+)://([\w\.\-:@]*)(/[\S]*)') # --- classes ------------------------------------------------------------------------- class URL: proto = "" host = "" path = "" def set_url(self, url): self.proto, self.host, self.path = self.parse_url(url) def parse_url(self, url): proto = "" host = "" path = "" # default protocol is file:// if string.find(url, ":") == -1: logger.debug("URL without protocol (" + url + ") - assuming file://") url = "file://" + url # file url is a special cases as it can contain relative paths and env vars if string.find(url, "file:") == 0: proto = "file" # file urls can either start with file://[\w]*/ or file: (no //) path = re.sub("^file:(//[\w\.\-:@]*)?", "", url) path = expand_env_vars(path) return proto, host, path # other than file urls r = re_parse_url.search(url) if not r: raise RuntimeError("Unable to parse URL: %s" % (url)) # Parse successful proto = r.group(1) host = r.group(2) path = r.group(3) # no double slashes in urls path = re.sub('//+', '/', path) return proto, host, path def url(self): return "%s://%s%s" % (self.proto, self.host, self.path) def url_dirname(self): dn = os.path.dirname(self.path) return "%s://%s%s" % (self.proto, self.host, dn) def parent_url(self): parent = URL() parent.proto = self.proto parent.host = self.host parent.path = os.path.dirname(self.path) return parent class Alarm(Exception): pass # --- global variables ---------------------------------------------------------------- prog_base = os.path.split(sys.argv[0])[1] # Name of this program logger = logging.getLogger("my_logger") # this is the map of what tool to use for a given protocol pair (src, dest) tool_map = {} tool_map['file' ] = 'mkdir' tool_map['ftp' ] = 'gsiftp' tool_map['gsiftp'] = 'gsiftp' tool_map['irods' ] = 'irods' tool_map['s3' ] = 's3' tool_map['s3s' ] = 's3' tool_map['scp' ] = 'scp' tool_map['srm' ] = 'srm' tool_info = {} # --- functions ----------------------------------------------------------------------- def setup_logger(level_str): # log to the console console = logging.StreamHandler() # default log level - make logger/console match logger.setLevel(logging.INFO) console.setLevel(logging.INFO) # level - from the command line level_str = level_str.lower() if level_str == "debug": logger.setLevel(logging.DEBUG) console.setLevel(logging.DEBUG) if level_str == "warning": logger.setLevel(logging.WARNING) console.setLevel(logging.WARNING) if level_str == "error": logger.setLevel(logging.ERROR) console.setLevel(logging.ERROR) # formatter formatter = logging.Formatter("%(asctime)s %(levelname)7s: %(message)s") console.setFormatter(formatter) logger.addHandler(console) logger.debug("Logger has been configured") def prog_sigint_handler(signum, frame): logger.warn("Exiting due to signal %d" % (signum)) myexit(1) def alarm_handler(signum, frame): raise Alarm def expand_env_vars(s): re_env_var = re.compile(r'\${?([a-zA-Z0-9_]+)}?') s = re.sub(re_env_var, get_env_var, s) return s def get_env_var(match): name = match.group(1) value = "" logger.debug("Looking up " + name) if name in os.environ: value = os.environ[name] return value def myexec(cmd_line, timeout_secs, should_log): """ executes shell commands with the ability to time out if the 
command hangs """ global delay_exit_code if should_log or logger.isEnabledFor(logging.DEBUG): logger.info(cmd_line) sys.stdout.flush() # set up signal handler for timeout signal.signal(signal.SIGALRM, alarm_handler) signal.alarm(timeout_secs) p = subprocess.Popen(cmd_line + " 2>&1", shell=True) try: stdoutdata, stderrdata = p.communicate() except Alarm: if sys.version_info >= (2, 6): p.terminate() raise RuntimeError("Command '%s' timed out after %s seconds" % (cmd_line, timeout_secs)) rc = p.returncode if rc != 0: raise RuntimeError("Command '%s' failed with error code %s" % (cmd_line, rc)) def backticks(cmd_line): """ what would a python program be without some perl love? """ return subprocess.Popen(cmd_line, shell=True, stdout=subprocess.PIPE).communicate()[0] def check_tool(executable, version_arg, version_regex): # initialize the global tool info for this executable tool_info[executable] = {} tool_info[executable]['full_path'] = None tool_info[executable]['version'] = None tool_info[executable]['version_major'] = None tool_info[executable]['version_minor'] = None tool_info[executable]['version_patch'] = None # figure out the full path to the executable full_path = backticks("which " + executable + " 2>/dev/null") full_path = full_path.rstrip('\n') if full_path == "": logger.info("Command '%s' not found in the current environment" %(executable)) return tool_info[executable]['full_path'] = full_path # version if version_regex == None: version = "N/A" else: version = backticks(executable + " " + version_arg + " 2>&1") version = version.replace('\n', "") re_version = re.compile(version_regex) result = re_version.search(version) if result: version = result.group(1) tool_info[executable]['version'] = version # if possible, break up version into major, minor, patch re_version = re.compile("([0-9]+)\.([0-9]+)(\.([0-9]+)){0,1}") result = re_version.search(version) if result: tool_info[executable]['version_major'] = int(result.group(1)) tool_info[executable]['version_minor'] = int(result.group(2)) tool_info[executable]['version_patch'] = result.group(4) if tool_info[executable]['version_patch'] == None or tool_info[executable]['version_patch'] == "": tool_info[executable]['version_patch'] = None else: tool_info[executable]['version_patch'] = int(tool_info[executable]['version_patch']) logger.info(" %-18s Version: %-7s Path: %s" % (executable, version, full_path)) def check_env_and_tools(): # PATH setup path = "/usr/bin:/bin" if "PATH" in os.environ: path = os.environ['PATH'] path_entries = path.split(':') # is /usr/bin in the path? 
if not("/usr/bin" in path_entries): path_entries.append("/usr/bin") path_entries.append("/bin") # fink on macos x if os.path.exists("/sw/bin") and not("/sw/bin" in path_entries): path_entries.append("/sw/bin") # need LD_LIBRARY_PATH for Globus tools ld_library_path = "" if "LD_LIBRARY_PATH" in os.environ: ld_library_path = os.environ['LD_LIBRARY_PATH'] ld_library_path_entries = ld_library_path.split(':') # if PEGASUS_HOME is set, prepend it to the PATH (we want it early to override other cruft) if "PEGASUS_HOME" in os.environ: try: path_entries.remove(os.environ['PEGASUS_HOME'] + "/bin") except Exception: pass path_entries.insert(0, os.environ['PEGASUS_HOME'] + "/bin") # if GLOBUS_LOCATION is set, prepend it to the PATH and LD_LIBRARY_PATH # (we want it early to override other cruft) if "GLOBUS_LOCATION" in os.environ: try: path_entries.remove(os.environ['GLOBUS_LOCATION'] + "/bin") except Exception: pass path_entries.insert(0, os.environ['GLOBUS_LOCATION'] + "/bin") try: ld_library_path_entries.remove(os.environ['GLOBUS_LOCATION'] + "/lib") except Exception: pass ld_library_path_entries.insert(0, os.environ['GLOBUS_LOCATION'] + "/lib") os.environ['PATH'] = ":".join(path_entries) os.environ['LD_LIBRARY_PATH'] = ":".join(ld_library_path_entries) os.environ['DYLD_LIBRARY_PATH'] = ":".join(ld_library_path_entries) logger.info("PATH=" + os.environ['PATH']) logger.info("LD_LIBRARY_PATH=" + os.environ['LD_LIBRARY_PATH']) # irods requires a password hash file os.environ['irodsAuthFileName'] = os.getcwd() + "/.irodsA" def mkdir(url): """ creates a directory on a mounted file system """ path = url.path if not(os.path.exists(path)): logger.debug("Creating local directory " + path) try: os.makedirs(path, 0755) except os.error, err: # if dir already exists, ignore the error if not(os.path.isdir(path)): raise RuntimeError(err) def scp(url): """ creates a directory using ssh """ cmd = "/usr/bin/ssh" if "SSH_PRIVATE_KEY" in os.environ: cmd += " -i " + os.environ['SSH_PRIVATE_KEY'] cmd += " -o PasswordAuthentication=no" cmd += " -o StrictHostKeyChecking=no" cmd += " " + url.host cmd += " '/bin/mkdir -p " + url.path + "'" myexec(cmd, 60, True) def gsiftp(url): """ create directories on gridftp servers """ if tool_info['globus-url-copy']['full_path'] == None: raise RuntimeError("Unable to do gsiftp mkdir becuase globus-url-copy could not be found") # build command line for globus-url-copy cmd = tool_info['globus-url-copy']['full_path'] # make output from guc match our current log level if logger.isEnabledFor(logging.DEBUG): cmd += " -dbg" cmd += " -create-dest" cmd += " -no-third-party-transfers -no-data-channel-authentication" cmd += " file:///dev/null " + url.url() + "/.empty" myexec(cmd, 60, True) def irods_login(): """ log in to irods by using the iinit command - if the file already exists, we are already logged in """ f = os.environ['irodsAuthFileName'] if os.path.exists(f): return # read password from env file if not "irodsEnvFile" in os.environ: raise RuntimeError("Missing irodsEnvFile - unable to do irods transfers") password = None h = open(os.environ['irodsEnvFile'], 'r') for line in h: items = line.split(" ", 2) if items[0].lower() == "irodspassword": password = items[1].strip(" \t'\"\r\n") h.close() if password == None: raise RuntimeError("No irodsPassword specified in irods env file") h = open(".irodsAc", "w") h.write(password + "\n") h.close() cmd = "cat .irodsAc | iinit" myexec(cmd, 60*60, True) os.unlink(".irodsAc") def irods(url): """ irods - use the icommands to interact with irods """ if 
if tool_info['imkdir']['full_path'] == None: raise RuntimeError("Unable to do irods create dir because imkdir could not be found in the current path") # log in to irods try: irods_login() except Exception, loginErr: logger.error(loginErr) raise RuntimeError("Unable to log into irods") cmd = "imkdir -p " + os.path.dirname(url.path) myexec(cmd, 60, True) def srm(url): """ implements recursive mkdir as srm-mkdir cannot handle it """ if tool_info['srm-mkdir']['full_path'] == None: raise RuntimeError("Unable to do srm mkdir because srm-mkdir could not be found") # if the directory exists, just return cmd = "srm-ls %s >/dev/null" %(url.url()) try: myexec(cmd, 60, True) return except Exception, err: logger.info("Directory %s does not exist yet" % (url.path)) # back down to a directory which exists one_up = url.parent_url() if one_up.path != "/": srm(one_up) # create the directory, and let failures propagate to the caller cmd = "srm-mkdir %s" %(url.url()) myexec(cmd, 60, True) def s3(url): """ s3 - uses pegasus-s3 to interact with Amazon S3 """ if tool_info['pegasus-s3']['full_path'] == None: raise RuntimeError("Unable to do S3 mkdir because pegasus-s3 could not be found") # extract the bucket part re_bucket = re.compile(r'(s3(s){0,1}://\w+@\w+/+[\w]+)') bucket = url.url() r = re_bucket.search(bucket) if r: bucket = r.group(1) else: raise RuntimeError("Unable to parse bucket: %s" % (bucket)) # first ensure that the bucket exists cmd = "pegasus-s3 mkdir %s" %(bucket) myexec(cmd, 60, True) def create_dir(url): """ handles the creation of a directory """ try: if tool_map.has_key(url.proto): tool = tool_map[url.proto] if tool == "mkdir": mkdir(url) elif tool == "scp": scp(url) elif tool == "gsiftp": check_tool("globus-url-copy", "-version", "([0-9]+\.[0-9]+)") check_tool("uberftp", "-version", "([0-9]+\.[0-9]+)") gsiftp(url) elif tool == "irods": check_tool("imkdir", "-h", "Version[ \t]+([\.0-9a-zA-Z]+)") irods(url) elif tool == "srm": check_tool("srm-mkdir", "-version", "srm-mkdir[ \t]+([\.0-9a-zA-Z]+)") srm(url) elif tool == "s3": check_tool("pegasus-s3", "help", None) s3(url) else: logger.critical("Error: No mapping for the tool '%s'" %(tool)) myexit(1) else: logger.critical("Error: This tool does not know how to create a directory for %s://" % (url.proto)) myexit(1) except RuntimeError, err: logger.critical(err) myexit(1) def myexit(rc): """ system exit without a stack trace - silly python """ try: sys.exit(rc) except SystemExit: sys.exit(rc) # --- main ---------------------------------------------------------------------------- # dup stderr onto stdout sys.stderr = sys.stdout # Configure command line option parser prog_usage = "usage: %s [options]" % (prog_base) parser = optparse.OptionParser(usage=prog_usage) parser.add_option("-l", "--loglevel", action = "store", dest = "log_level", help = "Log level. 
Valid levels are: debug,info,warning,error, Default is info.") parser.add_option("-u", "--url", action = "store", dest = "url", help = "URL for the directory to create") # Parse command line options (options, args) = parser.parse_args() if options.log_level == None: options.log_level = "info" setup_logger(options.log_level) if options.url == None: logger.critical("Please specify the URL for the directory to create") myexit(1) # Die nicely when asked to (Ctrl+C, system shutdown) signal.signal(signal.SIGINT, prog_sigint_handler) # check environment and tools try: check_env_and_tools() except Exception, err: logger.critical(err) myexit(1) url = URL() url.set_url(options.url) try: create_dir(url) except Exception, err: logger.critical(err) logger.critical("Directory not created!") myexit(1) logger.info("Directory created") myexit(0) pegasus-wms_4.0.1+dfsg/bin/pegasus-cleanup0000755000175000017500000004173411757531137017637 0ustar ryngerynge#!/usr/bin/env python """ Pegasus utility for removing of files during workflow enactment Usage: pegasus-cleanup [options] """ ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## import os import re import sys import errno import logging import optparse import tempfile import subprocess import signal import string import stat import time from collections import deque __author__ = "Mats Rynge " # --- regular expressions ------------------------------------------------------------- re_parse_url = re.compile(r'([\w]+)://([\w\.\-:@]*)(/[\S]*)') # --- classes ------------------------------------------------------------------------- class URL: proto = "" host = "" path = "" def set_url(self, url): self.proto, self.host, self.path = self.parse_url(url) def parse_url(self, url): proto = "" host = "" path = "" # default protocol is file:// if string.find(url, ":") == -1: logger.debug("URL without protocol (" + url + ") - assuming file://") url = "file://" + url # file url is a special cases as it can contain relative paths and env vars if string.find(url, "file:") == 0: proto = "file" # file urls can either start with file://[\w]*/ or file: (no //) path = re.sub("^file:(//[\w\.\-:@]*)?", "", url) path = expand_env_vars(path) return proto, host, path # other than file urls r = re_parse_url.search(url) if not r: raise RuntimeError("Unable to parse URL: %s" % (url)) # Parse successful proto = r.group(1) host = r.group(2) path = r.group(3) # no double slashes in urls path = re.sub('//+', '/', path) return proto, host, path def url(self): return "%s://%s%s" % (self.proto, self.host, self.path) def url_dirname(self): dn = os.path.dirname(self.path) return "%s://%s%s" % (self.proto, self.host, dn) class Alarm(Exception): pass # --- global variables ---------------------------------------------------------------- prog_base = os.path.split(sys.argv[0])[1] # Name of this program prog_dir = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0]))) # path to bin logger = logging.getLogger("my_logger") # this is the map of what tool to 
use for a given protocol pair (src, dest) tool_map = {} tool_map['file' ] = 'rm' tool_map['ftp' ] = 'gsiftp' tool_map['gsiftp'] = 'gsiftp' tool_map['irods' ] = 'irods' tool_map['s3' ] = 's3' tool_map['s3s' ] = 's3' tool_map['scp' ] = 'scp' tool_map['srm' ] = 'srm' tool_info = {} # --- functions ----------------------------------------------------------------------- def setup_logger(level_str): # log to the console console = logging.StreamHandler() # default log level - make logger/console match logger.setLevel(logging.INFO) console.setLevel(logging.INFO) # level - from the command line level_str = level_str.lower() if level_str == "debug": logger.setLevel(logging.DEBUG) console.setLevel(logging.DEBUG) if level_str == "warning": logger.setLevel(logging.WARNING) console.setLevel(logging.WARNING) if level_str == "error": logger.setLevel(logging.ERROR) console.setLevel(logging.ERROR) # formatter formatter = logging.Formatter("%(asctime)s %(levelname)7s: %(message)s") console.setFormatter(formatter) logger.addHandler(console) logger.debug("Logger has been configured") def prog_sigint_handler(signum, frame): logger.warn("Exiting due to signal %d" % (signum)) myexit(1) def alarm_handler(signum, frame): raise Alarm def expand_env_vars(s): re_env_var = re.compile(r'\${?([a-zA-Z0-9_]+)}?') s = re.sub(re_env_var, get_env_var, s) return s def get_env_var(match): name = match.group(1) value = "" logger.debug("Looking up " + name) if name in os.environ: value = os.environ[name] return value def myexec(cmd_line, timeout_secs, should_log): """ executes shell commands with the ability to time out if the command hangs """ global delay_exit_code if should_log or logger.isEnabledFor(logging.DEBUG): logger.info(cmd_line) sys.stdout.flush() # set up signal handler for timeout signal.signal(signal.SIGALRM, alarm_handler) signal.alarm(timeout_secs) p = subprocess.Popen(cmd_line + " 2>&1", shell=True) try: stdoutdata, stderrdata = p.communicate() except Alarm: if sys.version_info >= (2, 6): p.terminate() raise RuntimeError("Command '%s' timed out after %s seconds" % (cmd_line, timeout_secs)) rc = p.returncode if rc != 0: raise RuntimeError("Command '%s' failed with error code %s" % (cmd_line, rc)) def backticks(cmd_line): """ what would a python program be without some perl love? 
""" return subprocess.Popen(cmd_line, shell=True, stdout=subprocess.PIPE).communicate()[0] def check_tool(executable, version_arg, version_regex): # initialize the global tool info for this executable tool_info[executable] = {} tool_info[executable]['full_path'] = None tool_info[executable]['version'] = None tool_info[executable]['version_major'] = None tool_info[executable]['version_minor'] = None tool_info[executable]['version_patch'] = None # figure out the full path to the executable full_path = backticks("which " + executable + " 2>/dev/null") full_path = full_path.rstrip('\n') if full_path == "": logger.info("Command '%s' not found in the current environment" %(executable)) return tool_info[executable]['full_path'] = full_path # version if version_regex == None: version = "N/A" else: version = backticks(executable + " " + version_arg + " 2>&1") version = version.replace('\n', "") re_version = re.compile(version_regex) result = re_version.search(version) if result: version = result.group(1) tool_info[executable]['version'] = version # if possible, break up version into major, minor, patch re_version = re.compile("([0-9]+)\.([0-9]+)(\.([0-9]+)){0,1}") result = re_version.search(version) if result: tool_info[executable]['version_major'] = int(result.group(1)) tool_info[executable]['version_minor'] = int(result.group(2)) tool_info[executable]['version_patch'] = result.group(4) if tool_info[executable]['version_patch'] == None or tool_info[executable]['version_patch'] == "": tool_info[executable]['version_patch'] = None else: tool_info[executable]['version_patch'] = int(tool_info[executable]['version_patch']) logger.info(" %-18s Version: %-7s Path: %s" % (executable, version, full_path)) def check_env_and_tools(): # PATH setup path = "/usr/bin:/bin" if "PATH" in os.environ: path = os.environ['PATH'] path_entries = path.split(':') # is /usr/bin in the path? 
if not("/usr/bin" in path_entries): path_entries.append("/usr/bin") path_entries.append("/bin") # fink on macos x if os.path.exists("/sw/bin") and not("/sw/bin" in path_entries): path_entries.append("/sw/bin") # need LD_LIBRARY_PATH for Globus tools ld_library_path = "" if "LD_LIBRARY_PATH" in os.environ: ld_library_path = os.environ['LD_LIBRARY_PATH'] ld_library_path_entries = ld_library_path.split(':') # if PEGASUS_HOME is set, prepend it to the PATH (we want it early to override other cruft) if "PEGASUS_HOME" in os.environ: try: path_entries.remove(os.environ['PEGASUS_HOME'] + "/bin") except Exception: pass path_entries.insert(0, os.environ['PEGASUS_HOME'] + "/bin") # if GLOBUS_LOCATION is set, prepend it to the PATH and LD_LIBRARY_PATH # (we want it early to override other cruft) if "GLOBUS_LOCATION" in os.environ: try: path_entries.remove(os.environ['GLOBUS_LOCATION'] + "/bin") except Exception: pass path_entries.insert(0, os.environ['GLOBUS_LOCATION'] + "/bin") try: ld_library_path_entries.remove(os.environ['GLOBUS_LOCATION'] + "/lib") except Exception: pass ld_library_path_entries.insert(0, os.environ['GLOBUS_LOCATION'] + "/lib") os.environ['PATH'] = ":".join(path_entries) os.environ['LD_LIBRARY_PATH'] = ":".join(ld_library_path_entries) os.environ['DYLD_LIBRARY_PATH'] = ":".join(ld_library_path_entries) logger.info("PATH=" + os.environ['PATH']) logger.info("LD_LIBRARY_PATH=" + os.environ['LD_LIBRARY_PATH']) # irods requires a password hash file os.environ['irodsAuthFileName'] = os.getcwd() + "/.irodsA" # tools we might need later check_tool("globus-url-copy", "-version", "([0-9]+\.[0-9]+)") check_tool("srm-rm", "-version", "srm-copy[ \t]+([\.0-9a-zA-Z]+)") check_tool("irm", "-h", "Version[ \t]+([\.0-9a-zA-Z]+)") check_tool("pegasus-s3", "help", None) def rm(urls): """ removes locally using /bin/rm """ for i, url in enumerate(urls): cmd = "/bin/rm -f \"%s\"" % (url.path) try: myexec(cmd, 5*60, True) except RuntimeError, err: logger.error(err) def scp(urls): """ removes using ssh+rm """ for i, url in enumerate(urls): cmd = "/usr/bin/ssh" if "SSH_PRIVATE_KEY" in os.environ: cmd += " -i " + os.environ['SSH_PRIVATE_KEY'] cmd += " -o PasswordAuthentication=no" cmd += " -o StrictHostKeyChecking=no" + \ " " + url.host + " " + \ " \"/bin/rm -f " + url.path + "\"" try: myexec(cmd, 5*60, True) except RuntimeError, err: logger.error(err) def gsiftp(urls): """ remove files on gridftp servers - delegate run to pegasus-transfer """ try: tmp_fd, tmp_name = tempfile.mkstemp(prefix="pegasus-cleanup-", suffix=".lst", dir="/tmp") tmp_file = os.fdopen(tmp_fd, "w+b") except: raise RuntimeError("Unable to create tmp file for globus-url-copy transfers") for i, url in enumerate(urls): tmp_file.write("file:///dev/null\n") tmp_file.write("%s\n" %(url.url())) tmp_file.close() # use pegasus-transfer cmd = prog_dir + "/pegasus-transfer" # make output from guc match our current log level if logger.isEnabledFor(logging.DEBUG): cmd += " -l debug" cmd += " -f " + tmp_name try: myexec(cmd, 1*60*60, True) except RuntimeError, err: logger.error(err) os.unlink(tmp_name) def irods_login(): """ log in to irods by using the iinit command - if the file already exists, we are already logged in """ f = os.environ['irodsAuthFileName'] if os.path.exists(f): return # read password from env file if not "irodsEnvFile" in os.environ: raise RuntimeError("Missing irodsEnvFile - unable to do irods transfers") password = None h = open(os.environ['irodsEnvFile'], 'r') for line in h: items = line.split(" ", 2) if items[0].lower() == 
"irodspassword": password = items[1].strip(" \t'\"\r\n") h.close() if password == None: raise RuntimeError("No irodsPassword specified in irods env file") h = open(".irodsAc", "w") h.write(password + "\n") h.close() cmd = "cat .irodsAc | iinit" myexec(cmd, 60*60, True) os.unlink(".irodsAc") def irods(urls): """ irods - use the icommands to interact with irods """ if tool_info['irm']['full_path'] == None: raise RuntimeError("Unable to do irods transfers becuase iget could not be found in the current path") # log in to irods try: irods_login() except Exception, loginErr: logger.error(loginErr) raise RuntimeError("Unable to log into irods") for i, url in enumerate(urls): cmd = "irm -f " + url.path try: myexec(cmd, 5*60, True) except Exception, err: logger.error(err) def srm(urls): """ srm - use srm-rm """ if tool_info['srm-rm']['full_path'] == None: raise RuntimeError("Unable to do srm remove becuase srm-rm could not be found") for i, url in enumerate(urls): cmd = "srm-rm " + url.url() try: myexec(cmd, 5*60, True) except Exception, err: logger.error(err) def s3(urls): """ s3 - uses pegasus-s3 to interact with Amazon S3 """ if tool_info['pegasus-s3']['full_path'] == None: raise RuntimeError("Unable to do S3 transfers becuase pegasus-s3 could not be found") for i, url in enumerate(urls): cmd = "pegasus-s3 rm " + url.url() try: myexec(cmd, 60, True) except Exception, err: logger.error(err) def urls_groupable(a, b): """ compares two urls, and determins if they are similar enough to be grouped together for one tool """ if a.proto != b.proto: return False return True def handle_removes(urls): """ removes the file with the given url """ try: if tool_map.has_key(urls[0].proto): tool = tool_map[urls[0].proto] if tool == "rm": rm(urls) elif tool == "scp": scp(urls) elif tool == "gsiftp": gsiftp(urls) elif tool == "irods": irods(urls) elif tool == "srm": srm(urls) elif tool == "s3": s3(urls) else: logger.critical("Error: No mapping for the tool '%s'" %(tool)) myexit(1) else: logger.critical("Error: This tool does not know how to remove from %s://" % (url.proto)) myexit(1) except RuntimeError, err: logger.critical(err) myexit(1) def myexit(rc): """ system exit without a stack trace - silly python """ try: sys.exit(rc) except SystemExit: sys.exit(rc) # --- main ---------------------------------------------------------------------------- # dup stderr onto stdout sys.stderr = sys.stdout # Configure command line option parser prog_usage = "usage: %s [options]" % (prog_base) parser = optparse.OptionParser(usage=prog_usage) parser.add_option("-l", "--loglevel", action = "store", dest = "log_level", help = "Log level. Valid levels are: debug,info,warning,error, Default is info.") parser.add_option("-f", "--file", action = "store", dest = "file", help = "File containing URLs to be removed. If not given, list is read from stdin.") # Parse command line options (options, args) = parser.parse_args() if options.log_level == None: options.log_level = "info" setup_logger(options.log_level) # Die nicely when asked to (Ctrl+C, system shutdown) signal.signal(signal.SIGINT, prog_sigint_handler) # stdin or file input? 
if options.file == None: logger.info("Reading URL pairs from stdin") input_file = sys.stdin else: logger.info("Reading URL pairs from %s" % (options.file)) try: input_file = open(options.file, 'r') except Exception, err: logger.critical('Error reading url pair list: %s' % (err)) myexit(1) # check environment and tools try: check_env_and_tools() except Exception, err: logger.critical(err) myexit(1) # list of work url_q = deque() # fill the url queue with user provided entries line_nr = 0 try: for line in input_file.readlines(): line_nr += 1 if line[0] != '#' and len(line) > 4: line = line.rstrip('\n') url = URL() url.set_url(line) url_q.append(url) except Exception, err: logger.critical('Error handling url: %s' % (err)) myexit(1) # do the removals while url_q: u_main = url_q.popleft() # create a list of urls to pass to underlying tool u_list = [] u_list.append(u_main) try: u_next = url_q[0] except IndexError, err: u_next = False while u_next and urls_groupable(u_main, u_next): u_list.append(u_next) url_q.popleft() try: u_next = url_q[0] except IndexError, err: u_next = False # magic! handle_removes(u_list) myexit(0) pegasus-wms_4.0.1+dfsg/bin/pegasus-exitcode0000755000175000017500000001370611757531137020012 0ustar ryngerynge#!/usr/bin/env python # # Copyright 2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # # exitcode.py # # This program parses kickstart invocation records looking for failures. # If failures are found, it prints a message and exits with a non-zero # exit code. If no failures are found, it exits with 0. # # This program also renames the .out and .err file to .out.XXX and .err.XXX # where XXX is a sequence number. This sequence number is incremented each # time the program is run with the same kickstart.out argument. # # Since XML parsers are slow, this program doesn't parse the full invocation # XML, but rather looks for the <status> tag in the XML and extracts the raw # exitcode using simple string manipulations. This turns out to be much # faster than using an XML parser. On .out files with 1000 invocation # records this program runs in about 30 milliseconds and uses less than # 4 MB of physical memory. # import sys import re import os from optparse import OptionParser __author__ = "Gideon Juve " def fail(message=None): if message: print "fail: %s" % message sys.exit(1) def rename(outfile, errfile): """Rename .out and .err files to .out.XXX and .err.XXX where XXX is the next sequence number. Returns the new name, or fails with an error message and a non-zero exit code.""" # This is just to prevent the file from being accidentally renamed # again in testing. 
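# (Illustrative: an argument already named job.out.003 matches the pattern
# below and is returned unchanged rather than being renamed a second time.)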
if re.search("\.out\.[0-9]{3}$", outfile): return outfile, errfile # Must end in .out if not outfile.endswith(".out"): fail("%s does not look like a kickstart .out file" % outfile) # Find next file in sequence retry = None for i in range(0,1000): candidate = "%s.%03d" % (outfile,i) if not os.path.isfile(candidate): retry = i break # unlikely to occur if retry is None: fail("%s has been renamed too many times!" % (outfile)) basename = outfile[:-4] # rename .out to .out.000 newout = "%s.out.%03d" % (basename,retry) os.rename(outfile,newout) # rename .err to .err.000 if it exists newerr = None if os.path.isfile(errfile): newerr = "%s.err.%03d" % (basename,retry) os.rename(errfile,newerr) return newout, newerr def exitcode(outfile): """Parse invocation records looking for status codes. Returns the number of successful invocations, or fails with an error message and a non-zero exit code.""" # Read the file first f = open(outfile) txt = f.read() f.close() # Verify the length if len(txt) == 0: fail("kickstart produced no output") # Check the exitcode of all tasks regex = re.compile(r'raw="(-?[0-9]+)"') succeeded = 0 e = 0 while True: b = txt.find("", b) if e < 0: fail("mismatched ") e = e + len("") m = regex.search(txt[b:e]) if m: raw = int(m.group(1)) else: fail(" was missing valid 'raw' attribute") if raw != 0: fail("task exited with raw status %d" % raw) succeeded = succeeded + 1 # Require at least one task to succeed if succeeded == 0: fail("no tasks succeeded") return succeeded def pegasuslite_failures(errfile): """Determine if the stderr contains PegasusLite output, and if so make sure that the PegasusLite tasks finished successfully""" # Read the file first f = open(errfile) txt = f.read() f.close() # Is this a PegasusLite job? regex = re.compile(r'^PegasusLite:', re.MULTILINE) if regex.search(txt) == None: return False # If we got here, we know it is a PegasusLite job. Now check # for exitcode 0 regex = re.compile(r'^PegasusLite: exitcode 0$', re.MULTILINE) if regex.search(txt): return False # PegasusLite job without exitcode 0 - must be a failure return True def main(): usage = "Usage: %prog [options] kickstart.out" parser = OptionParser(usage) parser.add_option("-t", "--tasks", action="store", type="int", dest="tasks", metavar="N", help="Number of tasks expected. If less than N tasks succeeded, then exitcode will fail.") parser.add_option("-r", "--return", action="store", type="int", dest="exitcode", default=0, metavar="R", help="Return code reported by DAGMan. This can be specified in a DAG using the $RETURN variable.") parser.add_option("-n", "--no-rename", action="store_false", dest="rename", default=True, help="Don't rename kickstart.out and .err to .out.XXX and .err.XXX. 
Useful for testing.") (options, args) = parser.parse_args() if len(args) != 1: parser.error("please specify kickstart.out") outfile = args[0] i = outfile.rfind(".out") left = outfile[0:i] right = "" if i + 5 < len(outfile): right = outfile[i+4:] errfile = left + ".err" + right if not os.path.isfile(outfile): fail("%s does not exist" % outfile) # if we are renaming, then rename if options.rename: outfile, errfile = rename(outfile, errfile) # check supplied exitcode first if options.exitcode != 0: fail("dagman reported non-zero exitcode: %d" % options.exitcode) # check exitcodes of all tasks succeeded = exitcode(outfile) # if we know how many tasks to expect, check that they all succeeded if options.tasks and options.tasks>=0 and succeeded != options.tasks: fail("wrong number of successful tasks: wanted %d got %d" % \ (options.tasks,succeeded)) # check stderr for PegasusLite information, and possibly non-zero exit # of the wrapper tasks if pegasuslite_failures(errfile): fail("Some PegasusLite wrapper tasks failed") # If we reach this, then it was OK sys.exit(0) if __name__ == "__main__": main() pegasus-wms_4.0.1+dfsg/bin/pegasus-sc-client0000755000175000017500000000114611757531137020062 0ustar ryngerynge#!/bin/bash # # generate a Concrete Dag by providing a DAX # # $Id: pegasus-sc-client 4650 2011-11-11 16:43:40Z rynge $ PEGASUS_CONFIG="`dirname $0`/pegasus-config" eval `$PEGASUS_CONFIG --sh-dump` . $PEGASUS_SHARE_DIR/common.sh # PEGASUS_HOME should not be set anymore unset PEGASUS_HOME # run java program nice ${JAVA} \ "-Dpegasus.home.sysconfdir=$PEGASUS_CONF_DIR" \ "-Dpegasus.home.bindir=$PEGASUS_BIN_DIR" \ "-Dpegasus.home.sharedstatedir=$PEGASUS_SHARE_DIR" \ "-Dpegasus.home.schemadir=$PEGASUS_SCHEMA_DIR" \ $addon edu.isi.pegasus.planner.client.PegasusGetSites "$@" pegasus-wms_4.0.1+dfsg/bin/pegasus-dagman0000755000175000017500000001701311757531137017430 0ustar ryngerynge#!/usr/bin/env python """ pegasus-dagman This program is to be run as a replacement for condor_dagman inside of a submit file. The dag can be submitted by running the command condor_submit_dag -dagman /path/to/pegasus-dagman my.dag Usage: pegasus-dagman [options] """ ## # Copyright 2007-2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
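#
# Illustrative sketch of the monitord restart back-off in the main loop below
# (hypothetical restart counts): after a quick death, i.e. within
# DIED_TOO_QUICKLY_TIME seconds of the last start, the relaunch is delayed by
# min(math.exp(restarts) * 10, 3600) seconds, so roughly
#   restarts = 0 -> 10s, 1 -> 27s, 2 -> 74s, 3 -> 201s, ... capped at one hour
#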
## # Author : gmehta at isi dot edu # Revision : $Revision$ __author__ = "Gaurang Mehta" __author__ = "Mats Rynge" import os, sys, signal, subprocess import logging import time import math import shutil def find_prog(prog,dir=[]): def is_prog(fpath): return os.path.isfile(fpath) and os.access(fpath, os.X_OK) fpath, fname = os.path.split(prog) if fpath: if is_prog(prog): return prog else: for path in dir+os.environ["PATH"].split(os.pathsep): exe_file = os.path.join(path, prog) if is_prog(exe_file): return exe_file return None # Use pegasus-config to find our lib path bin_dir = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0]))) pegasus_config = find_prog("pegasus-config",[bin_dir]) lib_dir = subprocess.Popen([pegasus_config,"--noeoln","--python"], stdout=subprocess.PIPE, shell=False).communicate()[0] lib_ext_dir = subprocess.Popen([pegasus_config,"--noeoln","--python-externals"], stdout=subprocess.PIPE, shell=False).communicate()[0] print "Pegasus LIB %s" % lib_dir print "Pegasus LIB %s" % lib_ext_dir print "Pegasus BIN_DIR %s" % bin_dir print "Pegasus DAGMAN is %s" % sys.argv[0] # Insert this directory in our search path os.sys.path.insert(0, lib_ext_dir) os.sys.path.insert(0, lib_dir) import Pegasus.common # logger is setup in common logger = logging.getLogger() logger.setLevel(logging.INFO) SLEEP_TIME = 15 DIED_TOO_QUICKLY_TIME = 120 dagman = None monitord = None monitord_last_start = 0 monitord_next_start = 0 monitord_current_restarts = 0 def dagman_launch(dagman_bin,arguments=[]): '''Launches the condor_dagman program with all the arguments passed to pegasus-dagman''' if dagman_bin != None : arguments.insert(0, "condor_scheduniv_exec."+os.getenv("CONDOR_ID")) try : dagman_proc = subprocess.Popen(arguments, stdout=sys.stdout, stderr=sys.stderr, executable=dagman_bin) logger.info("Launched Dagman with Pid %d" % dagman_proc.pid) except OSError, err : logger.error("Could not launch Dagman: %s" % err) sys.exit(1) else : logger.error("Condor Dagman not found") sys.exit(127) return dagman_proc def monitord_launch(monitord_bin,arguments=[]): '''Launches Monitord in condor foreground mode''' if monitord_bin != None : try : log = open("monitord.log", 'a') monitord_proc = subprocess.Popen( [monitord_bin, "-N", os.getenv('_CONDOR_DAGMAN_LOG')], stdout=log, stderr=subprocess.STDOUT) logger.info("Launched Monitord with Pid %d" % monitord_proc.pid) return monitord_proc except OSError, err : logger.error("Could not launch Monitord: %s" % err) else : logger.error("pegasus-monitord not found") return None def is_dagman_copy_to_spool(): '''Checks using condor_config_val if dagman_copy_to_spool is set then copy condor_dagman to the current dir "bin_dir" ''' condor_config_val = find_prog("condor_config_val") copy_to_spool = subprocess.Popen([condor_config_val,"DAGMAN_COPY_TO_SPOOL"], stdout=subprocess.PIPE, shell=False).communicate()[0] logger.info("DAGMAN_COPY_TO_SPOOL is set to %s" % copy_to_spool) if copy_to_spool.lower().strip() == "true": return True else : return False def sighandler(signum, frame): ''' Signal handler to catch and pass on SIGTERM, SIGINT, SIGABRT, SIGUSR1 and SIGUSR2 ''' # global dagman, monitord print "pegasus-dagman caught SIGNAL", signum if dagman != None : os.kill(dagman.pid, signum) if monitord != None: if signum == signal.SIGUSR1 : signum = signal.SIGINT os.kill(monitord.pid, signum) #-- main-------------------------------------------------------------- if __name__ == "__main__": os.setpgid(0, 0) signal.signal(signal.SIGTERM, sighandler) signal.signal(signal.SIGINT, sighandler) 
#-- main--------------------------------------------------------------
if __name__ == "__main__":
    os.setpgid(0, 0)

    signal.signal(signal.SIGTERM, sighandler)
    signal.signal(signal.SIGINT, sighandler)
    signal.signal(signal.SIGABRT, sighandler)
    signal.signal(signal.SIGUSR1, sighandler)
    signal.signal(signal.SIGUSR2, sighandler)

    copy_to_spool = is_dagman_copy_to_spool()

    # Find dagman Binary
    dagman_bin = find_prog("condor_dagman", [bin_dir])

    if dagman_bin != None:
        # If copy_to_spool is set, copy the dagman binary to the dag submit directory
        if copy_to_spool:
            old_dagman_bin = dagman_bin
            dagman_bin = os.path.join(os.getcwd(),
                                      "condor_scheduniv_exec." + os.getenv("CONDOR_ID"))
            shutil.copy2(old_dagman_bin, dagman_bin)
            logger.info("Copied condor_dagman from %s to %s" %
                        (old_dagman_bin, dagman_bin))

    # Launch DAGMAN
    dagman = dagman_launch(dagman_bin, sys.argv[1:])

    # Find monitord Binary
    monitord_bin = find_prog("pegasus-monitord", [bin_dir])

    # Launch Monitord
    monitord = monitord_launch(monitord_bin)

    dagman.poll()
    monitord.poll()

    while monitord.returncode == None or dagman.returncode == None:
        if dagman.returncode == None and monitord.returncode != None:
            # monitord is not running
            t = time.time()

            if monitord_next_start == 0:
                logger.error("monitord is not running")

                # did the process die too quickly?
                if t - monitord_last_start < DIED_TOO_QUICKLY_TIME:
                    monitord_current_restarts += 1
                else:
                    monitord_current_restarts = 0

                # backoff with upper limit
                backoff = min(math.exp(monitord_current_restarts) * 10, 3600)
                logger.info("next monitord launch scheduled in about %d seconds" % (backoff))
                monitord_next_start = t + backoff - 1

            # time to restart yet?
            if monitord_next_start <= t:
                monitord_next_start = 0
                monitord_last_start = t
                monitord = monitord_launch(monitord_bin)

        # sleep in between polls
        time.sleep(SLEEP_TIME)

        monitord.poll()
        dagman.poll()

    # Dagman and Monitord have exited. Lets exit pegasus-dagman with
    # a merged returncode
    logger.info("Dagman exited with code %d" % dagman.returncode)
    logger.info("Monitord exited with code %d" % monitord.returncode)

    if copy_to_spool:
        logger.info("Removing copied condor_dagman from submit directory %s" % dagman_bin)
        os.remove(dagman_bin)

    # merge with bitwise OR so that a failure in either child yields a
    # non-zero exit code
    sys.exit(dagman.returncode | monitord.returncode)
pegasus-wms_4.0.1+dfsg/bin/pegasus-rc-client0000755000175000017500000000122211757531137020054 0ustar ryngerynge#!/bin/bash
#
# manipulate any replica catalog implementation through a generic
# interface from the shell.
#
# $Id: pegasus-rc-client 4650 2011-11-11 16:43:40Z rynge $
#

PEGASUS_CONFIG="`dirname $0`/pegasus-config"
eval `$PEGASUS_CONFIG --sh-dump`
. $PEGASUS_SHARE_DIR/common.sh

# PEGASUS_HOME should not be set anymore
unset PEGASUS_HOME

# run java program
nice ${JAVA} \
    "-Dpegasus.home.sysconfdir=$PEGASUS_CONF_DIR" \
    "-Dpegasus.home.bindir=$PEGASUS_BIN_DIR" \
    "-Dpegasus.home.sharedstatedir=$PEGASUS_SHARE_DIR" \
    "-Dpegasus.home.schemadir=$PEGASUS_SCHEMA_DIR" \
    $addon edu.isi.pegasus.planner.client.RCClient "$@"
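# (Editorial note, not part of the original script.)  The common pattern in
# these wrappers is that `pegasus-config --sh-dump` prints shell variable
# assignments, which the eval above imports into the environment, e.g.
# hypothetically:
#
#   PEGASUS_BIN_DIR=/usr/bin
#   PEGASUS_CONF_DIR=/etc/pegasus
#   PEGASUS_SHARE_DIR=/usr/share/pegasus
#   PEGASUS_SCHEMA_DIR=/usr/share/pegasus/schema
#
# The variable names match those used by these scripts; the values shown
# here are made up.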
pegasus-wms_4.0.1+dfsg/bin/pegasus-sc-converter0000755000175000017500000000103611757531137020611 0ustar ryngerynge#!/bin/bash
#
# convert a site catalog between the supported formats
# (runs the SCClient planner client)
#
# $Id: pegasus-sc-converter 5014 2012-02-24 22:17:17Z vahi $

PEGASUS_CONFIG="`dirname $0`/pegasus-config"
eval `$PEGASUS_CONFIG --sh-dump`
. $PEGASUS_SHARE_DIR/common.sh

# run java program
${JAVA} \
    "-Dpegasus.home.sysconfdir=$PEGASUS_CONF_DIR" \
    "-Dpegasus.home.bindir=$PEGASUS_BIN_DIR" \
    "-Dpegasus.home.sharedstatedir=$PEGASUS_SHARE_DIR" \
    "-Dpegasus.home.schemadir=$PEGASUS_SCHEMA_DIR" \
    $addon edu.isi.pegasus.planner.client.SCClient "$@"
pegasus-wms_4.0.1+dfsg/LICENSE0000644000175000017500000002615011757531137015045 0ustar ryngerynge
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License.
      Subject to the terms and conditions of this License, each Contributor
      hereby grants to You a perpetual, worldwide, non-exclusive, no-charge,
      royalty-free, irrevocable copyright license to reproduce, prepare
      Derivative Works of, publicly display, publicly perform, sublicense,
      and distribute the Work and such Derivative Works in Source or Object
      form.

   3. Grant of Patent License. Subject to the terms and conditions of this
      License, each Contributor hereby grants to You a perpetual, worldwide,
      non-exclusive, no-charge, royalty-free, irrevocable (except as stated
      in this section) patent license to make, have made, use, offer to
      sell, sell, import, and otherwise transfer the Work, where such
      license applies only to those patent claims licensable by such
      Contributor that are necessarily infringed by their Contribution(s)
      alone or by combination of their Contribution(s) with the Work to
      which such Contribution(s) was submitted. If You institute patent
      litigation against any entity (including a cross-claim or counterclaim
      in a lawsuit) alleging that the Work or a Contribution incorporated
      within the Work constitutes direct or contributory patent
      infringement, then any patent licenses granted to You under this
      License for that Work shall terminate as of the date such litigation
      is filed.

   4. Redistribution. You may reproduce and distribute copies of the Work
      or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You meet
      the following conditions:

      (a) You must give any other recipients of the Work or Derivative
          Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works that
          You distribute, all copyright, patent, trademark, and attribution
          notices from the Source form of the Work, excluding those notices
          that do not pertain to any part of the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one of
          the following places: within a NOTICE text file distributed as
          part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and do not
          modify the License. You may add Your own attribution notices
          within Derivative Works that You distribute, alongside or as an
          addendum to the NOTICE text from the Work, provided that such
          additional attribution notices cannot be construed as modifying
          the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions for
      use, reproduction, or distribution of Your modifications, or for any
      such Derivative Works as a whole, provided Your use, reproduction,
      and distribution of the Work otherwise complies with the conditions
      stated in this License.

   5. Submission of Contributions.
      Unless You explicitly state otherwise, any Contribution intentionally
      submitted for inclusion in the Work by You to the Licensor shall be
      under the terms and conditions of this License, without any additional
      terms or conditions. Notwithstanding the above, nothing herein shall
      supersede or modify the terms of any separate license agreement you
      may have executed with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or agreed
      to in writing, Licensor provides the Work (and each Contributor
      provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES
      OR CONDITIONS OF ANY KIND, either express or implied, including,
      without limitation, any warranties or conditions of TITLE,
      NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR
      PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS

   APPENDIX: How to apply the Apache License to your work.

      To apply the Apache License to your work, attach the following
      boilerplate notice, with the fields enclosed by brackets "[]"
      replaced with your own identifying information. (Don't include
      the brackets!)  The text should be enclosed in the appropriate
      comment syntax for the file format. We also recommend that a
      file or class name and description of purpose be included on the
      same "printed page" as the copyright notice for easier
      identification within third-party archives.

   Copyright 2007-2008 University Of Southern California

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
pegasus-wms_4.0.1+dfsg/contrib/0000755000175000017500000000000011757531666015503 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/contrib/dbschema/0000755000175000017500000000000011757531666017251 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/contrib/dbschema/Catalog-TC.emf0000755000175000017500000017212011757531137021617 0ustar ryngerynge
[binary EMF data omitted: an ER diagram of the transformation catalog
schema. The text records legible in the stream name the entities and
columns roughly as follows:
    tc_logicaltx   (id PK, namespace, name, version)
    tc_physicaltx  (id PK, resourceid, pfn, type, archid FK)
    tc_lfnpfnmap   (lfnid PK FK, pfnid PK FK)
    tc_lfnprofile  (namespace PK, name PK, value PK, lfnid PK FK)
    tc_pfnprofile  (namespace PK, name PK, value PK, pfnid PK FK)
    tc_sysinfo     (id PK, architecture, os, glibc, osversion)]
[binary data omitted: the remainder of Catalog-TC.emf, followed by three
further binary diagram files from contrib/dbschema/:
    Catalog-RC.vsd : Visio source of the replica catalog data model
    Catalog-RC.emf : EMF rendering of the replica catalog ER diagram;
                     legible entities: rc_lfn (id PK, lfn, pfn) and
                     rc_attr (id PK FK, name PK, value)
    Catalog-TC.vsd : Visio source of the transformation catalog data model]
pegasus-wms_4.0.1+dfsg/contrib/README0000755000175000017500000000126011757531137016356 0ustar ryngerynge
graph-build:
    builds a graph image of an ant build file.

exitcode:
    This contains two scripts, "fixdag" to insert post-processing
    scripts into a gencdag-generated DAG, and "parse-exitcode" to
    process the kickstart results to determine job failure.
    This is an old version!

showlog:
    help display results from a DAG run.

summary:
    summarizes a Condor DAGMan log file (or multiple ones), and
    prints some elementary statistics about job and site performance.
஀A(b@ߵQL3EWr+@/N/ %$/I/S2/o$F(a}~/e-U7+65?-???Q?Udl??>2G+Q LкtU}yZQ !^bBTbCTYTQ&8֥CUɑk^Y{S\l`zge_iY_iT[l3RVȄ2phqa̿O, b???O_?$O8 (NM֦{Y@ѮPKSin U1Sa+cZijc5u+eRvo#ugfiv%cv1o? Q5G U/ASe0hzax2Sb̏ލf 5SeS/ASeeSǟٟ%S"$6HrT&ϊf^ϊf ߰{\@ `k0Tүh46#%73QaTlQH÷FelQt[( k5Op]#)8ugfiZǝ 7qs q̚@@a@@P!M@"?չ nu7p`u3P`6@ [xo.a عj7+ ubufRu@`ue&Rzp]ѳ^ɲ) 1X7u.q=I`xGMЛ3qw%aa tYNRzs] .a wc?Q?$b9 ?Qc mcxt| *288?@H0 c&PZuAanptojno""`鱾b+2``b-r{R99G/R@@`&R3R2$-B3:7r4G 5T6a Z§O%ea:Z]aTSW&{/8VB;9*PE6P0P-IF<58c1P-p0A@CVD:4H2}Q eoY_x=7l_#_(a___o/>o?WE94P?-DT! 7bW7u9?YNeÕ[6?ȫ6??Y\?n<?\fOXВb!lnY eolpb3Q+rʸY Aq3 ϿAd\>_A x+Qu7U@U?_b_f7UIS?:ܿA6IU^(_q7UUUn \(o_t^6IU"N?^Z ?AUhNEOnhF4IUN4oFmho{ooO?a?s????"46Ho|._#o_Goo}ooofxϜҏ䏩,>Pbx֟@#Ǿi~%X؏ꏦѶӯ l>BN7(J8J\n|>N@@\a+@eL[ؿؼ6up`u:lV@?M_󯐃ϕ)_$6HZl~ߐߢߴӘQќSyn"yyϋ.Ϲ\ndT_x_=Oas'9K]o{9908R3261J32C0 64&?"4FXj|0?)M'-DT! /o0//f.!Hᡨ ./@//S?B?j_|____?G6Qx/ON,E%./WNqFr?/N6,E6K>OM? _9I/_A_/"/4/F/__ooOoo=oOaoOOOE_o_,_>_9K]oFX`N-`?;oMoui a)􏿏d8Jd~:SDp}t:QEͣU`՟ /ށ@@43O8bt݆hfo@܇F=Ocyӿ -?QXem9C (DV &8J\n߀ߒ?O"4FXj| {QAGD7BN4_4-5VF7TCCmAp1z'K@d~.}?贁N}>s>DEOO+O=OOO]/o/AΉ((?/% \9/z%Z 8:?%n?=^4/?S)??\OnOp__@?Oh_O9?O]????X_???__-___ ooooTofoxooo _ooOOky\__nvt%Xo< o1CUI+C~r̎]5̙ok-툀h_s"4hFXzvgo@ywWyo-ϟc(:L^pʯܯpP#\=şh󟖿vÿտ //Sϲ/wωϛϭϿ+=O@as߅ߗߩ߻{U98F7- n33BH80EC7 F1A cJ\n0"Fj?kCζ+;O4X|⯀///// A?a|,%Kb~hAʱHH /-9/K/.i/{/&8J\n/ ? OOS?Ow??//?T/f/x/=OOOʏsOOOOO__O__'_9_?wg_y_u??_:Nco.o_VtrOO@o~VaoOUU U UU!U%+,-U./01U2345U6789U:=>?U@ABCDEFt4.!@& X_C-x/ A@Ry9CRH<(ESy R\Py<xkD?dRy|.TDDSy.TU1( UO"D&aUAUNj )h"T} U+U- |ɉB&Q- -H*=(XiwEQ//,/feArial UncodeMiS6?/?`4 R$fSymbol$67fWingds*7 fEArial"z@D/ R$fSwimunS$fGPMingLU{a (  R$fGMS PGothic{a (  R$fGDotum|"{a (  R$fESylaen  $fEstrangeloU dsa@`9$fGVrinda{ (  R$fEShrut1i$&<fEM_angl$$%>fETungaH"@&>fGSendya{ (  R$fERavi"&5<fGDhenu|"{a (  R$fELath#&<fEGautmi &<fGCordia New{ (  R$fGMS Farsi{{ ( _ R$fGulim"{a (  R$fETimes NwRoanz@D$L`EB\`A.Bl`o%B|`5B`2B`=B`8CB`{9B̷`7Bܶ`=B`(8Bz`'Bz"Bz#Bz9B z"Bz'9B,z`&BBlz.8B|zfGBGuideTheDocPage-1"Gestur Fom aRow_1DBNotainRow_2DBCrowwsFotRow_3,DBHideiscrmnat o Row_4(DBHideAnotua insEntiyDBHidePKRowuDBwepndtDBHide_Typs"visDecrpton,DBHideVrtc_alLnsvisVerion.DBHideorzntalL nCo}nectrWhite 5ln"Arial centrd"Arial topl7efArial top"Times ]cntrdTimes top"Times _topl f TrackingTextDBShapeTy"DBEventTri5g eOldPinXOldPinYManulEditsHideSetPinXSetPinY"DBShapeVrsionRefShetIDRefContrl(Dynamic CoWnetr,DBHide}Rlatonsh p2DBHide}RlatonshpTx RefShapGuidIdentiwfyng DBCardinltyCrowsFotOptionalOvwerid*OvweridDBNoWta i nEndAgle(OvweridCowsF otDX1DY1DX3DY3BeginA7gl RITextefGuid$CardTextRfGui"Datbse Modl,Viso ExtenuddDaaWidthHeightSubhapeIDPKCount0DBHide_PKSprato 5Ln"DBHideTabLn1"DBHideTabLn2LineCoutLineHightRelationshp6GG3ykE3LykG3|ykG3v%G3ykE3yG3yk E3$yG3 zk;E3d0oK/G3o.G3E3G3dyG3G3P 2G3|=%G3b%G3yG3yG3"G  !"#$%&'()*+,-./0123456789:;<=>?@ABCDEFU&Ut4 !@& Mu_C-_\{k=7A%t4 x_ A-D@7AJ@XyKCR@Xy6RH<(H<(JEdYy REYy  R{N  g"4FxX,h(Hzr@(DO&E#sPv8QTF/s  s`.lswT}s ֔vsҨwV$Oy}$!})4PydG'h1SyYdUyO2Vy:?Wy*ODXyH%a!pegasus-wms_4.0.1+dfsg/contrib/README0000755000175000017500000000126011757531137016356 0ustar ryngeryngegraph-build: builds a graph image of an ant build file. exitcode: This contains two scripts, "fixdag" to insert post-processing scripts into a gencdag-generated DAG, and "parse-exitcode" to process the kickstart results to determine job failure. This is an old version! showlog: help display results from a DAG run summary: summarizes a Condor DAGMan log file (or multiple ones), and print some elementary statistics about job and site performance. 
gstar: A set of 'UNIX'-like utilities for the Grid/Grid3.

qq: A simple script to give a more comprehensive output from condor_q.
    This one is useful if you have grid jobs, but does not hurt with plain
    Condor jobs, either.

pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/0000755000175000017500000000000011757531666021074 5ustar ryngerynge
pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/README0000755000175000017500000000466411757531137021756 0ustar ryngerynge
Introduction
------------
Pegasus provides two command line utilities, pegasus-create-workflow-page
and pegasus-create-workflow-type-page, for generating a workflow gallery.

Prerequisite
------------
Both utilities depend on the pegasus-config file in the pegasus.home/bin
directory, so pegasus-create-workflow-page and pegasus-create-workflow-type-page
should be copied to the bin directory before running them. The tools also copy
the protovis JavaScript file and the PHP files from pegasus.home/lib/javascript,
and the image and CSS files from the /share/pegasus/plots/ directory.

Functionality
-------------
pegasus-create-workflow-page - Parses the given directory for workflow tar
files and generates a run directory for each workflow run. Each run directory
contains a workflow page with statistics and charts that visualize the run,
plus a workflow_info.txt file that is consumed by
pegasus-create-workflow-type-page. If the output option is given, the files
are generated in a gallery folder inside the given directory, into which the
tar files are copied.

pegasus-create-workflow-type-page - Parses the run directories created by
pegasus-create-workflow-page and generates an index file that links all the
workflow runs of a given type. pegasus-create-workflow-type-page looks for a
space-separated property file 'workflow_type.txt' that sets the name of the
workflow type, the type description, and the workflow type image. A sample
format is given below; a short parsing sketch follows the sample.

name Broadband
image broadband.jpg
desc info.txt
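The sketch below shows how such a space-separated property file can be read
into a dictionary. It is a minimal sketch modeled on the parse_property_file()
helper defined in pegasus-create-workflow-type-page further down; the helper
name and file name used here are only examples.

    # minimal sketch: read a space-separated key/value file such as
    # workflow_type.txt into a dict (modeled on parse_property_file below)
    def parse_type_file(file_name, separator=" "):
        my_config = {}
        try:
            my_file = open(file_name, 'r')
        except IOError:
            return my_config              # unreadable file -> empty dict
        for line in my_file:
            line = line.rstrip("\r\n")    # drop trailing newline
            k, v = line.split(separator, 1)
            my_config[k] = v.strip()      # first separator splits key/value
        my_file.close()
        return my_config

    props = parse_type_file("workflow_type.txt")
    # e.g. {'name': 'Broadband', 'image': 'broadband.jpg', 'desc': 'info.txt'}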
Gallery Directory Structure
---------------------------
pegasus-create-workflow-type-page generates an index.php file and the images
and css directories as output. These should be placed in the parent directory
where the gallery is set up. The generated pages also link to index.php and
help.php, the workflow gallery home page and the gallery information page,
which need to be placed two levels higher than the directory where the output
files are copied.

The workflow gallery structure should look like this (a small discovery
sketch follows the tree):

workflow_gallery [root directory]
    index.php
    help.php
    css/
    gallery/
        broadband/
            broadband.jpg
            gallery_header.php
            gallery_footer.php
            index.php
            info.txt
            workflow_type.txt
            css/
            images/
            run_1/
            run_2/
        cybershake/
            cybershake.jpg
            gallery_header.php
            gallery_footer.php
            index.php
            info.txt
            workflow_type.txt
            css/
            images/
            run_1/
            run_2/
    images/
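As a rough illustration of how the two utilities fit together, the sketch
below scans a workflow-type directory the same way
pegasus-create-workflow-type-page does: every subdirectory that holds a
workflow_info.txt (written there by pegasus-create-workflow-page) counts as
one gallery run. The directory names are hypothetical examples.

    import os

    # hypothetical layout, e.g. populated beforehand with:
    #   pegasus-create-workflow-page -o gallery/broadband /path/to/tar/files
    #   pegasus-create-workflow-type-page gallery/broadband
    type_dir = "gallery/broadband"

    runs = []
    for item in os.listdir(type_dir):
        run_dir = os.path.join(type_dir, item)
        # only real subdirectories can be run directories (cf. listFiles())
        if os.path.islink(run_dir) or not os.path.isdir(run_dir):
            continue
        if os.path.exists(os.path.join(run_dir, "workflow_info.txt")):
            runs.append(run_dir)          # one gallery entry per run

    print("found %d workflow runs" % len(runs))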
""" return header_str def create_footer(): footer_str = """ """ return footer_str def print_workflow_type_details(type_dir): html_content ="" props ={} workflow_type_file_name = "workflow_type.txt" if os.path.exists(os.path.join(type_dir,workflow_type_file_name)): if os.path.exists(os.path.join(type_dir,workflow_type_file_name)): props = parse_property_file(os.path.join(type_dir,workflow_type_file_name)," ") if props.has_key('name'): html_content += "

" + props['name'] + "

" if props.has_key('desc'): html_content += "

" + read_file(os.path.join(type_dir,props['desc'])) + "

" if props.has_key('image'): html_content += "" return html_content def setup(output_dir): dest_img_path = os.path.join(output_dir, "images/") utils.create_directory(dest_img_path) src_img_path = os.path.join(pegasus_share_dir , "plots/images/common/download.jpg") shutil.copy(src_img_path, dest_img_path) dest_css_path = os.path.join(output_dir, "css/") utils.create_directory(dest_css_path) src_css_path =os.path.join(pegasus_share_dir , "plots/css/default.css") shutil.copy(src_css_path, dest_css_path) plot_stats_utils.copy_files(pegasus_php_dir, output_dir ) return def create_workflow_type_page(type_dir , output_dir , log_level): setup(output_dir) workflow_dirs = listFiles(type_dir) file_name = os.path.join(output_dir, "index.php") html_content = create_header("") html_content += "
\n\
\n\ Home
\n\ Gallery Info\n\
\n\
\n\
\n\
\n" html_content += print_workflow_type_details(type_dir) workflow_info_file_name = "workflow_info.txt" workflow_run_count = 0 for workflow_dir in workflow_dirs: if os.path.exists(os.path.join(workflow_dir,workflow_info_file_name)): props = parse_property_file(os.path.join(workflow_dir,workflow_info_file_name) ,":") workflow_run_count +=1 html_content += "

Run "+ str(workflow_run_count) +"

" workflow_info = '' workflow_info += "
\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" if int(props['total_sub_wfs']) > 0 : workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="\n" workflow_info +="" workflow_info +="
Workflow runtime                   :
" + str(props['workflow_runtime']) + "
Cumulative workflow runtime        :
" + str(props['cumulative_workflow_runtime_dagman'])+ "
Total tasks                        :
" + str(props['total_tasks'])+ "
# tasks succeeded                  :
" + str(props['total_succeeded_tasks'])+ "
# tasks failed                     :
" + str(props['total_failed_tasks'])+ "
# tasks incomplete                 :
" + str(props['total_unsubmitted_tasks'])+ "
Total jobs                         :
" + str(props['total_jobs'])+ "
# jobs succeeded                   :
" + str(props['total_succeeded_jobs'])+ "
# jobs failed                      :
" + str(props['total_failed_jobs'])+ "
# jobs incomplete                  :
" + str(props['total_unsubmitted_jobs'])+ "
Total sub workflows                :
" + str(props['total_sub_wfs'])+ "
# sub workflows succeeded          :
" + str(props['total_succeeded_sub_wfs'])+ "
# sub workflows failed             :
" + str(props['total_failed_sub_wfs'])+ "
# sub workflows incomplete         :
" + str(props['total_unsubmitted_sub_wfs'])+ "
" html_content +=workflow_info html_content += "Download tar : Download" html_content += "
\n" html_content += "
\n" html_content += create_footer() write_to_file(file_name, html_content) def write_to_file(file_name , content): try: fh = open(file_name, "w") fh.write(content) except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() return def read_file(file_name): content ='' try: my_file = open(file_name, 'r') except: # Error opening file return content for line in my_file: content += line return content # ---------main---------------------------------------------------------------------------- def main(): # Configure command line option parser prog_usage = prog_base +" [options] WORKFLOW TYPE DIRECTORY" parser = optparse.OptionParser(usage=prog_usage) parser.add_option("-o", "--output", action = "store", dest = "output_dir", help = "writes the output to given directory.") parser.add_option("-l", "--loglevel", action = "store", dest = "log_level", help = "Log level. Valid levels are: debug,info,warning,error, Default is warning.") # Parse command line options (options, args) = parser.parse_args() logger.info(prog_base +" : initializing...") if len(args) < 1: parser.error("Please specify directory to look for workflow pages that are created by pegasus-create-workflow-page.") sys.exit(1) if len(args) > 1: parser.error("Invalid argument") sys.exit(1) type_dir = os.path.abspath(args[0]) # Copy options from the command line parser if options.output_dir is not None: output_dir = options.output_dir if not os.path.isdir(output_dir): logger.warning("Output directory doesn't exists. Creating directory... ") try: os.mkdir(output_dir) except: logger.error("Unable to create output directory."+output_dir) sys.exit(1) else: output_dir = type_dir if options.log_level == None: options.log_level = "warning" setup_logger(options.log_level) create_workflow_type_page(type_dir,output_dir , options.log_level) sys.exit(0) if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/index.php0000755000175000017500000001141111757531137022706 0ustar ryngerynge

Workflow Gallery

Workflow type Structure

CyberShake

The CyberShake workflow is used
by the Southern California Earthquake
Center to characterize
earthquake hazards in a region.
					

Galactic

The Galactic Plane workflow will use the 
Montage image mosaic engine to transform 
all the images in 17 sky surveys to a 
common pixel scale of 1 second of arc, 
where all the pixels are co-registered 
on the sky and represented in Galactic 
coordinates and the Cartesian projection. 
					

Periodogram

NASA's Infrared Processing and Analysis 
Center (IPAC) uses workflow technologies to 
process the large amount of data produced 
by the Kepler mission. IPAC has developed
a set of analysis codes to compute periodograms
from light curves. These periodograms reveal 
periodic signals in the light curves that arise 
from transiting planets and stellar variability.
					

Sipht

The SIPHT workflow, from the
bioinformatics project at Harvard,
is used to automate the search for
untranslated RNAs (sRNAs) for bacterial
replicons in the NCBI database.  
					

Broadband

The Broadband platform enables researchers to combine
long period (<1.0Hz) deterministic seismograms 
with high frequency (~10Hz) stochastic seismograms.
					

Epigenomics

The epigenomics workflow created
by the USC Epigenome Center
and the Pegasus Team is used to
automate various operations
in genome sequence processing.
					

LIGO

The LIGO workflow is used to generate and
analyze gravitational waveforms
from data collected during the
coalescing of compact binary systems. 
					

Montage

The Montage application created
by NASA/IPAC stitches together multiple
input images to create
custom mosaics of the sky.  
					

Proteomics

Scientists at OSU use Pegasus for 
mass-spectrometry-based proteomics. 
Proteomics workflows have been 
executed on local clusters and cloud resources. 
					

pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/pegasus-create-workflow-page0000755000175000017500000005151211757531137026511 0ustar ryngerynge
#!/usr/bin/env python

import os
import re
import sys
import logging
import optparse
import math
import tempfile
import commands
import shutil
import tarfile
import subprocess
from datetime import timedelta

# Initialize logging object
logger = logging.getLogger()
# Set default level to INFO
logger.setLevel(logging.INFO)

# use pegasus-config to get basic pegasus settings
bin_dir = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0])))
pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --python-dump"
config = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0]
exec config

# Insert this directory in our search path
os.sys.path.insert(0, pegasus_python_dir)
os.sys.path.insert(0, pegasus_python_externals_dir)

import Pegasus.common
from Pegasus.tools import utils
from Pegasus.plots_stats import utils as plot_stats_utils
from Pegasus.plots_stats.plots import populate
from Pegasus.plots_stats.plots import workflow_info
from Pegasus.plots_stats.plots import pegasus_time
from Pegasus.plots_stats.plots import pegasus_gantt
from Pegasus.plots_stats.plots import pegasus_host_over_time
from Pegasus.plots_stats.plots import pegasus_breakdown
from Pegasus.plots_stats.stats import workflow_stats
from netlogger.analysis.workflow.stampede_statistics import StampedeStatistics

# regular expressions
re_parse_property = re.compile(r'([^:= \t]+)\s*[:=]?\s*(.*)')

# Global variables
submit_dir = None
prefix = ""
no_dax = 0
no_dag = 0
monitord = True
MAX_GRAPH_LIMIT = 100
DEFAULT_OUTPUT_DIR = "gallery"
brainbase = 'braindump.txt'
dagman_extension = ".dagman.out"
prog_base = os.path.split(sys.argv[0])[1]  # Name of this program

pegasus_env_path = {
    'pegasus_bin_dir':        pegasus_bin_dir,
    'pegasus_conf_dir':       pegasus_conf_dir,
    'pegasus_java_dir':       pegasus_java_dir,
    'pegasus_perl_dir':       pegasus_perl_dir,
    'pegasus_python_dir':     pegasus_python_dir,
    'pegasus_php_dir':        pegasus_php_dir,
    'pegasus_javascript_dir': pegasus_javascript_dir,
    'pegasus_share_dir':      pegasus_share_dir
}


def setup_logger(level_str):
    level_str = level_str.lower()
    if level_str == "debug":
        logger.setLevel(logging.DEBUG)
    if level_str == "warning":
        logger.setLevel(logging.WARNING)
    if level_str == "error":
        logger.setLevel(logging.ERROR)
    if level_str == "info":
        logger.setLevel(logging.INFO)
    populate.setup_logger(level_str)
    return


def run_pegasus_monitord(dagman_out_file):
    monitord_path = os.path.join(pegasus_bin_dir, "pegasus-monitord")
    monitord_cmd = monitord_path
    monitord_cmd += " -r " + dagman_out_file
    logger.info("Executing command :\n" + monitord_cmd)
    status, output = commands.getstatusoutput(monitord_cmd)
    logger.info("Pegasus monitord log. " + output)
    if status == 0:
        logger.info("Finished executing command.")
        return 0
    else:
        logger.warn("Failed to run pegasus-monitord on workflow")
        logger.debug("%s: %d:%s" % (monitord_cmd, status, output))
        return None


def listFiles(dir):
    # walks the extracted tar file looking for braindump.txt and records
    # the directory holding it in the global submit_dir
    basedir = dir
    for file in os.listdir(dir):
        if os.path.isfile(os.path.join(basedir, file)):
            if file == "braindump.txt":
                global submit_dir
                submit_dir = basedir
                return
        if os.path.islink(os.path.join(basedir, file)):
            continue
        if os.path.isdir(os.path.join(basedir, file)):
            listFiles(os.path.join(basedir, file))


def untar_workflow(tar_file, output_dir):
    tar = tarfile.open(tar_file)
    tar.extractall(output_dir)
    tar.close()
    return


def delete_directory(dir_path):
    """
    Deletes a directory
    @param dir_path directory path
    @return Returns dir_path if deletion succeeds, None otherwise
    """
    try:
        logger.warning("Deleting directory... " + dir_path)
        shutil.rmtree(dir_path)
    except:
        logger.error("Unable to remove directory." + dir_path)
        return None
    return dir_path


def create_header(workflow_info):
    # page header markup plus the 'Home' link
    header_str = """
    """
    header_str += """
    Home
    """
    return header_str


def create_toc(workflow_info, isRoot=False):
    content = """
    Table of contents
    Workflow environment details
    Workflow execution details
    Job statistics
    Invocation statistics
    DAX graph
    DAG graph
    """
    if isRoot:
        content += """
        Invocation breakdown chart (Across workflow)
        Time chart (Across workflows)
        """
    else:
        content += """
        Invocation breakdown chart (Per workflow)
        """
    content += """
    Workflow execution gantt chart (Per workflow)
    Host over time chart (Per workflow)
    """
    if len(workflow_info.sub_wf_id_uuids) > 0:
        content += """
        Sub workflows
        """
    return content


def generate_dag_graph(wf_info, output_dir):
    logger.info("Generating dag graph for workflow " + wf_info.wf_uuid)
    dag_file_path = wf_info.dag_file_path
    if dag_file_path is not None:
        # convert the DAG file to a dot file
        dag2dot_file_path = os.path.join(pegasus_share_dir, "visualize/dag2dot")
        dot_file_path = os.path.join(output_dir, wf_info.wf_uuid + ".dot")
        dag_cmd = dag2dot_file_path
        dag_cmd += " --output " + dot_file_path
        dag_cmd += " " + dag_file_path
        logger.info("Executing command :\n" + dag_cmd)
        status, output = commands.getstatusoutput(dag_cmd)
        if status == 0:
            logger.info("Finished executing command.")
        else:
            logger.warn("Failed to generate dag graph for workflow " + wf_info.wf_uuid)
            logger.debug("%s: %d:%s" % (dag_cmd, status, output))
            return None
        # render the dot file to png
        png_file_path = os.path.join(output_dir, wf_info.wf_uuid + ".png")
        dot_png_cmd = utils.find_exec("dot")
        if dot_png_cmd is None:
            logger.warn("dot is not present. Unable to create chart in png format.")
            return None
        dot_png_cmd += " -Tpng -o" + png_file_path
        dot_png_cmd += " " + dot_file_path
        logger.info("Executing command :\n" + dot_png_cmd)
        status, output = commands.getstatusoutput(dot_png_cmd)
        if status == 0:
            logger.info("Finished executing command.")
            return status
        else:
            logger.warn("%s: %d:%s" % (dot_png_cmd, status, output))
    else:
        logger.warn("Unable to find the dag file for workflow " + wf_info.wf_uuid)
    return None


def generate_dax_graph(wf_info, output_dir):
    logger.info("Generating dax graph for workflow " + wf_info.wf_uuid)
    dax_file_path = wf_info.dax_file_path
    if dax_file_path is not None:
        dax2dot_file_path = os.path.join(pegasus_share_dir, "visualize/dax2dot")
        dot_file_path = os.path.join(output_dir, wf_info.wf_uuid + ".dot")
        dax_cmd = dax2dot_file_path
        dax_cmd += " --output " + dot_file_path
        dax_cmd += " " + dax_file_path
        logger.info("Executing command :\n" + dax_cmd)
        status, output = commands.getstatusoutput(dax_cmd)
        if status == 0:
            logger.info("Finished executing command.")
        else:
            logger.warn("Failed to generate dax graph for workflow " + wf_info.wf_uuid)
            logger.debug("%s: %d:%s" % (dax_cmd, status, output))
            return None
        # Find dot command
        dot_png_cmd = utils.find_exec("dot")
        if dot_png_cmd is None:
            logger.warn("dot is not present. Unable to create chart in png format.")
            return None  # bail out instead of appending to None
        png_file_path = os.path.join(output_dir, wf_info.wf_uuid + ".png")
        dot_png_cmd += " -Tpng -o" + png_file_path
        dot_png_cmd += " " + dot_file_path
        logger.info("Executing command :\n" + dot_png_cmd)
        status, output = commands.getstatusoutput(dot_png_cmd)
        if status == 0:
            logger.info("Finished executing command.")
            return status
        else:
            logger.warn("Failed to generate dax graph in png format for workflow " + wf_info.wf_uuid)
            logger.debug("%s: %d:%s" % (dot_png_cmd, status, output))
    else:
        logger.warn("Unable to find the dax file for workflow " + wf_info.wf_uuid)
    return None


def create_footer():
    # page footer markup
    footer_str = """
    """
    return footer_str


def setup_run_dir(output_dir):
    dest_img_path = os.path.join(output_dir, "images/")
    utils.create_directory(dest_img_path)
    src_img_path = os.path.join(pegasus_share_dir, "plots/images/common/not_available.jpg")
    shutil.copy(src_img_path, dest_img_path)
    src_img_path = os.path.join(pegasus_share_dir, "plots/images/common/download.jpg")
    shutil.copy(src_img_path, dest_img_path)
    dest_css_path = os.path.join(output_dir, "css/")
    utils.create_directory(dest_css_path)
    src_css_path = os.path.join(pegasus_share_dir, "plots/css/default.css")
    shutil.copy(src_css_path, dest_css_path)


def setup(output_dir):
    plot_stats_utils.copy_files(pegasus_php_dir, output_dir)


def create_workflow_page(tar_file_name, output_dir, log_level):
    setup_run_dir(output_dir)
    extract_output_dir = os.path.join(output_dir, "temp")
    logger.debug("Extracting the tar file to " + extract_output_dir)
    untar_workflow(os.path.join(output_dir, tar_file_name), extract_output_dir)
    listFiles(extract_output_dir)
    if submit_dir is None:
        logger.warning("Unable to find the submit dir ")
        sys.exit(1)
    config = utils.slurp_braindb(submit_dir)
    braindb = os.path.join(submit_dir, brainbase)
    if not config:
        logger.warning("Unable to parse braindump.txt " + submit_dir)
        delete_directory(extract_output_dir)
        sys.exit(1)
    dag_name = None
    if config.has_key('dag'):
        dag_name = config['dag']
    else:
        logger.warning("Unable to find the dag name in the braindump.txt ")
        delete_directory(extract_output_dir)
        sys.exit(1)
    dagman_out_file = os.path.join(submit_dir, dag_name) + dagman_extension
    if monitord:
        if run_pegasus_monitord(dagman_out_file) is None:
            logger.warning("Failed to execute monitord on the workflow")
            delete_directory(extract_output_dir)
            sys.exit(1)
    else:
        logger.info("Skipping pegasus monitord")
    populate.setup(submit_dir, None)
    dag_graph_output_dir = os.path.join(output_dir, "dag_graph")
    dax_graph_output_dir = os.path.join(output_dir, "dax_graph")
    utils.create_directory(dag_graph_output_dir)
    utils.create_directory(dax_graph_output_dir)
    pegasus_gantt.setup(submit_dir, output_dir, pegasus_env_path, log_level)
    pegasus_host_over_time.setup(submit_dir, output_dir, pegasus_env_path, log_level)
    pegasus_breakdown.setup(submit_dir, output_dir, pegasus_env_path, log_level)
    pegasus_time.setup(submit_dir, output_dir, pegasus_env_path, log_level)
    top_level_wf_uuid = None
    workflow_run_time = 0
    workflow_cpu_time = 0
    total_jobs = 0
    succeeded_jobs = 0
    failed_jobs = 0
    unsubmitted_jobs = 0
    unknown_jobs = 0
    total_succeeded_tasks = 0
    total_failed_tasks = 0
    wf_uuid_list = populate.get_workflows_uuid()
    isRootWF = True
    for wf_uuid in wf_uuid_list:
        logger.debug("Populating the workflow information... " + wf_uuid)
        st_stats, wf_info = populate.populate_chart(wf_uuid)
        populate.populate_job_instance_details(st_stats, wf_info)
        populate.populate_job_details(st_stats, wf_info)
        populate.populate_task_details(st_stats, wf_info)
        populate.populate_time_details(st_stats, wf_info)
        title = str(wf_uuid) + " (" + str(wf_info.dax_label) + ")"
        if wf_info.parent_wf_uuid is None:
            top_level_wf_uuid = wf_uuid
        html_content = create_header(wf_info)
        html_content += create_toc(wf_info, isRootWF)
        html_content += "Workflow environment details ( Download tar: Download)\n"
        html_content += plot_stats_utils.print_property_table(wf_info.wf_env, False, ":")
        html_content += "Workflow execution details\n"
        html_content += workflow_stats.print_individual_workflow_stats(st_stats, title)
        html_content += "Job statistics\n"
        html_content += workflow_stats.print_individual_wf_job_stats(st_stats, title)
        html_content += "Invocation statistics\n"
        html_content += workflow_stats.print_wf_transformation_stats(st_stats, title)
        html_content += "DAX graph\n"
        # dax also compares against the total non sub workflow jobs instead of
        # tasks. No task information available
        if no_dax or wf_info.total_tasks > MAX_GRAPH_LIMIT:
            html_content += "\n"
        else:
            if generate_dax_graph(wf_info, dax_graph_output_dir) is None:
                html_content += "\n"
            else:
                image = "dax_graph/" + wf_info.wf_uuid + ".png"
                html_content += "\n"
        html_content += "DAG graph\n"
        if no_dag or wf_info.total_jobs > MAX_GRAPH_LIMIT:
            html_content += "\n"
        else:
            if generate_dag_graph(wf_info, dag_graph_output_dir) is None:
                html_content += "\n"
            else:
                image = "dag_graph/" + wf_info.wf_uuid + ".png"
                html_content += "\n"
        if top_level_wf_uuid == wf_uuid:
            top_level_stats, top_level_info = populate.populate_chart(top_level_wf_uuid, True)
            populate.populate_transformation_details(top_level_stats, top_level_info)
            logger.debug("Generating the invocation breakdown chart... ")
            html_content += "Invocation breakdown chart\n"
            html_content += pegasus_breakdown.create_breakdown_plot(top_level_info, output_dir)
            populate.populate_time_details(top_level_stats, top_level_info)
            logger.debug("Generating the time chart... ")
            html_content += "Time chart\n"
            html_content += pegasus_time.create_time_plot(top_level_info, output_dir)
        else:
            populate.populate_transformation_details(st_stats, wf_info)
            logger.debug("Generating the invocation breakdown chart... ")
            html_content += "Invocation breakdown chart\n"
            html_content += pegasus_breakdown.create_breakdown_plot(wf_info, output_dir)
        logger.debug("Generating the workflow execution gantt chart... ")
        html_content += "Workflow execution gantt chart\n"
        html_content += pegasus_gantt.create_gantt_plot(wf_info, output_dir, "php")
        logger.debug("Generating the host over time chart... ")
        html_content += "Host over time chart\n"
        html_content += pegasus_host_over_time.create_host_plot(wf_info, output_dir, "php")
        if len(wf_info.sub_wf_id_uuids) > 0:
            html_content += "Sub workflows\n"
            html_content += plot_stats_utils.print_sub_wf_links(wf_info.sub_wf_id_uuids, "php")
        html_content += create_footer()
        file_name = os.path.join(output_dir, wf_info.wf_uuid + ".php")
        write_to_file(file_name, html_content)
        st_stats.close()
        isRootWF = False
    workflow_content = "tar_file: " + tar_file_name
    workflow_content += "\nwf_uuid: " + str(top_level_wf_uuid) + "\n"
    root_st_stats, root_wf_info = populate.populate_chart(wf_uuid_list[0], True)
    workflow_content += workflow_stats.print_workflow_summary(root_st_stats)
    root_st_stats.close()
    file_name = os.path.join(output_dir, "workflow_info.txt")
    write_to_file(file_name, workflow_content)
    delete_directory(extract_output_dir)


def write_to_file(file_name, content):
    try:
        fh = open(file_name, "w")
        fh.write(content)
    except IOError:
        logger.error("Unable to write to file " + file_name)
        sys.exit(1)
    else:
        fh.close()
    return


def get_next_file_name(dir_path, base):
    """
    Utility method to return the next directory path name
    @param dir_path directory path
    @param base the count to start looking for directory path
    """
    while base < sys.maxint:
        dest_dir = dir_path + str(base)
        base += 1
        if not os.path.isdir(dest_dir):
            return dest_dir, base
    raise OverflowError("Directory path out of range.")


# ---------main----------------------------------------------------------------------------
def main():
    # Configure command line option parser
    prog_usage = prog_base + " [options] TAR DIRECTORY"
    parser = optparse.OptionParser(usage=prog_usage)
    parser.add_option("-o", "--output", action="store", dest="output_dir",
                      help="writes the output to given directory.")
    parser.add_option("-p", "--prefix", action="store", dest="prefix",
                      help="Adds prefix to the workflow page directory.")
    parser.add_option("-s", "--skip-monitord", action="store_const", const=1, dest="no_monitord",
                      help="if set pegasus monitord won't be run on the workflow.")
    parser.add_option("-d", "--nodag", action="store_const", const=1, dest="no_dag",
                      help="if set dag chart would not be created")
    parser.add_option("-D", "--nodax", action="store_const", const=1, dest="no_dax",
                      help="if set dax chart would not be created")
    parser.add_option("-l", "--loglevel", action="store", dest="log_level",
                      help="Log level. Valid levels are: debug,info,warning,error. Default is warning.")

    # Parse command line options
    (options, args) = parser.parse_args()
    logger.info(prog_base + " : initializing...")
    if len(args) < 1:
        parser.error("Please specify the directory to look for workflow tar files.")
        sys.exit(1)
    if len(args) > 1:
        parser.error("Invalid argument")
        sys.exit(1)
    tar_dir = os.path.abspath(args[0])

    # Copy options from the command line parser
    if options.log_level == None:
        options.log_level = "info"
    global prefix
    global no_dag
    global no_dax
    global monitord
    if options.prefix is not None:
        prefix = options.prefix
    if options.no_monitord is not None:
        monitord = False
    if options.no_dax is not None:
        no_dax = options.no_dax
    if options.no_dag is not None:
        no_dag = options.no_dag
    setup_logger(options.log_level)
    if options.output_dir is not None:
        output_dir = options.output_dir
        utils.create_directory(output_dir)
    else:
        output_dir = os.path.join(tar_dir, DEFAULT_OUTPUT_DIR)
        utils.create_directory(output_dir)
    tarCount = 0
    base = 1
    logger.info("PEGASUS SHARE DIR is %s" % (pegasus_share_dir))
    logger.info("PEGASUS PHP DIR is %s" % (pegasus_php_dir))
    logger.info("PEGASUS PYTHON LIB DIR is %s" % (pegasus_python_dir))
    logger.info("PEGASUS PYTHON EXTERNALS LIB DIR is %s" % (pegasus_python_externals_dir))
    setup(output_dir)
    for tar_file_name in os.listdir(tar_dir):
        if os.path.isfile(os.path.join(tar_dir, tar_file_name)):
            if tarfile.is_tarfile(os.path.join(tar_dir, tar_file_name)):
                tarCount = tarCount + 1
                run_dir, base = get_next_file_name(os.path.join(output_dir, prefix + "run_"), base)
                utils.create_directory(run_dir)
                shutil.copy(os.path.join(tar_dir, tar_file_name), run_dir)
                create_workflow_page(tar_file_name, run_dir, options.log_level)
            else:
                logger.debug("Skipping ..." + tar_file_name)
    print "Successfully generated " + str(tarCount) + " workflow pages"
    sys.exit(0)


if __name__ == '__main__':
    main()

pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/0000755000175000017500000000000011757531666022341 5ustar ryngerynge
pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/broadband.jpg0000644000175000017500000112173011757531137024755 0ustar ryngerynge
[binary JPEG image data omitted]
39G\Xtϫ32z42DŽ]Tb(|!}udk'=h~ iv7m~YgĖLjU)hZᕣ^|:Y/:%^u)ms:5g p1kƈ(i4}xQn_E9Ip]3T#0 74Q@Q@Q@Q@|3+oEqTG[8_>Ҿ%||iAsGyyq]՜$N:nC]7⽏DapⱷuF_rn>Q|AAu]/7CeRmۥCr F(!eR,n+ggii~kocak mmo8cPQTp,EQEQEQEG&^мAn?R_X] p_AWϾ8'+?~ZޑR_I%+mRiWXw((x.'YhT + `x A|&?ٺw𖟫xu/S6/qf3$#7 /PF~*VO j~jVsaXγAs !kͺSƺzptB]fX`c/tbP, ڟu;O^3i|1|8O<~45F;ؾ{o`qھ((O;cx;ZtZ{GrWr00QԆV][ ;o7jn8 |S18<@y߇zёthm̑h={:ܤrt{x](((: 2#׆j[~[yij6=FmwՒ;p)8P?~^ %sKOL^ӣ`-'/<5 ~#|3 iۺ1ovkCwQ[F~<+ſoT>(xN֕zw7O kSL7a躍_[ŕ41:I#u%YCA0࠿oߏ3<[..5(c'IvG|2s㟲'O|+_~,|6sxwZcDcqf憄lViAY9;qbF|y v5~ۿa}_5-SM JKPϽKf/AVgV]N2g/|yYx?z|:wuGL76b%Z^%źێFNOa=_uk=&y05xVf_o?~U?잛km-^,%V<$i+)s+ }~/zK‡jgg;IBIzͿ(O3{?7jgHJ oѝG̮=H,aam>~#DŽXb [n^-gIO"?Ț+0q~keNju3u1im^?4fp7nAc׿AI2RN\[Itw#[CUhȚklm)'S(u 2Goyyqfb'U%@mM fMF > v~z=Ҋ Ṵh CG$n\-~jŠ(_7|`VcgAEMDH8Ei E~ޭa4)=v5VRѯ4B  y4rU= XVW]]eX=i{y+_u߆9iotwO7J,yrm~tQE+w<%mZR0pfo5mn#ڹעCZ7~xkƞ7ziȄk NJ珁>/|lkWN =;t[(0>(4Owxuo1?E xA  _<#+O4 Yx'Inh*"Ο Ga} EPEPEPEPEPEPEP_6x_~ WnR`^cHfRV?>$>im<{/t y} EJO`QEQEQEr>߈|o+H3~duy4A;MqxkpIKWcWc'" c+$n`_ .ջǩxH[($[:ЏQE'Y|5DW^J_kwNcbG.Fq$9#+ڗh>,[63ۑϳF{/,N ^R{&rw<Vs|xzj>".5 r|PDA^b򏎟k?1u4Z_n%@ kfcTnU$w})׬{E|'+ǚ?|EOxK.?pSJ`,ڸÍep)R+ǷZ?#L[4s3h/}$lyl6+|*:%ě ~ںP|!?śiy">\$̄ O3?.ž\׋d0ZN1mY]J*un[,:䯦.&?\]]|4O]D-Bڳu[<.ߓ f=.oJTsceTum%,35Oeskϕ{qOI-ݕ˧r:-_oϷS:h>Ffcg'ٶ)ML0q ~rK[:%w S]*}&;ExUcinHez_Zx2|:/evJ[mI4&DB`ؒ9mQfby|uܹ~ޔos5EkwdާcT~)[w]7]5?C-'Rbؖ_ mb<_{}boW,~@mb3@2)fc5=1 X| ;+mR1iLLbn TȌ@pY]IP꭪+]7S4{-wSoG|>~)yVM5PhMqom;-D*-"3_I Ц]1$㙭#+:YvsIT,p7`W0Te%YQJV嶎\fԓz[{>'E+nwMW}jè7zmG1o)G_v_u/nGM'"HVTeQ2~Uo?-{Q@Q@Q@Q@Q@x:IXkj{du<A|?xD_5_ xJ4 Nu >,718# lQ_1x{i_>!j-N,}Bc,2Jܙ [ݱH|q\Ah_i)|Ko]Bݫ<%2 q@^ L|Wï\\?<Ǩ_|j>{;vpJ!+B x'H߇]#BҬӬmlVD#eUPZ(((+x4"7|zݡT|‘Ĭ O0d!VYf/~$j jM(<5/6V{IFR)l$3| ;kxO:ji-bdi%GbM4K4+ĒMC?gk? R4]Yiogsq3O##Ēk(((w־.5fீ6gK9[񶢬$Srjr Bp>!Q=O\^^k: U5ϕkn4YH摑Aa4;oIo3x^v&pUN'*HY|Z%ڃCP[R5<,YϫD0.>[έQFUJ@+J ?秄/c+k ((((⯎i=xƦɵK>fJ 5Đľ o <]X hgK,7Z6Õoya_EW|!$ go xNu]r^^Ԁs#]{y2&;zMQEQEQE񔠼 晀&+HK 4k? C7T7ΥOI{Pp $EbR_o2uůشw oSy眹"h6 gZ /%(IBj% @,(8nmTeu#!G ( ( (>}[?57%WҮ9wk恦k^:[inw+2߅w>_ź~>6ڍG#ެ=GLtT%dIњ9Wnqyį1xW|_bO|Uah?$>Y;ud|1?[{bs+fQkо??wOof3^wr]Jyy[,}@_7?˥i7ƭ}t{n]\8;UUT#ETEQT|cc^'=i}Qō((((3y` |rlV3q$G@9d_>Ə nH&aLĬ~G#5itn1ڔMsk[|KFU=]Fi{dyHřI/x^-Ѽcua/n]iE#gEF5YE{4U䕯{;3;os+\>%^Fr_G{p1MFI\2F*dG6/tm0KaPmf^m.Pd 7ʝV^\[Y= ^YE| Yx[-ơo>x"ǥK@.q_{;}@OK  U#S}-g 4eᯇT'{I?O+$EϦƙpŠwip}r8Kû/i[@Qoq{~~ξ<7!cmVCn ERGbs"d/3Yf]·+z}/џImEWIAEPοW?ڿgŋsxǢr3isE~V=)*⯁9vt$6W>k7[ܯqLʿ -ZaoiV3^Q#H*@P|}}xRC{bЁM[jtpzCrk?gPѿc?되|Sg/UrK} {}QEQE|oO3'«<{}T?*OQ)R^}@Q@Q@yůZ kXxwHMNE<΁{פC.[>Ł;_[M{R=Ѿx|5fZioA2m}SRa̚I/(G(*)kkb KH#KE&V`>Rɦ73>FrXeܟeCK^yBd<žtޙp}F<ȳ){Л]c֑nP_wXqUT@:0#3ƎrҿV^n8ik?ٟfяam{GV?~Q QmBqsMǁ[qQ$}ÿ ^7"Aq' I+iZ5geMJ.d4k~s p%- 5ߌ54]-YxERTO&` ; `L?d9{t`pv<Ȍ=AS]{R^M!5m*g<9dh3♻,v_%d.w"uF"C*nǫ~Z>T ?k;S#V^m~T* ]E SNp^ E|!m25t%^DnX\[^qEiJ0 ^MKIvo&RޭOx/_Ԯ'z}$OQ__>Ѽ14}Ot~Q1<Ml_Ҵz/%e}>,~EWQEQEW|dZq_w[CK@EPEPEPEPEPEP|W| ;5gix=&Q[}o!aX$D3;(^HA?yGehE#|1\99hUu38lM"/gt,/Kk~&@8=X9 ԑ{F"j6cS{]COI N0z(fÞ"~xIqV~jIgoj&"5IOY-MEPEP_6>%VYV46sk㿈67S1dH8y0RiJ"'Gǚoz2s$r)ĺ^r2ܪ"/w<;zO%Y> Xڦ%K31,KI&!w5_|#)mm-%ؖGb9,I$(((;>?ogrn[߈4煊5gcc}oV&h02ݨo-C-a`T&Eoh:O>?&]hƿ{l!ңoZeАZil``k?_AXUNa]畮k- v]ij`E9=;Ahz oc[,(¤q @+N( a?禆Nk ?QEQEQE󯌿ܾ~þ_RK =z쎪a^~KZ >K۩lpEwcUA$ /K.>|Cg=$jk{ĶvGZ0+TùXQEQEQEWǾkQo6v"w\8--<2ƈ:$t>ԵoPtm>KDPCy O |^|pyf?+ Dc7Q7+yqnrY&TQ/A-W⯈a5ҭ8y}ąܨ ha(T)$r(eu#x %mZuid6E~\ěl=_O܎DlIgl}!^3:߉< x dEFI_p1(((5_gZr +7;~>d=POz??L𵝞V}@(R.*.-%Y4r!d]>&x_Od׉0>/DR_*YH+񟂼/ ֡Oiևxu`C*0WI(e kôl<W>K_ |KUV݂cb;)R}5EPEPEPh?~ $wME+=+9Đ 9RXrGJx^Ԥ>F~+DFD"\pӠp>B7W`ǡBx-`$/m&ߵi7/ ESG >YV+{ WM+7Q݇I".csxeWd#|R< .|Ko_ 7ג(I|"NA *|R}k#>-|SsH42Ą u^braH". 
|8w^)r/Uk{B q(G Erj|-d| e"Շ'xmn"̰ 3I$X(xQPx]u|B"fBCJ; tflu5=|D~ o~I~ [R"ek-SwGkq%c<`=g##EQEQEy&?<mnLo־巇[?}EPEPEPEP\ xw;V}61ۓnA5Q\ ]Q8KF"*SH]3~-%6&%7J=;[_>jB\/9X-6U^S~#x bd灪]/3Zw'Yz-֟yopTm$a\r;zt&R? 5g¶vR`m 8eNn=EmSx .l<;mi4d-NwOҼ ]R]l]Eg5˄J :Z'tĞ)-$^Fv'eeQm#‚eNKs-r 6^7OEK+Hs|AyW3F78s{L4ugFWWrTɳ{, +qw<򯈼]xPA$8s`AkooK`yqؖ}`$I^wPKbx{oQH<74IC2nDP<\j50S |PqUGq_CI~x-YPwtMi"\,uP:F!]@WI5KKSWۖ??WcH&z4᎜P/{t l!N#/}^-;5RSK+_r?;Y*N$ #k=N~~٩op5-;ž^Ǜoj!7I_m}Af/[&waJ+Z?g xRUmj25xÜGqh5=Wye>`(G~m[gapp +:B(zWY}Jƚ"TGԋ+ٛIk翈O֎LѵݛLhJ( ? ^/[|SK#wP)Ң9} +cٗKQ#1"|UW)i_~ xzqHԮr14Twfۀ:@h'ƍd> qȧ pe }^G+e $ Kאڭy{Dȫ芣r ( ( ( ( ( ( faf ⫍?»Tյ m1[l%?r⯇z~\*F0JuPEPEPyrJO@O_@W o_~DoQEQE|PկO^8R}.ĽĚӑi(f%ïD5k?NeԼO U/g4>?v08Q\'m~þ 2~Uo?-{Q@Q@Q@Q@Q@Q@Q@xO8;ʭ%''U?ĴQEQEQEQEQE}<.9%2MqJzm>#ZĞ|_=[(lujmi !XXSҡ+.J:8 ?4q6緋Uu< _n^^3h Zxq$aJan>l@vGT feYKeL_}A3vQCh|-X[&=v=^FbYgbY$騢 ( (>\LԭW?k״(j}@Q@Q@Q@:|'7>lKy3Wܨ+=/gmsV1FiZ.4먝e43$r;PqEyj5cwc->$bOX Ɨ*<{+ϗ:(((`3QG/ʋ٤ѵFuAU|c$ߵW&<5y' '\>1Ҁ>((( {2dW8TE&+bdRj9%>T3M'{mcĖx_K>wZZd,j^קXZiZvom }E =P( izfZu}"lo Yԫ+A`W~m>1KS.Mj!wO,5mUү5=2epI  \0yxN{⟄=߉~>"mf`KK{ +KIa-!e+|1Zo<kBvA*4r#Wtee` tQEQEyğ'?j_jڕZKj@|Kd$vı!vTF`O?~ΏkxGI(mY!/Ƌ+X){¯׾{ߋR|+n$.Mj^mW%_ܳ=޳.Kg>ᘃ^@|E~}7l/]g_@W>ܯ2еiA'6ߝ}_<|o7?~/k!v4wL۰<2\8w[U⤟,ssj/}BYoQa}EPEPEPEPEPEPEP_??i?X|U>ѪAfƾ,?`ωwΥ;P|irǨăݞ@4U[]KElK+kySX{ATQEQE/ ?hKCuWQETS H^Yd`KxsR}ƣ7Me4ĭv‘.VH."ʲi_:'#G:C6?RI uHxtoECo+'yRI-%b@(ª((((((''U?ĵ?d*=((((((('WeVP׿׀|dZ( ( ( ( (2 |#SxRJl:>V3c4 rxC@k/ kR>&J>$Ka̢q@ ohZk&S>}[S;.H-2Dq$qzUQEQMwHi$eDPK3RMbz<_:MN \ (+w Lb'i? kss5sc_AW#pxnvI?M6Bs퇠Ҋ,ǟ]n=>_$FҫYO5QEQE~ρŗ$ԣe3/4oR+:J yCx'nݹn庻Ob">`(((|5'~"_/MOi~- [Z!@T#%xoo5= gzgtk$AWFtec~k~0XO>Ao&K-bIaaB-^&upik__״|P#V/fK!. )>] "kxƾyT5(GJ#(i4UA$ ~|3Gf2JvVp5+[l6ЏYru''<@?ovoh'ug$j"(OLoOm?QK"d]%dCd} @p>~jHYѵ, oy< #pr0*̧<-O<5;KM⧄ŚuD.컡ɴANJ1 oh؛xnt'%K61rj\u{^3nXm6 Ka-HEQEW9= ?ϯ|zu띭?N>((((((((F}&F}&>((((((({?_t躮*ql5k ¾g_|:⯊p~uv3pO_@P_?xlLi5^v{61ʾ(M||h-s^jsݣ@ilWQl|gu-̰=ޭ|,4? ?g_x;f༿q]ۖfViy?m OD: 0kI-(((((((*ݥuc{wVw14S"dF2HP~1Z7'ּ yuW͙`s5[\L{|@:'t:"jG kLDSFŔw3qQEQEѿ z ybߛ~;4ٶʭeSM##+^pY(ᶎcHB"P-{;;xmm cP"F08*j( /xg3_iZuKf|SA#@ ljAcn}+T[?xP|#q`n.@ N?}y'Ə|0Ѿ ^o+iK6eamkk2M/T)`LʑU,Nm_|Tծ=>&?S3ht@; 6t2' Gq? > N)7ټj);s_S}>W"$:M%ĸ5ggiV66,V8U@yOé]Q|Y2nqsjf;!$1Dh((((((i^g GnO }_?7 |G?_@PEPEPg+ޱhzdo̷,vA)$}K:ֹ^ :hoƺk>,foekk$cc5(JY4MM/Qʘe)?uSrَ;TQEQEAuume^AigoK+vA\J#i!R(Ai%weDAgvUPIo|ExWcR:,)\YIu*#Rc ksⶓs]:ᮍsOYW~3ةeF,lg#|(((((;w_do~%MGFF ū[(.a\&~U;i(t/|,1z5 Z}KG"3S Et_~׷~#\cZn4*$E R ( (>}_Cn$u!yiOsfͰ߇?%h }ԟC|IvM1-.~(+O߳|{~7Zpǂ<~!(g]1gĝQӌz,%e<|_Aז߀c^K>xj RF4JݍzQEQEQEQYz޷kڧ|AjZ(-E,; I4<-{x>&ƭ7_Gp!X-5$vN=NKMB!$70ȥ$UG :+O:֭|y\Y~RZo=<;e'+TiP8QU>OM}@|:Ǐrܿ"뚊?m2k۰"⯌̞iuD2/`a`w2Πp"b#袊(?KnO[oo5k;@/4?~_῅,|?O@r[w?Mdf( 5+_:&s--|'MĐϪE!pgxLlto i>mcmc QTAW|I嶗6ME?[gRUͱ?k:(+h_~8x~'gT/|V $VGk)SGU5gf/ tE?RWiYt Mu#Ig4oo#A.܎'T~p\cGK^%~3l-1.޹1c`'ZOwiog-ռws44I(Ln*H8OT?_ڸO?7/xOgf/ siC#7цHnqJk_k:VHRNKX: Z>CP1;0?3gIQ62Tq#T`/m5t76,3) nPQj?8tE?RQ?'J*ޥX^څơpm"uGFdr>ڌzhg1?'J?3gI]Ma_Y,eK+$k1(䞕z&sZ&ۅW8;2G$:*;9tE?RQ?'JӾo4iھiV$q^j1Ecv`[Rsڬ]]Ѭ@aaq{wXN9 S{rjcm~vbgf/ tE?RWK{uޝXXO\;yQ Fڹ;Q0 _t? 
/gf/ tE?RWOii7^cqk]ؒyr~G"6Ӄ"M4kC4}FVnvs u$GS?qo:_")+f?[Ѣ |@]/K5kkj}帲57Nrկ-MUQ_ןaӤU=^!Ƥ(SB'WeVP׿׀|dZ( ( ( ~ Emw],4A[kXˋٝ8-Oi^8OwQ޻*/gp~gNK},uZ"{GYNI l;?ּ1[>94cx5odVl  ,(xVIH +( (cX|=MK^׵+Eu hQK<@UP $(mGQVկ.//.XUI5 Lψ,tUGnhnoc_ ͫ|BS*wGk (*)k #DP2XrMK_=~>1M,ZĭSrmFmZlW6w 'өC,zĭS[OI Ilo&-Coomk k0āR5QTQEWz_Q-3| Π`zMΏ+8XGې [ {]Pwn5MiyzR6V A!{fy(|?i/@|ƚ>$#\w͍⾃-:]};G}EPEPEPE$(Y]"@{ Lѯ5-JO yehݘ$p|Ǣ^3&xrM_m/ͼ˩=Hs(foag2ϯZOe3's\Ǩ ( ( (?8?j&n~ xKf] Ɠ,mFbrItE?RV7Ÿ|ueWjkZMW:Εʯyݥ*p'50](^eX3z7~3?7/xOOSQj˪J"8X7#S^ưjMwGrgJk:; VSGK^%~3N/48rY5<sgyqo OoiOu,z,Fy܆¨< O _e1io:_")(GK^%hM/[x\xpxb`-^MNqڸÓYEmoW|-8S+g c+1m'ɥL?(gf/ tE?RV9]iPj 4IzdwW׮q30=E\u]/GWԬ4ynb[˅idX weE^)3gf/ tE?RWiTmM2Y4MFPd[Xn.H )2G"Tڸg?7/xOgf/ { KNl -J;muXh$GFF^AU>CAk{I?j~|It xwUt.{I.nM2ېXd5) dpE}_;i%YmITBRwmQ\i*?cѫ w}6?:>S_@PϟWN<|N*K[iNzFPQ\G7 k=Kf?J:XH+fYxô&>(((((((((s x&Eu FybXђ{~ۼʒc1(??_;o %ƉXwjeMar!$F:U uUgHAc^ ěgx] V\c|+:(WJտ`kVVceGzkҵ:O;Ol⻵ȁ soޟ=I=Ѵr*b xOq9~hrڇf˙t ZГQExwC)o.5Kţ,2=#Z }ſ5D7M:ѿKcv(_Y~JCnt>1vw&;oGw[MZ725k'ޕ5pjI ǿ 0TT}UnI[2?kkc!ki;ϋY\[Dikrxm##nVs\^<;;֑+?k$)ko$ 9(HMJ*33J! :5݅:eA2{|]Hz]qt]宋S7^*fe @.5Ynmu4B#qہkO?F+࿂& \jڗ0.wɻU1i]t}{Õ4|'e|6>[XFi3k6{s7.$Ӽ̒ru66Ǎ}_^MxLoxզgwvB{ 1I4w]Ȧ$q4FI/>ibbzjZw#_'k _xZ++i-lnb J$V ÐSr ~2|g7.xZ(@EَOLM-udXː lw[_E%a%D"W[‚HG<ΙXh5ZbVvrbI5)$NUR鵵t?=d}OE{OϋtO hE缚!k_<%BFenAo$cG]K^%^_3k_յ[W?%y2aMYD {i1MFY>HdI4,,I !50yb@95*n?oMzZɘ B)eoŻo4~{&4Z|\o$ZW5ji y#>@ehK'YX#e՞x|?ooe{=9[# )?8U2*8V<3xῈ)A$>ͧj) )$h7PS9)Emwn3 q+y٭o'ӵ_:^'bU[PK-bIdWEy$wIdRAW[HV8*znk5҄W$ósJO.Uͳ(|P''U?ĵ?d*=((+> |)=k#m}IRmAF>A?y7ƯzndyeҮbFjΧ3Ò=/|@_,͎%LܪY13Y$^MWֱq\2Q.]%f#&mqp:XcǿEoռU^Alש'I I&¾ |I]VB4W5 لP[D,ǀ󶏠kW!R>!1Oʌ-SUUq|<ݑ.|p n}|+!м)KĞ'tN7+c ܚد1a돇 4#+Fs:~2jYmPb 9&e~%|wׄ_?Okco? W:}y|qEO^K׀3AѢ1Cy\Iys<3;by:_ko5'h[o'M.̳ e#iAWEQEQE w5 H|;[/n)*0 V'¾*GϊZRZxϗs4I "n``OkW|;㏇zYY-7+=VVRX tW~W|E>|S5 w>ljOocU0e(((((5WE^Gk ͫ tvZ ( \Wy?}hLT{`=5R_:?-e[VUVK"cI?^~| 4f4ح2:'G-#{iEP^7g.ᑢ'vzſxv&$wNV:GhRU#IeBXn.cд;Z" ;u$A% #Dfoz]Y?Kk"akfmJR~.)X<=Hz݆l >mvs(>Cq>/?<Sh ~شmJbY5GS"@RIOjaIӼk &o^uh>h=PTQEQEQE^*t٥t.[arW1W`ViG, JLBS%Z [/4߃ZUS4Rkӡ+.a*DJAK?/g2?JnxvH,,&-!X*FUP`@ [(mH#P0@8TQ@Q@Q@|?)eW7-u Ó2k;s%1 i!aC&'`5E}~5w1hGv̟xš~kN^/%$ #q0UYHe` kx_|Y|MöG. pH71(.0J&0yi>&XC{@׵sC6ѣO+\壕1د|!@ UAVI /=o_KF>ӿ?? z]{v| о]!C-;[?hMSYhld\qY8;@EjgUo>+xwU乞ZmZ-2HO,; 7kG.NTS^,^5,P9{goN%^u姤[z) j=wYmE{ie\j6R1$R!*GPkߏw9'chiNe[]4oq]Kptg zo:նa n +eVFVFASܔ_G5:q=wӯKa[m>5i~+'ޡͤ^M?k 6&i'WQ;F,1L[1._٫lE厷kuO M.kM,7T{SvtZ(hh£nN*JT8u~v'Ҿ9RIZIB熿mo2m+zޝ|%5 F7,qѪvMxfS|t1Gޛh0Z–|ʐmX.&Wi)5qLH"rvdC .+|m-v[R9?7 ^>F>|EO<i>uZ -.dY%yYF{Vai|n|,<_sQE~ɵ-YsPr@8']Sju762Ev3ggF4Jjm.י!' 
JanXpu^QMVIFosZ=5W>>|GlUYtox},Km= \cyg D|r?]'+VOo|Aw} C鶖cIcMBEx1~ A 5#H $h:*ZURsNߙ?4V 8 1˗^&O7:^3i -W$Y.=8bX}kkO ~^5An1j6DnRCI/iL%.DFFaLQ@ǖTm}Q~^EY+$葼roG{i~9x`?g]v8uΝon-5;{uFF[.+e6bx+V8R~#9};:G-okZxn\?$v&DgFdVe9RFJ)TJ'%ic*ikk*+^z~,Z\x^o4⎍Mwo֑:IlAf)$BVɓe'ź%>}dÿizn^?%Ei)QÏ?Cm+~{}Nj4,͂K7,zJ:U,tȨimO[}煉c5};h[_̶߉O{Ķz߁՚[;=(j3K!X_/ _Ae1xo _?)sQEr_Qq=*|/_@W_=D|EϮi_,ӼW84hZ?uo >&5׋<1C]= "H~ǩ0t"Τt`Oml,mᴲa% "@ ?\<;2oèu_kn/8ou>((((((((((ϊ&|Լ0hxt-jOj080zr1Sîl04>|@]v8$_FNZ D4OUUOo?~=+G a~(۠#V"Y+Fr7{Y]F(j)WF 2AWϟAJ~оo(Mk6/ji&Ͽ5|3Ƽ~KxVO}iX;Е]r|A[}qFxK4iΧ[&SY/) oCkF34Ԙo{h䝿Y^QC Vq[H4QTQEWzN7?h= B5 k[~붚~an]&ph <UA$$׸Q@_𵴏h2~*u𵴏h2~*u~^n3լ/K_~u?E??E?+Eջ/Z4GW-m#_ ]G-m#_ ]_mg5[ąG d'Gn0V/ [H'WV~IZ?=MV,4~xܿ !Jݑrǰfr?ΓkZXsĝv9?6hdddA <=/?5GU:xLW>0$qrZFY[,m l?v_i}ki"dU?ki"dUjݗaa_z#B/O_B/O_Z(֭~??:ki"dU?ki"dUjݗaa_z#_zO?>C j^IொU@$;z97 {_tD:{61u>_Uֿ[/Դ v<'nyc@Z%3E>n0V/-m#_ ]G-m#_ ]_].V@hMQCw'thTs_x`'.b9WwŞ玾niwS{ep VR+ gY?{wB&)/]ET.^W#VN ( ( (>xym3S7|fUZ+_|'/|Grm4 KQ&%a6;w87q࡚f?}:w/m`A 9Z*c75 ~mW}7|Yra"Cmm- ?|=|YVDhzb8"Ac 'H̩;TQbp45o/⾟[f@OF'u[ZAVP8ESIJqʭPO 8>$wzF[׫Ν.7M2ʅ3c/Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@|ok61{*-+̾.|>?'MBnuMhRﶜG)",g4W|)į!O} ^ith4JWV{[kt(pk~Īw<;GSx=M }Uc_AWϾ⦔~K_+D--羲?CNPTQEx C?GF/3i!/? xO_xWIO+KѴ446 kEx/x?[m_m&UϰO_AEPEP_4x:_:oZ*D)X[:^2ο썵ObxCVGb֘?BYZ[T-a3)'xC>m#ҭ_iy6v%řgffbI$[? Iφ]Z}B8mAEQ+f((((($چ⎉.(DDC5Due!̷rѣRE(|V<_s>.{!f2YB6fĕܤ af *(~oo-p_Xj6"CtGE dutfSO E~|m66+s]i(-W$ZAg$u}^Iw'~&b^&RciW/-iq0̃ptP/c3\Z6:ms& 큹ѵH[Iqdb dFI(OW/V,nG↋iwUnz| >t+-SL4Q$Sp@ ֏FtSf&t=򿵣D|-Xhx8/i%YWBީ+$dH xB״}ON32l.xW{&qOx> g[[(B^W?zI\9,Ēē?v_i}ki"dU?ki"dUjݗaa_z#B/O_B/O_Z(֭~??:ki"dU?ki"dUjݗaa_z#J mELjmҢ?vR)$ibm-m#_ ]_b|GQCm3VyoxEeQٱ0+4[ʯ HR8 |W׼53;=7+M2h>)|| 9&I8&'POjݗaa_z#ZG?ZG?EhZeX_޿Ћ@W+Ћ@W+V?v_i}ίZG?ZG?EhZeX_޿l׿l>+4-nOKCM!d/ؗ698(I>48삊(͏f| u|TϨOF*VH~$xL K)T籰T߳&%xYEK~ /|I4{`?Z5z,cצ\DHAٮ)5_5Z׋3"clZbyt,TO}T>I,t6+=:-mHE `}*QEQEQEQEQEQEQEQEQEQE^Pn/lnahn-nIywm +7~wR{YUYۖg#K۽ygχ>[`0 $Օ|9x%G }fF \W_huӦk>.ܣ,s{T5]2[ƥj^wZ[2E"t>I]GI"Y#et` B x+=hZ(嵏eM =%?~?쭤[[Ğ42O&+[Ep\'[OK/qyG4Q@Q@Q@Q@ǾsyXVX]:KkCw },gT Vϯx>5/h6%5ӫ4ߞy Kq3;w&:b""@TP0RQEQEQEQEQEy?XTf7\\:>x0r~$ZOlja|GaWO ¦nYI}?\f%c?LTsCŤOs"92<Gj!H-x%@KWR2#5%|7_|W  /bDGn,d~xf%>⇄&hzV&惪[5NF|wb9V$\23 @=((('WeVP׿׀|dZ( d M"E jYPI$_7|^w;yg45>%^۹SidK ˨|[ǧg^ع>yVX|2~rTTtb)nb,x>&wī-"%fe1:2;.R}JD%decVh;LM%>*Wᵦu gZ[jĊ2V*;t^k?^# ϶f-kmg&Kh)ۅe:ڐA&Xc`D+-χ |9R֖T_5dQx]g5-*ªv[© d@I$m~Ƨo~Lr}N\)d XDf?I"$q,q(UFEQEQEQEQEQEQEQEs-H~|Xծ'RՎ ]3P`8[RCQEQE|, |e{'//_@P_:`N+@4cx okAAt0@5Q IM|j=r :o<_k;PTQ^Oƞ"kgYѿ-c?/_ {-`h*(>| ?奯>EƷZ/,|;'~<@o=oΛ`ݶ Kd]?F("xĞ"/wk6&W<YJ2UY5 ?gUYn|LſO⎣j7>uh]p1W[{l6Kq!e6G]ٯ~=|n>=_i^,sE{n~}@{r&}6I{Lc=6?s5`?WyǑY?Nz-VǑ[ǏbQ(~(((({ᶵ6ipf'zWxF_{L~#P{c'Yw`č<HY};vk ֊<7dAxY5YAU Fe⿊2i֕Z6Ľz? Dd\sGmOCxׇ<⹼a_j?u>#t9hmUbH cqE6 i1TDP@ }QEQEQEQE sƾ-MuLW}Xf\= kz=MV_U}? 
pdԬ` >qp7HEz!歧knos *GpkVt>0Լw+ۆ6#< ϿnI)]1tW9ᯉZ^b:/tVcY[4dGqO )*Uw^k((((s<?s_7P?|Y/k #—7qP4k }u⿡cݿfVD]|/0`lzǢDciݿ"MxbXfV@ܔ02{5s@zEQEQEQEQEQEQEQEQEQEQEQEWbo_zTou0|Tr?[U;&=ZՉ96ѭ}/QO76S[\żRX@F x 4K 9+ޤ7O~IJ%Q[}3OB<Ȳ`֯7%jV>x!?QhO#G:+jzpznkWJC))olM*ǿkGF6Baj_ ȼI(Iy4Gca=Ӫ# |nS:[Ҵx千+f:} EPEPE=–V~$^C KqʄM.X-xbR.Lʈf8z>UgN+A{TxcMu8aL[ @ cVU?h-RO\єuQ!$w;n ---l4kk{++x+{x# H PO]8^.?|x^T-b᰷P!+p$7WQEQEQEQEQEQEQEQE~9, Լ;]L BN-{{۪2~Uo?-{xO8;ʭ%(7[ڍ֧$\ni귳:kg<fs-@ԼŗVğ6-J 彑UD٭H.'f'^ڮoM?o4!-[Z1٣[(OrA-0g+F@x/>x15[k Ha{Jᳲ0N85f8'7?_;Y'6,o@utuX6P%Uc;aa]HqC?5HU>r}%,m] Ju{-Q47E:Negegj0* EQEQEQEQEQEQEQEQEQE>}[>.mjPWv du`22)>:+ TԕA-کeāҾoߏ~vm{Rʻ#8!I:ʬ(|/%\Vgn?p ]: (;_U,H$JB (>~wǚu _~I~Z=}@ CjtşDŅŻb[;yQ/ AcԈ<zNh^4= ;Iӭ#qơv +|Ϗ_x;d_Jblmr_^>>|+gKh!ͻnadYQuf doċN2|M-YxG@yJ-wIq; SM.8\*HѮ|,ooq]sxzԼYKO_:FDq" (q|(i&GnOmu4m6ЪiFF#2}k,$K>1n})P/ZDŽ|eeZk9.!IWmOxVm6}BCHS^1/|GSHFO1[Z?iod}KQfίD:6g趲\}jk)BRgS^HUvWĝ[FὝ{^i-5hcH+n_n!.ipxLjċ ) "`Kx dbDSw+_~oIYzo|Y⛸\j!v($ColHb6~xO׵JG-D_kw{ª$jE$hz-PEPEPEPEPEPEPEPEPEP̚QCw/?\xᥩPio`,sKdHs)o;O Au^" UT`WFRWQ_?xៈ;PM,|etVOZcU k,w *L _,cvCikg %ƀypjHZUvTQʕ!bxDK~.෹Ս~]֝u-.'Ux{20M/?<;3—w:难s=" KE'k`VR"xѕxoƻߌj:2?1":$7x)f<xZn4qx(#33u[)RC.`FFGzxM/=HhI4j[΄`y $,),nFNCЃW_B ? Z[|* : \tY ^t뀿İZE }*xW7~6A?j ( ( )#gv 2X+Kύ׏u{Z6I!֨ΞNRD!&|<\ckoEOMּ;chڅγAq":A ^(y<)?![e6/ZtvZ= HtlwU/ǚyU⳼I aT%jW *^uZOx&O?Ri?-xo"iψoO QۏKg~4̓4+2< $O&} Yls>/2֑+W*L}gO J&W]opw/R h> s\x[Kiɔ @*3'w^)YN/ߋφu/k2sja:5d{dTUز ӁIm?I>xWV7{Dݕ^=ח@,#Yzנ՞=W~WKyԟ?ʷk]U1iF4j Gz|5_? SO\h蓮o9=ԥ<]='kzΗKY$}vƵҌӹUiN'aES3 ( ( ( ( ( ( ( ( ( (GV#sjVe-ğ)6.9(]Ǐ_:mME֣ kʃ hEʁq%U܍ŏ(|^Guo T#c%֝SVSu )RbʥcY~վ;|+O[^:%2O_ֿ3i/ C\ι UmCZ&tb!ts#NwU}p/b`o8V?b-nt{;."Y`*0ʲG/g_هLVg>#i:-đx{Q֮V;ɳ}ow|za3=_*ǃ9_|/;K/Miw.\BցUp41߿w5}OFWZOfCğ^/|\xwXM_D{5^rFdK |S> q#o?A|C_h->#~ ~ xρRw4xL:n Jf?3g W7'-C&坽4?^v4gcs58iwqi}ExEPEPEPEPEPEPEPEPEPEPEkeoX4;=Dq)JJ*ҕ)՚mZ<|6ռ#*Zuk)#8!ՔIH22)?|s0>5mG<+x$њLjZB̹gꎱE|-]-bYzf?}_~Ѝ4~ MB8DUA`8&d~!;4D;brY\O[I>|M5wN1.~uh ຟ ?WĽ#G%x}V 1By6c坽H?6%7#񾗧ë_ȉmu&$X;0+6w6k xXu][ڌcqɸ17#[T4s|(>/\ѼIZoJ#,m.BPG^ n柣>C2/z Kۢ+((((((((((*) hAW =I< nȖow .|Ycy2DMт"S<n̺WAѦVJf:_EG`8?Ƥ&*dx]O2}[κ׈2^uokr\C9bʐnՙ?> ?G?g.5 IgE<̏}_ (nk]A,|gRi?A׈UPkUmifDX(,#U ı &}-kޢ8"5|=N|noxTN|5x/5 pmrE1$~`cY߷hs&/=|-m;항[oa2u%<ln@Nf?5,&7 `&3OC+8$ 8?~ q%4'.kW$l!c'UbU].B*,p`We/[/<h'5סdIW>S ']7o<3p=!Eݫ'-⟏>K.u WK]%"(c(Q 2V3[C2o;|È)AKqZ>[~24z|DY=#:-09&ټYiwҴ|ߤ6Gm觡imVH:ğ4_hs܁LJ(fXZB?\DŽoR袊>t((((((((((wڎX֥g̱X^e|qYY[ey HZ¶&/M/Wck#l\`υ|5,+Ӯ,! ,Q5qex#t;|%1L<=J@5Zt/xgTЄ~i:H{ 7CE>Rx"EXIQA~v|JI3xv|5Jԡtxx'đK\" {goSo 8pzh/Ok⯄!ծ|5x]T e:E7E%V GR mMƣvm) !ܫ"9iT1tCq|b>8i$^;Y]JEDվq̎HdR+m>=Ө|Nǎ|Z [[+x$YbXN]YAH8H<|5Zӊ.y_/AoxG*^Cmg-ı[ąF d'5Wo5h !񧏼aRi>Ҵ{iP _[GbđT)x5#Yeۯzzn8ڍkgخCɕ?+0o8?g/ٟaM{I˧]ŪZEmE)RDApȌ* kuWO~q&^x9o&+Ɵ$[~s>srBI7[cbC]Z}KB8ETUPW="e]+t2?*хH^./f-tK%^hVwv΍;pO'Ǻ?x^._&Mc%ݽFxɰ;ij^{ !%ʲoie͕֗m."h$dCJAwǫ[ #/{>񇏴Fբծa}2cE-l2AɐE0:?m"X|Cwcc #MiSr 6U8q_>f?ZO<)Ώ]3°[O}\/;Uv768~՞=W~WKyԟ?ʅ?m?{k>>/]^XrRڞ~qWD8/|)* u(Ú:OUeI5ˍ&Vn(eV^y{y_wws}t/5ĦGoUʯ(X+tkWr.7%ٷ?G?t|J\46&[dtRJ4WdoD޽Uq>?\B_^ cUȗZ @g9=K?xbVzսϔ evUk{ $B*0`G9 V4Ziu~8|_xI,6jէcK'R?h| GhWW֋skWF;xúor²ȝFT.~9omUE.mbmru'[Ė{sr\hr7J^ܺMCɏerWU4zksjmOhW7W|iD}ܭJş3_~5g yZyi}MqFYN-omuZkuVV;Kn%2PG"_ WZO\ =>~|o5oQnI?ڴ.kY -Ɂ]F U #R}Lξ'iJ*4Zzi;=lqW!Fܶt/c~s^_7zvKmu H09 A՟!W h;qՊmw~5Yuj_ĚdZHp$}b;M|]xK}Ymn+ձkdwbs$@I9<\=~ƒgh<~:MӻJ)ݷ],_rG #RI CǼ/ڼ^Q{BRI&ׂCk XnK 'XK."֡`F]3Bʮ'''&}6TCd (*TMIXݛɷnﶬ++& xWW< x u t2x┡ *S_-]b3ս垻5`AGp' ^t->M{Z˨?-/4t՚=?WӡI*$doW^5ѧ'8^h}kL}CN<Ipj|0xW?̲hN!c=w;}+J+2?S/8ZrJ|OkFQE&QEQEQEQEQEQEQEx'~p_eP*Hב]~ND6ߌzI#Hn("c#׉fI8[xm0,tiZ4v9[OHQjw\jR8?+CPCS#bI5Z /Ԛ< -/ 8l n^>BZ'sc=(OZ&zoW*+n>3kSUk᮷ h6ֺkxhiV[kP>RM~S;C:۟|Kմ]'‹.iR62ldS%͐ၚ6 *g5V:ngi>+kɨ+^LZktĒR:0-Tȯ+贿_w 4Z[<,KQKT}/;Ks(0VN4k}{їh<=F? 
uk^{h[jFC(5޿l;{mC~$5}"#KSRfkadUgUtda;3㯪۩+Jj]5uitٽ%nGRI#;F}XxÐ)5 +CTX IB?!|MCX \5NׯE/Ry~l['Ϡjk{;m.&%:PG"iпh3ZO ZմH52ceܩUsԼW|>Qwxk^4O=N}Vi)$ 2}ĝdUgbcwky?=zoc)ʪ5/ͧ'*|/>]?@<D3Et+ |?٘|{wҾ}?WExuT: 4=!L_u~ &f`"7?Z!aQGu|gUgS JHKm{zE27 I6nPvggV ( ( ( ( ( ( ( ( z~j}gڨ=WګڛDI$:LV+}"4P8f?3ǬյzS5,5RNoo>ڎgc0WV=xоsE|ľ)OuKW9Qq9dOWW=\☴Ox/A{9.72m-b1dDWZⱒ|Ny/ d$^&IkGWExtWDnuG)U準bDY9gTùtj0^M>ki>7O" U,eڭ{[7;q.]IJov~욽mtvխmOh.>*|B|D𶕫OjKVV2\Eyۘι'tM/UѝvTS_ RCsp{Nͦ՟K]^٢TM5}BV5cr #| w>#[iqcolm yg2}>gĿ.y{~jk֎Um>nj qzKu{Y캳+>OkChHZ>;j&Q> {66~#mV+%[+E >ؘx.jYv"7捭O]t)'TnZ2wJ:ۭm>-xDO.Oqc<zo? hF1sc+Qeobx'RY𮙫ڬm}i+ ȡ 35޺Y/>[-<"#ϨGMFrWSfKho?/:Uޭ$fr*v TJ7 ^5FvjopNN.;=Š(8((((((㇊OZ;è,v\/X7\{bAqFKdzV]W0IE_m]+|B.>OaM\N[ھHOIwoN#{lߥx3+I#3Ėf9$m~m^!K=_p߅yUje5u(켷~eKQu/u+]Nҹjx=Ʃ GkQh"kǨ!8bFA*}Fj\2xQҬ./-cO:!G`>x]J+{mY`}DĐv ן jյmu?WzIaTm>eeo--d՛SݫRQ^῍⮥KU羷!kbsd`vɹ(Jj_XkZd?{sϩQyz*mreG. BS_$ͱ -ztj^MUܒW齏h*#wGTFF]tn/n"I UW 62%$x4FhQ*"FpF0z_KߡWrnI]{m5-;^GGwsEej/%Ե-}JiaRS+m,5/ ׇxZdv[XC/I23+dqU~ߊ=׮.%ˣ{eחOWjGz>)@F4jin[_Om7]%*T 2x Z^jΙo,q͝lpu^V~=us4p?\Wpw4Z= }t _SU?{Mr|cu3DC$L%-xRI&|,JVnuU )i]'uk5z0oiM?TφlnW֡mg[d]IA%Iս早hbQXkpGEM+`n%H_ޫIOɸ(WwK$m6}-E|gxO{Oeij>m*AmQ+_Q{msi}W6$OɮQڍڻn0$SKo*ymxFOwgO2SszoF+{].]@qk<rDYO#_5ƃx%wߏT).֥oL[t֩"[sd +)n&0fj}V>((잚Ÿo w&TFǴ?RG}o!fOpx5R<ַM%N)cbAr=4o=Ϙ \ޜ j^q:+[+|ƿ\ڕlcdv;nٻLV5)q٫?S BN/;0+C((((( :$x9qua_~wr\1u0y](qN3<6 OFgpwa(k?o|Ok\B 9@+p#`t<[kR\y&$l }u4x_Vӯ[>|?OwOg.XjO涩k[-O[|>nKW7gOF6_D 0V=+G7VZ/|E~.|/h}F8%?3ᙘ1$wWOHf'mB:Kaq.wZЮ~"C6¹M^\O$~HEBHΛH>MI5^Xv7ŏ 9͟&s_U&W/ޥe\ ס?)xFX-tUu[Q}ƒ]x{Sm/O#g+ՒJ6hZuzO?|KFR-4&.=sr1_:nex~JӠK[ Hx"v<)Qս|s*kFb^m~rAEW|PQEQEQEQEQEQEQEzOwYFqʺϐx&9 /gM[A |R22c?^= BR׏dq9i%Ͻ4JNt?>hK?TWXMc: φK{(x`W=G*In;*}ʓ^ɳ$lRS+N\?agJCJ)sERsSxnV^[_?x_Cm U_[;D)--JoZ)f{eem0b98gpAuo⦖XǡjXcWƔ5xGW}:]μگMIN ^wz0P|\*>VJvw&VgZx[~vO<3t]/FXt6)ei$7!HQT=~!2/%N]nlWKO6P8kץXf1H:~:WMov| FFMB[(y|)GF_ɍwJen-^i~N剏4-ܝJoWmt|[o gžo.oa'7m';>'59]siQH<75|;^94Tr)-r;z;sm*}CP-C0|t>Ty,q9 (atՒ?sbOrKAEVQEQEQEQEQEQEQE7:.?Z>mt>¹)-$kՎIQ$+0c)rT>cr op}{Iv񥕒VGVGSV M}K |/B7_ x3"1b =Iʕ_E|U报/7+^;N{6џM{:mᯉ>{m{Y[_ (5ʦaMG`waS|յz?hzMXݟ ke\J[/yʊ _r>)h_ ^_Ľ$.} oW]WG4Eݣ+ʭSMkKk$vG\==E46rKk w4ZNj <kx"iJrH'Rr}>]o:\F`mJ=NqfcO z[:o{Xy% ?gͲI+ϙ%d>Ui:ֱqo~G7Lgy-F@ڪ9~}Ş7W0Y qIgJ$ITy!iM4,6zZmuSoILE󍶶)AGU% ]-=Y?pRMyk+|>KohK&koaԥV sy~XќcBB4_ҿ!7?1>ovhk^aw>> |Q֙<qW+lz! 
?~:Ķ6h4g#{m Xl׫V߫>{nSzW.FNM+(exFDٻ'ko5][Zae.ȳXλe`r[9<o +3F[M0VP"y\} *TWwk~_>4Fxl Z9Rջ'QEAEPEPEPEPEPEPEPEP^!*|Iޛ$V'Յpy11 9IEaSRt+zy>o8֌~g~!Jt/PΛN=A{{?x3WE5o ڛ^YOuPI2AI[8 k>xGխ'-=J0m7쩠$tw<[0!}wlO bhϞ_?ױ#xɓ4=gJNj7}ӹ|״m7K߈ ',Á|A4m*v>fKUEry] !1ZT<-xG@wFu; %$̭zy1GPi+6V}mfwks o^Ye%.dn]s~ 5-g[lFQK=O]+DcgIČc|o|vH>MI5^Xv7ŏ 9͟&s_GU ¼"dYywZOugoW{i[WV[;~Na࣬[3BF9TXξsf&Emc tR喩/>l_ `)^[KZ-/oyZFuhit n:nԼ5kOi!DrYrD;T__xm|w -kqobRCђP 珤5_ѾZ4(Rmzƃ.Qt}nFJn&_ |C1s.FԼZS${5~Tޮ]ϕ4/.6yZݷ'qZ |Q]-.t|]j;9ά~*NHHzf,+m"% 0D@;8 թ?iZ|xƬ&zm-KEd1|)7i(eef1$1$khB0U?ʭI^RmmQEF!EPEPEPEPEPX#>`7u[%PpÜSف+nJ.-]3Z5Qݥt魚?3"w/n.^$ǨZXƿJيw$QM~)'`kq'v凕^ )95kX/V2=[?: {ᵸt)f_&71خI;"=l^!)[vmo+.W%߫7I'|Z@ }N?W߳=0Hc^a|r*gp1:$k]YIX=86ZVM+|K:/hz˨\8n$ i-|`F TPO9__hthrA$(\Er:%k5ɿJUdײ_e.O^ݮyv|CxQki6]Yۥו !q^Ads\O$ctZizKW}2Ei6$.2x =5?MCGBRtҲdZKm5[2WimjOxL4oR}C0p0۰̑ƹ@Y c/^*aR ޕx}o˨d}"6^E[qw-_f| i7Z7Ͷ|OnJxT-y|+}oo OC;j.%{wڲ-W[IV{FΙ4Jh1KxĩtsrΤ09|9Sٺ٬?5)j̾G+u?zRQ#)+eXnW<NC'B(nG}eYveMui8t[(kݐ 10WQO s~qlRf+1n۟L}ʾ1emo\4hU}̣¯=^ٓͬINJKß,Eu3G'˫{-z:5I%kC]"o{~x:|=׌FR{/R?47).bCT2߀$:6cY/"H4Ϯu*liadMI}oJSJR|Ӕ]=տ?[Y]Ytj^"ue=*E)nşy' OEǎeCTuO'._mqnbpm9xKZIoO vަHVyqymGa2êT'zW*i;Z_|Ħ:+Vl@, ٷ1ߙvv 4$ +?q="RIM^%蕺[Ge<JT$vniK{[sn5%ǍO=2Nk/ sx%o8Hb4ˀǡ_ÿu}2V_-S xN=Y" ,qDQZל\[yqP\ER2w4oMz+{/$F{mg Su4Gs=2ɱ'81o'>xm|}m\DU:m߅sm伸TI$ _?* xV*4[Գ2K裙WsMkkk-^}znaYj.Y{>mm%w-餑H>MI5^Xv7ŏ 9͟&s]:H<?`۾Ϸ{᯲vS,"=d V;!^~̒x)աӡiMꣅ8f.;:ܺ3U+U$Ҕ-բݷK"׷|2!o6װ\h~Kkξ{r0~߇>;}j`Q\Sz ZJX_~?=9ʎUMܺzG_oPҴV:Fip,6/W袾$R');aES ((((((((uiO_>+r zT/)Tqؚ?è87]O_ tȗ ٓ ׿5 hֹ6Я5B'ytwYgvfff$I&_Oڳ_r:3+O[h<Xtd[Nci}:yhQZF8jMoT[aEV QEQEQEQEQEQEQEQEQEQEQEQEQEAqd~s zlOkT-bkRmK5w'' 'ǀ|y ě=R {e/`_ xE*d Fꠏ+>2~Uo?-d_r;Vwg'1HBjEom-&?UERQ[A}ȉUe;m;O6v©\$8'9INES$(((((((((((((0yǻ G_>93“I/_zYJ9oGm,IZd_?C#SC-᷊.QoHex_qC>H567uUF;^R3^t <PR[F1^WyɷQEQQEQEQEQEQEQEQEQEQEQEQEQEQEJMӯ +< W;7ܹkJg=Y _Eg*PΪ8Mqm*~Ӟn|AcDu3ᶻyjŧ\|*?"_+xj/x/3 yy? 6G_ jeI[qWEP8h<td^],>)t Em uK7o?3t*b4)t|,>)tQEQEQEQEQEQEQEQEQEQEQEQEQEQEwiU5W׀~/٧Wm@EPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEP^N+Nyk>2~Uo?-{Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@?Jo%^2ӧin|~-hFR4}SPfX!f &\pN*&EJ\&sڟ'5:4oSDT悺?sڟ'5:4oSDT悏G(?0??>㢾 NQ>?;诇?7|SAG&?OjuOh(/s#:+Mj(?Q OES ??sڟ'5:4oSDT悏G(?0??:ۣ+\-'Ofѯ線w>s|S %Q_#=7~#>&ԭ?ڟ'5:4gb?A&?OjuOh( N؏Pn`}E|9 OES ?7|SAGv#ۘq_oSDT悏Mj(?Q_GtWßQ>?ڟ'5:4gb?A&?OjuOh( N؏Pn`}E|+W?~|3R`wC xnI5xdX羻߷:6VVO8YP=*UaV qwL(@(?Ǟ*o/Gy?ڟ'5:4gb?A&?OjuOh( N؏Pn`}_?U߆?⏆(pxh:Y -e@f`;g';F7|SAXZ֡BxBfj}R]KH3Xݢ㢾 NQ>?;诇?7|SAG&?OjuOh(/s#:+Mj(?Q OES ??sڟ'5:4oSDT悏G(?0??>㢾 NKtڇ|k?#ϋEu^ŢxKѵ=E%iukLG**gJ:|)fJPߓ#(H(((h~?Oss?[o[DĚ4 ꚃ2 ճ42cq^K OES 襅U^*'2aRV{D}E|9 OES ?7|SAZgb?G?GtWßQ>?ڟ'5:4gb?A&?OjuOh( N؏Pn`}E|9 OES ?7|SAGv#ۘq_oSDT悏Mj(?Q_GtWßQ>?ڟ'5:4gb?Aw[| Ȟ_{?eP"PC! iI *(Hb71ckC|yxMthn!uI/scA:E! G`nع[Q>?;诇?7|SAG&?OjuOh(/s#:+Mj(?Q OES ??sڟ'5:4oSDT悏G(?0??>㢾 NQ>?;诇?7|SAG&?OjuOh(/s#:+Mj(?Q OES ??w_6:&_[ FHIUmJdQr@U ƼHk9S,=Z pwL(P(kį~ ᦥEW'֭\iQ}iw2MT %Z9$}KE|9 OES ?7|SA]؏Qn`}E|9 OES ?7|SAGv#ۘq_oSDT悏Mj(?Q_GtWßQ>?ڟ'5:4gb?A&?OjuOh( N؏Pn`}E|9 OES ?7|SAGv#ۘ9a<=Kо#xC>&m|/=4 JH-o4@$ ƬM,GI߷c>+{_as:m?oVXhY~\M2 c^q]Ku? 
xW|9隟F2׍2lxU|C:pQ_GܔWßQ>?ڟ'5:4gb?A&?OjuOh( N؏Pn`}E|9 OES ?7|SAGv#ۘq_oSDT悏Mj(?Q_GtWßQ>?ڟ'5:4gb?A&?OjuOh( N؏Pn`}E|<|C4ϊk߄j$ѧm`}K Yd ~H"&%ztFqRP*K ((Qkhk_~_ =i> 5'o5{5xUԭ8t bL tR\ʽxQݒ> NQ>?G(0??>㢾 NQ>?;诇?7|SAG&?OjuOh(/s#:+Mj(?Q OES ??sڟ'5:4oSDT悏G(?0??>㢾 NQ>?;^o⏂| ?&]#N[u#G wA~&Hug[ԼI _^u-@g2Vg(d/IWjGMo_:~cFeq$_;eeqʑkwڟ'5:4gb?A&?OjuOh( N؏Pn`}E|9 OES ?7|SAGv#ۘq_oSDT悏Mj(?Q_GtWßQ>?ڟ'5:4gb?A&?OjuOh( N؏Pn`}E|9 OES ?7|SAGv#ۘq^'=x_?eOx}O"][K9=%fޖLcWWVv=XI&(0(JIxS񆡡x~X_4+zAk|m,BHjă,@*3s5Gec+Mj(?Q OES /诇?7|SAG&?OjuOh(/s#:+Mj(?Q OES ??sڟ'5:4oSDT悏G(?0??>㢾 NQ>?;[*x[+=/[FFi\}ylՍ N7:¸OMj(?RoSDQ_G̿Gox"w5uj2KmoiddGR4mǩwf xkOB:V{QRňH_ 1u$&?OjuOh(/s#:+Mj(?Q OES ??sڟ'5:4oSDT悏G(?0??>㢾 NQ>?;诇?7|SAG&?OjuOh(/s#:+Mj(?Q OES ??<ǿ|V:?j5y^glݳn~T5c MwQEbu|R2UQ_W5wI#weVE|E[῍UZE|soW_ ~r=/tZ,zOѴ;R .]%HF%[{H)ZVյ?| k:-o\񍾺li'٤Z$Cbm %`CSA_Fݮ3(e%nڦ~[yW懊t+^-/i:m+xͬo]0}n&-C9b[?4u֥ >1EiEI'Ze52?bFTi\ps]1r_˕\*|R}ӳ>Y-Zƺ~uX牴.{TT W  4+_8HuOISzN=[Iׅt  ڷl-@"奘4:R J!U|k_'Ѥ0Sի^[/~ Nu~'x'tJ-լ1[ibc˴"cm %`މºuE|3a<'XK-ŖϧpCa1ޑJ f57f|^{ Rݔ&xgf&thC p.0q׊Om+RVN|K?L(62nHZymF1\9 o5 .MsTѼ?Mn^J%MN[{sE/3LCg^- ~4oMY/~"i/𕿂1^[hE' oGdVev"!1x'>ܞOFK`lv,%X.ТV b}={y˽_MWom:Ğ*Wό ᝆhv3ӵ7֯~=wF.ΖHT$cQVn_|U#ƶ񿇚iq-엺^q2H~|\NUQ4'kjs9%%O;i{o~Q_*7+O_l ⧊+nEw'P'2ۮPHV74/N9ZsX[^Ayrr<0W-*\ubӯ7S=zyܿ-Oο$97/5+Jh,,ݢ]A0cF=8ͫ^{Mk_«9$ҿ}lٳOώN!WUHg'~Ӈ+o}SO(=(k3Hx/R"k3Hx/R"S,G>#[?O 6ɡ隍աY ;Rwӌq "b0!A"&7ooOB_5va[o]yeH$1>)$eoE^-{]t+ g|p/sX)[i:-sA5[kˋ-^tUmbwWFh" fY1?Y5ӯwzJi8~Egi:Dž4ZP9K[0Y"pQe9+on[ޑEdK{iE~e*̣?Y*q ]۷}E|)x?K5?!=<[|pmv_ Ѿk_[ f'K=/n\5+%B\u{}ߝg̤ͷM%{ӵ-%|g: 4W\햣ydYI< -#Hz[X |YxN^6?Y."Im5G$XǕ2%`6e7%( +~zXtJ=K>4`?hKv_4=H񍗌t8Mk%3<"Br?n}35*j,%mfv7,m/V2~w0 FVIZ-Znk< f{v#D7{§gt.!I ZBBX;4|V D`O|C 'JX!Iu?:U9Yj|1KM,]EnWܭF;ó+LIP2[QX<1}u]k?mjrêUo'o_h|[izֵZֵ4Fi׷d"?IF645xOƏiδ "K.㘴pɉ":Yw)Gċω:ߌ>:xjGҥH̦ M ]BK%ۗ<{qZ-u{]B_&׼nk?쏿}GŖ>Mdi>O_h- wBK<{Nݭ'^<,[/tRMRBa:bX lX cۅ( SȮ̥]|-?#v1xW߇m|;ŷocki37mN9W'hdiB%|;W<x~@<]IJ*ĨW?A\|4{,ھ};_ȅWNhݾ-o3blj|gim7P>ͦAtQgyD ," Fߵ/'މhtf7^ū{a;Aq!2ǶD?ESUӯ+9iSRZm'֧Wv*K񭦛 <=e} t$[hLVeU-(0>EGNAJZU+ 8Kd !H&R&m5jֻi}aI:nz|\߇JWO u?)xN|#2?|6QEqQEWƿ!N_ρN_ V <Χ+09wu짓W!7QѮm4ۉYKo!gFtUWC*? hWO8 {M|;Ɏ[k{(NJ} ,GQ[~I:]OWvgìiv|G-MƟ 4P_loe}M=8HddkVPwme=cXa ZwVPj"೏Q!r¤ ,H3*zwZ鱖#AN_}Ϣh~(=𿍿lx2PcoxWnlm$(-Sdc73R@ 0:x%ӵZ׎/%מ:DⶲeYmN mB~lby!֚ouf|֚en]Z~߇/Gx[?|$]SYԴP^[MO|hVX>ִf/&gaWU<-> k7$\֨֗ &D"BۤQ#HP+OT~koBѼ=s+l#aUyL;&y Dv|Bs!9W޷yf5O=-_EM;oW}>OIcy7O^/^>?h>Ɲ՗:_F,5UK+U*jeKv_<]&J:># Hwo|+ƕ}3Lo~4Imuk ;i~Зh]$Tل0xS_ÿj]ݵB.zu֔EI|ݕP}W7_FF}%BbQ$lENo]_﷮5xM}E~p|~ j;-x>B"M R&݇سooouçk^,HX-bkصhdo3,vZ|M[])>|/~jXe}J־;TWڣ|x/^]ڟ .2K[ID"H3][6ک?c}z?x}bCh趇/(漀 1ay5n_j+K I1Ro>â?+ټe2ƒ>V+ooAWtʗ菦65V~E#2+eFy:4KhߞxI*Η2ӯM}E~`JO)h:>{6Vx[v9ݤV1#Xm~ZP|P~1_7?tӴ;PٿĢ Ai.o]`PMomn W梨Oz϶诇SWuok5B0kSi i->RTr[J]A/%XXiڎs4ZHhổ\)r#۔*6p.ö_╻[QMood'DmO^Xx;{χ/c}C&?q ڦ *Q|UV*Ng:?Zj\Ϧjm=\N3(ZTGTv ;ʕ%8go7+>YY`?6oUmzWOѿfKź?dSU؊W׊|s?xb➕M 4b)gҭO =R  1xPLb]uޤI 8 Jkv +珆>-ǟ?_hWv-kf` :Tڡvo2Itxs^ u}6)cV1 隟Öڴ֋d ծ5DV-_)|mVr[_Jӽ''OEߚݯxŲw>9#xᶍ7uoC߅ih%9^[:I30ׄC>Hsӥi 鴾??͙pW{(-wZEܿ|_J' rxKiF#-rN./[O|'E} kQ'&tymn&HiPeVB ݚۊk˦Ywf6X rOM$ڵWo3&RҮd_6>4 xC|swZĿZͥΑk6Gv h=6SdCDU>8xKR}{7efK[t 1#Fh9/4˖7ti:qrOv~/AFM%ϰyMLY,q+ŏCxE}GLCR-QWծ.[EpNw(}3:IMdI~,ke-gY|}xz:X]ĺFo}m r~#Z\A/Ȩ iߴ֗^k:f>kz@Yi꒗nLXt'm9rkԘ*3ۨ>&s~ j0tKVC+gs di]CN 84;>&i6Pףng_~mo kkI˧ar1Ԝ%^vvm<5⤚~ZzQ_g/?~,Ė 5s ,`{ `4 ˦O'lTbm;Bl>jwq.źDc{4"q`R]jRvDB¯uoUgf_m9joG{v?Fhȟ^m|JHow'u;oGiuagȳ\,h+3~tfC1:=J|;=_</c=X8s( l8䊺Uyn}k}wd:vޒ~}zke_wZ~~}yYj>X4{j>^5m~NYbl'1zA)x//Wew]uYē!IQ62!bjmk̿KG,J>_^kt>oKľ0<}LjM:}WSSsqmr<H'~WQ>T'( ;e?"kOĞ!^c@'ڵ=Z;[h7c(吅]ʣ'u"ÿ]qZ/>&x~n3gvhOwk˧+eI5gO}M+⎫}/Ӽej: wHvd.bdG~?^~ m?᣿g/?}R5]NڳO:m;7#^!4Ӵ-ykzuoO&:Z[|!x?`Ӽ,X[FƉ;r嘜湏h-=G4wEN٠uOS4 i2^=Lju;e{KW =կbh( 1?<yj~ ׎-cA. 
jҸYk0b:\W4wE;yg m?UsӽUׯ Q\Lyھm}"UJm%>ѩ<6ȍy/y?x\qڊ:+᣿g/?߳?[izzvՁҬk >:7I4Ut{[Tث Lc@P+Cn|?ki\URZ#C @$ F1#!(`^[ <y3ǨO.֡iςn<ﳍE|fݟs3ҫi ~%5᷀cRYP-g βHąT$ g5G~?^~ m?᣿g/?O5+nƫ|2 ǃ?s1}7j-ݾ6gU5T[S OZtPa14QT`G~?^~ m?᣿g/?>z}6pr;?Ǯ^|\7q}j+6ks^|-umyENj4qo2i@0)];76:uÿx'v^.a0jzL){y۔`N$sW <y3ǨO]:c< L'|-;4M.4LcsCqUK >+C㻯 R c%j jh)nK;yg m?Q <y3ǩsU{:n]괏 ?KZÿh'VU5k +o̕3oNT;^5[υ=-[|9j@sp91$gU )`G~?^~ m?᣿g/? Tլ֟>LEgd_ >|MZv4a&utKt̛ߖ9qSX_ > =E.ž-u}"NmTq$hٶ9<h-=G4wEI4մCM4Od) dpE}_~˾5o?n[i06Z7G03fP] RsSW5XƝi4~QEsEP_3?fD_E}1_-~Z~W:ei6>_]|ry /{vJ᣿g/?߳?[izt~ii h/43M amth#mT1bbH|gx{1wl uk:zK#.]Q0Is+OG~?^~ m-!~ly⛋;߶Kl-Au9ð >"_Oi$/Wa |YK&#q_߳?[izh-=OӺ^οg۩܏~-_Cxiphgۿ@\g+O5 {8/ Upe@LWG~?^~ m?᣿g/? tҲhNg_vï'^<%f)"c,&Rr~ckV/ xfi<;C4avcͿ᣿g/?߳?[iz5Ct_}৽nnj|of&mEFÀ>/ ߉7^3au#Isâ[ӄYCz᣿g/?߳?[izwCkY?~x V¯6.RҢZʇ)$짡##(+sQѴm:G{ "5Jݬ":Jc(gve5 <y3ǨOԩ.q=N-y]/\Y&j?ů5 [|&mobժNE v\iv#}oi߇z.guYaӃX@F dO"᣿g/?߳?[iz*o!;ԾY{u-[|-s0G {I$]rf|"Ux_Ú_hЬfm 5 0!LDIdkOG~?^~ m9]? u=wu?zguxZƚt^Uq%|,sޣ~&4#|?Xv *A,tao6UPϖ< <y3ǨO)SN}tfoSώ5;Kx.kgEBHOH oEoHĖv,IOᯍtm'NO4][E+1.ؕЄUҹ?h-=G4wEʛVmt6i׋N>\UkeyivvC5[# NFYHJ0@^:7g^8G0ȗQSᳵT#=+OG~?^~ mnQ*5JCC//O[c_ob ͵nV8UI |sg?g]SiiR-U$$Y߳?[izh-=RVVԻbo}{:i-+_gMԟ@76~ZGIrlT@#hUb:7g^8G0ȗQSᳵT#=+OG~?^~ m|}qi_~hcZeI؂k}[~LA9#?/w_lsk]9jm *Ejdk#;yg m?Q <y3ǩ/b*؛^} n|c᷀u'-͟Rlܛ.@O Y?Þ'LF=}N/jv$_v;/w6ygvvӊ qCoZ 2ڽΏm#do>hRm]OALOG~?^~ mjTM57CMKG7~t|7x7¯[/xk3lt5/_94k>% (T(+_ρN_ UwE_žn7]mCe ^VU.U8V=tUs4M*%إoݧ˟9R-wNyʀ@C|-gxg:.:Fn۰-R 5@ >G~?^~ m?᣿g/?}-yb;?ǦqKD 7O{q RI2,;61yx^ X:L0n`ʪr@'᣿g/?߳?[iz=;eZYo|+x55W/n>~4m.As'?$2׼k~ϰkW4^lbɲfR뵉+$WAzfqͬ+[46ZDjY 2Le\ќ;yg m?Q <y3Ǩk]t:}a,W6ֵ/i6[iZwVwQȥ%LJR O>5𥝥>#ðZ݋dt;{q .$&mp<4wE;yg m?QO"ֳGȼqxI;]Mowb=9jl L! 6gU۟ > h,yi0I[ՔqH)2v#nN1\4wE;yg m?Sh8b-k?Ǯyi>:w1}UֱR<8V8qW5φ_|*]kgOi څIgT z湿h-=G4wER-]^'O(]:Ο8&a̓jZ 1^oGU|XG5< ~Q>7o3:Z%_&0onl穮G;yg m?Q <y3Ǩ{fU:|//i4Rú \dG^sfs?qmpE,{,J=OG~?^~ m魚O=[kOĞr!𦽢@u=&;ixc%mu˯;՗?E ++S~"*S|i <*{yz|/u߷zgQ]{K#΢傚K +GX|:6$ZŦoI!&D"$l$l? [#|%hvY7l[rFrG~?^~ m?᣿g/?}tm;| ˈ߹ ~k|7&h B 8#!XxPҮ^ mGY_MYH"PX *8gS\o4wE;yg m?S~=NFҴ۽BN$IuecXIdpv? >x[K׬|5~25};E=@ # }Mr_߳?[izh-=C7]BT뭓zlw7i<1O xo~6+{W2 9hBÃȭ94 X4ht#%YtkD"6^ݰ\á";yg m?Q <y3ǩHw]? 5~W׿]^ y G/.S$Ϸϗ9t5O 麝izjłw vٸxɯ8OG~?^~ m\:ݧ =h:/ƩqEf]]X,A־/x]4׃<)導hFAmeV=67yi᣿g/?߳?[iz:k!YmX {˪x/:7KSKj>.,q$]r9~x7~H݂KZd+/  #W v88Oh-=G4wEzV7w΃U=]EZaWG6Lw,m" G8ظ\v~_?3!ƃ@~_~|9ߗG~?^~ m?᣿g/?Wu.LEgۮݎHC5 q-͗aYRerʀG!XrÓ|C>ixhfv|v8G;yg m?Q <y3ǩFTM$ Cm}NGe`0oeA7oȸڣ̓oST„L__=_A Zl0ٿ |kOG~?^~ m/K˷˰r/{>w=SM¶z^inֺ}EoJ65U(q?>;@YDӵeuKK 荷F%E+oaG~?^~ m?᣿g/?9;['׆|9gs^ tMDj6z~vp7qTt? 4nJeMlWֱfh&ڃ̍d ᣿g/?߳?[izj}rW~ u/ < xn]յ];JmbԞ9s#"?^< ?ú(;I2ij`($I$;yg m?Q <y3ǩt⬚D^J?[F4'?,k5Դg-WInllYU2I*A E}K_%Sgt?DQEAQEW7袟>x>)x$E Z ڿFkw_ I"{xc_?i,n`}Q|RHw#+ CЊW~xEK UQ~ |#Ѽ[I_LֲTU+}qgROXex+Vs}OctKK+Fhd( ަh-=G4wEvomqvZߍ/-Ut}NG[(!aO#i º'2%v7y=ʹiѪ|T3#xaX]+H"LҨأh,?;F8h-=G4wErʵ7txG>z1eehaPnLHsaX7u>-Ig=u&nIzBSbs 7 <y3ǨOԜf6qvuGZ{.qy,`B RAn񇄮t{CNpTϦ1JU)hR;yg m?Q <y3ǩj͢U*==M.xJgû XeKV20y qڅbߟ^ׂ'sd4D m-t|,?>-tQEQEQEQEQEQEQEQEQEQEQEQEQEQEwiU5W׀~/٧Wm@EPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEP^N+Nyk>2~Uo?-{Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@?Jo%^2ӧ{i}EPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEP^N+Nyk$~OǞ+tmo^3}\ DzRYs=De,sњ+O(׍3s55. 
OKHkyYbsFS ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( (<߃JeO ׿/ůۛkuc𽏋iW> o=φ٭/=5j$ʯ@v sp$CeRPy(((((((((((((((QeK*k oaYc⣤|?t[]km7,NTl39 _X~Ğ#_&t$;< m)Dn4!%ݷ?3@WEQEQEQEQEQEQEQEQEQEQEQEQEQEQE$_ ־W%ğ~?^CZ"T:m,D-qF~y |S+߈^ <-Z 6[W"H ʪ F Ί(((((((((((((((+0c&jz/l!NDB n "8cQm+o/WM|zX40q Zn @H߅bxh((((((((((((dwfMWZ^}F'j}kqU,</OS J.Oz+~>9g4H ʌO6=?%Za;OcI}!Os FeU'^i|O> ɫ՚>>i?b_ i,OrMr~K{ĖQk_&>6/:ki?KOfFՉ򠁊煆4r^Yx1%$A ׳}[$G@V?KtW>i ٞ&׬XoUTY&hU{ZnaKg@9!xQMЩk< )~+< 5͓B֤K;#pC}+-`j,E7 yQ8|4+EQ\'0QEQEQEQEQEQEQEQEQEQEQEQEW'/4HGRI qi6W^06Y%I\*/. P>ZZuoɥ@"#~>z*8ENqT]>!sI(/=U?fiO^H`Vk}}$rKܠUuQjz9k6+_(ǹvO[+ N ]>,qF*uousip%Q␫Wq|S̯aezGu9d+"+8''#*Y/O[giF R:,7t7zokZ1/pDe·?M|u¹W"Ul#`u ;WtQE|Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@^Ϫk7m#2O;Q=&_o;gk\걃_{y?f9KnzY~QƻQ~y2N~ik~$hEDEL1I=ԵFo3P$XOrW}}e%ʩz&țbMl5ʮVJ m\k>{W@ =~BUK ۻ)?̇g/_DM+O+3EBM;XJG2@^?}Y'Lv~7Wf~fXRE+Ϯ} /?j6ڝ !l?a[؀kb=Jt5fOF% 8Y* ( ( ( ( ( ( ( ( ( ( ( += Ow6V"<ˆ H_nfRL_!t1ŷz_e\O_E^g7:a}ӯU}᷒=OqBC[ڱBq^Y~ 7uD?Jb[^SI~R8nA~?[/t3?fO|&uiZ0o^c*+]+\.[52!>w:D7Wo.kL٫ׯ'/>x+ըwFM?io |</#ői:y:Us $[wH)ž"FQ-O+8Jڟ3z+1{R=ß!}FAMu' WQӫ-VgVR&FQE`fQEQEQEQEQEQEQEQEQEQEQEW +I >鼓>^%RyCOݡ?MZ-/Xmh1;I+]c޴8ck=(>x((((((((((+]=H} Ԃ>wy%i$fwbK3ORM{iwoJOpȺKRzl VM-5ˈ$xOu8*6 pkopX\"zjQݯohio4~o6(/^1|Q>CFԯѭj4^bo/eZ޿s~}?PisCso$,Z!Q>[*JR|DSʛRi-"K[FKv5w/ xJ~$Rvi\&&_PA!ɳ Du*i7n_h7J+vg.P}եz_>pk~"kF&w#e~l{A@9^UmRo4'aK/*풼}Wvf97,I-̟U=EWQEQEQEQEQEQEQEQEQEQEQEʑU$+;x{+; 𭤥lNAǜ|-o~vYdtA6 _̬`+V[5yy^dO\*Vky^Exq_OJg#eV։pI XO.ri%V4"-gU_}:XF4۽2_-#Yp,XlF{XIvRQP⵵[}+2T$ӍFK^]}E|z_x|TGUmrM7V5% mOenHrAP2ߊzoM:]X*HZ4 F$ *R{kk5N?s:p#]6ɟw 1&g,xI4{hD2:0* ~}걍WBZc>0sjZDSH_LIFk3e ~ɴf[鉼8̠,X iBt-]T;Ϋ$.%4n|7]g~,ִ+,=AȽ)τ,j~ 쯵~+ޡvx=kZ<|a z? ]EȄ^owj#Iim9*N :4iIY[F}_?x䧄èӎ%W>;,mUZkDh 2N2Iz~|㯉4VKxXgDL1&!3#@DDm _ms^x9l]NTTgԡ^)rf FҒ/z;ݥzAqz2*^Mk}u_ zj_|[u#VZ_ NѬ^+`yCDcZj_'WƝl4wo;";4&fAG>LT0 sӝ.MD(Wo<txZ瀼gN"@f>Uga -['wMrW4y ~MW{K?{Ëv iF#i\A|X Osr7C8G{z?ǰQEYQEQEQEQEQEQEQEQEQEQEQEcķ~({;88bLXo0ռ_MMdkx]S!G'kύ ƝJBF{9w9$~X?T<-}Oڷ,ݕلX9XǕBH[^2!< wl֝wWV/]>|BIbIie;¿#Ey /J[6Sh4bH2Yik4I,r 'WeU^O)'i"W4\ˤ%ˬ]82fE6k'ec\5+S< ^x6}Z-SZեomYgHR6IbM $Ǜ<~ooײ{Y?vTW8m)wv̞mtoMOo~;3J񞣧5o}2+=gĶZYIn%V)fIĈYYc7W㯄wz!|]j:leV;y%.i#RUUC2^=9Qvm_ú:ἱFƋ߻&7i.;F}N/ Z|'JZN6r2ʲ%UȥVBvPk/6}#}nZlE>^6ע^_g3Blf&}xoS׵=3Htqx֓Upo,B+!W5/ hrh>5N\:>!IۦdxZ3Mh1rEysC֩k5尺2%Fдvj0OݖS偁etT̛YZ;vͭ(_0v{&wzz-GU['%<-Q E'>Q@=8־Afi>Լ_B謁a1\"EGUUۗWgӣj>+d((((((((((Ie."&XA݆/ϊ.燴3)W#035ͼ170 e8 +~6/|{ĺYX\j:k( ur@88⿭l>Q,<.VMFwlM{Mh5/G.xZ߄?iUx'm9J[Dֺ>ϴ#1LbPˍ|EF_G{>,KAԃٽj k3!b0F+M OپN*ܕ{fݯ7f_ ^ݯz-Y>+((((((((((+珏?nxwA1$SZ؂=pkz.d?t^ُM ޾# )ϫ[/}' `sƮ&ݯI#;+;wc$4di8!N_ZvEy_xZnu-SDşڌImT/,]LNq fTj*?ģ{螉b^FMZZ|Q_ g]-WmG ?kjpڅ͝Iث+0{\?_,e.u mSH.PK$u0&dQ^|=mguW]Sܧ{rӝt_Oujʷ0(gܡ9RAg>kz>51!h㶐D̡;SzrMyNתi{+uJ0WꯤyJw."ǍN/|'o?ۮ] ̂iRi% d@|2 }_.YMyYƬH5٬qujQw%[ZΓN-ߝg"{/]yQG=pz 6Z] |,Lâ#m9"I+r6 6²T]ms0Qv̮םQE~~|QEQEQEQEQEQEQEQEQEY|GZ[eZn}>@'>99 xxZφuT1́>X+'YдoaiZʹ8gpazuW^O*sC^~5k  i ji}ޗ Z]"BP@P8p> k- ѴAK{J$p!2$ TpNp?SDZ e-n3N!^2t)?Mv]"'Y9Ě[F->嘏S^>c|;B Ng\:Vpb)~'žmi_b%O8mcA_?|:Գ/P D3?@ ?a4*ۍoՏ$~MŜ}_5z1۫켗|y14OW(@(((((((((((~3Zw-Zgc.ĿW<Qzg8aQ^CⲌJA4k=,5i?UѣI,u 6}CN[uim$*ѳ!#(YGC\+x<TsZT?lWio7nxs+ſ O2]kF__X}=sZ^)WY?~ۗxcbg/5u&?H~]K.G殯v?:>|W iSgfmh+ցLFE%A*a_U-7?<zHu'$8d$a}IxseN4s[i-kɟeiK۪_~ x~4<-o k#n.u ]Pܝn|4O8|3 hgk%t'ko[cv -Fn+ogϊFWڄ@0Gu'iU]d;{}deƤSW>eм_ jsk&F/Ӵ`kq!EIɪw_zG5]JSRh#{럆6&AyYџ:[X$`֣>n@!X~u(Yt;[?qBG8JsS+$չ[ꎧqIHižh#KbX 1avī"mèU`p@gh_{[ /ºմ/ iT0xL:֢"S"RS}sOr3f۝ >3 r{megW=_:[i~VLOwz}o0ZE#$\*(F/:WH+eo?:sU*%( ( ( ( ( ( ( ( ( ( ( (1 ǴZ&,;W՟/x G~$[IWX4NR@:)^&M"MBQ5;_e/]W<,>N ʛa_l(M~i ҤVTM >*F0 #Ehq|{Z/3忇yk! 
ɷSyp_+EUHUFT`N)sni[gwt^oنg+QEEPEPEPEPEPEPEPEPEPEP_ ]k׌<n&tnǺvGMj_n xFWҡ{)WGj<sԟYBޯ[Ѣ1uu4Xf1+c]>(+}N r'^g:Lb-촭Wos6Wx.)DHUdfwݖ4s%>x|m?YF񅧃1mk1i-9.Hȧxv/|Yc YYnx4Ȓ`Pp3ku>=Z$ZlWW[I>l-O쪣$[>*\J6mB'U9R~ޖz=wN_k w|C~xw@tVFs#'ȣ.K$|-gjq^_|Gx/cDDnl0dںOۙ:扥z$*~|L:w>'So~Z0%vQtz%3q6SJ6z"=]O:bUṡA/9_ xwLl4  {Ww,ǻI?Zׂ [(`5 Q T@:Tq+:R\V?>8c$~ (@((((((((((k[F,B_O+zw(BW^_^5˖Q3|6&jv?)"J51̿Fy1ռ}wV>ּSEMuE%Bqژ +>YjmɊ Qs_>e<{U<=#r ~d*;.ST |4_V,OҲ5Տ.*摡KzCKX XPyy{ sMiwMwOӆ>JxFjB 000}o~>w6z~_.s'7 ;v&kpԃPNwwi]]ޝ)4Rv$Y-xķZT"wٴ5 .͠2vx=*τ<'y^n#;ɴiVY"Vr!U0@i~ּ?Eo)W$e[q]֝5Ce7hZJݗaF kjCR^Żm= *\.Rz{mc _5 wua"/fHo;)m3q|o/tt 3@E=?NX1TDf h&}0cbrGЩt Wؼ?XV 7V>_yOoWf2QC]eoo `wn+^S')uA^Eeb1b+eͱʸҫUL(9(((((((((((NY~?J=j;Qe4*袊(((((((((((((((+>2~Uo?-{xO8;ʭ%(((((((((((((((((? :J? :J ( ( ( ( ( ( ( ( ( ( ( ( ( ( ;Qe4*?dEdk((((((((((((((((''U?ĵ?d*=(((((((((((((((((7u#U/:xN ( ( ( ( ( ( ( ( ( ( ( ( ( ( ( (<Y~JCQeK*k(((((((((((((((+Iß*ŭ}_?_N~U-k ((((((((((((((((X}/S+ X}/S+ ((((((((((((((+?dEdk!/(i2~Uo?-{~W~^|6G#SEAeth}6N+'? 'kjzд[]8Z5+%$W@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@?Jo%^2ӧ|_Ox/^=a៉èXHU(uhY4$…я$J I4( ( ( ( ( ( ( ( ( ( ( ( ( ( ( (<Y~JCx#z|F|M_T{/iZiڀHX,fX#nǀ|W zujޭ65=4dö2ƀ>(((((((((((((((+Iß*ŭ}_XMw_cӵ+֕js, ,sa$pcrӍ']GZηx[4{]OT[dP_j@EPEPEPEPEPEPEPEPEPEPEPEPEPEPEPE|Ge|U< A񕞗M^oq4l.>?ڟ'5:4EXnc_boSDT悏Mj(?Vgaa OES ?7|SA[tQ_Ň;FLC|C#:?pV'?(/s#ڟ'5:4oSDT悶;w OMj(?Q OES ۢ?,?1/1?7|SAG&?OjuOh+n?? NQ>?(/s#ڟ'5:4oSDT悶;w OMj(?VFC]_D񮁢jVQ,)Zc躖T0k2"'R&rj;'g8ׄe=Ke+O (>Z+⟆ho~i|?;j7ucD1p_Zl#33? \&?OjuOh+g3JU)sIj|qhb\)ˢ#ڟ'5:4oSDT悶?,1/1?7|SAG&?OjuOh+n?? NQ>?(/s#ڟ'5:4oSDT悶;w OMj(?Q OES ۢ?,?1/1?7|SAG&?OjuOh+n?? NakZ ; 'u]Iu-"KcvEPHak?,?1/1?7|SAG&?OjuOh+n?? NQ>?(/s#ڟ'5:4oSDT悶;w OMj(?Q OES ۢ?,?1/1?7|SAG&?OjuOh+n?? NQ>?(/s#ڟ'5:4oSDT悶;w 0<ǿ|V:?j5y^glݳn~(d[|]^NcB)%cr,elE9:~_QEW~(h5?u4i7ך Vhr &F}_xRW?ӍJ2G )٫~h?7|SAG&?OjuOh+n1/1?7|SAG&?OjuOh+n?? NQ>?(/s#ڟ'5:4oSDT悶;w OMj(?Q OES ۢ?,?1/1?7|SAG&?OjuOh+n?? NQ>?(/s#?hMGǚ'oO4F'T61Rr q F틞 OES ۢ?,?1/1?7|SAG&?OjuOh+n?? NQ>?(/s#ڟ'5:4oSDT悶;w OMj(?Q OES ۢ?,?1/1?7|SAG&?OjuOh+n?? NQ>?(/s#ڟ'5:4_%?P|W?_/khýFu=>Fw֦XE.q̾+ȣ_* ԫI(F:02uOFw?Eh(|CcPaςGÞt(_ꗓ6Ml~(aPdME OES j_(?zJ 6FN:ω3|],L' OES ?7|SA[tVY OES ?7|SA[tQ_Ň;F'&?OjuOh( NmGvۘQ>?ڟ'5:4EXnc_boSDT悏Mj(?Vgaa OES ?7|SA[tQ_Ň;G]Ku? xW|9隟F2׍2lxU|C:pVgaa OES ?7|SA[tQ_Ň;F'&?OjuOh( NmGvۘQ>?ڟ'5:4EXnc_boSDT悏Mj(?Vgaa OES ?7|SA[tQ_Ň;F'&?OjuOh( NmGvۘQ>?w_6:&_[ FHIUmJdQr@U ƼH+߇JWO .7F(~}G*qUQtv~GtQExG؟_i/~04/4K&eQH4bO5VImCXe7|SA\oŸ|ueWjT0%N-t q8z&˿ OES ?7|SA[tVY OES ?7|SA[tQ_Ň;F'&?OjuOh( NmGvۘQ>?ڟ'5:4EXnc_boSDT悏Mj(?Vgaa OES ?7|SA[tQ_Ň;Gj'liנ(5+mwnH.6v# &?OjuOh+n?? NQ>?(/s#ڟ'5:4oSDT悶;w OMj(?Q OES ۢ?,?1/1?7|SAG&?OjuOh+n?? NQ>?(/s#ڟ'5:4oSDT悶;w OMj(?W}+W?~|3R`wC xnI5xdX羻߷:6VVO8oHg'~Ӈ+N4V3?ڟ'5:4EXnc_boSDT悏Mj(?Vgaa OES ?7|SA[tQ_Ň;F'&?OjuOh( NmGvۘQ>???(/s#wf xkOB:V{QRňH_ 1u$&?OjuOh+n?? NQ>?(/s#ڟ'5:4oSDT悶;w OMj(?Q OES ۢ?,?1/1?7|SAG&?OjuOh+n?? NQ>?(/s#ڟ'5:4oSDT悶;w \y⯉j|>'Z{.c- LKTrW1(OF4'?,诚>žǍy/KD=_{_|L> 񦷦Iңn4771v;eS{<ڗ%~gφZ/h<XISU-4KxVV*"gdaTg㶇+ž.2י?=<}ڟxNjof jڷĶj5SǬ.Lkv֚jʁ.rgbڤ>m|Mg6Ec$kZ[%y/ۙP|S?/+v3ǁRu CߞHBlv WS5/? u/ԨoSnnuOڏ%>)~T(" @Ep8^;D4[;qE_7RlF@ 铌WMU%׮;? =]y^׼aoAfy=N?\+lE, JѨ]}iCO?x~ &x{QIӯ05N[.[p\-|5wKzwn5N.AnچKQr~l5kžG<-s'q3OqcZA,Y#P:\7[GiWSalw掠oGͮ{O<]5ςVN[jhQ4m3'o -k/} w~!Լ5ivZ,WL"3N?2Cp3 G"RдN] [g5j/-bbP1"@АvsCsc}a4[WukEѸ<`!pi< o$bxx}_~ o%֭{hlB21km}'ә(h^227]m4Zk{k kJ HiC J$rnV'qQE|^Wq[G_ѡR!)Y&wQ)M!?K4+_mJvB=*;D[E B"`g˳q~it[mjh,!oGB)`py҅Wkoo9'}OKOK֓{者|+_ V~7Ú͵Z]\ŒIHZhH&I~͟ J_^k}E]F$@#͒4nZ71Fm߀<9'G#iZ]_iv:DgrĪ$dKaTu߅ Oþ#smw8Io+y@q lca@RFwV8Pn7}'~6|A4D+xxÚNhš:+@1w'2ua!F_(ʯ*k;JOѮ¾W?Eh3P(_ρN_ 篏1|a!t? 
4m{w݅oU`F Q?{or&AnlbĹwO<ס:-$vth&2Vw}?_.~zƱþԵᑯ8EgsB3 H$Y0T0f'6-BSQ ^UUMUIf.%߁$[84k>m-#tK4P!@{K/[u("F6vDPUv zmپusӚtW{|c>"ǃKj е^KYi]-Ii}5ӪaR6lw~j[-s^/}s$^gքZ&^&vW_1$vG>(Ú|!\jI}:o ٨n0]O<$h6r~{@M= Nf9}1o,KtZ?>,_ #TڞAssi#e09tg/ə(kn>*~&xW'u_j>fQ>7LEY$NЮ -#!_7xV?3: 6֋H,Q3*}~]cvbS=Bp GBEsK5_'Nn{[t($e0Cy -__]ZӔ&];Z~wz9|AOj?<'#Foi#] 49oaծg`(f;K4e >Bxſ[Ӿ*xAyºFwtf%K;ZPOX۳ :_ |,weLF5-*Zc Mŗp;I$`knR]BmJb1&~}Kߥ8 %ɲKeӯ+_[x7ž/ϋ|GFhǹ4| l͎$Ĥ p@m`~g]35x^xHM.[Xlۈ̮*9b8&Y?S|i,r$Vz5zCoap~㽉9'9ɤ_3$λ_3$Vs͝N9b1І9SJ' *J׿hi+ۡNxj[m[wv՟%x{W[x~'4<3cilKdkV}>I3$B o]G?'| |QѾ!gV]EحƎio57tÐJ >i~.ֵ3ᧀ4sWhumB_$t3Hyp=sO~|.tgOxGI[k-dʪHUe,#=)BKwI|5޺[mKT~d{-9|ih`kBll}HZk4i-O|G9~O-?>%xZA[F.Wگl`T.fx"6 1޷?x:c\_\3Au>AdFR&UN3x1>jwUMLhW<=g.;>k`8/L+;eU8'}{ը~AEW}8WÞ7DUO:x㯇-egPSKkhonS:.ᯎ.'=#Þ4Z^2,yc  ]&FVzdX*XڻPUΗĚ-4rḐ_WkcO߉?7×76} ;Rr8?SZjp\I_GkkA^@]GQ~|#t-gK|;^ݭ[[/ZMV0$C[#ڎIs_M4RV-w\Tկ/8Z^g: nn-a&̫!O\M=|]*,'J+]ZEu~wS~4ɢj2xC_d֑}6{/EoBX m&?9Jj.kѾ"x=Oů |TF6~&5Ox Ym#^_4(4Y4,[X",s2Os(ټc.k=<'xK߈w%okvJŚM4Z ┮P @M*ten}Ӳm[V~~ym>9ź5J]C.ccAa-v*vnu~ַZ:x:᷄+-CY:k}JK7H%ǚpY)%eZ|Jga xUPi0gnRYu; ATO_ <7Ku}B.NЭᶺdy#T TA:%NjWO^o^58ljqR˗kj!ÿeXc^ּI* 7V1 \][o3}>8|~%skSZt-KL᧋↺omzg Ċ1'uO|_:ϱx^j^!7+j0e3 {+sxCG\7~gͼ:@Uʬ28 ~ iiˏ]xrլ 2Bcز75|Mv-z=.Dqp|=._džŶMO~3& =7T4T]Ŵw[ʌOs`e'ŝ> o5 .MsTѼ?Mn^J%MN[{sE/3LCg7 vDEibghb 1o_:OvWW󽏗/T~:-3z?#“M4xF@&3H\LE#+ȣ_* ԫI? |;u_7!Y$ȡOPGj~ |#Ѽ[I_LֲTU+}qxjRtZio릖> EXo^+M]~,͟EX/Oo ^hZkEvoAjZo+T/_!l]ώn5(mM]zPwyk gWLE .|ROXex+Vs}OctKK+Fhd( ަj ~7мKxUU; nlFRх<`1[;\Y75w[C0n7n[~ Rp " (<N HODVuO[ߴw4#bEAܟk &>7yg?{vͻ>lgoGEGFAѣ 'l]4;I6qbNrkJەޞzYCeIZ-D'"w^x ēj:<$h4(2!eKJ~0{,<%EE_)3dX<h^-Șy<Ÿ:g›c/{[$򯠭/xSMwo ?: EYƁqҧK˭ Wo)=ھ23qV4-^YV?T_]Z49oXDvqh$OFy/ sisk77݃g$H#)`vT+Rm+K>a6g _In=r29\` s |-S>|=mO[]omJ0 ry#'V^߿aB1J_SV|e;Z.5K ҽ -m$0ČvPKW^/]ރᆌ5;'ZPK`|Fr*X9WMOM{)n;Ɔ4ɴps 7>m#Zl?r0Z J+yP2d v88$i|{oC]ŏCxE}GLCR-QWծ.[EpNw(gxY|m_j*b.[[Cu7֗0K*:!|}> ,f|%>]9tbZF*a ,F~Pds>x:|2{=>N[ (H'Ntw^fձEF[E-wK>![+gL|1'o\{k;=RRV; _oK/|?bo4v'f*f{[U"[UGAYCӾ$ٺŶPj/hT<'Z/x!+htMEKsq"8frv'hl^nV٨"Uj*ݗ=wO?P5=6@ oY"ir(扥|%Jí{n^Lx>!O]hEwPū-pXs d,p߄.|>Nu#/~yO9} .ž$KK{#1<@Y6d,0J;ODwi9wq[+;+?[u-SӾ|GZ񆳤i: GIGwnnb M4wȧhbSx?eo 3im[[5y)c\C&5kM5e@93oυ _k^(x#K&Zh]^UE@, ;ɨ tڧ4χJe4;};+;~y! NH'_AIPկ6ghoE)F1wK^ڥѫw mR]6>&|"5--֍usme@ߟkwGxVmh|OxګxtJyep!}[|!S?|'4}i:Eރm-&( rs ; xSR]Lj>t5iI5.V\$mۏ‹ywW*M;ZͿK~u>4[O>#OG(cb7&\R$22]5H ;Y6n"68uo }7"Z&;iOHB_ۏLOs|4 7h/ۛ$"ó`|g5{ZeĚO&}BVWj]Qx,Exk o5 .MsTѼ?Mn^J%MN[{sE/3LCg7 vDEibghb 1o_:O_wĿS|Q{_* O4-On#΂1!rc)21G7 |;u_7_4M{L YญSGFYHj?µ)_1n43[}O=<۱wJR^GO pQ54[uO5?%yv M6S\jzPppblu]WŶ_tZ^>3ӼWncҭz;ؙpn<[%A|WYkW>^'|Ӽ5~2|{+KUF֬.wFseX5}[yᎡww9%X حc3S]Q4Vt4c:AoFgֵ 1HHʐtmY4VgsxmΚgd5i$!I.Mso~+^aYvwcsc=2q֢vu_ߟ2׺t(/_=~ؿGI{߈z֛HKG̗vEۢUSB^3>%\3W ֬4Ni:ǹgU_4 ^#ןJ^:ү5/ +urR .rFKI'$&džo<}׋4}FkBwH@v)t*Pvj69o~鹔jQu9l{|>$|p|7;^ӼX ?)-PҮ.$K囓'@$޻B~xA ]5jDqMW+k#I(xɖ܈2w k&ևw ں2iP#HLvɶ8E ´]@xYbYƑy'޷IG?l*F0]+/j9í='@h~EW.Z $[tpxk^''KT*^'OcZFM,b,7I…ofEQ6xG2VW;oyʛgdܻ\cis\_>"^x^wr:Vgh&vf$ J5Tj:!~NkOm<Ucx[s^{?Z) ֟#[i,pVq?"R=WKVq$(a4.~bm"s%n Oo YxgV$4<;j(k $+| m<OGo@^9J4a\`8bNv_ƾW[<^2ZIV䟗*սW_>ߋ>-%5? \BK;-C?5 2B.$bحNе6O| n>.' 'HX-EZo6hi?/}:b-nè݄F° n8S|%[w7_< q{I|]Mݬ s6mp7][ϗG[mGvM/--ݿå~>(EĨrx7Sv^jzvz5r"1\K!d9ߗ7kϾVu;\Kyw H`sly*Oj ~χ5k >i( fBb UH twYEf kZveqi$q{gC`+kWS<+;FN{icv!X|qhj A|Y3گ4u zT2I)YK 6IBP"ly{[-v[m'9rK;/'m|w|c?sL>tԽY4b3%I<;( E1+̴w?iX7[ z ,FvaԴW1LB0#˳d^ ;o xoTsyZlWY7Hmw]g Gs\ޭc E>:4FhȉIU\jܯ߫vZ;p\mktOg'tgjLbYpNkW{߉>8x=|w4-(Ѡ\bȟngfs~^S ˙w}mg4IQnO>#j=A~ ?_]6ؗ0MȉVPLkOHg'~Ӈ+˾|yO-&=2Q{[WȌ E$1^#R߄?6vkn磒J2ƮU*(>+?kE-d?k_>I? 
tT xS ySQ򥀾al{#,.T+7Z_]o[O~-.-|/P{]BSyڭ4оiس_~'kxú't)Zm;VHe%$I8"τ_  sCk;y A&)  }El<}w_vn~qCNK]5[>cM]{#xYcݡx$Fۥe_.k +ApF:z_;mS5x?Ő_x(]!:Qk mD~oSxQgiEXk\2_Z9]>woSTx MW|EO^ǩFqbӪnbX95uyZ^]m5pjV[[ux_QS  ۛn$$pte`| >|0FWxrkz?J~$mLll`KˈFRyE''>\hsɽ5xJK')Y")dr$_?oK~*c$NմخѐaYQԨ H&󻾏m{F GVw}ue}g|]]@~juNwrkmowBF!d o KMj5_$QGttXC9|](U\|2qM' uh&M/DU\pWY>W66we͝ĐIn]v#+x8G.Xo׮O_6ou;bqj{]%eoi6VgRh_~(/6SG{ wv:Vq%JB?Y6G#1ul^kƿ JdP¿=˛R#0irwn^sA׿9 |NW~>|xO F?<9!3=Ei @Ơ ~%붏M}_]M~ݽzh^w쉭G>'Y[IfkCq9VePeI8 S?Ž[2|g5:m t(86t_+O.Cg{;]S#Z!Me1"09*?>'g "PDΪ&x2}Mo(QJԹܞO.sR𞵬<Z4DӴmR >PV jɹo"6e"},SYXOk]ɤgHʯn$c^&V셕Vs|;q|闚D2Ir<)S+;3I9&:]]{- j08< TЄok&*r-unk|CaxM]sIPۡWK"Icu5AO.]T+̯mោ /ꗞ$5yO𶗣GYXٻaYbP[\7(%U_&͏5/xS[P~$t{y2acspS~*FX,}QxA7zvA=XlU穢'*J;57.OM_[ MOOEd|kEgt+7:T7h1^2]j+go~$VffPInAoů7~2^? 򵾳'KK"me>iljsfybX_o|<-K·߆utX6"X#*#@1c+ZG/~>;Ox~٬$bK"վ.VZU Ş,Zf. T('c[PU} S5:/5x[UFt^J[ E+)dF0I4~Ҿ(j^7<-?Zf;vI;;rL;sўRjj]~+[~ X8J*;z{:y+ozk~+|l,|+r^MͦIu/:4J7P`HXռ_ccC⏇Gۘy^&q0o,3ۤ~_+_R G|;=~+Oۺ;J-LBD &9Up< > LEdn;~t⢝𓖞o}l6'-ON9v|2Εi=@:ϊ~&Z֗k+I)iV(`~,2U|iKE|Kl5]6ŸtyZggđo4>U;/[VqeU <֩o,k"1gj%͸$+30$rh85ۦitvKMTvV2Mo>7#H߲xS^m.dҴh^9ee7 \?f%O!xg¯x+:w gQ8Ӧ_+>D$Glmqh|wu7>7A,~ DmA\ X\7UPwdV՟6.&$WhV+J_,l۾t]zmsԿdo3IxR^?do3IxR^J?C >kZo=x_xO]B|+(RYGUcW᣿g/?vZ_KeQ?Wi_KR1ρ?Ժ{x4wE;yg m?WQ]6 K3 <y3ǨOQi?5.x4wE;yg m?WQEԻ??߳?[izh-=^EsRG~?^~ m?᣿g/?{5Z}!K3 <y3ǨOQi?5.x4wE;yg m?WQEԻ??߳?[izh-=^EsRG~?^~ m?᣿g/?{5Z}!K3 <y3ǨOQi?5.x4wE;yg m?WQEԻ??߳?[izh-=^EsRG~?^~ m?᣿g/?{5Z}!K3 <y3ǫƿ~.o^ů^'*3f;xKwrm2"'R&))ku`]?Ӳ{~Q_(~QE|Qxj8o|?[E{A$,#WKG!Ac\4wEd) dp:-R:3h-=G4wEf Ov᣿g/?߳?[iz->OG~?^~ mj(CgOG~?^~ mj(CgOG~?^~ mj(Cg>||¿#O]WVmeegfT4YK;I$_G<<;=~Wߚ7>}WP+>+7੾4Ǟ Gxc?ݽڼSľg:۽3r稯zs߈J)OWf_FgV?᣿g/?߳?[iz[O.j]h-=G4wEfO9v᣿g/?߳?[iz->OG~?^~ mj(CgOG~?^~ mj(CgOG~?^~ mj(Cgn"B͵pU_Q/_U8?e-z>N vOu+(-k¿~ݿ㏉M>+-wVS 𮀢EIdV(YV_Oiy/KD=]}F K_?<?Ժ{x4wE;yg m?WQ]6 K3 <y3ǨOQi?5.x4wE;yg m?WQEԻ??߳?[izh-=^EsRG~?^~ m?᣿g/?{5Z}!K3 <y3ǨOQi?5.x4wE;yg m?WQEԻ??߳?[izh-=^EsRG~?^~ m?᣿g/?{5Z}!K3 <y3ǨOQi?5.x4wE;yg m?WQEԻ??߳?[izh-=^EsRG~?^~ m?᣿g/?{5Z}!K3 <y3ǫ_?੾ Ǟ >xݽ|S^_lݱgkcOOS~*_Wƿ~o_ů^>*Zf;[7Tt]#"Oh-=[? Q_UWԽu迭Οj]=_?C᣿g/?߳?[iz9yv᣿g/?߳?[iz->OG~?^~ mj(CgOG~?^~ mj(CgOG~?^~ mj(Cg /L6x8WBT~C>8;LS~-_ʝ,IJT#vWiy/KD=_ O6|P5?Q' ]Y]#w"t=p$#!_jV1xo.jZW#P_sqKmesml#5Kaa|ϘVVN^E _\]f|WNk-mQO=AiO kJv{M-gˣ| x/gh쾰4/ n56+hUD, H*2 ^a~&/ xWQ]\G2vzXu8g=CxaO 9/&qIiFwg5ǔ^m&\G9Kψŕ-Wö%ռ1x Eqoivؑ%ëU4ۛ%mxST׳[yi=ZekM-tKRbhb݌<]R$0cC=¾58 VaLBPNZIGl8{O h:%֗:͵ˢlY%GYf+mާoX/Kz.{aOIo$P̂CMXbv0b3׽-:~nW;|do%՗[ujiW6Vmy-ܤ[3\JDjLV0~>h|mit5-簺F\$f+S,Ѡ.UA`Ex/O/ƒxMNKg9t{}9 CnH ee.M.n3=ߎMni^$O|5j(x 4ԊKmR grJ3J[}^-m߭6_N|<~4okjtz\c.+!_.!;BΈ }w>#<+PLӍݽEfm)夑vs|4x^Zx'[~u뛽WTc& V{h&Q9wYdcU xoEִ=}/l5]>[X5C$Ha + Np4ӮDS',~GU⯍ |x֚lZzL ilQ"FJ𲲢-84 K StXIk m˹6r!rO%cx_]oi N;xqd?2a!F$)\,4>^iJ{]Jq< r;o1Y& +nL4iVoM  3]ww姮=ŸoAk]WR kAaskp0[(Aܖ%L0°<_>g฼YAJ;nfm-Щyc)#dee"'ک c×:Ɵswi(-bG1̑It^7<afߊ/ :ς4]7 }&IgoFoGiU 9W)Σi>^Vh׍m/_+t3d5=;Y g_&Ҽ5_%\WnW*`!/c\xNO\xi')YPRE`*Vʏ '>:7[,wZZv[%,:Pqix]5̻> *]p 5۟o^_MIEĶ\%UǙ, 7mT]*kgz{fTy,kh^NM#F!/t=K*O-Vp[op'ID8WnO R]cq~:BГbog0koߌ/>1𾏧ZR 6Rex"-F$1)v!A|T?c+5Lo$?M:h'drs` /nRZ]ӧ6mz蘡 6|ֽM{ѥ|Xe={mm>e5y2$v 4wr%:R0 w>N 쫣\jWvzݽ^# yLF1ܤZrigV^.iܗ"mtT%E6 h{EXYƇU5ρWUExZn Kl5'մ.V6]6$M̜%[>}筥{>of{93~n菭<|7}KU {+95$1VVnVVu_,ȗV%ʲ‰$b8K><+qOO(i=O?,펯XjEi-~T6iB/jJ C|g5|e-c>ĺUuOZ:@uL@LM[0UJR]FE8g&jw=[o 4uym ƒZ nQ 0D򌎢Rvf _(ʯ*k ƍgRj#~!f=:ЕdFQ/_UWY_ֽJNQQ}cu?Eh3P(_ρN_ ךÏ-u+XmcS R\mrH #ȬI Ҿ;C>;!,ψ<5/|c-t﷥- fH&d [lC*l0Fj"k? 
x?oxgMeuf-V1du8MyO7?xZ5y&t=gP7ⵀE}{ibŊ؊>M25/VE+}SĖ:ͻ ROoh >-bq;iE$_~M8Fj%xEk-{eyc:iaώ.nu+tA(9&)RywL7M> `⻯ i>*7m5ߎ$*/c~:OzĚYaoG{u24̾O`ݹa@ ]|B|eaAu9oZK㲖#G=γ61pW5uh߶ѽ70꛶+k'Vl_|sR g|l5 _:ƻq-,q%n,wlވbqGeWÿ1OŚ;"-2ф۹K y xŸ>>|B_ c^%r#ɸL$MhI"*ܚe~mK|Oﱳe߇Ay7tckH8R Rgf^0dL1__c|S.U4ɬ.Ă-Hދxik[J,nc#IgI"@|=w>|b}M_ -m^Z}xU kȵk{h&'znus:S_G}aNt7S:xL<6"]9ukiԯS/9x?¿.MeOJvz?g'i V0@J"#i,>o>|}O&4g&<]o ~ z2Ja0$;4"V ]kMI⛯.qQ+h,i=Jkիj3t^  MRX>l [! 4`ʩEįVkjI@m۷,dL;J |g ~~0h7 'UŖmZkN J[|ޏG\x7ziigs=/=M|+CNfhgx&Deme8/?a1~mKƗoiԚuӵX bGW'{'^xqG,c㲷Ky78,m^M-}~~ۮKʋK[m;i"_𵮹o㥾Q힏{q \3G mr؆7%ՕtS|wMoj/EçŬ;Y`By1*:y!~&'OG~e-|a7z}؊|gAyhZ"ZUv׉m/WV|MuMSķs\[ScO/"K%"eXqru%}Z;-]XzJ]ZC??N7_G<<;=~Wq>?P+>+S~"*+K:\Z|jZ5-OJ5Hu+mBtƕ{vm+?թPEX{~L ֟ծd͠{ u0[R#B={F_M-}mnۡS7+ed{GUk1xWᆷxN_h֣Kι[i)4#WR*J¯ Byu. FYrk ĿM[WÏX\W?n59Q76@Kye(Ki!9173Z|-G_ hTz9tX HÂ9: )V_~^W~*^_uկEoƿWms~:Ayi[\4V6^{!_!EٜXgPy!Ӽ2xß~ON+{ˍf;rBte_-$;_o^դ$A'%YѬ_ қ(YDѲ%Ub"$@GI[q^OͮtƝ5ݾm >eBglD_gKW馭ZӆOV߭K]=| WA6幊&HFFgTl[ Wɧi|)O xk|Xx !誄rF2F/<~,+m+Ş0Ҡҭ4[; khgX\I O#SVU,z?>x}.t{GIv@Fv䓏p\k~}v?+7#UsmFyAISku[˅MhyTi!QcHϠ߶NwICmBZVN~T8B$/o$խ~_iWux|GG43˦v[{FM :;/_Ÿ_Þ;>!I'OVShwZ]\ww Ti 'ew4h?{t-ҡ^Vꬵv.ӭ.kB $!>tQ$fgk+kHV{M`Yi7x-顎YJ"JH7:7-KJiz5v7Oa[}EvO0DT0Fs⼷JIF'ijWZkie}eۦۿ4h鐂|+HGHQK[m4?K'4Ri9{].}w'25-/.um>k3h$fa%ti A$ EF?ic g}mhHn$ X 7'er?~l.< _Ѥo sw\=QK ܦF!7 .mӵ~˕?^_}v}k?4Kn->PI!.&HayyvĿJX:DVZu^ thc7RHD`w8#ǯMm- RҼEk^b|Mg-j_gy] 8-Ҿ|oѾ-Iq,>>q>Zr_Yi'":d #di4M7*No^Kv -mKK˝m-OwZ,II]B@PI>~,Yi6vFmħFpΐq\4d"  i~\ ii5x{[4-JyteI,U!FYIȳ'<j? ?m]S]5 .6A݈i >X-9sm߅ zw5Tp?~n[-I|7דvJMAqsn!A4K[V9cWV&@v/ڇEbW%{֧ZAS'Be7[z/U<~xh _0xgž}a}Bt[K)c{=@h3M{Ĥ|%MOuKic;KıE"eVG~.x>%PFi ~l.E;%č5t ~MỘ<=9&xڙ,Ήmut[G`c$BO%K-m+vr}-/>^Eߣkh`𝦱#Zu\[kX`-ɒ(QVpL$veo5}_ۖU"!6]ȼ`X-&29d/_6Oυ/ko4YwEY%U^蚞}{jEܨZ]H)}e8l/&Y[?ZqcE<( ]k+Wiuּgx.5KZM Z /aI.JFh l]k®?g_&xO&Eas 6WWǦ{HdPzZV^v|i=ءK =}g:ꏲ|+_IIۑo[Og=ͫO2$Uu!u;p{}&\g+eGoY|=ӵKizN]\vKt*[´hA'Q3R|uW[Iu?]۵:iF/DE%AAJb/O=ތʕ*,Z> ZxJǚlg*oo<ֱGx֒pa9(I$uV$($Vo| eW?i5㞳|jWt|i1d.4Ԏ1]2-*(_ Fp>*x$ NFgލ(CG?麱/EWʟQ@){i\eak9n'pKx $ GWI<uV''$ңB- P/UY!rkts폢ڍ-kši< FKǓ(`Jdg5|b#I1xgT Fڞ4[]ۋHe+*OsxiS{ɻgcxO޻{+_Ӫ=jU,+:<^j6-9t) }=wɳ&oڗND0Cx/H垽Bk-mپ]6HD4?j^Zf$ n qLwH]2|aX )]5GM>Fn^ G]Fиc| uO-l#t/}N缗X4LiRZɻ,|'ǟ.&[ZxmmqNdH킦JoVH UCeuz:p8/6?g|.͡|Yo ?izOm-[+B$6rA Jk5+O6^^GIĞd00E|NC⧏Ky|@59mfV?2X`T)O,Wp-d<_<χ^]_WhӼi%ƕ W6E)bcc1#*pkE66yRN~˾#r;HΝwͩGA1 i eb(QVʨ*9e xo$ZjxGVM",tۛ S"osrI~xBGAxZuojOE=ŔSj1Vp OCÞ;S/nϢb-úOXt&Eo,r.M VAE7vknwm|E߉t;F(u(J綁Ie´~qy~A? m+i.uK;>Hl%biXvy D'ḣ\G4_*᎑jռ7sewӄ_l{)<3,mk Hq)cŔឣ^^uINNR$ R<4n[jxuӻѶ%Pw-m}Nj?j_ïz6!]<6q\;Y-QD|ºxJ_\xj"ho5L[#Y6p˸WR~%4=.~edOj7R4iqmHf HT ^gG7K/euMWCuȅH"2RcEXUsVoEg{6zFiacw{7tiiJ|*ҴN2NM+#Hb9F)hMGL𷈾jzOFӠ|/zAUPR1ivf%3h/͟5MO:Tz BVdHe{{=ȸ]&/'Ox %2Az|[FNԯ5m B72TܚXZ=Κ2Ir*9 ;TG]z#:Mon }_־%qX{67yqE)ud(@ee`@ ⭿h߃7om/4k?\&[,o9ˁ'7X^VE\O|?Yφ&>2ҼGu܋UxD VFV>$kU<5mv)4xg+[]R"P(] rnz.7{?$կT塞{_>+h-čqum gqS\Z xxRO9u_ w;U:x^?U?!< Ҽ'x>wῃ)]>\LdEomӼjodLp&Z(Srퟔq+xb |=|MG_>!n/o%\Un- ¦ӷ̚ۯ޼KVkko}A /qw}^$-RܛͥZզy{VD![z4mg/K,C2]ܠhX0ɑS+3-KLxbTޫ}B캌VS)ndk#K$DH^u d>kOUx?a^(gKɯ.+8gphxw7k;sYkwG]%o_zmSrFuWbό1j׳>26WeLقyWfڤqھ5. F%{Z -%i"Dtg*ʻMhj[?~oy?hO}BQpyoRNl?_{‰5}&WcṲHhH%f3pb[vNի0֗muY)}❗4 Zwm@ܺDo0F#5x@|As_jQ[TДbVkϵxW¿[^Ĺ9?.iom:#JdMF8}&*%  IIEoMRk|`uC!~Fq,homȬΞcqckc{=bQ/u;k%;EyM"i>bxO2㿏Px6g=:jvܻMޜKk e< 2E^>\ ]jW7:_XPB6BAyJ= [ 5in;?߮^~&i$[[?"L1ux07\׊~(hE攱xo`$19fe_ٽO{K8BNѢmBKrITr`Їo5|:֛}w"K{AmmԗFi l]ZaiJ!<91Y/fPI +1[{P}{dž<.wźvӡ[twKII2͆8\*0ʞ7>x[.qxv^׵M_=N^ k@>k%>"jҾxĺָqβṲ[(Dr[H hV~>|*~[-;^[b0$M}[CI#9>bj(ʫ}Ku-jR^\KoվvJ=/W/x|{8h?%A$l2;ۤ+ d?j?_"j\_&[jֶsWn#'#I)}9+__u[ş K.XwPxˋj5ArEZ4߄%'Lk~$- 2XS[K2-]#2ͅ,6R^yU޺Zګ3'K=t+IG醴>%x G<[ot*!pU˦9[LWΟ?5kؑG[4BאD՘N࿍-?f? 
5 ]h³sI4Sɦ\_n#*+Gy 1%1絯c5w@ζڞPGmE{{G.kIQ*d.IÙܭoDI*drD=>K W|W~,P3g=,,j+Tsn+ԾC>8;%tm&Jf9vLwP?!?CN(shkiiC#R}EW̟pU5׿׀~?/[Ws@MjnmK[o#O@[yUA$ ۿ3AP񧋵v-h)q%{$2$1ꘌ9 Wއwo-z$׺sg Zͻc#I YY9 Ț'Fxo Y^vK\ضWWuKo$Hu[oRk҅:N~7OVzL_e~/MRMwk%䔚GD*H!+ڋFϋ|7 c\oi># ]nr oc7. By>|h?ֳv a|O:ׂͮH'[y$mH1mhٳq\6 k:5ohZu׉׵=3P_?N[Yfu<7gNU -9}ۺ+խ^. ;m.umZ>h巃SLC'd.O'6Fjoo*n}"l5KM{t$ XUFf!yh 6 "&y.[ŽW{IJ~VxS]:2gm# Icc[f)CiSE}o}v9񦴪WeoV_XoakYb-b-~m5\͡V露o *B~6{H%|ask* 6M._e'. p]$f_5YBʹx?fxW7AT񥮹/zkC.uܴ |V(ϧ˜}}CPZ5vY~ jP'(]CQw`"2w9ih5057ގ4k=QQ:֕5 c-JR"N߉(kr~o+^lq}n}\Dpe.JUx#Yk_ L|hIYtbFEgsN>rٿu=D/.Ч_eKȔD-´mz߉t;i^"^.m i1 0H?8y u]/ψƳI.o.٭2̖*HZyqrzWU{?dZSH!G.}=Jlnmy\X]˵\$14DA_woG)>kDvs-v&&ډ"ۿڣ m |)ѼqLO =V$t}3_Si0 #28g.v!֣Gn]o.tJ+}BJTJڣmyx 095> k7t/`k|L"(^Vmp=Wƺ~ӟT}nnj.tx"HQ615|Wi_▆`Fw-CC#mFVkiLr3Ǽ96 V^ү+v/[ТޏW'}/@']Oī4wm&RF$$щ {kh.'hEb"c~>ICxZ-i>=i$hby~Ҫ ^D7}gqi{E5kƷ7W5qYr!QY?|ŤGVcGΩ>獵=wNiἶmovV8UeX%l6Nϕͫ]{5ѿMR{-?h;WT𯇾ZyW|삎O+ #&̙j]:þ#Öz ඟI `,L°Ң1 XV?~1ou>?eSalKw¢PcŠ~~&~ OI>SHIn{+r`TuXC+IJ_vwK˅<%3[w]>Fͤۺ07^J $ qʇvA Ǜп󤛯جZfXgHaN{24,ʝkÿ=iuu? xcƺ"#*مvq#G=M˃d~t ƒ?)}|}vzw? j+e =R}HgB Hȑ N+G x ~/.ly)$GlqcEiPq^Gxo᭷{gMԮYᅮKڣZ©#ZFm.Ч_Os {Ǣy6:ݴrʆ&|a=}MTS_^o.tm7?i5Yߊuq?>)m8Nm<J#}qgO ;Qu-6mŝn;4yI1v ;(9+W vgJ r_=V=F4'?,F4'?,诒|?j_(?zJkMyv_;i-ۭRv].ũ[*R!Cp g ~\98]OyoGQ^3CQ3P7?k^WO;_g_Ex EGCaT~ ?yG;_GQ^3CQ3P7?k^Q0quWaT~ ?yG EGCs=~?{5?7?k^QCQ3P/g_Ex EGCaT~ ?yG;_GQ^3CQ3P7?k^Q0quWaT~ ?yG EGCs=~?{5?7?k^QCQ3P/g_Ex EGCaT~ ?yG;_GQ^3CQ3P7?k^Q0quWaT~ ?yG EGCs=~?{5?7?k^QCQ3P/g_^e[EUOZMcaT~ ?y\_tᖵ/_}I̹>dja cRӣu`ibiu߿EWQ@w|w8x^\Ei+۫D^׼3Xx-λͪ@9E+fW,P2ax/?5/+r5Gc'wZ.{5?7?k^QCQ3Pz/3XC/x3{>xcռ%&9߻rnӻ/vGVtO[m_?5/+y_|fgg ~?ho*?/a#٨_?5/({8:j+ho*?g ~9_?ο=?5/(_wfgg ~?ho*?/a#٨_?5/({8:j+ho*?g ~9_?ο=?5/(_wfgg ~?ho*?/a#٨_?5/({8:j+ho*?g ~9_?ο=?5/(_wfgg ~?ho*?/a#٫̾+ȣ_* ԫIo?5/+پO|2ִϵY>>'lMVuL>sd>RC q~Zt} 8M?yn?XC􀢊(=?O/c ִߊo=/eúH枲k 'i2,Z(1who*?fu=忟Ex EGCaT~ ?y]</8=~?{5?7?k^QCQ3P/g_Ex EGCaT~ ?yG;_GQ^3CQ3P7?k^Q0quWaT~ ?yG EGCs=~?{5?7?k^QCQ3P/g_Ex EGCaT~ ?yG;_GQ^3CQ3P7?k^Q0quWaT~ ?yG EGCs=~?{5?7?k^QCQ3P/g_Ex EGCaT~ ?yG;_GQ^3CQ3P7?k^Q0quWaT~ ?yG EGCs=~?{5bx#RWzO?5/+_͏;/*o=Gh|Ulo_ŏ_qp1iCQ3P}^OGN:^&}Ex EGCaT~ ?y[squWaT~ ?yG EGCs=~?{5?7?k^QCQ3P/g_Ex EGCaT~ ?yG;_GQ^3CQ3P7?k^Q0quWaT~ ?yG EGCs=~?{5?7?k^QCQ3P/g_Ex EGCaT~ ?yG;_GQ^3CQ3P7?k^Q0quWaT~ ?yG EGCs=~?{5?7?k^QCQ3P/g_Ex EGCaT~ ?yG;_GQ^3CQ3P7?k^Q0qu߁ώN!WaT~ ?y]i+?۫^8׼3-X-΅ͥF[g AQm%3)7E{9 ūI=>+O QeK*k?kE-d?k6gg ~?ho*?ofgg ~?ho*?/a#٨_?5/({8:j+ho*?g ~9_?ο=?5/(_wfgg ~?ho*?/a#٨_?5/({8:j+ho*?g ~9_?ο=?5/(_wfgg ~?ho*?/a#٨_?5/({8:j+ho*?g ~9_?ο=?5/(_wfgg ~?ho*?/a#oLo(4ԳW~Ʃ [EY_j͝[4Z&F d29P@,+j|L6(j_(?zJ<ڗ%~!|M uO^X|65M"Ml<4o44>D)CoT(O(:ʶ{kfQ_xgC/T{\C$Ku<,dCg|B3傠nFψ:>jMu[x[Q?֥T1\0Qs};Qs7:Crm}&/^]?H诖eKQŻ|?G@Ҿ!]X ]4$,{ENLi#,#!|^𝥥忎me.k\E.%jH*6e Kq`m:o2KQVΰV9?{o.}YE~i5?~ЖW5.hz./E2ZqK4GK3fxD.%K>%zm6xN/㨵YZ$tlW췫 $7 U^tZ۵ٽ ga.{~Q_xozs|S>/gE꬟Zl0Eu_UjwM#Ɵ<}k,-IҖ'HR]OΆ&fUm$c7O+l(_+ݒlK R1ݵq_7ݏʿ|`N?Rl7C \.ngxctudTHUMs+a[ˏw.q$BHRFAR*^2Ic<4ॼOy~Y|kmgnZ׾kZAŧ^=_ȊV}&U`HH>2h:_[j_>p@fEe0IդIbO%Eo)8q0J_uQ_v_<^gƯ Xm;֛A:QCIec¼s.1$RY@?oFy`;WwvRF{urVeW (J*W_~B׏o> $F8xPʹR:qA39XFr[׼o@|j.jV\YK="n$[Idin JH$ZMEm_8+QUtoܔW&㏉Vמ?fi_ƭ Dn[wȏ&lIE:/>*Wày<&->1Xk=y.Ś 㞞%Oe7󒍼ݼݍgpKkemo5u5E~\G5OxWǟ|V|K/fTXM:Y9"[xUn*27?k_$~~.w3m:M<ꪤMaWNZou|}u7owc+|< ߆x]?bwl6v 9n,~p;EKjk=N_ CMYl+i^Hdcmдe]LB~&qM_=ӣL8լx|+÷W0 -S>'4cB'ѴƷ0ͯڣiK,@m0:2W޿{5uKmM,}/ۧ+/5I+-Fu6RA`q0eN0k|}o|@ռ.V\YK="n$[Idin J6H7N_ӯ~)$2m'z[>2"'R&mEGNAJZU+ 8Kd !H&R&o+ȣ_* ԫIϟ k]3|=RӋeGQ_*~QE|kR?ߋzu73_tG:S +z^W6m,}M%ȳBv:*Q#Kvz|vm#Z[-|GeC*? 
[binary image data omitted]
pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/ligo.jpg [binary image data omitted]
pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/montage.jpg [binary image data omitted]
pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/sipht.jpg [binary image data omitted]
pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/periodogram.jpg [binary image data omitted]
pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/proteomics.jpg [binary image data omitted]
pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/galactic.jpg [binary image data omitted]
pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/diamond.png [binary image data omitted]
AՔ)8ժȾ<(t޼~j+i/%߹m*+VGRs -i1&"2j!S Npŵ4ĄkWSQS]r7^;WZ%G.MEz ^ɓO`[TSU B pz^1`bBQAA~͛ 733o\KOH[AȂuA9qs>U+WB,Y9R F$`L&x%!!=fFBB*3 oVŻ͊k9@AZ)2JՠȀJPe3!؎$LߤLt `/HD; `XPb*]2@l,LTR`Dl6ϹmlK4h Fq+)V"|ô !chX׌Vք*2N1-NޢKΝlZQ 'xh Əyf /t24&4aſA"sA[]AX uԝ*5C.x¦f*.Wh%2Xz 5ua< !?Q~} 1@qUsE230ESSSsq%'GW"X1|bo 0VOW3~}<0,$ԷB_oA؛9 p) 5Ԕ ;;?'db4?`JCjAQ?WSwqW"TJ~Քwp mxK0@q .C`NuG0N-/4v!=,_G744KOZ,X姥%Gڃh$x_x{yxhT_'>,Ȉ* QG!(1r_dL#j3g=C$@ hQFQQQK,q@p4B P,%n2?|h9 }PAe*>I- ةի״iE#yI%Ǐjﯷ$, n XBI4P0'ƚ($@@Ձ ቛ-(Z[lqFk32= :w_ ΁[E-(V:)S6mڴe)x + @Ձ |L.:XeϞ=\䒛uY%I"J4iO\R(i؂b?C$ 966vLSI cQu!Y%Ao`{iooo0# )`N̙#ѱ؂X6vįZB ̞=֭[cƌO s Jʕ+c!H 5lŒ$@'* T7:gPU!C`짟~r;i 펇vޱC MQF}ׯ_Ms kPE@unKipتYr~~~ݺu۵k7w\5IHl"oر===mL$>0a?p)[3 z h@2`'v[nСΝ;[nm$P?"hH]MEv5%%e|UR[i F ͛7_vmL8ńLX–C$.˖-;tuEk!i{8V&h U 0ͳ*#@؂R*^qw}gE&%Pok#a7 YF!iT-hѢ4C$` 5j߱XB-(Pbr1KM6tM$R}N6lJY`"t2|p(III곎 M?r *wIrⱜSE ''v={e= xw8G$#Ы`__ߏ?oxL9$@eX gtR%޲[Pl,@t 4ipS* ѣ3IIWʕ+{} :ZD$`FIƍ/_OIN PB-ZB׿7xB$F/bZZ6PqIb P$v߾};6 ÆamڴQuHD/ >m۶5] 8GRuH@\R,⑑3T'|o>L;TE4HU80qD<%G` HR8-$`"`콅 H؂px\zٳg| PZja^̙3dm#gԨQwܙ={zL%$@nnfJMM0H8###?I&aAxP|'Ojvȗ7,//v]v`u8VLwYnF{{{́[P\YRP3fܹؓ]\ O |wǏgt|g*(Oֲg_?hC< L_?ycf+:%aApСF́B ۴i5ktM&Pm`8)U-[nذAPo@Nrrrv%0jZWD%ى[޺ukEe@I@͛7矊ԞJ+[P:*޽{۷oٳG(P}L"111UT"_R9IRUe?+撀 ,_'NT T]؂L)V~!F9~8TTD!y{nL;eC-(q`/bv1̥$lͻ|22R^؂L)Y,[ >*N*']VW_}?W4O؂"KZ)l䑞_JG'0š |`o P&(.fSRRlu'5sʔ)ص8<<\v6`"cWzzzSM^^^PmW2(Jbuǒ#GD7k'~::aGZ#i p]R󣢢:v8gHH@zm۶:u (Jz+[PCeZoرĈ<ʴZ: 9s~N`XJuEGG׬Ysҥ3R ؾݝoJu:f?*ՊkcM4Q ԛTD`߾}-ZXn].]TdMQ$(tn۶-ZW-$P:t(,,ܾ}Bj"PME2ydfi&ظq#bqD;M-(:1=F P^߸q㧞z+wjUcY%مܱ;vuWԕE`ɒ%&LPYF؂`I~8pZ-Xڢ :gAg1[PpLĚ(.\X`$6]|y̘1j3([P=uoqqq>>>게֐ ֪UW^1cr` `#99+ETMfϞވ#iR,(u0`'|:h Ȕ@zz:6.G/""B*R-Q 0@{Ȑ!7}tYjGH@MM'Sm4I(܁R?88%j۷o2ZCr$aÆ'GY{Y~Q#P_mU` 7??ѣGcիWUdM!G c?#___)GH͍ӌȎ@AAA:uڵk7w\)GH@-ܹԩS\Q-.UlAQGU`qrDQ94dH̙3?щ C؂'A0 :::**_~~ԉNO>ΝX|MU/ի>Vᖭ]GjԨM$ /YfMn!Chٲ%_^fzQP6N:ڵKfP{αjH ݱcG۶mlu'غu͛%T؂0,/bZZݻ hѢEXXxKh px=pЈwիW+O 'N( m I-(eMxO}kC#jA #:v:[PDv!7nro6!!%yEmH, IDAT؂h6#' Zx籛.$YYYիWׯߔ)S.U$blAრ ÇG%;sLeK-I@Nf̘;l09)E]H P2!FD%A ̴iӆ $  0@ᓠ YQ+aTT0`<ԡ$`)(b: 1<+CH@n޼̨Q0$5ծ]Ϛ5KRE;Snݺgz{{K ':^L--Tcƌɋ/J K'||dYalAy Ț@aaaz6mpBY+JH@j}=zhllԺ|1-[kawD*+CH@ƇgϞԐZ@͇wH@TR~~ԉd@ॗ^JJJڳgF:T&jd 7oԩݻ[h!} Ȋ{;Xo^VQY1tQo./ Ȁ@vɦMd U  hml$ 5O>'hJRI@F6lذsν{H'B` ̘C6^y啫W߿ Fh5nZbĪxAsKJ`„ ^|Zp%K`^1>2҉M؂b6f  Ǐk쯔WDt:]ݺu[l9|T`$0lAqJ رc/\`IJg$ +ͻr VTl#۸1X%..GFjQp-Zja`֌3\[2K#` SR+ `ٳgPEr#@jj*vbԇl#6n%# }:teJ *p P%&v=znԨĪxp&]vuf9M`" wT[X~SKp@Nrrr0EZ5X: 8'X) nӦ۶m+,F`֭:tުU+B$ %(Rg#/޽yEP2 HE@׷h"<<;RrI8ل)_Xj޽WxJ$80qDgB$ 1H<ɓ';@y)0UaÆ 4駟\]6#`B,ʵ8V`%Z,H={K8$@` Y [lA5ܜ\œ+GEEu믿vEy,#سd%7o'\DoMHH5jc1$ HǞ%СC@B"Uz~M2YeP. Ȇ[Pd *~!3g:G<̘1#77wذa+%tHǞ%@XXؠAƙY 8@rrԩS (dFBu@`$Vq'H3`ɒH@R P$]B 00pĈYBHΚ5 cc,H@8HV^%Wvnݺ}WLa$ Ά 0a  Tnj/^t~i,I >>đX)K؂"{QA(,,7^pDR @߾}=X ȃyZ+yիY Kkttezi,'E`(wQY`*U`3X$17 @o߾gF"Y ȃyZ͛;u{-ZLC6믿ZjoFF%EСƣl߾VG."Ю]; ݴic1$ ' P CɺutY Bv&MؒyH@(܁T&ʕ+W8~}1 `f=X ȒA[ L06/_r(l$d+7nO-(!-owǏkZ0 8N[n˖-ϟ2(dO-(wty… 8G<͛.H}l$` }H l%bA8lk #ɩYf޽g̘`G"E:MNN={CR Edzz:߶K 3 0@Qi"## 0i$|*H(N}:;I3$%%Q:thPPÄR (ź;@ yC6O1?0@Q?i|}}?#LzC| EWJ EYx71nzGI@abŏd` F[\r~(&͛>c{1? [P[Zf{oժUqqq>>>Ibnx@nnn5z=cƌnHC(A`ȑs)q$:;55uĈE$^ P[ZfH4|'IbnO ##c G$@`^ { RPP_ܻI^ӧOY֮]^^^Ibn[Pp37mo/]dŋh1cv*\L,(LW^-ϟ/(m~aǏkC>- (¸G_۷'N 4Xxq>}3J#룣BҥK3KW^9tFW0jj7pѣf5jd0 cbb֭[ץKQl$ PA2!вeˠ c1 ⒬]v+I@0 Ps8͵;ڵk/VZ' `|`vٺuk! 
$8 Pǒ йslm{D &7{928MHh;L8C 6l!vP"+-޽cPF D)O m \mոqիsڗSl\IVlzL0!66v6f&!,Yɸq㄰F P)RƅAnDpax15 F-(Q5(رc/\hѢ,Xٰfp8$ňGw쌳gr7_vXӏ_H<lA)#=:11?>/ J`Μ9n1blp8$I`С剏6'Ec1s7ޘ4i= XC-(bZxaðh̙33q |C -'` #$AM2%--MH4)))}< Xό9HbԩSK\㡠xzz{OI8'G; > IIIYOi<xHܿ# [ p䘏5j{ƌ+WxLN '@H KrarիW3%pe<y 8[PEr&ͣNhoڵԩSZ-!ݎ&G6h ::zѢE곎LE&j#uExO?6h܊%njC$@#籥d ۘu|iOOOY|Թs篿ZEf%TH5׿{XDC@`ܹ=j( p*8/N`РA+|}}Eg kԨFSNUA4K-( 5S?0==/T-4ꫬ,4HM S/πL0Eh0>--m WA4dMCT@&&jժ+W~嗱޽{5┧$h P>*<6lڵ={5k<x߾}͛7ߴiSǎ3撀Hj va,D3\qvo۶MqSaP(*p"MP?u[n}HAPvݢE 즭$  P !ݻ'''O(d,F !.X5k6U%5ЪBJ!0~x7Ɨ_=Pz58|PVXTF :V&W 6l%q' 6|g~%KI@X3ӯJ2ɓ',Y"UEq bGrf8%Y` Ae"oڵԩSZ-;[zN}\t$ $v-}ѕ+W~yC- 6kH@ZlA?K{キjժ8Y5j{ƌ2P*؂"iF|P={vZZڈ#HH@r P$w@dd$Q&M!4L<ߏ:TD'E'KN`Ȑ!3gΔ\>}:Fs$  Pd!.O:}=Rr6mСCօ 0@s@@"F^Q5@环/D@I@v%TH@^͗7o~嗣F\*@$`$i|H@j׮yf͚% DRwY~ٳgD Er=z~{%q/^?cF'rpu P$&)$kѢ%VE뇍O8!ݴN=D"믿upFH 4X=V*X. @I@z>::/]T +W^9tFtKr'E~Xv-,?;;z?Lbf%p4! 6,##64f@(~rH53K! 0K;'} '`WQQ 4hƌ"C??{0/ + 0@q%mE62dem!p;wL>;|G` 4FFu%p :woC^f!ȳ\#f͚h]NS߼y󫯾b(b$0f0Q] a^z a '{\\W؂" XJ{}˗-#vK.G}D+[P6*-,uWn6mpmYK_ɓ'Z%陆H@>؂"_P(hnj`sΕZgϞ>pщW*(s[XXaÆ矅`᯿ӧ971K1 ȋyڐ%VZիWG"L$i;rJ)|L* EN ͛7To&/c>={4e9H@^Ԇ,$iӦ.] Yff'پ}Qǎű0@QCi@ڵkٳfL} mfYr"#(r u"K短[޾}{۶m-I/H%۷{Ni& UFB_@"؉$$$dڵe `"_P3(C6m/cD*&!PX5j$TLKӄ лw .+U0qիW_t$j\Hq7nX|cebcctl9xH@ ؂/QG(@~Ђf2&`ٺ&M,\Pv6A:Z IDAT[Pq5 U/?[?xy61e` p.$ooٲ̙34l󣢢:w_wIB-(J$=ƍ++z} FR#\NJ>Xd z|}}jcvԬY^6mZ xH@؂DQg(a={vzz~X ^"P,(u' DDD~)֖}`,L~`m$ (A fΜ^-/t:oH@(܁TJ4Q>쳔U{ cTPAF0Q=OUJ<^^^V}5uTU UF=ÇGGbbkΚ5 BZEb4cOH 77n{5c 5w&4\2..(*6؂"ij ͙3իj5@D.]$>$BSN:[\湹?ٺu+j4F  vX>>>^}aS3щKHD-(&< U(**_~ttETeGƺKe9$p,xD*#bŊ>};v^z1ĉ 6\lYϞ=Uc !x ^oڴO+wCgٍj}+}_GpB[/7W}qoX]<3 (Eʒ@Rod]Jz''vnfBVzbv:rJ4u`rV㎿[M#emק-Գv2u1ץn{F4zAtzzDU F_ſBՀ 8EpcJ `'$f]Ls!̔W2e&d@@QFZX aLq Q/ OUAHO?g$@ E~@L\jnǧߊKI~'K Z,#7ݯha=h8܊ xz?Z;$zPjO׮H@ TL(r&%'8~q(wn.SK]8̪! {&JTH!R& p1(.H#uuoE%9| a-W#0̳>y/zҼφWx\$@ )"V)7%\jZ&WP\L8R%ټY,9 j3XLЄ:>Z|LH$P:(sU(@,BC]Jv唋KSԨcf30@qUT9l7bMO34Qm5ciuE.O**H@h Pv?MܡjzvXOj7ZԧFc,$ PA2 ?ojo<~_z!ov}SlWu VxXO9JbhHꏛaR~\WyFZxKㆹ\nX7嫶_陀HD))H@dXo3> yUܧ]IzXOݭv.ڻ&-^۟ݠz}+{ΡN4}Bճ xHQ< }.׺9I鎚׼no>Y~<@ꏛ=*;`_'ҳ= 5>^gn>Xkxl0n ZH r`Qo]t''SWG9zr(-76p(ҫF}.e J1ʭ9>bJVdi; XB˯JY` W+p1|(=+}?oqgƲ;n};z~"CilE+UtRO_$ ]x;{ųj@ ;dF#ɫaկx=U ZbLq^k5/s895 r .*}n?fRH5hI._LڄAӈ_qNDGihgg)ڐq+xas_(mlQ^WXrΎxyi9xZ/ @HtJƖ Z& 1Dqa G㷸!%$<{{.L .%h#BL=+r{؝anvZ``I.6 0){)7t~hL24nN\Ki#$PxLIlXfO 5n ebɥlK|N!kMcG aavUp nm۰01YQNKZ phjnw,mj[ P0&{M|a! 65E9yqn`ݒwk2݃rM  =JnJVZ _hx$pF{:H jTmae?_oSAΔfu²|Sz Lh!qbVi&6-׺:q =r.|k%KO"lGLSɐ(cV񸒔1(4F6p-:EzАcԬpٙ!E_R[R3[wB)x7/'` ?0ӧDwFHtS])W”- %OmS\t{گ۴Lc/`bqO>c[aSbAI#?}tƠФ0#3|Tnj*D/CeyG=YRǾZմ01l\mg{yXo/KP%voGEfy1)b@&ג\1$Ө/²dA35nvjږ(cZy'/;<j RނP[ʿxCa!:RB^ð3>|2wm_3v)ћ)lpG]| AJ$@cƌ! 24 <G(dۗqL)xSpBջ1HQZV:Ŝ'r\/(84:4X!m槐~Y*bٕ' O|c-zC/0V Mu[8EUӳrz7/&ʸ )Fx@m9EкW ֪76.;% 7 c@~YV(e` WlaX2t(/L#( z{a2Uj0C, <ŷ|,KզaݑXnt<=|WtNJJm]Ie)@_~VpqSlY;yT,~THE<0*Dl|!^ G`ϣ B{<0K0ZW(q.g"_Q3`: P}&\ؗxĝhS "& 1 %|yEV}$EHh(MI<| ~&\1a bΪlT"&ΘD@HJբ+>٨X˙fZE!E9@J\j锛0uLrB. TȐŎg $Xyj )uB+ sp@ˬ$ PBIQndII8t1v\ﭜL|KQ@6th4hBs@U (R TSo+$ 3 PdCKw\H}-3B8MMs+*,xk܋0?w_ B<^-0SZ 4Kz}K#Yw#+zf*VaINON]rz3=5nnEbhg*pb#z *|0BQ// 7o`_P%+<POoYBeH!zK!+,Yf77'-?'97A L1RKԤaX=+!$''p$'WY("P(j&m!"7HUpt+~O8IDAT8\0)UC0xj8v"/6-.+bb 3 P̀HHH@z/ (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@`" (f@xJ$@$@$ =( `b$@$@$@>شIENDB`pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/download.jpg0000644000175000017500000000174011757531137024645 0ustar ryngeryngeJFIF    ! 
#'2*#%/-$+;,/3@888!$=A<*507,5 5$$5550555555555555555555,555555555555525,5555555565500"/!A"1Qq3Ba2R$12!"aR ?xU;?ki'U5Zݘa:Z n <7YN".vHNZ5p$nJy鑮_26w[m  &ImPH\IqPP wܩR\ }X]`R:>JX3uFrK(kZK-իͭl9UV˜c~Bx%Jr0yNm}t5[ĈLp!MGv;JHQh05d!n5=|0흷<A{tGEV^[@wtt,Ta\bW:B=ct+tUIV@ߥOV6ʭ}Ф55_i*K7]Hzj'mvGyJ@ Ҏql8r{9:=cXh,HJj?*J[GulF͜My7ǚAK%dx'$n Dp.oHKJoJq v#QQ~Xa~bSB# :\YQsqqIc\\^PsmQJjf/M 2^z-1S#gSGQe64,T0SGtYŹ'fdpegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/images/info.png0000644000175000017500000001003711757531137023774 0ustar ryngeryngePNG  IHDR00WIDATxڽZ UUs8<>ࠄ|<+&F74w%!PfnyT^+dzJK(D?B(̛y 9g~l1Z{Q5k=11R՞5葎 TO0SL{\] yX]TF  p] ])wg ٗ뉅ċ"$Xw¿{vDӮ;O^]yҨL8f :re:T , U ð[{ZlmOBEX#[|oޒʨg| h=h98RZ+>Sx"q*a >aD |uaomGXw  oO|Ǜ+m=z2*q $G .D&AI^aaB9דRh5m\uPĪWv%s60y ;^_黯`c MrП/v)(ݼ^}xx;a{GapB>v^m6;fVQ B[*31"+s&/7섍{)c'ƯUo DBhƓN4Z   "? I FCa@40ڧ@-LUϟo߽s}ӧC&ܲ!Qy3Ǩ=Ig `Fb`XX6E-mF2L-%{3yJ~`gvWv@]u>ZJ*OǢ|PۖXF,Y^̆H.&cyOTy$PB3Wukxu :v1Ͳmρg FS}د6{=;QE L,Xyz"{z{^x0H'%p@㕫oo4zo`1%} =}0^zLTʸjjY1z gr@+[+S33pGei CRڔ(i{g-p8}^޾c$*bIK`ڲ:c H!PUI~a7gI>8@O-cSߗ6msڥ#meT?{! Kxi%/AKM` )bTJ J^>ePOИU0N.]`Dp('Bh[)r -/B2Z$HɵE =(P'.6t]Ur(pC^Ii;΂xB8gCZDm uC=> x|6Ck2+m(D`ߝ3d<\䐖?B48 X,Uv%o]~={qU-0o (GĨ XI+e*F-YL W3.`vPHXޒg}eP_,AI]2ܒfL=B:R'E "v!K+UyNq;5sN'wO=皘>+z"5`*RN`SF:i8hgdMI&.ya6 ՊH Ȃ uc~ѢNn6G iid4$lY_B5B,22U@" e+Vk;\b#k A&JT_0(Ɍt੖OY3o> 5PbƯJ(³͞8׺i0[_H <6edr^ت+~SWjX#n:_.z8´,&4[r惟|,l;ba6VNʬh;<(5t(5UOɘW!2N2H!>L $Ԋϥ[{5vZ䁯:L)?420uPRtxoJ~TUĢԊ/=~6~ /<4 ܌ðK_:V @17wxsn-J[l <'db]ހ|('&.2 y`֋>-GbQlX"qx%MMbPpfT"],~J^EޑtԯYWv$!hG@IM Jgd^(44VWn>Z{<裴PD8   C*pvQ^8o};:BH6*:wp왼F?cBFP)[ &YE|(@ǩybX=1&9A=p0Q'g#c4]x xB}eZ[[ gNEE0wUНhS^G!Ao\Oc|ʾ堢 }\n?ϟ??9F 8qcd8~๮ߣun 6ۿnuْz~4` zJbHv\i:^GXln7+WIENDB`pegasus-wms_4.0.1+dfsg/contrib/workflow_gallery/help.php0000644000175000017500000002642411757531137022536 0ustar ryngerynge

Workflow gallery

pegasus-wms_4.0.1+dfsg/contrib/qq/0000755000175000017500000000000011757531666016124 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/contrib/qq/qq0000755000175000017500000002530611757531137016472 0ustar ryngerynge#!/usr/bin/env perl # # show Condor Q, assuming mostly DAGMan and Condor-G jobs # use 5.006; use strict; use File::Basename; use File::Spec; use POSIX qw(strftime); use Getopt::Long qw(:config bundling no_ignore_case); my $condor=0; my $globus=0; my %condor=(); my %globus=(); my $debug = 0; sub usage; # { } sub count_deps($); # { } sub showdag($$); # { } my ($rows,$cols) = (25,80); # will be auto-detected later my $isa_terminal = -t STDOUT; # auto-set with override my $dagid_width = 20; my $args_width = 60; my ($machine_width); my $user = $ENV{USER} || $ENV{LOGNAME} || scalar getpwuid($>); GetOptions( 'help|h' => \&usage, 'debug|d+' => \$debug, 'all', sub { undef $user }, 'user|u=s' => \$user, 'terminal|t!' => \$isa_terminal, 'machine-width|mw=i', \$machine_width, 'args-width|args=i', \$args_width, 'dagid-width|dagid=i', \$dagid_width ); # --- subs ------------------------------------------------------ sub usage { my $me = basename($0); print << "EOF"; Usage: $me [options] --help display this usage info. --dagid n width of DAG job identifiers, 0 off, default 12. --user id limit output to the jobs of specified user id. --args n maximum width of args to be displayed. --terminal permit terminal ansi sequences even in pipes. --mw n show Condor RemoteHost with maximum width of n. Columns in output: ID Condor job identification number. DATE Point of time when the job was submitted. TIME Point of time when the job was submitted. U Condor job universe: 7=scheduler (local), 9=Grid, 5=vanilla, 1=standard, 8=MPI, 4=PVM, ... C/G Local Condor status and remote Globus status: (R)unning (A)ctive (I)dle (P)ending (H)eld (?) unknown (X) removing (F)ailed (C)ompleting (D)one (S)uspended (U)nsubmitted stage-(I)n stage-(O)out (-) local job IN-STATE Amount of time the job spent in the Condor state JOB Summary of job and arguments. Paths are abbreviated to basenames. Kickstart has its own arguments removed. Seqexec counts the number of jobs. EOF exit 1; } sub levels($$) { my $n = shift; my $s = shift; my @s = split /\//, $s; if ( $n > 0 ) { $n > @s ? $s : join( '/', @s[ 0 .. ($n-1) ] ); } elsif ( $n < 0 ) { my $i = @s + $n; $i <= 0 ? $s : join( '/', @s[ $i .. $#s ] ); } else { $s; } } sub trim($) { local $_ = shift; s/^\s*//; s/\s*$//; $_ = substr($_,1,-1) if ( substr($_,0,1) eq '"' || substr($_,0,1) eq "'" ); $_; } sub dateme($) { strftime '%m/%d %H:%M:%S', localtime(shift()); } sub interval($) { use integer; my $total = shift; my $s = $total % 60; my $m = ($total % 3600) / 60; my $h = ($total % 86400 ) / 3600; my $d = $total / 86400; sprintf '%d+%02d:%02d:%02d', $d, $h, $m, $s; } sub fit($$) { my $width = shift; my $s = shift; if ( length($s) > abs($width)+2 ) { if ( $width < 0 ) { # fit from back '..' . substr($s,$width); } else { # forward fit substr($s,0,$width) . 
'..'; } } else { $s; } } sub kickstart($) { my @arg = split /\s+/, shift(); my @result = (); my $state = 0; for ( my $i=0; $i<@arg; ++$i ) { if ( $state == 0 ) { if ( substr($arg[$i],0,1) eq '-' ) { my $opt = substr($arg[$i],1,1); if ( index('ioelnNRBLTIwWSs',$opt) >= 0 ) { # skip argument ++$i; } elsif ( index('HVX',$opt) >= 0 ) { # do nothing } else { warn "# unknown kickstart argument $arg[$i]\n"; } } else { $state = 1; push( @result, basename($arg[$i]) ); } } else { if ( substr($arg[$i],0,1) eq '/' ) { push( @result, basename($arg[$i]) ); } else { push( @result, $arg[$i] ); } } } join( ' ', @result ); } sub seqexec(\%) { my $r = shift; my $result = ''; my $fn = File::Spec->rel2abs( $r->{in}, $r->{iwd} ); if ( open( S, "<$fn" ) ) { my @ok = (); while ( ) { s/[ \r\n]+$//; s/\#.*//; next if length($_) < 3; push( @ok, $_ ); } close S; $result = "[@{[@ok+0]} jobs]"; } else { warn "open $fn: $!\n"; } $result; } my @cstat = qw(U I R X C H); sub cstat($) { my $s = shift; $condor{$s}++; $s < @cstat ? $cstat[$s] : "$s"; } my %gstat = ( 0 => '?', # unknown 1 => 'P', # pending 2 => 'A', # active 4 => 'F', # failed 8 => 'D', # done 16 => 'S', # suspended 32 => 'U', # unsuspended, unsubmitted 64 => 'I', # stage in 128 => 'O' ); # stage out my %dagman_p = map { $_ => 1 } qw(pegasus-dagman condor_dagman); sub gstat($) { if ( defined $_[0] ) { my $s = shift; $globus++; $globus{$s}++; exists $gstat{$s} ? $gstat{$s} : "$s"; } else { $condor++; '-'; } } sub parsersl($) { my %result = (); local $_ = shift; while ( /\(([^)]+)\)/g ) { my ($k,$v) = split /=/, $1, 2; $k =~ s/[-_]//g; $result{lc $k} = $v; } %result; } sub mybold { $isa_terminal ? "\033[1m" : ''; } sub myreset { $isa_terminal ? "\033[0m" : ''; } sub showjob($\%) { my $prefix = shift; my $r = shift; my $flag = 0; my $x = ''; $x .= sprintf "%*d", $r->{width}, $r->{clusterid}; $x .= ' ' . dateme($r->{qdate}); $x .= ' ' . $r->{jobuniverse}; $x .= ' ' . cstat($r->{jobstatus}); $x .= '/' . gstat($r->{globusstatus}); my $diff = $^T - $r->{enteredcurrentstatus}; $x .= ' ' . interval($diff); print $x, ' ', $prefix; if ( $dagid_width > 0 && length($prefix) ) { if ( exists $r->{dagnodename} ) { print '[', mybold(), fit(-$dagid_width,$r->{dagnodename}), myreset(); if ( defined $machine_width && exists $r->{remotehost} ) { print ' ', fit($machine_width,$r->{remotehost}); } print '] '; $flag = 1; } } my $cmd = basename($r->{cmd} || ''); print $cmd; if ( $args_width > length($cmd) && ! 
exists $dagman_p{$cmd} ) { if ( $cmd eq 'kickstart' ) { print ' ', fit( $args_width-length($cmd), kickstart($r->{arguments}) ); } elsif ( $cmd eq 'seqexec' || $cmd eq 'giraffe.pl' ) { print ' ', fit( $args_width-length($cmd), seqexec(%{$r}) ); } else { print ' ', fit( $args_width-length($cmd), $r->{arguments} ); } if ( $cmd eq 'seqexec' ) { print " $2 $1" if ( exists $r->{gridresource} && $r->{gridresource} =~ m{\w+ ([^/]+)/jobmanager-(\S+)} ); } } # if ( length($prefix) == 0 ) { # if ( $cmd eq 'condor_dagman' ) { # print '[', $r->{iwd}, ']'; # } else { # print "\n\twd=", $r->{iwd}; # } # } if ( exists $dagman_p{$cmd} ) { if ( exists $r->{'wf_uuid'} ) { print ' [', mybold(), levels(-2,$r->{iwd}), myreset(), ' ', $r->{'wf_uuid'}, ']'; } else { print ' [', mybold(), $r->{iwd}, myreset(), ']'; } print ' # dj=', scalar count_deps( $r->{clusterid} ); } elsif ( length($prefix) == 0 ) { print "\n\twd=", $r->{iwd}; } if ( exists $r->{globusrsl} ) { my %x = parsersl($r->{globusrsl}); print ' [', ( $x{name} || basename($r->{out},'.out') ), ']' unless $flag; print " # q=", ( $x{queue} || 'default' ); my $x = $x{maxtime} || $x{maxwalltime} || $x{maxcputime}; if ( defined $x && $x > 0 ) { printf " t=%d:%02d", ( $x /60 ), ( $x % 60 ); } my $p = $x{hostcount} || $x{'host_count'} || $x{count}; printf( " p=%d", $p ) if $p > 1; } print "\n"; } sub condor_q(\%\%\$;$) { my $jobref = shift; my $dagref = shift; my $maxref = shift; my $user = shift; local(*Q); if ( defined $user ) { open( Q, "condor_q -l $user|" ) } else { open( Q, "condor_q -l|" ) } # skip intro while ( ) { last if /^--/; } $$maxref = 0; my (@x,%db); while ( ) { s/[\r\n]+$//; if ( length($_) > 2 ) { # regular class-ad line @x = split /\s=\s/, $_, 2; die if exists $db{lc($x[0])}; $db{lc($x[0])} = trim($x[1]); } else { # end of job class-ad $jobref->{$db{clusterid}} = { %db }; if ( exists $db{dagmanjobid} ) { push( @{$dagref->{$db{dagmanjobid}}}, $db{clusterid} ); } else { $dagref->{$db{clusterid}} = [] unless exists $dagref->{$db{clusterid}}; } $$maxref = length($db{clusterid}) if $$maxref < length($db{clusterid}); %db = (); } } close Q || die "pclose: $!\n"; } # --- main ------------------------------------------------------ # # determine termininal size # if ( -t STDOUT ) { my $x; eval { require "sys/ioctl.ph"; ioctl( STDOUT, &TIOCGWINSZ, $x ) || die "ioctl"; }; if ( ! $@ && defined $x && length($x) ) { ($rows,$cols) = unpack("S2",$x); } } else { $rows = $cols = 1E10; # unlimited } my (%dag,%job,$max); condor_q( %job, %dag, $max, $user ); # artificial width classad my %seen = (); my $total = 0; foreach my $j ( keys %job ) { $job{$j}{width} = $max; $seen{$j} = 1; ++$total; } # find children and parents that are dags my (%parent,%leaves); foreach my $d ( keys %dag ) { foreach my $v ( @{$dag{$d}} ) { $parent{$v}{$d} = 1 if exists $dag{$v}; } } # find leaves my @fifo = keys %dag; while ( @fifo ) { my $d = pop(@fifo); if ( exists $parent{$d} ) { push( @fifo, keys %{$parent{$d}} ); } else { $leaves{$d} = 1; } } printf( "%*s %5s %8s U C/G %10s JOB\n", $max, 'ID', 'DATE', 'TIME', 'IN_STATE' ) if $total > 0; sub count_deps($) { my $jobid = shift; if ( exists $dag{$jobid} ) { @{$dag{$jobid}}; } else { (); } } sub showdag($$) { my $indented = shift || ''; my $dagid = shift; showjob( $indented, %{$job{$dagid}} ); delete $seen{$dagid}; my @x = sort { $a <=> $b } @{$dag{$dagid}}; my $indent = ' ' x length($indented); for ( my $j=0; $j<@x; ++$j ) { my $xtra = ( $j == $#x ) ? 
'\-' : '|-';
    if ( exists $dag{$x[$j]} ) {
      # it's a sub-DAG
      showdag( "$indent $xtra", $x[$j] );
    } else {
      # it's a job
      showjob( "$indent $xtra", %{$job{$x[$j]}} );
    }
    delete $seen{$x[$j]};
  }
}

foreach my $i ( sort { $a <=> $b } keys %leaves ) {
  showdag( '', $i );
}

if ( $total > 0 ) {
  my $f;
  printf( "%d Condor-G job%s", $globus, ( $globus == 1 ? '' : 's' ) );
  $f = 0;
  foreach my $g ( sort { $a <=> $b } keys %globus ) {
    print( $f++ ? ' ' : ' (' );
    printf "%s:%d", $gstat{$g}, $globus{$g};
  }
  print ')' if $f;
  printf( ", %d job%s total", $total, ( $total == 1 ? '' : 's' ) );
  $f = 0;
  foreach my $c ( sort { $a <=> $b } keys %condor ) {
    print( $f++ ? ' ' : ' (' );
    printf "%s:%d", $cstat[$c], $condor{$c};
  }
  print ')' if $f;
  print "\n";
}
warn "I am missing some jobs :=(\n" if ( scalar %seen );
pegasus-wms_4.0.1+dfsg/contrib/netlogger/0000755000175000017500000000000011757531667017472 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/contrib/netlogger/nl_dbquery.10000644000175000017500000001154711757531137021710 0ustar ryngerynge.TH nl_dbquery 1 "Aug 5 2008" "version $Id: nl_dbquery 896 2008-07-29 20:47:03Z dang $" "USER COMMANDS"
.SH NAME
nl_dbquery \- A script for running user-defined queries on a database (generated by netlogger or otherwise).
.SH SYNOPSIS
nl_dbquery runs queries on a database using the various parameters supplied on the command line or via a configuration file.
.SH DESCRIPTION
nl_dbquery is a script for running user-defined queries on a database. The script takes various command line parameters as user input and queries the database according to the user-defined queries and input parameters. The queries to be executed should be written in a config file which nl_dbquery will read. The queries in the config file are generalised so that the database connection and other specific parameters are separated from the actual query statements themselves. This way these parameters can be changed without any need to change the queries themselves.
.SH OPTIONS
.TP
\fB\-\-version
show program's version number and exit
.TP
\fB\-h \fB\-\-help
show this help message and exit
.TP
\fB\-c FILE \fB\-\-config=FILE
Read configuration from FILE. Default=./nl_dbquery.conf. The configuration file should follow the syntax in this file for it to be successfully parsed by the script. The configuration file is for reading the query information and other parameters from a database. It is also used for automatically generating a part of the help message itself.
.TP
\fB\-d DB_NAME \fB\-\-db=DB_NAME
Database to connect to. Default=pegasus
.TP
\fB\-l \fB\-\-list
This generates a numbered list of the available queries, reading the query information from the config file
.TP
\fB\-n \fB\-\-dry\-run
Display but don't run the query
.TP
\fB\-p DB_PARAM \fB\-\-param=DB_PARAM
Database connection parameters (full path to the filename in case of a sqlite database or a host name in case of MySQL). The host name for MySQL should be of the form mysql://hostname while for SQLite it should be of the form sqlite:///path/to/filename. This parameter is a required parameter except when the script is executed just with the -l/--list option to list the available queries.
.TP
\fB\-P QUERY_PARAM \fB\-\-query\-param=QUERY_PARAM
Parameter for the given query, in the form 'name=value'. The 'value' is substituted for occurrences of '<name>' in the query string. May be repeated.
.TP
\fB\-q QUERY \fB\-\-query=QUERY
Run QUERY, which can be a number or name.
Use -l/--list to list available queries
.TP
\fB\-u URI \fB\-\-uri=URI
Database connection URI, where the database module name is used as the URI scheme. MySQL requires a host and sqlite requires a filename.
.TP
\fB\-v \fB\-\-verbose
Repeat up to 3 times for more verbose logging. The default level is ERROR
.SS Time Range:
The following start/end times for the query accept many date expressions like 'yesterday', '2 weeks 1 day ago', 'last wed', 'Jan 4', etc. A more complete list is: weeks|days|hours|minutes|seconds ago, today, now, tomorrow, yesterday, 4th [[Jan] 2003], Jan 4th 2003, mm/dd/yyyy (preferred), dd/mm/yyyy, yyyy-mm-dd, yyyymmdd, next Tuesday, last Tuesday
.TP
\fB\-s START \fB\-\-start=START
Start date for the query ['1 week ago'].
.TP
\fB\-e END \fB\-\-end=END
End date for the query ['today'].
.SH USAGE
.TP
\fBConfiguration File
.PP
This is part of a sample configuration file which gives information about the different sections and their usage:

[DEFAULT]
# put defaults here

# These are the query sections
[how_many_jobs]
desc = "How many jobs ran on a given day"
query = "select count(id) from event where TIMERANGE and name = 'pegasus.invocation';"

[jobs_on_hosts]
desc = "How many jobs ran on given hosts"
query = "select count(id), value from event join attr on e_id = id where event.name = 'pegasus.invocation' and attr.name = 'host' group by value;"

[DEFAULT] has all the default values/parameters to be used by the queries. To add a new query to the configuration file, add a section like [how_many_jobs] above.

desc: is a description of the query which will be read by the -l/--list option

query: this is the actual query statement. The value of the TIMERANGE parameter can be substituted at runtime from the command line (see the -s/--start and -e/--end options for more information)
.SH EXAMPLES
.TP
.B nl_dbquery -c filename -p mysql://localhost -d dbname -q 1 -s "3 weeks ago" -e "1 week 2 days ago"
.PP
Description of what this example does: This example runs the script on a configuration file named filename for a MySQL database running at localhost. The name of the database is dbname and the query to be executed is query 1. The start date for the query is "3 weeks ago" while the end date for it is "1 week 2 days ago".
.SH EXIT STATUS
nl_dbquery returns 0 on success and a non-zero value on failure.
.SH BUGS
No known bugs.
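.\" The lines below are an illustrative sketch added to this page, not part of
.\" the original: they assume a hypothetical config section that uses a
.\" '<myhost>' parameter token, which -P/--query-param fills in at run time:
.\"   [jobs_on_one_host]
.\"   desc = "How many jobs ran on one named host"
.\"   query = "select count(id) from event join attr on e_id = id where event.name = 'pegasus.invocation' and attr.name = 'host' and attr.value = '<myhost>';"
.\" invoked as, e.g.:
.\"   nl_dbquery -c filename -u mysql://localhost -d dbname -q jobs_on_one_host -P myhost=node1.example.org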
.SH AUTHOR Binit S Bhatia (bsbhatia (at) lbl.gov) .SH SEE ALSO NetLogger home page .RS http://acs.lbl.gov/NetLoggerWiki .RE pegasus-wms_4.0.1+dfsg/contrib/netlogger/nl_dbquery0000755000175000017500000002767311757531137021573 0ustar ryngerynge#!/usr/bin/env python # nl_dbquery # Author: Binit Singh Bhatia # BSBhatia@lbl.gov, Binit.Bhatia@TTU.edu # $Id: nl_dbquery 674 2008-08-05 22:22:08Z ksb $ """ This is a basic database connection and query processing script reads information from a configuration file and also from the command line executes the database queries based on the information/arguments supplied by the user """ from configobj import ConfigObj from copy import copy import logging import magicdate import optparse import os import re import signal import string import sys import time import warnings logging.basicConfig() log = logging.getLogger("nl_dbquery") class ConnectionError(Exception): pass class Query: def execute_query(self, q): """Executes a query q """ log.info("query.start") cursor = self.conn.cursor() print "Running query: %s" % q print log.debug("query.execute.start") cursor.execute(q) log.debug("query.execute.end") #numrows = int(DBC.rowcount) log.debug("query.fetchall.start") for i, row in enumerate(cursor.fetchall()): row_str = '\t'.join([str(x) for x in row]) print '%03d: %s' % (i+1 , row_str) log.debug("query.fetchall.end") log.info("query.end") class MySQLQuery(Query): """MySQL query class """ def __init__(self, **kw): try: import MySQLdb self.conn = MySQLdb.connect(**kw) except ImportError: print "MySQLdb python module required for mysql scheme" sys.exit(1) except MySQLdb.Error, e: raise ConnectionError("Error %d: %s" % (e.args[0], e.args[1])) class SQLiteQuery(Query): """SQLite query class """ def __init__(self, filename): try: import sqlite3 self.conn = sqlite3.connect(filename) c = self.conn.cursor() c.execute("select count(id) from event") except ImportError: print "sqlite3 python module required for sqlite scheme" sys.exit(1) except sqlite3.OperationalError, E: raise ConnectionError("Error connecting to file '%s': %s" % (filename, E)) def split_uri(uri): m = re.match("([^:]*)://(.*)", uri) if m is None: return (None, None) else: return m.groups() # Fix magicdate returning None for values it seems to only kinda # dislike - 'years' or 'months'. def check_magicdate_wrap(option, opt, value): """ A wrapper of magicdate.check_magicdate to raise an exception when None is returned. """ ret = magicdate.check_magicdate(option, opt, value) if ret is None: raise optparse.OptionValueError( "option %s: invalid date value: %r" % (opt, value)) return ret class MagicDateOptionWrapper(optparse.Option): TYPES = optparse.Option.TYPES + ("magicdate",) TYPE_CHECKER = copy(optparse.Option.TYPE_CHECKER) TYPE_CHECKER["magicdate"] = check_magicdate_wrap MAGICDATE_EXAMPLES = ', '.join(["%s" % s for s in ( ' weeks|days|hours|minutes|seconds ago', 'today', 'now', 'tomorrow', 'yesterday', '4th [[Jan] 2003]', 'Jan 4th 2003', 'mm/dd/yyyy (preferred)', 'dd/mm/yyyy', 'yyyy-mm-dd', 'yyyymmdd', 'next Tuesday', 'last Tuesday')]) PARAM_PAT = r'<[a-zA-Z]\w*>' def substitute(qstr, param_val): """Perform string substitution in string 'qstr' using names and values from dictionary 'param_val'. Replace occurrences of or with the value of param_val['key']. Raises KeyError if a parameter is not found in the dictionary. 
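    Illustrative example (added for documentation; the names are hypothetical):
        substitute("where <timerange>", {"timerange": "time > 0"})
    returns the pair ("where time > 0", []), the empty list meaning that no
    supplied parameter went unused.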
""" param_unused = dict.fromkeys(param_val.keys()) def subfn(m): # get --> foo p_key = qstr[m.start()+1:m.end()-1].lower() if not param_val.has_key(p_key): raise KeyError("No value given for " "parameter <%s>" % p_key) del param_unused[p_key] return param_val[p_key] # substitute with subfn result = re.sub(PARAM_PAT, subfn, qstr) # return new string and list of unused params return result, param_unused.keys() def getparam(qstr): """Extract a list of the parameters in 'qstr' If there are none, return an empty list. """ # find parameters plist = re.findall(PARAM_PAT, qstr) # remove angle brackets and normalize case result = [s[1:-1].lower() for s in plist] # return resulting list return result def main(): parser = optparse.OptionParser(version="LBL r896, Pegasus r673", option_class=MagicDateOptionWrapper) parser.add_option('-c','--config', action='store', dest='config_name', default="./%s.conf" % os.path.basename(sys.argv[0]), metavar='FILE', help="Read configuration from FILE. Default=%default." "The configuration file should follow the syntax in this file for it " "to be successfully parsed by the script. The configuration file is for" "reading the query information and other parameters from a database." "Its also used for automatically generating a part of the help message itself. ") parser.add_option('-d','--db',action="store", dest="db_name",default="pegasus", help="Database to connect to. Default=%default") parser.add_option('-l', '--list', action='store_true', dest='qlist', help="This generates a numbered list of the available" "queries reading the query information from the config" "file") parser.add_option('-n', '--dry-run', action='store_true', dest='dryRun', help="Display but don't run the query") parser.add_option('-p', '--param', action="append", dest="db_param", default=[], help="Database connection parameters (full path to the filename" " in case of a sqlite database or a host name in case of MySql). The host name " "for MySQL should be of the form mysql://hostname while for SQLite it should be " "of the form sqlite:///path/to/filename. This parameter is a required parameter" "except in case when the script is executed just with the -l/--list option to list" " the available queries.") parser.add_option('-P', '--query-param', action="append", dest="query_param", default=[], help="Parameter for the given query, in the " "form 'name=value'. The 'value' is substituted for " "occurrences of '' in the query string. May " "be repeated.") parser.add_option('-q', '--query', action="store", dest="query", metavar='QUERY', help="Run QUERY, which can be a number or name. Use -l/--list to list available queries") parser.add_option('-u', '--uri', default=None, action='store', dest='db_uri', metavar='URI', help="Database connection URI, where the database module " "name is used as the URI scheme. " "MySQL requires a host and sqlite requires a filename.") parser.add_option('-v', '--verbose', action="count", default=0, dest='verbosity', help="Repeat up to 3 " "times for more verbose logging. The " "default level is ERROR") grp = optparse.OptionGroup(parser, "Time Range", "The following start/end times for the query " "accept many date expressions like 'yesterday', " "'2 weeks 1 day ago', 'last wed', 'Jan 4', etc. 
" "A more complete list is: " + MAGICDATE_EXAMPLES) grp.add_option('-s', '--start', action="store", dest="start",type='magicdate', default='1 week ago', help="Start date for the query ['1 week ago'].") grp.add_option('-e', '--end', action="store", dest="end",type='magicdate', default='today', help="End date for the query ['today'].") parser.add_option_group(grp) (options, args) = parser.parse_args() # set log level vb = min(options.verbosity, 3) log.setLevel(logging.ERROR - vb*10) # parse uri if options.db_uri is None: if options.qlist is not None or options.dryRun: uri_scheme = 'mysql' else: parser.error("-u/--uri is required") else: uri_scheme, uri_rest = split_uri(options.db_uri) if uri_scheme is None: parser.error("URI must be 'scheme://host-or-file', e.g., " "mysql://localhost or sqlite:///tmp/abc") uri_scheme = uri_scheme.lower() if uri_scheme not in ('sqlite', 'mysql'): parser.error("Unknown URI scheme, must be mysql:// or sqlite://") # init config file config = ConfigObj(options.config_name, interpolation='template') # number queries and build 2 lookup tables q_bynum = { } q_byname = { } def list_queries(section, key): if key == 'DEFAULT' or 'DEFAULT' not in section.keys(): return d = section[key] desc, val = d['desc'], d['query'] q_bynum[num[0]] = (key, desc, val) q_byname[key] = (num[0], desc, val) num[0] = num[0] + 1 num = [1] config.walk(list_queries, call_on_sections=True) if len(q_bynum) < 1: parser.error("config file '%s' has no queries" % options.config_name) # For -l/--list, print queries and quit if options.qlist: print "[Number] Name: Description. (parameters)" for i in xrange(1, len(q_bynum)+1): v = q_bynum[i] plist = getparam(v[2]) if plist: params = ','.join(plist) else: params = "None" print "[%2d] %s: %s (%s)" % (i, v[0], v[1], params) return # otherwise, find query if options.query is None: parser.error("-q/--query is required") try: qnum = int(options.query) if not q_bynum.has_key(qnum): parser.error("query %d out of range" % qnum) query_str = q_bynum[qnum][2] except ValueError: if not q_byname.has_key(options.query): parser.error("no query named '%s' found" % options.query) query_str = q_byname[options.query][2] # get timerange start = options.start end = options.end if uri_scheme == 'mysql': tr = ("time >= unix_timestamp('%s') and " "time <= unix_timestamp('%s')" % (start, end)) elif uri_scheme == 'sqlite': tr = "time >= datetime(%s) and time <= datetime(%s)" % (start, end) # get other parameters params = { 'timerange' : tr } for nvp in options.query_param: try: name, value = nvp.split('=') except ValueError: parser.error("Parameter '%s' not in form name=value" % nvp) params[name.lower()] = value # substitute parameters in query string try: query_str, unused = substitute(query_str, params) except KeyError, E: parser.error("%s in query string:\n %s" % (E, query_str)) if unused: log.warn("Unused parameters: %s" % ', '.join(unused)) # run query if options.dryRun: print query_str return try: if uri_scheme == 'mysql': q = MySQLQuery(db = options.db_name,host = uri_rest, read_default_file="~/.my.cnf") elif uri_scheme == 'sqlite': q = SQLiteQuery(filename = uri_rest) except ConnectionError, E: parser.error("While connecting to %s database: %s" % (uri_scheme, E)) #print "Time range: %s -- %s" % (start, end) t0 = time.time() q.execute_query(query_str) dt = time.time() - t0 print print "Query execution time: %lf seconds" % dt if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/contrib/netlogger/nl_dbquery.conf0000644000175000017500000001274211757531137022503 0ustar 
ryngerynge# default.conf
# sample config file for nl_dbquery
# This one basically makes use of the database and the queries for the pegasus database
# A similar config file can be generated for any other database also

[DEFAULT]
# put defaults here

# These are the query sections
[how_many_jobs]
desc = "How many jobs ran on a given day"
query = "select count(id) from event where <timerange> and name = 'pegasus.invocation';"

[cumulative]
desc = "What was the cumulative runtime of these jobs"
query = "select sum(value) from attr join event on e_id = id where <timerange> and event.name = 'pegasus.invocation' and attr.name = 'duration';"

[jobs_on_hosts]
desc = "How many jobs ran on given hosts"
query = "select count(id), value from event join attr on e_id = id where event.name = 'pegasus.invocation' and attr.name = 'host' group by value;"

[jobs_on_day]
desc = "How many jobs of a given type ran on a given day"
query = "select attr.value, count(id) from attr join event on e_id = id where <timerange> and event.name = 'pegasus.invocation' and attr.name = 'type' group by attr.value;"

[jobs_failed]
desc = "How many jobs failed"
query = "select count(id) from attr join event on e_id = id where <timerange> and event.name = 'pegasus.invocation' and attr.name = 'status' and attr.value != '0';"

[jobs_succeeded]
desc = "How many jobs succeeded"
query = "select count(id) from attr join event on e_id = id where <timerange> and event.name = 'pegasus.invocation' and attr.name = 'status' and attr.value = '0';"

# these are the new queries which are added for cybershake (these begin with cs_). I have added some basic
# description of these which I would suggest the original author should change to something more meaningful

# QUERIES PER WORKFLOW WHERE WORKFLOW ID IS A DAX LABEL
[cs_total_jobs]
desc = "CyberShake: Total number of jobs"
query = "select count(attr.e_id) from attr join ident on attr.e_id = ident.e_id where attr.name = 'status' and ident.name='workflow' and ident.value LIKE 'CyberShake_WNGC%';"

[cs_jobs_succeeded]
desc = "Cybershake: Total number of succeeded jobs"
query = "select count(attr.e_id) from attr join ident on attr.e_id = ident.e_id where attr.name = 'status' and attr.value = '0' and ident.name='workflow' and ident.value LIKE 'CyberShake_WNGC%';"

[cs_job_breakdown]
desc = "Cybershake: breakdown of jobs"
query = "select attr.value, count(attr.e_id) from attr join ident on attr.e_id = ident.e_id where ident.name='workflow' and ident.value LIKE 'CyberShake_WNGC%' and attr.name='type' group by attr.value;"

[cs_total_runtime]
desc = "Cybershake: total runtime of the jobs"
query = "select sum(attr.value) from attr join ident on attr.e_id=ident.e_id where attr.name='duration' and ident.name='workflow' and ident.value LIKE 'CyberShake_WNGC%';"

# QUERIES PER WORKFLOW PER JOB TYPE
# here jt in the query name stands for job type
[cs_runtime_breakdown_jt]
desc = "Cybershake: Runtime Breakdown by job type per workflow"
query = "select TRANSFORMATION, count(TRANSFORMATION) as number, round(sum(attr.value),2) as sum_seconds, round(sum(attr.value)/(3600),2) as sum_hours, round(avg(attr.value),2) as avg_seconds from attr join (select attr.e_id as event_id, attr.value as TRANSFORMATION from attr join ident on attr.e_id=ident.e_id where attr.name='type' and ident.name='workflow' and ident.value LIKE 'CyberShake_USC%') ident on attr.e_id=event_id WHERE attr.name='duration' group by TRANSFORMATION;"

[cs_numof_failure_jt]
of Failures by Job Type" query = "select TRANSFORMATION, count(TRANSFORMATION) as failures from attr join (select attr.e_id as event_id, attr.value as TRANSFORMATION from attr join ident on attr.e_id=ident.e_id where attr.name='type' and ident.name='workflow' and ident.value LIKE 'CyberShake_USC%') ident on attr.e_id=event_id WHERE attr.name = 'status' and attr.value != '0' group by TRANSFORMATION;" # QUERIES PER UNIT TIME PER WORKFLOW # here pd: per day, ph: per hour, pt: per host and pw: per workflow [cs_jobs_pd_pw] desc = "Cybershake: Jobs Per Day Per Workflow" query = "select count(id) as 'count', day(from_unixtime(time)) as day from event join attr on attr.e_id = event.id join ident on attr.e_id=ident.e_id where event.name = 'pegasus.invocation' and attr.name = 'host' and ident.name='workflow' and ident.value LIKE 'CyberShake_CCP%' group by day;" [cs_jobs_ph_pd_pw] desc = "Cybershake: Jobs Per Hour Per Day Per Workflow" query = "select count(id) as 'count', hour(from_unixtime(time)) as hour, day(from_unixtime(time)) as day from event join attr on attr.e_id = event.id join ident on attr.e_id=ident.e_id where event.name = 'pegasus.invocation' and attr.name = 'host' and ident.name='workflow' and ident.value LIKE 'CyberShake_CCP%' group by hour, day;" [cs_jobs_pt_ph_pw] desc = "Cybershake: Jobs Per Host Per Hour Per Workflow" query = "select count(id) as 'count', hour(from_unixtime(time)) as 'hour', attr.value as value from event join attr on attr.e_id = event.id join ident on attr.e_id=ident.e_id where event.name = 'pegasus.invocation' and attr.name = 'host' and ident.name='workflow' and ident.value LIKE 'CyberShake_%' group by value, hour;" # Add new queries here following the format for the queries above # If a query needs to be executed within a particular timerange then just # enter for that query. The actual start and end dates can be entered as a command line # parameters in the nl_dbquery script file pegasus-wms_4.0.1+dfsg/contrib/statistics/0000755000175000017500000000000011757531667017676 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/contrib/modules/0000755000175000017500000000000011757531667017154 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/contrib/modules/check-modules0000755000175000017500000000306611757531137021622 0ustar ryngerynge#!/usr/bin/env perl # # Checks that all non-standard perl modules are accessible and loadable. # require 5.005; use File::Basename; # this is a standard module # DO NOT use any other "use" -- we want to find modules, not stumble over 'em. sub check($) { my $module = shift; my ($filename,$realname,$result,$dir); ($filename = $module) =~ s{::}{/}g; $filename .= '.pm' unless substr($filename,-3) eq '.pm'; ITER: { foreach $dir ( @INC ) { $realname = "$dir/$filename"; warn "# trying $realname\n" if $main::DEBUG; if ( -f $realname ) { local $SIG{__WARN__} = sub { }; $result = do $realname; last ITER; } } die "Can't find $filename in \@INC\n"; } die $@ if $@; die "$module does not return true value" unless $result; $result; } my $fn = dirname($0) . 
'/modulelist.txt'; open( LIST, "<$fn" ) || die "open $fn: $!\n"; while ( ) { s/[\r\n]+$//; # chomp next if /^\#/; # skip comments next unless length($_)>1; # skip empty lines my $x = eval { check($_) }; if ( $x ) { # module was loaded warn( "# OK: loaded module $_\n" ); } else { warn( "# NOTFOUND: module $_ requires installation.\n" ); push( @notfound, $_ ); } } close LIST; if ( @notfound > 0 ) { print "\nModules that require installation:\n\n"; print join("\n",@notfound), "\n"; print << "EOF"; You may want to consider either downloading and installing the tarballs from http://www.cpan.org/modules/, or install them using perl -MCPAN -e shell EOF } else { print "\nAll modules found, very good.\n\n"; } pegasus-wms_4.0.1+dfsg/contrib/modules/modulelist.txt0000755000175000017500000000075111757531137022074 0ustar ryngeryngeArchive::Tar AutoLoader Carp # Common Compress::Zlib Config DBD::Pg DBD::mysql DBD::SQLite2 DBI DB_File Data::Dumper Digest::MD5 Errno Exporter ExtUtils::MakeMaker Fcntl File::Basename File::Path File::Spec File::Temp GDBM_File Getopt::Long Getopt::Std IO::File IO::Socket POSIX Socket Sys::Hostname Time::HiRes Time::Local URI XML::Parser XML::Parser::EasyTree XML::Parser::Expat Date::Format Template Template::Plugin::GD::Image GD::Graph GD::Text GD Email::Send Email::MIME::Modifier pegasus-wms_4.0.1+dfsg/contrib/bp2db/0000755000175000017500000000000011757531667016475 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/contrib/bp2db/schema.sql0000644000175000017500000000563711757531137020461 0ustar ryngerynge-- This is the Stampede schema CREATE TABLE workflow ( wf_id INTEGER NOT NULL, wf_uuid VARCHAR(255) NOT NULL, dax_label VARCHAR(255), timestamp NUMERIC(16, 6), submit_hostname VARCHAR(255), submit_dir TEXT, planner_arguments TEXT, username VARCHAR(255), grid_dn VARCHAR(255), planner_version VARCHAR(255), parent_workflow_id INTEGER, PRIMARY KEY (wf_id), FOREIGN KEY(parent_workflow_id) REFERENCES workflow (wf_id) ); CREATE TABLE workflowstate ( wf_id INTEGER NOT NULL, state VARCHAR(255) NOT NULL, timestamp NUMERIC(16, 6) NOT NULL, FOREIGN KEY(wf_id) REFERENCES workflow (wf_id) ); CREATE TABLE host ( host_id INTEGER NOT NULL, site_name VARCHAR(255) NOT NULL, hostname VARCHAR(255) NOT NULL, ip_address VARCHAR(255) NOT NULL, uname VARCHAR(255), total_ram NUMERIC(16, 6), PRIMARY KEY (host_id) ); CREATE TABLE job ( job_id INTEGER NOT NULL, wf_id INTEGER NOT NULL, job_submit_seq INTEGER NOT NULL, name VARCHAR(255) NOT NULL, host_id INTEGER, condor_id VARCHAR(255), jobtype VARCHAR(255) NOT NULL, clustered INTEGER, site_name VARCHAR(255), remote_user VARCHAR(255), remote_working_dir TEXT, cluster_start_time NUMERIC(16, 6), cluster_duration NUMERIC(16, 6), PRIMARY KEY (job_id), FOREIGN KEY(wf_id) REFERENCES workflow (wf_id), CHECK (clustered IN (0, 1)), FOREIGN KEY(host_id) REFERENCES host (host_id) ); CREATE TABLE jobstate ( job_id INTEGER NOT NULL, state VARCHAR(255) NOT NULL, timestamp NUMERIC(16, 6) NOT NULL, jobstate_submit_seq INTEGER NOT NULL, PRIMARY KEY (job_id, state, timestamp, jobstate_submit_seq), FOREIGN KEY(job_id) REFERENCES job (job_id) ); CREATE TABLE task ( task_id INTEGER NOT NULL, job_id INTEGER NOT NULL, task_submit_seq INTEGER NOT NULL, start_time NUMERIC(16, 6) NOT NULL, duration NUMERIC(16, 6) NOT NULL, exitcode INTEGER NOT NULL, transformation TEXT NOT NULL, executable TEXT, arguments TEXT, PRIMARY KEY (task_id), FOREIGN KEY(job_id) REFERENCES job (job_id) ); CREATE TABLE edge ( parent_id INTEGER NOT NULL, child_id INTEGER NOT NULL, PRIMARY KEY (parent_id, child_id), 
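-- (descriptive note: edge links concrete job rows resolved at run time,
--  while edge_static above stores the abstract DAG by wf_uuid and node name)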
FOREIGN KEY(parent_id) REFERENCES job (job_id), FOREIGN KEY(child_id) REFERENCES job (job_id) ); CREATE TABLE edge_static ( wf_uuid VARCHAR(255) NOT NULL, parent VARCHAR(255) NOT NULL, child VARCHAR(255) NOT NULL, -- The order of this is wf_uuid, child, parent to make edge lookups faster PRIMARY KEY (wf_uuid, child, parent) ); -- These indexes enforce uniqueness constraints CREATE UNIQUE INDEX UNIQUE_WORKFLOW ON workflow (wf_uuid); CREATE UNIQUE INDEX UNIQUE_HOST ON host (site_name, hostname, ip_address); CREATE UNIQUE INDEX UNIQUE_JOB ON job (wf_id, job_submit_seq); CREATE UNIQUE INDEX UNIQUE_JOBSTATE ON jobstate (job_id, jobstate_submit_seq); CREATE UNIQUE INDEX UNIQUE_TASK ON task (job_id, task_submit_seq); -- To make lookups of parent-child relationships faster CREATE INDEX IDX_JOB_LOOKUP ON job (wf_id, name);pegasus-wms_4.0.1+dfsg/contrib/bp2db/bp2db0000755000175000017500000004014411757531137017407 0ustar ryngerynge#!/usr/bin/env python # # This utility is used to load Netlogger .bp files containing # Pegasus events into SQLite databases containing the Stampede # schema. The databases can be mined for information about the # workflow events loaded. # import sys import os import re import time import calendar from datetime import datetime BATCHSIZE = 1000 # The maximum number of SQL statements in a batch # The format of a netlogger event is a series of key=value or key="value" re_parse_netlogger_event = re.compile(r'([^ =]+)[ ]*=[ ]*(("([^"]*)")|([^ ]+))') def parse_netlogger_event(line): "Convert a string containing a netlogger event into a dict" rec = {} for m in re_parse_netlogger_event.finditer(line): key = m.group(1) value = m.group(3) if value is None: value = m.group(5) rec[key] = value return rec def parse_ts(ts): "Parse a netlogger timestamp" ts, subs = ts.split('.') subs = float(subs[:-1]) return calendar.timegm(time.strptime(ts, r'%Y-%m-%dT%H:%M:%S')) + subs class Batch: "Stores and executes a batch of sql statements" def __init__(self, sql, batchsize=BATCHSIZE): self.items = [] self.batchsize = batchsize # Convert parameters of type :key into a list of keys re_param = re.compile(":([a-zA-Z0-9_]+)") self.keys = [] for m in re_param.finditer(sql): key = m.group(1) self.keys.append(key) # Convert parameters of type :key into question marks self.sql = re_param.sub("?", sql) def add_batch(self, rec): "Add a record to the batch" params = [] for k in self.keys: params.append(rec[k]) self.items.append(params) def batch_ready(self): "Return True if the batch is ready to execute" return len(self.items) >= self.batchsize def execute_batch(self, cursor): "Execute the batch if there are any parameters" if len(self.items) > 0: try: cursor.executemany(self.sql, self.items) self.items = [] except Exception, e: e.args = list(e.args) + [self.sql] raise class Loader: "Database loader for stampede events" def __init__(self): # Caches for IDs self.workflows = {} self.jobs = {} self.hosts = {} # Stores the number of times we skipped each event self.skips = {} # Next ID number to use self.next_wf_id = 1 self.next_job_id = 1 self.next_task_id = 1 self.next_host_id = 1 # Batched SQL statements self.workflow_batch = Batch("INSERT INTO workflow (wf_id, wf_uuid, dax_label, timestamp, submit_hostname, submit_dir, planner_arguments, username, grid_dn, planner_version, parent_workflow_id) VALUES (:wf_id, :wf_uuid, :dax_label, :timestamp, :submit_hostname, :submit_dir, :planner_arguments, :user, :grid_dn, :planner_version, :parent_workflow_id)") self.workflowstate_batch = Batch("INSERT INTO workflowstate 
(wf_id, state, timestamp) VALUES (:wf_id, :state, :timestamp)") self.host_batch = Batch("INSERT INTO host (host_id, site_name, hostname, ip_address, uname, total_ram) VALUES (:host_id, :site_name, :hostname, :ip_address, :uname, :total_ram)") self.edge_static_batch = Batch("INSERT INTO edge_static (wf_uuid, parent, child) VALUES (:wf_uuid, :parent, :child)") self.pre_batch = Batch("INSERT INTO job (job_id, wf_id, job_submit_seq, name, jobtype) VALUES (:job_id, :wf_id, :job_submit_seq, :name, :jobtype)") self.job_batch = Batch("REPLACE INTO job (job_id, wf_id, job_submit_seq, name, condor_id, jobtype) VALUES (:job_id, :wf_id, :job_submit_seq, :name, :condor_id, :jobtype)") self.job_host_batch = Batch("UPDATE job SET host_id=:host_id WHERE job_id=:job_id") self.job_update_batch = Batch("UPDATE job SET cluster_duration=:cluster_duration, remote_user=:remote_user, site_name=:site_name, remote_working_dir=:remote_working_dir, clustered=:clustered, cluster_start_time=:cluster_start_time WHERE job_id=:job_id") self.jobstate_batch = Batch("INSERT INTO jobstate (job_id, state, timestamp, jobstate_submit_seq) VALUES (:job_id, :state, :timestamp, :jobstate_submit_seq)") self.task_batch = Batch("INSERT INTO task (task_id, job_id, task_submit_seq, start_time, duration, exitcode, transformation, executable, arguments) VALUES (:task_id, :job_id, :task_submit_seq, :start_time, :duration, :exitcode, :transformation, :executable, :arguments)") # This query assumes that the parent job with the largest job_id is the one we want self.edge_batch = Batch("INSERT INTO edge SELECT MAX(p.job_id) parent_id, c.job_id child_id FROM edge_static e, job c, job p WHERE e.wf_uuid=:wf_uuid AND e.child=:name AND c.job_id=:job_id AND p.wf_id=:wf_id AND p.name=e.parent GROUP BY p.name") # Order in which batches should be executed self.batches = [ self.workflow_batch, self.workflowstate_batch, self.host_batch, self.edge_static_batch, self.pre_batch, self.job_batch, self.job_host_batch, self.job_update_batch, self.jobstate_batch, self.task_batch, self.edge_batch ] def load_bpfile(self, bpfile): "Load a netlogger bpfile" try: tty = sys.stdout.isatty() cursor = self.conn.cursor() i = 0 log = open(bpfile) for l in log: rec = parse_netlogger_event(l.strip()) if self.load_event(rec): i += 1 # Only check batches once every 100 records if (i%100) == 0 and self.check_batches(): self.execute_batches(cursor) if tty and (i%73) == 0: sys.stdout.write("Processed %d events\r" % i) sys.stdout.flush() else: self.skip_event(rec) self.execute_batches(cursor) print "Processed %d events" % i cursor.close() self.conn.commit() except: self.conn.rollback() raise def skip_event(self, rec): "Skip a parsed event" event = rec['event'] if event in self.skips: self.skips[event] = self.skips[event] + 1 else: self.skips[event] = 1 def load_event(self, rec): "Load a parsed event" event = rec['event'] if not event.startswith('stampede.'): raise Exception("Invalid event: %s" % e) e = event.replace('stampede.','').replace('.','_') handler = getattr(self, e, None) if handler: try: handler(rec) except Exception, e: e.args = list(e.args) + [rec] raise return True else: return False def check_batches(self): "Check to see if any batch SQL statements are ready to run" execute = False for b in self.batches: if b.batch_ready(): return True return False def execute_batches(self, cursor): "Execute all batch SQL statements" for b in self.batches: b.execute_batch(cursor) def workflow_plan(self, rec): "Handle a stampede.workflow.plan event" wf_id = self.new_workflow(rec) 
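        # default the optional planner attributes to None so the batched
        # INSERT below always finds every bound parameter key in rec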
rec.setdefault('submit_hostname', None) rec.setdefault('submit_dir', None) rec.setdefault('planner_arguments', None) rec.setdefault('user', None) rec.setdefault('grid_dn', None) rec.setdefault('planner_version', None) rec.setdefault('parent_workflow_id', None) parent = rec['parent.wf.id'] if parent is not None and parent != 'None': if parent in self.workflows: rec['parent_workflow_id'] = self.workflows[parent] else: raise Exception("Unknown parent workflow: %s" % parent) rec['wf_id'] = wf_id rec['wf_uuid'] = rec['wf.id'] rec['timestamp'] = parse_ts(rec['ts']) self.workflow_batch.add_batch(rec) # Add to workflowstate table state = {} state['wf_id'] = wf_id state['timestamp'] = rec['timestamp'] state['state'] = 'parse' self.workflowstate_batch.add_batch(state) def workflow_start(self, rec): "Handle a stampede.workflow.start event" wf_id = self.lookup_workflow(rec) if wf_id is None: wf_id = self.old_workflow(rec) rec['wf_id'] = wf_id rec['timestamp'] = parse_ts(rec['ts']) rec['state'] = 'start' self.workflowstate_batch.add_batch(rec) def workflow_end(self, rec): "Handle a stampede.workflow.end event" wf_id = self.lookup_workflow(rec) if wf_id is None: raise Exception("Unknown workflow: %s" % rec['wf.id']) rec['wf_id'] = wf_id rec['timestamp'] = parse_ts(rec['ts']) rec['state'] = 'end' self.workflowstate_batch.add_batch(rec) # Clean up workflow map wf_uuid = rec['wf.id'] del self.workflows[wf_uuid] del self.jobs[wf_uuid] def host(self, rec): "Handle a stampede.host event" key = (rec['site_name'], rec['hostname'], rec['ip_address']) if key not in self.hosts: # If it is not in the host cache, add it host_id = self.next_host_id self.next_host_id += 1 self.hosts[key] = host_id rec['host_id'] = host_id rec.setdefault('uname',None) rec.setdefault('total_ram',None) # Update host table self.host_batch.add_batch(rec) # Update job table host_id = self.hosts[key] job_id = self.lookup_job(rec) self.job_host_batch.add_batch( {'host_id': host_id, 'job_id': job_id}) def edge(self, rec): "Handle a stampede.edge event" # Update edge_static table self.edge_static_batch.add_batch({ 'wf_uuid': rec['wf.id'], 'parent': rec['parent'], 'child': rec['child'] }) def job_prescript_start(self, rec): "Handle a stampede.job.prescript.start event" # Get wf_id wf_uuid = rec['wf.id'] wf_id = self.workflows[wf_uuid] rec['wf_id'] = wf_id # Get job_id job_id = self.new_job(rec) rec['job_id'] = job_id rec['job_submit_seq'] = rec['job.id'] # Update the job table self.pre_batch.add_batch(rec) def job_mainjob_start(self, rec): "Handle a stampede.job.mainjob.start event" # Get wf_id wf_uuid = rec['wf.id'] wf_id = self.workflows[wf_uuid] rec['wf_id'] = wf_id # Get job_id job_seq = rec['job.id'] if job_seq in self.jobs[wf_uuid]: job_id = self.jobs[wf_uuid][job_seq] else: job_id = self.new_job(rec) rec['job_id'] = job_id rec['job_submit_seq'] = rec['job.id'] # Sometimes we don't get a condor ID rec['condor_id'] = None if 'condor.id' in rec: rec['condor_id'] = rec['condor.id'] # Update job table self.job_batch.add_batch(rec) # Update edge table self.edge_batch.add_batch({ 'name': rec['name'], 'job_id': job_id, 'wf_uuid': wf_uuid, 'wf_id': wf_id }) def job_mainjob_end(self, rec): "Handle a stampede.job.mainjob.end event" job_id = self.lookup_job(rec) rec['job_id'] = job_id rec.setdefault('remote_user',None) rec.setdefault('remote_working_dir',None) rec.setdefault('clustered',0) rec.setdefault('cluster_start_time',None) rec.setdefault('cluster_duration',None) # Update job table self.job_update_batch.add_batch(rec) def job_state(self, rec): 
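        # note: a job.state event can arrive before mainjob.start, in which
        # case lookup_job finds nothing and the event is skipped below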
"Handle a stampede.job.state event" job_id = self.lookup_job(rec) if job_id is None: # This usually occurs when we have a job.state before the mainjob.start # The only thing we can do is skip the event self.skip_event(rec) return rec['job_id'] = job_id rec['timestamp'] = parse_ts(rec['ts']) rec['jobstate_submit_seq'] = rec['js.id'] # Update jobstate table self.jobstate_batch.add_batch(rec) def task_mainjob(self, rec): "Handle a stampede.task.mainjob event" job_id = self.lookup_job(rec) # If the job hasn't been loaded yet, we just have to skip it if job_id is None: self.skip_event(rec) return task_id = self.new_task() rec['job_id'] = job_id rec['task_id'] = task_id rec['task_submit_seq'] = rec['task.id'] rec.setdefault('executable',None) rec.setdefault('arguments',None) self.task_batch.add_batch(rec) task_prescript = task_mainjob task_postscript = task_mainjob def new_workflow(self, rec): "Create a new workflow and add it to the cache" wf_uuid = rec['wf.id'] if wf_uuid in self.workflows: raise Exception("Duplicate workflow: %s" % wf_uuid) wf_id = self.next_wf_id self.next_wf_id += 1 self.workflows[wf_uuid] = wf_id self.jobs[wf_uuid] = {} return wf_id def old_workflow(self, rec): "Re-add a workflow to the workflow cache" wf_uuid = rec['wf.id'] wf_id = self.get_wf_id(wf_uuid) self.workflows[wf_uuid] = wf_id self.jobs[wf_uuid] = {} return wf_id def get_wf_id(self, wf_uuid): "Get a wf_id from the database using wf_uuid" cur = self.conn.cursor() cur.execute("SELECT wf_id FROM workflow WHERE wf_uuid=?",(wf_uuid,)) wf_id = str(cur.fetchone()[0]) cur.close() return wf_id def new_job(self, rec): "Create a new job and add it to the cache" wf_uuid = rec['wf.id'] jobs = self.jobs[wf_uuid] seq_no = rec['job.id'] if seq_no in jobs: raise Exception("Duplicate job: %d" % seq_no) job_id = self.next_job_id self.next_job_id += 1 jobs[seq_no] = job_id return job_id def new_task(self): "Create a new task" task_id = self.next_task_id self.next_task_id += 1 return task_id def lookup_workflow(self, rec): "Find a workflow in the cache and return its wf_id" wf_uuid = rec['wf.id'] if wf_uuid in self.workflows: return self.workflows[wf_uuid] return None def lookup_job(self, rec): "Find a job in the cache and return its job_id" wf_uuid = rec['wf.id'] seq_no = rec['job.id'] jobs = self.jobs[wf_uuid] if seq_no in jobs: return jobs[seq_no] return None class SQLiteLoader(Loader): def __init__(self, dbfile): Loader.__init__(self) self.dbfile = dbfile import sqlite3 self.conn = sqlite3.connect(dbfile,isolation_level="EXCLUSIVE") self.conn.execute("PRAGMA locking_mode = EXCLUSIVE") # Count the number of tables in the schema cur = self.conn.cursor() cur.execute("SELECT count(*) tables FROM sqlite_master WHERE type='table'") tables = cur.fetchone()[0] cur.close() if tables == 0: # If there are no tables, then we need to create the database self.conn.execute("PRAGMA page_size = 4096") self.create_schema() else: # If the tables already exist, we need to figure out what the next # ID numbers need to be, and repopulate the host cache self.set_id_numbers() self.populate_host_cache() def create_schema(self): "Create tables and indexes in database" print "Creating Stampede schema in %s" % self.dbfile basedir = os.path.abspath(os.path.dirname(sys.argv[0])) schemafile = os.path.join(basedir, "schema.sql") script = open(schemafile).read() self.conn.executescript(script) self.conn.commit() def set_id_numbers(self): "Set the next ID numbers for the wf, job, task, and host entities" self.next_wf_id = self.next_id("wf_id", "workflow") 
        self.next_job_id = self.next_id("job_id", "job")
        self.next_task_id = self.next_id("task_id", "task")
        self.next_host_id = self.next_id("host_id", "host")

    def next_id(self, idfield, table):
        "Retrieve max(table.idfield)+1 from the database"
        cur = self.conn.cursor()
        cur.execute("SELECT coalesce(max(%s),0)+1 FROM %s" % (idfield, table))
        next_id = cur.fetchone()[0]
        cur.close()
        return next_id

    def populate_host_cache(self):
        "Retrieve all the hosts from the database and place them in the host cache"
        cur = self.conn.cursor()
        cur.execute("SELECT host_id, site_name, hostname, ip_address FROM host")
        for row in cur:
            key = (str(row[1]), str(row[2]), str(row[3]))
            self.hosts[key] = str(row[0])
        cur.close()

def main():
    if len(sys.argv) < 2:
        print "Usage: %s DBFILE [BPFILE]..." % os.path.basename(sys.argv[0])
        sys.exit(1)

    print "Loader starting with PID %d" % os.getpid()

    # Check dbfile
    dbfile = sys.argv[1]
    if os.path.exists(dbfile):
        db = open(dbfile)
        magic = db.read(13)
        db.close()
        if magic != "SQLite format":
            print "Invalid SQLite database: %s" % dbfile
            sys.exit(1)

    # Create database directory if it doesn't exist
    dbdir = os.path.abspath(os.path.dirname(dbfile))
    if not os.path.isdir(dbdir):
        os.makedirs(dbdir)

    # Check bpfiles
    bpfiles = sys.argv[2:]
    for bpfile in bpfiles:
        if not os.path.isfile(bpfile):
            print "BPFILE does not exist: %s" % bpfile
            sys.exit(1)

    # Read from stdin if no input file specified
    if len(bpfiles) == 0:
        bpfiles.append("/dev/stdin")

    # Load data
    loader = SQLiteLoader(dbfile)
    start = datetime.now()
    for bpfile in bpfiles:
        print "Loading file %s" % bpfile
        fs = datetime.now()
        loader.load_bpfile(bpfile)
        fe = datetime.now()
        print "Loaded %s in %s" % (bpfile, fe - fs)
    end = datetime.now()
    print "Loaded all files in %s" % (end - start)
    for event in loader.skips:
        print "Skipped event %s %d times" % (event, loader.skips[event])

if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt, k:
        passpegasus-wms_4.0.1+dfsg/etc/0000755000175000017500000000000011757531667014617 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/etc/sample.sites.txt0000644000175000017500000000154011757531137017757 0ustar ryngeryngesite local {
  lrc "rlsn://smarty.isi.edu"
  gridftp "gsiftp://smarty.isi.edu/smarty/storage" "4.0.4"
  gridlaunch "/smarty/software/pegasus/bin/kickstart"
  workdir "/smarty/scratch/"
  universe vanilla "smarty.isi.edu/jobmanager-fork" "4.0.4"
  universe transfer "smarty.isi.edu/jobmanager-fork" "4.0.4"
  profile pegasus "group" "local"
  profile env "PEGASUS_HOME" "/usr/local/pegasus"
  profile env "JAVA_HOME" "/usr/local/java"
  profile env "GLOBUS_LOCATION" "/usr/local/globus"
  profile env "LD_LIBRARY_PATH" "/usr/local/globus/lib"
}
site isi {
  lrc "rlsn://smarty.isi.edu"
  gridftp "gsiftp://skynet-data.isi.edu/exports/storage01" "4.0.4"
  gridlaunch "/nfs/software/pegasus/default/bin/kickstart"
  workdir "/nfs/scratch01"
  universe vanilla "smarty.isi.edu/jobmanager-pbs" "4.0.4"
  universe transfer "smarty.isi.edu/jobmanager-pbs" "4.0.4"
}
pegasus-wms_4.0.1+dfsg/etc/sample.tc.data0000755000175000017500000000236711757531137017341 0ustar ryngerynge#SITE LFN PFN TYPE SYSINFO PROFILES
#local pegasus::rc-client /home/pegasus/2.0/bin/rc-client INSTALLED null null
#local pegasus::transfer /home/pegasus/2.0/bin/linux/transfer INSTALLED null env::GLOBUS_LOCATION="/vdt/globus";env::LD_LIBRARY_PATH="/vdt/globus/lib"
#local pegasus::dirmanager /home/pegasus/2.0/bin/dirmanager INSTALLED null null
isi pegasus::transfer /home/pegasus/2.0/bin/transfer INSTALLED INTEL32::LINUX env::GLOBUS_LOCATION="/vdt/globus";env::LD_LIBRARY_PATH="/vdt/globus/lib"
isi pegasus::dirmanager /home/pegasus/2.0/bin/dirmanager INSTALLED INTEL32::LINUX null
isi pegasus::cleanup /home/pegasus/2.0/bin/dirmanager INSTALLED INTEL32::LINUX null
isi pegasus::seqexec /home/pegasus/2.0/bin/dirmanager INSTALLED INTEL32::LINUX null
isi black::analyze:1.0 /home/pegasus/2.0/bin/keg INSTALLED INTEL32::LINUX null
isi black::preprocess:1.0 /home/pegasus/2.0/bin/keg INSTALLED INTEL32::LINUX null
isi black::findrange:1.0 /home/pegasus/2.0/bin/keg INSTALLED INTEL32::LINUX null
isi diamond::findrange:1.0 /home/pegasus/2.0/bin/keg INSTALLED INTEL32::LINUX null
isi diamond::analyze:1.0 /home/pegasus/2.0/bin/keg INSTALLED INTEL32::LINUX null
isi diamond::generate:1.0 /home/pegasus/2.0/bin/keg INSTALLED INTEL32::LINUX null
pegasus-wms_4.0.1+dfsg/etc/sample.dot.pegasusrc0000644000175000017500000000075611757531137020573 0ustar ryngeryngepegasus.catalog.provenance=InvocationSchema
pegasus.catalog.db.driver=Postgres
pegasus.catalog.db.url=jdbc:postgresql:${user.name}
pegasus.catalog.db.user=${user.name}
pegasus.catalog.db.password=${user.name}
pegasus.catalog.replica.url=rlsn://smarty.isi.edu
pegasus.catalog.replica=RLS
pegasus.catalog.transformation=File
pegasus.catalog.transformation.file=${pegasus.home}/var/tc.data
pegasus.catalog.site=XML
pegasus.catalog.site.file=${pegasus.home}/etc/sites.xml
pegasus.exitcode.scope=all
pegasus-wms_4.0.1+dfsg/etc/sample.rc.data0000644000175000017500000000012411757531137017323 0ustar ryngeryngef.a gsiftp://viz-login.is.edu/scratch/tutorial/inputdata/diamond/f.a pool="cluster"
pegasus-wms_4.0.1+dfsg/etc/sample.sites.xml0000644000175000017500000000343011757531137017740 0ustar ryngerynge local /usr/local/pegasus /usr/local/java /usr/local/globus /usr/local/globus/lib /smarty/scratch/ /nfs/scratch01 pegasus-wms_4.0.1+dfsg/etc/sample.sites.xml30000644000175000017500000000462611757531137020033 0ustar ryngerynge /usr/local/globus /usr/local/java /usr/local/globus/lib /usr/local/pegasus local pegasus-wms_4.0.1+dfsg/etc/basic.properties0000644000175000017500000004663411757531137020017 0ustar ryngerynge# TITLE "BASIC PROPERTIES"
#
# This is the reference guide to the basic properties regarding the
# Pegasus Workflow Planner, and their respective default values. Please refer
# to the advanced properties guide to know about all the properties that
# a user can use to configure the Pegasus Workflow Planner.
# Please note that the values rely on proper capitalization, unless explicitly
# noted otherwise.
#
# The defaults of some properties rely on the values of other
# properties. As a notation, the curly braces refer to the value of the
# named property. For instance, ${pegasus.home} means that the value depends
# on the value of the pegasus.home property plus any noted additions. You
# can use this notation to refer to other properties, though the extent
# of the substitutions is limited. Usually, you want to refer to a set
# of the standard system properties. Nesting is not allowed.
# Substitutions will only be done once.
#
# There is a priority to the order of reading and evaluating properties.
# Usually one does not need to worry about the priorities. However, it
# is good to know the details of when which property applies, and how
# one property is able to overwrite another. The following is a mutually exclusive
# list (highest priority first) of property file locations.
#
#
# --conf option to the tools. Almost all of the clients that use properties
# have a --conf option to specify the property file to pick up.
# # submit-dir/pegasus.xxxxxxx.properties file. All tools that work on the # submit directory ( i.e after pegasus has planned a workflow) pick up the # pegasus.xxxxx.properties file from the submit directory. The location for the # pegasus.xxxxxxx.propertiesis picked up from the braindump file. # # The properties defined in the user property file # ${user.home}/.pegasusrc have lowest priority. # # # # Commandline properties have the highest priority. These override any property loaded # from a property file. Each commandline property is introduced by a -D argument. # Note that these arguments are parsed by the shell wrapper, and thus the -D arguments # must be the first arguments to any command. Commandline properties are useful for debugging # purposes. # # From Pegasus 3.1 release onwards, support has been dropped for the following # properties that were used to signify the location of the properties file # # # pegasus.properties # pegasus.user.properties # # # The following example provides a sensible set of properties to be set # by the user property file. These properties use mostly non-default # settings. It is an example only, and will not work for you: # # # pegasus.catalog.replica File # pegasus.catalog.replica.file ${pegasus.home}/etc/sample.rc.data # pegasus.catalog.transformation Text # pegasus.catalog.transformation.file ${pegasus.home}/etc/sample.tc.text # pegasus.catalog.site XML3 # pegasus.catalog.site.file ${pegasus.home}/etc/sample.sites.xml3 # # # If you are in doubt which properties are actually visible, pegasus during the # planning of the workflow dumps all properties after reading and prioritizing # in the submit directory in a file with the suffix properties. # Property : pegasus.home # Systems : all # Type : directory location string # Default : "$PEGASUS_HOME" # # The property pegasus.home cannot be set in the property file. This property is # automatically set up by the pegasus clients internally by determining the installation # directory of pegasus. Knowledge about this property is important for developers who # want to invoke PEGASUS JAVA classes without the shell wrappers. # # pegasus.home "$PEGASUS_HOME" # # SECTION "CATALOG PROPERTIES" # # # SUBSECTION "REPLICA CATALOG" # # Property : pegasus.catalog.replica # System : Pegasus # Since : 2.0 # Type : enumeration # Value[0] : RLS # Value[1] : LRC # Value[2] : JDBCRC # Value[3] : File # Value[4] : MRC # Default : RLS # # Pegasus queries a Replica Catalog to discover the physical filenames # (PFN) for input files specified in the DAX. Pegasus can interface # with various types of Replica Catalogs. This property specifies # which type of Replica Catalog to use during the planning process. # # # # RLS # # RLS (Replica Location Service) is a distributed replica # catalog, which ships with GT4. There is an index service called # Replica Location Index (RLI) to which 1 or more Local Replica # Catalog (LRC) report. Each LRC can contain all or a subset of # mappings. In this mode, Pegasus queries the central RLI to # discover in which LRC's the mappings for a LFN reside. It then # queries the individual LRC's for the PFN's. # To use RLS, the user additionally needs to set the property # pegasus.catalog.replica.url to specify the URL for the RLI to # query. # Details about RLS can be found at # http://www.globus.org/toolkit/data/rls/ # # # # LRC # # If the user does not want to query the RLI, but directly a # single Local Replica Catalog. 
# To use LRC, the user additionally needs to set the property # pegasus.catalog.replica.url to specify the URL for the LRC to # query. # Details about RLS can be found at # http://www.globus.org/toolkit/data/rls/ # # # # JDBCRC # # In this mode, Pegasus queries a SQL based replica catalog that # is accessed via JDBC. The sql schema's for this catalog can be # found at $PEGASUS_HOME/sql directory. # To use JDBCRC, the user additionally needs to set the following # properties # # pegasus.catalog.replica.db.url # pegasus.catalog.replica.db.user # pegasus.catalog.replica.db.password # # # # # File # In this mode, Pegasus queries a file based replica catalog. # It is neither transactionally safe, nor advised to use for # production purposes in any way. Multiple concurrent access to # the File will end up clobbering the contents of the file. The # site attribute should be specified whenever possible. The attribute # key for the site attribute is "pool". # # The LFN may or may not be quoted. If it contains linear # whitespace, quotes, backslash or an equality sign, it must be # quoted and escaped. Ditto for the PFN. The attribute key-value # pairs are separated by an equality sign without any # whitespaces. The value may be in quoted. The LFN sentiments about quoting apply. # # # LFN PFN # LFN PFN a=b [..] # LFN PFN a="b" [..] # "LFN w/LWS" "PFN w/LWS" [..] # # # To use File, the user additionally needs to specify # pegasus.catalog.replica.file property to specify the path to the # file based RC. # # # # MRC # In this mode, Pegasus queries multiple replica catalogs to # discover the file locations on the grid. To use it set # # # pegasus.catalog.replica MRC # # # Each associated replica catalog can be configured via properties # as follows. # # The user associates a variable name referred to as [value] for # each of the catalogs, where [value] is any legal identifier # (concretely [A-Za-z][_A-Za-z0-9]*) For each associated replica # catalogs the user specifies the following properties. # # # pegasus.catalog.replica.mrc.[value] specifies the type of replica catalog. # pegasus.catalog.replica.mrc.[value].key specifies a property name key for a # particular catalog # # # For example, if a user wants to query two lrc's at the same time # he/she can specify as follows # # # pegasus.catalog.replica.mrc.lrc1 LRC # pegasus.catalog.replica.mrc.lrc2.url rls://sukhna # # pegasus.catalog.replica.mrc.lrc2 LRC # pegasus.catalog.replica.mrc.lrc2.url rls://smarty # # # # In the above example, lrc1, lrc2 are any valid identifier names # and url is the property key that needed to be specified. # # # # # # # pegasus.catalog.replica RLS # Property : pegasus.catalog.replica.url # System : Pegasus # Since : 2.0 # Type : URI string # Default : (no default) # # When using the modern RLS replica catalog, the URI to the Replica # catalog must be provided to Pegasus to enable it to look up # filenames. There is no default. # # pegasus.catalog.replica.url (no default) # # SUBSECTION "SITE CATALOG" # # Property : pegasus.catalog.site # System : Site Catalog # Since : 2.0 # Type : enumeration # Value[0] : XML3 # Value[1] : XML # Default : XML3 # # The site catalog file is available in three major flavors: The Text and # and XML formats for the site catalog are deprecated. # Users can use pegasus-sc-converter client to convert their site catalog # to the newer XML3 format. # # THIS FORMAT IS DEPRECATED. WILL BE REMOVED IN COMING VERSIONS. # USE pegasus-sc-converter to convert XML format to XML3 Format. 
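# For example, once a catalog has been converted, a user would typically
# point Pegasus at the new file (path illustrative):
#
# pegasus.catalog.site XML3
# pegasus.catalog.site.file ${pegasus.home.sysconfdir}/sites.xml3
#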
# The "XML" format is an XML-based file. The XML format reads site # catalog conforming to the old site catalog schema available at # http://pegasus.isi.edu/wms/docs/schemas/sc-2.0/sc-2.0.xsd # # The "XML3" format is an XML-based file. The XML format reads site # catalog conforming to the old site catalog schema available at # http://pegasus.isi.edu/wms/docs/schemas/sc-3.0/sc-3.0.xsd # # # # pegasus.catalog.site XML3 # Property : pegasus.catalog.site.file # System : Site Catalog # Since : 2.0 # Type : file location string # Default : ${pegasus.home.sysconfdir}/sites.xml3 |${pegasus.home.sysconfdir}/sites.xml # See also : pegasus.catalog.site # # Running things on the grid requires an extensive description of the # capabilities of each compute cluster, commonly termed "site". This # property describes the location of the file that contains such a site # description. As the format is currently in flow, please refer to the # userguide and Pegasus for details which format is expected. # The default value is dependant on the value specified for # the property pegasus.catalog.site . If type of SiteCatalog used is XML3, then sites.xml3 # is picked up from sysconfdir else sites.xml # # pegasus.catalog.site.file ${pegasus.home.sysconfdir}/sites.xml3 | ${pegasus.home.sysconfdir}/sites.xml # # SUBSECTION "TRANSFORMATION CATALOG" # # Property : pegasus.catalog.transformation # System : Transformation Catalog # Since : 2.0 # Type : enumeration # Value[0] : Text # Value[1] : File # Default : Text # See also : pegasus.catalog.transformation.file # # # # Text # In this mode, a multiline file based format is understood. The file # is read and cached in memory. Any modifications, as adding or # deleting, causes an update of the memory and hence to the file # underneath. All queries are done against the memory # representation. # # The file sample.tc.text in the etc directory contains an example # # Here is a sample textual format for transfomation catalog containing # one transformation on two sites # # # tr example::keg:1.0 { # # #specify profiles that apply for all the sites for the transformation # #in each site entry the profile can be overriden # profile env "APP_HOME" "/tmp/karan" # profile env "JAVA_HOME" "/bin/app" # # site isi { # profile env "me" "with" # profile condor "more" "test" # profile env "JAVA_HOME" "/bin/java.1.6" # pfn "/path/to/keg" # arch "x86" # os "linux" # osrelease "fc" # osversion "4" # type "INSTALLED" # } # # site wind { # profile env "me" "with" # profile condor "more" "test" # pfn "/path/to/keg" # arch "x86" # os "linux" # osrelease "fc" # osversion "4" # type "STAGEABLE" # } # } # # # # File # THIS FORMAT IS DEPRECATED. WILL BE REMOVED IN COMING VERSIONS. # USE pegasus-tc-converter to convert File format to Text Format. # In this mode, a file format is understood. The file is # read and cached in memory. Any modifications, as adding or # deleting, causes an update of the memory and hence to the file # underneath. All queries are done against the memory # representation. The new TC file format uses 6 columns: # # The resource ID is represented in the first column. # The logical transformation uses the colonized format # ns::name:vs. # The path to the application on the system # The installation type is identified by one of the following # keywords - all upper case: INSTALLED, STAGEABLE. # If not specified, or NULL is used, the type # defaults to INSTALLED. # The system is of the format ARCH::OS[:VER:GLIBC]. 
The # following arch types are understood: "INTEL32", "INTEL64", # "SPARCV7", "SPARCV9". # The following os types are understood: "LINUX", "SUNOS", # "AIX". If unset or NULL, defaults to # INTEL32::LINUX. # Profiles are written in the format # NS::KEY=VALUE,KEY2=VALUE;NS2::KEY3=VALUE3 # Multiple key-values for same namespace are seperated by a # comma "," and multiple namespaces are seperated by a # semicolon ";". If any of your profile values contains a # comma you must not use the namespace abbreviator. # # # # # # pegasus.catalog.transformation Text # Property : pegasus.catalog.transformation.file # Systems : Transformation Catalog # Type : file location string # Default : ${pegasus.home.sysconfdir}/tc.text | ${pegasus.home.sysconfdir}/tc.data # See also : pegasus.catalog.transformation # # This property is used to set the path to the textual transformation # catalogs of type File or Text. If the transformation catalog is of type Text # then tc.text file is picked up from sysconfdir, else tc.data # # # pegasus.catalog.transformation.file ${pegasus.home.sysconfdir}/tc.text | ${pegasus.home.sysconfdir}/tc.data # # SECTION "DATA STAGING CONFIGURATION" # # Property : pegasus.data.configuration # System : Pegasus # Since : 3.1 # Type : enumeration # Value[0] : sharedfs # Value[1] : nonsharedfs # Value[2] : condorio # Default : sharedfs # # This property sets up Pegasus to run in different environments. # # # # sharedfs # If this is set, Pegasus will be setup to execute jobs on the shared # filesystem on the execution site. This assumes, that the head node of a cluster # and the worker nodes share a filesystem. The staging site in this case is # the same as the execution site. Pegasus adds a create dir job to the executable # workflow that creates a workflow specific directory on the shared filesystem . # The data transfer jobs in the executable workflow ( stage_in_ , stage_inter_ , # stage_out_ ) transfer the data to this directory.The compute jobs in the # executable workflow are launched in the directory on the shared filesystem. # Internally, if this is set the following properties are set. # # pegasus.execute.*.filesystem.local false # # # # condorio # If this is set, Pegasus will be setup to run jobs in a pure condor pool, # with the nodes not sharing a filesystem. Data is staged to the compute nodes from # the submit host using Condor File IO. # The planner is automatically setup to use the submit host ( site local ) as the # staging site. All the auxillary jobs added by the planner to the executable # workflow ( create dir, data stagein and stage-out, cleanup ) jobs refer to # the workflow specific directory on the local site. The data transfer jobs in # the executable workflow ( stage_in_ , stage_inter_ , stage_out_ ) transfer the # data to this directory. When the compute jobs start, the input data for each # job is shipped from the workflow specific directory on the submit host to # compute/worker node using Condor file IO. The output data for each job is # similarly shipped back to the submit host from the compute/worker node. # This setup is particularly helpful when running workflows in the cloud # environment where setting up a shared filesystem across the VM's may be # tricky. 
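# For example (a minimal sketch, assuming a pure Condor pool with no shared
# filesystem), a user would only need to set:
#
# pegasus.data.configuration condorio
#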
# On loading this property, internally the following properies are set # # pegasus.transfer.sls.*.impl Condor # pegasus.execute.*.filesystem.local true # pegasus.gridstart PegasusLite # pegasus.transfer.worker.package true # # # # nonsharedfs # If this is set, Pegasus will be setup to execute jobs on an execution site # without relying on a shared filesystem between the head node and the worker nodes. # You can specify staging site ( using --staging-site option to pegasus-plan) to # indicate the site to use as a central storage location for a workflow. The # staging site is independant of the execution sites on which a workflow executes. # All the auxillary jobs added by the planner to the executable # workflow ( create dir, data stagein and stage-out, cleanup ) jobs refer to # the workflow specific directory on the staging site. The data transfer jobs in # the executable workflow ( stage_in_ , stage_inter_ , stage_out_ ) transfer the # data to this directory. When the compute jobs start, the input data for each # job is shipped from the workflow specific directory on the submit host to # compute/worker node using pegasus-transfer. The output data for each job is # similarly shipped back to the submit host from the compute/worker node. # The protocols supported are at this time SRM, GridFTP, iRods, S3. # This setup is particularly helpful when running workflows on OSG where # most of the execution sites don't have enough data storage. Only a few # sites have large amounts of data storage exposed that can be used to place # data during a workflow run. This setup is also helpful when running workflows # in the cloud environment where setting up a shared filesystem across the VM's may be # tricky. # On loading this property, internally the following properies are set # # pegasus.execute.*.filesystem.local true # pegasus.gridstart PegasusLite # pegasus.transfer.worker.package true # # # # # # # pegasus.data.configuration sharedfs pegasus-wms_4.0.1+dfsg/etc/advanced.properties0000644000175000017500000032461111757531137020501 0ustar ryngerynge# TITLE "PROPERTIES" # # This is the reference guide to all properties regarding the # Pegasus Workflow Planner, and their respective default values. Please refer # to the user guide for a discussion when and which properties to use to # configure various components. Please note that the values rely on # proper capitalization, unless explicitly noted otherwise. # # Some properties rely with their default on the value of other # properties. As a notation, the curly braces refer to the value of the # named property. For instance, ${pegasus.home} means that the value depends # on the value of the pegasus.home property plus any noted additions. You # can use this notation to refer to other properties, though the extent # of the subsitutions are limited. Usually, you want to refer to a set # of the standard system properties. Nesting is not allowed. # Substitutions will only be done once. # # # There is a priority to the order of reading and evaluating properties. # Usually one does not need to worry about the priorities. However, it # is good to know the details of when which property applies, and how # one property is able to overwrite another. The following is a mutually exclusive # list ( highest priority first ) of property file locations. # # # --conf option to the tools. Almost all of the clients that use properties # have a --conf option to specify the property file to pick up. # # submit-dir/pegasus.xxxxxxx.properties file. 
All tools that work on the # submit directory ( i.e after pegasus has planned a workflow) pick up the # pegasus.xxxxx.properties file from the submit directory. The location for the # pegasus.xxxxxxx.propertiesis picked up from the braindump file. # # The properties defined in the user property file # ${user.home}/.pegasusrc have lowest priority. # # # # Commandline properties have the highest priority. These override any property loaded # from a property file. Each commandline property is introduced by a -D argument. # Note that these arguments are parsed by the shell wrapper, and thus the -D arguments # must be the first arguments to any command. Commandline properties are useful for debugging # purposes. # # From Pegasus 3.1 release onwards, support has been dropped for the following # properties that were used to signify the location of the properties file # # # pegasus.properties # pegasus.user.properties # # # The following example provides a sensible set of properties to be set # by the user property file. These properties use mostly non-default # settings. It is an example only, and will not work for you: # # # pegasus.catalog.replica File # pegasus.catalog.replica.file ${pegasus.home}/etc/sample.rc.data # pegasus.catalog.transformation Text # pegasus.catalog.transformation.file ${pegasus.home}/etc/sample.tc.text # pegasus.catalog.site XML3 # pegasus.catalog.site.file ${pegasus.home}/etc/sample.sites.xml3 # # # If you are in doubt which properties are actually visible, pegasus during the # planning of the workflow dumps all properties after reading and prioritizing # in the submit directory in a file with the suffix properties. # Property : pegasus.home # Systems : all # Type : directory location string # Default : "$PEGASUS_HOME" # # The property pegasus.home cannot be set in the property file. This property is # automatically set up by the pegasus clients internally by determining the installation # directory of pegasus. Knowledge about this property is important for developers who # want to invoke PEGASUS JAVA classes without the shell wrappers. # # pegasus.home "$PEGASUS_HOME" # # SECTION "LOCAL DIRECTORIES" # # This section describes the GNU directory structure conventions. GNU # distinguishes between architecture independent and thus sharable # directories, and directories with data specific to a platform, and # thus often local. It also distinguishes between frequently modified # data and rarely changing data. These two axis form a space of four # distinct directories. # Property : pegasus.home.datadir # Systems : all # Type : directory location string # Default : ${pegasus.home}/share # # The datadir directory contains broadly visiable and possilby exported # configuration files that rarely change. This directory is currently # unused. # # pegasus.home.datadir ${pegasus.home}/share # Property : pegasus.home.sysconfdir # Systems : all # Type : directory location string # Default : ${pegasus.home}/etc # # The system configuration directory contains configuration files that # are specific to the machine or installation, and that rarely change. # This is the directory where the XML schema definition copies are # stored, and where the base pool configuration file is stored. # # pegasus.home.sysconfdir ${pegasus.home}/etc # Property : pegasus.home.sharedstatedir # Systems : all # Type : directory location string # Default : ${pegasus.home}/com # # Frequently changing files that are broadly visible are stored in the # shared state directory. This is currently unused. 
# # pegasus.home.sharedstatedir ${pegasus.home}/com # Property : pegasus.home.localstatedir # Systems : all # Type : directory location string # Default : ${pegasus.home}/var # # Frequently changing files that are specific to a machine and/or # installation are stored in the local state directory. This directory # is being used for the textual transformation catalog, # and the file-based replica catalog. # # pegasus.home.localstatedir ${pegasus.home}/var # Property : pegasus.dir.submit.logs # System : Pegasus # Since : 2.4 # Type : directory location string # Default : false # # By default, Pegasus points the condor logs for the workflow to # /tmp directory. This is done to ensure that the logs are created # in a local directory even though the submit directory maybe on NFS. # In the submit directory the symbolic link to the appropriate log file # in the /tmp exists. # # However, since /tmp is automatically purged in most cases, users # may want to preserve their condor logs in a directory on the # local filesystem other than /tmp # # # pegasus.dir.submit.logs (no default) # # SECTION "SITE DIRECTORIES" # # The site directory properties modify the behavior of remotely run jobs. # In rare occasions, it may also pertain to locally run compute jobs. # Property : pegasus.dir.useTimestamp # System : Pegasus # Since : 2.1 # Type : Boolean # Default : false # # While creating the submit directory, Pegasus employs a run numbering # scheme. Users can use this property to use a timestamp based # numbering scheme instead of the runxxxx scheme. # # pegasus.dir.useTimestamp false # Property : pegasus.dir.exec # System : Pegasus # Since : 2.0 # Type : remote directory location string # Default : (no default) # # This property modifies the remote location work directory in which all # your jobs will run. If the path is relative then it is appended to the # work directory (associated with the site), as specified in the site # catalog. If the path is absolute then it overrides the work directory # specified in the site catalog. # # pegasus.dir.exec (no default) # Property : pegasus.dir.storage # System : Pegasus # Since : 2.0 # Type : remote directory location string # Default : (no default) # # This property modifies the remote storage location on various pools. # If the path is relative then it is appended to the storage mount point # specified in the pool.config file. If the path is absolute then it # overrides the storage mount point specified in the pool config file. # # pegasus.dir.storage (no default) # Property : pegasus.dir.storage.deep # System : Pegasus # Since : 2.1 # Type : Boolean # Default : false # See Also : pegasus.dir.storage # See Also : pegasus.dir.useTimestamp # # This property results in the creation of a deep directory structure # on the output site, while populating the results. The base directory # on the remote end is determined from the site catalog and the # property pegasus.dir.storage. # # To this base directory, the relative submit directory structure # ( $user/$vogroup/$label/runxxxx ) is appended. # # $storage = $base + $relative_submit_directory # # Depending on the number of files being staged to the remote site a # Hashed File Structure is created that ensures that only 256 files # reside in one directory. 
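# For example (illustrative values): with a storage mount point of /data on
# the output site and a relative submit directory of jdoe/vo1/diamond/run0001,
# the results would be populated under /data/jdoe/vo1/diamond/run0001.
#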
#
# To create this directory structure on the storage site, Pegasus
# relies on the directory creation feature of the Grid FTP server,
# which appeared in globus 4.0.x.
#
# pegasus.dir.storage.deep false
# Property : pegasus.dir.create.strategy
# System : Pegasus
# Since : 2.2
# Type : enumeration
# Value[0] : HourGlass
# Value[1] : Tentacles
# Default : Tentacles
#
# If the --randomdir option is given to the Planner at
# runtime, the Pegasus planner adds nodes that create the random
# directories at the remote pool sites, before any jobs are
# actually run. The two modes determine the placement of these
# nodes and their dependencies to the rest of the graph.
#
#
# HourGlass
#
# It adds a make directory node at the top level of the graph, and all
# these concat to a single dummy job before branching out to the root
# nodes of the original/ concrete dag so far. So we introduce a
# classic X shape at the top of the graph. Hence the name HourGlass.
#
# Tentacles
#
# This option places the jobs creating directories at the top of the
# graph. However, instead of constricting it to an hour glass shape,
# this mode links the top node to all the relevant nodes for which the
# create dir job is necessary. It looks as if the node spreads its
# tentacles all around. This puts more load on the DAGMan because of
# the added dependencies but removes the restriction of the plan
# progressing only when all the create directory jobs have progressed
# on the remote pools, as is the case in the HourGlass model.
#
#
#
# pegasus.dir.create.strategy Tentacles
# Property : pegasus.dir.create.impl
# System : Pegasus
# Since : 2.2
# Type : enumeration
# Value[0] : DefaultImplementation
# Value[1] : S3
# Default : DefaultImplementation
#
# This property is used to select the executable that is used to
# create the working directory on the compute sites.
#
#
#
# DefaultImplementation
#
# The default executable that is used to create a directory is the
# dirmanager executable shipped with Pegasus. It is found at
# $PEGASUS_HOME/bin/dirmanager in the pegasus distribution.
# An entry for transformation pegasus::dirmanager needs
# to exist in the Transformation Catalog or the PEGASUS_HOME
# environment variable should be specified in the site catalog for
# the sites for this mode to work.
#
# S3
#
# This option is used to create buckets in S3 instead of a
# directory. This should be set when running workflows on Amazon
# EC2. This implementation relies on the s3cmd command line client to
# create the bucket. An entry for transformation amazon::s3cmd needs
# to exist in the Transformation Catalog for this to work.
#
#
#
#
#
# pegasus.dir.create.impl DefaultImplementation
#
# SECTION "SCHEMA FILE LOCATION PROPERTIES"
#
# This section defines the location of XML schema files that are
# used to parse the various XML document instances in the PEGASUS. The
# schema backups in the installed file-system permit PEGASUS operations
# without being online.
# Property : pegasus.schema.dax
# Systems : Pegasus
# Since : 2.0
# Type : XML schema file location string
# Value[0] : ${pegasus.home.sysconfdir}/dax-3.2.xsd
# Default : ${pegasus.home.sysconfdir}/dax-3.2.xsd
#
# This file is a copy of the XML schema that describes abstract DAG
# files that are the result of the abstract planning process, and input
# into any concrete planning. Providing a copy of the schema enables the
# parser to use the local copy instead of reaching out to the internet,
# and obtaining the latest version from the GriPhyN website dynamically.
# # pegasus.schema.dax ${pegasus.home.sysconfdir}/dax-3.2.xsd # Property : pegasus.schema.sc # Systems : Pegasus # Since : 2.0 # Type : XML schema file location string # Value[0] : ${pegasus.home.sysconfdir}/sc-3.0.xsd # Default : ${pegasus.home.sysconfdir}/sc-3.0.xsd # # This file is a copy of the XML schema that describes the xml # description of the site catalog, that is generated as a result of # using genpoolconfig command. # Providing a copy of the schema enables the parser to use the local # copy instead of reaching out to the internet, and obtaining the # latest version from the GriPhyN website dynamically. # # pegasus.schema.sc ${pegasus.home.sysconfdir}/sc-3.0.xsd # Property : pegasus.schema.ivr # Systems : all # Type : XML schema file location string # Value[0] : ${pegasus.home.sysconfdir}/iv-2.0.xsd # Default : ${pegasus.home.sysconfdir}/iv-2.0.xsd # # This file is a copy of the XML schema that describes invocation record # files that are the result of the a grid launch in a remote or local # site. Providing a copy of the schema enables the parser to use the # local copy instead of reaching out to the internet, and obtaining the # latest version from the GriPhyN website dynamically. # # pegasus.schema.ivr ${pegasus.home.sysconfdir}/iv-2.1.xsd # # SECTION "DATABASE DRIVERS FOR ALL RELATIONAL CATALOGS" # # Property : pegasus.catalog.*.db.driver # Property : pegasus.catalog.[catalog-name].db.driver # System : Pegasus # Type : Java class name # Value[0] : Postgres # Value[1] : MySQL # Value[2] : SQLServer2000 (not yet implemented!) # Value[3] : Oracle (not yet implemented!) # Default : (no default) # See also : pegasus.catalog.provenance # # The database driver class is dynamically loaded, as required by the # schema. Currently, only PostGreSQL 7.3 and MySQL 4.0 are supported. # Their respective JDBC3 driver is provided as part and parcel of the # PEGASUS. # # A user may provide their own implementation, derived from # org.griphyn.vdl.dbdriver.DatabaseDriver, to talk to a database of # their choice. # # For each schema in PTC, a driver is instantiated # separately, which has the same prefix as the schema. This may result # in multiple connections to the database backend. As fallback, the # schema "*" driver is attempted. # # The * in the property name can be replaced by a catalog name to # apply the property only for that catalog. # Valid catalog names are # # # replica # provenance # # # # pegasus.catalog.*.db.driver (no default) # Property : pegasus.catalog.*.db.url # Property : pegasus.catalog.[catalog-name].db.url # System : PTC, ... # Type : JDBC database URI string # Default : (no default) # Example : jdbc:postgresql:${user.name} # # Each database has its own string to contact the database on a given # host, port, and database. Although most driver URLs allow to pass # arbitrary arguments, please use the # pegasus.catalog.[catalog-name].db.* keys or pegasus.catalog.*.db.* # to preload these arguments. # THE URL IS A MANDATORY PROPERTY FOR ANY DBMS BACKEND. # # # Postgres : jdbc:postgresql:[//hostname[:port]/]database # MySQL : jdbc:mysql://hostname[:port]]/database # SQLServer: jdbc:microsoft:sqlserver://hostname:port # Oracle : jdbc:oracle:thin:[user/password]@//host[:port]/service # # # The * in the property name can be replaced by a catalog name to # apply the property only for that catalog. 
# Valid catalog names are # # # replica # provenance # # # # # pegasus.catalog.*.db.url (no default) # Property : pegasus.catalog.*.db.user # Property : pegasus.catalog.[catalog-name].db.user # System : PTC, ... # Type : string # Default : (no default) # Example : ${user.name} # # In order to access a database, you must provide the name of your # account on the DBMS. This property is database-independent. THIS IS A # MANDATORY PROPERTY FOR MANY DBMS BACKENDS. # # The * in the property name can be replaced by a catalog name to # apply the property only for that catalog. # Valid catalog names are # # # replica # provenance # # # # # # pegasus.catalog.*.db.user (no default) # Property : pegasus.catalog.*.db.password # Property : pegasus.catalog.[catalog-name].db.password # System : PTC, ... # Type : string # Default : (no default) # Example : ${user.name} # # In order to access a database, you must provide an optional password # of your account on the DBMS. This property is database-independent. # THIS IS A MANDATORY PROPERTY, IF YOUR DBMS BACKEND ACCOUNT REQUIRES # A PASSWORD. # # The * in the property name can be replaced by a catalog name to # apply the property only for that catalog. # Valid catalog names are # # # replica # provenance # # # # # pegasus.catalog.*.db.password (no default) # Property : pegasus.catalog.*.db.* # Property : pegasus.catalog.[catalog-name].db.* # System : PTC, RC # # Each database has a multitude of options to control in fine detail # the further behaviour. You may want to check the JDBC3 documentation # of the JDBC driver for your database for details. The keys will be # passed as part of the connect properties by stripping the # "pegasus.catalog.[catalog-name].db." prefix from them. # The catalog-name can be replaced by the following values # provenance for Provenance Catalog (PTC), # replica for Replica Catalog (RC) # # # Postgres 7.3 parses the following properties: # # pegasus.catalog.*.db.user # pegasus.catalog.*.db.password # pegasus.catalog.*.db.PGHOST # pegasus.catalog.*.db.PGPORT # pegasus.catalog.*.db.charSet # pegasus.catalog.*.db.compatible # # # MySQL 4.0 parses the following properties: # # # pegasus.catalog.*.db.user # pegasus.catalog.*.db.password # pegasus.catalog.*.db.databaseName # pegasus.catalog.*.db.serverName # pegasus.catalog.*.db.portNumber # pegasus.catalog.*.db.socketFactory # pegasus.catalog.*.db.strictUpdates # pegasus.catalog.*.db.ignoreNonTxTables # pegasus.catalog.*.db.secondsBeforeRetryMaster # pegasus.catalog.*.db.queriesBeforeRetryMaster # pegasus.catalog.*.db.allowLoadLocalInfile # pegasus.catalog.*.db.continueBatchOnError # pegasus.catalog.*.db.pedantic # pegasus.catalog.*.db.useStreamLengthsInPrepStmts # pegasus.catalog.*.db.useTimezone # pegasus.catalog.*.db.relaxAutoCommit # pegasus.catalog.*.db.paranoid # pegasus.catalog.*.db.autoReconnect # pegasus.catalog.*.db.capitalizeTypeNames # pegasus.catalog.*.db.ultraDevHack # pegasus.catalog.*.db.strictFloatingPoint # pegasus.catalog.*.db.useSSL # pegasus.catalog.*.db.useCompression # pegasus.catalog.*.db.socketTimeout # pegasus.catalog.*.db.maxReconnects # pegasus.catalog.*.db.initialTimeout # pegasus.catalog.*.db.maxRows # pegasus.catalog.*.db.useHostsInPrivileges # pegasus.catalog.*.db.interactiveClient # pegasus.catalog.*.db.useUnicode # pegasus.catalog.*.db.characterEncoding # # # MS SQL Server 2000 support the following properties (keys are # case-insensitive, e.g. 
both "user" and "User" are valid): # # # pegasus.catalog.*.db.User # pegasus.catalog.*.db.Password # pegasus.catalog.*.db.DatabaseName # pegasus.catalog.*.db.ServerName # pegasus.catalog.*.db.HostProcess # pegasus.catalog.*.db.NetAddress # pegasus.catalog.*.db.PortNumber # pegasus.catalog.*.db.ProgramName # pegasus.catalog.*.db.SendStringParametersAsUnicode # pegasus.catalog.*.db.SelectMethod # # # The * in the property name can be replaced by a catalog name to # apply the property only for that catalog. # Valid catalog names are # # # replica # provenance # # # # # pegasus.catalog.*.db.* (no default) # # SECTION "CATALOG PROPERTIES" # # # SUBSECTION "REPLICA CATALOG" # # Property : pegasus.catalog.replica # System : Pegasus # Since : 2.0 # Type : enumeration # Value[0] : RLS # Value[1] : LRC # Value[2] : JDBCRC # Value[3] : File # Value[4] : MRC # Default : RLS # # Pegasus queries a Replica Catalog to discover the physical filenames # (PFN) for input files specified in the DAX. Pegasus can interface # with various types of Replica Catalogs. This property specifies # which type of Replica Catalog to use during the planning process. # # # RLS # RLS (Replica Location Service) is a distributed replica # catalog, which ships with GT4. There is an index service called # Replica Location Index (RLI) to which 1 or more Local Replica # Catalog (LRC) report. Each LRC can contain all or a subset of # mappings. In this mode, Pegasus queries the central RLI to # discover in which LRC's the mappings for a LFN reside. It then # queries the individual LRC's for the PFN's. # To use RLS, the user additionally needs to set the property # pegasus.catalog.replica.url to specify the URL for the RLI to # query. # Details about RLS can be found at # http://www.globus.org/toolkit/data/rls/ # # LRC # If the user does not want to query the RLI, but directly a # single Local Replica Catalog. # To use LRC, the user additionally needs to set the property # pegasus.catalog.replica.url to specify the URL for the LRC to # query. # Details about RLS can be found at # http://www.globus.org/toolkit/data/rls/ # # JDBCRC # In this mode, Pegasus queries a SQL based replica catalog that # is accessed via JDBC. The sql schema's for this catalog can be # found at $PEGASUS_HOME/sql directory. # To use JDBCRC, the user additionally needs to set the following # properties # # pegasus.catalog.replica.db.url # pegasus.catalog.replica.db.user # pegasus.catalog.replica.db.password # # # File # In this mode, Pegasus queries a file based replica catalog. # It is neither transactionally safe, nor advised to use for # production purposes in any way. Multiple concurrent instances # will clobber each other!. The site attribute should # be specified whenever possible. The attribute key for the site # attribute is "pool". # # The LFN may or may not be quoted. If it contains linear # whitespace, quotes, backslash or an equality sign, it must be # quoted and escaped. Ditto for the PFN. The attribute key-value # pairs are separated by an equality sign without any # whitespaces. The value may be in quoted. The LFN sentiments about quoting apply. # # # LFN PFN # LFN PFN a=b [..] # LFN PFN a="b" [..] # "LFN w/LWS" "PFN w/LWS" [..] # # # To use File, the user additionally needs to specify # pegasus.catalog.replica.file property to specify the path to the # file based RC. # # # MRC # In this mode, Pegasus queries multiple replica catalogs to # discover the file locations on the grid. 
To use it set # # # pegasus.catalog.replica MRC # # # Each associated replica catalog can be configured via properties # as follows. # # The user associates a variable name referred to as [value] for # each of the catalogs, where [value] is any legal identifier # (concretely [A-Za-z][_A-Za-z0-9]*) For each associated replica # catalogs the user specifies the following properties. # # # pegasus.catalog.replica.mrc.[value] specifies the type of replica catalog. # pegasus.catalog.replica.mrc.[value].key specifies a property name key for a # particular catalog # # # For example, if a user wants to query two lrc's at the same time # he/she can specify as follows # # # pegasus.catalog.replica.mrc.lrc1 LRC # pegasus.catalog.replica.mrc.lrc2.url rls://sukhna # # pegasus.catalog.replica.mrc.lrc2 LRC # pegasus.catalog.replica.mrc.lrc2.url rls://smarty # # # # In the above example, lrc1, lrc2 are any valid identifier names # and url is the property key that needed to be specified. # # # # # # # pegasus.catalog.replica RLS # Property : pegasus.catalog.replica.url # System : Pegasus # Since : 2.0 # Type : URI string # Default : (no default) # # When using the modern RLS replica catalog, the URI to the Replica # catalog must be provided to Pegasus to enable it to look up # filenames. There is no default. # # pegasus.catalog.replica.url (no default) # Property : pegasus.catalog.replica.chunk.size # System : Pegasus, rc-client # Since : 2.0 # Type : Integer # Default : 1000 # # The rc-client takes in an input file containing the mappings upon # which to work. This property determines, the number of lines that # are read in at a time, and worked upon at together. This allows the # various operations like insert, delete happen in bulk if the # underlying replica implementation supports it. # # # pegasus.catalog.replica.chunk.size 1000 # Property : pegasus.catalog.replica.lrc.ignore # System : Replica Catalog - RLS # Since : 2.0 # Type : comma separated list of LRC urls # Default : (no default) # See also : pegasus.catalog.replica.lrc.restrict # # Certain users may like to skip some LRCs while querying for the physical # locations of a file. If some LRCs need to be skipped from those found in the # rli then use this property. You can define either the full URL or partial # domain names that need to be skipped. E.g. If a user wants # rls://smarty.isi.edu and all LRCs on usc.edu to be skipped then the # property will be set as pegasus.rls.lrc.ignore=rls://smarty.isi.edu,usc.edu # # pegasus.catalog.replica.lrc.ignore (no default) # Property : pegasus.catalog.replica.lrc.restrict # System : Replica Catalog - RLS # Since : 1.3.9 # Type : comma separated list of LRC urls # Default : (no default) # See also : pegasus.catalog.replica.lrc.ignore # # This property applies a tighter restriction on the results returned # from the LRCs specified. Only those PFNs are returned that have a # pool attribute associated with them. The property "pegasus.rc.lrc.ignore" # has a higher priority than "pegasus.rc.lrc.restrict". For example, in case # a LRC is specified in both properties, the LRC would be ignored (i.e. # not queried at all instead of applying a tighter restriction on the # results returned). # # pegasus.catalog.replica.lrc.restrict (no default) # Property : pegasus.catalog.replica.lrc.site.[site-name] # System : Replica Catalog - RLS # Since : 2.3.0 # Type : LRC url # Default : (no default) # # # This property allows for the LRC url to be associated with site # handles. 
Usually, a pool attribute is required to be associated with # the PFN for Pegasus to figure out the site on which PFN resides. # However, in the case where an LRC is responsible for only # a single site's mappings, Pegasus can safely associate LRC url # with the site. This association can be used to determine the pool # attribute for all mappings returned from the LRC, if the mapping # does not have a pool attribute associated with it. # # The site_name in the property should be replaced by the name of # the site. For example # # pegasus.catalog.replica.lrc.site.isi rls://lrc.isi.edu # # tells Pegasus that all PFNs returned from LRC rls://lrc.isi.edu # are associated with site isi. # # The [site_name] should be the same as the site handle specified in # the site catalog. # # pegasus.catalog.replica.lrc.site.[site-name] (no default) # Property : pegasus.catalog.replica.cache.asrc # System : Pegasus # Since : 2.0 # Type : Boolean # Value[0] : false # Value[1] : true # Default : false # See also : pegasus.catalog.replica # # This property determines whether to treat the cache file specified # as a supplemental replica catalog or not. User can specify on the # command line to pegasus-plan a comma separated list of cache files using # the --cache option. By default, the LFN->PFN mappings contained in # the cache file are treated as cache, i.e if an entry is found in a # cache file the replica catalog is not queried. This results in only # the entry specified in the cache file to be available for replica # selection. # # Setting this property to true, results in the cache files to be # treated as supplemental replica catalogs. This results in the # mappings found in the replica catalog (as specified by # pegasus.catalog.replica) to be merged with the ones found in the # cache files. Thus, mappings for a particular LFN found in both the # cache and the replica catalog are available for replica selection. # # pegasus.catalog.replica.cache.asrc false # # SUBSECTION "SITE CATALOG" # # Property : pegasus.catalog.site # System : Site Catalog # Since : 2.0 # Type : enumeration # Value[0] : XML3 # Value[1] : XML # Default : XML3 # # The site catalog file is available in three major flavors: The Text and # and XML formats for the site catalog are deprecated. # Users can use pegasus-sc-converter client to convert their site catalog # to the newer XML3 format. # # THIS FORMAT IS DEPRECATED. WILL BE REMOVED IN COMING VERSIONS. # USE pegasus-sc-converter to convert XML format to XML3 Format. # The "XML" format is an XML-based file. The XML format reads site # catalog conforming to the old site catalog schema available at # http://pegasus.isi.edu/wms/docs/schemas/sc-2.0/sc-2.0.xsd # # The "XML3" format is an XML-based file. The XML format reads site # catalog conforming to the old site catalog schema available at # http://pegasus.isi.edu/wms/docs/schemas/sc-3.0/sc-3.0.xsd # # # # pegasus.catalog.site XML3 # Property : pegasus.catalog.site.file # System : Site Catalog # Since : 2.0 # Type : file location string # Default : ${pegasus.home.sysconfdir}/sites.xml3 |${pegasus.home.sysconfdir}/sites.xml # See also : pegasus.catalog.site # # Running things on the grid requires an extensive description of the # capabilities of each compute cluster, commonly termed "site". This # property describes the location of the file that contains such a site # description. As the format is currently in flow, please refer to the # userguide and Pegasus for details which format is expected. 
# The default value is dependant on the value specified for # the property pegasus.catalog.site . If type of SiteCatalog used is XML3, then sites.xml3 # is picked up from sysconfdir else sites.xml # # pegasus.catalog.site.file ${pegasus.home.sysconfdir}/sites.xml3 | ${pegasus.home.sysconfdir}/sites.xml # # SUBSECTION "TRANSFORMATION CATALOG" # # Property : pegasus.catalog.transformation # System : Transformation Catalog # Since : 2.0 # Type : enumeration # Value[0] : Text # Value[1] : File # Default : Text # See also : pegasus.catalog.transformation.file # # # # Text # In this mode, a multiline file based format is understood. The file # is read and cached in memory. Any modifications, as adding or # deleting, causes an update of the memory and hence to the file # underneath. All queries are done against the memory # representation. # # The file sample.tc.text in the etc directory contains an example # # Here is a sample textual format for transfomation catalog containing # one transformation on two sites # # # tr example::keg:1.0 { # # #specify profiles that apply for all the sites for the transformation # #in each site entry the profile can be overriden # profile env "APP_HOME" "/tmp/karan" # profile env "JAVA_HOME" "/bin/app" # # site isi { # profile env "me" "with" # profile condor "more" "test" # profile env "JAVA_HOME" "/bin/java.1.6" # pfn "/path/to/keg" # arch "x86" # os "linux" # osrelease "fc" # osversion "4" # type "INSTALLED" # } # # site wind { # profile env "me" "with" # profile condor "more" "test" # pfn "/path/to/keg" # arch "x86" # os "linux" # osrelease "fc" # osversion "4" # type "STAGEABLE" # } # } # # # # File # THIS FORMAT IS DEPRECATED. WILL BE REMOVED IN COMING VERSIONS. # USE pegasus-tc-converter to convert File format to Text Format. # In this mode, a file format is understood. The file is # read and cached in memory. Any modifications, as adding or # deleting, causes an update of the memory and hence to the file # underneath. All queries are done against the memory # representation. The new TC file format uses 6 columns: # # The resource ID is represented in the first column. # The logical transformation uses the colonized format # ns::name:vs. # The path to the application on the system # The installation type is identified by one of the following # keywords - all upper case: INSTALLED, STAGEABLE. # If not specified, or NULL is used, the type # defaults to INSTALLED. # The system is of the format ARCH::OS[:VER:GLIBC]. The # following arch types are understood: "INTEL32", "INTEL64", # "SPARCV7", "SPARCV9". # The following os types are understood: "LINUX", "SUNOS", # "AIX". If unset or NULL, defaults to # INTEL32::LINUX. # Profiles are written in the format # NS::KEY=VALUE,KEY2=VALUE;NS2::KEY3=VALUE3 # Multiple key-values for same namespace are seperated by a # comma "," and multiple namespaces are seperated by a # semicolon ";". If any of your profile values contains a # comma you must not use the namespace abbreviator. # # # # # # pegasus.catalog.transformation Text # Property : pegasus.catalog.transformation.file # Systems : Transformation Catalog # Type : file location string # Default : ${pegasus.home.sysconfdir}/tc.text | ${pegasus.home.sysconfdir}/tc.data # See also : pegasus.catalog.transformation # # This property is used to set the path to the textual transformation # catalogs of type File or Text. 
If the transformation catalog is of type Text # then tc.text file is picked up from sysconfdir, else tc.data # # # pegasus.catalog.transformation.file ${pegasus.home.sysconfdir}/tc.text | ${pegasus.home.sysconfdir}/tc.data # # SUBSECTION "PROVENANCE CATALOG" # # Property : pegasus.catalog.provenance # System : Provenance Tracking Catalog (PTC) # Since : 2.0 # Type : Java class name # Value[0] : InvocationSchema # Value[1] : NXDInvSchema # Default : (no default) # See also : pegasus.catalog.*.db.driver # # This property denotes the schema that is being used to access a PTC. # The PTC is usually not a standard installation. If you use a database # backend, you most likely have a schema that supports PTCs. By default, # no PTC will be used. # # Currently only the InvocationSchema is available for storing the # provenance tracking records. Beware, this can become a lot of data. # The values are names of Java classes. If no absolute Java classname # is given, "org.griphyn.vdl.dbschema." is prepended. Thus, by deriving # from the DatabaseSchema API, and implementing the PTC interface, # users can provide their own classes here. # # Alternatively, if you use a native XML database like eXist, you can # store data using the NXDInvSchema. This will avoid using any of the # other database driver properties. # # pegasus.catalog.provenance (no default) # Property : pegasus.catalog.provenance.refinement # System : PASOA Provenance Store # Since : 2.0.1 # Type : Java class name # Value[0] : Pasoa # Value[1] : InMemory # Default : InMemory # See also : pegasus.catalog.*.db.driver # # This property turns on the logging of the refinement process that # happens inside Pegasus to the PASOA store. Not all actions are # currently captured. It is still an experimental feature. # # The PASOA store needs to run on localhost on port 8080 # https://localhost:8080/prserv-1.0 # # # pegasus.catalog.provenance.refinement InMemory # # SECTION "REPLICA SELECTION PROPERTIES" # # Property : pegasus.selector.replica # System : Replica Selection # Since : 2.0 # Type : URI string # Default : default # See also : pegasus.replica.*.ignore.stagein.sites # See also : pegasus.replica.*.prefer.stagein.sites # # Each job in the DAX maybe associated with input LFN's denoting the # files that are required for the job to run. To determine the # physical replica (PFN) for a LFN, Pegasus queries the replica # catalog to get all the PFN's (replicas) associated with a LFN. # Pegasus then calls out to a replica selector to select a replica # amongst the various replicas returned. This property determines the # replica selector to use for selecting the replicas. # # # Default # # If a PFN that is a file URL (starting with file:///) and has a # pool attribute matching to the site handle of the site where the # compute is to be run is found, then that is returned. # Else,a random PFN is selected amongst all the PFN's that # have a pool attribute matching to the site handle of the site # where a compute job is to be run. # Else, a random pfn is selected amongst all the PFN's. # # Restricted # # This replica selector, allows the user to specify good sites and # bad sites for staging in data to a particular compute site. A good # site for a compute site X, is a preferred site from which # replicas should be staged to site X. If there are more than one # good sites having a particular replica, then a random site is # selected amongst these preferred sites. # # A bad site for a compute site X, is a site from which replica's # should not be staged. 
# The reason for not staging a replica from a
# bad site can vary from the link being down, to the user not having
# permissions on that site's data.
#
# The good | bad sites are specified by the properties
#
# pegasus.replica.*.prefer.stagein.sites
# pegasus.replica.*.ignore.stagein.sites
#
# where the * in the property name denotes the name of the compute
# site. A * in the property key is taken to mean all sites.
#
# The pegasus.replica.*.prefer.stagein.sites property takes precedence
# over the pegasus.replica.*.ignore.stagein.sites property, i.e. if for
# a site X a site Y is specified both in the ignored and the preferred
# set, then site Y is taken to be only a preferred site for site X.
#
# Regex
#
# This replica selector allows the user to specify regex expressions
# that can be used to rank the various PFNs returned from the Replica
# Catalog for a particular LFN. This replica selector selects the
# highest ranked PFN, i.e. the replica with the lowest rank value.
#
# The regular expressions are assigned different ranks, which determine
# the order in which the expressions are employed. The rank values for
# the regexes can be expressed in user properties using the property
#
# pegasus.selector.replica.regex.rank.[value] regex-expression
#
# The value is an integer that denotes the rank of an expression, with
# a rank value of 1 being the highest rank.
#
# Please note that before applying any regular expressions on the
# PFNs, the file URLs that don't match the preferred site are
# explicitly filtered out.
#
# Local
#
# This replica selector prefers replicas from the local host that
# start with a file: URL scheme. It is useful when users want to
# stage in files to a remote site from the submit host using the
# Condor file transfer mechanism.
#
# pegasus.selector.replica Default
# Property : pegasus.selector.replica.*.ignore.stagein.sites
# System : Replica Selection
# Type : comma separated list of sites
# Since : 2.0
# Default : no default
# See also : pegasus.selector.replica
# See also : pegasus.selector.replica.*.prefer.stagein.sites
#
# A comma separated list of storage sites from which to never stage in
# data to a compute site. The property can apply to all or a single
# compute site, depending on how the * in the property name is expanded.
#
# The * in the property name means all compute sites unless replaced
# by a site name.
#
# For example, setting pegasus.selector.replica.*.ignore.stagein.sites
# to usc means: ignore all replicas from site usc for staging in to any
# compute site. Setting pegasus.selector.replica.isi.ignore.stagein.sites
# to usc means: ignore all replicas from site usc for staging in data
# to site isi.
#
# pegasus.selector.replica.*.ignore.stagein.sites (no default)
# Property : pegasus.selector.replica.*.prefer.stagein.sites
# System : Replica Selection
# Type : comma separated list of sites
# Since : 2.0
# Default : no default
# See also : pegasus.selector.replica
# See also : pegasus.selector.replica.*.ignore.stagein.sites
#
# A comma separated list of preferred storage sites from which to stage
# in data to a compute site. The property can apply to all or a single
# compute site, depending on how the * in the property name is expanded.
#
# The * in the property name means all compute sites unless replaced
# by a site name.
#
# For example, setting pegasus.selector.replica.*.prefer.stagein.sites
# to usc means: prefer all replicas from site usc for staging in to any
# compute site. Setting pegasus.selector.replica.isi.prefer.stagein.sites
# to usc means: prefer all replicas from site usc for staging in data
# to site isi.
#
# pegasus.selector.replica.*.prefer.stagein.sites (no default)
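#
# For illustration, building on the usc/isi examples above ( the site
# name "tape" is hypothetical ), a Restricted selector setup that
# prefers replicas at usc for stage-in to site isi, while never staging
# in from tape, might look like:
#
# pegasus.selector.replica                            Restricted
# pegasus.selector.replica.isi.prefer.stagein.sites   usc
# pegasus.selector.replica.isi.ignore.stagein.sites   tape
#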
# Property : pegasus.selector.replica.regex.rank.[value]
# System : Replica Selection
# Type : Regex Expression
# Since : 2.3.0
# Default : no default
# See also : pegasus.selector.replica
#
# Specifies the regex expressions to be applied to the PFNs returned
# for a particular LFN. Refer to
#
# http://java.sun.com/javase/6/docs/api/java/util/regex/Pattern.html
#
# for information on how to construct a regex expression.
#
# The [value] in the property key is to be replaced by an int value
# that designates the rank value for the regex expression to be
# applied in the Regex replica selector.
#
# The example below indicates a preference for file URLs over URLs
# referring to the gridftp server at example.isi.edu
#
# pegasus.selector.replica.regex.rank.1 file://.*
# pegasus.selector.replica.regex.rank.2 gsiftp://example\.isi\.edu.*
#
# pegasus.selector.replica.regex.rank.[value] regex-expression
#
# SECTION "SITE SELECTION PROPERTIES"
#
# Property : pegasus.selector.site
# System : Pegasus
# Since : 2.0
# Type : enumeration
# Value[0] : Random
# Value[1] : RoundRobin
# Value[2] : NonJavaCallout
# Value[3] : Group
# Value[4] : Heft
# Default : Random
# See also : pegasus.selector.site.path
# See also : pegasus.selector.site.timeout
# See also : pegasus.selector.site.keep.tmp
# See also : pegasus.selector.site.env.*
#
# The site selection in Pegasus can be on the basis of any of the
# following strategies.
#
# Random
# In this mode, the jobs will be randomly distributed among the
# sites that can execute them.
#
# RoundRobin
# In this mode, the jobs will be assigned in a round robin manner
# amongst the sites that can execute them. Since each site cannot
# execute every type of job, the round robin scheduling is done per
# level on a sorted list. The sorting is on the basis of the number
# of jobs a particular site has been assigned in that level so far.
# If a job cannot be run on the first site in the queue (due to no
# matching entry in the transformation catalog for the transformation
# referred to by the job), it goes to the next one and so on. This
# implementation defaults to classic round robin in the case where
# all the jobs in the workflow can run on all the sites.
#
# NonJavaCallout
# In this mode, Pegasus will call out to an external site selector.
# A temporary file is prepared containing the job information that is
# passed to the site selector as an argument while invoking it. The
# path to the site selector is specified by setting the property
# pegasus.site.selector.path. The environment variables that need to
# be set to run the site selector can be specified using the
# properties with a pegasus.site.selector.env. prefix. The temporary
# file contains information about the job that needs to be scheduled.
# It contains key value pairs, with each key value pair on a new line
# and separated by a =.
#
# The following pairs are currently generated for the site selector
# temporary file that is generated in the NonJavaCallout.
#
# version         is the version of the site selector api,
#                 currently 2.0.
# transformation  is the fully-qualified definition identifier for
#                 the transformation (TR), namespace::name:version.
# derivation      is the fully qualified definition identifier for
#                 the derivation (DV), namespace::name:version.
# job.level       is the job's depth in the tree of the workflow DAG.
# job.id          is the job's ID, as used in the DAX file.
# resource.id     is a pool handle, followed by whitespace, followed
#                 by a gridftp server. Typically, each gridftp server
#                 is enumerated once, so you may have multiple
#                 occurrences of the same site. There can be multiple
#                 occurrences of this key.
# input.lfn       is an input LFN, optionally followed by a whitespace
#                 and file size. There can be multiple occurrences of
#                 this key, one for each input LFN required by the job.
# wf.name         is the label of the dax, as found in the DAX's root
#                 element.
# wf.index        is the DAX index, which is incremented for each
#                 partition in case of deferred planning.
# wf.time         is the mtime of the workflow.
# wf.manager      is the name of the workflow manager being used,
#                 e.g. condor.
# vo.name         is the name of the virtual organization that is
#                 running this workflow. It is currently set to NONE.
# vo.group        is unused at present and is set to NONE.
#
# Group
# In this mode, a group of jobs will be assigned to the same site
# that can execute them. The use of the PEGASUS profile key group in
# the dax associates a job with a particular group. The jobs that do
# not have the profile key associated with them will be put in the
# default group. The jobs in the default group are handed over to the
# "Random" site selector for scheduling.
#
# Heft
# In this mode, a version of the HEFT processor scheduling algorithm
# is used to schedule jobs in the workflow to multiple grid sites.
# The implementation assumes default data communication costs when
# jobs are not scheduled on to the same site. Later on this may be
# made more configurable.
#
# The runtime for the jobs is specified in the transformation catalog
# by associating the pegasus profile key runtime with the entries.
#
# The number of processors in a site is picked up from the attribute
# idle-nodes associated with the vanilla jobmanager of the site in
# the site catalog.
#
# pegasus.selector.site Random
# Property : pegasus.selector.site.path
# System : Site Selector
# Since : 2.0
# Type : String
#
# If one calls out to an external site selector using the
# NonJavaCallout mode, this refers to the path where the site selector
# is installed. In case other strategies are used, it does not need to
# be set.
#
# pegasus.site.selector.path (no default)
# Property : pegasus.site.selector.env.*
# System : Pegasus
# Since : 1.2.3
# Type : String
#
# The environment variables that need to be set while calling out to
# the site selector. These are the variables that the user would set
# if running the site selector on the command line. The name of the
# environment variable is obtained by stripping the
# "pegasus.site.selector.env." prefix from the property key. The value
# of the environment variable is the value of the property.
#
# e.g. pegasus.site.selector.env.LD_LIBRARY_PATH /globus/lib would lead
# to the site selector being called with LD_LIBRARY_PATH set to
# /globus/lib.
#
# pegasus.site.selector.env.* (no default)
# Property : pegasus.selector.site.timeout
# System : Site Selector
# Since : 2.0
# Type : non negative integer
# Default : 60
#
# It sets the number of seconds Pegasus waits to hear back from an
# external site selector using the NonJavaCallout interface before
# timing out.
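#
# As an illustration ( the selector path and timeout value are
# hypothetical ), a complete NonJavaCallout setup combines the
# properties described above:
#
# pegasus.selector.site                       NonJavaCallout
# pegasus.site.selector.path                  /opt/selectors/my-selector.sh
# pegasus.site.selector.env.LD_LIBRARY_PATH   /globus/lib
# pegasus.selector.site.timeout               120
#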
# pegasus.selector.site.timeout 60
# Property : pegasus.selector.site.keep.tmp
# System : Pegasus
# Since : 2.0
# Type : enumeration
# Value[0] : onerror
# Value[1] : always
# Value[2] : never
# Default : onerror
#
# It determines whether Pegasus deletes the temporary input files that
# are generated in the temp directory or not. These temporary input
# files are passed as input to the external site selectors.
#
# A temporary input file is created for each job that needs to be
# scheduled.
#
# pegasus.selector.site.keep.tmp onerror
#
# SECTION "DATA STAGING CONFIGURATION"
#
# Property : pegasus.data.configuration
# System : Pegasus
# Since : 3.1
# Type : enumeration
# Value[0] : sharedfs
# Value[1] : nonsharedfs
# Value[2] : condorio
# Default : sharedfs
#
# This property sets up Pegasus to run in different environments.
#
# sharedfs
# If this is set, Pegasus will be set up to execute jobs on the shared
# filesystem on the execution site. This assumes that the head node of
# a cluster and the worker nodes share a filesystem. The staging site
# in this case is the same as the execution site. Pegasus adds a create
# dir job to the executable workflow that creates a workflow specific
# directory on the shared filesystem. The data transfer jobs in the
# executable workflow ( stage_in_ , stage_inter_ , stage_out_ )
# transfer the data to this directory. The compute jobs in the
# executable workflow are launched in the directory on the shared
# filesystem. Internally, if this is set, the following properties are
# set:
#
# pegasus.execute.*.filesystem.local false
#
# condorio
# If this is set, Pegasus will be set up to run jobs in a pure condor
# pool, with the nodes not sharing a filesystem. Data is staged to the
# compute nodes from the submit host using Condor File IO.
# The planner is automatically set up to use the submit host ( site
# local ) as the staging site. All the auxiliary jobs added by the
# planner to the executable workflow ( create dir, data stage-in and
# stage-out, cleanup ) refer to the workflow specific directory on the
# local site. The data transfer jobs in the executable workflow
# ( stage_in_ , stage_inter_ , stage_out_ ) transfer the data to this
# directory. When the compute jobs start, the input data for each job
# is shipped from the workflow specific directory on the submit host
# to the compute/worker node using Condor file IO. The output data for
# each job is similarly shipped back to the submit host from the
# compute/worker node. This setup is particularly helpful when running
# workflows in a cloud environment where setting up a shared
# filesystem across the VMs may be tricky.
# On loading this property, internally the following properties are
# set:
#
# pegasus.transfer.sls.*.impl Condor
# pegasus.execute.*.filesystem.local true
# pegasus.gridstart PegasusLite
# pegasus.transfer.worker.package true
#
# nonsharedfs
# If this is set, Pegasus will be set up to execute jobs on an
# execution site without relying on a shared filesystem between the
# head node and the worker nodes. You can specify a staging site
# ( using the --staging-site option to pegasus-plan ) to indicate the
# site to use as a central storage location for a workflow. The
# staging site is independent of the execution sites on which a
# workflow executes. All the auxiliary jobs added by the planner to
# the executable workflow ( create dir, data stage-in and stage-out,
# cleanup ) refer to the workflow specific directory on the staging
# site.
The data transfer jobs in # the executable workflow ( stage_in_ , stage_inter_ , stage_out_ ) transfer the # data to this directory. When the compute jobs start, the input data for each # job is shipped from the workflow specific directory on the submit host to # compute/worker node using pegasus-transfer. The output data for each job is # similarly shipped back to the submit host from the compute/worker node. # The protocols supported are at this time SRM, GridFTP, iRods, S3. # This setup is particularly helpful when running workflows on OSG where # most of the execution sites don't have enough data storage. Only a few # sites have large amounts of data storage exposed that can be used to place # data during a workflow run. This setup is also helpful when running workflows # in the cloud environment where setting up a shared filesystem across the VM's may be # tricky. # On loading this property, internally the following properies are set # # pegasus.execute.*.filesystem.local true # pegasus.gridstart PegasusLite # pegasus.transfer.worker.package true # # # # # # # pegasus.data.configuration sharedfs # # SECTION "TRANSFER CONFIGURATION PROPERTIES" # # Property : pegasus.transfer.*.impl # System : Pegasus # Type : enumeration # Value[0] : Transfer # Value[1] : GUC # Default : Transfer # See also : pegasus.transfer.refiner # Since : 2.0 # # Each compute job usually has data products that are required to be # staged in to the execution site, materialized data products staged # out to a final resting place, or staged to another job running at a # different site. This property determines the underlying grid # transfer tool that is used to manage the transfers. # # The * in the property name can be replaced to achieve finer grained # control to dictate what type of transfer jobs need to be managed # with which grid transfer tool. # # Usually,the arguments with which the client is invoked can be # specified by # # - the property pegasus.transfer.arguments # - associating the PEGASUS profile key transfer.arguments # # # # The table below illustrates all the possible variations of the # property. # # # # Property Name & Applies to # pegasus.transfer.stagein.impl & the stage in transfer jobs # pegasus.transfer.stageout.impl & the stage out transfer jobs # pegasus.transfer.inter.impl & the inter pool transfer jobs # pegasus.transfer.setup.impl & the setup transfer job # pegasus.transfer.*.impl & apply to types of transfer jobs # # # Note: Since version 2.2.0 the worker package is staged automatically during # staging of executables to the remote site. This is achieved # by adding a setup transfer job to the workflow. The setup transfer job by # default uses GUC to stage the data. The implementation to use can be # configured by setting the property # pegasus.transfer.setup.impl property. # However, if you have pegasus.transfer.*.impl set in your properties file, # then you need to set pegasus.transfer.setup.impl to GUC # # # The various grid transfer tools that can be used to manage data # transfers are explained below # # # Transfer # # This results in pegasus-transfer to be used for transferring of files. It # is a python based wrapper around various transfer clients like # globus-url-copy, lcg-copy, wget, cp, ln . pegasus-transfer looks at # source and destination url and figures out automatically which underlying # client to use. pegasus-transfer is distributed with the PEGASUS and can # be found at $PEGASUS_HOME/bin/pegasus-transfer. 
# # For remote sites, Pegasus constructs the default path to pegasus-transfer # on the basis of PEGASUS_HOME env profile specified in the site catalog. # To specify a different path to the pegasus-transfer client , users can # add an entry into the transformation catalog with fully qualified logical # name as pegasus::pegasus-transfer # # # GUC # This refers to the new guc client that does multiple file # transfers per invocation. The globus-url-copy client # distributed with Globus 4.x is compatible with this mode. # # # # # pegasus.transfer.*.impl Transfer # Property : pegasus.transfer.refiner # System : Pegasus # Type : enumeration # Value[0] : Bundle # Value[1] : Chain # Value[2] : Condor # Value[3] : Cluster # Default : Bundle # Since : 2.0 # See also : pegasus.transfer.*.impl # # This property determines how the transfer nodes are added to the # workflow. The various refiners differ in the how they link the # various transfer jobs, and the number of transfer jobs that are # created per compute jobs. # # Bundle # This is default refinement strategy in Pegasus. # In this refinement strategy, the number of stage in transfer # nodes that are constructed per execution site can vary. The # number of transfer nodes can be specified, by associating the pegasus # profile "bundle.stagein". The profile can either be associated # with the execution site in the site catalog or with the # "transfer" executable in the transformation catalog. The value in # the transformation catalog overrides the one in the site # catalog. # This refinement strategy extends from the Default refiner, and # thus takes care of file clobbering while staging in data. # # Chain # In this refinement strategy, chains of stagein transfer nodes # are constructed. A chain means that the jobs are sequentially # dependant upon each other i.e. at any moment, only one stage in # transfer job will run per chain. The number of chains can be # specified by associating the pegasus profile "chain.stagein". The # profile can either be associated with the execution site in the # site catalog or with the "transfer" executable in the # transformation catalog. The value in the transformation catalog # overrides the one in the site catalog. # This refinement strategy extends from the Default refiner, and # thus takes care of file clobbering while staging in data. # # Condor # In this refinement strategy, no additional staging transfer jobs # are added to the workflow. Instead the compute jobs are modified # to have the transfer_input_files and transfer_output_files set # to pull the input data. To stage-out the data a separate # stage-out is added. The stage-out job is a /bin/true job that # uses the transfer_input_file and transfer_output_files to stage # the data back to the submit host. # This refinement strategy is used workflows are being executed on # a Condor pool, and the submit node itself is a part of the # Condor pool. # # Cluster # # In this refinement strategy, clusters of stage-in and stageout jobs # are created per level of the workflow. It builds upon the Bundle refiner. # The differences between the Bundle and Cluster refiner are as follows. # # - stagein is also clustered/bundled per level. In Bundle it was # for the whole workflow. # - keys that control the clustering ( old name bundling are ) # cluster.stagein and cluster.stageout # # This refinement strategy also adds dependencies between the # stagein transfer jobs on different levels of the workflow to ensure # that stagein for the top level happens first and so on. 
# # An image of the workflow with this refinement strategy can be found at # # http://vtcpc.isi.edu/pegasus/index.php/ChangeLog#Added_a_Cluster_Transfer_Refiner # # # # # # # pegasus.transfer.refiner Default # Property : pegasus.transfer.sls.*.impl # System : Pegasus # Type : enumeration # Value[0] : Transfer # Value[1] : Condor # Default : Transfer # Since : 2.2.0 # See also : pegasus.data.configuration # See also : pegasus.execute.*.filesystem.local # # This property specifies the transfer tool to be used for # Second Level Staging (SLS) of input and output data between the # head node and worker node filesystems. # # Currently, the * in the property name CANNOT be replaced to achieve # finer grained control to dictate what type of SLS transfers need to # be managed with which grid transfer tool. # # # The various grid transfer tools that can be used to manage SLS data # transfers are explained below # # # Transfer # # This results in pegasus-transfer to be used for transferring of files. It # is a python based wrapper around various transfer clients like # globus-url-copy, lcg-copy, wget, cp, ln . pegasus-transfer looks at # source and destination url and figures out automatically which underlying # client to use. pegasus-transfer is distributed with the PEGASUS and can # be found at $PEGASUS_HOME/bin/pegasus-transfer. # # For remote sites, Pegasus constructs the default path to pegasus-transfer # on the basis of PEGASUS_HOME env profile specified in the site catalog. # To specify a different path to the pegasus-transfer client , users can # add an entry into the transformation catalog with fully qualified logical # name as pegasus::pegasus-transfer # # # Condor # # This results in Condor file transfer mechanism to be used to transfer the # input data files from the submit host directly to the worker node # directories. This is used when running in pure Condor mode or in a Condor # pool that does not have a shared filesystem between the nodes. # # When setting the SLS transfers to Condor make sure that the # following properties are also set # # pegasus.gridstart PegasusLite # pegasus.execute.*.filesystem.local true # # Alternatively, you can set # # pegasus.data.configuration condorio # in lieu of the above 3 properties. # # Also make sure that pegasus.gridstart is not set. # # Please refer to the section on "Condor Pool Without a Shared Filesystem" # in the chapter on Planning and Submitting. # # # # # # pegasus.transfer.sls.*.impl Transfer # Property : pegasus.transfer.arguments # System : Pegasus # Since : 2.0 # Type : String # Default : (no default) # See also : pegasus.transfer.sls.arguments # # This determines the extra arguments with which the transfer implementation is # invoked. The transfer executable that is invoked is dependant upon # the transfer mode that has been selected. # The property can be overloaded by associated the pegasus profile key # transfer.arguments either with the site in the site catalog or the # corresponding transfer executable in the transformation catalog. # # # pegasus.transfer.arguments (no default) # Property : pegasus.transfer.sls.arguments # System : Pegasus # Since : 2.4 # Type : String # Default : (no default) # See also : pegasus.transfer.arguments # See also : pegasus.transfer.sls.*.impl # # This determines the extra arguments with which the SLS transfer # implementation is invoked. The transfer executable that is invoked # is dependant upon the SLS transfer implementation that has been selected. 
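#
# For example ( illustrative values only, and assuming the selected
# SLS implementation ends up invoking an underlying client that
# understands them, such as globus-url-copy's -p and -tcp-bs flags ),
# one might pass:
#
# pegasus.transfer.sls.arguments -p 4 -tcp-bs 2097152
#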
# # # pegasus.transfer.sls.arguments (no default) # Property : pegasus.transfer.stage.sls.file # System : Pegasus # Since : 3.0 # Type : Boolean # Default : (no default) # See also : pegasus.gridstart # See also : pegasus.execute.*.filesystem.local # # For executing jobs on the local filesystem, Pegasus creates sls files for # each compute jobs. These sls files list the files that need to be # staged to the worker node and the output files that need to be pushed out # from the worker node after completion of the job. By default, pegasus will # stage these SLS files to the shared filesystem on the head node as part of # first level data stagein jobs. However, in the case where there is no # shared filesystem between head nodes and the worker nodes, the user can set # this property to false. This will result in the sls files to be transferred # using the Condor File Transfer from the submit host. # # # # pegasus.transfer.stage.sls.file true # Property : pegasus.transfer.worker.package # System : Pegasus # Type : boolean # Default : false # Since : 3.0 # See also : pegasus.data.configuration # # By default, Pegasus relies on the worker package to be installed in a directory # accessible to the worker nodes on the remote sites . Pegasus uses the value of # PEGASUS_HOME environment profile in the site catalog for the remote sites, to then # construct paths to pegasus auxillary executables like kickstart, pegasus-transfer, # seqexec etc. # # If the Pegasus worker package is not installed on the remote sites # users can set this property to true to get Pegasus to deploy worker package on the # nodes. # # # In the case of sharedfs setup, the worker package is deployed on the shared scratch # directory for the workflow , that is accessible to all the compute nodes of the # remote sites. # # When running in nonsharefs environments, the worker package is first brought to the # submit directory and then transferred to the worker node filesystem using Condor # file IO. # # # pegasus.transfer.worker.package false # Property : pegasus.transfer.links # System : Pegasus # Type : boolean # Default : false # Since : 2.0 # See also : pegasus.transfer # See also : pegasus.transfer.force # # # If this is set, and the transfer implementation is set to Transfer # i.e. using the transfer executable distributed with the PEGASUS. # On setting this property, if Pegasus while fetching data from the # Replica Catalog sees a pool attribute associated with the PFN that matches # the execution pool on which the data has to be transferred to, # Pegasus instead of the URL returned by the Replica Catalog replaces it with # a file based URL. This is based on the assumption that the if the pools match the # filesystems are visible to the remote execution directory where # input data resides. # On seeing both the source and destination urls as file based URLs # the transfer executable spawns a job that creates a symbolic link # by calling ln -s on the remote pool. # # # # pegasus.transfer.links false # Property : pegasus.transfer.*.remote.sites # System : Pegasus # Type : comma separated list of sites # Default : no default # Since : 2.0 # # By default Pegasus looks at the source and destination URL's for to determine # whether the associated transfer job runs on the submit host or the head node # of a remote site, with preference set to run a transfer job to run on submit # host. 
# # Pegasus will run transfer jobs on the remote sites # # # - if the file server for the compute site is a file server i.e url prefix file:// # - symlink jobs need to be added that require the symlink transfer jobs to # be run remotely. # # # This property can be used to change the default behaviour of Pegasus and force pegasus # to run different types of transfer jobs for the sites specified on the remote site. # # The table below illustrates all the possible variations of the # property. # # # # Property Name & Applies to # pegasus.transfer.stagein.remote.sites & the stage in transfer jobs # pegasus.transfer.stageout.remote.sites & the stage out transfer jobs # pegasus.transfer.inter.remote.sites & the inter pool transfer jobs # pegasus.transfer.*.remote.sites & apply to types of transfer jobs # # # In addition * can be specified as a property value, to designate # that it applies to all sites. # # pegasus.transfer.*.remote.sites (no default) # Property : pegasus.transfer.staging.delimiter # System : Pegasus # Since : 2.0 # Type : String # Default : : # See also : pegasus.transformation.selector # # Pegasus supports executable staging as part of the # workflow. Currently staging of statically linked executables is # supported only. An executable is normally staged to the work # directory for the workflow/partition on the remote site. The # basename of the staged executable is derived from the namespace,name # and version of the transformation in the transformation # catalog. This property sets the delimiter that is used for the # construction of the name of the staged executable. # # # pegasus.transfer.staging.delimiter : # Property : pegasus.transfer.disable.chmod.sites # System : Pegasus # Since : 2.0 # Type : comma separated list of sites # Default : no default # # # During staging of executables to remote sites, chmod jobs are # added to the workflow. These jobs run on the remote sites and do a # chmod on the staged executable. For some sites, this maynot be # required. The permissions might be preserved, or there maybe an # automatic mechanism that does it. # # This property allows you to specify the list of sites, where you do # not want the chmod jobs to be executed. For those sites, the chmod # jobs are replaced by NoOP jobs. The NoOP jobs are executed by # Condor, and instead will immediately have a terminate event written # to the job log file and removed from the queue. # # # # # pegasus.transfer.disable.chmod.sites (no default) # Property : pegasus.transfer.setup.source.base.url # System : Pegasus # Type : URL # Default : no default # Since : 2.3 # # # This property specifies the base URL to the directory containing the # Pegasus worker package builds. During Staging of Executable, the # Pegasus Worker Package is also staged to the remote site. The worker # packages are by default pulled from the http server at pegasus.isi.edu. # This property can be used to override the location from where the worker # package are staged. This maybe required if the remote computes sites don't # allows files transfers from a http server. 
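#
# For example ( the mirror URL is hypothetical ), to stage worker
# packages from a local HTTP mirror instead of the server at
# pegasus.isi.edu:
#
# pegasus.transfer.setup.source.base.url http://mirror.example.edu/pegasus/worker-packages
#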
# # pegasus.transfer.setup.source.base.url (no default) # # SECTION "GRIDSTART AND EXITCODE PROPERTIES" # # Property : pegasus.gridstart # System : Pegasus # Since : 2.0 # Type : enumeration # Value[0] : Kickstart # Value[1] : None # Value[2] : PegasusLite # Default : Kickstart # See also : pegasus.execute.*.filesystem.local # # Jobs that are launched on the grid maybe wrapped in a wrapper # executable/script that enables information about about the # execution, resource consumption, and - most importantly - the # exitcode of the remote application. # At present, a job scheduled on a remote site is launched with a # gridstart if site catalog has the corresponding gridlaunch attribute # set and the job being launched is not MPI. # # Users can explicitly decide what gridstart to use for a job, by # associating the pegasus profile key named gridstart with the job. # # # Kickstart # In this mode, all the jobs are lauched via kickstart. The # kickstart executable is a light-weight program # which connects the stdin,stdout and stderr filehandles for # PEGASUS jobs on the remote site. Kickstart is an executable # distributed with PEGASUS that can generally be found at # ${pegasus.home.bin}/kickstart. # # None # In this mode, all the jobs are launched directly on # the remote site. Each job's stdin,stdout and stderr are # connected to condor commands in a manner to ensure that they are # sent back to the submit host. # # PegasusLite # In this mode, the compute jobs are wrapped by PegasusLite instances. # PegasusLite instance is a bash script, that is launced on the compute node. # It determins at runtime the directory a job needs to execute in, pulls in data # from the staging site , launches the job, pushes out the data and cleans up the # directory after execution. # # # # # # pegasus.gridstart Kickstart # Property : pegasus.gridstart.kickstart.set.xbit # System : Pegasus # Since : 2.4 # Type : Boolean # Default : false # See also : pegasus.transfer.disable.chmod.sites # # # Kickstart has an option to set the X bit on an executable before it # launches it on the remote site. In case of staging of executables, # by default chmod jobs are launched that set the x bit of the user # executables staged to a remote site. # # On setting this property to true, kickstart gridstart module adds a # -X option to kickstart arguments. The -X arguments tells kickstart # to set the x bit of the executable before launching it. # # User should usually disable the chmod jobs by setting the property # pegasus.transfer.disable.chmod.sites , if they set this property # to true. # # # pegasus.gridstart.kickstart.set.xbit false # Property : pegasus.gridstart.kickstart.stat # System : Pegasus # Since : 2.1 # Type : Boolean # Default : false # See also : pegasus.gridstart.generate.lof # # # Kickstart has an option to stat the input files and the output # files. The stat information is collected in the XML record generated # by kickstart. Since stat is an expensive operation, it is not turned # on by on. Set this property to true if you want to see stat # information for the input files and output files of a job in it's # kickstart output. # # # # # pegasus.gridstart.kickstart.stat false # Property : pegasus.gridstart.generate.lof # System : Pegasus # Since : 2.1 # Type : Boolean # Default : false # See also : pegasus.gridstart.kickstart.stat # # # For the stat option for kickstart, we generate 2 lof ( list of # filenames ) files for each job. 
# One lof file contains the input lfns for the job, and the other
# contains the output lfns for the job.
# In some cases, it may be beneficial to have these lof files generated
# but not do the actual stat. This property allows you to generate the
# lof files without triggering the stat in kickstart invocations.
#
# pegasus.gridstart.generate.lof false
# Property : pegasus.gridstart.invoke.always
# System : Pegasus
# Since : 2.0
# Type : Boolean
# Default : false
# See also : pegasus.gridstart.invoke.length
#
# Condor has a limit that restricts the length of arguments to an
# executable to 4K. To get around this limit, you can trigger
# Kickstart to be invoked with the -I option. In this case, an
# arguments file is prepared per job that is transferred to the remote
# end via the Condor file transfer mechanism. This way the arguments
# to the executable are not specified in the condor submit file for
# the job. This property specifies whether you want to use the invoke
# option always for all jobs, or want it to be triggered only when the
# argument string is determined to be greater than a certain limit.
#
# pegasus.gridstart.invoke.always false
# Property : pegasus.gridstart.invoke.length
# System : Pegasus
# Since : 2.0
# Type : Long
# Default : 4000
# See also : pegasus.gridstart.invoke.always
#
# Gridstart is automatically invoked with the -I option if it is
# determined that the length of the arguments to be specified is going
# to be greater than a certain limit. By default this limit is set to
# 4K. However, it can be overridden by specifying this property.
#
# pegasus.gridstart.invoke.length 4000
#
# SECTION "INTERFACE TO Condor and Condor DAGMan"
#
# The Condor DAGMan facility is usually activated using the
# condor_submit_dag command. However, many workflow shapes have the
# ability to either overburden the submit host, or overflow remote
# gatekeeper hosts. While DAGMan provides throttles, unfortunately
# these can only be supplied on the command line. Thus, PEGASUS
# provides a versatile wrapper to invoke DAGMan, called
# pegasus-submit-dag. It can be configured from the command line, from
# user and system properties, and by defaults.
# Property : pegasus.condor.logs.symlink
# System : Condor
# Type : Boolean
# Default : true
# Since : 3.0
#
# By default pegasus has the Condor common log [dagname]-0.log in the
# submit file as a symlink to a location in /tmp . This is to ensure
# that the condor common log does not get written to a shared
# filesystem. If the user knows for sure that the workflow submit
# directory is not on a shared filesystem, then they can opt to turn
# off the symlinking of the condor common log file by setting this
# property to false.
#
# pegasus.condor.logs.symlink true
# Property : pegasus.condor.arguments.quote
# System : Condor
# Type : Boolean
# Default : true
# Since : 2.0
# Old Name : pegasus.scheduler.condor.arguments.quote
#
# This property determines whether to apply the new Condor quoting
# rules for quoting the argument string. The new argument quoting
# rules appeared in the Condor 6.7.xx series. We have verified it for
# the 6.7.19 version. If you are using an old condor at the submit
# host, set this property to false.
#
# pegasus.condor.arguments.quote true
# Property : pegasus.dagman.notify
# System : DAGMan wrapper
# Type : Case-insensitive enumeration
# Value[0] : Complete
# Value[1] : Error
# Value[2] : Never
# Default : Error
# Document : http://www.cs.wisc.edu/condor/manual/v6.9/condor_submit_dag.html
# Document : http://www.cs.wisc.edu/condor/manual/v6.9/condor_submit.html
#
# The pegasus-submit-dag wrapper processes properties to set DAGMan
# commandline arguments. The argument sets the e-mail notification for
# DAGMan itself. This information will be used within the Condor
# submit description file for DAGMan. This file is produced by
# condor_submit_dag. See notification within the section of submit
# description file commands in the condor_submit manual page for the
# specification of the value. Many users prefer the value NEVER.
#
# pegasus.dagman.notify Error
# Property : pegasus.dagman.verbose
# System : DAGMan wrapper
# Type : Boolean
# Value[0] : false
# Value[1] : true
# Default : false
# Document : http://www.cs.wisc.edu/condor/manual/v6.9/condor_submit_dag.html
#
# The pegasus-submit-dag wrapper processes properties to set DAGMan
# commandline arguments. If set and true, the argument activates
# verbose output in case of DAGMan errors.
#
# pegasus.dagman.verbose false
# Property : pegasus.dagman.[category].maxjobs
# System : DAGMan wrapper
# Type : Integer
# Since : 2.2
# Default : no default
# Document : http://vtcpc.isi.edu/pegasus/index.php/ChangeLog\#Support_for_DAGMan_node_categories
#
# DAGMan now allows for the nodes in the DAG to be grouped into
# categories. Tuning parameters like maxjobs can then be applied per
# category instead of being applied to the whole workflow. To use this
# facility users need to associate the dagman profile key named
# category with their jobs. The value of the key is the category to
# which the job belongs.
#
# You can then use this property to specify the value for a category.
# For example, for jobs associated with a category named short-running,
# you would set pegasus.dagman.short-running.maxjobs
#
# pegasus.dagman.[category].maxjobs no default
#
# SECTION "MONITORING PROPERTIES"
#
# Property : pegasus.monitord.events
# System : Pegasus-monitord
# Type : Boolean
# Default : true
# Since : 3.0.2
# See Also : pegasus.monitord.output
#
# This property determines whether pegasus-monitord generates log
# events. If log events are disabled using this property, no bp file
# or database will be created, even if the pegasus.monitord.output
# property is specified.
#
# pegasus.monitord.events true
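#
# As an illustration ( host, credentials and database name are
# hypothetical ), a monitoring setup that sends events to a MySQL
# stampede database, following the connection string format shown in
# the table below, could look like:
#
# pegasus.monitord.events   true
# pegasus.monitord.output   mysql://monitor:secret@db.example.edu:3306/stampede
#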
# Property : pegasus.monitord.output
# System : Pegasus-monitord
# Type : String
# Since : 3.0.2
# See Also : pegasus.monitord.events
#
# This property specifies the destination for generated log events in
# pegasus-monitord. By default, events are stored in a sqlite database
# in the workflow directory, which will be created with the workflow's
# name and a ".stampede.db" extension. Users can specify an alternative
# database by using a SQLAlchemy connection string. Details are
# available at:
#
# http://www.sqlalchemy.org/docs/05/reference/dialects/index.html
#
# It is important to note that users will need to have the appropriate
# db interface library installed: SQLAlchemy is a wrapper around
# database interface libraries such as the MySQL one; it does not
# provide a MySQL driver itself. The Pegasus distribution includes
# both SQLAlchemy and the SQLite Python driver.
# As a final note, it is important to mention that, unlike with SQLite
# databases, when using SQLAlchemy with other database servers, e.g.
# MySQL or Postgres, the target database needs to exist.
# Users can also specify a file name using this property in order to
# create a file with the log events.
#
# Example values for the SQLAlchemy connection string for various end
# points are listed below:
#
# SQLAlchemy End Point    Example Value
# Netlogger BP File       file:///submit/dir/myworkflow.bp
# SQLite Database         sqlite:///submit/dir/myworkflow.db
# MySQL Database          mysql://user:password@host:port/databasename
#
# pegasus.monitord.output (no default)
# Property : pegasus.monitord.notifications
# System : Pegasus-monitord
# Type : Boolean
# Default : true
# Since : 3.1
# See Also : pegasus.monitord.notifications.max
# See Also : pegasus.monitord.notifications.timeout
#
# This property determines whether pegasus-monitord processes
# notifications. When notifications are enabled, pegasus-monitord will
# parse the .notify file generated by pegasus-plan and will invoke
# notification scripts whenever conditions match one of the
# notifications.
#
# pegasus.monitord.notifications true
# Property : pegasus.monitord.notifications.max
# System : Pegasus-monitord
# Type : Integer
# Default : 10
# Since : 3.1
# See Also : pegasus.monitord.notifications
# See Also : pegasus.monitord.notifications.timeout
#
# This property determines how many notification scripts
# pegasus-monitord will call concurrently. Upon reaching this limit,
# pegasus-monitord will wait for one notification script to finish
# before issuing another one. This is a way to keep the number of
# processes under control at the submit host. Setting this property to
# 0 will disable notifications completely.
#
# pegasus.monitord.notifications.max 10
# Property : pegasus.monitord.notifications.timeout
# System : Pegasus-monitord
# Type : Integer
# Default : 0
# Since : 3.1
# See Also : pegasus.monitord.notifications
# See Also : pegasus.monitord.notifications.max
#
# This property determines how long pegasus-monitord will let
# notification scripts run before terminating them. When this property
# is set to 0 (default), pegasus-monitord will not terminate any
# notification scripts, letting them run indefinitely. If some
# notification scripts misbehave, this has the potential problem of
# starving pegasus-monitord's notification slots (see the
# pegasus.monitord.notifications.max property) and blocking further
# notifications. In addition, users should be aware that
# pegasus-monitord will not exit until all notification scripts are
# finished.
#
# pegasus.monitord.notifications.timeout 0
# Property : pegasus.monitord.stdout.disable.parsing
# System : Pegasus-monitord
# Type : Boolean
# Default : False
# Since : 3.1.1
#
# By default, pegasus-monitord parses the stdout/stderr section of the
# kickstart output to populate the application's captured stdout and
# stderr in the job instance table for the stampede schema. For large
# workflows, this may slow down monitord, especially if the
# application is generating a lot of output to its stdout and stderr.
# This property can be used to turn off the database population.
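#
# For example, to skip the population of application stdout/stderr for
# a large workflow:
#
# pegasus.monitord.stdout.disable.parsing true
#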
# # # # # pegasus.monitord.stdout.disable.parsing false # # SECTION "JOB CLUSTERING PROPERTIES" # # Property : pegasus.clusterer.job.aggregator # System : Job Clustering # Since : 2.0 # Type : String # Value[0] : seqexec # Value[1] : mpiexec # Default : seqexec # # A large number of workflows executed through the Virtual Data # System, are composed of several jobs that run for only a few seconds # or so. The overhead of running any job on the grid is usually 60 # seconds or more. Hence, it makes sense to collapse small independent # jobs into a larger job. # This property determines, the executable that will be used for # running the larger job on the remote site. # # # seqexec # In this mode, the executable used to run the merged job is # seqexec that runs each of the smaller jobs sequentially on the # same node. The executable "seqexec" is a PEGASUS tool distributed # in the PEGASUS worker package, and can be usually found at # {pegasus.home}/bin/seqexec. # # mpiexec # In this mode, the executable used to run the merged job is # mpiexec that runs the smaller jobs via mpi on n nodes where n # is the nodecount associated with the merged job. The executable # "mpiexec" is a PEGASUS tool distributed in the PEGASUS worker package, # and can be usually found at {pegasus.home}/bin/mpiexec. # # # # # # pegasus.clusterer.job.aggregator seqexec # Property : pegasus.clusterer.job.aggregator.seqexec.log # System : Job Clustering # Type : Boolean # Default : false # Since : 2.3 # See also : pegasus.clusterer.job.aggregator # See also : pegasus.clusterer.job.aggregator.seqexec.log.global # # # Seqexec logs the progress of the jobs that are being run by it in a # progress file on the remote cluster where it is executed. # # This property sets the Boolean flag, that indicates whether to turn # on the logging or not. # # # pegasus.clusterer.job.aggregator.seqexec.log false # Property : pegasus.clusterer.job.aggregator.seqexec.log.global # System : Job Clustering # Type : Boolean # Default : true # Since : 2.3 # See also : pegasus.clusterer.job.aggregator # See also : pegasus.clusterer.job.aggregator.seqexec.log # Old Name : pegasus.clusterer.job.aggregator.seqexec.hasgloballog # # # Seqexec logs the progress of the jobs that are being run by it in a # progress file on the remote cluster where it is executed. The # progress log is useful for you to track the progress of your # computations and remote grid debugging. The progress log file can be # shared by multiple seqexec jobs that are running on a particular # cluster as part of the same workflow. Or it can be per job. # # This property sets the Boolean flag, that indicates whether to have # a single global log for all the seqexec jobs on a particular cluster # or progress log per job. # # # # pegasus.clusterer.job.aggregator.seqexec.log.global true # Property : pegasus.clusterer.job.aggregator.seqexec.firstjobfail # System : Job Clustering # Type : Boolean # Default : true # Since : 2.2 # See also : pegasus.clusterer.job.aggregator # # By default seqexec does not stop execution even if one of the # clustered jobs it is executing fails. This is because seqexec tries # to get as much work done as possible. # # This property sets the Boolean flag, that indicates whether to make # seqexec stop on the first job failure it detects. 
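#
# For illustration ( the flag values are one plausible combination,
# not a recommendation ), a clustering setup that runs merged jobs
# through seqexec with per-job progress logs and fail-fast behaviour
# could look like:
#
# pegasus.clusterer.job.aggregator                        seqexec
# pegasus.clusterer.job.aggregator.seqexec.log            true
# pegasus.clusterer.job.aggregator.seqexec.log.global     false
# pegasus.clusterer.job.aggregator.seqexec.firstjobfail   true
#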
# # # # pegasus.clusterer.job.aggregator.seqexec.firstjobfail false # Property : pegasus.clusterer.label.key # System : Job Clustering # Type : String # Default : label # Since : 2.0 # See also : pegasus.partitioner.label.key # # While clustering jobs in the workflow into larger jobs, you can # optionally label your graph to control which jobs are clustered and # to which clustered job they belong. This done using a label based # clustering scheme and is done by associating a profile/label key in # the PEGASUS namespace with the jobs in the DAX. Each job that has the # same value/label value for this profile key, is put in the same # clustered job. # # This property allows you to specify the PEGASUS profile key that you # want to use for label based clustering. # # # pegasus.clusterer.label.key label # # SECTION "LOGGING PROPERTIES" # # Property : pegasus.log.manager # System : Pegasus # Since : 2.2.0 # Type : Enumeration # Value[0] : Default # Value[1] : Log4j # Default : Default # See also : pegasus.log.manager.formatter # # # This property sets the logging implementation to use for logging. # # # Default # This implementation refers to the legacy Pegasus logger, that # logs directly to stdout and stderr. It however, does have the # concept of levels similar to log4j or syslog. # # Log4j # This implementation, uses Log4j to log messages. The log4j # properties can be specified in a properties file, the location of # which is specified by the property # # pegasus.log.manager.log4j.conf # # # # # # pegasus.log.manager Default # Property : pegasus.log.manager.formatter # System : Pegasus # Since : 2.2.0 # Type : Enumeration # Value[0] : Simple # Value[1] : Netlogger # Default : Simple # See also : pegasus.log.manager.formatter # # # This property sets the formatter to use for formatting the log messages # while logging. # # # Simple # This formats the messages in a simple format. The messages are logged as # is with minimal formatting. Below are sample log messages in this format # while ranking a dax according to performance. # # event.pegasus.ranking dax.id se18-gda.dax - STARTED # event.pegasus.parsing.dax dax.id se18-gda-nested.dax - STARTED # event.pegasus.parsing.dax dax.id se18-gda-nested.dax - FINISHED # job.id jobGDA # job.id jobGDA query.name getpredicted performace time 10.00 # event.pegasus.ranking dax.id se18-gda.dax - FINISHED # # # Netlogger # # This formats the messages in the Netlogger format , that is based on key # value pairs. The netlogger format is useful for loading the logs into a # database to do some meaningful analysis. Below are sample log messages # in this format while ranking a dax according to performance. 
#
# ts=2008-09-06T12:26:20.100502Z event=event.pegasus.ranking.start \
# msgid=6bc49c1f-112e-4cdb-af54-3e0afb5d593c \
# eventId=event.pegasus.ranking_8d7c0a3c-9271-4c9c-a0f2-1fb57c6394d5 \
# dax.id=se18-gda.dax prog=Pegasus
#
# ts=2008-09-06T12:26:20.100750Z event=event.pegasus.parsing.dax.start \
# msgid=fed3ebdf-68e6-4711-8224-a16bb1ad2969 \
# eventId=event.pegasus.parsing.dax_887134a8-39cb-40f1-b11c-b49def0c5232 \
# dax.id=se18-gda-nested.dax prog=Pegasus
#
# ts=2008-09-06T12:26:20.100894Z event=event.pegasus.parsing.dax.end \
# msgid=a81e92ba-27df-451f-bb2b-b60d232ed1ad \
# eventId=event.pegasus.parsing.dax_887134a8-39cb-40f1-b11c-b49def0c5232
#
# ts=2008-09-06T12:26:20.100395Z event=event.pegasus.ranking \
# msgid=4dcecb68-74fe-4fd5-aa9e-ea1cee88727d \
# eventId=event.pegasus.ranking_8d7c0a3c-9271-4c9c-a0f2-1fb57c6394d5 \
# job.id="jobGDA"
#
# ts=2008-09-06T12:26:20.100395Z event=event.pegasus.ranking \
# msgid=4dcecb68-74fe-4fd5-aa9e-ea1cee88727d \
# eventId=event.pegasus.ranking_8d7c0a3c-9271-4c9c-a0f2-1fb57c6394d5 \
# job.id="jobGDA" query.name="getpredicted performace" time="10.00"
#
# ts=2008-09-06T12:26:20.102003Z event=event.pegasus.ranking.end \
# msgid=31f50f39-efe2-47fc-9f4c-07121280cd64 \
# eventId=event.pegasus.ranking_8d7c0a3c-9271-4c9c-a0f2-1fb57c6394d5
#
# pegasus.log.manager.formatter Simple
# Property : pegasus.log.*
# System : Pegasus
# Since : 2.0
# Type : String
# Default : No default
#
# This property sets the path to the file where all the logging for
# Pegasus can be redirected to. Both stdout and stderr are logged to
# the file specified.
#
# pegasus.log.* no default
# Property : pegasus.log.metrics
# System : Pegasus
# Since : 2.1.0
# Type : Boolean
# Default : true
# See also : pegasus.log.metrics.file
#
# This property enables the logging of certain planning and workflow
# metrics to a global log file. By default the file to which the
# metrics are logged is ${pegasus.home}/var/pegasus.log.
#
# pegasus.log.metrics true
# Property : pegasus.log.metrics.file
# System : Pegasus
# Since : 2.1.0
# Type : file location string
# Default : ${pegasus.home}/var/pegasus.log
# See also : pegasus.log.metrics
#
# This property determines the file to which the workflow and planning
# metrics are logged, if enabled.
#
# pegasus.log.metrics.file ${pegasus.home}/var/pegasus.log
#
# SECTION "MISCELLANEOUS PROPERTIES"
#
# Property : pegasus.code.generator
# System : Pegasus
# Since : 3.0
# Type : enumeration
# Value[0] : Condor
# Value[1] : Shell
# Default : Condor
#
# This property is used to load the appropriate Code Generator to use
# for writing out the executable workflow.
#
# Condor
#
# This is the default code generator for Pegasus. This generator
# generates the executable workflow as a Condor DAG file and
# associated job submit files. The Condor DAG file is passed as input
# to Condor DAGMan for job execution.
#
# Shell
#
# This code generator generates the executable workflow as a shell
# script that can be executed on the submit host. While using this
# code generator, all the jobs should be mapped to site local, i.e.
# specify --sites local to pegasus-plan.
#
# pegasus.code.generator Condor
# Property : pegasus.job.priority.assign
# System : Pegasus
# Since : 3.0.3
# Type : Boolean
# Default : true
#
# This property can be used to turn off the default level based condor
# priorities that are assigned to jobs in the executable workflow.
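#
# For example, to turn off the automatic level based priorities
# ( e.g. when assigning job priorities by hand instead ):
#
# pegasus.job.priority.assign false
#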
# Property : pegasus.file.cleanup.strategy
# System : Pegasus
# Since : 2.2
# Type : enumeration
# Value[0] : InPlace
# Default : InPlace
#
# This property is used to select the strategy determining how cleanup
# nodes are added to the executable workflow.
#
#
# InPlace
#
# This is the only mode available.
#
#
#
# pegasus.file.cleanup.strategy InPlace

# Property : pegasus.file.cleanup.impl
# System : Pegasus
# Since : 2.2
# Type : enumeration
# Value[0] : Cleanup
# Value[1] : RM
# Value[2] : S3
# Default : Cleanup
#
# This property is used to select the executable that is used to
# delete files from the workflow-specific directories on the compute
# sites.
#
#
# Cleanup
#
# The default executable that is used to delete files is the
# dirmanager executable shipped with Pegasus. It is found at
# $PEGASUS_HOME/bin/dirmanager in the Pegasus distribution.
# An entry for the transformation pegasus::dirmanager needs
# to exist in the Transformation Catalog, or the PEGASUS_HOME
# environment variable should be specified in the site catalog for
# the relevant sites, for this mode to work.
#
# RM
#
# This mode results in the rm executable being used to delete files
# from remote directories. The rm executable is standard on *nix
# systems and is usually found at /bin/rm.
#
# S3
#
# This mode is used to delete files/objects from buckets in S3
# instead of from a directory. It should be set when running workflows
# on Amazon EC2. This implementation relies on the s3cmd command line
# client. An entry for the transformation amazon::s3cmd needs to
# exist in the Transformation Catalog for this to work (see the
# sketch following pegasus.file.cleanup.scope below).
#
#
#
# pegasus.file.cleanup.impl Cleanup

# Property : pegasus.file.cleanup.scope
# System : Pegasus
# Since : 2.3.0
# Type : enumeration
# Value[0] : fullahead
# Value[1] : deferred
# Default : fullahead
#
# By default, in case of deferred planning, InPlace file cleanup is
# turned OFF. This is because the cleanup algorithm does not work
# across partitions. This property can be used to turn on cleanup in
# case of deferred planning.
#
# fullahead
#
# This is the default scope. The Pegasus cleanup algorithm does not
# work across partitions in deferred planning. Hence cleanup is always
# turned OFF when deferred planning occurs and the cleanup scope is
# set to fullahead.
#
# deferred
#
# If the scope is set to deferred, then Pegasus will not disable file
# cleanup in case of deferred planning. This is useful for scenarios
# where the partitions themselves are independent (i.e. do not share
# files). Even if the scope is set to deferred, users can turn off
# cleanup by specifying the --nocleanup option to pegasus-plan.
#
#
#
# pegasus.file.cleanup.scope fullahead
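#
# A minimal sketch of switching the cleanup implementation to S3 for a
# workflow running on Amazon EC2; the site name "ec2" and the s3cmd
# path below are hypothetical:
#
# pegasus.file.cleanup.impl S3
#
# together with a Transformation Catalog entry for amazon::s3cmd, for
# example in the text format used by etc/sample.tc.text:
#
# tr amazon::s3cmd {
#   site ec2 {
#     pfn "/usr/bin/s3cmd"
#     arch "x86"
#     os "LINUX"
#     type "INSTALLED"
#   }
# }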
# Property : pegasus.catalog.transformation.mapper
# System : Staging of Executables
# Since : 2.0
# Type : enumeration
# Value[0] : All
# Value[1] : Installed
# Value[2] : Staged
# Value[3] : Submit
# Default : All
# See also : pegasus.selector.transformation
#
#
# Pegasus supports the transfer of statically linked executables as
# part of the concrete workflow. At present, there is only support for
# staging of executables referred to by the compute jobs specified in
# the DAX file.
# Pegasus determines the source locations of the binaries from the
# transformation catalog, where it searches for entries of type
# STATIC_BINARY for a particular architecture type. The PFN for these
# entries should refer to a remote URL that is valid and accessible
# via globus-url-copy.
# For the transfer of executables, Pegasus constructs a soft state map
# that resides on top of the transformation catalog and helps in
# determining the locations from which an executable can be staged to
# the remote site.
#
# This property determines how that map is created.
#
# All
# In this mode, all sources with entries of type STATIC_BINARY
# for a particular transformation are considered valid sources for
# the transfer of executables. This is the most general mode, and
# results in the map being constructed as the cartesian product of
# the matches.
#
# Installed
# In this mode, only entries that are of type INSTALLED
# are used while constructing the soft state map. This results in
# Pegasus never doing any transfer of executables as part of the
# workflow. It always prefers the installed executables at the remote
# sites.
#
# Staged
# In this mode, only entries that are of type STATIC_BINARY
# are used while constructing the soft state map. This results in
# the concrete workflow referring only to the staged executables,
# irrespective of whether the executables are already installed at
# the remote end.
#
# Submit
# In this mode, only entries that are of type STATIC_BINARY
# and reside at the submit host (pool local) are used while
# constructing the soft state map. This is especially helpful when
# the user wants to use the latest compute code, residing on the
# submit host, for computations on the grid.
#
#
#
# pegasus.catalog.transformation.mapper All

# Property : pegasus.selector.transformation
# System : Staging of Executables
# Since : 2.0
# Type : enumeration
# Value[0] : Random
# Value[1] : Installed
# Value[2] : Staged
# Value[3] : Submit
# Default : Random
# See also : pegasus.catalog.transformation.mapper
#
#
# In case of transfer of executables, Pegasus can have various
# transformations to select from when it schedules a particular
# compute job to run at a remote site. For example, it can have the
# choice of staging an executable from a particular remote pool, from
# the local (submit host) only, or using only the one that is
# installed on the remote site.
#
# This property determines how a transformation is selected amongst
# the various candidate transformations, and it is applied after the
# property pegasus.catalog.transformation.mapper has been applied.
# For example, specifying pegasus.catalog.transformation.mapper as
# Staged and then pegasus.selector.transformation as Installed does
# not work, because by the time this property is applied, the soft
# state map only has entries of type STAGED. A sketch combining the
# two properties follows below.
#
#
# Random
# In this mode, a random matching candidate transformation
# is selected to be staged to the remote execution pool.
#
# Installed
# In this mode, only entries that are of type INSTALLED
# are selected. This means that the concrete workflow only refers
# to the transformations already pre-installed on the remote
# pools.
#
# Staged
# In this mode, only entries that are of type STATIC_BINARY
# are selected, ignoring the ones that are installed at the remote
# site.
#
# Submit
# In this mode, only entries that are of type STATIC_BINARY
# and reside at the submit host (pool local) are selected as
# sources for staging the executables to the remote execution
# pools.
#
#
#
# pegasus.selector.transformation Random
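#
# A minimal sketch of forcing executables to be staged from the submit
# host; the transformation name and the binary path below are
# hypothetical:
#
# pegasus.catalog.transformation.mapper Submit
# pegasus.selector.transformation Submit
#
# with a STATIC_BINARY entry on site local in the Transformation
# Catalog (text format as in etc/sample.tc.text):
#
# tr black::analyze:1.0 {
#   site local {
#     pfn "/home/pegasus/bin/analyze-static"
#     arch "x86"
#     os "LINUX"
#     type "STATIC_BINARY"
#   }
# }
#
# With this combination, only submit-host STATIC_BINARY entries enter
# the soft state map, and the matching binary is transferred to each
# remote execution site.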
# Property : pegasus.execute.*.filesystem.local
# System : Pegasus
# Type : Boolean
# Default : false
# Since : 2.1.0
# See also : pegasus.data.configuration
#
#
# Normally, Pegasus transfers the data to and from a directory on the
# shared filesystem on the head node of a compute site. The directory
# needs to be visible to both the head node and the worker nodes for
# the compute jobs to execute correctly.
#
# By setting this property to true, you can get Pegasus to execute
# jobs on the worker node filesystem. In this case, when the jobs are
# launched on the worker nodes, the jobs grab the input data from
# the workflow-specific execution directory on the compute site and
# push the output data to the same directory after completion.
# The transfer of data to and from the worker node directory is
# referred to as Second Level Staging (SLS).
#
#
#
# pegasus.execute.*.filesystem.local false

# Property : pegasus.parser.dax.preserver.linebreaks
# System : Pegasus
# Type : Boolean
# Default : false
# Since : 2.2.0
#
#
# The DAX parser normally does not preserve line breaks while parsing
# the CDATA section that appears in the arguments section of the job
# element in the DAX. On setting this to true, the DAX parser
# preserves any line breaks that appear in the CDATA section.
#
# pegasus.parser.dax.preserver.linebreaks false
pegasus-wms_4.0.1+dfsg/etc/sample.tc.text0000644000175000017500000000262111757531137017404 0ustar ryngerynge# multiple line text-based transformation catalog: 2010-11-17T19:52:42.179-08:00
tr black::analyze:1.0 {
  site isi {
    pfn "/home/pegasus/2.0/bin/keg"
    arch "x86"
    os "LINUX"
    type "INSTALLED"
  }
}
tr black::findrange:1.0 {
  site isi {
    pfn "/home/pegasus/2.0/bin/keg"
    arch "x86"
    os "LINUX"
    type "INSTALLED"
  }
}
tr black::preprocess:1.0 {
  site isi {
    pfn "/home/pegasus/2.0/bin/keg"
    arch "x86"
    os "LINUX"
    type "INSTALLED"
  }
}
tr diamond::analyze:1.0 {
  site isi {
    pfn "/home/pegasus/2.0/bin/keg"
    arch "x86"
    os "LINUX"
    type "INSTALLED"
  }
}
tr diamond::findrange:1.0 {
  site isi {
    pfn "/home/pegasus/2.0/bin/keg"
    arch "x86"
    os "LINUX"
    type "INSTALLED"
  }
}
tr diamond::generate:1.0 {
  site isi {
    pfn "/home/pegasus/2.0/bin/keg"
    arch "x86"
    os "LINUX"
    type "INSTALLED"
  }
}
tr pegasus::cleanup {
  site isi {
    pfn "/home/pegasus/2.0/bin/dirmanager"
    arch "x86"
    os "LINUX"
    type "INSTALLED"
  }
}
tr pegasus::dirmanager {
  site isi {
    pfn "/home/pegasus/2.0/bin/dirmanager"
    arch "x86"
    os "LINUX"
    type "INSTALLED"
  }
}
tr pegasus::seqexec {
  site isi {
    pfn "/home/pegasus/2.0/bin/dirmanager"
    arch "x86"
    os "LINUX"
    type "INSTALLED"
  }
}
tr pegasus::transfer {
  site isi {
    profile env "GLOBUS_LOCATION" "/vdt/globus"
    profile env "LD_LIBRARY_PATH" "/vdt/globus/lib"
    pfn "/home/pegasus/2.0/bin/transfer"
    arch "x86"
    os "LINUX"
    type "INSTALLED"
  }
}
pegasus-wms_4.0.1+dfsg/README0000644000175000017500000000133511757531137014716 0ustar ryngeryngeHi,

Welcome to PEGASUS. Before you try to run anything, you might want to
make sure that your environment works. We depend on a number of
packages that you need to have installed: Condor 7.4+, Java 1.6+, and
optionally Globus 4.2+, Perl 5.6+ and Python 2.4+.

Please refer to the RELEASE_NOTES for important changes. For instance,
it is no longer necessary to set the PEGASUS_HOME environment
variable. However, in order to find all tools, you must include
Pegasus's "bin" directory in your PATH environment variable.

Please refer to the user guide for instructions on the packages and
their installation. You can find the documentation online at
http://pegasus.isi.edu/documentation and in the distributed "doc"
directory.
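As a quick sanity check of the environment described above, something
like the following can be run on the submit host (the install location
/opt/pegasus/4.0 is illustrative; substitute your own):

   # put the Pegasus tools on the search path (illustrative location)
   export PATH=/opt/pegasus/4.0/bin:$PATH

   # verify the dependencies listed above
   condor_version     # expect 7.4 or newer
   java -version      # expect 1.6 or newer
   pegasus-version    # prints the installed Pegasus version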
pegasus-wms_4.0.1+dfsg/release-tools/0000755000175000017500000000000011757531667016622 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/release-tools/README0000644000175000017500000000007211757531137017471 0ustar ryngeryngeThis directory contains tools to make NMI builds for WMS. pegasus-wms_4.0.1+dfsg/release-tools/getsystem/0000755000175000017500000000000011757531667020646 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/release-tools/getsystem/README0000644000175000017500000000011111757531137021507 0ustar ryngeryngeFile how we determine a system identification string for the web pages. pegasus-wms_4.0.1+dfsg/release-tools/getsystem/getsystem0000755000175000017500000001743611757531137022623 0ustar ryngerynge#! /usr/bin/env perl use strict; use warnings; use Config; # Global Hawkeye data my %uname_cmd = ( "arch", "uname -m", "os", "uname -s", "os_ver", "uname -r", ); my %short_os = ( "SunOS", "sol", "IRIX64", "irix", "Darwin", "macos", "OSF1", "osf", "AIX", "aix", "HP-UX", "hpux", "Microsoft Windows XP Professional", "winnt", "NT", "winnt", "FreeBSD", "freebsd", "Ubuntu", "ubuntu" ); my %short_arch = ( "alpha", "alpha", "sun4u", "sun4u", "powerpc", "ppc", "ppc", "ppc", "ppc64", "ppc64", "ps3", "ps3", "i686", "x86", "i386", "x86", "x86", "x86", "ia32", "ia32", "ia64", "ia64", "x86_64", "x86_64", "X86-based PC", "x86", "HP-UX", "hppa", "arm", "arm" ); # Override the flags based on a system if ( ($Config{'archname'} =~ m!aix!) or ($Config{'archname'} =~ m!darwin!) ) { $uname_cmd{"arch"} = "uname -p"; } # $Config{'archname'} = PA-RISC2.0 on hpux 10.20 if ( ($Config{'archname'} =~ m!PA!) ) { $uname_cmd{"arch"} = "uname -s"; } sub getPlatform(){ my ($arch, $os, $os_version, $os_sp); if ($Config{'archname'} =~ m!MSWin!) { my $info_str = `systeminfo.exe`; my @info = split /\n/, $info_str; $arch = (split /:\s*/, $info[14])[1]; $os = (split /:\s*/, $info[2])[1]; $os_version = (split /\./ ,(split /:\s*/, $info[3])[1])[0] . "." . (split /\./ ,(split /:\s*/, $info[3])[1])[1]; if ($info[3] =~ m/Service Pack/){ $os_sp = "SP" . (split " ", (split /:\s*/, $info[3])[1])[3]; } } else { $arch = `$uname_cmd{"arch"}`; $os = `$uname_cmd{"os"}`; $os_version = `$uname_cmd{"os_ver"}`; chomp($arch); chomp($os); chomp($os_version); } if ($os =~ m!CYGWIN!) { $os_version = substr($os, 10); $os = substr($os, 7, 2); } if ($Config{'archname'} =~ m!aix!) { my $major = `uname -v`; my $minor = `uname -r`; chomp $major; chomp $minor; $os_version = "$major.$minor"; } if ($arch =~ m/arm/) { $arch ="arm"; } # differentiate IBM PPC64 from Sony PS3 if ($arch eq "ppc64" && $os_version =~ /ps3$/) { $arch = "ps3"; } # Simplify HPUX 11 version string if ($Config{'archname'} =~ m!PA!) { if ($os_version eq "B.11.11") { $os_version = "11"; } } if ( lc($os) eq "linux") { my %distro = &get_linux_distro(); return($short_arch{$arch},$distro{'short'},$distro{'version'}); } elsif($short_os{$os} eq "macos") { # need something like 'ppc_macos_10.3' # find if processor is 64 bit. my $x64=`/usr/sbin/sysctl -a -n hw.cpu64bit_capable 2>/dev/null`; my $vers = `/usr/bin/sw_vers -productVersion`; chomp $vers; $vers =~ s/(^\d+\.\d+).*/$1/; return ( ($x64) ? 
$short_arch{$arch}."_64" : $short_arch{$arch},$short_os{$os},$vers); } elsif($short_os{$os} eq "irix") { return ($short_os{$os},$os_version); } elsif ( lc($os) eq "freebsd" ) { $os_version =~ s/\.[\d]-RELEASE//i; return ($short_arch{$arch},$short_os{$os}, $os_version); } else { return ($short_arch{$arch}, $short_os{$os}, $os_version); } } sub get_linux_distro () { my $issue_file; if (-f "/etc/rocks-release") { $issue_file = "/etc/rocks-release"; } else { $issue_file = "/etc/issue"; } open(DISTRO, $issue_file) || die "Unable to open $issue_file"; my %distro = ( "long", "unknown", "short", "unknown", "version", "unknown", ); while() { next if /^(\s)*$/; # skip blank lines chomp($_); my $line = $_; my @distro_strs = split(" ", $line); if($line =~ m!Red Hat!) { if($line =~ m!Red Hat Enterprise Linux!) { $distro{"long"} = "$distro_strs[0]$distro_strs[1]$distro_strs[4]"; $distro{"version"} = "$distro_strs[6]"; $distro{"short"} = "rhel"; } else { $distro{"long"} = "$distro_strs[0]$distro_strs[1]"; $distro{"version"} = "$distro_strs[4]"; $distro{"short"} = "rh"; } } elsif($line =~ m!Rocks !) { $distro{"long"} = "$distro_strs[0]"; $distro{"version"} = "$distro_strs[2]"; $distro{"short"} = "rocks"; } elsif($line =~ m!SuSE! || $line =~ m!SUSE!) { if ($line =~ m!Enterprise Server!) { $distro{"long"} = "$distro_strs[3]"; $distro{"version"} = "$distro_strs[6]"; } elsif ($line =~ m!openSUSE!) { $distro{"long"} = "$distro_strs[3]"; $distro{"version"} = "$distro_strs[3]"; } else { $distro{"long"} = "$distro_strs[3]"; $distro{"version"} = "$distro_strs[4]"; } if($line =~ m!SuSE SLES! || $line =~ m!Enterprise!) { $distro{"short"} = "sles"; } else { $distro{"short"} = "suse"; } } elsif($line =~ m!Fedora Core!) { $distro{"long"} = "$distro_strs[0] $distro_strs[1]"; $distro{"version"} = "$distro_strs[3]"; $distro{"short"} = "fc"; } elsif($line =~ m!Fedora release!){ $distro{"long"} = "$distro_strs[0] $distro_strs[1]"; $distro{"version"} = "$distro_strs[2]"; $distro{"short"} = "fc"; } elsif($line =~ m!Tao Linux!) { $distro{"long"} = "$distro_strs[0]"; $distro{"version"} = "$distro_strs[3]"; $distro{"short"} = "tao"; } elsif($line =~ m!Scientific Linux!) { $distro{"long"} = "Scientific Linux"; $distro{"version"} = 'xxx'; if ($line =~ m/(\d)/) { $distro{"version"} = $1; } if ($line =~ m/CERN/i) { $distro{"short"} = "slc"; } elsif ($line =~ m/Fermi/i) { $distro{"short"} = "slf"; } else { $distro{"short"} = "sl"; if ($line =~ m/(\d)/) { $distro{"version"} = "$distro_strs[4]"; } } } elsif ($line =~ m!CentOS!) { $distro{"long"} = "$distro_strs[0]"; $distro{"version"} = "$distro_strs[2]"; $distro{"short"} = "cent"; } elsif ($line =~ m!Yellow!) { $distro{"long"} = "$distro_strs[0] $distro_strs[1]"; $distro{"version"} = "$distro_strs[4]"; $distro{"short"} = "ydl"; } elsif($line =~ m!Debian!) { $distro{"long"} = "$distro_strs[0]"; $distro{"version"} = "$distro_strs[2]"; $distro{"short"} = "deb"; } elsif($line =~ m!Ubuntu!) { $distro{"long"} = "$distro_strs[0]"; $distro{"version"} = "$distro_strs[1]"; $distro{"short"} = "ubuntu"; } #hack to handle "/" in version numbers should probably apply to other places as well $distro{"version"} =~ s/\//_/g ; # we only want major version numbers $distro{"version"} =~ s/\..*//; return %distro; } } #sub trim () { # my @str = @_; # return <rim(&rtrim(@str)); #} # #sub rtrim () { # my @str = @_; # for (@str) { # s/\S+$//; # } # return @str == 1 ? $str[0] : @str; #} # #sub ltrim () { # my @str = @_; # for (@str) { # s/^\S+//; # } # return @str == 1 ? 
$str[0] : @str;
#}

#my ($arch, $os, $version)=getPlatform();
my @platform = getPlatform();
for (my $i = 0; $i < @platform; $i++) {
    if ($i == @platform - 1) {
        print $platform[$i];
    } else {
        print $platform[$i], "_";
    }
}
print "\n";
#print "NMI Platform :", $arch,"_", $os,"_",$version,"\n";
pegasus-wms_4.0.1+dfsg/release-tools/update-pegasus-apt-repo0000755000175000017500000000163311757531137023217 0ustar ryngerynge#!/bin/bash
set -e

REPO_DIR=`mktemp -d`
REMOTE_REPO=download.pegasus.isi.edu:/data/webspace/download.pegasus.isi.edu/wms/download/debian

rm -rf $REPO_DIR
rsync -a -v -e ssh $REMOTE_REPO/ $REPO_DIR/

# debian
for DIST in squeeze; do
    for ARCH in i386 amd64; do
        echo
        echo "Updating Debian packages list for $DIST-$ARCH"
        cd $REPO_DIR
        dpkg-scanpackages --arch $ARCH dists/$DIST/main/binary-$ARCH/ /dev/null \
            >dists/$DIST/main/binary-$ARCH/Packages
        cd $REPO_DIR/dists/$DIST/main/binary-$ARCH/
        gzip -9c Packages >Packages.gz
        bzip2 -9 -k -f Packages
    done

    # generate a release file and sign the repository
    cd $REPO_DIR/dists/$DIST
    apt-ftparchive -o APT::FTPArchive::Release::Codename=$DIST release . >Release
    rm -f Release.gpg
    gpg -abs -o Release.gpg Release
done

rsync -a -v -e ssh $REPO_DIR/ $REMOTE_REPO/
rm -rf $REPO_DIR
pegasus-wms_4.0.1+dfsg/release-tools/RELEASE-CHECKLIST.txt0000644000175000017500000000303111757531137021737 0ustar ryngerynge
NOTE: Do not copy and paste the commands below without double checking.
      The commands contain version numbers which are probably not
      correct for what you are trying to do!

 1. Check out the branch. For example:

      svn co https://pegasus.isi.edu/svn/pegasus/branches/N.M N.M-branch

 2. Generate man pages documentation PDFs:

      ant doc

 3. Copy in and check the PDFs:

      cp dist/pegasus-*/share/doc/pegasus/html/*.pdf doc/
      svn commit -m "Updated man pages PDFs"

 4. Set the version number in Version.in and commit:

      vim ./src/edu/isi/pegasus/common/util/Version.in
      svn commit -m "Setting version for tag"

 5. Tag. Example:

      svn copy https://pegasus.isi.edu/svn/pegasus/branches/N.M \
               https://pegasus.isi.edu/svn/pegasus/tags/N.M.O

 6. Open the branch for new checkins with a new version:

      vim ./src/edu/isi/pegasus/common/util/Version.in
      svn commit -m "Branch reopen for checkins"

 7. Build using the NMI system.

 8. Copy the built binaries from nightlies to their final place.

 9. Build the APT repo. First copy the debs into their final place in
    the apt tree. Then run update-pegasus-apt-repo on a Debian machine.
    Note that Mats' Debian key is used to sign the repository, so
    currently he has to do this step.

10. Build the Yum repo. Similar to the APT repo, but the step has to be
    done on a RHEL machine.

11. Update the download page (and test the links!)

12. Send the announce email to pegasus-announce@mailman.isi.edu
    (Karan or Gaurang has to do this step)
pegasus-wms_4.0.1+dfsg/release-tools/nmi/0000755000175000017500000000000011757531667017405 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/release-tools/nmi/condor_config0000644000175000017500000025656611757531137022145 0ustar ryngerynge######################################################################
##
##  condor_config
##
##  This is the global configuration file for condor.
## ## The file is divided into four main parts: ## Part 1: Settings you MUST customize ## Part 2: Settings you may want to customize ## Part 3: Settings that control the policy of when condor will ## start and stop jobs on your machines ## Part 4: Settings you should probably leave alone (unless you ## know what you're doing) ## ## Please read the INSTALL file (or the Install chapter in the ## Condor Administrator's Manual) for detailed explanations of the ## various settings in here and possible ways to configure your ## pool. ## ## If you are installing Condor as root and then handing over the ## administration of this file to a person you do not trust with ## root access, please read the Installation chapter paying careful ## note to the condor_config.root entries. ## ## Unless otherwise specified, settings that are commented out show ## the defaults that are used if you don't define a value. Settings ## that are defined here MUST BE DEFINED since they have no default ## value. ## ## Unless otherwise indicated, all settings which specify a time are ## defined in seconds. ## ###################################################################### ###################################################################### ###################################################################### ## ## ###### # ## # # ## ##### ##### ## ## # # # # # # # # # ## ###### # # # # # # ## # ###### ##### # # ## # # # # # # # ## # # # # # # ##### ## ## Part 1: Settings you must customize: ###################################################################### ###################################################################### ## What machine is your central manager? CONDOR_HOST = $(FULL_HOSTNAME) ##-------------------------------------------------------------------- ## Pathnames: ##-------------------------------------------------------------------- ## Where have you installed the bin, sbin and lib condor directories? RELEASE_DIR = !!PEGASUS_HOME_DIR!!/condor ## Where is the local condor directory for each host? ## This is where the local config file(s), logs and ## spool/execute directories are located LOCAL_DIR = $(RELEASE_DIR)/var #LOCAL_DIR = $(RELEASE_DIR)/hosts/$(HOSTNAME) ## Where is the machine-specific local config file for each host? LOCAL_CONFIG_FILE = $(RELEASE_DIR)/etc/condor_config.local #LOCAL_CONFIG_FILE = $(RELEASE_DIR)/etc/$(HOSTNAME).local ## If the local config file is not present, is it an error? ## WARNING: This is a potential security issue. ## If not specificed, te default is True #REQUIRE_LOCAL_CONFIG_FILE = TRUE ##-------------------------------------------------------------------- ## Mail parameters: ##-------------------------------------------------------------------- ## When something goes wrong with condor at your site, who should get ## the email? CONDOR_ADMIN = !!VALID_EMAIL_ADDRESS!! ## Full path to a mail delivery program that understands that "-s" ## means you want to specify a subject: MAIL = /usr/bin/mail ##-------------------------------------------------------------------- ## Network domain parameters: ##-------------------------------------------------------------------- ## Internet domain of machines sharing a common UID space. If your ## machines don't share a common UID space, set it to ## UID_DOMAIN = $(FULL_HOSTNAME) ## to specify that each machine has its own UID space. UID_DOMAIN = $(FULL_HOSTNAME) ## Internet domain of machines sharing a common file system. 
## If your machines don't use a network file system, set it to ## FILESYSTEM_DOMAIN = $(FULL_HOSTNAME) ## to specify that each machine has its own file system. FILESYSTEM_DOMAIN = $(FULL_HOSTNAME) ## This macro is used to specify a short description of your pool. ## It should be about 20 characters long. For example, the name of ## the UW-Madison Computer Science Condor Pool is ``UW-Madison CS''. COLLECTOR_NAME = Pegasus WMS Pool ###################################################################### ###################################################################### ## ## ###### ##### ## # # ## ##### ##### # # ## # # # # # # # # ## ###### # # # # # ##### ## # ###### ##### # # ## # # # # # # # ## # # # # # # ####### ## ## Part 2: Settings you may want to customize: ## (it is generally safe to leave these untouched) ###################################################################### ###################################################################### ## ## The user/group ID . of the "Condor" user. ## (this can also be specified in the environment) ## Note: the CONDOR_IDS setting is ignored on Win32 platforms #CONDOR_IDS=x.x ##-------------------------------------------------------------------- ## Flocking: Submitting jobs to more than one pool ##-------------------------------------------------------------------- ## Flocking allows you to run your jobs in other pools, or lets ## others run jobs in your pool. ## ## To let others flock to you, define FLOCK_FROM. ## ## To flock to others, define FLOCK_TO. ## FLOCK_FROM defines the machines where you would like to grant ## people access to your pool via flocking. (i.e. you are granting ## access to these machines to join your pool). FLOCK_FROM = ## An example of this is: #FLOCK_FROM = somehost.friendly.domain, anotherhost.friendly.domain ## FLOCK_TO defines the central managers of the pools that you want ## to flock to. (i.e. you are specifying the machines that you ## want your jobs to be negotiated at -- thereby specifying the ## pools they will run in.) FLOCK_TO = ## An example of this is: #FLOCK_TO = central_manager.friendly.domain, condor.cs.wisc.edu ## FLOCK_COLLECTOR_HOSTS should almost always be the same as ## FLOCK_NEGOTIATOR_HOSTS (as shown below). The only reason it would be ## different is if the collector and negotiator in the pool that you are ## flocking too are running on different machines (not recommended). ## The collectors must be specified in the same corresponding order as ## the FLOCK_NEGOTIATOR_HOSTS list. FLOCK_NEGOTIATOR_HOSTS = $(FLOCK_TO) FLOCK_COLLECTOR_HOSTS = $(FLOCK_TO) ## An example of having the negotiator and the collector on different ## machines is: #FLOCK_NEGOTIATOR_HOSTS = condor.cs.wisc.edu, condor-negotiator.friendly.domain #FLOCK_COLLECTOR_HOSTS = condor.cs.wisc.edu, condor-collector.friendly.domain ##-------------------------------------------------------------------- ## Host/IP access levels ##-------------------------------------------------------------------- ## Please see the administrator's manual for details on these ## settings, what they're for, and how to use them. ## What machines have administrative rights for your pool? This ## defaults to your central manager. You should set it to the ## machine(s) where whoever is the condor administrator(s) works ## (assuming you trust all the users who log into that/those ## machine(s), since this is machine-wide access you're granting). 
HOSTALLOW_ADMINISTRATOR = $(CONDOR_HOST) ## If there are no machines that should have administrative access ## to your pool (for example, there's no machine where only trusted ## users have accounts), you can uncomment this setting. ## Unfortunately, this will mean that administering your pool will ## be more difficult. #HOSTDENY_ADMINISTRATOR = * ## What machines should have "owner" access to your machines, meaning ## they can issue commands that a machine owner should be able to ## issue to their own machine (like condor_vacate). This defaults to ## machines with administrator access, and the local machine. This ## is probably what you want. HOSTALLOW_OWNER = $(FULL_HOSTNAME), $(HOSTALLOW_ADMINISTRATOR) ## Read access. Machines listed as allow (and/or not listed as deny) ## can view the status of your pool, but cannot join your pool ## or run jobs. ## NOTE: By default, without these entries customized, you ## are granting read access to the whole world. You may want to ## restrict that to hosts in your domain. If possible, please also ## grant read access to "*.cs.wisc.edu", so the Condor developers ## will be able to view the status of your pool and more easily help ## you install, configure or debug your Condor installation. ## It is important to have this defined. HOSTALLOW_READ = * #HOSTALLOW_READ = *.your.domain, *.cs.wisc.edu #HOSTDENY_READ = *.bad.subnet, bad-machine.your.domain, 144.77.88.* ## Write access. Machines listed here can join your pool, submit ## jobs, etc. Note: Any machine which has WRITE access must ## also be granted READ access. Granting WRITE access below does ## not also automatically grant READ access; you must change ## HOSTALLOW_READ above as well. ## ## You must set this to something else before Condor will run. ## This most simple option is: ## HOSTALLOW_WRITE = * ## but note that this will allow anyone to submit jobs or add ## machines to your pool and is serious security risk. HOSTALLOW_WRITE = $(FULL_HOSTNAME) #HOSTALLOW_WRITE = *.your.domain, your-friend's-machine.other.domain #HOSTDENY_WRITE = bad-machine.your.domain ## Negotiator access. Machines listed here are trusted central ## managers. You should normally not have to change this. HOSTALLOW_NEGOTIATOR = $(CONDOR_HOST) ## Now, with flocking we need to let the SCHEDD trust the other ## negotiators we are flocking with as well. You should normally ## not have to change this either. HOSTALLOW_NEGOTIATOR_SCHEDD = $(CONDOR_HOST), $(FLOCK_NEGOTIATOR_HOSTS) ## Config access. Machines listed here can use the condor_config_val ## tool to modify all daemon configurations except those specified in ## the condor_config.root file. This level of host-wide access ## should only be granted with extreme caution. By default, config ## access is denied from all hosts. #HOSTALLOW_CONFIG = trusted-host.your.domain ## Flocking Configs. These are the real things that Condor looks at, ## but we set them from the FLOCK_FROM/TO macros above. It is safe ## to leave these unchanged. 
HOSTALLOW_WRITE_COLLECTOR = $(HOSTALLOW_WRITE), $(FLOCK_FROM) HOSTALLOW_WRITE_STARTD = $(HOSTALLOW_WRITE), $(FLOCK_FROM) HOSTALLOW_READ_COLLECTOR = $(HOSTALLOW_READ), $(FLOCK_FROM) HOSTALLOW_READ_STARTD = $(HOSTALLOW_READ), $(FLOCK_FROM) ##-------------------------------------------------------------------- ## Security parameters for setting configuration values remotely: ##-------------------------------------------------------------------- ## These parameters define the list of attributes that can be set ## remotely with condor_config_val for the security access levels ## defined above (for example, WRITE, ADMINISTRATOR, CONFIG, etc). ## Please see the administrator's manual for futher details on these ## settings, what they're for, and how to use them. There are no ## default values for any of these settings. If they are not ## defined, no attributes can be set with condor_config_val. ## Do you want to allow condor_config_val -rset to work at all? ## This feature is disabled by default, so to enable, you must ## uncomment the following setting and change the value to "True". ## Note: changing this requires a restart not just a reconfig. #ENABLE_RUNTIME_CONFIG = False ## Do you want to allow condor_config_val -set to work at all? ## This feature is disabled by default, so to enable, you must ## uncomment the following setting and change the value to "True". ## Note: changing this requires a restart not just a reconfig. #ENABLE_PERSISTENT_CONFIG = False ## Directory where daemons should write persistent config files (used ## to support condor_config_val -set). This directory should *ONLY* ## be writable by root (or the user the Condor daemons are running as ## if non-root). There is no default, administrators must define this. ## Note: changing this requires a restart not just a reconfig. #PERSISTENT_CONFIG_DIR = /full/path/to/root-only/local/directory ## Attributes that can be set by hosts with "CONFIG" permission (as ## defined with HOSTALLOW_CONFIG and HOSTDENY_CONFIG above). ## The commented-out value here was the default behavior of Condor ## prior to version 6.3.3. If you don't need this behavior, you ## should leave this commented out. #SETTABLE_ATTRS_CONFIG = * ## Attributes that can be set by hosts with "ADMINISTRATOR" ## permission (as defined above) #SETTABLE_ATTRS_ADMINISTRATOR = *_DEBUG, MAX_*_LOG ## Attributes that can be set by hosts with "OWNER" permission (as ## defined above) NOTE: any Condor job running on a given host will ## have OWNER permission on that host by default. If you grant this ## kind of access, Condor jobs will be able to modify any attributes ## you list below on the machine where they are running. This has ## obvious security implications, so only grant this kind of ## permission for custom attributes that you define for your own use ## at your pool (custom attributes about your machines that are ## published with the STARTD_ATTRS setting, for example). #SETTABLE_ATTRS_OWNER = your_custom_attribute, another_custom_attr ## You can also define daemon-specific versions of each of these ## settings. For example, to define settings that can only be ## changed in the condor_startd's configuration by hosts with OWNER ## permission, you would use: #STARTD_SETTABLE_ATTRS_OWNER = your_custom_attribute_name ##-------------------------------------------------------------------- ## Network filesystem parameters: ##-------------------------------------------------------------------- ## Do you want to use NFS for file access instead of remote system ## calls? 
#USE_NFS = False ## Do you want to use AFS for file access instead of remote system ## calls? #USE_AFS = False ##-------------------------------------------------------------------- ## Checkpoint server: ##-------------------------------------------------------------------- ## Do you want to use a checkpoint server if one is available? If a ## checkpoint server isn't available or USE_CKPT_SERVER is set to ## False, checkpoints will be written to the local SPOOL directory on ## the submission machine. #USE_CKPT_SERVER = True ## What's the hostname of this machine's nearest checkpoint server? #CKPT_SERVER_HOST = checkpoint-server-hostname.your.domain ## Do you want the starter on the execute machine to choose the ## checkpoint server? If False, the CKPT_SERVER_HOST set on ## the submit machine is used. Otherwise, the CKPT_SERVER_HOST set ## on the execute machine is used. The default is true. #STARTER_CHOOSES_CKPT_SERVER = True ##-------------------------------------------------------------------- ## Miscellaneous: ##-------------------------------------------------------------------- ## Try to save this much swap space by not starting new shadows. ## Specified in megabytes. #RESERVED_SWAP = 5 ## What's the maximum number of jobs you want a single submit machine ## to spawn shadows for? #MAX_JOBS_RUNNING = 200 ## Condor needs to create a few lock files to synchronize access to ## various log files. Because of problems we've had with network ## filesystems and file locking over the years, we HIGHLY recommend ## that you put these lock files on a local partition on each ## machine. If you don't have your LOCAL_DIR on a local partition, ## be sure to change this entry. Whatever user (or group) condor is ## running as needs to have write access to this directory. If ## you're not running as root, this is whatever user you started up ## the condor_master as. If you are running as root, and there's a ## condor account, it's probably condor. Otherwise, it's whatever ## you've set in the CONDOR_IDS environment variable. See the Admin ## manual for details on this. LOCK = $(LOG) ## If you don't use a fully qualified name in your /etc/hosts file ## (or NIS, etc.) for either your official hostname or as an alias, ## Condor wouldn't normally be able to use fully qualified names in ## places that it'd like to. You can set this parameter to the ## domain you'd like appended to your hostname, if changing your host ## information isn't a good option. This parameter must be set in ## the global config file (not the LOCAL_CONFIG_FILE from above). #DEFAULT_DOMAIN_NAME = your.domain.name ## If you don't have DNS set up, Condor will normally fail in many ## places because it can't resolve hostnames to IP addresses and ## vice-versa. If you enable this option, Condor will use ## pseudo-hostnames constructed from a machine's IP address and the ## DEFAULT_DOMAIN_NAME. Both NO_DNS and DEFAULT_DOMAIN must set in ## your top-level config file for this mode of operation to work ## properly. #NO_DNS = True ## Condor can be told whether or not you want the Condor daemons to ## create a core file if something really bad happens. This just ## sets the resource limit for the size of a core file. By default, ## we don't do anything, and leave in place whatever limit was in ## effect when you started the Condor daemons. If this parameter is ## set and "True", we increase the limit to as large as it gets. If ## it's set to "False", we set the limit at 0 (which means that no ## core files are even created). 
Core files greatly help the Condor ## developers debug any problems you might be having. #CREATE_CORE_FILES = True ## Condor Glidein downloads binaries from a remote server for the ## machines into which you're gliding. This saves you from manually ## downloading and installing binaries for every architecture you ## might want to glidein to. The default server is one maintained at ## The University of Wisconsin. If you don't want to use the UW ## server, you can set up your own and change the following values to ## point to it, instead. GLIDEIN_SERVER_URLS = \ http://www.cs.wisc.edu/condor/glidein/binaries \ gsiftp://gridftp.cs.wisc.edu/p/condor/public/binaries/glidein ## List the sites you want to GlideIn to on the GLIDEIN_SITES. For example, ## if you'd like to GlideIn to some Alliance GiB resources, ## uncomment the line below. ## Make sure that $(GLIDEIN_SITES) is included in HOSTALLOW_READ and ## HOSTALLW_WRITE, or else your GlideIns won't be able to join your pool. #GLIDEIN_SITES = *.ncsa.uiuc.edu, *.cs.wisc.edu, *.mcs.anl.gov GLIDEIN_SITES = ## If your site needs to use UID_DOMAIN settings (defined above) that ## are not real Internet domains that match the hostnames, you can ## tell Condor to trust whatever UID_DOMAIN a submit machine gives to ## the execute machine and just make sure the two strings match. The ## default for this setting is False, since it is more secure this ## way. #TRUST_UID_DOMAIN = False ## If you would like to be informed in near real-time via condor_q when ## a vanilla/standard/java job is in a suspension state, set this attribute to ## TRUE. However, this real-time update of the condor_schedd by the shadows ## could cause performance issues if there are thousands of concurrently ## running vanilla/standard/java jobs under a single condor_schedd and they are ## allowed to suspend and resume. #REAL_TIME_JOB_SUSPEND_UPDATES = False ## A standard universe job can perform arbitrary shell calls via the ## libc 'system()' function. This function call is routed back to the shadow ## which performs the actual system() invocation in the initialdir of the ## running program and as the user who submitted the job. However, since the ## user job can request ARBITRARY shell commands to be run by the shadow, this ## is a generally unsafe practice. This should only be made available if it is ## actually needed. If this attribute is not defined, then it is the same as ## it being defined to False. Set it to True to allow the shadow to execute ## arbitrary shell code from the user job. #SHADOW_ALLOW_UNSAFE_REMOTE_EXEC = False ## KEEP_OUTPUT_SANDBOX is an optional feature to tell Condor-G to not ## remove the job spool when the job leaves the queue. To use, just ## set to TRUE. Since you will be operating Condor-G in this manner, ## you may want to put leave_in_queue = false in your job submit ## description files, to tell Condor-G to simply remove the job from ## the queue immediately when the job completes (since the output files ## will stick around no matter what). #KEEP_OUTPUT_SANDBOX = False ## This setting tells the negotiator to ignore user priorities. This ## avoids problems where jobs from different users won't run when using ## condor_advertise instead of a full-blown startd (some of the user ## priority system in Condor relies on information from the startd -- ## we will remove this reliance when we support the user priority ## system for grid sites in the negotiator; for now, this setting will ## just disable it). 
#NEGOTIATOR_IGNORE_USER_PRIORITIES = False ## These are the directories used to locate classad plug-in functions #CLASSAD_SCRIPT_DIRECTORY = #CLASSAD_LIB_PATH = ## This setting tells Condor whether to delegate or copy GSI X509 ## credentials when sending them over the wire between daemons. ## Delegation can take up to a second, which is very slow when ## submitting a large number of jobs. Copying exposes the credential ## to third parties if Condor isn't set to encrypt communications. ## By default, Condor will delegate rather than copy. DELEGATE_JOB_GSI_CREDENTIALS = True ## This setting controls the default behaviour for the spooling of files ## into, or out of, the Condor system by such tools as condor_submit ## and condor_transfer_data. Here is the list of valid settings for this ## parameter and what they mean: ## ## stm_use_schedd_only ## Ask the condor_schedd to solely store/retreive the sandbox ## ## stm_use_transferd ## Ask the condor_schedd for a location of a condor_transferd, then ## store/retreive the sandbox from the transferd itself. ## ## The allowed values are case insensitive. ## The default of this parameter if not specified is: stm_use_schedd_only SANDBOX_TRANSFER_METHOD = stm_use_schedd_only ##-------------------------------------------------------------------- ## Settings that control the daemon's debugging output: ##-------------------------------------------------------------------- ## ## The flags given in ALL_DEBUG are shared between all daemons. ## ALL_DEBUG = MAX_COLLECTOR_LOG = 1000000 COLLECTOR_DEBUG = MAX_KBDD_LOG = 1000000 KBDD_DEBUG = MAX_NEGOTIATOR_LOG = 1000000 NEGOTIATOR_DEBUG = D_MATCH MAX_NEGOTIATOR_MATCH_LOG = 1000000 MAX_SCHEDD_LOG = 1000000 SCHEDD_DEBUG = D_PID MAX_SHADOW_LOG = 1000000 SHADOW_DEBUG = MAX_STARTD_LOG = 1000000 STARTD_DEBUG = MAX_STARTER_LOG = 1000000 STARTER_DEBUG = D_NODATE MAX_MASTER_LOG = 1000000 MASTER_DEBUG = ## When the master starts up, should it truncate it's log file? #TRUNC_MASTER_LOG_ON_OPEN = False ## The daemons touch their log file periodically, even when they have ## nothing to write. When a daemon starts up, it prints the last time ## the log file was modified. This lets you estimate when a previous ## instance of a daemon stopped running. This paramete controls often ## the daemons touch the file (in seconds). TOUCH_LOG_INTERVAL = 60 ###################################################################### ###################################################################### ## ## ###### ##### ## # # ## ##### ##### # # ## # # # # # # # # ## ###### # # # # # ##### ## # ###### ##### # # ## # # # # # # # # ## # # # # # # ##### ## ## Part 3: Settings control the policy for running, stopping, and ## periodically checkpointing condor jobs: ###################################################################### ###################################################################### ## This section contains macros are here to help write legible ## expressions: MINUTE = 60 HOUR = (60 * $(MINUTE)) StateTimer = (CurrentTime - EnteredCurrentState) ActivityTimer = (CurrentTime - EnteredCurrentActivity) ActivationTimer = (CurrentTime - JobStart) LastCkpt = (CurrentTime - LastPeriodicCheckpoint) ## The JobUniverse attribute is just an int. 
These macros can be ## used to specify the universe in a human-readable way: STANDARD = 1 PVM = 4 VANILLA = 5 MPI = 8 VM = 13 IsPVM = (TARGET.JobUniverse == $(PVM)) IsMPI = (TARGET.JobUniverse == $(MPI)) IsVanilla = (TARGET.JobUniverse == $(VANILLA)) IsStandard = (TARGET.JobUniverse == $(STANDARD)) IsVM = (TARGET.JobUniverse == $(VM)) NonCondorLoadAvg = (LoadAvg - CondorLoadAvg) BackgroundLoad = 0.3 HighLoad = 0.5 StartIdleTime = 15 * $(MINUTE) ContinueIdleTime = 5 * $(MINUTE) MaxSuspendTime = 10 * $(MINUTE) MaxVacateTime = 10 * $(MINUTE) KeyboardBusy = (KeyboardIdle < $(MINUTE)) ConsoleBusy = (ConsoleIdle < $(MINUTE)) CPUIdle = ($(NonCondorLoadAvg) <= $(BackgroundLoad)) CPUBusy = ($(NonCondorLoadAvg) >= $(HighLoad)) KeyboardNotBusy = ($(KeyboardBusy) == False) BigJob = (TARGET.ImageSize >= (50 * 1024)) MediumJob = (TARGET.ImageSize >= (15 * 1024) && TARGET.ImageSize < (50 * 1024)) SmallJob = (TARGET.ImageSize < (15 * 1024)) JustCPU = ($(CPUBusy) && ($(KeyboardBusy) == False)) MachineBusy = ($(CPUBusy) || $(KeyboardBusy)) ## The RANK expression controls which jobs this machine prefers to ## run over others. Some examples from the manual include: ## RANK = TARGET.ImageSize ## RANK = (Owner == "coltrane") + (Owner == "tyner") \ ## + ((Owner == "garrison") * 10) + (Owner == "jones") ## By default, RANK is always 0, meaning that all jobs have an equal ## ranking. #RANK = 0 ##################################################################### ## This where you choose the configuration that you would like to ## use. It has no defaults so it must be defined. We start this ## file off with the UWCS_* policy. ###################################################################### ## Also here is what is referred to as the TESTINGMODE_*, which is ## a quick hardwired way to test Condor. ## Replace UWCS_* with TESTINGMODE_* if you wish to do testing mode. ## For example: ## WANT_SUSPEND = $(UWCS_WANT_SUSPEND) ## becomes ## WANT_SUSPEND = $(TESTINGMODE_WANT_SUSPEND) WANT_SUSPEND = $(UWCS_WANT_SUSPEND) WANT_VACATE = $(UWCS_WANT_VACATE) ## When is this machine willing to start a job? START = $(UWCS_START) ## When should a local universe job be allowed to start? START_LOCAL_UNIVERSE = True # Only start a local universe jobs if there are less # than 100 local jobs currently running #START_LOCAL_UNIVERSE = TotalLocalJobsRunning < 100 ## When should a scheduler universe job be allowed to start? START_SCHEDULER_UNIVERSE = True # Only start a scheduler universe jobs if there are less # than 100 scheduler jobs currently running #START_SCHEDULER_UNIVERSE = TotalSchedulerJobsRunning < 100 ## When to suspend a job? SUSPEND = $(UWCS_SUSPEND) ## When to resume a suspended job? CONTINUE = $(UWCS_CONTINUE) ## When to nicely stop a job? ## (as opposed to killing it instantaneously) PREEMPT = $(UWCS_PREEMPT) ## When to instantaneously kill a preempting job ## (e.g. if a job is in the pre-empting stage for too long) KILL = $(UWCS_KILL) PERIODIC_CHECKPOINT = $(UWCS_PERIODIC_CHECKPOINT) PREEMPTION_REQUIREMENTS = $(UWCS_PREEMPTION_REQUIREMENTS) PREEMPTION_RANK = $(UWCS_PREEMPTION_RANK) NEGOTIATOR_PRE_JOB_RANK = $(UWCS_NEGOTIATOR_PRE_JOB_RANK) NEGOTIATOR_POST_JOB_RANK = $(UWCS_NEGOTIATOR_POST_JOB_RANK) MaxJobRetirementTime = $(UWCS_MaxJobRetirementTime) ##################################################################### ## This is the UWisc - CS Department Configuration. 
##################################################################### UWCS_WANT_SUSPEND = ( $(SmallJob) || $(KeyboardNotBusy) \ || $(IsPVM) || $(IsVanilla) ) UWCS_WANT_VACATE = ( $(ActivationTimer) > 10 * $(MINUTE) \ || $(IsPVM) || $(IsVanilla) ) # Only start jobs if: # 1) the keyboard has been idle long enough, AND # 2) the load average is low enough OR the machine is currently # running a Condor job # (NOTE: Condor will only run 1 job at a time on a given resource. # The reasons Condor might consider running a different job while # already running one are machine Rank (defined above), and user # priorities.) UWCS_START = ( (KeyboardIdle > $(StartIdleTime)) \ && ( $(CPUIdle) || \ (State != "Unclaimed" && State != "Owner")) ) # Suspend jobs if: # 1) the keyboard has been touched, OR # 2a) The cpu has been busy for more than 2 minutes, AND # 2b) the job has been running for more than 90 seconds UWCS_SUSPEND = ( $(KeyboardBusy) || \ ( (CpuBusyTime > 2 * $(MINUTE)) \ && $(ActivationTimer) > 90 ) ) # Continue jobs if: # 1) the cpu is idle, AND # 2) we've been suspended more than 10 seconds, AND # 3) the keyboard hasn't been touched in a while UWCS_CONTINUE = ( $(CPUIdle) && ($(ActivityTimer) > 10) \ && (KeyboardIdle > $(ContinueIdleTime)) ) # Preempt jobs if: # 1) The job is suspended and has been suspended longer than we want # 2) OR, we don't want to suspend this job, but the conditions to # suspend jobs have been met (someone is using the machine) UWCS_PREEMPT = ( ((Activity == "Suspended") && \ ($(ActivityTimer) > $(MaxSuspendTime))) \ || (SUSPEND && (WANT_SUSPEND == False)) ) # Maximum time (in seconds) to wait for a job to finish before kicking # it off (due to PREEMPT, a higher priority claim, or the startd # gracefully shutting down). This is computed from the time the job # was started, minus any suspension time. Once the retirement time runs # out, the usual preemption process will take place. The job may # self-limit the retirement time to _less_ than what is given here. # By default, nice user jobs and standard universe jobs set their # MaxJobRetirementTime to 0, so they will usually not wait in retirement. UWCS_MaxJobRetirementTime = 0 # Kill jobs if they have taken too long to vacate gracefully UWCS_KILL = $(ActivityTimer) > $(MaxVacateTime) ## Only define vanilla versions of these if you want to make them ## different from the above settings. #SUSPEND_VANILLA = ( $(KeyboardBusy) || \ # ((CpuBusyTime > 2 * $(MINUTE)) && $(ActivationTimer) > 90) ) #CONTINUE_VANILLA = ( $(CPUIdle) && ($(ActivityTimer) > 10) \ # && (KeyboardIdle > $(ContinueIdleTime)) ) #PREEMPT_VANILLA = ( ((Activity == "Suspended") && \ # ($(ActivityTimer) > $(MaxSuspendTime))) \ # || (SUSPEND_VANILLA && (WANT_SUSPEND == False)) ) #KILL_VANILLA = $(ActivityTimer) > $(MaxVacateTime) ## Checkpoint every 3 hours on average, with a +-30 minute random ## factor to avoid having many jobs hit the checkpoint server at ## the same time. UWCS_PERIODIC_CHECKPOINT = $(LastCkpt) > (3 * $(HOUR) + \ $RANDOM_INTEGER(-30,30,1) * $(MINUTE) ) ## You might want to checkpoint a little less often. A good ## example of this is below. For jobs smaller than 60 megabytes, we ## periodic checkpoint every 6 hours. For larger jobs, we only ## checkpoint every 12 hours. #UWCS_PERIODIC_CHECKPOINT = \ # ( (TARGET.ImageSize < 60000) && \ # ($(LastCkpt) > (6 * $(HOUR) + $RANDOM_INTEGER(-30,30,1))) ) || \ # ( $(LastCkpt) > (12 * $(HOUR) + $RANDOM_INTEGER(-30,30,1)) ) ## The rank expressions used by the negotiator are configured below. 
## This is the order in which ranks are applied by the negotiator: ## 1. NEGOTIATOR_PRE_JOB_RANK ## 2. rank in job ClassAd ## 3. NEGOTIATOR_POST_JOB_RANK ## 4. cause of preemption (0=user priority,1=startd rank,2=no preemption) ## 5. PREEMPTION_RANK ## The NEGOTIATOR_PRE_JOB_RANK expression overrides all other ranks ## that are used to pick a match from the set of possibilities. ## The following expression matches jobs to unclaimed resources ## whenever possible, regardless of the job-supplied rank. UWCS_NEGOTIATOR_PRE_JOB_RANK = RemoteOwner =?= UNDEFINED ## The NEGOTIATOR_POST_JOB_RANK expression chooses between ## resources that are equally preferred by the job. ## The following example expression steers jobs toward ## faster machines and tends to fill a cluster of multi-processors ## breadth-first instead of depth-first. In this example, ## the expression is chosen to have no effect when preemption ## would take place, allowing control to pass on to ## PREEMPTION_RANK. #UWCS_NEGOTIATOR_POST_JOB_RANK = \ # (RemoteOwner =?= UNDEFINED) * (KFlops - SlotID) ## The negotiator will not preempt a job running on a given machine ## unless the PREEMPTION_REQUIREMENTS expression evaluates to true ## and the owner of the idle job has a better priority than the owner ## of the running job. This expression defaults to true. UWCS_PREEMPTION_REQUIREMENTS = ( $(StateTimer) > (1 * $(HOUR)) && \ RemoteUserPrio > SubmittorPrio * 1.2 ) || (MY.NiceUser == True) ## The PREEMPTION_RANK expression is used in a case where preemption ## is the only option and all other negotiation ranks are equal. For ## example, if the job has no preference, it is usually preferable to ## preempt a job with a small ImageSize instead of a job with a large ## ImageSize. The default is to rank all preemptable matches the ## same. However, the negotiator will always prefer to match the job ## with an idle machine over a preemptable machine, if all other ## negotiation ranks are equal. UWCS_PREEMPTION_RANK = (RemoteUserPrio * 1000000) - TARGET.ImageSize ##################################################################### ## This is a Configuration that will cause your Condor jobs to ## always run. This is intended for testing only. ###################################################################### ## This mode will cause your jobs to start on a machine an will let ## them run to completion. Condor will ignore all of what is going ## on in the machine (load average, keyboard activity, etc.) 
TESTINGMODE_WANT_SUSPEND = False TESTINGMODE_WANT_VACATE = False TESTINGMODE_START = True TESTINGMODE_SUSPEND = False TESTINGMODE_CONTINUE = True TESTINGMODE_PREEMPT = False TESTINGMODE_KILL = False TESTINGMODE_PERIODIC_CHECKPOINT = False TESTINGMODE_PREEMPTION_REQUIREMENTS = False TESTINGMODE_PREEMPTION_RANK = 0 ###################################################################### ###################################################################### ## ## ###### # ## # # ## ##### ##### # # ## # # # # # # # # # ## ###### # # # # # # # ## # ###### ##### # ####### ## # # # # # # # ## # # # # # # # ## ## Part 4: Settings you should probably leave alone: ## (unless you know what you're doing) ###################################################################### ###################################################################### ###################################################################### ## Daemon-wide settings: ###################################################################### ## Pathnames LOG = $(LOCAL_DIR)/log SPOOL = $(LOCAL_DIR)/spool EXECUTE = $(LOCAL_DIR)/execute BIN = $(RELEASE_DIR)/bin LIB = $(RELEASE_DIR)/lib INCLUDE = $(RELEASE_DIR)/include SBIN = $(RELEASE_DIR)/sbin LIBEXEC = $(RELEASE_DIR)/libexec ## If you leave HISTORY undefined (comment it out), no history file ## will be created. HISTORY = $(SPOOL)/history ## Log files COLLECTOR_LOG = $(LOG)/CollectorLog KBDD_LOG = $(LOG)/KbdLog MASTER_LOG = $(LOG)/MasterLog NEGOTIATOR_LOG = $(LOG)/NegotiatorLog NEGOTIATOR_MATCH_LOG = $(LOG)/MatchLog SCHEDD_LOG = $(LOG)/SchedLog SHADOW_LOG = $(LOG)/ShadowLog STARTD_LOG = $(LOG)/StartLog STARTER_LOG = $(LOG)/StarterLog ## Lock files SHADOW_LOCK = $(LOCK)/ShadowLock ## This setting primarily allows you to change the port that the ## collector is listening on. By default, the collector uses port ## 9618, but you can set the port with a ":port", such as: ## COLLECTOR_HOST = $(CONDOR_HOST):1234 COLLECTOR_HOST = $(CONDOR_HOST) ## The NEGOTIATOR_HOST parameter has been deprecated. The port where ## the negotiator is listening is now dynamically allocated and the IP ## and port are now obtained from the collector, just like all the ## other daemons. However, if your pool contains any machines that ## are running version 6.7.3 or earlier, you can uncomment this ## setting to go back to the old fixed-port (9614) for the negotiator. #NEGOTIATOR_HOST = $(CONDOR_HOST) ## How long are you willing to let daemons try their graceful ## shutdown methods before they do a hard shutdown? (30 minutes) #SHUTDOWN_GRACEFUL_TIMEOUT = 1800 ## How much disk space would you like reserved from Condor? In ## places where Condor is computing the free disk space on various ## partitions, it subtracts the amount it really finds by this ## many megabytes. (If undefined, defaults to 0). RESERVED_DISK = 5 ## If your machine is running AFS and the AFS cache lives on the same ## partition as the other Condor directories, and you want Condor to ## reserve the space that your AFS cache is configured to use, set ## this to true. #RESERVE_AFS_CACHE = False ## By default, if a user does not specify "notify_user" in the submit ## description file, any email Condor sends about that job will go to ## "username@UID_DOMAIN". If your machines all share a common UID ## domain (so that you would set UID_DOMAIN to be the same across all ## machines in your pool), *BUT* email to user@UID_DOMAIN is *NOT* ## the right place for Condor to send email for your site, you can ## define the default domain to use for email. 
A common example ## would be to set EMAIL_DOMAIN to the fully qualified hostname of ## each machine in your pool, so users submitting jobs from a ## specific machine would get email sent to user@machine.your.domain, ## instead of user@your.domain. In general, you should leave this ## setting commented out unless two things are true: 1) UID_DOMAIN is ## set to your domain, not $(FULL_HOSTNAME), and 2) email to ## user@UID_DOMAIN won't work. #EMAIL_DOMAIN = $(FULL_HOSTNAME) ## Should Condor daemons create a UDP command socket (for incomming ## UDP-based commands) in addition to the TCP command socket? By ## default, classified ad updates sent to the collector use UDP, in ## addition to some keep alive messages and other non-essential ## communication. However, in certain situations, it might be ## desirable to disable the UDP command port (for example, to reduce ## the number of ports represented by a GCB broker, etc). If not ## defined, the UDP command socket is enabled by default, and to ## modify this, you must restart your Condor daemons. Also, this ## setting must be defined machine-wide. For example, setting ## "STARTD.WANT_UDP_COMMAND_SOCKET = False" while the global setting ## is "True" will still result in the startd creating a UDP socket. #WANT_UDP_COMMAND_SOCKET = True ## If your site needs to use TCP updates to the collector, instead of ## UDP, you can enable this feature. HOWEVER, WE DO NOT RECOMMEND ## THIS FOR MOST SITES! In general, the only sites that might want ## this feature are pools made up of machines connected via a ## wide-area network where UDP packets are frequently or always ## dropped. If you enable this feature, you *MUST* turn on the ## COLLECTOR_SOCKET_CACHE_SIZE setting at your collector, and each ## entry in the socket cache uses another file descriptor. If not ## defined, this feature is disabled by default. #UPDATE_COLLECTOR_WITH_TCP = True ## HIGHPORT and LOWPORT let you set the range of ports that Condor ## will use. This may be useful if you are behind a firewall. By ## default, Condor uses port 9618 for the collector, 9614 for the ## negotiator, and system-assigned (apparently random) ports for ## everything else. HIGHPORT and LOWPORT only affect these ## system-assigned ports, but will restrict them to the range you ## specify here. If you want to change the well-known ports for the ## collector or negotiator, see COLLECTOR_HOST or NEGOTIATOR_HOST. ## Note that both LOWPORT and HIGHPORT must be at least 1024 if you ## are not starting your daemons as root. You may also specify ## different port ranges for incoming and outgoing connections by ## using IN_HIGHPORT/IN_LOWPORT and OUT_HIGHPORT/OUT_LOWPORT. #HIGHPORT = 9700 #LOWPORT = 9600 ###################################################################### ## Daemon-specific settings: ###################################################################### ##-------------------------------------------------------------------- ## condor_master ##-------------------------------------------------------------------- ## Daemons you want the master to keep running for you: DAEMON_LIST = MASTER, STARTD, SCHEDD, COLLECTOR, NEGOTIATOR ## Which daemons use the Condor DaemonCore library (i.e., not the ## checkpoint server or custom user daemons)? ## Note: Daemons in this list cannot use a static command port. #DC_DAEMON_LIST = \ #MASTER, STARTD, SCHEDD, KBDD, COLLECTOR, NEGOTIATOR, EVENTD, \ #VIEW_SERVER, CONDOR_VIEW, VIEW_COLLECTOR, HAWKEYE, CREDD, HAD, \ #QUILL ## Where are the binaries for these daemons? 
######################################################################
##  Daemon-specific settings:
######################################################################

##--------------------------------------------------------------------
##  condor_master
##--------------------------------------------------------------------
##  Daemons you want the master to keep running for you:
DAEMON_LIST = MASTER, STARTD, SCHEDD, COLLECTOR, NEGOTIATOR

##  Which daemons use the Condor DaemonCore library (i.e., not the
##  checkpoint server or custom user daemons)?
##  Note: Daemons in this list cannot use a static command port.
#DC_DAEMON_LIST = \
#MASTER, STARTD, SCHEDD, KBDD, COLLECTOR, NEGOTIATOR, EVENTD, \
#VIEW_SERVER, CONDOR_VIEW, VIEW_COLLECTOR, HAWKEYE, CREDD, HAD, \
#QUILL

##  Where are the binaries for these daemons?
MASTER        = $(SBIN)/condor_master
STARTD        = $(SBIN)/condor_startd
SCHEDD        = $(SBIN)/condor_schedd
KBDD          = $(SBIN)/condor_kbdd
NEGOTIATOR    = $(SBIN)/condor_negotiator
COLLECTOR     = $(SBIN)/condor_collector
STARTER_LOCAL = $(SBIN)/condor_starter

##  When the master starts up, it can place its address (IP and port)
##  into a file.  This way, tools running on the local machine don't
##  need to query the central manager to find the master.  This
##  feature can be turned off by commenting out this setting.
MASTER_ADDRESS_FILE = $(LOG)/.master_address

##  Where should the master find the condor_preen binary?  If you don't
##  want preen to run at all, just comment out this setting.
PREEN = $(SBIN)/condor_preen

##  How do you want preen to behave?  The "-m" means you want email
##  about files preen finds that it thinks it should remove.  The "-r"
##  means you want preen to actually remove these files.  If you don't
##  want either of those things to happen, just remove the appropriate
##  one from this setting.
PREEN_ARGS = -m -r

##  How often should the master start up condor_preen? (once a day)
#PREEN_INTERVAL = 86400

##  If a daemon dies an unnatural death, do you want email about it?
#PUBLISH_OBITUARIES = True

##  If you're getting obituaries, how many lines of the end of that
##  daemon's log file do you want included in the obituary?
#OBITUARY_LOG_LENGTH = 20

##  Should the master run?
#START_MASTER = True

##  Should the master start up the daemons you want it to?
#START_DAEMONS = True

##  How often do you want the master to send an update to the central
##  manager?
#MASTER_UPDATE_INTERVAL = 300

##  How often do you want the master to check the timestamps of the
##  daemons it's running?  If any daemons have been modified, the
##  master restarts them.
#MASTER_CHECK_NEW_EXEC_INTERVAL = 300

##  Once you notice new binaries, how long should you wait before you
##  try to execute them?
#MASTER_NEW_BINARY_DELAY = 120

##  What's the maximum amount of time you're willing to give the
##  daemons to quickly shutdown before you just kill them outright?
#SHUTDOWN_FAST_TIMEOUT = 120

######
##  Exponential backoff settings:
######
##  When a daemon keeps crashing, we use "exponential backoff" so we
##  wait longer and longer before restarting it.  This is the base of
##  the exponent used to determine how long to wait before starting
##  the daemon again:
#MASTER_BACKOFF_FACTOR = 2.0

##  What's the maximum amount of time you want the master to wait
##  between attempts to start a given daemon?  (With 2.0 as the
##  MASTER_BACKOFF_FACTOR, you'd hit 1 hour in 12 restarts...)
#MASTER_BACKOFF_CEILING = 3600

##  How long should a daemon run without crashing before we consider
##  it "recovered"?  Once a daemon has recovered, we reset the number
##  of restarts so the exponential backoff stuff goes back to normal.
#MASTER_RECOVER_FACTOR = 300

##--------------------------------------------------------------------
##  condor_startd
##--------------------------------------------------------------------
##  Where are the various condor_starter binaries installed?
STARTER_LIST = STARTER, STARTER_PVM, STARTER_STANDARD
STARTER          = $(SBIN)/condor_starter
STARTER_PVM      = $(SBIN)/condor_starter.pvm
STARTER_STANDARD = $(SBIN)/condor_starter.std
STARTER_LOCAL    = $(SBIN)/condor_starter

##  When the startd starts up, it can place its address (IP and port)
##  into a file.  This way, tools running on the local machine don't
##  need to query the central manager to find the startd.  This
##  feature can be turned off by commenting out this setting.
STARTD_ADDRESS_FILE = $(LOG)/.startd_address

##  When a machine is claimed, how often should we poll the state of
##  the machine to see if we need to evict/suspend the job, etc?
#POLLING_INTERVAL = 5

##  How often should the startd send updates to the central manager?
#UPDATE_INTERVAL = 300

##  How long is the startd willing to stay in the "matched" state?
#MATCH_TIMEOUT = 300

##  How long is the startd willing to stay in the preempting/killing
##  state before it just kills the starter directly?
#KILLING_TIMEOUT = 30

##  When a machine is unclaimed, when should it run benchmarks?
##  LastBenchmark is initialized to 0, so this expression says as soon
##  as we're unclaimed, run the benchmarks.  Thereafter, if we're
##  unclaimed and it's been at least 4 hours since we ran the last
##  benchmarks, run them again.  The startd keeps a weighted average
##  of the benchmark results to provide more accurate values.
##  Note, if you don't want any benchmarks run at all, either comment
##  RunBenchmarks out, or set it to "False".
BenchmarkTimer = (CurrentTime - LastBenchmark)
RunBenchmarks : (LastBenchmark == 0 ) || ($(BenchmarkTimer) >= (4 * $(HOUR)))
#RunBenchmarks : False

##  Normally, when the startd is computing the idle time of all the
##  users of the machine (both local and remote), it checks the utmp
##  file to find all the currently active ttys, and only checks access
##  time of the devices associated with active logins.  Unfortunately,
##  on some systems, utmp is unreliable, and the startd might miss
##  keyboard activity by doing this.  So, if your utmp is unreliable,
##  set this setting to True and the startd will check the access time
##  on all tty and pty devices.
#STARTD_HAS_BAD_UTMP = False

##  This entry allows the startd to monitor console (keyboard and
##  mouse) activity by checking the access times on special files in
##  /dev.  Activity on these files shows up as "ConsoleIdle" time in
##  the startd's ClassAd.  Just give a comma-separated list of the
##  names of devices you want considered the console, without the
##  "/dev/" portion of the pathname.
#CONSOLE_DEVICES = mouse, console

##  The STARTD_ATTRS (and legacy STARTD_EXPRS) entry allows you to
##  have the startd advertise arbitrary attributes from the config
##  file in its ClassAd.  Give the comma-separated list of entries
##  from the config file you want in the startd ClassAd.
##  NOTE: because of the different syntax of the config file and
##  ClassAds, you might have to do a little extra work to get a given
##  entry into the ClassAd.  In particular, ClassAds require double
##  quotes (") around your strings.  Numeric values can go in
##  directly, as can boolean expressions.  For example, if you wanted
##  the startd to advertise its list of console devices, when it's
##  configured to run benchmarks, and how often it sends updates to
##  the central manager, you'd have to define the following helper
##  macro:
#MY_CONSOLE_DEVICES = "$(CONSOLE_DEVICES)"
##  Note: this must come before you define STARTD_ATTRS because macros
##  must be defined before you use them in other macros or
##  expressions.
##  Then, you'd set the STARTD_ATTRS setting to this:
#STARTD_ATTRS = MY_CONSOLE_DEVICES, RunBenchmarks, UPDATE_INTERVAL

##  STARTD_ATTRS can also be defined on a per-slot basis.  The startd
##  builds the list of attributes to advertise by combining the lists
##  in this order: STARTD_ATTRS, SLOTx_STARTD_ATTRS.
##  In the below example, the startd ad for slot1 will have the value
##  for favorite_color, favorite_season, and favorite_movie, and slot2
##  will have favorite_color, favorite_season, and favorite_song.
##
#STARTD_ATTRS = favorite_color, favorite_season
#SLOT1_STARTD_ATTRS = favorite_movie
#SLOT2_STARTD_ATTRS = favorite_song
##
##  Attributes in the STARTD_ATTRS list can also be on a per-slot basis.
##  For example, the following configuration:
##
#favorite_color = "blue"
#favorite_season = "spring"
#SLOT2_favorite_color = "green"
#SLOT3_favorite_season = "summer"
#STARTD_ATTRS = favorite_color, favorite_season
##
##  will result in the following attributes in the slot classified
##  ads:
##
##  slot1 - favorite_color = "blue"; favorite_season = "spring"
##  slot2 - favorite_color = "green"; favorite_season = "spring"
##  slot3 - favorite_color = "blue"; favorite_season = "summer"
##
##  Finally, the recommended default value for this setting is to
##  publish the COLLECTOR_HOST setting as a string.  This can be
##  useful using the "$$(COLLECTOR_HOST)" syntax in the submit file
##  for jobs to know (for example, via their environment) what pool
##  they're running in.
COLLECTOR_HOST_STRING = "$(COLLECTOR_HOST)"
STARTD_ATTRS = COLLECTOR_HOST_STRING

##  When the startd is claimed by a remote user, it can also advertise
##  arbitrary attributes from the ClassAd of the job it is working on.
##  Just list the attribute names you want advertised.
##  Note: since this is already a ClassAd, you don't have to do
##  anything funny with strings, etc.  This feature can be turned off
##  by commenting out this setting (there is no default).
STARTD_JOB_EXPRS = ImageSize, ExecutableSize, JobUniverse, NiceUser

##  If you want to "lie" to Condor about how many CPUs your machine
##  has, you can use this setting to override Condor's automatic
##  computation.  If you modify this, you must restart the startd for
##  the change to take effect (a simple condor_reconfig will not do).
##  Please read the section on "condor_startd Configuration File
##  Macros" in the Condor Administrators Manual for a further
##  discussion of this setting.  Its use is not recommended.  This
##  must be an integer ("N" isn't a valid setting, that's just used to
##  represent the default).
#NUM_CPUS = N

##  If you never want Condor to detect more than "N" CPUs, uncomment
##  this line.  You must restart the startd for this setting to take
##  effect.  If set to 0 or a negative number, it is ignored.
##  By default, it is ignored.  Otherwise, it must be a positive
##  integer ("N" isn't a valid setting, that's just used to
##  represent the default).
#MAX_NUM_CPUS = N

##  Normally, Condor will automatically detect the amount of physical
##  memory available on your machine.  Define MEMORY to tell Condor
##  how much physical memory (in MB) your machine has, overriding the
##  value Condor computes automatically.  For example:
#MEMORY = 128

##  How much memory would you like reserved from Condor?  By default,
##  Condor considers all the physical memory of your machine as
##  available to be used by Condor jobs.  If RESERVED_MEMORY is
##  defined, Condor subtracts it from the amount of memory it
##  advertises as available.
#RESERVED_MEMORY = 0

######
##  SMP startd settings
##
##  By default, Condor will evenly divide the resources in an SMP
##  machine (such as RAM, swap space and disk space) among all the
##  CPUs, and advertise each CPU as its own slot with an even share of
##  the system resources.
##  If you want something other than this, there are a few options
##  available to you.  Please read the section on "Configuring The
##  Startd for SMP Machines" in the Condor Administrator's Manual for
##  full details.  The various settings are only briefly listed and
##  described here.
######

##  The maximum number of different slot types.
#MAX_SLOT_TYPES = 10

##  Use this setting to define your own slot types.  This
##  allows you to divide system resources unevenly among your CPUs.
##  You must use a different setting for each different type you
##  define.  The "<N>" in the name of the macro listed below must be
##  an integer from 1 to MAX_SLOT_TYPES (defined above), and you use
##  this number to refer to your type.  There are many different
##  formats these settings can take, so be sure to refer to the
##  section on "Configuring The Startd for SMP Machines" in the
##  Condor Administrator's Manual for full details.  In particular,
##  read the section titled "Defining Slot Types" to help understand
##  this setting.  If you modify any of these settings, you must
##  restart the condor_startd for the change to take effect.
#SLOT_TYPE_<N> = 1/4
#SLOT_TYPE_<N> = cpus=1, ram=25%, swap=1/4, disk=1/4
# For example:
#SLOT_TYPE_1 = 1/8
#SLOT_TYPE_2 = 1/4

##  If you define your own slot types, you must specify how many
##  slots of each type you wish to advertise.  You do this with the
##  setting below, replacing the "<N>" with the corresponding integer
##  you used to define the type above.  You can change the number of
##  a given type being advertised at run-time, with a simple
##  condor_reconfig.  (A combined example appears at the end of this
##  SMP section.)
#NUM_SLOTS_TYPE_<N> = M
# For example:
#NUM_SLOTS_TYPE_1 = 6
#NUM_SLOTS_TYPE_2 = 1

##  The number of evenly-divided slots you want Condor to report to
##  your pool (if less than the total number of CPUs).  This setting
##  is only considered if the "type" settings described above are not
##  in use.  By default, all CPUs are reported.  This setting must be
##  an integer ("N" isn't a valid setting, that's just used to
##  represent the default).
#NUM_SLOTS = N

##  How many of the slots the startd is representing should be
##  "connected" to the console (in other words, notice when there's
##  console activity)?  This defaults to all slots (N in a machine
##  with N CPUs).  This must be an integer ("N" isn't a valid
##  setting, that's just used to represent the default).
#SLOTS_CONNECTED_TO_CONSOLE = N

##  How many of the slots the startd is representing should be
##  "connected" to the keyboard (for remote tty activity, as well as
##  console activity)?  Defaults to 1.
#SLOTS_CONNECTED_TO_KEYBOARD = 1

##  If there are slots that aren't connected to the keyboard or the
##  console (see the above two settings), the corresponding idle time
##  reported will be the time since the startd was spawned, plus the
##  value of this parameter.  It defaults to 20 minutes.  We do this
##  because, if the slot is configured not to care about keyboard
##  activity, we want it to be available to Condor jobs as soon as
##  the startd starts up, instead of having to wait for 15 minutes or
##  more (which is the default time a machine must be idle before
##  Condor will start a job).  If you don't want this boost, just set
##  the value to 0.  If you change your START expression to require
##  more than 15 minutes before a job starts, but you still want jobs
##  to start right away on some of your SMP nodes, just increase this
##  parameter.
#DISCONNECTED_KEYBOARD_IDLE_BOOST = 1200
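##  Editor's note: a combined illustrative sketch of the SLOT_TYPE
##  settings above (the values are hypothetical, for a 4-CPU machine):
##  one "large" slot with half the RAM, plus three small ones:
#SLOT_TYPE_1 = cpus=1, ram=1/2, swap=1/4, disk=1/4
#NUM_SLOTS_TYPE_1 = 1
#SLOT_TYPE_2 = cpus=1, ram=1/6, swap=1/4, disk=1/4
#NUM_SLOTS_TYPE_2 = 3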
######
##  Settings for computing optional resource availability statistics:
######
##  If STARTD_COMPUTE_AVAIL_STATS = True, the startd will compute
##  statistics about resource availability to be included in the
##  classad(s) sent to the collector describing the resource(s) the
##  startd manages.  The following attributes will always be included
##  in the resource classad(s) if STARTD_COMPUTE_AVAIL_STATS = True:
##    AvailTime = What proportion of the time (between 0.0 and 1.0)
##      has this resource been in a state other than "Owner"?
##    LastAvailInterval = What was the duration (in seconds) of the
##      last period between "Owner" states?
##  The following attributes will also be included if the resource is
##  not in the "Owner" state:
##    AvailSince = At what time did the resource last leave the
##      "Owner" state?  Measured in the number of seconds since the
##      epoch (00:00:00 UTC, Jan 1, 1970).
##    AvailTimeEstimate = Based on past history, this is an estimate
##      of how long the current period between "Owner" states will
##      last.
#STARTD_COMPUTE_AVAIL_STATS = False

##  If STARTD_COMPUTE_AVAIL_STATS = True, STARTD_AVAIL_CONFIDENCE sets
##  the confidence level of the AvailTimeEstimate.  By default, the
##  estimate is based on the 80th percentile of past values.
#STARTD_AVAIL_CONFIDENCE = 0.8

##  STARTD_MAX_AVAIL_PERIOD_SAMPLES limits the number of samples of
##  past available intervals stored by the startd to limit memory and
##  disk consumption.  Each sample requires 4 bytes of memory and
##  approximately 10 bytes of disk space.
#STARTD_MAX_AVAIL_PERIOD_SAMPLES = 100

##--------------------------------------------------------------------
##  condor_schedd
##--------------------------------------------------------------------
##  Where are the various shadow binaries installed?
SHADOW_LIST = SHADOW, SHADOW_PVM, SHADOW_STANDARD
SHADOW          = $(SBIN)/condor_shadow
SHADOW_PVM      = $(SBIN)/condor_shadow.pvm
SHADOW_STANDARD = $(SBIN)/condor_shadow.std

##  When the schedd starts up, it can place its address (IP and port)
##  into a file.  This way, tools running on the local machine don't
##  need to query the central manager to find the schedd.  This
##  feature can be turned off by commenting out this setting.
SCHEDD_ADDRESS_FILE = $(LOG)/.schedd_address

##  Additionally, a daemon may store its ClassAd on the local filesystem
##  as well as sending it to the collector.  This way, tools that need
##  information about a daemon do not have to contact the central manager
##  to get information about a daemon on the same machine.
##  This feature is necessary for Quill to work.
SCHEDD_DAEMON_AD_FILE = $(LOG)/.schedd_classad

##  How often should the schedd send an update to the central manager?
#SCHEDD_INTERVAL = 300

##  How long should the schedd wait between spawning each shadow?
#JOB_START_DELAY = 2

##  How often should the schedd send a keep alive message to any
##  startds it has claimed? (5 minutes)
#ALIVE_INTERVAL = 300

##  This setting controls the maximum number of times that a
##  condor_shadow process can have a fatal error (exception) before
##  the condor_schedd will simply relinquish the match associated with
##  the dying shadow.
#MAX_SHADOW_EXCEPTIONS = 5

##  Estimated virtual memory size of each condor_shadow process.
##  Specified in kilobytes.
SHADOW_SIZE_ESTIMATE = 1800
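##  Editor's note: a rough capacity sketch based on the estimate above
##  (the job count is hypothetical): at roughly 1800 KB per shadow, a
##  submit machine with 500 simultaneously running jobs would need on
##  the order of 500 * 1800 KB, i.e. about 900 MB of virtual memory,
##  for the shadows alone.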
##  The condor_schedd can renice the condor_shadow processes on your
##  submit machines.  How "nice" do you want the shadows? (1-19)
##  The higher the number, the lower priority the shadows have.
##  This feature can be disabled entirely by commenting it out.
SHADOW_RENICE_INCREMENT = 10

##  By default, when the schedd fails to start an idle job, it will
##  not try to start any other idle jobs in the same cluster during
##  that negotiation cycle.  This makes negotiation much more
##  efficient for large job clusters.  However, in some cases other
##  jobs in the cluster can be started even though an earlier job
##  can't.  For example, the jobs' requirements may differ, because of
##  different disk space, memory, or operating system requirements.
##  Or, machines may be willing to run only some jobs in the cluster,
##  because their requirements reference the jobs' virtual memory size
##  or other attribute.  Setting NEGOTIATE_ALL_JOBS_IN_CLUSTER to True
##  will force the schedd to try to start all idle jobs in each
##  negotiation cycle.  This will make negotiation cycles last longer,
##  but it will ensure that all jobs that can be started will be
##  started.
#NEGOTIATE_ALL_JOBS_IN_CLUSTER = False

##  This setting controls how often, in seconds, the schedd considers
##  periodic job actions given by the user in the submit file.
##  (Currently, these are periodic_hold, periodic_release, and
##  periodic_remove.)
#PERIODIC_EXPR_INTERVAL = 60

######
##  Queue management settings:
######
##  How often should the schedd truncate its job queue transaction
##  log? (Specified in seconds, once a day is the default.)
#QUEUE_CLEAN_INTERVAL = 86400

##  How often should the schedd commit "wall clock" run time for jobs
##  to the queue, so run time statistics remain accurate when the
##  schedd crashes? (Specified in seconds, once per hour is the
##  default.  Set to 0 to disable.)
#WALL_CLOCK_CKPT_INTERVAL = 3600

##  What users do you want to grant super user access to this job
##  queue? (These users will be able to remove other users' jobs).
##  By default, this only includes root.
QUEUE_SUPER_USERS = root, condor

##--------------------------------------------------------------------
##  condor_shadow
##--------------------------------------------------------------------
##  If the shadow is unable to read a checkpoint file from the
##  checkpoint server, it keeps trying only if the job has accumulated
##  more than MAX_DISCARDED_RUN_TIME seconds of CPU usage.  Otherwise,
##  the job is started from scratch.  Defaults to 1 hour.  This
##  setting is only used if USE_CKPT_SERVER (from above) is True.
#MAX_DISCARDED_RUN_TIME = 3600

##  Should periodic checkpoints be compressed?
#COMPRESS_PERIODIC_CKPT = False

##  Should vacate checkpoints be compressed?
#COMPRESS_VACATE_CKPT = False

##  Should we commit the application's dirty memory pages to swap
##  space during a periodic checkpoint?
#PERIODIC_MEMORY_SYNC = False

##  Should we write vacate checkpoints slowly?  If nonzero, this
##  parameter specifies the speed at which vacate checkpoints should
##  be written, in kilobytes per second.
#SLOW_CKPT_SPEED = 0

##  How often should the shadow update the job queue with job
##  attributes that periodically change?  Specified in seconds.
#SHADOW_QUEUE_UPDATE_INTERVAL = 15 * 60

##  Should the shadow wait to update certain job attributes for the
##  next periodic update, or should it update these attributes
##  immediately as they change?  Due to performance concerns of
##  aggressive updates to a busy condor_schedd, the default is True.
#SHADOW_LAZY_QUEUE_UPDATE = TRUE

##--------------------------------------------------------------------
##  condor_shadow.pvm
##--------------------------------------------------------------------
##  Where is the condor pvm daemon installed?
PVMD = $(SBIN)/condor_pvmd

##  Where is the condor pvm group server daemon installed?
PVMGS = $(SBIN)/condor_pvmgs

##--------------------------------------------------------------------
##  condor_starter
##--------------------------------------------------------------------
##  The condor_starter can renice the processes from remote Condor
##  jobs on your execute machines.  If you want this, uncomment the
##  following entry and set it to how "nice" you want the user
##  jobs. (1-19)  The larger the number, the lower priority the
##  process gets on your machines.
##  Note on Win32 platforms, this number needs to be greater than
##  zero (i.e. the job must be reniced) or the mechanism that
##  monitors CPU load on Win32 systems will give erratic results.
#JOB_RENICE_INCREMENT = 10

##  Should the starter do local logging to its own log file, or send
##  debug information back to the condor_shadow where it will end up
##  in the ShadowLog?
#STARTER_LOCAL_LOGGING = TRUE

##  If the UID_DOMAIN settings match on both the execute and submit
##  machines, but the UID of the user who submitted the job isn't in
##  the passwd file of the execute machine, the starter will normally
##  exit with an error.  Do you want the starter to just start up the
##  job with the specified UID, even if it's not in the passwd file?
#SOFT_UID_DOMAIN = FALSE

##--------------------------------------------------------------------
##  condor_procd
##--------------------------------------------------------------------
##
# the path to the procd binary
#
PROCD = $(SBIN)/condor_procd

# the path to the procd "address"
#   - on UNIX this will be a named pipe; we'll put it in the
#     $(LOCK) directory by default (note that multiple named pipes
#     will be created in this directory for when the procd responds
#     to its clients)
#   - on Windows, this will be a named pipe as well (but named pipes on
#     Windows are not even close to the same thing as named pipes on
#     UNIX); the name will be something like:
#         \\.\pipe\condor_procd
#
PROCD_ADDRESS = $(LOCK)/procd_pipe

# The procd currently uses a very simplistic logging system.  Since this
# log will not be rotated like other Condor logs, it is only recommended
# to set PROCD_LOG when attempting to debug a problem.  In other Condor
# daemons, turning on D_PROCFAMILY will result in that daemon logging
# all of its interactions with the ProcD.
#
#PROCD_LOG = $(LOG)/ProcLog

# This is the maximum period that the procd will use for taking
# snapshots (the actual period may be lower if a condor daemon registers
# a family for which it wants more frequent snapshots)
#
PROCD_MAX_SNAPSHOT_INTERVAL = 60

# On Windows, we send a process a "soft kill" via a WM_CLOSE message.
# This binary is used by the ProcD (and other Condor daemons if PRIVSEP
# is not enabled) to help when sending soft kills.
WINDOWS_SOFTKILL = $(SBIN)/condor_softkill

##--------------------------------------------------------------------
##  condor_submit
##--------------------------------------------------------------------
##  If you want condor_submit to automatically append an expression to
##  the Requirements expression or Rank expression of jobs at your
##  site, uncomment these entries.
#APPEND_REQUIREMENTS = (expression to append job requirements)
#APPEND_RANK = (expression to append job rank)

##  If you want expressions only appended for either standard or
##  vanilla universe jobs, you can uncomment these entries.  If any of
##  them are defined, they are used for the given universe, instead of
##  the generic entries above.
#APPEND_REQ_VANILLA = (expression to append to vanilla job requirements)
#APPEND_REQ_STANDARD = (expression to append to standard job requirements)
#APPEND_RANK_STANDARD = (expression to append to standard job rank)
#APPEND_RANK_VANILLA = (expression to append to vanilla job rank)

##  This can be used to define a default value for the rank expression
##  if one is not specified in the submit file.
#DEFAULT_RANK = (default rank expression for all jobs)

##  If you want universe-specific defaults, you can use the following
##  entries:
#DEFAULT_RANK_VANILLA = (default rank expression for vanilla jobs)
#DEFAULT_RANK_STANDARD = (default rank expression for standard jobs)

##  If you want condor_submit to automatically append expressions to
##  the job ClassAds it creates, you can uncomment and define the
##  SUBMIT_EXPRS setting.  It works just like the STARTD_EXPRS
##  described above with respect to ClassAd vs. config file syntax,
##  strings, etc.  One common use would be to have the full hostname
##  of the machine where a job was submitted placed in the job
##  ClassAd.  You would do this by uncommenting the following lines:
#MACHINE = "$(FULL_HOSTNAME)"
#SUBMIT_EXPRS = MACHINE

##  Condor keeps a buffer of recently-used data for each file an
##  application opens.  This macro specifies the default maximum number
##  of bytes to be buffered for each open file at the executing
##  machine.
#DEFAULT_IO_BUFFER_SIZE = 524288

##  Condor will attempt to consolidate small read and write operations
##  into large blocks.  This macro specifies the default block size
##  Condor will use.
#DEFAULT_IO_BUFFER_BLOCK_SIZE = 32768

##--------------------------------------------------------------------
##  condor_preen
##--------------------------------------------------------------------
##  Who should condor_preen send email to?
#PREEN_ADMIN = $(CONDOR_ADMIN)

##  What files should condor_preen leave in the spool directory?
VALID_SPOOL_FILES = job_queue.log, job_queue.log.tmp, history, \
                    Accountant.log, Accountantnew.log, \
                    local_univ_execute, .quillwritepassword, \
                    .pgpass

##  What files should condor_preen remove from the log directory?
INVALID_LOG_FILES = core

##--------------------------------------------------------------------
##  Java parameters:
##--------------------------------------------------------------------
##  If you would like this machine to be able to run Java jobs,
##  then set JAVA to the path of your JVM binary.  If you are not
##  interested in Java, there is no harm in leaving this entry
##  empty or incorrect.
JAVA = /usr/bin/java

##  Some JVMs need to be told the maximum amount of heap memory
##  to offer to the process.  If your JVM supports this, give
##  the argument here, and Condor will fill in the memory amount.
##  If left blank, your JVM will choose some default value,
##  typically 64 MB.  The default (-Xmx) works with the Sun JVM.
JAVA_MAXHEAP_ARGUMENT = -Xmx

##  JAVA_CLASSPATH_DEFAULT gives the default set of paths in which
##  Java classes are to be found.  Each path is separated by spaces.
##  If your JVM needs to be informed of additional directories, add
##  them here.  However, do not remove the existing entries, as Condor
##  needs them.
JAVA_CLASSPATH_DEFAULT = $(LIB) $(LIB)/scimark2lib.jar .

##  JAVA_CLASSPATH_ARGUMENT describes the command-line parameter
##  used to introduce a new classpath:
JAVA_CLASSPATH_ARGUMENT = -classpath

##  JAVA_CLASSPATH_SEPARATOR describes the character used to mark
##  one path element from another:
JAVA_CLASSPATH_SEPARATOR = :

##  JAVA_BENCHMARK_TIME describes the number of seconds for which
##  to run Java benchmarks.  A longer time yields a more accurate
##  benchmark, but consumes more otherwise useful CPU time.
##  If this time is zero or undefined, no Java benchmarks will be run.
JAVA_BENCHMARK_TIME = 2

##  If your JVM requires any special arguments not mentioned in
##  the options above, then give them here.
JAVA_EXTRA_ARGUMENTS =

##
##--------------------------------------------------------------------
##  Condor-G settings
##--------------------------------------------------------------------
##  Where is the GridManager binary installed?
GRIDMANAGER  = $(SBIN)/condor_gridmanager
GT2_GAHP     = $(SBIN)/gahp_server
GRID_MONITOR = $(SBIN)/grid_monitor.sh

##--------------------------------------------------------------------
##  Settings that control the daemon's debugging output:
##--------------------------------------------------------------------
##
##  Note that the Gridmanager runs as the User, not a Condor daemon, so
##  all users must have write permission to the directory that the
##  Gridmanager will use for its logfile.  Our suggestion is to create a
##  directory called GridLogs in $(LOG) with UNIX permissions 1777
##  (just like /tmp).
##  Another option is to use /tmp as the location of the GridManager log.
##
MAX_GRIDMANAGER_LOG = 1000000
GRIDMANAGER_DEBUG   =

#GRIDMANAGER_LOG = $(LOG)/GridLogs/GridmanagerLog.$(USERNAME)
GRIDMANAGER_LOG = /tmp/GridmanagerLog.$(USERNAME)

##--------------------------------------------------------------------
##  Various other settings that the Condor-G can use.
##--------------------------------------------------------------------
##  For grid-type gt2 jobs (pre-WS GRAM), limit the number of jobmanager
##  processes the gridmanager will let run on the headnode.  Letting too
##  many jobmanagers run causes severe load on the headnode.
GRIDMANAGER_MAX_JOBMANAGERS_PER_RESOURCE = 10

##  If we're talking to a Globus 2.0 resource, Condor-G will use the new
##  version of the GRAM protocol.  The first option is how often to check
##  the proxy on the submit side of things.  If the GridManager discovers
##  a new proxy, it will restart itself and use the new proxy for all
##  future jobs launched.  In seconds, and defaults to 10 minutes.
#GRIDMANAGER_CHECKPROXY_INTERVAL = 600

##  The GridManager will shut things down 3 minutes before losing contact
##  because of an expired proxy.
##  In seconds, and defaults to 3 minutes.
#GRIDMANAGER_MINIMUM_PROXY_TIME = 180

##  Condor requires that each submitted job be designated to run under a
##  particular "universe".  Condor-G is active when jobs are marked as
##  "GLOBUS" universe jobs.  The universe of a job is set in the submit
##  file with the 'universe = GLOBUS' line.
##
##  If no universe is specified in the submit file, Condor must pick one
##  for the job to use.  By default, it chooses the "standard" universe.
##  The default can be overridden in the config file with the
##  DEFAULT_UNIVERSE setting, which is a string to insert into a job
##  submit description if the job does not try to define its own
##  universe.
##
#DEFAULT_UNIVERSE = grid

#
# CRED_MIN_TIME_LEFT is the first pass at making sure that Condor-G
# does not submit your job without it having enough time left for the
# job to finish.  For example, if you have a job that runs for 20
# minutes, and you might spend 40 minutes in the queue, it's a bad idea
# to submit with less than an hour left before your proxy expires.
# 2 hours seemed like a reasonable default.
#
CRED_MIN_TIME_LEFT = 120

##
##  The GridMonitor allows you to submit many more jobs to a GT2 GRAM
##  server than is normally possible.
ENABLE_GRID_MONITOR = TRUE

##
##  The location of the wrapper for invoking the
##  Condor GAHP server
##
CONDOR_GAHP = $(SBIN)/condor_c-gahp
CONDOR_GAHP_WORKER = $(SBIN)/condor_c-gahp_worker_thread

##
##  The Condor GAHP server has its own log.  Like the Gridmanager, the
##  GAHP server is run as the User, not a Condor daemon, so all users
##  must have write permission to the directory used for the logfile.
##  Our suggestion is to create a directory called GridLogs in $(LOG)
##  with UNIX permissions 1777 (just like /tmp).
##  Another option is to use /tmp as the location of the CGAHP log.
##
MAX_C_GAHP_LOG = 1000000
#C_GAHP_LOG = $(LOG)/GridLogs/CGAHPLog.$(USERNAME)
C_GAHP_LOG = /tmp/CGAHPLog.$(USERNAME)
C_GAHP_WORKER_THREAD_LOG = /tmp/CGAHPWorkerLog.$(USERNAME)

##
##  The location of the wrapper for invoking the
##  GT3 GAHP server
##
GT3_GAHP = $(SBIN)/gt3_gahp

##
##  The location of GT3 files.  This should normally be lib/gt3
##
GT3_LOCATION = $(LIB)/gt3

##
##  The location of the wrapper for invoking the
##  GT4 GAHP server
##
GT4_GAHP = $(SBIN)/gt4_gahp

##
##  The location of GT4 files.  This should normally be lib/gt4
##
GT4_LOCATION = $(LIB)/gt4

##
##  gt4 gram requires a gridftp server to perform file transfers.
##  If GRIDFTP_URL_BASE is set, then Condor assumes there is a gridftp
##  server set up at that URL suitable for its use.  Otherwise, Condor
##  will start its own gridftp servers as needed, using the binary
##  pointed at by GRIDFTP_SERVER.  GRIDFTP_SERVER_WRAPPER points to a
##  wrapper script needed to properly set the path to the gridmap file.
##
#GRIDFTP_URL_BASE = gsiftp://$(FULL_HOSTNAME)
GRIDFTP_SERVER = $(LIBEXEC)/globus-gridftp-server
GRIDFTP_SERVER_WRAPPER = $(LIBEXEC)/gridftp_wrapper.sh

##
##  Location of the PBS/LSF gahp and its associated binaries
##
GLITE_LOCATION = $(LIB)/glite
PBS_GAHP = $(GLITE_LOCATION)/bin/batch_gahp
LSF_GAHP = $(GLITE_LOCATION)/bin/batch_gahp

##
##  The location of the wrapper for invoking the Unicore GAHP server
##
UNICORE_GAHP = $(SBIN)/unicore_gahp

##
##  The location of the wrapper for invoking the NorduGrid GAHP server
##
NORDUGRID_GAHP = $(SBIN)/nordugrid_gahp

##  Condor-G and CredD can use MyProxy to refresh GSI proxies which are
##  about to expire.
#MYPROXY_GET_DELEGATION = /path/to/myproxy-get-delegation

##
##--------------------------------------------------------------------
##  condor_credd credential management daemon
##--------------------------------------------------------------------
##  Where is the CredD binary installed?
CREDD = $(SBIN)/condor_credd

##  When the credd starts up, it can place its address (IP and port)
##  into a file.  This way, tools running on the local machine don't
##  need an additional "-n host:port" command line option.  This
##  feature can be turned off by commenting out this setting.
CREDD_ADDRESS_FILE = $(LOG)/.credd_address

##  Specify a remote credd server here,
#CREDD_HOST = $(CONDOR_HOST):$(CREDD_PORT)

##  CredD startup arguments
##  Start the CredD on a well-known port.  Uncomment to simplify
##  connecting to a remote CredD.  Note that this interface may change
##  in a future release.
CREDD_PORT = 9620
CREDD_ARGS = -p $(CREDD_PORT) -f

##  CredD daemon debugging log
CREDD_LOG = $(LOG)/CredLog
CREDD_DEBUG = D_FULLDEBUG
MAX_CREDD_LOG = 4000000

##  The credential owner submits the credential.  This list specifies
##  other users who are also permitted to see all credentials.  Defaults
##  to root on Unix systems, and Administrator on Windows systems.
#CRED_SUPER_USERS =

##  Credential storage location.  This directory must exist
##  prior to starting condor_credd.  It is highly recommended to
##  restrict access permissions to _only_ the directory owner.
CRED_STORE_DIR = $(LOCAL_DIR)/cred_dir

##  Index file path of saved credentials.
##  This file will be automatically created if it does not exist.
#CRED_INDEX_FILE = $(CRED_STORE_DIR)/cred-index

##  condor_credd will attempt to refresh credentials when their
##  remaining lifespan is less than this value.  Units = seconds.
#DEFAULT_CRED_EXPIRE_THRESHOLD = 3600

##  condor_credd periodically checks the remaining lifespan of stored
##  credentials, at this interval.
#CRED_CHECK_INTERVAL = 60

##
##--------------------------------------------------------------------
##  Stork data placement server
##--------------------------------------------------------------------
##  Where is the Stork binary installed?
STORK = $(SBIN)/stork_server

##  When Stork starts up, it can place its address (IP and port)
##  into a file.  This way, tools running on the local machine don't
##  need an additional "-n host:port" command line option.  This
##  feature can be turned off by commenting out this setting.
STORK_ADDRESS_FILE = $(LOG)/.stork_address

##  Specify a remote Stork server here,
#STORK_HOST = $(CONDOR_HOST):$(STORK_PORT)

##  STORK_LOG_BASE specifies the basename for heritage Stork log files.
##  Stork uses this macro to create the following output log files:
##  $(STORK_LOG_BASE): Stork server job queue classad collection
##  journal file.
##  $(STORK_LOG_BASE).history: Used to track completed jobs.
##  $(STORK_LOG_BASE).user_log: User level log, also used by DAGMan.
STORK_LOG_BASE = $(LOG)/Stork

##  Modern Condor DaemonCore logging feature.
STORK_LOG = $(LOG)/StorkLog
STORK_DEBUG = D_FULLDEBUG
MAX_STORK_LOG = 4000000

##  Stork startup arguments
##  Start Stork on a well-known port.  Uncomment to simplify
##  connecting to a remote Stork.  Note that this interface may change
##  in a future release.
#STORK_PORT = 34048
STORK_PORT = 9621
STORK_ARGS = -p $(STORK_PORT) -f -Serverlog $(STORK_LOG_BASE)

##  Stork environment.  Stork modules may require external programs and
##  shared object libraries.  These are located using the PATH and
##  LD_LIBRARY_PATH environments.  Further, some modules may require
##  further specific environments.  By default, Stork inherits a full
##  environment when invoked from condor_master or the shell.  If the
##  default environment is not adequate for all Stork modules, specify
##  a replacement environment here.  This environment will be set by
##  condor_master before starting Stork, but does not apply if Stork is
##  started directly from the command line.
#STORK_ENVIRONMENT = TMP=/tmp;CONDOR_CONFIG=/special/config;PATH=/lib

##  Limits the number of concurrent data placements handled by Stork.
#STORK_MAX_NUM_JOBS = 5

##  Limits the number of retries for a failed data placement.
#STORK_MAX_RETRY = 5

##  Limits the run time for a data placement job, after which the
##  placement is considered failed.
#STORK_MAXDELAY_INMINUTES = 10

##  Temporary credential storage directory used by Stork.
#STORK_TMP_CRED_DIR = /tmp

##  Directory containing Stork modules.
#STORK_MODULE_DIR = $(LIBEXEC)

##
##--------------------------------------------------------------------
##  Quill Job Queue Mirroring Server
##--------------------------------------------------------------------
##  Where is the Quill binary installed and what arguments should be
##  passed?
QUILL = $(SBIN)/condor_quill
#QUILL_ARGS =

# Where is the log file for the quill daemon?
QUILL_LOG = $(LOG)/QuillLog

# The identification and location of the quill daemon for local clients.
QUILL_ADDRESS_FILE = $(LOG)/.quill_address

# If this is set to true, then the rest of the QUILL arguments must be
# defined for quill to function.  If it is False or left undefined, then
# quill will not be consulted by either the scheduler or the tools, but
# in the case of a remote quill query where the local client has quill
# turned off, but the remote client has quill turned on, things will
# still function normally.
#QUILL_ENABLED = TRUE

# This will be the name of a quill daemon using this config file.  This
# name should not conflict with any other quill name--or schedd name.
#QUILL_NAME = quill@postgresql-server.machine.com

# The PostgreSQL server requires usernames that can manipulate tables.
# This will be the username associated with this instance of the quill
# daemon mirroring a schedd's job queue.  Each quill daemon must have a
# unique username associated with it, otherwise multiple quill daemons
# will corrupt the data held under an identical user name.
#QUILL_DB_NAME = name_of_db

# The required password for the DB user which quill will use to read
# information from the database about the queue.
#QUILL_DB_QUERY_PASSWORD = foobar

# The machine and port of the postgres server.
#QUILL_DB_IP_ADDR = machine.domain.com:5432

# Polling period, in seconds, for when quill reads transactions out of
# the schedd's job queue log file and puts them into the database.
#QUILL_POLLING_PERIOD = 10

# Number of days that historical information about previous jobs will
# be kept.  It defaults to 180 days.
#QUILL_HISTORY_DURATION = 180

# Number of hours between scans of QUILL_HISTORY_DURATION.
#QUILL_HISTORY_CLEANING_INTERVAL = 24

# Allows or disallows a remote query to the quill daemon and database
# which is reading this log file.  Defaults to true.
#QUILL_IS_REMOTELY_QUERYABLE = TRUE

# Add debugging flags to here if you need to debug quill for some
# reason.
#QUILL_DEBUG = D_FULLDEBUG

##
##--------------------------------------------------------------------
##  Database Management Daemon settings
##--------------------------------------------------------------------
##  Where is the DBMSd binary installed and what arguments should be
##  passed?
DBMSD = $(SBIN)/condor_dbmsd
DBMSD_ARGS = -f

# Where is the log file for the DBMSd daemon?
DBMSD_LOG = $(LOG)/DbmsdLog
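##  Editor's note: a purely illustrative sketch of a minimal Quill
##  setup using the parameters above (the daemon name, database name,
##  password, and database host are all hypothetical):
#QUILL_ENABLED = TRUE
#QUILL_NAME = quill@submit.example.edu
#QUILL_DB_NAME = quill_db
#QUILL_DB_QUERY_PASSWORD = changeme
#QUILL_DB_IP_ADDR = dbserver.example.edu:5432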
##
##--------------------------------------------------------------------
##  VM Universe Parameters
##--------------------------------------------------------------------
##  Where is the Condor VM-GAHP installed? (Required)
#VM_GAHP_SERVER = $(SBIN)/condor_vm-gahp

##  Where is the configuration file for the Condor VM-GAHP? (Required)
#VM_GAHP_CONFIG = /full/path/to/condor_vmgahp_config.vmware

##  If the VM-GAHP is to have its own log, define
##  the location of the log file.
##  Like the Gridmanager and C-GAHP, the VM-GAHP is run as the User,
##  not a Condor daemon, so all users must have write permission
##  to the directory used for the log file.  Our suggestion is to
##  create a directory called VMGahpLogs in $(LOG) with
##  UNIX permissions 1777 (just like /tmp).
##  Another option is to use /tmp as the location of the VM-GAHP log.
##
##  Optionally, if you do NOT define VM_GAHP_LOG, logs of VM-GAHP will
##  be stored in the starter's log file.
##  However, on Windows machines you must always define VM_GAHP_LOG.
#
#VM_GAHP_LOG = $(LOG)/VMGahpLogs/VMGahpLog.$(USERNAME)
VM_GAHP_LOG = /tmp/VMGAHPLog.$(USERNAME)
MAX_VM_GAHP_LOG = 1000000
#VM_GAHP_DEBUG = D_FULLDEBUG

##  What kind of virtual machine program will be used for
##  the VM universe?
##  The two options are vmware and xen. (Required)
#VM_TYPE = vmware

##  How much memory can be used for the VM universe? (Optional)
##  If you don't define this parameter, the value will be
##  obtained from the VM-GAHP.
##
##  If the defined value is larger than VM_MAX_MEMORY defined
##  in the VM-GAHP configuration file,
##  VM_MAX_MEMORY will be used instead of this value.
##  If the defined value is smaller than VM_MAX_MEMORY defined
##  in the VM-GAHP configuration file, this value will be used.
#VM_MEMORY = 128

##  Want to support networking for the VM universe?
##  Default value is FALSE.
#VM_NETWORKING = FALSE

##  By default, the number of possible virtual machines is the same
##  as NUM_CPUS.
##  Since too many virtual machines can cause the system to be too slow
##  and lead to unexpected problems, limit the number of running
##  virtual machines on this machine with
#VM_MAX_NUMBER = 2

##  When a VM universe job is started, a status command is sent
##  to the VM-GAHP to see if the job is finished.
##  If the interval between checks is too short, it will consume
##  too much of the CPU.  If the VM-GAHP fails to get status 5 times
##  in a row, an error will be reported to the startd, and then the
##  startd will check the availability of the VM universe.
##  Default value is 60 seconds and minimum value is 30 seconds.
#VM_STATUS_INTERVAL = 60

##  How long will we wait for a request sent to the VM-GAHP to be
##  completed?  If a request is not completed within the timeout, an
##  error will be reported to the startd, and then the startd will
##  check the availability of the vm universe.  Default value is 5
##  minutes.
#VM_GAHP_REQ_TIMEOUT = 300

##  When VMware or Xen causes an error, the startd will disable the
##  VM universe.  However, because some errors are just transient,
##  we will test once more whether the vm universe is still
##  unavailable after some time.  By default, the startd will recheck
##  the vm universe after 10 minutes.  If the test also fails, the vm
##  universe will be disabled.
#VM_RECHECK_INTERVAL = 600

##  Usually, when we suspend a VM, the memory being used by the VM
##  will be saved into a file and then freed.
##  However, when we use soft suspend, neither saving nor memory
##  freeing will occur.
##  For VMware, we send SIGSTOP to a process for the VM in order to
##  stop the VM temporarily and send SIGCONT to resume the VM.
##  For Xen, we pause the CPU.  Pausing the CPU doesn't save the
##  memory of the VM into a file.  It only stops the execution of the
##  VM temporarily.
#VM_SOFT_SUSPEND = TRUE
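##  Editor's note: an illustrative VMware-based combination of the
##  settings above (the config path and the values are hypothetical
##  and must match your site):
#VM_TYPE = vmware
#VM_GAHP_SERVER = $(SBIN)/condor_vm-gahp
#VM_GAHP_CONFIG = /full/path/to/condor_vmgahp_config.vmware
#VM_MEMORY = 256
#VM_NETWORKING = FALSE
#VM_MAX_NUMBER = 2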
## If "VM_UNIV_NOBODY_USER" is defined, a VM universe job will run ## as the user defined in "VM_UNIV_NOBODY_USER" instead of "nobody". ## ## Notice: In VMware VM universe, "nobody" can not create a VMware VM. ## So we need to define "VM_UNIV_NOBODY_USER" with a regular user. ## For VMware, the user defined in "VM_UNIV_NOBODY_USER" must have a ## home directory. So SOFT_UID_DOMAIN doesn't work for VMware VM universe job. ## If neither "VM_UNIV_NOBODY_USER" nor "SLOTx_USER" is defined, ## VMware VM universe job will run as "condor" instead of "nobody". ## As a result, the preference of local users for a VMware VM universe job ## which comes from the different UID domain is ## "VM_UNIV_NOBODY_USER" -> "SLOTx_USER" -> "condor". #VM_UNIV_NOBODY_USER = login name of a user who has home directory ## If Condor runs as root and "ALWAYS_VM_UNIV_USE_NOBODY" is set to TRUE, ## all VM universe jobs will run as a user defined in "VM_UNIV_NOBODY_USER". #ALWAYS_VM_UNIV_USE_NOBODY = FALSE ####################################################################### # # special settings for Pegasus WMS package # START = True SUSPEND = False CONTINUE = True PREEMPT = False KILL = False DAGMAN_LOG_ON_NFS_IS_ERROR = FALSE pegasus-wms_4.0.1+dfsg/release-tools/nmi/submit-host/0000755000175000017500000000000011757531667021663 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/release-tools/nmi/submit-host/remote-post.scp0000644000175000017500000000006211757531137024636 0ustar ryngeryngemethod = scp scp_file = $(SUBMITDIR)/remote-post pegasus-wms_4.0.1+dfsg/release-tools/nmi/submit-host/remote-declare.scp0000644000175000017500000000006511757531137025253 0ustar ryngeryngemethod = scp scp_file = $(SUBMITDIR)/remote-declare pegasus-wms_4.0.1+dfsg/release-tools/nmi/submit-host/remote-post0000755000175000017500000000014111757531137024053 0ustar ryngerynge#!/bin/bash set -e TOP_DIR=`pwd` cd pegasus/build-results/ tar czf $TOP_DIR/results.tar.gz * pegasus-wms_4.0.1+dfsg/release-tools/nmi/submit-host/platform-post0000755000175000017500000000127511757531137024415 0ustar ryngerynge#!/bin/bash set -e if [ "x$_NMI_STEP_FAILED" != "x" ]; then echo "Previous step failed, exiting\n" >&2 exit 1 fi TOP_DIR=`pwd` rm -rf build-results mkdir -p build-results cd build-results tar xzf ../results.tar.gz echo echo "Files in result set:" find . -type f -exec ls -l -h {} \; # use the worker package to figure out target version DOWNLOAD_SECTION=`ls pegasus-worker-*.tar.gz | head -n 1 | sed -r 's/pegasus-worker-([0-9]+\.[0-9]+)\..*/\1/'` TARGET_DIR=/data/webspace/download.pegasus.isi.edu/wms/download/$DOWNLOAD_SECTION/nightly/ # upload to pegasus.isi.edu echo "Uploading to $TARGET_DIR" scp -r * nmibuild@download.pegasus.isi.edu:$TARGET_DIR 2>&1 echo "Results uploaded!" pegasus-wms_4.0.1+dfsg/release-tools/nmi/submit-host/build-pegasus.scp0000644000175000017500000000006411757531137025126 0ustar ryngeryngemethod = scp scp_file = $(SUBMITDIR)/build-pegasus pegasus-wms_4.0.1+dfsg/release-tools/nmi/submit-host/submit-build0000755000175000017500000000452411757531137024206 0ustar ryngerynge#!/bin/bash VERSION="$1" if [ "x$VERSION" = "x" ]; then echo "Please specify a SVN version to checkout. Examples:" echo " ./submit-build trunk" echo " ./submit-build branches/2.4.1" echo " ./submit-build tags/2.4.0" exit 1 fi EMAIL="$2" if [ "x$EMAIL" = "x" ]; then echo "Please specify a notification email address as second arguent." 
echo "Example:" echo " ./submit-build trunk rynge@isi.edu" exit 1 fi # nmi env export _NMI_HOME=$HOME export _NMI_HOSTNAME=$HOSTNAME export _NMI_SUBMITDIR=$PWD cat >cmdfile <pegasus.svn <&1", 1, 0); cmd("java -version 2>&1", 1, 0); cmd("ant -version 2>&1", 1, 0); cmd("python -V 2>&1", 1, 0); cmd("(rpm -qa | grep python) 2>&1", 1, 0); cmd("(dpkg -l | grep python) 2>&1", 1, 0); # tmp dir $ENV{'TMPDIR'} = "$top_dir/tmp"; logmsg("TMPDIR set to " . $ENV{'TMPDIR'}); cmd("mkdir -p $top_dir/tmp", 1, 1); } sub build_binary { announce("build mappper package"); cd($src_dir); cmd("ant clean", 1, 0); cmd("rm -rf dist/*", 1, 0); cmd("mkdir -p build-results", 1, 1); cmd("ant dist", 1, 1); cmd("mv dist/*.tar.gz build-results/", 1, 1); cmd("cd build-results/ && ls -l -h", 1, 1); } sub build_worker { announce("building worker package"); cd($src_dir); cmd("ant clean", 1, 0); cmd("rm -rf dist/*", 1, 0); cmd("mkdir -p build-results", 1, 1); cmd("ant dist-worker", 1, 1); cmd("mv dist/*.tar.gz build-results/", 1, 1); cmd("cd build-results/ && ls -l -h", 1, 1); } sub build_wms { announce("building wms package"); cd("$top_dir/public"); cmd("ls -l", 1, 1); my $condor_version = `ls *.gz | sed 's/condor-//' | sed 's/-.*//' | head -n 1`; chomp($condor_version); cmd("rm -rf native *.deb *.rpm *.sha1 *.md5 condor_examples condor_tests", 1, 1); cmd("rm -f *unstripped*", 1, 1); cmd("rm -f *debug*", 1, 1); my $num_tars = `ls *.tar.gz | wc -l`; # some platforms only have dynamic builds, try to remove the static ones if ($num_tars == 2) { cmd("ls | grep -v dynamic | xargs rm", 1, 1); } $num_tars = `ls *.tar.gz | wc -l`; if ($num_tars != 1) { cmd("ls -l -h", 1, 1); die("Too many tars left: $num_tars"); } cmd("ls -l -h", 1, 1); cmd("tar xzf *.tar.gz", 1, 1); cmd("rm *.tar.gz", 1, 1); cmd("ls -l -h", 1, 1); # untar our binary package and mv condor into it cd("$src_dir/dist"); cmd("tar xzf ../build-results/pegasus-binary-*.tar.gz", 1, 1); cmd("ls -l -h", 1, 1); my $pegasus_version = `ls | grep pegasus | sed 's/.*-//'`; chomp($pegasus_version); logmsg("Pegasus version is $pegasus_version"); my $pegasus_system = `$src_dir/release-tools/getsystem/getsystem`; chomp($pegasus_system); logmsg("Pegasus system is $pegasus_system"); cmd("mv pegasus-$pegasus_version pegasus-wms-$pegasus_version", 1, 1); cmd("mv $top_dir/public/condor-* pegasus-wms-$pegasus_version/condor"); cd("pegasus-wms-$pegasus_version"); cmd("ls -l -h", 1, 1); cmd("cp $src_dir/release-tools/nmi/condor* condor/etc/", 1, 1); #open(SETUP, ">>setup.sh") or die("Unable to open setup.sh"); #print SETUP "\n# condor setup\n"; #print SETUP "PATH=\$PEGASUS_HOME/condor/bin:\$PEGASUS_HOME/condor/sbin:\$PATH\n"; #print SETUP "export PATH\n"; #print SETUP "CONDOR_CONFIG=\$PEGASUS_HOME/condor/etc/condor_config\n"; #print SETUP "export CONDOR_CONFIG\n"; #close(SETUP); #open(SETUP, ">>setup.csh") or die("Unable to open setup.csh"); #print SETUP "\n# condor setup\n"; #print SETUP "setenv PATH \$PEGASUS_HOME/condor/bin:\$PEGASUS_HOME/condor/sbin:\$PATH\n"; #print SETUP "setenv CONDOR_CONFIG \$PEGASUS_HOME/condor/etc/condor_config\n"; #close(SETUP); # work dirs cmd("mkdir -p condor/var/execute", 1, 1); cmd("mkdir -p condor/var/log", 1, 1); cmd("mkdir -p condor/var/spool", 1, 1); # tar up the result cd(".."); cmd("tar czf ../build-results/pegasus-wms-binary-$pegasus_version-$pegasus_system.tar.gz" . 
" pegasus-wms-$pegasus_version", 1, 1); cmd("rm -rf pegasus*"); cd("../build-results/"); cmd("ls -l -h", 1, 1); } sub clean_sources { announce("cleaning source tree and condor inputs"); cd("$top_dir"); cmd("rm -rf public", 1, 1); cd("$src_dir"); cmd("rm -rf bin build contrib dist doc etc lib libexec man" . " share sql src var", 1, 1); } sub build_deb { announce("building debian package"); cmd("mkdir -p $top_dir/tmp"); cd("$top_dir/tmp/"); # we need fakeroot to get file ownership correct in the deb cmd("wget -nv http://pegasus.isi.edu/wms/download/tooling/fakeroot-1.12.4.tar.gz 2>&1", 1, 1); cmd("tar xzf fakeroot-1.12.4.tar.gz", 1, 1); cd("fakeroot-1.12.4"); cmd("./configure --prefix=$top_dir/tmp/fakeroot-install && make && make install 2>&1", 1, 1); cd("$top_dir/tmp/"); cmd("$top_dir/tmp/fakeroot-install/bin/fakeroot" . " $src_dir/release-tools/nmi/deb/build-pegasus-deb" . " $src_dir/build-results/pegasus-binary-*.tar.gz", 1, 1); cmd("rm -rf fakeroot*", 1, 1); cmd("find . -type f -exec ls -l -h {} \\;", 1, 1); if ( -e "debian" ) { cmd("mv debian $src_dir/build-results/", 1, 1); } if ( -e "ubuntu" ) { cmd("mv ubuntu $src_dir/build-results/", 1, 1); } } sub build_rpm { announce("building rpm package"); cmd("df -H"); cmd("mkdir -p $top_dir/tmp"); cd("$top_dir/tmp/"); # rpmbuild has TMPDIR bug cmd("(unset TMPDIR &&" . " $src_dir/release-tools/nmi/rpm/build-pegasus-rpm" . " $src_dir/build-results/pegasus-binary-*.tar.gz)", 1, 1); cmd("find . -type f -exec ls -l -h {} \\;", 1, 1); if ( -e "rhel" ) { cmd("mv rhel $src_dir/build-results/", 1, 1); } else { die("RPM was built - but where did it go?"); } } sub test_binary { my $rc = 0; announce("Installing the binary package"); cmd("mkdir -p $top_dir/binary-test/", 1, 1); cd("$top_dir/binary-test"); cmd("tar xzf $src_dir/build-results/pegasus-binary-*.tar.gz", 1, 1); my $dir = `ls`; chomp($dir); # make sure we are not missing any expected files announce("Checking install for missing files"); my @expected_files = ( 'bin/pegasus-cleanup', 'bin/pegasus-cluster', 'bin/pegasus-create-dir', 'bin/pegasus-keg', 'bin/pegasus-kickstart', 'bin/pegasus-transfer', ); foreach my $efile (@expected_files) { my $msg = "Checking install for $efile..."; if ( -e "$top_dir/binary-test/$dir/$efile" ) { $msg = sprintf("%-50s %-10s", $msg, "OK"); } else { $msg = sprintf("%-50s %-10s", $msg, "MISSING"); $rc++; } logmsg($msg); } announce("cleaning up"); cd($top_dir); cmd("rm -rf binary-test", 1, 0); if ($rc != 0) { die("Some tests failed"); } } sub test_worker { my $rc = 0; announce("Installing the worker package"); cmd("mkdir -p $top_dir/worker-test/", 1, 1); cd("$top_dir/worker-test"); cmd("tar xzf $src_dir/build-results/pegasus-worker-*.tar.gz", 1, 1); my $dir = `ls`; chomp($dir); # make sure we are not missing any expected files announce("Checking install for missing files"); my @expected_files = ( 'bin/pegasus-cleanup', 'bin/pegasus-cluster', 'bin/pegasus-create-dir', 'bin/pegasus-keg', 'bin/pegasus-kickstart', 'bin/pegasus-transfer', ); foreach my $efile (@expected_files) { my $msg = "Checking install for $efile..."; if ( -e "$top_dir/worker-test/$dir/$efile" ) { $msg = sprintf("%-50s %-10s", $msg, "OK"); } else { $msg = sprintf("%-50s %-10s", $msg, "MISSING"); $rc++; } logmsg($msg); } announce("cleaning up"); cd($top_dir); cmd("rm -rf worker-test", 1, 0); if ($rc != 0) { die("Some tests failed"); } } sub test_wms { my $rc = 0; announce("fake globus location for Pegasus to be happy"); $ENV{'GLOBUS_LOCATION'} = "$top_dir/globus"; cmd("mkdir -p \$GLOBUS_LOCATION/bin", 1, 
1); cmd("touch \$GLOBUS_LOCATION/bin/globus-url-copy", 1, 1); cmd("touch \$GLOBUS_LOCATION/bin/grid-proxy-info", 1, 1); announce("installing the wms package"); cmd("mkdir -p $top_dir/wms/", 1, 1); cd("$top_dir/wms"); cmd("tar xzf $src_dir/build-results/pegasus-wms-*", 1, 1); my $dir = `ls`; chomp($dir); my $install_dir = "$top_dir/wms/$dir"; cd($install_dir); # env $ENV{'CONDOR_CONFIG'} = "$install_dir/condor/etc/condor_config"; $ENV{'PATH'} = "$install_dir/bin:$install_dir/condor/bin:$install_dir/condor/sbin:" . $ENV{'PATH'}; announce("setting up condor"); cmd("perl -p -i -e \"s:^RELEASE_DIR.*:RELEASE_DIR=$install_dir/condor:\"" . " $install_dir/condor/etc/condor_config", 1, 1); cmd("perl -p -i -e \"s:^CONDOR_ADMIN.*:CONDOR_ADMIN=rynge\@isi.edu:\"" . " $install_dir/condor/etc/condor_config", 1, 1); announce("checking condor"); $rc += cmd("condor_version", 1, 0); $rc += cmd("condor_config_val RELEASE_DIR", 1, 0); announce("starting master"); $rc += cmd("condor_master", 1, 0); # wait for the daemons to register sleep(120); announce("simple condor tests"); $rc += cmd("condor_status", 1, 0); $rc += cmd("condor_q", 1, 0); #announce("black diamond test"); #$rc += cmd("wget -nv http://yggdrasil.isi.edu/~rynge/pegasus-testing/blackdiamond.tar.gz" . # " && tar xzf blackdiamond.tar.gz", 1, 0); #cd("blackdiamond"); #$rc += cmd(". \$PEGASUS_HOME/setup.sh && ./submit-local 2>&1", 1, 0); #sleep(180); #$dir = `(cd workdir-localcondor/pegasusexec/*/pegasus/black-diamond/*/ && pwd)`; #chomp($dir); #if ( -e "$dir/f.d" ) { # logmsg("black diamond test successful!"); #} #else { # logmsg("black diamond test failed!"); # $rc++; # $dir = `(cd dags/*/pegasus/black-diamond/*/ && pwd)`; # chomp($dir); # cd($dir); # cmd(". \$PEGASUS_HOME/setup.sh && condor_q", 1, 0); # cmd("ls -l $dir/", 1, 0); # cmd(". \$PEGASUS_HOME/setup.sh && pegasus-analyzer -q", 1, 0); #} announce("stopping condor"); cmd("condor_off -master", 1, 0); sleep(20); announce("showing logs"); cd("$install_dir/condor/var/log"); cmd("find $install_dir/condor"); cmd("for LOG in `ls *Log`; do echo; echo \"## cat \$LOG\"; cat \$LOG; done", 1, 1); announce("cleaning up"); cd($top_dir); cmd("rm -rf wms", 1, 0); if ($rc != 0) { die("Some tests failed"); } } sub cmd { my $cmd = shift; my $log = shift; my $fatal = shift; my $rc = 0; if (!defined($log) or $log != 0) { $log = 1; } if (!defined($fatal) or $fatal != 0) { $fatal = 1; } if ($log) { logmsg($cmd); } system("$cmd"); $rc = $? >> 8; if ($fatal and $? & 127) { print "\n\n"; printf "'$cmd' died with signal %d, %s coredump\n", ($? & 127), ($? & 128) ? 'with' : 'without'; fatal_error("\n"); } if ($rc != 0) { if ($fatal) { print "\n\n"; fatal_error("'$cmd' failed with return code $rc\n"); } else { logmsg("Ignoring return code $rc. 
Continuing..."); } } print "\n"; return ($rc); } sub cd { my $dir = shift; logmsg("cd $dir"); chdir($dir) or fatal_error("Unable to cd to $dir"); } sub announce { my $m = shift; print "\n"; print "################################################################\n"; print "#\n"; print "# $m\n"; print "#\n"; print "\n"; } sub logmsg { my $m = shift; my $date = `/bin/date +'\%y\%m\%d \%H:\%M'`; chomp($date); print "[$date]: $m\n"; } sub fatal_error { my $msg = shift; print STDERR "$msg\n\n"; exit(1); } pegasus-wms_4.0.1+dfsg/release-tools/nmi/submit-host/condor.nmi0000644000175000017500000000056611757531137023653 0ustar ryngeryngemethod = nmi # this is the pinned Condor 7.6.5 release input_runids = 397396 # map some platforms to others # see: https://nmi.cs.wisc.edu/node/1804 # http://condor-wiki.cs.wisc.edu/index.cgi/wiki?p=PortsTable platforms = x86_64_deb_6.0-updated, x86_macos_10.4::x86_64_macos_10.5-updated, x86_macos_10.4::x86_64_macos_10.6, x86_64_rhap_5, x86_macos_10.4, x86_rhap_5 pegasus-wms_4.0.1+dfsg/release-tools/nmi/submit-host/remote-declare0000755000175000017500000000172611757531137024477 0ustar ryngerynge#!/usr/bin/env perl use strict; # platforms with special needs my $lite = 0; if ( $ENV{NMI_PLATFORM} =~ /rhas_4/ ) { $lite = 1; } my $wms = 1; if ( $ENV{NMI_PLATFORM} =~ /deb_5/ or $ENV{NMI_PLATFORM} =~ /x86_deb_6/ or $ENV{NMI_PLATFORM} =~ /rhap_6/ ) { $wms = 0; } open( LIST, ">tasklist.nmi" ) || die "Can't open tasklist.nmi: $!\n"; # builds if ( ! $lite ) { print LIST "build-binary\n"; if ( $wms ) { print LIST "build-wms\n"; } } print LIST "build-worker\n"; # clean print LIST "clean-sources\n"; # rpms/ debs #if ( ! $lite ) { # if ($ENV{NMI_PLATFORM} =~ /deb/ || $ENV{NMI_PLATFORM} =~ /ubuntu/) { # print LIST "build-deb\n"; # } # if ($ENV{NMI_PLATFORM} =~ /rhas/ || $ENV{NMI_PLATFORM} =~ /rhap/) { # print LIST "build-rpm\n"; # } #} # tests if ( ! $lite ) { print LIST "test-binary\n"; if ( $wms ) { print LIST "test-wms\n"; } } print LIST "test-worker\n"; close( LIST ); pegasus-wms_4.0.1+dfsg/release-tools/nmi/condor_config.local0000644000175000017500000000003311757531137023216 0ustar ryngerynge# Empty Condor Local Configpegasus-wms_4.0.1+dfsg/release-tools/update-pegasus-yum-repo0000755000175000017500000000057511757531137023251 0ustar ryngerynge#!/bin/bash set -e WORK_DIR=`mktemp -d` WEB_SERVER_DIR=/lfs2/webspace/pegasus4/wms/download/rhel rm -rf $REPO_DIR rsync -a -v -e ssh pegasus.isi.edu:$WEB_SERVER_DIR/ $WORK_DIR/ cd $WORK_DIR for DER in `find . -type d -name i386 -o -name x86_64`; do cd $WORK_DIR/$DER createrepo . done rsync -a -v -e ssh $WORK_DIR/ pegasus.isi.edu:$WEB_SERVER_DIR/ rm -rf $WORK_DIR pegasus-wms_4.0.1+dfsg/release-tools/svn-nightly/0000755000175000017500000000000011757531667021104 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/release-tools/svn-nightly/README0000755000175000017500000000036411757531137021762 0ustar ryngeryngeSVN-NIGHTLY Scripts ================== The svn-nightly scripts downloads the PEGASUS TRUNK and builds the pegasus-binary, pegasus-worker and pegasus-source tarballs. Usage: svn-nightly

The workflow gallery is a gallery of archived workflow runs on distributed resources. It is divided into three main pages. The main page lists the various types of workflows and provides a description of each workflow type. Selecting a particular workflow type takes you to the workflow type page, which shows all the runs of that workflow type along with a short summary of each workflow.
The summary contains the following details. For workflows with sub workflows (i.e., workflows having SUBDAG and SUBDAX jobs), the summary is computed across all sub workflows; the values include jobs from the sub workflows as well.
Workflow runtime(min,sec)          :
the walltime from the start of the workflow execution to the end, as reported by DAGMan. In case of rescue DAGs the value is the cumulative of all retries.
Cumulative workflow runtime(min,sec):
the sum of the walltimes of all jobs as reported by DAGMan. In case of job retries the value is the cumulative of all retries. (See the worked example after this list for how this differs from the workflow runtime.)
Total jobs run                     :
the total number of jobs run during the workflow run. In case of a failed workflow, the number of jobs run can be less than the total number of jobs in the planned workflow. This is a runtime view of the workflow.
# jobs succeeded                   :
the total number of jobs that succeeded during the workflow run.
# jobs failed                      :
the total number of jobs that failed during the workflow run.
# jobs unsubmitted                 :
the total number of jobs that remained unsubmitted during the workflow run.
# jobs unknown                     :
the total number of jobs in an unknown state during the workflow run.
# Total tasks succeeded            :
the total number of tasks that succeeded.
# Total tasks failed               :
the total number of tasks that failed.
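
As a worked illustration of the first two metrics, consider a hypothetical
workflow in which two 10-minute jobs run in parallel, followed by one
5-minute job. Its workflow runtime is roughly 15 minutes of wall clock time,
while its cumulative workflow runtime is 10 + 10 + 5 = 25 minutes, because
the cumulative value sums the walltimes of all jobs regardless of overlap.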

Selecting a particular run will take you to the page that contains all the information about that workflow. The workflow page contains the following details.

Workflow execution details

For workflows having sub workflow jobs (i.e., SUBDAG and SUBDAX jobs), the sub workflow jobs are counted as single jobs; the parent workflow does not recursively include job information from its sub workflows. For example, a workflow with two regular jobs and one SUBDAX job whose sub workflow contains fifty jobs reports a total of three jobs run on this page.

Workflow runtime(min,sec)          :
the walltime from the start of the workflow execution to the end, as reported by DAGMan. In case of rescue DAGs the value is the cumulative of all retries.
Cumulative workflow runtime(min,sec):
the sum of the walltimes of all jobs as reported by DAGMan. In case of job retries the value is the cumulative of all retries.
Total jobs run                     :
the total number of jobs run during the workflow run. In case of a failed workflow, the number of jobs run can be less than the total number of jobs in the planned workflow. This is a runtime view of the workflow.
# jobs succeeded                   :
the total number of jobs that succeeded during the workflow run.
# jobs failed                      :
the total number of jobs that failed during the workflow run.
# jobs unsubmitted                 :
the total number of jobs that remained unsubmitted during the workflow run.
# jobs unknown                     :
the total number of jobs in an unknown state during the workflow run.
# Total tasks succeeded            :
the total number of tasks that succeeded.
# Total tasks failed               :
the total number of tasks that failed.

Workflow execution environment

The workflow execution environment section shows the details captured in the braindump file. It contains information such as the dax label, dag label, submit directory, pegasus home, environment variables, etc.
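
For illustration only, a braindump file is a set of simple key-value pairs.
The key names and values below are assumptions made for the example and vary
between Pegasus versions; only fields of the kind mentioned above are shown:

    dax_label    montage
    dag          montage-0.dag
    submit_dir   /home/user/runs/run0001
    pegasus_home /usr
    properties   pegasus.12345.properties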

Job statistics

Job statistics contains the following details about the jobs in the workflow.

Job                  :
the name of the job.
Site                 :
the site where the job ran.
Kickstart(sec.)      :
the actual duration of the job in seconds on the remote compute node. In case of retries the value is the cumulative of all retries.
Post(sec.)           :
the postscript time as reported by DAGMan. In case of retries the value is the cumulative of all retries.
DAGMan(sec.)         :
the time between the completion of the last parent job of a job and the submission of the job. In case of retries the value of the last retry is used for the calculation.
CondorQTime(sec.)    :
the time between submission by DAGMan and the remote grid submission. It is an estimate of the time spent in the Condor queue on the submit node. In case of retries the value is the cumulative of all retries.
Resource(sec.)       :
the time between the remote grid submission and the start of remote execution. It is an estimate of the time the job spent in the remote queue. In case of retries the value is the cumulative of all retries.
Runtime(sec.)        :
the time spent on the resource as seen by Condor DAGMan. It is always >= the kickstart time. In case of retries the value is the cumulative of all retries.
Seqexec(sec.)        :
the time taken for the completion of a clustered job. In case of retries the value is the cumulative of all retries.
Seqexec-Delay(sec.)  :
the difference between the completion time of a clustered job and the sum of the kickstart times of all its constituent tasks (see the worked example after this list). In case of retries the value is the cumulative of all retries.
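
As a worked example of the Seqexec-Delay metric: a hypothetical clustered
job with a Seqexec time of 120 seconds, whose three constituent tasks report
kickstart times of 30, 40, and 45 seconds, has a Seqexec-Delay of
120 - (30 + 40 + 45) = 5 seconds, i.e. the overhead added by the cluster
wrapper on top of the tasks themselves.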

Task statistics

Task statistics contains the following details about the transformations in the workflow.

Transformation       :
the name of the transformation.
Count                :
the number of times the transformation was executed.
Succeeded            :
the number of times the transformation execution succeeded.
Failed               :
the number of times the transformation execution failed.
Mean(sec.)           :
the mean of the transformation runtimes.
Variance(sec.)       :
the variance of the transformation runtimes. Variance is calculated using the on-line algorithm by Knuth (http://en.wikipedia.org/wiki/Algorithms_for_calculating_variance); see the sketch after this list.
Min(sec.)            :
the minimum transformation runtime value.
Max(sec.)            :
the maximum transformation runtime value.
Total(sec.)          :
the cumulative transformation runtime.
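
The on-line variance computation referenced above can be sketched in a few
lines of Python. This is an illustrative sketch of Knuth's single-pass
algorithm, not the gallery's actual implementation; the function name and
the use of the sample variance (dividing by n - 1) are assumptions made for
the example:

    def online_variance(runtimes):
        # One pass over the runtime values; numerically stable, so there
        # is no need to store all values or to sum squares directly.
        n = 0
        mean = 0.0
        m2 = 0.0
        for x in runtimes:
            n += 1
            delta = x - mean
            mean += delta / n
            m2 += delta * (x - mean)
        if n < 2:
            return 0.0
        return m2 / (n - 1)  # sample variance of the runtimes

A call such as online_variance([30.0, 40.0, 45.0]) produces the same result
as a conventional two-pass computation, but needs only a single pass over
the per-invocation runtimes.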

Dax graph

Graph image of the dax file.

Dag graph

Graph image of the dag file.

Workflow execution gantt chart

The toolbar at the top provides zoom in/out, pan left/right/top/bottom, and show/hide job name functionality. The toolbar at the bottom can be used to show/hide job states. A failed job is shown with a red border. Clicking on a sub workflow job will take you to the corresponding sub workflow.

Host over time chart

The toolbar at the top provides zoom in/out, pan left/right/top/bottom, and show/hide host name functionality. The toolbar at the bottom can be used to show/hide job states. A failed job is shown with a red border. Clicking on a sub workflow job will take you to the corresponding sub workflow.

pegasus-wms_4.0.1+dfsg/release-tools/svn-nightly/svn-nightly0000755000175000017500000001052011757531137023302 0ustar ryngerynge#!/bin/sh # # Cron script to automagically do nightly builds of PEGASUS # # $Revision: 50 $ # Authors : gmehta@isi.edu, voeckler@cs.uchicago.edu # setup env # # CHANGE These MAGIC Values for your setup # export JAVA_HOME=/nfs/asd2/pegasus/software/linux/java/default export ANT_HOME=/nfs/asd2/pegasus/software/linux/ant/default export PATH=${JAVA_HOME}/bin:${ANT_HOME}/bin:/opt/condor/bin:${PATH} export PATH=${PATH}:/bin:/usr/bin export CONDOR_CONFIG=/opt/condor/etc/condor_config SVNROOT='https://smarty.isi.edu/svn/repo1/pegasus/trunk' BUILDDIR=$1 DST=$2 if [ "X$BUILDDIR" = "X" -o "X$DST" = "X" ]; then echo "Usage: $0 builddir dstdir" exit 1 fi DATE=`date +'%Y%m%d'` LOG=$DST/pegasus-logger-$HOST-$DATE.txt # extra message for uncaught errors, see "man 1 trap" trap 'echo "ERROR: Detected a failure..." >> $LOG' ERR # paranoia if mkdir -p $BUILDDIR >> /dev/null 2>&1; then cd $BUILDDIR else echo "ERROR: mkdir -p $BUILDDIR failed" 1>&2 exit 2 fi # extra sanity check here=`/bin/pwd` if [ "$here" = "/" -o "$here" = "$HOME" ]; then echo "ERROR! I am not in a directory I expect to be in" 1>&2 exit 3 elif [ "$here" != "$BUILDDIR" ]; then echo "Warning: Not quite the expected destination directory" 1>&2 fi # rotate dirs and use svn, unless called with 3 args if [ "X$3" = "X" ]; then # shift previous runs if [ -d pegasus.old ]; then if ! rm -rf pegasus.old; then echo "Warning: Unable to remove pegasus.old" 1>&2 fi fi if [ -d pegasus ]; then if ! mv pegasus pegasus.old; then echo "ERROR: Unable to rename pegasus to pegasus.old" 1>&2 exit 4 fi fi # grab latest and greatest echo "##### SVN CHECKOUT #####" > $LOG svn co $SVNROOT pegasus >> $LOG 2>&1 if [ $? -ne 0 ]; then echo "ERROR: svn check-out failed, aborting" 1>&2 exit 5 fi else echo "##### Using pre-select #####" > $LOG fi # enter Pegasus if ! cd pegasus; then echo "ERROR: Unable to chdir into $BUILDDIR/pegasus" 1>&2 exit 6 fi PEGASUS_HOME=`pwd` export PEGASUS_HOME unset CLASSPATH source setup-devel.sh if [ $? -ne 0 ]; then echo "ERROR: Unable to source PEGASUS developer setup script" 1>&2 exit 7 fi # which versions echo "#" >> $LOG java -version 2>&1 | sed -e 's/^/# /' >> $LOG ant -version 2>&1 | sed -e 's/^/# /' >> $LOG echo "#" >> $LOG VERSION=`ant version | gawk '/.echo. [0-9]+\.[0-9]+\.[-0-9a-z]+/ { print $2 }'` if [ "X$VERSION" = "X" ]; then echo 'ERROR: Unable to obtain a version number' 2>&1 exit 9 else echo "# detected PEGASUS version $VERSION" >> $LOG fi # rm -f $TMP #fi SYSTEM=`ant version | gawk '/.echo. Architect/ { print $3 }'` if [ "X$SYSTEM" = "X" ]; then echo 'ERROR: Unable to obtain a system info' 2>&1 exit 10 else echo "# detected System Info : $SYSTEM" >> $LOG fi # create binary echo "##### ANT DIST #####" >> $LOG BLOG="$DST/pegasus-binary-$VERSION-$SYSTEM-$DATE.txt" ant clean dist 2>&1 | tee "$BLOG" >> $LOG; if [ $? -eq 0 ]; then src="dist/pegasus-binary-$VERSION-$SYSTEM.tar.gz" dst="$DST/pegasus-binary-$VERSION-$SYSTEM-$DATE.tar.gz" if cp "$src" "$dst"; then chmod a+r,g+w "$dst" "$BLOG" else echo "Warning: Unable to copy binary distribution" 1>&2 fi else echo "ERROR: ant dist failed" 1>&2 exit 43 fi # create worker echo "##### ANT DIST-WORKER #####" >> $LOG WLOG="$DST/pegasus-worker-$VERSION-$SYSTEM-$DATE.txt" ant clean dist-worker 2>&1 | tee "$WLOG" >> $LOG if [ $? 
-eq 0 ]; then src="dist/pegasus-worker-$VERSION-$SYSTEM.tar.gz" dst="$DST/pegasus-worker-$VERSION-$SYSTEM-$DATE.tar.gz" if cp "$src" "$dst"; then chmod a+r,g+w "$dst" "$WLOG" else echo "Warning: Unable to copy worker distribution" 1>&2 fi else echo "ERROR: ant dist-worker failed" 1>&2 exit 42 fi # create source echo "##### SOURCE #####" >> $LOG ant clean | tee $TMP >> $LOG 2>&1 if [ $? -ne 0 ]; then echo "ERROR: ant clean failed" 1>&2 cat $TMP 1>&2 rm -f $TMP exit 44 else rm -f $TMP fi cd .. gtar --exclude=\.svn -czvf $DST/pegasus-source-$VERSION-$DATE.tar.gz pegasus >> $LOG 2>&1 if [ $? -eq 0 ]; then chmod a+r,g+w $DST/pegasus-source-$VERSION-$DATE.tar.gz else echo "ERROR: gtar source failed, removing source" 1>&2 rm -f $DST/pegasus-source-$VERSION-$DATE.tar.gz >> /dev/null 2>&1 exit 45 fi NLOG=$DST/pegasus-logger-$SYSTEM-$DATE.txt mv $LOG $NLOG # done trap - ERR exit 0 pegasus-wms_4.0.1+dfsg/test/0000755000175000017500000000000011757531667015023 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/tools/0000755000175000017500000000000011757531667016163 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/api/0000755000175000017500000000000011757531667015574 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/0000755000175000017500000000000011757531667015753 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/004-montage-shared-fs/0000755000175000017500000000000011757531667021560 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/004-montage-shared-fs/run-bamboo-test0000755000175000017500000000763711757531137024531 0ustar ryngerynge#!/bin/bash set -e ####################################################################### # # Settings # DEGREES=0.5 ####################################################################### TOP_DIR=`pwd` export PATH=/ccg/software/montage/Montage_v3.3_mats/bin:$PATH # unique directory for this run RUN_ID=`/bin/date +'%F_%H%M%S'` RUN_DIR=/nfs/ccg3/scratch/bamboo/wf/$RUN_ID echo "Work directory: $RUN_DIR" mkdir -p $RUN_DIR/inputs cd $RUN_DIR cp $TOP_DIR/pegasusrc . # create the transformation catalogue (tc) echo echo "Creating the transformation catalog..." for BINARY in `(cd /ccg/software/montage/Montage_v3.3_mats/bin/ && ls)`; do cat >>tc <sites.xml < /ccg/software/globus/default 40000,50000 condor (TARGET.FileSystemDomain =!= "") /ccg/software/montage/Montage_v3.3_mats /usr EOF echo echo "Running mDAG (finding input images, generating DAX, ...)..." mDAG 2mass j M17 $DEGREES $DEGREES 0.0002777778 . "file://$RUN_DIR" "file://$RUN_DIR/inputs" echo echo "Adding input images to the replica catalog..." echo " " `cat cache.list | wc -l` "images found" cat cache.list | grep -v ".fits " >rc perl -p -i -e 's/ipac_cluster/CCG/' rc cat url.list | sed 's/ http:.*ref=/ http:\/\/obelix.isi.edu\/irsa-cache/' >>rc echo "Planning and submitting the workflow..." 
pegasus-plan \ --conf pegasusrc \ --dir work \ --sites CCG \ --output CCG \ --nocleanup \ --dax dag.xml \ --cluster horizontal \ --submit | tee $TOP_DIR/plan.out pegasus-wms_4.0.1+dfsg/test/core/004-montage-shared-fs/pegasusrc0000644000175000017500000000042411757531137023467 0ustar ryngerynge pegasus.catalog.replica=SimpleFile pegasus.catalog.replica.file=rc pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.catalog.transformation=Text pegasus.catalog.transformation.file=tc pegasus.dir.useTimestamp=true pegasus.condor.logs.symlink = false pegasus-wms_4.0.1+dfsg/test/core/005-galactic-plane/0000755000175000017500000000000011757531667021121 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/005-galactic-plane/run-bamboo-test0000755000175000017500000000057311757531137024062 0ustar ryngerynge#!/bin/bash set -e TOP_DIR=`pwd` WORK_DIR=$TOP_DIR/work export PATH=/ccg/software/montage/Montage_v3.3_mats/bin:$PATH export GLOBUS_LOCATION=/ccg/software/globus/default cp ccg-test.config.template ccg-test.config perl -p -i -e "s:\[WORK_DIR\]:$WORK_DIR:g" ccg-test.config perl -p -i -e "s/\[USER\]/$USER/g" ccg-test.config ./galactic-plane ccg-test.config | tee plan.out pegasus-wms_4.0.1+dfsg/test/core/005-galactic-plane/ccg-test.config.template0000644000175000017500000000125411757531137025625 0ustar ryngerynge[main] mode: full [tiles] survey: 2mass band: j min_lon: -0.2 max_lon: 0.2 min_lat: -0.6 max_lat: 0.6 size: 1.0 overlap: 0.1 [local] work_dir: [WORK_DIR] montage_location: /ccg/software/montage/Montage_v3.3_mats [cluster] name: CCG pegasus_home: /ccg/software/pegasus/dev/trunk globus_location: /ccg/software/globus/5.0.4 montage_location: /ccg/software/montage/Montage_v3.3_mats gridftp_server: cartman.isi.edu work_dir: /nfs/ccg3/scratch/bamboo/galactic-plane-test [output] name: CCG_Data storage_proto: gridftp storage_url: gsiftp://cartman.isi.edu storage_mount: /nfs/ccg3/scratch/bamboo/outputs pegasus-wms_4.0.1+dfsg/test/core/005-galactic-plane/notify0000755000175000017500000000012211757531137022342 0ustar ryngerynge#!/bin/bash set -e #$PEGASUS_HOME/libexec/notification/email -t rynge@isi.edu pegasus-wms_4.0.1+dfsg/test/core/005-galactic-plane/local-tile-setup0000755000175000017500000000026511757531137024225 0ustar ryngerynge#!/bin/bash set -e TILE_ID=$1 . $TILE_ID.params START_DIR=`pwd` echo "Job started in $START_DIR" mkdir -p $TILE_WORK_DIR cd $TILE_WORK_DIR tar xzf $START_DIR/$TILE_ID.tar.gz pegasus-wms_4.0.1+dfsg/test/core/005-galactic-plane/remote-extra-cleanup0000755000175000017500000000016011757531137025075 0ustar ryngerynge#!/bin/bash set -e TILE_ID=$1 . $TILE_ID.params START_DIR=`pwd` rm -rf tile-setup/$TILE_ID rm -rf $TILE_ID pegasus-wms_4.0.1+dfsg/test/core/005-galactic-plane/remote-tile-setup0000755000175000017500000000211311757531137024420 0ustar ryngerynge#!/bin/bash set -e MODE=$1 TILE_ID=$2 . $TILE_ID.params WORK_DIR=`pwd`/tile-setup/$TILE_ID rm -rf $WORK_DIR mkdir -p $WORK_DIR cd $WORK_DIR # generate dag for the tile $MONTAGE_HOME/bin/mDAGGalacticPlane $SURVEY $BAND $CENTER_LON $CENTER_LAT $TILE_SIZE $TILE_SIZE 0.0002777778 . "gsiftp://$WF_MANAGER_HOST$TILE_WORK_DIR" "gsiftp://$WF_MANAGER_HOST$TILE_WORK_DIR/inputs" echo echo "Number of images for this tile: "`cat images.tbl | grep http | wc -l` echo # add the inputs to the rc echo cd $WORK_DIR cat cache.list | grep -v ".fits " >rc.data cat url.list | sed 's/ http:.*ref=/ http:\/\/obelix.isi.edu\/irsa-cache/' >>rc.data if [ "$MODE" = "prefetch" ]; then echo "Prefteching data..." 
cat rc.data | grep obelix | sed 's/.*http:/http:/' | sed 's/fits\.gz.*/fits.gz/' >prefetch.list cat prefetch.list | while read URL; do echo " ... $URL" wget -q -O /dev/null $URL done fi # prepare tarball that we can give to the local setup script tar czf ../../$TILE_ID.tar.gz *.hdr *.list *.data *.tbl *.xml cd ../../ echo "Tarball created." pwd ls -l $TILE_ID.tar.gz pegasus-wms_4.0.1+dfsg/test/core/005-galactic-plane/galactic-plane0000755000175000017500000004001311757531137023701 0ustar ryngerynge#!/usr/bin/env python """ Creates a uber workflow over Montage to generate tiles for the galactic plane Usage: galactic-plane [options] """ ## # Copyright 2007-2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## import os import re import sys import errno import logging import optparse import tempfile import subprocess import math import time import socket import string import ConfigParser # set PEGASUS_HOME - transition from Pegasus 2.4 pegasus_home = None paths = string.split(os.environ["PATH"], ":") for path in paths: if os.path.isfile(os.path.join(path, "pegasus-plan")): pegasus_home = os.path.normpath(os.path.join(path, "..")) os.environ["PEGASUS_HOME"] = pegasus_home break if pegasus_home == None: raise RuntimeError("pegasus-plan not found in the PATH") sys.path.append(os.getenv("PEGASUS_HOME") + "/lib/pegasus/python") from Pegasus.DAX3 import * __author__ = "Mats Rynge " # --- settings ------------------------------------------------------------------------ monitord_output = None config = ConfigParser.ConfigParser() config.read(sys.argv[1]) mode = config.get('main', 'mode') if config.has_option('main', 'monitord_output'): monitord_output = config.get('main', 'monitord_output') survey = config.get('tiles', 'survey') band = config.get('tiles', 'band') min_lon = config.getfloat('tiles', 'min_lon') max_lon = config.getfloat('tiles', 'max_lon') min_lat = config.getfloat('tiles', 'min_lat') max_lat = config.getfloat('tiles', 'max_lat') tile_size = config.getfloat('tiles', 'size') tile_overlap = config.getfloat('tiles', 'overlap') local_work_dir = config.get('local', 'work_dir') local_montage_location = config.get('local', 'montage_location') cluster_name = config.get('cluster', 'name') cluster_pegasus_home = config.get('cluster', 'pegasus_home') cluster_globus_location = config.get('cluster', 'globus_location') cluster_montage_location = config.get('cluster', 'montage_location') cluster_gridftp_server = config.get('cluster', 'gridftp_server') cluster_work_dir = config.get('cluster', 'work_dir') output_name = config.get('output', 'name') output_storage_proto = config.get('output', 'storage_proto') output_storage_url = config.get('output', 'storage_url') output_storage_mount = config.get('output', 'storage_mount') # --- classes ------------------------------------------------------------------------- class Tile: center_lon = 0.0 center_lat = 0.0 size = 1.0 def __init__(self, center_lon, center_lat, size): self.center_lon = center_lon self.center_lat = center_lat self.size = 
size # --- global variables ---------------------------------------------------------------- local_galacticplane_location = os.path.dirname(os.path.realpath( __file__ )) local_hostname = socket.getfqdn() run_id = "" work_dir = "" gp_files = [] gp_jobs = [] gp_relations = [] # --- functions ----------------------------------------------------------------------- def myexec(cmd_line): sys.stdout.flush() p = subprocess.Popen(cmd_line + " 2>&1", shell=True) stdoutdata, stderrdata = p.communicate() r = p.returncode if r != 0: raise RuntimeError("Command '%s' failed with error code %s" % (cmd_line, r)) def create_work_dir(): global run_id global work_dir lt = time.localtime(time.time()) run_id = "galactic-plane-%04d%02d%02d-%02d%02d%02d" % (lt[0], lt[1], lt[2], lt[3], lt[4], lt[5]) work_dir = "%s/%s" % (local_work_dir, run_id) print "Work dir is: " + work_dir os.makedirs(work_dir) def add_tile(mode, uberdax, tile_id, lon, lat): tile_work_dir = "%s/tiles/%s" % (work_dir, tile_id) # parameters file pf = open("%s/%s.params" % (work_dir, tile_id), 'w') pf.write("export TILE_ID=\"%s\"\n" % (tile_id)) pf.write("export CLUSTER_NAME=\"%s\"\n" % (cluster_name)) pf.write("export WF_MANAGER_HOST=\"%s\"\n" % (local_hostname)) pf.write("export TILE_WORK_DIR=\"%s\"\n" % (tile_work_dir)) pf.write("export SURVEY=\"%s\"\n" % (survey)) pf.write("export BAND=\"%s\"\n" % (band)) pf.write("export CENTER_LON=\"%f\"\n" % (lon)) pf.write("export CENTER_LAT=\"%f\"\n" % (lat)) pf.write("export TILE_SIZE=\"%f\"\n" % (tile_size)) pf.close() # params input file params = File("%s.params" % (tile_id)) params.addPFN(PFN("gsiftp://%s%s/%s.params" % (local_hostname, work_dir, tile_id), "local")) uberdax.addFile(params) mdagtar = File("%s.tar.gz" % (tile_id)) remote_tile_setup = Job(namespace="gp", name="remote_tile_setup", version="1.0", id="rts-%s"%(tile_id)) remote_tile_setup.addArguments(mode) remote_tile_setup.addArguments(tile_id) remote_tile_setup.addProfile(Profile("dagman", "CATEGORY", "remote_tile_setup")) remote_tile_setup.uses(params, link=Link.INPUT, register=False) remote_tile_setup.uses(mdagtar, link=Link.OUTPUT, register=False, transfer=True) remote_tile_setup.invoke('on_error', local_galacticplane_location + "/notify") uberdax.addJob(remote_tile_setup) if mode == "prefetch": return local_tile_setup = Job(namespace="gp", name="local_tile_setup", version="1.0", id="lts-%s"%(tile_id)) local_tile_setup.addArguments(tile_id) local_tile_setup.addProfile(Profile("hints", "executionPool", "local")) local_tile_setup.uses(params, link=Link.INPUT, register=False) local_tile_setup.uses(mdagtar, link=Link.INPUT, register=False) uberdax.addJob(local_tile_setup) uberdax.depends(parent=remote_tile_setup, child=local_tile_setup) # dax file subdax_file = File("%s.dax" % (tile_id)) subdax_file.addPFN(PFN("file://%s/dag.xml" % (tile_work_dir), "local")) uberdax.addFile(subdax_file) subwf = DAX("%s.dax" % (tile_id), id="sub-%s" % (tile_id)) subwf.addArguments("-Dpegasus.catalog.replica.file=%s/rc.data" % (tile_work_dir), "-Dpegasus.catalog.site.file=%s/sites.xml" % (work_dir), "-Dpegasus.transfer.links=true", "--cluster", "horizontal", "--sites", cluster_name, "--basename", tile_id, "--force", "--force-replan", "--output", output_name) subwf.addProfile(Profile("dagman", "CATEGORY", "subworkflow")) subwf.uses(subdax_file, link=Link.INPUT, register=False) subwf.invoke('at_end', local_galacticplane_location + "/notify") uberdax.addDAX(subwf) uberdax.depends(parent=local_tile_setup, child=subwf) remote_extra_cleanup = Job(namespace="gp", 
name="remote_extra_cleanup", version="1.0", id="rec-%s"%(tile_id)) remote_extra_cleanup.addArguments(tile_id) remote_extra_cleanup.uses(params, link=Link.INPUT, register=False) uberdax.addJob(remote_extra_cleanup) uberdax.depends(parent=subwf, child=remote_extra_cleanup) def generate_pegasus_rc(mode): rc = open(work_dir + "/pegasusrc", "w") rc.write("pegasus.catalog.replica=SimpleFile\n") rc.write("pegasus.catalog.replica.file=%s/rc.data\n" % (work_dir)) rc.write("pegasus.catalog.site=XML3\n") rc.write("pegasus.catalog.site.file=%s/sites.xml\n" % (work_dir)) rc.write("pegasus.catalog.transformation=File\n") rc.write("pegasus.catalog.transformation.file=%s/tc.data\n" % (work_dir)) rc.write("pegasus.data.configuration=sharedfs\n") rc.write("pegasus.clusterer.job.aggregator.seqexec.firstjobfail=true\n") rc.write("pegasus.file.cleanup.scope=deferred\n") rc.write("pegasus.dir.useTimestamp=true\n") rc.write("pegasus.dir.storage.deep=false\n") rc.write("pegasus.condor.logs.symlink=false\n") rc.write("pegasus.stagein.clusters=10\n") rc.write("pegasus.stageout.clusters=100\n") rc.write("pegasus.transfer.stagein.remote.sites=%s\n" % (cluster_name)) rc.write("condor.periodic_release=2\n") rc.write("condor.periodic_remove=2\n") rc.write("dagman.maxpre=5\n") rc.write("dagman.retry=2\n") rc.write("dagman.remote_tile_setup.maxjobs=4\n") rc.write("dagman.subworkflow.maxjobs=15\n") if monitord_output != None: rc.write("pegasus.monitord.output=%s\n" % (monitord_output)) rc.close() def generate_sc(): sc = open(work_dir + "/sites.xml", 'w') sc.write("\n") sc.write("\n") sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n" % (local_hostname, work_dir)) sc.write(" \n" % (work_dir)) sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n" % (local_hostname, work_dir)) sc.write(" \n" % (work_dir)) sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" %s\n" % (os.environ["GLOBUS_LOCATION"])) sc.write(" %s:%s/bin:%s\n" %(local_galacticplane_location, local_montage_location, os.environ["PATH"])) sc.write(" \n") sc.write(" \n" %(cluster_name)) sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n" % (cluster_gridftp_server, cluster_work_dir)) sc.write(" \n" % (cluster_work_dir)) sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" condor\n") sc.write(" True\n") sc.write(" ON_EXIT\n") sc.write(" (FileSystemDomain != "")\n") sc.write(" %s\n" % (cluster_pegasus_home)) sc.write(" %s\n" % (cluster_globus_location)) sc.write(" %s\n" %(cluster_montage_location)) sc.write(" \n") sc.write(" \n" %(output_name)) sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n" % (output_storage_proto, output_storage_url, output_storage_mount, run_id)) sc.write(" \n" % (output_storage_mount, run_id)) sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write(" \n") sc.write("\n") sc.close() def generate_tc(): # tc needs to be in old format to work with montage tc = open(work_dir + "/tc.data", 'w') tc.write("local gp::remote_tile_setup:1.0 gsiftp://%s%s/remote-tile-setup STATIC_BINARY INTEL32::LINUX condor::priority=100\n" % (local_hostname, local_galacticplane_location)) tc.write("local gp::remote_extra_cleanup:1.0 gsiftp://%s%s/remote-extra-cleanup STATIC_BINARY INTEL32::LINUX condor::priority=1000\n" % (local_hostname, local_galacticplane_location)) tc.write("local gp::local_tile_setup:1.0 %s/local-tile-setup INSTALLED INTEL32::LINUX\n" % (local_galacticplane_location)) for binary in 
os.listdir(local_montage_location + "/bin/"): extra = "PEGASUS::clusters.size=20" if binary == "mProject" or binary == "mBackground": extra = "PEGASUS::clusters.size=3" tc.write("%s %s:3.3 %s/bin/%s INSTALLED INTEL32::LINUX %s\n" % (cluster_name, binary, cluster_montage_location, binary, extra)) tc.close() def main(): create_work_dir() # find the center, and use that as a starting point for our calculations # this is so that we tiles will overshoot equally much on each boundry clon = (max_lon + min_lon) / 2.0 clat = (max_lat + min_lat) / 2.0 print "Center of the tiled area is: %f, %f" % (clon, clat) # spacing between tiles spacing = (float)(tile_size - tile_overlap) print "Spacing between the tiles will be %f" % (spacing) # tiles needed tiles_hori = int(math.ceil((max_lon - min_lon) / spacing)) tiles_vert = int(math.ceil((max_lat - min_lat) / spacing)) print "%d tiles needed horizontally" %(tiles_hori) print "%d tiles needed vertically" %(tiles_vert) print "Total number of tiles: %d" % (tiles_vert * tiles_hori) # uber dax uberdax = ADAG("gp") uberdax.invoke('all', local_galacticplane_location + "/notify") # start from top left, and move down in rows start_lon = clon - spacing * (tiles_vert / 2.0) + (spacing / 2) start_lat = clat + spacing * (tiles_vert / 2.0) - (spacing / 2) tile_id = 0 for ny in range(0, tiles_vert): for nx in range(0, tiles_hori): lon = start_lon + (nx * spacing) lat = start_lat - (ny * spacing) tile_id = "tile_%+06.0f_%+06.0f" % (lat * 100, lon * 100) tile_id = tile_id.replace("+", "_") add_tile(mode, uberdax, tile_id, lon, lat) generate_pegasus_rc(mode) generate_sc() generate_tc() daxfile = open(work_dir + "/gp.dax", "w") uberdax.writeXML(daxfile) daxfile.close() print "Planning and submitting the uberdax..." os.chdir(work_dir) os.environ["JAVA_HEAPMAX"] = "512" cmd = "pegasus-plan --conf pegasusrc --relative-dir " + run_id + " --sites " + cluster_name + " --dir . --output local --dax gp.dax --nocleanup --submit 2>&1 | tee pegasus-plan.out" myexec(cmd) # --- main ---------------------------------------------------------------------------- main() pegasus-wms_4.0.1+dfsg/test/core/007-black-diamond-pegasuslite-local/0000755000175000017500000000000011757531667024357 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/007-black-diamond-pegasuslite-local/run-bamboo-test0000755000175000017500000000374511757531137027324 0ustar ryngerynge#!/bin/bash set -e set -v if [ X${testdir} = "X" ]; then testdir=`dirname $0` export testdir fi TOPDIR=`pwd` # generate the input file echo "This is sample input to KEG" >f.a # output directory mkdir -p outputs # build the dax generator export CLASSPATH=$testdir:`pegasus-config --classpath` javac $testdir/BlackDiamondDAX.java # generate the dax java BlackDiamondDAX /usr blackdiamond.dax # create the site catalog cat >sites.xml < 1 condor vanilla /usr EOF # plan and submit the workflow pegasus-plan \ --conf $testdir/pegasusrc \ --sites local \ --dir work \ --output local \ --cluster horizontal \ --dax blackdiamond.dax \ --submit | tee plan.out pegasus-wms_4.0.1+dfsg/test/core/007-black-diamond-pegasuslite-local/run-test0000755000175000017500000000047511757531137026064 0ustar ryngerynge#!/bin/bash set -e set -v testdir=`dirname $0` export testdir $testdir/run-bamboo-test # wait for workflow to finish $testdir/../../common/wait-for-workflow # check success with pegasus-analyzer - does it use exit codes correctly? 
cd work/*/*/*/*/ pegasus-analyzer # show some stats pegasus-statistics `pwd` pegasus-wms_4.0.1+dfsg/test/core/007-black-diamond-pegasuslite-local/README0000644000175000017500000000055011757531137025227 0ustar ryngeryngeTEST DESCRIPTION - This test runs a blackdiamond workflow in the Pegasus Lite mode on local site. - Clustering and staging of executables both is turned on . PURPOSE - The purpose is to make sure the pegasus-lite-local.sh wrapper works correctly for local universe jobs with condor io set to true . Associated JIRA Item - https://jira.isi.edu/browse/PM-542 pegasus-wms_4.0.1+dfsg/test/core/007-black-diamond-pegasuslite-local/test.config0000644000175000017500000000002411757531137026511 0ustar ryngeryngemax_wall_time = 30 pegasus-wms_4.0.1+dfsg/test/core/007-black-diamond-pegasuslite-local/BlackDiamondDAX.java0000644000175000017500000001032511757531137030060 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import edu.isi.pegasus.planner.dax.*; public class BlackDiamondDAX { /** * Create an example DIAMOND DAX * @param args */ public static void main(String[] args) { if (args.length != 2) { System.out.println("Usage: java ADAG "); System.exit(1); } try { Diamond(args[0]).writeToFile(args[1]); } catch (Exception e) { e.printStackTrace(); } } private static ADAG Diamond(String pegasus_location) throws Exception { java.io.File cwdFile = new java.io.File ("."); String cwd = cwdFile.getCanonicalPath(); ADAG dax = new ADAG("blackdiamond"); File fa = new File("f.a"); fa.addPhysicalFile("file://" + cwd + "/f.a", "local"); dax.addFile(fa); File fb1 = new File("f.b1"); File fb2 = new File("f.b2"); File fc1 = new File("f.c1"); File fc2 = new File("f.c2"); File fd = new File("f.d"); fd.setRegister(true); Executable preprocess = new Executable("pegasus", "preprocess", "4.0"); preprocess.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); preprocess.setInstalled( false ); preprocess.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "condorpool"); Executable findrange = new Executable("pegasus", "findrange", "4.0"); findrange.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); findrange.setInstalled( false ); findrange.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "condorpool"); Executable analyze = new Executable("pegasus", "analyze", "4.0"); analyze.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); analyze.setInstalled( false ); analyze.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "condorpool"); dax.addExecutable(preprocess).addExecutable(findrange).addExecutable(analyze); // Add a preprocess job Job j1 = new Job("j1", "pegasus", "preprocess", "4.0"); j1.addArgument("-a preprocess -T 60 -i ").addArgument(fa); j1.addArgument("-o ").addArgument(fb1); j1.addArgument(" ").addArgument(fb2); j1.uses(fa, File.LINK.INPUT); j1.uses(fb1, File.LINK.OUTPUT); j1.uses(fb2, File.LINK.OUTPUT); dax.addJob(j1); // Add left Findrange job Job j2 = new 
Job("j2", "pegasus", "findrange", "4.0"); j2.addArgument("-a findrange -T 60 -i ").addArgument(fb1); j2.addArgument("-o ").addArgument(fc1); j2.uses(fb1, File.LINK.INPUT); j2.uses(fc1, File.LINK.OUTPUT); dax.addJob(j2); // Add right Findrange job Job j3 = new Job("j3", "pegasus", "findrange", "4.0"); j3.addArgument("-a findrange -T 60 -i ").addArgument(fb2); j3.addArgument("-o ").addArgument(fc2); j3.uses(fb2, File.LINK.INPUT); j3.uses(fc2, File.LINK.OUTPUT); dax.addJob(j3); // Add analyze job Job j4 = new Job("j4", "pegasus", "analyze", "4.0"); j4.addArgument("-a analyze -T 60 -i ").addArgument(fc1); j4.addArgument(" ").addArgument(fc2); j4.addArgument("-o ").addArgument(fd); j4.uses(fc1, File.LINK.INPUT); j4.uses(fc2, File.LINK.INPUT); j4.uses(fd, File.LINK.OUTPUT); dax.addJob(j4); dax.addDependency("j1", "j2"); dax.addDependency("j1", "j3"); dax.addDependency("j2", "j4"); dax.addDependency("j3", "j4"); return dax; } } pegasus-wms_4.0.1+dfsg/test/core/007-black-diamond-pegasuslite-local/pegasusrc0000644000175000017500000000031011757531137026260 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink = false pegasus.data.configuration = Condor pegasus-wms_4.0.1+dfsg/test/core/008-black-diamond-pegasuslite-condorio/0000755000175000017500000000000011757531667025102 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/008-black-diamond-pegasuslite-condorio/run-bamboo-test0000755000175000017500000000343011757531137030036 0ustar ryngerynge#!/bin/bash set -e set -v if [ X${testdir} = "X" ]; then testdir=`dirname $0` export testdir fi TOPDIR=`pwd` # generate the input file echo "This is sample input to KEG" > f.a # output directory mkdir -p outputs # build the dax generator export PYTHONPATH=`pegasus-config --python` $testdir/blackdiamond.py /usr > blackdiamond.dax # create the site catalog cat > sites.xml < condor vanilla EOF # plan and submit the workflow pegasus-plan \ --conf $testdir/pegasusrc \ --sites condorpool \ --output local \ --dir work \ --nocleanup \ --dax blackdiamond.dax \ --submit | tee plan.out pegasus-wms_4.0.1+dfsg/test/core/008-black-diamond-pegasuslite-condorio/run-test0000755000175000017500000000047511757531137026607 0ustar ryngerynge#!/bin/bash set -e set -v testdir=`dirname $0` export testdir $testdir/run-bamboo-test # wait for workflow to finish $testdir/../../common/wait-for-workflow # check success with pegasus-analyzer - does it use exit codes correctly? cd work/*/*/*/*/ pegasus-analyzer # show some stats pegasus-statistics `pwd` pegasus-wms_4.0.1+dfsg/test/core/008-black-diamond-pegasuslite-condorio/README0000644000175000017500000000037711757531137025761 0ustar ryngeryngeTEST DESCRIPTION - This test runs a blackdiamond workflow in the Pegasus Lite mode on condor pool. - Worker package staging is turned on. PURPOSE - The purpose is to make sure that worker package staging works correctly Pegasus lite - condor io mode. 
pegasus-wms_4.0.1+dfsg/test/core/008-black-diamond-pegasuslite-condorio/blackdiamond.py0000755000175000017500000000523611757531137030065 0ustar ryngerynge#!/usr/bin/env python from Pegasus.DAX3 import * import sys import os if len(sys.argv) != 2: print "Usage: %s PEGASUS_HOME" % (sys.argv[0]) sys.exit(1) # Create a abstract dag diamond = ADAG("diamond") # Add input file to the DAX-level replica catalog a = File("f.a") a.addPFN(PFN("file://" + os.getcwd() + "/f.a", "local")) diamond.addFile(a) # Add executables to the DAX-level replica catalog # In this case the binary is pegasus-keg, which is shipped with Pegasus, so we use # the remote PEGASUS_HOME to build the path. e_preprocess = Executable(namespace="diamond", name="preprocess", version="4.0", os="linux", arch="x86", installed=False) e_preprocess.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "condorpool")) diamond.addExecutable(e_preprocess) e_findrange = Executable(namespace="diamond", name="findrange", version="4.0", os="linux", arch="x86", installed=False) e_findrange.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "condorpool")) diamond.addExecutable(e_findrange) e_analyze = Executable(namespace="diamond", name="analyze", version="4.0", os="linux", arch="x86", installed=False) e_analyze.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "condorpool")) diamond.addExecutable(e_analyze) # Add a preprocess job preprocess = Job(namespace="diamond", name="preprocess", version="4.0") b1 = File("f.b1") b2 = File("f.b2") preprocess.addArguments("-a preprocess","-T10","-i",a,"-o",b1,b2) preprocess.uses(a, link=Link.INPUT) preprocess.uses(b1, link=Link.OUTPUT) preprocess.uses(b2, link=Link.OUTPUT) diamond.addJob(preprocess) # Add left Findrange job frl = Job(namespace="diamond", name="findrange", version="4.0") c1 = File("f.c1") frl.addArguments("-a findrange","-T5","-i",b1,"-o",c1) frl.uses(b1, link=Link.INPUT) frl.uses(c1, link=Link.OUTPUT) diamond.addJob(frl) # Add right Findrange job frr = Job(namespace="diamond", name="findrange", version="4.0") c2 = File("f.c2") frr.addArguments("-a findrange","-T5","-i",b2,"-o",c2) frr.uses(b2, link=Link.INPUT) frr.uses(c2, link=Link.OUTPUT) diamond.addJob(frr) # Add Analyze job analyze = Job(namespace="diamond", name="analyze", version="4.0") d = File("f.d") analyze.addArguments("-a analyze","-T10","-i",c1,c2,"-o",d) analyze.uses(c1, link=Link.INPUT) analyze.uses(c2, link=Link.INPUT) analyze.uses(d, link=Link.OUTPUT, register=True) diamond.addJob(analyze) # Add control-flow dependencies diamond.addDependency(Dependency(parent=preprocess, child=frl)) diamond.addDependency(Dependency(parent=preprocess, child=frr)) diamond.addDependency(Dependency(parent=frl, child=analyze)) diamond.addDependency(Dependency(parent=frr, child=analyze)) # Write the DAX to stdout diamond.writeXML(sys.stdout) pegasus-wms_4.0.1+dfsg/test/core/008-black-diamond-pegasuslite-condorio/test.config0000644000175000017500000000002411757531137027234 0ustar ryngeryngemax_wall_time = 30 pegasus-wms_4.0.1+dfsg/test/core/008-black-diamond-pegasuslite-condorio/pegasusrc0000644000175000017500000000035511757531137027014 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink = false pegasus.data.configuration = Condor pegasus.transfer.worker.package = True pegasus-wms_4.0.1+dfsg/test/core/Test-Cases-Key.xlsx0000644000175000017500000002571311757531137021374 0ustar ryngeryngePK!A7r[Content_Types].xml 
(n0EUb袪*>-L<$my IxB<M؞{u<ֵMVx~ k\|O?g )r` W;EE^ĢZa8X+Xʠ*A>zOQJ`5ǒv]ʅ B+ M˚3  jh'@PȓAw2lae>p3f|]h4$cS՜]q1~]5e2n]}ol _A|@[he6iȕ'ħڗ|pK].[N A$&9uDn;E>6PK!U0#L _rels/.rels (N0 HCnHLH!T$$@Jc?[iTb/Nú(A3b{jxVb"giaWl_xb#b4O r0Qahѓeܔ=P-<4Mox/}bN@;vCf ۨBI"c&\O8q"KH<ߊs@.h<⧄MdaT_PK!>xl/_rels/workbook.xml.rels (J0nӮ""Ej}Lm2㟾ۅef2k&WP%&w ޚ[= &$W4KH"xR㣔dNҨ9ɨAw(7ey/O ނhm| }Dg"$4FY.2#59鳔Y]bd @%s"ݚ0tB)[ȓPK!XHxl/workbook.xmlQN0 #Qm NB!1sh5ZTIJi7N(IsbJ@FH}\ 9=j{4H@޷(re w3ӂNel=B{\k WFE,RӑaiaJhʮG {\ղuXURnTDx۾>)JwIH"s?۵T]ƌFE%*)Ey;RƲ0Iϣi/0}NY֞'0DKп[RcȟY_W޸f?LubF/ql /+֦d^Mp 箔(I LA8"k>Q4X1RKVIf;CQ悕Z)!%hGJ2MTۂZSv)!J|[ =Tӵtz0j 0،S3&q#Z0ˉ%!#F&%4BVB} t z:- SvӔmsLj@ :nzBln_Ҫv`b[o =Vͬ=#kڷR Dnl}׫\PedǗZwM8\ySol D:-6OXė<{}D_%fΥ tX9ly.0P Å@#,wir9r<FnR-o0$G#"$}zȹ/1tknezmLrʏ70GÆ9",q3?좮dP6׷kWBU#k!%/mG_b*rf }4).َʎgoPK!;m2KB#xl/worksheets/_rels/sheet1.xml.rels0ECx{օ CS7"Ubۗ{ep6<f,Ժch{-A8 -Iy0Ьjm_/N,}W:=RY}H9EbAwk}m PK!bmxl/theme/theme1.xmlYOo6w tom'uرMniXS@I}úa0l+t&[HJKՇD"|#uڃC"$q۫]z>8h{wK cxLޜH]ś*$A>J%aACMʈJ&M;4Be tY>c~4$ &^ L1bma]ut(gZ[Wvr2u{`M,EF,2nQ%[NJeD >֗f}{7vtd%|JYw2Oڡ~J=L8-o|(<4 ժX}.@'d}.Fbo\C\ҼMT0 zSώt--g.—~?~xY'y92h!ы/ɋ>%mGEFD[t3q%'#qSgv 9feqwW@(^wdbh a8g.J pC*Xx8rbV`|XƻcǵYU3 Jݐ8b3+(QuK>QELKM2#'vi~ vlwu8+zHHJ:) ~L\E\O*t@G1lm~C*uG.R(:-ys^Di7QR8,b?SQ*q7C;+}ݧ;4pDZ_^'܉M01UJS#]flmʒgD^< dB[_WE)*k;ZxO(c5g4 h܇A:I~KBxԙ \ YWQB@^4@.hKik<ʞ6b+jΎ9#U`δuM֯DAaVB[͈f-WY؜j0[:X ~;Qㅋt >z/fʒ"Z x Zp;+e{/eP;,.&'Qk5q&pT(KLb} Sd›L17 jpaS! 35'+ZzQ TIIvt]K&⫢ #v5-|#4b3q:TA1pa*~9mm34銗bg1KB[Y&[)H V*Q Ua?SE'p>vX`3qBU( 8W0 Aw 9Kä5$ PeD)jeI2b!aC]zoPnIZ diͩdks|l2Rn6 Mf\ļ=XvYEEĢͪgY [A+M[XK52`%p7!?ڊ&aQ}6HH;8`ҤiI[-۬/0,>eE;ck;ٓ) C cc?f}p|61%M0*<ҭPK!JS xl/styles.xmlV[o0~`&P JR (RnT&I`՗6-t߱Bm>>ܢjÔq$ĈLL1\12Ȝp%i-7+J-ibVTs**aPZ S]Ҕm<Y FAeJTIJqf #MJ4I9P$ax2* (XFP1;ar+ս\%0`D>(S\id2@K$֘R͜ZAZ`}κǗd]~l- /NYպf1~90_.pzQo<{l:/ `oh|%?mn.ېbܙ}PGvL&?PK!۶4xl/worksheets/sheet1.xmlYےH}߈N ڱ;{y~f]*jCKTYY{v^Lv{}J.qO/ǥן/S*貏%YIi]-/~$,`KTU׹)ɢ_ YT"xɢb yqG~8qK\*AR$稂Sz-k,.//q]9=',oϠFqͿ4.2?T=sDm3gjOA*Οܠ>[U=9'q!O >P(J_ 9y)WL2'qYV qó{aCrx`j`0)cLF,8?ZY \E:-1 #w4<v`[IYmRn[KY?JVI>x,79x$.th4O'?P{7&S t熲|G=78&kwMGͣ=w' 193{h29/]5#YmnSA /*Uw<*eu×dUjQoCV׈uWw1/?&/1HYy<tl wo ʳKMq%ő_V KnCQq<&K}w*NF䷨8:'`%⒔+FWpY;-x3u3Ir"U~yQQZ pn *P1OKEuPK!TKKLqdocProps/core.xml (QO M -KFnC[JKۭl>9*-+k!(k!:C"FuL V 2E5嵁'Sk0N p04b_@q,&!Ȉr:0g{ݏrޮU}"Pދ{ݔ f mq[s q}{Z}\j'XD<O<O<O<O2Dv'o"|gOD)Wb?]_>Z̿PK!$docProps/app.xml (AO0Hw UѪ@ZDL Ƕxl/_rels/workbook.xml.relsPK-!XH xl/workbook.xmlPK-!*+ z xl/sharedStrings.xmlPK-!;m2KB# xl/worksheets/_rels/sheet1.xml.relsPK-!bmxl/theme/theme1.xmlPK-!JS Lxl/styles.xmlPK-!۶4xl/worksheets/sheet1.xmlPK-!TKKLqdocProps/core.xmlPK-!E)'r"xl/printerSettings/printerSettings1.binPK-!$%docProps/app.xmlPK &(pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/0000755000175000017500000000000011757531667020363 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-condorio-local/0000755000175000017500000000000011757531667023700 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-condorio-local/test.config0000755000175000017500000000031211757531137026035 0ustar ryngerynge[all] max_wall_time=30 file_url=file:// file_site=local executable_url=file:// executable_site=condorpool execution_site=local staging_site=local output_site=local planner_args=--cluster horizontal pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-condorio-local/pegasusrc0000755000175000017500000000031211757531137025606 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink = false pegasus.data.configuration = 
condorio pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/run-bamboo-test0000755000175000017500000001067311757531137023326 0ustar ryngerynge#!/bin/bash set -e set -v TOPDIR=`pwd` CONFIG=`basename $1` DAX=blackdiamond.dax # Unique directory for this run RUN_ID=`/bin/date +'%Y%m%d_%H%M%S%N'` # Read a property from ${CONFIG}/test.config file properties () { eval $1=\"`grep "^[\s]*[^#]*$2\s*=" ${CONFIG}/test.config | cut -d"=" -f2 | sed -e 's/^\s*//g' -e 's/\s*$//g'`\" local i=\$$1 eval local temp=$i # If property not set or is empty, then check if default value is provided. If Yes set property to default value. if [[ -z $temp && ! -z $3 ]]; then eval $1=$3 fi } # Read the physical directory where the input file is located. properties input_file input_file if [ -z ${input_file} ]; then input_file='./f.a' else mkdir -p ${input_file}/$USER/inputs input_file=${input_file}/$USER/inputs/f.a fi # generate the input file echo "This is sample input to KEG" > ${input_file} # output directory mkdir -p outputs mkdir -p staging-site # build the dax generator export PYTHONPATH=`pegasus-config --python` ./blackdiamond.py /usr ${CONFIG} > ${DAX} # create the site catalog cat > sites.xml << EOF 1 condor vanilla Yes ON_EXIT /usr condor vanilla EOF # plan and submit the workflow properties execution_site execution_site properties staging_site staging_site properties output_site output_site properties planner_args planner_args properties clean_up clean_up '--nocleanup' if [ "$clean_up" = 'True' ]; then clean_up='' fi set -x pegasus-plan \ --conf ${CONFIG}/pegasusrc \ --sites $execution_site \ --staging-site $staging_site \ --output $output_site \ --dir work/ \ $clean_up \ --dax ${DAX} \ --submit ${planner_args} set +x pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-condorio/0000755000175000017500000000000011757531667022610 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-condorio/test.config0000755000175000017500000000025511757531137024753 0ustar ryngerynge[all] max_wall_time=30 file_url=file:// file_site=local executable_url=file:// executable_site=condorpool execution_site=condorpool staging_site=local output_site=local pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-condorio/pegasusrc0000755000175000017500000000042711757531137024525 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink = false pegasus.data.configuration = CondorIO pegasus.transfer.worker.package = False pegasus.monitord.notifications = False pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/sharedfs-symlink-nogridstart/0000755000175000017500000000000011757531667026204 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/blackdiamond/sharedfs-symlink-nogridstart/test.config0000755000175000017500000000034611757531137030350 0ustar ryngerynge[all] max_wall_time=30 file_url=file:// file_site=sharedfs executable_url=file:// executable_site=sharedfs execution_site=sharedfs staging_site=sharedfs output_site=sharedfs input_file = /nfs/ccg3/scratch #planner_args=-vvvvv pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/sharedfs-symlink-nogridstart/pegasusrc0000755000175000017500000000041011757531137030111 0ustar ryngeryngepegasus.catalog.site = XML3 pegasus.catalog.site.file = sites.xml pegasus.dir.useTimestamp = true pegasus.dir.storage.deep = false pegasus.condor.logs.symlink = false pegasus.transfer.links = True pegasus.data.configuration = sharedfs pegasus.gridstart = None 
pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/run-test0000755000175000017500000000041211757531137022057 0ustar ryngerynge#!/bin/bash set -e set -v ./run-bamboo-test $1 # wait for workflow to finish ../../common/wait-for-workflow # check success with pegasus-analyzer - does it use exit codes correctly? cd work/*/*/*/*/ pegasus-analyzer # show some stats pegasus-statistics `pwd` pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/README0000644000175000017500000000053611757531137021237 0ustar ryngeryngeBlackDiamond Workflow pl-condorio Pegasus Lite in condor IO mode. pl-condorio-local Pegasus Lite in CondorIO mode on local site. pl-pt Pegasus Lite in Pegasus Transfer mode pl-pt-local Pegasus Lite in Pegasus Transfer mode on local site. sharedfs-symlink-nogridstart SharedFS mode, with symlinking enabled and kickstart disabled. pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/blackdiamond.py0000755000175000017500000000666311757531137023353 0ustar ryngerynge#!/usr/bin/env python from Pegasus.DAX3 import * import sys import os import ConfigParser if len(sys.argv) != 3: print "Usage: %s PEGASUS_HOME" % (sys.argv[0]) sys.exit(1) config = ConfigParser.ConfigParser({'input_file': '', 'workflow_name': 'diamond', 'executable_installed':"False"}) config.read(sys.argv [2] + '/test.config') # Create a abstract dag diamond = ADAG(config.get('all', 'workflow_name')) diamond.invoke ('all', os.getcwd() + "/my-notify.sh") input_file = config.get('all', 'input_file') if (input_file == ''): input_file = os.getcwd () else: input_file += '/' + os.getenv ('USER') + '/inputs' # Add input file to the DAX-level replica catalog a = File("f.a") a.addPFN(PFN(config.get('all', 'file_url') + input_file + "/f.a", config.get('all', 'file_site'))) diamond.addFile(a) # Add executables to the DAX-level replica catalog # In this case the binary is pegasus-keg, which is shipped with Pegasus, so we use # the remote PEGASUS_HOME to build the path. 
e_preprocess = Executable(namespace="diamond", name="preprocess", version="4.0", os="linux", arch="x86", installed=config.getboolean('all', 'executable_installed')) e_preprocess.addPFN(PFN(config.get('all', 'executable_url') + sys.argv[1] + "/bin/pegasus-keg", config.get('all', 'executable_site'))) diamond.addExecutable(e_preprocess) e_findrange = Executable(namespace="diamond", name="findrange", version="4.0", os="linux", arch="x86", installed=config.getboolean('all', 'executable_installed')) e_findrange.addPFN(PFN(config.get('all', 'executable_url') + sys.argv[1] + "/bin/pegasus-keg", config.get('all', 'executable_site'))) diamond.addExecutable(e_findrange) e_analyze = Executable(namespace="diamond", name="analyze", version="4.0", os="linux", arch="x86", installed=config.getboolean('all', 'executable_installed')) e_analyze.addPFN(PFN(config.get('all', 'executable_url') + sys.argv[1] + "/bin/pegasus-keg", config.get('all', 'executable_site'))) diamond.addExecutable(e_analyze) # Add a preprocess job preprocess = Job(namespace="diamond", name="preprocess", version="4.0") b1 = File("f.b1") b2 = File("f.b2") preprocess.addArguments("-a preprocess","-T5","-i",a,"-o",b1,b2) preprocess.uses(a, link=Link.INPUT) preprocess.uses(b1, link=Link.OUTPUT) preprocess.uses(b2, link=Link.OUTPUT) diamond.addJob(preprocess) # Add left Findrange job frl = Job(namespace="diamond", name="findrange", version="4.0") c1 = File("f.c1") frl.addArguments("-a findrange","-T5","-i",b1,"-o",c1) frl.uses(b1, link=Link.INPUT) frl.uses(c1, link=Link.OUTPUT) diamond.addJob(frl) # Add right Findrange job frr = Job(namespace="diamond", name="findrange", version="4.0") c2 = File("f.c2") frr.addArguments("-a findrange","-T5","-i",b2,"-o",c2) frr.uses(b2, link=Link.INPUT) frr.uses(c2, link=Link.OUTPUT) diamond.addJob(frr) # Add Analyze job analyze = Job(namespace="diamond", name="analyze", version="4.0") d = File("f.d") analyze.addArguments("-a analyze","-T5","-i",c1,c2,"-o",d) analyze.uses(c1, link=Link.INPUT) analyze.uses(c2, link=Link.INPUT) analyze.uses(d, link=Link.OUTPUT, register=True) diamond.addJob(analyze) analyze.invoke ('at_end', os.getcwd() + "/my-notify.sh") # Add control-flow dependencies diamond.addDependency(Dependency(parent=preprocess, child=frl)) diamond.addDependency(Dependency(parent=preprocess, child=frr)) diamond.addDependency(Dependency(parent=frl, child=analyze)) diamond.addDependency(Dependency(parent=frr, child=analyze)) # Write the DAX to stdout diamond.writeXML(sys.stdout) pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/sharedfs-worker-staging-cleanup/0000755000175000017500000000000011757531667026550 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/blackdiamond/sharedfs-worker-staging-cleanup/test.config0000755000175000017500000000036711757531137030717 0ustar ryngerynge[all] max_wall_time=30 file_url=file:// file_site=sharedfs executable_url=file:// executable_site=sharedfs execution_site=sharedfs staging_site=sharedfs output_site=sharedfs input_file = /nfs/ccg3/scratch clean_up = true #planner_args=-vvvvv pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/sharedfs-worker-staging-cleanup/pegasusrc0000755000175000017500000000044611757531137030466 0ustar ryngeryngepegasus.catalog.site = XML3 pegasus.catalog.site.file = sites.xml pegasus.dir.useTimestamp = true pegasus.dir.storage.deep = false pegasus.condor.logs.symlink = false pegasus.data.configuration = sharedfs pegasus.transfer.worker.package = True pegasus.catalog.transformation.mapper = Staged 
pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-pt-local/0000755000175000017500000000000011757531667022507 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-pt-local/test.config0000755000175000017500000000030511757531137024646 0ustar ryngerynge[all] max_wall_time=30 file_url=file:// file_site=local executable_url=file:// executable_site=condorpool execution_site=local staging_site=cartman-data output_site=local #planner_args=-vvvvvv pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-pt-local/pegasusrc0000755000175000017500000000032311757531137024417 0ustar ryngeryngepegasus.catalog.site = XML3 pegasus.catalog.site.file = sites.xml pegasus.dir.useTimestamp = true pegasus.dir.storage.deep = false pegasus.condor.logs.symlink = false pegasus.data.configuration = nonsharedfs pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-pt/0000755000175000017500000000000011757531667021417 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-pt/test.config0000755000175000017500000000031211757531137023554 0ustar ryngerynge[all] max_wall_time=30 file_url=file:// file_site=local executable_url=file:// executable_site=condorpool execution_site=condorpool staging_site=cartman-data output_site=local #planner_args=-vvvvvv pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/pl-pt/pegasusrc0000755000175000017500000000036411757531137023334 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink = false pegasus.data.configuration = nonsharedfs pegasus.transfer.worker.package = False pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/vanilla-condor/0000755000175000017500000000000011757531667023273 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/blackdiamond/vanilla-condor/test.config0000755000175000017500000000025511757531137025436 0ustar ryngerynge[all] max_wall_time=30 file_url=file:// file_site=local executable_url=file:// executable_site=condorpool execution_site=condorpool staging_site=local output_site=local pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/vanilla-condor/pegasusrc0000755000175000017500000000031111757531137025200 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink = false pegasus.data.configuration = CondorIO pegasus-wms_4.0.1+dfsg/test/core/blackdiamond/my-notify.sh0000755000175000017500000000021111757531137022637 0ustar ryngerynge#!/bin/bash echo $PEGASUS_EVENT_TIMESTAMP_ISO $PEGASUS_JOBID $PEGASUS_EVENT $PEGASUS_STATUS >> $PEGASUS_SUBMIT_DIR/notifications-output pegasus-wms_4.0.1+dfsg/test/core/000-env/0000755000175000017500000000000011757531667017040 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/000-env/run-test0000755000175000017500000000003311757531137020533 0ustar ryngerynge#!/bin/bash /usr/bin/env pegasus-wms_4.0.1+dfsg/test/core/000-env/test.config0000644000175000017500000000002311757531137021171 0ustar ryngeryngemax_wall_time = 5 pegasus-wms_4.0.1+dfsg/test/core/004-montage-staging-site/0000755000175000017500000000000011757531667022302 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/004-montage-staging-site/run-bamboo-test0000755000175000017500000000773211757531137025247 0ustar ryngerynge#!/bin/bash set -e ####################################################################### # # Settings # DEGREES=0.5 ####################################################################### TOP_DIR=`pwd` export 
PATH=/ccg/software/montage/Montage_v3.3_mats/bin:$PATH # unique directory for this run RUN_ID=`/bin/date +'%F_%H%M%S'` RUN_DIR=`pwd`/work/$RUN_ID echo "Work directory: $RUN_DIR" mkdir -p $RUN_DIR/inputs cd $RUN_DIR cp $TOP_DIR/pegasusrc . # create the transformation catalogue (tc) echo echo "Creating the transformation catalog..." for BINARY in `(cd /ccg/software/montage/Montage_v3.3_mats/bin/ && ls)`; do cat >>tc <sites.xml < /ccg/software/globus/default 40000,50000 condor (TARGET.FileSystemDomain =!= "") Yes ON_EXIT /ccg/software/montage/Montage_v3.3_mats EOF echo echo "Running mDAG (finding input images, generating DAX, ...)..." mDAG 2mass j M17 $DEGREES $DEGREES 0.0002777778 . "file://$RUN_DIR" "file://$RUN_DIR/inputs" echo echo "Adding input images to the replica catalog..." echo " " `cat cache.list | wc -l` "images found" cat cache.list | grep -v ".fits " >rc perl -p -i -e 's/ipac_cluster/local/' rc cat url.list | sed 's/ http:.*ref=/ http:\/\/obelix.isi.edu\/irsa-cache/' >>rc echo "Planning and submitting the workflow..." pegasus-plan \ --conf pegasusrc \ --sites CCG \ --staging-site CCGData \ --dir work \ --output local \ --nocleanup \ --dax dag.xml \ --cluster horizontal \ --submit | tee $TOP_DIR/plan.out pegasus-wms_4.0.1+dfsg/test/core/004-montage-staging-site/pegasusrc0000644000175000017500000000054611757531137024216 0ustar ryngerynge pegasus.catalog.replica=SimpleFile pegasus.catalog.replica.file=rc pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.catalog.transformation=Text pegasus.catalog.transformation.file=tc pegasus.dir.useTimestamp=true pegasus.condor.logs.symlink = false pegasus.transfer.worker.package = true pegasus.data.configuration = nonsharedfs pegasus-wms_4.0.1+dfsg/test/core/004-montage-condor-io/0000755000175000017500000000000011757531667021575 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/004-montage-condor-io/run-bamboo-test0000755000175000017500000000643711757531137024543 0ustar ryngerynge#!/bin/bash set -e ####################################################################### # # Settings # DEGREES=0.5 ####################################################################### TOP_DIR=`pwd` export PATH=/ccg/software/montage/Montage_v3.3_mats/bin:$PATH # unique directory for this run RUN_ID=`/bin/date +'%F_%H%M%S'` RUN_DIR=`pwd`/work/$RUN_ID echo "Work directory: $RUN_DIR" mkdir -p $RUN_DIR/inputs cd $RUN_DIR cp $TOP_DIR/pegasusrc . # create the transformation catalogue (tc) echo echo "Creating the transformation catalog..." for BINARY in `(cd /ccg/software/montage/Montage_v3.3_mats/bin/ && ls)`; do cat >>tc <sites.xml < /ccg/software/globus/default 40000,50000 condor /ccg/software/montage/Montage_v3.3_mats /usr EOF echo echo "Running mDAG (finding input images, generating DAX, ...)..." mDAG 2mass j M17 $DEGREES $DEGREES 0.0002777778 . "file://$RUN_DIR" "file://$RUN_DIR/inputs" echo echo "Adding input images to the replica catalog..." echo " " `cat cache.list | wc -l` "images found" cat cache.list | grep -v ".fits " >rc perl -p -i -e 's/ipac_cluster/local/' rc cat url.list | sed 's/ http:.*ref=/ http:\/\/obelix.isi.edu\/irsa-cache/' >>rc echo "Planning and submitting the workflow..." 
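# condorio staging: data moves through the submit host via Condor file
# transfers (pegasus.data.configuration = condorio in this test's pegasusrc),
# so unlike the 004-montage-staging-site variant above, no --staging-site
# flag is passed to the plan below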
pegasus-plan \ --conf pegasusrc \ --sites CCG \ --dir work \ --output local \ --nocleanup \ --dax dag.xml \ --cluster horizontal \ --submit | tee $TOP_DIR/plan.out pegasus-wms_4.0.1+dfsg/test/core/004-montage-condor-io/pegasusrc0000644000175000017500000000047311757531137023510 0ustar ryngerynge pegasus.catalog.replica=SimpleFile pegasus.catalog.replica.file=rc pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.catalog.transformation=Text pegasus.catalog.transformation.file=tc pegasus.dir.useTimestamp=true pegasus.data.configuration = condorio pegasus.condor.logs.symlink = false pegasus-wms_4.0.1+dfsg/test/core/001-black-diamond-vanilla-condor/0000755000175000017500000000000011757531667023644 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/001-black-diamond-vanilla-condor/run-bamboo-test0000755000175000017500000000401311757531137026576 0ustar ryngerynge#!/bin/bash set -e set -v TOPDIR=`pwd` # generate the input file echo "This is sample input to KEG" >f.a # output directory mkdir -p outputs # build the dax generator export CLASSPATH=.:`pegasus-config --classpath` javac BlackDiamondDAX.java # generate the dax java BlackDiamondDAX /usr blackdiamond.dax # create the site catalog cat >sites.xml < condor vanilla /usr EOF # plan and submit the workflow pegasus-plan \ --conf pegasusrc \ --sites condorpool \ --staging-site local \ --dir work \ --output local \ --nocleanup \ --dax blackdiamond.dax \ --submit | tee plan.out #../../common/wait-for-workflow # check success with pegasus-analyzer - does it use exit codes correctly? #cd work/*/*/*/*/ #pegasus-analyzer # show some stats #pegasus-statistics `pwd` pegasus-wms_4.0.1+dfsg/test/core/001-black-diamond-vanilla-condor/run-test0000755000175000017500000000376611757531137025357 0ustar ryngerynge#!/bin/bash set -e set -v TOPDIR=`pwd` # generate the input file echo "This is sample input to KEG" >f.a # output directory mkdir -p outputs # build the dax generator export CLASSPATH=.:`pegasus-config --classpath` javac BlackDiamondDAX.java # generate the dax java BlackDiamondDAX /usr blackdiamond.dax # create the site catalog cat >sites.xml < condor vanilla /usr EOF # plan and submit the workflow pegasus-plan \ --conf pegasusrc \ --sites condorpool \ --staging-site local \ --dir work \ --output local \ --nocleanup \ --dax blackdiamond.dax \ --submit ../../common/wait-for-workflow # check success with pegasus-analyzer - does it use exit codes correctly? cd work/*/*/*/*/ pegasus-analyzer # show some stats pegasus-statistics `pwd` pegasus-wms_4.0.1+dfsg/test/core/001-black-diamond-vanilla-condor/test.config0000644000175000017500000000002411757531137025776 0ustar ryngeryngemax_wall_time = 30 pegasus-wms_4.0.1+dfsg/test/core/001-black-diamond-vanilla-condor/BlackDiamondDAX.java0000644000175000017500000001031411757531137027343 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import edu.isi.pegasus.planner.dax.*; public class BlackDiamondDAX { /** * Create an example DIAMOND DAX * @param args */ public static void main(String[] args) { if (args.length != 2) { System.out.println("Usage: java ADAG "); System.exit(1); } try { Diamond(args[0]).writeToFile(args[1]); } catch (Exception e) { e.printStackTrace(); } } private static ADAG Diamond(String pegasus_location) throws Exception { java.io.File cwdFile = new java.io.File ("."); String cwd = cwdFile.getCanonicalPath(); ADAG dax = new ADAG("blackdiamond"); File fa = new File("f.a"); fa.addPhysicalFile("file://" + cwd + "/f.a", "local"); dax.addFile(fa); File fb1 = new File("f.b1"); File fb2 = new File("f.b2"); File fc1 = new File("f.c1"); File fc2 = new File("f.c2"); File fd = new File("f.d"); fd.setRegister(true); Executable preprocess = new Executable("pegasus", "preprocess", "4.0"); preprocess.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); preprocess.setInstalled(true); preprocess.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "condorpool"); Executable findrange = new Executable("pegasus", "findrange", "4.0"); findrange.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); findrange.setInstalled(true); findrange.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "condorpool"); Executable analyze = new Executable("pegasus", "analyze", "4.0"); analyze.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); analyze.setInstalled(true); analyze.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "condorpool"); dax.addExecutable(preprocess).addExecutable(findrange).addExecutable(analyze); // Add a preprocess job Job j1 = new Job("j1", "pegasus", "preprocess", "4.0"); j1.addArgument("-a preprocess -T 60 -i ").addArgument(fa); j1.addArgument("-o ").addArgument(fb1); j1.addArgument(" ").addArgument(fb2); j1.uses(fa, File.LINK.INPUT); j1.uses(fb1, File.LINK.OUTPUT); j1.uses(fb2, File.LINK.OUTPUT); dax.addJob(j1); // Add left Findrange job Job j2 = new Job("j2", "pegasus", "findrange", "4.0"); j2.addArgument("-a findrange -T 60 -i ").addArgument(fb1); j2.addArgument("-o ").addArgument(fc1); j2.uses(fb1, File.LINK.INPUT); j2.uses(fc1, File.LINK.OUTPUT); dax.addJob(j2); // Add right Findrange job Job j3 = new Job("j3", "pegasus", "findrange", "4.0"); j3.addArgument("-a findrange -T 60 -i ").addArgument(fb2); j3.addArgument("-o ").addArgument(fc2); j3.uses(fb2, File.LINK.INPUT); j3.uses(fc2, File.LINK.OUTPUT); dax.addJob(j3); // Add analyze job Job j4 = new Job("j4", "pegasus", "analyze", "4.0"); j4.addArgument("-a analyze -T 60 -i ").addArgument(fc1); j4.addArgument(" ").addArgument(fc2); j4.addArgument("-o ").addArgument(fd); j4.uses(fc1, File.LINK.INPUT); j4.uses(fc2, File.LINK.INPUT); j4.uses(fd, File.LINK.OUTPUT); dax.addJob(j4); dax.addDependency("j1", "j2"); dax.addDependency("j1", "j3"); dax.addDependency("j2", "j4"); dax.addDependency("j3", "j4"); return dax; } } pegasus-wms_4.0.1+dfsg/test/core/001-black-diamond-vanilla-condor/pegasusrc0000644000175000017500000000031011757531137025545 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink = false pegasus.data.configuration = Condor pegasus-wms_4.0.1+dfsg/test/core/011-rosetta-staging-site/0000755000175000017500000000000011757531667022327 5ustar 
ryngeryngepegasus-wms_4.0.1+dfsg/test/core/011-rosetta-staging-site/run-bamboo-test0000755000175000017500000000671611757531137025275 0ustar ryngerynge#!/bin/bash set -e TOP_DIR=`pwd` # download rosetta binary - this is to save space in the Pegasus distribution if [ ! -e rosetta.exe ]; then wget -q http://pegasus.isi.edu/wms/example-workflows/rosetta/rosetta.exe chmod 755 rosetta.exe fi # do we have the required minirosetta_database? if [ ! -e minirosetta_database ]; then wget -q http://pegasus.isi.edu/wms/example-workflows/rosetta/minirosetta_database.tar.gz tar xzf minirosetta_database.tar.gz rm minirosetta_database.tar.gz fi # what about the required pdbs? if [ ! -e pdbs ]; then wget -q http://pegasus.isi.edu/wms/example-workflows/rosetta/pdbs.tar.gz tar xzf pdbs.tar.gz rm pdbs.tar.gz fi # figure out where Pegasus is installed export PEGASUS_BIN_DIR=`pegasus-config --bin` if [ "x$PEGASUS_BIN_DIR" = "x" ]; then echo "Please make sure pegasus-plan is in your path" exit 1 fi # build the dax generator export CLASSPATH=.:`pegasus-config --classpath` javac RosettaDAX.java # generate the dax java RosettaDAX dax.xml # site catalog cat >sites.xml < $PEGASUS_BIN_DIR /opt/globus/5.0.3 EOF echo echo echo "Planning and submitting the workflow..." pegasus-plan \ --conf pegasusrc \ --dir work \ --dax dax.xml \ --sites CCG \ --staging-site CCG_Staging \ --output local \ --submit | tee $TOP_DIR/plan.out pegasus-wms_4.0.1+dfsg/test/core/011-rosetta-staging-site/design.resfile0000644000175000017500000000133711757531137025147 0ustar ryngerynge#karyopherin flex bb design protocol karyopherin_design_topsail_peptext.cc gen 2 resfile ALLAAxc EX 1 EX 2 USE_INPUT_SC start #fix aa for peptide, fix rotamer for conserved aa at the Cterm of peptide (KRXKLX) 1 A PIKAA D 2 A PIKAA E 3 A PIKAA I 4 A PIKAA M 5 A PIKAA K 6 A PIKAA E 7 A PIKAA I 8 A PIKAA E 9 A PIKAA R 10 A PIKAA E 11 A PIKAA S # 12 A PIKAA K 12 A NATRO # 13 A PIKAA R 13 A NATRO 14 A PIKAA I # 15 A PIKAA K 15 A NATRO # 16 A PIKAA L 16 A NATRO 17 A PIKAA N #fix rotamer for the residues on the protein that is interacting with the conserved residues on the peptide 84 C NATRO 88 C NATRO 97 C NATRO 123 C NATRO 126 C NATRO 130 C NATRO 134 C NATRO 168 C NATRO 207 C NATRO pegasus-wms_4.0.1+dfsg/test/core/011-rosetta-staging-site/repack.resfile0000644000175000017500000000124511757531137025141 0ustar ryngerynge#karyopherin flex bb design protocol karyopherin_design_topsail_peptext.cc gen 2 REPACK resfile NATAA EX 1 EX 2 USE_INPUT_SC start #fix aa for peptide, fix rotamer for conserved aa at the Cterm of peptide (KRXKLX) 1 A PIKAA D 2 A PIKAA E 3 A PIKAA I 4 A PIKAA M 5 A PIKAA K 6 A PIKAA E 7 A PIKAA I 8 A PIKAA E 9 A PIKAA R 10 A PIKAA E 11 A PIKAA S 12 A NATRO 13 A NATRO 14 A PIKAA I 15 A NATRO 16 A NATRO 17 A PIKAA N #fix rotamer for the residues on the protein that is interacting with the conserved residues on the peptide 84 C NATRO 88 C NATRO 97 C NATRO 123 C NATRO 126 C NATRO 130 C NATRO 134 C NATRO 168 C NATRO 207 C NATRO pegasus-wms_4.0.1+dfsg/test/core/011-rosetta-staging-site/RosettaDAX.java0000644000175000017500000001402511757531137025142 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.lang.Math; import java.util.ArrayList; import java.util.List; import edu.isi.pegasus.planner.dax.*; public class RosettaDAX { public void constructDAX(String daxfile){ try{ java.io.File cwdFile = new java.io.File ("."); String cwd = cwdFile.getCanonicalPath(); // construct a dax object ADAG dax = new ADAG("rosetta"); // executables and transformations // including this in the dax is a new feature in // 3.0. Earlier you had a standalone transformation catalog Executable exe = new Executable("rosetta.exe"); // the executable is not installed on the remote sites, so // pick it up from the local file system exe.setInstalled(false); exe.addPhysicalFile("file://" + cwd + "/rosetta.exe", "local"); // cluster the jobs together to lessen the grid overhead //exe.addProfile("pegasus", "clusters.size", "3"); // the dag needs to know about the executable to handle // transferring dax.addExecutable(exe); // all jobs depend on the flatfile databases List<File> inputs = new ArrayList<File>(); recursiveAddToFileCollection(inputs, "minirosetta_database", "Rosetta Database"); dax.addFiles(inputs); // for replica catalog // and some top level files File f1 = new File("design.resfile", File.LINK.INPUT); f1.addPhysicalFile("file://" + cwd + "/design.resfile", "local"); dax.addFile(f1); inputs.add(f1); // dependency for the job File f2 = new File("repack.resfile", File.LINK.INPUT); f2.addPhysicalFile("file://" + cwd + "/repack.resfile", "local"); dax.addFile(f2); inputs.add(f2); // dependency for the job java.io.File pdbDir = new java.io.File("pdbs/"); String pdbs[] = pdbDir.list(); for (int i = 0; i < Math.min(10, pdbs.length); i++) { java.io.File pdb = new java.io.File("pdbs/" + pdbs[i]); if (pdb.isFile()) { Job j = createJobFromPDB(dax, pdb, inputs); dax.addJob(j); } } //write DAX to file dax.writeToFile(daxfile); } catch (Exception e) { e.printStackTrace(); } } /* * This adds all the files in a directory to a set which can be used for job * data dependencies */ private void recursiveAddToFileCollection(List<File> list, String dir, String desc) { try { java.io.File d = new java.io.File(dir); String items[] = d.list(); for (int i = 0; i < items.length; i++) { if (items[i].substring(0,1).equals(".")) { continue; } java.io.File f = new java.io.File(dir + "/" + items[i]); if (f.isFile()) { // File found, let's add it to the list File input = new File(dir + "/" + items[i], File.LINK.INPUT); input.addPhysicalFile("file://" + f.getAbsolutePath(), "local"); list.add(input); } else { recursiveAddToFileCollection(list, f.getPath(), desc); } } } catch (Exception e) { e.printStackTrace(); } } private Job createJobFromPDB(ADAG dax, java.io.File pdb, List<File> inputs) { Job job = null; try { String id = pdb.getName(); id = id.replaceAll("\\.pdb$", ""); // strip the literal extension job = new Job(id, "rosetta.exe"); // general rosetta inputs (database, design, ...)
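// passing the whole input list to uses() registers every database and
// resfile entry as a stage-in dependency of this job in a single call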
job.uses(inputs, File.LINK.INPUT); // input pdb file File pdbFile = new File(pdb.getName()); pdbFile.addPhysicalFile("file://" + pdb.getAbsolutePath(), "local"); job.uses(pdbFile, File.LINK.INPUT); // the job uses the file dax.addFile(pdbFile); // the dax needs to know about it to handle transfers // outputs File outFile = new File(pdb.getName() + ".score.sc"); job.uses(outFile, File.LINK.OUTPUT); // the job uses the file // add the arguments to the job job.addArgument(" -in:file:s "); job.addArgument(pdbFile); job.addArgument(" -out:prefix " + pdb.getName() + "."); job.addArgument(" -database ./minirosetta_database"); job.addArgument(" -linmem_ig 10"); job.addArgument(" -nstruct 1"); job.addArgument(" -pert_num 2"); job.addArgument(" -inner_num 1"); job.addArgument(" -jd2::ntrials 1"); } catch (Exception e) { e.printStackTrace(); return null; } return job; } /** * Usage : RosettaDAX daxfile * * @param args the arguments passed */ public static void main(String[] args) { RosettaDAX daxgen = new RosettaDAX(); if (args.length == 1) { daxgen.constructDAX(args[0]); } else { System.out.println("Usage: RosettaDAX "); } } } pegasus-wms_4.0.1+dfsg/test/core/011-rosetta-staging-site/pegasusrc0000644000175000017500000000032111757531137024232 0ustar ryngerynge pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.data.configuration = nonsharedfs pegasus.transfer.worker.package = true pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus-wms_4.0.1+dfsg/test/core/012-blackdiamond-invoke/0000755000175000017500000000000011757531667022154 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/012-blackdiamond-invoke/run-bamboo-test0000755000175000017500000000506611757531137025117 0ustar ryngerynge#!/bin/bash set -e set -v if [ X${testdir} = "X" ]; then testdir=`dirname $0` export testdir fi TOPDIR=`pwd` # generate the input file echo "This is sample input to KEG" >f.a # output directory mkdir -p outputs # build the dax generator export CLASSPATH=$testdir:`pegasus-config --classpath` javac $testdir/BlackDiamondDAX.java # generate the dax java BlackDiamondDAX /usr blackdiamond.dax # create the site catalog cat >sites.xml < 1 condor vanilla /usr EOF # plan and submit the workflow pegasus-plan \ --conf $testdir/pegasusrc \ --sites condorpool \ --dir work \ --output local \ --cluster horizontal \ --dax blackdiamond.dax \ --submit | tee plan.out pegasus-wms_4.0.1+dfsg/test/core/012-blackdiamond-invoke/run-test0000755000175000017500000000047511757531137023661 0ustar ryngerynge#!/bin/bash set -e set -v testdir=`dirname $0` export testdir $testdir/run-bamboo-test # wait for workflow to finish $testdir/../../common/wait-for-workflow # check success with pegasus-analyzer - does it use exit codes correctly? cd work/*/*/*/*/ pegasus-analyzer # show some stats pegasus-statistics `pwd` pegasus-wms_4.0.1+dfsg/test/core/012-blackdiamond-invoke/README0000644000175000017500000000051011757531137023020 0ustar ryngeryngeTEST DESCRIPTION - This test runs a blackdiamond workflow on shared fs with invoke functionality of kickstart turned on - Clustering and staging of executables both is turned on . PURPOSE - To make sure that Condor IO issues dont trip the transfer of the .arg files. 
Associated JIRA Item - https://jira.isi.edu/browse/PM-526 pegasus-wms_4.0.1+dfsg/test/core/012-blackdiamond-invoke/test.config0000644000175000017500000000002411757531137024306 0ustar ryngeryngemax_wall_time = 30 pegasus-wms_4.0.1+dfsg/test/core/012-blackdiamond-invoke/BlackDiamondDAX.java0000644000175000017500000001032511757531137025655 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import edu.isi.pegasus.planner.dax.*; public class BlackDiamondDAX { /** * Create an example DIAMOND DAX * @param args */ public static void main(String[] args) { if (args.length != 2) { System.out.println("Usage: java ADAG "); System.exit(1); } try { Diamond(args[0]).writeToFile(args[1]); } catch (Exception e) { e.printStackTrace(); } } private static ADAG Diamond(String pegasus_location) throws Exception { java.io.File cwdFile = new java.io.File ("."); String cwd = cwdFile.getCanonicalPath(); ADAG dax = new ADAG("blackdiamond"); File fa = new File("f.a"); fa.addPhysicalFile("file://" + cwd + "/f.a", "local"); dax.addFile(fa); File fb1 = new File("f.b1"); File fb2 = new File("f.b2"); File fc1 = new File("f.c1"); File fc2 = new File("f.c2"); File fd = new File("f.d"); fd.setRegister(true); Executable preprocess = new Executable("pegasus", "preprocess", "4.0"); preprocess.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); preprocess.setInstalled( false ); preprocess.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "condorpool"); Executable findrange = new Executable("pegasus", "findrange", "4.0"); findrange.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); findrange.setInstalled( false ); findrange.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "condorpool"); Executable analyze = new Executable("pegasus", "analyze", "4.0"); analyze.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); analyze.setInstalled( false ); analyze.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "condorpool"); dax.addExecutable(preprocess).addExecutable(findrange).addExecutable(analyze); // Add a preprocess job Job j1 = new Job("j1", "pegasus", "preprocess", "4.0"); j1.addArgument("-a preprocess -T 60 -i ").addArgument(fa); j1.addArgument("-o ").addArgument(fb1); j1.addArgument(" ").addArgument(fb2); j1.uses(fa, File.LINK.INPUT); j1.uses(fb1, File.LINK.OUTPUT); j1.uses(fb2, File.LINK.OUTPUT); dax.addJob(j1); // Add left Findrange job Job j2 = new Job("j2", "pegasus", "findrange", "4.0"); j2.addArgument("-a findrange -T 60 -i ").addArgument(fb1); j2.addArgument("-o ").addArgument(fc1); j2.uses(fb1, File.LINK.INPUT); j2.uses(fc1, File.LINK.OUTPUT); dax.addJob(j2); // Add right Findrange job Job j3 = new Job("j3", "pegasus", "findrange", "4.0"); j3.addArgument("-a findrange -T 60 -i ").addArgument(fb2); j3.addArgument("-o ").addArgument(fc2); j3.uses(fb2, File.LINK.INPUT); j3.uses(fc2, File.LINK.OUTPUT); dax.addJob(j3); // Add analyze job Job j4 = new Job("j4", "pegasus", 
"analyze", "4.0"); j4.addArgument("-a analyze -T 60 -i ").addArgument(fc1); j4.addArgument(" ").addArgument(fc2); j4.addArgument("-o ").addArgument(fd); j4.uses(fc1, File.LINK.INPUT); j4.uses(fc2, File.LINK.INPUT); j4.uses(fd, File.LINK.OUTPUT); dax.addJob(j4); dax.addDependency("j1", "j2"); dax.addDependency("j1", "j3"); dax.addDependency("j2", "j4"); dax.addDependency("j3", "j4"); return dax; } } pegasus-wms_4.0.1+dfsg/test/core/012-blackdiamond-invoke/pegasusrc0000644000175000017500000000033611757531137024065 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink = false #turn on invoke always. pegasus.gridstart.invoke.length 2 pegasus-wms_4.0.1+dfsg/test/core/003-osg-srm-blackdiamond/0000755000175000017500000000000011757531667022250 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/003-osg-srm-blackdiamond/run-test0000755000175000017500000000463211757531137023754 0ustar ryngerynge#!/bin/bash set -e TOPDIR=`pwd` # remove previous runs rm -rf work # generate the input file echo "This is sample input to KEG" >f.a # generate the dax export PYTHONPATH=`pegasus-config --python` ./blackdiamond.py /usr >blackdiamond.dax # create the site catalog cat >sites.xml < $GLOBUS_LOCATION condor EOF # plan and submit the workflow pegasus-plan \ --conf pegasusrc \ --sites OSG \ --staging-site SRM \ --dir work \ --output local \ --nocleanup \ --dax blackdiamond.dax \ pegasus-wms_4.0.1+dfsg/test/core/003-osg-srm-blackdiamond/blackdiamond.py0000755000175000017500000000523211757531137025227 0ustar ryngerynge#!/usr/bin/env python from Pegasus.DAX3 import * import sys import os if len(sys.argv) != 2: print "Usage: %s PEGASUS_HOME" % (sys.argv[0]) sys.exit(1) # Create a abstract dag diamond = ADAG("diamond") # Add input file to the DAX-level replica catalog a = File("f.a") a.addPFN(PFN("file://" + os.getcwd() + "/f.a", "local")) diamond.addFile(a) # Add executables to the DAX-level replica catalog # In this case the binary is pegasus-keg, which is shipped with Pegasus, so we use # the remote PEGASUS_HOME to build the path. 
e_preprocess = Executable(namespace="diamond", name="preprocess", version="4.0", os="linux", arch="x86_64", installed=False) e_preprocess.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "local")) diamond.addExecutable(e_preprocess) e_findrange = Executable(namespace="diamond", name="findrange", version="4.0", os="linux", arch="x86_64", installed=False) e_findrange.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "local")) diamond.addExecutable(e_findrange) e_analyze = Executable(namespace="diamond", name="analyze", version="4.0", os="linux", arch="x86_64", installed=False) e_analyze.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "local")) diamond.addExecutable(e_analyze) # Add a preprocess job preprocess = Job(namespace="diamond", name="preprocess", version="4.0") b1 = File("f.b1") b2 = File("f.b2") preprocess.addArguments("-a preprocess","-T60","-i",a,"-o",b1,b2) preprocess.uses(a, link=Link.INPUT) preprocess.uses(b1, link=Link.OUTPUT) preprocess.uses(b2, link=Link.OUTPUT) diamond.addJob(preprocess) # Add left Findrange job frl = Job(namespace="diamond", name="findrange", version="4.0") c1 = File("f.c1") frl.addArguments("-a findrange","-T60","-i",b1,"-o",c1) frl.uses(b1, link=Link.INPUT) frl.uses(c1, link=Link.OUTPUT) diamond.addJob(frl) # Add right Findrange job frr = Job(namespace="diamond", name="findrange", version="4.0") c2 = File("f.c2") frr.addArguments("-a findrange","-T60","-i",b2,"-o",c2) frr.uses(b2, link=Link.INPUT) frr.uses(c2, link=Link.OUTPUT) diamond.addJob(frr) # Add Analyze job analyze = Job(namespace="diamond", name="analyze", version="4.0") d = File("f.d") analyze.addArguments("-a analyze","-T60","-i",c1,c2,"-o",d) analyze.uses(c1, link=Link.INPUT) analyze.uses(c2, link=Link.INPUT) analyze.uses(d, link=Link.OUTPUT, register=True) diamond.addJob(analyze) # Add control-flow dependencies diamond.addDependency(Dependency(parent=preprocess, child=frl)) diamond.addDependency(Dependency(parent=preprocess, child=frr)) diamond.addDependency(Dependency(parent=frl, child=analyze)) diamond.addDependency(Dependency(parent=frr, child=analyze)) # Write the DAX to stdout diamond.writeXML(sys.stdout) pegasus-wms_4.0.1+dfsg/test/core/003-osg-srm-blackdiamond/pegasusrc0000644000175000017500000000025011757531137024154 0ustar ryngerynge pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.execute.*.filesystem.local true pegasus.gridstart PegasusLite pegasus-wms_4.0.1+dfsg/test/core/009-black-diamond-pegasuslite-pegasustransfer/0000755000175000017500000000000011757531667026503 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/009-black-diamond-pegasuslite-pegasustransfer/run-bamboo-test0000755000175000017500000000447411757531137031450 0ustar ryngerynge#!/bin/bash set -e set -v if [ X${testdir} = "X" ]; then testdir=`dirname $0` export testdir fi TOPDIR=`pwd` # generate the input file echo "This is sample input to KEG" > f.a # output directory mkdir -p outputs mkdir -p staging-site # build the dax generator export PYTHONPATH=`pegasus-config --python` $testdir/blackdiamond.py /usr > blackdiamond.dax # create the site catalog cat > sites.xml < condor vanilla EOF # plan and submit the workflow pegasus-plan \ --conf $testdir/pegasusrc \ --sites condorpool \ --staging-site cartman-data \ --output local \ --dir work \ --nocleanup \ --dax blackdiamond.dax \ --submit | tee plan.out pegasus-wms_4.0.1+dfsg/test/core/009-black-diamond-pegasuslite-pegasustransfer/run-test0000755000175000017500000000047511757531137030210 0ustar 
ryngerynge#!/bin/bash set -e set -v testdir=`dirname $0` export testdir $testdir/run-bamboo-test # wait for workflow to finish $testdir/../../common/wait-for-workflow # check success with pegasus-analyzer - does it use exit codes correctly? cd work/*/*/*/*/ pegasus-analyzer # show some stats pegasus-statistics `pwd` pegasus-wms_4.0.1+dfsg/test/core/009-black-diamond-pegasuslite-pegasustransfer/README0000755000175000017500000000042311757531137027355 0ustar ryngeryngeTEST DESCRIPTION - This test runs a blackdiamond workflow in the Pegasus Lite mode on Condor pool. - Both staging of executables and worker package is turned on. PURPOSE - The purpose is to make sure the worker package staging works in Pegasus Lite pegasus-transfer mode. pegasus-wms_4.0.1+dfsg/test/core/009-black-diamond-pegasuslite-pegasustransfer/blackdiamond.py0000755000175000017500000000523411757531137031464 0ustar ryngerynge#!/usr/bin/env python from Pegasus.DAX3 import * import sys import os if len(sys.argv) != 2: print "Usage: %s PEGASUS_HOME" % (sys.argv[0]) sys.exit(1) # Create a abstract dag diamond = ADAG("diamond") # Add input file to the DAX-level replica catalog a = File("f.a") a.addPFN(PFN("file://" + os.getcwd() + "/f.a", "local")) diamond.addFile(a) # Add executables to the DAX-level replica catalog # In this case the binary is pegasus-keg, which is shipped with Pegasus, so we use # the remote PEGASUS_HOME to build the path. e_preprocess = Executable(namespace="diamond", name="preprocess", version="4.0", os="linux", arch="x86", installed=False) e_preprocess.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "condorpool")) diamond.addExecutable(e_preprocess) e_findrange = Executable(namespace="diamond", name="findrange", version="4.0", os="linux", arch="x86", installed=False) e_findrange.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "condorpool")) diamond.addExecutable(e_findrange) e_analyze = Executable(namespace="diamond", name="analyze", version="4.0", os="linux", arch="x86", installed=False) e_analyze.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "condorpool")) diamond.addExecutable(e_analyze) # Add a preprocess job preprocess = Job(namespace="diamond", name="preprocess", version="4.0") b1 = File("f.b1") b2 = File("f.b2") preprocess.addArguments("-a preprocess","-T5","-i",a,"-o",b1,b2) preprocess.uses(a, link=Link.INPUT) preprocess.uses(b1, link=Link.OUTPUT) preprocess.uses(b2, link=Link.OUTPUT) diamond.addJob(preprocess) # Add left Findrange job frl = Job(namespace="diamond", name="findrange", version="4.0") c1 = File("f.c1") frl.addArguments("-a findrange","-T5","-i",b1,"-o",c1) frl.uses(b1, link=Link.INPUT) frl.uses(c1, link=Link.OUTPUT) diamond.addJob(frl) # Add right Findrange job frr = Job(namespace="diamond", name="findrange", version="4.0") c2 = File("f.c2") frr.addArguments("-a findrange","-T5","-i",b2,"-o",c2) frr.uses(b2, link=Link.INPUT) frr.uses(c2, link=Link.OUTPUT) diamond.addJob(frr) # Add Analyze job analyze = Job(namespace="diamond", name="analyze", version="4.0") d = File("f.d") analyze.addArguments("-a analyze","-T5","-i",c1,c2,"-o",d) analyze.uses(c1, link=Link.INPUT) analyze.uses(c2, link=Link.INPUT) analyze.uses(d, link=Link.OUTPUT, register=True) diamond.addJob(analyze) # Add control-flow dependencies diamond.addDependency(Dependency(parent=preprocess, child=frl)) diamond.addDependency(Dependency(parent=preprocess, child=frr)) diamond.addDependency(Dependency(parent=frl, child=analyze)) diamond.addDependency(Dependency(parent=frr, child=analyze)) # Write the 
DAX to stdout diamond.writeXML(sys.stdout) pegasus-wms_4.0.1+dfsg/test/core/009-black-diamond-pegasuslite-pegasustransfer/test.config0000755000175000017500000000002411757531137030640 0ustar ryngeryngemax_wall_time = 30 pegasus-wms_4.0.1+dfsg/test/core/009-black-diamond-pegasuslite-pegasustransfer/pegasusrc0000755000175000017500000000042411757531137030415 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink = false pegasus.gridstart = PegasusLite pegasus.execute.*.filesystem.local = True pegasus.transfer.worker.package = True pegasus-wms_4.0.1+dfsg/test/core/004-montage-grid/0000755000175000017500000000000011757531667020631 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/004-montage-grid/run-bamboo-test0000755000175000017500000000733011757531137023570 0ustar ryngerynge#!/bin/bash set -e ####################################################################### # # Settings # DEGREES=0.5 ####################################################################### TOP_DIR=`pwd` export PATH=/ccg/software/montage/Montage_v3.3_mats/bin:$PATH # unique directory for this run RUN_ID=`/bin/date +'%F_%H%M%S'` RUN_DIR=`pwd`/work/$RUN_ID echo "Work directory: $RUN_DIR" mkdir -p $RUN_DIR/inputs cd $RUN_DIR cp $TOP_DIR/pegasusrc . # create the transformation catalogue (tc) echo echo "Creating the transformation catalog..." for BINARY in `(cd /ccg/software/montage/Montage_v3.3_mats/bin/ && ls)`; do cat >>tc <sites.xml < /ccg/software/globus/default 40000,50000 /ccg/software/montage/Montage_v3.3_mats /usr EOF echo echo "Running mDAG (finding input images, generating DAX, ...)..." mDAG 2mass j M17 $DEGREES $DEGREES 0.0002777778 . "file://$RUN_DIR" "file://$RUN_DIR/inputs" echo echo "Adding input images to the replica catalog..." echo " " `cat cache.list | wc -l` "images found" cat cache.list | grep -v ".fits " >rc perl -p -i -e 's/ipac_cluster/local/' rc cat url.list | sed 's/ http:.*ref=/ http:\/\/obelix.isi.edu\/irsa-cache/' >>rc echo "Planning and submitting the workflow..." pegasus-plan \ --conf pegasusrc \ --sites CCG \ --dir work \ --output local \ --nocleanup \ --dax dag.xml \ --cluster horizontal \ --submit | tee $TOP_DIR/plan.out pegasus-wms_4.0.1+dfsg/test/core/004-montage-grid/pegasusrc0000644000175000017500000000042411757531137022540 0ustar ryngerynge pegasus.catalog.replica=SimpleFile pegasus.catalog.replica.file=rc pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.catalog.transformation=Text pegasus.catalog.transformation.file=tc pegasus.dir.useTimestamp=true pegasus.condor.logs.symlink = false pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/0000755000175000017500000000000011757531667022111 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/run-bamboo-test0000755000175000017500000001113711757531137025050 0ustar ryngerynge#!/bin/bash set -e set -v TOPDIR=`pwd` CONFIG=`basename $1` DAX=horizontal-cluster.dax # Unique directory for this run RUN_ID=`/bin/date +'%Y%m%d_%H%M%S%N'` # Read a property from ${CONFIG}/test.config file properties () { eval $1=\"`grep "^[\s]*[^#]*$2\s*=" ${CONFIG}/test.config | cut -d"=" -f2 | sed -e 's/^\s*//g' -e 's/\s*$//g'`\" local i=\$$1 eval local temp=$i # If property not set or is empty, then check if default value is provided. If Yes set property to default value. if [[ -z $temp && ! -z $3 ]]; then eval $1=$3 fi } # Read the physical directory where the input file is located. 
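# (usage: properties VAR key [default] -- the helper above looks "key" up
# in ${CONFIG}/test.config, assigns it to VAR, and falls back to the
# optional default when the key is absent or empty)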
properties input_file input_file if [ -z ${input_file} ]; then input_file='./f.a' else mkdir -p ${input_file}/$USER/inputs input_file=${input_file}/$USER/inputs/f.a fi # generate the input file echo "This is sample input to KEG" > ${input_file} mkdir -p staging-site # build the dax generator export PYTHONPATH=`pegasus-config --python` ./cluster.py /usr $CONFIG > ${DAX} properties local_site_protocol local_site_protocol file:// properties local_site_url local_site_url # create the site catalog cat > sites.xml << EOF condor vanilla Yes ON_EXIT /usr condor vanilla /usr EOF # plan and submit the workflow properties execution_site execution_site local properties staging_site staging_site local properties output_site output_site local properties planner_args planner_args set -x pegasus-plan \ --conf ${CONFIG}/pegasusrc \ --sites ${execution_site} \ --staging-site $staging_site \ --output $output_site \ --dir work \ --nocleanup \ --dax ${DAX} \ --cluster horizontal \ --submit ${planner_args} set +x pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-nonsharedfs/0000755000175000017500000000000011757531667026104 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-nonsharedfs/README0000644000175000017500000000013711757531137026755 0ustar ryngerynge1. Runtime based clustering. 2. Executable staging in nonsharedfs mode. 3. Enabled symlinking. pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-nonsharedfs/test.config0000755000175000017500000000061211757531137030244 0ustar ryngerynge[all] max_wall_time=30 file_url=gsiftp://cartman.isi.edu file_site=local executable_url=file:// executable_site=condorpool #executable_installed= local_site_protocol=gsiftp local_site_url=gsiftp://cartman.isi.edu execution_site=condorpool staging_site=local output_site=local #input_file = /nfs/ccg3/scratch/testing/inputs #planner_args=-vvvvvv #clusters_size = #clusters_maxruntime = pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-nonsharedfs/pegasusrc0000755000175000017500000000044011757531137030014 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.condor.logs.symlink = false pegasus.data.configuration = nonsharedfs pegasus.clusterer.preference = Runtime pegasus.catalog.transformation.mapper = Staged pegasus.transfer.links = true pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs-all-staging-nogridstart/0000755000175000017500000000000011757531667032127 5ustar ryngerynge././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootpegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs-all-staging-nogridstart/READMEpegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs-all-staging-nogridstart/REA0000644000175000017500000000016211757531137032450 0ustar ryngerynge1. Runtime based clustering in Shared FS mode. 2. Executable staging. 3. Worker Package staging. 4. No Gridstart. 
././@LongLink0000000000000000000000000000015500000000000011566 Lustar rootrootpegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs-all-staging-nogridstart/test.configpegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs-all-staging-nogridstart/tes0000755000175000017500000000046011757531137032640 0ustar ryngerynge[all] max_wall_time=30 file_url=file:// file_site=sharedfs executable_url=file:// executable_site=condorpool #executable_installed=true execution_site=sharedfs staging_site=sharedfs output_site=sharedfs input_file = /nfs/ccg3/scratch #planner_args=-vvvvvv #clusters_size = #clusters_maxruntime = ././@LongLink0000000000000000000000000000015300000000000011564 Lustar rootrootpegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs-all-staging-nogridstart/pegasusrcpegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs-all-staging-nogridstart/peg0000755000175000017500000000071711757531137032625 0ustar ryngeryngepegasus.catalog.replica=SimpleFile pegasus.catalog.replica.file=rc pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.catalog.transformation=Text pegasus.catalog.transformation.file=tc pegasus.dir.useTimestamp=true pegasus.condor.logs.symlink = false pegasus.data.configuration = sharedfs pegasus.gridstart = None pegasus.catalog.transformation.mapper = Staged pegasus.transfer.worker.package = True pegasus.clusterer.preference = Runtime pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-condorio/0000755000175000017500000000000011757531667025406 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-condorio/README0000644000175000017500000000014611757531137026257 0ustar ryngeryngeTesting the following features. 1. Runtime based clustering. 2. Executable staging in condorio mode. pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-condorio/test.config0000755000175000017500000000043611757531137027552 0ustar ryngerynge[all] max_wall_time=30 file_url=file:// file_site=local executable_url=file:// executable_site=condorpool execution_site=condorpool staging_site=local output_site=local #input_file = /nfs/ccg3/scratch/testing/inputs #planner_args=-vvvvvv #clusters_size = #clusters_maxruntime = pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-condorio/pegasusrc0000755000175000017500000000037711757531137027327 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.condor.logs.symlink = false pegasus.clusterer.preference = Runtime pegasus.data.configuration = condorio pegasus.catalog.transformation.mapper = Staged pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/run-test0000755000175000017500000000041211757531137023605 0ustar ryngerynge#!/bin/bash set -e set -v ./run-bamboo-test $1 # wait for workflow to finish ../../common/wait-for-workflow # check success with pegasus-analyzer - does it use exit codes correctly? cd work/*/*/*/*/ pegasus-analyzer # show some stats pegasus-statistics `pwd` pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs/0000755000175000017500000000000011757531667025371 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs/README0000644000175000017500000000010611757531137026236 0ustar ryngerynge1. Runtime based clustering in Shared FS mode. 2. Executable staging. 
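All of the 010-runtime-clustering variants drive the Runtime clusterer (pegasus.clusterer.preference = Runtime in their pegasusrc files) through two pegasus profiles: clusters.maxruntime on the executable and job.runtime on each job, exactly as the cluster.py generator further below sets them from test.config. A minimal sketch of that profile wiring, assuming the DAX3 API used throughout these tests — the PFN path and runtime values are illustrative:

#!/usr/bin/env python
from Pegasus.DAX3 import ADAG, Job, Executable, Profile, PFN
import sys

dax = ADAG("runtime-clustering-sketch")
exe = Executable(namespace="cluster", name="level1", version="1.0",
                 os="linux", arch="x86", installed=False)
exe.addPFN(PFN("file:///usr/bin/pegasus-keg", "condorpool"))
# cap each generated cluster at roughly 7 seconds of summed job runtime
exe.addProfile(Profile(namespace="pegasus", key="clusters.maxruntime", value="7"))
dax.addExecutable(exe)

for t in range(1, 5):
    job = Job(namespace="cluster", name="level1", version="1.0")
    job.addArguments("-a level1 -T " + str(t))
    # advertised runtime that the Runtime clusterer uses to bin this job
    job.addProfile(Profile(namespace="pegasus", key="job.runtime", value=str(t)))
    dax.addJob(job)

dax.writeXML(sys.stdout)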
pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs/test.config0000755000175000017500000000046011757531137027532 0ustar ryngerynge[all] max_wall_time=30 file_url=file:// file_site=sharedfs executable_url=file:// executable_site=condorpool #executable_installed=true execution_site=sharedfs staging_site=sharedfs output_site=sharedfs input_file = /nfs/ccg3/scratch #planner_args=-vvvvvv #clusters_size = #clusters_maxruntime = pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/runtime-sharedfs/pegasusrc0000755000175000017500000000055111757531137027304 0ustar ryngeryngepegasus.catalog.replica=SimpleFile pegasus.catalog.replica.file=rc pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.catalog.transformation=Text pegasus.catalog.transformation.file=tc pegasus.dir.useTimestamp=true pegasus.condor.logs.symlink = false pegasus.clusterer.preference = Runtime pegasus.catalog.transformation.mapper = Staged pegasus-wms_4.0.1+dfsg/test/core/010-runtime-clustering/cluster.py0000755000175000017500000000431311757531137024140 0ustar ryngerynge#!/usr/bin/env python from Pegasus.DAX3 import ADAG, File, Link, Job, Executable, PFN, Profile import sys import os import ConfigParser if len(sys.argv) != 3: print "Usage: %s PEGASUS_HOME" % (sys.argv[0]) sys.exit(1) config = ConfigParser.ConfigParser({'input_file':'', 'workflow_name':'horizontal-clustering-test', 'executable_installed':"False", 'clusters_size':"3", 'clusters_maxruntime':"7"}) config.read(sys.argv[2] + '/test.config') # Create an abstract dag cluster = ADAG (config.get('all', 'workflow_name')) input_file = config.get('all', 'input_file') if (input_file == ''): input_file = os.getcwd () else: input_file += '/' + os.getenv ('USER') + '/inputs' # Add input file to the DAX-level replica catalog a = File("f.a") a.addPFN(PFN(config.get('all', 'file_url') + input_file + "/f.a", config.get('all', 'file_site'))) cluster.addFile(a) for i in range (1, 3): sleep = Executable (namespace = "cluster", name = "level" + str (i), version = "1.0", os = "linux", arch = "x86", installed=config.getboolean('all', 'executable_installed')) sleep.addPFN (PFN (config.get('all', 'executable_url') + sys.argv[1] + "/bin/pegasus-keg", config.get('all', 'executable_site'))) sleep.addProfile (Profile (namespace = "pegasus", key = "clusters.size", value = config.get('all', 'clusters_size'))) sleep.addProfile (Profile (namespace = "pegasus", key = "clusters.maxruntime", value = config.get('all', 'clusters_maxruntime'))) cluster.addExecutable(sleep) for i in range (4): job = Job (namespace = "cluster", name = "level1", version = "1.0") job.addArguments('-a level1 -T ' + str (i + 1)) job.addArguments('-i', a) job.addProfile (Profile (namespace = "pegasus", key = "job.runtime", value = str (i + 1))) job.uses(a, link=Link.INPUT) cluster.addJob (job) for j in range (4): child = Job (namespace = "cluster", name = "level2", version = "1.0") child.addArguments('-a level2 -T ' + str ((j + 1) * 2)) child.addProfile (Profile (namespace = "pegasus", key = "job.runtime", value = str ((j + 1) * 2))) cluster.addJob (child) cluster.depends (parent = job, child = child) # Write the DAX to standard out cluster.writeXML (sys.stdout) pegasus-wms_4.0.1+dfsg/test/core/006-black-diamond-shell-code-generator/0000755000175000017500000000000011757531667024744 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/006-black-diamond-shell-code-generator/run-bamboo-test0000755000175000017500000000340211757531137027677 0ustar ryngerynge#!/bin/bash set -e set -v TOPDIR=`pwd` # 
unique directory for this run RUN_ID=`/bin/date +'%F_%H%M%S'` RUN_DIR=`pwd`/work/$RUN_ID # generate the input file echo "This is sample input to KEG" > f.a # output directory mkdir -p outputs # build the dax generator export CLASSPATH=.:`pegasus-config --classpath` javac BlackDiamondDAX.java # generate the dax java BlackDiamondDAX /usr blackdiamond.dax # create the site catalog cat >sites.xml < /usr /ccg/software/globus/default EOF # plan and submit the workflow pegasus-plan \ -Dpegasus.code.generator=Shell \ --conf pegasusrc \ --sites local \ --dir work \ --relative-submit-dir ${RUN_ID} \ --output local \ --nocleanup \ --dax blackdiamond.dax \ --submit | tee plan.out pegasus-wms_4.0.1+dfsg/test/core/006-black-diamond-shell-code-generator/run-test0000755000175000017500000000011011757531137026433 0ustar ryngerynge#!/bin/bash . ./run-bamboo-test cd ${RUN_DIR} tail -n 5 jobstate.log pegasus-wms_4.0.1+dfsg/test/core/006-black-diamond-shell-code-generator/test.config0000644000175000017500000000002411757531137027076 0ustar ryngeryngemax_wall_time = 30 pegasus-wms_4.0.1+dfsg/test/core/006-black-diamond-shell-code-generator/BlackDiamondDAX.java0000644000175000017500000001055011757531137030445 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import edu.isi.pegasus.planner.dax.*; public class BlackDiamondDAX { /** * Create an example DIAMOND DAX * @param args */ public static void main(String[] args) { if (args.length != 2) { System.out.println("Usage: java ADAG "); System.exit(1); } try { Diamond(args[0]).writeToFile(args[1]); } catch (Exception e) { e.printStackTrace(); } } private static ADAG Diamond(String pegasus_location) throws Exception { java.io.File cwdFile = new java.io.File ("."); String cwd = cwdFile.getCanonicalPath(); ADAG dax = new ADAG("blackdiamond"); File fa = new File("f.a"); fa.addPhysicalFile("file://" + cwd + "/f.a", "local"); dax.addFile(fa); File fa2 = new File("f.a2"); fa2.addPhysicalFile("gsiftp://cartman.isi.edu" + cwd + "/f.a", "condorpool"); dax.addFile(fa2); File fb1 = new File("f.b1"); File fb2 = new File("f.b2"); File fc1 = new File("f.c1"); File fc2 = new File("f.c2"); File fd = new File("f.d"); fd.setRegister(true); Executable preprocess = new Executable("pegasus", "preprocess", "4.0"); preprocess.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); preprocess.setInstalled(true); preprocess.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "local"); Executable findrange = new Executable("pegasus", "findrange", "4.0"); findrange.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); findrange.setInstalled(true); findrange.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "local"); Executable analyze = new Executable("pegasus", "analyze", "4.0"); analyze.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); analyze.setInstalled(true); analyze.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", "local"); dax.addExecutable(preprocess).addExecutable(findrange).addExecutable(analyze); // Add a preprocess job Job j1 = new Job("j1", "pegasus", "preprocess", "4.0"); j1.addArgument("-a preprocess -T 2 -i ").addArgument(fa); j1.addArgument("-o ").addArgument(fb1); j1.addArgument(" ").addArgument(fb2); j1.uses(fa, File.LINK.INPUT); j1.uses(fa2, File.LINK.INPUT); j1.uses(fb1, File.LINK.OUTPUT); j1.uses(fb2, File.LINK.OUTPUT); dax.addJob(j1); // Add left Findrange job Job j2 = new Job("j2", "pegasus", "findrange", "4.0"); j2.addArgument("-a findrange -T 2 -i ").addArgument(fb1); j2.addArgument("-o ").addArgument(fc1); j2.uses(fb1, File.LINK.INPUT); j2.uses(fc1, File.LINK.OUTPUT); dax.addJob(j2); // Add right Findrange job Job j3 = new Job("j3", "pegasus", "findrange", "4.0"); j3.addArgument("-a findrange -T 2 -i ").addArgument(fb2); j3.addArgument("-o ").addArgument(fc2); j3.uses(fb2, File.LINK.INPUT); j3.uses(fc2, File.LINK.OUTPUT); dax.addJob(j3); // Add analyze job Job j4 = new Job("j4", "pegasus", "analyze", "4.0"); j4.addArgument("-a analyze -T 2 -i ").addArgument(fc1); j4.addArgument(" ").addArgument(fc2); j4.addArgument("-o ").addArgument(fd); j4.uses(fc1, File.LINK.INPUT); j4.uses(fc2, File.LINK.INPUT); j4.uses(fd, File.LINK.OUTPUT); dax.addJob(j4); dax.addDependency("j1", "j2"); dax.addDependency("j1", "j3"); dax.addDependency("j2", "j4"); dax.addDependency("j3", "j4"); return dax; } } pegasus-wms_4.0.1+dfsg/test/core/006-black-diamond-shell-code-generator/pegasusrc0000644000175000017500000000017411757531137026655 0ustar ryngeryngepegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus-wms_4.0.1+dfsg/test/core/004-montage-universe-local/0000755000175000017500000000000011757531667022634 5ustar 
ryngeryngepegasus-wms_4.0.1+dfsg/test/core/004-montage-universe-local/run-bamboo-test0000755000175000017500000000563011757531137025574 0ustar ryngerynge#!/bin/bash set -e ####################################################################### # # Settings # DEGREES=0.5 ####################################################################### TOP_DIR=`pwd` export PATH=/ccg/software/montage/Montage_v3.3_mats/bin:$PATH # unique directory for this run RUN_ID=`/bin/date +'%F_%H%M%S'` RUN_DIR=`pwd`/work/$RUN_ID echo "Work directory: $RUN_DIR" mkdir -p $RUN_DIR/inputs cd $RUN_DIR cp $TOP_DIR/pegasusrc . # create the transformation catalogue (tc) echo echo "Creating the transformation catalog..." for BINARY in `(cd /ccg/software/montage/Montage_v3.3_mats/bin/ && ls)`; do cat >>tc <sites.xml < condor local /ccg/software/montage/Montage_v3.3_mats EOF echo echo "Running mDAG (finding input images, generating DAX, ...)..." mDAG 2mass j M17 $DEGREES $DEGREES 0.0002777778 . "file://$RUN_DIR" "file://$RUN_DIR/inputs" echo echo "Adding input images to the replica catalog..." echo " " `cat cache.list | wc -l` "images found" cat cache.list | grep -v ".fits " >rc perl -p -i -e 's/ipac_cluster/local/' rc cat url.list | sed 's/ http:.*ref=/ http:\/\/obelix.isi.edu\/irsa-cache/' >>rc echo "Planning and submitting the workflow..." pegasus-plan \ --conf pegasusrc \ --sites local \ --dir work \ --output local \ --nocleanup \ --dax dag.xml \ --cluster horizontal \ --submit | tee $TOP_DIR/plan.out pegasus-wms_4.0.1+dfsg/test/core/004-montage-universe-local/pegasusrc0000644000175000017500000000047311757531137024547 0ustar ryngerynge pegasus.catalog.replica=SimpleFile pegasus.catalog.replica.file=rc pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.catalog.transformation=Text pegasus.catalog.transformation.file=tc pegasus.dir.useTimestamp=true pegasus.data.configuration = sharedfs pegasus.condor.logs.symlink = false pegasus-wms_4.0.1+dfsg/test/core/002-s3-plan/0000755000175000017500000000000011757531667017527 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/core/002-s3-plan/run-test0000755000175000017500000000055111757531137021227 0ustar ryngerynge#!/bin/bash set -e pegasus-plan \ --conf properties \ -vvvv \ --dax blackdiamond_dax.xml \ --sites cloud cd $USER/pegasus/blackdiamond/run0001/ echo echo for FILE in `ls sls*.in stage*.in`; do echo "Checking $FILE..." if ! (cat $FILE | grep s3://) >/dev/null 2>&1; then echo " No s3 URLs found!"
exit 1 fi done pegasus-wms_4.0.1+dfsg/test/core/002-s3-plan/sites.xml30000644000175000017500000000572211757531137021461 0ustar ryngerynge /opt/globus/default /opt/globus/default/lib /opt/pegasus/default condor 2 1 YES true true vanilla ON_EXIT /home/rynge/.s3cfg /opt/globus/default /opt/globus/default/lib /home/rynge/Pegasus/install/trunk pegasus-wms_4.0.1+dfsg/test/core/002-s3-plan/blackdiamond_dax.xml0000644000175000017500000000642611757531137023515 0ustar ryngerynge -a top -T 6 -i -o -a left -T 6 -i -o -p 0.5 -a right -T 6 -i -o -p 1.0 -a bottom -T 6 -i -o 10 3 4 pegasus-wms_4.0.1+dfsg/test/core/002-s3-plan/test.config0000644000175000017500000000002411757531137021661 0ustar ryngeryngemax_wall_time = 30 pegasus-wms_4.0.1+dfsg/test/core/002-s3-plan/tc.data0000644000175000017500000000206211757531137020760 0ustar ryngerynge# # local site at sukhna.isi.edu # local vahi::analyze:1.0 gsiftp://sukhna.isi.edu/nfs/asd2/vahi/montage/tutorial/pegasus/default/bin/keg STATIC_BINARY INTEL32::LINUX NULL local vahi::preprocess:1.0 gsiftp://sukhna.isi.edu/nfs/asd2/vahi/montage/tutorial/pegasus/default/bin/keg STATIC_BINARY INTEL32::LINUX NULL local vahi::findrange:1.0 gsiftp://sukhna.isi.edu/nfs/asd2/vahi/montage/tutorial/pegasus/default/bin/keg STATIC_BINARY INTEL32::LINUX NULL #local condor::dagman /opt/condor/bin/condor_dagman INSTALLED INTEL32::LINUX NULL #local pegasus::worker http://pegasus.isi.edu/mapper/download/pegasus-worker-2.1.0-x86_fc_3.tar.gz STATIC_BINARY INTEL32::LINUX NULL #cloud tar /bin/tar INSTALLED INTEL32::LINUX NULL cloud vahi::analyze:1.0 /opt/pegasus/default/bin/keg INSTALLED INTEL32::LINUX NULL cloud vahi::preprocess:1.0 /opt/pegasus/default/bin/keg INSTALLED INTEL32::LINUX NULL cloud vahi::findrange:1.0 /opt/pegasus/default/bin/keg INSTALLED INTEL32::LINUX NULL pegasus-wms_4.0.1+dfsg/test/core/002-s3-plan/rc.data0000644000175000017500000000016711757531137020762 0ustar ryngerynge# file-based replica catalog: 2011-11-11T10:50:26.110-08:00 f.a file:///home/rynge/Pegasus/s3/example/f.a pool="local" pegasus-wms_4.0.1+dfsg/test/core/002-s3-plan/properties0000644000175000017500000000047211757531137021641 0ustar ryngerynge pegasus.catalog.site = XML3 pegasus.catalog.site.file = sites.xml3 pegasus.catalog.transformation = File pegasus.catalog.transformation.file = tc.data pegasus.catalog.transformation.mapper = Installed pegasus.catalog.replica = SimpleFile pegasus.catalog.replica.file = rc.data pegasus.data.configuration = S3 pegasus-wms_4.0.1+dfsg/test/scripts/0000755000175000017500000000000011757531667016512 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/scripts/pegasus-testing-glue0000755000175000017500000001367411757531137022517 0ustar ryngerynge#!/usr/bin/env python ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
##

import os
import re
import sys
import errno
import logging
import optparse
import tempfile
import subprocess
import signal
import string
import stat
import time

# --- global variables ----------------------------------------------------------------

prog_base = os.path.split(sys.argv[0])[1]   # Name of this program
logger = logging.getLogger("my_logger")

# --- functions -----------------------------------------------------------------------

def setup_logger(level_str):

    # log to the console
    console = logging.StreamHandler()

    # default log level - make logger/console match
    logger.setLevel(logging.INFO)
    console.setLevel(logging.INFO)

    # level - from the command line
    level_str = level_str.lower()
    if level_str == "debug":
        logger.setLevel(logging.DEBUG)
        console.setLevel(logging.DEBUG)
    if level_str == "warning":
        logger.setLevel(logging.WARNING)
        console.setLevel(logging.WARNING)
    if level_str == "error":
        logger.setLevel(logging.ERROR)
        console.setLevel(logging.ERROR)

    # formatter
    formatter = logging.Formatter("%(asctime)s %(levelname)7s: %(message)s")
    console.setFormatter(formatter)
    logger.addHandler(console)
    logger.debug("Logger has been configured")

def prog_sigint_handler(signum, frame):
    logger.warn("Exiting due to signal %d" % (signum))
    myexit(1)

def alarm_handler(signum, frame):
    raise RuntimeError

def myexec(cmd_line, timeout_secs, should_log):
    """
    executes shell commands with the ability to time out if the command hangs
    """
    if should_log or logger.isEnabledFor(logging.DEBUG):
        logger.info(cmd_line)
    sys.stdout.flush()

    # set up signal handler for timeout
    signal.signal(signal.SIGALRM, alarm_handler)
    signal.alarm(timeout_secs)

    p = subprocess.Popen(cmd_line, shell=True)
    try:
        stdoutdata, stderrdata = p.communicate()
    except RuntimeError:
        if sys.version_info >= (2, 6):
            p.terminate()
        raise RuntimeError("Command '%s' timed out after %s seconds" % (cmd_line, timeout_secs))
    rc = p.returncode
    if rc != 0:
        raise RuntimeError("Command '%s' failed with error code %s" % (cmd_line, rc))

def myexit(rc):
    """
    system exit without a stack trace - silly python
    """
    try:
        sys.exit(rc)
    except SystemExit:
        sys.exit(rc)

def clean_condor_q():
    """
    cleans out the condor queue so we have a clean environment to start with
    """
    cmd = "(condor_rm $USER; sleep 10s; condor_rm -forcex $USER; sleep 10s) >/dev/null 2>&1"
    try:
        myexec(cmd, 60, True)
    except Exception, e:
        logger.error(e)

def summarize_stdout_stderr(test_dir):
    """
    summarizes a test's output (test.stdout/test.stderr)
    """
    cmd = "cd %s && tail -n 100 test.stdout test.stderr" % test_dir
    try:
        myexec(cmd, 60, False)
    except Exception, e:
        logger.error(e)

# --- main ----------------------------------------------------------------------------

# keep track of where we run from
start_dir = os.path.normpath(os.path.join(os.path.dirname(sys.argv[0])))
default_tests_dir = os.path.normpath(os.path.join(start_dir, "../core"))

# Configure command line option parser
prog_usage = "usage: pegasus-testing-glue [options]"
parser = optparse.OptionParser(usage=prog_usage)
parser.add_option("-l", "--loglevel", action = "store", dest = "log_level",
                  help = "Log level. Valid levels are: debug,info,warning,error. Default is info.")
parser.add_option("-d", "--tests-dir", action = "store", dest = "tests_dir",
                  help = "Directory containing the tests")

# Parse command line options
(options, args) = parser.parse_args()

if options.log_level == None:
    options.log_level = "info"
if options.tests_dir == None:
    options.tests_dir = default_tests_dir

setup_logger(options.log_level)

# Die nicely when asked to (Ctrl+C, system shutdown)
signal.signal(signal.SIGINT, prog_sigint_handler)

# keep exit code and a list of failed tests
rc = 0
failed_tests = []

# loop over the tests
dir_list = os.listdir(options.tests_dir)
dir_list.sort()
for entry in dir_list:

    # ignore .files
    if entry.find(".") == 0:
        continue

    # make sure the test has a run-test script
    if not os.path.exists("%s/%s/run-test" % (options.tests_dir, entry)):
        logger.error("%s/%s does not have a run-test script" % (options.tests_dir, entry))
        continue

    # some space to make the output easier to read
    logger.info("\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n")
    logger.info("====================================================================")
    logger.info("Starting test: %s" %(entry))

    logger.info("Preparing for test - cleaning queue")
    clean_condor_q()

    logger.info("Running test %s" % entry)
    cmd = "cd %s/%s && ./run-test >test.stdout 2>test.stderr" % (options.tests_dir, entry)
    try:
        myexec(cmd, 3600, True)
        logger.info("Test successful")
    except Exception, e:
        logger.error(e)
        failed_tests.append(entry)
        rc += 1
        logger.info("Last 100 lines of stdout/stderr:")
        summarize_stdout_stderr("%s/%s" % (options.tests_dir, entry))

print
if len(failed_tests) == 0:
    print "All tests successful!"
else:
    print "The following tests failed:"
    for test in failed_tests:
        print "  %s" %(test)

myexit(rc)
pegasus-wms_4.0.1+dfsg/test/scripts/launch-bamboo-test-no-status0000755000175000017500000000076711757531137024051 0ustar ryngerynge#!/bin/bash
#set -v

#Get the Test dir from the build Plan Name or from command line
TOP_DIR=`pwd`
TEST=`echo $1 | awk -F " - " '{print $3}'`
shift

cd test/core/$TEST
echo "TEST is $TEST"
pwd

#Launch the test
. ./run-bamboo-test "$@"
STATUS=$?

if [ $STATUS != 0 ]; then
   echo "Workflow submission failed"
   exit $STATUS
fi

#Get RUNDIR from the planning output
echo "RUNDIR is $RUN_DIR"

# Change into the rundir
cd $RUN_DIR

# REPORT LAST FEW LINES OF JOBSTATE.log
tail -10 jobstate.log

exit
pegasus-wms_4.0.1+dfsg/test/scripts/untar-dist0000755000175000017500000000133511757531137020524 0ustar ryngerynge#!/bin/sh

VERSION=$1
PEGASUS_BINARY=pegasus-binary-${VERSION}.tar.gz
PEGASUS_TEST=pegasus-test-${VERSION}.tar.gz

if [ ! -e $PEGASUS_BINARY ]; then
    echo "Pegasus distribution $PEGASUS_BINARY missing or empty."
    exit 1
fi

if [ ! -e $PEGASUS_TEST ]; then
    echo "Pegasus test $PEGASUS_TEST missing or empty."
    exit 2
fi

#UNTAR PEGASUS BINARY TARBALL
echo "Untarring the Pegasus distribution $PEGASUS_BINARY"
tar zxf $PEGASUS_BINARY
status=$?
if [ $status -ne 0 ]; then
    echo "PEGASUS BINARY UNTAR FAILED"
    exit $status
fi

# UNTAR PEGASUS TEST TARBALL
echo "Untarring the Pegasus test $PEGASUS_TEST"
tar zxf $PEGASUS_TEST
status=$?
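# fail fast if the test tarball did not extract cleanly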
if [ $status -ne 0 ]; then
    echo "PEGASUS TEST UNTAR FAILED"
    exit $status
fi

exit 0
pegasus-wms_4.0.1+dfsg/test/scripts/launch-bamboo-test0000755000175000017500000000135111757531137022114 0ustar ryngerynge#!/bin/bash
#set -v

#Get the Test dir from the build Plan Name or from command line
TOP_DIR=`pwd`
TEST=`echo $1 | awk -F " - " '{print $3}'`
shift

cd test/core/$TEST
echo "TEST is $TEST"
pwd

PLANFILE=`mktemp plan.XXXXXX`

#Launch the test
./run-bamboo-test "$@" | tee $PLANFILE
STATUS=$?

if [ $STATUS != 0 ]; then
   echo "Workflow submission failed"
   exit $STATUS
fi

#Get RUNDIR from the planning output
RUN_DIR=`grep pegasus-remove $PLANFILE | awk '{print $5}'`
echo "RUNDIR is $RUN_DIR"

# Change into the rundir
cd $RUN_DIR

#Check status
$TOP_DIR/test/common/check-status
STATUS=$?

if [ $STATUS = 0 ]; then
    # Run Pegasus Statistics
    pegasus-statistics $RUN_DIR
else
    echo "Pegasus status failed"
    exit $STATUS
fi

exit
pegasus-wms_4.0.1+dfsg/test/common/0000755000175000017500000000000011757531667016313 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/test/common/wait-for-workflow0000755000175000017500000000160011757531137021626 0ustar ryngerynge#!/bin/bash

#
# This should probably be implemented using pegasus-status or pegasus-analyzer,
# if we can get good exit codes
#

function update_condor_states
{
    TOTAL_JOBS=`condor_q -constraint "Owner == \"$USER\"" -format '%s\n' ClusterID 2>/dev/null | wc -l`
    if [ "x$TOTAL_JOBS" = "x" ]; then
        TOTAL_JOBS="0"
    fi
    TOTAL_JOBS=$(($TOTAL_JOBS + 0))
    HELD_JOBS=`condor_q -constraint "Owner == \"$USER\" && JobStatus == 5" -format '%s\n' ClusterID 2>/dev/null | wc -l`
    if [ "x$HELD_JOBS" = "x" ]; then
        HELD_JOBS="0"
    fi
    HELD_JOBS=$(($HELD_JOBS + 0))
    TS=`date +'%F %R'`
    echo "$TS - Condor Jobs - Total: $TOTAL_JOBS Held: $HELD_JOBS"
}

update_condor_states
while [ $TOTAL_JOBS -gt 0 -a $HELD_JOBS = 0 ]; do
    sleep 1m
    update_condor_states
done

if [ $HELD_JOBS -gt 0 ]; then
    echo "Held jobs detected!"
    exit 1
fi

exit 0
pegasus-wms_4.0.1+dfsg/test/common/check-status0000755000175000017500000000201411757531137020624 0ustar ryngerynge#!/bin/bash

echo "CURRENT DIR is " `pwd`
echo "Checking status "

TIMEOUT=180
COUNT=0

function update_status
{
    STATUS=`pegasus-status --noqueue | tail -1 | sed 's/[:\(\)]/ /g'| awk '{print $5}'`
}

sleep 1m
update_status
echo "#" STATUS is "$STATUS"
while [ "$STATUS" = "Running" -o "$STATUS" = "" -o "$STATUS" = "Unknown" ] ; do
    if [ $COUNT -ge $TIMEOUT ]; then
        echo "Reached TIMEOUT of $TIMEOUT. Calling pegasus-remove"
        pegasus-remove `pwd`
        STATUS=TIMEOUT
        sleep 1m
        break
    fi
    ((COUNT++))
    sleep 1m
    update_status
    echo "#" STATUS is "$STATUS"
done

if [ "$STATUS" = "Success" ]; then
    echo "*** Workflow finished successfully ***"
    exit 0
else
    echo "*** Workflow failed ***"
    echo "Running Pegasus analyzer"
    pegasus-analyzer
    exit 1
fi
pegasus-wms_4.0.1+dfsg/lib/0000755000175000017500000000000011757531667014612 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/0000755000175000017500000000000011757531667016261 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/perl/0000755000175000017500000000000011757531667017223 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/0000755000175000017500000000000011757531667020632 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/0000755000175000017500000000000011757531667021246 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/Factory.pm0000644000175000017500000001346011757531137023207 0ustar ryngerynge#
# License: (atend)
# $Id: Factory.pm 4527 2011-09-15 19:02:58Z voeckler $
#
package Pegasus::DAX::Factory;
use 5.006;
use strict;
use Carp;

use Exporter;
our @ISA = qw(Exporter);

#
# import all instantiatable classes
#
use Pegasus::DAX::Profile qw(:all);
use Pegasus::DAX::PFN;
use Pegasus::DAX::MetaData;
use Pegasus::DAX::PlainFilename;
use Pegasus::DAX::Filename qw(:all);
use Pegasus::DAX::File;
use Pegasus::DAX::Executable qw(:all);
use Pegasus::DAX::Invoke qw(:all);
use Pegasus::DAX::Transformation;
use Pegasus::DAX::DAG;
use Pegasus::DAX::DAX;
use Pegasus::DAX::Job;
use Pegasus::DAX::ADAG qw(:all);

#
# define wrappers around their c'tors
#
sub newProfile { Pegasus::DAX::Profile->new(@_) }
sub newPFN { Pegasus::DAX::PFN->new(@_) }
sub newMetaData { Pegasus::DAX::MetaData->new(@_) }
sub newPlainFilename { Pegasus::DAX::PlainFilename->new(@_) }
sub newFilename { Pegasus::DAX::Filename->new(@_) }
sub newFile { Pegasus::DAX::File->new(@_) }
sub newExecutable { Pegasus::DAX::Executable->new(@_) }
sub newTransformation { Pegasus::DAX::Transformation->new(@_) }
sub newDAG { Pegasus::DAX::DAG->new(@_) }
sub newDAX { Pegasus::DAX::DAX->new(@_) }
sub newJob { Pegasus::DAX::Job->new(@_) }
sub newADAG { Pegasus::DAX::ADAG->new(@_) }
sub newInvoke { Pegasus::DAX::Invoke->new(@_) }

#
# export bonanza
#
our $VERSION = '3.3';
our %EXPORT_TAGS = (
    func => [qw(newADAG newDAG newDAX newExecutable newFile newFilename
        newJob newMetaData newPFN newPlainFilename newProfile
        newTransformation newInvoke)],
    link => [ @{$Pegasus::DAX::Filename::EXPORT_TAGS{link}} ],
    transfer => [ @{$Pegasus::DAX::Filename::EXPORT_TAGS{transfer}} ],
    arch => [ @{$Pegasus::DAX::Executable::EXPORT_TAGS{arch}} ],
    os => [ @{$Pegasus::DAX::Executable::EXPORT_TAGS{os}} ],
    ns => [ @{$Pegasus::DAX::Profile::EXPORT_TAGS{ns}} ],
    schema => [ @{$Pegasus::DAX::ADAG::EXPORT_TAGS{schema}} ],
    invoke => [ @{$Pegasus::DAX::Invoke::EXPORT_TAGS{all}} ] );
$EXPORT_TAGS{all} = [ map { @{$_} } values %EXPORT_TAGS ];
our @EXPORT_OK = ( @{$EXPORT_TAGS{all}} );
our @EXPORT = ( @{$EXPORT_TAGS{func}} );

sub new {
    my $proto = shift;
    my $class = ref($proto) || $proto;
    my $self = $class->SUPER::new();
    croak "The c'tor on $class is not instantiatable";
}

1;

__END__

=head1 NAME

Pegasus::DAX::Factory - convenience module

=head1 SYNOPSIS

    use Pegasus::DAX::Factory qw(:all);

    my $a = newProfile( PROFILE_ENV, 'FOO', 'bar' );
    my $b = newFilename( name => 'fubar', link => LINK_OUT );

=head1 DESCRIPTION

This class exports all constructors as convenience functions into the
caller's namespace.
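As a brief sketch (the workflow, job, and file names below are invented
for illustration; the calls themselves follow the API used elsewhere in
this distribution, e.g. in testme.pl), a tiny workflow can be assembled
entirely through these wrappers:

    use Pegasus::DAX::Factory qw(:all);

    my $adag = newADAG( name => 'example' );
    my $job  = newJob( name => 'preprocess', version => '1.0' );
    $job->addArgument( '-i', newFilename( name => 'f.a', link => LINK_IN ) );
    $adag->addJob( $job );
    $adag->toXML( \*STDOUT, '' );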
In addition, when using the C<:all> tag, all constants from any class
are exported.

=head1 IMPORT TAGS

=over 4

=item C<:func>

This tag imports the convenience wrapper functions around the class
constructors. The wrappers are exported by default.

=item C<:link>

This tag imports the linkage constants C<LINK_*> from
L<Pegasus::DAX::Filename>.

=item C<:transfer>

This tag imports the transfer constants C<TRANSFER_*> from
L<Pegasus::DAX::Filename>.

=item C<:arch>

This tag imports the architecture constants C<ARCH_*> from
L<Pegasus::DAX::Executable>.

=item C<:os>

This tag imports the operating system constants C<OS_*> from
L<Pegasus::DAX::Executable>.

=item C<:ns>

This tag imports the profile namespace constants C<PROFILE_*> from
L<Pegasus::DAX::Profile>.

=item C<:schema>

This tag imports the XML schema constants from L<Pegasus::DAX::ADAG>.

=item C<:invoke>

This tag imports the notification event constants C<INVOKE_*> from
L<Pegasus::DAX::Invoke>.

=item C<:all>

All of the above.

=back

=head1 FUNCTIONS

=over 4

=item newProfile

Factory function for C<Pegasus::DAX::Profile-E<gt>new>.

=item newPFN

Factory function for C<Pegasus::DAX::PFN-E<gt>new>.

=item newMetaData

Factory function for C<Pegasus::DAX::MetaData-E<gt>new>.

=item newPlainFilename

Factory function for C<Pegasus::DAX::PlainFilename-E<gt>new>.

=item newFilename

Factory function for C<Pegasus::DAX::Filename-E<gt>new>.

=item newFile

Factory function for C<Pegasus::DAX::File-E<gt>new>.

=item newExecutable

Factory function for C<Pegasus::DAX::Executable-E<gt>new>.

=item newTransformation

Factory function for C<Pegasus::DAX::Transformation-E<gt>new>.

=item newDAG

Factory function for C<Pegasus::DAX::DAG-E<gt>new>.

=item newDAX

Factory function for C<Pegasus::DAX::DAX-E<gt>new>.

=item newJob

Factory function for C<Pegasus::DAX::Job-E<gt>new>.

=item newADAG

Factory function for C<Pegasus::DAX::ADAG-E<gt>new>.

=item newInvoke

Factory function for C<Pegasus::DAX::Invoke-E<gt>new>.

=back

=head1 SEE ALSO

=over 4

=item L<Pegasus::DAX::Base>

Base class.

=item L =item L =item L =item L =item L =item L =item L =item L =item L =item L =item L =item L =item L =item L =item L

Classes for which a convenience c'tor is provided.

=back

=head1 COPYRIGHT AND LICENSE

Copyright 2007-2011 University Of Southern California

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

=cut

pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/testme.pl0000644000175000017500000000643311757531137023102 0ustar ryngerynge#!/usr/bin/env perl
#
use 5.006;
use strict;
use IO::Handle;
use Cwd;
use File::Spec;
use File::Basename;

#
# all this to determine PEGASUS_HOME, which we don't set any more.
# and from there derive the PERL5LIB that we need for the DAX API.
#
BEGIN {
    my $found = dirname(dirname(getcwd()));
    die "FATAL: Sorry, but you need to include Pegasus into your PATH."
unless ( defined $found && -d $found ); unshift( @INC, $found ); $ENV{'PEGASUS_HOME'} = dirname(dirname($found)) unless exists $ENV{'PEGASUS_HOME'}; warn "# found $found\n"; } use Pegasus::DAX::Factory qw(:all); use constant NS => 'diamond'; my $adag = newADAG( name => NS ); $adag->invoke( INVOKE_AT_END, '/bin/date' ); my $job1 = newJob( namespace => NS, name => 'preprocess', version => '2.0' ); my $job2 = newJob( namespace => NS, name => 'findrange', version => '2.0' ); my $job3 = newJob( namespace => NS, name => 'findrange', version => '2.0' ); my $job4 = newJob( namespace => NS, name => 'analyze', version => '2.0' ); $job4->invoke( INVOKE_AT_END, '/bin/true' ); # create "f.a" locally my $fn = "f.a"; open( F, ">$fn" ) || die "FATAL: Unable to open $fn: $!\n"; my @now = gmtime(); printf F "%04u-%02u-%02u %02u:%02u:%02uZ\n", $now[5]+1900, $now[4]+1, @now[3,2,1,0]; close F; my $file = newFile( name => 'f.a' ); $file->addPFN( newPFN( url => 'file://' . Cwd::abs_path($fn), site => 'local' ) ); $adag->addFile($file); if ( exists $ENV{'PEGASUS_HOME'} ) { use File::Spec; use POSIX (); my $keg = File::Spec->catfile( $ENV{'PEGASUS_HOME'}, 'bin', 'keg' ); my @os = POSIX::uname(); $os[2] =~ s/^(\d+(\.\d+(\.\d+)?)?).*/$1/; if ( -x $keg ) { my $app1 = newExecutable( namespace => NS, name => 'preprocess', version => '2.0', arch => $os[4], os => lc($^O), osversion => $os[2] ); $app1->addPFN( newPFN( url => "file://$keg", site => 'local' ) ); $adag->addExecutable($app1); warn "# created executable for keg\n"; } } my %hash = ( link => LINK_OUT, register => 'false', transfer => 'true' ); my $fna = newFilename( name => $file->name, link => LINK_IN ); my $fnb1 = newFilename( name => 'f.b1', %hash ); my $fnb2 = newFilename( name => 'f.b2', %hash ); $job1->addArgument( '-a', $job1->name, '-T60', '-i', $fna, '-o', $fnb1, $fnb2 ); $adag->addJob($job1); my $fnc1 = newFilename( name => 'f.c1', %hash ); $fnb1->link( LINK_IN ); $job2->addArgument( '-a', $job2->name, '-T60', '-i', $fnb1, '-o', $fnc1 ); $adag->addJob($job2); my $fnc2 = newFilename( name => 'f.c2', %hash ); $fnb2->link( LINK_IN ); $job3->addArgument( '-a', $job3->name, '-T60', '-i', $fnb2, '-o', $fnc2 ); $adag->addJob($job3); # yeah, you can create multiple children for the same parent # string labels are distinguished from jobs, no problem $adag->addDependency( $job1, $job2, 'edge1', $job3, 'edge2' ); my $fnd = newFilename( name => 'f.d', %hash ); $fnc1->link( LINK_IN ); $fnc2->link( LINK_IN ); $job4->separator(''); # just to show the difference wrt default $job4->addArgument( '-a ', $job4->name, ' -T60 -i ', $fnc1, ' ', $fnc2, ' -o ', $fnd ); $adag->addJob($job4); # this is a convenience function -- easier than overloading addDependency? 
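# hedged reading of the call below: addInverse() appears to take the child
# first and the parents after it -- the reverse argument order of
# addDependency() above -- yielding edges job2->job4 and job3->job4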
$adag->addInverse( $job4, $job2, 'edge3', $job3, 'edge4' ); my $xmlns = shift; $adag->toXML( \*STDOUT, '', $xmlns ); pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/Job.pm0000644000175000017500000001115211757531137022306 0ustar ryngerynge# # License: (atend) # $Id: Job.pm 3862 2011-05-27 03:19:42Z voeckler $ # package Pegasus::DAX::Job; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Pegasus::DAX::AbstractJob; use Exporter; our @ISA = qw(Pegasus::DAX::AbstractJob Exporter); our $VERSION = '3.3'; our @EXPORT = (); our @EXPORT_OK = (); our %EXPORT_TAGS = (); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); if ( @_ == 0 ) { # nothing to do } elsif ( @_ > 1 ) { # called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } else { croak "invalid c'tor invocation"; } bless $self, $class; } # forward declaration to auto loaders sub name; sub namespace; sub version; sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (opt. IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? "$xmlns:job" : 'job'; $f->print( "$indent<$tag" , attribute('namespace',$self->namespace,$xmlns) , attribute('name',$self->name,$xmlns) , attribute('version',$self->version,$xmlns) , attribute('id',$self->id,$xmlns) , attribute('node-label',$self->nodelabel,$xmlns) , ">\n" ); $self->innerXML($f," $indent",$xmlns); $f->print( "$indent\n" ); } 1; __END__ =head1 NAME Pegasus::DAX::Job - Job node to describe a job in the current workflow. =head1 SYNOPSIS use Pegasus::DAX::Job; my $j = Pegasus::DAX::Job->new( namespace => undef, name => 'fubar', version => '3.0' ); $j->addArgument( '-flag' ); =head1 DESCRIPTION This class stores a single job within the current workflow. Most of the heavy lifting is done in the base class L. =head1 METHODS =over 4 =item new() =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. Other means of construction is to use named lists. =item name Getter and setter for the job's name required string. Regardless of the child class, any job always some form of name. =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 INHERITED METHODS Please refer to L for inherited methods. =over 4 =item addArgument( $string ) =item addArgument( $plainfilename_instance ) =item addArgument( $filename_instance ) =item addArgument( $file_instance ) =item addArgument( $exectuable_instance ) =item addProfile( $namespace, $key, $value ) =item addProfile( $profile_instance ) =item stdin =item stdout =item stderr =item id =item nodelabel =item addUses( .. 
) =item uses( $filename_instance ) =item uses( $file_instance ) =item uses( $executable_instance ) =item addInvoke( $when, $cmd ) =item notify( $when, $cmd ) =item invoke( $when $cmd ) =item innerXML( $handle, $indent, $xmlns ) =back =head1 SEE ALSO =over 4 =item L Base class. =item L =item L =item L Sibling classes. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/File.pm0000644000175000017500000000750411757531137022461 0ustar ryngerynge# # License: (atend) # $Id: File.pm 3598 2011-04-27 23:42:04Z voeckler $ # package Pegasus::DAX::File; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Pegasus::DAX::CatalogType; use Exporter; our @ISA = qw(Pegasus::DAX::CatalogType Exporter); our $VERSION = '3.3'; our %EXPORT_TAGS = (); our @EXPORT = (); our @EXPORT_OK = (); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); if ( @_ == 0 ) { # nothing to do } elsif ( @_ > 1 ) { # called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } else { croak "invalid c'tor invocation"; } bless $self, $class; } # forward declarations #sub name; # inherited sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (opt. IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? "$xmlns:file" : 'file'; $f->print( "$indent<$tag" , attribute('name',$self->name,$xmlns) , ">\n" ); $self->innerXML($f," $indent",$xmlns); $f->print( "$indent\n" ); } 1; __END__ =head1 NAME Pegasus::DAX::File - stores an included replica catalog entry. =head1 SYNOPSIS use Pegasus::DAX::File; my $a = Pegasus::DAX::File(); $a->name( 'lfn' ); $a->addPFN( ... ); =head1 DESCRIPTION This class remembers an included Pegasus replica catalog entry. =head1 METHODS =over 4 =item new() =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. Other means of construction is to use named lists. However, you will have to be aware of the internals to be able to use these lists successfully. =item name Setter and getter for the logical filename. =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. 
If defined, all element tags will be prefixed with this name space. =back =head1 INHERITED METHODS Please refer to L for inherited methods. =over 4 =item addMeta( $key, $type, $value ) =item addMeta( $metadata_instance ) =item addPFN( $url ) =item addPFN( $url, $site ) =item addPFN( $pfn_instance ) =item addProfile( $namespace, $key, $value ) =item addProfile( $profile_instance ) =back =head1 SEE ALSO =over 4 =item L Base class. =item L Class using L. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/MetaData.pm0000644000175000017500000001023511757531137023255 0ustar ryngerynge# # License: (atend) # $Id: MetaData.pm 3598 2011-04-27 23:42:04Z voeckler $ # package Pegasus::DAX::MetaData; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Exporter; our @ISA = qw(Pegasus::DAX::Base Exporter); our $VERSION = '3.3'; our @EXPORT = (); our %EXPORT_TAGS = (); our @EXPORT_OK = (); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); if ( @_ == 0 ) { # nothing to do } elsif ( @_ == 3 ) { # called as key, type, value @{$self}{'key','type','value'} = @_; } elsif ( @_ > 2 && (@_ & 1) == 0 ) { # even: called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } else { croak "invalid c'tor for ", __PACKAGE__; } bless $self, $class; } # forward declarations so we can check using 'can' sub key; sub type; sub value; sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? "$xmlns:metadata" : 'metadata'; $f->print( "$indent<$tag", , attribute('key',$self->key,$xmlns) , attribute('type',$self->type,$xmlns) , ">" , quote($self->value) , "\n" ); } 1; __END__ =head1 NAME Pegasus::DAX::MetaData - stores a Pegasus piece of meta data. =head1 SYNOPSIS use Pegasus::DAX::MetaData; my $a = Pegasus::DAX::MetaData->new( 'key', 'type', 'fubar' ); my $b = Pegasus::DAX::MetaData->new( key => 'foo' , type => 'integer' , value => 'bar' ); =head1 DESCRIPTION This class remembers a Pegasus meta data. The internal transformation- and replica catalog may use meta data associated with entries. =head1 METHODS =over 4 =item new() =item new( $key, $type, $value ) =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. 
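For instance, a small sketch (the key, type, and value below are invented
for illustration):

    my $m = Pegasus::DAX::MetaData->new( 'size', 'int', '1024' );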
When invoked with exactly 3 arguments, the first argument is the meta data key, the second argument the type identifier, and the third argument the value to set. Other means of construction is to use named lists. =item key Setter and getter for a key string. The key value may be of restricted range, dependinng on the namespace, but this is not checked at this point. =item type Setter and getter for a type string. Types are not standardized in any way. =item value Setter and getter for the value to be transported. =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 SEE ALSO =over 4 =item L Base class. =item L Abstract class using meta data. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/Profile.pm0000644000175000017500000001264511757531137023204 0ustar ryngerynge# # License: (atend) # $Id: Profile.pm 3598 2011-04-27 23:42:04Z voeckler $ # package Pegasus::DAX::Profile; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Exporter; our @ISA = qw(Pegasus::DAX::Base Exporter); use constant PROFILE_PEGASUS => 'pegasus'; use constant PROFILE_CONDOR => 'condor'; use constant PROFILE_DAGMAN => 'dagman'; use constant PROFILE_ENV => 'env'; use constant PROFILE_HINTS => 'hints'; use constant PROFILE_GLOBUS => 'globus'; use constant PROFILE_SELECTOR => 'selector'; use constant PROFILE_STAT => 'stat'; our $VERSION = '3.3'; our @EXPORT = (); our %EXPORT_TAGS = ( ns => [ qw(PROFILE_PEGASUS PROFILE_CONDOR PROFILE_DAGMAN PROFILE_ENV PROFILE_HINTS PROFILE_GLOBUS PROFILE_SELECTOR PROFILE_STAT ) ] ); $EXPORT_TAGS{all} = [ @{$EXPORT_TAGS{ns}} ]; our @EXPORT_OK = ( @{$EXPORT_TAGS{ns}} ); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); if ( @_ == 0 ) { # nothing to do } elsif ( @_ == 3 ) { # called as namespace, key, value @{$self}{'namespace','key','value'} = @_; } elsif ( @_ > 2 && (@_ & 1) == 0 ) { # even: called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } else { croak "invalid c'tor for ", __PACKAGE__; } bless $self, $class; } # forward declarations so can we check using 'can' sub namespace; sub key; sub value; sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (IN): namespace of element, if necessary # my $self = shift; my 
$f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? "$xmlns:profile" : 'profile'; # older DAXes permitted single element inside a profile value my $value = ( ref $self->{value} && $self->{value}->isa('Pegasus::DAX::PlainFilename') ) ? $self->{value}->name : $self->value; $f->print( "$indent<$tag", , attribute('namespace',$self->namespace,$xmlns) , attribute('key',$self->key,$xmlns) , ">" , quote($value) , "\n" ); } 1; __END__ =head1 NAME Pegasus::DAX::Profile - stores a Pegasus profile. =head1 SYNOPSIS use Pegasus::DAX::Profile qw(:ns); my $a = Pegasus::DAX::Profile->new( PROFILE_ENV, 'FOO', 'bar' ); my $b = Pegasus::DAX::Profile->new( namespace => PROFILE_CONDOR, key => 'getenv', value => 'True' ); =head1 DESCRIPTION This class remembers a Pegasus profile. Pegasus profiles abstracts the various concrete planning details. =head1 CONSTANTS The following constants are imported with the I tag when using this module. The constants define the various permissible namespaces. =over 4 =item PROFILE_PEGASUS =item PROFILE_CONDOR =item PROFILE_DAGMAN =item PROFILE_ENV =item PROFILE_HINTS =item PROFILE_GLOBUS =item PROFILE_SELECTOR =item PROFILE_STAT =back =head1 METHODS =over 4 =item new() =item new( $namespace, $key, $value ); =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. When invoked with exactly 3 arguments, the first argument is the profile namespace, the second argument the key inside the namespace, and the third argument the value to set. Other means of construction is to use named lists. =item namespace Setter and getter for a namespace string. Please use the C constants defined previously. These constants are not imported by default, unless you use the I import tag. =item key Setter and getter for a key string. The key value may be of restricted range, dependinng on the namespace, but this is not checked at this point. =item value Setter and getter for the value to be transported. =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 SEE ALSO =over 4 =item L Base class. =item L =item L =item L Classes using profiles. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
=cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/DAX.pm0000644000175000017500000001061111757531137022207 0ustar ryngerynge# # License: (atend) # $Id: DAX.pm 3598 2011-04-27 23:42:04Z voeckler $ # package Pegasus::DAX::DAX; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Pegasus::DAX::AbstractJob; use Exporter; our @ISA = qw(Pegasus::DAX::AbstractJob Exporter); our $VERSION = '3.3'; our @EXPORT = (); our @EXPORT_OK = (); our %EXPORT_TAGS = (); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); if ( @_ == 0 ) { # nothing to do } elsif ( @_ > 1 ) { # called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } else { croak "invalid c'tor for ", __PACKAGE__; } bless $self, $class; } # forward declaration to auto loaders sub file; sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (opt. IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? "$xmlns:dax" : 'dax'; $f->print( "$indent<$tag" , attribute('file',$self->file,$xmlns) , attribute('id',$self->id,$xmlns) , attribute('node-label',$self->nodelabel,$xmlns) , ">\n" ); $self->innerXML($f," $indent",$xmlns); $f->print( "$indent\n" ); } 1; __END__ =head1 NAME Pegasus::DAX::DAX - Job node to store an unplanned workflow. =head1 SYNOPSIS use Pegasus::DAX::DAX; my $a = Pegasus::DAX::DAX->new( file => 'fubar' ); $a->addArgument( '-flag' ); =head1 DESCRIPTION This class stores the job that describes a workflow (in DAX file format) that still needs to be planned. The job refers to the external filename for the workflow. =head1 METHODS =over 4 =item new() =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. Other means of construction is to use named lists. =item name Getter and setter for the job's name required string. Regardless of the child class, any job always some form of name. =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 INHERITED METHODS Please refer to L for inherited methods. =over 4 =item addArgument( $string ) =item addArgument( $plainfilename_instance ) =item addArgument( $filename_instance ) =item addArgument( $file_instance ) =item addArgument( $exectuable_instance ) =item addProfile( $namespace, $key, $value ) =item addProfile( $profile_instance ) =item stdin =item stdout =item stderr =item id =item nodelabel =item addUses( .. 
) =item uses( $filename_instance ) =item uses( $file_instance ) =item uses( $executable_instance ) =item addInvoke( $when, $cmd ) =item notify( $when, $cmd ) =item invoke( $when $cmd ) =item innerXML( $handle, $indent, $xmlns ) =back =head1 SEE ALSO =over 4 =item L Base class. =item L =item L =item L Sibling classes. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/Executable.pm0000644000175000017500000001721711757531137023665 0ustar ryngerynge# # License: (atend) # $Id: Executable.pm 3632 2011-04-29 22:34:00Z voeckler $ # package Pegasus::DAX::Executable; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Pegasus::DAX::CatalogType; use Pegasus::DAX::InvokeMixin; use Exporter; our @ISA = qw(Pegasus::DAX::CatalogType Pegasus::DAX::InvokeMixin Exporter); use constant ARCH_IA64 => 'ia64'; use constant ARCH_PPC => 'ppc'; use constant ARCH_PPC_64 => 'ppc_64'; use constant ARCH_SPARCV7 => 'sparcv7'; use constant ARCH_SPARCV9 => 'sparcv9'; use constant ARCH_X86 => 'x86'; use constant ARCH_X86_64 => 'x86_64'; use constant ARCH_AMD64 => 'x86_64'; use constant OS_AIX => 'aix'; use constant OS_LINUX => 'linux'; use constant OS_DARWIN => 'darwin'; use constant OS_MACOSX => 'darwin'; use constant OS_SUNOS => 'sunos'; use constant OS_SOLARIS => 'sunos'; use constant OS_WINDOWS => 'windows'; our $VERSION = '3.3'; our @EXPORT = (); our %EXPORT_TAGS = ( arch =>[qw(ARCH_IA64 ARCH_PPC ARCH_PPC_64 ARCH_SPARCV7 ARCH_SPARCV9 ARCH_X86 ARCH_X86_64 ARCH_AMD64)], os => [qw(OS_AIX OS_LINUX OS_DARWIN OS_MACOSX OS_WINDOWS OS_SUNOS OS_SOLARIS)] ); $EXPORT_TAGS{all} = [ map { @{$_} } values %EXPORT_TAGS ]; our @EXPORT_OK = ( @{$EXPORT_TAGS{all}} ); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); if ( @_ == 0 ) { # nothing to do } elsif ( @_ > 1 ) { # called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } else { croak "invalid c'tor invocation"; } bless $self, $class; } # forward declarations sub namespace; #sub name; # inherited from parent sub version; sub arch; sub os; sub osrelease; sub osversion; sub glibc; sub installed; sub key { # purpose: create the distinguishing key # returns: a string that can be used in a hash # my $self = shift; my @x = ( $self->namespace , $self->name , $self->version , $self->arch , $self->os , $self->osrelease , $self->osversion , $self->glibc , boolean($self->installed) ); join( $;, grep { defined $_ } @x ); } sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (opt. IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? 
"$xmlns:executable" : 'executable'; $f->print( "$indent<$tag" , attribute('namespace',$self->namespace,$xmlns) , attribute('name',$self->name,$xmlns) , attribute('version',$self->version,$xmlns) , attribute('arch',$self->arch,$xmlns) , attribute('os',$self->os,$xmlns) , attribute('osrelease',$self->osrelease,$xmlns) , attribute('osversion',$self->osversion,$xmlns) , attribute('glibc',$self->glibc,$xmlns) , attribute('installed',boolean($self->installed),$xmlns) , ">\n" ); $self->innerXML($f," $indent",$xmlns); # # # if ( exists $self->{invokes} ) { foreach my $i ( @{$self->{invokes}} ) { $i->toXML($f," $indent",$xmlns); } } $f->print( "$indent\n" ); } 1; __END__ =head1 NAME Pegasus::DAX::Executable - stores an included transformation catalog entry. =head1 SYNOPSIS use Pegasus::DAX::Executable; my $a = Pegasus::DAX::Executable(); $a->namespace( 'somewhere' ); $a->name( 'lfn' ); $a->version( '1.0' ); $a->os( 'x86_64' ); =head1 DESCRIPTION This class remembers an included Pegasus transformation catalog entry. =head1 CONSTANTS These constants describe the architecture for which an executable was compiled. Note that multi-architectures as available on Mac OS X are currently not supported. =over 4 =item ARCH_IA64 =item ARCH_PPC =item ARCH_PPC_64 =item ARCH_SPARCV7 =item ARCH_SPARCV9 =item ARCH_X86 =item ARCH_X86_64 =item ARCH_AMD64 =back These constants describe the operating system platform. Some of them are aliases mapping to the same string. =over 4 =item OS_AIX The IBM AIX Unix platform. =item OS_LINUX The Linux platform. =item OS_DARWIN The Mac OS X platform. =item OS_MACOSX An alias for the Mac OS X platform. =item OS_SUNOS The SUN Sparc and SUN Intel platforms. =item OS_SOLARIS An alias for the SUN platforms. =item OS_WINDOWS The Microsoft Windows family of platforms. =back =head1 METHODS =over 4 =item new() =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. Other means of construction is to use named lists. However, you will have to be aware of the internals to be able to use these lists successfully. =item namespace Setter and getter for the optional logical transformation namespace string. =item name Setter and getter for the logical transformation's name string. =item version Setter and getter for the optional logical transformation version number string. =item arch Setter and getter for the optional architecture string. =item os Setter and getter for the optional operating system identifier. =item osrelease Setter and getter for the optional OS release string. =item osversion Setter and getter for the optional OS version string. =item glibc Setter and getter for the optional GNU libc platform identifier string. =item key This function munges all above attributes of this instance into a binary string that can be used as unique identifier for this instance in a hash. =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 INHERITED METHODS Please refer to L for inherited methods. 
=over 4 =item addMeta( $key, $type, $value ) =item addMeta( $metadata_instance ) =item addPFN( $url ) =item addPFN( $url, $site ) =item addPFN( $pfn_instance ) =item addProfile( $namespace, $key, $value ) =item addProfile( $profile_instance ) =back Please refer to L for inherited methods. =over 4 =item addInvoke( $when, $cmd ) =item invoke( $when, $cmd ) =item notify( $when, $cmd ) =back =head1 SEE ALSO =over 4 =item L Base class. =item L Base class. =item L Class using L. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/PlainFilename.pm0000644000175000017500000000727711757531137024315 0ustar ryngerynge# # License: (atend) # $Id: PlainFilename.pm 3598 2011-04-27 23:42:04Z voeckler $ # package Pegasus::DAX::PlainFilename; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Exporter; our @ISA = qw(Pegasus::DAX::Base Exporter); our $VERSION = '3.3'; our @EXPORT = (); our @EXPORT_OK = (); our %EXPORT_TAGS = (); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); $self->{name} = undef; if ( @_ == 0 ) { # nothing to do } elsif ( @_ > 1 && (@_ & 1) == 0) { # even: called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ! ref $_[0] ) { # called with single scalar $self->{name} = shift; } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b,c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } else { carp "invalid c'tor for ", __PACKAGE__; } bless $self, $class; } # forward declaration of methods sub name; sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? "$xmlns:file" : 'file'; $f->print( "$indent<$tag" , attribute('name',$self->{name},$xmlns) , " />" ); } 1; __END__ =head1 NAME Pegasus::DAX::PlainFilename - class for simple file names. =head1 SYNOPSIS use Pegasus::DAX::PlainFilename; my $i = Pegasus::DAX::PlainFilename->new( 'asdf.txt' ); print "name is ", $i->name, "\n"; $i->name = 'newname.txt'; print "name is ", $i->name, "\n"; =head1 DESCRIPTION This class remembers a simple filename. These filenames are aggregated by the C class. A simple filename is either part of a concrete job's argument list. =head1 METHODS =over 4 =item new() =item new( $filename ) =item new( name => $filename ) =item new( { name => $filename } ) The constructor may be called with a single scalar argument, which is the filename string. Alternative ways to invoke the c'tor pass the arguments as named list. =item name() This is the getter. =item name( $name ) This is the setter. 
=item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 SEE ALSO =over 4 =item L Base class. =item L Child class. =item L The abstract job class aggregates instances of this class in C and in C. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/Filename.pm0000644000175000017500000001371711757531137023325 0ustar ryngerynge# # License: (atend) # $Id: Filename.pm 3598 2011-04-27 23:42:04Z voeckler $ # package Pegasus::DAX::Filename; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Pegasus::DAX::PlainFilename; use Exporter; our @ISA = qw(Pegasus::DAX::PlainFilename Exporter); use constant LINK_NONE => 'none'; use constant LINK_IN => 'input'; use constant LINK_OUT => 'output'; use constant LINK_INPUT => 'input'; use constant LINK_OUTPUT => 'output'; use constant LINK_INOUT => 'inout'; use constant LINK_IO => 'inout'; use constant TRANSFER_TRUE => 'true'; use constant TRANSFER_FALSE => 'false'; use constant TRANSFER_OPTIONAL => 'optional'; our $VERSION = '3.3'; our @EXPORT = (); our %EXPORT_TAGS = ( 'link' => [qw(LINK_NONE LINK_IN LINK_OUT LINK_INPUT LINK_OUTPUT LINK_INOUT LINK_IO)], 'transfer' => [qw(TRANSFER_TRUE TRANSFER_FALSE TRANSFER_OPTIONAL)] ); $EXPORT_TAGS{all} = [ map { @{$_} } values %EXPORT_TAGS ]; our @EXPORT_OK = ( @{$EXPORT_TAGS{all}} ); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); if ( @_ == 0 ) { # nothing to do } elsif ( @_ > 1 && (@_ & 1) == 0 ) { # called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] && ( ref $_[0] eq 'HASH' || ref $_[0] eq __PACKAGE__ ) ) { # called with { a=>b, c=>d } hashref # or called as copy-c'tor (deep copy) %{$self} = ( %{$self}, %{ shift() } ); } else { croak "invalid c'tor invocation"; } bless $self, $class; } # forward declarations so can we check using 'can' #sub name; # inherited sub namespace; sub version; sub link; sub optional; sub register; sub transfer; sub executable; sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? 
"$xmlns:uses" : 'uses'; $f->print( "$indent<$tag", , attribute('namespace',$self->namespace,$xmlns) , attribute('name',$self->name,$xmlns) , attribute('version',$self->version,$xmlns) , attribute('link',$self->link,$xmlns) , attribute('optional',boolean($self->optional),$xmlns) , attribute('register',boolean($self->register),$xmlns) , attribute('transfer',$self->transfer,$xmlns) , attribute('executable',boolean($self->executable),$xmlns) , " />\n" ); } 1; __END__ =head1 NAME Pegasus::DAX::Filename - class for complete file names. =head1 SYNOPSIS use Pegasus::DAX::Filename; my $i = Pegasus::DAX::Filename->new( name => 'filename.txt' ); $i->link = LINK_IN; $i->register = 1; $i->optional = 0; =head1 DESCRIPTION This class remembers a simple filename. These filenames are aggregated by the C class. A simple filename is either part of a concrete job's argument list. =head1 CONSTANTS The following constants define valid values for the I attribute. =over 4 =item LINK_NONE Constant denoting that a file has no linkage. To be used with the I attribute. =item LINK_IN =item LINK_INPUT Constant denoting that a file is an input file. To be used with the I attribute. =item LINK_OUT =item LINK_OUTPUT Constant denoting that a file is an output file. To be used with the I attribute. =item LINK_IO =item LINK_INOUT Constant denoting that a file is an input- and output file. To be used with the I attribute. =back The following constants define valid values for the I attribute. =over 4 =item TRANSFER_TRUE Stage the files as necessary. =item TRANSFER_FALSE Do not stage files. =item TRANSFER_OPTIONAL Attempt to stage files, but failing to stage an input file is not an error. =back =head1 METHODS =over 4 =item new() =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. Other means to set attributes is to used named lists. =item name This setter and getter is inherited. =item namespace Setter and getter for a namespace string. =item version Setter and getter for a version string. =item link Setter and getter for a linkage string. Please use the pre-defined constants starting with C. =item optional =item register =item executable Setter and getter for boolean values. Please use Perl truth. =item transfer Setter and getter for tri-state value. Please use strings. =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 SEE ALSO =over 4 =item Pegasus::DAX::PlainFilename Base class. =item Pegasus::DAX::AbstractJob Aggregating class. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/Base.pm0000644000175000017500000002251011757531137022446 0ustar ryngerynge# # License: (atend) # $Id: Base.pm 3616 2011-04-28 22:22:45Z voeckler $ # package Pegasus::DAX::Base; use 5.006; use strict; use vars qw($AUTOLOAD); use Carp; use Exporter; our @ISA = qw(Exporter); sub quote($); # { } sub attribute($$;$); # { } sub boolean($); # { } our $VERSION = '3.3'; our @EXPORT = (); our @EXPORT_OK = qw(quote attribute boolean $escape %escape); our %EXPORT_TAGS = ( xml => [ @EXPORT_OK ], all => [ @EXPORT_OK ] ); our $prefix = '[' . __PACKAGE__ . '] '; sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = bless { @_ }, $class; $self; } sub _deep($); # { } sub _deep($) { my $src = shift; if ( ref $src eq 'ARRAY' ) { [ map { _deep($_) } @{$src} ]; } elsif ( ref $src eq 'HASH' ) { my $x = { }; while ( my ($k,$v) = each %{$src} ) { $x->{$k} = _deep($v); } $x; } elsif ( ref $src ) { if ( $src->can('clone') ) { $src->clone(); } else { carp "FATAL: Do not know how to clone ", ref($src), "\n"; } } else { $src; } } sub clone { # purpose: simplistic clone method # paramtr: no arguments # returns: copy of current object # my $self = shift; my $result = bless { }, ref($self); while ( my ($k,$v) = each %{ $self } ) { $result->{$k} = _deep($v); } $result; } sub AUTOLOAD { # purpose: catch-all accessor (set and get) for all data fields # ever defined in any great-grandchild of this class # warning: The autoload maps the data fields XYZ to method XYZ # paramtr: ? # returns: ? my $self = shift; my $type = ref($self) or croak( $prefix, "$self is not an object" ); my $name = $AUTOLOAD; $name =~ s/.*:://; # strip fully-qualified portion unless ( exists $self->{$name} || $self->can($name) ) { croak( $prefix, "Can't access >>$name<< field in class $type" ); } my $result = $self->{$name}; if ( ref $self->{$name} eq 'HASH' ) { # hash value if ( @_ > 0 ) { if ( ref $_[0] eq 'HASH' && @_ == 1 ) { $self->{$name} = { @{shift()} }; # deep copy } elsif ( (@_ & 1) == 0 ) { $self->{$name} = { @_ }; } else { croak( "${type}->${name}() setter is helpless" ); } } # return unrolled hash in list context, hashref in scalar return wantarray ? ( %{ $result } ) : $result; } elsif ( ref $self->{$name} eq 'ARRAY' ) { # array value if ( @_ > 0 ) { if ( ref $_[0] eq 'ARRAY' && @_ == 1 ) { $self->{$name} = [ @{shift()} ]; # deep copy } else { $self->{$name} = [ @_ ]; } } # returned unrolled array in list context, arrayref in scalar return wantarray ? ( @{ $result } ) : $result; } else { # scalar or instance value if ( @_ ) { my $v = shift; if ( defined $v ) { $self->{$name} = $v; } else { delete $self->{$name}; } } return $result; } croak "AUTOLOAD: This point should not be reached for ${type}->${name}"; } our %escape = ( '&' => '&' , '<' => '<' , '>' => '>' , "'" => ''' , '"' => '"' ); our $escape = '([' . join( '', keys %escape ) . '])'; sub quote($) { # purpose: quote XML entities inside a value string # paramtr: $s (IN): value string # returns: quoted version, possibly same string # my $s = shift; $s =~ s/$escape/$escape{$1}/ge if defined $s; $s; } sub attribute($$;$) { # purpose: format an element attribute # paramtr: $key (IN): name of attribute # $val (IN): value for attribute # $xmlns (opt. 
IN): xml namespace for qualifications # returns: formatted string # warning: may return empty string if key is empty # my $key = shift; my $val = shift; my $xmlns = shift; if ( defined $key && $key && defined $val ) { if ( defined $xmlns && $xmlns ) { " $xmlns:$key=\"", quote($val), "\""; } else { " $key=\"", quote($val), "\""; } } else { ''; } } sub boolean($) { # purpose: translate perl boolean into xml boolean # paramtr: $v (IN): value # returns: string 'true' or string 'false' for defined input # warning: returns undefined value for undefined input! # warning: string "false" input will return 'false', too. # my $s = shift; if ( defined $s ) { ( $s =~ /false/i || ! $s ) ? 'false' : 'true'; } else { undef; } } sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (opt. IN): namespace of element, if necessary # my $self = shift; croak( ref($self), " called *abstract* ", __PACKAGE__, "::toXML" ); } 1; __END__ __END__ =head1 NAME Pegasus::DAX::Base - base class for all ADAG/DAX related classes. =head1 SYNOPSIS use Pegasus::DAX::Base qw(:xml); use Exporter; our @ISA = qw(Pegasus::DAX::Base Exporter); ... sub toXML { my $self = shift; my $handle = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? "$xmlns:element" : 'element'; # open tag $handle->print( "$indent<$tag", , attribute('key1',$self->{key1}) , attribute('key2',boolean($self->{key2})) , ">\n" ); # child element $self->{aggregate}->toXML( $handle, " $indent", $xmlns ); # collection of child elements foreach my $i ( @{$self->{collection}} ) { $i->toXML( $handle, " $indent", $xmlns ); } # closing tag $handle->print( "$indent\n" ); } =head1 DESCRIPTION This module implements the base class for all classes related to generating DAX files. It provides helper functions to generate XML, and mandates that non-abstract child classes implement the C method. In addition, this class provides an C method, which in effect implements the setter and getter for all scalar values in any child class. =head1 FUNCTIONS The following section defines true functions, not static methods. If you don't know the difference, you don't need to worry. =over 4 =item quote($string) This function replaces all characters in the given input C<$string> that require to be entity-escaped. The result is a string that is either the original string, if it did not contain any characters from C<%escape>, or the string with entity replaced characters. This method will return C, if the input string was C. =item attribute($key,$value) =item attribute($key,$value,$xmlns) This function is a helper for sub-classes that instantiate the abstract C method when printing an element tag. Given the I<$key> for an element's attribute, and the I<$value> to put with the element, this method returns the string to be put into the tag assembly. The result starts with a space, the key as is, the equal sign, a quote character, the value as result of the C method, and the closing quote character. If the key is not defined or empty, or the value is not defined, the empty string will be returned. In the 3-argument form, if the C<$xmlns> argument is defined and true, the attribute will be qualified with the string in C<$xmlns>. =item boolean($v) This function translates a Perl boolean value into an XML boolean value. The output is the string C, if the expression evaluates to a Perl false value I if the input value matches the expression C. 
Every other value returns the string C. As a quirk to accomodate the omission of attributes, an I input will generate I output. =back =head1 METHODS =over 4 =item toXML( $handle, $indent, $xmlns ) This I function will terminate with an error, unless the child class overrides it. The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 VARIABLES =over 4 =item %escape This variable contains all characters that require an entity escape in an XML context, and map to the escaped XML entity that the character should be replaced with. The variable is used internally by the C static method. =item $escape This string is a regular expression that can be used to identify characters that will require an entity escape in XML context. The variable is used internally by the C static method. =back =head1 AUTOLOAD The C method implement the getter and setter for all scalar values in any sibling class. While there is some effort to support non-scalar setters and getters, please do not use that feature (yet). =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
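A short demonstration of the three helpers (a sketch, not part of the
original module; the exact entity printed for the single quote follows
the C<%escape> table above):

    use Pegasus::DAX::Base qw(:xml);

    print quote(q{a<b & 'c'}), "\n";        # a&lt;b &amp; &#39;c&#39;
    print attribute('name','x<y'), "\n";    #  name="x&lt;y"
    print attribute('name','x','ns'), "\n"; #  ns:name="x"
    print boolean(1), ' ', boolean(0), ' ', boolean('FALSE'), "\n";
    # true false false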
=cut
pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/Transformation.pm0000644000175000017500000001773211757531137024614 0ustar ryngerynge#
# License: (atend)
# $Id: Transformation.pm 4726 2011-12-22 05:11:51Z voeckler $
#
package Pegasus::DAX::Transformation;
use 5.006;
use strict;
use Carp;

use Pegasus::DAX::Base qw(:xml);
use Pegasus::DAX::Filename;
use Pegasus::DAX::InvokeMixin;
use Pegasus::DAX::TUType;
use Exporter;
our @ISA = qw(Pegasus::DAX::Base Pegasus::DAX::InvokeMixin Exporter);

our $VERSION = '3.3';
our @EXPORT = ();
our @EXPORT_OK = ();
our %EXPORT_TAGS = ();

# one AUTOLOAD to rule them all
BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD }

sub new {
    my $proto = shift;
    my $class = ref($proto) || $proto;
    my $self = $class->SUPER::new();

    if ( @_ == 0 ) {
        # nothing to do
    } elsif ( @_ == 3 ) {
        # assume namespace,name,version
        @{$self}{'namespace','name','version'} = @_;
    } elsif ( @_ > 1 && (@_ & 1) == 0 ) {
        # even: called with a=>b,c=>d list
        %{$self} = ( %{$self}, @_ );
    } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) {
        # called with { a=>b, c=>d } hashref
        %{$self} = ( %{$self}, %{ shift() } );
    } else {
        croak "invalid c'tor for ", __PACKAGE__;
    }

    bless $self, $class;
}

# forward declarations -- AUTOLOAD will provide getter/setter
sub namespace;
sub name;
sub version;

sub key {
    # purpose: create the distinguishing key
    # returns: a string that can be used in a hash
    #
    my $self = shift;
    join( $;, ($self->namespace || ''), $self->name, ($self->version || '') );
}

sub addUses {
    my $self = shift;
    $self->uses(@_);
}

sub uses {
    my $self = shift;
    my $uses = shift;
    if ( defined $uses && ref $uses ) {
        if ( $uses->isa('Pegasus::DAX::TUType') ) {
            $self->{uses}->{ $uses->namespace || '', $uses->name, $uses->version || '' } =
                Pegasus::DAX::TUType->new( $uses ); # deep copy!
        } elsif ( $uses->isa('Pegasus::DAX::Filename') ) {
            $self->{uses}->{ $uses->namespace || '', $uses->name, $uses->version || '' } =
                Pegasus::DAX::TUType->new( namespace => $uses->namespace,
                                           name => $uses->name,
                                           version => $uses->version,
                                           executable => $uses->executable );
        } elsif ( $uses->isa('Pegasus::DAX::Executable') ) {
            $self->{uses}->{ $uses->namespace || '', $uses->name, $uses->version || '' } =
                Pegasus::DAX::TUType->new( namespace => $uses->namespace,
                                           name => $uses->name,
                                           version => $uses->version,
                                           executable => 1 );
        } elsif ( $uses->isa('Pegasus::DAX::File') ) {
            $self->{uses}->{ '', $uses->name, '' } =
                Pegasus::DAX::TUType->new( name => $uses->name,
                                           executable => 0 );
        } else {
            croak "argument is not an instance I understand";
        }
    } else {
        croak "invalid argument";
    }
}

sub toXML {
    # purpose: put self onto stream as XML
    # paramtr: F (IN): perl file handle open for writing
    #          ident (IN): indentation level
    #          xmlns (opt. IN): namespace of element, if necessary
    #
    my $self = shift;
    my $f = shift;
    my $indent = shift || '';
    my $xmlns = shift;
    my $tag = defined $xmlns && $xmlns ? "$xmlns:transformation" : 'transformation';

    $f->print( "$indent<$tag"
             , attribute('namespace',$self->namespace,$xmlns)
             , attribute('name',$self->name,$xmlns)
             , attribute('version',$self->version,$xmlns)
             , ">\n" );

    #
    # <uses> -- may be empty according to Karan+Gideon
    #
    while ( my ($name,$i) = each %{$self->{uses}} ) {
        $i->toXML($f," $indent",$xmlns);
    }

    #
    # <invoke>
    #
    if ( exists $self->{invokes} ) {
        foreach my $i ( @{$self->{invokes}} ) {
            $i->toXML($f," $indent",$xmlns);
        }
    }

    $f->print( "$indent</$tag>\n" );
}

1;

__END__

=head1 NAME

Pegasus::DAX::Transformation - aggregates multiple executables and data files.
=head1 SYNOPSIS use Pegasus::DAX::Transformation; use Pegasus::DAX::Filename; my $a = Pegasus::DAX::Transformation->new( undef, 'pre', '1.0' ); my $b = Pegasus::DAX::Profile->new( namespace => 'foo' , name => 'bar' , version => '3.1416' ); $a->uses( $filename_instance ); $b->uses( Pegasus::DAX::Filename->new( ... ) ); =head1 DESCRIPTION This class aggregates multiple logical data files and transformations under a single handle that acts like a transformation itself. =head1 METHODS =over 4 =item new() =item new( $namespace, $name, $version ) =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. When invoked with exactly 3 arguments, the first argument is the logical transformation namespace or I, the second argument the required and defined transformation name, and the third argument the optional version string. Other means of construction is to use named lists. =item namespace Setter and getter for the optional transformation namespace identifier. =item name Setter and getter for required transformation name. =item version Setter and getter for the optional transformation version string. =item key creates a binary string that functions as key to identify this object when stashed into a hash. The key comprises namespace, name and version attribute values. =item addUses Alias method for C method. =item uses( $tutype_instance ) This method deeply copies the passed L instance. =item uses( $filename_instance ) This method constructs an internal L instance by copying the I, I, I and I attributes from the L instance passed as argument. =item uses( $file_instance ) This method constructs an internal L instance by copying the I attributes from the L instance passed as argument, and sets its I attribute to C. You will have to add a proper L instance to overwrite these defaults. =item uses( $executable_instance ) This method constructs an internal L instance by copying the I, I, and I attributes from the L instance passed as argument, and sets the I attribute to C. You will have to add a proper L instance to overwrite these defaults. =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 INHERITED METHODS Please refer to L for inherited methods. =over 4 =item addInvoke( $when, $cmd ) =item invoke( $when, $cmd ) =item notify( $when, $cmd ) =back =head1 SEE ALSO =over 4 =item L Base class. =item L Base class. =item L Class that aggregates the L class. =item L =item L =item L Permissible ways to specify a file that is being used. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/DAG.pm0000644000175000017500000001046011757531137022170 0ustar ryngerynge# # License: (atend) # $Id: DAG.pm 3598 2011-04-27 23:42:04Z voeckler $ # package Pegasus::DAX::DAG; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Pegasus::DAX::AbstractJob; use Exporter; our @ISA = qw(Pegasus::DAX::AbstractJob Exporter); our $VERSION = '3.3'; our @EXPORT = (); our @EXPORT_OK = (); our %EXPORT_TAGS = (); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); if ( @_ == 0 ) { # nothing to do } elsif ( @_ > 1 ) { # called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } else { croak "invalid c'tor for ", __PACKAGE__; } bless $self, $class; } # forward declaration to auto loaders sub file; sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (opt. IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? "$xmlns:dag" : 'dag'; $f->print( "$indent<$tag" , attribute('file',$self->file,$xmlns) , attribute('id',$self->id,$xmlns) , attribute('node-label',$self->nodelabel,$xmlns) , ">\n" ); $self->innerXML($f," $indent",$xmlns); $f->print( "$indent\n" ); } 1; __END__ =head1 NAME Pegasus::DAX::DAG - Job node to store a concrete DAG workflow. =head1 SYNOPSIS use Pegasus::DAX::DAG; my $a = Pegasus::DAX::DAG->new( file => 'fubar' ); $a->addArgument( '-flag' ); =head1 DESCRIPTION This class stores the job that describes a concrete Condor DAGMan DAG. =head1 METHODS =over 4 =item new() =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. Other means of construction is to use named lists. =item name Getter and setter for the job's name required string. Regardless of the child class, any job always some form of name. =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 INHERITED METHODS Please refer to L for inherited methods. 
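Putting the pieces together, a minimal sketch of wrapping an already
planned DAGMan file as a workflow node (not from the original POD; the
file name and identifiers are made up, and C<Pegasus::DAX::ADAG> is
documented later in this archive):

    use Pegasus::DAX::DAG;
    use Pegasus::DAX::ADAG;

    my $dag = Pegasus::DAX::DAG->new( file => 'inner.dag' );
    $dag->id( 'ID000042' );            # required, unique within the workflow
    $dag->nodelabel( 'preplanned-part' );

    my $adag = Pegasus::DAX::ADAG->new( name => 'outer' );
    $adag->addJob( $dag );             # addJob stores a deep copy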
=over 4 =item addArgument( $string ) =item addArgument( $plainfilename_instance ) =item addArgument( $filename_instance ) =item addArgument( $file_instance ) =item addArgument( $exectuable_instance ) =item addProfile( $namespace, $key, $value ) =item addProfile( $profile_instance ) =item stdin =item stdout =item stderr =item id =item nodelabel =item addUses( .. ) =item uses( $filename_instance ) =item uses( $file_instance ) =item uses( $executable_instance ) =item addInvoke( $when, $cmd ) =item notify( $when, $cmd ) =item invoke( $when $cmd ) =item innerXML( $handle, $indent, $xmlns ) =back =head1 SEE ALSO =over 4 =item L Base class. =item L =item L =item L Sibling classes. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/CatalogType.pm0000644000175000017500000001357311757531137024021 0ustar ryngerynge# # License: (atend) # $Id: CatalogType.pm 3642 2011-05-02 23:04:49Z voeckler $ # package Pegasus::DAX::CatalogType; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Exporter; our @ISA = qw(Pegasus::DAX::Base Exporter); our $VERSION = '3.3'; our @EXPORT = (); our %EXPORT_TAGS = (); our @EXPORT_OK = (); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); if ( @_ == 0 ) { # nothing to do } elsif ( @_ > 1 ) { # called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } else { croak "invalid c'tor for ", __PACKAGE__; } bless $self, $class; } # forward declaration sub name; sub addMeta { my $self = shift; my $meta; if ( @_ == 3 ) { # explicit $meta = Pegasus::DAX::MetaData->new( shift(), shift(), shift() ); } elsif ( @_ == 1 && ref $_[0] && $_[0]->isa('Pegasus::DAX::MetaData') ) { my $m = shift; $meta = $m->clone(); } else { croak "argument is not a valid MetaData"; } if ( exists $self->{metas} ) { push( @{$self->{metas}}, $meta ); } else { $self->{metas} = [ $meta ]; } } sub addPFN { my $self = shift; my $pfn; if ( @_ == 1 && ! ref $_[0] ) { # plain string argument as PFN, no pfn-profiles $pfn = Pegasus::DAX::PFN->new( shift() ); } elsif ( @_ == 2 && ! ref $_[0] && ! 
ref $_[1] ) { # two plain strings, no pfn-profiles $pfn = Pegasus::DAX::PFN->new( shift(), shift() ); } elsif ( @_ == 1 && $_[0]->isa('Pegasus::DAX::PFN' ) ) { # ok my $p = shift; $pfn = $p->clone(); } else { croak "argument is not a valid PFN"; } if ( exists $self->{pfns} ) { push( @{$self->{pfns}}, $pfn ); } else { $self->{pfns} = [ $pfn ]; } } sub addProfile { my $self = shift; my $prof; if ( @_ == 3 ) { # explicit $prof = Pegasus::DAX::Profile->new( shift(), shift(), shift() ); } elsif ( @_ == 1 && ref $_[0] && $_[0]->isa('Pegasus::DAX::Profile') ) { my $p = shift; $prof = $p->clone(); } else { croak "argument is not a valid Profile"; } if ( exists $self->{profiles} ) { push( @{$self->{profiles}}, $prof ); } else { $self->{profiles} = [ $prof ]; } } sub innerXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (IN): namespace of element, if necessary # returns: number of inner elements produced # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $result = 0; # # # if ( exists $self->{profiles} ) { foreach my $i ( @{$self->{profiles}} ) { $result++; $i->toXML($f,$indent,$xmlns); } } # # # if ( exists $self->{metas} ) { foreach my $i ( @{$self->{metas}} ) { $result++; $i->toXML($f,$indent,$xmlns); } } # # # if ( exists $self->{pfns} ) { foreach my $i ( @{$self->{pfns}} ) { $result++; $i->toXML($f,$indent,$xmlns); } } $result; } 1; __END__ =head1 NAME Pegasus::DAX::CatalogType - abstract class for included transformation- and replica catalogs. =head1 SYNOPSIS This is an abstract class. You do not instantiate abstract classes. =head1 DESCRIPTION This class is the base for the included transformation- and replica catalog entry. =head1 METHODS =over 4 =item new() The constructor is used by child classes to establish data structures. =item addProfile( $namespace, $key, $value ) =item addProfile( $profile_instance ) This method will add a specified profile, either as three strings or instance of L, to the collection of profiles associated with the logical level catalog entry. =item addMeta( $key, $type, $value ) =item addMeta( $metadata_instance ) This method adds a piece of meta data to the collection of meta data, either as trhee strings or instance of L, associated with this logical catalog entry. =item addPFN( $url ) =item addPFN( $url, $site ) =item addPFN( $pfn_instance ) This method adds a physical filename, either as url and site string or instance of L, to the collection of PFNs associated with this catalog entry. =item innerXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. Since this class is abstract, it will not create the element tag nor attributes. However, it needs to create the inner elements as necessary. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 SEE ALSO =over 4 =item L Base class. =item L Replica catalog entry child class. =item L Transformation catalog entry child class. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/AbstractJob.pm0000644000175000017500000003017211757531137023775 0ustar ryngerynge# # License: (atend) # $Id: AbstractJob.pm 5104 2012-03-10 01:16:38Z voeckler $ # package Pegasus::DAX::AbstractJob; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Pegasus::DAX::InvokeMixin; use Pegasus::DAX::Filename qw(LINK_IN LINK_OUT); use Exporter; our @ISA = qw(Pegasus::DAX::Base Pegasus::DAX::InvokeMixin Exporter); our $VERSION = '3.3'; our @EXPORT = (); our %EXPORT_TAGS = (); our @EXPORT_OK = (); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } use Pegasus::DAX::Filename qw(LINK_INPUT); my $count = 0; # class variable sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); $self->{separator} = ' '; # between arguments default if ( @_ > 1 ) { # called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } bless $self, $class; } sub addArgument { my $self = shift; # WARNING: foreach is susceptible to in-place modification of the # underlying object through the iterator variable! my $arg; foreach my $name ( @_ ) { if ( ! ref $name ) { # plain text -- take as is $arg = "$name"; # deep copy } elsif ( $name->isa('Pegasus::DAX::PlainFilename')) { # auto-add uses for P::D::Filename only! $self->uses($name) if $name->isa('Pegasus::DAX::Filename'); # sub-classing not permissible for storing/printing $arg = Pegasus::DAX::PlainFilename->new( $name->name ) } elsif ( $name->isa('Pegasus::DAX::CatalogType') ) { # auto-add uses for File or Executable $self->uses($name); # sub-classing not permissible for storing/printing $arg = Pegasus::DAX::PlainFilename->new( $name->name ); } else { croak "Illegal argument to addArgument"; } # # add $arg to list of arguments # if ( exists $self->{arguments} ) { push( @{$self->{arguments}}, $arg ); } else { $self->{arguments} = [ $arg ]; } } } sub addProfile { my $self = shift; my $prof; if ( @_ == 3 ) { # explicit $prof = Pegasus::DAX::Profile->new( shift(), shift(), shift() ); } elsif ( @_ == 1 && ref $_[0] && $_[0]->isa('Pegasus::DAX::Profile') ) { my $p = shift; $prof = $p->clone; } else { croak "argument is not a valid Profile"; } if ( exists $self->{profiles} ) { push( @{$self->{profiles}}, $prof ); } else { $self->{profiles} = [ $prof ]; } } sub stdio($$;@) { my $self = shift; my $what = shift; my $result = $self->{$what}; if ( @_ ) { my $name = shift; if ( ! ref $name ) { # plain string $self->{$what} = $name; } elsif ( $name->can('name') ) { # some class? 
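            # Accept any object that responds to name(): its logical
            # file name becomes the stdio value, and Filename or
            # CatalogType instances are additionally auto-registered
            # in the job's uses section by the uses() call right below.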
$self->{$what} = $name->name; $self->uses($name) if ( $name->isa('Pegasus::DAX::Filename') || $name->isa('Pegasus::DAX::CatalogType') ); } else { croak "illegal name argument"; } } $result; } sub stdin { my $self = shift; stdio($self,'stdin',@_); } sub stdout { my $self = shift; stdio($self,'stdout',@_); } sub stderr { my $self = shift; stdio($self,'stderr',@_); } sub addUses { my $self = shift; $self->uses(@_); } sub uses { my $self = shift; my $uses = shift; if ( defined $uses && ref $uses ) { if ( $uses->isa('Pegasus::DAX::Filename') ) { $self->{uses}->{ $uses->name } = Pegasus::DAX::Filename->new( $uses ); # deep copy! } elsif ( $uses->isa('Pegasus::DAX::Executable') ) { $self->{uses}->{ $uses->name } = Pegasus::DAX::Filename->new( namespace => $uses->namespace, name => $uses->name, version => $uses->version, executable => 1 ); } elsif ( $uses->isa('Pegasus::DAX::File') ) { $self->{uses}->{ $uses->name } = Pegasus::DAX::Filename->new( name => $uses->name, link => LINK_INPUT, optional => 0, executable => 0 ); } else { croak( "Instance of ", ref $uses, ' is an invalid argument' ); } } else { croak "invalid argument"; } } sub id { my $self = shift; if ( @_ ) { # setter my $old = $self->{id}; $self->{id} = shift; return $old; } else { # getter # default identifier using class variable $count $self->{id} = sprintf( "ID%06u", ++$count ) unless exists $self->{id}; return $self->{id}; } } # forward declarations sub nodelabel; sub innerXML { # purpose: partial XML for common stuff # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; # # # if ( exists $self->{arguments} ) { my $tag = defined $xmlns && $xmlns ? "$xmlns:argument" : 'argument'; my $flag = 0; $f->print( "$indent<$tag>" ); foreach my $i ( @{$self->{arguments}} ) { $f->print( $self->{separator} ) if ( $flag && $self->{separator} ); if ( ref $i ) { $i->toXML($f,'',$xmlns); } else { $f->print($i); } $flag++; } $f->print( "\n" ); } # # # if ( exists $self->{profiles} ) { foreach my $i ( @{$self->{profiles}} ) { $i->toXML($f,$indent,$xmlns); } } # # # if ( exists $self->{stdin} && $self->{stdin} ) { my $tag = defined $xmlns && $xmlns ? "$xmlns:stdin" : 'stdin'; $f->print( "$indent<$tag" , attribute('name',$self->stdin,$xmlns) , attribute('link',LINK_IN,$xmlns) , " />\n" ); } if ( exists $self->{stdout} && $self->{stdout} ) { my $tag = defined $xmlns && $xmlns ? "$xmlns:stdout" : 'stdout'; $f->print( "$indent<$tag" , attribute('name',$self->stdout,$xmlns) , attribute('link',LINK_OUT,$xmlns) , " />\n" ); } if ( exists $self->{stderr} && $self->{stderr} ) { my $tag = defined $xmlns && $xmlns ? "$xmlns:stderr" : 'stderr'; $f->print( "$indent<$tag" , attribute('name',$self->stderr,$xmlns) , attribute('link',LINK_OUT,$xmlns) , " />\n" ); } # # # if ( exists $self->{uses} ) { while ( my ($name,$i) = each %{$self->{uses}} ) { $i->toXML($f,$indent,$xmlns); } } # # # if ( exists $self->{invokes} ) { foreach my $i ( @{$self->{invokes}} ) { $i->toXML($f,$indent,$xmlns); } } } 1; __END__ =head1 NAME Pegasus::DAX::AbstractJob - abstract base class for jobs. =head1 SYNOPSIS This is an abstract class. You do not instantiate abstract classes. =head1 DESCRIPTION This class is the base for the four kinds of jobs and sub-workflows. =head1 METHODS =over 4 =item new() The constructor is used by child classes to establish data structures. 
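A small usage sketch (assuming the concrete child class
C<Pegasus::DAX::Job> shipped with this distribution):

    use Pegasus::DAX::Job;

    my $job = Pegasus::DAX::Job->new( name => 'analyze' );
    $job->id( 'ID000007' );            # required, unique within the ADAG
    $job->addArgument( '-a', 'analyze' );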
=item addArgument( $string ) This method will add a simple string into the ordered list of arguments. =item addArgument( $plainfilename_instance ) This method adds a simple filename into the ordered list of arguments. You will have to add the filename separately to the C section. =item addArgument( $filename_instance ) This method adds a full filename to the ordered list of arguments B also adds the filename to the C section. =item addArgument( $file_instance ) =item addArgument( $exectuable_instance ) This method adds a full filename to the ordered list of arguments B also adds the filename to the C section. However, being of L that lacks a I attribute, please refer to I below for details. You may have to override the automatically added defaults entity by separately and explicitly adding the proper L to the I section. =item addArgument( ... ) You may pass any number of the above permitted arguments as long list of these arguments. This is a convenience method. =item addProfile( $namespace, $key, $value ) =item addProfile( $profile_instance ) This method will add a specified profile, either as three strings or instance of L, to the collection of profiles associated with the logical level catalog entry. =item stdin =item stdout =item stderr Setter and getter for stdio handles. In get mode, the plain string of the logical file is returned. In set mode, use a string or L to provide the logical file name. You are responsible to add the filename to the C section. You may also specify an argument of L, L, or L. In these cases, the filename is added automatically to the C section. You are responsible to provide the proper linkage, if applicable. =item addUses Alias method for C method. =item uses( $filename_instance ) This method adds a deep copy of a L instance to the I section of a job. A deep copy is made so that you can change attributes on your passed object later. =item uses( $file_instance ) This method converts a L instance into an internal L entity for I section of a job. This method assumes copies the I attribute, sets the I attribute to C, the I attribute to C, and the I attribute to C. You will have to add a proper L instance to overwrite these defaults. =item uses( $executable_instance ) This method converts a L instance into an internal L instance for the I section of a job. This method copies the I, I and I attributes, and sets the I attribute to C. You will have to add a proper L instance to overwrite these defaults. =item id Getter and setter for the job's identifier string. Please note that the identifier is more restrictive, e.g. needs to obey more stringend rules. The job identifier is a required argument, and unique within the C. =item nodelabel Getter and setter for the optional job label string. =item separator This attribute defaults to a single space. The arguments in the argument string will be formatted with the separator value between each argument. The default should be good in many circumstances. In case your application is sensitive to white-space in its argument list, you may want to set C to the empty string, and provide the proper whitespaces yourself. =item innerXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. Since this class is abstract, it will not create the element tag nor attributes. However, it needs to create the inner elements as necessary. The first argument is a file handle open for writing. This is where the XML will be generated. 
The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 INHERITED METHODS Please refer to L for inherited methods. =over 4 =item addInvoke( $when, $cmd ) =item invoke( $when, $cmd ) =item notify( $when, $cmd ) =back =head1 SEE ALSO =over 4 =item L Base class. =item L Base class for L delegation methods. =item L =item L =item L =item L Child classes inheriting from L. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/PFN.pm0000644000175000017500000001173211757531137022223 0ustar ryngerynge# # License: (atend) # $Id: PFN.pm 3642 2011-05-02 23:04:49Z voeckler $ # package Pegasus::DAX::PFN; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Exporter; our @ISA = qw(Pegasus::DAX::Base Exporter); our $VERSION = '3.3'; our @EXPORT = (); our @EXPORT_OK = (); our %EXPORT_TAGS = (); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); if ( @_ == 0 ) { # nothing to do } elsif ( @_ == 1 && ! ref $_[0] ) { # single string argument $self->{url} = shift; } elsif ( @_ == 2 && ! ref $_[0] && ! ref $_[1] ) { # two string arguments $self->{url} = shift; $self->{site} = shift; } elsif ( @_ > 2 ) { # called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } else { croak "invalid c'tor for ", __PACKAGE__; } bless $self, $class; } sub addProfile { my $self = shift; my $prof; if ( @_ == 3 ) { # explicit $prof = Pegasus::DAX::Profile->new( shift(), shift(), shift() ); } elsif ( @_ == 1 && ref $_[0] && $_[0]->isa('Pegasus::DAX::Profile') ) { my $p = shift; $prof = $p->clone(); } else { croak "argument is not a valid Profile"; } if ( exists $self->{profiles} ) { push( @{$self->{profiles}}, $prof ); } else { $self->{profiles} = [ $prof ]; } } # forward declarations sub url; sub site; sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (opt. IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? "$xmlns:pfn" : 'pfn'; $f->print( "$indent<$tag" , attribute('url',$self->url,$xmlns) , attribute('site',$self->site,$xmlns) ); if ( exists $self->{profiles} ) { $f->print(">\n"); foreach my $i ( @{$self->{profiles}} ) { $i->toXML($f," $indent",$xmlns); } $f->print( "$indent\n" ); } else { $f->print(" />\n"); } } 1; __END__ =head1 NAME Pegasus::DAX::PFN - stores a Pegasus concrete data- or executable description. 
=head1 SYNOPSIS use Pegasus::DAX::PFN; my $a = Pegasus::DAX::PFN->new( 'url1' ); my $b = Pegasus::DAX::PFN->new( 'url2', 'local' ); my $c = Pegasus::DAX::PFN->new( url => 'file://foo' , site => 'local' ); $c->addProfile( PROFILE_ENV, 'FOO', 'bar' ); $c->addProfile( $profile_instance ); =head1 DESCRIPTION This class remembers a Pegasus concrete remote file location. The file may refer to an executable (in the transformation catalog), or a data file (in the replica catalog). =head1 METHODS =over 4 =item new() =item new( $url ) =item new( $url, $site ) =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. When invoked with exactly 1 or 2 arguments, the first argument is the location URL, and the second argument is the site handle. Other means of construction is to use named lists. =item url Setter and getter for the URL string. =item site Setter and getter for the site handle string. =item addProfile( $namespace, $key, $value ) =item addProfile( $profile_instance ) This method will add a specified profile, either as three string or instance of L, to the collection of profiles associated with this PFN. =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 SEE ALSO =over 4 =item L Base class. =item L Abstract class using PFNs. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/InvokeMixin.pm0000644000175000017500000000520711757531137024040 0ustar ryngerynge# # Base class for methods to work on Pegasus::DAX::Invoke delegations. # # License: (atend) # $Id: InvokeMixin.pm 3632 2011-04-29 22:34:00Z voeckler $ # package Pegasus::DAX::InvokeMixin; use 5.006; use strict; use Carp; use Exporter; our @ISA = qw(Exporter); our $VERSION = '3.3'; our @EXPORT = (); our @EXPORT_OK = (); our %EXPORT_TAGS = (); use Pegasus::DAX::Invoke qw(%permitted); sub addInvoke { my $self = shift; $self->invoke(@_); } sub notify { my $self = shift; $self->invoke(@_); } sub invoke { my $self = shift; if ( @_ == 0 ) { # assume getter for full list return ( exists $self->{invokes} ? 
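        # getter branch: hand back every stored Pegasus::DAX::Invoke
        # instance as a list, or the empty list if none were added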
@{ $self->{invokes} } : () ); } elsif ( @_ == 2 ) { # assume setter my $when = shift; my $cmd = shift; if ( defined $when && defined $cmd ) { my $i = Pegasus::DAX::Invoke->new($when,$cmd); if ( exists $self->{invokes} ) { push( @{$self->{invokes}}, $i ); } else { $self->{invokes} = [ $i ]; } } else { croak "use proper arguments to addInvoke(when,cmdstring)"; } } else { croak "invalid arguments"; } } 1; __END__ =head1 NAME Pegasus::DAX::InvokeMixin - base class. =head1 SYNOPSIS This is a constructor-less base class. You do not instantiate it. =head1 DESCRIPTION This class provides and thus implements dealing with L instances inside classes that can contain instances thereof. =head1 METHODS =over 4 =item addInvoke( $when, $cmd ) Alias for C method. =item notify( $when, $cmd ) Alias for C method. =item invoke( ) This method is the getter for the full list of L objects stored in this instance. =item invoke( $when, $cmd ) This method adds a simple executable instruction to run (on the submit host) when a job reaches the state in C<$when>. Please refer to the constants C for details. =back =head1 SEE ALSO =over 4 =item L =item L =item L Classes requiring this interface. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/ADAG.pm0000644000175000017500000005240611757531137022277 0ustar ryngerynge# # License: (atend) # $Id: ADAG.pm 3642 2011-05-02 23:04:49Z voeckler $ # package Pegasus::DAX::ADAG; use 5.006; use strict; use Carp; use Pegasus::DAX::Base qw(:xml); use Pegasus::DAX::AbstractJob; use Exporter; our @ISA = qw(Pegasus::DAX::AbstractJob Exporter); use constant SCHEMA_NAMESPACE => 'http://pegasus.isi.edu/schema/DAX'; use constant SCHEMA_LOCATION => 'http://pegasus.isi.edu/schema/dax-3.3.xsd'; use constant SCHEMA_VERSION => 3.3; our $VERSION = '3.3'; our @EXPORT = (); our @EXPORT_OK = qw(SCHEMA_NAMESPACE SCHEMA_LOCATION SCHEMA_VERSION); our %EXPORT_TAGS = ( schema => [ @EXPORT_OK ], all => [ @EXPORT_OK ] ); # one AUTOLOAD to rule them all BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD } sub new { my $proto = shift; my $class = ref($proto) || $proto; my $self = $class->SUPER::new(); $self->{index} = 0; $self->{count} = 1; $self->{version} = SCHEMA_VERSION; if ( @_ > 1 ) { # called with a=>b,c=>d list %{$self} = ( %{$self}, @_ ); } elsif ( @_ == 1 && ref $_[0] eq 'HASH' ) { # called with { a=>b, c=>d } hashref %{$self} = ( %{$self}, %{ shift() } ); } bless $self, $class; } # forward declarations sub name; sub index; sub count; use Pegasus::DAX::File; sub addFile { my $self = shift; my $name = shift; if ( ref $name ) { if ( $name->isa('Pegasus::DAX::File') ) { # files uses LFN to distinguish files carp( 'Warning: ', __PACKAGE__, '->addFile(', $name->name , ") already exists, REPLACING existing file!" 
) if exists $self->{files}->{ $name->name }; $self->{files}->{ $name->name } = $name->clone(); } else { croak( "Instance of ", ref($name), " is an invalid argument" ); } } else { croak "invalid argument"; } } sub hasFile { my $self = shift; return undef unless exists $self->{files}; my $name = shift; my $key = undef; if ( ! ref $name ) { $key = $name; } elsif ( $name->can('name') ) { $key = $name->name; } else { croak "invalid argument"; } exists $self->{files}->{$key}; } sub getFile { my $self = shift; return undef unless exists $self->{files}; my $name = shift; my $key = undef; if ( ! ref $name ) { $key = $name; } elsif ( $name->can('name') ) { $key = $name->name; } else { croak( 'getFile requires string or Pegasus::DAX::File argument' ); } # avoid auto-vivification exists $self->{files}->{$key} ? $self->{files}->{$key} : undef; } sub addExecutable { my $self = shift; my $name = shift; if ( ref $name ) { if ( $name->isa('Pegasus::DAX::Executable') ) { my $key = $name->key; carp( 'Warning: ', __PACKAGE__, '->addExecutable(' , ($name->namespace || ''), ',', $name->name, ',' , ($name->version || '') , ",...) already exists, REPLACING existing executable!" ) if exists $self->{executables}->{$key}; $self->{executables}->{$key} = $name->clone(); } else { croak( "Instance of ", ref($name), " is an invalid argument" ); } } else { croak "invalid argument"; } } sub hasExecutable { my $self = shift; return undef unless exists $self->{executables}; my $name = shift; my $key = undef; if ( ref $name ) { if ( $name->isa('Pegasus::DAX::Executable') ) { $key = $name->key; } elsif ( ref $name eq 'HASH' ) { $key = Pegasus::DAX::Executable->new($name)->key; } else { croak "invalid argument"; } } else { croak "invalid argument"; } exists $self->{executables}->{$key}; } sub addTransformation { my $self = shift; my $name = shift; if ( ref $name ) { if ( $name->isa('Pegasus::DAX::Transformation') ) { my $key = $name->key; carp( 'Warning: ', __PACKAGE__, '->addTransformation(' , ($name->namespace || ''), ',', $name->name, ',' , ($name->version || '') , ") already exists, REPLACING existing transformation!" ) if exists $self->{transformation}->{$key}; $self->{transformations}->{$key} = $name->clone(); } else { croak( "Instance of ", ref($name), " is an invalid argument" ); } } else { croak "invalid argument"; } } sub hasTransformation { my $self = shift; return undef unless exists $self->{transformation}; my $what = shift; my $key = undef; if ( ref $what && $what->isa('Pegasus::DAX::Transformation') ) { # using Transformation as argument $key = $what->key; } elsif ( ! ref $what && @_ == 2 ) { # using (ns,id,vs) tuple as argument my $id = shift; my $vs = shift; $key = join( $;, $what, $id, $vs ); } else { # not a valid argument croak( "Invalid argument to ", __PACKAGE__, "->hasTransformation" ); } exists $self->{transformation}->{$key}; } sub getTransformation { my $self = shift; my $what = shift; my $key = undef; if ( ref $what && $what->isa('Pegasus::DAX::Transformation') ) { # using Transformation as argument $key = $what->key; } elsif ( ! ref $what && @_ == 2 ) { # using (ns,id,vs) tuple as argument my $id = shift; my $vs = shift; $key = join( $;, $what, $id, $vs ); } else { # not a valid argument croak( "Invalid call of ", __PACKAGE__, "->getTransformation" ); } # avoid auto-vivification return undef unless exists $self->{transformation}; exists $self->{transformation}->{$key} ? 
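        # NB: this lookup probes the 'transformation' (singular) slot,
        # while addTransformation() above stores entries under
        # 'transformations' (plural), so entries added there will not
        # be found here.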
$self->{transformation}->{$key} : undef; } sub addJob { my $self = shift; my $job = shift; if ( ref $job ) { if ( $job->isa('Pegasus::DAX::AbstractJob') ) { my $id = $job->id || croak( "Instance of ", ref($job), " does not have an 'id' attribute" ); carp( "Warning: job identifier $id already exists, REPLACING existing job!" ) if exists $self->{jobs}->{$id}; $self->{jobs}->{$id} = $job->clone(); } else { croak( "Instance of ", ref($job), " is an invalid argument" ); } } else { croak "invalid argument"; } } sub hasJob { my $self = shift; return undef unless exists $self->{jobs}; my $job = shift; # job id my $key = undef; if ( ref $job && $job->isa('Pegasus::DAX::AbstractJob') ) { $key = $job->id; } elsif ( ! ref $job ) { $key = $job; } else { croak( "Instance of ", ref($job), " is an invalid argument" ); } exists $self->{jobs}->{$key}; } sub getJob { my $self = shift; my $job = shift; # job id my $key = undef; if ( ref $job && $job->isa('Pegasus::DAX::AbstractJob') ) { $key = $job->id; } elsif ( ! ref $job ) { $key = $job; } else { croak( "Instance of ", ref($job), " is an invalid argument" ); } # avoid auto-vivification return undef unless exists $self->{jobs}; exists $self->{jobs}->{$key} ? $self->{jobs}->{$key} : undef; } sub addDependency { my $self = shift; my $parent = shift; # we only need the job identifier string if ( ref $parent ) { if ( $parent->isa('Pegasus::DAX::AbstractJob') ) { $parent = $parent->id; croak( "parent does not have a valid job-id" ) unless ( defined $parent && $parent ); } else { croak "parent is not a job type"; } } while ( @_ ) { my $child = shift; # we only need the job identifier string if ( ref $child ) { if ( $child->isa('Pegasus::DAX::AbstractJob') ) { $child = $child->id; croak( "child does not have a valid job-id" ) unless ( defined $child && length($child) ); } else { croak "child is not a job type"; } } # plain string is a label my $label = ( ref $_[0] ? undef : shift ); # spring into existence -- store undef, if necessary $self->{deps}->{$child}->{$parent} = $label; } } sub addDependencyById { my $self = shift; my $parent = shift || croak "need a parent as first argument"; if ( ref $parent ) { if ( $parent->isa('Pegasus::DAX::AbstractJob') ) { $parent = $parent->id; } else { croak( "instance of ", ref($parent), " is an invalid parent argument" ); } } croak "parent does not have a valid job-id" unless ( defined $parent && length($parent) ); while ( @_ ) { my $child = shift; # we only need the job identifier string if ( ref $child ) { if ( $child->isa('Pegasus::DAX::AbstractJob') ) { $child = $child->id; } else { croak( "Instance of ", ref($child), " is an invalid child argument" ); } } croak "child does not have a valid job-id" unless ( defined $child && length($child) ); # spring into existence -- store undef, if necessary $self->{deps}->{$child}->{$parent} = undef; } } sub addInverse { my $self = shift; my $child = shift; # we only need the job identifier string if ( ref $child ) { if ( $child->isa('Pegasus::DAX::AbstractJob') ) { $child = $child->id; croak( "child does not have a valid job-id" ) unless ( defined $child && $child ); } else { croak "child is not a job type"; } } while ( @_ ) { my $parent = shift; # we only need the job identifier string if ( ref $parent ) { if ( $parent->isa('Pegasus::DAX::AbstractJob') ) { $parent = $parent->id; croak( "parent does not have a valid job-id" ) unless ( defined $parent && $parent ); } else { croak "parent is not a job type"; } } # plain string is a label my $label = ( ref $_[0] ? 
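        # peek at the next argument: a reference is the next parent
        # job, while a plain scalar is consumed as this edge's
        # optional label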
undef : shift ); # spring into existence -- store undef, if necessary $self->{deps}->{$child}->{$parent} = $label; } } sub topo_sort { my $self = shift; # determine start nodes (no incoming edges), jobid only my (%start,%p,%c) = map { $_ => 1 } keys %{ $self->{jobs} }; foreach my $c ( keys %{ $self->{deps} } ) { foreach my $p ( keys %{ $self->{deps}->{$c} } ) { $p{$c}{$p} = 1; $c{$p}{$c} = 1; delete $start{$c}; } } # compute topological sort order my %topo = (); my @topo = (); my @q = sort keys %start; while ( @q ) { my $n = shift(@q); push( @topo, $n ) unless exists $topo{$n}; $topo{$n} = 1; foreach my $x ( sort keys %{$c{$n}} ) { delete $c{$n}{$x}; delete $p{$x}{$n}; push( @q, $x ) if ( keys %{$p{$x}} == 0 ); } } @topo; } sub toXML { # purpose: put self onto stream as XML # paramtr: F (IN): perl file handle open for writing # ident (IN): indentation level # xmlns (opt. IN): namespace of element, if necessary # my $self = shift; my $f = shift; my $indent = shift || ''; my $xmlns = shift; my $tag = defined $xmlns && $xmlns ? "$xmlns:adag" : 'adag'; # OK, this is slightly ugly and tricky: If there is no indentation, # this element is the outer-most, and thus gets the XML intro. if ( $indent eq '' ) { $f->print( "\n" ); my @t = gmtime; # avoid loading POSIX $f->printf( "\n", $t[5]+1900, $t[4]+1, $t[3], $t[2], $t[1], $t[0] ); $f->print( "\n" ); } my $ns = defined $xmlns && $xmlns ? "xmlns:$xmlns" : 'xmlns'; $f->print( "$indent<$tag" , attribute($ns,SCHEMA_NAMESPACE) , attribute('xmlns:xsi','http://www.w3.org/2001/XMLSchema-instance') , attribute('xsi:schemaLocation',SCHEMA_NAMESPACE . ' ' . SCHEMA_LOCATION) , attribute('version',SCHEMA_VERSION,$xmlns) , attribute('name',$self->name,$xmlns) , attribute('index',$self->index,$xmlns) , attribute('count',$self->count,$xmlns) , ">\n" ); # # # if ( exists $self->{invokes} ) { $f->print( " $indent\n" ); foreach my $i ( @{$self->{invokes}} ) { $i->toXML($f," $indent",$xmlns); } $f->print("\n"); } # # # if ( exists $self->{files} ) { $f->print( " $indent\n" ); foreach my $i ( values %{ $self->{files} } ) { $i->toXML($f," $indent",$xmlns); } $f->print("\n"); } # # # if ( exists $self->{executables} ) { $f->print( " $indent\n" ); foreach my $i ( values %{ $self->{executables} } ) { $i->toXML($f," $indent",$xmlns); } $f->print("\n"); } # # # if ( exists $self->{transformations} ) { $f->print( " $indent\n" ); foreach my $i ( values %{ $self->{transformations} } ) { $i->toXML($f," $indent",$xmlns); } $f->print("\n"); } # # # $f->print( " $indent\n" ); my @topo = $self->topo_sort; foreach my $id ( $self->topo_sort ) { $self->{jobs}->{$id}->toXML($f," $indent",$xmlns); } $f->print("\n"); # # # if ( exists $self->{deps} ) { $f->print( " $indent\n" ); my $ctag = defined $xmlns && $xmlns ? "$xmlns:child" : 'child'; my $ptag = defined $xmlns && $xmlns ? "$xmlns:parent" : 'parent'; my %topo = map { $topo[$_] => $_ } 0 .. $#topo; @topo = (); # free space foreach my $child ( sort { $topo{$a} <=> $topo{$b} } keys %{$self->{deps}} ) { $f->print( " $indent<$ctag" , attribute('ref',$child,$xmlns) , ">\n" ); foreach my $parent ( sort { $topo{$a} <=> $topo{$b} } keys %{$self->{deps}->{$child}} ) { my $label = $self->{deps}->{$child}->{$parent}; $f->print( " $indent<$ptag" , attribute('ref',$parent,$xmlns) , attribute('edge-label',$label,$xmlns) , " />\n" ); } $f->print( " $indent\n" ); } } $f->print( "$indent\n" ); } 1; __END__ =head1 NAME Pegasus::DAX::ADAG - Pegasus workflow description. 
=head1 SYNOPSIS use Pegasus::DAX::ADAG; my $d = Pegasus::DAX::ADAG->new( name => 'fubar' ); $d->addJob( $job ); $d->addDependency( $parent, $child, 'label' ); =head1 DESCRIPTION This class stores the entire abstract directed acyclic graph (ADAG) that is a Pegasus workflow ready to be planned out. The heavy lifting is done in the base class L. Please note that, even though the schema and API permit it, you cannot stores an C within an C. We are hoping to add recursion at some unspecified time in the future. =head1 METHODS =over 4 =item new() =item new( a => b, c => d, ... ) =item new( { a => b, c => d, ... } ) The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the C inherited method. Other means of construction is to use named lists. =item name Getter and setter for the job's name required string. Regardless of the child class, any job always some form of name. =item index Getter and setter to the slot number, starting with 0, of this workflow variation. This is mostly an obsolete feature that will go away. =item count Getter and setter to the total number of slots, a count, of all variations of this workflow. This is mostly an obsolete feature that will go away. =item addFile( $file_instance ) Adds an included replica catalog entry. This method will make a copy of the instance passed. =item hasFile( $file_instance ) Checks, if the logical object described by the L argument is already known to this instance. =item hasFile( $filename ) Checks, if the logical object described by the file name is already known to this instance. =item getFile( $filename ) Retrieves an included replica catalog entry by a given filename. Returns C if not found. =item getFile( $file_instance ) Retrieves an included replica catalog entry by a given instance of L. Returns C if not found. =item addExecutable( $executable_instance ) Adds a copy of an included transformation catalog entry. =item hasExecutable( $executable_instance ) Checks, if the given L object is already known to this instance. =item addTransformation( $transformation_instance ) Adds a copy of the L combiner to the workflow. =item hasTransformation( $transformation_instance ) Checks, if the given L object is already known to this instance. =item hasTransformation( $ns, $name, $version ) Checks, if the object described by the argument triple is already known to this instance. =item getTransformation( $ns, $name, $version ) Retrieves a transformation combiner instance by its namespace, name and version tuple. Returns C, if not found. =item getTransformation( $transformation_instance ) Retrieves a transformation combiner instance from a known instance. Returns C if not found. =item addJob( $dag_instance ) Adds an already concretized sub-workflow as node to the workflow graph. The job must have a valid and unique I attribute. =item addJob( $dax_instance ) Adds a yet to be planned sub-workflow as node to the workflow graph. The job must have a valid and unique I attribute. =item addJob( $job_instance ) Adds a regular job as node to the workflow graph. The job must have a valid and unique I attribute. =item addJob( $adag_instance ) While not forbidden by the API, we cannot plan C within C yet. The job must have a valid and unique I attribute once this gets implemented. =item hasJob( $job_instance ) Checks, if the given L object is already known to this instance. It uses solely the object's C value to determine existence. 
=item hasJob( $jobid ) Check, if the job described by a job identifier is already known to this instance. =item getJob( $job_instance ) This method looks up a job by a given abstract job instance. If the instance is not known to this instance (i.e. this job hasn't been added yet), the method will return the C value. =item getJob( $jobid ) This method looks up a job by its I. If the I is not known to this instance (i.e. this job hasn't been added yet), the method will return the C value. =item addDependency( $parent, $child, .. ) =item addDependency( $parent, $child, $label, .. ) This method adds one or more children to a single parent, using each job's C attribute. In addition, an optional edge label may be stored with each dependency. Internal structures ensure that each relationship is only added once. You may add any number of children to the same parent by just listing them. Each child may be followed by a separate edge label - or not. The proper argument form is distinguished internally by whether the argument has a job type, or is a plain scalar (label). Note: You must use the full job instance when adding dependencies. You cannot use just the job's id value, because it is indistinguishable from the edge label. Look at L to obtain full job instances from a given C. =item addInverse( $child, $parent, .. ) =item addInverse( $child, $parent, $label, .. ) This method adds one or more parents to a single child, using each job's C attribute. In addition, an optional edge label may be stored with each dependency. Internal structures ensure that each relationship is only added once. You may add any number of parents to the same child by just listing them. Each parent may be followed by a separate edge label - or not. The proper argument form is distinguished internally by whether the argument has a job type, or is a plain scalar (label). =item toXML( $handle, $indent, $xmlns ) The purpose of the C function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may not be defined. If defined, all element tags will be prefixed with this name space. =back =head1 INHERITED METHODS Please refer to L for inherited methods. =over 4 =item addArgument( $string ) =item addArgument( $plainfilename_instance ) =item addArgument( $filename_instance ) =item addArgument( $file_instance ) =item addArgument( $exectuable_instance ) =item addProfile( $namespace, $key, $value ) =item addProfile( $profile_instance ) =item stdin =item stdout =item stderr =item id =item nodelabel =item addUses( .. ) =item uses( $filename_instance ) =item uses( $file_instance ) =item uses( $executable_instance ) =item addInvoke( $when, $cmd ) =item notify( $when, $cmd ) =item invoke( $when, $cmd ) =item innerXML( $handle, $indent, $xmlns ) =back =head1 SEE ALSO =over 4 =item L Base class. =item L =item L =item L Sibling classes. =back =head1 COPYRIGHT AND LICENSE Copyright 2007-2011 University Of Southern California Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

=cut
pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/Invoke.pm0000644000175000017500000001226011757531137023030 0ustar ryngerynge#
# License: (atend)
# $Id: Invoke.pm 3632 2011-04-29 22:34:00Z voeckler $
#
package Pegasus::DAX::Invoke;
use 5.006;
use strict;
use Carp;

use Pegasus::DAX::Base qw(:xml);
use Exporter;
our @ISA = qw(Pegasus::DAX::Base Exporter);

use constant INVOKE_NEVER      => 'never';
use constant INVOKE_START      => 'start';
use constant INVOKE_ON_SUCCESS => 'on_success';
use constant INVOKE_ON_ERROR   => 'on_error';
use constant INVOKE_AT_END     => 'at_end';
use constant INVOKE_ALL        => 'all';

our $VERSION = '3.3';
our @EXPORT = ();
our @EXPORT_OK = qw(INVOKE_NEVER INVOKE_START INVOKE_ON_SUCCESS
    INVOKE_ON_ERROR INVOKE_AT_END INVOKE_ALL);
our %permitted = map { eval($_) => 1 } @EXPORT_OK;
push( @EXPORT_OK, '%permitted' );
our %EXPORT_TAGS = ( 'all' => [ @EXPORT_OK ] );

# one AUTOLOAD to rule them all
BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD }

sub new {
    my $proto = shift;
    my $class = ref($proto) || $proto;
    my $self = $class->SUPER::new();
    my $when = lc shift();
    $self->{when} = $when;
    $self->{cmd} = shift;
    carp( "Invalid value '$when' for 'when' parameter in $class->new" )
        unless exists $permitted{$when};
    bless $self, $class;
}

# forward declaration (resolved by AUTOLOAD)
sub when;
sub cmd;

sub toXML {
    # purpose: put self onto stream as XML
    # paramtr: F (IN): perl file handle open for writing
    #          ident (IN): indentation level
    #          xmlns (IN): namespace of element, if necessary
    #
    my $self = shift;
    my $f = shift;
    my $indent = shift || '';
    my $xmlns = shift;
    my $tag = defined $xmlns && $xmlns ? "$xmlns:invoke" : 'invoke';

    $f->print( "$indent<$tag"
             , attribute('when',$self->{when},$xmlns)
             , ">"
             , quote($self->{cmd})
             , "</$tag>\n"
             );
}

1;
__END__

=head1 NAME

Pegasus::DAX::Invoke - class to collect data for callback invocations.

=head1 SYNOPSIS

    use Pegasus::DAX::Invoke qw(:all);

    my $i = Pegasus::DAX::Invoke->new( INVOKE_AT_END, '....' );
    print "when is ", $i->when, "\n";
    $i->cmd( '/bin/mailx -s foo a@b.c' );
    print "command is '", $i->cmd, "'\n";

=head1 DESCRIPTION

This class remembers a callback invocation. The callback is a command passed to the shell to be executed on the user's behalf whenever a job passes a certain event. The event states are available as the C<INVOKE_*> constants.

=head1 CONSTANTS

=over 4

=item INVOKE_NEVER

Never run the invoke. This is primarily to temporarily disable an invoke.

=item INVOKE_START

Run the invoke when the job gets submitted.

=item INVOKE_ON_SUCCESS

Run the invoke after the job finishes with success (exitcode == 0).

=item INVOKE_ON_ERROR

Run the invoke after the job finishes with failure (exitcode != 0).

=item INVOKE_AT_END

Run the invoke after the job finishes, regardless of exit code.

=item INVOKE_ALL

Like C<INVOKE_ON_SUCCESS> and C<INVOKE_ON_ERROR> combined.

=back

=head1 METHODS

=over 4

=item new( $when, $cmd )

The constructor must be called with two arguments. The first argument is a string, according to the C<INVOKE_*> constants. The second argument is the complete command-line to be executed on the user's behalf.

=item when()

This is the getter for the event specification.
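An illustrative round trip through the accessors (the command string is made up):

    my $i = Pegasus::DAX::Invoke->new( INVOKE_ON_ERROR, '/bin/true' );
    print $i->when, "\n";          # prints "on_error"
    $i->when( INVOKE_AT_END );     # setter form, described below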
=item when( $when )

This is the setter for the event specification.

=item cmd()

This is the getter for the command-line string.

=item cmd( $cmd )

This is the setter for the complete command-line string.

=item toXML( $handle, $indent, $xmlns )

The purpose of the C<toXML> function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may be undefined. If defined, all element tags will be prefixed with this name space.

=back

=head1 SEE ALSO

=over 4

=item L<Pegasus::DAX::Base>

Base class.

=item L<Pegasus::DAX::AbstractJob>

The abstract job class aggregates instances of this class to be called when the proper event is triggered.

=item L<Pegasus::DAX::ADAG>

The abstract DAX aggregates instances of this class to be called when the proper event is triggered on a workflow level.

=item L<Pegasus::DAX::Executable>

The executable class aggregates instances of this class to be called when the proper event is triggered in a job that uses the executable.

=item L<Pegasus::DAX::Transformation>

The transformation class aggregates instances of this class to be called when the proper event is triggered in a job that uses the transformation.

=back

=head1 COPYRIGHT AND LICENSE

Copyright 2007-2011 University Of Southern California

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

=cut
pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/DAX/TUType.pm0000644000175000017500000000766111757531137023000 0ustar ryngerynge#
# License: (atend)
# $Id: TUType.pm 3598 2011-04-27 23:42:04Z voeckler $
#
package Pegasus::DAX::TUType;
use 5.006;
use strict;
use Carp;

use Pegasus::DAX::Base qw(:xml);
use Pegasus::DAX::PlainFilename;
use Exporter;
our @ISA = qw(Pegasus::DAX::PlainFilename Exporter);

our $VERSION = '3.3';
our @EXPORT = ();
our %EXPORT_TAGS = ();
our @EXPORT_OK = ();

# one AUTOLOAD to rule them all
BEGIN { *AUTOLOAD = \&Pegasus::DAX::Base::AUTOLOAD }

sub new {
    my $proto = shift;
    my $class = ref($proto) || $proto;
    my $self = $class->SUPER::new();

    if ( @_ == 0 ) {
        # nothing to do
    } elsif ( @_ > 1 && (@_ & 1) == 0 ) {
        # called with a=>b,c=>d list
        %{$self} = ( %{$self}, @_ );
    } elsif ( @_ == 1 && ref $_[0] &&
              ( ref $_[0] eq 'HASH' || ref $_[0] eq __PACKAGE__ ) ) {
        # called with { a=>b, c=>d } hashref
        # or called as copy-c'tor (deep copy)
        %{$self} = ( %{$self}, %{ shift() } );
    } else {
        croak "invalid c'tor invocation";
    }

    bless $self, $class;
}

# forward declarations so we can check using 'can'
#sub name;          # inherited
sub namespace;
sub version;
sub executable;

sub toXML {
    # purpose: put self onto stream as XML
    # paramtr: F (IN): perl file handle open for writing
    #          ident (IN): indentation level
    #          xmlns (IN): namespace of element, if necessary
    #
    my $self = shift;
    my $f = shift;
    my $indent = shift || '';
    my $xmlns = shift;
    my $tag = defined $xmlns && $xmlns ? "$xmlns:uses" : 'uses';

    $f->print( "$indent<$tag"
             , attribute('namespace',$self->namespace,$xmlns)
             , attribute('name',$self->name,$xmlns)
             , attribute('version',$self->version,$xmlns)
             , attribute('executable',boolean($self->executable),$xmlns)
             , " />\n" );
}

1;
__END__

=head1 NAME

Pegasus::DAX::TUType - class for Transformation referenced entities.

=head1 SYNOPSIS

    use Pegasus::DAX::TUType;

    my $i = Pegasus::DAX::TUType->new( name => 'filename.txt' );
    $i->executable( 0 );

=head1 DESCRIPTION

This class remembers a reference expressed in the C<Transformation> class. The reference becomes part of the transformation's C<uses> bundle.

=head1 METHODS

=over 4

=item new()

=item new( a => b, c => d, ... )

=item new( { a => b, c => d, ... } )

The default constructor will create an empty instance whose scalar attributes can be adjusted using the getters and setters provided by the inherited C<AUTOLOAD> method. Another means to set attributes is to use named lists.

=item name

This setter and getter is inherited.

=item namespace

Setter and getter for a namespace string.

=item version

Setter and getter for a version string.

=item executable

Setter and getter for boolean values. Please use Perl truth.

=item toXML( $handle, $indent, $xmlns )

The purpose of the C<toXML> function is to recursively generate XML from the internal data structures. The first argument is a file handle open for writing. This is where the XML will be generated. The second argument is a string with the amount of white-space that should be used to indent elements for pretty printing. The third argument may be undefined. If defined, all element tags will be prefixed with this name space.

=back

=head1 SEE ALSO

=over 4

=item Pegasus::DAX::PlainFilename

Base class.

=item Pegasus::DAX::Transformation

Aggregating class.

=back

=head1 COPYRIGHT AND LICENSE

Copyright 2007-2011 University Of Southern California

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.

=cut
pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/Properties.pm0000644000175000017500000004421111757531137023316 0ustar ryngeryngepackage Pegasus::Properties;
#
# Provides parsing of Java property files from Perl.
#
# Copyright 2007-2010 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Jens-S. Vöckler voeckler at isi dot edu
# Revision : $Revision: 3828 $
# $Id: Properties.pm 3828 2011-05-20 18:59:51Z voeckler $
#
use 5.006;
use strict;
use warnings;
use vars qw(%initial %system);

require Exporter;
our @ISA = qw(Exporter);

# declarations of methods here. Use the commented body to unconfuse emacs
sub pegasusrc(;$);          # { }
sub parse_properties($;\%); # { }

# Items to export into callers namespace by default. Note: do not export
# names by default without a very good reason. Use EXPORT_OK instead.
# Do not simply export all your public functions/methods/constants.
our $VERSION = '1.0';
$VERSION=$1 if ( '$Revision: 3828 $' =~ /Revision:\s+([0-9.]+)/o );

our $pegasus_env = 'pegasus.env.';
our $pegasus_len = length($pegasus_env);

our @EXPORT_OK = qw($VERSION parse_properties pegasusrc %initial %system $pegasus_env);
our %EXPORT_TAGS = ( all => [ @EXPORT_OK ] );
our @EXPORT = ();

# Preloaded methods go here.
use POSIX qw(uname);
use Carp;
use File::Spec;

sub pegasusrc(;$) {
    # purpose: "static" method to determine location of pegasusrc
    # paramtr: $home (opt. IN): override home location
    # returns: a string
    #
    my $home = shift() || $ENV{HOME} || (getpwuid($>))[7] || File::Spec->curdir();
    File::Spec->catfile( $home, '.pegasusrc' );
}

sub parse_properties($;\%) {
    # purpose: "static" method to parse properties from a file.
    # paramtr: $fn (IN): is the filename of the property file to read
    #          $hashref (IN): more properties for substitutions
    # warning: dies, if the $fn cannot be opened properly.
    # globals: %system (IN): more properties for substitutions
    # returns: a hash of properties, possibly empty.
    #
    my $fn = shift;
    my $hashref = shift || {};  # may be undef'd
    my %result = ();

    open( IN, "<$fn" ) || die "ERROR: open $fn: $!\n";
    print STDERR "# parsing properties in $fn...\n" if $main::DEBUG;

    my $save;
    while ( <IN> ) {
        next if /^[!\#]/;       # comments are skipped
        s/[\r\n]*$//;           # safe chomp
        s/\#(.*)$//;            # NEW: chop in-line comments to EOLN
        s/\\(.)/$1/g;           # replace escaped special characters #!=:
        s/^\s*//;               # replace all starting whitespace
        s/\s*$//;               # replace all trailing whitespace
        next unless length($_); # skip empty lines

        if ( /\\$/ ) {
            # continuation line
            chop;
            $save .= $_;
        } else {
            # regular line
            $_ = $save . $_ if defined $save;
            undef $save;
            print STDERR "# Parsing: $_\n" if $main::DEBUG;

            if ( /([^:= \t]+)\s*[:=]?\s*(.*)/ ) {
                # new fix for auto gen properties
                my ($k,$v) = ($1,$2);
                # substitutions -- works arbitrarily deep?
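                # Added illustration (not part of the original source): a
                # property line like
                #     pegasus.dir.storage=${user.home}/storage
                # is expanded by the loop below, which repeatedly replaces
                # each ${key} with a value from the extra hash, the emulated
                # %system properties, or earlier entries of this file, in
                # that order; unresolved keys become the empty string.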
while ( $v =~ /(\$\{([A-Za-z0-9._]+)\})/g ) { my ($a,$b) = ($1,$2); my $newval = $hashref->{$b} || $system{$b} || $result{$b} || ''; substr($v,index($v,$a),length($a),$newval); } print STDERR "# Storing: $k => $v\n" if $main::DEBUG; # 20110519 (jsv): No key lower-casing requested by FS,KV $result{$k} = $v; } else { carp "Illegal content in $fn:$.\n"; } } } close(IN); %result; } BEGIN { # # Part 1: Assemble %system properties emulating some Java properties # %system = (); # start empty # assemble some default Java properties $system{'file.separator'} = File::Spec->catfile('',''); $system{'java.home'} = $ENV{'JAVA_HOME'} if exists $ENV{'JAVA_HOME'}; $system{'java.class.path'} = $ENV{CLASSPATH} if exists $ENV{CLASSPATH}; $system{'java.io.tmpdir'} = $ENV{TMP} || File::Spec->tmpdir(); # $system{'line.separator'} = "\n"; # Unix @system{'os.name','os.version','os.arch'} = (POSIX::uname())[0,2,4]; $system{'user.dir'} = File::Spec->curdir(); $system{'user.home'} = $ENV{HOME} || (getpwuid($>))[7]; $system{'user.language'} = $ENV{LANG} || 'en'; $system{'user.name'} = $ENV{USER} || $ENV{LOGNAME} || scalar getpwuid($>); $system{'user.timezone'} = $ENV{TZ}; # can be undef'd # not required, but useful $system{'pegasus.home'} = $ENV{'PEGASUS_HOME'}; # can be undef'd # # Part 2: Assemble commandline properties from initial -D argument # %initial = (); # start empty # Extracts -Dk=v properties from @ARGV before Getopt sees it # This will remove *only* the initial -D arguments from the CLI! if ( @ARGV > 0 ) { while ( defined $ARGV[0] && substr( $ARGV[0], 0, 2 ) eq '-D' ) { my $arg = shift(@ARGV); my ($k,$v) = split( /=/, ($arg eq '-D' ? shift(@ARGV) : substr($arg,2)), 2 ); # 20110519 (jsv): No key lower-casing requested by FS,KV #$k = lc $k; if ( $k eq 'pegasus.properties' || $k eq 'pegasus.user.properties' ) { carp "Warning: $k is no longer supported, ignoring, please use --conf\n"; } else { $initial{$k} = $v if length($k); } } } # CLI properties extend (and overwrite) system properties %system = ( %system, %initial ); } # # ctor # sub new { # purpose: Initialize an instance variable # paramtr: $conffile (IN): --conf filename (or undef) # $runprops (IN): properties from rundir (or undef) # warning: exceptions from parse_properties() may be propagated # returns: reference to blessed self # my $proto = shift; my $class = ref($proto) || $proto || __PACKAGE__; my $conffile = shift; my $rundirpfn = shift; my $pegasusrc = pegasusrc(); my %config = (); if ( defined $conffile ) { croak "FATAL: $conffile does not exist" unless -e $conffile; croak "FATAL: $conffile is not readable" unless -r _; if ( -s _ ) { print STDERR "# priority level 1: $conffile\n" if $main::DEBUG; %config = parse_properties($conffile); } else { carp "Warning: $conffile is empty, trying next"; goto LEVEL2; } } elsif ( defined $rundirpfn ) { LEVEL2: croak "FATAL: $rundirpfn does not exist" unless -e $rundirpfn; croak "FATAL: $rundirpfn is not readable" unless -r _; if ( -s _ ) { print STDERR "# priority level 2: $rundirpfn\n" if $main::DEBUG; %config = parse_properties($rundirpfn); } else { carp "Warning: $rundirpfn is empty, trying next priority"; goto LEVEL3; } } else { LEVEL3: # $HOME/.pegasusrc may safely not exist, no failures here if ( -s $pegasusrc ) { print STDERR "# priority level 3: $pegasusrc\n" if $main::DEBUG; %config = parse_properties($pegasusrc); } else { warn "Warning: No property files parsed whatsoever\n"; } } # create instance and return handle to self. # last one in chain below has highest priority. 
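    # Added illustration (not part of the original source): with
    #     %config  = ( 'pegasus.home' => '/opt/pegasus' )
    # and a command line that carried -Dpegasus.home=/usr/local/pegasus,
    # the merge below lets %initial win, so the instance will report
    # '/usr/local/pegasus' for the key 'pegasus.home'.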
    my $self = bless { m_config => { %config, %initial } }, $class;
    $self->setenv();
    $self;
}

sub setenv {
    # purpose: merge properties starting with $pegasus_env into %ENV
    #
    my $self = shift || croak;
    foreach my $k ( keys %{ $self->{'m_config'} } ) {
        $ENV{substr($k,$pegasus_len)} = $self->{'m_config'}{$k}
            if substr($k,0,$pegasus_len) eq $pegasus_env;
    }
}

sub reinit {
    # purpose: ensure that %initial has highest priority
    #
    my $self = shift;
    %{ $self->{'m_config'} } = ( %{ $self->{'m_config'} }, %initial );
}

sub merge {
    # purpose: Read and merge a file into the current property set
    # paramtr: $fn (IN): where to read properties from.
    # warning: Properties from the file will overwrite existing ones.
    #          If the instance has keys (a,b), and the file has (b,c)
    #          the updated instance has keys (a,b,c) with b from file.
    # warning: use reinit() to give CLI properties precedence again.
    # warning: exceptions from parse_properties() will be propagated.
    # returns: hash of all new (merged) properties.
    #
    my $self = shift;
    my $where = shift || croak "need a filename";
    # the new props from the file will merge with the existing properties,
    # where duplicate keys take precedence from the file.
    %{ $self->{'m_config'} } = ( %{ $self->{'m_config'} }, parse_properties($where) );
}

sub property {
    # purpose: Accessor, simultaneous get (1arg) and set (2arg) method
    # paramtr: $key (IN): property name to access
    #          $val (IN): if specified, the new value to set
    # returns: in get mode, the current value,
    #          in set mode, the old value.
    my $self = shift;
    my $key = shift || croak "need a property key";
    my $oldv = $self->{'m_config'}{$key};
    $self->{'m_config'}{$key} = shift if ( @_ );
    $oldv;
}

sub has {
    # purpose: Checks for the existence of a given property key
    # paramtr: $key (IN): property name to access
    # returns: true, if a property with this key exists
    #
    my $self = shift;
    my $key = shift || croak "need a property key";
    exists $self->{'m_config'}{$key};
}

sub all {
    # purpose: Return all known properties as simple hash
    # returns: hash
    #
    my $self = shift;
    %{ $self->{'m_config'} };
}

sub keyset {
    # purpose: finds a subset of keys that matches a RE predicate
    # paramtr: $predicate (opt. IN): predicate to match against
    # returns: a set of keys that match a predicate, or all w/o predicate
    my $self = shift;
    my $predicate = shift;
    if ( defined $predicate ) {
        grep { /$predicate/ } keys %{ $self->{'m_config'} };
    } else {
        keys %{ $self->{'m_config'} };
    }
}

sub propertyset {
    # purpose: finds a subset of keys that matches a prefix
    # paramtr: $prefix (IN): prefix to match against
    #          $remove (IN): if true, remove prefix
    # returns: a hash containing the matching keys and respective values
    my $self = shift;
    my $prefix = shift || croak "need a prefix to match";
    my $length = length($prefix);
    my $remove = shift;

    my %result = ();
    foreach my $key ( grep { substr($_,0,$length) eq $prefix }
                      keys %{ $self->{'m_config'} } ) {
        my $newkey = $remove ? substr($key,$length) : $key;
        $result{$newkey} = $self->{'m_config'}->{$key}
            if ( length($newkey) > 0 );
    }
    %result;
}

sub _quote($) {
    local $_ = shift;
    s{\015}{\\r}g;
    s{\012}{\\n}g;              # escape embedded newlines
    s{([:= \t\f])}{\\$1}g;
    "$_";
}

sub dump {
    # purpose: prints the key set in property format
    # paramtr: $fn (opt. IN): Name of file to print into
    # returns: number of things printed, undef for error.
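    # Added note: keys and values are escaped through _quote() below, so a
    # file written by dump() can be read back by parse_properties() intact.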
    local(*OUT);
    my $self = shift;
    my $fn = shift || '-';      # defaults to stdout

    my $count = 0;
    if ( open( OUT, ">$fn" ) ) {
        print OUT "# generated ", scalar localtime(), "\n";
        foreach my $key ( sort keys %{ $self->{'m_config'} } ) {
            print OUT _quote($key), '=', _quote($self->{'m_config'}->{$key}), "\n";
            $count++;           # count what was actually printed
        }
        close OUT;
    } else {
        carp "open $fn: $!";
        undef $count;
    }
    $count;
}

#
# return 'true' to package loader
#
1;

__END__

=head1 NAME

Pegasus::Properties - parsing of Java property files from Perl.

=head1 SYNOPSIS

    use Pegasus::Properties qw(:all);

    $p = Pegasus::Properties->new( $conffile, undef );
    $p->merge( $fn );
    $p->reinit();
    $p->dump('-');   # dump all known properties on stdout

    something() if $p->property('pegasus.db');
    $p->property( 'pegasus.tc.file', '/some/where' );

    foreach my $key ( $p->keyset('^pegasus\.rc') ) { ... }
    %x = $p->propertyset('pegasus.rc.');
    do( $p->property('asdf') ) if $p->has('asdf');

=head1 DESCRIPTION

The Pegasus::Properties module reads Java properties for the GriPhyN Virtual Data System. It permits commandline-based overwrites of properties using Java's C<-Dprop=val> syntax in Perl by removing initial definitions from C<@ARGV> during module initialization time. Thus, it is recommended to use this module before parsing commandline arguments.

Up to three property files from the GriPhyN Virtual Data System are read from the constructor, please refer to the C<new> method. Property keys are stored as-is; they are no longer lower cased when read.

=head1 VARIABLES

Variables are not exported by default. They must be explicitly imported when importing this module.

=over 4

=item %initial

This variable is initialized during module initialization. It parses the commandline vector C<@ARGV> for initial arguments starting with hyphen capital D like the following:

    perl myprog.pl -Dk1=v1 -Dk2=v2 ...

Such definitions are removed from C<@ARGV>, and the definitions placed into the initial variable. If your application uses capital-D as a valid argument switch, you can still use it, alas never as the first argument. Only property-like definitions that are initial on the commandline will be removed and put into this variable.

Commandline properties have the highest priority of all properties. You should not write to this variable.

=item %system

This variable is initialized to mimic some Java system properties. However, only a smaller subset is provided. These system properties have the lowest priority. You should not write to this variable.

Properties from C<%initial> are merged with a higher priority into the system properties, permitting command-line options to overwrite system properties.

=back

=head1 STATIC METHODS

=over 4

=item Pegasus::Properties::parse_properties( $fn )

=item Pegasus::Properties::parse_properties( $fn, $hashref )

The static method reads a property file, located by $fn, into a single-level Perl hash. If the optional second argument is specified, the hash will be used to do variable substitutions from the second argument properties or system properties. Properties that are not found are replaced by an empty string.

Please note that the method throws an error if the file does not exist or cannot be opened properly. It is up to the caller to catch this exception.

=item Pegasus::Properties::pegasusrc( )

=item Pegasus::Properties::pegasusrc( $home )

This simple static method constructs a filename where to find the C<$HOME/.pegasusrc> file. The location of the home directory can be passed as optional argument, or auto-detected otherwise.
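A small sketch of the static helpers (the guard is needed because C<parse_properties> dies when it cannot open its argument):

    use Pegasus::Properties qw(parse_properties pegasusrc);

    my $rcfile = pegasusrc();        # e.g. $HOME/.pegasusrc
    my %props  = -s $rcfile ? parse_properties($rcfile) : ();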
=back

=head1 INSTANCE METHODS

=over 4

=item new( $conffile, $rundirpropfn )

The constructor needs to know about the possible I<--conf> command-line option file location, and the property file in the designated run directory. Either argument may be C<undef> to indicate that it does not exist. Internally the constructor uses the location of the C<$HOME/.pegasusrc> file, which is automatically constructed.

The constructor attempts to read from the defined file with the highest priority first. If the file does not exist or is not readable, it will throw an exception. If the file is empty (0 byte sized), it will warn and attempt to read the next lower priority (etc.). Values from the C<%initial> hash are merged into the instance with the highest priority.

=item merge( $fn )

The C<merge> method permits you to easily add properties from a file to the current instance. The new properties from the file take a higher priority than the existing ones, in case keys exist in both. Typically, you want to follow C<merge> with C<reinit> to give command-line properties precedence.

=item reinit( )

Will ensure that properties from C<%initial> are merged back into the instance, overwriting any existing properties with the same key.

=item property( $key )

=item property( $key, $value )

If called with a single argument, the property setting of the specified key is obtained. If the property does not exist, the value C<undef> is returned. If called with a second argument, the property of the specified key will be set to the new value. The old value previously known is the result of the method. The emulated system properties will not be considered.

=item has( $key )

This method checks for the existence of a given key in the properties. Unlike the C<property> method, it will not auto-vivify any key.

=item keyset( $predicate )

Given a regular expression predicate, this method returns all keys that match the predicate. Please note that it is recommended to match prefixes by anchoring the expression with an initial caret (C<^>) character, and that you must backslash-escape the period character that is a literal part of most properties.

    foreach my $key ( $p->keyset('^pegasus\.db\.') ) {
        xxx( $p->property($key) );
    }

The above code snippet will only find properties matching the prefix C<pegasus.db.>, but not C<pegasus.db> itself. If invoked without argument, this method will return all keys. The emulated system properties will not be considered.

=item propertyset( $prefix, $truncate )

Given a normal string prefix, this method returns a hash starting with the prefix string. This is not a regular expression match, just plain string prefix matching. If the optional argument $truncate is specified and true, the prefix string will be removed from the keys in the result set.

    %x = $p->propertyset('pegasus.db.');
    foreach my $key ( sort keys %x ) {
        xxx( $x{$key} );
    }

=item dump( $filename )

This is mostly a debug function. It dumps all properties except the artificial system properties into the specified file. For convenience, you can use the hyphen C<-> for I<stdout>.

=back

=head1 SEE ALSO

L

=head1 AUTHOR

Jens-S. Vöckler, C<voeckler at isi dot edu>
Gaurang Mehta, C<gmehta at isi dot edu>

=head1 COPYRIGHT AND LICENSE

Copyright 2007-2011 University Of Southern California

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at

    L<http://www.apache.org/licenses/LICENSE-2.0>

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and limitations under the License. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/Common.pm0000644000175000017500000002531611757531137022417 0ustar ryngerynge# # Provides common sub-functions shared by all workflow programs. # # This file or a portion of this file is licensed under the terms of # the Globus Toolkit Public License, found in file GTPL, or at # http://www.globus.org/toolkit/download/license.html. This notice must # appear in redistributions of this file, with or without modification. # # Redistributions of this Software, with or without modification, must # reproduce the GTPL in: (1) the Software, or (2) the Documentation or # some other similar material which is provided with the Software (if # any). # # Copyright 1999-2004 University of Chicago and The University of # Southern California. All rights reserved. # # Author: Jens-S. Vöckler voeckler@cs.uchicago.edu # Revision : $Revision: 3876 $ # $Id: Common.pm 3876 2011-05-27 22:35:53Z voeckler $ # package Pegasus::Common; use 5.006; use strict; use warnings; use File::Basename qw(basename dirname); require Exporter; our @ISA = qw(Exporter); # declarations of methods here. Use the commented body to unconfuse emacs sub isodate(;$$$); # { } sub isomsdate(;$$$); # { } sub find_exec($;@); # { } sub pipe_out_cmd; # { } sub parse_exit(;$); # { } sub slurp_braindb($); # { } sub version(); # { } sub check_rescue($$); # { } sub log10($); # { } our $jobbase = 'jobstate.log'; # basename of the job state logfile our $brainbase = 'braindump.txt'; # basename of brain dump file # Items to export into callers namespace by default. Note: do not export # names by default without a very good reason. Use EXPORT_OK instead. # Do not simply export all your public functions/methods/constants. our $VERSION = '0.1'; our @EXPORT_OK = qw($VERSION $brainbase $jobbase); our @EXPORT = qw(isodate isomsdate find_exec pipe_out_cmd parse_exit slurp_braindb version check_rescue log10); our %EXPORT_TAGS = ( all => [ @EXPORT ] ); # Preloaded methods go here. use POSIX qw(strftime); use File::Spec; use Carp; BEGIN { # non-fatally attempt to load semi-standard Time::HiRes module eval { require Time::HiRes; import Time::HiRes qw(time); }; } sub isodate(;$$$) { # purpose: convert seconds since epoch into ISO timestamp # paramtr: $seconds (opt. IN): seconds since epoch, now is default # $utc (opt. IN): if true, use a UTC timestamp # $short (opt. IN): if true, use extra short format # warning: UTC short format omits the center 'T' separator # returns: string of ISO timestamp my $now = shift || time(); my $utc = shift; my $short = shift; my $result; if ( $short ) { $result = $utc ? strftime( "%Y%m%d%H%M%SZ", gmtime($now) ) : strftime( "%Y%m%dT%H%M%S%z", localtime($now) ); } else { $result = $utc ? strftime( "%Y-%m-%dT%H:%M:%SZ", gmtime($now) ) : strftime( "%Y-%m-%dT%H:%M:%S%z", localtime($now) ); } $result; } sub isomsdate(;$$$) { # purpose: see isodate, but with millisecond extension # returns: formatted ISO 8601 time stamp # my $now = shift || time(); my $utc = shift; my $short = shift; my $result = isodate($now,$utc,$short); my $s = substr( sprintf( "%.3f", $now-int($now) ), 1 ); substr( $result, ( $utc ? -1 : -5 ), 0, $s ); $result; } sub find_exec($;@) { # purpose: determine location of given binary in $PATH # paramtr: $program (IN): executable basename to look for # @extra (opt. 
IN): additional directories to search
    # returns: fully qualified path to binary, undef if not found
    my $program = shift;
    foreach my $dir ( ( File::Spec->path, @_ ) ) {
        my $fs = File::Spec->catfile( $dir, $program );
        return $fs if -x $fs;
    }
    undef;
}

sub pipe_out_cmd {
    # purpose: Runs a cmd w/o invoking a shell, and captures stdout+stderr
    # warning: DO NOT use shell meta characters in the argument string.
    # paramtr: @arg (IN): argument string, executable first
    # returns: failed: undef
    #          scalar: first line of output
    #          vector: all lines of output
    local(*READ);               # must use type glob and local for FDs

    my $pid = open( READ, '-|' );
    return undef unless defined $pid;

    my @result;
    if ( $pid ) {
        # parent
        while ( <READ> ) {
            s/[\r\n]+$//;
            push( @result, $_ );
        }
        close READ;
    } else {
        # child
        open( STDERR, '>&STDOUT');
        select(STDERR); $|=1;
        select(STDOUT); $|=1;
        exec { $_[0] } @_;      # lotsa magic :-)
        exit 127;               # no such exe :-(
    }
    wantarray ? @result : $result[0];
}

sub slurp_braindb($) {
    # purpose: Read extra configuration from braindump database
    # paramtr: $run (IN): $run directory
    # returns: a hash with the configuration, empty on error.
    my $run = shift;
    my $braindb = File::Spec->catfile( $run, $brainbase );
    my %config = ();
    if ( open( DB, "<$braindb" ) ) {
        while ( <DB> ) {
            s/[\r\n]*$//;
            my ($k,$v) = split /\s/, $_, 2;
            if ( $k eq 'run' && $v ne $run && $run ne '.' ) {
                warn "Warning: run directory mismatch, using $run\n";
                $config{$k} = $run;
                next;
            }
            $v =~ s/^\s*//;
            $v =~ s/\s*$//;
            $config{$k} = $v;
        }
        close DB;
        print STDERR "# slurped $braindb\n" if $main::DEBUG;
    }
    wantarray ? %config : croak "wrong context";
}

sub version() {
    # obtain pegasus version
    my $version = `pegasus-version`;
    chomp($version);
    $version;
}

sub parse_exit(;$) {
    # purpose: parse an exit code any way possible
    # paramtr: $ec (IN): exit code from $?
    # returns: string that shows what went wrong
    my $ec = shift;
    $ec = $? unless defined $ec;

    my $result;
    if ( ($ec & 127) > 0 ) {
        my $signo = ($ec & 127);
        my $core = ( ($ec & 128) == 128 ? ' (core)' : '' );
        $result = "died on signal $signo$core";
    } elsif ( ($ec >> 8) > 0 ) {
        $result = "exit code @{[$ec >> 8]}";
    } else {
        $result = "OK";
    }
    $result;
}

sub check_rescue($$) {
    # purpose: Check for the existence of (multiple levels of) rescue DAGs.
    # paramtr: $dir (IN): directory to check for the presence of rescue DAGs.
    #          $dag (IN): filename of regular DAG file.
    # returns: List of rescue DAGs, may be empty, if none found
    my $dir = shift || croak "Need a directory to check";
    my $dag = shift || croak "Need a dag filename";
    my $base = basename($dag);
    my @result = ();

    local(*DIR);
    if ( opendir( DIR, $dir ) ) {
        while ( defined ($_ = readdir(DIR)) ) {
            next unless /^$base/o;      # only pegasus-planned DAGs
            next unless /\.rescue$/;    # that have a rescue DAG.
            push( @result, File::Spec->catfile( $dir, $_ ) );
        }
        @result = sort @result;
        closedir DIR;
    }
    wantarray ? @result : $result[$#result];
}

sub log10($) {
    # purpose: Simpler than ceil(log($x) / log(10))
    # paramtr: $x (IN): non-negative number
    # returns: approximate width of number
    use integer;
    my $x = shift;
    my $result = 0;
    while ( $x > 1 ) {
        $result++;
        $x /= 10;
    }
    $result || 1;
}

# must return 'true' to package loader
1;

__END__

=head1 NAME

Pegasus::Common - generally useful collection of methods.

=head1 SYNOPSIS

    use Pegasus::Common;

    $now = isodate();
    $when = isodate( $then );
    $zulu = isodate( time(), 1 );
    $short = isodate( $then, 0, 1 );
    $millis = isomsdate();
    $version = version();

    my $app = find_exec( 'ls' );
    my $gpi = find_exec( 'grid-proxy-info',
                         File::Spec->catdir( $ENV{'GLOBUS_LOCATION'}, 'bin' ) );
    my @result = pipe_out_cmd( $app, '-lart' );
    warn "# ", parse_exit($?), "\n";

    my %x = slurp_braindb('rundir');

=head1 DESCRIPTION

This module collects a few generally useful tools for miscellaneous Perl work.

=head1 FUNCTIONS

=over 4

=item isodate();

=item isodate($when);

=item isodate($when,$zuluflag);

=item isodate($when,$zuluflag,$shortflag);

The C<isodate> function has various formatting options, permitting arbitrary time stamps, the choice between local and zulu (UTC) time, and the choice between a regular and an extra concise output format. It does not use millisecond extensions (yet).

=item isomsdate();

=item isomsdate($whenms);

=item isomsdate($whenms,$zuluflag);

=item isomsdate($whenms,$zuluflag,$shortflag);

The C<isomsdate> function works like the C<isodate> function. The difference is the milliseconds extension in the time stamp. In order to properly use the millisecond extension, and not have C<.000> appear, you need to import the L<Time::HiRes> module.

=item find_exec( $basename );

=item find_exec( $basename, @extra );

The C<find_exec> function searches the C<PATH> environment variable for the existence of the given base name. Please only use a base name for the first argument. If you need to search additional directories outside your C<PATH> directories, add as many as you need as additional optional arguments.

=item pipe_out_cmd( @argv );

The C<pipe_out_cmd> function runs a command without invoking a shell, capturing both stdout and stderr, and is an efficient replacement for the backtick operator. The first and only mandatory entry in the argument vector is the fully-qualified path to an executable. This version provides neither a I<stdin> override nor a time-out mechanism. It should preferably be used with non-blocking applications. Please refer to the C<$?> variable after execution.

=item $x = parse_exit( $status );

The C<parse_exit> function parses the C<$?> exit code from a program, and provides a concise string describing the exit scenario. There are generally three possibilities. If the exit code indicated a signal, the signal number and possibly core file is returned as string. If the exit code was not 0, the exit condition is returned. In the remaining case, C<$?> was 0, and "OK" is returned.

=item %db = slurp_braindb($rundir);

The C<slurp_braindb> function reads the contents of the file C<braindump.txt> in the specified run directory. This is a workflow helper function of less general applicability.

=item $ver = version();

The C<version> function runs the C<pegasus-version> command and returns the version of Pegasus being used.

=back

=head1 SEE ALSO

L

=head1 AUTHOR

Jens-S. Vöckler, C<voeckler@cs.uchicago.edu>
Gaurang Mehta, C<gmehta at isi dot edu>

=head1 COPYRIGHT AND LICENSE

This file or a portion of this file is licensed under the terms of the Globus Toolkit Public License, found in file GTPL, or at http://www.globus.org/toolkit/download/license.html. This notice must appear in redistributions of this file, with or without modification.

Redistributions of this Software, with or without modification, must reproduce the GTPL in: (1) the Software, or (2) the Documentation or some other similar material which is provided with the Software (if any).

Copyright 1999-2004 University of Chicago and The University of Southern California. All rights reserved.
=cut
pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/Catalog/0000755000175000017500000000000011757531667022204 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/Catalog/Site/0000755000175000017500000000000011757531667023110 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Pegasus/Catalog/Site/TG.pm0000755000175000017500000002520411757531137023756 0ustar ryngeryngepackage Pegasus::Catalog::Site::TG;

# Copyright 2007 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: Gaurang Mehta gmehta at isi dot edu
# Revision : $Revision$
# Id : $Id$
#
use strict;
use File::Temp qw/ tempfile tempdir/;
use Socket;
use XML::Parser;
use vars qw($VERSION $AUTOLOAD);

use Exporter;
our @ISA = qw(Exporter);
our @EXPORT = qw($VERSION convert_siteinfo get_sites);
sub convert_siteinfo(%);
sub get_sites($$);
#our @EXPORT_OK = qw($VERSION convert_siteinfo_to_vds get_site_information get_sites_in_grid);
our @EXPORT_OK = qw();

$VERSION=$1 if ( '$Revision: 50 $' =~ /Revision:\s+([0-9.]+)/o );

my ( @stack, %sites );
my ( $siteid, $resourceid, $resourcename, $kit, $support, $service,
     $endpoint, $serviceversion, $servicename );

###################
# %sites data layout
# kits are of type "SCHEDD", "DATA", "FILESYSTEM"
# services are of type prews-gram, ws-gram
#
# %sites = { $site_$support => { $kit => { $service => { Name=>, Type=>, Endpoint=>, Version=> }
#                                        }
#                              }
#          }
###
my %kits = ( 'remote-compute.teragrid.org' => 'SCHEDD',
             'data-movement.teragrid.org'  => 'DATA',
             'wan-gpfs.teragrid.org'       => 'FILESYSTEM' );

# hashmap that maps vds-get-sites keys to keys in VORS namespace
my %sitecatalogkeys = ( NAME => 'name',
                        appdir => 'app_loc',
                        datadir => 'data_loc',
                        grid3dir => 'osg_grid', # osg_grid is now populated
                        wntmpdir => 'wntmp_loc',
                        tmpdir => 'tmp_loc',
                        CS_GATEKEEPER => 'cs_gatekeeper_hostname',
                        CS_GKPORT => 'cs_gatekeeper_port',
                        CS_GRIDFTP => 'cs_gsiftp_hostname',
                        CS_GRIDFTP_PORT => 'cs_gsiftp_port',
                        SS_GATEKEEPER => 'ss_gatekeeper_hostname',
                        SS_GKPORT => 'ss_gatekeeper_port',
                        SS_GRIDFTP => 'ss_gsiftp_hostname',
                        SS_GRIDFTP_PORT => 'ss_gsiftp_port',
                        VO => 'vo',
                        VERSION => 'vdt_version' );

sub convert_siteinfo(%){
    # purpose: an adapter function to translate site information from
    #          webmds to sitecatalog format.
    # paramtr: %sites information about sites in the WEBMDS format.
    # returns: a hash of hash containing information about the site in
    #          sitecatalog format.
    #
    my %sites = @_;
    my %sitecatalog;    # stores information in sitecatalog format
    for my $id ( keys %sites ) {
        print "Site=$id \n";
        for my $kit ( keys %{ $sites{$id} } ) {
            print " KIT=$kit \n";
            for my $support ( keys %{ $sites{$id}->{$kit} } ) {
                if ( $support eq "production" ) {
                    print "  SUPPORT=$support \n";
                    for my $service ( keys %{ $sites{$id}->{$kit}->{$support} } ) {
                        print "   SERVICE=$service \n";
                        my %entries = %{ $sites{$id}->{$kit}->{$support}->{$service} };
                        print "    $entries{ENDPOINT}\n";
                        print "    $entries{RESOURCE}\n";
                        print "    $entries{NAME}\n";
                        if ( $service eq "prews-gram" ) {
                            my $siteid = $entries{RESOURCE};
                            $siteid =~ s/ /_/g;
                            $siteid .= "_$support";
                            $sites{$siteid}->{name} = $siteid;
                            $sites{$siteid}->{cs_gatekeeper_host} = $entries{ENDPOINT};
                        }
                    }
                }
            }
        }
    }

    # NB: the remainder below stems from an older VORS-based implementation
    # and references hashes (%keys_vds_to_vors_adapter, %vors_site,
    # %vds_site) that are never declared in this module; it is disabled so
    # the module compiles under strict.
    #
    # foreach my $key ( keys %keys_vds_to_vors_adapter ) {
    #     $vds_site{$key} = $vors_site{ $keys_vds_to_vors_adapter{$key} };
    # }
    # # set grid3dir to appdir if undefined in VORS
    # if ( !defined($vds_site{'grid3dir'}) ) {
    #     warn "# substituting OSG_GRID value to $vds_site{'appdir'} for site $vds_site{'name'} \n";
    #     $vds_site{'grid3dir'} = $vds_site{'appdir'}
    # }
    # # set the type of jobmanagers
    # # Note we are losing information here, as transfer
    # # is being implicitly set to fork
    # my $jm_types = $vors_site{'exec_jm'};
    # $jm_types =~ s/^\S+\/\S+-//;
    # $jm_types = 'fork'.','.$jm_types;
    # $vds_site{'jobmanagers'} = $jm_types;
    # # some other values populated by default till
    # # we figure out how to get them
    # $vds_site{'vo'} = 'ivdgl' if !defined($vds_site{'vo'});
    # $vds_site{'num_cpus'} = '50';
    # $vds_site{'ncpus'} = '50';
    # return %vds_site;

    return %sites;      # return the (partially) converted copy
}

sub get_sites($$) {
    # purpose: query a WEBMDS server, and return information for all sites.
    # paramtr: $host the host where the WEBMDS server is running
    #          $port the port on the host.
    # returns: Returns a deep hash structure, see above.
    my ( $host, $port ) = @_;

    # construct the HTTP get request
    my $http_get_req = "GET /webmds/webmds?info=tgislocal\015\012";
    # print "host = $host:$port, get string = $http_get_req\n";

    socket( SOCK, PF_INET, SOCK_STREAM, getprotobyname('tcp') )
        || die "ERROR creating socket: $!";
    connect( SOCK, sockaddr_in( $port, inet_aton($host) ) )
        || die "ERROR Connecting to $host:$port : $!";
    send SOCK, $http_get_req, 0;

    # store the webmds info in a temp file
    my ( $fh, $filename ) = tempfile("webmds.XXXXXX");
    while ( <SOCK> ) { print $fh $_; }
    close(SOCK);
    close($fh);

    parse_xml($filename);
    return %sites;
}

sub parse_xml($) {
    # purpose: Parse WEBMDS XML
    # in: $filename XML file to parse
    my $fn = shift;
    open( my $fh, "<$fn" ) || die "Cannot open file $fn: $!\n";
    my $parser = new XML::Parser( 'ErrorContext' => 2 );
    $parser->setHandlers( 'XMLDecl' => \&ignore_handler,
                          'Start' => \&start_handler,
                          'End' => \&end_handler,
                          'Char' => \&char_handler,
                          'Comment' => \&ignore_handler,
                          'Default' => \&default_handler );
    $parser->parse($fh);
    close($fh);
}

sub ignore_handler {
    # ignore Comments and XML header for now
}

sub start_handler {
    my $self = shift;
    my $element = shift;        # name of element
    # my %attr = @_;            # attributes
    # push( @stack, $element );
    if ( @stack == 0 ) {
        push( @stack, $element ) if ( $element eq 'ns0:IndexRP' );
    } elsif ( @stack == 1 && $self->current_element eq 'ns0:IndexRP' ) {
        push( @stack, $element ) if ( $element eq 'ns1:V4KitsRP' );
    } elsif ( @stack == 2 && $self->current_element eq 'ns1:V4KitsRP' ) {
        push( @stack, $element ) if ( $element eq 'V4KitsRP' );
    } elsif ( @stack == 3 && $self->current_element eq 'V4KitsRP' ) {
        push( @stack, $element ) if ( $element eq 'KitRegistration' );
    } elsif ( @stack == 4 && $self->current_element eq 'KitRegistration' ) {
        push( @stack,
$element ) if ( $element eq 'ResourceName' || $element eq 'ResourceID' ); if ( $element eq 'Kit' ) { push( @stack, $element ); } } elsif ( @stack == 5 && $self->current_element eq 'Kit' ) { push( @stack, $element ) if ( $element eq 'SupportLevel' || $element eq 'Name' ); if ( $element eq 'Service' ) { push( @stack, $element ); } } elsif ( @stack == 6 && $self->current_element eq 'Service' ) { push( @stack, $element ) if ( $element eq 'Type' || $element eq 'Version' || $element eq 'Endpoint' || $element eq 'Name' ); } 1; } sub end_handler { my $self = shift; my $el = shift; # if($elements{$el}){ print "END STACK = " . scalar(@stack) . " el=$el\n"; # print "Stack = @stack\n" } pop(@stack) if ( ( @stack == 1 && $el eq 'ns0:IndexRP' ) || ( @stack == 2 && $el eq 'ns1:V4KitsRP' ) || ( @stack == 3 && $el eq 'V4KitsRP' ) || ( @stack == 6 && ( $el eq 'SupportLevel' || $el eq 'Name' ) ) || ( @stack == 5 && ( $el eq 'ResourceName' || $el eq 'ResourceID' ) ) || ( @stack == 7 && ( $el eq 'Name' || $el eq 'Type' || $el eq 'Version' || $el eq 'Endpoint' ) ) ); if ( @stack == 4 && $el eq 'KitRegistration' ) { undef $resourceid; undef $resourcename; pop(@stack); } if ( @stack == 5 && $el eq 'Kit' ) { undef $kit; undef $support; pop(@stack); } if ( @stack == 6 && $el eq 'Service' ) { if ( ( $kit eq 'remote-compute.teragrid.org' && ( $service eq 'prews-gram' || $service eq 'ws-gram' ) ) || ( $kit eq 'data-movement.teragrid.org' && $service eq 'gridftp' ) || ( $kit eq 'wan-gpfs.teragrid.org' && $service eq 'gpfs' ) ) { $sites{$resourceid}->{ $kits{$kit} }->{$support}->{$service} ->{'NAME'} = $servicename; $sites{$resourceid}->{ $kits{$kit} }->{$support}->{$service} ->{'VERSION'} = $serviceversion; $sites{$resourceid}->{ $kits{$kit} }->{$support}->{$service} ->{'ENDPOINT'} = $endpoint; $sites{$resourceid}->{ $kits{$kit} }->{$support}->{$service} ->{'RESOURCE'} = $resourcename; } undef $service; undef $servicename; undef $serviceversion; undef $endpoint; pop(@stack); } } sub char_handler { my $self = shift; my $text = shift; if ( $text =~ /^\s+$/ ) { # ignore } else { my $i = @stack; my $el = $stack[ $i - 1 ]; if ( length($text) ) { if ( $i == 5 ) { if ( $el eq 'ResourceName' ) { $resourcename .= $text; } elsif ( $el eq 'ResourceID' ) { $resourceid .= $text; } } elsif ( $i == 6 ) { if ( $el eq 'SupportLevel' ) { $support .= $text; } elsif ( $el eq 'Name' ) { $kit .= $text; } } elsif ( $i == 7 ) { if ( $el eq 'Type' ) { $service .= $text; } elsif ( $el eq 'Version' ) { $serviceversion .= $text; } elsif ( $el eq 'Endpoint' ) { $endpoint .= $text; } elsif ( $el eq 'Name' ) { $servicename .= $text; } } } } 1; } sub default_handler { my $self = shift; my $text = shift; if ( $text =~ /^\s*$/ ) { # ignore } else { print "unknown xml \"$text\", ignoring\n"; } 1; } # # return 'true' to package loader # 1; __END__ #for my $site ( keys %sites ) { # print "Site=$site \n"; # for my $kit ( keys %{ $sites{$site} } ) { # print " KIT=$kit \n"; # for my $support ( keys %{ $sites{$site}->{$kit} } ) { # print " SUPPORT=$support \n"; # for my $service ( keys %{ $sites{$site}->{$kit}->{$support} } ) { # print " SERVICE=$service \n"; # my %entries = # print " $entries{ENDPOINT}\n"; # print " $entries{RESOURCE}\n"; # print " $entries{NAME}\n"; # # } # } # } # print "\n\n"; #} pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Site/0000755000175000017500000000000011757531667020127 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Site/Selector.pm0000755000175000017500000002567311757531137022255 0ustar ryngerynge# # Basic class to parse and 
provide site-selector data
#
# This file or a portion of this file is licensed under the terms of
# the Globus Toolkit Public License, found in file GTPL, or at
# http://www.globus.org/toolkit/download/license.html. This notice must
# appear in redistributions of this file, with or without modification.
#
# Redistributions of this Software, with or without modification, must
# reproduce the GTPL in: (1) the Software, or (2) the Documentation or
# some other similar material which is provided with the Software (if
# any).
#
# Copyright 1999-2004 University of Chicago and The University of
# Southern California. All rights reserved.
#
# Author: Jens-S. Vöckler voeckler@cs.uchicago.edu
# Revision: $Revision: 3616 $
#
package Site::Selector;
use 5.006;
use strict;
use warnings;

require Exporter;
our @ISA = qw(Exporter);

# declare prototypes
sub parse_file_v2($);           # { }

# which keys may occur multiple times?
our %multikeys = ( 'resource.id' => 1, 'input.lfn' => 1, 'resource.bad' => 1 );

# Items to export into callers namespace by default. Note: do not export
# names by default without a very good reason. Use EXPORT_OK instead.
# Do not simply export all your public functions/methods/constants.
our $VERSION='1.0';
$VERSION=$1 if ( '$Revision: 3616 $' =~ /Revision:\s+([0-9.]+)/o );
our %EXPORT_TAGS = ();
our @EXPORT_OK = qw($VERSION %multikeys parse_file_v2);
our @EXPORT = qw();

#
# --- methods ---------------------------------------------------
#
use Carp;

sub parse_file_v2($) {
    # purpose: static method to parse a site selector file
    # paramtr: $fn (IN): filename of communication file
    # returns: [0]: hash ref to key-value-pairs
    #          [1]: hash ref to site-gridftp array
    #          returns undef for both refs in case of error, check $!
    my $fn = shift;
    my (%hash,%site,%bad);

    local(*IN);
    if ( open( IN, "<$fn" ) ) {
        # expect the first line to say "version=2"
        die "Wrong format version: \"$_\"\n"
            unless ( ($_ = <IN>) =~ /^version=2/ );
        $hash{version} = 2;

        my ($key,$value);
        while ( <IN> ) {
            # trim and chomp
            s/[ \t\r\n]+$//;
            next if length($_) < 1;

            # split line at first equal sign
            ($key,$value) = split /=/, $_, 2;

            # check for keys with multiple occurrences
            if ( exists $multikeys{$key} ) {
                # store vector reference for multikeys
                push( @{$hash{$key}}, $value );

                # compile site catalog
                if ( $key eq 'resource.id' ) {
                    ($key,$value) = split /\s+/, $value, 2;
                    push( @{$site{$key}}, $value );
                } elsif ( $key eq 'resource.bad' ) {
                    ($key,$value) = split /\s+/, $value, 2;
                    $bad{$key} = $value;
                }
            } else {
                # store scalar for singular keys
                $hash{$key} = $value;
            }
        }
        close IN;
    }

    # done
    (\%hash,\%site,\%bad);
}

#
# ctor
#
sub new {
    # purpose: Initialize an instance variable
    # paramtr: $filename (IN): path to site-selector file
    # returns: reference to instance, or undef in case of failure
    my $proto = shift;
    my $class = ref($proto) || $proto || __PACKAGE__;
    my $filename = shift || croak "c'tor requires a filename argument";

    # slurp file
    my ($hash,$site,$bad) = parse_file_v2($filename);
    return undef unless ( scalar %{$hash} );

    # return the perl way
    bless { 'm_filename' => $filename,
            'm_hash' => $hash,
            'm_site' => $site,
            'm_bad' => $bad }, $class;
}

sub filename {
    # purpose: returns the name of the communication file
    my $self = shift;
    $self->{'m_filename'};
}

sub site {
    # purpose: returns site knowledge
    # paramtr: $site (opt. IN): site handle
    # returns: case [no $site]: list of all site handles
    #          case [$site]: in array context, all gridftp servers,
    #                        in scalar context, first gridftp server.
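    # Illustrative usage (added; the handle 'PDSF' is taken from the POD
    # example below):
    #   my @handles = $s->site();       # list of all site handles
    #   my @uris    = $s->site('PDSF'); # all gridftp URIs for one handle
    #   my $first   = $s->site('PDSF'); # scalar context: first URI only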
    my $self = shift;
    my $site = shift;
    if ( defined $site ) {
        wantarray ? @{$self->{'m_site'}->{$site}} :
                    $self->{'m_site'}->{$site}->[0];
    } else {
        keys %{$self->{'m_site'}};
    }
}

sub bad {
    # purpose: manages bad site knowledge, which may be empty
    # paramtr: $site (opt. IN): site handle
    # returns: case [no $site]: list of all bad handles, oldest first
    #          case [$site]: UTC timestamp of site being detected bad
    my $self = shift;
    my $site = shift;
    my $x = $self->{'m_bad'};
    return undef unless ( defined $x && scalar %{$x} );
    if ( defined $site ) {
        $x->{$site};
    } else {
        # oldest first, as documented below
        sort { $x->{$a} <=> $x->{$b} } keys %{$x};
    }
}

sub hash {
    # purpose: manages hash knowledge
    # paramtr: $key (opt. IN): key to read
    # returns: case [no $key]: return list of all keys
    #          case [$key NOT IN %multikeys]: return scalar value
    #          case [$key IN %multikeys]: in scalar context first element,
    #                                     in array context whole array
    my $self = shift;
    my $key = shift;
    if ( defined $key ) {
        if ( $multikeys{$key} ) {
            wantarray ? @{$self->{'m_hash'}->{$key}} :
                        $self->{'m_hash'}->{$key}->[0];
        } else {
            $self->{'m_hash'}->{$key};
        }
    } else {
        keys %{$self->{'m_hash'}};
    }
}

#
# return 'true' to package loader
#
1;

__END__

=head1 NAME

Site::Selector - provides attributes of a site-selection communication file.

=head1 SYNOPSIS

    use Site::Selector;

    my $s = Site::Selector->new( 'file.kss' ) || die;
    print $s->filename, "\n";   # prints file.kss

    foreach my $site ( $s->site ) {
        work( $s->site($site) );
    }

    foreach my $key ( $s->hash ) {
        print $key, '=', $s->hash($key), "\n";
    }

=head1 DESCRIPTION

The Site::Selector module provides a simple interface to work with the Euryale and Pegasus site-selection mechanism. The mechanism provides selection information about the sites, files, and the job in a (temporary) file, which is passed as first and only argument to the site selector. In return, the site selectors expect one line on I<stdout> of the site selector reading

    SOLUTION:handle

or

    SOLUTION:handle:more

So don't write anything onto I<stdout> yourself.

The Site::Selector module parses a given temporary file, which is passed as mandatory argument to the constructor. The produced instance provides simplified access to the arguments inside the file.

=head1 VARIABLES

Class variables are not exported by default. They must be explicitly imported when importing this module.

=over 4

=item %multikeys

is a hash that enumerates all those keys which may occur multiple times in the temporary file. Values from these keys will always be stored internally inside an array.

=back

=head1 METHODS

=over 4

=item Site::Selector::parse_file_v2($filename)

This static function reads a temporary file, and on success returns a vector with hash references. The first hash reference contains all keys from the temporary file. The second hash reference maps site handles from the C<resource.id> key to a vector of gridftp server URIs. The third hash reference may be unused. It maps bad sites to the UTC timestamp when they were (last) detected being bad. In case of error, undef is returned. The C<die> function is employed if the file is of the wrong version format.

The static method is usually invoked by the constructor, and should not be used directly.

=item new( $filename )

The constructor reads the temporary file passed as F<$filename>, and compiles the internal data structures. On success, a blessed instance is returned. On error, undef is returned.

=item filename

is a simple accessor, returning the filename that was passed to the constructor.

=item site

invoked without arguments, this method returns a list of all site handles. A site handle is a unique identifier for a remote compute resource.

=item site( $handle )

invoked with a single argument, the result depends on the caller's context. In array context, a list of all gridftp-URIs is returned. In scalar context, only the first (possibly empty) gridftp-URI is returned.

=item bad

invoked without arguments, this method returns a list of all known site handles that were marked bad. The list is ordered with the oldest first. A site handle is a unique identifier for a remote compute resource. If the feature is not supported, the C<undef> value is returned.

=item bad( $handle )

invoked with a single argument, the result is the UTC timestamp when the site was detected being bad. However, if no such site exists, the result is undefined.

=item hash

invoked without arguments, this method returns a list of all keys available from the temporary file data structures.

=item hash( $key )

invoked with a single scalar, the result depends on multiple factors: If the key is not a part of the C<%multikeys> structure, a scalar of the value for the key is returned. If the key is in C<%multikeys>, in array context, the full array of values accumulated is returned. In scalar context, only the first entry is returned.

=back

=head1 EXAMPLE

The following shows an example for a transient file that is passed between a concrete planner and the site selector module:

    version=2.0
    transformation=air::alignwarp
    derivation=air3::i1129_3.anon000001
    job.id=ID000001
    wf.manager=dagman
    wf.label=air25-0
    wf.stamp=20040713215939Z
    vo.name=ivdgl
    vo.group=ivdgl1
    resource.id=UM_ATLAS gsiftp://atgrid.grid.umich.edu/
    resource.id=Rice_Grid3 gsiftp://bonner-pcs11.rice.edu/
    resource.id=CalTech_Grid3 gsiftp://citgrid3.cacr.caltech.edu/
    resource.id=ATLAS_SMU gsiftp://mcfarm.physics.smu.edu/
    resource.id=UBuffalo_CCR gsiftp://acdc.ccr.buffalo.edu/
    resource.id=KNU gsiftp://cluster28.knu.ac.kr/
    resource.id=UC_ATLAS_Tier2 gsiftp://tier2-01.uchicago.edu/
    resource.id=UTA_DPCC gsiftp://atlas.dpcc.uta.edu/
    resource.id=BNL_ATLAS gsiftp://spider.usatlas.bnl.gov/
    resource.id=IU_ATLAS_Tier2 gsiftp://atlas.iu.edu/
    resource.id=PDSF gsiftp://pdsfgrid3.nersc.gov/
    resource.id=PDSF gsiftp://pdsfgrid2.nersc.gov/
    resource.id=PDSF gsiftp://pdsfgrid1.nersc.gov/
    resource.id=UNM_HPC gsiftp://lldimu.alliance.unm.edu/
    input.lfn=fmri.3472-3_anonymized.img
    input.lfn=fmri.1129-3_anonymized.hdr
    input.lfn=fmri.1129-3_anonymized.img
    input.lfn=fmri.3472-3_anonymized.hdr

More keys may be added at the leisure of the planners. Site selectors should ignore keys they don't understand, but not warn.

=head1 SEE ALSO

L

=head1 AUTHOR

Jens-S. Vöckler, C<voeckler@cs.uchicago.edu>

=head1 COPYRIGHT AND LICENSE

This file or a portion of this file is licensed under the terms of the Globus Toolkit Public License, found in file GTPL, or at http://www.globus.org/toolkit/download/license.html. This notice must appear in redistributions of this file, with or without modification.

Redistributions of this Software, with or without modification, must reproduce the GTPL in: (1) the Software, or (2) the Documentation or some other similar material which is provided with the Software (if any).

Copyright 1999-2004 University of Chicago and The University of Southern California. All rights reserved.
=cut
pegasus-wms_4.0.1+dfsg/lib/pegasus/perl/Site/Intent.pm0000755000175000017500000003107011757531137021722 0ustar ryngerynge#
# Records the intent to run a job at a certain site
#
# This file or a portion of this file is licensed under the terms of
# the Globus Toolkit Public License, found in file GTPL, or at
# http://www.globus.org/toolkit/download/license.html. This notice must
# appear in redistributions of this file, with or without modification.
#
# Redistributions of this Software, with or without modification, must
# reproduce the GTPL in: (1) the Software, or (2) the Documentation or
# some other similar material which is provided with the Software (if
# any).
#
# Copyright 1999-2004 University of Chicago and The University of
# Southern California. All rights reserved.
#
# Author: Jens-S. Vöckler voeckler@cs.uchicago.edu
# Revision: $Revision: 50 $
#
package Site::Intent;
use 5.006;
use strict;
#use warnings;
use File::Basename qw(dirname);
use Fcntl qw(:DEFAULT :flock);
use DB_File;
use Errno qw(EINVAL);
use Time::HiRes ();             # create_lock() calls Time::HiRes::sleep

require Exporter;
our @ISA = qw(Exporter);

# Items to export into callers namespace by default. Note: do not export
# names by default without a very good reason. Use EXPORT_OK instead.
# Do not simply export all your public functions/methods/constants.
our $VERSION='1.0';
$VERSION=$1 if ( '$Revision: 50 $' =~ /Revision:\s+([0-9.]+)/o );

# prototypes
sub create_lock($);             # { }
sub delete_lock($);             # { }

our %EXPORT_TAGS = ( lock => [ qw!create_lock delete_lock! ] );
our @EXPORT_OK = qw($VERSION create_lock delete_lock);
our @EXPORT = qw();

# auto-cleanup keys
my %atexit = ();
END { unlink( keys %atexit ) if scalar %atexit }

#
# --- methods ---------------------------------------------------
#
use Carp;

sub create_lockfile($) {
    # purpose: create a lock file NFS-reliably
    # warning: use create_lock, not this function
    # paramtr: $fn (IN): name of main file to lock
    # returns: 1 on success, undef on failure to lock
    my $tolock = shift || die;
    local(*LOCK);
    my $lock = "$tolock.lock";
    my $uniq = "$tolock.$$";
    if ( sysopen( LOCK, $uniq, O_CREAT|O_EXCL|O_TRUNC|O_WRONLY ) ) {
        warn( "created unique $uniq\n" ) if ( $main::DEBUG > 1 );
        $atexit{$uniq} = 1;
        syswrite( LOCK, "$$\n" );
        close LOCK;
        if ( link( $uniq, $lock ) ) {
            # created link
            warn( "hardlink locked\n" ) if ( $main::DEBUG > 1 );
            $atexit{$lock} = 1;
            unlink($uniq) && delete $atexit{$uniq};
            return 1;
        } else {
            # unable to create link, check errno
            warn( "while locking: $!\n" ) if ( $main::DEBUG > 1 );
            if ( (stat($uniq))[3] == 2 ) {
                # lock was still successful
                $atexit{$lock} = 1;
                warn( "link-count locked\n" ) if ( $main::DEBUG > 1 );
                return 1;
            }
        }
    } else {
        warn( "Locking: open $uniq: $!\n" );
    }
    unlink $uniq;
    undef;
}

sub break_lock($) {
    # purpose: check for a dead lock file, and remove if dead
    # paramtr: $fn (IN): name of the file to create lock file for
    # returns: undef if the lock is valid, 1..2 if it was forcefully
    #          removed, and 0, if it could not be removed.
    my $fn = shift;
    my $lock = "$fn.lock";
    local(*LOCK);
    if ( open( LOCK, "<$lock" ) ) {
        my $pid = <LOCK>;
        close LOCK;
        chomp($pid);
        if ( kill( 0, $pid ) == 1 ) {
            # process that owns lock still lives
            warn( 'lock-owner ', $pid, ' still lives...' ) if $main::DEBUG;
            undef;
        } else {
            # process that owns lock is gone?
            my $uniq = "$fn.$pid";
            warn( 'lock-owner ', $pid, ' found dead, removing lock!' );
            unlink($lock,$uniq);
        }
    }
}

sub create_lock($) {
    # purpose: blockingly wait for lock file creation
    # paramtr: $fn (IN): name of file to create lock file for
    # returns: 1: lock was created.
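    # Added note: the retry loop below backs off for up to five random
    # seconds per attempt, and after ten failed attempts calls break_lock()
    # to clear a lock whose owner process has died.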
my $fn = shift; my $retries = 0; while ( ! create_lockfile($fn) ) { break_lock($fn) if ( ++$retries > 10 ); my $towait = 5 * rand(); warn( "lock on $fn is busy, retry $retries, waiting ", sprintf("%.1f s...",$towait) ) if $main::DEBUG; Time::HiRes::sleep($towait); } warn( "obtained lock for $fn\n" ) if $main::DEBUG; 1; } sub delete_lock($) { # purpose: removes a lock file NFS-reliably # paramtr: $fn (IN): name of main file to lock # returns: 1 or 2 on success, undef on failure to unlock my $tolock = shift; my $result; if ( unlink("$tolock.$$") == 1 ) { delete $atexit{"$tolock.$$"}; $result++; } if ( unlink("$tolock.lock") == 1 ) { delete $atexit{"$tolock.lock"}; $result++; } warn( "released lock for $tolock\n" ) if $main::DEBUG; $result; } # # ctor # sub new { # purpose: Initialize an instance variable # paramtr: $filename (IN): path to intent database file # returns: reference to instance, or undef in case of failure my $proto = shift; my $class = ref($proto) || $proto || __PACKAGE__; my $filename = shift || croak "c'tor requires a filename argument"; # need to be able to mod/creat file if ( -e $filename ) { if ( ! -w _ ) { $! = 1; # EPERM return undef; } } else { if ( ! -w dirname($filename) ) { $! = 1; return undef; } } # should be ok now bless { 'm_filename' => $filename }, $class; } sub filename { # purpose: returns the name of the communication file my $self = shift; $self->{'m_filename'}; } sub dbtie { # purpose: Lock a file and tie it to a hash # paramtr: $href (IO): reference to hash to be tied # $ro (opt. IN): if true, open in read-only mode # returns: undef on error, underlying object otherwise no warnings; my $self = shift; my $href = shift; my $readonly = shift; # sanity check unless ( ref $href eq 'HASH' ) { $! = Errno::EINVAL; return undef; } my $fn = $self->filename; create_lock($fn) if ( $self->{'m_count'}{$$}++ == 0 ); my $result = $readonly ? tie( %{$href}, 'DB_File', $fn, O_RDONLY ) : tie( %{$href}, 'DB_File', $fn ); unless ( defined $result ) { # remove lock on failure to tie my $saverr = $!; delete_lock($fn) if ( --$self->{'m_count'}{$$} == 0 ); $! = $saverr; } $result; } sub locked { # purpose: detects already tied databases # returns: reference count for lock my $self = shift; $self->{'m_count'}{$$}+0; } sub dbuntie { # purpose: untie a hash and release the lock # paramtr: $href (IO): reference to hash to be untied # returns: - no warnings; my $self = shift; my $href = shift; # sanity check unless ( ref $href eq 'HASH' ) { $! = Errno::EINVAL; return undef; } untie %{$href}; delete_lock($self->filename) if ( --$self->{'m_count'}{$$} == 0 ); } sub clone { # purpose: obtains all current values into a copy # returns: a hash with key => value, may be empty my $self = shift; my (%db,%result); if ( $self->dbtie(\%db) ) { %result = ( %db ); $self->dbuntie(\%db); } %result; } sub inc { # purpose: increment the count for a site handle # paramtr: $key (IN): key of value to increment # $incr (opt. IN): increment, defaults to 1 # warning: If the value is not a simple integer, it will be afterwards. # returns: new value, undef on error my $self = shift; my $key = shift || croak "Need a database key"; my $incr = shift || 1; my (%db,$result); if ( $self->dbtie(\%db) ) { $result = ( $db{$key} += $incr ); $self->dbuntie(\%db); } $result; } sub dec { # purpose: decrement the count for a site handle # paramtr: $key (IN): key of value to decrement # $decr (opt. IN): decrement, defaults to 1 # warning: If the value is not a simple integer, it will be afterwards. 
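# (the numeric -= operation coerces whatever string was stored into a number)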
# returns: new value, undef in case of error my $self = shift; my $key = shift || croak "Need a database key"; my $decr = shift || 1; my (%db,$result); if ( $self->dbtie(\%db) ) { $result = ( $db{$key} -= $decr ); $self->dbuntie(\%db); } $result; } # # return 'true' to package loader # 1; __END__ =head1 NAME Site::Intent - provides NFS-safe locking around a BSD DB file. =head1 SYNOPSIS use Site::Intent; my $i = Site::Intent->new( 'db.file' ) || die; my %db; if ( $i->dbtie(\%db) ) { # work on %db $i->dbuntie(\%db); } else { # complain } my %copy = $i->clone(); =head1 DESCRIPTION The Site::Intent module coordinates intentions between multiple concurrent site-selector instances. For this reason, it provides access to a BSD DB file to record arbitrary scalar intentions into. The BSD file is locked using NFS-safe (or so it is hoped) file locks. =head1 METHODS =over 4 =item Site::Intent::create_lock($filename) This static function attempts to create a file lock around the specified filename according to Linux conventions. It first creates a unique file using the process id as unique suffix, then attempts to hardlink said file to the filename plus suffix C<.lock>. The attempt is randomly backed off to retry on failure to hardlink. Additionally, the link count is checked to detect hidden success. This is a blocking function, and may block indefinitely on dead-locks, despite occasional lock acquisition wake-ups. =item Site::Intent::delete_lock($filename) This static function deletes all lock files around the given filename. It should be a fast function, as no waiting is required. =item new( $filename ) The constructor records the filename as the BSD database file to either create or to connect to. If the file does not exist yet, it will not be created in the c'tor. However, some simple checks are employed to see if the file will be creatable and/or writable, should it not exist. =item filename is a simple accessor, returning the filename that was passed to the constructor. =item dbtie( $hashref ) This member increases the lock count for the database file, and ties the database file to the hash argument. The method is usually invoked with a reference to a (true) hash, e.g.: dbtie( \%hash ) If the first argument is not a reference to a hash, the call will fail with an undefined value, and $! is set to EINVAL. Failure to tie the file will remove the acquired lock. The return value is the result of the C<tie> call. It may be undefined in case of failure to tie the hash. =item locked returns the reference count for locks on the file. Reference counters are kept on a per-process basis. This is not thread safe. =item dbuntie( $hashref ) unties the hash reference and relinquishes the lock. This method should only be called, if the previous C<dbtie> operation was successful, similar to opening and closing file handles, e.g.: if ( $i->dbtie ... ) { ... $i->dbuntie ... } =item clone This is a comprehensive function to copy all values from the database into memory. Please note that you can create nasty dead-locks this way. =back =head1 WARNINGS Locking files can always lead to very nasty dead-locks, or worse, to WAR and WAW situations. You should obtain the tie-n-lock once, do B<all> your operations on the database, and then relinquish the lock. You should also attempt to hold the lock for the least possible time.
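Done correctly, a read-modify-write cycle stays inside a single C<dbtie> and C<dbuntie> pair. A minimal sketch, reusing the C<$i> instance from the synopsis: # GOOD EXAMPLE my %db; if ( $i->dbtie(\%db) ) { $db{X}++; # read and update under one lock $i->dbuntie(\%db); }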
The following is a B<bad example>: # BAD EXAMPLE my $i = Site::Intent->new('my.db'); my (%db,$n); if ( $i->dbtie(\%db) ) { $n = $db{X}; $i->dbuntie(\%db); } $n++; if ( $i->dbtie(\%db) ) { $db{X} = $n; $i->dbuntie(\%db); } The above example, in the presence of concurrent instances of the above program, will result in a write-after-write (WAW) conflict. Two processes start. The first reads, say, zero from C<$db{X}>, and then is time-sliced to the next one. That one happily reads C<$db{X}>, and again obtains a zero. It may even write the 1 back, before the time slice returns to the first process. Which now writes back a 1. What the algorithm most likely had intended, however, was getting a 2 at this point. I hope you see the subtle flaws, and the reasoning behind my recommendations: =over 4 =item 1 Try to do everything between one C<dbtie> and C<dbuntie> pair. =item 2 Keep your I<critical sections> short - computation wise. =back =head1 SEE ALSO L =head1 AUTHOR Jens-S. Vöckler, C =head1 COPYRIGHT AND LICENSE This file or a portion of this file is licensed under the terms of the Globus Toolkit Public License, found in file GTPL, or at http://www.globus.org/toolkit/download/license.html. This notice must appear in redistributions of this file, with or without modification. Redistributions of this Software, with or without modification, must reproduce the GTPL in: (1) the Software, or (2) the Documentation or some other similar material which is provided with the Software (if any). Copyright 1999-2004 University of Chicago and The University of Southern California. All rights reserved. =cut pegasus-wms_4.0.1+dfsg/lib/pegasus/php/0000755000175000017500000000000011757531667017050 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/php/gallery_type_header.php0000644000175000017500000000020011757531137023551 0ustar ryngerynge pegasus-wms_4.0.1+dfsg/lib/pegasus/php/gallery_footer.php0000644000175000017500000000003011757531137022557 0ustar ryngerynge pegasus-wms_4.0.1+dfsg/lib/pegasus/php/gallery_type_footer.php0000644000175000017500000000003611757531137023626 0ustar ryngerynge pegasus-wms_4.0.1+dfsg/lib/pegasus/php/gallery_header.php0000644000175000017500000000014511757531137022520 0ustar ryngerynge pegasus-wms_4.0.1+dfsg/lib/pegasus/python/0000755000175000017500000000000011757531667017602 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/0000755000175000017500000000000011757531667021211 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/tools/0000755000175000017500000000000011757531667022351 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/tools/properties.py0000644000175000017500000003022011757531137025104 0ustar ryngerynge""" properties.py This module reads Java properties for the GriPhyN Virtual Data System. It allows for command-line based overrides of properties using Java's -Dk=v syntax in Python by removing initial definitions from sys.argv during the module initialization. Therefore, it is recommended to use this module before parsing other command-line arguments. """ ## # Copyright 2007-2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision: 2012 $ import os import re import sys import time import logging import tempfile import pwd # Regular expressions re_remove_escapes = re.compile(r"\\(.)") re_parse_property = re.compile(r"([^:= \t]+)\s*[:=]?\s*(.*)") re_find_subs = re.compile(r"(\$\{([A-Za-z0-9._]+)\})") re_inline_comments = re.compile("#(.*)$") # not using it right now... system = {} # System properties initial = {} # Command-line properties # Get logger object (initialized elsewhere) logger = logging.getLogger() # Assemble system properties system["file.separator"] = ',' if "JAVA_HOME" in os.environ: system["java.home"] = os.environ["JAVA_HOME"] if "CLASSPATH" in os.environ: system["java.class.path"] = os.environ["CLASSPATH"] if "TMP" in os.environ: system["java.io.tmpdir"] = os.environ["TMP"] else: system["java.io.tmpdir"] = tempfile.gettempdir() system["os.name"] = os.uname()[0] system["os.version"] = os.uname()[2] system["os.arch"] = os.uname()[4] system["user.dir"] = os.getcwd() if "HOME" in os.environ: system["user.home"] = os.environ["HOME"] else: system["user.home"] = pwd.getpwuid(os.geteuid())[5] if "LANG" in os.environ: system["user.language"] = os.environ["LANG"] else: system["user.language"] = "en" if "USER" in os.environ: system["user.name"] = os.environ["USER"] elif "LOGNAME" in os.environ: system["user.name"] = os.environ["LOGNAME"] else: system["user.name"] = pwd.getpwuid(os.geteuid())[0] # Can be undefined if "TZ" in os.environ: system["user.timezone"] = os.environ["TZ"] # Useful, but not required if "PEGASUS_HOME" in os.environ: system["pegasus.home"] = os.environ["PEGASUS_HOME"] # Assemble command-line properties if len(sys.argv) > 0: # First parameter is program name, just skip it, and remove it for now... program_name = sys.argv.pop(0) while len(sys.argv) > 0 and sys.argv[0][:2] == "-D": my_arg = sys.argv.pop(0) if my_arg == "-D": # k, v must be in next parameter if len(sys.argv) > 0: # Make sure we have another parameter my_arg = sys.argv.pop(0) else: # No, let's put the "-D" back and leave this loop sys.argv.insert(0, my_arg) break else: # remove -D from this parameter before split my_arg = my_arg[2:] try: k, v = my_arg.split("=", 1) except: logger.info("cannot parse command-line option %s... continuing..." % (my_arg)) k = "" if len(k): if k == "pegasus.properties" or k == "pegasus.user.properties": logger.warn("%s is no longer supported, ignoring, please use --conf!" % (k)) else: logger.debug("parsed property %s..." % (my_arg)) initial[k] = v #print "key:value = %s:%s" % (k, v) # Re-insert program_name sys.argv.insert(0, program_name) # Merge the two, with command-line taking precedence over environmental variables system.update(initial) def parse_properties(fn, hashref={}): """ This functions parses properties from a file """ # fn is the filename of the property file to read # hashref contains more properties for substitution (optional) # global system contains yet more properties for substitution # returns a map of properties, possibly empty my_result = {} my_save = '' try: my_file = open(fn, 'r') except: # Error opening file logger.warn("Could not open %s!" % (fn)) return my_result logger.debug("# parsing properties in %s..." 
% (fn)) for line in my_file: line = line.strip(" \t") # Remove leading and trailing spaces, tabs if line.startswith('!') or line.startswith('#'): # Skip comments continue line = line.rstrip("\n\r") # Remove new lines, if any # line = re_inline_comments.sub('', line) # Remove inline comments using regular expressions line = line.split('#')[0] # Remove inline comments line = re_remove_escapes.sub(r"\1", line) # replace Java properties escaped special characters #!=: line = line.strip() # Remove all starting and trailing whitespaces # Skip empty lines if len(line) == 0: continue if line[-1] == '\\': # Continuation line line = line[:-1] my_save += line else: # Regular line if my_save != "": # Append current line to previous line(s) and process my_save += line line = my_save my_save = "" logger.debug("#Property being parsed is # %s" % (line)) # Try to parse property my_res = re_parse_property.search(line) if my_res: # Parse successful k = my_res.group(1) v = my_res.group(2) logger.debug("#Property being stored is # %s ==> %s" % (k, v)) # Substitutions (look up the bare key name, group 2 of re_find_subs) subs = re_find_subs.search(v) while subs: if subs.group(2) in hashref: my_newval = hashref[subs.group(2)] elif subs.group(2) in system: my_newval = system[subs.group(2)] else: my_newval = '' # Make substitution new_v = v[:subs.start(1)] new_v += my_newval new_v += v[subs.end(1):] v = new_v # Search again, and loop subs = re_find_subs.search(v) # Insert key, value into my_result my_result[k] = v else: logger.fatal("Illegal content in %s: %s" % (fn, line)) sys.exit(1) my_file.close() return my_result class Properties: def __init__(self): # Initialize class variables self.m_config = {} def new(self, config_file=None, rundir_propfile=None): """ Initialize instance variables, processing the appropriate properties file. config_file is the properties file passed via the --conf command-line option, it has the highest priority. rundir_propfile is the properties file in the run directory (specified in the braindump.txt file with the properties tag). It has the second highest priority. If those are not specified, we try to load $(HOME)/.pegasusrc as a last resort. """ my_config = {} my_already_loaded = False # First, try config_file, highest priority if config_file is not None: if os.path.isfile(config_file) and os.access(config_file, os.R_OK): logger.info("processing properties file %s..." % (config_file)) my_config.update(parse_properties(config_file)) my_already_loaded = True else: logger.warn("cannot access properties file %s... continuing..." % (config_file)) # Second, try rundir_propfile if not my_already_loaded and rundir_propfile is not None: if os.path.isfile(rundir_propfile) and os.access(rundir_propfile, os.R_OK): logger.info("processing properties file %s... " % (rundir_propfile)) my_config.update(parse_properties(rundir_propfile)) my_already_loaded = True else: logger.warn("cannot access properties file %s... continuing..." % (rundir_propfile)) # Last chance, look for $(HOME)/.pegasusrc if not my_already_loaded: if "user.home" in system: my_user_propfile = os.path.join(system["user.home"], ".pegasusrc") if os.path.isfile(my_user_propfile) and os.access(my_user_propfile, os.R_OK): logger.info("processing properties file %s...
" % (my_user_propfile)) my_config.update(parse_properties(my_user_propfile)) my_already_loaded = True else: # No need to complain about this pass if not my_already_loaded: logger.warn("no properties file parsed whatsoever!") # Keep ordering of config before initial so that the -D CLI # properties can override any other properties self.m_config = my_config self.m_config.update(initial) def property(self, key, val=None): """ Get and set a property Param: key is the property name to access Param: val is not None is the value to set the key Return: in get mode, the current value (None if not found) Return: in set mode, the old value (None if it didn't exist before) """ my_old_val = None if key in self.m_config: my_old_val = self.m_config[key] if val is not None: self.m_config[key] = val return my_old_val def keyset(self, predicate=None): """ Finds a subset of keys that matches a predicate Param: predicate is what we match against Return: set of keys that match the predicate, or all keys if no predicate """ if predicate is None: return self.m_config.keys() my_set = [] for my_key in self.m_config.keys(): if re.match(predicate, my_key): my_set.append(my_key) return my_set def propertyset(self, prefix, remove=False): """ Finds a subset of keys that match a prefix Param: prefix to compare keys against Param: remove if True, remove prefix Return: Dictionary containing the matching results """ my_result = {} for my_key in self.m_config.keys(): # Check if it begins with prefix if my_key.startswith(prefix): if remove: # Remove prefix from my_key my_newkey = my_key[len(prefix):] else: # Keep my_key as it is my_newkey = my_key if len(my_newkey) > 0: # Only copy if my_newkey is not empty my_result[my_newkey] = self.m_config[key] return my_result def dump(self, fn='-'): """ Prints the key set in property format Param: fn is the name of the file to print to, defaults to stdout Return: number of things printed, None if error """ my_count = 0 if fn == '-': my_file = sys.stdout else: try: my_file = open(fn, 'w') except: logger.warn("error opening %s !" % (fn)) return None # Add header my_file.write("# generated %s\n" % (time.asctime())) for my_key in self.m_config: # Write entry my_file.write("%s=%s\n" % (my_key, self.m_config[my_key])) my_count = my_count + 1 # Close file if not stdout if fn != '-': my_file.close() return my_count if __name__ == "__main__": a = Properties() a.new() print "testing finished!" pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/tools/kickstart_parser.py0000755000175000017500000005032311757531137026274 0ustar ryngerynge#!/usr/bin/env python """ Pegasus utility functions for pasing a kickstart output file and return wanted information """ ## # Copyright 2007-2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
## # Revision : $Revision: 2012 $ # Import Python modules from xml.parsers import expat import re import sys import logging import traceback # Regular expressions used in the kickstart parser re_parse_props = re.compile(r'(\S+)\s*=\s*([^",]+)') re_parse_quoted_props = re.compile(r'(\S+)\s*=\s*"([^"]+)"') # Get logger object (initialized elsewhere) logger = logging.getLogger() class Parser: """ This class is used to parse a kickstart output file, and return requested information. """ def __init__(self, filename): """ This function initializes the Parser class with the kickstart output file that should be parsed. """ self._kickstart_output_file = filename self._parsing_job_element = False self._parsing_arguments = False self._parsing_main_job = False self._parsing_machine = False self._parsing_stdout = False self._parsing_stderr = False self._parsing_data = False self._parsing_cwd = False self._line_number = 0 self._arguments = "" self._stdout = "" self._stderr = "" self._cwd = "" self._keys = {} self._ks_elements = {} self._fh = None self._open_error = False def open(self): """ This function opens a kickstart output file. """ try: self._fh = open(self._kickstart_output_file) except: # Error opening file self._fh = None self._open_error = True return False # Open succeeded self._open_error = False return True def close(self): """ This function closes the kickstart output file. """ try: self._fh.close() except: return False return True def read_record(self): """ This function reads an invocation record from the kickstart output file. We also look for the struct at the end of a file containing multiple records. It returns a string containing the record, or None if it is not found. """ buffer = "" # First, we find the beginning of an <invocation> record line = self._fh.readline() if line == '': # End of file, record not found return None if line.find("<invocation") >= 0: # Found invocation record start = line.find("<invocation") buffer = line[start:] end = buffer.find("</invocation>") # Check if we have everything in a single line if end >= 0: end = end + len("</invocation>") return buffer[:end] elif line.find("[seqexec-summary") >= 0: # Found line with cluster jobs summary start = line.find("[seqexec-summary") buffer = line[start:] end = buffer.find("]") if end >= 0: end = end + len("]") return buffer[:end] # clustered record should be in a single line! logger.warning("%s: seqexec-summary line is malformed... ignoring it..." % (self._kickstart_output_file)) return "" elif line.find("[seqexec-task") >= 0: # Found line with task information start = line.find("[seqexec-task") buffer = line[start:] end = buffer.find("]") if end >= 0: end = end + len("]") return buffer[:end] # task record should be in a single line! logger.warning("%s: seqexec-task line is malformed... ignoring it..." % (self._kickstart_output_file)) return "" else: return "" # Ok, now continue reading the file until we get a full record while True: line = self._fh.readline() if line == '': # End of file, record not found return None buffer = buffer + line if buffer.find("</invocation>") > 0: break # Now, we got it, let's make sure end = buffer.find("</invocation>") if end == -1: return "" end = end + len("</invocation>") return buffer[:end] def is_invocation_record(self, buffer=''): """ Returns True if buffer contains an invocation record.
""" if buffer.find(" self._line_number: # Yes, we are in a new line, add newline to our _stdout self._stdout = self._stdout + "\n" + data else: # No, still on the same line, this is probably an XML substitution self._stdout = self._stdout + data # Track line number self._line_number = self._my_parser.CurrentLineNumber # Capture stderr if self._parsing_stderr == True and self._parsing_data == True: # If empty, just copy if self._stderr == "": self._stderr = data else: # If we already have something, let's check if we need to add a newline if self._my_parser.CurrentLineNumber > self._line_number: # Yes, we are in a new line, add newline to our _stdout self._stderr = self._stderr + "\n" + data else: # No, still on the same line, this is probably an XML substitution self._stderr = self._stderr + data # Track line number self._line_number = self._my_parser.CurrentLineNumber def parse_invocation_record(self, buffer=''): """ Parses the xml record in buffer, returning the desired keys. """ # Initialize variables self._parsing_arguments = False self._parsing_main_job = False self._parsing_machine = False self._parsing_stdout = False self._parsing_stderr = False self._parsing_data = False self._parsing_cwd = False self._line_number = 0 self._arguments = "" self._stdout = "" self._stderr = "" self._cwd = "" self._keys = {} # Check if we have an invocation record if self.is_invocation_record(buffer) == False: return self._keys # Add invocation key to our response self._keys["invocation"] = True # Prepend XML header buffer = '\n' + buffer # Create parser self._my_parser = expat.ParserCreate() self._my_parser.StartElementHandler = self.start_element self._my_parser.EndElementHandler = self.end_element self._my_parser.CharacterDataHandler = self.char_data # Parse everything! output = self._my_parser.Parse(buffer) # Add cwd, arguments, stdout, and stderr to keys if "cwd" in self._ks_elements: self._keys["cwd"] = self._cwd if "argument-vector" in self._ks_elements: self._keys["argument-vector"] = self._arguments if "stdout" in self._ks_elements: self._keys["stdout"] = self._stdout if "stderr" in self._ks_elements: self._keys["stderr"] = self._stderr return self._keys def parse_clustered_record(self, buffer=''): """ Parses the clustered record in buffer, returning all found keys """ self._keys = {} # Check if we have an invocation record if self.is_clustered_record(buffer) == False: return self._keys # Add clustered key to our response self._keys["clustered"] = True # Parse all quoted properties for my_key, my_val in re_parse_quoted_props.findall(buffer): self._keys[my_key] = my_val # And add unquoted properties as well for my_key, my_val in re_parse_props.findall(buffer): self._keys[my_key] = my_val return self._keys def parse_task_record(self, buffer=''): """ Parses the task record in buffer, returning all found keys """ self._keys = {} # Check if we have an invocation record if self.is_task_record(buffer) == False: return self._keys # Add task key to our response self._keys["task"] = True # Parse all quoted properties for my_key, my_val in re_parse_quoted_props.findall(buffer): self._keys[my_key] = my_val # And add unquoted properties as well for my_key, my_val in re_parse_props.findall(buffer): self._keys[my_key] = my_val return self._keys def parse(self, keys_dict, tasks=True, clustered=True): """ This function parses the kickstart output file, looking for the keys specified in the keys_dict variable. It returns a list of dictionaries containing the found keys. 
Look at the parse_stampede function for details about how to pass keys using the keys_dict structure. The function will return an empty list if no records are found or if an error happens. """ my_reply = [] # Place keys_dict in the _ks_elements self._ks_elements = keys_dict # Try to open the file if self.open() == False: return my_reply # Read first record my_buffer = self.read_record() # Loop while we still have record to read while my_buffer is not None: if self.is_invocation_record(my_buffer) == True: # We have an invocation record, parse it! try: my_record = self.parse_invocation_record(my_buffer) except: logger.warning("KICKSTART-PARSE-ERROR --> error parsing invocation record in file %s" % (self._kickstart_output_file)) logger.warning(traceback.format_exc()) # Found error parsing this file, return empty reply my_reply = [] # Finish the loop break my_reply.append(my_record) elif self.is_clustered_record(my_buffer) == True: # Check if we want clustered records too if clustered: # Clustered records are seqexec summary records for clustered jobs # We have a clustered record, parse it! my_reply.append(self.parse_clustered_record(my_buffer)) elif self.is_task_record(my_buffer) == True: # Check if we want task records too if tasks: # We have a task record, parse it! my_reply.append(self.parse_task_record(my_buffer)) else: # We have something else, this shouldn't happen! # Just skip it pass # Read next record my_buffer = self.read_record() # Lastly, close the file self.close() return my_reply def parse_stampede(self): """ This function works similarly to the parse function above, but does not require a keys_dict parameter as it uses a built-in list of keys specifically used in the Stampede schema. """ stampede_elements = {"invocation": ["hostname", "resource", "user", "hostaddr", "transformation", "derivation"], "mainjob": ["duration", "start"], "usage": ["utime", "stime"], "ram": ["total"], "uname": ["system", "release", "machine"], "file": ["name"], "status": ["raw"], "regular": ["exitcode"], "argument-vector": [], "cwd": [], "stdout": [], "stderr": []} return self.parse(stampede_elements, tasks=True, clustered=True) def parse_stdout_stderr(self): """ This function extracts the stdout and stderr from a kickstart output file. It returns an array containing the output for each task in a job. """ stdout_stderr_elements = {"invocation": ["hostname", "resource", "derivation", "transformation"], "file": ["name"], "regular": ["exitcode"], "failure": ["error"], "argument-vector": [], "cwd": [], "stdout": [], "stderr": []} return self.parse(stdout_stderr_elements, tasks=False, clustered=False) if __name__ == "__main__": # Let's run a test! print "Testing kickstart output file parsing..." # Make sure we have an argument if len(sys.argv) < 2: print "For testing, please give a kickstart output filename!" sys.exit(1) # Create parser class p = Parser(sys.argv[1]) # Parse file according to the Stampede schema output = p.parse_stampede() # Print output for record in output: print record pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/tools/db_utils.py0000644000175000017500000001013311757531137024516 0ustar ryngerynge""" db_utils.py: Provides database related functions used by several monitoring tools """ ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision: 2012 $ import os import sys import logging from Pegasus.tools import properties from Pegasus.tools import utils # Get logger object (initialized elsewhere) logger = logging.getLogger() def get_db_url_wf_uuid(submit_dir, config_properties, top_dir=None): """ Utility method for returning the db_url and wf_uuid given the submit_dir and pegasus properties file. @submit_dir submit directory path @config_properties config properties file path @top_dir directory of the top-level workflow (where the database is) """ # From the submit dir, we need the wf_uuid # Getting values from the submit_dir braindump file top_level_wf_params = utils.slurp_braindb(submit_dir) # Return if we cannot parse the braindump.txt file if not top_level_wf_params: logger.error("Unable to process braindump.txt in %s" % (submit_dir)) return None, None # Get wf_uuid for this workflow wf_uuid = None if (top_level_wf_params.has_key('wf_uuid')): wf_uuid = top_level_wf_params['wf_uuid'] else: logger.error("workflow id cannot be found in the braindump.txt ") return None, None # Load the top-level braindump now if top_dir is not None if top_dir is not None: # Getting values from the top_dir braindump file top_level_wf_params = utils.slurp_braindb(top_dir) # Return if we cannot parse the braindump.txt file if not top_level_wf_params: logger.error("Unable to process braindump.txt in %s" % (top_dir)) return None, None # Get the location of the properties file from braindump top_level_prop_file = None # Get properties tag from braindump if "properties" in top_level_wf_params: top_level_prop_file = top_level_wf_params["properties"] # Create the full path by using the submit_dir key from braindump if "submit_dir" in top_level_wf_params: top_level_prop_file = os.path.join(top_level_wf_params["submit_dir"], top_level_prop_file) # Parse, and process properties props = properties.Properties() props.new(config_file=config_properties, rundir_propfile=top_level_prop_file) # Ok, now figure out the database URL output_db_url = None if props.property('pegasus.monitord.output') is not None: output_db_url = props.property('pegasus.monitord.output') # Return, if not using sqlite or mysql if not (output_db_url.startswith("mysql:") or output_db_url.startswith("sqlite:")): logger.error("Unable to find database file from the properties file ") return None, None else: # Ok, the default case is a .stampede.db file with the dag name as base dag_file_name = '' if (top_level_wf_params.has_key('dag')): dag_file_name = top_level_wf_params['dag'] else: logger.error("dag file name cannot be found in the braindump.txt") return None, None # Create the sqllite db url output_db_file = (top_dir or submit_dir) + "/" + dag_file_name[:dag_file_name.find(".dag")] + ".stampede.db" output_db_url = "sqlite:///" + output_db_file if not os.path.isfile(output_db_file): logger.error("Unable to find database file in " + (top_dir or submit_dir)) return None, None # Ok, all done! 
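# Example (hypothetical paths): for dag file wf-0.dag under /run/dir, output_db_url # ends up as sqlite:////run/dir/wf-0.stampede.db, unless pegasus.monitord.output # supplied a mysql: or sqlite: URL above.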
return output_db_url, wf_uuid if __name__ == "__main__": pass pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/tools/utils.py0000644000175000017500000005336211757531137024064 0ustar ryngerynge""" utils.py: Provides common functions used by all workflow programs """ ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision: 2012 $ import re import os import sys import time import errno import shutil import logging import calendar import commands import datetime import traceback import subprocess # The unquote routine comes from urllib from urllib import unquote __all__ = ['quote', 'unquote'] _mapping = {} # Initialize _mapping for i, c in zip(xrange(256), ''.join([chr(x) for x in xrange(256)])): if (i >= 32 and i < 127 and c not in '"%\''): _mapping[c] = c else: _mapping[c] = ('%%%02X'%i) del i; del c # Compile our regular expressions # Used in epochdate parse_iso8601 = re.compile(r'(\d{4})-?(\d{2})-?(\d{2})[ tT]?(\d{2}):?(\d{2}):?(\d{2})([.,]\d+)?([zZ]|[-+](\d{2}):?(\d{2}))') # Used in out2log re_remove_extensions = re.compile(r"(?:\.(?:rescue|dag))+$") # Module variables MAXLOGFILE = 1000 # For log rotation, check files from .000 to .999 jobbase = "jobstate.log" # Default name for jobstate.log file brainbase = "braindump.txt" # Default name for workflow information file # Get logger object (initialized elsewhere) logger = logging.getLogger() def quote(s): """ Encodes a string using a partial URL encoding. The encoding replaces the following elements with their URL-encoded equivalents: 1. Non-printing and control characters (characters < 0x20 and 0x7F [DEL]) 2. Extended ASCII characters (characters >= 0x80) 3. 
Percent (0x25), quote (0x27), and double quote (0x22) """ return ''.join(map(_mapping.__getitem__, s)) def isodate(now=int(time.time()), utc=False, short=False): """ This function converts seconds since epoch into ISO 8601 timestamp """ my_time_u = time.gmtime(now) if utc: if short: return time.strftime("%Y%m%dT%H%M%SZ", my_time_u) else: return time.strftime("%Y-%m-%dT%H:%M:%SZ", my_time_u) else: my_time_l = time.localtime(now) my_offset = int( time.mktime(my_time_l) - time.mktime(my_time_u) ) offset = "%+03d%02d" % ( my_offset / 3600, (abs(my_offset) % 3600) / 60) if short: return time.strftime("%Y%m%dT%H%M%S", my_time_l) + offset else: return time.strftime("%Y-%m-%dT%H:%M:%S", my_time_l) + offset def epochdate(timestamp): """ This function converts an ISO timestamp into seconds since epoch """ try: # Split date/time and timezone information m = parse_iso8601.search(timestamp) if m is None: logger.warn("unable to match \"%s\" to ISO 8601" % timestamp) return None else: dt = "%04d-%02d-%02d %02d:%02d:%02d" % (int(m.group(1)), int(m.group(2)), int(m.group(3)), int(m.group(4)), int(m.group(5)), int(m.group(6))) tz = m.group(8) # my_time = datetime.datetime.strptime(dt, "%Y-%m-%d %H:%M:%S") my_time = datetime.datetime(*(time.strptime(dt, "%Y-%m-%d %H:%M:%S")[0:6])) if tz.upper() != 'Z': # no zulu time, has zone offset my_offset = datetime.timedelta(hours=int(m.group(9)), minutes=int(m.group(10))) # adjust for time zone offset if tz[0] == '-': my_time = my_time + my_offset else: my_time = my_time - my_offset # Turn my_time into Epoch format return int(calendar.timegm(my_time.timetuple())) except: logger.warn("unable to parse timestamp \"%s\"" % timestamp) return None def create_directory(dir_name, delete_if_exists=False): """ Utility method for creating directory @param dir_name the directory path @param delete_if_exists specifies whether to delete the directory if it exists """ if delete_if_exists: if os.path.isdir(dir_name): logger.warning("Deleting existing directory. Deleting... " + dir_name) try: shutil.rmtree(dir_name) except: logger.error("Unable to remove existing directory." + dir_name) sys.exit(1) if not os.path.isdir(dir_name): logger.info("Creating directory... " + dir_name) try: os.mkdir(dir_name) except OSError: logger.error("Unable to create directory." + dir_name) sys.exit(1) def find_exec(program, curdir=False, otherdirs=[]): """ Determine logical location of a given binary in PATH """ # program is the executable basename to look for # When curdir is True we also check the current directory # Returns fully qualified path to binary, None if not found my_path = os.getenv("PATH","/bin:/usr/bin") for my_dir in my_path.split(':')+otherdirs: my_file = os.path.join(os.path.expanduser(my_dir), program) # Test if file is 'executable' if os.access(my_file, os.X_OK): # Found it! return my_file if curdir: my_file = os.path.join(os.getcwd(), program) # Test if file is 'executable' if os.access(my_file, os.X_OK): # Yes! return my_file # File not found return None def pipe_out_cmd(cmd_string): """ Runs a command and captures stderr and stdout. 
Warning: Do not use shell meta characters Params: argument string, executable first Returns: All lines of output """ my_result = [] # Launch process using the subprocess module interface try: proc = subprocess.Popen(cmd_string.split(), shell=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=1) except: # Error running command return None # Wait for it to finish, capturing output resp = proc.communicate() # Capture stdout for line in resp[0].split('\n'): if len(line): my_result.append(line) # Capture stderr for line in resp[1].split('\n'): if len(line): my_result.append(line) return my_result def add_to_braindb(run, missing_keys, brain_alternate=None): """ This function adds to the braindump database the missing keys from missing_keys. """ my_config = {} if brain_alternate is None: my_braindb = os.path.join(run, brainbase) else: my_braindb = os.path.join(run, brain_alternate) try: my_file = open(my_braindb, 'a') except IOError: return try: for key in missing_keys: my_file.write("%s %s\n" % (str(key), str(missing_keys[key]))) except IOError: # Nothing to do... pass try: my_file.close() except IOError: pass def slurp_braindb(run, brain_alternate=None): """ Reads extra configuration from braindump database Param: run is the run directory Returns: Dictionary with the configuration, empty if error """ my_config = {} if brain_alternate is None: my_braindb = os.path.join(run, brainbase) else: my_braindb = os.path.join(run, brain_alternate) try: my_file = open(my_braindb, 'r') except IOError: # Error opening file return my_config for line in my_file: # Remove \r and/or \n from the end of the line line = line.rstrip("\r\n") # Split the line into a key and a value k, v = line.split(" ", 1) if k == "run" and v != run and run != '.': logger.warn("run directory mismatch, using %s" % (run)) my_config[k] = run else: # Remove leading and trailing whitespaces from value v = v.strip() my_config[k] = v # Close file my_file.close() # Done! logger.debug("# slurped %s" % (my_braindb)) return my_config def version(): """ Obtains Pegasus version """ my_output = commands.getstatusoutput("pegasus-version") return my_output[1] def raw_to_regular(exitcode): """ This function decodes the raw exitcode into a plain format: For a regular exitcode, it returns a value between 0 and 127; For signals, it returns the negative signal number (-1 through -127) For failures (when exitcode < 0), it returns the special value -128 """ if not type(exitcode) is int and not type(exitcode) is long : logger.warning("exitcode not an integer!") return exitcode if exitcode < 0: return -128 if (exitcode & 127) > 0: # Signal return -(exitcode & 127) return (exitcode >> 8) def regular_to_raw(exitcode): """ This function encodes a regular exitcode into a raw exitcode. 
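For example, a regular exitcode of 1 encodes to the raw value 256 (1 << 8), while the special failure marker -128 maps back to a raw -1.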
""" if not type(exitcode) is int and not type(exitcode) is long : logger.warning("exitcode not an integer!") return exitcode if exitcode == -128: return -1 return (exitcode << 8) def parse_exit(ec): """ Parses an exit code any way possible Returns a string that shows what went wrong """ if (ec & 127) > 0: my_signo = ec & 127 my_core = '' if (ec & 128) == 128: my_core = " (core)" my_result = "died on signal %s%s" % (my_signo, my_core) elif (ec >> 8) > 0: my_result = "exit code %d" % ((ec >> 8)) else: my_result = "OK" return my_result def check_rescue(directory, dag): """ Check for the existence of (multiple levels of) rescue DAGs Param: directory is the directory to check for the presence of rescue DAGs Param: dag is the filename of a regular DAG file Returns: List of rescue DAGs (which may be empty if none found) """ my_base = os.path.basename(dag) my_result = [] try: my_files = os.listdir(directory) except OSError: return my_result for my_file in my_files: # Add file to the list if pegasus-planned DAGs that have a rescue DAG if my_file.startswith(my_base) and my_file.endswith(".rescue"): my_result.append(os.path.join(directory, my_file)) # Sort list my_result.sort() return my_result def out2log(rundir, outfile): """ purpose: infer output symlink for Condor common user log paramtr: rundir (IN): the run directory paramtr: outfile (IN): the name of the out file we use returns: the name of the log file to use """ # Get the basename my_base = os.path.basename(outfile) # NEW: Account for rescue DAGs my_base = my_base[:my_base.find(".dagman.out")] my_base = re_remove_extensions.sub('', my_base) # Add .log extension my_base = my_base + ".log" # Create path my_log = os.path.join(rundir, my_base) return my_log, my_base def write_pid_file(pid_filename, ts=int(time.time())): """ This function writes a pid file with name 'filename' containing the current pid and timestamp. """ try: PIDFILE = open(pid_filename, "w") PIDFILE.write("pid %s\n" % (os.getpid())) PIDFILE.write("timestamp %s\n" % (isodate(ts))) except IOError: logger.error("cannot write PID file %s" % (pid_filename)) else: PIDFILE.close() def pid_running(filename): """ This function takes a file containing a single line in the format of pid 'xxxxxx'. If the file exists, it reads the line and checks if the process id 'xxxxxx' is still running. The function returns True if the process is still running, or False if not. """ # First, we check if file exists if os.access(filename, os.F_OK): try: # Open pid file PIDFILE = open(filename, 'r') # Look for pid line for line in PIDFILE: line = line.strip() if line.startswith("pid"): # Get pid my_pid = int(line.split(" ")[1]) # We are done with this file, just close it... PIDFILE.close() # Now let's see if process still around... try: os.kill(my_pid, 0) except OSError, err: if err.errno == errno.ESRCH: # pid is not found, monitoring cannot be running logger.info("pid %d not running anymore..." % (my_pid)) return False elif err.errno == errno.EPERM: # pid cannot be killed because we don't have permission logger.debug("no permission to talk to pid %d..." % (my_pid)) return True else: logger.warning("unknown error while sending signal to pid %d" % (my_pid)) logger.warning(traceback.format_exc()) return True except: logger.warning("unknown error while sending signal to pid %d" % (my_pid)) logger.warning(traceback.format_exc()) return True else: logger.debug("pid %d still running..." % (my_pid)) return True logger.warning("could not find pid line in file %s. continuing..." 
% (filename)) # Don't forget to close file PIDFILE.close() except: logger.warning("error processing file %s. continuing..." % (filename)) logger.warning(traceback.format_exc()) return True # PID file doesn't exist return False def monitoring_running(run_dir): """ This function takes a run directory and returns true if it appears that pegasus-monitord is still running, or false if it has finished (or perhaps it was never started). """ start_file = os.path.join(run_dir, "monitord.started") done_file = os.path.join(run_dir, "monitord.done") # If monitord finished, it is not running anymore if os.access(done_file, os.F_OK): return False # If monitord started, it is (possibly) still running if os.access(start_file, os.F_OK): # Let's check return pid_running(start_file) # Otherwise, monitord was never executed (so it is not running right now...) return False def loading_completed(run_dir): """ This function examines a run directory and returns True if all events were successfully processed by pegasus-monitord. """ # Loading is never completed if monitoring is still running if monitoring_running(run_dir) == True: return False start_file = os.path.join(run_dir, "monitord.started") done_file = os.path.join(run_dir, "monitord.done") log_file = os.path.join(run_dir, "monitord.log") # Both started and done files need to exist... if (not os.access(start_file, os.F_OK)) or (not os.access(done_file, os.F_OK)): return False # Check monitord.log for loading errors... if os.access(log_file, os.F_OK): try: LOG = open(log_file, "r") for line in LOG: if line.find("NL-LOAD-ERROR -->") > 0: # Found loading error... event processing was not completed LOG.close() return False if line.find("KICKSTART-PARSE-ERROR -->") > 0: # Found kickstart parsing error... data not fully loaded LOG.close() return False if line.find("cannot create events output... disabling event output") > 0: # Found loader initialization error... data not loaded LOG.close() return False LOG.close() except IOError: logger.warning("could not process log file: %s" % (log_file)) # Otherwise, return true return True def rotate_log_file(source_file): """ This function rotates the specified logfile. """ # First we check if we have the log file if not os.access(source_file, os.F_OK): # File doesn't exist, we don't have to rotate return # Now we need to find the latest log file # We start from .000 sf = 0 while (sf < MAXLOGFILE): dest_file = source_file + ".%03d" % (sf) if os.access(dest_file, os.F_OK): # Continue to the next one sf = sf + 1 else: break # Safety check to see if we have reached the maximum number of log files if sf >= MAXLOGFILE: logger.error("%s exists, cannot rotate log file anymore!" % (dest_file)) sys.exit(1) # Now that we have source_file and dest_file, try to rotate the logs try: os.rename(source_file, dest_file) except OSError: logger.error("cannot rename %s to %s" % (source_file, dest_file)) sys.exit(1) # Done! 
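# (the rotated log lives on as source_file.NNN; the caller may now recreate source_file)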
return def log10(val): """ Equivalent to ceil(log(val) / log(10)) """ result = 0 while val > 1: result = result + 1 val = val / 10 if result: return result return 1 def make_boolean(value): # purpose: convert an input string into something boolean # paramtr: $x (IN): a property value # returns: 0 (false) or 1 (true) my_val = str(value) if (my_val.lower() == 'true' or my_val.lower() == 'on' or my_val.lower() == 'yes' or my_val.isdigit() and int(value) > 0): return 1 return 0 def daemonize(stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'): ''' Fork the current process as a daemon, redirecting standard file descriptors (by default, redirects them to /dev/null). This function was adapted from O'Reilly's Python Cookbook 2nd Edition. ''' # Perform first fork. try: pid = os.fork() if pid > 0: sys.exit(0) # Exit first parent. except OSError, e: sys.stderr.write("fork #1 failed: (%d) %s\n" % (e.errno, e.strerror)) sys.exit(1) # Decouple from parent environment. os.chdir("/") os.umask(0002) # Be permisive os.setsid() # Perform second fork. try: pid = os.fork() if pid > 0: sys.exit(0) # Exit second parent. except OSError, e: sys.stderr.write("fork #2 failed: (%d) %s\n" % (e.errno, e.strerror)) sys.exit(1) # The process is now daemonized, redirect standard file descriptors. for f in sys.stdout, sys.stderr: f.flush() si = file(stdin, 'r') so = file(stdout, 'w', 0) se = file(stderr, 'w', 0) os.dup2(si.fileno(), sys.stdin.fileno()) os.dup2(so.fileno(), sys.stdout.fileno()) os.dup2(se.fileno(), sys.stderr.fileno()) def keep_foreground(): """ This function turns the program into almost a daemon, but keep in foreground for Condor. It does not take any parameters and does not return anything. """ # Go to a safe place that is not susceptible to sudden umounts # FIX THIS: It may break some things try: os.chdir('/') except OSError: logger.critical("could not chdir!") sys.exit(1) # Although we cannot set sid, we can still become process group leader try: os.setpgid(0, 0) except OSError: logger.critical("could not setpgid!") sys.exit(1) if __name__ == "__main__": current_time = int(time.time()) print "Testing isodate() function from now=%lu" % (current_time) print " long local timestamp:", isodate(now=current_time) print " long utc timestamp:", isodate(now=current_time, utc=True) print "short local timestamp:", isodate(now=current_time, short=True) print " short utc timestamp:", isodate(now=current_time, utc=True, short=True) print print "Testing epochdate() function from above ISO dates" print " long local epochdate:", epochdate(isodate(now=current_time)) print " long utc epochdate:", epochdate(isodate(now=current_time, utc=True)) print "short local timestamp:", epochdate(isodate(now=current_time, short=True)) print " short utc timestamp:", epochdate(isodate(now=current_time, utc=True, short=True)) print print "Testing find exec" print "Looking for ls...", find_exec('ls') print "Looking for test.pl...", find_exec('test.pl', True) print "Monitord 1", find_exec("pegasus-mointord") print "Monitord 2", find_exec(program="pegasus-monitord",otherdirs=["/usr/local/pegasus/src/4.0-branch/bin","/usr/local/pegasus"]) print print "Testing parse_exit() function" print "ec = 5 ==> ", parse_exit(5) print "ec = 129 ==> ", parse_exit(129) print print "Testing log10() function" print "log10(10):", log10(10) print "log10(100.2):", log10(100.2) print version() print slurp_braindb(".") print pipe_out_cmd('ls -lR') print print "Testing quote/unquote functions..." 
print repr(str(bytearray(xrange(256)))) print quote(str(bytearray(xrange(256)))) print unquote("carriage return: %0Apercent: %25%0Aquote: %27%0Adouble quote: %22") print print pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/tools/filelock.py0000644000175000017500000003316211757531137024510 0ustar ryngerynge""" filelock.py: Provides NFS-safe locking around a DB File. """ ## # Copyright 2007-2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision: 2012 $ import os.path import time import random import anydbm import atexit import logging # Keep list of files to delete at_exit = {} def intent_exit_handler(): # Cleanup keys when exiting for key in at_exit: try: logger.debug("unlinking %s" % (key)) os.unlink(key) except: logger.debug("error unlinking %s" % (key)) # Get logger object (initialized elsewhere) logger = logging.getLogger() class Intent: """ The Intent class coordinates intentions between multiple concurrent site-selector instances. For this reason, it provides access to a DB file to record arbitrary scalar intentions into. The file is locked using NFS-safe (so is hoped) file locks. """ def __init__(self): self.m_filename = None self.m_pid = os.getpid() self.m_count = {} def new(self, fn): """ This function records the filename as the database file to either create or to connect to. If the file does not exist yet, it will not be created in the initialization function. However, some simple checks are employed to see, if the file will be creatable and/or writable, should it not exist. """ # purpose: Initialize class # paramtr: $fn (IN): path to intent database file if os.path.isfile(fn): # File already exists if os.access(fn, os.R_OK) and os.access(fn, os.W_OK): # Good! self.m_filename = fn return True else: # Cannot read/write to file return False else: # File does not exist yet dir = os.path.dirname(fn) # Check if directory exists and is writable if os.path.exists(dir) and os.access(dir, os.W_OK): # Everything looks good! 
self.m_filename = fn return True # Failed return False def create_lockfile(self, fn): # Create a lock file NFS-reliably # warning: use create_lock, not this function # paramtr: $fn (IN): name of main file to lock # returns: 1 on success, 0 on failure to lock tolock = fn lock = "%s.lock" % (tolock) uniq = "%s.%d" % (tolock, self.m_pid) if os.path.isfile(uniq): logger.warn("Locking: open %s: file already exists" % (uniq)) # os.unlink(uniq) return False if os.path.isfile(lock): logger.warn("Locking: open %s: file already exists" % (lock)) # os.unlink(lock) return False try: my_lock = open(uniq, "w") except: logger.warn("Locking: cannot create file %s" % (uniq)) return False else: at_exit[uniq] = 1 my_lock.write("%d\n" % (self.m_pid)) my_lock.close() try: os.link(uniq, lock) except: # Unable to create link, check error logger.warn("while locking %s" % (uniq)) try: stats = os.stat(uniq) except: # Error, no need to do anything logger.warn("error trying to stat %s" % uniq) pass else: if stats.st_nlink == 2: # Lock successful logger.info("link-count locked") at_exit[lock] = 1 os.unlink(uniq) at_exit.pop(uniq) return True else: # Created link logger.info("hardlink locked") at_exit[lock] = 1 os.unlink(uniq) at_exit.pop(uniq) return True return False def break_lock(self, fn): # purpose: check for a dead lock file, and remove if dead # paramtr: $fn (IN): name of the file to create lock file for # returns: True if a dead lock was forcefully removed, False if the # lock is still valid or could not be broken. lock = "%s.lock" % (fn) # Let's open file and check its pid try: input_file = open(lock, 'r') except: pass else: file_pid = input_file.readline() file_pid = file_pid.strip() input_file.close() # Let's check if said pid is still around try: os.kill(int(file_pid), 0) except: # Process is not around anymore if file_pid.isdigit(): uniq = "%s.%d" % (fn, int(file_pid)) logger.info("lock-owner %d found dead, removing lock!" % (int(file_pid))) os.unlink(lock) try: # Also try to remove uniq file os.unlink(uniq) except: pass # Lock should be broken now return True else: logger.warn("error: cannot determine process id from lock file!") else: logger.info("lock-owner %d still lives..." % (int(file_pid))) # Was not able to break lock return False def create_lock(self, fn): """ This function attempts to create a file lock around the specified filename according to Linux conventions. It first creates a unique file using the process id as unique suffix, then attempts to hardlink said file to the filename plus suffix <.lock>. The attempt is randomly backed off to retry on failure to hardlink. Additionally, the link count is checked to detect hidden success. This is a blocking function, and may block indefinitely on dead-locks, despite occasional lock acquisition wake-ups. """ # purpose: blockingly wait for lock file creation # paramtr: $fn (IN): name of file to create lock file for # returns: 1: lock was created. retries = 0 to_wait = 0 while not self.create_lockfile(fn): if retries > 10: # We waited enough, let's try to break the lock self.break_lock(fn) retries = 0 # Shouldn't be necessary, just in case else: # Let's wait for a little while to_wait = 5 * random.random() logger.info("lock on file %s is busy, retry %d, waiting %.1f s..." % (fn, retries, to_wait)) time.sleep(to_wait) retries = retries + 1 logger.info("obtained lock for %s" % (fn)) return True def delete_lock(self, fn): """ This static function deletes all lock files around the given filename. It should be a fast function, as no waiting is required.
""" # purpose: removes a lock file NFS-reliably # paramtr: $fn (IN): name of main file to lock # returns: 1 or 2 on success, 0 on failure to unlock tolock = fn lock = "%s.lock" % (tolock) uniq = "%s.%d" % (tolock, self.m_pid) result = 0 try: os.unlink(lock) except: pass else: result = result + 1 at_exit.pop(lock) try: os.unlink(uniq) except: pass else: result = result + 1 at_exit.pop(uniq) return result def filename(self): """ This is a simple accessor function, returning the filename that was passed to the constructor. """ # purpose: returns the name of the communication file return self.m_filename def dbtie(self, ro=False): """ This member increases the lock count for the database file, and connects to the database file. The return value is the result of the open call. It may be None in case of failure to open the database. """ # purpose: Lock a file and tie it to a hash # paramtr: $ro (opt. IN): if true, open in read-only mode # returns: None on error, underlying object otherwise # Create key if not already there if not self.m_count.has_key(self.m_pid): self.m_count[self.m_pid] = 0 if self.m_count[self.m_pid] == 0: self.create_lock(self.m_filename) self.m_count[self.m_pid] = self.m_count[self.m_pid] + 1 # Open database in read only or read/write mode if ro: my_mode = 'r' else: my_mode = 'c' try: my_db = anydbm.open(self.m_filename, my_mode) except: # Remove lock on failure to connect self.m_count[self.m_pid] = self.m_count[self.m_pid] - 1 if self.m_count[self.m_pid] == 0: self.delete_lock(self.m_filename) return None return my_db def locked(self): """ This function returns the reference count for locks on the file. Refernce counters are kept on a per-process basis. This is not thread safe. """ # purpose: detects already tied databases # returns: reference count for lock if not self.m_count.has_key(self.m_pid): return 0 return self.m_count[self.m_pid] def dbuntie(self, dbref): """ This function closes the hash data base and relinquishes the lock. This method should only be called, if the previous dbtie operation was successful, similar to opening and closing file handles """ # purpose: untie a hash and release the lock # paramtr: $dbref (I): reference to db to be closed # returns: - self.m_count[self.m_pid] = self.m_count[self.m_pid] - 1 if self.m_count[self.m_pid] == 0: self.delete_lock(self.m_filename) # Close datbase try: dbref.close() except: logger.warn("Error closing %s database" % (m_filename)) def clone(self): """ This is a comprehensive function to copy all values from the database into memory. Please note that you can create nasty dead-locks this way """ # purpose: obtains all current values into a copy # returns: a hash with key => value, may be empty # if no keys in database, or None if error my_copy = {} my_db = self.dbtie() if my_db is not None: # Copy each key/value pair, converting the value to int for key in my_db.keys(): my_copy[key] = int(my_db[key]) # All done self.dbuntie(my_db) return my_copy return None def inc(self, key, incr=1): # purpose: increment the count for a site handle # paramtr: $key (IN): key of value to increment # $incr (opt. 
IN): increment, defaults to 1 # returns: new value, None on error if key is None: return None # Just in case key is not string key = str(key) my_db = self.dbtie() if my_db is not None: if my_db.has_key(key): val = int(my_db[key]) val = val + incr else: val = incr # Write new value my_db[key] = str(val) # Done, disconnect from data base self.dbuntie(my_db) return val return None def dec(self, key, decr=1): # purpose: decrement the count for a site handle # paramtr: $key (IN): key of value to decrement # $decr (opt. IN): decrement, defaults to 1 # returns: new value, None in case of error if key is None: return None # Just in case key is not string key = str(key) my_db = self.dbtie() if my_db is not None: if my_db.has_key(key): val = int(my_db[key]) val = val - decr else: val = decr # Write new value my_db[key] = str(val) # Done, disconnect from data base self.dbuntie(my_db) return val return None # Register module exit handler atexit.register(intent_exit_handler) # Built-in testing if __name__ == '__main__': a = Intent() a.new("/tmp/test1") # Test tie/untie b = a.dbtie() a.dbuntie(b) # Increment keys a.inc('usc') a.inc('usc') c = a.inc('usc') if c is None: print "Cannot get counter!" else: print "Counter is now %d" % (c) c = a.dec("usc", 3) if c is None: print "Cannot get counter!" else: print "Counter is now %d" % (c) my_dict = {} my_dict = a.clone() for key in my_dict.keys(): print key, "-->", my_dict[key] print "done" pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/tools/__init__.py0000644000175000017500000000000011757531137024440 0ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/DAX3.py0000644000175000017500000020143311757531137022255 0ustar ryngerynge# Copyright 2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """API for generating Pegasus DAXes The classes in this module can be used to generate DAXes that can be read by Pegasus. 
The official DAX schema is here: http://pegasus.isi.edu/schema/dax-3.2.xsd Here is an example showing how to create the diamond DAX using this API: # Create ADAG object diamond = ADAG("diamond") # Add input file to the DAX-level replica catalog a = File("f.a") a.addPFN(PFN("gsiftp://site.com/inputs/f.a","site")) diamond.addFile(a) # Add executables to the DAX-level replica catalog e_preprocess = Executable(namespace="diamond", name="preprocess", version="4.0", os="linux", arch="x86_64") e_preprocess.addPFN(PFN("gsiftp://site.com/bin/preprocess","site")) diamond.addExecutable(e_preprocess) e_findrange = Executable(namespace="diamond", name="findrange", version="4.0", os="linux", arch="x86_64") e_findrange.addPFN(PFN("gsiftp://site.com/bin/findrange","site")) diamond.addExecutable(e_findrange) e_analyze = Executable(namespace="diamond", name="analyze", version="4.0", os="linux", arch="x86_64") e_analyze.addPFN(PFN("gsiftp://site.com/bin/analyze","site")) diamond.addExecutable(e_analyze) # Add a preprocess job preprocess = Job(e_preprocess) b1 = File("f.b1") b2 = File("f.b2") preprocess.addArguments("-a preprocess","-T60","-i",a,"-o",b1,b2) preprocess.uses(a, link=Link.INPUT) preprocess.uses(b1, link=Link.OUTPUT, transfer=True) preprocess.uses(b2, link=Link.OUTPUT, transfer=True) diamond.addJob(preprocess) # Add left Findrange job frl = Job(e_findrange) c1 = File("f.c1") frl.addArguments("-a findrange","-T60","-i",b1,"-o",c1) frl.uses(b1, link=Link.INPUT) frl.uses(c1, link=Link.OUTPUT, transfer=True) diamond.addJob(frl) # Add right Findrange job frr = Job(e_findrange) c2 = File("f.c2") frr.addArguments("-a findrange","-T60","-i",b2,"-o",c2) frr.uses(b2, link=Link.INPUT) frr.uses(c2, link=Link.OUTPUT, transfer=True) diamond.addJob(frr) # Add Analyze job analyze = Job(e_analyze) d = File("f.d") analyze.addArguments("-a analyze","-T60","-i",c1,c2,"-o",d) analyze.uses(c1, link=Link.INPUT) analyze.uses(c2, link=Link.INPUT) analyze.uses(d, link=Link.OUTPUT, transfer=True, register=True) diamond.addJob(analyze) # Add control-flow dependencies diamond.depends(parent=preprocess, child=frl) diamond.depends(parent=preprocess, child=frr) diamond.depends(parent=frl, child=analyze) diamond.depends(parent=frr, child=analyze) # Write the DAX to stdout import sys diamond.writeXML(sys.stdout) # Write the DAX to a file f = open("diamond.dax","w") diamond.writeXML(f) f.close() """ __author__ = "Gideon Juve " __version__ = "3.3" __all__ = [ 'DAX3Error', 'DuplicateError', 'NotFoundError', 'FormatError', 'Metadata', 'Profile', 'PFN', 'Namespace', 'Arch', 'Link', 'Transfer', 'OS', 'When', 'File', 'Executable', 'Use', 'Transformation', 'Invoke', 'Job', 'DAX', 'DAG', 'ADAG', 'Dependency', 'parse', 'parseString' ] import datetime, pwd, os, sys from StringIO import StringIO import codecs import shlex import codecs SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/DAX" SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/dax-3.3.xsd" SCHEMA_VERSION = "3.3" class DAX3Error(Exception): pass class DuplicateError(DAX3Error): pass class NotFoundError(DAX3Error): pass class FormatError(DAX3Error): pass class ParseError(DAX3Error): pass class Element: """Representation of an XML element for formatting output""" def __init__(self, name, attrs=[]): self.name = name self.attrs = [] for attr, value in attrs: if value is not None: if isinstance(value, bool): value = str(value).lower() elif not isinstance(value, basestring): value = repr(value) attr = attr.replace('__',':') self.attrs.append((attr,value)) self.children = [] self.flat = 
False

    def _escape(self, text):
        """Escape special characters in XML"""
        o = []
        for c in text:
            if c == '"': o.append("&quot;")
            elif c == "'": o.append("&apos;")
            elif c == "<": o.append("&lt;")
            elif c == ">": o.append("&gt;")
            elif c == "&": o.append("&amp;")
            else: o.append(c)
        return ''.join(o)

    def element(self, element):
        self.children.append(element)
        return element

    def text(self, value):
        if not isinstance(value, basestring):
            value = str(value)
        self.children.append(self._escape(value))
        return self

    def comment(self, message):
        self.children.append("<!-- %s -->" % self._escape(message))

    def flatten(self):
        self.flat = True
        return self

    def __unicode__(self):
        s = StringIO()
        self.write(s)
        x = s.getvalue()
        s.close()
        return unicode(x)

    def __str__(self):
        return unicode(self).encode('utf-8')

    def write(self, stream=sys.stdout, level=0, flatten=False):
        flat = self.flat or flatten
        stream.write('<%s' % self.name)
        for attr, value in self.attrs:
            value = self._escape(value)
            stream.write(' %s="%s"' % (attr, value))
        if len(self.children) == 0:
            stream.write('/>')
        else:
            stream.write('>')
            if not flat:
                stream.write('\n')
            for child in self.children:
                if not flat:
                    stream.write('\t'*(level+1))
                if isinstance(child, basestring):
                    stream.write(child)
                else:
                    child.write(stream, level+1, flat)
                if not flat:
                    stream.write('\n')
            if not flat:
                stream.write('\t'*level)
            stream.write('</%s>' % self.name)

class Namespace:
    """
    Namespace values recognized by Pegasus. See Executable,
    Transformation, and Job.
    """
    PEGASUS = 'pegasus'
    CONDOR = 'condor'
    DAGMAN = 'dagman'
    ENV = 'env'
    HINTS = 'hints'
    GLOBUS = 'globus'
    SELECTOR = 'selector'
    STAT = 'stat'

class Arch:
    """
    Architecture types. See Executable.
    """
    X86 = 'x86'
    X86_64 = 'x86_64'
    PPC = 'ppc'
    PPC_64 = 'ppc_64'
    IA64 = 'ia64'
    SPARCV7 = 'sparcv7'
    SPARCV9 = 'sparcv9'
    AMD64 = 'amd64'

class Link:
    """
    Linkage attributes. See File, Executable and uses().
    """
    NONE = 'none'
    INPUT = 'input'
    OUTPUT = 'output'
    INOUT = 'inout'

class Transfer:
    """
    Transfer types for uses. See Executable, File.
    """
    FALSE = 'false'
    OPTIONAL = 'optional'
    TRUE = 'true'

class OS:
    """
    OS types. See Executable.
    """
    LINUX = 'linux'
    SUNOS = 'sunos'
    AIX = 'aix'
    MACOS = 'macos'
    WINDOWS = 'windows'

class When:
    """
    Job states for notifications. See Job/DAX/DAG.invoke().
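
    Example (an illustrative sketch; 'job' stands for any Job, DAX, or
    DAG instance, and the command string is made up):

        job.invoke(When.AT_END, '/usr/bin/update_db -status done')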
""" NEVER = 'never' START = 'start' ON_ERROR = 'on_error' ON_SUCCESS = 'on_success' AT_END = 'at_end' ALL = 'all' class Invoke: def __init__(self, when, what): if not when: raise FormatError("invalid when", when) if not what: raise FormatError("invalid what", what) self.when = when self.what = what def __unicode__(self): return u"" % (self.when, self.what) def __str__(self): return unicode(self).encode('utf-8') def __hash__(self): return hash((self.when, self.what)) def __eq__(self, other): if isinstance(other, Invoke): return self.when == other.when and self.what == other.what return False def toXML(self): e = Element('invoke', [('when', self.when)]) e.text(self.what) e.flatten() return e class InvokeMixin: def addInvoke(self, invoke): """Add invoke to this object""" if self.hasInvoke(invoke): raise DuplicateError("Duplicate Invoke", invoke) self.invocations.add(invoke) def hasInvoke(self, invoke): """Test to see if this object has invoke""" return invoke in self.invocations def removeInvoke(self, invoke): """Remove invoke from this object""" if not self.hasInvoke(invoke): raise NotFoundError("Invoke not found", invoke) self.invocations.remove(invoke) def clearInvokes(self): """Remove all Invoke objects""" self.invocations.clear() def invoke(self, when, what): """ Invoke executable 'what' when job reaches status 'when'. The value of 'what' should be a command that can be executed on the submit host. The list of valid values for 'when' is: WHEN MEANING ========== ======================================================= never never invoke start invoke just before job gets submitted. on_error invoke after job finishes with failure (exitcode != 0). on_success invoke after job finishes with success (exitcode == 0). at_end invoke after job finishes, regardless of exit status. all like start and at_end combined. 
Examples: obj.invoke('at_end','/usr/bin/mail -s "job done" juve@usc.edu') obj.invoke('on_error','/usr/bin/update_db -failure') """ self.addInvoke(Invoke(when, what)) class ProfileMixin: def addProfile(self, profile): """Add a profile to this object""" if self.hasProfile(profile): raise DuplicateError("Duplicate profile", profile) self.profiles.add(profile) def hasProfile(self, profile): """Does this object have profile?""" return profile in self.profiles def removeProfile(self, profile): """Remove profile from this object""" if not self.hasProfile(profile): raise NotFoundError("Profile not found", profile) self.profiles.remove(profile) def clearProfiles(self): """Remove all profiles from this object""" self.profiles.clear() def profile(self, namespace, key, value): """Declarative profile addition""" self.addProfile(Profile(namespace, key, value)) class MetadataMixin: def addMetadata(self, metadata): """Add metadata to this object""" if self.hasMetadata(metadata): raise DuplicateError("Duplicate Metadata", metadata) self._metadata.add(metadata) def removeMetadata(self, metadata): """Remove meta from this object""" if not self.hasMetadata(metadata): raise NotFoundError("Metadata not found", metadata) self._metadata.remove(metadata) def hasMetadata(self, metadata): """Does this object have metadata?""" return metadata in self._metadata def clearMetadata(self): """Remove all metadata from this object""" self._metadata.clear() def metadata(self, key, type, value): """Declarative metadata addition""" self.addMetadata(Metadata(key, type, value)) class PFNMixin: def addPFN(self, pfn): """Add a PFN to this object""" if self.hasPFN(pfn): raise DuplicateError("Duplicate PFN", pfn) self.pfns.add(pfn) def removePFN(self, pfn): """Remove PFN from this object""" if not self.hasPFN(pfn): raise NotFoundError("PFN not found", pfn) self.pfns.remove(pfn) def hasPFN(self, pfn): """Does this object have pfn?""" return pfn in self.pfns def clearPFNs(self): """Remove all PFNs from this object""" self.pfns.clear() def PFN(self, url, site=None): """Declarative PFN addition""" self.addPFN(PFN(url,site)) class CatalogType(ProfileMixin, MetadataMixin, PFNMixin): """Base class for File and Executable""" def __init__(self, name): """ All arguments specify the workflow-level behavior of this File. Job-level behavior can be defined when adding the File to a Job's uses. If the properties are not overridden at the job-level, then the workflow-level values are used as defaults. If this LFN is to be used as a job's stdin/stdout/stderr then the value of link is ignored when generating the tags. Arguments: name: The name of the file (required) """ if not name: raise FormatError('name required') self.name = name self.profiles = set() self._metadata = set() self.pfns = set() def innerXML(self, parent): for p in self.profiles: parent.element(p.toXML()) for m in self._metadata: parent.element(m.toXML()) for p in self.pfns: parent.element(p.toXML()) class File(CatalogType): """File(name) A file entry for the DAX-level replica catalog, or a reference to a logical file used by the workflow. Examples: input = File('input.txt') Example use in job: input = File('input.txt') output = File('output.txt') job = Job(name="compute") job.uses(input, link=Link.INPUT, transfer=True) job.uses(output, link=Link.OUTPUT, transfer=True, register=True) """ def __init__(self, name): """ All arguments specify the workflow-level behavior of this File. Job-level behavior can be defined when adding the File to a Job's uses. 
If the properties are not overridden at the job-level, then the
        workflow-level values are used as defaults.

        If this LFN is to be used as a job's stdin/stdout/stderr then the
        value of link is ignored when generating the tags.

        Arguments:
            name: The name of the file (required)
        """
        CatalogType.__init__(self, name)

    def __unicode__(self):
        return u"<File %s>" % self.name

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __hash__(self):
        return hash(self.name)

    def __eq__(self, other):
        return isinstance(other, File) and self.name == other.name

    def toArgumentXML(self):
        """Returns an XML representation of this File with no inner elements"""
        return Element('file', [('name', self.name)])

    def toStdioXML(self, tag):
        """Returns an XML representation of this file as a stdin/out/err tag"""
        # Compare tags by value, not identity
        if tag == 'stdin':
            link = "input"   # stdin is always input
        elif tag in ['stdout', 'stderr']:
            link = "output"  # stdout/stderr are always output
        else:
            raise FormatError("invalid tag", tag, "should be one of stdin, stdout, stderr")
        return Element(tag, [
            ('name', self.name),
            ('link', link)
        ])

    def toXML(self):
        """Return the XML representation of this File with inner elements"""
        e = self.toArgumentXML()
        self.innerXML(e)
        return e

class Executable(CatalogType, InvokeMixin):
    """Executable(name[,namespace][,version][,arch][,os][,osrelease][,osversion][,glibc][,installed])

    An entry for an executable in the DAX-level replica catalog.

    Examples:
        grep = Executable("grep")
        grep = Executable(namespace="os",name="grep",version="2.3")
        grep = Executable(namespace="os",name="grep",version="2.3",arch=Arch.X86)
        grep = Executable(namespace="os",name="grep",version="2.3",arch=Arch.X86,os=OS.LINUX)
    """
    def __init__(self, name, namespace=None, version=None, arch=None, os=None,
                 osrelease=None, osversion=None, glibc=None, installed=None):
        """
        Arguments:
            name: Logical name of executable
            namespace: Executable namespace
            version: Executable version
            arch: Architecture that this exe was compiled for
            os: Name of os that this exe was compiled for
            osrelease: Release of os that this exe was compiled for
            osversion: Version of os that this exe was compiled for
            glibc: Version of glibc this exe was compiled against
            installed: Is the executable installed (true), or stageable (false)
        """
        CatalogType.__init__(self, name)
        self.namespace = namespace
        self.version = version
        self.arch = arch
        self.os = os
        self.osrelease = osrelease
        self.osversion = osversion
        self.glibc = glibc
        self.installed = installed
        self.invocations = set()

    def __unicode__(self):
        return u"<Executable %s::%s:%s>" % (self.namespace, self.name, self.version)

    def __str__(self):
        return unicode(self).encode('utf-8')

    def __hash__(self):
        return hash((self.name,
                     self.namespace,
                     self.version,
                     self.arch,
                     self.os,
                     self.osrelease,
                     self.osversion,
                     self.glibc,
                     self.installed))

    def __eq__(self, other):
        if isinstance(other, Executable):
            return self.name == other.name and \
                   self.namespace == other.namespace and \
                   self.version == other.version and \
                   self.arch == other.arch and \
                   self.os == other.os and \
                   self.osrelease == other.osrelease and \
                   self.osversion == other.osversion and \
                   self.glibc == other.glibc and \
                   self.installed == other.installed
        return False

    def toXML(self):
        """Returns an XML representation of this executable"""
        e = Element('executable', [
            ('name', self.name),
            ('namespace', self.namespace),
            ('version', self.version),
            ('arch', self.arch),
            ('os', self.os),
            ('osrelease', self.osrelease),
            ('osversion', self.osversion),
            ('glibc', self.glibc),
            ('installed', self.installed)
        ])
        self.innerXML(e)
        # Invocations
        for inv in
self.invocations: e.element(inv.toXML()) return e class Metadata: """Metadata(key,type,value) A way to add metadata to File and Executable objects. This is useful if you want to annotate the DAX with things like file sizes, application-specific attributes, etc. There is currently no restriction on the type. Examples: s = Metadata('size','int','12') a = Metadata('algorithm','string','plav') """ def __init__(self, key, type, value): """ Arguments: key: The key name of the item type: The type of the value (e.g. string, int, float) value: The value of the item """ if not key: raise FormatError("Invalid key", key) if not type: raise FormatError("Invalid type", type) if not value: raise FormatError("Invalid value", value) self.key = key self.type = type self.value = value def __unicode__(self): return u"" % (self.type, self.key, self.value) def __str__(self): return unicode(self).encode('utf-8') def __hash__(self): return hash(self.key) def __eq__(self, other): return isinstance(other, Metadata) and self.key == other.key def toXML(self): m = Element('metadata', [ ('key',self.key), ('type',self.type) ]) m.text(self.value).flatten() return m class PFN(ProfileMixin): """PFN(url[,site]) A physical file name. Used to provide URLs for files and executables in the DAX-level replica catalog. PFNs can be added to File and Executable. Examples: PFN('http://site.com/path/to/file.txt','site') PFN('http://site.com/path/to/file.txt',site='site') PFN('http://site.com/path/to/file.txt') """ def __init__(self, url, site=None): """ Arguments: url: The url of the file. site: The name of the site. [default: local] """ if not url: raise FormatError("Invalid url", url) if not site: raise FormatError("Invalid site", site) self.url = url self.site = site self.profiles = set() def __unicode__(self): return u"" % (self.site, self.url) def __str__(self): return unicode(self).encode('utf-8') def __hash__(self): return hash((self.url, self.site)) def __eq__(self, other): return isinstance(other, PFN) and \ self.url == other.url and \ self.site == other.site def toXML(self): pfn = Element('pfn', [ ('url', self.url), ('site', self.site) ]) for p in self.profiles: pfn.element(p.toXML()) return pfn class Profile: """Profile(namespace,key,value) A Profile captures scheduler-, system-, and environment-specific parameters in a uniform fashion. Each profile declaration assigns a value to a key within a namespace. Profiles can be added to Job, DAX, DAG, File, Executable, and PFN. Examples: path = Profile(Namespace.ENV,'PATH','/bin') vanilla = Profile(Namespace.CONDOR,'universe','vanilla') path = Profile(namespace='env',key='PATH',value='/bin') path = Profile('env','PATH','/bin') """ def __init__(self, namespace, key, value): """ Arguments: namespace: The namespace of the profile (see Namespace) key: The key name. Can be anything that responds to str(). value: The value for the profile. Can be anything that responds to str(). 
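
        For example (illustrative), Profile('env','PATH','/bin').toXML()
        renders as <profile namespace="env" key="PATH">/bin</profile>.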
""" self.namespace = namespace self.key = key self.value = value def __unicode__(self): return u"" % (self.namespace, self.key, self.value) def __str__(self): return unicode(self).encode('utf-8') def __hash__(self): return hash((self.namespace, self.key)) def __eq__(self, other): return isinstance(other, Profile) and \ self.namespace == other.namespace and \ self.key == other.key def toXML(self): """Return an XML element for this profile""" p = Element("profile", [ ('namespace', self.namespace), ('key', self.key) ]) p.text(self.value).flatten() return p class Use: """Use(file[,link][,register][,transfer][,optional][,namespace][,version][,executable]) Use of a logical file name. Used for referencing files in the DAX. Attributes: file: A string, File or Executable representing the logical file link: Is this file a job input, output or both (See LFN) (optional) register: Should this file be registered in RLS? (True/False) (optional) transfer: Should this file be transferred? (True/False or See LFN) (optional) optional: Is this file optional, or should its absence be an error? (optional) namespace: Namespace of executable (optional) version: version of executable (optional) executable: Is file an executable? (True/False) (optional) For Use objects that are added to Transformations, the attributes 'link', 'register', 'transfer' and 'optional' are ignored. If a File object is passed in as 'file', then the default value for executable is 'false'. Similarly, if an Executable object is passed in, then the default value for executable is 'true'. """ def __init__(self, name, link=None, register=None, transfer=None, optional=None, namespace=None, version=None, executable=None): if not name: raise FormatError('Invalid name', name) self.name = name self.link = link self.optional = optional self.register = register self.transfer = transfer self.namespace = namespace self.version = version self.executable = executable def __unicode__(self): return u"" % (self.namespace, self.name, self.version) def __str__(self): return unicode(self).encode("utf-8") def __hash__(self): return hash((self.namespace, self.name, self.version)) def __eq__(self, other): if isinstance(other, Use): return self.namespace == other.namespace and \ self.name == other.name and \ self.version == other.version def toTransformationXML(self): return Element('uses', [ ('namespace',self.namespace), ('name',self.name), ('version',self.version), ('executable',self.executable) ]) def toJobXML(self): return Element('uses', [ ('namespace',self.namespace), ('name',self.name), ('version',self.version), ('link',self.link), ('register',self.register), ('transfer',self.transfer), ('optional',self.optional), ('executable',self.executable) ]) class UseMixin: def addUse(self, use): """Add Use to this object""" if self.hasUse(use): raise DuplicateError("Duplicate Use", use) self.used.add(use) def removeUse(self, use): """Remove use from this object""" if not self.hasUse(use): raise NotFoundError("No such Use", use) self.used.remove(use) def hasUse(self, use): """Test to see if this object has use""" return use in self.used def clearUses(self): """Remove all uses from this object""" self.used.clear() def uses(self, arg, link=None, register=None, transfer=None, optional=None, namespace=None, version=None, executable=None): if isinstance(arg, CatalogType): _name = arg.name else: _name = arg _namespace = None _version = None _executable = None if isinstance(arg, Executable): _namespace = arg.namespace _version = arg.version # We only need to set this for 
jobs # the default is True for Transformations if isinstance(self, AbstractJob): _executable = True if isinstance(arg, File): # We only need to set this for transformations # The default is False for Jobs if isinstance(self, Transformation): _executable = False if namespace is not None: _namespace = namespace if version is not None: _version = str(version) if executable is not None: _executable = executable use = Use(_name,link,register,transfer,optional,_namespace,_version,_executable) self.addUse(use) class Transformation(UseMixin,InvokeMixin): """Transformation((name|executable)[,namespace][,version]) A logical transformation. This is basically defining one or more entries in the transformation catalog. You can think of it like a macro for adding to your jobs. You can define a transformation that uses several files and/or executables, and refer to it when creating a job. If you do, then all of the uses defined for that transformation will be copied to the job during planning. This code: in = File("input.txt") exe = Executable("exe") t = Transformation(namespace="foo", name="bar", version="baz") t.uses(in) t.uses(exe) j = Job(t) is equivalent to: in = File("input.txt") exe = Executable("exe") j = Job(namespace="foo", name="bar", version="baz") j.uses(in) j.uses(exe) Examples: Transformation(name='mDiff') Transformation(namespace='montage',name='mDiff') Transformation(namespace='montage',name='mDiff',version='3.0') Using one executable: mProjectPP = Executable(namespace="montage",name="mProjectPP",version="3.0") x_mProjectPP = Transformation(mProjectPP) Using several executables: mDiff = Executable(namespace="montage",name="mProjectPP",version="3.0") mFitplane = Executable(namespace="montage",name="mFitplane",version="3.0") mDiffFit = Executable(namespace="montage",name="mDiffFit",version="3.0") x_mDiffFit = Transformation(mDiffFit) x_mDiffFit.uses(mDiff) x_mDiffFit.uses(mFitplane) Config files too: conf = File("jbsim.conf") jbsim = Executable(namespace="scec",name="jbsim") x_jbsim = Transformation(jbsim) x_jbsim.uses(conf) """ def __init__(self,name,namespace=None,version=None): """ The name argument can be either a string or an Executable object. If it is an Executable object, then the Transformation inherits its name, namespace and version from the Executable, and the Transformation is set to use the Executable with link=input, transfer=true, and register=False. 
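
        For example (an illustrative sketch):

            exe = Executable(namespace="foo", name="bar", version="1.0")
            t = Transformation(exe)  # name/namespace/version come from exe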
Arguments: name: The name of the transformation namespace: The namespace of the xform (optional) version: The version of the xform (optional) """ self.name = None self.namespace = None self.version = None self.used = set() self.invocations = set() if isinstance(name, Executable): self.name = name.name self.namespace = name.namespace self.version = name.version else: self.name = name if namespace: self.namespace = namespace if version: self.version = version def __unicode__(self): return u"" % (self.namespace, self.name, self.version) def __str__(self): return unicode(self).encode("utf-8") def __hash__(self): return hash((self.namespace, self.name, self.version)) def __eq__(self, other): if isinstance(other, Transformation): return self.namespace == other.namespace and \ self.name == other.name and \ self.version == other.version def toXML(self): """Return an XML representation of this transformation""" e = Element('transformation', [ ('namespace', self.namespace), ('name', self.name), ('version', self.version) ]) # Uses for u in self.used: e.element(u.toTransformationXML()) # Invocations for inv in self.invocations: e.element(inv.toXML()) return e class AbstractJob(ProfileMixin,UseMixin,InvokeMixin): """The base class for Job, DAX, and DAG""" def __init__(self, id=None, node_label=None): self.id = id self.node_label = node_label self.arguments = [] self.profiles = set() self.used = set() self.invocations = set() self.stdout = None self.stderr = None self.stdin = None def addArguments(self, *arguments): """Add one or more arguments to the job (this will add whitespace)""" for arg in arguments: if not isinstance(arg, (File, basestring)): raise FormatError("Invalid argument", arg) for arg in arguments: if len(self.arguments) > 0: self.arguments.append(' ') self.arguments.append(arg) def addRawArguments(self, *arguments): """Add one or more arguments to the job (whitespace will NOT be added)""" for arg in arguments: if not isinstance(arg, (File, basestring)): raise FormatError("Invalid argument", arg) self.arguments.extend(arguments) def clearArguments(self): """Remove all arguments from this job""" self.arguments = [] def getArguments(self): """Get the arguments of this job""" args = [] for a in self.arguments: if isinstance(a, File): args.append(unicode(a.toArgumentXML())) else: args.append(a) return ''.join(args) def setStdout(self, filename): """Redirect stdout to a file""" if isinstance(filename, File): self.stdout = filename else: self.stdout = File(filename) def clearStdout(self): """Remove stdout file""" self.stdout = None def setStderr(self, filename): """Redirect stderr to a file""" if isinstance(filename, File): self.stderr = filename else: self.stderr = File(filename) def clearStderr(self): """Remove stderr file""" self.stderr = None def setStdin(self, filename): """Redirect stdin from a file""" if isinstance(filename, File): self.stdin = filename else: self.stdin = File(filename) def clearStdin(self): """Remove stdin file""" self.stdin = None def innerXML(self, element): """Return an XML representation of this job""" # Arguments if len(self.arguments) > 0: args = Element('argument').flatten() for x in self.arguments: if isinstance(x, File): args.element(x.toArgumentXML()) else: args.text(x) element.element(args) # Profiles for pro in self.profiles: element.element(pro.toXML()) # Stdin/xml/err if self.stdin is not None: element.element(self.stdin.toStdioXML('stdin')) if self.stdout is not None: element.element(self.stdout.toStdioXML('stdout')) if self.stderr is not None: 
element.element(self.stderr.toStdioXML('stderr')) # Uses for use in self.used: element.element(use.toJobXML()) # Invocations for inv in self.invocations: element.element(inv.toXML()) class Job(AbstractJob): """Job((name|Executable|Transformation)[,id][,namespace][,version][,node_label]) This class defines the specifics of a job to run in an abstract manner. All filename references still refer to logical files. All references transformations also refer to logical transformations, though physical location hints can be passed through profiles. Examples: sleep = Job(id="ID0001",name="sleep") jbsim = Job(id="ID0002",name="jbsim",namespace="cybershake",version="2.1") merge = Job("jbsim") You can create a Job based on a Transformation: mDiff_xform = Transformation("mDiff", ...) mDiff_job = Job(mDiff_xform) Or an Executable: mDiff_exe = Executable("mDiff", ...) mDiff_job = Job(mDiff_exe) Several arguments can be added at the same time: input = File(...) output = File(...) job.addArguments("-i",input,"-o",output) Profiles are added similarly: job.addProfile(Profile(Namespace.ENV, key='PATH', value='/bin')) job.profile(Namespace.ENV, "PATH", "/bin") Adding file uses is simple, and you can override global File attributes: job.uses(input, Link.INPUT) job.uses(output, Link.OUTPUT, transfer=True, register=True) """ def __init__(self, name, id=None, namespace=None, version=None, node_label=None): """The ID for each job should be unique in the DAX. If it is None, then it will be automatically generated when the job is added to the DAX. The name, namespace, and version should match what you have in your transformation catalog. For example, if namespace="foo" name="bar" and version="1.0", then the transformation catalog should have an entry for "foo::bar:1.0". The name argument can be either a string, or a Transformation object. If it is a Transformation object, then the job will inherit the name, namespace, and version from the Transformation. Arguments: name: The transformation name or Transformation object (required) id: A unique identifier for the job (optional) namespace: The namespace of the transformation (optional) version: The transformation version (optional) node_label: The label for this job to use in graphing (optional) """ self.namespace = None self.version = None if isinstance(name, (Transformation, Executable)): self.name = name.name self.namespace = name.namespace self.version = name.version elif isinstance(name, basestring): self.name = name else: raise FormatError("Name must be a string, Transformation or Executable") if not self.name: raise FormatError("Invalid name", self.name) AbstractJob.__init__(self, id=id, node_label=node_label) if namespace: self.namespace = namespace if version: self.version = version def __unicode__(self): return u"" % (self.id, self.namespace, self.name, self.version) def __str__(self): return unicode(self).encode("utf-8") def toXML(self): e = Element('job',[ ('id',self.id), ('namespace',self.namespace), ('name',self.name), ('version',self.version), ('node-label',self.node_label) ]) self.innerXML(e) return e class DAX(AbstractJob): """DAX(file[,id][,node_label]) This job represents a sub-DAX that will be planned and executed by the workflow. Examples: daxjob1 = DAX("foo.dax") daxfile = File("foo.dax") daxjob2 = DAX(daxfile) """ def __init__(self, file, id=None, node_label=None): """ The name argument can be either a string, or a File object. 
If it is a File object, then this job will inherit its name from the
        File and the File will be added in a <uses> with transfer=True,
        register=False, and link=input.

        Arguments:
            file: The logical name of the DAX file or the DAX File object
            id: The id of the DAX job [default: autogenerated]
            node_label: The label for this job to use in graphing
        """
        if isinstance(file, File):
            self.file = file
        elif isinstance(file, str) or isinstance(file, unicode):
            self.file = File(name=file)
        else:
            raise FormatError("invalid file", file)
        AbstractJob.__init__(self, id=id, node_label=node_label)

    def __unicode__(self):
        return u"<DAX %s %s>" % (self.id, self.file.name)

    def __str__(self):
        return unicode(self).encode("utf-8")

    def toXML(self):
        """Return an XML representation of this job"""
        e = Element('dax', [
            ('id', self.id),
            ('file', self.file.name),
            ('node-label', self.node_label)
        ])
        self.innerXML(e)
        return e

class DAG(AbstractJob):
    """DAG(file[,id][,node_label])

    This job represents a sub-DAG that will be executed by this workflow.

    Examples:
        dagjob1 = DAG(file="foo.dag")
        dagfile = File("foo.dag")
        dagjob2 = DAG(dagfile)
    """
    def __init__(self, file, id=None, node_label=None):
        """
        The name argument can be either a string, or a File object. If it
        is a File object, then this job will inherit its name from the
        File and the File will be added in a <uses> with transfer=True,
        register=False, and link=input.

        Arguments:
            file: The logical name of the DAG file, or the DAG File object
            id: The ID of the DAG job [default: autogenerated]
            node_label: The label for this job to use in graphing
        """
        if isinstance(file, File):
            self.file = file
        elif isinstance(file, str) or isinstance(file, unicode):
            self.file = File(name=file)
        else:
            raise FormatError("Invalid file", file)
        AbstractJob.__init__(self, id=id, node_label=node_label)

    def __unicode__(self):
        return u"<DAG %s %s>" % (self.id, self.file.name)

    def __str__(self):
        return unicode(self).encode("utf-8")

    def toXML(self):
        """Return an XML representation of this DAG"""
        e = Element('dag', [
            ('id', self.id),
            ('file', self.file.name),
            ('node-label', self.node_label)
        ])
        self.innerXML(e)
        return e

class Dependency:
    """A dependency between two nodes in the ADAG"""
    def __init__(self, parent, child, edge_label=None):
        if isinstance(parent, AbstractJob):
            if not parent.id:
                raise FormatError("Parent job has no id", parent)
            self.parent = parent.id
        elif parent:
            self.parent = parent
        else:
            raise FormatError("Invalid parent", parent)
        if isinstance(child, AbstractJob):
            if not child.id:
                raise FormatError("Child job has no id", child)
            self.child = child.id
        elif child:
            self.child = child
        else:
            raise FormatError("Invalid child", child)
        if self.parent == self.child:
            raise FormatError("No self edges allowed", (self.parent, self.child))
        self.edge_label = edge_label

    def __unicode__(self):
        return u"<Dependency %s -> %s>" % (self.parent, self.child)

    def __str__(self):
        return unicode(self).encode("utf-8")

    def __hash__(self):
        return hash((self.parent, self.child))

    def __eq__(self, other):
        """Equal dependencies have the same parent and child"""
        if isinstance(other, Dependency):
            return self.parent == other.parent and self.child == other.child
        return False

class ADAG(InvokeMixin):
    """ADAG(name[,count][,index])

    Representation of a directed acyclic graph in XML (DAX).

    Examples:
        dax = ADAG('diamond')
        or, if you want to use the old style count/index partitioning stuff:
        part5 = ADAG('partition_5',count=10,index=5)

    Adding jobs:
        a = Job(...)
dax.addJob(a) Adding parent-child control-flow dependency: dax.addDependency(Dependency(parent=a,child=b)) dax.addDependency(Dependency(parent=a,child=c)) dax.addDependency(Dependency(parent=b,child=d)) dax.addDependency(Dependency(parent=c,child=d)) or: dax.depends(child=b, parent=a) Adding Files (not required if you have a replica catalog): input = File(...) dax.addFile(input) Adding Executables (not required if you have a transformation catalog): exe = Executable(...) dax.addExecutable(exe) Adding Transformations (not required if you have a transformation catalog): xform = Transformation(...) dax.addTransformation(xform) Writing a DAX out to a file: f = open('diamond.dax','w') dax.writeXML(f) f.close() """ def __init__(self, name, count=None, index=None): """ Arguments: name: The name of the workflow count: Total number of DAXes that will be created index: Zero-based index of this DAX """ if not name: raise FormatError("Invalid ADAG name", name) self.name = name if count: count = int(count) if index: index = int(index) self.count = count self.index = index # This is used to generate unique ID numbers self.sequence = 1 self.jobs = {} self.files = set() self.executables = set() self.dependencies = set() self.transformations = set() self.invocations = set() def __unicode__(self): return u"" % self.name def __str__(self): return unicode(self).encode("utf-8") def nextJobID(self): """Get an autogenerated ID for the next job""" next = None while not next or next in self.jobs: next = "ID%07d" % self.sequence self.sequence += 1 return next def getJob(self, jobid): """Get a Job/DAG/DAX""" if not jobid in self.jobs: raise NotFoundError("Job not found",jobid) return self.jobs[jobid] def addJob(self, job): """Add a job to this ADAG""" # Add an auto-generated ID if the job doesn't have one if job.id is None: job.id = self.nextJobID() if self.hasJob(job): raise DuplicateError("Duplicate job",job) self.jobs[job.id] = job def hasJob(self, job): """Test to see if job is in this ADAG The job parameter can be an object or a job ID """ if isinstance(job, AbstractJob): return job.id in self.jobs else: return job in self.jobs def removeJob(self, job): """Remove job from this ADAG""" if not self.hasJob(job): raise NotFoundError("Job not found", job) if isinstance(job, AbstractJob): del self.jobs[job.id] else: del self.jobs[job] def clearJobs(self): """Remove all jobs""" self.jobs = {} def addDAX(self, dax): """Add a sub-DAX (synonym for addJob)""" if not isinstance(dax, DAX): raise FormatError("Not a DAX", dax) self.addJob(dax) def addDAG(self, dag): """Add a sub-DAG (synonym for addJob)""" if not isinstance(dag, DAG): raise FormatError("Not a DAG", dag) self.addJob(dag) def addFile(self, file): """Add a file to the DAX""" if not isinstance(file, File): raise FormatError("Invalid File", file) if self.hasFile(file): raise DuplicateError("Duplicate file", file) self.files.add(file) def hasFile(self, file): """Check to see if file is in this ADAG""" return file in self.files def removeFile(self, file): """Remove file from this ADAG""" if not self.hasFile(file): raise NotFoundError("File not found", file) self.files.remove(file) def clearFiles(self): """Remove all files""" self.files.clear() def addExecutable(self, executable): """Add an executable to this ADAG""" if self.hasExecutable(executable): raise DuplicateError("Duplicate executable",executable) self.executables.add(executable) def hasExecutable(self, executable): """Check if executable is in this ADAG""" return executable in self.executables def 
removeExecutable(self, executable): """Remove executable from this ADAG""" if not self.hasExecutable(executable): raise NotFoundError("Executable not found",executable) self.executables.remove(executable) def clearExecutables(self): """Remove all executables""" self.executables.clear() def addTransformation(self, transformation): """Add a transformation to this ADAG""" if self.hasTransformation(transformation): raise DuplicateError("Duplicate tranformation",transformation) self.transformations.add(transformation) def hasTransformation(self, transformation): """Check to see if transformation is in this ADAG""" return transformation in self.transformations def removeTransformation(self, transformation): """Remove transformation from this ADAG""" if not self.hasTransformation(transformation): raise NotFoundError("Transformation not found",transformation) self.transformations.remove(transformation) def clearTransformations(self): """Remove all transformations""" self.transformations.clear() def depends(self, child, parent, edge_label=None): """Add a dependency to the workflow Arguments: child: The child job/dax/dag or id parent: The parent job/dax/dag or id edge_label: A label for the edge (optional) """ d = Dependency(parent, child, edge_label) self.addDependency(d) def addDependency(self, dep): """Add a dependency to the workflow The old way to call this method is no longer valid. Please change: adag.addDependency(parent="ID01", child="ID02", edge_label="E01") to be: adag.addDependency(Dependency(parent="ID01", child="ID02", edge_label="E01")) or: adag.depends(parent="ID01", child="ID02", edge_label="E01") """ if self.hasDependency(dep): raise DuplicateError("Duplicate dependency", dep) # Check the jobs if dep.parent not in self.jobs: raise NotFoundError("Parent not found", dep.parent) if dep.child not in self.jobs: raise NotFoundError("Child not found", dep.child) self.dependencies.add(dep) def hasDependency(self, dep): """Check to see if dependency exists""" return dep in self.dependencies def removeDependency(self, dep): """Remove dependency from workflow""" if not self.hasDependency(dep): raise NotFoundError("Dependency not found",dep) self.dependencies.remove(dep) def clearDependencies(self): """Remove all dependencies""" self.dependencies.clear() def toXML(self): """Get the XML string for this ADAG This is primarily intended for testing. If you have a large ADAG you should use writeXML instead. 
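
        A short illustrative example:

            xml = adag.toXML()         # whole DAX as one in-memory string
            adag.writeXML(sys.stdout)  # preferred for large workflows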
""" s = StringIO() self.writeXML(s) xml = s.getvalue() s.close() return xml def writeXMLFile(self, filename): """Write the ADAG to an XML file""" file = codecs.open(filename, "w", "utf-8") self.writeXML(file) file.close() def writeXML(self, out): """Write the ADAG as XML to a stream""" # Preamble out.write('\n') # Metadata out.write('\n' % datetime.datetime.now()) out.write('\n' % pwd.getpwuid(os.getuid())[0]) out.write('\n') # Open tag out.write('\n') # Invocations for i in self.invocations: out.write('\t') i.toXML().write(stream=out, level=1) out.write('\n') # Files for f in self.files: out.write('\t') f.toXML().write(stream=out, level=1) out.write('\n') # Executables for e in self.executables: out.write('\t') e.toXML().write(stream=out, level=1) out.write('\n') # Transformations for t in self.transformations: out.write('\t') t.toXML().write(stream=out, level=1) out.write('\n') # Jobs keys = self.jobs.keys() keys.sort() for job_id in keys: job = self.jobs[job_id] out.write('\t') job.toXML().write(stream=out, level=1) out.write('\n') # Dependencies # Since we store dependencies as tuples, but we need to print them as nested elements # we first build a map of all the children that maps child -> [(parent,label),...] children = {} for dep in self.dependencies: if not dep.child in children: children[dep.child] = [] children[dep.child].append((dep.parent, dep.edge_label)) # Now output all the xml in sorted order by child, then parent keys = children.keys() keys.sort() for child in keys: out.write('\t') c = Element("child",[("ref",child)]) parents = children[child] parents.sort() for parent, edge_label in parents: p = Element("parent",[ ("ref", parent), ("edge-label", edge_label) ]) c.element(p) c.write(stream=out, level=1) out.write('\n') # Close tag out.write('\n') def parseString(string): s = StringIO(string) return parse(s) def parse(infile): try: import xml.etree.cElementTree as etree except: try: import xml.etree.ElementTree as etree except: try: import elementtree.ElementTree as etree except: raise Exception("Please install elementtree") NS = "{http://pegasus.isi.edu/schema/DAX}" def QN(tag): return NS+tag def badattr(e, exc): return ParseError("Attribute '%s' is required for element %s" % (exc.args[0], e.tag)) def parse_invoke(e): try: return Invoke(when=e.attrib["when"], what=e.text) except KeyError, ke: raise badattr(e, ke) def parse_adag(e): try: name = e.attrib['name'] count = e.get("count", None) index = e.get("index", None) return ADAG(name=name, count=count, index=index) except KeyError, ke: raise badattr(e, ke) def parse_profile(e): try: return Profile( namespace=e.attrib["namespace"], key=e.attrib["key"], value=e.text) except KeyError, ke: raise badattr(e, ke) def parse_metadata(e): try: return Metadata( key=e.attrib['key'], type=e.attrib['type'], value=e.text) except KeyError, ke: raise badattr(e, ke) def parse_pfn(e): try: p = PFN( url=e.attrib['url'], site=e.get("site", None) ) except KeyError, ke: raise badattr(e, ke) for pr in e.findall(QN("profile")): p.addProfile(parse_profile(pr)) return p def parse_catalog(e, f): for p in e.findall(QN("profile")): f.addProfile(parse_profile(p)) for m in e.findall(QN("metadata")): f.addMetadata(parse_metadata(m)) for p in e.findall(QN("pfn")): f.addPFN(parse_pfn(p)) return f def parse_file(e): try: f = File(e.attrib['name']) except KeyError, ke: raise badattr(e, ke) return parse_catalog(e, f) def parse_executable(e): try: exe = Executable( name=e.attrib['name'], namespace=e.get("namespace", None), version=e.get("version", None), 
arch=e.get("arch", None), os=e.get("os", None), osrelease=e.get("osrelease", None), osversion=e.get("osversion", None), glibc=e.get("glibc", None), installed=e.get("installed", None) ) except KeyError, ke: raise badattr(e, ke) parse_catalog(e, exe) for i in e.findall(QN("invoke")): exe.addInvoke(parse_invoke(i)) return exe def parse_uses(e): try: return Use( e.attrib['name'], namespace = e.get('namespace', None), version = e.get('version', None), link = e.get('link', None), register = e.get('register', None), transfer = e.get('transfer', None), optional = e.get('optional', None), executable = e.get('executable', None) ) except KeyError, ke: raise badattr(e, ke) def parse_transformation(e): try: t = Transformation( namespace=e.get("namespace", None), name=e.attrib['name'], version=e.get("version", None)) except KeyError, ke: raise badattr(e, ke) for u in e.findall(QN("uses")): t.addUse(parse_uses(u)) for i in e.findall(QN("invoke")): t.addInvoke(parse_invoke(i)) return t def iterelem(e): if e.text: yield e.text for f in e: if f.text: yield f.text yield f if f.tail: yield f.tail def parse_absjob(e, j): args = e.find(QN("argument")) if args is not None: for i in iterelem(args): if isinstance(i, basestring): j.addRawArguments(i) else: j.addRawArguments(File(i.attrib['name'])) try: s = e.find(QN("stdin")) if s is not None: j.setStdin(s.attrib['name']) s = e.find(QN("stdout")) if s is not None: j.setStdout(s.attrib['name']) s = e.find(QN("stderr")) if s is not None: j.setStderr(s.attrib['name']) except KeyError, ke: raise badattr(s, ke) for p in e.findall(QN("profile")): j.addProfile(parse_profile(p)) for u in e.findall(QN("uses")): j.addUse(parse_uses(u)) for i in e.findall(QN("invoke")): j.addInvoke(parse_invoke(i)) return j def parse_job(e): try: j = Job( name=e.attrib["name"], id=e.attrib["id"], namespace=e.get("namespace", None), version=e.get("version", None), node_label=e.get("node-label", None) ) except KeyError, ke: raise badattr(e, ke) return parse_absjob(e, j) def parse_dax(e): try: d = DAX( file=e.attrib["file"], id=e.attrib["id"], node_label=e.get("node-label", None) ) except KeyError, ke: raise badattr(e, ke) return parse_absjob(e, d) def parse_dag(e): try: d = DAG( file=e.attrib["file"], id=e.attrib["id"], node_label=e.get("node-label", None) ) except KeyError, ke: raise badattr(e, ke) return parse_absjob(e, d) def parse_dependencies(e): try: child = e.attrib["ref"] except KeyError, ke: raise badattr(e, ke) for p in e.findall(QN("parent")): try: parent = p.attrib["ref"] label = p.attrib.get("edge-label", None) yield Dependency(parent, child, label) except KeyError, ke: raise badattr(p, ke) # We use iterparse because we don't have to read in the # entire document iterator = etree.iterparse(infile, events=("start", "end")) iterator = iter(iterator) # Get the document element (should be ) event, root = iterator.next() adag = parse_adag(root) # This function reads all the children of "node" def expand(node): event, elem = iterator.next() while elem != node: event, elem = iterator.next() # We clear the document element to prevent # the memory usage from growing root.clear() for ev, elem in iterator: if ev == "end": continue # Read in the entire element and children expand(elem) if elem.tag == QN("job"): j = parse_job(elem) adag.addJob(j) elif elem.tag == QN("child"): for d in parse_dependencies(elem): adag.addDependency(d) elif elem.tag == QN("file"): f = parse_file(elem) adag.addFile(f) elif elem.tag == QN("executable"): e = parse_executable(elem) adag.addExecutable(e) elif elem.tag 
== QN("transformation"): t = parse_transformation(elem) adag.addTransformation(t) elif elem.tag == QN("dag"): d = parse_dag(elem) adag.addJob(d) elif elem.tag == QN("dax"): d = parse_dax(elem) adag.addJob(d) elif elem.tag == QN("invoke"): adag.addInvoke(parse_invoke(elem)) else: raise ParseError("Unknown tag", elem.tag) return adag def main(): """Simple smoke test""" # Create a DAX diamond = ADAG("diamond") # Add input file to the DAX-level replica catalog a = File("f.a") a.addPFN(PFN("gsiftp://site.com/inputs/f.a","site")) diamond.addFile(a) # Add executables to the DAX-level replica catalog e_preprocess = Executable(namespace="diamond", name="preprocess", version="4.0", os="linux", arch="x86_64") e_preprocess.addPFN(PFN("gsiftp://site.com/bin/preprocess","site")) diamond.addExecutable(e_preprocess) e_findrange = Executable(namespace="diamond", name="findrange", version="4.0", os="linux", arch="x86_64") e_findrange.addPFN(PFN("gsiftp://site.com/bin/findrange","site")) diamond.addExecutable(e_findrange) e_analyze = Executable(namespace="diamond", name="analyze", version="4.0", os="linux", arch="x86_64") e_analyze.addPFN(PFN("gsiftp://site.com/bin/analyze","site")) diamond.addExecutable(e_analyze) # Add a preprocess job preprocess = Job(e_preprocess) b1 = File("f.b1") b2 = File("f.b2") preprocess.addArguments("-a preprocess","-T60","-i",a,"-o",b1,b2) preprocess.uses(a, link=Link.INPUT) preprocess.uses(b1, link=Link.OUTPUT, transfer=True) preprocess.uses(b2, link=Link.OUTPUT, transfer=True) diamond.addJob(preprocess) # Add left Findrange job frl = Job(e_findrange) c1 = File("f.c1") frl.addArguments("-a findrange","-T60","-i",b1,"-o",c1) frl.uses(b1, link=Link.INPUT) frl.uses(c1, link=Link.OUTPUT, transfer=True) diamond.addJob(frl) # Add right Findrange job frr = Job(e_findrange) c2 = File("f.c2") frr.addArguments("-a findrange","-T60","-i",b2,"-o",c2) frr.uses(b2, link=Link.INPUT) frr.uses(c2, link=Link.OUTPUT, transfer=True) diamond.addJob(frr) # Add Analyze job analyze = Job(e_analyze) d = File("f.d") analyze.addArguments("-a analyze","-T60","-i",c1,c2,"-o",d) analyze.uses(c1, link=Link.INPUT) analyze.uses(c2, link=Link.INPUT) analyze.uses(d, link=Link.OUTPUT, transfer=True, register=True) diamond.addJob(analyze) # Add dependencies diamond.depends(parent=preprocess, child=frl) diamond.depends(parent=preprocess, child=frr) diamond.depends(parent=frl, child=analyze) diamond.depends(parent=frr, child=analyze) # Get generated diamond dax import sys diamond.writeXML(sys.stdout) if __name__ == '__main__': main()pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/test/0000755000175000017500000000000011757531667022170 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/test/dax33test.py0000644000175000017500000001047311757531137024361 0ustar ryngerynge# This is a sort-of kitchen sink dax generator for validating that # the API produces valid XML for all elements according to the # new DAX 3.3 schema. It tries to generate XML to cover every part # of the schema. 
import sys from Pegasus.DAX3 import * # Create a DAX diamond = ADAG("diamond", index=1, count=10) diamond.invoke(what="what", when="when") # Add input file to the DAX-level replica catalog a = File("f.a") a.profile("pegasus","foobar","true") a.PFN("gsiftp://site.com/inputs/f.a","site") diamond.addFile(a) cfg = File("config.ini") cfg.metadata("size","int","10") diamond.addFile(cfg) # Add executables to the DAX-level replica catalog e_preprocess = Executable(namespace="diamond", name="preprocess", version="4.0", os="linux", osrelease="5", glibc="3.3", arch="x86_64", installed=True, osversion="2.6") e_preprocess.profile("pegasus", "barfoo", "false") e_preprocess.metadata("size","int",100) pfn = PFN("gsiftp://site.com/bin/preprocess","site") pfn.profile("pegasus", "baz", "abcd") e_preprocess.addPFN(pfn) e_preprocess.invoke(what="what", when="when") diamond.addExecutable(e_preprocess) e_findrange = Executable(namespace="diamond", name="findrange", version="4.0", os="linux", arch="x86_64") e_findrange.addPFN(PFN("gsiftp://site.com/bin/findrange","site")) diamond.addExecutable(e_findrange) e_analyze = Executable(namespace="diamond", name="analyze", version="4.0", os="linux", arch="x86_64") e_analyze.addPFN(PFN("gsiftp://site.com/bin/analyze","site")) diamond.addExecutable(e_analyze) # Add transformations to the DAX-level transformation catalog t_preprocess = Transformation(e_preprocess) t_preprocess.invoke(what="what", when="when") t_preprocess.uses(cfg) diamond.addTransformation(t_preprocess) t_findrange = Transformation(e_findrange) t_findrange.uses(cfg) diamond.addTransformation(t_findrange) t_analyze = Transformation(e_analyze) t_analyze.uses(cfg) diamond.addTransformation(t_analyze) # Add a preprocess job preprocess = Job(t_preprocess) b1 = File("f.b1") b2 = File("f.b2") preprocess.addArguments("-a preprocess","-T60","-i",a,"-o",b1,b2) preprocess.profile("pegasus", "site", "local") preprocess.setStdin(File("stdin")) preprocess.setStdout(File("stdout")) preprocess.setStderr(File("stderr")) preprocess.uses(a, link=Link.INPUT, optional=True) preprocess.uses(b1, link=Link.OUTPUT, transfer=True, optional=True) preprocess.uses(b2, link=Link.OUTPUT, transfer=True, register=True) preprocess.uses(e_preprocess) preprocess.invoke(when="when", what="what") diamond.addJob(preprocess) # Add left Findrange job frl = Job(t_findrange, node_label="foo") c1 = File("f.c1") frl.addArguments("-a findrange","-T60","-i",b1,"-o",c1) diamond.addJob(frl) # Add right Findrange job frr = Job(t_findrange) c2 = File("f.c2") frr.addArguments("-a findrange","-T60","-i",b2,"-o",c2) frr.uses(b2, link=Link.INPUT) frr.uses(c2, link=Link.OUTPUT, transfer=True) diamond.addJob(frr) # Add Analyze job analyze = Job(t_analyze) d = File("f.d") analyze.addArguments("-a analyze","-T60","-i",c1,c2,"-o",d) analyze.uses(c1, link=Link.INPUT) analyze.uses(c2, link=Link.INPUT) analyze.uses(d, link=Link.OUTPUT, transfer=True, register=True) diamond.addJob(analyze) dax = DAX("file.dax", node_label="apple") dax.addArguments("-Dpegasus.properties=foobar") dax.profile("pegasus", "site", "local") dax.setStdin(File("stdin")) dax.setStdout(File("stdout")) dax.setStderr(File("stderr")) dax.uses(a, link=Link.INPUT, optional=True) dax.uses(b1, link=Link.OUTPUT, transfer=True, optional=True) dax.uses(b2, link=Link.OUTPUT, transfer=True, register=True) dax.uses(e_preprocess) dax.invoke(when="when", what="what") diamond.addJob(dax) dag = DAG("file.dag", node_label="pear") dag.addArguments("-Dpegasus.properties=foobar") dag.profile("pegasus", "site", 
"local") dag.setStdin(File("stdin")) dag.setStdout(File("stdout")) dag.setStderr(File("stderr")) dag.uses(a, link=Link.INPUT, optional=True) dag.uses(b1, link=Link.OUTPUT, transfer=True, optional=True) dag.uses(b2, link=Link.OUTPUT, transfer=True, register=True) dag.uses(e_preprocess) dag.invoke(when="when", what="what") diamond.addDAG(dag) # Add dependencies diamond.depends(parent=preprocess, child=frl, edge_label="foobar") diamond.depends(parent=preprocess, child=frr) diamond.depends(parent=frl, child=analyze) diamond.depends(parent=frr, child=analyze) # Get generated diamond dax import sys diamond.writeXML(sys.stdout) pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/test/dax33test.xml0000644000175000017500000001155511757531137024533 0ustar ryngerynge what 10 true false 100 abcd what what -a preprocess -T60 -i -o local what -a findrange -T60 -i -o -a findrange -T60 -i -o -a analyze -T60 -i -o -Dpegasus.properties=foobar local what -Dpegasus.properties=foobar local what pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/test/TestDAX3.py0000644000175000017500000013505711757531137024104 0ustar ryngeryngeimport unittest from Pegasus.DAX3 import * from Pegasus.DAX3 import Element, CatalogType import sys import os DIR = os.path.dirname(__file__) DIAMOND_DAX = os.path.join(DIR, "diamond.xml") DAX33TEST_DAX = os.path.join(DIR, "dax33test.xml") class TestElement(unittest.TestCase): def testSimple(self): x = Element("x") self.assertEquals(str(x), '') def testElement(self): x = Element("x") y = Element("y") x.element(y) self.assertEquals(str(x), '\n\t\n') def testText(self): x = Element("x") x.text("y") self.assertEquals(str(x), '\n\ty\n') def testUnicode(self): x = Element("x") x.comment(u'\u03a3') x.flatten() self.assertEquals(unicode(x), u'') x = Element(u'\u03a3') self.assertEquals(unicode(x), u'<\u03a3/>') x = Element('x', [(u'\u03a3', 'foo')]) self.assertEquals(unicode(x), u'') x = Element('x', [('foo', u'\u03a3')]) self.assertEquals(unicode(x), u'') x = Element('x') x.text(u'\u03a3') x.flatten() self.assertEquals(unicode(x), u'\u03a3') def testFlatten(self): x = Element("x") x.text("y") x.flatten() self.assertEquals(str(x), 'y') def testComment(self): x = Element("x") x.comment("test") self.assertEquals(str(x), '\n\t\n') class TestMetadata(unittest.TestCase): def testConstructor(self): """Metadata constructor should only allow valid values""" m = Metadata("key","type","value") self.assertEquals(m.key, "key") self.assertEquals(m.type, "type") self.assertEquals(m.value, "value") self.assertRaises(FormatError, Metadata, None, "type", "value") self.assertRaises(FormatError, Metadata, "key", None, "value") self.assertRaises(FormatError, Metadata, "key", "type", None) def testEqual(self): """Equal Metadata should have the same key""" a = Metadata("key","type","value") b = Metadata("key","type","value1") c = Metadata("key","type1","value") d = Metadata("key1","type","value") self.assertTrue(a == b) self.assertTrue(a == c) self.assertFalse(a == d) self.assertTrue(b == c) self.assertFalse(b == d) self.assertFalse(c == d) def testXML(self): """toXML should output properly formatted XML""" a = Metadata("key","type","value") self.assertEquals(str(a.toXML()), 'value') class TestPFN(unittest.TestCase): def testConstructor(self): """PFN constructor should only allow valid values""" a = PFN("url","site") self.assertEquals(a.url, "url") self.assertEquals(a.site, "site") self.assertRaises(FormatError, PFN, None) self.assertRaises(FormatError, PFN, "url", None) self.assertRaises(FormatError, PFN, "") 
self.assertRaises(FormatError, PFN, "url", "") def testEqual(self): """Equal PFNs should have the same URL and site""" a = PFN("http://abc","a") b = PFN("http://abc","a") c = PFN("http://abc","b") d = PFN("http://cde","a") self.assertTrue(a == b) self.assertFalse(a == c) self.assertFalse(a == d) def testProfiles(self): """PFNs should handle profile properly""" c = PFN("http","a") p = Profile("ns","name","value") self.assertFalse(c.hasProfile(p)) c.addProfile(p) self.assertRaises(DuplicateError, c.addProfile, p) self.assertTrue(c.hasProfile(p)) c.removeProfile(p) self.assertFalse(c.hasProfile(p)) self.assertRaises(NotFoundError, c.removeProfile, p) c.addProfile(p) c.clearProfiles() self.assertFalse(c.hasProfile(p)) def testXML(self): """toXML should output properly formatted XML""" a = PFN("http://abc", "a") self.assertEquals(unicode(a.toXML()), '') a.addProfile(Profile("ns","name","value")) self.assertEquals(str(a.toXML()), '\n\tvalue\n') class TestProfile(unittest.TestCase): def testConstructor(self): a = Profile("ns","key","value") self.assertEquals(a.namespace,"ns") self.assertEquals(a.key,"key") self.assertEquals(a.value,"value") def testEqual(self): """Equal profiles should have the same (ns, key)""" a = Profile("ns","key","value") b = Profile("ns","key","value") c = Profile("ns","key","value1") d = Profile("ns","key1","value") e = Profile("ns1","key","value") self.assertTrue(a == b) self.assertTrue(a == c) self.assertTrue(b == c) self.assertFalse(a == d) self.assertFalse(a == e) self.assertFalse(d == e) def testXML(self): """toXML should output properly formatted XML""" a = Profile("ns","key","value") self.assertEquals(str(a.toXML()),'value') class TestCatalogType(unittest.TestCase): def testConstructor(self): """Catalog types require a name""" t = CatalogType("name") self.assertEquals(t.name, "name") self.assertRaises(FormatError, CatalogType, None) self.assertRaises(FormatError, CatalogType, "") def testProfile(self): """Should be able to add/remove/has profiles""" c = CatalogType("name") p = Profile("ns","name","value") self.assertFalse(c.hasProfile(p)) c.addProfile(p) self.assertRaises(DuplicateError, c.addProfile, p) self.assertTrue(c.hasProfile(p)) c.removeProfile(p) self.assertFalse(c.hasProfile(p)) self.assertRaises(NotFoundError, c.removeProfile, p) c.addProfile(p) c.clearProfiles() self.assertFalse(c.hasProfile(p)) def testMetadata(self): """Should be able to add/remove/has metadata""" c = CatalogType("name") p = Metadata("key","type","value") self.assertFalse(c.hasMetadata(p)) c.addMetadata(p) self.assertRaises(DuplicateError, c.addMetadata, p) self.assertTrue(c.hasMetadata(p)) c.removeMetadata(p) self.assertFalse(c.hasMetadata(p)) self.assertRaises(NotFoundError, c.removeMetadata, p) c.addMetadata(p) c.clearMetadata() self.assertFalse(c.hasMetadata(p)) def testPFN(self): "Should be able to add/remove/has PFNs" c = CatalogType("name") p = PFN("url","site") self.assertFalse(c.hasPFN(p)) c.addPFN(p) self.assertRaises(DuplicateError, c.addPFN, p) self.assertTrue(c.hasPFN(p)) c.removePFN(p) self.assertFalse(c.hasPFN(p)) self.assertRaises(NotFoundError, c.removePFN, p) c.addPFN(p) c.clearPFNs() self.assertFalse(c.hasPFN(p)) class TestFile(unittest.TestCase): def testEqual(self): """Equal files should have the same name""" a = File("a") b = File("a") c = File("b") self.assertTrue(a==b) self.assertFalse(a==c) def testXML(self): """toXML should output proper XML with nested elements""" c = File("name") self.assertEquals(str(c.toXML()), '') # Profile 
c.addProfile(Profile("ns","key","value")) self.assertEquals(str(c.toXML()), '\n\tvalue\n') c.clearProfiles() # Metadata c.addMetadata(Metadata("key","type","value")) self.assertEquals(str(c.toXML()), '\n\tvalue\n') c.clearMetadata() # PFN c.addPFN(PFN("url","site")) self.assertEquals(str(c.toXML()), '\n\t\n') def testArgumentXML(self): """toArgumentXML should never include inner elements""" c = File("name") self.assertEquals(str(c.toArgumentXML()), '') c.addProfile(Profile("ns","key","value")) c.addMetadata(Metadata("key","type","value")) c.addPFN(PFN("url","site")) self.assertEquals(str(c.toArgumentXML()), '') def testStdioXML(self): """toStdioXML should return proper xml for the supported stdio tags""" f = File("name") f.addProfile(Profile("ns","key","value")) f.addMetadata(Metadata("key","type","value")) f.addPFN(PFN("url","site")) self.assertEquals(str(f.toStdioXML("stdin")), '') self.assertEquals(str(f.toStdioXML("stdout")), '') self.assertEquals(str(f.toStdioXML("stderr")), '') self.assertRaises(FormatError, f.toStdioXML, "other") class TestExecutable(unittest.TestCase): def testEqual(self): """Equal Executables have the same namespace,name,version,os,arch,osrelease,osversion,glibc,installed""" a = Executable("grep") b = Executable("grep") c = Executable(namespace="os",name="grep") d = Executable(namespace="os",name="grep",version="2.3") e = Executable(namespace="os",name="grep",version="2.3",arch=Arch.X86) f = Executable(namespace="os",name="grep",version="2.3",arch=Arch.X86,os=OS.LINUX) g = Executable(namespace="os",name="grep",version="2.3",arch=Arch.X86,os=OS.LINUX,osrelease="foo") h = Executable(namespace="os",name="grep",version="2.3",arch=Arch.X86,os=OS.LINUX,osrelease="foo",osversion="bar") i = Executable(namespace="os",name="grep",version="2.3",arch=Arch.X86,os=OS.LINUX,osrelease="foo",osversion="bar",glibc="2.4") j = Executable(namespace="os",name="grep",version="2.3",arch=Arch.X86,os=OS.LINUX,osrelease="foo",osversion="bar",glibc="2.4",installed=True) self.assertTrue(a == b) self.assertFalse(b == c) self.assertFalse(c == d) self.assertFalse(b == c) self.assertFalse(d == e) self.assertFalse(e == f) self.assertFalse(f == g) self.assertFalse(g == h) self.assertFalse(h == i) self.assertFalse(i == j) for x in [a,b,c,d,e,f,g,h,i,j]: self.assertTrue(x == x) def testInvoke(self): """Transformations should support invoke""" c = Executable('myjob') p = Invoke("when","what") self.assertFalse(c.hasInvoke(p)) c.addInvoke(p) self.assertRaises(DuplicateError, c.addInvoke, p) self.assertTrue(c.hasInvoke(p)) c.removeInvoke(p) self.assertFalse(c.hasInvoke(p)) self.assertRaises(NotFoundError, c.removeInvoke, p) c.addInvoke(p) c.clearInvokes() self.assertFalse(c.hasInvoke(p)) c.invoke("when","what") self.assertTrue(c.hasInvoke(p)) def testXML(self): """toXML should output proper xml""" x = Executable(namespace="os",name="grep",version="2.3",arch=Arch.X86,os=OS.LINUX,osrelease="foo",osversion="bar",glibc="2.4",installed=True) self.assertEquals(str(x.toXML()), '') x.invoke("when","what") self.assertEquals(str(x.toXML()), '\n\twhat\n') class TestUse(unittest.TestCase): def testConstructor(self): """Constructor should only allow valid objects""" Use("name") Use("name", namespace="ns") Use("name", version="version") Use("name", register=True) Use("name", transfer=True) Use("name", link="link") Use("name", executable=True) Use("name", optional=True) self.assertRaises(FormatError, Use, None) def testEquals(self): """Equal uses have the same (namespace, name, version)""" a = Use("name", 
namespace="ns", version="version") b = Use("name", namespace="ns", version="version") c = Use("name", namespace="ns", version="version1") d = Use("name", namespace="ns1", version="version") e = Use("name1", namespace="ns", version="version") f = Use("name", namespace="ns", version="version", transfer=True) self.assertTrue(a == b) self.assertFalse(a == c) self.assertFalse(a == d) self.assertFalse(a == e) self.assertTrue(a == f) def testJobXML(self): """Use.toXML should output properly formatted XML""" a = Use("name", namespace="ns", version="version") self.assertEquals(str(a.toJobXML()), '') a = Use("name", version="version") self.assertEquals(str(a.toJobXML()), '') a = Use("name") self.assertEquals(str(a.toJobXML()), '') a = Use("name", version="version", transfer=True) self.assertEquals(str(a.toJobXML()), '') a = Use("name", version="version", transfer=True, register=False) self.assertEquals(str(a.toJobXML()), '') a = Use("name", link="link", register="true", transfer="true", optional=True, namespace="ns", version="10", executable=True) self.assertEquals(str(a.toJobXML()), '') def testTransformationXML(self): """Use.toXML should output properly formatted XML""" a = Use("name", namespace="ns", version="version") self.assertEquals(str(a.toTransformationXML()), '') a = Use("name", version="version") self.assertEquals(str(a.toTransformationXML()), '') a = Use("name") self.assertEquals(str(a.toTransformationXML()), '') a = Use("name", version="version", transfer=True) self.assertEquals(str(a.toTransformationXML()), '') a = Use("name", version="version", transfer=True, register=False) self.assertEquals(str(a.toTransformationXML()), '') a = Use("name", link="link", register="true", transfer="true", optional=True, namespace="ns", version="10", executable=True) self.assertEquals(str(a.toTransformationXML()), '') class TestTransformation(unittest.TestCase): def testConstructor(self): t = Transformation("name","namespace","version") self.assertEquals(t.name, "name") self.assertEquals(t.namespace, "namespace") self.assertEquals(t.version, "version") def testExecutable(self): e = Executable("name",namespace="ns",version="version") t = Transformation(e) self.assertEquals(t.name,e.name) self.assertEquals(t.namespace,e.namespace) self.assertEquals(t.version,e.version) def testUse(self): """Transformations should allow Use objects""" u = Use("name",namespace="namespace",version="version",register=True,transfer=True) t = Transformation("xform") t.addUse(u) self.assertRaises(DuplicateError, t.addUse, u) self.assertTrue(t.hasUse(u)) t.removeUse(u) self.assertRaises(NotFoundError, t.removeUse, u) self.assertFalse(t.hasUse(u)) t.addUse(u) t.clearUses() self.assertFalse(t.hasUse(u)) t.uses("name",namespace="namespace",version="version",register=True,transfer=True) self.assertTrue(t.hasUse(u)) def testInvoke(self): """Transformations should support invoke""" c = Transformation('myjob') p = Invoke("when","what") self.assertFalse(c.hasInvoke(p)) c.addInvoke(p) self.assertRaises(DuplicateError, c.addInvoke, p) self.assertTrue(c.hasInvoke(p)) c.removeInvoke(p) self.assertFalse(c.hasInvoke(p)) self.assertRaises(NotFoundError, c.removeInvoke, p) c.addInvoke(p) c.clearInvokes() self.assertFalse(c.hasInvoke(p)) c.invoke("when","what") self.assertTrue(c.hasInvoke(p)) def testUsesFile(self): """uses should accept File as an argument""" c = Transformation('myjob') c.uses(File("filename")) self.assertEquals(str(c.toXML()), '\n\t\n') def testUsesExecutable(self): """Use should accept Executable as an argument""" c = 
Transformation('myjob') e = Executable(name="exe", namespace="ns", version="1.0") c.uses(e) self.assertEquals(str(c.toXML()), '\n\t\n') c.clearUses() c.uses(e, namespace="alt") self.assertEquals(str(c.toXML()), '\n\t\n') c.clearUses() c.uses(e, version="alt") self.assertEquals(str(c.toXML()), '\n\t\n') c.clearUses() c.uses(e, register=True) self.assertEquals(str(c.toXML()), '\n\t\n') c.clearUses() def testXML(self): t = Transformation("name","namespace","version") self.assertEquals(str(t.toXML()), '') t.uses("name",namespace="ns",version="ver",executable=True) self.assertEquals(str(t.toXML()), '\n\t\n') t.clearUses() t.uses(Executable(name="name",namespace="ns",version="ver")) self.assertEquals(str(t.toXML()), '\n\t\n') t.clearUses() t.uses(File(name="filename"),link="input", transfer=True, register=True) self.assertEquals(str(t.toXML()), '\n\t\n') t.clearUses() t.invoke("when","what") self.assertEquals(str(t.toXML()), '\n\twhat\n') class TestInvoke(unittest.TestCase): def testConstructor(self): """Invoke requires valid when and what""" Invoke("when","what") self.assertRaises(FormatError, Invoke, "when", None) self.assertRaises(FormatError, Invoke, None, "what") self.assertRaises(FormatError, Invoke, "", "what") self.assertRaises(FormatError, Invoke, "when", "") def testEqual(self): """Invoke objects are equal when they have the same when and what""" a = Invoke("when","what") b = Invoke("when","what") c = Invoke("when","what1") d = Invoke("when1","what") e = Invoke("when1","what1") self.assertTrue(a == b) self.assertFalse(a == c) self.assertFalse(a == d) self.assertFalse(a == e) class TestJob(unittest.TestCase): def testConstructor(self): """Should be able to create a job using n+ns+ver or Transformation""" self.assertRaises(FormatError, Job, None) self.assertRaises(FormatError, Job, "") j = Job('myjob',namespace="ns",version="2",node_label="label") self.assertEquals(j.name,'myjob') self.assertEquals(j.namespace,'ns') self.assertEquals(j.version,'2') self.assertEquals(j.node_label,'label') j = Job(Transformation('myxform')) self.assertEquals(j.name,'myxform') j = Job(Transformation('myxform',version="1"),version="2") self.assertEquals(j.version,"2") j = Job(Transformation('myxform',namespace="ns1"),namespace="ns2") self.assertEquals(j.namespace,"ns2") def testStd(self): """Should be able to set stdin/out/err using File or string""" j = Job('myjob') j.setStdout(File("stdout")) self.assertEquals(j.stdout, File("stdout")) j.setStdin(File("stdin")) self.assertEquals(j.stdin, File("stdin")) j.setStderr(File("stderr")) self.assertEquals(j.stderr, File("stderr")) j.setStdout("stdout") self.assertEquals(j.stdout, File("stdout")) j.setStdin("stdin") self.assertEquals(j.stdin, File("stdin")) j.setStderr("stderr") self.assertEquals(j.stderr, File("stderr")) def testProfile(self): """Jobs should support profiles""" c = Job('myjob') p = Profile("ns","name","value") self.assertFalse(c.hasProfile(p)) c.addProfile(p) self.assertRaises(DuplicateError, c.addProfile, p) self.assertTrue(c.hasProfile(p)) c.removeProfile(p) self.assertFalse(c.hasProfile(p)) self.assertRaises(NotFoundError, c.removeProfile, p) c.addProfile(p) c.clearProfiles() self.assertFalse(c.hasProfile(p)) def testUse(self): """Jobs should allow Use objects""" u = Use("name",namespace="namespace",version="version",register=True,transfer=True) t = Job("xform") t.addUse(u) self.assertRaises(DuplicateError, t.addUse, u) self.assertTrue(t.hasUse(u)) t.removeUse(u) self.assertRaises(NotFoundError, t.removeUse, u) self.assertFalse(t.hasUse(u)) 
t.addUse(u) t.clearUses() self.assertFalse(t.hasUse(u)) t.uses("name",namespace="namespace",version="version",register=True,transfer=True) self.assertTrue(t.hasUse(u)) def testArguments(self): j = Job('myjob') # Regular arguments j.addArguments('a','b','c') j.addArguments('d',u'e') self.assertEquals(j.getArguments(), 'a b c d e') j.clearArguments() # File arguments f = File("name") g = File("name2") j.addArguments('a',f,'b',g) self.assertEquals(j.getArguments(), 'a b ') j.clearArguments() # Quoted strings j.addArguments('a','"gideon is cool"','b',"'apple bananna'") self.assertEquals(j.getArguments(), 'a "gideon is cool" b \'apple bananna\'') j.clearArguments() # Non-string arguments e = Executable("exe") self.assertRaises(FormatError, j.addArguments, e) self.assertRaises(FormatError, j.addArguments, 1) self.assertRaises(FormatError, j.addArguments, 1.0) def testInvoke(self): """Jobs should support invoke""" c = Job('myjob') p = Invoke("when","what") self.assertFalse(c.hasInvoke(p)) c.addInvoke(p) self.assertRaises(DuplicateError, c.addInvoke, p) self.assertTrue(c.hasInvoke(p)) c.removeInvoke(p) self.assertFalse(c.hasInvoke(p)) self.assertRaises(NotFoundError, c.removeInvoke, p) c.addInvoke(p) c.clearInvokes() self.assertFalse(c.hasInvoke(p)) c.invoke("when","what") self.assertTrue(c.hasInvoke(p)) def testUsesFile(self): """uses should accept File as an argument""" c = Job('myjob') c.uses(File("filename")) self.assertEquals(str(c.toXML()), '\n\t\n') def testUsesExecutable(self): """Use should accept Executable as an argument""" c = Job('myjob') e = Executable(name="exe", namespace="ns", version="1.0") c.uses(e) self.assertEquals(str(c.toXML()), '\n\t\n') c.clearUses() c.uses(e, namespace="alt") self.assertEquals(str(c.toXML()), '\n\t\n') c.clearUses() c.uses(e, version="alt") self.assertEquals(str(c.toXML()), '\n\t\n') c.clearUses() c.uses(e, register=True) self.assertEquals(str(c.toXML()), '\n\t\n') c.clearUses() def testXML(self): # Job element j = Job(name="name") self.assertEquals(str(j.toXML()), '') j = Job(name="name", id="id") self.assertEquals(str(j.toXML()), '') j = Job(name="name", id="id", namespace="ns") self.assertEquals(str(j.toXML()), '') j = Job(name="name", id="id", namespace="ns", version="version") self.assertEquals(str(j.toXML()), '') j = Job(name="name", id="id", namespace="ns", version="version", node_label="label") self.assertEquals(str(j.toXML()), '') # Arguments j = Job(name="name") j.addArguments('a') self.assertEquals(str(j.toXML()), '\n\ta\n') j.clearArguments() # File arguments j.addArguments(File("file")) self.assertEquals(str(j.toXML()), '\n\t\n') j.clearArguments() # Profiles j.addProfile(Profile("namespace","key","value")) self.assertEquals(str(j.toXML()), '\n\tvalue\n') j.clearProfiles() # Stdin/out/err j.setStdin(File("stdin")) self.assertEquals(str(j.toXML()), '\n\t\n') j.clearStdin() j.setStdout(File("stdout")) self.assertEquals(str(j.toXML()), '\n\t\n') j.clearStdout() j.setStderr(File("stderr")) self.assertEquals(str(j.toXML()), '\n\t\n') j.clearStderr() # Uses j.uses("name") self.assertEquals(str(j.toXML()), '\n\t\n') j.clearUses() # Invocations j.invoke("when","what") self.assertEquals(str(j.toXML()), '\n\twhat\n') j.clearInvokes() # Combined j = Job(name="name", id="id", namespace="ns", version="version", node_label="label") j.addArguments('-a',File("file")) j.addProfile(Profile("namespace","key","value")) j.setStdin(File("stdin")) j.setStdout(File("stdout")) j.setStderr(File("stderr")) j.uses("name", link="input", transfer=True, register=True) 
j.invoke("when","what") self.assertEquals(str(j.toXML()), ''' \t-a \tvalue \t \t \t \t \twhat ''') class TestDAG(unittest.TestCase): def testConstructor(self): DAG("file") DAG(File("file")) DAG("file",id="10") DAG("file",id="10",node_label="dag") self.assertRaises(FormatError, DAG, None) self.assertRaises(FormatError, DAG, "") def testXML(self): d = DAG("file") self.assertEquals(str(d.toXML()), '') d = DAG(File("file")) self.assertEquals(str(d.toXML()), '') d = DAG("file",id="10") self.assertEquals(str(d.toXML()), '') d = DAG("file",node_label="label") self.assertEquals(str(d.toXML()), '') class TestDAX(unittest.TestCase): def testConstructor(self): DAX("file") DAX(File("file")) DAX("file",id="10") DAX("file",id="10",node_label="dag") self.assertRaises(FormatError, DAX, None) self.assertRaises(FormatError, DAX, "") def testXML(self): d = DAX("file") self.assertEquals(str(d.toXML()), '') d = DAX(File("file")) self.assertEquals(str(d.toXML()), '') d = DAX("file",id="10") self.assertEquals(str(d.toXML()), '') d = DAX("file",node_label="label") self.assertEquals(str(d.toXML()), '') class TestDependency(unittest.TestCase): def testConstructor(self): """Constuctor should only allow valid dependencies""" # IDs are allowed Dependency("a","b") # Id must be valid self.assertRaises(FormatError, Dependency, "a", None) self.assertRaises(FormatError, Dependency, None, "b") self.assertRaises(FormatError, Dependency, "a", "") self.assertRaises(FormatError, Dependency, "", "b") # Jobs, DAGs and DAXes are allowed a = Job("a",id="ID01") b = Job("b",id="ID02") Dependency(a,b) a = DAG("a",id="ID01") b = DAG("b",id="ID02") Dependency(a,b) a = DAX("a",id="ID01") b = DAX("b",id="ID02") Dependency(a,b) # Job objects must have IDs a = Job("a") self.assertRaises(FormatError, Dependency, a, "ID01") self.assertRaises(FormatError, Dependency, "ID01", a) # No self-edges a = Job("a", id="ID01") self.assertRaises(FormatError, Dependency, a, a) def testEquals(self): """Equal dependencies have the same parent and child (but not edge label)""" a = Dependency("a","b") b = Dependency("a","b") c = Dependency("a","c") d = Dependency("c","b") self.assertTrue(a==b) self.assertFalse(a==c) self.assertFalse(a==d) class TestADAG(unittest.TestCase): def testConstructor(self): """Constructor should only allow valid ADAG objects""" self.assertRaises(FormatError, ADAG, None) self.assertRaises(FormatError, ADAG, "") a = ADAG("name",10,1) self.assertEquals(a.name,"name") self.assertEquals(a.index,1) self.assertEquals(a.count,10) def testNextJobID(self): """nextJobID() should always return a valid job ID""" a = ADAG("foo") self.assertEquals(a.nextJobID(),"ID0000001") self.assertEquals(a.nextJobID(),"ID0000002") self.assertEquals(a.nextJobID(),"ID0000003") a.addJob(Job("a",id="ID0000004")) self.assertEquals(a.nextJobID(),"ID0000005") a.addJob(Job("a",id="ID0000006")) a.addJob(Job("a",id="ID0000007")) a.addJob(Job("a",id="ID0000008")) self.assertEquals(a.nextJobID(),"ID0000009") def testJobs(self): """Should be able to add/remove/test for jobs/dags/daxes""" a = ADAG("adag") j = Job("job") self.assertTrue(j.id is None) a.addJob(j) self.assertTrue(j.id is not None) self.assertTrue(a.hasJob(j)) self.assertTrue(a.hasJob(j.id)) a.removeJob(j) self.assertFalse(a.hasJob(j)) self.assertFalse(a.hasJob(j.id)) a.addJob(j) self.assertTrue(a.hasJob(j)) a.removeJob(j.id) self.assertFalse(a.hasJob(j)) a.addJob(j) a.clearJobs() self.assertFalse(a.hasJob(j)) dax = DAX("dax") dag = DAG("dag") a.addJob(dax) a.addJob(dag) a.clearJobs() 
self.assertRaises(FormatError, a.addDAX, j) self.assertRaises(FormatError, a.addDAG, j) a.addDAX(dax) a.addDAG(dag) a.clearJobs() a.addJob(j) self.assertEquals(a.getJob(j.id), j) self.assertRaises(DuplicateError, a.addJob, j) a.clearJobs() self.assertRaises(NotFoundError, a.getJob, j) self.assertRaises(NotFoundError, a.removeJob, j) def testFiles(self): """Should be able to add/remove/test files in ADAG""" a = ADAG("adag") f = File("file") self.assertFalse(a.hasFile(f)) a.addFile(f) self.assertTrue(a.hasFile(f)) a.removeFile(f) self.assertFalse(a.hasFile(f)) a.addFile(f) self.assertTrue(a.hasFile(f)) a.clearFiles() self.assertFalse(a.hasFile(f)) a.addFile(f) self.assertRaises(DuplicateError, a.addFile, f) a.clearFiles() self.assertRaises(NotFoundError, a.removeFile, f) def testExecutables(self): """Should be able to add/remove/test executables in ADAG""" a = ADAG("adag") e = Executable("exe") self.assertFalse(a.hasExecutable(e)) a.addExecutable(e) self.assertTrue(a.hasExecutable(e)) a.removeExecutable(e) self.assertFalse(a.hasExecutable(e)) a.addExecutable(e) self.assertTrue(a.hasExecutable(e)) a.clearExecutables() self.assertFalse(a.hasExecutable(e)) a.addExecutable(e) self.assertRaises(DuplicateError, a.addExecutable, e) a.clearExecutables() self.assertRaises(NotFoundError, a.removeExecutable, e) def testTransformations(self): """Should be able to add/remove/clear/test transformations in ADAG""" a = ADAG("adag") t = Transformation("xform") self.assertFalse(a.hasTransformation(t)) a.addTransformation(t) self.assertTrue(a.hasTransformation(t)) a.removeTransformation(t) self.assertFalse(a.hasTransformation(t)) a.addTransformation(t) self.assertTrue(a.hasTransformation(t)) a.clearTransformations() self.assertFalse(a.hasTransformation(t)) a.addTransformation(t) self.assertRaises(DuplicateError, a.addTransformation, t) a.clearTransformations() self.assertRaises(NotFoundError, a.removeTransformation, t) def testDependencies(self): """Should be able to add/remove/clear/test dependencies in ADAG""" a = ADAG("adag") x = Job("x", id="ID01") y = Job("y", id="ID02") t = Dependency(x, y) self.assertRaises(NotFoundError, a.addDependency, t) a.addJob(x) self.assertRaises(NotFoundError, a.addDependency, t) a.addJob(y) a.addDependency(t) self.assertRaises(DuplicateError, a.addDependency, t) self.assertTrue(a.hasDependency(t)) a.removeDependency(t) self.assertFalse(a.hasDependency(t)) a.depends(parent=x, child=y) self.assertTrue(a.hasDependency(t)) a.clearDependencies() self.assertFalse(a.hasDependency(t)) self.assertRaises(NotFoundError, a.removeDependency, t) def testInvoke(self): """ADAGs should support invoke""" c = ADAG('adag') p = Invoke("when","what") self.assertFalse(c.hasInvoke(p)) c.addInvoke(p) self.assertRaises(DuplicateError, c.addInvoke, p) self.assertTrue(c.hasInvoke(p)) c.removeInvoke(p) self.assertFalse(c.hasInvoke(p)) self.assertRaises(NotFoundError, c.removeInvoke, p) c.addInvoke(p) c.clearInvokes() self.assertFalse(c.hasInvoke(p)) c.invoke("when","what") self.assertTrue(c.hasInvoke(p)) def testXML(self): """ADAGs should output properly-formatted XML""" c = ADAG('adag',count=10,index=1) self.assertEqualXML(c.toXML(),""" """) # Invoke c.invoke("when","what") self.assertEqualXML(c.toXML(),""" what """) c.clearInvokes() # File c.addFile(File("file")) self.assertEqualXML(c.toXML(),""" """) c.clearFiles() # Executable c.addExecutable(Executable("exe")) self.assertEqualXML(c.toXML(),""" """) c.clearExecutables() # Transformation c.addTransformation(Transformation("xform")) 
self.assertEqualXML(c.toXML(),""" """) c.clearTransformations() # Job c.addJob(Job("xform",id="ID01")) self.assertEqualXML(c.toXML(),""" """) c.clearJobs() # Dependency c.addJob(Job("xform",id="ID01")) c.addJob(Job("xform",id="ID02")) c.depends("ID02","ID01") self.assertEqualXML(c.toXML(),""" """) # All c.invoke("when","what") c.addFile(File("file")) c.addExecutable(Executable("exe")) c.addTransformation(Transformation("xform")) self.assertEqualXML(c.toXML(),""" what """) def testWriteFile(self): diamond = ADAG("diamond") diamond.addJob(Job(u"\u03a3cat")) diamond.writeXMLFile("/tmp/dax.xml") def testDiamond(self): """Compare generated DAX to reference DAX""" # Create a DAX diamond = ADAG("diamond") # Add input file to the DAX-level replica catalog a = File("f.a") a.addPFN(PFN("gsiftp://site.com/inputs/f.a","site")) diamond.addFile(a) # Add a config file for the transformations cfg = File("diamond.cfg") diamond.addFile(cfg) # Add executables to the DAX-level replica catalog e_preprocess = Executable(namespace="diamond", name="preprocess", version="4.0", os="linux", arch="x86_64") e_preprocess.addPFN(PFN("gsiftp://site.com/bin/preprocess","site")) diamond.addExecutable(e_preprocess) e_findrange = Executable(namespace="diamond", name="findrange", version="4.0", os="linux", arch="x86_64") e_findrange.addPFN(PFN("gsiftp://site.com/bin/findrange","site")) diamond.addExecutable(e_findrange) e_analyze = Executable(namespace="diamond", name="analyze", version="4.0", os="linux", arch="x86_64") e_analyze.addPFN(PFN("gsiftp://site.com/bin/analyze","site")) diamond.addExecutable(e_analyze) # Add transformations to the DAX-level transformation catalog t_preprocess = Transformation(e_preprocess) t_preprocess.uses(cfg) diamond.addTransformation(t_preprocess) t_findrange = Transformation(e_findrange) t_findrange.uses(cfg) diamond.addTransformation(t_findrange) t_analyze = Transformation(e_analyze) t_analyze.uses(cfg) diamond.addTransformation(t_analyze) # Add a preprocess job preprocess = Job(t_preprocess) b1 = File("f.b1") b2 = File("f.b2") preprocess.addArguments("-a preprocess","-T60","-i",a,"-o",b1,b2) preprocess.uses(a, link=Link.INPUT) preprocess.uses(b1, link=Link.OUTPUT, transfer=True) preprocess.uses(b2, link=Link.OUTPUT, transfer=True) diamond.addJob(preprocess) # Add left Findrange job frl = Job(t_findrange) c1 = File("f.c1") frl.addArguments("-a findrange","-T60","-i",b1,"-o",c1) frl.uses(b1, link=Link.INPUT) frl.uses(c1, link=Link.OUTPUT, transfer=True) diamond.addJob(frl) # Add right Findrange job frr = Job(t_findrange) c2 = File("f.c2") frr.addArguments("-a findrange","-T60","-i",b2,"-o",c2) frr.uses(b2, link=Link.INPUT) frr.uses(c2, link=Link.OUTPUT, transfer=True) diamond.addJob(frr) # Add Analyze job analyze = Job(t_analyze) d = File("f.d") analyze.addArguments("-a analyze","-T60","-i",c1,c2,"-o",d) analyze.uses(c1, link=Link.INPUT) analyze.uses(c2, link=Link.INPUT) analyze.uses(d, link=Link.OUTPUT, transfer=True, register=True) diamond.addJob(analyze) # Add dependencies diamond.depends(parent=preprocess, child=frl) diamond.depends(parent=preprocess, child=frr) diamond.depends(parent=frl, child=analyze) diamond.depends(parent=frr, child=analyze) # Get generated diamond dax left = diamond.toXML() # Get reference diamond dax right = open(DIAMOND_DAX).read() # For this test we sort because we don't really care about minor # ordering differences caused by the use of sets self.assertEqualXML(left, right, True) def simplifyXML(self, a): """Split XML into lines and remove comments, whitespace, 
and preprocessing tags""" a = [x.strip() for x in a.split('\n')] a = [x for x in a if x and not x.startswith(" -a preprocess -T60 -i -o -a findrange -T60 -i -o -a findrange -T60 -i -o -a analyze -T60 -i -o pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/0000755000175000017500000000000011757531667023570 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/plots/0000755000175000017500000000000011757531667024731 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/plots/workflow_info.py0000755000175000017500000004027411757531137030172 0ustar ryngerynge#!/usr/bin/env python """ Base class for storing workflow and job information """ ## # Copyright 2010-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision$ import os import re import sys import logging import optparse import math import tempfile import shutil # Initialize logging object logger = logging.getLogger() # Set default level to INFO logger.setLevel(logging.INFO) from Pegasus.plots_stats import utils as plot_utils from datetime import timedelta from datetime import datetime #---workflow information object class WorkflowInfo: def __init__(self): self.submit_dir = None self.wf_uuid = None self.parent_wf_uuid = None self.dax_label = None self.dag_label = None self.dax_file_path = None self.dag_file_path = None self.dagman_start_time = None self.workflow_run_time = None self.total_jobs= None self.total_job_instances = None self.total_tasks = None self.job_statistics_list =[] self.transformation_statistics_dict ={} self.host_job_map={} self.transformation_color_map={} self.job_instance_id_sub_wf_uuid_map ={} self.sub_wf_id_uuids = [] self.wf_env ={} self.wf_job_instances_over_time_statistics = {} self.wf_invocations_over_time_statistics = {} def get_formatted_host_data(self , extn = "html" ): """ Returns formatted host information data. 
""" # find the pretty print length host_info ='' for host_name , job_list in self.host_job_map.items(): # Case where host_name is not calculated if host_name is None: host_info += ( " \n{ \"name\":" + "\"Unknown\" , \"jobs\": [") else: host_info += ( " \n{ \"name\":" + "\"" + host_name+ "\" , \"jobs\": [") job_info = '' for job_stat in job_list: job_stat_det = job_stat.getJobDetails(self.dagman_start_time) job_info +=("\n\t{") job_info +=( "\n\t") job_info += ( "\"name\":" + "\"" + job_stat_det['name']+ "\" , ") job_info += ( "\"jobS\":" + job_stat_det['jobExecS'] +" , ") job_info += ( "\"jobD\":" + job_stat_det['jobExecD'] +" , ") job_info += ( "\"preS\":" + job_stat_det['preS'] +" , ") job_info += ( "\"preD\":" + job_stat_det['preD'] +" , ") job_info += ( "\"cS\":" + job_stat_det['cS'] +" , ") job_info += ( "\"cD\":" + job_stat_det['cD'] +" , ") job_info += ( "\"gS\":" + job_stat_det['gS'] +" , ") job_info += ( "\"gD\":" + job_stat_det['gD'] +" , ") job_info += ( "\"eS\":" + job_stat_det['eS'] +" , ") job_info += ( "\"eD\":" + job_stat_det['eD'] +" , ") job_info += ( "\"kS\":" + job_stat_det['kS'] +" , ") job_info += ( "\"kD\":" + job_stat_det['kD'] +" , ") job_info += ( "\"postS\":" + job_stat_det['postS'] +" , ") job_info += ( "\"postD\":" + job_stat_det['postD'] +" , ") job_info += ( "\"state\":" + job_stat_det['state'] +" , ") job_info += ( "\"transformation\": \"" + job_stat_det['transformation'] +"\" , ") if self.transformation_color_map.has_key(job_stat_det['transformation']): job_info += ( "\"color\": \"" + self.transformation_color_map[job_stat_det['transformation']] +"\" , ") else: # there is no compute task job_info += ( "\"color\": 'white' , ") if plot_utils.isSubWfJob(job_stat_det['name']): job_info += ( "\"sub_wf\":1 , " ) corresponding_dax ='' if (self.job_instance_id_sub_wf_uuid_map.has_key(job_stat_det['instance_id'])): corresponding_dax = self.job_instance_id_sub_wf_uuid_map[job_stat_det['instance_id']] job_info += ( "\"sub_wf_name\":\""+ corresponding_dax+ "." + extn +"\"") else: job_info += ( "\"sub_wf_name\":''") else: job_info += ( "\"sub_wf\":0 , " ) job_info += ( "\"sub_wf_name\":''") job_info +=( "\n\t},\n") host_info+= job_info host_info += "]}," return host_info def get_formatted_transformation_data(self ): """ Returns formatted job information data. 
""" # find the pretty print length trans_info = '' for trans_stat in self.transformation_statistics_dict.values(): trans_stat_det = trans_stat.getTransformationDetails() trans_info +=("{") trans_info +=( "\n") trans_info += ( "\"name\":" + "\"" + trans_stat_det['name']+ "\" , ") trans_info += ( "\"count\":" + trans_stat_det['count'] +" , ") trans_info += ( "\"success\":" + trans_stat_det['success'] +" , ") trans_info += ( "\"failure\":" + trans_stat_det['failure'] +" , ") trans_info += ( "\"min\":" + trans_stat_det['min'] +" , ") trans_info += ( "\"max\":" + trans_stat_det['max'] +" , ") trans_info += ( "\"avg\":" + trans_stat_det['avg'] +" , ") trans_info += ( "\"total\":" + trans_stat_det['total'] +" , ") if self.transformation_color_map.has_key(trans_stat_det['name']): trans_info += ( "\"color\": \"" + self.transformation_color_map[trans_stat_det['name']] +"\" ") else: # there is no compute task trans_info += ( "\"color\": 'white' ") trans_info +=( "},\n") return trans_info def get_total_count_run_time(self): total_invoc_count =0 total_runtime =0 for trans_stat in self.transformation_statistics_dict.values(): total_invoc_count +=trans_stat.count total_runtime +=trans_stat.total_runtime return total_invoc_count, total_runtime def get_formatted_job_data(self ,extn ="html" ): """ Returns formatted job information data. """ # find the pretty print length job_info = '' for job_stat in self.job_statistics_list: job_stat_det = job_stat.getJobDetails(self.dagman_start_time) job_info +=("{") job_info +=( "\n") job_info += ( "\"name\":" + "\"" + job_stat_det['name']+ "\" , ") job_info += ( "\"jobS\":" + job_stat_det['jobS'] +" , ") job_info += ( "\"jobD\":" + job_stat_det['jobD'] +" , ") job_info += ( "\"preS\":" + job_stat_det['preS'] +" , ") job_info += ( "\"preD\":" + job_stat_det['preD'] +" , ") job_info += ( "\"cS\":" + job_stat_det['cS'] +" , ") job_info += ( "\"cD\":" + job_stat_det['cD'] +" , ") job_info += ( "\"gS\":" + job_stat_det['gS'] +" , ") job_info += ( "\"gD\":" + job_stat_det['gD'] +" , ") job_info += ( "\"eS\":" + job_stat_det['eS'] +" , ") job_info += ( "\"eD\":" + job_stat_det['eD'] +" , ") job_info += ( "\"kS\":" + job_stat_det['kS'] +" , ") job_info += ( "\"kD\":" + job_stat_det['kD'] +" , ") job_info += ( "\"postS\":" + job_stat_det['postS'] +" , ") job_info += ( "\"postD\":" + job_stat_det['postD'] +" , ") job_info += ( "\"state\":" + job_stat_det['state'] +" , ") job_info += ( "\"transformation\": \"" + job_stat_det['transformation'] +"\" , ") if self.transformation_color_map.has_key(job_stat_det['transformation']): job_info += ( "\"color\": \"" + self.transformation_color_map[job_stat_det['transformation']] +"\" , ") else: # there is no compute task job_info += ( "\"color\": 'white' , ") if plot_utils.isSubWfJob(job_stat_det['name']): job_info += ( "\"sub_wf\":1 , " ) corresponding_dax ='' if (self.job_instance_id_sub_wf_uuid_map.has_key(job_stat_det['instance_id'])): corresponding_dax = self.job_instance_id_sub_wf_uuid_map[job_stat_det['instance_id']] job_info += ( "\"sub_wf_name\":\""+ corresponding_dax+ "." + extn+"\"") else: job_info += ( "\"sub_wf_name\":''") else: job_info += ( "\"sub_wf\":0 , " ) job_info += ( "\"sub_wf_name\":''") job_info +=( "},\n") return job_info def get_formatted_job_instances_over_time_data(self , date_time_filter): """ Returns formatted job instance over time data. 
@param date_time_filter date time filter """ job_instance_over_time_list = self.wf_job_instances_over_time_statistics[date_time_filter] job_info = '' for job_stat in job_instance_over_time_list: job_info +=("{") job_info +=( "\n") job_info += ( "\"datetime\":" + "\"" + job_stat[0]+ "\" , ") job_info += ( "\"count\":" + str(job_stat[1]) +" , ") job_info += ( "\"runtime\":" + plot_utils.round_decimal_to_str(job_stat[2]) +" ") job_info +=( "},\n") return job_info def get_formatted_invocations_over_time_data(self , date_time_filter): """ Returns formatted invocations over time data. @param date_time_filter date time filter """ invs_over_time_list = self.wf_invocations_over_time_statistics[date_time_filter] inv_info = '' for inv_stat in invs_over_time_list: inv_info +=("{") inv_info +=( "\n") inv_info += ( "\"datetime\":" + "\"" + inv_stat[0] + "\" , ") inv_info += ( "\"count\":" + str(inv_stat[1]) +" , ") inv_info += ( "\"runtime\":" + plot_utils.round_decimal_to_str(inv_stat[2]) +" ") inv_info +=( "},\n") return inv_info def get_formatted_job_instances_over_time_metadata(self , date_time_filter): """ Returns formatted job instances over time metadata. @param date_time_filter date time filter """ job_instance_over_time_list = self.wf_job_instances_over_time_statistics[date_time_filter] max_count, max_runtime = self.get_max_count_run_time(True, date_time_filter) job_info = '' job_info +=("{") job_info +=( "\n") job_info += ( "\"num\":" + "\"" + str(len(job_instance_over_time_list))+ "\" , ") job_info += ( "\"max_count\":" + str(max_count) +" , ") job_info += ( "\"max_runtime\":" + plot_utils.round_decimal_to_str(max_runtime) +" ") job_info +=( "},\n") return job_info def get_formatted_invocations_over_time_metadata(self , date_time_filter): """ Returns formatted invocations over time metadata. @param date_time_filter date time filter """ invs_over_time_list = self.wf_invocations_over_time_statistics[date_time_filter] max_count, max_runtime = self.get_max_count_run_time(False, date_time_filter) inv_info = '' inv_info +=("{") inv_info +=( "\n") inv_info += ( "\"num\":" + "\"" + str(len(invs_over_time_list))+ "\" , ") inv_info += ( "\"max_count\":" + str(max_count) +" , ") inv_info += ( "\"max_runtime\":" + plot_utils.round_decimal_to_str(max_runtime) +" ") inv_info +=( "},\n") return inv_info def get_max_count_run_time(self, isJobInstance, date_time_filter): """ Returns the maximum count and runtime when filtered by the given filter.
@param isJobInstance true if it is calculated for job instances, false otherwise @param date_time_filter date time filter """ if isJobInstance: content_list = self.wf_job_instances_over_time_statistics[date_time_filter] else: content_list = self.wf_invocations_over_time_statistics[date_time_filter] if len(content_list) < 1: return None, None max_run_time = 0.0 max_count =0 for content in content_list: max_count = max(content[1],max_count ) max_run_time = max(content[2],max_run_time ) return max_count , max_run_time #---job information --- class JobInfo: def __init__(self): self.name = None self.instance_id = None self.retry_count = None self.site = None self.jobStart = None # This is the timestamp of the first event in the jobstate.log; this could be PRE_SCRIPT_STARTED self.jobDuration = None # This is the duration till the POST SCRIPT TERMINATED event or the last state of the job's run self.jobExecStart = None # This is the timestamp of the SUBMIT event self.jobExecDuration = None # This is the duration till the JOB TERMINATED event or the last state of the job's run self.kickstartStart = None self.kickstartDuration = None self.postStart = None self.postDuration = None self.preStart = None self.preDuration = None self.condorStart = None self.condorDuration = None self.gridStart = None self.gridDuration = None self.executeStart = None self.executeDuration = None self.transformation = None self.state = None self.host_name = None @property def is_success(self): """ Returns whether the job instance was successful or not """ if self.state =='SUCCESS': return True return False @property def is_failure(self): """ Returns whether the job instance failed or not """ if self.state =='FAILED': return True return False def getJobDetails(self , global_start_time): """ Returns the job instance details @param global_start_time the workflow start event.
""" job_details ={} if self.retry_count > 0: job_details['name'] = self.name +"_retry_"+ str(self.retry_count) else: job_details['name'] = self.name job_details['site'] = self.site job_details['instance_id']= self.instance_id if self.jobStart is not None and self.jobDuration is not None: job_details['jobS'] = str(convert_to_base_time(self.jobStart , global_start_time)) job_details['jobD'] = str(self.jobDuration) else: job_details['jobS'] = "''" job_details['jobD'] ="''" if self.jobExecStart is not None and self.jobExecDuration is not None: job_details['jobExecS'] = str(convert_to_base_time(self.jobExecStart , global_start_time)) job_details['jobExecD'] = str(self.jobExecDuration) else: job_details['jobExecS'] = "''" job_details['jobExecD'] ="''" if self.preStart is not None and self.preDuration is not None: job_details['preS'] = str(convert_to_base_time(self.preStart , global_start_time)) job_details['preD'] = str(self.preDuration) else: job_details['preS'] = "''" job_details['preD'] ="''" if self.condorStart is not None and self.condorDuration is not None: job_details['cS'] = str(convert_to_base_time(self.condorStart , global_start_time)) job_details['cD'] = str(self.condorDuration) else: job_details['cS'] = "''" job_details['cD'] ="''" if self.gridStart is not None and self.gridDuration is not None: job_details['gS'] = str(convert_to_base_time(self.gridStart , global_start_time)) job_details['gD'] = str(self.gridDuration) else: job_details['gS'] = "''" job_details['gD'] ="''" if self.executeStart is not None and self.executeDuration is not None: job_details['eS'] = str(convert_to_base_time(self.executeStart , global_start_time)) job_details['eD'] = str(self.executeDuration) else: job_details['eS'] = "''" job_details['eD'] ="''" if self.kickstartStart is not None and self.kickstartDuration is not None: job_details['kS'] = str(convert_to_base_time(self.kickstartStart , global_start_time)) job_details['kD'] = str(self.kickstartDuration) else: job_details['kS'] = "''" job_details['kD'] ="''" if self.postStart is not None and self.postDuration is not None: job_details['postS'] = str(convert_to_base_time(self.postStart , global_start_time)) job_details['postD'] = str(self.postDuration) else: job_details['postS'] = "''" job_details['postD'] ="''" if self.transformation is not None: job_details['transformation'] = self.transformation else: job_details['transformation'] = "" if self.is_failure: job_details['state'] = "0" elif self.is_success: job_details['state'] = "1" elif self.state is None: job_details['state'] = "2" else: job_details['state'] = "3" return job_details class TransformationInfo: def __init__(self): self.name = None self.count = None self.succeeded_count = None self.failed_count = None self.total_runtime = None self.min = None self.max = None self.avg = None def getTransformationDetails(self ): """ Returns the transformation details """ trans_details ={} trans_details['name'] = self.name trans_details['count'] = str(self.count) trans_details['success'] = str(self.succeeded_count) trans_details['failure'] = str(self.failed_count) trans_details['min'] = str(self.min) trans_details['max'] = str(self.max) trans_details['avg'] = str(self.avg) trans_details['total'] = str(self.total_runtime) return trans_details #-----date conversion---------- def convert_to_base_time(end_time , start_time): """ Converts the time to base time @param: end_time end time @param ; start_time start time """ return int(end_time)- int(start_time) 
pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/plots/populate.py0000755000175000017500000004164411757531137027140 0ustar ryngerynge#!/usr/bin/env python import os import re import sys import logging import optparse import math import tempfile import commands # Initialize logging object logger = logging.getLogger() # Set default level to INFO logger.setLevel(logging.INFO) from Pegasus.tools import utils from Pegasus.tools import db_utils from Pegasus.plots_stats import utils as plot_utils from workflow_info import WorkflowInfo, JobInfo , TransformationInfo import pegasus_gantt import pegasus_host_over_time import traceback from netlogger.analysis.workflow.stampede_statistics import StampedeStatistics from netlogger.analysis.schema.schema_check import SchemaVersionError from datetime import timedelta from datetime import datetime #regular expressions re_parse_property = re.compile(r'([^:= \t]+)\s*[:=]?\s*(.*)') #Global variables---- prog_base = os.path.split(sys.argv[0])[1] # Name of this program brainbase ='braindump.txt' predefined_colors = ["#8dd3c7" , "#4daf4a","#bebada" , "#80b1d3" , "#b3de69" , "#fccde5" , "#d9d9d9" , "#bc80bd" , "#ccebc5" , "#fb8072"] exclude_transformations =['dagman::post' , 'dagman::pre'] global_transformation_color_map ={} global_base_submit_dir = None global_braindb_submit_dir =None global_db_url = None global_top_wf_uuid =None global_wf_id_uuid_map = {} color_count =0 def populate_individual_job_instance_details(job_states , job_stat , isFailed , retry_count): """ Populates job_stat with the statistics of a single job instance @param job_states the job states record for the job instance @param job_stat the JobInfo object to populate @param isFailed whether the job instance failed @param retry_count the retry count of the job """ state_timestamp_dict ={} kickstartDur= 0 resource_delay = 0 condor_delay = 0 runtime = 0 condorTime = 0 job_duration = 0 # assigning job name job_stat.name = job_states.job_name # assigning site name job_stat.site = job_states.site # assigning instance id job_stat.instance_id = job_states.job_instance_id # Assumption: host name is unique job_stat.host_name = job_states.host_name # Setting the first job state as the job start job_stat.jobStart = job_states.jobS # Setting the last job state as the job end job_stat.jobDuration = job_states.jobDuration # Setting SUBMIT as the execution job start job_stat.jobExecStart = job_states.condor_start # Setting JOB_TERMINATED as the end event for the host over time chart if it is present, otherwise the last event.
if job_states.condor_duration is not None: job_duration = job_states.condor_duration else: job_duration = job_states.jobDuration job_stat.jobExecDuration = job_duration #kickstart time # Assigning start and duration of kickstart , pre and post job_stat.kickstartStart = job_states.kickstart_start job_stat.kickstartDuration = job_states.kickstart_duration #transformations associated with job job_stat.transformation = job_states.transformation # pre script time job_stat.preStart = job_states.pre_start job_stat.preDuration = job_states.pre_duration # post script time job_stat.postStart = job_states.post_start job_stat.postDuration = job_states.post_duration # GRID/GLOBUS SUBMIT start and duration job_stat.gridStart = job_states.grid_start job_stat.gridDuration = job_states.grid_duration #runtime job_stat.executeStart = job_states.exec_start job_stat.executeDuration = job_states.exec_duration #condor start to end job_stat.condorStart = job_states.condor_start job_stat.condorDuration = job_states.condor_duration #Assigning job state if isFailed: job_stat.state ='FAILED' else: job_stat.state= "Unknown" job_stat.retry_count = retry_count return def rlb(file_path): """ Converts the given path to be relative to the base submit directory Returns: path relative to the base """ file_path = plot_utils.rlb(file_path, global_braindb_submit_dir,global_base_submit_dir) return file_path #------------Gets sub workflow job names---------------- def get_job_inst_sub_workflow_map(workflow ): """ Returns the mapping of sub workflow jobs to the corresponding wf_uuid @param workflow StampedeStatistics object reference. """ job_inst_wf_uuid_map ={} jb_inst_sub_wf_list = workflow.get_job_instance_sub_wf_map() for jb_inst_sub_wf in jb_inst_sub_wf_list: if jb_inst_sub_wf.subwf_id is not None: job_inst_wf_uuid_map[jb_inst_sub_wf.job_instance_id] = global_wf_id_uuid_map[jb_inst_sub_wf.subwf_id] return job_inst_wf_uuid_map #-------return workflow uuid by parsing db alone----- def get_workflows_uuid(): """ Returns the workflow uuids of a given workflow, including the uuids of all sub workflows. """ # expand = True try: expanded_workflow_stats = StampedeStatistics(global_db_url) expanded_workflow_stats.initialize(global_top_wf_uuid) expanded_workflow_stats.set_job_filter('all') except SchemaVersionError: logger.error("------------------------------------------------------") logger.error("Database schema mismatch! Please run the upgrade tool") logger.error("to upgrade the database to the latest schema version.") sys.exit(1) except: logger.error("Failed to load the database." + global_db_url ) sys.exit(1) #expand = False try: root_workflow_stats = StampedeStatistics(global_db_url , False) root_workflow_stats.initialize(global_top_wf_uuid) root_workflow_stats.set_job_filter('all') except SchemaVersionError: logger.error("------------------------------------------------------") logger.error("Database schema mismatch! Please run the upgrade tool") logger.error("to upgrade the database to the latest schema version.") sys.exit(1) except: logger.error("Failed to load the database."
+ global_db_url ) sys.exit(1) wf_det = root_workflow_stats.get_workflow_details()[0] # map the root workflow id to its uuid global global_wf_id_uuid_map global_wf_id_uuid_map[wf_det.wf_id] = global_top_wf_uuid wf_uuid_list = [global_top_wf_uuid] desc_wf_uuid_list = expanded_workflow_stats.get_descendant_workflow_ids() for wf_det in desc_wf_uuid_list: global_wf_id_uuid_map[wf_det.wf_id] = wf_det.wf_uuid wf_uuid_list.append(wf_det.wf_uuid) return wf_uuid_list # ---------populate_workflow_details-------------------------- def populate_workflow_details(workflow_stats): """ populates the workflow statistics information @param workflow_stats the StampedeStatistics object reference """ workflow_info = WorkflowInfo() # Getting the workflow details wf_det = workflow_stats.get_workflow_details()[0] # Populating workflow details workflow_info.wf_uuid = wf_det.wf_uuid if global_wf_id_uuid_map.has_key(wf_det.parent_wf_id): workflow_info.parent_wf_uuid =global_wf_id_uuid_map[wf_det.parent_wf_id] workflow_info.submit_dir = wf_det.submit_dir workflow_info.dax_label = wf_det.dax_label workflow_info.wf_env = plot_utils.parse_workflow_environment(wf_det) return workflow_info def populate_job_details(workflow_stats , workflow_info): """ populates the job details of the workflow @param workflow_stats the StampedeStatistics object reference @param workflow_info the WorkflowInfo object reference """ total_jobs =0 total_jobs = workflow_stats.get_total_jobs_status() workflow_info.total_jobs = total_jobs def populate_task_details(workflow_stats, workflow_info): """ populates the task details of the workflow @param workflow_stats the StampedeStatistics object reference @param workflow_info the WorkflowInfo object reference """ total_tasks = 0 total_tasks = workflow_stats.get_total_tasks_status() workflow_info.total_tasks = total_tasks def populate_job_instance_details(workflow_stats , workflow_info): """ populates the job instances details of the workflow @param workflow_stats the StampedeStatistics object reference @param workflow_info the WorkflowInfo object reference """ workflow_run_time = None total_job_instances = 0 transformation_stats_dict ={} job_stats_list =[] host_job_mapping ={} job_name_retry_count_dict ={} wf_transformation_color_map ={} global color_count start_event = sys.maxint end_event = -sys.maxint -1 workflow_states_list = workflow_stats.get_workflow_states() if len(workflow_states_list) > 0: if workflow_states_list[0].state != "WORKFLOW_STARTED": logger.warning("Mismatch in the order of the events.
Taking the first state in the database as the start event ") # Storing the start and end event from the workflow states start_event = workflow_states_list[0].timestamp end_event = workflow_states_list[len(workflow_states_list)-1].timestamp else: logger.warning("Workflow states are missing for workflow " + workflow_info.wf_uuid) failed_job_list = workflow_stats.get_failed_job_instances() job_states_list = workflow_stats.get_job_states() for job_states in job_states_list: # Additional check for the case where "WORKFLOW_STARTED" event is missing if job_states.jobS is not None: start_event = min(int(start_event) , int(job_states.jobS) ) if job_states.jobDuration is not None: end_event = max(int(end_event) , int(job_states.jobS + job_states.jobDuration)) job_stat = JobInfo() job_stats_list.append(job_stat) is_job_failed = False restart_count =0 if job_name_retry_count_dict.has_key(job_states.job_name): restart_count = job_name_retry_count_dict[job_states.job_name] restart_count +=1 job_name_retry_count_dict[job_states.job_name] = restart_count if job_states.job_instance_id in failed_job_list: is_job_failed = True populate_individual_job_instance_details(job_states ,job_stat , is_job_failed , job_name_retry_count_dict[job_states.job_name]) # Assigning host to job mapping if host_job_mapping.has_key(job_stat.host_name): job_list =host_job_mapping[job_stat.host_name] job_list.append(job_stat) else: job_list = [] job_list.append(job_stat) host_job_mapping[job_stat.host_name] = job_list # Assigning the transformation name if job_stat.transformation is not None: transformation_stats_dict[job_stat.transformation] = None if not global_transformation_color_map.has_key(job_stat.transformation): global_transformation_color_map[job_stat.transformation]= predefined_colors[color_count%len(predefined_colors)] color_count +=1 # Assigning the mapping to the workflow map wf_transformation_color_map[job_stat.transformation] =global_transformation_color_map[job_stat.transformation] if (start_event != sys.maxint) and (end_event != (-sys.maxint -1)): workflow_info.workflow_run_time = end_event - start_event else: logger.error("Unable to find the start and end events for the workflow " + workflow_info.wf_uuid) total_job_instances = len(job_stats_list) workflow_info.dagman_start_time = start_event workflow_info.job_statistics_list =job_stats_list workflow_info.host_job_map = host_job_mapping workflow_info.transformation_statistics_dict = transformation_stats_dict workflow_info.transformation_color_map = wf_transformation_color_map workflow_info.total_job_instances = total_job_instances return workflow_info def populate_transformation_details(workflow_stats , workflow_info): """ populates the transformation details of the workflow @param workflow_stats the StampedeStatistics object reference @param workflow_info the WorkflowInfo object reference """ transformation_stats_dict ={} wf_transformation_color_map ={} global color_count transformation_stats_list= workflow_stats.get_transformation_statistics() for trans_stats in transformation_stats_list: if trans_stats.transformation.strip() in exclude_transformations: continue trans_info = TransformationInfo() trans_info.name = trans_stats.transformation trans_info.count = trans_stats.count trans_info.succeeded_count = trans_stats.success trans_info.failed_count = trans_stats.failure trans_info.min = trans_stats.min trans_info.max = trans_stats.max trans_info.avg = trans_stats.avg trans_info.total_runtime = trans_stats.sum transformation_stats_dict[trans_stats.transformation] =
trans_info if not global_transformtion_color_map.has_key(trans_stats.transformation): global_transformtion_color_map[trans_stats.transformation] = predefined_colors[color_count%len(predefined_colors)] color_count += 1 # Assigning the mapping to the workflow map wf_transformation_color_map[trans_stats.transformation] = global_transformtion_color_map[trans_stats.transformation] workflow_info.transformation_statistics_dict = transformation_stats_dict workflow_info.transformation_color_map = wf_transformation_color_map def setup_logger(level_str): """ Set up the logger for the module. @param level_str logging level """ level_str = level_str.lower() if level_str == "debug": logger.setLevel(logging.DEBUG) if level_str == "warning": logger.setLevel(logging.WARNING) if level_str == "error": logger.setLevel(logging.ERROR) if level_str == "info": logger.setLevel(logging.INFO) return def get_wf_stats(wf_uuid, expand = False): workflow_stampede_stats = None try: workflow_stampede_stats = StampedeStatistics(global_db_url , expand) workflow_stampede_stats.initialize(wf_uuid) except SchemaVersionError: logger.error("------------------------------------------------------") logger.error("Database schema mismatch! Please run the upgrade tool") logger.error("to upgrade the database to the latest schema version.") sys.exit(1) except: logger.error("Failed to load the database. " + global_db_url) logger.warning(traceback.format_exc()) sys.exit(1) return workflow_stampede_stats def populate_chart(wf_uuid , expand = False): """ Populates the workflow info object corresponding to the wf_uuid @param wf_uuid the workflow uuid @param expand expand workflow or not. """ workflow_stampede_stats = get_wf_stats(wf_uuid , expand) workflow_info = populate_workflow_details(workflow_stampede_stats) sub_wf_uuids = workflow_stampede_stats.get_sub_workflow_ids() workflow_info.sub_wf_id_uuids = sub_wf_uuids if len(sub_wf_uuids) > 0: workflow_info.job_instance_id_sub_wf_uuid_map = get_job_inst_sub_workflow_map(workflow_stampede_stats ) config = utils.slurp_braindb(rlb(workflow_info.submit_dir)) if (config.has_key('dag')): dag_file_name = config['dag'] workflow_info.dag_label = dag_file_name[:dag_file_name.find(".dag")] workflow_info.dag_file_path = os.path.join(rlb(workflow_info.submit_dir), dag_file_name) if (config.has_key('dax')): workflow_info.dax_file_path = config['dax'] return workflow_stampede_stats, workflow_info def populate_time_details(workflow_stats, wf_info): """ Populates the job instance and invocation count and runtime statistics, sorted by time. @param workflow_stats the StampedeStatistics object reference @param wf_info the WorkflowInfo object reference """ workflow_stats.set_job_filter('nonsub') # day is calculated from hour. workflow_stats.set_time_filter('hour') job_stats_by_time = workflow_stats.get_jobs_run_by_time() workflow_stats.set_transformation_filter(exclude=['condor::dagman']) inv_stats_by_time = workflow_stats.get_invocation_by_time() populate_job_invocation_time_details(wf_info, job_stats_by_time, inv_stats_by_time, 'hour') populate_job_invocation_time_details(wf_info, job_stats_by_time, inv_stats_by_time, 'day') def populate_job_invocation_time_details(wf_info, job_stats, invocation_stats, date_time_filter): """ Populates the job instance and invocation count and runtime statistics, sorted by time. 
@param wf_info the WorkflowInfo object reference @param job_stats the job statistics by time tuple @param invocation_stats the invocation statistics by time tuple @param date_time_filter date time filter """ formatted_stats_list = plot_utils.convert_stats_to_base_time(job_stats , date_time_filter) jobs_time_list = [] for stats in formatted_stats_list: content = [stats['date_format'] , stats['count'], stats['runtime']] jobs_time_list.append(content) wf_info.wf_job_instances_over_time_statistics[date_time_filter] = jobs_time_list formatted_stats_list = plot_utils.convert_stats_to_base_time(invocation_stats , date_time_filter) invoc_time_list = [] for stats in formatted_stats_list: content = [stats['date_format'] , stats['count'], stats['runtime']] invoc_time_list.append(content) wf_info.wf_invocations_over_time_statistics[date_time_filter] = invoc_time_list def setup(submit_dir , config_properties): """ Setup the populate module @submit_dir submit directory path of the workflow run @config_properties path to the property file """ # global reference global global_base_submit_dir global global_braindb_submit_dir global global_db_url global global_top_wf_uuid global_base_submit_dir = submit_dir # Getting values from the braindump file config = utils.slurp_braindb(submit_dir) if (config.has_key('submit_dir') or config.has_key('run')): if config.has_key('submit_dir'): global_braindb_submit_dir = os.path.abspath(config['submit_dir']) else: global_braindb_submit_dir = os.path.abspath(config['run']) else: logger.error("Submit directory cannot be found in the braindump.txt file.") sys.exit(1) # Create the sqlite db url global_db_url, global_top_wf_uuid = db_utils.get_db_url_wf_uuid(submit_dir, config_properties) if global_db_url is None: sys.exit(1) pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/plots/pegasus_time.py0000755000175000017500000005414111757531137027770 0ustar ryngerynge#!/usr/bin/env python """ Pegasus utility for generating the workflow execution time chart """ ## # Copyright 2010-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
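#
# Illustrative usage sketch (an assumption, not part of the original
# sources): each plot generator in this package -- pegasus_time,
# pegasus_gantt, pegasus_host_over_time, pegasus_breakdown -- exposes the
# same setup()/generate_chart() pair and consumes the WorkflowInfo objects
# built by the populate module above. A hypothetical driver for this time
# chart module could look like the following; submit_dir, config_properties,
# out_dir, env (a dict providing 'pegasus_javascript_dir' and
# 'pegasus_share_dir') and wf_uuid are caller-supplied values, not names
# defined in these modules.
#
# import populate
# import pegasus_time
#
# def _example_time_chart_driver(submit_dir, config_properties, out_dir, env, wf_uuid):
#     # resolve the Stampede database url and root workflow uuid from the braindump
#     populate.setup(submit_dir, config_properties)
#     # build the WorkflowInfo and attach the per-hour/per-day count and runtime series
#     stats, wf_info = populate.populate_chart(wf_uuid)
#     populate.populate_time_details(stats, wf_info)
#     # copy the javascript/image assets and write tc_action.js, then emit
#     # <wf_uuid>.html together with its tc_data.js data file
#     pegasus_time.setup(submit_dir, out_dir, env, "info")
#     pegasus_time.generate_chart(wf_info)
#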
## # Revision : $Revision$ import os import sys import logging import optparse # Initialize logging object logger = logging.getLogger() # Set default level to INFO logger.setLevel(logging.INFO) from Pegasus.plots_stats import utils as plot_utils from Pegasus.tools import utils #Global variables---- prog_base = os.path.split(sys.argv[0])[1] # Name of this program output_dir = None def setup_logger(level_str): """ Sets the logging level @param level_str: logging level """ level_str = level_str.lower() if level_str == "debug": logger.setLevel(logging.DEBUG) if level_str == "warning": logger.setLevel(logging.WARNING) if level_str == "error": logger.setLevel(logging.ERROR) if level_str == "info": logger.setLevel(logging.INFO) return #----------print workflow details-------- def print_workflow_details(workflow_stat , output_dir): """ Prints the data required for generating the time chart into data file. @param workflow_stat the WorkflowInfo object reference @param output_dir output directory path """ job_info_day = "var data_job_per_day = [" + workflow_stat.get_formatted_job_instances_over_time_data('day') + "];\n" invocation_info_day = "var data_invoc_per_day = [" + workflow_stat.get_formatted_invocations_over_time_data('day') + "];\n" job_info_hour = "var data_job_per_hour = [" + workflow_stat.get_formatted_job_instances_over_time_data('hour') + "];\n" invocation_info_hour = "var data_invoc_per_hour = [" + workflow_stat.get_formatted_invocations_over_time_data('hour') + "];\n" job_info_day_metadata = "var job_metadata_per_day = [" + workflow_stat.get_formatted_job_instances_over_time_metadata('day') + "];\n" invocation_info_day_metadata = "var invoc_metadata_per_day = [" + workflow_stat.get_formatted_invocations_over_time_metadata('day') + "];\n" job_info_hour_metadata = "var job_metadata_per_hour = [" + workflow_stat.get_formatted_job_instances_over_time_metadata('hour') + "];\n" invocation_info_hour_metadata = "var invoc_metadata_per_hour = [" + workflow_stat.get_formatted_invocations_over_time_metadata('hour') + "];\n" # print javascript file data_file = os.path.join(output_dir, "tc_data.js") try: fh = open(data_file, "w") fh.write( "\n") fh.write(job_info_day) fh.write(invocation_info_day) fh.write(job_info_hour) fh.write(invocation_info_hour) fh.write(job_info_day_metadata) fh.write(invocation_info_day_metadata) fh.write(job_info_hour_metadata) fh.write(invocation_info_hour_metadata) except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() return def create_action_script(output_dir): """ Generates the action script file which contains the javascript functions used by the main html file. 
@param output_dir output directory path """ action_content = """ function tc_fadeRight(){ if(tc_curX == 0){ return "images/right-fade.png" } return "images/right.png" } function tc_fadeDown(){ if(tc_curRuntimeY == 0){ return "images/down-fade.png" } return "images/down.png" } function tc_panLeft(){ var tc_panBy = (tc_curEndX -tc_curX)/tc_panXFactor; tc_curX +=tc_panBy; tc_curEndX +=tc_panBy; tc_xScale.domain(tc_curX ,tc_curEndX ); tc_rootPanel.render(); tc_headerPanel.render(); } function tc_panRight(){ var tc_panBy = (tc_curEndX -tc_curX)/tc_panXFactor; if(tc_curX > 0){ tc_curX -=tc_panBy; tc_curEndX -=tc_panBy; if(tc_curX <= 0){ tc_curEndX += (tc_curX + tc_panBy) tc_curX = 0; } tc_xScale.domain(tc_curX ,tc_curEndX ); tc_rootPanel.render(); tc_headerPanel.render(); } } function tc_panUp(){ var tc_panRuntimeBy = (tc_curRuntimeEndY -tc_curRuntimeY)/tc_panYFactor; var tc_panCountBy = (tc_curCountEndY -tc_curCountY)/tc_panYFactor; tc_curRuntimeY +=tc_panRuntimeBy; tc_curRuntimeEndY += tc_panRuntimeBy; tc_curCountY+=tc_panCountBy; tc_curCountEndY += tc_panCountBy; tc_yRuntimeScale.domain(tc_curRuntimeY ,tc_curRuntimeEndY); tc_yCountScale.domain(tc_curCountY, tc_curCountEndY); tc_rootPanel.render(); tc_headerPanel.render(); } function tc_panDown(){ var tc_panRuntimeBy = (tc_curRuntimeEndY -tc_curRuntimeY)/tc_panYFactor; var tc_panCountBy = (tc_curCountEndY -tc_curCountY)/tc_panYFactor; if(tc_curRuntimeY > 0){ tc_curRuntimeY -= tc_panRuntimeBy; tc_curRuntimeEndY -= tc_panRuntimeBy; if(tc_curRuntimeY< 0){ tc_curRuntimeEndY += (tc_curRuntimeY + tc_panRuntimeBy); tc_curRuntimeY = 0; } tc_yRuntimeScale.domain(tc_curRuntimeY ,tc_curRuntimeEndY ); tc_curCountY -= tc_panCountBy; tc_curCountEndY -= tc_panCountBy; if(tc_curCountY <0){ tc_curCountEndY += (tc_curCountY+tc_panCountBy); tc_curCountY = 0; } tc_yCountScale.domain(tc_curCountY, tc_curCountEndY); tc_rootPanel.render(); tc_headerPanel.render(); } } function tc_zoomOut(){ var tc_newX = 0; var tc_newRuntimeY = 0; var tc_newCountY = 0; tc_newX = tc_curEndX + tc_curEndX*0.1; tc_newRuntimeY = tc_curRuntimeEndY + tc_curRuntimeEndY*0.1; tc_newCountY = tc_curCountEndY+tc_curCountEndY*0.1; if(tc_curX < tc_newX && isFinite(tc_newX)){ tc_curEndX = tc_newX; tc_xScale.domain(tc_curX, tc_curEndX); } if(tc_curRuntimeY < tc_newRuntimeY && isFinite(tc_newRuntimeY)){ tc_curRuntimeEndY = tc_newRuntimeY; tc_yRuntimeScale.domain(tc_curRuntimeY, tc_curRuntimeEndY); } if(tc_curCountY < tc_newCountY && isFinite(tc_newCountY)){ tc_curCountEndY = tc_newCountY; tc_yCountScale.domain(tc_curCountY, tc_curCountEndY); } tc_rootPanel.render(); } function tc_zoomIn(){ var tc_newX = 0; var tc_newRuntimeY = 0; var tc_newCountY =0; tc_newX = tc_curEndX - tc_curEndX*0.1; tc_newRuntimeY = tc_curRuntimeEndY - tc_curRuntimeEndY*0.1; tc_newCountY = tc_curCountEndY - tc_curCountEndY*0.1; if(tc_curX < tc_newX && isFinite(tc_newX)){ tc_curEndX = tc_newX; tc_xScale.domain(tc_curX, tc_curEndX); } if(tc_curRuntimeY < tc_newRuntimeY && isFinite(tc_newRuntimeY)){ tc_curRuntimeEndY =tc_newRuntimeY; tc_yRuntimeScale.domain(tc_curRuntimeY, tc_curRuntimeEndY); } if(tc_curCountY < tc_newCountY && isFinite(tc_newCountY)){ tc_curCountEndY = tc_newCountY; tc_yCountScale.domain(tc_curCountY, tc_curCountEndY); } tc_rootPanel.render(); } function tc_resetZooming(){ tc_curX = 0; tc_curEndX = tc_dateTimeCount*tc_bar_spacing; tc_curRuntimeY = 0; tc_curRuntimeEndY = tc_maxRuntime; tc_curCountY = 0; tc_curCountEndY = tc_maxCount; tc_xScale.domain(tc_curX, tc_curEndX); 
tc_yCountScale.domain(tc_curCountY,tc_curCountEndY); tc_yRuntimeScale.domain(tc_curRuntimeY, tc_curRuntimeEndY); tc_rootPanel.render(); tc_headerPanel.render(); } function tc_setType(isJobSet){ tc_isJob= isJobSet; tc_loadGraph(); } function tc_setTime(isHourSet){ tc_isHour = isHourSet; tc_loadGraph(); } function tc_setChartTitle(){ if(tc_isJob){ if(tc_isHour){ return "Job count/runtime grouped by hour"; }else{ return "Job count/runtime grouped by day"; } }else{ if(tc_isHour){ return "Invocation count/runtime grouped by hour"; }else{ return "Invocation count/runtime grouped by day"; } } } function tc_getMetaData(){ if(tc_isJob){ if(tc_isHour){ return job_metadata_per_hour; }else{ return job_metadata_per_day; } }else{ if(tc_isHour){ return invoc_metadata_per_hour; }else{ return invoc_metadata_per_day; } } } function tc_getContentData(){ if(tc_isJob){ if(tc_isHour){ return data_job_per_hour; }else{ return data_job_per_day; } }else{ if(tc_isHour){ return data_invoc_per_hour; }else{ return data_invoc_per_day; } } } function tc_loadGraph(){ tc_metadata = tc_getMetaData(); tc_dateTimeCount =tc_metadata[0].num; tc_maxCount = tc_metadata[0].max_count; tc_maxRuntime =tc_metadata[0].max_runtime; tc_maxRuntime += tc_maxRuntime/10; tc_maxCount += tc_maxCount/10; tc_resetZooming(); } function tc_getData(){ return tc_getContentData(); } """ # print action script data_file = os.path.join(output_dir, "tc_action.js") try: fh = open(data_file, "w") fh.write( action_content) fh.write( "\n") except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() def create_header(workflow_stat): """ Generates the header html content. @param workflow_stat the WorkflowInfo object reference """ header_str = """ """+ workflow_stat.wf_uuid +""" """ header_str += plot_utils.create_home_button() return header_str def create_include(workflow_stat): """ Generates the html script include content. @param workflow_stat the WorkflowInfo object reference """ include_str = """ """ return include_str def create_variable(workflow_stat): """ Generates the javascript variables used to generate the chart. @param workflow_stat the WorkflowInfo object reference """ number_of_units = len(workflow_stat.wf_job_instances_over_time_statistics['hour']) max_count, max_runtime = workflow_stat.get_max_count_run_time(True, 'hour') # Adding variables var_str = """ """ return var_str def create_toolbar_panel(workflow_stat): """ Generates the top level toolbar content. @param workflow_stat the WorkflowInfo object reference """ panel_str = """ """ return panel_str def create_chart_panel(workflow_stat): """ Generates the chart panel content. @param workflow_stat the WorkflowInfo object reference """ panel_str =""" """ return panel_str def create_legend_panel(workflow_stat): """ Generates the legend panel content. @param workflow_stat the WorkflowInfo object reference """ panel_str =""" """ return panel_str def create_bottom_toolbar(): """ Generates the bottom toolbar html content. @param workflow_stat the WorkflowInfo object reference """ toolbar_content =""" """ return toolbar_content def create_time_plot(workflow_info , output_dir): """ Generates the html page content for displaying the time chart. 
@param workflow_stat the WorkflowInfo object reference @output_dir the output directory path """ print_workflow_details(workflow_info ,output_dir) str_list = [] wf_content = create_include(workflow_info) str_list.append(wf_content) # Adding variables wf_content =create_variable(workflow_info) str_list.append(wf_content) wf_content = "
\n" str_list.append(wf_content) # adding the tool bar panel wf_content =create_toolbar_panel(workflow_info) str_list.append(wf_content) # Adding the chart panel wf_content =create_chart_panel(workflow_info) str_list.append(wf_content) # Adding the legend panel wf_content =create_legend_panel(workflow_info) str_list.append(wf_content) wf_content = "
\n" str_list.append(wf_content) wf_content =create_bottom_toolbar() str_list.append(wf_content) return "".join(str_list) def create_time_plot_page(workflow_info ,output_dir): """ Prints the complete html page with the time chart and workflow details. @param workflow_stat the WorkflowInfo object reference @output_dir the output directory path """ str_list = [] wf_page = create_header(workflow_info) str_list.append(wf_page) wf_page = create_time_plot(workflow_info ,output_dir) str_list.append(wf_page) # printing the brain dump content if workflow_info.submit_dir is None: logger.warning("Unable to display brain dump contents. Invalid submit directory for workflow " + workflow_info.wf_uuid) else: wf_page = plot_utils.print_property_table(workflow_info.wf_env,False ," : ") str_list.append(wf_page) wf_page = "\n
\n
\n" str_list.append(wf_page) data_file = os.path.join(output_dir, workflow_info.wf_uuid+".html") try: fh = open(data_file, "w") fh.write( "\n") fh.write("".join(str_list)) except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() return def setup(submit_dir,out_dir, env, log_level): """ Setup the pegasus time module @param submit_dir submit directory path @out_dir the output directory path @env the environment variables @log_level logging level """ # global reference global output_dir output_dir = out_dir if log_level == None: log_level = "info" setup_logger(log_level) utils.create_directory(output_dir) src_js_path = env['pegasus_javascript_dir'] src_img_path = os.path.join(env['pegasus_share_dir'] , "plots/images/protovis/") dest_js_path = os.path.join(output_dir, "js") dest_img_path = os.path.join(output_dir, "images/") utils.create_directory(dest_js_path) utils.create_directory(dest_img_path) plot_utils.copy_files(src_js_path , dest_js_path) plot_utils.copy_files(src_img_path, dest_img_path) # copy images from common src_img_path = os.path.join(env['pegasus_share_dir'] , "plots/images/common/") plot_utils.copy_files(src_img_path, dest_img_path) create_action_script(output_dir) def generate_chart(workflow_info): """ Generates the time chart and all it's required files @workflow_info WorkflowInfo object reference """ create_time_plot_page(workflow_info , output_dir) # ---------main---------------------------------------------------------------------------- def main(): sys.exit(0) if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/plots/pegasus_gantt.py0000755000175000017500000007123311757531137030150 0ustar ryngerynge#!/usr/bin/env python """ Pegasus utility for generating workflow execution gantt chart Usage: pegasus-gantt [options] submit directory """ ## # Copyright 2010-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision$ import os import sys import logging # Initialize logging object logger = logging.getLogger() # Set default level to INFO logger.setLevel(logging.INFO) from Pegasus.tools import utils from Pegasus.plots_stats import utils as plot_utils import populate from datetime import timedelta from datetime import datetime #Global variables---- prog_base = os.path.split(sys.argv[0])[1] # Name of this program output_dir = None def setup_logger(level_str): """ Sets the logging level @param level_str: logging level """ level_str = level_str.lower() if level_str == "debug": logger.setLevel(logging.DEBUG) if level_str == "warning": logger.setLevel(logging.WARNING) if level_str == "error": logger.setLevel(logging.ERROR) if level_str == "info": logger.setLevel(logging.INFO) return #----------print workflow details-------- def print_workflow_details(workflow_stat , output_dir , extn): """ Prints the data required for generating the gantt chart into data file. 
@param workflow_stat the WorkflowInfo object reference @param output_dir output directory path """ job_info = "var data = [" + workflow_stat.get_formatted_job_data(extn) + "];" # print javascript file data_file = os.path.join(output_dir, "gc_" + workflow_stat.wf_uuid+"_data.js") try: fh = open(data_file, "w") fh.write( "\n") fh.write(job_info) except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() return def create_action_script(output_dir): """ Generates the action script file which contains the javascript functions used by the main html file. @param output_dir output directory path """ action_content = "\n\ function barvisibility(d , index){\n\ if(!d){\n\ return false;\n\ }\n\ var yPos = index * bar_spacing;\n\ if(yPos < curY || yPos > curEndY ){\n\ return false;\n\ }else{\n\ return true;\n\ }\n\ }\n\n\ function openWF(url){\n\ if(isNewWindow){\n\ window.open(url);\n\ }else{\n\ self.location = url;\n\ }\n\ }\n\n\ function printJobDetails(d){\n\ var job_details = \"Job name :\"+d.name;\n\ if(d.preD !==''){\n\ job_details +=\"\\nPre script duration :\"+d.preD +\" sec.\";\n\ }\n\ if(d.gD !==''){\n\ job_details +=\"\\nResource delay :\"+d.gD +\" sec.\";\n\ }\n\ if(d.eD !==''){\n\ job_details +=\"\\nRuntime as seen by dagman :\"+d.eD +\" sec.\";\n\ }\n\ if(d.kD!==''){\n\ job_details +=\"\\nKickstart duration :\"+d.kD +\" sec.\";\n\ }\n\ if(d.postD!==''){\n\ job_details +=\"\\nPost script duration :\"+d.postD +\" sec.\";\n\ }\n\ job_details +=\"\\nMain task :\"+d.transformation ;\n\ alert(job_details);\n\ }\n\n\ function getJobBorder(d){\n\ if(!d.state){\n\ return 'red';\n\ }\n\ else if(d.sub_wf){\n\ return 'orange';\n\ }\n\ if(d.transformation){\n\ return d.color;\n\ }else{\n\ return 'gray';\n\ }\n\ }\n\n\ function getJobTime(d) {\n\ var jobWidth = 0;\n\ if(d.jobD){\n\ jobWidth = xScale(d.jobS + d.jobD) -xScale(d.jobS);\n\ }\n\ if(jobWidth > 0 && jobWidth < 1 ){\n\ jobWidth = 1;\n\ }\n\ return jobWidth;\n\ }\n\n\ function getPreTime(d) {\n\ var preWidth = 0; \n\ if(d.preD){\n\ preWidth = xScale(d.preS + d.preD) -xScale(d.preS);\n\ }\n\ if(preWidth > 0 && preWidth < 1 ){\n\ preWidth = 1;\n\ }\n\ return preWidth;\n\ }\n\n\ function getCondorTime(d) {\n\ var cDWidth = 0;\n\ if(d.cD){\n\ cDWidth = xScale(d.cS + d.cD) - xScale(d.cS)\n\ }\n\ if(cDWidth > 0 && cDWidth < 1 ){\n\ cDWidth = 1;\n\ }\n\ return cDWidth;\n\ }\n\n\ function getResourceDelay(d) {\n\ var gWidth = 0;\n\ if(d.gS){\n\ gWidth = xScale(d.gS + d.gD) - xScale(d.gS);\n\ }\n\ if(gWidth > 0 && gWidth < 1 ){\n\ gWidth = 1;\n\ }\n\ return gWidth;\n\ }\n\n\ function getRunTime(d) {\n\ var rtWidth = 0;\n\ if(d.eD){\n\ rtWidth = xScale(d.eS + d.eD) -xScale(d.eS);\n\ }\n\ if(rtWidth > 0 && rtWidth < 1 ){\n\ rtWidth = 1;\n\ }\n\ return rtWidth;\n\ }\n\n\ function getKickStartTime(d) {\n\ var kickWidth = 0;\n\ if(d.kD){\n\ kickWidth = xScale(d.kS + d.kD) -xScale(d.kS);\n\ }\n\ if(kickWidth > 0 && kickWidth < 1 ){\n\ kickWidth = 1;\n\ }\n\ return kickWidth;\n\ }\n\n\ function getPostTime(d) {\n\ var postWidth = 0;\n\ if(d.postD){\n\ postWidth = xScale(d.postS + d.postD) -xScale(d.postS);\n\ }\n\ if(postWidth > 0 && postWidth < 1 ){\n\ postWidth = 1;\n\ }\n\ return postWidth;\n\ }\n\n\ function showState(){\n\ if(condorTime || kickstart || condorRuntime || resourceDelay || preScript || postScript){\n\ return true;\n\ }else{\n\ return false;\n\ }\n\ }\n\n\ function setCondorTime(){\n\ if(condorTime){\n\ condorTime = false;\n\ }else{\n\ condorTime = true;\n\ }\n\ rootPanel.render();\n\ }\n\n\ function 
setKickstart(){\n\ if(kickstart){\n\ kickstart = false;\n\ }else{\n\ kickstart = true;\n\ }\n\ rootPanel.render();\n\ }\n\n\ function setCondorRuntime(){\n\ if(condorRuntime){\n\ condorRuntime = false;\n\ }else{\n\ condorRuntime = true;\n\ }\n\ rootPanel.render();\n\ }\n\n\ function setResourceDelay(){\n\ if(resourceDelay){\n\ resourceDelay = false;\n\ }else{\n\ resourceDelay = true;\n\ }\n\ rootPanel.render();\n\ }\n\n\ function setPreScript(){\n\ if(preScript){\n\ preScript = false;\n\ }else{\n\ preScript = true;\n\ }\n\ rootPanel.render();\n\ }\n\n\ function setPostScript(){\n\ if(postScript){\n\ postScript = false;\n\ }else{\n\ postScript = true;\n\ }\n\ rootPanel.render();\n\ }\n\n\ function setShowLabel(){\n\ if(showName){\n\ return 'Hide job name';\n\ }else{\n\ return 'Show job name';\n\ }\n\ }\n\n\ function setShowName(){\n\ if(showName){\n\ showName = false;\n\ }else{\n\ showName = true;\n\ }\n\ rootPanel.render();\n\ return;\n\ }\n\n\ function fadeRight(){\n\ if(curX == 0){\n\ return \"images/right-fade.png\"\n\ }\n\ return \"images/right.png\"\n\ }\n\n\ function fadeDown(){\n\ if(curY == 0){\n\ return \"images/down-fade.png\"\n\ }\n\ return \"images/down.png\"\n\ }\n\ \n\ function panLeft(){\n\ var panBy = (curEndX -curX)/panXFactor;\n\ curX +=panBy;\n\ curEndX +=panBy;\n\ xScale.domain(curX ,curEndX );\n\ rootPanel.render();\n\ headerPanel.render();\n\ }\n\ \n\ function panRight(){\n\ var panBy = (curEndX -curX)/panXFactor;\n\ if(curX > 0){\n\ curX -=panBy;\n\ curEndX -=panBy;\n\ if(curX <= 0){\n\ curEndX += (curX + panBy)\n\ curX = 0;\n\ }\n\ xScale.domain(curX ,curEndX );\n\ rootPanel.render();\n\ headerPanel.render();\n\ }\n\ }\n\ \n\ function panUp(){\n\ var panBy = (curEndY -curY)/panYFactor;\n\ curY +=panBy;\n\ curEndY += panBy;\n\ yScale.domain(curY ,curEndY);\n\ rootPanel.render();\n\ headerPanel.render();\n\ }\n\ \n\ function panDown(){\n\ var panBy = (curEndY -curY)/panYFactor;\n\ if(curY > 0){\n\ curY -= panBy;\n\ curEndY -= panBy;\n\ if(curY< 0){\n\ curEndY += (curY + panBy);\n\ curY = 0;\n\ }\n\ yScale.domain(curY ,curEndY );\n\ rootPanel.render();\n\ headerPanel.render();\n\ }\n\ }\n\ \n\ function zoomOut(){\n\ var newX = 0;\n\ var newY = 0;\n\ \n\ newX = curEndX + curEndX*0.1;\n\ newY = curEndY + curEndY*0.1;\n\ \n\ if(curX < newX && isFinite(newX)){\n\ curEndX = newX;\n\ xScale.domain(curX, curEndX);\n\ }\n\ if(curY < newY && isFinite(newY)){\n\ curEndY = newY;\n\ yScale.domain(curY, curEndY);\n\ }\n\ rootPanel.render();\n\ }\n\ \n\ function zoomIn(){\n\ var newX = 0;\n\ var newY = 0;\n\ newX = curEndX - curEndX*0.1;\n\ newY = curEndY - curEndY*0.1;\n\ if(curX < newX && isFinite(newX)){\n\ curEndX = newX;\n\ xScale.domain(curX, curEndX);\n\ }\n\ if(curY < newY && isFinite(newY)){\n\ curEndY = newY;\n\ yScale.domain(curY, curEndY);\n\ }\n\ rootPanel.render();\n\ }\n\ \n\ function resetZooming(){\n\ curX = 0;\n\ curY = 0;\n\ curEndX = initMaxX;\n\ curEndY = initMaxY;\n\ xScale.domain(curX, curEndX);\n\ yScale.domain(curY, curEndY);\n\ rootPanel.render();\n\ headerPanel.render();\n\ }\n" # print action script data_file = os.path.join(output_dir, "gc_action.js") try: fh = open(data_file, "w") fh.write( action_content) fh.write( "\n") except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() def create_header(workflow_stat): """ Generates the header html content. 
@param workflow_stat the WorkflowInfo object reference """ header_str = """ """+ workflow_stat.wf_uuid +""" """ header_str += plot_utils.create_home_button() return header_str def create_toc(workflow_stat): """ Generates the table of content for the pages @param workflow_stat the WorkflowInfo object reference """ toc_str ="""
Workflow execution gantt chart
""" toc_str += """ Workflow execution gantt chart
Workflow environment
""" if len(workflow_stat.sub_wf_id_uuids) >0: toc_str += """ Sub workflows
""" return toc_str def create_include(workflow_stat): """ Generates the html script include content. @param workflow_stat the WorkflowInfo object reference """ include_str = "\n\ \n\ \n" return include_str def create_variable(workflow_stat): """ Generates the javascript variables used to generate the chart. @param workflow_stat the WorkflowInfo object reference """ number_of_jobs = workflow_stat.total_job_instances # Adding variables var_str = "\n" return var_str def create_toolbar_panel(workflow_stat , extn): """ Generates the top level toolbar content. @param workflow_stat the WorkflowInfo object reference """ panel_str = "\n" return panel_str def create_chart_panel(workflow_stat): """ Generates the chart panel content. @param workflow_stat the WorkflowInfo object reference """ panel_str ="\n" return panel_str def create_legend_panel(workflow_stat): """ Generates the bottom level legend panel content. @param workflow_stat the WorkflowInfo object reference """ panel_str ="\n" return panel_str def create_bottom_toolbar(): """ Generates the bottom toolbar html content. @param workflow_stat the WorkflowInfo object reference """ toolbar_content ="""
""" return toolbar_content def create_gantt_plot(workflow_info , output_dir , extn = "html"): """ Generates the html page content for displaying the gantt chart. @param workflow_stat the WorkflowInfo object reference @output_dir the output directory path """ print_workflow_details(workflow_info ,output_dir , extn) str_list = [] wf_content = create_include(workflow_info) str_list.append(wf_content) # Adding variables wf_content =create_variable(workflow_info) str_list.append(wf_content) wf_content = "
\n" str_list.append(wf_content) # adding the tool bar panel wf_content =create_toolbar_panel(workflow_info , extn ) str_list.append(wf_content) # Adding the chart panel wf_content =create_chart_panel(workflow_info) str_list.append(wf_content) # Adding the legend panel wf_content =create_legend_panel(workflow_info) str_list.append(wf_content) wf_content = "
\n" str_list.append(wf_content) wf_content =create_bottom_toolbar() str_list.append(wf_content) return "".join(str_list) def create_gantt_plot_page(workflow_info ,output_dir, extn = "html"): """ Prints the complete html page with the gantt chart and workflow details. @param workflow_stat the WorkflowInfo object reference @output_dir the output directory path """ str_list = [] wf_page = create_header(workflow_info) str_list.append(wf_page) wf_page = """
""" str_list.append(wf_page) wf_page = create_toc(workflow_info) str_list.append(wf_page) wf_page = """
Workflow execution gantt chart
""" str_list.append(wf_page) wf_page = create_gantt_plot(workflow_info ,output_dir) str_list.append(wf_page) # printing the brain dump content wf_page = """
Workflow environment
""" str_list.append(wf_page) if workflow_info.submit_dir is None: logger.warning("Unable to display brain dump contents. Invalid submit directory for workflow " + workflow_info.wf_uuid) else: wf_page = plot_utils.print_property_table(workflow_info.wf_env,False ," : ") str_list.append(wf_page) # print sub workflow list if len(workflow_info.sub_wf_id_uuids) >0: wf_page = """
Sub workflows
""" str_list.append(wf_page) wf_page = plot_utils.print_sub_wf_links(workflow_info.sub_wf_id_uuids , extn) str_list.append(wf_page) wf_page = """
""" str_list.append(wf_page) wf_page = """
""" str_list.append(wf_page) data_file = os.path.join(output_dir, workflow_info.wf_uuid+"." + extn) try: fh = open(data_file, "w") fh.write( "\n") fh.write("".join(str_list)) except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() return def setup(submit_dir,out_dir, env, log_level): """ Setup the pegasus gantt module @param submit_dir submit directory path @out_dir the output directory path @env the environment variables @log_level logging level """ # global reference global output_dir output_dir = out_dir if log_level == None: log_level = "info" setup_logger(log_level) utils.create_directory(output_dir) src_js_path = env['pegasus_javascript_dir'] src_img_path = os.path.join(env['pegasus_share_dir'] , "plots/images/protovis/") dest_js_path = os.path.join(output_dir, "js") dest_img_path = os.path.join(output_dir, "images/") utils.create_directory(dest_js_path) utils.create_directory(dest_img_path) plot_utils.copy_files(src_js_path , dest_js_path) plot_utils.copy_files(src_img_path, dest_img_path) # copy images from common src_img_path = os.path.join(env['pegasus_share_dir'] , "plots/images/common/") plot_utils.copy_files(src_img_path, dest_img_path) create_action_script(output_dir) def generate_chart(workflow_info): """ Generates the gantt chart and all it's required files @workflow_info WorkflowInfo object reference """ create_gantt_plot_page(workflow_info , output_dir) # ---------main---------------------------------------------------------------------------- def main(): sys.exit(0) if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/plots/pegasus_host_over_time.py0000755000175000017500000006742011757531137032064 0ustar ryngerynge#!/usr/bin/env python """ Pegasus utility for generating host over time chart Usage: pegasus-gantt [options] submit directory """ ## # Copyright 2010-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision$ import os import sys import logging # Initialize logging object logger = logging.getLogger() # Set default level to INFO logger.setLevel(logging.INFO) from Pegasus.tools import utils from Pegasus.plots_stats import utils as plot_utils import populate from datetime import timedelta from datetime import datetime #Global variables---- prog_base = os.path.split(sys.argv[0])[1] # Name of this program output_dir = None def setup_logger(level_str): """ Sets the logging level @param level_str: logging level """ level_str = level_str.lower() if level_str == "debug": logger.setLevel(logging.DEBUG) if level_str == "warning": logger.setLevel(logging.WARNING) if level_str == "error": logger.setLevel(logging.ERROR) if level_str == "info": logger.setLevel(logging.INFO) return #----------print workflow details-------- def print_workflow_details(workflow_stat ,output_dir , extn): """ Prints the data required for generating the host chart into data file. 
@param workflow_stat the WorkflowInfo object reference @param output_dir output directory path """ job_info = "var hc_data = [" + workflow_stat.get_formatted_host_data(extn) + "];" # print javascript file data_file = os.path.join(output_dir, "hc_"+workflow_stat.wf_uuid+"_data.js") try: fh = open(data_file, "w") fh.write( "\n") fh.write(job_info) except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() return def create_action_script(output_dir): """ Generates the action script file which contains the javascript functions used by the main html file. @param output_dir output directory path """ # print action script action_content = "\n\ function hc_barvisibility(d , index){\n\ if(!d){\n\ return false;\n\ }\n\ var yPos = index * hc_bar_spacing;\n\ if(yPos < hc_curY || yPos > hc_curEndY ){\n\ return false;\n\ }else{\n\ return true;\n\ }\n\ }\n\n\ function hc_openWF(url){\n\ if(hc_isNewWindow){\n\ window.open(url);\n\ }else{\n\ self.location = url;\n\ }\n\ }\n\n\ function hc_printJobDetails(d){\n\ var job_details = \"Job name :\"+d.name;\n\ if(d.gD !==''){\n\ job_details +=\"\\nResource delay :\"+d.gD +\" sec.\";\n\ }\n\ if(d.eD !==''){\n\ job_details +=\"\\nRuntime as seen by dagman :\"+d.eD +\" sec.\";\n\ }\n\ if(d.kD!==''){\n\ job_details +=\"\\nKickstart duration :\"+d.kD +\" sec.\";\n\ }\n\ job_details +=\"\\nMain task :\"+d.transformation ;\n\ alert(job_details);\n\ }\n\n\ function hc_getJobBorder(d){\n\ if(!d.state){\n\ return 'red';\n\ }\n\ else if(d.sub_wf){\n\ return 'orange';\n\ }\n\ if(d.transformation){\n\ return d.color;\n\ }else{\n\ return 'gray';\n\ }\n\ }\n\n\ function hc_getJobTime(d) {\n\ var jobWidth = 0;\n\ if(d.jobD){\n\ jobWidth = hc_xScale(d.jobS + d.jobD) -hc_xScale(d.jobS);\n\ }\n\ if(jobWidth > 0 && jobWidth < 1 ){\n\ jobWidth = 1;\n\ }\n\ return jobWidth;\n\ }\n\n\ function hc_getCondorTime(d) {\n\ var cDWidth = 0;\n\ if(d.cD){\n\ cDWidth = hc_xScale(d.cS + d.cD) - hc_xScale(d.cS)\n\ }\n\ if(cDWidth > 0 && cDWidth < 1 ){\n\ cDWidth = 1;\n\ }\n\ return cDWidth;\n\ }\n\n\ function hc_getResourceDelay(d) {\n\ var gWidth = 0;\n\ if(d.gS){\n\ gWidth = hc_xScale(d.gS + d.gD) - hc_xScale(d.gS);\n\ }\n\ if(gWidth > 0 && gWidth < 1 ){\n\ gWidth = 1;\n\ }\n\ return gWidth;\n\ }\n\n\ function hc_getRunTime(d) {\n\ var rtWidth = 0;\n\ if(d.eD){\n\ rtWidth = hc_xScale(d.eS + d.eD) -hc_xScale(d.eS);\n\ }\n\ if(rtWidth > 0 && rtWidth < 1 ){\n\ rtWidth = 1;\n\ }\n\ return rtWidth;\n\ }\n\n\ function hc_getKickStartTime(d) {\n\ var kickWidth = 0;\n\ if(d.kD){\n\ kickWidth = hc_xScale(d.kS + d.kD) -hc_xScale(d.kS);\n\ }\n\ if(kickWidth > 0 && kickWidth < 1 ){\n\ kickWidth = 1;\n\ }\n\ return kickWidth;\n\ }\n\n\ function hc_showState(){\n\ if(hc_condorTime || hc_kickstart || hc_condorRunTime || hc_resourceDelay){\n\ return true;\n\ }else{\n\ return false;\n\ }\n\ }\n\n\ function hc_setCondorTime(){\n\ if(hc_condorTime){\n\ hc_condorTime = false;\n\ }else{\n\ hc_condorTime = true;\n\ }\n\ hc_rootPanel.render();\n\ }\n\n\ function hc_setKickstart(){\n\ if(hc_kickstart){\n\ hc_kickstart = false;\n\ }else{\n\ hc_kickstart = true;\n\ }\n\ hc_rootPanel.render();\n\ }\n\n\ function hc_setCondorRuntime(){\n\ if(hc_condorRunTime){\n\ hc_condorRunTime = false;\n\ }else{\n\ hc_condorRunTime = true;\n\ }\n\ hc_rootPanel.render();\n\ }\n\n\ function hc_setResourceDelay(){\n\ if(hc_resourceDelay){\n\ hc_resourceDelay = false;\n\ }else{\n\ hc_resourceDelay = true;\n\ }\n\ hc_rootPanel.render();\n\ }\n\n\ function hc_setShowLabel(){\n\ if(hc_showName){\n\ 
return 'Hide host name';\n\ }else{\n\ return 'Show host name';\n\ }\n\ }\n\n\ function hc_setShowName(){\n\ if(hc_showName){\n\ hc_showName = false;\n\ }else{\n\ hc_showName = true;\n\ }\n\ hc_rootPanel.render();\n\ return;\n\ }\n\n\ function hc_fadeRight(){\n\ if(hc_curX == 0){\n\ return \"images/right-fade.png\"\n\ }\n\ return \"images/right.png\"\n\ }\n\n\ function hc_fadeDown(){\n\ if(hc_curY == 0){\n\ return \"images/down-fade.png\"\n\ }\n\ return \"images/down.png\"\n\ }\n\ \n\ function hc_panLeft(){\n\ var panBy = (hc_curEndX -hc_curX)/hc_panXFactor;\n\ hc_curX +=panBy;\n\ hc_curEndX +=panBy;\n\ hc_xScale.domain(hc_curX ,hc_curEndX );\n\ hc_rootPanel.render();\n\ hc_headerPanel.render();\n\ }\n\ \n\ function hc_panRight(){\n\ var panBy = (hc_curEndX -hc_curX)/hc_panXFactor;\n\ if(hc_curX > 0){\n\ hc_curX -=panBy;\n\ hc_curEndX -=panBy;\n\ if(hc_curX <= 0){\n\ hc_curEndX += (hc_curX + panBy)\n\ hc_curX = 0;\n\ }\n\ hc_xScale.domain(hc_curX ,hc_curEndX );\n\ hc_rootPanel.render();\n\ hc_headerPanel.render();\n\ }\n\ }\n\ \n\ function hc_panUp(){\n\ var panBy = (hc_curEndY -hc_curY)/hc_panYFactor;\n\ hc_curY +=panBy;\n\ hc_curEndY += panBy;\n\ hc_yScale.domain(hc_curY ,hc_curEndY);\n\ hc_rootPanel.render();\n\ hc_headerPanel.render();\n\ }\n\ \n\ function hc_panDown(){\n\ var panBy = (hc_curEndY -hc_curY)/hc_panYFactor;\n\ if(hc_curY > 0){\n\ hc_curY -= panBy;\n\ hc_curEndY -= panBy;\n\ if(hc_curY< 0){\n\ hc_curEndY += (hc_curY + panBy);\n\ hc_curY = 0;\n\ }\n\ hc_yScale.domain(hc_curY ,hc_curEndY );\n\ hc_rootPanel.render();\n\ hc_headerPanel.render();\n\ }\n\ }\n\ \n\ function hc_zoomOut(){\n\ var newX = 0;\n\ var newY = 0;\n\ \n\ newX = hc_curEndX + hc_curEndX*0.1;\n\ newY = hc_curEndY + hc_curEndY*0.1;\n\ \n\ if(hc_curX < newX && isFinite(newX)){\n\ hc_curEndX = newX;\n\ hc_xScale.domain(hc_curX, hc_curEndX);\n\ }\n\ if(hc_curY < newY && isFinite(newY)){\n\ hc_curEndY = newY;\n\ hc_yScale.domain(hc_curY, hc_curEndY);\n\ }\n\ hc_rootPanel.render();\n\ }\n\ \n\ function hc_zoomIn(){\n\ var newX = 0;\n\ var newY = 0;\n\ newX = hc_curEndX - hc_curEndX*0.1;\n\ newY = hc_curEndY - hc_curEndY*0.1;\n\ if(hc_curX < newX && isFinite(newX)){\n\ hc_curEndX = newX;\n\ hc_xScale.domain(hc_curX, hc_curEndX);\n\ }\n\ if(hc_curY < newY && isFinite(newY)){\n\ hc_curEndY = newY;\n\ hc_yScale.domain(hc_curY, hc_curEndY);\n\ }\n\ hc_rootPanel.render();\n\ }\n\ \n\ function hc_resetZomming(){\n\ hc_curX = 0;\n\ hc_curY = 0;\n\ hc_curEndX = hc_initMaxX;\n\ hc_curEndY = hc_initMaxY;\n\ hc_xScale.domain(hc_curX, hc_curEndX);\n\ hc_yScale.domain(hc_curY, hc_curEndY);\n\ hc_rootPanel.render();\n\ hc_headerPanel.render();\n\ }\n" data_file = os.path.join(output_dir, "hc_action.js") try: fh = open(data_file, "w") fh.write( action_content) fh.write( "\n") except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() def create_header(workflow_stat): """ Generates the header html content. @param workflow_stat the WorkflowInfo object reference """ header_str = """ """+ workflow_stat.wf_uuid +""" """ header_str += plot_utils.create_home_button() return header_str def create_toc(workflow_stat): """ Generates the table of content for the pages @param workflow_stat the WorkflowInfo object reference """ toc_str ="""
Workflow host over time chart
""" toc_str += """ Workflow host over time chart
Workflow environment
""" if len(workflow_stat.sub_wf_id_uuids) >0: toc_str += """ Sub workflows
""" return toc_str def create_include(workflow_stat): """ Generates the html script include content. @param workflow_stat the WorkflowInfo object reference """ include_str = "\n\ \n\ \n" return include_str def create_variable(workflow_stat): """ Generates the javascript variables used to generate the chart. @param workflow_stat the WorkflowInfo object reference """ number_of_hosts = len(workflow_stat.host_job_map) # Adding variables var_str = "\n" return var_str def create_toolbar_panel(workflow_stat, extn): """ Generates the top level toolbar content. @param workflow_stat the WorkflowInfo object reference """ panel_str = "\n" return panel_str def create_chart_panel(workflow_stat): """ Generates the chart panel content. @param workflow_stat the WorkflowInfo object reference """ panel_str ="\n" return panel_str def create_legend_panel(workflow_stat): """ Generates the bottom level legend panel content. @param workflow_stat the WorkflowInfo object reference """ panel_str ="\n" return panel_str def create_bottom_toolbar(): """ Generates the bottom toolbar html content. @param workflow_stat the WorkflowInfo object reference """ toolbar_content ="""
""" return toolbar_content def create_host_plot(workflow_info,output_dir , extn ="html"): """ Generates the html page content for displaying the host chart. @param workflow_stat the WorkflowInfo object reference @output_dir the output directory path """ print_workflow_details(workflow_info ,output_dir, extn) str_list = [] wf_content = create_include(workflow_info) str_list.append(wf_content) # Adding variables wf_content =create_variable(workflow_info ) str_list.append(wf_content) # adding the tool bar panel wf_content = "
\n" str_list.append(wf_content) wf_content =create_toolbar_panel(workflow_info , extn) str_list.append(wf_content) # Adding the chart panel wf_content =create_chart_panel(workflow_info) str_list.append(wf_content) # Adding the legend panel wf_content =create_legend_panel(workflow_info) str_list.append(wf_content) wf_content = "
\n" str_list.append(wf_content) wf_content =create_bottom_toolbar() str_list.append(wf_content) return "".join(str_list) def create_host_plot_page(workflow_info , output_dir , extn ="html"): """ Prints the complete html page with the host chart and workflow details. @param workflow_stat the WorkflowInfo object reference @output_dir the output directory path """ str_list = [] wf_page = create_header(workflow_info) str_list.append(wf_page) wf_page = """
""" str_list.append(wf_page) wf_page = create_toc(workflow_info) str_list.append(wf_page) wf_page = """
Host over time chart
""" str_list.append(wf_page) wf_page = create_host_plot(workflow_info , output_dir ,extn) str_list.append(wf_page) # printing the brain dump content wf_page = """
Workflow environment
""" str_list.append(wf_page) if workflow_info.submit_dir is None: logger.warning("Unable to display brain dump contents. Invalid submit directory for workflow " + workflow_info.wf_uuid) else: wf_page = plot_utils.print_property_table(workflow_info.wf_env,False ," : ") str_list.append(wf_page) # print sub workflow list if len(workflow_info.sub_wf_id_uuids) >0: wf_page = """
Sub workflows
""" str_list.append(wf_page) wf_page = plot_utils.print_sub_wf_links(workflow_info.sub_wf_id_uuids) str_list.append(wf_page) wf_page = """
""" str_list.append(wf_page) wf_page = """
""" str_list.append(wf_page) # print html file data_file = os.path.join(output_dir, workflow_info.wf_uuid+"."+extn) try: fh = open(data_file, "w") fh.write( "\n") fh.write("".join(str_list)) except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() return def setup(submit_dir,out_dir,env,log_level): """ Setup the pegasus host over time module @param submit_dir submit directory path @out_dir the output directory path @env the environment variables @log_level logging level """ global output_dir output_dir = out_dir if log_level == None: log_level = "info" setup_logger(log_level) utils.create_directory(output_dir) src_js_path = env['pegasus_javascript_dir'] src_img_path = os.path.join(env['pegasus_share_dir'] , "plots/images/protovis/") dest_js_path = os.path.join(output_dir, "js") dest_img_path = os.path.join(output_dir, "images/") utils.create_directory(dest_js_path) utils.create_directory(dest_img_path) plot_utils.copy_files(src_js_path , dest_js_path) plot_utils.copy_files(src_img_path, dest_img_path) # copy images from common src_img_path = os.path.join(env['pegasus_share_dir'] , "plots/images/common/") plot_utils.copy_files(src_img_path, dest_img_path) create_action_script(output_dir) def generate_chart(workflow_info): """ Generates the host chart and all it's required files @workflow_info WorkflowInfo object reference """ create_host_plot_page(workflow_info,output_dir ) # ---------main---------------------------------------------------------------------------- def main(): sys.exit(0) if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/plots/__init__.py0000644000175000017500000000000011757531137027020 0ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/plots/pegasus_breakdown.py0000755000175000017500000003467511757531137031020 0ustar ryngerynge#!/usr/bin/env python """ Pegasus utility for generating breakdown chart """ ## # Copyright 2010-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision$ import os import sys import logging # Initialize logging object logger = logging.getLogger() # Set default level to INFO logger.setLevel(logging.INFO) from Pegasus.tools import utils from Pegasus.plots_stats import utils as plot_utils import populate from datetime import timedelta from datetime import datetime #Global variables---- prog_base = os.path.split(sys.argv[0])[1] # Name of this program output_dir = None def setup_logger(level_str): """ Sets the logging level @param level_str: logging level """ level_str = level_str.lower() if level_str == "debug": logger.setLevel(logging.DEBUG) if level_str == "warning": logger.setLevel(logging.WARNING) if level_str == "error": logger.setLevel(logging.ERROR) if level_str == "info": logger.setLevel(logging.INFO) return #----------print workflow details-------- def print_workflow_details(workflow_stat , output_dir): """ Prints the data required for generating the gantt chart into data file. 
@param workflow_stat the WorkflowInfo object reference @param output_dir output directory path """ trans_info = "var bc_data = [" + workflow_stat.get_formatted_transformation_data() + "];" # print javascript file data_file = os.path.join(output_dir, "bc_" + workflow_stat.wf_uuid+"_data.js") try: fh = open(data_file, "w") fh.write( "\n") fh.write(trans_info) except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() return def create_action_script(output_dir): """ Generates the action script file which contains the javascript functions used by the main html file. @param output_dir output directory path """ action_content = """ function printTransformationDetails(d){ var transformation_details = "name : "+d.name ; transformation_details += "\\nTotal count : "+ d.count ; transformation_details += "\\nSucceeded count : "+ d.success ; transformation_details += "\\nFailed count : "+d.failure ; transformation_details += "\\nMin Runtime : "+d.min ; transformation_details += "\\nMax Runtime : "+d.max ; transformation_details += "\\nAvg Runtime : "+d.avg ; transformation_details += "\\nTotal Runtime : "+d.total ; alert(transformation_details); } function setBreakdownBy(isBreakdown){ breakdownByCount= isBreakdown; loadBCGraph(); } function setBCChartTitle(){ if(breakdownByCount){ return "Invocation breakdown by count grouped by transformation name"; }else{ return "Invocation breakdown by runtime grouped by transformation name"; } } function loadBCGraph(){ bc_headerPanel.render(); bc_chartPanel.render(); bc_chartPanel.def("o", -1); } function getOuterAngle(d){ if(breakdownByCount){ return d.count/ bc_total_count * 2 * Math.PI; } else{ return d.total/ bc_total_runtime * 2 * Math.PI; } } function getInnerRadius(d){ if(d.failure < 0){ return 0; } if(breakdownByCount){ return bc_radius*(d.failure/d.count); } else{ // Changed to fix JIRA issue PM-566 return bc_radius*(d.failure/d.count); // return bc_radius*d.total*(d.failure/d.count); } } """ # print action script data_file = os.path.join(output_dir, "bc_action.js") try: fh = open(data_file, "w") fh.write( action_content) fh.write( "\n") except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() def create_header(workflow_stat): """ Generates the header html content. @param workflow_stat the WorkflowInfo object reference """ header_str = """ """+ workflow_stat.wf_uuid +""" """ header_str += plot_utils.create_home_button() return header_str def create_toc(workflow_stat): """ Generates the table of content for the pages @param workflow_stat the WorkflowInfo object reference """ toc_str ="""
Invocation breakdown chart
""" toc_str += """ Invocation breakdown chart
Workflow environment
""" if len(workflow_stat.sub_wf_id_uuids) >0: toc_str += """ Sub workflows
""" return toc_str def create_include(workflow_stat): """ Generates the html script include content. @param workflow_stat the WorkflowInfo object reference """ include_str = """ """ return include_str def create_variable(workflow_stat): """ Generates the javascript variables used to generate the chart. @param workflow_stat the WorkflowInfo object reference """ number_of_invocations , total_runtime = workflow_stat.get_total_count_run_time() # Adding variables var_str = """ """ return var_str def create_toolbar_panel(workflow_stat): """ Generates the top level toolbar content. @param workflow_stat the WorkflowInfo object reference """ panel_str = """ """ return panel_str def create_chart_panel(workflow_stat): """ Generates the chart panel content. @param workflow_stat the WorkflowInfo object reference """ panel_str =""" """ return panel_str def create_legend_panel(workflow_stat): """ Generates the bottom level legend panel content. @param workflow_stat the WorkflowInfo object reference """ panel_str =""" """ return panel_str def create_bottom_toolbar(): """ Generates the bottom toolbar html content. """ toolbar_content =""" """ return toolbar_content def create_breakdown_plot(workflow_info , output_dir): """ Generates the html page content for displaying the breakdown chart. @param workflow_stat the WorkflowInfo object reference @output_dir the output directory path """ print_workflow_details(workflow_info ,output_dir) str_list = [] wf_content = create_include(workflow_info) str_list.append(wf_content) # Adding variables wf_content =create_variable(workflow_info) str_list.append(wf_content) wf_content = """
""" str_list.append(wf_content) # adding the tool bar panel wf_content =create_toolbar_panel(workflow_info) str_list.append(wf_content) # Adding the chart panel wf_content =create_chart_panel(workflow_info) str_list.append(wf_content) # Adding the legend panel wf_content =create_legend_panel(workflow_info) str_list.append(wf_content) wf_content = """
""" str_list.append(wf_content) wf_content =create_bottom_toolbar() str_list.append(wf_content) return "".join(str_list) def create_breakdown_plot_page(workflow_info ,output_dir): """ Prints the complete html page with the gantt chart and workflow details. @param workflow_stat the WorkflowInfo object reference @output_dir the output directory path """ str_list = [] wf_page = create_header(workflow_info) str_list.append(wf_page) wf_page = """
""" str_list.append(wf_page) wf_page = create_toc(workflow_info) str_list.append(wf_page) wf_page = """
Invocation breakdown chart
""" str_list.append(wf_page) wf_page = create_breakdown_plot(workflow_info ,output_dir) str_list.append(wf_page) # printing the brain dump content wf_page = """
Workflow environment
""" str_list.append(wf_page) if workflow_info.submit_dir is None: logger.warning("Unable to display brain dump contents. Invalid submit directory for workflow " + workflow_info.wf_uuid) else: wf_page = plot_utils.print_property_table(workflow_info.wf_env,False ," : ") str_list.append(wf_page) # print sub workflow list if len(workflow_info.sub_wf_id_uuids) >0: wf_page = """
Sub workflows
""" str_list.append(wf_page) wf_page = plot_utils.print_sub_wf_links(workflow_info.sub_wf_id_uuids) str_list.append(wf_page) wf_page = """
""" str_list.append(wf_page) wf_page = """
""" str_list.append(wf_page) data_file = os.path.join(output_dir, workflow_info.wf_uuid+".html") try: fh = open(data_file, "w") fh.write( "\n") fh.write("".join(str_list)) except IOError: logger.error("Unable to write to file " + data_file) sys.exit(1) else: fh.close() return def setup(submit_dir,out_dir,env, log_level): """ Setup the pegasus breakdown module @param submit_dir submit directory path @out_dir the output directory path @env the environment variables @log_level logging level """ # global reference global output_dir output_dir = out_dir if log_level == None: log_level = "info" setup_logger(log_level) utils.create_directory(output_dir) src_js_path = env['pegasus_javascript_dir'] src_img_path = os.path.join(env['pegasus_share_dir'] , "plots/images/protovis/") dest_js_path = os.path.join(output_dir, "js") dest_img_path = os.path.join(output_dir, "images/") utils.create_directory(dest_js_path) utils.create_directory(dest_img_path) plot_utils.copy_files(src_js_path , dest_js_path) plot_utils.copy_files(src_img_path, dest_img_path) # copy images from common src_img_path = os.path.join(env['pegasus_share_dir'] , "plots/images/common/") plot_utils.copy_files(src_img_path, dest_img_path) create_action_script(output_dir) def generate_chart(workflow_info): """ Generates the breakdown chart and all it's required files @workflow_info WorkflowInfo object reference """ create_breakdown_plot_page(workflow_info , output_dir) # ---------main---------------------------------------------------------------------------- def main(): sys.exit(0) if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/utils.py0000755000175000017500000002460311757531137025302 0ustar ryngerynge#!/usr/bin/env python """ This file implements several utility functions pegasus-statistics and pegasus-plots. """ ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Import Python modules import os import re import sys import logging import optparse import math import tempfile import commands import shutil from datetime import datetime from Pegasus.tools import properties from Pegasus.tools import utils # Initialize logging object logger = logging.getLogger() def isSubWfJob(job_name): """ Returns whether the given job is a sub workflow or not """ if (job_name.lstrip().startswith('subdax_') or job_name.lstrip().startswith('pegasus-plan_') or job_name.lstrip().startswith('subdag_')): return True; return False; def rlb(file_path, parent_wf_braindb_submit_dir, parent_submit_dir): """ This function converts the path relative to base path param file_path : file path for which the relative path needs to be calculated. 
def rlb(file_path, parent_wf_braindb_submit_dir, parent_submit_dir):
	"""
	This function converts the given path to be relative to the base path
	@param file_path : file path for which the relative path needs to be calculated.
	@param parent_wf_braindb_submit_dir : submit directory obtained from the braindump file
	@param parent_submit_dir : submit directory given by the user
	Returns : path relative to the base
	"""
	file_path = file_path.replace(parent_wf_braindb_submit_dir, parent_submit_dir)
	return file_path


def parse_workflow_environment(wf_det):
	"""
	Parses the workflow detail to get the workflow environment details
	@param wf_det the workflow detail list.
	"""
	config = {}
	config["wf_uuid"] = wf_det.wf_uuid
	config["dag_file_name"] = wf_det.dag_file_name
	config["submit_hostname"] = wf_det.submit_hostname
	config["submit_dir"] = wf_det.submit_dir
	config["planner_arguments"] = wf_det.planner_arguments
	config["user"] = wf_det.user
	config["grid_dn"] = wf_det.grid_dn
	config["planner_version"] = wf_det.planner_version
	config["dax_label"] = wf_det.dax_label
	config["dax_version"] = wf_det.dax_version
	return config
" return html_content def print_sub_wf_links(wf_id_uuid_list, extn = "html"): """ Utility method for printing the link to sub workflow pages @param wf_id_uuid_list list of wf_id and wf_uuid """ html_content = '' if len(wf_id_uuid_list) == 0: return html_content html_content ="\n
" for wf_id_uuid in wf_id_uuid_list: html_content += "\n" + str(wf_id_uuid.wf_uuid) + " - " + str(wf_id_uuid.dax_label) + "
" html_content += "\n
" return html_content def create_home_button(): """ Utility method for creating a home button """ html_content =""" Home """ return html_content def convert_to_seconds(time): """ Converts the timedelta to seconds format Param: time delta reference """ return (time.microseconds + (time.seconds + time.days * 24 * 3600) * pow(10, 6)) / pow(10, 6) def copy_files(src, dest): """ Utility method for recursively copying from a directory to another @src source directory path @dst destination directory path """ for file in os.listdir(src): if os.path.isfile(os.path.join(src,file)): try: if not os.path.exists(os.path.join(dest,file)): shutil.copy(os.path.join(src,file), dest) except: logger.error("Unable to copy file " + file + " to " + dest) sys.exit(1) def format_seconds(duration, max_comp = 2): """ Utility for converting time to a readable format @param duration : time in seconds and miliseconds @param max_comp : number of components of the returned time @return time in n component format """ comp = 0 if duration is None: return '-' milliseconds = math.modf(duration)[0] sec = int(duration) formatted_duration = '' days = sec / 86400 sec -= 86400 * days hrs = sec / 3600 sec -= 3600 * hrs mins = sec / 60 sec -= 60 * mins # days if comp < max_comp and (days >= 1 or comp > 0): comp += 1 if days == 1: formatted_duration += str(days) + ' day, ' else: formatted_duration += str(days) + ' days, ' # hours if comp < max_comp and (hrs >= 1 or comp > 0): comp += 1 if hrs == 1: formatted_duration += str(hrs) + ' hr, ' else: formatted_duration += str(hrs) + ' hrs, ' # mins if comp < max_comp and (mins >= 1 or comp > 0): comp += 1 if mins == 1: formatted_duration += str(mins) + ' min, ' else: formatted_duration += str(mins) + ' mins, ' # seconds if comp < max_comp and (sec >= 1 or comp > 0): comp += 1 if sec == 1: formatted_duration += str(sec) + " sec, " else: formatted_duration += str(sec) + " secs, " return formatted_duration def get_workflow_wall_time(workflow_states_list): """ Utility method for returning the workflow wall time given all the workflow states @worklow_states_list list of all workflow states. 
""" workflow_wall_time = None workflow_start_event_count = 0 workflow_end_event_count = 0 is_end = False workflow_start_cum = 0 workflow_end_cum = 0 for workflow_state in workflow_states_list: if workflow_state.state == 'WORKFLOW_STARTED': workflow_start_event_count += 1 workflow_start_cum += workflow_state.timestamp else: workflow_end_event_count += 1 workflow_end_cum += workflow_state.timestamp if workflow_start_event_count >0 and workflow_end_event_count > 0: if workflow_start_event_count == workflow_end_event_count: workflow_wall_time = workflow_end_cum - workflow_start_cum return workflow_wall_time def get_date_multiplier(date_filter): """ Utility for returning the multiplier for a given date filter @param date filter : the given date filter @return multiplier for a given filter """ vals = { 'day': 86400, 'hour': 3600 } return vals[date_filter] def get_date_format(date_filter): """ Utility for returning the date format for a given date filter @param date filter : the given date filter @return the date format for a given filter """ vals = { 'day': '%Y-%m-%d', 'hour': '%Y-%m-%d : %H' } return vals[date_filter] def get_date_print_format(date_filter): """ Utility for returning the date format for a given date filter in human readable format @param date filter : the given date filter @return the date format for a given filter """ vals = { 'day': '[YYYY-MM-DD]', 'hour': '[YYYY-MM-DD : HH]' } return vals[date_filter] def convert_datetime_to_printable_format(timestamp, date_time_filter = 'hour'): """ Utility for returning the date format in human readable format @param timestamp : the unix timestamp @param date filter : the given date filter @return the date format in human readable format """ local_date_time = convert_utc_to_local_datetime(timestamp) date_formatted = local_date_time.strftime(get_date_format(date_time_filter)) return date_formatted def convert_utc_to_local_datetime(utc_timestamp): """ Utility for converting the timestamp to local time @param timestamp : the unix timestamp @return the date format in human readable format """ local_datetime = datetime.fromtimestamp(utc_timestamp) return local_datetime def convert_stats_to_base_time(stats_by_time, date_time_filter = 'hour', isHost = False): """ Converts the time grouped by hour into local time.Converts the time grouped by hours into day based on the date_time_filter @param stats_by_time : ime grouped by hou @param date filter : the given date filter @param isHost : true if it is grouped by host @return the stats list after doing conversion """ formatted_stats_by_hour_list = [] for stats in stats_by_time: timestamp = stats.date_format*get_date_multiplier('hour') formatted_stats = {} formatted_stats['timestamp'] = timestamp formatted_stats['count'] = stats.count formatted_stats['runtime'] = stats.total_runtime if isHost: formatted_stats['host'] = stats.host_name formatted_stats_by_hour_list.append(formatted_stats) formatted_stats = None if date_time_filter == 'hour': for formatted_stats in formatted_stats_by_hour_list: formatted_stats['date_format'] = convert_datetime_to_printable_format(formatted_stats['timestamp'], date_time_filter) return formatted_stats_by_hour_list else: day_to_hour_mapping = {} formatted_stats_by_day_list = [] for formatted_stats_by_hour in formatted_stats_by_hour_list: formatted_stats_by_day = None corresponding_day = convert_datetime_to_printable_format(formatted_stats_by_hour['timestamp'], date_time_filter) id = '' if isHost: id += formatted_stats_by_hour['host'] + ":" id += corresponding_day if 
day_to_hour_mapping.has_key(id): formatted_stats_by_day = day_to_hour_mapping[id] formatted_stats_by_day['count'] += formatted_stats_by_hour['count'] formatted_stats_by_day['runtime'] += formatted_stats_by_hour['runtime'] else: formatted_stats_by_day = formatted_stats_by_hour formatted_stats_by_day['date_format'] = corresponding_day day_to_hour_mapping[id] = formatted_stats_by_day formatted_stats_by_day_list.append(formatted_stats_by_day) return formatted_stats_by_day_list def round_decimal_to_str(value , to=3): """ Utility method for rounding the decimal value to string to given digits @param value : value to round @param to : how many decimal points to round to """ rounded_value = '-' if value is None: return rounded_value rounded_value = str(round(float(value), to)) return rounded_value pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/stats/0000755000175000017500000000000011757531667024726 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/stats/__init__.py0000644000175000017500000000000011757531137027015 0ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/stats/workflow_stats.py0000644000175000017500000004173711757531137030374 0ustar ryngerynge#!/usr/bin/env python import os import re import sys import logging import optparse import math import tempfile # Initialize logging object logger = logging.getLogger() from Pegasus.plots_stats import utils as stats_utils transformation_stats_col_name =["Transformation","Count","Succeeded" , "Failed", "Min","Max","Mean","Total"] job_stats_col_name =['Job','Try','Site','Kickstart','Post' ,'CondorQTime','Resource','Runtime','Seqexec','Seqexec-Delay'] worklow_summary_col_name =["Type" ,"Succeeded","Failed","Incomplete" ,"Total" , " " ,"Retries" , "Total Run (Retries Included)"] worklow_status_col_name =["#","Type" ,"Succeeded","Failed","Incomplete" ,"Total" , " " ,"Retries" , "Total Run (Retries Included)" ,"Workflow Retries"] time_stats_col_name =["Date","Count","Runtime"] time_host_stats_col_name =["Date","Host", "Count","Runtime (Sec.)"] NEW_LINE_STR ="\n" class JobStatistics: def __init__(self): self.name = None self.site = None self.kickstart =None self.post = None self.condor_delay = None self.resource = None self.runtime = None self.condorQlen =None self.seqexec= None self.seqexec_delay = None self.retry_count = 0 def getFormattedJobStatistics(self): """ Returns the formatted job statistics information @return: formatted job statistics information """ formatted_job_stats = [self.name] formatted_job_stats.append(str(self.retry_count)) if self.site is None: formatted_job_stats.append('-') else: formatted_job_stats.append(self.site) formatted_job_stats.append(round_to_str(self.kickstart)) formatted_job_stats.append(round_to_str(self.post)) formatted_job_stats.append(round_to_str(self.condor_delay)) formatted_job_stats.append(round_to_str(self.resource)) formatted_job_stats.append(round_to_str(self.runtime)) formatted_job_stats.append(round_to_str(self.seqexec)) formatted_job_stats.append(round_to_str(self.seqexec_delay)) return formatted_job_stats def convert_to_str(value): """ Utility for returning a str representation of the given value. 
Return '-' if value is None @parem value : the given value that need to be converted to string """ if value is None: return '-' return str(value) def round_to_str(value , to=3): """ Utility method for rounding the float value to rounded string @param value : value to round @param to : how many decimal points to round to """ return stats_utils.round_decimal_to_str(value,to) def format_seconds(duration): """ Utility for converting time to a readable format @param duration : time in seconds and miliseconds @return time in format day,hour, min,sec """ return stats_utils.format_seconds(duration) def print_row(content, isHeader= False): """ Utility method for generating one html row @param content : list of column values @param format : column_size of each columns """ row_str ="" row_str ="" for index in range(len(content)): if isHeader: row_str +="" else: row_str +="" row_str += str(content[index]) if isHeader: row_str +="" else: row_str +="" row_str += "" row_str += NEW_LINE_STR return row_str def print_workflow_summary(workflow_stats ): """ Prints the workflow statistics summary of an top level workflow @param workflow_stats : workflow statistics object reference """ # status workflow_stats.set_job_filter('nonsub') # Tasks total_tasks = workflow_stats.get_total_tasks_status() total_succeeded_tasks = workflow_stats.get_total_succeeded_tasks_status() total_failed_tasks = workflow_stats.get_total_failed_tasks_status() total_unsubmitted_tasks = total_tasks -(total_succeeded_tasks + total_failed_tasks) total_task_retries = workflow_stats.get_total_tasks_retries() total_invocations = total_succeeded_tasks + total_failed_tasks + total_task_retries # Jobs total_jobs = workflow_stats.get_total_jobs_status() total_succeeded_jobs = workflow_stats.get_total_succeeded_jobs_status() total_failed_jobs = workflow_stats.get_total_failed_jobs_status() total_unsubmitted_jobs = total_jobs - (total_succeeded_jobs + total_failed_jobs ) total_job_retries = workflow_stats.get_total_jobs_retries() total_job_instance_retries = total_succeeded_jobs + total_failed_jobs + total_job_retries # Sub workflows workflow_stats.set_job_filter('subwf') total_sub_wfs = workflow_stats.get_total_jobs_status() total_succeeded_sub_wfs = workflow_stats.get_total_succeeded_jobs_status() total_failed_sub_wfs = workflow_stats.get_total_failed_jobs_status() total_unsubmitted_sub_wfs = total_sub_wfs - (total_succeeded_sub_wfs + total_failed_sub_wfs) total_sub_wfs_retries = workflow_stats.get_total_jobs_retries() total_sub_wfs_tries = total_succeeded_sub_wfs + total_failed_sub_wfs + total_sub_wfs_retries # tasks summary_str = "" summary_str += "total_succeeded_tasks: " + convert_to_str(total_succeeded_tasks) summary_str += NEW_LINE_STR summary_str += "total_failed_tasks: " + convert_to_str(total_failed_tasks) summary_str += NEW_LINE_STR summary_str += "total_unsubmitted_tasks: " + convert_to_str(total_unsubmitted_tasks) summary_str += NEW_LINE_STR summary_str += "total_tasks: " + convert_to_str(total_tasks) summary_str += NEW_LINE_STR summary_str += "total_task_retries: " + convert_to_str(total_task_retries) summary_str += NEW_LINE_STR summary_str += "total_invocations: " + convert_to_str(total_invocations) summary_str += NEW_LINE_STR summary_str += "total_succeeded_jobs: " + convert_to_str(total_succeeded_jobs) summary_str += NEW_LINE_STR summary_str += "total_failed_jobs: " + convert_to_str(total_failed_jobs) summary_str += NEW_LINE_STR summary_str += "total_unsubmitted_jobs: " + convert_to_str(total_unsubmitted_jobs) summary_str += 
NEW_LINE_STR summary_str += "total_jobs:" + convert_to_str(total_jobs) summary_str += NEW_LINE_STR summary_str += "total_job_retries: " + str(total_job_retries) summary_str += NEW_LINE_STR summary_str += "total_job_instance_retries:" + convert_to_str(total_job_instance_retries) summary_str += NEW_LINE_STR summary_str += "total_succeeded_sub_wfs: " + convert_to_str(total_succeeded_sub_wfs) summary_str += NEW_LINE_STR summary_str += "total_failed_sub_wfs: " + convert_to_str(total_failed_sub_wfs) summary_str += NEW_LINE_STR summary_str += "total_unsubmitted_sub_wfs: " + convert_to_str(total_unsubmitted_sub_wfs) summary_str += NEW_LINE_STR summary_str += "total_sub_wfs: " + convert_to_str(total_sub_wfs) summary_str += NEW_LINE_STR summary_str += "total_sub_wfs_retries: " + str(total_sub_wfs_retries) summary_str += NEW_LINE_STR summary_str += "total_sub_wfs_tries: " + convert_to_str(total_sub_wfs_tries) summary_str += NEW_LINE_STR workflow_states_list = workflow_stats.get_workflow_states() workflow_wall_time = stats_utils.get_workflow_wall_time(workflow_states_list) if workflow_wall_time is None: summary_str += "workflow_runtime: -" else: summary_str += "workflow_runtime: %-20s (total %d seconds)" % \ (format_seconds(workflow_wall_time), (workflow_wall_time)) summary_str += NEW_LINE_STR workflow_cum_job_wall_time = workflow_stats.get_workflow_cum_job_wall_time() if workflow_cum_job_wall_time is None: summary_str += "cumulative_workflow_runtime_kickstart: -" else: summary_str += "cumulative_workflow_runtime_kickstart: %-20s (total %d seconds)" % \ (format_seconds(workflow_cum_job_wall_time),workflow_cum_job_wall_time) summary_str += NEW_LINE_STR submit_side_job_wall_time = workflow_stats.get_submit_side_job_wall_time() if submit_side_job_wall_time is None: summary_str += "cumulative_workflow_runtime_dagman: -" else: summary_str += "cumulative_workflow_runtime_dagman: %-20s (total %d seconds)" % \ (format_seconds(submit_side_job_wall_time), submit_side_job_wall_time) return summary_str def print_individual_workflow_stats(workflow_stats , title): """ Prints the workflow statistics of workflow @param workflow_stats : workflow statistics object reference @param title : title of the workflow table """ content_str ="" # individual workflow status # workflow status workflow_stats.set_job_filter('all') total_wf_retries = workflow_stats.get_workflow_retries() content = [title,convert_to_str(total_wf_retries) ] title_col_span = len(worklow_status_col_name) -1 content_str += print_row(worklow_status_col_name, True) wf_status_str = """""" #tasks workflow_stats.set_job_filter('nonsub') total_tasks = workflow_stats.get_total_tasks_status() total_succeeded_tasks = workflow_stats.get_total_succeeded_tasks_status() total_failed_tasks = workflow_stats.get_total_failed_tasks_status() total_unsubmitted_tasks = total_tasks -(total_succeeded_tasks + total_failed_tasks ) total_task_retries = workflow_stats.get_total_tasks_retries() total_task_invocations = total_succeeded_tasks + total_failed_tasks + total_task_retries content =[" ","Tasks", convert_to_str(total_succeeded_tasks) , convert_to_str(total_failed_tasks), convert_to_str(total_unsubmitted_tasks) , convert_to_str(total_tasks) ," ",convert_to_str(total_task_retries), convert_to_str(total_task_invocations) ," "] tasks_status_str = print_row(content) # job status workflow_stats.set_job_filter('nonsub') total_jobs = workflow_stats.get_total_jobs_status() total_succeeded_jobs = workflow_stats.get_total_succeeded_jobs_status() total_failed_jobs = 
workflow_stats.get_total_failed_jobs_status() total_unsubmitted_jobs = total_jobs - (total_succeeded_jobs + total_failed_jobs ) total_job_retries = workflow_stats.get_total_jobs_retries() total_job_invocations = total_succeeded_jobs + total_failed_jobs + total_job_retries content = [" ","Jobs",convert_to_str(total_succeeded_jobs), convert_to_str(total_failed_jobs) , convert_to_str(total_unsubmitted_jobs), convert_to_str(total_jobs) ," ",convert_to_str(total_job_retries), convert_to_str(total_job_invocations) ," " ] jobs_status_str = print_row(content) # sub workflow workflow_stats.set_job_filter('subwf') total_sub_wfs = workflow_stats.get_total_jobs_status() total_succeeded_sub_wfs = workflow_stats.get_total_succeeded_jobs_status() total_failed_sub_wfs = workflow_stats.get_total_failed_jobs_status() total_unsubmitted_sub_wfs = total_sub_wfs - (total_succeeded_sub_wfs + total_failed_sub_wfs ) total_sub_wfs_retries = workflow_stats.get_total_jobs_retries() total_sub_wfs_invocations = total_succeeded_sub_wfs + total_failed_sub_wfs + total_sub_wfs_retries content = [" ","Sub Workflows",convert_to_str(total_succeeded_sub_wfs), convert_to_str(total_failed_sub_wfs) , convert_to_str(total_unsubmitted_sub_wfs), convert_to_str(total_sub_wfs) ," ",convert_to_str(total_sub_wfs_retries), convert_to_str(total_sub_wfs_invocations) ," " ] sub_wf_status_str = print_row(content) content_str += wf_status_str +"\n" content_str += tasks_status_str +"\n" content_str += jobs_status_str +"\n" content_str += sub_wf_status_str +"\n" content_str +="" return content_str def print_individual_wf_job_stats(workflow_stats , title): """ Prints the job statistics of workflow @param workflow_stats : workflow statistics object reference @param title : title for the table """ job_stats_dict={} job_stats_list=[] job_retry_count_dict={} job_status_str = "
# " + title +"
" job_status_str += "" job_status_str += print_row(job_stats_col_name, True) job_status_str +="\n" wf_job_stats_list = workflow_stats.get_job_statistics() for job in wf_job_stats_list: job_stats = JobStatistics() job_stats.name = job.job_name job_stats.site = job.site job_stats.kickstart = job.kickstart job_stats.post = job.post_time job_stats.runtime = job.runtime job_stats.condor_delay = job.condor_q_time job_stats.resource = job.resource_delay job_stats.seqexec = job.seqexec if job_stats.seqexec is not None and job_stats.kickstart is not None: job_stats.seqexec_delay = (float(job_stats.seqexec) - float(job_stats.kickstart)) if job_retry_count_dict.has_key(job.job_name): job_retry_count_dict[job.job_name] +=1 else: job_retry_count_dict[job.job_name] = 1 job_stats.retry_count = job_retry_count_dict[job.job_name] job_stats_list.append(job_stats) # printing content_list = [] # find the pretty print length for job_stat in job_stats_list: job_det =job_stat.getFormattedJobStatistics() job_status_str +=print_row(job_det) job_status_str +=NEW_LINE_STR job_status_str += "" return job_status_str def round_to_str(value , to=3): """ Utility method for rounding the float value to rounded string @param value : value to round @param to : how many decimal points to round to """ return stats_utils.round_decimal_to_str(value,to) def print_wf_transformation_stats(workflow_stats , title): """ Prints the transformation statistics of workflow @param workflow_stats : workflow statistics object reference @param title : title of the transformation statistics """ transformation_status_str = "
# " + title +"
" transformation_status_str += "" transformation_status_str += print_row(transformation_stats_col_name, True) transformation_status_str += NEW_LINE_STR for transformation in workflow_stats.get_transformation_statistics(): content = [transformation.transformation ,str(transformation.count),str(transformation.success) , str(transformation.failure), round_to_str(transformation.min),round_to_str(transformation.max),round_to_str(transformation.avg),round_to_str(transformation.sum)] transformation_status_str += print_row(content ) transformation_status_str += NEW_LINE_STR transformation_status_str += "" return transformation_status_str def print_statistics_by_time_and_host(workflow_stats , time_filter): """ Prints the job instance and invocation statistics sorted by time @param workflow_stats : workflow statistics object reference """ statistics_by_time_str = NEW_LINE_STR workflow_stats.set_job_filter('nonsub') workflow_stats.set_time_filter('hour') workflow_stats.set_transformation_filter(exclude=['condor::dagman']) statistics_by_time_str +="
# Job instances statistics per " + time_filter +"
" statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="" statistics_by_time_str +=print_row(time_stats_col_name, True) statistics_by_time_str += NEW_LINE_STR stats_by_time = workflow_stats.get_jobs_run_by_time() formatted_stats_list = stats_utils.convert_stats_to_base_time(stats_by_time , time_filter) for stats in formatted_stats_list: content = [stats['date_format'] , str(stats['count']),round_to_str(stats['runtime'])] statistics_by_time_str += print_row(content ) statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="" statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="
# Invocation statistics run per " + time_filter +"
" statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="" statistics_by_time_str +=print_row(time_stats_col_name , True ) statistics_by_time_str += NEW_LINE_STR stats_by_time = workflow_stats.get_invocation_by_time() formatted_stats_list = stats_utils.convert_stats_to_base_time(stats_by_time , time_filter) for stats in formatted_stats_list: content = [stats['date_format'] , str(stats['count']),round_to_str(stats['runtime'])] statistics_by_time_str += print_row(content ) statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="" statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="
# Job instances statistics on host per " + time_filter +"
" statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="" statistics_by_time_str +=print_row(time_host_stats_col_name , True ) statistics_by_time_str += NEW_LINE_STR stats_by_time = workflow_stats.get_jobs_run_by_time_per_host() formatted_stats_list = stats_utils.convert_stats_to_base_time(stats_by_time , time_filter, True) for stats in formatted_stats_list: content = [stats['date_format'] ,str(stats['host']) , str(stats['count']),round_to_str(stats['runtime'])] statistics_by_time_str += print_row(content) statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="" statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="
# Invocation statistics on host per " + time_filter +"
" statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="" statistics_by_time_str +=print_row(time_host_stats_col_name , True ) statistics_by_time_str += NEW_LINE_STR stats_by_time = workflow_stats.get_invocation_by_time_per_host() formatted_stats_list = stats_utils.convert_stats_to_base_time(stats_by_time , time_filter, True) for stats in formatted_stats_list: content = [stats['date_format'] ,str(stats['host']) , str(stats['count']),round_to_str(stats['runtime'])] statistics_by_time_str += print_row(content ) statistics_by_time_str += NEW_LINE_STR statistics_by_time_str +="" return statistics_by_time_str pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/plots_stats/__init__.py0000644000175000017500000000000011757531137025657 0ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/common.py0000644000175000017500000000236011757531137023044 0ustar ryngerynge## # Copyright 2007-2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Revision : $Revision: 2012 $ # Import few Python modules import os import sys import logging # Get logger object logger = logging.getLogger() # Set default level to WARNING logger.setLevel(logging.WARNING) # Format our log messages the way we want cl = logging.StreamHandler() # Don't add funcName to the formatter for Python versions < 2.5 if sys.version_info < (2, 5): formatter = logging.Formatter("%(asctime)s:%(filename)s:%(lineno)d: %(levelname)s: %(message)s") else: formatter = logging.Formatter("%(asctime)s:%(filename)s:%(funcName)s:%(lineno)d: %(levelname)s: %(message)s") cl.setFormatter(formatter) logger.addHandler(cl) pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/monitoring/0000755000175000017500000000000011757531667023376 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/monitoring/notifications.py0000644000175000017500000010302211757531137026607 0ustar ryngerynge#!/usr/bin/env python """ Class for managing notifications in pegasus-monitord. """ ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
##

# Import Python modules
import os
import sys
import math
import time
import shlex
import signal
import logging
import tempfile
import subprocess

from Pegasus.tools import utils

NOTIFICATION_FILE = "monitord-notifications.log" # filename for writing the output of notification scripts
WAIT_CHILD_FINISH = 5 # in seconds

# Get logger object (initialized elsewhere)
logger = logging.getLogger()

class Notifications:
    """
    This object contains all functions needed for managing notifications
    and starting notification scripts.
    """
    def __init__(self, notification_file_prefix,
                 max_parallel_notifications=10, notifications_timeout=0):
        """
        This function initializes the Notifications class.
        """
        self._active_notifications = []
        self._pending_notifications = []
        self._max_parallel_notifications = max_parallel_notifications
        self._notifications_timeout = notifications_timeout
        self._notifications_fn = os.path.join(notification_file_prefix, NOTIFICATION_FILE)
        self._notifications_log = None
        self._notifications = {}

        # Open notifications' log file
        try:
            self._notifications_log = open(self._notifications_fn, 'a')
        except IOError:
            logger.critical("cannot create notifications' log file... exiting...")
            sys.exit(1)

    def has_pending_notifications(self):
        """
        This function returns True if we have pending notifications.
        """
        return len(self._pending_notifications) > 0

    def has_active_notifications(self):
        """
        This function returns True if we have active notifications.
        """
        return len(self._active_notifications) > 0

    def terminate_notification(self, my_entry):
        """
        This function terminates a notification process, and cleans up its
        output/error files.
        """
        my_p = my_entry["subp"]
        my_pid = my_entry["pid"]
        my_notification = my_entry["notification"]
        my_out_fn = my_entry["out_fn"]
        my_err_fn = my_entry["err_fn"]
        my_out_fd = my_entry["out_fd"]
        my_err_fd = my_entry["err_fd"]
        my_action = my_entry["action"]
        my_p.poll()
        # If process hasn't finished...
        if my_p.returncode is None:
            # Send SIGTERM first...
            try:
                os.kill(my_pid, signal.SIGTERM)
            except OSError:
                logger.info("error sending SIGTERM to notification script...")
            # Wait for child to finish
            logger.warning("waiting for notification process to finish: %s - %s"
                           % (my_notification, my_action))
            time.sleep(WAIT_CHILD_FINISH)
            my_p.poll()
            if my_p.returncode is None:
                # Send SIGKILL now...
                logger.warning("killing notification process to finish: %s - %s"
                               % (my_notification, my_action))
                try:
                    os.kill(my_pid, signal.SIGKILL)
                except OSError:
                    logger.info("error sending SIGKILL to notification script...")
        # Finally, clean up files...
        try:
            os.unlink(my_out_fn)
            os.unlink(my_err_fn)
        except OSError:
            # No error here...
            pass
        logger.warning("notification terminated: %s - %s" % (my_notification, my_action))

    def service_notifications(self):
        """
        This function services notifications. It checks the notifications in
        the active list to see if they have finished. If so, it copies the
        stdout/stderr from these notifications to the monitord-notifications.log
        file. For notifications in the pending_notifications list, it starts
        the notification scripts, unless there are already too many
        notifications running in the system.
""" logger.info("active notifications %d, pending notifications: %d" % (len(self._active_notifications), len(self._pending_notifications))) # Step 1: Look at existing notifications if len(self._active_notifications) > 0: # We have active notifications, let's check on their statuses my_notif_index = 0 while my_notif_index < len(self._active_notifications): my_active_notif = self._active_notifications[my_notif_index] # Get subprocess object my_active_p = my_active_notif["subp"] my_status = my_active_p.poll() if my_status is not None: # Process finished notification my_finished_out_fn = my_active_notif["out_fn"] my_finished_err_fn = my_active_notif["err_fn"] my_finished_out_fd = my_active_notif["out_fd"] my_finished_err_fd = my_active_notif["err_fd"] my_finished_notification = my_active_notif["notification"] my_finished_action = my_active_notif["action"] my_finished_notification_params = my_active_notif["params"] # Close out/err files, if not already closed... try: my_finished_out_fd.close() except IOError: logger.warning("error closing stdout file for notification %s... continuing..." % (my_finished_notification)) try: my_finished_err_fd.close() except IOError: logger.warning("error closing stderr file for notification %s... continuing..." % (my_finished_notification)) if self._notifications_log is not None: if logger.isEnabledFor(logging.INFO): self._notifications_log.write("%s\n" % ('-' * 80)) self._notifications_log.write("Notification time : %s\n" % (utils.isodate())) self._notifications_log.write("Notification event : %s\n" % (my_finished_notification)) self._notifications_log.write("Notification action: %s\n" % (my_finished_action)) self._notifications_log.write("Notification status: %s\n" % (my_status)) self._notifications_log.write("\n") self._notifications_log.write("Notification environment\n") for k in my_finished_notification_params: self._notifications_log.write("%s : %s\n" % (k, my_finished_notification_params[k])) self._notifications_log.write("\n") self._notifications_log.write("stdout:\n") try: my_f = open(my_finished_out_fn, 'r') for line in my_f: self._notifications_log.write(line) except IOError: logger.warning("error processing notification stdout file: %s. continuing..." % (my_finished_out_fn)) else: my_f.close() self._notifications_log.write("\n") self._notifications_log.write("stderr:\n") try: my_f = open(my_finished_err_fn, 'r') for line in my_f: self._notifications_log.write(line) except IOError: logger.warning("error processing notification stderr file: %s. continuing..." % (my_finished_err_fn)) else: my_f.close() self._notifications_log.write("\n") self._notifications_log.write("\n") else: # Only log a one-liner so we can debug things later if we need to self._notifications_log.write("%s - %s - %s - %s\n" % (utils.isodate(), my_finished_notification, my_finished_action, my_status)) else: logger.critical("notifications' output log file not initialized... exiting...") sys.exit(1) # Now, delete output and error files try: os.unlink(my_finished_out_fn) except OSError: logger.warning("error deleting notification stdout file: %s. continuing..." % (my_finished_out_fn)) try: os.unlink(my_finished_err_fn) except OSError: logger.warning("error deleting notification stderr file: %s. continuing..." % (my_finished_err_fn)) # Delete this notification from our list my_deleted_entry = self._active_notifications.pop(my_notif_index) else: # Process still going... leave it... 
my_notif_index = my_notif_index + 1 # Step 2: Look at our notification queue while len(self._pending_notifications) > 0: # Ok we have notifications to service... # print "pending notifications: %s" % (len(self._pending_notifications)) logger.debug("pending notifications: %s" % (len(self._pending_notifications))) # Check if we have reached the maximum number of concurrent notifications if len(self._active_notifications) > self._max_parallel_notifications: # print "reaching maximum number of concurrent notifications... waiting until next cycle..." logger.info("reaching maximum number of concurrent notifications... waiting until next cycle...") break # Get first notification from the list try: my_action, my_env = self._pending_notifications.pop(0) except IndexError: logger.error("error processing notification list... exiting!") sys.exit(1) # Merge default environment with notification-specific environment my_complete_env = os.environ.copy() my_complete_env.update(my_env) try: my_notification = "%s - %s" % (my_env["PEGASUS_JOBID"], my_env["PEGASUS_EVENT"]) except KeyError: logger.warning("notification missing PEGASUS_JOBID or PEGASUS_EVENT... skipping...") continue # Split arguments my_args = shlex.split(my_action) # Create output and error files for the notification script to use try: my_temp_out = tempfile.mkstemp(prefix="notification-", suffix="-out.log", dir="/tmp") my_temp_err = tempfile.mkstemp(prefix="notification-", suffix="-err.log", dir="/tmp") os.close(my_temp_out[0]) os.close(my_temp_err[0]) my_out_fn = my_temp_out[1] my_err_fn = my_temp_err[1] except OSError: logger.warning("cannot create temp files for notification: %s... skipping..." % (my_notification)) continue # Open output and error files for the notification script try: my_f_out = open(my_out_fn, 'w') my_f_err = open(my_err_fn, 'w') except IOError: logger.warning("cannot open temp files for notification: %s... skipping..." % (my_notification)) try: os.unlink(my_out_fn) os.unlink(my_err_fn) except OSError: # No error here... pass continue # Ok, here we go... try: my_p = subprocess.Popen(my_args, stdout=my_f_out, stderr=my_f_err, env=my_complete_env) except OSError: logger.warning("cannot start notification executable: %s... skipping..." % (my_notification)) try: my_f_out.close() my_f_err.close() os.unlink(my_out_fn) os.unlink(my_err_fn) except OSError: logger.warning("found problem cleaning up notification: %s... skipping..." % (my_notification)) continue # Clean up ok, just continue continue except: logger.warning("problem starting notification: %s... skipping..." % (my_notification)) try: my_f_out.close() my_f_err.close() os.unlink(my_out_fn) os.unlink(my_err_fn) except OSError: logger.warning("found problem cleaning up notification: %s... skipping..." % (my_notification)) continue # Clean up ok, just continue continue # Let's keep everything we need for the future my_started_notification = {} my_started_notification["pid"] = my_p.pid my_started_notification["subp"] = my_p my_started_notification["env"] = my_complete_env my_started_notification["params"] = my_env my_started_notification["args"] = my_args my_started_notification["action"] = my_action my_started_notification["out_fd"] = my_f_out my_started_notification["err_fd"] = my_f_err my_started_notification["out_fn"] = my_out_fn my_started_notification["err_fn"] = my_err_fn my_started_notification["notification"] = my_notification my_started_notification["time"] = time.time() # Add to the active list, and done! 
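            # (Editor's note, not original code: each entry appended below is a
            # plain dict whose keys are set just above -- "pid", "subp", "env",
            # "params", "args", "action", "out_fd"/"err_fd", "out_fn"/"err_fn",
            # "notification" and "time" -- the same keys that
            # terminate_notification() and the expiry check in Step 3 read back.)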
self._active_notifications.append(my_started_notification) logger.info("started notification for: %s" % (my_notification)) # Step 3: Check if any notifications ran over the allowed time if self._notifications_timeout > 0: # Only go through the list if a timeout was specified # Get current time now = int(math.floor(time.time())) # Go through our list my_index = 0 while my_index < len(self._active_notifications): my_entry = self._active_notifications[my_index] my_exp_time = my_entry["time"] + self._notifications_timeout # Check if notification has expired if my_exp_time < now: # Notification has expired... kill it... logger.warning("notification expired... terminating it...") self.terminate_notification(my_entry) # Delete this notification from our list my_deleted_entry = self._active_notifications.pop(my_index) else: # Notification hasn't expired yet, move to next one... my_index = my_index + 1 def finish_notifications(self): """ This function flushes all notifications, and closes the notifications' log file. It also logs all pending (but not yet issued) notifications. """ # Take care of active notifications if len(self._active_notifications) > 0: for my_entry in self._active_notifications: self.terminate_notification(my_entry) # Take care of pending notifications if len(self._pending_notifications) > 0: for my_action, my_env in self._pending_notifications: try: my_notification = "%s - %s" % (my_env["PEGASUS_JOBID"], my_env["PEGASUS_EVENT"]) except KeyError: logger.warning("notification missing PEGASUS_JOBID or PEGASUS_EVENT... skipping...") continue logger.warning("pending notification skipped: %s - %s" % (my_notification, my_action)) # Close notifications' log file if self._notifications_log is not None: try: self._notifications_log.close() except IOError: logger.warning("error closing notifications' log file...") self._notifications_log = None def read_notification_file(self, notify_file, wf_uuid): """ This function reads the notification file, parsing all notifications and creating our list of events to track. It returns the number of notifications read from the notifications' file. """ if notify_file is None: return 0 logger.info("loading notifications from %s" % (notify_file)) # Open file try: NOTIFY = open(notify_file, "r") except IOError: logger.warning("cannot load notification file %s, continuing without notifications" % (notify_file)) return 0 # Start with empty dictionaries for the three types of notifications my_notifications_read = 0 my_notifications = {"workflow" : {}, "job" : {}, "invocation": {}} # For workflow and job notifications, we have a dict(workflow_id|job_id, dict(cond, [actions])) # For invocation notifications, we have a dict(job_id, dict(inv_id, dict(cond, [actions]))) # Process notifications for line in NOTIFY: line = line.strip() # Skip blank lines if len(line) == 0: continue # Skip comments if line.startswith("#"): continue # Check if we split it in 4 or 5 pieces if line.lower().startswith("invocation"): # This is an invocation notification, split and get all pieces my_entry = line.split(None, 4) if len(my_entry) != 5: logger.warning("cannot parse notification: %s, skipping..." % (line)) continue my_type = my_entry[0].lower() my_id = my_entry[1] try: my_inv = int(my_entry[2]) except ValueError: logger.warning("cannot parse notification: %s, skipping..." 
% (line)) continue my_condition = my_entry[3] my_action = my_entry[4] else: # This is a workflow/job notification, split and get all pieces my_entry = line.split(None, 3) if len(my_entry) != 4: logger.warning("cannot parse notification: %s, skipping..." % (line)) continue my_type = my_entry[0].lower() my_id = my_entry[1] my_condition = my_entry[2] my_action = my_entry[3] # Pick the right dictionary, depending on event type if my_type == "workflow": my_dict = my_notifications["workflow"] if my_id != wf_uuid: logger.warning("workflow notification has id %s, our id is %s, skipping..." % (my_id, wf_uuid)) continue elif my_type == "job" or my_type == "daxjob" or my_type == "dagjob": my_dict = my_notifications["job"] elif my_type == "invocation": my_dict = my_notifications["invocation"] else: logger.warning("unknown notification type: %s, skipping..." % (line)) continue logger.debug("loading notification: %s" % (line)) my_notifications_read = my_notifications_read + 1 # Make sure id is in dictionary if not my_id in my_dict: my_dict[my_id] = {} # For invocations, one extra level... if my_type == "invocation": my_dict = my_dict[my_id] if not my_inv in my_dict: my_dict[my_inv] = {} # Now add the notification condition, action pair if not my_condition in my_dict[my_inv]: # No actions, start with the list my_dict[my_inv][my_condition] = [my_action] else: # We already have an action(s), let's add the new one to the list my_dict[my_inv][my_condition].append(my_action) else: # Now add the notification condition, action pair if not my_condition in my_dict[my_id]: my_dict[my_id][my_condition] = [my_action] else: my_dict[my_id][my_condition].append(my_action) # Save our notifications for later use... if wf_uuid in self._notifications: logger.debug("reloaded notifications for workflow %s" % (wf_uuid)) self._notifications[wf_uuid] = my_notifications # Close file try: NOTIFY.close() except IOError: pass # Return number of notifications read logger.debug("loaded %d notifications for workflow %s" % (my_notifications_read, wf_uuid)) return my_notifications_read def process_workflow_notifications(self, wf, state): """ This function takes care of processing workflow-level notifications. """ # Check if we have notifications for this workflow if not wf._wf_uuid in self._notifications: return # Get the notifications' dictionary for this workflow id wf_notifications = self._notifications[wf._wf_uuid] if "workflow" in wf_notifications: my_dict = wf_notifications["workflow"] if len(my_dict) == 0: # No workflow notifications return else: logger.warning("notification structure missing workflow entry...") return # Our workflow is must be in there... if wf._wf_uuid in my_dict: my_notifications = my_dict[wf._wf_uuid] else: logger.warning("notification has mismatching workflow id: %s different from %s" % (wf._wf_uuid, str(my_dict))) return # Sanity check the state... if state != "start" and state != "end": logger.warning("unknown workflow state %s, continuing..." % (state)) return # Now, match the workflow state to the conditions in the notifications... for k in my_notifications: # Look up the actions for this notification now my_actions = my_notifications[k] if state == "start": if k != "start" and k != "all": continue # Change k == 'all' to 'start' k = "start" if state == "end": if k == "on_error": if wf._dagman_exit_code == 0: continue elif k == "on_success": if wf._dagman_exit_code != 0: continue elif k != "at_end" and k != "all": continue if k == "all": k = "at_end" # Ok, we have a match! 
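            # (Editor's summary, not original code: the matching above reduces to --
            #   state "start": fire conditions "start" and "all";
            #   state "end":   fire "at_end" and "all", plus "on_error" only when
            #                  the dagman exit code is non-zero and "on_success"
            #                  only when it is zero;
            # "all" is rewritten to the concrete event name before dispatch.)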
for action in my_actions: # Create dictionary with needed environment variables my_env = {} my_env["PEGASUS_EVENT"] = k my_env["PEGASUS_EVENT_TIMESTAMP"] = str(wf._current_timestamp) my_env["PEGASUS_EVENT_TIMESTAMP_ISO"] = utils.isodate(wf._current_timestamp) my_env["PEGASUS_SUBMIT_DIR"] = wf._original_submit_dir my_env["PEGASUS_STDOUT"] = wf._out_file my_env["PEGASUS_JOBID"] = wf._wf_uuid my_env["PEGASUS_WFID"] = ((wf._dax_label or "unknown") + "-" + (wf._dax_index or "unknown")) if state == "end": # Workflow status is already in plain format, no need for conversion my_env["PEGASUS_STATUS"] = str(wf._dagman_exit_code) # Done, queue the notification self._pending_notifications.append((action, my_env)) # print "WORKFLOW NOTIFICATION ---> ", action, my_env def process_job_notifications(self, wf, state, job, status): """ This function takes care of processing job-level notifications. """ # Check if we have notifications for this workflow if not wf._wf_uuid in self._notifications: return # Get the notifications' dictionary for this workflow id wf_notifications = self._notifications[wf._wf_uuid] if "job" in wf_notifications: my_dict = wf_notifications["job"] else: logger.warning("notification structure missing job entry...") return # Check if we have notifications for this job if not job._exec_job_id in my_dict: return my_notifications = my_dict[job._exec_job_id] if job._exec_job_id in wf._job_info: if wf._job_info[job._exec_job_id][3] is None: job_has_post_script = False else: job_has_post_script = True else: logger.warning("cannot find job %s in job_info database... skipping notification..." % (job._exec_job_id)) return # Now, match the job state to the conditions in the notifications... for k in my_notifications: # Look up the actions for this notification now my_actions = my_notifications[k] if state == "EXECUTE": if k != "start" and k != "all": continue # Change k to "start" k = "start" my_status = None elif state == "JOB_SUCCESS": if job_has_post_script: # Wait till postscript... continue if k == "start" or k == "on_error": continue if k == "all": k = "at_end" my_status = "0" elif state == "POST_SCRIPT_SUCCESS": if k == "start" or k == "on_error": continue if k == "all": k = "at_end" my_status = "0" elif state == "JOB_FAILURE": if job_has_post_script: # Wait till postscript... continue if k == "start" or k == "on_success": continue if k == "all": k = "at_end" my_status = status elif state == "POST_SCRIPT_FAILURE": if k == "start" or k == "on_success": continue if k == "all": k = "at_end" my_status = status else: # We are in some other state... continue my_output = os.path.join(wf._original_submit_dir, job._output_file) my_error = os.path.join(wf._original_submit_dir, job._error_file) # Use the rotated file names if at the end of the job if k != "start": my_output = my_output + ".%03d" % (job._job_output_counter) my_error = my_error + ".%03d" % (job._job_output_counter) # Ok, we have a match! 
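            # (Editor's illustration, not original code: each action launched
            # below receives its context through environment variables, e.g.
            # for a job-level event --
            #   PEGASUS_EVENT=at_end  PEGASUS_JOBID=findrange_ID000002
            #   PEGASUS_STATUS=0      PEGASUS_STDOUT=<submit dir>/findrange_ID000002.out.000
            # -- the job name and path here are hypothetical examples.)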
for action in my_actions: # Create dictionary with needed environment variables my_env = {} my_env["PEGASUS_EVENT"] = k my_env["PEGASUS_EVENT_TIMESTAMP"] = str(wf._current_timestamp) my_env["PEGASUS_EVENT_TIMESTAMP_ISO"] = utils.isodate(wf._current_timestamp) my_env["PEGASUS_SUBMIT_DIR"] = wf._original_submit_dir my_env["PEGASUS_JOBID"] = job._exec_job_id my_env["PEGASUS_WFID"] = ((wf._dax_label or "unknown") + "-" + (wf._dax_index or "unknown")) my_env["PEGASUS_STDOUT"] = my_output my_env["PEGASUS_STDERR"] = my_error if my_status is not None: my_env["PEGASUS_STATUS"] = str(my_status) # Done, queue the notification self._pending_notifications.append((action, my_env)) # print "JOB NOTIFICATION ---> ", action, my_env def process_invocation_notifications(self, wf, job, task_id, record=None): """ This function takes care of processing invocation-level notifications. """ if record is None: record = {} # Check if we have notifications for this workflow if not wf._wf_uuid in self._notifications: return # Get the notifications' dictionary for this workflow id wf_notifications = self._notifications[wf._wf_uuid] if "invocation" in wf_notifications: my_dict = wf_notifications["invocation"] else: logger.warning("notification structure missing invocation entry...") return # Check if we have notifications for this job if not job._exec_job_id in my_dict: return # Advance to the task dictionary my_dict = my_dict[job._exec_job_id] # Check if we have notifications for this invocation if not task_id in my_dict: return my_notifications = my_dict[task_id] # Now, match the invocation state to the condition in the notification for k in my_notifications: # Look up the actions for this notification now my_actions = my_notifications[k] if "raw" in record: my_status = record["raw"] else: my_status = job._main_job_exitcode # Convert exitcode to int try: my_status = int(my_status) except ValueError: pass # Now, compare to the notification condition(s) if my_status == 0: if k == "on_error": continue if my_status != 0: if k == "on_success": continue if k == "all": k = "at_end" # Here, we always use the rotated file names as the invocation has already finished... my_output = os.path.join(wf._original_submit_dir, job._output_file) + ".%03d" % (job._job_output_counter) my_error = os.path.join(wf._original_submit_dir, job._error_file) + ".%03d" % (job._job_output_counter) # Ok, we have a match! for action in my_actions: # Create dictionary with needed environment variables my_env = {} my_env["PEGASUS_EVENT"] = k my_env["PEGASUS_EVENT_TIMESTAMP"] = str(wf._current_timestamp) my_env["PEGASUS_EVENT_TIMESTAMP_ISO"] = utils.isodate(wf._current_timestamp) my_env["PEGASUS_SUBMIT_DIR"] = wf._original_submit_dir my_env["PEGASUS_JOBID"] = job._exec_job_id my_env["PEGASUS_INVID"] = str(task_id) my_env["PEGASUS_WFID"] = ((wf._dax_label or "unknown") + "-" + (wf._dax_index or "unknown")) my_env["PEGASUS_STDOUT"] = my_output my_env["PEGASUS_STDERR"] = my_error if k != "start": # Convert raw exitcode into human-parseable format my_env["PEGASUS_STATUS"] = str(utils.raw_to_regular(my_status)) # Done, queue the notification self._pending_notifications.append((action, my_env)) # print "INVOCATION NOTIFICATION ---> ", action, my_env def remove_notifications(self, wf_uuid): """ This function removes the notifications for workflow wf_uuid from our _notifications dictionary. """ # Check if we have notifications for this workflow if not wf_uuid in self._notifications: return logger.debug("deleting notifications for workflow %s..." 
% (wf_uuid)) # Delete them from our dictionary del self._notifications[wf_uuid] pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/monitoring/event_output.py0000644000175000017500000002466011757531137026511 0ustar ryngerynge#!/usr/bin/env python """ Functions for output pegasus-monitord events to various destinations. """ ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## # Import Python modules import os import sys import socket import logging import urlparse from Pegasus.tools import utils # Event name-space STAMPEDE_NS = "stampede." # Get logger object (initialized elsewhere) logger = logging.getLogger() # Optional imports, only generate 'warnings' if they fail nlapi = None stampede_loader = None bson = None amqp = None try: from netlogger import nlapi except: logger.info("cannot import netlogger.nlapi") try: from netlogger.analysis.schema.schema_check import SchemaVersionError from netlogger.analysis.modules import stampede_loader except: logger.info("cannot import NetLogger's stampede_loader") try: from netlogger.analysis.workflow.util import Expunge except: logger.info("cannot import NetLogger's Expunge") try: import bson except: logger.info("cannot import BSON library, 'bson'") try: from amqplib import client_0_8 as amqp except: logger.info("cannot import AMQP library") def purge_wf_uuid_from_database(rundir, output_db): """ This function purges a workflow id from the output database. """ if output_db.lower().find('sqlite:///') == 0: # Ok, we have a SQLite database, let's get the filename and check if it exists filename = output_db[10:] # Check if SQLite database exists if not os.path.isfile(filename): # No, nothing to do return # Parse the braindump file wfparams = utils.slurp_braindb(rundir) if "wf_uuid" in wfparams: if wfparams["wf_uuid"] is not None: # Get wf_uuid wf_uuid = wfparams["wf_uuid"] e = Expunge(output_db, wf_uuid) e.expunge() # Done, make this connection go away e = None class OutputURL: """ Break output URL into named parts for easier handling. """ def __init__(self, url): # Fix for Python 2.5 and earlier 2.6, as their urlparse module # does not handle these schemes correctly (netloc empty and # everything after the scheme in path) if (url.startswith("amqp:") or url.startswith("mysql:") or url.startswith("x-tcp:") or url.startswith("sqlite:")): self.scheme, rest_url = url.split(":", 1) url = "http:" + rest_url http_scheme, self.netloc, self.path, self.params, query, frag = urlparse.urlparse(url) else: # No need to change anything self.scheme, self.netloc, self.path, self.params, query, frag = urlparse.urlparse(url) host_port = '' user_pass = '' if '@' in self.netloc: user_pass, host_port = self.netloc.split('@', 1) else: host_port = self.netloc if ':' in host_port: self.host, portstr = host_port.split(':', 1) self.port = int(portstr) else: self.host = self.netloc self.port = None class EventSink(object): """ Base class for an Event Sink. 
""" def __init__(self): name = self.__class__.__name__.split('.')[-1] self._log = logging.getLogger("pegasus.monitord." + name) self._isdbg = self._log.isEnabledFor(logging.DEBUG) def send(self, event, kw): """ Clients call this function to send an event to the sink. """ pass def close(self): """ Clients call this function to close the output to this sink. """ pass class EmptySink(EventSink): """ Empty class, doesn't do anything, events go nowhere... Just a placeholder in case we need to do something different later... """ def __init__(self): super(EmptySink, self).__init__() def send(self, event, kw): pass def close(self): pass class DBEventSink(EventSink): """ Write wflow event logs to database via loader """ def __init__(self, dest, db_stats=False, **kw): assert stampede_loader is not None, "Database destination selected, "+\ "but cannot import stampede loader" self._db = stampede_loader.Analyzer(dest, perf=db_stats, batch="yes", mysql_engine='InnoDB') super(DBEventSink, self).__init__() def send(self, event, kw): if self._isdbg: self._log.debug("send.start event=%s" % (event)) d = {'event' : "stampede." + event} for k, v in kw.iteritems(): d[k.replace('__','.')] = v self._db.notify(d) if self._isdbg: self._log.debug("send.end event=%s" % (event)) def close(self): self._log.debug("close.start") self._db.finish() self._log.debug("close.end") class FileEventSink(EventSink): """ Write wflow event logs to a file. """ def __init__(self, path, restart=False, encoder=None, **kw): assert nlapi is not None, "File/socket destination selected, "+\ "but cannot import NetLogger API" super(FileEventSink, self).__init__() if restart: self._output = open(path, 'w') else: self._output = open(path, 'a') self._encoder = encoder def send(self, event, kw): if self._isdbg: self._log.debug("send.start event=%s" % (event)) self._output.write(self._encoder(event=event, **kw)) if self._isdbg: self._log.debug("send.end event=%s" % (event)) def close(self): self._log.debug("close.start") self._output.close() self._log.debug("close.end") class TCPEventSink(EventSink): """ Write wflow event logs to a host:port. """ def __init__(self, host, port, encoder=None, **kw): super(TCPEventSink, self).__init__() self._encoder = encoder self._sock = socket.socket() self._sock.connect((host, port)) def send(self, event, kw): if self._isdbg: self._log.debug("send.start event=%s" % (event)) self._sock.send(self._encoder(event=event, **kw)) if self._isdbg: self._log.debug("send.end event=%s" % (event)) def close(self): self._log.debug("close.start") self._sock.close() self._log.debug("close.end") class AMQPEventSink(EventSink): """ Write wflow event logs to an AMQP server. 
""" EXCH_OPTS = {'type' : 'topic'} def __init__(self, host, port, exch=None, encoder=None, userid='guest', password='guest', virtual_host='/', ssl=False, connect_timeout=None, **kw): super(AMQPEventSink, self).__init__() self._encoder = encoder self._conn = amqp.Connection(host="%s:%s" % (host, port), userid=userid, password=password, virtual_host=virtual_host, ssl=ssl, connect_timeout=connect_timeout, **kw) self._channel = self._conn.channel() self._exch = exch self._channel.exchange_declare(exch, **self.EXCH_OPTS) def send(self, event, kw): full_event = STAMPEDE_NS + event if self._isdbg: self._log.debug("send.start event=%s" % (full_event)) data = self._encoder(event=event, **kw) self._channel.basic_publish(amqp.Message(body=data), exchange=self._exch, routing_key=full_event) if self._isdbg: self._log.debug("send.end event=%s" % (event)) def close(self): self._log.debug("close.start") self._conn.close() self._log.debug("close.end") def bson_encode(event, **kw): """ Adapt bson.dumps() to NetLogger's Log.write() signature. """ kw['event'] = STAMPEDE_NS + event return bson.dumps(kw) def create_wf_event_sink(dest, enc=None, **kw): """ Create & return subclass of EventSink, chosen by value of 'dest' and parameterized by values (if any) in 'kw'. """ url = OutputURL(dest) # Pick an encoder def pick_encfn(enc_name): ##enc_name = url.query.get('enc', 'bp').lower() if enc_name is None or enc_name == 'bp': assert nlapi is not None, "NetLogger encoding selected, "+\ "but cannot import nlapi library" # NetLogger name=value encoding encfn = nlapi.Log(level=nlapi.Level.ALL, prefix=STAMPEDE_NS) elif enc_name == 'bson': # BSON assert bson is not None, "BSON encoding selected, "+\ "but cannot import bson library" encfn = bson_encode else: raise ValueError("Unknown encoding '%s'" % (enc_name)) return encfn # Branch on scheme if url.scheme == '': sink = FileEventSink(dest, encoder=pick_encfn(enc), **kw) _type, _name = "file", dest elif url.scheme == 'file': sink = FileEventSink(url.path, encoder=pick_encfn(enc), **kw) _type, _name = "file", url.path elif url.scheme == 'x-tcp': if url.port is None: url.port = 14380 sink = TCPEventSink(url.host, url.port, encoder=pick_encfn(enc), **kw) _type, _name = "network", "%s:%s" % (url.host, url.port) elif url.scheme == 'amqp': assert amqp is not None, "AMQP destination selected, "+\ "but cannot import AMQP library" if url.port is None: url.port = 5672 # RabbitMQ default while url.path.startswith('/'): url.path = url.path[1:] sink = AMQPEventSink(url.host, url.port, exch=url.path, encoder=pick_encfn(enc), **kw) _type, _name="AMQP", "%s:%s/%s" % (url.host, url.port, url.path) else: sink = DBEventSink(dest, **kw) _type, _name = "DB", dest logger.info("output type=%s name=%s" % (_type, _name)) return sink pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/monitoring/workflow.py0000644000175000017500000025664411757531137025633 0ustar ryngerynge#!/usr/bin/env python """ This file implements the Workflow class for pegasus-monitord. """ ## # Copyright 2007-2012 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. ## # Import Python modules import os import re import sys import time import socket import logging import commands import traceback # Import other Pegasus modules from Pegasus.tools import utils from Pegasus.monitoring.job import Job from Pegasus.tools import kickstart_parser # Get logger object (initialized elsewhere) logger = logging.getLogger() # Optional imports, only generate 'warnings' if they fail NLSimpleParser = None try: from netlogger.parsers.base import NLSimpleParser except: logger.info("cannot import NL parser") # Compile our regular expressions # Used while reading the DAG file re_parse_dag_submit_files = re.compile(r"JOB\s+(\S+)\s(\S+)(\s+DONE)?", re.IGNORECASE) re_parse_dag_script = re.compile(r"SCRIPT (?:PRE|POST)\s+(\S+)\s(\S+)\s(.*)", re.IGNORECASE) re_parse_dag_subdag = re.compile(r"SUBDAG EXTERNAL\s+(\S+)\s(\S+)\s?(?:DIR)?\s?(\S+)?", re.IGNORECASE) # Constants MONITORD_START_FILE = "monitord.started" # filename for writing when monitord starts MONITORD_DONE_FILE = "monitord.done" # filename for writing when monitord finishes MONITORD_STATE_FILE = "monitord.info" # filename for writing monitord state information MONITORD_RECOVER_FILE = "monitord.recover" # filename for writing monitord recovery information PRESCRIPT_TASK_ID = -1 # id for prescript tasks POSTSCRIPT_TASK_ID = -2 # id for postscript tasks MAX_OUTPUT_LENGTH = 2**16-1 # in bytes, maximum we can put into the database for job's stdout and stderr # Other variables condor_dagman_executable = None # condor_dagman binary location # Find condor_dagman condor_dagman_executable = utils.find_exec("condor_dagman") if condor_dagman_executable is None: # Default value condor_dagman_executable = "condor_dagman" class Workflow: """ Class used to keep everything needed to track a particular workflow """ # Class variables, used to link parent jobs to sub workflows wf_list = {} def output_to_db(self, event, kwargs): """ This function sends a NetLogger event to the loader class. """ # Sanity check (should also be done elsewhere, but repeated here) if self._sink is None: return # Don't output anything if we have disabled events to the database if self._database_disabled == True: return try: # Send event to corresponding sink self._sink.send(event, kwargs) except: # Error sending this event... disable the sink from now on... logger.warning("NL-LOAD-ERROR --> %s - %s" % (self._wf_uuid, ((self._dax_label or "unknown") + "-" + (self._dax_index or "unknown")))) logger.warning("error sending event: %s --> %s" % (event, kwargs)) logger.warning(traceback.format_exc()) self._database_disabled = True def parse_dag_file(self, dag_file): """ This function parses the DAG file and determines submit file locations """ # If we already have jobs in our _job_info dictionary, skip reading the dag file if len(self._job_info) > 0: logger.debug("skipping parsing the dag file, already have job info loaded...") return dag_file = os.path.join(self._run_dir, dag_file) try: DAG = open(dag_file, "r") except: logger.warning("unable to read %s!"
% (dag_file)) else: for dag_line in DAG: if (dag_line.lower()).find("job") >= 0: # Found Job line, parse it my_match = re_parse_dag_submit_files.search(dag_line) if my_match: if not my_match.group(3): my_jobid = my_match.group(1) my_sub = os.path.join(self._run_dir, my_match.group(2)) # Found submit file for not-DONE job if my_jobid in self._job_info: # Entry already exists for this job, just collect submit file info self._job_info[my_jobid][0] = my_sub else: # No entry for this job, let's create a new one self._job_info[my_jobid] = [my_sub, None, None, None, None, False, None, None] elif (dag_line.lower()).find("script post") >= 0: # Found SCRIPT POST line, parse it my_match = re_parse_dag_script.search(dag_line) if my_match: my_jobid = my_match.group(1) my_exec = my_match.group(2) my_args = my_match.group(3) if my_jobid in self._job_info: # Entry already exists for this job, just collect post script info self._job_info[my_jobid][3] = my_exec self._job_info[my_jobid][4] = my_args else: # No entry for this job, let's create a new one self._job_info[my_jobid] = [None, None, None, my_exec, my_args, False, None, None] elif (dag_line.lower()).find("script pre") >= 0: # Found SCRIPT PRE line, parse it my_match = re_parse_dag_script.search(dag_line) if my_match: my_jobid = my_match.group(1) my_exec = my_match.group(2) my_args = my_match.group(3) if my_jobid in self._job_info: # Entry already exists for this job, just collect pre script info self._job_info[my_jobid][1] = my_exec self._job_info[my_jobid][2] = my_args else: # No entry for this job, let's create a new one self._job_info[my_jobid] = [None, my_exec, my_args, None, None, False, None, None] elif (dag_line.lower()).find("subdag external") >= 0: # Found SUBDAG line, parse it my_match = re_parse_dag_subdag.search(dag_line) if my_match: my_jobid = my_match.group(1) my_dag = my_match.group(2) my_dir = my_match.group(3) if my_dir is None: # SUBDAG EXTERNAL line without DIR, let's get it from the DAG path if my_dag is not None: my_dir = os.path.dirname(my_dag) if my_jobid in self._job_info: # Entry already exists for this job, just set subdag flag, and dag/dir info self._job_info[my_jobid][5] = True self._job_info[my_jobid][6] = my_dag self._job_info[my_jobid][7] = my_dir else: # No entry for this job, let's create a new one self._job_info[my_jobid] = [None, None, None, None, None, True, my_dag, my_dir] try: DAG.close() except: pass # POST-CONDITION: _job_info contains only submit-files of jobs # that are not yet done. Normally, these are all submit # files. In rescue DAGs, that is an arbitrary subset of all # jobs. In addition, _job_info should contain all PRE and POST # script information for jobs in this workflow, and all subdag # jobs, with their dag files and directories def parse_in_file(self, jobname, tasks): """ This function parses the in file for a given job, reading the task information and adding it to the dictionary of tasks. It returns True if parsing was successful, or None, if an error was found. """ in_file = os.path.join(self._run_dir, jobname) + ".in" try: IN = open(in_file, "r") except: logger.warning("unable to read %s!" % (in_file)) return None tasks_found = 0 for line in IN: line = line.strip() if len(line) == 0: continue if line.startswith("#@"): line = line[2:] line = line.strip() try: my_task_id, my_transformation, my_derivation = line.split(None, 2) my_task_id = int(my_task_id) except: # Doesn't look like a proper comment line with embedded info, skipping...
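# For reference, the annotation lines handled by the try block above
# have the form "#@ <task_id> <transformation> <derivation>", e.g.
# (illustrative values only):
#   #@ 1 example::findrange:1.0 findrange_j2
# which is what the three-way split expects.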
continue # Update information in dictionary try: my_task_info = tasks[my_task_id] except: logger.warning("cannot locate task %d in dictionary... skipping this task for job: %s, dag file: %s" % (my_task_id, jobname, os.path.join(self._run_dir, self._dag_file_name))) continue my_task_info["transformation"] = my_transformation my_task_info["derivation"] = my_derivation elif line.startswith("#"): # Regular comment line... just skip it continue else: # This is a regular line, so we assume it is a task split_line = line.split(None, 1) if len(split_line) == 0: # Nothing here continue my_executable = split_line[0] if len(split_line) == 2: my_argv = split_line[1] else: my_argv = None # Increment the tasks_found counter, so that we have the correct task index tasks_found = tasks_found + 1 try: my_task_info = tasks[tasks_found] except: logger.warning("cannot locate task %d in dictionary... skipping this task for job: %s, dag file: %s" % (tasks_found, jobname, os.path.join(self._run_dir, self._dag_file_name))) continue my_task_info["argument-vector"] = my_argv my_task_info["name"] = my_executable try: IN.close() except: pass return True def read_workflow_state(self): """ This function reads the job_submit_seq and the job_counters dictionary from a file in the workflow's run directory. This is used for restarting the logging information from where we stopped last time. """ if self._output_dir is None: my_fn = os.path.join(self._run_dir, MONITORD_STATE_FILE) else: my_fn = os.path.join(self._output_dir, "%s-%s" % (self._wf_uuid, MONITORD_STATE_FILE)) try: INPUT = open(my_fn, "r") except: logger.info("cannot open state file %s, continuing without state..." % (my_fn)) return try: for line in INPUT: # Split the input line in 2, and make the second part an integer my_job, my_count = line.split(" ", 1) my_job = my_job.strip() my_count = int(my_count.strip()) if my_job == "monitord_job_sequence": # This is the last job_submit_seq used self._job_submit_seq = my_count elif my_job == "monitord_dagman_out_sequence": # This is the line we last read from the dagman.out file self._last_processed_line = my_count elif my_job == "monitord_workflow_restart_count": # This is the number of restarts we have seen in the past self._restart_count = my_count else: # Another job counter self._job_counters[my_job] = my_count except: logger.error("error processing state file %s" % (my_fn)) # Close the file try: INPUT.close() except: pass # All done! return def write_workflow_state(self): """ This function writes the job_submit_seq and the job_counters dictionary to a file in the workflow's run directory. This can be used later for restarting the logging information from where we stopped. This function will overwrite the log file every time it is called.
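        For illustration, the resulting state file consists of plain
        "key value" lines matching what read_workflow_state() parses
        (values are made up):

            monitord_job_sequence 42
            monitord_dagman_out_sequence 1337
            monitord_workflow_restart_count 1
            create_dir_j1 0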
""" if self._output_dir is None: my_fn = os.path.join(self._run_dir, MONITORD_STATE_FILE) else: my_fn = os.path.join(self._output_dir, "%s-%s" % (self._wf_uuid, MONITORD_STATE_FILE)) try: OUT = open(my_fn, "w") except: logger.error("cannot open state file %s" % (my_fn)) return try: # Write first line with the last job_submit_seq used OUT.write("monitord_job_sequence %d\n" % (self._job_submit_seq)) # Then, write the last line number of the dagman.out file we processed if self._line > self._last_processed_line: OUT.write("monitord_dagman_out_sequence %s\n" % (self._line)) else: OUT.write("monitord_dagman_out_sequence %s\n" % (self._last_processed_line)) # Next, write the restart count OUT.write("monitord_workflow_restart_count %d\n" % (self._restart_count)) # Finally, write all job_counters for my_job in self._job_counters: OUT.write("%s %d\n" % (my_job, self._job_counters[my_job])) except: logger.error("cannot write state to log file %s" % (my_fn)) # Close the file try: OUT.close() except: pass # All done! return def read_workflow_progress(self): """ This function reads the workflow progress from a previous instance of the monitoring daemon, and keeps track of the last time that was processed by pegasus-monitord. """ if self._output_dir is None: my_recover_file = os.path.join(self._run_dir, MONITORD_RECOVER_FILE) else: my_recover_file = os.path.join(self._output_dir, "%s-%s" % (self._wf_uuid, MONITORD_RECOVER_FILE)) if os.access(my_recover_file, os.F_OK): try: RECOVER = open(my_recover_file, 'r') for line in RECOVER: line = line.strip() my_key, my_value = line.split(" ", 1) if my_key == "line_processed": self._previous_processed_line = int(my_value.strip()) logger.info("monitord last processed line: %d" % (self._previous_processed_line)) break RECOVER.close() except: logger.info("couldn't open/parse recover file information: %s" % (my_recover_file)) def write_workflow_progress(self): """ This function writes the workflow progress so that a future instance of the monitoring daemon can figure out where we were in case of failure. """ # Nothing to do if we still haven't caught up with the last instance's progress... 
if self._line < self._previous_processed_line: return if self._output_dir is None: my_recover_file = os.path.join(self._run_dir, MONITORD_RECOVER_FILE) else: my_recover_file = os.path.join(self._output_dir, "%s-%s" % (self._wf_uuid, MONITORD_RECOVER_FILE)) try: RECOVER = open(my_recover_file, "w") except: logger.error("cannot open recover file: %s" % (my_recover_file)) return try: # Write line with information about where we are in the dagman.out file RECOVER.write("line_processed %s\n" % (self._line)) except: logger.error("cannot write recover information to file: %s" % (my_recover_file)) # Close the file try: RECOVER.close() except: pass return def db_send_wf_info(self): """ This function sends to the DB information about the workflow """ # Check if database is configured if self._sink is None: return # Start empty kwargs = {} # Make sure we include the wf_uuid kwargs["xwf__id"] = self._wf_uuid # Now include others, if they are defined if self._dax_label is not None: kwargs["dax__label"] = self._dax_label if self._dax_version is not None: kwargs["dax__version"] = self._dax_version if self._dax_index is not None: kwargs["dax__index"] = self._dax_index if self._dax_file is not None: kwargs["dax__file"] = self._dax_file if self._dag_file_name is not None: kwargs["dag__file__name"] = self._dag_file_name if self._timestamp is not None: kwargs["ts"] = self._timestamp if self._submit_hostname is not None: kwargs["submit__hostname"] = self._submit_hostname if self._submit_dir is not None: kwargs["submit__dir"] = self._submit_dir if self._planner_arguments is not None: kwargs["argv"] = self._planner_arguments.strip('" \t\n\r') if self._user is not None: kwargs["user"] = self._user if self._grid_dn is not None: if self._grid_dn != "null": # Only add it if it is not "null" kwargs["grid_dn"] = self._grid_dn if self._planner_version is not None: kwargs["planner__version"] = self._planner_version if self._parent_workflow_id is not None: kwargs["parent__xwf__id"] = self._parent_workflow_id if self._root_workflow_id is not None: kwargs["root__xwf__id"] = self._root_workflow_id # Send workflow event to database self.output_to_db("wf.plan", kwargs) def db_send_subwf_link(self, wf_uuid, parent_workflow_id, parent_jobid, parent_jobseq): """ This function sends to the DB the information linking a subwf to its parent job. Hack: Note that in most cases wf_uuid and parent_workflow_id would be instance variables, but there is also the case where these variables are cached in the Workflow class from a previous instance (that is why they are explicitly passed into this function). 
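        The emitted xwf.map.subwf_job event carries (illustrative values):

            xwf__id      = the parent workflow's uuid
            subwf__id    = this workflow's uuid
            job__id      = the parent job's id, e.g. "subdax_black_ID000001"
            job_inst__id = the parent job's submit sequence number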
""" # Check if database is configured if self._sink is None: return # And if we have all needed parameters if wf_uuid is None or parent_workflow_id is None or parent_jobid is None or parent_jobseq is None: return # Start empty kwargs = {} # Make sure we include the wf_uuid, but note that in this # particular event, the xwf.id key refers to the parent # workflow, while the subwf.id key refers to this workflow kwargs["xwf__id"] = parent_workflow_id if self._timestamp is not None: kwargs["ts"] = self._timestamp kwargs["subwf__id"] = wf_uuid kwargs["job__id"] = parent_jobid kwargs["job_inst__id"] = parent_jobseq # Send sub-workflow event to database self.output_to_db("xwf.map.subwf_job", kwargs) def db_send_wf_state(self, state): """ This function sends to the DB information about the current workflow state """ # Check if database is configured if self._sink is None: return # Make sure parameters are not None if state is None: return # Start empty kwargs = {} # Make sure we include the wf_uuid kwargs["xwf__id"] = self._wf_uuid kwargs["ts"] = self._current_timestamp # Always decrement the restart count by 1 kwargs["restart_count"] = self._restart_count - 1 if state == "end": # Add status field for workflow.end event kwargs["status"] = self._dagman_exit_code if self._dagman_exit_code != 0: # Set level to Error if workflow did not finish successfully kwargs["level"] = "Error" if self._dagman_exit_code is None: logger.warning("%s - %s - %s - %s: DAGMan exit code hasn't been set..." % (self._wf_uuid, ((self._dax_label or "unknown") + "-" + (self._dax_index or "unknown")), self._line, self._out_file)) kwargs["status"] = 0 state = "xwf." + state # Send workflow state event to database self.output_to_db(state, kwargs) def change_wf_state(self, state): """ This function changes the workflow state, and sends the state change to the DB. This function is called as response to DAGMan starting/stopping. """ if state == "start": logger.info("DAGMan starting with condor id %s" % (self._dagman_condor_id)) self._JSDB.write("%d INTERNAL *** DAGMAN_STARTED %s ***\n" % (self._current_timestamp, self._dagman_condor_id)) self._restart_count = self._restart_count + 1 elif state == "end": self._JSDB.write("%d INTERNAL *** DAGMAN_FINISHED %s ***\n" % (self._current_timestamp, self._dagman_exit_code)) # Take care of workflow-level notifications if self.check_notifications() == True and self._notifications_manager is not None: self._notifications_manager.process_workflow_notifications(self, state) self.db_send_wf_state(state) def start_wf(self): """ This function initializes basic parameters in the Workflow class. It should be called every time DAGMAN starts so that we can wipe out any old state in case of restarts. """ # We only wipe state about jobs that have completed logger.debug("DAGMan restarted, cleaning up old job information...") # Keep list of jobs whose information we want to delete jobs_to_delete = [] # Compile list of jobs whose information we don't need anymore... for (my_jobid, my_job_submit_seq) in self._jobs: my_job = self._jobs[my_jobid, my_job_submit_seq] my_job_state = my_job._job_state if my_job_state == "POST_SCRIPT_SUCCESS": # This job is done jobs_to_delete.append((my_jobid, my_job_submit_seq)) elif my_job_state == "JOB_SUCCESS": if my_jobid in self._job_info and self._job_info[my_jobid][3] is None: # No postscript for this job jobs_to_delete.append((my_jobid, my_job_submit_seq)) else: logger.debug("keeping job %s..." % (my_jobid)) else: logger.debug("keeping job %s..." 
% (my_jobid)) # Delete jobs... for (my_jobid, my_job_submit_seq) in jobs_to_delete: if my_jobid in self._walltime: del self._walltime[my_jobid] if my_jobid in self._job_site: del self._job_site[my_jobid] if my_jobid in self._jobs_map: del self._jobs_map[my_jobid] if (my_jobid, my_job_submit_seq) in self._jobs: del self._jobs[(my_jobid, my_job_submit_seq)] # Done! return def check_notifications(self): """ This function returns True if we need to check notifications, or False if we should skip notification checking. """ # Skip, if notificatications for this workflow are disabled if not self._enable_notifications: return False if self._line < self._previous_processed_line: # Recovery mode, skip notification that we already did. logger.debug("Recovery mode: skipping notification already issued... line %s" % (self._line)) return False return True def __init__(self, rundir, outfile, database=None, workflow_config_file=None, jsd=None, root_id=None, parent_id=None, parent_jobid=None, parent_jobseq=None, enable_notifications=True, replay_mode=False, store_stdout_stderr=True, output_dir=None, notifications_manager=None): """ This function initializes the workflow object. It looks for the workflow configuration file (or for workflow_config_file, if specified). Here we also open the jobstate.log file, and parse the dag. """ # Initialize class variables from creator parameters self._out_file = outfile self._run_dir = rundir self._parent_workflow_id = parent_id self._root_workflow_id = root_id self._sink = database self._database_disabled = False self._workflow_start = int(time.time()) self._enable_notifications = enable_notifications self._replay_mode = replay_mode self._notifications_manager = notifications_manager self._output_dir = output_dir self._store_stdout_stderr = store_stdout_stderr # Initialize other class variables self._wf_uuid = None self._dag_file_name = None self._static_bp_file = None self._dax_label = None self._dax_version = None self._dax_file = None self._dax_index = None self._timestamp = None self._submit_hostname = None self._submit_dir = None # submit dir from braindump file (run dir, if submit_dir key is not found) self._original_submit_dir = None # submit dir from braindump file (jsd dir, if submit_dir key is not found) self._planner_arguments = None self._user = None self._grid_dn = None self._planner_version = None self._last_submitted_job = None self._jobs_map = {} self._jobs = {} self._job_submit_seq = 1 self._log_file = None # monitord.log file self._jsd_file = None # jobstate.log file self._notify_file = None # notification file self._notifications = None # list of notifications for this workflow self._JSDB = None # Handle for jobstate.log file self._job_counters = {} # Job counters for figuring out which output file to parse self._job_info = {} # jobid --> [sub_file, pre_exec, pre_args, post_exec, post_args, is_subdag, subdag_dag, subdag_dir] self._valid_braindb = True # Flag for creating a new brain db if we don't find one self._line = 0 # line number from dagman.out file self._last_processed_line = 0 # line last processed by the monitoring daemon self._previous_processed_line = 0 # line last processed by a previous instance of monitord self._restart_count = 0 # Keep track of how many times the workflow was restarted self._skipping_recovery_lines = False # Flag for skipping the repeat duplicate messages generated by DAGMan self._dagman_condor_id = None # Condor id of the current DAGMan self._dagman_pid = 0 # Condor DAGMan's PID self._current_timestamp = 0 # Last 
timestamp from DAGMan self._dagman_exit_code = None # Keep track of when to finish this workflow self._monitord_exit_code = 0 # Keep track of errors inside monitord self._finished = False # keep track so we don't finish multiple times self._condorlog = None # Condor common logfile self._multiline_file_flag = False # Track multiline user log files, DAGMan > 6.6 self._walltime = {} # jid --> walltime self._job_site = {} # last site a job was planned for # Parse the braindump file wfparams = utils.slurp_braindb(rundir, workflow_config_file) if len(wfparams) == 0: # Set flag for creating a braindb file if nothing was read self._valid_braindb = False # Go through wfparams, and read what we need if "wf_uuid" in wfparams: if wfparams["wf_uuid"] is not None: self._wf_uuid = wfparams["wf_uuid"] else: logger.error("wf_uuid not specified in braindump, skipping this (sub-)workflow...") self._monitord_exit_code = 1 return # Now that we have the wf_uuid, set root_wf_uuid if not already set if self._root_workflow_id is None: self._root_workflow_id = self._wf_uuid if "dax_label" in wfparams: self._dax_label = wfparams["dax_label"] else: # Use "label" if "dax_label" not found if "label" in wfparams: self._dax_label = wfparams["label"] if "dax_index" in wfparams: self._dax_index = wfparams["dax_index"] if "dax_version" in wfparams: self._dax_version = wfparams["dax_version"] if "dax" in wfparams: self._dax_file = wfparams["dax"] if "dag" in wfparams: self._dag_file_name = wfparams["dag"] else: logger.error("dag not specified in braindump, skipping this (sub-)workflow...") self._monitord_exit_code = 1 return if "timestamp" in wfparams: self._timestamp = wfparams["timestamp"] else: # Use "pegasus_wf_time" if "timestamp" not found if "pegasus_wf_time" in wfparams: self._timestamp = wfparams["pegasus_wf_time"] # Convert timestamp from YYYYMMDDTHHMMSSZZZZZ to Epoch if self._timestamp is not None: # Convert timestamp to epoch wf_timestamp = utils.epochdate(self._timestamp) if wf_timestamp is not None: self._timestamp = wf_timestamp else: # Couldn't do it, let's just use the current time self._timestamp = int(time.time()) else: # No timestamp information is available, just use current time self._timestamp = int(time.time()) if "submit_dir" in wfparams: self._submit_dir = wfparams["submit_dir"] self._original_submit_dir = os.path.normpath(wfparams["submit_dir"]) else: # Use "run" if "submit_dir" not found if "run" in wfparams: self._submit_dir = wfparams["run"] # Use "jsd" if "submit_dir" is not found if "jsd" in wfparams: self._original_submit_dir = os.path.dirname(os.path.normpath(wfparams["jsd"])) if "planner_version" in wfparams: self._planner_version = wfparams["planner_version"] else: # Use "pegasus_version" if "planner_version" not found if "pegasus_version" in wfparams: self._planner_version = wfparams["pegasus_version"] if "planner_arguments" in wfparams: self._planner_arguments = wfparams["planner_arguments"] if "submit_hostname" in wfparams: self._submit_hostname = wfparams["submit_hostname"] if "user" in wfparams: self._user = wfparams["user"] if "grid_dn" in wfparams: self._grid_dn = wfparams["grid_dn"] if not self._replay_mode: # Recover state from a previous run self.read_workflow_state() self.read_workflow_progress() if self._previous_processed_line != 0: # Recovery mode detected, reset last_processed_line so # that we start from the beginning of the dagman.out # file... 
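# (A nonzero line counter recovered from monitord.recover means a
# previous monitord instance stopped mid-run; dagman.out is then
# replayed from the top, while check_notifications() suppresses
# notifications that were already issued the first time around.)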
self._last_processed_line = 0 # Determine location of jobstate.log file my_jsd = (jsd or utils.jobbase) if self._output_dir is None: # Make sure we have an absolute path self._jsd_file = os.path.join(rundir, my_jsd) else: self._jsd_file = os.path.join(rundir, self._output_dir, "%s-%s" % (self._wf_uuid, my_jsd)) if not os.path.isfile(self._jsd_file): logger.info("creating new file %s" % (self._jsd_file)) try: # Create new file, or append to an existing one if not self._replay_mode and self._previous_processed_line == 0: # Append to current one if not in replay mode and not # in recovering from previous errors self._JSDB = open(self._jsd_file, 'a', 0) else: # Rotate jobstate.log file, if any, in case of replay # mode or if we are starting from the beginning # because of a previous failure utils.rotate_log_file(self._jsd_file) self._JSDB = open(self._jsd_file, 'w', 0) except: logger.critical("error creating/appending to %s!" % (self._jsd_file)) self._monitord_exit_code = 1 return # Set up notifications, unless they are disabled if self._enable_notifications and self._notifications_manager is not None: if "notify" in wfparams: self._notify_file = wfparams["notify"] # Add rundir to notifications filename if self._run_dir is not None: self._notify_file = os.path.join(self._run_dir, self._notify_file) # Read notification file if self._notifications_manager.read_notification_file(self._notify_file, self._wf_uuid) == 0: # Disable notifications, if this workflow doesn't include any... self._enable_notifications = False # Say hello.... add start information to JSDB my_now = int(time.time()) print "%d - %s - MONITORD_STARTED - %s - %s" % (my_now, utils.isodate(my_now), self._wf_uuid, ((self._dax_label or "unknown") + "-" + (self._dax_index or "unknown"))) self._JSDB.write("%d INTERNAL *** MONITORD_STARTED ***\n" % (self._workflow_start)) # Write monitord.started file if self._output_dir is None: my_start_file = os.path.join(self._run_dir, MONITORD_START_FILE) else: my_start_file = os.path.join(self._output_dir, "%s-%s" % (self._wf_uuid, MONITORD_START_FILE)) utils.write_pid_file(my_start_file, my_now) # Remove monitord.done file, if it is there if self._output_dir is None: my_touch_name = os.path.join(self._run_dir, MONITORD_DONE_FILE) else: my_touch_name = os.path.join(self._output_dir, "%s-%s" % (self._wf_uuid, MONITORD_DONE_FILE)) try: os.unlink(my_touch_name) except: pass # Add this workflow to Workflow's class master list if not rundir in Workflow.wf_list: Workflow.wf_list[rundir] = {"wf_uuid": self._wf_uuid, "parent_workflow_id": self._parent_workflow_id} # All done... last step is to send to the database the workflow plan event, # along with all the static information generated by pegasus-plan # However, we only do this, if this is the first time we run if self._sink is not None and self._last_processed_line == 0: # Make sure NetLogger parser is available if NLSimpleParser is None: logger.critical("NetLogger parser is not loaded, exiting...") sys.exit(1) # Create NetLogger parser my_bp_parser = NLSimpleParser(parse_date=False) # Figure out static data filename, and create full path name my_bp_file = os.path.splitext(self._dag_file_name)[0] + ".static.bp" self._static_bp_file = os.path.join(self._run_dir, my_bp_file) # Open static bp file try: my_static_file = open(self._static_bp_file, 'r') except: logger.critical("cannot find static bp file %s, exiting..."
% (self._static_bp_file)) sys.exit(1) # Send workflow plan info to database self.db_send_wf_info() # Send event to mark the start of the static content self.output_to_db("static.start", {}) # Process static bp file try: for my_line in my_static_file: my_keys = {} my_keys = my_bp_parser.parseLine(my_line) if len(my_keys) == 0: continue if not "event" in my_keys: logger.error("bad event in static bp file: %s, continuing..." % (my_line)) continue my_event = my_keys["event"] del my_keys["event"] # Convert timestamp to epochtime if "ts" in my_keys: my_new_ts = utils.epochdate(my_keys["ts"]) if my_new_ts is not None: my_keys["ts"] = my_new_ts # Send event to database self.output_to_db(my_event, my_keys) except: logger.critical("error processing static bp file %s, exiting..." % (self._static_bp_file)) logger.critical(traceback.format_exc()) sys.exit(1) # Close static bp file try: my_static_file.close() except: logger.warning("error closing static bp file %s, continuing..." % (self._static_bp_file)) # Send event to mark the end of the static content self.output_to_db("static.end", {}) # If this workflow is a subworkflow and has a parent_id, # parent_jobid and parent_jobseq, we send an event to link # this workflow's id to the parent job... if (self._sink is not None and self._parent_workflow_id is not None and parent_jobid is not None and parent_jobseq is not None): self.db_send_subwf_link(self._wf_uuid, self._parent_workflow_id, parent_jobid, parent_jobseq) def map_subwf(self, parent_jobid, parent_jobseq, wf_info): """ This function creates a link between a subworkflow and its parent job """ # If this workflow is a subworkflow and has a parent_id, # parent_jobid and parent_jobseq, we send an event to link # this workflow's id to the parent job... if "wf_uuid" in wf_info: sub_wf_id = wf_info["wf_uuid"] else: sub_wf_id = None if "parent_workflow_id" in wf_info: parent_wf_id = wf_info["parent_workflow_id"] else: parent_wf_id = None if (self._sink is not None and sub_wf_id is not None and parent_wf_id is not None and parent_jobid is not None and parent_jobseq is not None): self.db_send_subwf_link(sub_wf_id, parent_wf_id, parent_jobid, parent_jobseq) def end_workflow(self): """ This function writes the last line in the jobstate.log and closes the file. 
""" if self._finished: return self_finished = True my_workflow_end = int(time.time()) if self._output_dir is None: my_recover_file = os.path.join(self._run_dir, MONITORD_RECOVER_FILE) else: my_recover_file = os.path.join(self._output_dir, "%s-%s" % (self._wf_uuid, MONITORD_RECOVER_FILE)) print "%d - %s - MONITORD_FINISHED - %s - %s" % (my_workflow_end, utils.isodate(my_workflow_end), self._wf_uuid, ((self._dax_label or "unknown") + "-" + (self._dax_index or "unknown"))) self._JSDB.write("%d INTERNAL *** MONITORD_FINISHED %d ***\n" % (my_workflow_end, self._monitord_exit_code)) self._JSDB.close() # Save all state to disk so that we can start again later self.write_workflow_state() # Delete recovery file try: os.unlink(my_recover_file) logger.info("recovery file deleted: %s" % (my_recover_file)) except: logger.warning("unable to remove recover file: %s" % (my_recover_file)) # Write monitord.done file if self._output_dir is None: my_touch_name = os.path.join(self._run_dir, MONITORD_DONE_FILE) else: my_touch_name = os.path.join(self._output_dir, "%s-%s" % (self._wf_uuid, MONITORD_DONE_FILE)) try: TOUCH = open(my_touch_name, "w") TOUCH.write("%s %.3f\n" % (utils.isodate(my_workflow_end), (my_workflow_end - self._workflow_start))) TOUCH.close() except: logger.error("writing %s" % (my_touch_name)) # Remove our notifications from the notification lists if self._notifications_manager is not None: self._notifications_manager.remove_notifications(self._wf_uuid) if not self._replay_mode: # Attempt to copy the condor common logfile to the current directory if self._condorlog is not None: if (os.path.isfile(self._condorlog) and os.access(self._condorlog, os.R_OK) and self._condorlog.find('/') == 0): # Copy common condor log to local directory my_log = utils.out2log(self._run_dir, self._out_file)[0] my_cmd = "/bin/cp -p %s %s.copy" % (self._condorlog, my_log) my_status, my_output = commands.getstatusoutput(my_cmd) if my_status == 0: # Copy successful try: os.unlink(my_log) except: logger.error("removing %s" % (my_log)) else: try: os.rename("%s.copy" % (my_log), my_log) except: logger.error("renaming %s.copy to %s" % (my_log, my_log)) else: logger.info("copied common log to %s" % (self._run_dir)) else: logger.info("%s: %d:%s" % (my_cmd, my_status, my_output)) def find_jobid(self, jobid): """ This function finds the job_submit_seq of a given jobid by checking the _jobs_map dict. Since add_job will update _jobs_map, this function will return the job_submit_seq of the latest jobid added to the workflow """ if jobid in self._jobs_map: return self._jobs_map[jobid] # Not found, return None return None def find_job_submit_seq(self, jobid, sched_id=None): """ If a jobid already exists and is in the PRE_SCRIPT_SUCCESS mode, this function returns its job_submit_seq. Otherwise, it returns None, meaning a new job needs to be created """ # Look for a jobid my_job_submit_seq = self.find_jobid(jobid) # No such job, return None if my_job_submit_seq is None: return None # Make sure the job is there if not (jobid, my_job_submit_seq) in self._jobs: logger.warning("cannot find job: %s, %s" % (jobid, my_job_submit_seq)) return None my_job = self._jobs[jobid, my_job_submit_seq] if my_job._job_state == "PRE_SCRIPT_SUCCESS" or my_job._job_state == "DAGMAN_SUBMIT": # jobid is in "PRE_SCRIPT_SUCCESS" or "DAGMAN_SUBMIT" state, # just return job_submit_seq return my_job_submit_seq # Ok, check sched_id if it is not None... if sched_id is not None: if my_job._sched_id == sched_id: # sched_id matches job we already have... 
must be an # out-of-order submit event... return my_job_submit_seq # jobid is in another state, return None return None def db_send_job_brief(self, my_job, event, status=None): """ This function sends to the DB basic state events for a particular job """ # Check if database is configured if self._sink is None: return # Start empty kwargs = {} # Make sure we include the wf_uuid, name, and job_submit_seq kwargs["xwf__id"] = my_job._wf_uuid kwargs["job__id"] = my_job._exec_job_id kwargs["job_inst__id"] = my_job._job_submit_seq kwargs["ts"] = my_job._job_state_timestamp kwargs["js__id"] = my_job._job_state_seq if my_job._sched_id is not None: kwargs["sched__id"] = my_job._sched_id if status is not None: kwargs["status"] = status if status != 0: kwargs["level"] = "Error" if event == "post.end": # For post-script SUCCESS/FAILED, we send the exitcode kwargs["exitcode"] = str(my_job._post_script_exitcode) # Send job state event to database self.output_to_db("job_inst." + event, kwargs) def db_send_job_start(self, my_job): """ This function sends to the DB the main.start event """ # Check if database is configured if self._sink is None: return # Start empty kwargs = {} # Make sure we include the wf_uuid, name, and job_submit_seq kwargs["xwf__id"] = my_job._wf_uuid kwargs["job__id"] = my_job._exec_job_id kwargs["job_inst__id"] = my_job._job_submit_seq kwargs["ts"] = my_job._job_state_timestamp kwargs["js__id"] = my_job._job_state_seq if my_job._input_file is not None: kwargs["stdin.file"] = my_job._input_file if my_job._output_file is not None: kwargs["stdout.file"] = my_job._output_file if my_job._error_file is not None: kwargs["stderr.file"] = my_job._error_file if my_job._sched_id is not None: kwargs["sched__id"] = my_job._sched_id # Send job state event to database self.output_to_db("job_inst.main.start", kwargs) def db_send_job_end(self, my_job, status=None): """ This function sends to the DB the main.end event """ # Check if database is configured if self._sink is None: return # Start empty kwargs = {} # Make sure we include the wf_uuid, name, and job_submit_seq kwargs["xwf__id"] = my_job._wf_uuid kwargs["job__id"] = my_job._exec_job_id kwargs["job_inst__id"] = my_job._job_submit_seq kwargs["ts"] = my_job._job_state_timestamp kwargs["js__id"] = my_job._job_state_seq if my_job._site_name is not None: kwargs["site"] = my_job._site_name else: kwargs["site"] = "" if my_job._remote_user is not None: kwargs["user"] = my_job._remote_user else: if self._user is not None: kwargs["user"] = self._user if my_job._remote_working_dir is not None: kwargs["work_dir"] = my_job._remote_working_dir else: if self._original_submit_dir is not None: kwargs["work_dir"] = self._original_submit_dir if my_job._cluster_start_time is not None: kwargs["cluster__start"] = my_job._cluster_start_time if my_job._cluster_duration is not None: kwargs["cluster__dur"] = my_job._cluster_duration if my_job._main_job_start is not None and my_job._main_job_done is not None: # If we have both timestamps, let's try to compute the local duration try: my_duration = int(my_job._main_job_done) - int(my_job._main_job_start) kwargs["local__dur"] = my_duration except: # Nothing to do, this is not mandatory pass if my_job._input_file is not None: kwargs["stdin__file"] = my_job._input_file else: # This is not mandatory, according to the schema pass if my_job._output_file is not None: if my_job._kickstart_parsed: # Only use rotated filename for job with kickstart output kwargs["stdout__file"] = my_job._output_file + ".%03d" % 
(my_job._job_output_counter) else: kwargs["stdout__file"] = my_job._output_file else: kwargs["stdout__file"] = "" if my_job._error_file is not None: if my_job._kickstart_parsed: # Only use rotated filename for job with kickstart output kwargs["stderr__file"] = my_job._error_file + ".%03d" % (my_job._job_output_counter) else: kwargs["stderr__file"] = my_job._error_file else: kwargs["stderr__file"] = "" if self._store_stdout_stderr: # Only add stdout and stderr text fields if user hasn't disabled it if my_job._stdout_text is not None: if len(my_job._stdout_text) > MAX_OUTPUT_LENGTH: # Need to truncate to avoid database problems... kwargs["stdout__text"] = my_job._stdout_text[:MAX_OUTPUT_LENGTH] logger.warning("truncating stdout for job %s" % (my_job._exec_job_id)) else: # Put everything in kwargs["stdout__text"] = my_job._stdout_text if my_job._stderr_text is not None: if len(my_job._stderr_text) > MAX_OUTPUT_LENGTH: # Need to truncate to avoid database problems... kwargs["stderr__text"] = my_job._stderr_text[:MAX_OUTPUT_LENGTH] logger.warning("truncating stderr for job %s" % (my_job._exec_job_id)) else: # Put everything in kwargs["stderr__text"] = my_job._stderr_text # Use constant for now... will change it if my_job._main_job_multiplier_factor is not None: kwargs["multiplier_factor"] = str(my_job._main_job_multiplier_factor) # Use the job exitcode for now (if the job has a postscript, it will get updated later kwargs["exitcode"] = str(my_job._main_job_exitcode) if my_job._sched_id is not None: kwargs["sched__id"] = my_job._sched_id if status is not None: kwargs["status"] = status if status != 0: kwargs["level"] = "Error" else: kwargs["status"] = -1 kwargs["level"] = "Error" # Send job state event to database self.output_to_db("job_inst.main.end", kwargs) # Clean up stdout and stderr, to avoid memory issues... if my_job._stdout_text is not None: my_job._stdout_text = None if my_job._stderr_text is not None: my_job._stderr_text = None def db_send_task_start(self, my_job, task_type, task_id=None, invocation_record=None): """ This function sends to the database task start events. task_type is either "PRE_SCRIPT", "MAIN_JOB", or "POST_SCRIPT" """ # Check if database is configured if self._sink is None: return # Start empty kwargs = {} if invocation_record is None: invocation_record = {} # Sanity check, verify task type if task_type != "PRE_SCRIPT" and task_type != "POST_SCRIPT" and task_type != "MAIN_JOB": logger.warning("unknown task type: %s" % (task_type)) return # Make sure we include the wf_uuid, name, and job_submit_seq kwargs["xwf__id"] = my_job._wf_uuid kwargs["job__id"] = my_job._exec_job_id kwargs["job_inst__id"] = my_job._job_submit_seq if task_type == "PRE_SCRIPT": # This is a PRE SCRIPT invocation # Add PRE_SCRIPT task id to this event kwargs["inv__id"] = PRESCRIPT_TASK_ID kwargs["ts"] = my_job._pre_script_start elif task_type == "POST_SCRIPT": # This is a POST SCRIPT invocation kwargs["inv__id"] = POSTSCRIPT_TASK_ID kwargs["ts"] = my_job._post_script_start elif task_type == "MAIN_JOB": # This is a MAIN JOB invocation if task_id is not None: kwargs["inv__id"] = task_id else: logger.warning("warning: task id is not specified... 
skipping task...") return if "start" in invocation_record: # Need to convert it to epoch data my_start = utils.epochdate(invocation_record["start"]) else: # Not in the invocation record, let's use our own time keeping my_start = my_job._main_job_start if my_start is None: # This must be a zero duration job (without an ULOG_EXECUTE), just use the end time my_start = my_job._main_job_done if my_start is not None: kwargs["ts"] = my_start # Send job event to database self.output_to_db("inv.start", kwargs) def db_send_task_end(self, my_job, task_type, task_id=None, invocation_record=None): """ This function sends to the database task end events with all the information. task_type is either "PRE_SCRIPT", "MAIN_JOB", or "POST_SCRIPT" """ # Check if database is configured if self._sink is None: return # Start empty kwargs = {} if invocation_record is None: invocation_record = {} # Sanity check, verify task type if task_type != "PRE_SCRIPT" and task_type != "POST_SCRIPT" and task_type != "MAIN_JOB": logger.warning("unknown task type: %s" % (task_type)) return # Make sure we include the wf_uuid, name, and job_submit_seq kwargs["xwf__id"] = my_job._wf_uuid kwargs["job__id"] = my_job._exec_job_id kwargs["job_inst__id"] = my_job._job_submit_seq if task_type == "PRE_SCRIPT": # This is a PRE SCRIPT invocation kwargs["inv__id"] = PRESCRIPT_TASK_ID kwargs["transformation"] = "dagman::pre" # For prescript tasks, nothing to put in the task_id field if my_job._pre_script_start is not None: kwargs["start_time"] = my_job._pre_script_start else: kwargs["start_time"] = my_job._pre_script_done try: kwargs["dur"] = my_job._pre_script_done - my_job._pre_script_start kwargs["remote_cpu_time"] = my_job._pre_script_done - my_job._pre_script_start except: # Duration cannot be determined, possibly a missing PRE_SCRIPT_START event kwargs["dur"] = 0 kwargs["exitcode"] = str(my_job._pre_script_exitcode) if my_job._exec_job_id in self._job_info: if self._job_info[my_job._exec_job_id][1] is not None: kwargs["executable"] = self._job_info[my_job._exec_job_id][1] else: kwargs["executable"] = "" if self._job_info[my_job._exec_job_id][2] is not None: kwargs["argv"] = self._job_info[my_job._exec_job_id][2] else: kwargs["executable"] = "" kwargs["ts"] = my_job._pre_script_done elif task_type == "POST_SCRIPT": # This is a POST SCRIPT invocation kwargs["inv__id"] = POSTSCRIPT_TASK_ID kwargs["transformation"] = "dagman::post" # For postscript tasks, nothing to put in the task_id field if my_job._post_script_start is not None: kwargs["start_time"] = my_job._post_script_start else: kwargs["start_time"] = my_job._post_script_done try: kwargs["dur"] = my_job._post_script_done - my_job._post_script_start kwargs["remote_cpu_time"] = my_job._post_script_done - my_job._post_script_start except: # Duration cannot be determined, possibly a missing POST_SCRIPT_START event kwargs["dur"] = 0 kwargs["exitcode"] = str(my_job._post_script_exitcode) if my_job._exec_job_id in self._job_info: if self._job_info[my_job._exec_job_id][3] is not None: kwargs["executable"] = self._job_info[my_job._exec_job_id][3] else: kwargs["executable"] = "" if self._job_info[my_job._exec_job_id][4] is not None: kwargs["argv"] = self._job_info[my_job._exec_job_id][4] else: kwargs["executable"] = "" kwargs["ts"] = my_job._post_script_done elif task_type == "MAIN_JOB": # This is a MAIN JOB invocation if task_id is not None: kwargs["inv__id"] = task_id else: logger.warning("warning: task id is not specified... 
skipping task...") return if "transformation" in invocation_record: kwargs["transformation"] = invocation_record["transformation"] else: if my_job._main_job_transformation is not None: kwargs["transformation"] = my_job._main_job_transformation else: if (my_job._exec_job_id in self._job_info and self._job_info[my_job._exec_job_id][5] == True): kwargs["transformation"] = "condor::dagman" if "derivation" in invocation_record: if invocation_record["derivation"] != "null": # Make sure it is not "null" kwargs["task__id"] = invocation_record["derivation"] else: # Lets see if we have the derivation from the submit file if my_job._main_job_derivation is not None: kwargs["task__id"] = my_job._main_job_derivation else: # Nothing to do if we cannot get the derivation # from the kickstart record or submit file pass if "start" in invocation_record: # Need to convert it to epoch data my_start = utils.epochdate(invocation_record["start"]) else: # Not in the invocation record, let's use our own time keeping my_start = my_job._main_job_start if my_start is None: # This must be a zero duration job (without an ULOG_EXECUTE), just use the end time my_start = my_job._main_job_done if my_start is not None: kwargs["start_time"] = my_start if "duration" in invocation_record: kwargs["dur"] = invocation_record["duration"] else: # Duration not in the invocation record if my_job._main_job_start is not None and my_job._main_job_done is not None: try: my_duration = int(my_job._main_job_done) - int(my_job._main_job_start) except: my_duration = None if my_duration is not None: kwargs["dur"] = my_duration elif my_job._main_job_done is not None: # This must be a zero duration job (without an ULOG_EXECUTE) # In this case, duration should be set to ZERO kwargs["dur"] = 0 if "utime" in invocation_record and "stime" in invocation_record: try: kwargs["remote_cpu_time"] = (float(invocation_record["utime"]) + float(invocation_record["stime"])) except ValueError: pass if my_start is not None and "duration" in invocation_record: # Calculate timestamp for when this task finished try: kwargs["ts"] = int(my_start + int(invocation_record["duration"])) except: # Something went wrong, just use the time the main job finished kwargs["ts"] = my_job._main_job_done else: kwargs["ts"] = my_job._main_job_done if "raw" in invocation_record: kwargs["exitcode"] = invocation_record["raw"] else: if my_job._main_job_exitcode is not None: kwargs["exitcode"] = str(my_job._main_job_exitcode) if "name" in invocation_record: kwargs["executable"] = invocation_record["name"] else: if my_job._main_job_executable is not None: kwargs["executable"] = my_job._main_job_executable else: if (my_job._exec_job_id in self._job_info and self._job_info[my_job._exec_job_id][5] == True): kwargs["executable"] = condor_dagman_executable else: kwargs["executable"] = "" if "argument-vector" in invocation_record: if invocation_record["argument-vector"] is not None and invocation_record["argument-vector"] != "": kwargs["argv"] = invocation_record["argument-vector"] else: if my_job._main_job_arguments is not None and my_job._main_job_arguments != "": kwargs["argv"] = my_job._main_job_arguments if "exitcode" in kwargs: if kwargs["exitcode"] != "0": kwargs["level"] = "Error" else: kwargs["level"] = "Error" # Send job event to database self.output_to_db("inv.end", kwargs) def db_send_host_info(self, my_job, record): """ This function sends host information collected from the kickstart record to the database. 
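        For illustration, a kickstart record such as (abridged,
        made-up values):

            {"hostname": "node-12.example.org", "hostaddr": "10.0.0.12",
             "resource": "local", "total": "8388608"}

        is mapped below onto the hostname, ip, site, and total_memory
        keys of a job_inst.host.info event.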
""" # Check if database is configured if self._sink is None: return # Start empty kwargs = {} # Make sure we include the wf_uuid, name, and job_submit_seq kwargs["xwf__id"] = my_job._wf_uuid kwargs["job__id"] = my_job._exec_job_id kwargs["job_inst__id"] = my_job._job_submit_seq # Add information about the host if "hostname" in record: kwargs["hostname"] = record["hostname"] else: # Don't know what the hostname is kwargs["hostname"] = "unknown" if "hostaddr" in record: kwargs["ip"] = record["hostaddr"] else: # Don't know what the ip address is kwargs["ip"] = "unknown" if "resource" in record: kwargs["site"] = record["resource"] else: # Don't know what the site name is kwargs["site"] = "unknown" if "total" in record: kwargs["total_memory"] = record["total"] else: # This is not mandatory pass if "system" in record and "release" in record and "machine" in record: kwargs["uname"] = record["system"] + "-" + record["release"] + "-" + record["machine"] else: # This is not mandatory pass # Add timestamp kwargs["ts"] = self._current_timestamp # Send host event to database self.output_to_db("job_inst.host.info", kwargs) def parse_job_output(self, my_job, job_state): """ This function tries to parse the kickstart output file of a given job and collect information for the stampede schema. """ my_output = [] parse_kickstart = True # Check if this is a subdag job if (my_job._exec_job_id in self._job_info and self._job_info[my_job._exec_job_id][5] == True): # Disable kickstart_parsing... parse_kickstart = False # If job is a subdag job, skip looking for its kickstart output if parse_kickstart: # Compose kickstart output file name (base is the filename before rotation) my_job_output_fn_base = os.path.join(self._run_dir, my_job._exec_job_id) + ".out" my_job_output_fn = my_job_output_fn_base + ".%03d" % (my_job._job_output_counter) # First assume we will find rotated file my_parser = kickstart_parser.Parser(my_job_output_fn) my_output = my_parser.parse_stampede() # Check if we were able to find it if my_parser._open_error == True: # File wasn't there, look for the file before the rotation my_parser.__init__(my_job_output_fn_base) my_output = my_parser.parse_stampede() if my_parser._open_error == True: # Couldn't find it again, one last try, as it might have just been moved my_parser.__init__(my_job_output_fn) my_output = my_parser.parse_stampede() # Check if successful if my_parser._open_error == True: logger.info("unable to find output file for job %s" % (my_job._exec_job_id)) # Initialize task id counter my_task_id = 1 if len(my_output) > 0: # Parsing the output file resulted in some info... let's parse it # Add job information to the Job class. my_invocation_found = my_job.extract_job_info(self._run_dir, my_output) if my_invocation_found: # Loop through all records for record in my_output: # Skip non-invocation records if not "invocation" in record: continue # Take care of invocation-level notifications if self.check_notifications() == True and self._notifications_manager is not None: self._notifications_manager.process_invocation_notifications(self, my_job, my_task_id, record) # Send task information to the database self.db_send_task_start(my_job, "MAIN_JOB", my_task_id, record) self.db_send_task_end(my_job, "MAIN_JOB", my_task_id, record) # Increment task id counter my_task_id = my_task_id + 1 # Send host information to the database self.db_send_host_info(my_job, record) else: # No invocation found, but possibly task records are present... 
# This can be the case for clustered jobs when Kickstart is not used. my_tasks = {} for record in my_output: if "task" in record: # Ok, this is a task record if not "id" in record: logger.warning("id missing from task record... skipping to next one") continue try: my_id = int(record["id"]) except: logger.warning("task id looks invalid, cannot convert it to int: %s skipping to next" % (record["id"])) continue # Add to our list my_tasks[my_id] = record if len(my_tasks) > 0: # Now, bring information from the .in file my_status = self.parse_in_file(my_job._exec_job_id, my_tasks) if my_status is True: # Parsing the in file completed, now generate tasks by task order for i in sorted(my_tasks): record = my_tasks[i] # Take care of renaming the exitcode field if "status" in record: record["exitcode"] = record["status"] # This should not be needed anymore... record["raw"] = record["status"] # Validate record if (not "transformation" in record or not "derivation" in record or not "start" in record or not "duration" in record or not "name" in record or not "argument-vector" in record): logger.info("task %d has incomplete information, skipping it..." % (i)) continue # Take care of invocation-level notifications if self.check_notifications() == True and self._notifications_manager is not None: self._notifications_manager.process_invocation_notifications(self, my_job, my_task_id, record) # Ok, it all validates, send task information to the database self.db_send_task_start(my_job, "MAIN_JOB", my_task_id, record) self.db_send_task_end(my_job, "MAIN_JOB", my_task_id, record) # Increment task id counter my_task_id = my_task_id + 1 else: # No tasks found... logger.info("no tasks found for job %s..." % (my_job._exec_job_id)) else: # This is the case where we cannot find kickstart records # in the output file, this will be true for SUBDAG jobs as well # Take care of invocation-level notifications if self.check_notifications() == True and self._notifications_manager is not None: self._notifications_manager.process_invocation_notifications(self, my_job, my_task_id) # If we don't have any records, we only generate 1 task self.db_send_task_start(my_job, "MAIN_JOB", my_task_id) self.db_send_task_end(my_job, "MAIN_JOB", my_task_id) # Read stdout/stderr files, if not disabled by user if self._store_stdout_stderr: my_job.read_stdout_stderr_files(self._run_dir) # parse_kickstart will be False for subdag jobs if my_job._exec_job_id.startswith("subdax_") or not parse_kickstart: # For subdag and subdax jobs, we also generate a host event record = {} record["hostname"] = socket.getfqdn() try: record["hostaddr"] = socket.gethostbyname(socket.getfqdn()) except: record["hostaddr"] = "unknown" record["resource"] = my_job._site_name # Send event to the database self.db_send_host_info(my_job, record) def add_job(self, jobid, job_state, sched_id=None): """ This function adds a new job to our list of jobs. It first checks if the job is already in our list in the PRE_SCRIPT_SUCCESS state, if so, we just update its sched id. Otherwise we create a new Job container. In any case, we always set the job state to job_state. 
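        A sketch of a typical call from the DAGMan log handlers (the
        jobid and sched_id values are illustrative):

            my_job_submit_seq = self.add_job("findrange_ID000002", "SUBMIT", sched_id="1234.0")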
""" my_job_submit_seq = self.find_job_submit_seq(jobid, sched_id) if my_job_submit_seq is not None: # Job already exists if not (jobid, my_job_submit_seq) in self._jobs: logger.warning("cannot find job: %s, %s" % (jobid, my_job_submit_seq)) return my_job = self._jobs[jobid, my_job_submit_seq] # Set sched_id if sched_id is not None: my_job._sched_id = sched_id # Update job state my_job._job_state = job_state my_job._job_state_timestamp = int(self._current_timestamp) else: # This is a new job, we have to do everything from scratch my_job_submit_seq = self._job_submit_seq # Make sure job is not already there if (jobid, my_job_submit_seq) in self._jobs: logger.warning("trying to add job twice: %s, %s" % (jobid, my_job_submit_seq)) return # Create new job container my_job = Job(self._wf_uuid, jobid, my_job_submit_seq) # Set job state my_job._job_state = job_state my_job._job_state_timestamp = int(self._current_timestamp) # Set sched_id my_job._sched_id = sched_id # Add job to our list of jobs self._jobs[jobid, my_job_submit_seq] = my_job # Add/Update job in our job map self._jobs_map[jobid] = my_job_submit_seq # Update job_submit_seq self._job_submit_seq = self._job_submit_seq + 1 # Update job counter if this job is in the SUBMIT state if job_state == "SUBMIT": if jobid in self._job_counters: # Counter already exists for this job, just increate it by 1 self._job_counters[jobid] = self._job_counters[jobid] + 1 else: # No counter for this job yet self._job_counters[jobid] = 0 # Now, we set the job output counter for this particular job my_job._job_output_counter = self._job_counters[jobid] return my_job_submit_seq def job_update_info(self, jobid, job_submit_seq, sched_id=None): """ This function adds info to an exising job. """ # Make sure job is already there if not (jobid, job_submit_seq) in self._jobs: logger.warning("cannot find job: %s, %s" % (jobid, job_submit_seq)) return my_job = self._jobs[jobid, job_submit_seq] # Set sched_id my_job._sched_id = sched_id # Everything done return def update_job_state(self, jobid, sched_id, job_submit_seq, job_state, status, walltime): """ This function updates a job's state, and also writes a line in our jobstate.out file. 
""" # Find job if job_submit_seq is None: # Need to get job_submit_seq from our hash table if jobid in self._jobs_map: job_submit_seq = self._jobs_map[jobid] if not (jobid, job_submit_seq) in self._jobs: logger.warning("cannot find job: %s, %s" % (jobid, job_submit_seq)) return # Got it my_job = self._jobs[jobid, job_submit_seq] # Check for the out of order submit event case if my_job._sched_id is None and sched_id is not None: my_out_of_order_events_detected = True else: my_out_of_order_events_detected = False # Update job state my_job.set_job_state(job_state, sched_id, self._current_timestamp, status) # Make status a string so we can print properly if status is not None: status = str(status) # Create content -- use one space only my_line = "%d %s %s %s %s %s %d" % (self._current_timestamp, jobid, job_state, status or my_job._sched_id or '-', my_job._site_name or '-', walltime or '-', job_submit_seq or '-') logger.info("new state %s" % (my_line)) # Prepare for atomic append self._JSDB.write("%s\n" % (my_line)) if self._sink is None and not self._enable_notifications: # Not generating events and notifcations, nothing else to do return # Parse the kickstart output file, also send mainjob tasks, if needed if job_state == "JOB_SUCCESS" or job_state == "JOB_FAILURE": # Main job has ended self.parse_job_output(my_job, job_state) # Take care of job-level notifications if self.check_notifications() == True and self._notifications_manager is not None: self._notifications_manager.process_job_notifications(self, job_state, my_job, status) if self._sink is None: # Not generating events, nothing else to do except clean # up stdout and stderr, to avoid memory issues... if my_job._stdout_text is not None: my_job._stdout_text = None if my_job._stderr_text is not None: my_job._stderr_text = None return if my_out_of_order_events_detected: # We need to send a submit.start event in order to create # the database entry for this job self.db_send_job_brief(my_job, "submit.start") # Check if we need to send any tasks to the database if job_state == "PRE_SCRIPT_SUCCESS" or job_state == "PRE_SCRIPT_FAILURE": # PRE script finished self.db_send_task_start(my_job, "PRE_SCRIPT") self.db_send_task_end(my_job, "PRE_SCRIPT") elif job_state == "POST_SCRIPT_SUCCESS" or job_state == "POST_SCRIPT_FAILURE": # POST script finished self.db_send_task_start(my_job, "POST_SCRIPT") self.db_send_task_end(my_job, "POST_SCRIPT") # Now, figure out what state event we need to send to the database if job_state == "PRE_SCRIPT_STARTED": self.db_send_job_brief(my_job, "pre.start") elif job_state == "PRE_SCRIPT_TERMINATED": self.db_send_job_brief(my_job, "pre.term") elif job_state == "PRE_SCRIPT_SUCCESS": self.db_send_job_brief(my_job, "pre.end", 0) elif job_state == "PRE_SCRIPT_FAILURE": self.db_send_job_brief(my_job, "pre.end", -1) elif job_state == "SUBMIT": self.db_send_job_brief(my_job, "submit.start") self.db_send_job_brief(my_job, "submit.end", 0) elif job_state == "GRID_SUBMIT": self.db_send_job_brief(my_job, "grid.submit.start") self.db_send_job_brief(my_job, "grid.submit.end", 0) elif job_state == "GLOBUS_SUBMIT": self.db_send_job_brief(my_job, "globus.submit.start") self.db_send_job_brief(my_job, "globus.submit.end", 0) elif job_state == "SUBMIT_FAILED": self.db_send_job_brief(my_job, "submit.start") self.db_send_job_brief(my_job, "submit.end", -1) elif job_state == "GLOBUS_SUBMIT_FAILED": self.db_send_job_brief(my_job, "globus.submit.start") self.db_send_job_brief(my_job, "globus.submit.end", -1) elif job_state == 
"GRID_SUBMIT_FAILED": self.db_send_job_brief(my_job, "grid.submit.start") self.db_send_job_brief(my_job, "grid.submit.end", -1) elif job_state == "EXECUTE": self.db_send_job_start(my_job) elif job_state == "REMOTE_ERROR": self.db_send_job_brief(my_job, "remote_error") elif job_state == "IMAGE_SIZE": self.db_send_job_brief(my_job, "image.info") elif job_state == "JOB_TERMINATED": self.db_send_job_brief(my_job, "main.term", 0) elif job_state == "JOB_SUCCESS": self.db_send_job_end(my_job, 0) elif job_state == "JOB_FAILURE": self.db_send_job_end(my_job, -1) elif job_state == "JOB_HELD": self.db_send_job_brief(my_job, "held.start") elif job_state == "JOB_EVICTED": self.db_send_job_brief(my_job, "main.term", -1) elif job_state == "JOB_RELEASED": self.db_send_job_brief(my_job, "held.end", 0) elif job_state == "POST_SCRIPT_STARTED": self.db_send_job_brief(my_job, "post.start") elif job_state == "POST_SCRIPT_TERMINATED": self.db_send_job_brief(my_job, "post.term") elif job_state == "POST_SCRIPT_SUCCESS": self.db_send_job_brief(my_job, "post.end", 0) elif job_state == "POST_SCRIPT_FAILURE": self.db_send_job_brief(my_job, "post.end", -1) def parse_job_sub_file(self, jobid, job_submit_seq): """ This function calls a function in the Job class to parse a job's submit file and extract planning information """ # Find job if not (jobid, job_submit_seq) in self._jobs: logger.warning("cannot find job: %s, %s" % (jobid, job_submit_seq)) return None, None # Check if we have an entry for this job if not jobid in self._job_info: return None, None # Get corresponding job my_job = self._jobs[jobid, job_submit_seq] # Make sure if we have a file for this entry # (should always be there, except for SUBDAG jobs) if self._job_info[jobid][0] is None: if self._job_info[jobid][5] is True: # Yes, this is a SUBDAG job... let's set the site as local for this job my_job._site_name = "local" return None, None # Parse sub file my_diff, my_site = my_job.parse_sub_file(self._current_timestamp, self._job_info[jobid][0]) # Change input, output, and error files to be relative to the submit directory try: if my_job._input_file.find(self._original_submit_dir) >= 0: # Path to file includes original submit_dir, let's try to remove it my_job._input_file = os.path.normpath(my_job._input_file.replace((self._original_submit_dir + os.sep), '', 1)) except: # Something went wrong, let's just keep what we had... pass try: if my_job._output_file.find(self._original_submit_dir) >= 0: # Path to file includes original submit_dir, let's try to remove it my_job._output_file = os.path.normpath(my_job._output_file.replace((self._original_submit_dir + os.sep), '', 1)) except: # Something went wrong, let's just keep what we had... pass try: if my_job._error_file.find(self._original_submit_dir) >= 0: # Path to file includes original submit_dir, let's try to remove it my_job._error_file = os.path.normpath(my_job._error_file.replace((self._original_submit_dir + os.sep), '', 1)) except: # Something went wrong, let's just use what we had... pass # All done return my_diff, my_site def has_subworkflow(self, jobid, wf_retries): """ This function returns a new dagman.out file to follow if the job is either a SUBDAG job, a pegasus-plan, or a subdax_ job. Otherwise, it returns None. """ # This shouldn't be the case... 
        if not jobid in self._job_info:
            return None

        # First we take care of SUBDAG jobs
        if self._job_info[jobid][5] == True:
            # We cannot go into SUBDAG workflows as they are not
            # planned by Pegasus and do not contain the information
            # needed by the 3.1 Stampede schema.
            return None
            # # This is a SUBDAG job, first check if dag is there
            # if self._job_info[jobid][6] is None:
            #     return None
            # # Looks ok, return new dagman.out
            # my_dagman_out = self._job_info[jobid][6] + ".dagman.out"
        else:
            # Now check if this is a pegasus-plan or a subdax_ job
            # First, look for a jobid
            my_job_submit_seq = self.find_jobid(jobid)
            # No such job, return None
            if my_job_submit_seq is None:
                return None
            # Make sure the job is there
            if not (jobid, my_job_submit_seq) in self._jobs:
                logger.warning("cannot find job: %s, %s" % (jobid, my_job_submit_seq))
                return None
            my_job = self._jobs[jobid, my_job_submit_seq]
            my_dagman_out = my_job._job_dagman_out
            if my_dagman_out is None:
                return None

        # Got it!
        my_dagman_out = os.path.normpath(my_dagman_out)
        if my_dagman_out.find(self._original_submit_dir) >= 0:
            # Path to new dagman.out file includes original submit_dir, let's try to change it
            my_dagman_out = os.path.normpath(my_dagman_out.replace((self._original_submit_dir + os.sep), '', 1))
            # Join with current run directory
            my_dagman_out = os.path.join(self._run_dir, my_dagman_out)

        # try:
        #     my_dagman_out = os.path.relpath(my_dagman_out, self._original_submit_dir)
        # except:
        #     pass

        # Split filename into dir and base names
        my_dagman_dir = os.path.dirname(my_dagman_out)
        my_dagman_file = os.path.basename(my_dagman_out)

        if wf_retries is None:
            logger.warning("persistent wf_retry not available... using sub-workflow directory: %s" % (my_dagman_dir))
            return my_dagman_out

        # Check if we have seen this sub-workflow before
        if my_dagman_dir in wf_retries:
            # Yes, increment our retry counter...
            my_retry = wf_retries[my_dagman_dir]
            my_retry = my_retry + 1
            wf_retries[my_dagman_dir] = my_retry
        else:
            # No, this is the first time we get to this sub-workflow
            wf_retries[my_dagman_dir] = 0
            my_retry = 0

        # Compose directory... assuming replanning mode
        my_retry_dir = my_dagman_dir + ".%03d" % (my_retry)

        # If directory doesn't exist, let's change to rescue mode
        if not os.path.isdir(my_retry_dir):
            logger.debug("sub-workflow directory %s does not exist, shifting to rescue mode..." % (my_retry_dir))
            my_retry_dir = my_dagman_dir + ".000"
            if not os.path.isdir(my_retry_dir):
                # Still not able to find it, output warning message
                logger.warning("sub-workflow directory %s does not exist! Skipping this sub-workflow..." % (my_retry_dir))
                return None

        # Found sub-workflow directory, let's compose the final path to the new dagman.out file...
        my_dagman_out = os.path.join(my_retry_dir, my_dagman_file)

        return my_dagman_out
pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/monitoring/__init__.py0000644000175000017500000000113311757531137025475 0ustar ryngerynge#  Copyright 2009 University Of Southern California
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing,
#  software distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/monitoring/socket_interface.py0000644000175000017500000001621111757531137027251 0ustar ryngerynge#!/usr/bin/env python

"""
This file implements the debug socket interface available in pegasus-monitord.
"""

##
#  Copyright 2007-2011 University Of Southern California
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing,
#  software distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
##

# Import Python modules
import os
import re
import sys
import time
import select       # required by select.select() in check_request() below
import socket
import logging
import traceback

# Import other Pegasus modules
from Pegasus.tools import utils
from Pegasus.monitoring.job import Job
from Pegasus.tools import kickstart_parser

# Get logger object (initialized elsewhere)
logger = logging.getLogger()

# Compile our regular expressions

# Used in untaint
re_clean_content = re.compile(r"([^-a-zA-Z0-9_\s.,\[\]^\*\?\/\+])")

def server_socket(low, hi, bind_addr="127.0.0.1"):
    """
    purpose: create a local TCP server socket to listen to sitesel requests
    paramtr: low (IN): minimum port from bind range
    paramtr: hi (IN): maximum port from bind range
    paramtr: bind_addr (IN): optional hostaddr_in to bind to, defaults to LOOPBACK
    returns: open socket, or None on error
    """
    # Create socket
    try:
        my_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.getprotobyname("tcp"))
    except:
        logger.critical("could not create socket!")
        sys.exit(42)

    # Set options
    try:
        my_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    except:
        logger.critical("setsockopt SO_REUSEADDR!")
        sys.exit(42)

    # Bind to a free port
    my_port = low
    for my_port in range(low, hi):
        try:
            my_res = my_socket.bind((bind_addr, my_port))
        except:
            # Go to next port
            continue
        else:
            break
    else:
        # Loop exhausted without a successful bind
        logger.critical("no free port to bind to!")
        sys.exit(42)

    # Make server socket non-blocking to not have a race condition
    # when doing select() before accept() on a server socket
    try:
        my_socket.setblocking(0)
    except:
        logger.critical("setblocking!")
        sys.exit(42)

    # Start listener
    try:
        my_socket.listen(socket.SOMAXCONN)
    except:
        logger.critical("listen!")
        sys.exit(42)

    # Return socket
    return my_socket

def untaint(text):
    """
    purpose: do not trust anything we get from the internet
    paramtr: text(IN): text to untaint
    returns: cleaned text, without any "special" characters
    """
    if text is None:
        return None
    my_text = re_clean_content.sub('', str(text))
    return my_text

def sendmsg(client_connection, msg):
    """
    purpose: send all data to socket connection, trying several times if necessary
    paramtr: client_connection(IN): socket connection to send data
    paramtr: msg(IN): message to send
    returns: None on error, 1 on success
    """
    my_total_bytes_sent = 0
    while my_total_bytes_sent < len(msg):
        try:
            my_bytes_sent = client_connection.send(msg[my_total_bytes_sent:])
        except:
            logger.error("writing to socket!")
            return None
        my_total_bytes_sent = my_total_bytes_sent + my_bytes_sent
    return 1

def list_workflows(client_conn, wfs, param=""):
    """
    purpose: lists the workflows currently being tracked
    globals: wfs(IN): array of workflows
    """
    for workflow_entry in wfs:
        if workflow_entry.wf is not None:
            my_wf =
workflow_entry.wf my_line = "%d - %s\r\n" % (my_wf._workflow_start, my_wf._wf_uuid) sendmsg(client_conn, my_line) jumptable = {'list-wf': list_workflows} def service_request(server, wfs): """ purpose: accept an incoming connection and service its request paramtr: server(IN): server socket with a pending connection request returns: number of status lines, or None in case of error """ # First, we accept the connection try: my_conn, my_addr = server.accept() except: logger.error("accept!") return None my_count = 0 logger.info("processing request from %s:%d" % (my_addr[0], my_addr[1])) # TODO: Can only handle 1 line up to 1024 bytes long, should fix this later # Read line fron socket while True: try: my_buffer = my_conn.recv(1024) except socket.error, e: if e[0] == 35: continue else: logger.error("recv: %d:%s" % (e[0], e[1])) try: # Close socket my_conn.close() except: pass return None else: # Received line, leave loop break if my_buffer == '': # Nothing else to read try: my_conn.close() except: pass return my_count # Removed leading/trailing spaces/tabs, trailing \r\n my_buffer = my_buffer.strip() # Do not trust anything we get from the internet my_buffer = untaint(my_buffer) # Create list of tokens my_args = my_buffer.split() if len(my_args) < 3: # Clearly not enough information sendmsg(my_conn, "%s 204 No Content\r\n" % (speak)) try: my_conn.close() except: pass return my_count # Read information we need my_proto = my_args.pop(0).upper() my_method = my_args.pop(0) my_what = my_args.pop(0) if my_proto != speak: # Illegal or unknown protocol sendmsg(my_conn, "%s 400 Bad request\r\n" % (speak)) elif my_method.upper() != "GET": # Unsupported method sendmsg(my_conn, "%s 405 Method not allowed\r\n" % (speak)) elif not my_what in jumptable: # Request item is not supported sendmsg(my_conn, "%s 501 Not implemented\r\n" % (speak)) else: # OK, process the command sendmsg(my_conn, "%s 200 OK\r\n" % (speak)) my_count = jumptable[my_what](my_conn, wfs, " ".join(my_args).lower()) try: my_conn.close() except: pass return my_count def check_request(server, wfs, timeout=0): """ purpose: check for a pending service request, and service it paramtr: server(IN): server socket paramtr: timeout(IN, OPT): timeout in seconds, defaults to 0 returns: return value of select on server socket """ # Nothing to do if server was not started if server is None: return my_input_list = [server] my_input_ready, my_output_ready, my_except_ready = select.select(my_input_list, [], [], timeout) if len(my_input_ready) == 1: service_request(server, wfs) return len(my_input_ready) pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/monitoring/job.py0000644000175000017500000004201511757531137024514 0ustar ryngerynge#!/usr/bin/env python """ This file implements the Job class for pegasus-monitord. """ ## # Copyright 2007-2012 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
## # Import Python modules import os import re import logging from Pegasus.tools import utils # Get logger object (initialized elsewhere) logger = logging.getLogger() # Global variables good_rsl = {"maxcputime": 1, "maxtime":1, "maxwalltime": 1} # Used in parse_sub_file re_rsl_string = re.compile(r"^\s*globusrsl\W", re.IGNORECASE) re_rsl_clean = re.compile(r"([-_])") re_site_parse_gvds = re.compile(r"^\s*\+(pegasus|wf)_(site|resource)\s*=\s*([\'\"])?(\S+)\3") re_parse_transformation = re.compile(r"^\s*\+pegasus_wf_xformation\s*=\s*(\S+)") re_parse_derivation = re.compile(r"^\s*\+pegasus_wf_dax_job_id\s*=\s*(\S+)") re_parse_multiplier_factor = re.compile(r"^\s*\+pegasus_cores\s=\s(\S+)") re_parse_executable = re.compile(r"^\s*executable\s*=\s*(\S+)") re_parse_arguments = re.compile(r'^\s*arguments\s*=\s*"([^"\r\n]*)"') re_parse_environment = re.compile(r'^\s*environment\s*=\s*(.*)') re_site_parse_euryale = re.compile(r"^\#!\s+site=(\S+)") re_parse_property = re.compile(r'([^:= \t]+)\s*[:=]?\s*(.*)') re_parse_input = re.compile(r"^\s*intput\s*=\s*(\S+)") re_parse_output = re.compile(r"^\s*output\s*=\s*(\S+)") re_parse_error = re.compile(r"^\s*error\s*=\s*(\S+)") class Job: """ Class used to keep information needed to track a particular job """ # Variables that describe a job, as per the Stampede schema # Some will be initialized in the init method, others will # get their values from the kickstart output file when a job # finished def __init__(self, wf_uuid, name, job_submit_seq): """ This function initializes the job parameters with the information available when a job is detected in the "PRE_SCRIPT_STARTED" or the "SUBMIT" state. Other parameters will remain None until a job finishes and a kickstart output file can be parsed. """ self._wf_uuid = wf_uuid self._exec_job_id = name self._job_submit_seq = job_submit_seq self._sched_id = None self._site_name = None self._host_id = None self._remote_user = None self._remote_working_dir = None self._cluster_start_time = None self._cluster_duration = None self._job_state = None self._job_state_seq = 0 self._job_state_timestamp = None self._job_output_counter = 0 self._pre_script_start = None self._pre_script_done = None self._pre_script_exitcode = None self._main_job_start = None self._main_job_done = None self._main_job_transformation = None self._main_job_derivation = None self._main_job_executable = None self._main_job_arguments = None self._main_job_exitcode = None self._main_job_multiplier_factor = None self._post_script_start = None self._post_script_done = None self._post_script_exitcode = None self._input_file = None self._output_file = None self._error_file = None self._stdout_text = None self._stderr_text = None self._job_dagman_out = None # _CONDOR_DAGMAN_LOG from environment # line for pegasus-plan and subdax_ jobs self._kickstart_parsed = False # Flag indicating if the kickstart # output for this job was parsed or not def set_job_state(self, job_state, sched_id, timestamp, status): """ This function sets the job state for this job. It also updates the times the main job and PRE/POST scripts start and finish. """ self._job_state = job_state self._job_state_timestamp = int(timestamp) # Increment job state sequence self._job_state_seq = self._job_state_seq + 1 # Set sched_id if we don't already have it... 
if self._sched_id is None: self._sched_id = sched_id # Record timestamp for certain job states if job_state == "PRE_SCRIPT_STARTED": self._pre_script_start = int(timestamp) elif (job_state == "PRE_SCRIPT_SUCCESS" or job_state == "PRE_SCRIPT_FAILURE"): self._pre_script_done = int(timestamp) self._pre_script_exitcode = utils.regular_to_raw(status) elif job_state == "POST_SCRIPT_STARTED": self._post_script_start = int(timestamp) elif job_state == "POST_SCRIPT_TERMINATED": self._post_script_done = int(timestamp) elif job_state == "EXECUTE": self._main_job_start = int(timestamp) elif job_state == "JOB_TERMINATED": self._main_job_done = int(timestamp) elif job_state == "JOB_SUCCESS" or job_state == "JOB_FAILURE": self._main_job_exitcode = utils.regular_to_raw(status) elif (job_state == "POST_SCRIPT_SUCCESS" or job_state == "POST_SCRIPT_FAILURE"): self._post_script_exitcode = utils.regular_to_raw(status) def parse_sub_file(self, stamp, submit_file): """ This function parses a job's submit file and returns job planning information. In addition, we try to populate the job type from information in the submit file. # paramtr: stamp(IN): timestamp associated with the log line # paramtr: submit_file(IN): submit file name # globals: good_rsl(IN): which RSL keys constitute time requirements # returns: (largest job time requirement in minutes, destination site) # returns: (None, None) if sub file not found """ parse_environment = False my_result = None my_site = None # Update stat record for submit file try: my_stats = os.stat(submit_file) except OSError: # Could not stat file logger.error("stat %s" % (submit_file)) return my_result, my_site # Check submit file timestamp if stamp < my_stats[8]: #mtime logger.info("%s: sub file modified: job timestamp=%d, file mtime=%d, diff=%d" % (submit_file, stamp, my_stats[8], my_stats[8]-stamp)) # Check if we need to parse the environment line if (self._exec_job_id.startswith("pegasus-plan") or self._exec_job_id.startswith("subdax_")): parse_environment = True try: SUB = open(submit_file, "r") except IOError: logger.error("unable to parse %s" % (submit_file)) return my_result, my_site # Parse submit file for my_line in SUB: if re_rsl_string.search(my_line): # Found RSL string, do parse now for my_match in re.findall(r"\(([^)]+)\)", my_line): # Split into key and value my_k, my_v = my_match.split("=", 1) # Remove _- characters from string my_k = re_rsl_clean.sub('', my_k) if my_k.lower() in good_rsl and my_v > my_result: try: my_result = int(my_v) except ValueError: my_result = None elif re_site_parse_gvds.search(my_line): # GVDS agreement my_site = re_site_parse_gvds.search(my_line).group(4) self._site_name = my_site elif re_site_parse_euryale.search(my_line): # Euryale specific comment my_site = re_site_parse_euryale.search(my_line).group(1) self._site_name = my_site elif re_parse_transformation.search(my_line): # Found line with job transformation my_transformation = re_parse_transformation.search(my_line).group(1) # Remove quotes, if any my_transformation = my_transformation.strip('"') self._main_job_transformation = my_transformation elif re_parse_derivation.search(my_line): # Found line with job derivation my_derivation = re_parse_derivation.search(my_line).group(1) # Remove quotes, if any my_derivation = my_derivation.strip('"') if my_derivation == "null": # If derivation is the "null" string, we don't want to take it self._main_job_derivation = None else: self._main_job_derivation = my_derivation elif re_parse_executable.search(my_line): # Found line with 
executable my_executable = re_parse_executable.search(my_line).group(1) # Remove quotes, if any my_executable = my_executable.strip('"') self._main_job_executable = my_executable elif re_parse_arguments.search(my_line): # Found line with arguments my_arguments = re_parse_arguments.search(my_line).group(1) # Remove quotes, if any my_arguments = my_arguments.strip('"') self._main_job_arguments = my_arguments elif re_parse_multiplier_factor.search(my_line): # Found line with multiplier_factor my_multiplier_factor = re_parse_multiplier_factor.search(my_line).group(1) try: self._main_job_multiplier_factor = int(my_multiplier_factor) except ValueError: logger.warning("%s: cannot convert multiplier factor: %s" % (os.path.basename(submit_file), my_multiplier_factor)) self._main_job_multiplier_factor = None elif re_parse_input.search(my_line): # Found line with input file my_input = re_parse_input.search(my_line).group(1) # Remove quotes, if any my_input = my_input.strip('"') self._input_file = os.path.normpath(my_input) elif re_parse_output.search(my_line): # Found line with output file my_output = re_parse_output.search(my_line).group(1) # Remove quotes, if any my_output = my_output.strip('"') self._output_file = os.path.normpath(my_output) elif re_parse_error.search(my_line): # Found line with error file my_error = re_parse_error.search(my_line).group(1) # Remove quotes, if any my_error = my_error.strip('"') self._error_file = os.path.normpath(my_error) elif parse_environment and re_parse_environment.search(my_line): # Found line with environment v = re_parse_environment.search(my_line).group(1) sub_props = v.split(';') for sub_prop_line in sub_props: sub_prop_line = sub_prop_line.strip() # Remove any spaces if len(sub_prop_line) == 0: continue sub_prop = re_parse_property.search(sub_prop_line) if sub_prop: if sub_prop.group(1) == "_CONDOR_DAGMAN_LOG": self._job_dagman_out = sub_prop.group(2) SUB.close() # All done! return my_result, my_site def extract_job_info(self, run_dir, kickstart_output): """ This function reads the output from the kickstart parser and extracts the job information for the Stampede schema. It first looks for an invocation record, and then for a clustered record. Returns None if an error occurs, True if an invocation record was found, and False if it wasn't. """ # Check if we have anything if len(kickstart_output) == 0: return None # Kickstart was parsed self._kickstart_parsed = True # Let's try to find an invocation record... my_invocation_found = False my_task_number = 0 self._stdout_text = "" # Initialize stdout for my_record in kickstart_output: if not "invocation" in my_record: # Not this one... skip to the next continue # Ok, we have an invocation record, extract the information we # need. Note that this may overwrite information obtained from # the submit file (e.g. the site_name). 
# Increment task_number my_task_number = my_task_number + 1 if not my_invocation_found: # Things we only need to do once if "resource" in my_record: self._site_name = my_record["resource"] if "user" in my_record: self._remote_user = my_record["user"] if "cwd" in my_record: self._remote_working_dir = my_record["cwd"] if "hostname" in my_record: self._host_id = my_record["hostname"] # We are done with this part my_invocation_found = True if "stdout" in my_record: self._stdout_text = self._stdout_text + "#@ %d stdout\n" % (my_task_number) + my_record["stdout"] + "\n" if "stderr" in my_record: self._stdout_text = self._stdout_text + "#@ %d stderr\n" % (my_task_number) + my_record["stderr"] + "\n" # Now, we encode it! if self._stdout_text != "": self._stdout_text = utils.quote(self._stdout_text) if not my_invocation_found: logger.debug("cannot find invocation record in output") # Look for clustered record... my_cluster_found = False for my_record in kickstart_output: if not "clustered" in my_record: # Not this one... skip to the next continue # Ok found it, fill in cluster parameters if "duration" in my_record: self._cluster_duration = my_record["duration"] if "start" in my_record: # Convert timestamp to EPOCH my_start = utils.epochdate(my_record["start"]) if my_start is not None: self._cluster_start_time = my_start # No need to look further... my_cluster_found = True break if not my_cluster_found: logger.debug("cannot find cluster record in output") # Finally, read error file only my_err_file = os.path.join(run_dir, self._error_file) if my_invocation_found: # in my job output there were some invocation records # assume then that they are rotated also my_err_file = my_err_file + ".%03d" % (self._job_output_counter) try: ERR = open(my_err_file, 'r') self._stderr_text = utils.quote(ERR.read()) except IOError: self._stderr_text = None logger.warning("unable to read error file: %s, continuing..." % (my_err_file)) else: ERR.close() # Done populating Job class with information from the output file return my_invocation_found def read_stdout_stderr_files(self, run_dir): """ This function reads both stdout and stderr files and populates these fields in the Job class. """ if self._output_file is None: # This is the case for SUBDAG jobs self._stdout_text = None else: my_out_file = os.path.join(run_dir, self._output_file) try: OUT = open(my_out_file, 'r') self._stdout_text = utils.quote("#@ 1 stdout\n" + OUT.read()) except IOError: self._stdout_text = None logger.warning("unable to read output file: %s, continuing..." % (my_out_file)) else: OUT.close() if self._error_file is None: # This is the case for SUBDAG jobs self._stderr_text = None else: my_err_file = os.path.join(run_dir, self._error_file) try: ERR = open(my_err_file, 'r') self._stderr_text = utils.quote(ERR.read()) except IOError: self._stderr_text = None logger.warning("unable to read error file: %s, continuing..." % (my_err_file)) else: ERR.close() pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/DAX2.py0000644000175000017500000006342611757531137022264 0ustar ryngerynge# Copyright 2009 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """API for generating Pegasus DAXes The classes in this module can be used to generate DAXes that can be read by Pegasus. The official DAX schema is here: http://pegasus.isi.edu/schema/dax-2.1.xsd """ __author__ = "Gideon Juve " __all__ = ["DAX","Filename","Profile","Job","Namespace","LFN", "parse","parseString"] __version__ = "2.1" import datetime, pwd, os from cStringIO import StringIO import xml.sax import xml.sax.handler import shlex SCHEMA_NAMESPACE = u"http://pegasus.isi.edu/schema/DAX" SCHEMA_LOCATION = u"http://pegasus.isi.edu/schema/dax-2.1.xsd" SCHEMA_VERSION = u"2.1" class Namespace: """Namespace values recognized by Pegasus. You can use these, or just pass your own value when creating a Profile object (see Profile). """ PEGASUS = u'pegasus' CONDOR = u'condor' DAGMAN = u'dagman' ENV = u'env' HINTS = u'hints' GLOBUS = u'globus' SELECTOR = u'selector' class LFN: """Logical file name attributes. These include: Linkage Attributes: NONE, INPUT, OUTPUT, INOUT Type Attributes: TYPE_DATA, TYPE_EXECUTABLE, TYPE_PATTERN Transfer Attributes: XFER_NOT, XFER_OPTIONAL, XFER_MANDATORY """ # Linkage NONE = u'none' INPUT = u'input' OUTPUT = u'output' INOUT = u'inout' # File type TYPE_DATA = u'data' TYPE_EXECUTABLE = u'executable' TYPE_PATTERN = u'pattern' # Transfer XFER_NOT = u'false' XFER_OPTIONAL = u'optional' XFER_MANDATORY = u'true' class Filename: """Filename(filename[,type][,link][,register][,transfer][,optional][,varname]) A logical file name. Examples: input = Filename('input.txt',link=LFN.INPUT,transfer=True) intermediate = Filename('intermediate.txt',link=LFN.OUTPUT) result = Filename('result.txt',link=LFN.OUTPUT,register=True,transfer=True) opt = Filename('optional.txt',link=LFN.OUTPUT,optional=True) binary = Filename('bin/binary',link=LFN.INPUT,type=LFN.TYPE_EXECUTABLE,transfer=True) """ def __init__(self, filename, type=LFN.TYPE_DATA, link=LFN.NONE, register=False, transfer=LFN.XFER_NOT, optional=None, varname=None): """ All arguments specify the workflow-level behavior of this Filename. Job-level behavior can be defined when adding the Filename to a Job's uses. If the properties are not overridden at the job-level, then the workflow-level values are used as defaults. If this LFN is to be used as a job's stdin/stdout/stderr then the value of link is ignored when generating the tags. Arguments: filename: The name of the file (required) type: The file type (see LFN) link: Is this file a workflow-level input/output/both? 
(see LFN) register: The default value for register (True/False) transfer: The default value for transfer (see LFN, or True/False) optional: The default value for optional (True/False) type: The file type (see LFN) varname: Only used for stdio files """ if filename is None: raise ValueError, 'filename required' self.filename = filename self.link = link self.register = register self.transfer = transfer self.optional = optional self.type = type self.varname = varname def getFilename(self): return self.filename def setFilename(self, filename): self.filename = filename def getType(self): return self.type def setType(self, type): self.type = type def getLink(self): return self.link def setLink(self, link): self.link = link def getRegister(self): return self.register def setRegister(self, register): self.register = register def getTransfer(self): return self.transfer def setTransfer(self, transfer): self.transfer = transfer def getOptional(self): return self.optional def setOptional(self, optional): self.optional = optional def getVarname(self): return self.varname def setVarname(self, varname): self.varname = varname def __str__(self): """Returns argument-style version of the filename XML tag""" return self.toArgumentXML() def toArgumentXML(self): """Returns an XML representation of this file as a short filename tag for use in job arguments""" return u'' % (self.filename) def toFilenameXML(self): """Returns an XML representation of this file as a filename tag""" xml = StringIO() xml.write(u'') result = xml.getvalue() xml.close() return result def toStdioXML(self, tag): """Returns an XML representation of this file as a stdin/out/err tag""" xml = StringIO() xml.write(u'<%s file="%s"' % (tag, self.filename)) if self.varname is not None: xml.write(u' varname="%s"' % self.varname) if tag is 'stdin': xml.write(u' link="input"') # stdin is always input else: xml.write(u' link="output"') # stdout/stderr are always output xml.write(u'/>') result = xml.getvalue() xml.close() return result class Profile: """Profile(namespace,key,value[,origin]) A Profile captures scheduler-, system-, and environment-specific parameters in a uniform fashion. Each profile declaration assigns a value to a key within a namespace. The origin records what entity is responsible for setting the profile and is optional. Examples: path = Profile(Namespace.ENV,'PATH','/bin') vanilla = Profile(Namespace.CONDOR,'universe','vanilla') path = Profile(namespace='env',key='PATH',value='/bin') path = Profile('env','PATH','/bin') """ def __init__(self, namespace, key, value, origin=None): """ Arguments: namespace: The namespace of the profile (see Namespace) key: The key name. Can be anything that responds to str(). value: The value for the profile. Can be anything that responds to str(). origin: The entity responsible for setting this profile (optional) """ self.namespace = namespace self.key = key self.value = value self.origin = origin def toXML(self): """Return an XML representation of this profile""" xml = StringIO() xml.write(u'') xml.write(unicode(self.value)) xml.write(u'') result = xml.getvalue() xml.close() return result def __str__(self): return u'%s:%s = %s' % (self.namespace, self.key, self.value) class Job: """Job(name[,id][,namespace][,version][,dv_name][,dv_namespace][,dv_version][,level][,compound]) This class defines the specifics of a job to run in an abstract manner. All filename references still refer to logical files. 
All references transformations also refer to logical transformations, though physical location hints can be passed through profiles. Examples: sleep = Job(id="ID0001",name="sleep") jbsim = Job(id="ID0002",name="jbsim",namespace="cybershake",version="2.1") merge = Job(name="merge",level=2) Several arguments can be added at the same time: input = Filename(...) output = Filename(...) job.addArguments("-i",input,"-o",output) Profiles are added similarly: job.addProfile(Profile(Namespace.ENV,key='PATH',value='/bin')) Adding file uses is simple, and you can override global Filename attributes: job.addUses(input,LFN.INPUT) job.addUses(output,LFN.OUTPUT,transfer=True,register=True) """ class Use: """Use(file[,link][,register][,transfer][,optional][,temporaryHint]) Use of a logical file name. Used for referencing LFNs in the DAX. Note: This class is used internally. You shouldn't need to use it in your code. You should use Job.addUses(...). """ def __init__(self, file, link=None, register=None, transfer=None, optional=None, temporaryHint=None): if file is None: raise ValueError, 'file required' self.file = file self.link = link self.optional = optional self.register = register self.transfer = transfer self.temporaryHint = temporaryHint def toXML(self): xml = StringIO() if self.link is None: link = self.file.getLink() else: link = self.link if self.optional is None: optional = self.file.getOptional() else: optional = self.optional if self.register is None: register = self.file.getRegister() else: register = self.register if self.transfer is None: transfer = self.file.getTransfer() else: transfer = self.transfer type = self.file.getType() temporaryHint = self.temporaryHint xml.write(u'') result = xml.getvalue() xml.close() return result def __init__(self, name, id=None, namespace=None, version=None, dv_name=None, dv_namespace=None, dv_version=None, level=None, compound=None): """The ID for each job should be unique in the DAX. If it is None, then it will be automatically generated when the job is added to the DAX. As far as I can tell this ID is only used for uniqueness during planning, and is otherwise ignored. For example, when Condor is running the job there doesn't seem to be a way to use this ID to trace the running job back to its entry in the DAX. The name, namespace, and version should match what you have in your transformation catalog. For example, if namespace="foo" name="bar" and version="1.0", then the transformation catalog should have an entry for "foo::bar:1.0". Level is the level in the workflow. So if you have a workflow with three jobs--A, B, and C--and you have dependencies between A->B and B->C, then A is level 1, B is level 2, and C is level 3. You don't need to specify this because Pegasus calculates it automatically. I have no idea what 'compound' does, or what the 'dv_' stuff does. Arguments: name: The transformation name (required) id: A unique identifier for the job (autogenerated if None) namespace: The namespace of the transformation version: The transformation version dv_name: ? dv_namespace: ? dv_version: ? level: The level of the job in the workflow compound: ? 
""" if name is None: raise ValueError, 'name required' self.name = name self.namespace = namespace self.version = version self.id = id self.dv_namespace = dv_namespace self.dv_name = dv_name self.dv_version = dv_version self.level = level self.compound = compound self.arguments = [] self.profiles = [] self.uses = [] self.stdout = None self.stderr = None self.stdin = None def addArguments(self, *arguments): """Add several arguments to the job""" self.arguments.extend(arguments) def addArgument(self, arg): """Add an argument to the job""" self.addArguments(arg) def addProfile(self,profile): """Add a profile to the job""" self.profiles.append(profile) def addUses(self, file, link=None, register=None, transfer=None, optional=None, temporaryHint=None): """Add a logical filename that the job uses. Optional arguments to this method specify job-level attributes of the 'uses' tag in the DAX. If not specified, these values default to those specified when creating the Filename object. I don't know what 'temporaryHint' does. Arguments: file: A Filename object representing the logical file name link: Is this file a job input, output or both (See LFN) register: Should this file be registered in RLS? (True/False) transfer: Should this file be transferred? (True/False or See LFN) optional: Is this file optional, or should its absence be an error? temporaryHint: ? """ use = Job.Use(file,link,register,transfer,optional) self.uses.append(use) def setStdout(self, filename): """Redirect stdout to a file""" self.stdout = filename def setStderr(self, filename): """Redirect stderr to a file""" self.stderr = filename def setStdin(self, filename): """Redirect stdin from a file""" self.stdin = filename def setID(self, id): """Set the ID of this job""" self.id = id def getID(self): """Return the job ID""" return self.id def setNamespace(self, namespace): """Set the transformation namespace for this job""" self.namespace = namespace def getNamespace(self): """Get the transformation namespace for this job""" return self.namespace def setName(self, name): """Set the transformation name of this job""" self.name = name def getName(self): """Get the transformation name of this job""" return self.name def setVersion(self, version): """Set the version of the transformation""" self.version = version def getVersion(self): """Get the version of the transformation""" return self.version def toXML(self,level=0,indent=u'\t'): """Return an XML representation of this job Arguments: level: The level of indentation indent: The indentation string """ xml = StringIO() indentation = u''.join(indent for x in range(0,level)) # Open tag xml.write(indentation) xml.write(u'\n') # Arguments if len(self.arguments) > 0: xml.write(indentation) xml.write(indent) xml.write(u'') xml.write(u' '.join(unicode(x) for x in self.arguments)) xml.write(u'\n') # Profiles if len(self.profiles) > 0: for pro in self.profiles: xml.write(indentation) xml.write(indent) xml.write(u'%s\n' % pro.toXML()) # Stdin/xml/err if self.stdin is not None: xml.write(indentation) xml.write(indent) xml.write(self.stdin.toStdioXML('stdin')) xml.write(u'\n') if self.stdout is not None: xml.write(indentation) xml.write(indent) xml.write(self.stdout.toStdioXML('stdout')) xml.write(u'\n') if self.stderr is not None: xml.write(indentation) xml.write(indent) xml.write(self.stderr.toStdioXML('stderr')) xml.write(u'\n') # Uses if len(self.uses) > 0: for use in self.uses: xml.write(indentation) xml.write(indent) xml.write(use.toXML()) xml.write(u'\n') # Close tag xml.write(indentation) 
xml.write(u'') result = xml.getvalue() xml.close() return result class DAX: """DAX(name[,count][,index]) Representation of a directed acyclic graph in XML (DAX). Examples: dax = DAX('diamond') part5 = DAX('partition_5',count=10,index=5) Adding jobs: a = Job(...) dax.addJob(a) Adding parent-child control-flow dependency: dax.addDependency(a,b) dax.addDependency(a,c) dax.addDependency(b,d) dax.addDependency(c,d) Adding Filenames (this is not required to produce a valid DAX): input = Filename(...) dax.addFilename(input) Writing a DAX out to a file: f = open('diamond.dax','w') dax.writeXML(f) f.close() """ class Dependency: """A control-flow dependency between a child and its parents""" def __init__(self,child): self.child = child self.parents = [] def addParent(self, parent): self.parents.append(parent) def toXML(self, level=0, indent=u'\t'): xml = StringIO() indentation = ''.join([indent for x in range(0,level)]) xml.write(indentation) xml.write(u'\n' % self.child.getID()) for parent in self.parents: xml.write(indentation) xml.write(indent) xml.write(u'\n' % parent.getID()) xml.write(indentation) xml.write(u'') result = xml.getvalue() xml.close() return result def __init__(self, name, count=1, index=0): """ Arguments: name: The name of the workflow count: Total number of DAXes that will be created index: Zero-based index of this DAX """ self.name = name self.count = count self.index = index # This is used to generate unique ID numbers self.sequence = 1 self.jobs = [] self.filenames = [] self.lookup = {} # A lookup table for dependencies self.dependencies = [] def getName(self): return self.name def setName(self,name): self.name = name def getCount(self): return self.count def setCount(self,count): self.count = count def getIndex(self): return self.index def setIndex(self,index): self.index = index def addJob(self,job): """Add a job to the list of jobs in the DAX""" # Add an auto-generated ID if the job doesn't have one if job.getID() is None: job.setID("ID%07d" % self.sequence) self.sequence += 1 self.jobs.append(job) def addFilename(self, filename): """Add a filename""" self.filenames.append(filename) def addDependency(self, parent, child): """Add a control flow dependency""" if not child in self.lookup: dep = DAX.Dependency(child) self.lookup[child] = dep self.dependencies.append(dep) self.lookup[child].addParent(parent) def writeXML(self, out, indent='\t'): """Write the DAX as XML to a stream""" # Preamble out.write(u'\n') # Metadata out.write(u'\n' % datetime.datetime.now()) out.write(u'\n' % pwd.getpwuid(os.getuid())[0]) out.write(u'\n') # Open tag out.write(u'\n' % (len(self.jobs), len(self.filenames), len(self.dependencies))) # Files out.write(u'\n%s\n' % indent) for filename in self.filenames: out.write(indent) out.write(filename.toFilenameXML()) out.write('\n') # Jobs out.write(u'\n%s\n' % indent) for job in self.jobs: out.write(job.toXML(level=1,indent=indent)) out.write(u'\n') # Dependencies out.write(u'\n%s\n' % indent) for dep in self.dependencies: out.write(dep.toXML(level=1,indent=indent)) out.write(u'\n') # Close tag out.write(u'\n') class DAXHandler(xml.sax.handler.ContentHandler): """ This is a DAX parser """ def __init__(self): self.dax = None self.jobmap = {} self.filemap = {} self.lastJob = None self.lastChild = None self.lastArguments = None self.lastProfile = None def startElement(self, name, attrs): if name == "adag": name = attrs.get("name") count = int(attrs.get("count","1")) index = int(attrs.get("index","0")) self.dax = DAX(name,count,index) elif name == 
"filename": if self.lastJob is None: file = attrs.get("file") link = attrs.get("link") optional = attrs.get("optional") f = Filename(file, type=None, link=link, register=None, transfer=None, optional=optional) self.dax.addFilename(f) self.filemap[name] = f else: name = attrs.get("file") if name in self.filemap: f = self.filemap[name] else: f = Filename(name) self.filemap[name] = f if self.lastProfile is None: self.lastArguments.append(f) else: self.lastProfile.value = f elif name == "job": id = attrs.get("id") namespace = attrs.get("namespace") name = attrs.get("name") version = attrs.get("version") dv_namespace = attrs.get("dv-namespace") dv_name = attrs.get("dv-name") dv_version = attrs.get("dv-version") level = attrs.get("level") compound = attrs.get("compound") job = Job(id=id, namespace=namespace, name=name, version=version, dv_namespace=dv_namespace, dv_name=dv_name, dv_version=dv_version, level=level, compound=compound) self.dax.addJob(job) self.lastJob = job self.jobmap[job.getID()] = job elif name == "argument": self.lastArguments = [] elif name == "profile": namespace = attrs.get("namespace") key = attrs.get("key") self.lastProfile = Profile(namespace,key,"") self.lastJob.addProfile(self.lastProfile) elif name in ["stdin","stdout","stderr"]: file = attrs.get("file") link = attrs.get("link") varname = attrs.get("varname") if file in self.filemap: f = self.filemap[file] else: f = Filename(file,link=link) self.filemap[file] = f if varname is not None: if f.varname is None: f.varname = varname if name == "stdin": self.lastJob.setStdin(f) elif name == "stdout": self.lastJob.setStdout(f) else: self.lastJob.setStderr(f) elif name == "uses": file = attrs.get("file") link = attrs.get("link") register = attrs.get("register") transfer = attrs.get("transfer") type = attrs.get("type") temporaryHint = attrs.get("temporaryHint") if file in self.filemap: f = self.filemap[file] if f.type is None: f.type = type else: f = Filename(file, type=type, link=link, register=register, transfer=transfer) self.filemap[file] = f self.lastJob.addUses(f,link=link,register=register, transfer=transfer,temporaryHint=temporaryHint) elif name == "child": id = attrs.get("ref") child = self.jobmap[id] self.lastChild = child elif name == "parent": id = attrs.get("ref") parent = self.jobmap[id] self.dax.addDependency(parent, self.lastChild) def characters(self, chars): if self.lastArguments is not None: self.lastArguments += [unicode(a) for a in shlex.split(chars)] elif self.lastProfile is not None: self.lastProfile.value += chars def endElement(self, name): if name == "child": self.lastChild = None elif name == "job": self.lastJob = None elif name == "argument": self.lastJob.addArguments(*self.lastArguments) self.lastArguments = None elif name == "profile": self.lastProfile = None def parse(fname): """ Parse DAX from a Pegasus DAX file. 
""" handler = DAXHandler() xml.sax.parse(fname, handler) return handler.dax def parseString(string): """ Parse DAX from a string """ handler = DAXHandler() xml.sax.parseString(string, handler) return handler.dax if __name__ == '__main__': """An example of using the DAX API""" # Create a DAX diamond = DAX("diamond") # Create some logical file names a = Filename("f.a",link=LFN.INPUT,transfer=True) b1 = Filename("f.b1",link=LFN.OUTPUT,transfer=True) b2 = Filename("f.b2",link=LFN.OUTPUT,transfer=True) c1 = Filename("f.c1",link=LFN.OUTPUT,transfer=True) c2 = Filename("f.c2",link=LFN.OUTPUT,transfer=True) d = Filename("f.d",link=LFN.OUTPUT,transfer=True,register=True) # Add the filenames to the DAX (this is not strictly required) diamond.addFilename(a) diamond.addFilename(d) # Add a preprocess job preprocess = Job(namespace="diamond",name="preprocess",version="2.0") preprocess.addArguments("-a preprocess","-T60","-i",a,"-o",b1,b2) preprocess.addUses(a,link=LFN.INPUT) preprocess.addUses(b1,link=LFN.OUTPUT) preprocess.addUses(b2,link=LFN.OUTPUT) diamond.addJob(preprocess) # Add left Findrange job frl = Job(namespace="diamond",name="findrange",version="2.0") frl.addArguments("-a findrange","-T60","-i",b1,"-o",c1) frl.addUses(b1,link=LFN.INPUT) frl.addUses(c1,link=LFN.OUTPUT) diamond.addJob(frl) # Add right Findrange job frr = Job(namespace="diamond",name="findrange",version="2.0") frr.addArguments("-a findrange","-T60","-i",b2,"-o",c2) frr.addUses(b2,link=LFN.INPUT) frr.addUses(c2,link=LFN.OUTPUT) diamond.addJob(frr) # Add Analyze job analyze = Job(namespace="diamond",name="analyze",version="2.0") analyze.addArguments("-a analyze","-T60","-i",c1,c2,"-o",d) analyze.addUses(c1,link=LFN.INPUT) analyze.addUses(c2,link=LFN.INPUT) analyze.addUses(d,link=LFN.OUTPUT) diamond.addJob(analyze) # Add control-flow dependencies diamond.addDependency(parent=preprocess, child=frl) diamond.addDependency(parent=preprocess, child=frr) diamond.addDependency(parent=frl, child=analyze) diamond.addDependency(parent=frr, child=analyze) out = StringIO() diamond.writeXML(out) foo1 = out.getvalue() out.close() diamond = parseString(foo1) out = StringIO() diamond.writeXML(out) foo2 = out.getvalue() out.close() print foo1 print foo2 pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/__init__.py0000644000175000017500000000113311757531137023310 0ustar ryngerynge# Copyright 2009 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. pegasus-wms_4.0.1+dfsg/lib/pegasus/python/Pegasus/PDAX2.py0000644000175000017500000001422511757531137022375 0ustar ryngerynge# Copyright 2009 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """API for generating Pegasus PDAXes The classes in this module can be used to generate PDAXes that can be read by Pegasus. The official PDAX schema is here: http://pegasus.isi.edu/schema/pdax-2.0.xsd """ __author__ = "Gideon Juve " __all__ = ["PDAX","Partition"] __version__ = "2.0" import datetime, pwd, os from cStringIO import StringIO SCHEMA_NAMESPACE = u"http://pegasus.isi.edu/schema/PDAX" SCHEMA_LOCATION = u"http://pegasus.isi.edu/schema/pdax-2.0.xsd" SCHEMA_VERSION = u"2.0" class Partition: """Partition(name,index,id) A Partition represents one DAX in the PDAX. You should create a Partition for each DAX you want to include in your PDAX. Jobs and job-dependencies are not supported at this time as they will be ignored by Pegasus. """ def __init__(self,name,index,id): """The name and index are used by Pegasus to determine the name of the DAX file for this partition. For example, if name is "test_workflow" and index is "1" then Pegasus will assume the DAX name is "test_workflow_1.dax". The partition ID is used by Pegasus to generate working directories and scripts. Pegasus prefixes the ID value with 'P' to generate the working directory name and DAG file name. The ID is also used as the name of the submit file and associated log files for the Condor job that runs the dag. Arguments: name: The partition name index: The partition index id: The partition ID """ self._name = name self._index = index self._id = id def addJob(self, job): """Raises NotImplementedError""" raise NotImplementedError def addDependency(self, parent, child): """Raises NotImplementedError""" raise NotImplementedError def getName(self): return self._name def setName(self, name): self._name = name def getIndex(self): return self._index def setIndex(self, index): self._index = index def getID(self): return self._id def setID(self, id): self._id = id def toXML(self, level=0, indent='\t'): """Return an XML representation of this partition Arguments: level: The indentation level indent: The character(s) to use for indentation """ xml = StringIO() indentation = u''.join(indent for x in range(0,level)) xml.write(indentation) xml.write(u'' % (self._name, self._index, self._id)) result = xml.getvalue() xml.close() return result class PDAX: """PDAX(name[,index][,count]) A PDAX is a partitioned DAX. Basically, its a DAX that refers to other DAXes. Pegasus turns PDAXes into PDAGs that can be used to plan and execute a series of DAXes. In other words, the PDAX is a DAX of DAXes. """ class Dependency: """A dependency in the PDAX This class is used internally. You shouldn't use it in your code. See PDAX.addDependency(...) 
""" def __init__(self, child): self._child = child self._parents = [] def addParent(self, parent): self._parents.append(parent) def toXML(self, level=0, indent='\t'): xml = StringIO() indentation = u''.join(indent for x in range(0,level)) xml.write(indentation) xml.write(u'\n' % self._child.getID()) for parent in self._parents: xml.write(indentation) xml.write(indent) xml.write(u'\n' % parent.getID()) xml.write(indentation) xml.write(u'') result = xml.getvalue() xml.close() return result def __init__(self, name, index=0, count=1): """ Arguments: name: The name of the PDAX index: The index of this PDAX (this should be 0) count: The total number of PDAXes (this should be 1) """ self._name = name self._index = index self._count = count self._lookup = {} self._dependencies = [] self._partitions = [] def addPartition(self, partition): """Add a partition to this PDAX""" self._partitions.append(partition) def addDependency(self, parent, child): """Add a dependency to this PDAX Arguments: parent: The parent Partition child: The child Partition """ if child not in self._lookup: dep = PDAX.Dependency(child) self._lookup[child] = dep self._dependencies.append(dep) self._lookup[child].addParent(parent) def writeXML(self, out, indent='\t'): """Write the PDAX as XML to a stream""" # Preamble out.write(u'\n') # Metadata out.write(u'\n' % datetime.datetime.now()) out.write(u'\n' % pwd.getpwuid(os.getuid())[0]) out.write(u'\n') # Open tag out.write(u'\n' % (self._name, self._index, self._count, SCHEMA_VERSION)) # Partitions for partition in self._partitions: out.write(partition.toXML(level=1,indent=indent)) out.write(u'\n') # Dependencies for dep in self._dependencies: out.write(dep.toXML(level=1,indent=indent)) out.write(u'\n') # Close tag out.write(u'\n') if __name__ == '__main__': """An example of using the PDAX API""" # Create a PDAX object pdax = PDAX('blackdiamond') # Add 20 partitions linked together in a chain last = None for i in range(0,20): part = Partition(name='blackdiamond',index=i,id='ID%02d' % i) pdax.addPartition(part) if last is not None: pdax.addDependency(last, part) last = part # Write the PDAX to stdout import sys pdax.writeXML(sys.stdout)pegasus-wms_4.0.1+dfsg/lib/pegasus/python/pegasus-setup.py0000644000175000017500000000306111757531137022751 0ustar ryngerynge""" Installation script for Pegasus Python library Author: Dan Gunter """ try: from setuptools import setup except: from distutils.core import setup from glob import glob import os import sys VERSION = os.environ.get('PEGASUS_VERSION','trunk') # Main function # ------------- setup(name = "Pegasus", version=VERSION, packages = ["Pegasus", "Pegasus.monitoring", "Pegasus.plots_stats", "Pegasus.plots_stats.plots", "Pegasus.plots_stats.stats", "Pegasus.test", "Pegasus.tools"], ext_modules = [], package_data = {}, scripts = [ ], install_requires=[ ], # metadata for upload to PyPI author = "Pegasus Team", author_email = "deelman@isi.edu", maintainer = "Karan Vahi", maintainer_email = "vahi@isi.edu", description = "Pegasus Python library", long_description = "", license = "LBNL Open-Source", keywords = "workflow", url = "https://confluence.pegasus.isi.edu/display/pegasus/Home", classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: No Input/Output (Daemon)", "Intended Audience :: Science/Research", "Intended Audience :: System Administrators", "License :: Other/Proprietary License", "Natural Language :: English", "Operating System :: POSIX", "Programming Language :: Python", "Topic :: Database", "Topic :: 
Workflow", "Topic :: System :: Monitoring", ], ) pegasus-wms_4.0.1+dfsg/lib/pegasus/python/pegasus-setup.cfg0000644000175000017500000000003311757531137023054 0ustar ryngerynge[egg_info] #tag-build=.dev pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/0000755000175000017500000000000011757531667021570 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/nlapi.py0000644000175000017500000003375411757531137023251 0ustar ryngerynge## Copyright (c) 2004, The Regents of the University of California, through ## Lawrence Berkeley National Laboratory (subject to receipt of any required ## approvals from the U.S. Dept. of Energy). All rights reserved. """ NetLogger instrumentation API for Python Write NetLogger log messages. Most users of this API will use the Log class, which is a little like a 'Logger' object in the Python logging API. Utility functions include functions to get and set the Grid Job ID. """ __author__ = "Dan Gunter" __created__ = "1 April 2004" __rcsid__ = "$Id: nlapi.py 27037 2011-02-04 20:16:27Z dang $" import calendar import datetime import math import os import socket import string import sys import time import types import urlparse from netlogger.nldate import utcFormatISO from netlogger.util import uuid1 # ## Exceptions # class ParseException(Exception): pass class FormatException(Exception): pass # ## Constants # # Environment variable to store GID GID_ENV = "NETLOGGER_GUID" #new # OLD: GID_ENV = 'NL_GID' # Environment variable to store destination NLDEST_ENV = 'NL_DEST' # Environment variable for level file CFG_ENV = 'NL_CFG' FIELD_SEP = ' ' REC_SEP = '\n' EOR = '\n' KEYVAL_SEP = '=' # Port DEFAULT_PORT = 14380 # Level class Level: NOLOG = 0 FATAL = 1 ERROR = 2 WARN = 3 WARNING = 3 INFO = 4 DEBUG = 5 DEBUG1 = 6 DEBUG2 = 7 DEBUG3 = 8 TRACE = DEBUG1 ALL = -1 names = { NOLOG : 'NOLOG', FATAL:'Fatal', ERROR:'Error', WARN:'Warn', INFO : 'Info', DEBUG:'Debug', TRACE : 'Trace', DEBUG2:'Debug2', DEBUG3:'Debug3', } @staticmethod def getName(level): return Level.names.get(level, 'User') @staticmethod def getLevel(name): if name.isupper() and hasattr(Level, name): return getattr(Level, name) raise ValueError("no such level name: %s" % name) DATE_FMT = "%04d-%02d-%02dT%02d:%02d:%02d" # Required fields TS_FIELD = 'ts' EVENT_FIELD = 'event' # Other conventions LEVEL_FIELD = 'level' STATUS_FIELD = 'status' MESSAGE_FIELD = 'msg' HASH_FIELD = 'nlhash' # ## Utility functions # def quotestr(v): """Quote a string value to be output. """ if not v: v = '""' elif ' ' in v or '\t' in v or '"' in v or '=' in v: v = '"%s"' % v.replace(r'"', r'\"') return v def getGuid(create=True, env=GID_ENV): """Return a GUID. If 'create' is True (the default), and if none is found in the environment then create one. """ gid = os.environ.get(env, None) if gid is None: if create: gid = uuid1() return gid # Call this if you want to set a GID manually def setGuid(id, env=GID_ENV): """Replace current guid in the environment with provided value. Return old value, or None if there was no old value. Note: may cause memory leak on FreeBSD and MacOS. See system docs. 
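    Example (a minimal sketch of the get/set/clear round-trip; the UUID
    shown is illustrative):

        old = setGuid('f81d4fae-7dec-11d0-a765-00a0c91e6bf6')
        getGuid(create=False)   # -> 'f81d4fae-7dec-11d0-a765-00a0c91e6bf6'
        clearGuid()             # unsets NETLOGGER_GUID again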
""" old_gid = os.environ.get(env, None) os.environ[env] = id return old_gid def clearGuid(env=GID_ENV): """Unset guid """ old_gid = os.environ.get(env, None) if old_gid: del os.environ[env] return old_gid _g_hostip = None def getHost(): global _g_hostip if _g_hostip is not None: return _g_hostip try: ip = socket.gethostbyname(socket.getfqdn()) except: ip = '127.0.0.1' _g_hostip = ip return ip def getProg(): import sys return sys.argv[0] def getDest(): return os.environ.get(NLDEST_ENV,None) class LevelConfig: """Set logging level from a configuration file. The format of the file is trivial: an integer log level. """ DEFAULT = Level.INFO def __init__(self,filename): self._f = filename self._level = None def getLevel(self): if self._level is None: try: self._level = self.DEFAULT f = file(self._f) line = f.readline() i = int(line.strip()) self._level = i except IOError: pass except ValueError: pass return self._level if os.getenv(CFG_ENV) != None: g_level_cfg = LevelConfig(os.getenv(CFG_ENV)) else: g_level_cfg = None class Log: """NetLogger log class. Name=value pairs for the log are passed as keyword arguments. This is mostly good, but one drawback is that a period '.' in the name is confusing to python. As a work-around, use '__' to mean '.', e.g. if you want the result to be "foo.id=bar", then do:: log.write(.., foo__id='bar') Similarly, a leading '__' will be stripped (e.g. to avoid stepping on keywords like 'class') If you instantiate this class without a 'logfile', it will act as a formatter, returning a string. To disable filtering of messages on level, add 'level=Level.ALL' """ class OpenError(Exception): pass def __init__(self, logfile=None, flush=False, prefix=None, level=Level.INFO, newline=True, guid=True, pretty=False, float_time=False, meta={}): """Constructor. """ self._logfile = None self._float_time = float_time self._pretty = pretty self._newline = newline self._flush = [None, self.flush][flush] self.setPrefix(prefix) self._meta = {} if meta: self._meta[None] = meta if isinstance(logfile,types.StringType): try: self._logfile = urlfile(logfile) except (socket.gaierror, socket.error, IOError), E: raise self.OpenError(E) else: self._logfile = logfile if g_level_cfg is None: self._level = level else: self._level = g_level_cfg.getLevel() if guid is True: guid = getGuid(create=False) if guid: _m = self._meta.get(None, {}) _m['guid'] = guid self._meta[None] = _m elif isinstance(guid,str): _m = self._meta.get(None, {}) _m['guid'] = guid self._meta[None] = _m def setLevel(self,level): """Set highest level of messages that WILL be logged. Messages below this level (that is, less severe, higher numbers) will be dropped. For example:: log.setLevel(Level.WARN) log.error('argh',{}) # logged log.info('whatever',{}) # dropped! """ self._level = level def setPrefix(self, prefix): if prefix is None: self._pfx = '' elif prefix.endswith('.'): self._pfx = prefix else: self._pfx = prefix + '.' def debugging(self): """Return whether the level >= debug. """ return self._level >= Level.DEBUG def flush(self): """Flush output object. """ if self._logfile: self._logfile.flush() def write(self, event='event', ts=None, level=Level.INFO, **kw): """Write a NetLogger string. If there is a logfile, returns None Otherwise, returns a string that would have been written. 
""" if self._level != Level.ALL and level > self._level: if self._logfile: return None else: return "" if not ts: ts = time.time() buf = self.format(self._pfx + event, ts, level, kw) if self._logfile is None: return buf self._logfile.write(buf) if self._flush: self.flush() __call__ = write def error(self, event='', **kwargs): return self.write(event, level=Level.ERROR, **kwargs) def warn(self, event='', **kwargs): return self.write(event, level=Level.WARN, **kwargs) def info(self, event='', **kwargs): return self.write(event, level=Level.INFO, **kwargs) def debug(self, event='', **kwargs): return self.write(event, level=Level.DEBUG, **kwargs) def _append(self, fields, kw): for k,v in kw.items(): if k.startswith('__'): k = k[2:] k = k.replace('__','.') if isinstance(v,str): v = quotestr(v) fields.append("%s=%s" % (k,v)) elif isinstance(v, float): fields.append("%s=%lf" % (k,v)) elif isinstance(v, int): fields.append("%s=%d" % (k,v)) else: s = str(v) if ' ' in s or '\t' in s: s = '"%s"' % s fields.append("%s=%s" % (k,s)) def format(self, event, ts, level, kw): if not self._pretty: # Regular BP formatting if isinstance(ts,str): fields = ["ts=" + ts, "event="+event] elif isinstance(ts, datetime.datetime): if self._float_time: tsfloat = calendar.timegm(ts.utctimetuple()) + ts.microsecond/1e6 fields = ["ts=%.6f" % tsfloat, "event="+event] else: tsstr = "%s.%06dZ" % (DATE_FMT % ts.utctimetuple()[0:6], ts.microsecond) fields = ["ts=" + tsstr, "event=" + event] elif self._float_time: fields = ["ts=%.6f" % ts, "event="+event] else: fields = ["ts=" + utcFormatISO(ts), "event=" + event] if level is not None: if isinstance(level, int): fields.append("level=" + Level.getName(level)) else: fields.append("level=%s" % level) if kw: self._append(fields, kw) if self._meta.has_key(event): self._append(fields, self._meta[event]) if self._meta.has_key(None): self._append(fields, self._meta[None]) buf = FIELD_SEP.join(fields) else: # "Pretty" BP formatting if not isinstance(ts,str): ts = utcFormatISO(ts) if isinstance(level, int): level = Level.getName(level).upper() # print traceback later if kw.has_key('traceback'): tbstr = kw['traceback'] del kw['traceback'] else: tbstr = None if kw.has_key('msg'): msg = kw['msg'] del kw['msg'] else: msg = None remainder = ",".join(["%s=%s" % (key, value) for key, value in kw.items()]) if msg: buf = "%s %-6s %s | %s. %s" % (ts, level, event, msg, remainder) else: buf = "%s %-6s %s | %s" % (ts, level, event, remainder) # add traceback if tbstr: buf += '\n' + tbstr if self._newline: return buf + REC_SEP else: return buf def setMeta(self, event=None, **kw): self._meta[event] = kw def close(self): self.flush() def __del__(self): if not hasattr(self,'closed'): self.close() self.closed = True def __str__(self): if self._logfile: return str(self._logfile) else: return repr(self) # set up urlparse to recognize x-netlog schemes for scheme in 'x-netlog','x-netlog-udp': urlparse.uses_netloc.append(scheme) urlparse.uses_query.append(scheme) def urlfile(url): """urlfile(url:str) -> file Open a NetLogger URL and return a write-only file object. 
""" #print "url='%s'" % url # Split URL scheme, netloc, path, params, query, frag = urlparse.urlparse(url) # Put query parts into a dictionary for easy access later query_data = {} if query: query_parts = query.split('&') for flag in query_parts: name, value = flag.split('=') query_data[name] = value # Create file object if scheme == "file" or scheme == "" or scheme is None: # File if path == '-': fileobj = sys.stdout elif path == '&': fileobj = sys.stderr else: if query_data.has_key('append'): is_append = boolparse(query_data['append']) open_flag = 'aw'[is_append] else: open_flag = 'a' fileobj = file(path,open_flag) elif scheme.startswith("x-netlog"): # TCP or UDP socket if netloc.find(':') == -1: addr = (netloc, DEFAULT_PORT) else: host, port_str = netloc.split(':') addr = (host, int(port_str)) if scheme == "x-netlog": # TCP Socket sock = socket.socket() elif scheme == "x-netlog-udp": # UDP Socket sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) else: raise ValueError("Unknown URL scheme '%s', " "must be empty, 'file' or 'x-netlog[-udp]'" % scheme) #print "connect to address %s" % addr sock.connect(addr) fileobj = sock.makefile('w') else: raise ValueError("Unknown URL scheme '%s', " "must be empty, 'file' or 'x-netlog[-udp]'" % scheme) return fileobj def urltype(url): """urltype(url:str) -> 'file' | 'tcp' | None Return a canonical string representing the type of URL, or None if the type is unknown """ scheme = urlparse.urlparse(url)[0] if scheme == "file" or scheme == "" or scheme is None: return 'file' elif scheme == "x-netlog": return 'tcp' else: return None # Get host _g_hostip = None def get_host(): global _g_hostip if _g_hostip is not None: return _g_hostip try: ip = socket.gethostbyname(socket.getfqdn()) except: ip = '127.0.0.1' _g_hostip = ip return ip pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/util.py0000644000175000017500000005640311757531137023117 0ustar ryngerynge""" Utility functions for NetLogger modules and command-line programs """ __rcsid__ = "$Id: util.py 27069 2011-02-08 20:09:10Z dang $" __author__ = "Dan Gunter (dkgunter (at) lbl.gov)" from asyncore import compact_traceback from copy import copy import glob import imp import logging try: from hashlib import md5 md5_new = md5 except ImportError: import md5 md5_new = md5.new from optparse import OptionParser, Option, OptionValueError, make_option import os import Queue import re import signal import sys import tempfile import time import traceback # import netlogger from netlogger import configobj ## Globals NL_HOME = "NETLOGGER_HOME" EOF_EVENT = "netlogger.EOF" MAGICDATE_EXAMPLES = ', '.join(["%s" % s for s in ( ' weeks|days|hours|minutes|seconds time ago', 'Today', 'Now', 'Tomorrow', 'Yesterday', '4th [[Jan] 2003]', 'Jan 4th 2003', 'mm/dd/yyyy (first)', 'dd/mm/yyyy (second)', 'yyyy-mm-dd', 'yyyymmdd', 'next Tuesday', 'last Tuesday')]) DATA_DIR = os.path.join(os.path.dirname(netlogger.__file__), 'data') ## Exceptions class ConfigError(Exception): """Use this exception for configuration-time error handling. """ pass class DBConnectError(Exception): """Used for failed database connections. 
""" pass ## Classes class ScriptOptionParser(OptionParser): standard_option_list = ( make_option('-q', '--quiet', action='store_true', dest='quiet', help="only report errors"), make_option('-v', '--verbosity', default=0, action='count', dest='verbosity', help="show verbose status information, " "repeat for even more detail"), ) class CountingQueue(Queue.Queue): """Wrapper around Queue that counts how many items were processed, for accounting purposes. """ def __init__(self, type_, *args): Queue.Queue.__init__(self, *args) self.n = 0 if type_ == 'get': self._gets, self._puts = True, False else: self._gets, self._puts = False, True def get(self, *args, **kwargs): if self._gets: self.n += 1 return Queue.Queue.get(self, *args, **kwargs) def put(self, *args, **kwargs): if self._puts: self.n += 1 return Queue.Queue.put(self, *args, **kwargs) def getNumProcessed(self): return self.n class FIFODict: """A container that is limited to a fixed size. When that size is exceeded, every new item inserted results in an item removed (in FIFO order). Any 'old' items do not replace newer ones. The interface is specialized for adding an item and checking that it is new at the same time. """ class CircularBuffer: """Specialized circular buffer """ def __init__(self, size): self._data = [ ] self._size = size self._p = 0 def getput(self, item): """Put item in buffer. If the buffer is full, return the oldest element, otherwise return None. """ if len(self._data) == self._size: oldest = self._data[self._p] self._data[self._p] = item self._p = (self._p + 1) % self._size else: oldest = None self._data.append(item) return oldest def __init__(self, size): self._q = self.CircularBuffer(size) self._data = { } def add(self, key): """Add a key. If it is new, return True otherwise False. """ if self._data.has_key(key): return False self._data[key] = 1 removed = self._q.getput(key) if removed is not None: del self._data[removed] return True def traceback(): """Traceback as a string with no newlines.""" return str(compact_traceback()) def parse_nvp(args): d = { } for arg in args: try: name, value = arg.split('=') except ValueError: pass d[name] = value return d def tzstr(): return "%s%02d:%02d" % (('+','-')[time.timezone > 0], time.timezone / 3600 , (time.timezone - int(time.timezone/3600)*3600)/60) def parseDatetime(d, utc=False): """Parse a datetime object, or anything that formats itself with isoformat(), to number of seconds since epoch. """ from netlogger.parsers.base import parseDate if d is None: raise ValueError("date is None") iso = d.isoformat() # add 'midnight' time if none given if 'T' not in iso: iso += 'T00:00:00' # only append timezone if none is there already if not iso.endswith('Z') and not re.match('.*[+-]\d\d:\d\d$', iso): if utc: iso += 'Z' else: iso += tzstr() return parseDate(iso) class ProgressMeter: """A simple textual progress meter. 
""" REPORT_INTERVAL = 1000 def __init__(self, ofile, units="lines"): self.ofile = ofile self.units = units self._counter = 0 self.reset(0) def reset(self, n): self.t0 = time.time() self.last_report = n def advance(self, num=0, inc=1): if num == 0: self._counter += inc num = self._counter if num - self.last_report >= self.REPORT_INTERVAL: n = num - self.last_report dt = time.time() - self.t0 rate = n / dt if rate < 1: dig = 2 elif rate < 10: dig = 1 else: dig = 0 fmt = "[ %%5d ] %%s, rate = %%.%df %%s/sec \r" % dig self.ofile.write(fmt % (num, self.units, rate, self.units)) self.ofile.flush() self.reset(num) class NullProgressMeter: """Substitute for ProgressMeter when you don't want anything to actually be printed. """ def __init__(self, ofile=None, units=None): return def reset(self, n): pass def advance(self, num=0): pass def mostRecentFile(dir, file_pattern, after_time=None): """Search 'dir' for all files matching glob 'file_pattern', and return the mostRecent one(s). If 'after_time' is given, it should be a number of seconds since the epoch UTC; no files will be returned if none is on or after this time. Returns a list of the full paths to file(s), or an empty list. More than one file may be returned, in the case that they have the same modification time. """ if not os.path.isdir(dir): return [ ] search_path = os.path.join(dir, file_pattern) # make a sortable list of filenames and modification times timed_files = [(os.stat_result(os.stat(fname)).st_mtime, fname) for fname in glob.glob(search_path)] # if the list is empty, stop if not timed_files: return [ ] # reverse sort so most-recent is first timed_files.sort(reverse=True) most_recent_time = timed_files[0][0] # return nothing if the most recent time is not # after the cutoff if after_time is not None and most_recent_time < int(after_time): return [ ] # start with most recent, then append all 'ties' result = [ timed_files[0][1] ] i = 1 try: while timed_files[i][0] == most_recent_time: result.append(timed_files[i][1]) i += 1 except IndexError: pass # ran off end of list. all ties (!) # return all 'most recent' files return result def daemonize(log=None, root_log=None, close_fds=True): """Make current process into a daemon. """ # Do a double-fork so that the daemon process is completely # detached from its parent (it becomes a child of init). # For details the classic text is: # W. Richard Stevens, "Advanced Programming in the UNIX Environment" log.info("daemonize.start") log.debug("daemonize.fork1") try: pid = os.fork() if pid > 0: # parent: exit sys.exit(0) except OSError, err: log.exc( "fork.1.failed", err) sys.exit(1) log.debug("daemonize.fork2") # child: do second fork try: pid = os.fork() if pid > 0: # parent: exit sys.exit(0) except OSError, err: log.exc("daemonize.fork2.failed", err) sys.exit(1) # child: decouple from parent environment log.debug("daemonize.chdir_slash") os.chdir("/") try: os.setsid() except OSError: pass os.umask(0) if close_fds: # Remove log handlers that write to stdout or stderr. # Construct list of other log handlers' file descriptors. no_close = [ ] # list of fd's to keep open if root_log and len(root_log.handlers) > 0: console = (sys.stderr.fileno(), sys.stdout.fileno()) for handler in root_log.handlers[:]: fd = handler.stream.fileno() #print "handler: %s, fileno=%d, console=%s" % (handler, fd, console) if fd in console: log.removeHandler(handler) else: no_close.append(fd) # Close all fd's except those that belong to non-console # log handlers, just discovered above. 
log.info("daemonize.close_fds", ignore=','.join(no_close)) for fd in xrange(1024): if fd not in no_close: try: os.close(fd) except OSError: pass # redirect stdin, stdout, stderr to /dev/null log.info("daemonize.redirect") try: devnull = os.devnull except AttributeError: devnull = "/dev/null" os.open(devnull, os.O_RDWR) try: os.dup2(0, 1) os.dup2(0, 2) except OSError: pass log.info("daemonize.end") def _getNumberedFiles(path): result = [ ] for filename in glob.glob(path + ".*"): try: name, ext = filename.rsplit('.',1) n = int(ext) result.append((n,filename)) except (IndexError, ValueError): pass return result def getNextNumberedFile(path, mode="w", strip=False, open_file=True): if strip: # take off . extension first path = path.rsplit('.', 1)[0] numbered = _getNumberedFiles(path) if numbered: numbered.sort(reverse=True) next_num = numbered[0][0] + 1 else: next_num = 1 next_file = "%s.%d" % (path, next_num) if open_file: return file(next_file, mode) else: return next_file def getAllNumberedFiles(path): nf = _getNumberedFiles(path) return map(lambda x: x[1], nf) def getLowestNumberedFile(path, mode="r"): numbered = _getNumberedFiles(path) if numbered: numbered.sort() result = file(numbered[0][1], mode) else: result = None return result class ThrottleTimer: """Given a ratio of time that the program should be running in a given time-slice, and assuming that the program is running continuously between calls to throttle(), periodically sleep so that the program is running for roughly that proportion of time. For example, if run_ratio is 0.1 then calling throttle() in a loop will cause it to sleep 90% of the time: tt = ThrottleTimer(0.1) ... tt.start() while 1: do_something() tt.throttle() # sleeps here """ def __init__(self, run_ratio, min_sleep_sec=0.1): """Create timer. 'run_ratio' is the desired ratio of the time between calls to the time sleeping in ths timer. 'min_sleep_sec' is the mininum size of the argument to time.sleep(), before throttle() will actually call it. This attempts to minimize the inaccuracy encountered with very small sleep times. """ self.sleep_ratio = (1/run_ratio - 1) self.t0 = time.time() self.min_sleep_sec = min_sleep_sec def start(self): """Start the timer. """ self.t0 = time.time() def throttle(self): """Sleep for an appropriate time. If that time would be less than 'min_sleep_sec' (see constructor), don't actually perform the sleep. Therefore, it should be safe to call this in a (relatively) tight loop. """ t1 = time.time() sleep_sec = (t1 - self.t0) * self.sleep_ratio if sleep_sec >= self.min_sleep_sec: time.sleep(sleep_sec) self.t0 = time.time() class NullThrottleTimer(ThrottleTimer): """Null class for ThrottleTimer. Cleans up calling code.""" def __init__(self, run_ratio=None, min_sleep_sec=None): ThrottleTimer.__init__(self, 1) def start(self): return def throttle(self): return class NullFile: """Null-object pattern for 'file' class. 
""" def __init__(self, name='(null)', mode='r', buffering=None): self.name = name self.mode = mode self.encoding = None self.newlines = None self.softspace = 0 def close(self): pass def closed(self): return True def fileno(self): return -1 def flush(self): pass def isatty(self): return False def next(self): raise StopIteration() def read(self, n): return '' def readline(self): return '' def readlines(self): return [ ] def seek(self, pos): pass def tell(self): return 0 def write(self, data): return None def writelines(self, seq): return None def xreadlines(self): return self def rm_rf(d): """Remove directories and their contents, recursively. """ for path in (os.path.join(d,f) for f in os.listdir(d)): if os.path.isdir(path): rm_rf(path) else: os.unlink(path) os.rmdir(d) class IncConfigObj(configobj.ConfigObj): """Recognize and process '#@include ' directives transparently. Do not deal with recursive references, i.e. ignore directives inside included files. """ def __init__(self, infile, **kw): """Take same arguments as ConfigObj, but in the case of a file object or filename, process #@include statements in the file. """ if not(isinstance(infile,str) or hasattr(infile,'read')): # not a file: stop configobj.ConfigObj.__init__(self, infile, **kw) return # open file if hasattr(infile, 'read'): f = infile f.seek(0) # rewind to start of file else: f = file(infile) dir_ = os.path.dirname(f.name) # Create list of lines that includes the included files lines = [ ] file_lines = [ ] # tuple: (filename, linenum) i = 0 for line in f: # look for include directive s = line.strip() m = re.match("@include (\"(.*)\"|\'(.*)\'|(\S+))", s) if m: # This line is an @include. # Pick out the group that matched. inc_path = filter(None, m.groups()[1:])[0] # open the corresponding file if not inc_path[0] == '/': inc_path = os.path.join(dir_, inc_path) try: inc_file = file(inc_path) except IOError: raise IOError("Cannot read file '%s' " "included from '%s'" % (inc_path, f.name)) # add contents of file to list of lines j = 0 for line in inc_file: j += 1 file_lines.append((inc_file.name, j)) lines.append(line) else: # This is a regular old line i += 1 file_lines.append((f.name, i)) lines.append(line) # Call superclass with list of lines we built try: configobj.ConfigObj.__init__(self, lines, **kw) except configobj.ParseError, E: # Report correct file and line on parse error m = re.search('line "(\d+)"', str(E)) if m is None: raise else: #print file_lines n = int(m.group(1)) - 1 filename, lineno = file_lines[n] msg = "Invalid line %s in %s: \"%s\"" % (lineno, filename, lines[n].strip()) raise configobj.ParseError(msg) def getHasLoggingSection(self): """Return True if configuration had a [logging] section, False otherwise. """ return self._has_logging_section def _setHasLoggingSection(self, value): """Set whether configuration has a [logging] section. """ self._has_logging_section = value def handleSignals(*siglist): """Set up signal handlers. 
Input is a list of pairs of a function, and then a list of signals that should trigger that action, e.g.: handleSignals( (myfun1, (signal.SIGUSR1, signal.SIGUSR2)), (myfun2, (signal.SIGTERM)) ) """ for action, signals in siglist: for signame in signals: if hasattr(signal, signame): signo = getattr(signal, signame) signal.signal(signo, action) _TPAT = re.compile("(\d+)\s*([mhds]|minutes?|hours?|days?|seconds?)?$") _TFAC = { None : 1, 's': 1, 'm':60, 'h': 60*60, 'd': 60*60*24, 'seconds': 1, 'minutes':60, 'hours': 60*60, 'days': 60*60*24 , 'second': 1, 'minute':60, 'hour': 60*60, 'day': 60*60*24 } def timeToSec(s): """Convert time period to a number of seconds. Raise ValueError for invalid time, otherwise return a number of seconds. """ s = s.lower() m = _TPAT.match(s) if m is None: raise ValueError("invalid time") g = m.groups() num = int(g[0]) factor = _TFAC[g[1]] return num * factor def check_timeperiod(option, opt, value): try: return timeToSec(value) except ValueError: raise OptionValueError( "option %s: invalid time period value: %r" % (opt, value)) _BPAT = re.compile("(\d+)\s*(\S+)") _BFAC = { None : 1, 'b': 1, 'bytes':1, 'kb':1024, 'kilobytes':1024, 'mb':1024*1024, 'megabytes':1024*1024 } def sizeToBytes(s): """Convert a size to a number of bytes Return number of bytes or raise ValueError if not parseable. """ bytes = None m = re.match("(\d+)\s*(\S+)", s.lower()) if not m: raise ValueError("Not of form: ") value, units = m.groups() if not _BFAC.has_key(units): raise ValueError("Unrecognized units for '%s'" % s) return int(value) * _BFAC[units] class ScriptOption(Option): TYPES = Option.TYPES + ("timeperiod",) TYPE_CHECKER = copy(Option.TYPE_CHECKER) TYPE_CHECKER["timeperiod"] = check_timeperiod def noop(*args, **kwargs): """Handy no-operation function. """ pass def as_bool(x): """Convert value (possibly a string) into a boolean. """ if x is True or x is False: return x if isinstance(x, int): return bool(x) retval = None if isinstance(x, str): retval = { 'yes': True, 'no': False, 'on': True, 'off': False, '1': True, '0': False, 'true': True, 'false': False, }.get(x.lower(), None) if retval is None: raise ValueError("Cannot convert to bool: %s" % x) return retval def as_list(value, sep=" "): """Convert value (possibly a string) into a list. Raises ValueError if it's not convert-able. """ retval = None if isinstance(value,list) or isinstance(value,tuple): retval = value elif isinstance(value,str): if not value: retval = [ ] else: retval = value.split(sep) if retval is None: raise ValueError("Cannot convert to list: %s" % value) return retval def is_stdout(fname): return fname == sys.stdout.name def parseParams(opt): """Parse a set of name=value parameters in the input value. Return list of (name,value) pairs. Raise ValueError if a parameter is badly formatted. """ params = [ ] for nameval in opt: try: name, val = nameval.split('=') except ValueError: raise ValueError("Bad name=value format for '%s'" % nameval) params.append((name,val)) return params def getProgFromFile(f): """Get program name from __file__. 
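    Example (the path is illustrative):

        getProgFromFile('/opt/nl/bin/my_tool.py')   # -> 'my_tool'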
""" if f.endswith(".py"): f = f[:-3] return os.path.basename(f) # Python 2.4-friendly uuid generator try: import uuid def uuid1(): return str(uuid.uuid1()) except ImportError: # From: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/213761 import time, random, md5 def uuid1(): t = long( time.time() * 1000 ) r = long( random.random()*100000000000000000L ) try: a = socket.gethostbyname( socket.gethostname() ) except: # if we can't get a network address, just imagine one a = random.random()*100000000000000000L data = str(t)+' '+str(r)+' '+str(a) data = md5.md5(data).hexdigest() return "%s-%s-%s-%s-%s" % (data[0:8], data[8:12], data[12:16], data[16:20], data[20:32]) """ Word-wrap utility functions. Examples: # wrap at 70 cols print wrap(dewrap(blob_of_text), 70) """ def dewrap(text): "Take newlines out of text and normalize whitespace." return ' '.join(text.split()) def wrap(text, n, leader=""): """Word-wrap text at 'n' columns. The 'leader' will be inserted before each new line of text after the first. """ if len(text) <= n: return text else: spc = _find_space(text, n) if spc < 0: return text else: return text[:spc] + '\n' + leader + \ wrap(text[spc+1:].lstrip(), n, leader=leader) def _find_space(text, maxpos): "Find rightmost whitespace position, or -1 if none." p = -1 for ws in (' ', '-'): p = max(p, text.rfind(ws, 0, maxpos)) return p def process_kvp(option, all={}, _bool={}, type="AMQP"): """Process a name=value option. Parameters: - option (str): "name=value" option string Returns: (key,value) Raises: ValueError for bad format or unknown option. """ parts = option.split('=', 1) if len(parts) != 2: raise ValueError("argument '%s' not in form name=value" % option) key, value = parts if all and (key not in all): raise ValueError("unknown %s option '%s'." % (type, key)) if key in _bool: value = as_bool(value) return key, value def hash_event(e): """Generate and return a probabilistically unique hash code for event dictionary, 'e'. Returns: String (hexdigest) representation of the hash value. """ return md5(str(e)).hexdigest() def stringize(v): if isinstance(v,str): result = v elif isinstance(v, float): result = "%f" % v elif isinstance(v, int): result = "%d" % v else: result = str(v) return result pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/nldate.py0000644000175000017500000002304311757531137023403 0ustar ryngeryngeimport calendar import magicdate import re import time ISO_DATE_PARTS = re.compile( "(\d\d\d\d)(?:-(\d\d)(?:-(\d\d)(?:T(\d\d)(?::(\d\d)(?::(\d\d)(?:\.(\d+))?)?)?)?)?)?(Z|[+-]\d\d:\d\d)?" ) ISO_DATE_ZEROES = (None, '01', '01', '00', '00', '00', '0') NUMBER_DATE = re.compile("(\d+)(?:\.(\d+))?") # Date format constants UNKNOWN = "Unknown" ISO8601 = "ISO8601 date" ENGLISH = "Natural English language date" SECONDS = "Seconds since UNIX epoch" DATE_FMT = "%04d-%02d-%02dT%02d:%02d:%02d" class DateFormatError(Exception): pass def getLocaltimeOffsetSec(t=None): """Return current localtime offset at time 't' (default=now) from UTC as a number of seconds. 
""" if t is None: t = time.time() # this doesn't handle DST properly #offs_sec = time.mktime(time.localtime(t)) - time.mktime(time.gmtime(t)) # this does: offs_sec = calendar.timegm(time.localtime(t)) - time.mktime(time.localtime(t)) return offs_sec def getLocaltimeOffsetParts(t=None): """Return current localtime offset at time 't' (default=now) from GMT as a triple: hours, minutes, sign (1 for positive or 0, -1 for negative) """ offs_sec = getLocaltimeOffsetSec(t) offs_hr = int(offs_sec / 3600) offs_min = int((offs_sec - (offs_hr * 3600)) / 60) sign = (1, -1)[offs_hr < 0] return abs(offs_hr), abs(offs_min), sign def getLocaltimeISO(t): """Get an ISO8601 string for the local timezone at time 't', where t is either seconds since the epoch as a float, or a tuple like the one returned by time.localtime(), or a string date Return the string, which may be [+-]HH:MM or 'Z'. """ # If 't' is a list, tuple or string, first convert to seconds if isinstance(t, list) or isinstance(t, tuple) or isinstance(t,str): # Convert a string to a list if isinstance(t,str): parts = splitISODate(t) else: # If it's a tuple or list, just assign parts = t # From the tuple or list, calculate time offset for the # same year, month, day, and hour. tm = (int(parts[0]), int(parts[1]), int(parts[2]), int(parts[3]), 0, 0, 0, 1, -1) t = time.mktime(tm) # Given the time, figure out the timezone offset hr, min, sign = getLocaltimeOffsetParts(t) # Format the offset as an ISO string if hr == 0 and min == 0: s = 'Z' else: s = "%s%02d:%02d" % (('','+', '-')[sign], hr, min) return s def splitISODate(s): """Split an ISO date into parts like those returned by localtime() Raises a DateFormatError if the date couldn't be split. Returns the list of parts. """ m = ISO_DATE_PARTS.match(s) if not m: raise DateFormatError("Not a partial ISO date: %s" % s) parts = list(m.groups()) # Fill in missing parts with 'zero' for i, part in enumerate(parts[:-1]): if part is None: parts[i] = ISO_DATE_ZEROES[i] return parts def completeISO(s, is_gmt=False, set_gmt=False): """Make a partial ISO8601 date into a full ISO8601 date. If 'gmt' is True, assume timezone is GMT when not given. Otherwise, assume localtime. """ parts = splitISODate(s) # add timezone iso_str = None if parts[7] is None: if is_gmt and set_gmt: tz = 'Z' elif not is_gmt and not set_gmt: tz = getLocaltimeISO(parts) else: # adjust time GMT to localtime or localtime to GMT # easiest at this point to just format string from time p = map(int, parts[:-1] + [0, -1]) if set_gmt: t = time.mktime(p) iso_str = utcFormatISO(t) else: t = calendar.timegm(p) iso_str = localtimeFormatISO(t) else: # explicit timezone overrides tz = parts[7] if iso_str is None: iso_str = '-'.join(parts[:3]) + 'T' + ':'.join(parts[3:6]) + '.' + parts[6] + tz return iso_str def parseISO(s): """Parse ISO8601 (string) date into a floating point seconds since epoch UTC. The string must be an ISO8601 date of the form YYYY-MM-DDTHH:MM:SS[.fff...](Z|[+-]dd:dd) If something doesn't parse, a DateFormatError will be raised. The return value is floating point seconds since the UNIX epoch (January 1, 1970 at midnight UTC). """ # if it's too short if len(s) < 7: raise DateFormatError("Date '%s' is too short" % s) # UTC timezone? if s[-1] == 'Z': tz_offs, tz_len = 0, 1 # explicit +/-nn:nn timezone? 
    elif s[-3] == ':':
        tz_offs = int(s[-6] + '1') * (int(s[-5:-3])*3600 + int(s[-2:])*60)
        tz_len = 6
    # otherwise
    else:
        raise DateFormatError("Date '%s' is missing timezone" % s)
    # split into components
    cal, clock = s.split('T')
    year, month, day = cal.split('-')
    hr, minute, sec = clock[:-tz_len].split(':')
    # handle fractional seconds
    frac = 0
    point = sec.find('.')
    if point != -1:
        frac = float(sec[point+1:]) / pow(10, len(sec) - point - 1)
        sec = sec[:point]
    # use calendar to get seconds since epoch
    args = map(int, (year, month, day, hr, minute, sec)) + [0, 1, -1]
    return calendar.timegm(args) + frac - tz_offs # adjust to GMT

def makeISO(value, is_gmt=False, set_gmt=False):
    """If value is a tuple, assume it is the one returned by
    time.gmtime() or time.localtime(). Otherwise, assume value is an
    English language description (for partial ISO strings, use
    completeISO() instead).

    Return an ISO8601 string, with timezone set to GMT or localtime.
    """
    tz_str = 'Z' # assume GMT
    if isinstance(value, tuple) or isinstance(value, list):
        fmt = ("%04d", "-%02d", "-%02d", "T%02d", ":%02d", ":%02d")
        s = ''.join([f % v for f, v in zip(fmt, value)])
        if not is_gmt:
            tz_str = getLocaltimeISO(value)
        iso = s + tz_str
    else:
        try:
            d = magicdate.magicdate(value)
        except Exception, E:
            raise ValueError("magicdate cannot parse '%s'" % value)
        partial_iso = d.isoformat()
        iso = completeISO(partial_iso, is_gmt=is_gmt, set_gmt=set_gmt)
    return iso

def guess(s, parse=True, is_gmt=False, set_gmt=False,
          try_iso=True, try_num=True, try_en=True):
    """Guess the format, and optionally parse, the input string.

    If 'is_gmt' is True, assume timezone is GMT when not given.
    Otherwise, assume localtime. If 'set_gmt' is True then set the
    timezone to GMT, otherwise set it to localtime.

    The answer is a pair containing the guessed format and, if the
    'parse' flag was given, the parsed value as seconds since the
    epoch, otherwise None. The format is a constant defined in this
    module:
      UNKNOWN - Cannot guess the format (associated value is None)
      ISO8601 - This is a prefix of the ISO8601 format accepted by completeISO()
      ENGLISH - This is a natural English-language format accepted by makeISO()
      SECONDS - This is seconds since the UNIX epoch (Midnight on 1970/1/1).
    """
    if not(s):
        return UNKNOWN, None
    sec = None
    s = s.strip()
    # try ISO8601
    if try_iso:
        m = ISO_DATE_PARTS.match(s)
        if m and m.start() == 0 and m.end() == len(s):
            if parse:
                if s[-1] == 'Z':
                    # explicit timezone overrides option
                    is_gmt = True
                iso_s = completeISO(s, is_gmt=is_gmt, set_gmt=set_gmt)
                sec = parseISO(iso_s)
            return ISO8601, sec
    # try number
    if try_num:
        m = NUMBER_DATE.match(s)
        if m and m.start() == 0 and m.end() == len(s):
            if parse:
                sec = float(s)
            return SECONDS, sec
    # try natural language
    if try_en:
        try:
            d = magicdate.magicdate(s)
        except Exception, E:
            d = None
        if d is not None:
            if parse:
                partial_iso = d.isoformat()
                iso = completeISO(partial_iso, is_gmt=False, set_gmt=set_gmt)
                sec = parseISO(iso)
            return ENGLISH, sec
    # default: unknown
    return UNKNOWN, None

def utcFormatISO(sec):
    """Format 'sec' seconds since the epoch as a UTC ISO8601 date,
    in the UTC (GMT) timezone.

    Use the format: "YYYY-MM-DDThh:mm:ss.fffffffZ"
    """
    tm = time.gmtime(sec)
    usec = int((sec - int(sec)) * 1e6)
    iso_date = "%s.%06dZ" % (DATE_FMT % tm[0:6], usec)
    return iso_date

def localtimeFormatISO(sec):
    """Format 'sec' seconds since the epoch as an ISO8601 date,
    in the local timezone.
Use the format: "YYYY-MM-DDThh:mm:ss.fffffff[+-]HH:MM" """ tm = time.localtime(sec) usec = int((sec - int(sec)) * 1000000) hr, min, sign = getLocaltimeOffsetParts(sec) iso_date = "%s.%06d%s%02d:%02d" % (DATE_FMT % tm[0:6], usec, ('-','+')[sign==1], hr, min) return iso_date # Syslog-style dates (always in localtime) SYSLOG_DATE_RE = re.compile("\s*(...)\s+(...)\s+(\d\d?) " + "(\d\d):(\d\d):(\d\d)\s+(\d\d\d\d)\s*") MONTHS = {'Jan':1, 'Feb':2, 'Mar':3, 'Apr':4, 'May':5, 'Jun': 6, 'Jul':7, 'Aug':8, 'Sep':9, 'Oct':10, 'Nov':11, 'Dec':12} def parseSyslogDate(date): """Parse syslog-style date to seconds UTC. Expected format: Fri Oct 24 04:18:36 2008 """ m = SYSLOG_DATE_RE.match(date) if m is None: raise ValueError("bad syslog date '%s'" % date) g = m.groups() month = MONTHS[g[1]] day, hh, mm, ss, year = map(int, g[2:]) sec = time.mktime((year, month, day, hh, mm, ss, 0, 0, -1)) return sec pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/magicdate.py0000644000175000017500000002266111757531137024057 0ustar ryngeryngeimport re import datetime import calendar from optparse import Option, OptionValueError from copy import copy def check_magicdate(option, opt, value): try: return magicdate(value) except: raise OptionValueError( "option %s: invalid date value: %r" % (opt, value)) class MagicDateOption(Option): TYPES = Option.TYPES + ("magicdate",) TYPE_CHECKER = copy(Option.TYPE_CHECKER) TYPE_CHECKER["magicdate"] = check_magicdate res = [ # x time ago (re.compile( r'''^ ((?P\d+) \s weeks?)? [^\d]* ((?P\d+) \s days?)? [^\d]* ((?P\d+) \s hours?)? [^\d]* ((?P\d+) \s minutes?)? [^\d]* ((?P\d+) \s seconds?)? \s ago ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.datetime.today() - datetime.timedelta( days=int(m.group('days') or 0), seconds=int(m.group('seconds') or 0), minutes=int(m.group('minutes') or 0), hours=int(m.group('hours') or 0), weeks=int(m.group('weeks') or 0))), # Today (re.compile( r'''^ tod # Today ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date.today()), # Now (special case, returns datetime.datetime (re.compile( r'''^ now # Now ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.datetime.now()), # Tomorrow (re.compile( r'''^ tom # Tomorrow ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date.today() + datetime.timedelta(days=1)), # Yesterday (re.compile( r'''^ yes # Yesterday ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date.today() - datetime.timedelta(days=1)), # 4th (re.compile( r'''^ (?P\d{1,2}) # 4 (?:st|nd|rd|th)? # optional suffix $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date.today().replace( day=int(m.group('day')))), # 4th Jan (re.compile( r'''^ (?P\d{1,2}) # 4 (?:st|nd|rd|th)? # optional suffix \s+ # whitespace (?P\w+) # Jan $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date.today().replace( day=int(m.group('day')), month=_parseMonth(m.group('month')))), # 4th Jan 2003 (re.compile( r'''^ (?P\d{1,2}) # 4 (?:st|nd|rd|th)? # optional suffix \s+ # whitespace (?P\w+) # Jan ,? # optional comma \s+ # whitespace (?P\d{4}) # 2003 $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date( year=int(m.group('year')), month=_parseMonth(m.group('month')), day=int(m.group('day')))), # Jan 4th (re.compile( r'''^ (?P\w+) # Jan \s+ # whitespace (?P\d{1,2}) # 4 (?:st|nd|rd|th)? 
# optional suffix $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date.today().replace( day=int(m.group('day')), month=_parseMonth(m.group('month')))), # Jan 4th 2003 (re.compile( r'''^ (?P\w+) # Jan \s+ # whitespace (?P\d{1,2}) # 4 (?:st|nd|rd|th)? # optional suffix ,? # optional comma \s+ # whitespace (?P\d{4}) # 2003 $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date( year=int(m.group('year')), month=_parseMonth(m.group('month')), day=int(m.group('day')))), # mm/dd/yyyy (American style, default in case of doubt) (re.compile( r'''^ (?P0?[1-9]|10|11|12) # m or mm / # (?P0?[1-9]|[12]\d|30|31) # d or dd / # (?P\d{4}) # yyyy $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date( year=int(m.group('year')), month=int(m.group('month')), day=int(m.group('day')))), # dd/mm/yyyy (European style) (re.compile( r'''^ (?P0?[1-9]|[12]\d|30|31) # d or dd / # (?P0?[1-9]|10|11|12) # m or mm / # (?P\d{4}) # yyyy $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date( year=int(m.group('year')), month=int(m.group('month')), day=int(m.group('day')))), # yyyy-mm-dd (ISO style) (re.compile( r'''^ (?P\d{4}) # yyyy - # (?P0?[1-9]|10|11|12) # m or mm - # (?P0?[1-9]|[12]\d|30|31) # d or dd $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date( year=int(m.group('year')), month=int(m.group('month')), day=int(m.group('day')))), # yyyymmdd (re.compile( r'''^ (?P\d{4}) # yyyy (?P0?[1-9]|10|11|12) # m or mm (?P0?[1-9]|[12]\d|30|31) # d or dd $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: datetime.date( year=int(m.group('year')), month=int(m.group('month')), day=int(m.group('day')))), # next Tuesday (re.compile( r'''^ next # next \s+ # whitespace (?P\w+) # Tuesday $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: _nextWeekday(_parseWeekday(m.group('weekday')))), # last Tuesday (re.compile( r'''^ (last # last \s+)? # whitespace (?P\w+) # Tuesday $ # EOL ''', (re.VERBOSE | re.IGNORECASE)), lambda m: _lastWeekday(_parseWeekday(m.group('weekday')))), ] def _parseMonth(input): months = "January February March April May June July August September October November December".split(' ') for i, month in enumerate(months): p = re.compile(input, re.IGNORECASE) if p.match(month): return i+1 else: raise Exception def _parseWeekday(input): days = "Monday Tuesday Wednesday Thursday Friday Saturday Sunday".split(' ') for i, day in enumerate(days): p = re.compile(input, re.IGNORECASE) if p.match(day): return i else: raise Exception def _nextWeekday(weekday): day = datetime.date.today() + datetime.timedelta(days=1) while calendar.weekday(*day.timetuple()[:3]) != weekday: day = day + datetime.timedelta(days=1) return day def _lastWeekday(weekday): day = datetime.date.today() - datetime.timedelta(days=1) while calendar.weekday(*day.timetuple()[:3]) != weekday: day = day - datetime.timedelta(days=1) return day def magicdate(input): for r, f in res: m = r.match(input.strip()) if m: return f(m) pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/nllog.py0000644000175000017500000005731111757531137023254 0ustar ryngerynge""" NetLogger interactions with the Python logging module. 
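Example (a minimal sketch; assumes logging handlers and levels have
already been configured, e.g. via the OptionParser at the end of this
module):

    from netlogger import nllog
    log = nllog.get_logger(__file__)
    log.info('my.event', value=1)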
""" __rcsid__ = "$Id: logutil.py 772 2008-05-23 22:59:22Z dang $" import logging import logging.handlers import optparse import os import sys import time import traceback import types # from netlogger import nlapi from netlogger.nlapi import Level from netlogger.version import * # extra logging levels TRACE = logging.DEBUG -1 # Top-level qualified name for netlogger PROJECT_NAMESPACE = "netlogger" # Global holder of the "correct" NetLogger class # to use when instantiating new loggers _logger_class = None def setLoggerClass(clazz): """Set the class used by NetLogger logging """ global _logger_class _logger_class = clazz # consistent with new naming style set_logger_class = setLoggerClass def get_logger(filename): """ Return a NetLogger logger with qualified name based on the provied filename. This method is indended to be called by scripts and modules by passing in their own __file__ as filename after already having initialized the logging module via the NL OptionParser or some equivalent action. If the logger name starts with a '.', it will be taken as-is, with the leading '.' stripped. Otherwise, the logger will be rooted at PROJECT_NAMESPACE. Parameters: filename - The full filename of the NL script or module requesting a logger, i.e. __file__ """ if filename == "": qname = "" elif filename[0] == '.': qname = filename else: qname = '.'.join(_modlist(filename)) return _logger(qname) def get_root_logger(): """Return root for all NetLogger loggers. """ return _logger('')# logging.getLogger(PROJECT_NAMESPACE) def _logger(qualname): """ Return a logger based on the provided qualified name Prepend PROJECT_NAMESPACE if not already there, unless qualified name starts with a '.'. """ # Mess with qualified name if not qualname: qualname = PROJECT_NAMESPACE elif qualname[0] == '.': qualname = qualname[1:] elif not qualname.startswith(PROJECT_NAMESPACE): qualname = PROJECT_NAMESPACE + '.' + qualname # Swap in "my" logger class, create logger, swap back out orig_class = logging.getLoggerClass() logging.setLoggerClass(_logger_class) logger = logging.getLogger(qualname) logging.setLoggerClass(orig_class) # Return "my" new logger instance return logger def _modlist(path): """ Return a list of module names based on the path provided. The expected path list will be rooted at either "netlogger" or "scripts" so won't contain either as one of the module names. Any tailing python extension is also trimmed. """ if path == '/': return [] head, tail = os.path.split(path) # ignore python extensions if tail.endswith(".py") or tail.endswith(".pyc"): tail = os.path.splitext(tail)[0] # stop if at top of source tree if tail in ('netlogger', 'scripts'): return [] # stop if at root of path if head == '' or head == '.': return [tail] # otherwise continue return _modlist(head) + [tail] class DoesLogging: """Mix-in class that creates the attribute 'log', setting its qualified name to the name of the module and class. """ def __init__(self, name=None): if name is None: if self.__module__ != '__main__': name = "%s.%s" % (self.__module__, self.__class__.__name__) else: name = self.__class__.__name__ self.log = _logger(name) # cache whether log is debug or higher in a flag to # lower overhead of debugging statements self._dbg = self.log.isEnabledFor(logging.DEBUG) self._trace = self.log.isEnabledFor(TRACE) class BPLogger(logging.Logger): """Logger class that writes Best-Practices formatted logs. Usage: The arguments are not exactly the same as for the Logger in the logging module. 
Instead they consist of an event name and keywords that are name=value pairs for the event contents. An exception to this is the exc() or exception() method, which takes an Exception instance as its second argument in addition to the event name. Example: log.info("my.event", value=88.7, units="seconds") # output # ts=2009-07-24T20:18:04.775650Z event=netlogger.my.event level=INFO units=seconds value=88.700000 """ def __init__(self, qualname): self._qualname = qualname self._format = nlapi.Log(newline=False, level=nlapi.Level.ALL) logging.Logger.__init__(self, qualname) def set_meta(self, **kw): """Set metadata to be logged with every event, e.g. an identifier or host name. """ self._format.setMeta(None, **kw) def log(self, level, nl_level, event, exc_info=None, **kwargs): ts = time.time() if self._qualname: event = self._qualname + '.' + event # replace '__' with '.' kwargs = dict([(key.replace('__', '.'), value) for key, value in kwargs.iteritems()]) # format as BP msg = self._format(event, ts, nl_level, **kwargs) logging.Logger.log(self, level, msg, exc_info=exc_info) def trace(self, event, **kwargs): if self.isEnabledFor(TRACE): self.log(TRACE, Level.TRACE, event, **kwargs) def debug(self, event, **kwargs): if self.isEnabledFor(logging.DEBUG): self.log(logging.DEBUG, Level.DEBUG, event, **kwargs) def info(self, event, **kwargs): self.log(logging.INFO, Level.INFO, event, **kwargs) def warning(self, event, **kwargs): self.log(logging.WARN, Level.WARN, event, **kwargs) warn = warning def error(self, event, **kwargs): self.log(logging.ERROR, Level.ERROR, event, **kwargs) def critical(self, event, **kwargs): self.log(logging.CRITICAL, Level.FATAL, event, **kwargs) def exception(self, event, err, **kwargs): estr = traceback.format_exc() estr = ' | '.join(map(lambda e: e.strip(), estr.split('\n'))) self.log(logging.ERROR, Level.ERROR, event, msg=str(err), status=-1, traceback=estr, **kwargs) exc = exception class BPSysLogger(BPLogger): """This is a hack that prepends a header to the output of BPLogger in order to work-around some bug with the Python SysLogHandler and Ubuntu rsylog that otherwise splits out the first section of the timestamp as part of the header. """ header = "netlogger" # header prefix def __init__(self, qualname): BPLogger.__init__(self, qualname) self._orig_format = self._format self._format = self.syslog_format self._hdr = self.header + ": " def set_meta(self, **kw): """See set_meta() in superclass. Repeated here because superclass method accesses a protected attribute that was modified in the constructor. """ self._orig_format.setMeta(None, **kw) def flush(self): self._orig_format.flush() def syslog_format(self, *arg, **kw): return self._hdr + self._orig_format(*arg, **kw) ############################################### ## Set BPLogger as default logging class ############################################### setLoggerClass(BPLogger) class PrettyBPLogger(BPLogger): """Logger class that writes 'pretty' Best-Practices formatted logs. This is a variation on BP format. Stack traces logged with the method exc() or exception() will be in their original form. Usage: See Usage notes for BPLogger. 
Example: log.info("my.event", value=88.7, units="seconds") # output # 2009-07-24T20:18:04.716913Z INFO netlogger.my.event - units=seconds,value=88.7 """ def __init__(self, qualname): BPLogger.__init__(self, qualname) self._format = nlapi.Log(newline=False, level=nlapi.Level.ALL, pretty=True) def exception(self, event, err, **kwargs): tbstr = traceback.format_exc() self.log(logging.ERROR, Level.ERROR, event, traceback=tbstr, **kwargs) exc = exception class RawBPLogger(logging.Logger): """Logger class that does not modify the message, just leaves it as a 'raw' dictionary. This is useful for network communication that is just going to pickle the event anyways. """ def log(self, level, nl_level, event, exc_info=None, **kwargs): ts = time.time() if self._qualname: event = self._qualname + '.' + event # replace '__' with '.' kwargs = dict([(key.replace('__', '.'), value) for key, value in kwargs.iteritems()]) # build msg dictionary msg = { 'event': event, 'ts': ts, 'level' : nl_level } msg.update(kwargs) # 'write' out logging.Logger.log(self, level, msg, exc_info=exc_info) class FakeBPLogger(logging.Logger): def __init__(self, qualname): logging.Logger.__init__(self, qualname) def set_meta(self, **kw): pass def log(self, level, nl_level, event, **kwargs): pass def trace(self, event, **kwargs): pass def debug(self, event, **kwargs): pass def info(self, event, **kwargs): pass def warning(self, event, **kwargs): pass warn = warning def error(self, event, **kwargs): pass def critical(self, event, **kwargs): pass def exception(self, event, err, **kwargs): pass exc = exception def profile(func): """ decorator for start,end event function profiling with netlogger. """ if os.getenv('NETLOGGER_ON', False) in ( 'off','0','no','false','',False): return func if type(func) is not types.FunctionType: return func if func.__module__ == '__main__': f = func.func_globals['__file__'] or 'unknown' event = '%s' %os.path.splitext(os.path.basename(f))[0] log = _logger('script') log.set_meta(file=f, pid=os.getpid(), ppid=os.getppid(), gpid=os.getgid()) else: event = '%s' %func.func_name log = _logger('%s' %func.__module__) log.set_meta(pid=os.getpid(), ppid=os.getppid(), gpid=os.getgid()) def nl_profile_func(*args, **kw): log.debug("%s.start" %event) try: v = func(*args, **kw) except: log.error("%s.end" %event) raise log.debug("%s.end" %event) return v return nl_profile_func def profile_result(func): """ decorator for start,end event function profiling with netlogger. return value is logged as result. """ if os.getenv('NETLOGGER_ON', False) in ( 'off','0','no','false','',False): return func if type(func) is not types.FunctionType: return func if func.__module__ == '__main__': f = func.func_globals['__file__'] or 'unknown' event = '%s' %os.path.splitext(os.path.basename(f))[0] log = _logger('script') log.set_meta(file=f, pid=os.getpid(), ppid=os.getppid(), gpid=os.getgid()) else: event = '%s' %func.func_name log = _logger('%s' %func.__module__) log.set_meta(pid=os.getpid(), ppid=os.getppid(), gpid=os.getgid()) def nl_profile_func(*args, **kw): log.debug("%s.start" %event) try: v = func(*args, **kw) except: log.error("%s.end" %event) raise log.debug("%s.end" %event, result=v) return v return nl_profile_func class Profiler(type): """ metaclass that will wrap all user defined methods with start and end event logs. Currently wrapping only instancemethod type. 
Variables: profiler_skip_methods: list of methods profiler will skip profile_skip_all: profiler will not wrap any methods _log: Logging object to use with class Usage: class MyClass: __metaclass__ = Profiler profiler_skip_methods = ['__init__', 'getsecret'] profiler_skip_all = False """ profiler_skip_methods = ['__init__'] profiler_skip_all = False @staticmethod def __profile_method(func): """ decorator for start,end event method profiling with netlogger skips any classmethod or staticmethod types. """ if type(func) is not types.FunctionType: return func event = '%s' %func.func_name def nl_profile_method(self, *args, **kw): self._log.debug("%s.start" %event) try: v = func(self, *args, **kw) except: self._log.error("%s.end" %event) raise self._log.debug("%s.end" %event) return v return nl_profile_method def __new__(cls, classname, bases, classdict): if os.getenv('NETLOGGER_ON', False) in ( 'off','0','no','false','',False): setLoggerClass(FakeBPLogger) classdict['_log'] = _logger('%s.%s' % ( classdict['__module__'],classname)) return type.__new__(cls,classname,bases,classdict) classdict['_log'] = log = _logger( '%s.%s' %(classdict['__module__'],classname)) log.set_meta(pid=os.getpid(), ppid=os.getppid(), gpid=os.getgid()) keys = [] if not classdict.get('profiler_skip_all',cls.profiler_skip_all): keys = filter(lambda k: type(classdict[k]) is types.FunctionType and k not in classdict.get('profiler_skip_methods',cls.profiler_skip_methods), classdict.keys()) for k in keys: classdict[k] = cls.__profile_method(classdict[k]) return type.__new__(cls,classname,bases,classdict) class MethodProfiler(Profiler): """ metaclass that will wrap all user defined methods with start and end event logs. Currently wrapping only instancemethod type. """ profiler_skip_all = False class BasicProfiler(Profiler): """ metaclass does not wrap methods with 'start' and 'end' tags, to do that use 'Profiler'. Useful for classes where one only wants to do 'precision' logging. """ profiler_skip_all = True class OptionParser(optparse.OptionParser): """Set logging 'tude for scripts. Usage: parser = NLOptionParser(..., can_be_daemon=True/False) # add rest of options to 'parser'... # This next line sets up logging as a side-effect parser.parse_args() # rest of program .. ******************************************************* | Pseudo-code description of logic to determine which | | types of logs to produce, and where to send them | ******************************************************* Variables: D - daemon mode [True | False] L - log file [Missing | Empty | filename] Logic: if (D) then case (L = Missing) error! case (L = Empty) error! case (L = filename) stderr -> filename BP logs -> filename else case (L = Missing) stderr -> stderr Pretty logs -> stderr case (L = Empty) stderr -> stderr BP logs -> stderr case (L = filename) stderr -> stderr BP logs -> filename ******************************************************* | Pseudo-code description of logic for verbosity | ******************************************************* Variables: V - verbosity [0 .. N] Q - quiet [True | False] Logic: if (Q) then case (V > 0) error! 
            else
                set verbosity -> OFF
        else
            case V = 0
                set verbosity -> WARN
            case V = 1
                set verbosity -> INFO
            case V = 2
                set verbosity -> DEBUG
            case V >= 3
                set verbosity -> TRACE
    """
    # Attribute (option parser 'dest') names
    DEST_LOG = "log_file"
    DEST_VERBOSE = "verbose"
    DEST_QUIET = "quiet"
    DEST_DAEMON = "daemon"
    DEST_ROT = "log_rotate"
    # Option names, by attribute
    OPTIONS = {
        DEST_LOG : ('-L', '--log'),
        DEST_VERBOSE : ('-v', '--verbose'),
        DEST_QUIET : ('-q', '--quiet'),
        DEST_DAEMON : (None, '--daemon'),
        DEST_ROT : ('-R', '--logrotate'),
    }
    # Verbosity (number of -v's) to logging level
    VBMAP = (logging.WARN, logging.INFO, logging.DEBUG, TRACE)

    def __init__(self, can_be_daemon=False, **kwargs):
        """Add logging-related command-line options to an option parser.

        Parameters:
            can_be_daemon - if True, add an option for daemonizing
            kwargs - additional keywords for OptionParser.
                     The 'version' argument will override the default version
        """
        if not kwargs.has_key('version'):
            version_str = "%%prog, NetLogger Toolkit version: %s\n  %s" % (
                NL_VERSION, NL_CREATE_DATE)
            version_str += "\n\n" + NL_COPYRIGHT
            kwargs['version'] = version_str
        optparse.OptionParser.__init__(self, **kwargs)
        self._dmn = can_be_daemon

    def _add_options(self):
        group = optparse.OptionGroup(self, "Logging options")
        if self._dmn:
            self.add_option(self.OPTIONS[self.DEST_DAEMON][1],
                            action='store_true', dest=self.DEST_DAEMON,
                            default=False, help="run in daemon mode")
            logfile_default = "required"
        else:
            logfile_default = "default=stderr"
        group.add_option(self.OPTIONS[self.DEST_LOG][0],
                         self.OPTIONS[self.DEST_LOG][1],
                         default=None, action='store', dest=self.DEST_LOG,
                         metavar='FILE',
                         help="write logs to FILE (%s)" % logfile_default)
        group.add_option(self.OPTIONS[self.DEST_ROT][0],
                         self.OPTIONS[self.DEST_ROT][1],
                         default=None, action='store', dest=self.DEST_ROT,
                         metavar='TIME',
                         help="rotate logs at an interval (d or h or m)")
        group.add_option(self.OPTIONS[self.DEST_VERBOSE][0],
                         self.OPTIONS[self.DEST_VERBOSE][1],
                         action="count", default=0, dest=self.DEST_VERBOSE,
                         help="more verbose logging")
        group.add_option(self.OPTIONS[self.DEST_QUIET][0],
                         self.OPTIONS[self.DEST_QUIET][1],
                         action="store_true", default=False,
                         dest=self.DEST_QUIET,
                         help="quiet mode, no logging")
        self.add_option_group(group)

    def check_required(self, opt):
        """Simplify checks for required values.
        The default value for a required option must be None.
        The easiest way to achieve this is not to provide a default.

        Call error() if the required option is not present.
        """
        option = self.get_option(opt)
        # Assumes the option's 'default' is set to None!
        if getattr(self.values, option.dest) is None:
            self.error("%s option not supplied" % option)

    def parse_args(self, args=None):
        """Process command-line options.

        Parameters:
            args - same as OptionParser.parse_args

        Return:
            The (options, args) tuple from OptionParser.parse_args().
            On a bad option combination this method does not return;
            the problem is reported via the parser's error() method,
            which by default exits the program.

        Post-conditions:
            On a normal return, logging levels and handlers
            are properly set for qualified name 'netlogger'. Otherwise,
            an error will be reported via the 'error' method of the
            parser passed to the constructor.
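        A calling sketch (application options and names are
        illustrative):

            parser = OptionParser(can_be_daemon=True)
            # add application-specific options to 'parser' here ...
            options, args = parser.parse_args()
            # logging for the 'netlogger' namespace is now configured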
""" if args is None: args = sys.argv[1:] self._add_options() options, pargs = optparse.OptionParser.parse_args(self, args) # Where and in what format to write logs if self._dmn: is_daemon = getattr(options, self.DEST_DAEMON) else: is_daemon = False logfile = getattr(options, self.DEST_LOG, None) logrot = getattr(options, self.DEST_ROT, None) if ((not logfile) or logfile == '-') and logrot: self.error("Log rotation requires a logfile") if logrot: if len(logrot) < 1: self.error("Bad log rotation interval, too short") tm_unit = logrot[-1].lower() if tm_unit not in ('h', 'm', 'd'): self.error("Bad log rotation unit '%s' " "not in m,h,d" % tm_unit) try: tm_interval = int(logrot[:-1]) except ValueError: self.error("Log rotation value '%s' must be an integer" % logrot[:-1]) do_logrot = True _tfrh = logging.handlers.TimedRotatingFileHandler else: do_logrot = False log = logging.getLogger(PROJECT_NAMESPACE) # Set handler and logger class handler = None if is_daemon: if logfile is None or logfile == '' or logfile == '-': # missing/empty self.error("log file is required in daemon mode") return # defensive else: # stderr and BP logs -> logfile setLoggerClass(BPLogger) logfile = logfile.strip() try: if do_logrot: handler = _tfrh(logfile, when=tm_unit, interval=tm_interval) else: handler = logging.FileHandler(logfile) except IOError,err: self.error("Cannot open log file '%s': %s" % (logfile, err)) sys.stderr = handler.stream handler.setFormatter(logging.Formatter("%(message)s")) else: if logfile is None or logfile == '': # missing # Pretty-BP logs -> stderr setLoggerClass(PrettyBPLogger) handler = logging.StreamHandler() elif logfile.strip() == '-': # empty # BP logs -> stderr setLoggerClass(BPLogger) handler = logging.StreamHandler() else: # BP logs -> logfile logfile = logfile.strip() setLoggerClass(BPLogger) try: if do_logrot: handler = _tfrh(logfile, when=tm_unit, interval=tm_interval) else: handler = logging.FileHandler(logfile) except IOError,err: self.error("Cannot open log file '%s': %s" % (logfile, err)) handler.setFormatter(logging.Formatter("%(message)s")) if handler: log.addHandler(handler) # Verbosity level quiet = getattr(options, self.DEST_QUIET, False) #delattr(options, self.DEST_QUIET) vb = getattr(options, self.DEST_VERBOSE, 0) #delattr(options, self.DEST_VERBOSE) if quiet and (vb > 0): self.error("quiet and verbosity options conflict") return # defensive if quiet: log.setLevel(logging.CRITICAL + 1) else: log.setLevel(self.VBMAP[min(vb, len(self.VBMAP) - 1)]) # Return remaining options and args to caller return options, pargs pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/0000755000175000017500000000000011757531667023413 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/schema/0000755000175000017500000000000011757531667024653 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/schema/stampede_schema.py0000644000175000017500000004306611757531137030350 0ustar ryngerynge""" Contains the code to create and map objects to the Stampede DB schema via a SQLAlchemy interface. 
""" __rcsid__ = "$Id: stampede_schema.py 30421 2012-02-21 21:20:02Z mgoode $" __author__ = "Monte Goode MMGoode@lbl.gov" from netlogger.analysis.schema._base import SABase, SchemaIntegrityError try: from sqlalchemy import * from sqlalchemy import orm, exceptions, func, exc from sqlalchemy.orm import relation, backref except ImportError, e: print '** SQLAlchemy library needs to be installed: http://www.sqlalchemy.org/ **\n' raise ImportError, e import time import warnings CURRENT_SCHEMA_VERSION = 4.0 # Empty classes that will be populated and mapped # to tables via the SQLAlch mapper. class Host(SABase): pass class Workflow(SABase): pass class Workflowstate(SABase): pass class Job(SABase): pass class JobEdge(SABase): pass class JobInstance(SABase): pass class Jobstate(SABase): pass class Task(SABase): pass class TaskEdge(SABase): pass class Invocation(SABase): pass class File(SABase): pass class SchemaInfo(SABase): pass def initializeToPegasusDB(db, metadata, kw={}): """ Function to create the Stampede schema if it does not exist, if it does exist, then connect and set up object mappings. @type db: SQLAlch db/engine object. @param db: Engine object to initialize. @type metadata: SQLAlch metadata object. @param metadata: Associated metadata object to initialize. @type kw: dict @param kw: Keywords to pass to Table() functions """ KeyInt = Integer # MySQL likes using BIGINT for PKs but some other # DB don't like it so swap as needed. if db.name == 'mysql': KeyInt = BigInteger kw['mysql_charset'] = 'latin1' if db.name == 'sqlite': warnings.filterwarnings('ignore', '.*does \*not\* support Decimal*.') # st_workflow definition # ==> Information comes from braindump.txt file # wf_uuid = autogenerated by database # dax_label = label # timestamp = pegasus_wf_time # submit_hostname = (currently missing) # submit_dir = run # planner_arguments = (currently missing) # user = (currently missing) # grid_dn = (currently missing) # planner version = pegasus version # parent_workflow_id = wf_id of parent workflow st_workflow = Table('workflow', metadata, Column('wf_id', KeyInt, primary_key=True, nullable=False), Column('wf_uuid', VARCHAR(255), nullable=False), Column('dag_file_name', VARCHAR(255), nullable=True), Column('timestamp', NUMERIC(precision=16,scale=6), nullable=True), Column('submit_hostname', VARCHAR(255), nullable=True), Column('submit_dir', TEXT, nullable=True), Column('planner_arguments', TEXT, nullable=True), Column('user', VARCHAR(255), nullable=True), Column('grid_dn', VARCHAR(255), nullable=True), Column('planner_version', VARCHAR(255), nullable=True), Column('dax_label', VARCHAR(255), nullable=True), Column('dax_version', VARCHAR(255), nullable=True), Column('dax_file', VARCHAR(255), nullable=True), Column('parent_wf_id', KeyInt, ForeignKey("workflow.wf_id"), nullable=True), # not marked as FK to not screw up the cascade. 
Column('root_wf_id', KeyInt, nullable=True), **kw ) Index('wf_id_KEY', st_workflow.c.wf_id, unique=True) Index('wf_uuid_UNIQUE', st_workflow.c.wf_uuid, unique=True) try: wf_props = { 'child_wf':relation(Workflow, cascade='all'), 'child_wfs':relation(Workflowstate, backref='st_workflow', cascade='all'), 'child_host':relation(Host, backref='st_workflow', cascade='all'), 'child_task':relation(Task, backref='st_workflow', cascade='all'), 'child_job':relation(Job, backref='st_workflow', cascade='all'), 'child_invocation':relation(Invocation, backref='st_workflow', cascade='all'), 'child_task_e':relation(TaskEdge, backref='st_workflow', cascade='all'), 'child_job_e':relation(JobEdge, backref='st_workflow', cascade='all'), } orm.mapper(Workflow, st_workflow, properties = wf_props) except exc.ArgumentError: pass st_workflowstate = Table('workflowstate', metadata, # All three columns are marked as primary key to produce the desired # effect - ie: it is the combo of the three columns that make a row # unique. Column('wf_id', KeyInt, ForeignKey('workflow.wf_id'), nullable=False, primary_key=True), Column('state', Enum('WORKFLOW_STARTED', 'WORKFLOW_TERMINATED'), nullable=False, primary_key=True), Column('timestamp', NUMERIC(precision=16,scale=6), nullable=False, primary_key=True, default=time.time()), Column('restart_count', INT, nullable=False), Column('status', INT, nullable=True), **kw ) Index('UNIQUE_WORKFLOWSTATE', st_workflowstate.c.wf_id, st_workflowstate.c.state, st_workflowstate.c.timestamp, unique=True) try: orm.mapper(Workflowstate, st_workflowstate) except exc.ArgumentError: pass # st_host definition # ==> Information from kickstart output file # # site_name = # hostname = # ip_address = # uname = # total_ram = st_host = Table('host', metadata, Column('host_id', KeyInt, primary_key=True, nullable=False), Column('wf_id', KeyInt, ForeignKey('workflow.wf_id'), nullable=False), Column('site', VARCHAR(255), nullable=False), Column('hostname', VARCHAR(255), nullable=False), Column('ip', VARCHAR(255), nullable=False), Column('uname', VARCHAR(255), nullable=True), Column('total_memory', KeyInt, nullable=True), **kw ) Index('UNIQUE_HOST', st_host.c.wf_id, st_host.c.site, st_host.c.hostname, st_host.c.ip, unique=True) try: orm.mapper(Host, st_host) except exc.ArgumentError: pass # static job table st_job = Table('job', metadata, Column('job_id', KeyInt, primary_key=True, nullable=False), Column('wf_id', KeyInt, ForeignKey('workflow.wf_id'), nullable=False), Column('exec_job_id', VARCHAR(255), nullable=False), Column('submit_file', VARCHAR(255), nullable=False), Column('type_desc', Enum('unknown', 'compute', 'stage-in-tx', 'stage-out-tx', 'registration', 'inter-site-tx', 'create-dir', 'staged-compute', 'cleanup', 'chmod', 'dax', 'dag'), nullable=False), Column('clustered', BOOLEAN, nullable=False), Column('max_retries', INT, nullable=False), Column('executable', TEXT, nullable=False), Column('argv', TEXT, nullable=True), Column('task_count', INT, nullable=False), **kw ) Index('job_id_KEY', st_job.c.job_id, unique=True) Index('job_type_desc_COL', st_job.c.type_desc) Index('job_exec_job_id_COL', st_job.c.exec_job_id) Index('UNIQUE_JOB', st_job.c.wf_id, st_job.c.exec_job_id, unique=True) try: orm.mapper(Job, st_job, properties = { 'child_job_instance':relation(JobInstance, backref='st_job', cascade='all', lazy=False) } ) except exc.ArgumentError: pass # static job edge table st_job_edge = Table('job_edge', metadata, Column('wf_id', KeyInt, ForeignKey('workflow.wf_id'), primary_key=True, nullable=False), 
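                    # The wf_id/parent/child triple forms the composite
                    # primary key: no single column, only the combination,
                    # identifies an edge row (see UNIQUE_JOB_EDGE below).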
Column('parent_exec_job_id', VARCHAR(255), primary_key=True, nullable=False), Column('child_exec_job_id', VARCHAR(255), primary_key=True, nullable=False), **kw ) Index('UNIQUE_JOB_EDGE', st_job_edge.c.wf_id, st_job_edge.c.parent_exec_job_id, st_job_edge.c.child_exec_job_id, unique=True) try: orm.mapper(JobEdge, st_job_edge) except exc.ArgumentError: pass # job_instance table st_job_instance = Table('job_instance', metadata, Column('job_instance_id', KeyInt, primary_key=True, nullable=False), Column('job_id', KeyInt, ForeignKey('job.job_id'), nullable=False), Column('host_id', KeyInt, ForeignKey('host.host_id', ondelete='SET NULL'), nullable=True), Column('job_submit_seq', INT, nullable=False), Column('sched_id', VARCHAR(255), nullable=True), Column('site', VARCHAR(255), nullable=True), Column('user', VARCHAR(255), nullable=True), Column('work_dir', TEXT, nullable=True), Column('cluster_start', NUMERIC(16,6), nullable=True), Column('cluster_duration', NUMERIC(10,3), nullable=True), Column('local_duration', NUMERIC(10,3), nullable=True), Column('subwf_id', KeyInt, ForeignKey('workflow.wf_id', ondelete='SET NULL'), nullable=True), Column('stdout_file', VARCHAR(255), nullable=True), Column('stdout_text', TEXT, nullable=True), Column('stderr_file', VARCHAR(255), nullable=True), Column('stderr_text', TEXT, nullable=True), Column('stdin_file', VARCHAR(255), nullable=True), Column('multiplier_factor', INT, nullable=False, default=1), Column('exitcode', INT, nullable=True), **kw ) Index('job_instance_id_KEY', st_job_instance.c.job_instance_id, unique=True) Index('UNIQUE_JOB_INSTANCE', st_job_instance.c.job_id, st_job_instance.c.job_submit_seq, unique=True) try: orm.mapper(JobInstance, st_job_instance, properties = { 'child_tsk':relation(Invocation, backref='st_job_instance', cascade='all', lazy=False), 'child_jst':relation(Jobstate, backref='st_job_instance', cascade='all', lazy=False), } ) except exc.ArgumentError: pass # st_jobstate definition # ==> Same information that currently goes into jobstate.log file, # obtained from dagman.out file # # job_id = from st_job table (autogenerated) # state = from dagman.out file (3rd column of jobstate.log file) # timestamp = from dagman,out file (1st column of jobstate.log file) st_jobstate = Table('jobstate', metadata, # All four columns are marked as primary key to produce the desired # effect - ie: it is the combo of the four columns that make a row # unique. 
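        # NOTE: default=time.time() is evaluated once, at import time, so
        # the column default is the module-load timestamp; rows are
        # expected to supply an explicit value. Passing the callable
        # (default=time.time) would compute a fresh value per insert.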
Column('job_instance_id', KeyInt, ForeignKey('job_instance.job_instance_id'), nullable=False, primary_key=True), Column('state', VARCHAR(255), nullable=False, primary_key=True), Column('timestamp', NUMERIC(precision=16,scale=6), nullable=False, primary_key=True, default=time.time()), Column('jobstate_submit_seq', INT, nullable=False, primary_key=True), **kw ) Index('UNIQUE_JOBSTATE', st_jobstate.c.job_instance_id, st_jobstate.c.state, st_jobstate.c.timestamp, st_jobstate.c.jobstate_submit_seq, unique=True) try: orm.mapper(Jobstate, st_jobstate) except exc.ArgumentError: pass # Task table st_task = Table('task', metadata, Column('task_id', KeyInt, primary_key=True, nullable=False), Column('job_id', KeyInt, ForeignKey('job.job_id', ondelete='SET NULL'), nullable=True), Column('wf_id', KeyInt, ForeignKey('workflow.wf_id'), nullable=False), Column('abs_task_id', VARCHAR(255), nullable=False), Column('transformation', TEXT, nullable=False), Column('argv', TEXT, nullable=True), Column('type_desc', VARCHAR(255), nullable=False), **kw ) Index('task_id_KEY', st_task.c.task_id, unique=True) Index('task_abs_task_id_COL', st_task.c.abs_task_id) Index('task_wf_id_COL', st_task.c.wf_id) Index('UNIQUE_TASK', st_task.c.wf_id, st_task.c.abs_task_id, unique=True) try: orm.mapper(Task, st_task, properties = { 'child_file':relation(File, backref='st_task', cascade='all'), } ) except exc.ArgumentError: pass # Task edge table st_task_edge = Table('task_edge', metadata, Column('wf_id', KeyInt, ForeignKey('workflow.wf_id'), primary_key=True, nullable=False), Column('parent_abs_task_id', VARCHAR(255), primary_key=True, nullable=True), Column('child_abs_task_id', VARCHAR(255), primary_key=True, nullable=True), **kw ) Index('UNIQUE_TASK_EDGE', st_task_edge.c.wf_id, st_task_edge.c.parent_abs_task_id, st_task_edge.c.child_abs_task_id, unique=True) try: orm.mapper(TaskEdge, st_task_edge) except exc.ArgumentError: pass # Invocation table st_invocation = Table('invocation', metadata, Column('invocation_id', KeyInt, primary_key=True, nullable=False), Column('job_instance_id', KeyInt, ForeignKey('job_instance.job_instance_id'), nullable=False), Column('task_submit_seq', INT, nullable=False), Column('start_time', NUMERIC(16,6), nullable=False, default=time.time()), Column('remote_duration', NUMERIC(10,3), nullable=False), Column('remote_cpu_time', NUMERIC(10,3), nullable=True), Column('exitcode', INT, nullable=False), Column('transformation', TEXT, nullable=False), Column('executable', TEXT, nullable=False), Column('argv', TEXT, nullable=True), Column('abs_task_id', VARCHAR(255), nullable=True), Column('wf_id', KeyInt, ForeignKey('workflow.wf_id'), nullable=False), **kw ) Index('invocation_id_KEY', st_invocation.c.invocation_id, unique=True) Index('invoc_abs_task_id_COL', st_invocation.c.abs_task_id) Index('invoc_wf_id_COL', st_invocation.c.wf_id) Index('UNIQUE_INVOCATION', st_invocation.c.job_instance_id, st_invocation.c.task_submit_seq, unique=True) try: orm.mapper(Invocation, st_invocation) except exc.ArgumentError: pass # st_file definition # ==> Information will come from kickstart output file st_file = Table('file', metadata, Column('file_id', KeyInt, primary_key=True, nullable=False), Column('task_id', KeyInt, ForeignKey('task.task_id'), nullable=True), Column('lfn', VARCHAR(255), nullable=True), Column('estimated_size', INT, nullable=True), Column('md_checksum', VARCHAR(255), nullable=True), Column('type', VARCHAR(255), nullable=True), **kw ) Index('file_id_UNIQUE', st_file.c.file_id, unique=True) 
    Index('FK_FILE_TASK_ID', st_file.c.task_id, unique=False)
    try:
        orm.mapper(File, st_file)
    except exc.ArgumentError:
        pass

    st_schema_info = Table('schema_info', metadata,
                    Column('version_number', NUMERIC(2,1),
                            primary_key=True, nullable=False),
                    Column('version_timestamp', NUMERIC(16,6),
                            primary_key=True, nullable=False,
                            default=time.time())
    )

    try:
        orm.mapper(SchemaInfo, st_schema_info)
    except exc.ArgumentError:
        pass

    metadata.create_all(db)

def main():
    """
    Example of how to create a SQLAlch engine object and
    initialize/create the Stampede DB schema.
    """
    db = create_engine('sqlite:///pegasusTest.db', echo=True)
    metadata = MetaData()
    initializeToPegasusDB(db, metadata)
    metadata.bind = db
    sm = orm.sessionmaker(bind=db, autoflush=True, autocommit=False,
                          expire_on_commit=True)
    session = orm.scoped_session(sm)

if __name__ == '__main__':
    main()
pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/schema/_base.py0000644000175000017500000000400311757531137026263 0ustar ryngerynge"""
Base for schema modules
"""
from sqlalchemy import exceptions

class SchemaException(Exception):
    """Common base.
    """
    pass

class SchemaIntegrityError(SchemaException):
    """
    Raise this when an insert/update attempts to violate the schema -
    ie: violating a unique index or a column definition (NOT/NULL, etc).

    This is just a "wrapper" to unify handling multiple exceptions
    (ie: ones that violate the defined schema) while being able to trap
    them all the same way in the calling code.
    """
    def __init__(self, value):
        self.value = 'SchemaIntegrityError: %s' % value
    def __str__(self):
        return repr(self.value)

class SABase(object):
    """
    Base class for all the DB mapper objects.
    """
    def _commit(self, session, batch, merge=False):
        if merge:
            session.merge(self)
        else:
            session.add(self)
        if batch:
            return
        session.flush()
        session.commit()

    def commit_to_db(self, session, batch=False):
        """
        Commit the DB object/row to the database.

        @type   session: sqlalchemy.orm.scoping.ScopedSession object
        @param  session: SQLAlch session to commit row to.
        """
        self._commit(session, batch)

    def merge_to_db(self, session, batch=False):
        """
        Merge the DB object/row with an existing row in the database.

        @type   session: sqlalchemy.orm.scoping.ScopedSession object
        @param  session: SQLAlch session to merge row with.

        Using this method pre-supposes that the developer has already
        assigned any primary key information to the object before
        calling.
        """
        self._commit(session, batch, merge=True)

    def __repr__(self):
        retval = '%s:\n' % self.__class__
        for k,v in self.__dict__.items():
            if k == '_sa_instance_state':
                continue
            retval += '  * %s : %s\n' % (k,v)
        return retval
pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/schema/schema_check.py0000644000175000017500000002356311757531137027623 0ustar ryngerynge"""
Code to handle various aspects of transitioning to a new version of
the Stampede schema.
"""
__rcsid__ = "$Id: schema_check.py 30802 2012-03-07 17:01:34Z mgoode $"
__author__ = "Monte Goode"

import exceptions

from netlogger.analysis.modules._base import SQLAlchemyInit, dsn_dialect
from netlogger.analysis.schema.stampede_schema import *
from netlogger.nllog import DoesLogging

class SchemaVersionError(Exception):
    """
    Custom exception.  Will be raised in the loader/etc if the schema
    is out of date so the calling program can catch and handle it.
    """
    pass

class ConnHandle(SQLAlchemyInit, DoesLogging):
    """
    Stand-alone connection class that returns a SQLAlchemy session.
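    A usage sketch (the connection string is illustrative):

        conn = ConnHandle('sqlite:///pegasusMontage.db')
        session = conn.get_session()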
""" def __init__(self, connString=None, mysql_engine=None, **kw): DoesLogging.__init__(self) if connString is None: raise ValueError("connString is required") _kw = { } dialect = dsn_dialect(connString) _kw[dialect] = { } if dialect == 'mysql': if mysql_engine is not None: _kw[dialect]['mysql_engine'] = mysql_engine try: SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB, **_kw) except exceptions.OperationalError, e: self.log.error('init', msg='%s' % ErrorStrings.get_init_error(e)) raise RuntimeError pass def get_session(self): return self.session class ErrorStrings: """ Parses SQLAlchemy OperationalErrors to generate error strings. Currently just handles case of when a user with limited permissions might hit a wall when running 4.0 code on an existing 3.1 DB. """ # Actions create_failure = 'CREATE command denied to user' # Tables schema_info = 'schema_info' @staticmethod def get_init_error(e): action_error = table_error = None try: action_error = e.args[0].split("'")[0].split('"')[1].strip() table_error = e.args[0].split("'")[-2] except IndexError: # specific parse didn't work, so pass the original # exception through. pass er = '' if action_error == ErrorStrings.create_failure and \ table_error == ErrorStrings.schema_info: er = 'The schema_info table does not exist: ' er += 'user does not have CREATE TABLE permissions - ' er += 'database schema needs to be upgraded to version %s, ' % CURRENT_SCHEMA_VERSION er += 'database admin will need to run upgrade tool' return er else: er = 'Error raised during database init: %s' % e.args[0] return er class SchemaCheck(DoesLogging): """ This handles checking the schema, setting the proper version_number if not already set, returning the current version for things like the API and methods to manually upgrade an older existing DB to the new schema. The check_schema() method should be called by any code that creates/initializes a database (like the loader) so it can scan schema and set the correct version number. """ def __init__(self, session): DoesLogging.__init__(self) self.session = session self.log.info('init') self._table_map = {} pass def _get_current_version(self): q = self.session.query(cast(func.max(SchemaInfo.version_number), Float)) if not q.one()[0]: return q.one()[0] else: return round(q.one()[0],1) def _version_check(self, version_number): self.log.info('check_schema', msg='Current version set to: %s' % version_number) if float(version_number) == CURRENT_SCHEMA_VERSION: self.log.info('check_schema', msg='Schema up to date') return True elif float(version_number) < CURRENT_SCHEMA_VERSION: self.log.error('check_schema', \ msg='Schema version %s found - expecting %s - database admin will need to run upgrade tool' % \ (float(version_number), CURRENT_SCHEMA_VERSION)) return False def _table_scan(self, sql, table, idx): if not self._table_map.has_key(table): self._table_map[table] = {} for row in self.session.execute(sql).fetchall(): self._table_map[table][row[idx]] = True def check_schema(self): """ Checks the schema to determine the version, sets the information in the schema_info table, and outputs an error message if the schema is out of date. Returns True or False so calling apps (like the loader) can handle appropriately with execptions, etc. 
""" self.log.info('check_schema.start') version_number = self._get_current_version() if not version_number: self.log.info('check_schema', msg='No version_number set in schema_info') elif version_number == 3.2: self.log.info('check_schema', msg='Schema set to 3.2 deveopment version - resetting to release version') else: return self._version_check(version_number) self.log.info('check_schema', msg='Determining schema version') table_scan = ['job_instance', 'invocation'] # Due to how the SQLAlchemy mapper works, I need to look at these with # raw SQL calls to the DBM and not use the mapper objects. for t in table_scan: if self.session.connection().dialect.name == 'sqlite': self._table_scan('PRAGMA table_info(%s)' % t, t, 1) elif self.session.connection().dialect.name == 'mysql': self._table_scan('desc %s' % t, t, 0) else: self.log.error('check_schema', msg='Dialect %s not available for scanning' \ % self.session.connection().dialect.name ) # # Checks for version 4.0 # m_factor_check = exitcode_check = remote_cpu_check = False # Check job_instance table if self._table_map['job_instance'].has_key('multiplier_factor'): m_factor_check = True if self._table_map['job_instance'].has_key('exitcode'): exitcode_check = True # Check invocation if self._table_map['invocation'].has_key('remote_cpu_time'): remote_cpu_check = True s_info = SchemaInfo() if not m_factor_check and not exitcode_check and not remote_cpu_check: self.log.info('check_schema', msg='Setting schema to version 3.1') s_info.version_number = 3.1 elif m_factor_check and exitcode_check and remote_cpu_check: s_info.version_number = 4.0 self.log.info('check_schema', msg='Setting schema to version 4.0') else: self.log.error('check_schema', msg='Error in determining database schema') raise RuntimeError s_info.commit_to_db(self.session) # # End version 4.0 code # self._table_map = {} return self._version_check(self._get_current_version()) def check_version(self): """ Check version in the schema_info table. Called for things like the stats api. Assumes 3.1 if no version set. """ version_number = self._get_current_version() if not version_number: # presume 3.1 return 3.1 else: return version_number def upgrade_to_4_0(self): """ Called by the "upgrade tool" - upgrades a populated 3.1 DB to 4.0. 
""" self.log.info('upgrade_to_4_0', msg='Upgrading to schema version 4.0') if self._get_current_version() >= 4.0: self.log.warn('upgrade_to_4_0', msg='Schema version already 4.0 - skipping upgrade') return # Alter tables r_c_t = 'ALTER TABLE invocation ADD COLUMN remote_cpu_time NUMERIC(10,3) NULL' if self.session.connection().dialect.name != 'sqlite': r_c_t += ' AFTER remote_duration' m_fac = 'ALTER TABLE job_instance ADD COLUMN multiplier_factor INT NOT NULL DEFAULT 1' e_cod = 'ALTER TABLE job_instance ADD COLUMN exitcode INT NULL' self.session.execute(r_c_t) self.session.execute(m_fac) self.session.execute(e_cod) # Seed new columns with data derived from existing 3.1 data success = ['JOB_SUCCESS', 'POST_SCRIPT_SUCCESS'] failure = ['PRE_SCRIPT_FAILED', 'SUBMIT_FAILED', 'JOB_FAILURE', 'POST_SCRIPT_FAILED'] q = self.session.query(JobInstance.job_instance_id).order_by(JobInstance.job_instance_id) for r in q.all(): qq = self.session.query(Jobstate.state) qq = qq.filter(Jobstate.job_instance_id == r.job_instance_id) qq = qq.order_by(Jobstate.jobstate_submit_seq.desc()).limit(1) for rr in qq.all(): if rr.state in success: self.session.execute('UPDATE job_instance set exitcode = 0 where job_instance_id = %s' \ % r.job_instance_id ) elif rr.state in failure: self.session.execute('UPDATE job_instance set exitcode = 256 where job_instance_id = %s' \ % r.job_instance_id) else: pass s_info = SchemaInfo() s_info.version_number = 4.0 s_info.commit_to_db(self.session) pass def upgrade(self): """ Public wrapper around the version-specific upgrade methods. """ self.check_schema() self.upgrade_to_4_0() pass def main(): pass if __name__ == '__main__': main()pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/schema/__init__.py0000644000175000017500000000000011757531137026742 0ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/workflow/0000755000175000017500000000000011757531667025265 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/workflow/util.py0000644000175000017500000000650611757531137026613 0ustar ryngerynge""" Utility code to work with workflow databases. """ __rcsid__ = "$Id: util.py 28135 2011-07-05 20:07:28Z mgoode $" __author__ = "Monte Goode" from netlogger.analysis.schema.stampede_schema import * from netlogger.analysis.modules._base import SQLAlchemyInit from netlogger import util from netlogger.nllog import DoesLogging import os, time class Expunge(SQLAlchemyInit, DoesLogging): """ Utility class to expunge a workflow and the associated data from a stampede schema database in the case of running with the replay option or a similar situation. The wf_uuid that is passed into the constructor MUST be the "top-level" workflow the user wants to delete. Which is to say if the wf_uuid is a the child of another workflow, then only the data associated with that workflow will be deleted. Any parent or sibling workflows will be left untouched. Usage:: from netlogger.analysis.workflow.util import Expunge connString = 'sqlite:///pegasusMontage.db' wf_uuid = '1249335e-7692-4751-8da2-efcbb5024429' e = Expunge(connString, wf_uuid) e.expunge() All children/grand-children/etc information and associated workflows will be removed. 
""" def __init__(self, connString, wf_uuid): """ Init object @type connString: string @param connString: SQLAlchemy connection string - REQUIRED @type wf_uuid: string @param wf_uuid: The wf_uuid string of the workflow to remove along with associated data from the database """ DoesLogging.__init__(self) self.log.info('init.start') SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB) self._wf_uuid = wf_uuid self.log.info('init.end') def expunge(self): """ Invoke this to remove workflow/information from DB. """ self.log.info('expunge.start') # delete main workflow uuid and start cascade query = self.session.query(Workflow).filter(Workflow.wf_uuid == self._wf_uuid) try: wf = query.one() except orm.exc.NoResultFound, e: self.log.warn('expunge', msg='No workflow found with wf_uuid %s - aborting expunge' % self._wf_uuid) return root_wf_id = wf.wf_id subs = [] query = self.session.query(Workflow.wf_id).filter(Workflow.root_wf_id == root_wf_id).filter(Workflow.wf_id != root_wf_id) for row in query: subs.append(row[0]) for sub in subs: query = self.session.query(Workflow).filter(Workflow.wf_id == sub) subwf = query.one() self.log.info('expunge', msg='Expunging sub-workflow: %s' % subwf.wf_uuid) i = time.time() self.session.delete(subwf) self.session.flush() self.session.commit() self.log.info('expunge', msg='Flush took: %f seconds' % (time.time() - i)) self.log.info('expunge', msg='Flushing top-level workflow: %s' % wf.wf_uuid) i = time.time() self.session.delete(wf) self.session.flush() self.session.commit() self.log.info('expunge', msg=' Flush took: %f seconds' % (time.time() - i) ) pass if __name__ == '__main__': passpegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/workflow/api_test.py0000644000175000017500000002653411757531137027451 0ustar ryngerynge""" Test module to check that a specific implementation of the workflow retrieval API is returning the correct types. Program should import test_workflow_types() function and pass it an initialized Workflow instance. Usage: from netlogger.analysis.workflow.sql_alchemy import Workflow from netlogger.analysis.workflow.api_test import test_workflow_types w = Workflow('sqlite:///pegasusMontage.db') w.initialize('5117013a-f7f1-4bc5-a2f8-517186599fad') test_workflow_types(w) Any properties that are not returning the correct type will be listed along with the return value and the incorrect type that is being returned, otherwise the section will be marked as passed. Sample output including errors: ** Checking workflow object: dax_label montage ** Checking w.jobtypes_executed: Passed ** Checking w.jobs for instance type: Passed Etc. 
""" __rcsid__ = "$Id: api_test.py 26974 2011-01-11 18:57:35Z mgoode $" __author__ = "Monte Goode MMGoode@lbl.gov" from netlogger.analysis.workflow._base import Job as JobBase, Host as HostBase,\ Task as TaskBase, Jobstate as JobstateBase, Workflowstate as WorkflowstateBase import datetime # basic type checks string = type('string') integer = type(1) floating = type(1.0) timestamp = type(datetime.datetime.utcfromtimestamp(100)) boolean = type(True) nonetype = type(None) delta = type(datetime.timedelta(seconds=2 - 1)) dictionary = type({}) array = type([]) # object type dicts workflow_types = { 'wf_uuid' : (string), 'dax_label' : (string, nonetype), 'timestamp' : (timestamp, nonetype), 'submit_hostname' : (string, nonetype), 'submit_dir' : (string, nonetype), 'planner_arguments' : (string, nonetype), 'user' : (string, nonetype), 'grid_dn' : (string, nonetype), 'parent_wf_uuid' : (string, nonetype), 'sub_wf_uuids' : (array), 'is_running' : (boolean), 'is_restarted' : (boolean), 'restart_count' : (integer), 'total_time' : (delta, nonetype), 'jobs' : (array), 'total_jobs_executed' : (integer), 'successful_jobs' : (integer), 'failed_jobs' : (integer), 'restarted_jobs' : (integer), 'submitted_jobs' : (integer), 'jobtypes_executed' : (dictionary) } workflowstate_types = { 'state' : (string, nonetype), 'timestamp' : (timestamp, nonetype) } job_types = { 'job_submit_seq' : (integer), 'name' : (string), 'host' : (HostBase), 'condor_id' : (string, nonetype), 'jobtype' : (string), 'clustered' : (boolean), 'site_name' : (string, nonetype), 'remote_user' : (string, nonetype), 'remote_working_dir' : (string, nonetype), 'cluster_start_time' : (timestamp, nonetype), 'cluster_duration' : (floating, nonetype), 'tasks' : (array), 'is_restart' : (boolean), 'is_success' : (boolean), 'is_failure' : (boolean), 'current_state' : (JobstateBase), 'all_jobstates' : (array), 'submit_time' : (timestamp, nonetype), 'elapsed_time' : (delta, nonetype), 'edge_parents' : (array), 'edge_children' : (array) } host_types = { 'site_name' : (string, nonetype), 'hostname' : (string, nonetype), 'ip_address' : (string, nonetype), 'uname' : (string, nonetype), 'total_ram' : (integer, nonetype) } jobstate_types = { 'state' : (string, nonetype), 'timestamp' : (timestamp, nonetype) } task_types = { 'task_submit_seq' : (integer), 'start_time' : (timestamp), 'duration' : (floating), 'exitcode' : (integer), 'transformation' : (string), 'executable' : (string), 'arguments' : (string, nonetype) } def test_workflow_types(w): """ Tests an instance of a workflow object and it's children to ensure that the correct return types are being returned. Used for testing differing backend implementations. This version just dumps output to the console for a quick visual check. 
""" # workflow passed = True print '** Checking workflow object:' for k,v in workflow_types.items(): testval = eval('w.%s' % k) if not isinstance(testval, v): print ' ', k, testval, type(testval) passed = False if passed: print ' Passed' else: passed = True print '\n** Checking w.jobtypes_executed:' for k,v in w.jobtypes_executed.items(): if not isinstance(k, string): print ' jobtype key problem:', k, type(k) passed = False if not isinstance(v, integer): print ' jobtype value problem', v, type(v) passed = False if passed: print ' Passed' else: passed = True print '\n** Checking workflow state:' ws = w.start_events[0] for k,v in workflowstate_types.items(): testval = eval('ws.%s' % k) if not isinstance(testval, v): print ' ', k, testval, type(testval) passed = False if passed: print ' Passed' else: passed = True print '\n** Checking w.jobs for instance type:' if w.jobs: job = w.jobs[0] if not isinstance(job, JobBase): print ' Job instance type failed:', job passed = False if passed: print ' Passed' else: passed = True # job print '\n** Checking Job properties:' for k,v in job_types.items(): testval = eval('job.%s' % k) if not isinstance(testval, v): print ' ', k, testval, type(testval) passed = False if passed: print ' Passed' else: passed = True print '\n** Checking Host properties:' for k,v in host_types.items(): testval = eval('job.host.%s' % k) if not isinstance(testval, v): print ' ', k, testval, type(testval) passed = False if passed: print ' Passed' else: passed = True print '\n** Checking Jobstate properties:' for k,v in jobstate_types.items(): testval = eval('job.current_state.%s' % k) if not isinstance(testval, v): print ' ', k, testval, type(testval) passed = False if passed: print ' Passed' else: passed = True if job.tasks: print '\n** Checking Task properties:' task = job.tasks[0] for k,v in task_types.items(): testval = eval('task.%s' % k) if not isinstance(testval, v): print ' ', k, testval, type(testval) passed = False if passed: print ' Passed' else: print 'WARNING: no valid Task object to test!' if job.edge_children: print '\n** Checking child edges' job = job.edge_children[0] if not isinstance(job, JobBase): print ' Job instance type failed:', job passed = False if passed: print ' Passed' else: passed = True if job.edge_parents: print '\n** Checking parent edges' job = job.edge_parents[0] if not isinstance(job, JobBase): print ' Job instance type failed:', job passed = False if passed: print ' Passed' else: passed = True else: print 'WARNING: no valid Job object to test!' pass def test_workflow_types_list(w): """ Like test_workflow_types, but returns a list of problems. More useful for unittests. 
""" messages = [] for k,v in workflow_types.items(): testval = eval('w.%s' % k) if not isinstance(testval, v): messages.append('Workflow property %s returned %s (%s)' \ % (k, type(testval), testval)) for k,v in w.jobtypes_executed.items(): if not isinstance(k, string): messages.append('Workflow.jobtypes_executed jobtype key problem: %s (%s)' % (type(k), k)) if not isinstance(v, integer): messages.append('Workflow.jobtypes_executed jobtype value problem: %s (%s)' % (type(v), v)) ws = w.start_events[0] for k,v in workflowstate_types.items(): testval = eval('ws.%s' % k) if not isinstance(testval, v): messages.append('Workflowstate property %s returned %s (%s)' % (k, type(testval), testval)) if w.jobs: job = w.jobs[0] if not isinstance(job, JobBase): messages.append('Workflow.job instance type failed: %s' % job.__class__) for k,v in job_types.items(): testval = eval('job.%s' % k) if not isinstance(testval, v): messages.append('Job property %s returned %s (%s)' % (k, type(testval), testval)) for k,v in host_types.items(): testval = eval('job.host.%s' % k) if not isinstance(testval, v): messages.append('Host property %s returned %s (%s)' % (k, type(testval), testval)) for k,v in jobstate_types.items(): testval = eval('job.current_state.%s' % k) if not isinstance(testval, v): messages.append('Jobstate property %s returned %s (%s)' % (k, type(testval), testval)) if job.tasks: task = job.tasks[0] for k,v in task_types.items(): testval = eval('task.%s' % k) if not isinstance(testval, v): messages.append('Task property %s returned %s (%s)' % (k, type(testval), testval)) else: messages.append('WARNING: no valid Task object to test!') if job.edge_parents: job = job.edge_parents[0] if not isinstance(job, JobBase): messages.append('Workflow.job.edge_parents instance type failed: %s' % job.__class__) if job.edge_children: job = job.edge_children[0] if not isinstance(job, JobBase): messages.append('Workflow.job.edge_children instance type failed: %s' % job.__class__) else: messages.append('WARNING: no valid Job object to test!') return messages def create_reference_dump(w): """ Create a reference dump of a workflow object. Can be used for comparing output in tests or across implementations. Could just print the top level object, but this explicitly iterates through all of the nested lists. """ print 'Workflow:\n' wt = workflow_types.keys() wt.sort() for k in wt: if k in ['jobs']: continue print k, eval('w.%s\n' % k) print '\nJobs:\n' jt = job_types.keys() jt.sort() for j in w.jobs: for k in jt: if k in ['tasks', 'all_jobstates']: continue print k, eval('j.%s' % k) print '\n Tasks:' for t in j.tasks: print ' ', t print '\n Jobstates:' for js in j.all_jobstates: print ' ', js print '\n============\n' def main(): pass if __name__ == '__main__': main()pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/workflow/stampede_statistics.py0000644000175000017500000016344011757531137031713 0ustar ryngerynge""" Library to generate statistics from the new Stampede 3.1 backend. Usage:: stats = StampedeStatistics(connString='sqlite:///montage.db') stats.initialize('unique_wf_uuid') stats.set_job_filter('dax') print stats.get_total_jobs_status() print stats.get_total_succeeded_jobs_status() stats.set_job_filter('dag') print stats.get_total_jobs_status() print stats.get_total_succeeded_jobs_status() etc. stats.close() Constructor and initialize methods: The constructor takes a required sqlalchemy connection string as the first argument. The stats class will default to returning data in the "expanded workflow" mode. 
To change this behavior and only analyze a single workflow set the
optional arg:

expand_workflow = False

along with the connection string argument.

The initialize method is called with a single argument - the wf_uuid
of the desired "root workflow" whether returning data in expanded
mode or not.  The method returns True on success, or False if a query
exception was raised, so the programmer can test for success before
calling the subsequent query methods.  This method is intended to be
called once per object.

Job filtering:

Jobs can be filtered using any of the strings in the jobtype ENUM,
with the addition of the values 'all' and 'nonsub' which will return
all jobs and non-subworkflow jobs respectively.  If the filter is not
explicitly set, it will default to the 'all' mode.

The desired filter can be set with the set_job_filter() method.  After
setting this method, all subsequent calls to the query methods will
return results according to the filter.  This can be set and reset as
many times as the user desires.  There is an example of re/setting the
job filter in the usage section above.  The query methods will return
different values after the filter is re/set.

Time filtering:

This behaves much like job filtering.  For the runtime queries, the
time intervals 'month', 'week', 'day', and 'hour' can be set using
the set_time_filter() method.  If this method is not set, it will
default to the 'month' interval for filtering.

Hostname filtering:

For the runtime queries the method set_host_filter() can be used to
filter by various hosts.  This method differs from the job and time
filtering methods in that the argument can be either a string (for a
single hostname), or an array/list of hostnames for multiple
hostnames.  Example::

 s.set_host_filter('butterfly.isi.edu')
 or
 s.set_host_filter(['engage-submit3.renci.org', 'node0012.palmetto.clemson.edu'])

Either one of these variations will work.  The first variation will
only retrieve data for that one host, the second will return data for
both hosts.  If this method is not set, no hostname filtering will be
done and information for all hosts will be returned.

Transformation filtering:

Transformation filtering works similarly to hostname filtering in
that it can accept a single string value or an array/list of strings.
However the set_transformation_filter() method accepts two keyword
arguments - 'include' and 'exclude'.  Only one of these keywords can
be set per method call.  Example::

 s.set_transformation_filter(include='pegasus::dirmanager')
 s.set_transformation_filter(exclude=['dagman::post' , 'dagman::pre' ,'condor::dagman'])
 etc.

This example demonstrates the two proper keyword invocations and that
either a string or list may be used.  If this method is not set, no
filtering will be done and information for all transforms will be
returned.  Calling this method with no arguments will reset any
previously set filters.

Return values from methods:

The return value types will vary from method to method.  Most of the
methods will return a single integer or floating point number.

Methods which return rows from the DB (rather than just a number)
will return a list which can be interacted with in one of two ways -
either by array index (list of tuples) or by a named attr (list of
objects).  The two following ways of interacting with the same query
results will both produce the same output:

Example::

 for row in s.get_job_kickstart():
     print row[0], row[1], row[2]
     print row.job_id, row.job_name, row.kickstart

Either syntax will work.
When using the named attribute method, the attributes are the names of the columns/aliases in the SELECT stanza of the query. If the row returned by the method is printed, it will display as a tuple of results per row. Methods:: get_sub_workflow_ids get_descendant_workflow_ids get_schema_version get_total_jobs_status get_total_succeeded_jobs_status get_total_failed_jobs_status get_total_jobs_retries get_total_tasks_status get_total_succeeded_tasks_status get_total_failed_tasks_status get_task_success_report get_task_failure_report get_total_tasks_retries get_workflow_states get_workflow_cum_job_wall_time get_submit_side_job_wall_time get_workflow_details get_workflow_retries get_job_statistics get_job_states get_job_instance_sub_wf_map get_failed_job_instances get_job_instance_info get_job_name get_job_site get_job_kickstart get_job_runtime get_job_seqexec get_condor_q_time get_resource_delay get_post_time get_transformation_statistics get_invocation_by_time get_jobs_run_by_time get_invocation_by_time_per_host get_jobs_run_by_time_per_host Methods listed in order of query list on wiki. https://confluence.pegasus.isi.edu/display/pegasus/Pegasus+Statistics+Python+Version+Modified """ __rcsid__ = "$Id: stampede_statistics.py 31846 2012-05-21 18:43:33Z mgoode $" __author__ = "Monte Goode" from netlogger.analysis.modules._base import SQLAlchemyInit from netlogger.analysis.schema.schema_check import ErrorStrings, SchemaCheck, SchemaVersionError from netlogger.analysis.schema.stampede_schema import * from netlogger.nllog import DoesLogging, get_logger # Main stats class. class StampedeStatistics(SQLAlchemyInit, DoesLogging): def __init__(self, connString=None, expand_workflow=True): if connString is None: raise ValueError("connString is required") DoesLogging.__init__(self) try: SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB) except exceptions.OperationalError, e: self.log.error('init', msg='%s' % ErrorStrings.get_init_error(e)) raise RuntimeError # Check the schema version before proceeding. self.s_check = SchemaCheck(self.session) if not self.s_check.check_schema(): raise SchemaVersionError self._expand = expand_workflow self._root_wf_id = None self._root_wf_uuid = None self._job_filter_mode = None self._time_filter_mode = None self._host_filter = None self._xform_filter = {'include':None, 'exclude':None} self._wfs = [] pass def initialize(self, root_wf_uuid): self.log.debug('initialize') self._root_wf_uuid = root_wf_uuid q = self.session.query(Workflow.wf_id).filter(Workflow.wf_uuid == self._root_wf_uuid) try: self._root_wf_id = q.one().wf_id except orm.exc.MultipleResultsFound, e: self.log.error('initialize', msg='Multiple results found for wf_uuid: %s' % root_wf_uuid) return False except orm.exc.NoResultFound, e: self.log.error('initialize', msg='No results found for wf_uuid: %s' % root_wf_uuid) return False if self._expand: q = self.session.query(Workflow.wf_id).filter(Workflow.root_wf_id == self._root_wf_id) for row in q.all(): self._wfs.append(row.wf_id) if not len(self._wfs): self.log.error('initialize', msg='Unable to expand wf_uuid: %s - not a root_wf_id?' 
% root_wf_uuid) return False else: self._wfs.append(self._root_wf_id) if not len(self._wfs): self.log.error('initialize', msg='No results found for wf_uuid: %s' % root_wf_uuid) return False # Initialize filters with default value self.set_job_filter() self.set_time_filter() self.set_host_filter() self.set_transformation_filter() return True def close(self): self.log.debug('close') self.disconnect() def set_job_filter(self, filter='all'): modes = ['all', 'nonsub', 'subwf', 'dax', 'dag', 'compute', 'stage-in-tx', 'stage-out-tx', 'registration', 'inter-site-tx', 'create-dir', 'staged-compute', 'cleanup', 'chmod'] try: modes.index(filter) self._job_filter_mode = filter self.log.debug('set_job_filter', msg='Setting filter to: %s' % filter) except: self._job_filter_mode = 'all' self.log.error('set_job_filter', msg='Unknown job filter %s - setting to all' % filter) def set_time_filter(self, filter='month'): modes = ['month', 'week', 'day', 'hour'] try: modes.index(filter) self._time_filter_mode = filter self.log.debug('set_time_filter', msg='Setting filter to: %s' % filter) except: self._job_filter_mode = 'month' self.log.error('set_time_filter', msg='Unknown time filter %s - setting to month' % filter) def set_host_filter(self, host=None): """ The host argument can either be a string/single hostname or it can be a list/array of hostnames. """ self._host_filter = host def set_transformation_filter(self, include=None, exclude=None): """ Either of these args can either be a single string/xform type or it can be a list/array of xform types. Both arguments can not be set at the same time. If they are, the program will log an error and not do any filtering. """ self._xform_filter['include'] = include self._xform_filter['exclude'] = exclude # # Pulls information about sub workflows # def get_sub_workflow_ids(self): """ Returns info on child workflows only. """ q = self.session.query(Workflow.wf_id, Workflow.wf_uuid, Workflow.dax_label) q = q.filter(Workflow.parent_wf_id == self._root_wf_id) return q.all() def get_descendant_workflow_ids(self): q = self.session.query(Workflow.wf_id, Workflow.wf_uuid) q = q.filter(Workflow.root_wf_id == self._root_wf_id) q = q.filter(Workflow.wf_id != self._root_wf_id) return q.all() def get_schema_version(self): return self.s_check.check_version() # # Status of initially planned wf components. 
# # # The following block of queries are documented here: # https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary # and # https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file # def _dax_or_dag_cond(self, JobO=Job): return or_(JobO.type_desc == 'dax', JobO.type_desc == 'dag') def _get_job_filter(self, JobO=Job): filters = { 'all': None, 'nonsub': not_(self._dax_or_dag_cond(JobO)), 'subwf': self._dax_or_dag_cond(JobO), 'dax': JobO.type_desc == 'dax', 'dag': JobO.type_desc == 'dag', 'compute': JobO.type_desc == 'compute', 'stage-in-tx': JobO.type_desc == 'stage-in-tx', 'stage-out-tx': JobO.type_desc == 'stage-out-tx', 'registration': JobO.type_desc == 'registration', 'inter-site-tx': JobO.type_desc == 'inter-site-tx', 'create-dir': JobO.type_desc == 'create-dir', 'staged-compute': JobO.type_desc == 'staged-compute', 'cleanup': JobO.type_desc == 'cleanup', 'chmod': JobO.type_desc == 'chmod', } return filters[self._job_filter_mode] def _max_job_seq_subquery(self): """ Creates the following subquery that is used in several queries: and jb_inst.job_submit_seq = ( select max(job_submit_seq) from job_instance where job_id = jb_inst.job_id group by job_id ) """ JobInstanceSubMax = orm.aliased(JobInstance) sub_q = self.session.query(func.max(JobInstanceSubMax.job_submit_seq).label('max_id')) sub_q = sub_q.filter(JobInstanceSubMax.job_id == JobInstance.job_id).correlate(JobInstance) sub_q = sub_q.group_by(JobInstanceSubMax.job_id).subquery() return sub_q def get_total_jobs_status(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary#WorkflowSummary-Totaljobs https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Totaljobs """ q = self.session.query(Job.job_id) if self._expand: q = q.filter(Workflow.root_wf_id == self._root_wf_id) else: q = q.filter(Workflow.wf_id == self._wfs[0]) q = q.filter(Job.wf_id == Workflow.wf_id) if self._get_job_filter() is not None: q = q.filter(self._get_job_filter()) return q.count() def get_total_succeeded_jobs_status(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary#WorkflowSummary-Totalsucceededjobs https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Totalsucceededjobs """ JobInstanceSub = orm.aliased(JobInstance, name='JobInstanceSub') sq_1 = self.session.query(func.max(JobInstanceSub.job_submit_seq).label('jss'), JobInstanceSub.job_id.label('jobid'), JobInstanceSub.exitcode.label('ec')) if self._expand: sq_1 = sq_1.filter(Workflow.root_wf_id == self._root_wf_id) else: sq_1 = sq_1.filter(Workflow.wf_id == self._wfs[0]) sq_1 = sq_1.filter(Workflow.wf_id == Job.wf_id) sq_1 = sq_1.filter(Job.job_id == JobInstanceSub.job_id) if self._get_job_filter() is not None: sq_1 = sq_1.filter(self._get_job_filter()) sq_1 = sq_1.group_by(JobInstanceSub.job_id).subquery() q = self.session.query(JobInstance.job_instance_id.label('last_job_instance')) q = q.filter(JobInstance.job_id == sq_1.c.jobid) q = q.filter(JobInstance.job_submit_seq == sq_1.c.jss) q = q.filter(sq_1.c.ec == 0).filter(sq_1.c.ec != None) return q.count() def get_total_failed_jobs_status(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary#WorkflowSummary-Totalfailedjobs https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Totalfailedjobs """ JobInstanceSub = orm.aliased(JobInstance, name='JobInstanceSub') sq_1 = 
self.session.query(func.max(JobInstanceSub.job_submit_seq).label('jss'), JobInstanceSub.job_id.label('jobid'), JobInstanceSub.exitcode.label('ec')) if self._expand: sq_1 = sq_1.filter(Workflow.root_wf_id == self._root_wf_id) else: sq_1 = sq_1.filter(Workflow.wf_id == self._wfs[0]) sq_1 = sq_1.filter(Workflow.wf_id == Job.wf_id) sq_1 = sq_1.filter(Job.job_id == JobInstanceSub.job_id) if self._get_job_filter() is not None: sq_1 = sq_1.filter(self._get_job_filter()) sq_1 = sq_1.group_by(JobInstanceSub.job_id).subquery() q = self.session.query(JobInstance.job_instance_id.label('last_job_instance')) q = q.filter(JobInstance.job_id == sq_1.c.jobid) q = q.filter(JobInstance.job_submit_seq == sq_1.c.jss) q = q.filter(sq_1.c.ec != 0).filter(sq_1.c.ec != None) return q.count() def _query_jobstate_for_instance(self, states): """ The states arg is a list of strings. Returns an appropriate subquery. """ q = self.session.query(Jobstate.job_instance_id) q = q.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) q = q.filter(Jobstate.state.in_(states)).subquery() return q def get_total_jobs_retries(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary#WorkflowSummary-TotalJobRetries https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-TotalJobRetries """ d_or_d = self._dax_or_dag_cond() sq_1 = self.session.query(func.count(Job.job_id)) if self._expand: sq_1 = sq_1.filter(Workflow.root_wf_id == self._root_wf_id) else: sq_1 = sq_1.filter(Workflow.wf_id == self._wfs[0]) sq_1 = sq_1.filter(Job.wf_id == Workflow.wf_id) sq_1 = sq_1.filter(Job.job_id == JobInstance.job_id) if self._get_job_filter() is not None: sq_1 = sq_1.filter(self._get_job_filter()) sq_1 = sq_1.subquery() sq_2 = self.session.query(func.count(distinct(JobInstance.job_id))) if self._expand: sq_2 = sq_2.filter(Workflow.root_wf_id == self._root_wf_id) else: sq_2 = sq_2.filter(Workflow.wf_id == self._wfs[0]) sq_2 = sq_2.filter(Job.wf_id == Workflow.wf_id) sq_2 = sq_2.filter(Job.job_id == JobInstance.job_id) if self._get_job_filter() is not None: sq_2 = sq_2.filter(self._get_job_filter()) sq_2 = sq_2.subquery() q = self.session.query((sq_1.as_scalar() - sq_2.as_scalar()).label('total_job_retries')) return q.all()[0].total_job_retries def get_total_tasks_status(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary#WorkflowSummary-Totaltask https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Totaltasks """ q = self.session.query(Task.task_id) if self._expand: q = q.filter(Workflow.root_wf_id == self._root_wf_id) else: q = q.filter(Workflow.wf_id == self._wfs[0]) q = q.filter(Task.wf_id == Workflow.wf_id) q = q.filter(Task.job_id == Job.job_id) if self._get_job_filter(Task) is not None: q = q.filter(self._get_job_filter(Task)) return q.count() def _base_task_status_query_old(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary#WorkflowSummary-Totalsucceededtasks https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Totalsucceededtasks """ # This query generation method is obsolete and is only being # kept for optimization reference. 
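        # Illustrative worked example (hypothetical numbers, not from the
        # original source) of the arithmetic behind get_total_jobs_retries()
        # above: a workflow that logged 12 job_instance rows covering 10
        # distinct jobs yields
        #
        #   total_job_retries = count(all job instances)   # sq_1 -> 12
        #                     - count(distinct job ids)    # sq_2 -> 10
        #                     # -> 2 retries
        #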
WorkflowSub1 = orm.aliased(Workflow, name='WorkflowSub1') JobInstanceSub1 = orm.aliased(JobInstance, name='JobInstanceSub1') JobSub1 = orm.aliased(Job, name='JobSub1') sq_1 = self.session.query(WorkflowSub1.wf_id.label('wid'), func.max(JobInstanceSub1.job_submit_seq).label('jss'), JobInstanceSub1.job_id.label('jobid') ) if self._expand: sq_1 = sq_1.filter(WorkflowSub1.root_wf_id == self._root_wf_id) else: sq_1 = sq_1.filter(WorkflowSub1.wf_id == self._wfs[0]) sq_1 = sq_1.filter(WorkflowSub1.wf_id == JobSub1.wf_id) sq_1 = sq_1.filter(JobSub1.job_id == JobInstanceSub1.job_id) sq_1 = sq_1.group_by(JobInstanceSub1.job_id) if self._get_job_filter(JobSub1) is not None: sq_1 = sq_1.filter(self._get_job_filter(JobSub1)) sq_1 = sq_1.subquery() JobInstanceSub2 = orm.aliased(JobInstance, name='JobInstanceSub2') sq_2 = self.session.query(sq_1.c.wid.label('wf_id'), JobInstanceSub2.job_instance_id.label('last_job_instance_id')) sq_2 = sq_2.filter(JobInstanceSub2.job_id == sq_1.c.jobid) sq_2 = sq_2.filter(JobInstanceSub2.job_submit_seq == sq_1.c.jss) sq_2 = sq_2.subquery() q = self.session.query(Invocation.invocation_id) q = q.filter(Invocation.abs_task_id != None) q = q.filter(Invocation.job_instance_id == sq_2.c.last_job_instance_id) q = q.filter(Invocation.wf_id == sq_2.c.wf_id) # Calling wrapper methods would invoke like so: # q = self._base_task_status_query() # q = q.filter(Invocation.exitcode == 0) # return q.count() return q def _base_task_statistics_query(self, success=True): w = orm.aliased(Workflow, name='w') j = orm.aliased(Job, name='j') ji = orm.aliased(JobInstance, name='ji') tk = orm.aliased(Task, name='tk') sq_1 = self.session.query(w.wf_id, j.job_id, ji.job_instance_id.label('jiid'), ji.job_submit_seq.label('jss'), func.max(ji.job_submit_seq).label('maxjss')) sq_1 = sq_1.join(j, w.wf_id == j.wf_id) sq_1 = sq_1.join(ji, j.job_id == ji.job_id) if self._expand: sq_1 = sq_1.filter(w.root_wf_id == self._root_wf_id) else: sq_1 = sq_1.filter(w.wf_id == self._wfs[0]) sq_1 = sq_1.group_by(j.job_id) if self._get_job_filter(j) is not None: sq_1 = sq_1.filter(self._get_job_filter(j)) sq_1 = sq_1.subquery('t') sq_2 = self.session.query(sq_1.c.wf_id, func.count(Invocation.exitcode).label('count')) sq_2 = sq_2.select_from(orm.join(sq_1, Invocation, sq_1.c.jiid == Invocation.job_instance_id)) #sq_2 = sq_2.join(tk, tk.abs_task_id == Invocation.abs_task_id) #sq_2 = sq_2.filter(tk.type_desc != 'dax') sq_2 = sq_2.filter(sq_1.c.jss == sq_1.c.maxjss) sq_2 = sq_2.filter(Invocation.abs_task_id != None) if success: sq_2 = sq_2.filter(Invocation.exitcode == 0) else: sq_2 = sq_2.filter(Invocation.exitcode != 0) sq_2 = sq_2.group_by(sq_1.c.wf_id) return sq_2 def _task_statistics_query_sum(self, success=True): s = self._base_task_statistics_query(success).subquery('tt') q = self.session.query(func.sum(s.c.count).label('task_count')) return q.one()[0] or 0 def get_total_succeeded_tasks_status(self): return self._task_statistics_query_sum(success=True) def get_total_failed_tasks_status(self): return self._task_statistics_query_sum(success=False) def get_task_success_report(self): return self._base_task_statistics_query(True).all() def get_task_failure_report(self): return self._base_task_statistics_query(False).all() def get_total_tasks_retries(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary#WorkflowSummary-Totaltaskretries https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Totaltaskretries """ sq_1 = 
self.session.query(Workflow.wf_id.label('wid'), Invocation.abs_task_id.label('tid')) if self._expand: sq_1 = sq_1.filter(Workflow.root_wf_id == self._root_wf_id) else: sq_1 = sq_1.filter(Workflow.wf_id == self._wfs[0]) sq_1 = sq_1.filter(Job.wf_id == Workflow.wf_id) sq_1 = sq_1.filter(Invocation.wf_id == Workflow.wf_id) sq_1 = sq_1.filter(Job.job_id == JobInstance.job_id) if self._get_job_filter() is not None: sq_1 = sq_1.filter(self._get_job_filter()) sq_1 = sq_1.filter(JobInstance.job_instance_id == Invocation.job_instance_id) sq_1 = sq_1.filter(Invocation.abs_task_id != None) # retries = total rows minus distinct rows rows = sq_1.all() return len(rows) - len(set(rows)) # # Run statistics # def get_workflow_states(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary#WorkflowSummary-Workflowwalltime https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Workflowwalltime """ q = self.session.query(Workflowstate.wf_id, Workflowstate.state, Workflowstate.timestamp, Workflowstate.restart_count, Workflowstate.status) q = q.filter(Workflowstate.wf_id == self._root_wf_id).order_by(Workflowstate.restart_count) return q.all() def get_workflow_cum_job_wall_time(self): """ select sum(remote_duration * multiplier_factor) FROM invocation as invoc, job_instance as ji WHERE invoc.task_submit_seq >= 0 and invoc.job_instance_id = ji.job_instance_id and invoc.wf_id in (1,2,3) and invoc.transformation <> 'condor::dagman' https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary#WorkflowSummary-Workflowcumulativejobwalltime https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Workflowcumulativejobwalltime """ q = self.session.query(cast(func.sum(Invocation.remote_duration * JobInstance.multiplier_factor), Float)) q = q.filter(Invocation.task_submit_seq >= 0) q = q.filter(Invocation.job_instance_id == JobInstance.job_instance_id) q = q.filter(Invocation.wf_id.in_(self._wfs)) q = q.filter(Invocation.transformation != 'condor::dagman') return q.first()[0] def get_submit_side_job_wall_time(self): """ select sum(local_duration * multiplier_factor) FROM job_instance as jb_inst, job as jb WHERE jb_inst.job_id = jb.job_id and jb.wf_id in (1,2,3) and ((not (jb.type_desc ='dax' or jb.type_desc ='dag')) or ((jb.type_desc ='dax' or jb.type_desc ='dag') and jb_inst.subwf_id is NULL) ) https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Summary#WorkflowSummary-Cumulativejobwalltimeasseenfromsubmitside https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Cumulativejobwalltimeasseenfromsubmitside """ q = self.session.query(cast(func.sum(JobInstance.local_duration * JobInstance.multiplier_factor), Float).label('wall_time')) q = q.filter(JobInstance.job_id == Job.job_id) q = q.filter(Job.wf_id.in_(self._wfs)) if self._expand: d_or_d = self._dax_or_dag_cond() q = q.filter(or_(not_(d_or_d), and_(d_or_d, JobInstance.subwf_id == None))) return q.first().wall_time def get_workflow_details(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Workflowdetails """ q = self.session.query(Workflow.wf_id, Workflow.wf_uuid, Workflow.parent_wf_id, Workflow.root_wf_id, Workflow.dag_file_name, Workflow.submit_hostname, Workflow.submit_dir, Workflow.planner_arguments, Workflow.user, Workflow.grid_dn, Workflow.planner_version, Workflow.dax_label, Workflow.dax_version) q =
q.filter(Workflow.wf_id.in_(self._wfs)) return q.all() def get_workflow_retries(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Workflow+Statistics+file#WorkflowStatisticsfile-Workflowretries """ sq_1 = self.session.query(func.max(Workflowstate.restart_count).label('retry')) if self._expand: sq_1 = sq_1.filter(Workflow.root_wf_id == self._root_wf_id) else: sq_1 = sq_1.filter(Workflow.wf_id.in_(self._wfs)) sq_1 = sq_1.filter(Workflowstate.wf_id == Workflow.wf_id) sq_1 = sq_1.group_by(Workflowstate.wf_id) sq_1 = sq_1.subquery() q = self.session.query(func.sum(sq_1.c.retry).label('total_retry')) return q.one().total_retry # # Job Statistics # These queries are documented: # https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file # def get_job_statistics(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-All """ if self._expand: return [] sq_1 = self.session.query(func.min(Jobstate.timestamp)) sq_1 = sq_1.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_1 = sq_1.filter(or_(Jobstate.state == 'GRID_SUBMIT', Jobstate.state == 'GLOBUS_SUBMIT', Jobstate.state == 'EXECUTE')) sq_1 = sq_1.subquery() sq_2 = self.session.query(Jobstate.timestamp) sq_2 = sq_2.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_2 = sq_2.filter(Jobstate.state == 'SUBMIT') sq_2 = sq_2.subquery() sq_3 = self.session.query(func.min(Jobstate.timestamp)) sq_3 = sq_3.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_3 = sq_3.filter(Jobstate.state == 'EXECUTE') sq_3 = sq_3.subquery() sq_4 = self.session.query(func.min(Jobstate.timestamp)) sq_4 = sq_4.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_4 = sq_4.filter(or_(Jobstate.state == 'GRID_SUBMIT', Jobstate.state == 'GLOBUS_SUBMIT')) sq_4 = sq_4.subquery() sq_5 = self.session.query(func.sum(Invocation.remote_duration)) sq_5 = sq_5.filter(Invocation.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_5 = sq_5.filter(Invocation.wf_id == Job.wf_id).correlate(Job) sq_5 = sq_5.filter(Invocation.task_submit_seq >= 0) sq_5 = sq_5.group_by().subquery() sq_6 = self.session.query(Jobstate.timestamp) sq_6 = sq_6.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_6 = sq_6.filter(Jobstate.state == 'POST_SCRIPT_TERMINATED') sq_6 = sq_6.subquery() sq_7 = self.session.query(func.max(Jobstate.timestamp)) sq_7 = sq_7.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_7 = sq_7.filter(or_(Jobstate.state == 'POST_SCRIPT_STARTED', Jobstate.state == 'JOB_TERMINATED')) sq_7 = sq_7.subquery() sq_8 = self.session.query(func.max(Invocation.exitcode)) sq_8 = sq_8.filter(Invocation.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_8 = sq_8.filter(Invocation.wf_id == Job.wf_id).correlate(Job) sq_8 = sq_8.filter(Invocation.task_submit_seq >= 0) sq_8 = sq_8.group_by().subquery() JobInstanceSub = orm.aliased(JobInstance) sq_9 = self.session.query(Host.hostname) sq_9 = sq_9.filter(JobInstanceSub.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_9 = sq_9.filter(Host.host_id == JobInstanceSub.host_id) sq_9 = sq_9.subquery() sq_10 = self.session.query(func.sum(Invocation.remote_duration * JobInstance.multiplier_factor)) sq_10 = sq_10.filter(Invocation.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_10 = 
sq_10.filter(Invocation.wf_id == Job.wf_id).correlate(Job) sq_10 = sq_10.filter(Invocation.task_submit_seq >= 0) sq_10 = sq_10.group_by().subquery() sq_11 = self.session.query(func.sum(Invocation.remote_cpu_time)) sq_11 = sq_11.filter(Invocation.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_11 = sq_11.filter(Invocation.wf_id == Job.wf_id).correlate(Job) sq_11 = sq_11.filter(Invocation.task_submit_seq >= 0) sq_11 = sq_11.group_by().subquery() q = self.session.query(Job.job_id, JobInstance.job_instance_id, JobInstance.job_submit_seq, Job.exec_job_id.label('job_name'), JobInstance.site, cast(sq_1.as_scalar() - sq_2.as_scalar(), Float).label('condor_q_time'), cast(sq_3.as_scalar() - sq_4.as_scalar(), Float).label('resource_delay'), cast(JobInstance.local_duration, Float).label('runtime'), cast(sq_5.as_scalar(), Float).label('kickstart'), cast(sq_6.as_scalar() - sq_7.as_scalar(), Float).label('post_time'), cast(JobInstance.cluster_duration, Float).label('seqexec'), sq_8.as_scalar().label('exit_code'), sq_9.as_scalar().label('host_name'), JobInstance.multiplier_factor, cast(sq_10.as_scalar(), Float).label('kickstart_multi'), sq_11.as_scalar().label('remote_cpu_time')) q = q.filter(JobInstance.job_id == Job.job_id) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.order_by(JobInstance.job_submit_seq) return q.all() def _state_sub_q(self, states, function=None): sq = None if not function: sq = self.session.query(Jobstate.timestamp) elif function == 'max': sq = self.session.query(func.max(Jobstate.timestamp)) elif function == 'min': sq = self.session.query(func.min(Jobstate.timestamp)) sq = sq.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq = sq.filter(Jobstate.state.in_(states)).subquery() return sq def get_job_states(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-JobStates """ if self._expand: return [] sq_1 = self.session.query(Host.hostname).filter(Host.host_id == JobInstance.host_id).correlate(JobInstance).subquery() #select min(timestamp) from jobstate where job_instance_id = jb_inst.job_instance_id # ) as jobS , #( #select max(timestamp)-min(timestamp) from jobstate where job_instance_id = jb_inst.job_instance_id # ) as jobDuration, sq_jobS = self.session.query(func.min(Jobstate.timestamp)) sq_jobS = sq_jobS.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance).subquery() sq_jobD = self.session.query(func.max(Jobstate.timestamp) - func.min(Jobstate.timestamp)) sq_jobD = sq_jobD.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance).subquery() sq_2 = self._state_sub_q(['PRE_SCRIPT_STARTED']) sq_3 = self._state_sub_q(['PRE_SCRIPT_TERMINATED']) sq_4 = self._state_sub_q(['PRE_SCRIPT_STARTED']) sq_5 = self._state_sub_q(['SUBMIT']) sq_6 = self._state_sub_q(['JOB_TERMINATED']) sq_7 = self._state_sub_q(['GRID_SUBMIT', 'GLOBUS_SUBMIT'], 'max') sq_8 = self._state_sub_q(['EXECUTE'], 'min') sq_9 = self._state_sub_q(['EXECUTE', 'SUBMIT'], 'max') sq_10 = self._state_sub_q(['JOB_TERMINATED']) sq_11 = self.session.query(func.min(Invocation.start_time)) sq_11 = sq_11.filter(Invocation.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_11 = sq_11.filter(Invocation.wf_id == Job.wf_id).correlate(Job) sq_11 = sq_11.filter(Invocation.task_submit_seq >= 0) sq_11 = sq_11.group_by(Invocation.job_instance_id).subquery() sq_12 = self.session.query(func.sum(Invocation.remote_duration)) sq_12 = 
sq_12.filter(Invocation.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_12 = sq_12.filter(Invocation.wf_id == Job.wf_id).correlate(Job) sq_12 = sq_12.filter(Invocation.task_submit_seq >= 0) sq_12 = sq_12.group_by(Invocation.job_instance_id).subquery() sq_13 = self._state_sub_q(['POST_SCRIPT_STARTED', 'JOB_TERMINATED'], 'max') sq_14 = self._state_sub_q(['POST_SCRIPT_TERMINATED']) sq_15 = self.session.query(func.group_concat(func.distinct(Invocation.transformation))) sq_15 = sq_15.filter(Invocation.wf_id.in_(self._wfs)) sq_15 = sq_15.filter(Invocation.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_15 = sq_15.filter(Invocation.transformation != 'dagman::post') sq_15 = sq_15.filter(Invocation.transformation != 'dagman::pre') sq_15 = sq_15.subquery() q = self.session.query(Job.job_id, JobInstance.job_instance_id, JobInstance.job_submit_seq, Job.exec_job_id.label('job_name'), JobInstance.site, sq_1.as_scalar().label('host_name'), cast(sq_jobS.as_scalar(), Float).label('jobS'), cast(sq_jobD.as_scalar(), Float).label('jobDuration'), cast(sq_2.as_scalar(), Float).label('pre_start'), cast(sq_3.as_scalar() - sq_4.as_scalar(), Float).label('pre_duration'), cast(sq_5.as_scalar(), Float).label('condor_start'), cast(sq_6.as_scalar() - sq_5.as_scalar(), Float).label('condor_duration'), cast(sq_7.as_scalar(), Float).label('grid_start'), cast(sq_8.as_scalar() - sq_7.as_scalar(), Float).label('grid_duration'), cast(sq_9.as_scalar(), Float).label('exec_start'), cast(sq_10.as_scalar() - sq_9.as_scalar(), Float).label('exec_duration'), cast(sq_11.as_scalar(), Float).label('kickstart_start'), cast(sq_12.as_scalar(), Float).label('kickstart_duration'), cast(sq_13.as_scalar(), Float).label('post_start'), cast(sq_14.as_scalar() - sq_13.as_scalar(), Float).label('post_duration'), sq_15.as_scalar().label('transformation') ) q = q.filter(JobInstance.job_id == Job.job_id) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.order_by(JobInstance.job_submit_seq) return q.all() def get_job_instance_sub_wf_map(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-Subworkflowjobinstancesmapping """ if self._expand: return [] q = self.session.query(JobInstance.job_instance_id, JobInstance.subwf_id) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.filter(Job.job_id == JobInstance.job_id) q = q.filter(self._dax_or_dag_cond()) return q.all() def get_failed_job_instances(self, final=False, all_jobs=False): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-Failedjobinstances """ if self._expand: return [] d_or_d = self._dax_or_dag_cond() if not final: q = self.session.query(JobInstance.job_instance_id, JobInstance.job_submit_seq) else: q = self.session.query(JobInstance.job_instance_id, func.max(JobInstance.job_submit_seq)) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.filter(Job.job_id == JobInstance.job_id) if not all_jobs: q = q.filter(or_(not_(d_or_d), and_(d_or_d, JobInstance.subwf_id == None))) q = q.filter(JobInstance.exitcode != 0).filter(JobInstance.exitcode != None) if final: q = q.group_by(JobInstance.job_id) q = q.order_by(JobInstance.job_submit_seq) return q.all() def get_job_instance_info(self, job_instance_id=None): """ Job instance information. Pulls all or for one instance. 
https://confluence.pegasus.isi.edu/pages/viewpage.action?pageId=14876831 """ if self._expand: return [] sq_0 = self.session.query(Workflow.submit_dir) sq_0 = sq_0.filter(Workflow.wf_id == JobInstance.subwf_id).correlate(JobInstance) sq_0 = sq_0.subquery() sq_1 = self.session.query(Job.exec_job_id) sq_1 = sq_1.filter(Job.job_id == JobInstance.job_id).correlate(JobInstance) sq_1 = sq_1.subquery() sq_2 = self.session.query(Job.submit_file) sq_2 = sq_2.filter(Job.job_id == JobInstance.job_id).correlate(JobInstance) sq_2 = sq_2.subquery() sq_3 = self.session.query(Job.executable) sq_3 = sq_3.filter(Job.job_id == JobInstance.job_id).correlate(JobInstance) sq_3 = sq_3.subquery() sq_4 = self.session.query(Job.argv) sq_4 = sq_4.filter(Job.job_id == JobInstance.job_id).correlate(JobInstance) sq_4 = sq_4.subquery() sq_5 = self.session.query(Workflow.submit_dir) sq_5 = sq_5.filter(Workflow.wf_id == self._root_wf_id).subquery() sq_6 = self.session.query(func.max(Jobstate.jobstate_submit_seq).label('max_job_submit_seq')) sq_6 = sq_6.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_6 = sq_6.subquery() sq_7 = self.session.query(Jobstate.state) sq_7 = sq_7.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_7 = sq_7.filter(Jobstate.jobstate_submit_seq == sq_6.as_scalar()) sq_7 = sq_7.subquery() sq_8 = self.session.query(Invocation.executable) sq_8 = sq_8.filter(Invocation.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_8 = sq_8.filter(Invocation.task_submit_seq == -1) sq_8 = sq_8.subquery() sq_9 = self.session.query(Invocation.argv) sq_9 = sq_9.filter(Invocation.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_9 = sq_9.filter(Invocation.task_submit_seq == -1) sq_9 = sq_9.subquery() sq_10 = self.session.query(Host.hostname) sq_10 = sq_10.filter(Host.host_id == JobInstance.host_id).correlate(JobInstance) sq_10 = sq_10.subquery() q = self.session.query(JobInstance.job_instance_id, JobInstance.site, JobInstance.stdout_file, JobInstance.stderr_file, JobInstance.stdout_text, JobInstance.stderr_text, JobInstance.work_dir, sq_0.as_scalar().label('subwf_dir'), sq_1.as_scalar().label('job_name'), sq_2.as_scalar().label('submit_file'), sq_3.as_scalar().label('executable'), sq_4.as_scalar().label('argv'), sq_5.as_scalar().label('submit_dir'), sq_7.as_scalar().label('state'), sq_8.as_scalar().label('pre_executable'), sq_9.as_scalar().label('pre_argv'), sq_10.as_scalar().label('hostname') ) if job_instance_id: q = q.filter(JobInstance.job_instance_id == job_instance_id) return q.all() def get_invocation_info(self, ji_id=None): """ SELECT task_submit_seq, exitcode, executable, argv, transformation, abs_task_id FROM invocation WHERE job_instance_id = 7 and wf_id = 1 """ if self._expand or not ji_id: return [] q = self.session.query(Invocation.task_submit_seq, Invocation.exitcode, Invocation.executable, Invocation.argv, Invocation.transformation, Invocation.abs_task_id) q = q.filter(Invocation.job_instance_id == ji_id) q = q.filter(Invocation.wf_id.in_(self._wfs)) return q.all() def get_job_name(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-Name """ if self._expand: return [] q = self.session.query(Job.job_id, JobInstance.job_instance_id, JobInstance.job_submit_seq, Job.exec_job_id.label('job_name')) q = q.filter(Job.job_id == JobInstance.job_id) q = q.filter(Job.wf_id.in_(self._wfs)).order_by(JobInstance.job_submit_seq) return q.all() def 
get_job_site(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-Site """ if self._expand: return [] q = self.session.query(Job.job_id, JobInstance.job_instance_id, JobInstance.job_submit_seq, JobInstance.site) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.filter(Job.job_id == JobInstance.job_id).group_by(Job.job_id) q = q.order_by(JobInstance.job_submit_seq) return q.all() def get_job_kickstart(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-Kickstart """ if self._expand: return [] sq_1 = self.session.query(func.sum(Invocation.remote_duration * JobInstance.multiplier_factor)) sq_1 = sq_1.filter(Invocation.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_1 = sq_1.filter(Invocation.wf_id == Job.wf_id).correlate(Job) sq_1 = sq_1.filter(Invocation.task_submit_seq >= 0) sq_1 = sq_1.group_by(Invocation.job_instance_id) sq_1 = sq_1.subquery() q = self.session.query(Job.job_id, JobInstance.job_instance_id, JobInstance.job_submit_seq, cast(sq_1.as_scalar(), Float).label('kickstart')) q = q.filter(JobInstance.job_id == Job.job_id) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.order_by(JobInstance.job_submit_seq) return q.all() def get_job_runtime(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-Runtime """ if self._expand: return [] q = self.session.query(Job.job_id, JobInstance.job_instance_id, JobInstance.job_submit_seq, JobInstance.local_duration.label('runtime')) q = q.filter(Job.job_id == JobInstance.job_id) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.group_by(Job.job_id).order_by(JobInstance.job_submit_seq) return q.all() def get_job_seqexec(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-Seqexec """ if self._expand: return [] q = self.session.query(Job.job_id, JobInstance.job_instance_id, JobInstance.job_submit_seq, JobInstance.cluster_duration) q = q.filter(Job.job_id == JobInstance.job_id) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.filter(Job.clustered != 0) q = q.order_by(JobInstance.job_submit_seq) return q.all() def get_condor_q_time(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-CondorQTime """ if self._expand: return [] sq_1 = self.session.query(func.min(Jobstate.timestamp)) sq_1 = sq_1.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_1 = sq_1.filter(or_(Jobstate.state == 'GRID_SUBMIT', Jobstate.state == 'GLOBUS_SUBMIT', Jobstate.state == 'EXECUTE')) sq_1 = sq_1.subquery() sq_2 = self.session.query(Jobstate.timestamp) sq_2 = sq_2.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_2 = sq_2.filter(Jobstate.state == 'SUBMIT') sq_2 = sq_2.subquery() q = self.session.query(Job.job_id, JobInstance.job_instance_id, JobInstance.job_submit_seq, cast(sq_1.as_scalar() - sq_2.as_scalar(), Float).label('condor_q_time')) q = q.filter(JobInstance.job_id == Job.job_id) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.order_by(JobInstance.job_submit_seq) return q.all() def get_resource_delay(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-Resource """ if self._expand: return [] sq_1 = self.session.query(func.min(Jobstate.timestamp)) sq_1 = sq_1.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_1 = sq_1.filter(Jobstate.state == 'EXECUTE') sq_1 = sq_1.subquery() 
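        # Illustrative note (not from the original source): sq_1 above picks
        # up the first EXECUTE timestamp and sq_2 below the remote submission
        # timestamp, so the column computed by this method is
        #
        #   resource_delay = min(EXECUTE ts) - (GRID_SUBMIT or GLOBUS_SUBMIT ts)
        #
        # e.g. (hypothetical numbers) a job remotely submitted at t=1000.0
        # that begins executing at t=1047.5 reports resource_delay = 47.5
        # seconds; a job that never reaches EXECUTE yields None here.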
sq_2 = self.session.query(Jobstate.timestamp) sq_2 = sq_2.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_2 = sq_2.filter(or_(Jobstate.state == 'GRID_SUBMIT', Jobstate.state == 'GLOBUS_SUBMIT')) sq_2 = sq_2.subquery() q = self.session.query(Job.job_id, JobInstance.job_instance_id, JobInstance.job_submit_seq, cast(sq_1.as_scalar() - sq_2.as_scalar(), Float).label('resource_delay')) q = q.filter(JobInstance.job_id == Job.job_id) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.order_by(JobInstance.job_submit_seq) return q.all() def get_post_time(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Job+Statistics+file#JobStatisticsfile-Post """ if self._expand: return [] sq_1 = self.session.query(Jobstate.timestamp) sq_1 = sq_1.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_1 = sq_1.filter(Jobstate.state == 'POST_SCRIPT_TERMINATED') sq_1 = sq_1.subquery() sq_2 = self.session.query(func.max(Jobstate.timestamp)) sq_2 = sq_2.filter(Jobstate.job_instance_id == JobInstance.job_instance_id).correlate(JobInstance) sq_2 = sq_2.filter(or_(Jobstate.state == 'POST_SCRIPT_STARTED', Jobstate.state == 'JOB_TERMINATED')) sq_2 = sq_2.subquery() q = self.session.query(Job.job_id, JobInstance.job_instance_id, JobInstance.job_submit_seq, cast(sq_1.as_scalar() - sq_2.as_scalar(), Float).label('post_time')) q = q.filter(JobInstance.job_id == Job.job_id) q = q.filter(Job.wf_id.in_(self._wfs)) q = q.order_by(JobInstance.job_submit_seq) return q.all() # # This query documented: # https://confluence.pegasus.isi.edu/display/pegasus/Transformation+Statistics+file # def get_transformation_statistics(self): """ SELECT transformation, count(invocation_id) as count, min(remote_duration * multiplier_factor) as min, count(CASE WHEN (invoc.exitcode = 0 and invoc.exitcode is NOT NULL) THEN invoc.exitcode END) AS success, count(CASE WHEN (invoc.exitcode != 0 and invoc.exitcode is NOT NULL) THEN invoc.exitcode END) AS failure, max(remote_duration * multiplier_factor) as max, avg(remote_duration * multiplier_factor) as avg, sum(remote_duration * multiplier_factor) as sum FROM invocation as invoc, job_instance as ji WHERE invoc.job_instance_id = ji.job_instance_id and invoc.wf_id IN (1,2,3) GROUP BY transformation """ q = self.session.query(Invocation.transformation, func.count(Invocation.invocation_id).label('count'), cast(func.min(Invocation.remote_duration * JobInstance.multiplier_factor), Float).label('min'), func.count(case([(Invocation.exitcode == 0, Invocation.exitcode)])).label('success'), func.count(case([(Invocation.exitcode != 0, Invocation.exitcode)])).label('failure'), cast(func.max(Invocation.remote_duration * JobInstance.multiplier_factor), Float).label('max'), cast(func.avg(Invocation.remote_duration * JobInstance.multiplier_factor), Float).label('avg'), cast(func.sum(Invocation.remote_duration * JobInstance.multiplier_factor), Float).label('sum')) q = q.filter(Invocation.job_instance_id == JobInstance.job_instance_id) q = q.filter(Invocation.wf_id.in_(self._wfs)) q = q.group_by(Invocation.transformation) return q.all() # # Runtime queries # https://confluence.pegasus.isi.edu/display/pegasus/Additional+queries # def _get_date_divisors(self): vals = { 'month': 2629743, 'week': 604800, 'day': 86400, 'hour': 3600 } return vals[self._time_filter_mode] def _get_host_filter(self): if self._host_filter == None: return None elif type(self._host_filter) == type('str'): return Host.hostname == self._host_filter elif 
type(self._host_filter) == type([]): return Host.hostname.in_(self._host_filter) else: return None def _get_xform_filter(self): if self._xform_filter['include'] != None and \ self._xform_filter['exclude'] != None: self.log.error('_get_xform_filter', msg='Can\'t set both transform include and exclude - reset s.set_transformation_filter()') return None elif self._xform_filter['include'] == None and \ self._xform_filter['exclude'] == None: return None elif self._xform_filter['include'] != None: if type(self._xform_filter['include']) == type('str'): return Invocation.transformation == self._xform_filter['include'] elif type(self._xform_filter['include']) == type([]): return Invocation.transformation.in_(self._xform_filter['include']) else: return None elif self._xform_filter['exclude'] != None: if type(self._xform_filter['exclude']) == type('str'): return Invocation.transformation != self._xform_filter['exclude'] elif type(self._xform_filter['exclude']) == type([]): return not_(Invocation.transformation.in_(self._xform_filter['exclude'])) else: return None pass else: return None def get_invocation_by_time(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Additional+queries """ q = self.session.query( (cast(Invocation.start_time / self._get_date_divisors(), Integer)).label('date_format'), func.count(Invocation.invocation_id).label('count'), cast(func.sum(Invocation.remote_duration), Float).label('total_runtime') ) q = q.filter(Workflow.root_wf_id == self._root_wf_id) q = q.filter(Invocation.wf_id == Workflow.wf_id) if self._get_xform_filter() is not None: q = q.filter(self._get_xform_filter()) q = q.group_by('date_format').order_by('date_format') return q.all() def get_jobs_run_by_time(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Additional+queries """ q = self.session.query( (cast(Jobstate.timestamp / self._get_date_divisors(), Integer)).label('date_format'), func.count(JobInstance.job_instance_id).label('count'), cast(func.sum(JobInstance.local_duration), Float).label('total_runtime') ) q = q.filter(Workflow.root_wf_id == self._root_wf_id) q = q.filter(Workflow.wf_id == Job.wf_id) q = q.filter(Job.job_id == JobInstance.job_id) q = q.filter(Jobstate.job_instance_id == JobInstance.job_instance_id) q = q.filter(Jobstate.state == 'EXECUTE') if self._get_job_filter() is not None: q = q.filter(self._get_job_filter()) q = q.group_by('date_format').order_by('date_format') return q.all() def get_invocation_by_time_per_host(self, host=None): """ https://confluence.pegasus.isi.edu/display/pegasus/Additional+queries """ q = self.session.query( (cast(Invocation.start_time / self._get_date_divisors(), Integer)).label('date_format'), Host.hostname.label('host_name'), func.count(Invocation.invocation_id).label('count'), cast(func.sum(Invocation.remote_duration), Float).label('total_runtime') ) q = q.filter(Workflow.root_wf_id == self._root_wf_id) q = q.filter(Invocation.wf_id == Workflow.wf_id) q = q.filter(JobInstance.job_instance_id == Invocation.job_instance_id) q = q.filter(JobInstance.host_id == Host.host_id) if self._get_host_filter() is not None: q = q.filter(self._get_host_filter()) if self._get_xform_filter() is not None: q = q.filter(self._get_xform_filter()) q = q.group_by('date_format', 'host_name').order_by('date_format') return q.all() def get_jobs_run_by_time_per_host(self): """ https://confluence.pegasus.isi.edu/display/pegasus/Additional+queries """ q = self.session.query( (cast(Jobstate.timestamp / self._get_date_divisors(), Integer)).label('date_format'), 
Host.hostname.label('host_name'), func.count(JobInstance.job_instance_id).label('count'), cast(func.sum(JobInstance.local_duration), Float).label('total_runtime') ) q = q.filter(Workflow.root_wf_id == self._root_wf_id) q = q.filter(Workflow.wf_id == Job.wf_id) q = q.filter(Job.job_id == JobInstance.job_id) q = q.filter(Jobstate.job_instance_id == JobInstance.job_instance_id) q = q.filter(Jobstate.state == 'EXECUTE') q = q.filter(JobInstance.host_id == Host.host_id) if self._get_host_filter() is not None: q = q.filter(self._get_host_filter()) if self._get_job_filter() is not None: q = q.filter(self._get_job_filter()) q = q.group_by('date_format', 'host_name').order_by('date_format') return q.all() if __name__ == '__main__': passpegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/workflow/example.py0000644000175000017500000007113511757531137027271 0ustar ryngerynge""" Example file - this can be copied, renamed and then filled in for a new implementation. Has all the appropriate @property methods filled in, base class initializations, etc. """ from netlogger.analysis.workflow._base import Workflow as BaseWorkflow, \ Job as BaseJob, Host as BaseHost, Task as BaseTask, Jobstate as BaseJobstate __rcsid__ = "$Id: example.py 26972 2011-01-11 16:19:33Z mgoode $" __author__ = "Monte Goode MMGoode@lbl.gov" class Workflow(BaseWorkflow): """ Top level workflow class that exposes information about a specific workflow and the associated jobs/etc. Usage:: w = Workflow() w.initialize('unique_wf_uuid') print w.timestamp, w.dax_label etc """ def __init__(self): BaseWorkflow.__init__(self) def initialize(self, wf_id): """ This method is the initialization method that accepts the unique wf_uuid and triggers the subclass specific queries and calculations that pulls the workflow information from the back end. The wf_id is represented in the .bp logs as "wf.id". @type wf_id: string @param wf_id: the unique wf_uuid as defined by Pegasus """ raise NotImplementedError, \ 'initialize not yet implemented' pass @property def wf_uuid(self): """ Return the wf_uuid for this workflow. @rtype: string @return: The wf_uuid of the current workflow """ raise NotImplementedError, \ 'wf_uuid not yet implemented' @property def dax_label(self): """ Return dax_label from storage backend. @rtype: string @return: The dax_label of the current workflow. """ raise NotImplementedError, \ 'dax_label not yet implemented' @property def timestamp(self): """ Return timestamp from storage backend. @rtype: python datetime obj (utc) @return: The workflow timestamp """ raise NotImplementedError, \ 'timestamp not yet implemented' @property def submit_hostname(self): """ Return submit_hostname from storage backend. @rtype: string @return: The workflow submit host """ raise NotImplementedError, \ 'submit_hostname not yet implemented' @property def submit_dir(self): """ Return submid_dir from storage backend. @rtype: string @return: The workflow submit directory """ raise NotImplementedError, \ 'submit_dir not yet implemented' @property def planner_arguments(self): """ Return planner_arguments from storage backend. @rtype: string @return: The workflow planner arguments """ raise NotImplementedError, \ 'planner_arguments not yet implemented' @property def user(self): """ Return user from storage backend. @rtype: string @return: The workflow user """ raise NotImplementedError, \ 'user not yet implemented' @property def grid_dn(self): """ Return grid_dn from storage backend. 
@rtype: string @return: The grid DN of the workflow """ raise NotImplementedError, \ 'grid_dn not yet implemented' @property def planner_version(self): """ Return planner_version from storage backend. @rtype: string @return: The planner version of the workflow """ raise NotImplementedError, \ 'planner_version not yet implemented' @property def parent_wf_uuid(self): """ Return parent_wf_uuid from storage backend. @rtype: string @return: The parent wf_uuid if it exists """ raise NotImplementedError, \ 'parent_wf_uuid not yet implemented' @property def sub_wf_uuids(self): """ Returns a list of the wf_uuids of any sub-workflows associated with the current workflow object. Returned in the order in which they are entered in the workflow table. If no sub-workflows are found, return an empty list. @rtype: List of strings @return: The wf_uuids of any sub-workflows. """ raise NotImplementedError, \ 'sub_wf_uuids not yet implemented' @property def start_events(self): """ Return a list of Workflowstate object instances representing the re/start events. The list should ordered by timestamp. In the event that there are no logged workflow states an empty list should be returned. In the case that there is a dropped event (ie: no matching end event to a start event or vice versa), the missing event will be padded as a None. This is an error situation. @rtype: List of Workflowstate object instances (or None) @return: Returns a list with workflow start events. """ raise NotImplementedError, \ 'start_events not yet implemented' @property def end_events(self): """ Return a list of Workflowstate object instances representing the end events. The list should ordered by timestamp. In the event that there are no logged workflow states an empty list should be returned. In the case that there is a dropped event (ie: no matching end event to a start event or vice versa), the missing event will be padded as a None. This is an error situation. @rtype: List of Workflowstate object instances (or None) @return: Returns a list with workflow end events. """ raise NotImplementedError, \ 'end_events not yet implemented' @property def is_running(self): """ Derived boolean flag indicating if the workflow is currently running. Derived in a backend-appropriate way. @rtype: boolean @return: Indicates if the workflow is running. """ raise NotImplementedError, \ 'running not yet implemented' @property def is_restarted(self): """ Derived boolean flag indicating if this workflow has been retstarted. Derived in a backend-appropriate way. @rtype: boolean @return: Indicates if the workflow has been restarted. """ raise NotImplementedError, \ 'is_restarted not yet implemented' @property def restart_count(self): """ Returns an integer reflecting restart count. Derived in a backend-appropriate way. @rtype: integer @return: Number of workflow restarts. """ raise NotImplementedError, \ 'restart_count not yet implemented' @property def total_time(self): """ Returns the total runtime of the workflow. This is defined as the delta between either the first start event and last end event, or if the job is still running, between the first start event and current epoch UTC time. @rtype: python datetime.timedelta object or None @return: The total time of the workflow. """ raise NotImplementedError, \ 'total_time not yet implemented' @property def jobs(self): """ Returns a list of the jobs associated with this workflow object. 
This property is a prime candidate for lazy eval as there is no need to query the backend for this information if the user is only looking at superficial information about the workflow - ie: if it is running, how long it took, etc. @rtype: list of Job objects @return: List of job objects associated with current wf """ raise NotImplementedError, \ 'jobs not yet implemented' @property def total_jobs_executed(self): """ Return the number of jobs that were executed as an integer value. @rtype: integer @return: Number of jobs executed """ raise NotImplementedError, \ 'total_jobs_executed not yet implemented' @property def successful_jobs(self): """ Return the number of jobs that executed successfully as an integer value. @rtype: integer @return: Number of sucessfully executed jobs """ raise NotImplementedError, \ 'successful_jobs not yet implemented' @property def failed_jobs(self): """ Return the number of jobs that failed as an integer value. @rtype: integer @return: Number of failed jobs """ raise NotImplementedError, \ 'failed_jobs not yet implemented' @property def restarted_jobs(self): """ Return the number of jobs that were restarted. @rtype: integer @return: Number of restarted jobs """ raise NotImplementedError, \ 'restarted_jobs not yet implemented' @property def submitted_jobs(self): """ Return the number of jobs that were submitted. @rtype: integer @return: Number of submitted jobs """ raise NotImplementedError, \ 'submitted_jobs not yet implemented' @property def jobtypes_executed(self): """ Returns a dictionary of the various jobtypes that are executed in the current workflow and a count of how many of each type. Example: {'create dir': 1, 'compute': 105} @rtype: dict - string keys, integer values @return: A dictionary of a count of the jobtypes that were executed in the current workflow. """ raise NotImplementedError, \ 'jobtypes_executed not yet implemented' class Job(BaseJob): """ Class to retrieve and expose information about a specific job. This class is intended to be instantiated inside a Workflow() object and not as a stand-alone instance. Usage:: j = Job() j.initialize('unique_wf_uuid', 3) print j.name etc """ _indent = 2 def __init__(self): BaseJob.__init__(self) def initialize(self, wf_id, job_id): """ This method is the initialization method that accepts the unique wf_uuid and and job_submit_seq that triggers the subclass specific queries and calculations that pulls job information from the back end. The wf_id is represented in the .bp logs as "wf.id". The job_id is represented as "job.id". @type wf_id: string @param wf_id: the unique wf_uuid as defined by Pegasus @type job_id: integer @param job_id: the sequence number as defined by tailstatd """ raise NotImplementedError, \ 'initialize not yet implemented' @property def job_submit_seq(self): """ Return job_submit_seq of current job (an input arg). @rtype: integer @return: Return job_submit_seq of current job """ raise NotImplementedError, \ 'job_submit_seq not yet implemented' @property def name(self): """ Return the job name from the storage backend. @rtype: string @return: Return job name """ raise NotImplementedError, \ 'name not yet implemented' @property def host(self): """ Return job host information from storage backend. @rtype: Host object instance @return: Return a host object with host info for current job. """ raise NotImplementedError, \ 'host not yet implemented' @property def condor_id(self): """ Return the condor_id from the storage backend. 
@rtype: string (looks like a float however) @return: Return job condor_id """ raise NotImplementedError, \ 'condor_id not yet implemented' @property def jobtype(self): """ Return jobtype from the storage backend. @rtype: string @return: Return jobtype """ raise NotImplementedError, \ 'jobtype not yet implemented' @property def clustered(self): """ Return the clustered boolean flag from the storage backend. This may need to be derived depending on how the backend implementation does/not store this value. @rtype: boolean @return: Return True or False depending on if the job is clustered or not. """ raise NotImplementedError, \ 'clustered not yet implemented' @property def site_name(self): """ Return the site name from the storage backend. @rtype: string @return: Return site_name for current job """ raise NotImplementedError, \ 'site_name not yet implemented' @property def remote_user(self): """ Return the remote use of the current job from the storage backend. @rtype: string @return: Return remote_user for current job. """ raise NotImplementedError, \ 'remote_user not yet implemented' @property def remote_working_dir(self): """ Return the remote working directory of the current job from the storage backend. @rtype: string @return: """ raise NotImplementedError, \ 'remote_working_dir not yet implemented' @property def cluster_start_time(self): """ Return the job cluster start time as a python datetime object (utc) if it exists or None if it does not. Not all jobs will have this value. @rtype: python datetime obj (utc) or None @return: Return job cluster start time. """ raise NotImplementedError, \ 'cluster_start_time not yet implemented' @property def cluster_duration(self): """ Return the job cluster duration from the storage backend as a float or None if this value is not assocaited with the current job. Not all j will have this value. @rtype: float (from db) @return: """ raise NotImplementedError, \ 'cluster_duration not yet implemented' @property def tasks(self): """ Returns a list of the tasks associated with this job object. This property is a prime candidate for lazy eval as there is no need to query the backend for this information if the user is only looking at superficial information about the job - ie: its current state, name, etc. @rtype: list of Task objects @return: List of task objects associated with current job """ raise NotImplementedError, \ 'tasks not yet implemented' @property def is_restart(self): """ Return a boolean flag indicating whether or not this curent job is a "restart". This value will be derived from backend information as appropriate. @rtype: boolean @return: Return True or False if the job is a restart or not. """ raise NotImplementedError, \ 'is_restart not yet implemented' @property def is_success(self): """ Return a boolean flag indicating whether or not this curent job was successful. This value will be derived from backend information as appropriate. @rtype: boolean @return: Return True or False if the job is a restart """ raise NotImplementedError, \ 'is_success not yet implemented' @property def is_failure(self): """ Return a boolean flag indicating whether or not this curent job has failed. This value will be derived from backend information as appropriate. @rtype: boolean @return: Return True or False if the job is a restart """ raise NotImplementedError, \ 'is_failure not yet implemented' @property def current_state(self): """ Return the current state of this job. 
This property pretty much requires lazy evaluation every access rather than attribute caching. A single job moves through multiple jobstates and this property should return the current state of the running job when accessed. In the event that there is not yet a jobstate logged for this job, the Jobstate instance will have its properties "state" and "timestamp" set to None. @rtype: Jobstate object instance @return: Returns the current state and timestamp """ raise NotImplementedError, \ 'current_state not yet implemented' @property def all_jobstates(self): """ Return a list of Jobstate object instances representing the states that the job has moved through. The list should ordered by the order of the different jobstate submissions. In the event that there are no logged jobstates an empty list should be returned. This property may do light weight attribute caching, but the current jobstate should still be lazily evaluated and the list updated if need be. @rtype: List of Jobstate object instances @return: Returns a list with all the jobstates this job has moved through. """ raise NotImplementedError, \ 'all_jobstates not yet implemented' @property def submit_time(self): """ Return the timestamp of when this job was submitted. @rtype: python datetime obj (utc) or None @return: Return the submit time of this job """ raise NotImplementedError, \ 'submit_time not yet implemented' @property def elapsed_time(self): """ Return the elapsed time of this job. Calculated as the delta between the submit time and the current/last jobstate timestamp. @rtype: python datetime obj (utc) or None @return: Return the elapsed time of this job """ raise NotImplementedError, \ 'elapsed_time not yet implemented' @property def edge_parents(self): """ Return a list of job objects for the parent job edges for this current job object. The job objects returned by this property will NOT contain additional edge information (ie: this method will return an empty list) to avoid a recursive situation. @rtype: list containing Job objects @return: Return the parent edge Job objects. """ raise NotImplementedError, \ 'edge_parents not yet implemented' @property def edge_children(self): """ Return a list of job objects for the child job edges for this current job object. The job objects returned by this property will NOT contain additional edge information (ie: this method will return an empty list) to avoid a recursive situation. @rtype: list containing Job objects @return: Return the child edge Job objects. """ raise NotImplementedError, \ 'edge_children not yet implemented' class Jobstate(BaseJobstate): """ A small class that returns jobstate information. Intended to be instantiated by a call to job.current_state. Is not cached so multiple calls will return the latest information. Usage:: js = Jobstate() js.initialize('unique_wf_id', 3) print js.state, js.timestamp etc. """ _indent = 3 def __init__(self): BaseJobstate.__init__(self) def initialize(self, wf_id, job_id): """ This method is the initialization method that accepts the unique wf_uuid and and job_submit_seq that triggers the subclass specific queries and calculations that pulls host information from the back end. The wf_id is represented in the .bp logs as "wf.id". The job_id is represented as "job.id". 
@type wf_id: string @param wf_id: the unique wf_uuid as defined by Pegasus @type job_id: integer @param job_id: the sequence number as defined by tailstatd """ raise NotImplementedError, \ 'initialize not yet implemented' @property def state(self): """ Return the current jobstate state. Might be none if there is no state information logged yet. @rtype: string or None @return: Return current job state """ raise NotImplementedError, \ 'state not implemented yet' @property def timestamp(self): """ Return the timestampe of the current job state. Might be none if there is no state information logged yet. @rtype: python datetime obj (utc) or None @return: Return timestamp of current job state """ raise NotImplementedError, \ 'timestamp not implemented yet' class Host(BaseHost): """ A straightforward class that contains host information about a job. This is intended to be instantiated inside a Job() object and not as a standalone instance. Usage:: h = Host() h.initialize('unique_wf_uuid', 3) print h.site_name, h.hostname etc. """ _indent = 3 def __init__(self): BaseHost.__init__(self) def initialize(self, wf_id, job_id): """ This method is the initialization method that accepts the unique wf_uuid and and job_submit_seq that triggers the subclass specific queries and calculations that pulls host information from the back end. The wf_id is represented in the .bp logs as "wf.id". The job_id is represented as "job.id". @type wf_id: string @param wf_id: the unique wf_uuid as defined by Pegasus @type job_id: integer @param job_id: the sequence number as defined by tailstatd """ raise NotImplementedError, \ 'initialize not yet implemented' @property def site_name(self): """ Return the site name associated with this host. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return host site name or None """ raise NotImplementedError, \ 'site_name not yet implemented' @property def hostname(self): """ Return the host name associated with this host. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return hostname or None """ raise NotImplementedError, \ 'hostname not yet implemented' @property def ip_address(self): """ Return the ip address associated with this host. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return host ip address or None """ raise NotImplementedError, \ 'ip_address not yet implemented' @property def uname(self): """ Return the uname information of this host machine. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return host uname or None. """ raise NotImplementedError, \ 'uname not yet implemented' @property def total_ram(self): """ Return the total ram of this host machine. Might be None if a host has not been associated with a particular job at time of calling. @rtype: integer or None @return: Return host RAM or None """ raise NotImplementedError, \ 'total_ram not yet implemented' class Task(BaseTask): """ Class to expose information about a task associated with a particular job. This is intended to be instantiated inside of a Job() object and not as a stand alone instance. 
Usage:: t = Task() t.initialize('unique_wf_uuid', 3, 1) print t.start_time, t.duration etc """ _indent = 3 def __init__(self): BaseTask.__init__(self) def initialize(self, wf_id, job_id, task_id): """ This method is the initialization method that accepts the unique wf_uuid, job_submit_seq and task_submit_seq that triggers the subclass specific queries and calculations that pulls task information from the back end. The wf_id is represented in the .bp logs as "wf.id". The job_id is represented as "job.id". The task_id is represented as "task.id". @type wf_id: string @param wf_id: the unique wf_uuid as defined by Pegasus @type job_id: integer @param job_id: the sequence number as defined by tailstatd @type task_id: integer @param task_id: the sequence number as defined by tailstatd """ raise NotImplementedError, \ 'initialize not yet implemented' @property def task_submit_seq(self): """ Return the task submit sequence number from the storage backend as an integer. @rtype: int @return: submit sequence number """ raise NotImplementedError, \ 'task_submit_seq not yet implemented' @property def start_time(self): """ Return start time of this task from the storage backend as a python datetime object (utc). @rtype: python datetime obj (utc) @return: Return task start time """ raise NotImplementedError, \ 'start_time not yet implemented' @property def duration(self): """ Return duration of this task from the storage backend as a float. @rtype: float (from db) @return: Return the duration of this task """ raise NotImplementedError, \ 'duration not yet implemented' @property def exitcode(self): """ Return the exitcode of this task from the storage backend as an integer. @rtype: integer @return: Return the task exitcode """ raise NotImplementedError, \ 'exitcode not yet implemented' @property def transformation(self): """ Return the transformation type of this task from the storage backend. @rtype: string @return: Return task transformation """ raise NotImplementedError, \ 'transformation not yet implemented' @property def executable(self): """ Return the executable invoked by this task from the storage backend. @rtype: string @return: Return the task executable """ raise NotImplementedError, \ 'executable not yet implemented' @property def arguments(self): """ Return the task args from the storage backend. @rtype: string @return: Return the task arguments """ raise NotImplementedError, \ 'arguments not yet implemented' if __name__ == '__main__': w = Workflow() print wpegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/workflow/_base.py0000644000175000017500000010275111757531137026706 0ustar ryngerynge""" Base classes that define the interface that the back-end specific retrieval classes must expose. Each read-only property includes inline epydoc describing what is to be returned and what type (string, int, an object) each property must return when called. How a subclass caches attributes or performs lazy evaluation is implementation-specific and not defined here. The only conformity necessary is that the proper property returns the correct data as the correct type. Any class attributes or methods that are specific to the subclass implementations MUST start with an underscore: ie: self._wf_uuid = None or def _calculateValue(self, job): This signals that the attr or method is subclass-specific and also allows the inherited __repr__() method to ignore it. 
""" __rcsid__ = "$Id: _base.py 26972 2011-01-11 16:19:33Z mgoode $" __author__ = "Monte Goode MMGoode@lbl.gov" import logging from netlogger.nllog import DoesLogging, get_root_logger class NullHandler(logging.Handler): def emit(self, record): pass # silence the logger since lib may be used by non-netloger apps. get_root_logger().addHandler(NullHandler()) class WorkflowBase(DoesLogging): # indent level for pretty printing = override in subclasses # if you want different indent levels for your various # objects. _indent = 1 def __init__(self): DoesLogging.__init__(self) pass def __repr__(self): spacer = ' ' retval = '%s:' % self.__class__ if self._indent > 1: retval = '\n%s+++ %s:' % (spacer * self._indent, self.__class__) for i in dir(self): if i.startswith('_') or i == 'initialize' or i == 'db' \ or i == 'metadata' or i == 'session' or i == 'log': continue try: retval += '\n%s* %s : %s' % (spacer * self._indent, i, eval('self.%s' % i)) except NotImplementedError, e: retval += '\n%s* %s : WARNING: %s' % (spacer * self._indent, i,e) return retval class Workflow(WorkflowBase): """ Top level workflow class that exposes information about a specific workflow and the associated jobs/etc. Usage:: w = Workflow() w.initialize('unique_wf_uuid') print w.timestamp, w.dax_label etc """ def __init__(self): WorkflowBase.__init__(self) def initialize(self, wf_id): """ This method is the initialization method that accepts the unique wf_uuid and triggers the subclass specific queries and calculations that pulls the workflow information from the back end. The wf_id is represented in the .bp logs as "wf.id". @type wf_id: string @param wf_id: the unique wf_uuid as defined by Pegasus """ raise NotImplementedError, \ 'initialize not yet implemented' pass @property def wf_uuid(self): """ Return the wf_uuid for this workflow. @rtype: string @return: The wf_uuid of the current workflow """ raise NotImplementedError, \ 'wf_uuid not yet implemented' @property def dax_label(self): """ Return dax_label from storage backend. @rtype: string @return: The dax_label of the current workflow. """ raise NotImplementedError, \ 'dax_label not yet implemented' @property def timestamp(self): """ Return timestamp from storage backend. @rtype: python datetime obj (utc) @return: The workflow timestamp """ raise NotImplementedError, \ 'timestamp not yet implemented' @property def submit_hostname(self): """ Return submit_hostname from storage backend. @rtype: string @return: The workflow submit host """ raise NotImplementedError, \ 'submit_hostname not yet implemented' @property def submit_dir(self): """ Return submid_dir from storage backend. @rtype: string @return: The workflow submit directory """ raise NotImplementedError, \ 'submit_dir not yet implemented' @property def planner_arguments(self): """ Return planner_arguments from storage backend. @rtype: string @return: The workflow planner arguments """ raise NotImplementedError, \ 'planner_arguments not yet implemented' @property def user(self): """ Return user from storage backend. @rtype: string @return: The workflow user """ raise NotImplementedError, \ 'user not yet implemented' @property def grid_dn(self): """ Return grid_dn from storage backend. @rtype: string @return: The grid DN of the workflow """ raise NotImplementedError, \ 'grid_dn not yet implemented' @property def planner_version(self): """ Return planner_version from storage backend. 
        @rtype: string
        @return: The planner version of the workflow
        """
        raise NotImplementedError, \
            'planner_version not yet implemented'

    @property
    def parent_wf_uuid(self):
        """
        Return parent_wf_uuid from storage backend.

        @rtype: string
        @return: The parent wf_uuid if it exists
        """
        raise NotImplementedError, \
            'parent_wf_uuid not yet implemented'

    @property
    def sub_wf_uuids(self):
        """
        Returns a list of the wf_uuids of any sub-workflows associated
        with the current workflow object.  Returned in the order in which
        they are entered in the workflow table.  If no sub-workflows are
        found, return an empty list.

        @rtype: List of strings
        @return: The wf_uuids of any sub-workflows.
        """
        raise NotImplementedError, \
            'sub_wf_uuids not yet implemented'

    @property
    def start_events(self):
        """
        Return a list of Workflowstate object instances representing
        the re/start events.  The list should be ordered by timestamp.
        In the event that there are no logged workflow states an empty
        list should be returned.  In the case that there is a dropped
        event (ie: no matching end event to a start event or vice versa),
        the missing event will be padded as a None.  This is an error
        situation.

        @rtype: List of Workflowstate object instances (or None)
        @return: Returns a list with workflow start events.
        """
        raise NotImplementedError, \
            'start_events not yet implemented'

    @property
    def end_events(self):
        """
        Return a list of Workflowstate object instances representing
        the end events.  The list should be ordered by timestamp.
        In the event that there are no logged workflow states an empty
        list should be returned.  In the case that there is a dropped
        event (ie: no matching end event to a start event or vice versa),
        the missing event will be padded as a None.  This is an error
        situation.

        @rtype: List of Workflowstate object instances (or None)
        @return: Returns a list with workflow end events.
        """
        raise NotImplementedError, \
            'end_events not yet implemented'

    @property
    def is_running(self):
        """
        Derived boolean flag indicating if the workflow is currently
        running.  Derived in a backend-appropriate way.

        @rtype: boolean
        @return: Indicates if the workflow is running.
        """
        raise NotImplementedError, \
            'is_running not yet implemented'

    @property
    def is_restarted(self):
        """
        Derived boolean flag indicating if this workflow has been
        restarted.  Derived in a backend-appropriate way.

        @rtype: boolean
        @return: Indicates if the workflow has been restarted.
        """
        raise NotImplementedError, \
            'is_restarted not yet implemented'

    @property
    def restart_count(self):
        """
        Returns an integer reflecting restart count.  Derived in a
        backend-appropriate way.

        @rtype: integer
        @return: Number of workflow restarts.
        """
        raise NotImplementedError, \
            'restart_count not yet implemented'

    @property
    def total_time(self):
        """
        Returns the total runtime of the workflow.  This is defined
        as the delta between either the first start event and last
        end event, or if the workflow is still running, between the
        first start event and current epoch UTC time.

        @rtype: python datetime.timedelta object or None
        @return: The total time of the workflow.
        """
        raise NotImplementedError, \
            'total_time not yet implemented'

    @property
    def jobs(self):
        """
        Returns a list of the jobs associated with this workflow
        object.  This property is a prime candidate for lazy eval
        as there is no need to query the backend for this information
        if the user is only looking at superficial information about
        the workflow - ie: if it is running, how long it took, etc.
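        An illustrative access pattern (a sketch only: ``SomeWorkflowImpl``
        stands in for a hypothetical concrete subclass; the attribute
        names are the properties defined by this interface)::

            w = SomeWorkflowImpl()
            w.initialize('unique_wf_uuid')
            if w.is_running:        # cheap state checks need not touch jobs
                print w.restart_count
            for job in w.jobs:      # first access may trigger the backend query
                print job.name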
@rtype: list of Job objects @return: List of job objects associated with current wf """ raise NotImplementedError, \ 'jobs not yet implemented' @property def total_jobs_executed(self): """ Return the number of jobs that were executed as an integer value. @rtype: integer @return: Number of jobs executed """ raise NotImplementedError, \ 'total_jobs_executed not yet implemented' @property def successful_jobs(self): """ Return the number of jobs that executed successfully as an integer value. @rtype: integer @return: Number of sucessfully executed jobs """ raise NotImplementedError, \ 'successful_jobs not yet implemented' @property def failed_jobs(self): """ Return the number of jobs that failed as an integer value. @rtype: integer @return: Number of failed jobs """ raise NotImplementedError, \ 'failed_jobs not yet implemented' @property def restarted_jobs(self): """ Return the number of jobs that were restarted. @rtype: integer @return: Number of restarted jobs """ raise NotImplementedError, \ 'restarted_jobs not yet implemented' @property def submitted_jobs(self): """ Return the number of jobs that were submitted. @rtype: integer @return: Number of submitted jobs """ raise NotImplementedError, \ 'submitted_jobs not yet implemented' @property def jobtypes_executed(self): """ Returns a dictionary of the various jobtypes that are executed in the current workflow and a count of how many of each type. Example: {'create dir': 1, 'compute': 105} @rtype: dict - string keys, integer values @return: A dictionary of a count of the jobtypes that were executed in the current workflow. """ raise NotImplementedError, \ 'jobtypes_executed not yet implemented' class Workflowstate(WorkflowBase): """ Class to expose information about a specific workflow event. This is a simple class to expose state and timestamp information as class attributes. Usage:: ws = Workflowstate() ws.initialize(state, timestamp) print ws.state etc """ _indent = 2 def __init__(self): WorkflowBase.__init__(self) def initialize(self, state, timestamp): """ This method is the initialization method that accepts the state and timestamp of a given workflow state event. @type state: string @param state: the jobstate entry as defined by Pegasus. @type timestamp: float @param timestamp: the epoch timestamp as reported by Pegasus. """ raise NotImplementedError, \ 'initialize not yet implemented' @property def state(self): """ Return the current workflowstate state. Might be none if there is no state information logged yet. @rtype: string or None @return: Return current job state """ raise NotImplementedError, \ 'state not implemented yet' @property def timestamp(self): """ Return the timestamp of the current workflow state. Might be none if there is no state information logged yet. @rtype: python datetime obj (utc) or None @return: Return timestamp of current job state """ raise NotImplementedError, \ 'timestamp not implemented yet' class Job(WorkflowBase): """ Class to retrieve and expose information about a specific job. This class is intended to be instantiated inside a Workflow() object and not as a stand-alone instance. Usage:: j = Job() j.initialize('unique_wf_uuid', 3) print j.name etc """ _indent = 2 def __init__(self): WorkflowBase.__init__(self) def initialize(self, wf_id, job_id): """ This method is the initialization method that accepts the unique wf_uuid and and job_submit_seq that triggers the subclass specific queries and calculations that pulls job information from the back end. 
        The wf_id is represented in the .bp logs as "wf.id".
        The job_id is represented as "job.id".

        @type wf_id: string
        @param wf_id: the unique wf_uuid as defined by Pegasus
        @type job_id: integer
        @param job_id: the sequence number as defined by tailstatd
        """
        raise NotImplementedError, \
            'initialize not yet implemented'

    @property
    def job_submit_seq(self):
        """
        Return job_submit_seq of current job (an input arg).

        @rtype: integer
        @return: Return job_submit_seq of current job
        """
        raise NotImplementedError, \
            'job_submit_seq not yet implemented'

    @property
    def name(self):
        """
        Return the job name from the storage backend.

        @rtype: string
        @return: Return job name
        """
        raise NotImplementedError, \
            'name not yet implemented'

    @property
    def host(self):
        """
        Return job host information from storage backend.

        @rtype: Host object instance
        @return: Return a host object with host info for current job.
        """
        raise NotImplementedError, \
            'host not yet implemented'

    @property
    def condor_id(self):
        """
        Return the condor_id from the storage backend.

        @rtype: string (looks like a float however)
        @return: Return job condor_id
        """
        raise NotImplementedError, \
            'condor_id not yet implemented'

    @property
    def jobtype(self):
        """
        Return jobtype from the storage backend.

        @rtype: string
        @return: Return jobtype
        """
        raise NotImplementedError, \
            'jobtype not yet implemented'

    @property
    def clustered(self):
        """
        Return the clustered boolean flag from the storage backend.
        This may need to be derived depending on how the backend
        implementation does/not store this value.

        @rtype: boolean
        @return: Return True or False depending on if the job
            is clustered or not.
        """
        raise NotImplementedError, \
            'clustered not yet implemented'

    @property
    def site_name(self):
        """
        Return the site name from the storage backend.

        @rtype: string
        @return: Return site_name for current job
        """
        raise NotImplementedError, \
            'site_name not yet implemented'

    @property
    def remote_user(self):
        """
        Return the remote user of the current job from the storage
        backend.

        @rtype: string
        @return: Return remote_user for current job.
        """
        raise NotImplementedError, \
            'remote_user not yet implemented'

    @property
    def remote_working_dir(self):
        """
        Return the remote working directory of the current job from
        the storage backend.

        @rtype: string
        @return: Return remote working directory of current job.
        """
        raise NotImplementedError, \
            'remote_working_dir not yet implemented'

    @property
    def cluster_start_time(self):
        """
        Return the job cluster start time as a python datetime object (utc)
        if it exists or None if it does not.  Not all jobs will have
        this value.

        @rtype: python datetime obj (utc) or None
        @return: Return job cluster start time.
        """
        raise NotImplementedError, \
            'cluster_start_time not yet implemented'

    @property
    def cluster_duration(self):
        """
        Return the job cluster duration from the storage backend as
        a float or None if this value is not associated with the
        current job.  Not all jobs will have this value.

        @rtype: float (from db)
        @return: Return cluster duration.
        """
        raise NotImplementedError, \
            'cluster_duration not yet implemented'

    @property
    def tasks(self):
        """
        Returns a list of the tasks associated with this job
        object.  This property is a prime candidate for lazy eval
        as there is no need to query the backend for this information
        if the user is only looking at superficial information about
        the job - ie: its current state, name, etc.
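        A sketch of the intended lazy access pattern (``SomeJobImpl``
        is a hypothetical concrete subclass; only the property names
        are defined by this interface)::

            j = SomeJobImpl()
            j.initialize('unique_wf_uuid', 3)
            print j.name                # superficial info - no task query needed
            for t in j.tasks:           # first access may hit the backend
                print t.transformation, t.duration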
        @rtype: list of Task objects
        @return: List of task objects associated with current job
        """
        raise NotImplementedError, \
            'tasks not yet implemented'

    @property
    def is_restart(self):
        """
        Return a boolean flag indicating whether or not this current
        job is a "restart".  This value will be derived from backend
        information as appropriate.

        @rtype: boolean
        @return: Return True or False if the job is a restart or not.
        """
        raise NotImplementedError, \
            'is_restart not yet implemented'

    @property
    def is_success(self):
        """
        Return a boolean flag indicating whether or not this current
        job was successful.  This value will be derived from backend
        information as appropriate.

        @rtype: boolean
        @return: Return True or False if the job was successful.
        """
        raise NotImplementedError, \
            'is_success not yet implemented'

    @property
    def is_failure(self):
        """
        Return a boolean flag indicating whether or not this current
        job has failed.  This value will be derived from backend
        information as appropriate.

        @rtype: boolean
        @return: Return True or False if the job has failed.
        """
        raise NotImplementedError, \
            'is_failure not yet implemented'

    @property
    def current_state(self):
        """
        Return the current state of this job.  This property pretty
        much requires lazy evaluation every access rather than
        attribute caching.  A single job moves through multiple
        jobstates and this property should return the current state
        of the running job when accessed.

        In the event that there is not yet a jobstate logged for
        this job, the Jobstate instance will have its properties
        "state" and "timestamp" set to None.

        @rtype: Jobstate object instance
        @return: Return the current/last jobstate event.
        """
        raise NotImplementedError, \
            'current_state not yet implemented'

    @property
    def all_jobstates(self):
        """
        Return a list of Jobstate object instances representing
        the states that the job has moved through.  The list should
        be ordered by the order of the different jobstate submissions.
        In the event that there are no logged jobstates an empty
        list should be returned.

        This property may do light weight attribute caching, but
        the current jobstate should still be lazily evaluated and
        the list updated if need be.

        @rtype: List of Jobstate object instances
        @return: Returns a list with all the jobstates this job
            has moved through.
        """
        raise NotImplementedError, \
            'all_jobstates not yet implemented'

    @property
    def submit_time(self):
        """
        Return the timestamp of when this job was submitted.

        @rtype: python datetime obj (utc) or None
        @return: Return the submit time of this job
        """
        raise NotImplementedError, \
            'submit_time not yet implemented'

    @property
    def elapsed_time(self):
        """
        Return the elapsed time of this job.  Calculated as the
        delta between the submit time and the current/last jobstate
        timestamp.

        @rtype: python datetime.timedelta or None
        @return: Return the elapsed time of this job
        """
        raise NotImplementedError, \
            'elapsed_time not yet implemented'

    @property
    def edge_parents(self):
        """
        Return a list of job objects for the parent job edges for
        this current job object.

        The job objects returned by this property will NOT contain
        additional edge information (ie: this method will return an
        empty list) to avoid a recursive situation.

        @rtype: list containing Job objects
        @return: Return the parent edge Job objects.
        """
        raise NotImplementedError, \
            'edge_parents not yet implemented'

    @property
    def edge_children(self):
        """
        Return a list of job objects for the child
        job edges for this current job object.
The job objects returned by this property will NOT contain additional edge information (ie: this method will return an empty list) to avoid a recursive situation. @rtype: list containing Job objects @return: Return the child edge Job objects. """ raise NotImplementedError, \ 'edge_children not yet implemented' class Jobstate(WorkflowBase): """ A small class that returns jobstate information. Intended to be instantiated by a call to job.current_state. Is not cached so multiple calls will return the latest information. Usage:: js = Jobstate() js.initialize('unique_wf_id', 3) print js.state, js.timestamp etc """ _indent = 3 def __init__(self): WorkflowBase.__init__(self) def initialize(self, wf_id, job_id): """ This method is the initialization method that accepts the unique wf_uuid and and job_submit_seq that triggers the subclass specific queries and calculations that pulls host information from the back end. The wf_id is represented in the .bp logs as "wf.id". The job_id is represented as "job.id". @type wf_id: string @param wf_id: the unique wf_uuid as defined by Pegasus @type job_id: integer @param job_id: the sequence number as defined by tailstatd """ raise NotImplementedError, \ 'initialize not yet implemented' @property def state(self): """ Return the current jobstate state. Might be none if there is no state information logged yet. @rtype: string or None @return: Return current job state """ raise NotImplementedError, \ 'state not implemented yet' @property def timestamp(self): """ Return the timestampe of the current job state. Might be none if there is no state information logged yet. @rtype: python datetime obj (utc) or None @return: Return timestamp of current job state """ raise NotImplementedError, \ 'timestamp not implemented yet' class Host(WorkflowBase): """ A straightforward class that contains host information about a job. This is intended to be instantiated inside a Job() object and not as a standalone instance. Usage:: h = Host() h.initialize('unique_wf_uuid', 3) print h.site_name, h.hostname etc. """ _indent = 3 def __init__(self): WorkflowBase.__init__(self) def initialize(self, wf_id, job_id): """ This method is the initialization method that accepts the unique wf_uuid and and job_submit_seq that triggers the subclass specific queries and calculations that pulls host information from the back end. The wf_id is represented in the .bp logs as "wf.id". The job_id is represented as "job.id". @type wf_id: string @param wf_id: the unique wf_uuid as defined by Pegasus @type job_id: integer @param job_id: the sequence number as defined by tailstatd """ raise NotImplementedError, \ 'initialize not yet implemented' @property def site_name(self): """ Return the site name associated with this host. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return host site name or None """ raise NotImplementedError, \ 'site_name not yet implemented' @property def hostname(self): """ Return the host name associated with this host. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return hostname or None """ raise NotImplementedError, \ 'hostname not yet implemented' @property def ip_address(self): """ Return the ip address associated with this host. Might be None if a host has not been associated with a particular job at time of calling. 
@rtype: string or None @return: Return host ip address or None """ raise NotImplementedError, \ 'ip_address not yet implemented' @property def uname(self): """ Return the uname information of this host machine. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return host uname or None. """ raise NotImplementedError, \ 'uname not yet implemented' @property def total_ram(self): """ Return the total ram of this host machine. Might be None if a host has not been associated with a particular job at time of calling. @rtype: integer or None @return: Return host RAM or None """ raise NotImplementedError, \ 'total_ram not yet implemented' class Task(WorkflowBase): """ Class to expose information about a task associated with a particular job. This is intended to be instantiated inside of a Job() object and not as a stand alone instance. Usage:: t = Task() t.initialize('unique_wf_uuid', 3, 1) print t.start_time, t.duration etc """ _indent = 3 def __init__(self): WorkflowBase.__init__(self) def initialize(self, wf_id, job_id, task_id): """ This method is the initialization method that accepts the unique wf_uuid, job_submit_seq and task_submit_seq that triggers the subclass specific queries and calculations that pulls task information from the back end. The wf_id is represented in the .bp logs as "wf.id". The job_id is represented as "job.id". The task_id is represented as "task.id". @type wf_id: string @param wf_id: the unique wf_uuid as defined by Pegasus @type job_id: integer @param job_id: the sequence number as defined by tailstatd @type task_id: integer @param task_id: the sequence number as defined by tailstatd """ raise NotImplementedError, \ 'initialize not yet implemented' @property def task_submit_seq(self): """ Return the task submit sequence number from the storage backend as an integer. @rtype: int @return: submit sequence number """ raise NotImplementedError, \ 'task_submit_seq not yet implemented' @property def start_time(self): """ Return start time of this task from the storage backend as a python datetime object (utc). @rtype: python datetime obj (utc) @return: Return task start time """ raise NotImplementedError, \ 'start_time not yet implemented' @property def duration(self): """ Return duration of this task from the storage backend as a float. @rtype: float (from db) @return: Return the duration of this task """ raise NotImplementedError, \ 'duration not yet implemented' @property def exitcode(self): """ Return the exitcode of this task from the storage backend as an integer. @rtype: integer @return: Return the task exitcode """ raise NotImplementedError, \ 'exitcode not yet implemented' @property def transformation(self): """ Return the transformation type of this task from the storage backend. @rtype: string @return: Return task transformation """ raise NotImplementedError, \ 'transformation not yet implemented' @property def executable(self): """ Return the executable invoked by this task from the storage backend. @rtype: string @return: Return the task executable """ raise NotImplementedError, \ 'executable not yet implemented' @property def arguments(self): """ Return the task args from the storage backend. @rtype: string @return: Return the task arguments """ raise NotImplementedError, \ 'arguments not yet implemented' # Base class for discovery/wf_uuid querying. class Discovery(object): def __init__(self, connectionInfo=None): """ Initialization method. 
The manditory argument connectionInfo will be defined as appropriate in a subclass (string, dict, etc), and connect to the appropriate back end. """ pass def fetch_all(self): """ Void method that will return a list of workflow uuids from the back end. @rtype: list @return: Returns a list of wf_uuid strings. """ raise NotImplementedError, \ 'fetch_all not yet implemented' def time_threshold(self, startTime=None, endTime=None): """ Method to return a list of wf_uuid strings that were submitted within a certain timeframe. The startTime arg should represent the time floor and endTime the time ceiling. If both args are supplied return workflows that were exectued between the two. If only one arg is supplied, use that one as the appropriate floor or ceiling. This is based on the workflowstate start event occurring after the startTime and/or before the endTime. @type startTime: python datetime obj (utc) @param startTime: The time "floor" to bound query. @type endTime: python datetime obj (utc) @param endTime: The time "ceiling" to bound query. @rtype: list @return: Returns a list of wf_uuid strings. """ raise NotImplementedError, \ 'time_threshold not yet implemented' pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/workflow/__init__.py0000644000175000017500000000000011757531137027354 0ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/workflow/sql_alchemy.py0000644000175000017500000013515211757531137030137 0ustar ryngerynge""" SQLAlchemy interface to the Stampede backend. Named sql_alchemy to avoid import errors with the library proper. """ __rcsid__ = "$Id: sql_alchemy.py 27235 2011-02-24 16:10:26Z mgoode $" __author__ = "Monte Goode MMGoode@lbl.gov" import calendar import datetime import time from netlogger.analysis.schema.stampede_schema import initializeToPegasusDB, func, orm, \ Workflow as WorkflowTable, Workflowstate as WorkflowstateTable, Job as JobTable, \ Jobstate as JobstateTable, Host as HostTable, Task as TaskTable, Edge as EdgeTable from netlogger.analysis.modules._base import SQLAlchemyInit from netlogger.analysis.workflow._base import Workflow as BaseWorkflow, \ Job as BaseJob, Host as BaseHost, Task as BaseTask, Jobstate as BaseJobstate, \ Discovery as BaseDiscovery, Workflowstate as BaseWorkflowstate from sqlalchemy import or_ debug = False def as_string(s): if s is None: return None else: return s.encode('ascii') def as_integer(i): if i is None: return None else: return int(i) def as_float(f): if f is None: return None else: return float(f) class Workflow(BaseWorkflow, SQLAlchemyInit): """ Top level workflow class that exposes information about a specific workflow and the associated jobs/etc. 
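    Note that, unlike the abstract base class, this implementation must
    be constructed with a SQLAlchemy connection string (``connString``);
    constructing it with no argument raises ValueError.  An illustrative
    connection string, borrowed from the test code at the bottom of this
    module::

        db_conn = 'sqlite:///pegasusMontage.db'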
    Usage::

        w = Workflow(db_conn)
        w.initialize('unique_wf_uuid')
        print w.timestamp, w.dax_label
        etc
    """
    def __init__(self, connString=None):
        BaseWorkflow.__init__(self)
        if connString is None:
            raise ValueError("connString is required")
        SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB)

        # These attrs come straight from the workflow table
        self._wf_id = None
        self._wf_uuid = None
        self._dax_label = None
        self._timestamp = None
        self._submit_hostname = None
        self._submit_dir = None
        self._planner_arguments = None
        self._user = None
        self._grid_dn = None
        self._planner_version = None
        self._parent_workflow_id = None
        self._parent_wf_uuid = None

        # State information
        self._startstate = []
        self._endstate = []

        # Job information
        self._jobs = []
        self._jobtypes_executed = {}

        # Sub-workflow information
        self._sub_wf_uuid = None

        # A debug flag that can be manually switched to turn off
        # generation of job edges.  Just used in development
        # to quiet down the amount of output generated.
        # ie:
        # w = Workflow()
        # w._edges = False
        # w.initialize(wf_uuid)
        self._edges = True

    def initialize(self, wf_id):
        """
        This method is the initialization method that accepts the
        unique wf_uuid and triggers the subclass specific queries
        and calculations that pull the workflow information from
        the back end.

        The wf_id is represented in the .bp logs as "wf.id".

        @type wf_id: string
        @param wf_id: the unique wf_uuid as defined by Pegasus
        """
        self._wf_uuid = wf_id
        query = self.session.query(WorkflowTable).filter(WorkflowTable.wf_uuid == self._wf_uuid)
        try:
            wf = query.one()
            self._wf_id = wf.wf_id
            self._dax_label = as_string(wf.dax_label)
            self._timestamp = wf.timestamp
            self._submit_hostname = as_string(wf.submit_hostname)
            self._submit_dir = as_string(wf.submit_dir)
            self._planner_arguments = as_string(wf.planner_arguments)
            self._user = as_string(wf.user)
            self._grid_dn = as_string(wf.grid_dn)
            self._planner_version = as_string(wf.planner_version)
            self._parent_workflow_id = wf.parent_workflow_id
        except orm.exc.MultipleResultsFound, e:
            self.log.error('initialize',
                msg='Multiple wf_id results for wf_uuid %s : %s' % (self._wf_uuid, e))
            return
        pass

    def _check_states(self):
        if not self._startstate:
            state_cache = []
            query = self.session.query(WorkflowstateTable.state, WorkflowstateTable.timestamp).filter(WorkflowstateTable.wf_id == self._wf_id).order_by(WorkflowstateTable.timestamp)
            for row in query.all():
                wfs = Workflowstate()
                wfs.initialize(row.state, row.timestamp)
                # seed with the first start event
                if wfs.state == 'start':
                    self._startstate.append(wfs)
                    state_cache.append(wfs)
                    continue
                if wfs.state.startswith('start'):
                    if state_cache[-1].state.startswith('start'):
                        self.log.warn('_check_states',
                            msg='Workflow state missing end event - padding list.')
                        self._endstate.append(None)
                    self._startstate.append(wfs)
                    state_cache.append(wfs)
                elif wfs.state.startswith('end'):
                    if state_cache[-1].state.startswith('end'):
                        self.log.warn('_check_states',
                            msg='Workflow state missing start event - padding list.')
                        self._startstate.append(None)
                    self._endstate.append(wfs)
                    state_cache.append(wfs)
                else:
                    self.log.error('_check_states',
                        msg='Bad state attribute: %s' % wfs.state)
        pass

    @property
    def wf_uuid(self):
        """
        Return the wf_uuid for this workflow.

        @rtype: string
        @return: The wf_uuid of the current workflow
        """
        return self._wf_uuid

    @property
    def dax_label(self):
        """
        Return dax_label from storage backend.

        @rtype: string
        @return: The dax_label of the current workflow.
        """
        return self._dax_label

    @property
    def timestamp(self):
        """
        Return timestamp from storage backend.
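        The raw value stored in the workflow table is an epoch float;
        it is converted on access with
        ``datetime.datetime.utcfromtimestamp``, exactly as the property
        body below does, e.g.::

            >>> datetime.datetime.utcfromtimestamp(1300000000.0)
            datetime.datetime(2011, 3, 13, 7, 6, 40)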
@rtype: python datetime obj (utc) @return: The workflow timestamp """ return datetime.datetime.utcfromtimestamp(self._timestamp) @property def submit_hostname(self): """ Return submit_hostname from storage backend. @rtype: string @return: The workflow submit host """ return self._submit_hostname @property def submit_dir(self): """ Return submid_dir from storage backend. @rtype: string @return: The workflow submit directory """ return self._submit_dir @property def planner_arguments(self): """ Return planner_arguments from storage backend. @rtype: string @return: The workflow planner arguments """ return self._planner_arguments @property def user(self): """ Return user from storage backend. @rtype: string @return: The workflow user """ return self._user @property def grid_dn(self): """ Return grid_dn from storage backend. @rtype: string @return: The grid DN of the workflow """ return self._grid_dn @property def planner_version(self): """ Return planner_version from storage backend. @rtype: string @return: The planner version of the workflow """ return self._planner_version @property def parent_wf_uuid(self): """ Takes the parent_workflow_id column from the current Workflow which is a reference to a Primary Key and return the string wf_uuid of the workflow it references, otherwise, return None. @rtype: string @return: The parent wf_uuid if it exists """ if self._parent_workflow_id is not None: if self._parent_wf_uuid is None: query = self.session.query(WorkflowTable.wf_uuid).filter(WorkflowTable.wf_id == self._parent_workflow_id) try: self._parent_wf_uuid = query.one().wf_uuid except orm.exc.MultipleResultsFound, e: self.log.error('parent_wf_uuid', msg='Multiple wf_uuid results for parent_workflow_id %s : %s' % (self._parent_workflow_id, e)) return return self._parent_wf_uuid @property def sub_wf_uuids(self): """ Returns a list of the wf_uuids of any sub-workflows associated with the current workflow object. Returned in the order in which they are entered in the workflow table. If no sub-workflows are found, return an empty list. @rtype: List of strings @return: The wf_uuids of any sub-workflows. """ if self._sub_wf_uuid == None: self._sub_wf_uuid = [] query = self.session.query(WorkflowTable.wf_uuid).filter(WorkflowTable.parent_workflow_id == self._wf_id).order_by(WorkflowTable.wf_id) for row in query.all(): self._sub_wf_uuid.append(row.wf_uuid) return self._sub_wf_uuid @property def start_events(self): """ Return a list of Workflowstate object instances representing the re/start events. The list should ordered by timestamp. In the event that there are no logged workflow states an empty list should be returned. In the case that there is a dropped event (ie: no matching end event to a start event or vice versa), the missing event will be padded as a None. @rtype: List of Workflowstate object instances (or None) @return: Returns a list with workflow start events. """ self._check_states() return self._startstate @property def end_events(self): """ Return a list of Workflowstate object instances representing the end events. The list should ordered by timestamp. In the event that there are no logged workflow states an empty list should be returned. In the case that there is a dropped event (ie: no matching end event to a start event or vice versa), the missing event will be padded as a None. @rtype: List of Workflowstate object instances @return: Returns a list with workflow end events. 
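        An illustrative pairing of the two event lists (``w`` is assumed
        to be an initialized Workflow; recall from above that dropped
        events are padded with None)::

            for s, e in zip(w.start_events, w.end_events):
                if s is not None and e is not None:
                    print s.timestamp, '->', e.timestamp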
""" self._check_states() return self._endstate @property def is_running(self): """ Derived boolean flag indicating if the workflow is currently running. Derived in a backend-appropriate way. @rtype: boolean @return: Indicates if the workflow is running. """ self._check_states() if ((len(self._startstate) - len(self._endstate)) > 0): return True else: return False pass @property def is_restarted(self): """ Derived boolean flag indicating if this workflow has been retstarted. Derived in a backend-appropriate way. @rtype: boolean @return: Indicates if the workflow has been restarted. """ self._check_states() if len(self._startstate) > 1: return True else: return False @property def restart_count(self): """ Returns an integer reflecting restart count. Derived in a backend-appropriate way. @rtype: integer @return: Number of workflow restarts. """ self._check_states() return len(self._startstate) - 1 @property def total_time(self): """ Returns the total runtime of the workflow. This is defined as the delta between either the first start event and last end event, or if the job is still running, between the first start event and current epoch UTC time. @rtype: python datetime.timedelta object or None @return: The total time of the workflow. """ self._check_states() if len(self._startstate) == 0: return None if self._endstate[-1] == None or self._startstate[0] == None: return None if len(self._startstate) == len(self._endstate): return self._endstate[-1].timestamp - self._startstate[0].timestamp else: return datetime.datetime.utcfromtimestamp(time.time()) - self._startstate[0].timestamp @property def jobs(self): """ Returns a list of the jobs associated with this workflow object. This property is a prime candidate for lazy eval as there is no need to query the backend for this information if the user is only looking at superficial information about the workflow - ie: if it is running, how long it took, etc. @rtype: list of Job objects @return: List of job objects associated with current wf """ if self._jobs: query = self.session.query(func.max(JobTable.job_id)) row = query.first() if self._jobs[-1]._job_id != row[0]: self._jobs = [] if not self._jobs: query = self.session.query(JobTable.job_id).filter(JobTable.wf_id == self._wf_id).order_by(JobTable.job_submit_seq) for row in query.all(): j = Job(self.session) j._sql_initialize(row[0], find_edges=self._edges) self._jobs.append(j) if debug: break return self._jobs @property def total_jobs_executed(self): """ Return the number of jobs that were executed as an integer value. @rtype: integer @return: Number of jobs executed """ return len(self.jobs) @property def successful_jobs(self): """ Return the number of jobs that executed successfully as an integer value. @rtype: integer @return: Number of sucessfully executed jobs """ # XXX: note - this is not fully accurate at the moment. # Mostly here to improve later and act as example code. # This can only give a count at the moment but the value # is subject to change. Logic in the job objects will # need to be tightened up later when it is possible. success = 0 for j in self.jobs: if j.is_success: success += 1 return success @property def failed_jobs(self): """ Return the number of jobs that failed as an integer value. @rtype: integer @return: Number of failed jobs """ # XXX: note - this is not fully accurate at the moment. # Mostly here to improve later and act as example code. # This can only give a count at the moment but the value # is subject to change. 
Logic in the job objects will # need to be tightened up later when it is possible. failures = 0 for j in self.jobs: if j.is_failure: failures += 1 return failures @property def restarted_jobs(self): """ Return the number of jobs that were restarted. @rtype: integer @return: Number of restarted jobs """ restarts = 0 for j in self.jobs: if j.is_restart: restarts += 1 return restarts @property def submitted_jobs(self): """ Return the number of jobs that were submitted. @rtype: integer @return: Number of submitted jobs """ submits = 0 for j in self.jobs: if j.submit_time: submits += 1 return submits @property def jobtypes_executed(self): """ Returns a dictionary of the various jobtypes that are executed in the current workflow and a count of how many of each type. Example: {'create dir': 1, 'compute': 105} @rtype: dict - string keys, integer values @return: A dictionary of a count of the jobtypes that were executed in the current workflow. """ for j in self.jobs: if not self._jobtypes_executed.has_key(j.jobtype): self._jobtypes_executed[j.jobtype] = 1 else: self._jobtypes_executed[j.jobtype] += 1 return self._jobtypes_executed class Workflowstate(BaseWorkflowstate): """ Class to expose information about a specific workflow event. This is a simple class to expose state and timestamp information as class attributes. Usage:: ws = Workflowstate() ws.initialize(state, timestamp) print ws.state etc """ _indent = 2 def __init__(self): BaseWorkflowstate.__init__(self) self._state = None self._timestamp = None def initialize(self, state, timestamp): """ This method is the initialization method that accepts the state and timestamp of a given workflow state event. @type state: string @param state: the jobstate entry as defined by Pegasus. @type timestamp: float @param timestamp: the epoch timestamp as reported by Pegasus. """ self._state = as_string(state) self._timestamp = timestamp @property def state(self): """ Return the current workflowstate state. Might be none if there is no state information logged yet. @rtype: string or None @return: Return current job state """ return self._state @property def timestamp(self): """ Return the timestamp of the current workflow state. Might be none if there is no state information logged yet. @rtype: python datetime obj (utc) or None @return: Return timestamp of current job state """ if self._timestamp: return datetime.datetime.utcfromtimestamp(self._timestamp) else: return None class Job(BaseJob): """ Class to retrieve and expose information about a specific job. This class is intended to be instantiated inside a Workflow() object and not as a stand-alone instance. 
Usage:: j = Job() j._sql_initialize(job_id) (PK from the DB) print j.name etc """ def __init__(self, db_session): BaseJob.__init__(self) self.session = db_session self._job_id = None self._wf_id = None self._job_submit_seq = None self._name = None self._host_id = None self._condor_id = None self._jobtype = None self._clustered = False self._site_name = None self._remote_user = None self._remote_working_dir = None self._cluster_start_time = None self._cluster_duration = None # nested objects self._host = None self._tasks = [] # derived state values self._is_restart = None self._is_success = None self._is_failure = None # cached values self._submit_time = None self._current_js_ss = None self._jobstates = [] self._parent_edge_ids = [] self._child_edge_ids = [] self._parent_edges = [] self._child_edges = [] def _sql_initialize(self, job_id, find_edges=True): """ This private method is the initialization method that accepts the sql DB job_id primary key from the job table to use for initialization of the job object. @type job_id: integer @param job_id: The job_id primary key from the sql Jobs table. """ self._job_id = job_id query = self.session.query(JobTable).filter(JobTable.job_id == self._job_id) try: j = query.one() self._job_id = j.job_id self._wf_id = j.wf_id self._job_submit_seq = as_integer(j.job_submit_seq) self._name = as_string(j.name) self._host_id = j.host_id self._condor_id = as_string(j.condor_id) self._jobtype = as_string(j.jobtype) self._clustered = j.clustered self._site_name = as_string(j.site_name) self._remote_user = as_string(j.remote_user) self._remote_working_dir = as_string(j.remote_working_dir) self._cluster_start_time = j.cluster_start_time self._cluster_duration = as_float(j.cluster_duration) except orm.exc.MultipleResultsFound, e: # this probably will NEVER happen and if it does # then there is something screwy with the jobs table self.log.error('_sql_initialize', msg='Multiple job results for job_id: %s : %s' % (self._job_id, e)) return if find_edges: self._find_edge_id() def _find_edge_id(self): """ Private method to get the job_ids of the parent and child edge jobs. """ query = self.session.query(EdgeTable.parent_id).filter(EdgeTable.child_id == self._job_id) for row in query.all(): self._parent_edge_ids.append(row[0]) query = self.session.query(EdgeTable.child_id).filter(EdgeTable.parent_id == self._job_id) for row in query.all(): self._child_edge_ids.append(row[0]) pass def _get_current_js_ss(self): """ This private method hits the jobstate table to get the most recent jobstate_submit_seq for a given job. This is used by both the properties that return jobstate information. """ max_id = self.session.query(func.max(JobstateTable.jobstate_submit_seq).label('max_id')).filter(JobstateTable.job_id == self._job_id) row = max_id.first() if row: self._current_js_ss = row[0] @property def job_submit_seq(self): """ Return job_submit_seq of current job (an input arg). @rtype: integer @return: Return job_submit_seq of current job """ return self._job_submit_seq @property def name(self): """ Return the job name from the storage backend. @rtype: string @return: Return job name """ return self._name @property def host(self): """ Return job host information from storage backend. @rtype: Host object instance @return: Return a host object with host info for current job. 
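        A short illustrative sketch (``j`` is assumed to be an
        initialized Job; the Host properties may all be None if no host
        has been associated with the job yet)::

            h = j.host
            print h.hostname, h.ip_address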
""" if self._host is None: self._host = Host(self.session) if self._host_id is not None: self._host._sql_initialize(self._host_id) return self._host @property def condor_id(self): """ Return the condor_id from the storage backend. @rtype: string (looks like a float however) @return: Return job condor_id """ return self._condor_id @property def jobtype(self): """ Return jobtype from the storage backend. @rtype: string @return: Return jobtype """ return self._jobtype @property def clustered(self): """ Return the clustered boolean flag from the storage backend. This may need to be derived depending on how the backend implementation does/not store this value. @rtype: boolean @return: Return True or False depending on if the job is clustered or not. """ return self._clustered @property def site_name(self): """ Return the site name from the storage backend. @rtype: string @return: Return site_name for current job """ return self._site_name @property def remote_user(self): """ Return the remote use of the current job from the storage backend. @rtype: string @return: Return remote_user for current job. """ return self._remote_user @property def remote_working_dir(self): """ Return the remote working directory of the current job from the storage backend. @rtype: string @return: """ return self._remote_working_dir @property def cluster_start_time(self): """ Return the job cluster start time as a python datetime object (utc) if it exists or None if it does not. Not all jobs will have this value. @rtype: python datetime obj (utc) or None @return: Return job cluster start time. """ if self._cluster_start_time is not None: return datetime.datetime.utcfromtimestamp(self._cluster_start_time) else: return None @property def cluster_duration(self): """ Return the job cluster duration from the storage backend as a float or None if this value is not assocaited with the current job. Not all j will have this value. @rtype: float (from db) @return: Return cluster duration. """ return self._cluster_duration @property def tasks(self): """ Returns a list of the tasks associated with this job object. This property is a prime candidate for lazy eval as there is no need to query the backend for this information if the user is only looking at superficial information about the job - ie: its current state, name, etc. @rtype: list of Task objects @return: List of task objects associated with current job """ if not self._tasks: query = self.session.query(TaskTable.task_id).filter(TaskTable.job_id == self._job_id).order_by(TaskTable.task_id) for row in query.all(): t = Task(self.session) t._sql_initialize(row[0]) self._tasks.append(t) if debug: break return self._tasks @property def is_restart(self): """ Return a boolean flag indicating whether or not this curent job is a "restart". This value will be derived from backend information as appropriate. @rtype: boolean @return: Return True or False if the job is a restart or not. """ if self._is_restart is None: query = self.session.query(JobTable.job_id).filter(JobTable.wf_id == self._wf_id).filter(JobTable.name == self._name).filter(JobTable.job_submit_seq < self._job_submit_seq) if query.count() > 1: self._is_restart = True else: self._is_restart = False return self._is_restart @property def is_success(self): """ Return a boolean flag indicating whether or not this curent job was successful. This value will be derived from backend information as appropriate. 
        @rtype: boolean
        @return: Return True or False if the job was successful.
        """
        # XXX: note - this is not fully accurate at the moment.
        # Mostly here to improve later and act as example code.
        if self.is_failure:
            return False
        scount = self.session.query(JobstateTable.state).filter(JobstateTable.state.like('%_SUCCESS')).filter(JobstateTable.job_id == self._job_id).count()
        if scount > 0:
            return True
        return False

    @property
    def is_failure(self):
        """
        Return a boolean flag indicating whether or not this current
        job has failed.  This value will be derived from backend
        information as appropriate.

        @rtype: boolean
        @return: Return True or False if the job has failed.
        """
        # XXX: note - this is not fully accurate at the moment.
        # Mostly here to improve later and act as example code.
        fcount = self.session.query(JobstateTable.state).filter(or_(JobstateTable.state.like('%_FAILURE'), JobstateTable.state.like('%_FAILED'))).filter(JobstateTable.job_id == self._job_id).count()
        if fcount > 0:
            return True
        return False

    @property
    def current_state(self):
        """
        Return the current state of this job.  This property pretty
        much requires lazy evaluation every access rather than
        attribute caching.  A single job moves through multiple
        jobstates and this property should return the current state
        of the running job when accessed.

        In the event that there is not yet a jobstate logged for
        this job, the Jobstate instance will have its properties
        "state" and "timestamp" set to None.

        @rtype: Jobstate object instance
        @return: Returns the current state and timestamp
        """
        self._get_current_js_ss()
        js = Jobstate(self.session)
        if self._current_js_ss:
            js._sql_initialize(self._job_id, self._current_js_ss)
        return js

    @property
    def all_jobstates(self):
        """
        Return a list of Jobstate object instances representing
        the states that the job has moved through.  The list should
        be ordered by the order of the different jobstate submissions.
        In the event that there are no logged jobstates an empty
        list should be returned.

        This property may do light weight attribute caching, but
        the current jobstate should still be lazily evaluated and
        the list updated if need be.

        @rtype: List of Jobstate object instances
        @return: Returns a list with all the jobstates this job
            has moved through.
        """
        self._get_current_js_ss()
        if self._current_js_ss == None:
            # no current submit_seq so do nothing
            pass
        elif not self._jobstates or self._jobstates[-1]._jss != self._current_js_ss:
            # either initialize or refresh if new states have been logged.
            self._jobstates = []
            query = self.session.query(JobstateTable.jobstate_submit_seq).filter(JobstateTable.job_id == self._job_id).order_by(JobstateTable.jobstate_submit_seq)
            results = query.all()
            for row in results:
                js = Jobstate(self.session)
                js._sql_initialize(self._job_id, row.jobstate_submit_seq)
                self._jobstates.append(js)
                if debug:
                    break
        return self._jobstates

    @property
    def submit_time(self):
        """
        Return the timestamp of when this job was submitted.

        @rtype: python datetime obj (utc) or None
        @return: Return the submit time of this job
        """
        if self._submit_time is None:
            query = self.session.query(JobstateTable.timestamp).filter(JobstateTable.job_id == self._job_id).filter(JobstateTable.state == 'SUBMIT')
            state = query.first()
            if state is None:
                return None  # a rare state of unlikely timing
            else:
                self._submit_time = state.timestamp
        return datetime.datetime.utcfromtimestamp(self._submit_time)

    @property
    def elapsed_time(self):
        """
        Return the elapsed time of this job.  Calculated as the
        delta between the submit time and the current/last jobstate
        timestamp.
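        Conceptually, and as the property body below computes it::

            elapsed = j.current_state.timestamp - j.submit_time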
        @rtype: python datetime.timedelta object or None
        @return: Return the elapsed time of this job
        """
        if self.submit_time is not None:
            return self.current_state.timestamp - self.submit_time
        else:
            return None

    @property
    def edge_parents(self):
        """
        Return a list of job objects for the parent job edges for
        this current job object.

        The job objects returned by this property will NOT contain
        additional edge information (ie: this method will return an
        empty list) to avoid a recursive situation.

        @rtype: list containing Job objects
        @return: Return the parent edge Job objects.
        """
        if self._parent_edge_ids and not self._parent_edges:
            for p_id in self._parent_edge_ids:
                j = Job(self.session)
                j._sql_initialize(p_id, find_edges=False)
                self._parent_edges.append(j)
                if debug:
                    break
        return self._parent_edges

    @property
    def edge_children(self):
        """
        Return a list of job objects for the child job edges for
        this current job object.

        The job objects returned by this property will NOT contain
        additional edge information (ie: this method will return an
        empty list) to avoid a recursive situation.

        @rtype: list containing Job objects
        @return: Return the child edge Job objects.
        """
        if self._child_edge_ids and not self._child_edges:
            for c_id in self._child_edge_ids:
                j = Job(self.session)
                j._sql_initialize(c_id, find_edges=False)
                self._child_edges.append(j)
                if debug:
                    break
        return self._child_edges

class Jobstate(BaseJobstate):
    """
    A small class that returns jobstate information.  Intended to be
    instantiated by a call to job.current_state.  Is not cached so
    multiple calls will return the latest information.

    Usage::

        js = job.current_state
        print js.state, js.timestamp
        etc.
    """
    _indent = 3
    def __init__(self, db_session):
        BaseJobstate.__init__(self)
        self.session = db_session
        self._job_id = None
        self._jss = None
        self._state = None
        self._timestamp = None

    def _sql_initialize(self, job_id, jss):
        """
        This is the private initialization method that accepts the
        job_id primary key and the jobstate_submit_seq from the
        relational implementation.

        @type job_id: integer
        @param job_id: The job_id primary key from the sql job table.
        @type jss: integer
        @param jss: The jobstate_submit_seq of the desired jobstate row.
        """
        self._job_id = job_id
        self._jss = jss
        query = self.session.query(JobstateTable).filter(JobstateTable.job_id == self._job_id).filter(JobstateTable.jobstate_submit_seq == self._jss)
        row = query.first()
        if row:
            self._state = as_string(row.state)
            self._timestamp = row.timestamp

    @property
    def state(self):
        """
        Return the current jobstate state.  Might be None if there
        is no state information logged yet.

        @rtype: string or None
        @return: Return current job state
        """
        return self._state

    @property
    def timestamp(self):
        """
        Return the timestamp of the current job state.  Might be None
        if there is no state information logged yet.

        @rtype: python datetime obj (utc) or None
        @return: Return timestamp of current job state
        """
        if self._timestamp:
            return datetime.datetime.utcfromtimestamp(self._timestamp)
        else:
            return None

class Host(BaseHost):
    """
    A straightforward class that contains host information about
    a job.  This is intended to be instantiated inside a Job()
    object and not as a standalone instance.

    Usage::

        h = Host()
        h._sql_initialize(host_id)  (PK from the db)
        print h.site_name, h.hostname
        etc.
""" def __init__(self, db_session): BaseHost.__init__(self) self.session = db_session self._host_id = None self._site_name = None self._hostname = None self._ip_address = None self._uname = None self._total_ram = None def _sql_initialize(self, host_id): """ This is the private initialization method that accepts the host_id primary key from the relational implementation. @type host_id: integer @param host_id: The host_id public key from the sql task table. """ self._host_id = host_id query = self.session.query(HostTable).filter(HostTable.host_id == self._host_id) try: h = query.one() self._site_name = as_string(h.site_name) self._hostname = as_string(h.hostname) self._ip_address = as_string(h.ip_address) self._uname = as_string(h.uname) self._total_ram = as_integer(h.total_ram) except orm.exc.MultipleResultsFound, e: self.log.error('_sql_initialize', msg='Multiple host results for host_id %s : %s' % (self._host_id, e)) return pass @property def site_name(self): """ Return the site name associated with this host. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return host site name or None """ return self._site_name @property def hostname(self): """ Return the host name associated with this host. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return hostname or None """ return self._hostname @property def ip_address(self): """ Return the ip address associated with this host. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return host ip address or None """ return self._ip_address @property def uname(self): """ Return the uname information of this host machine. Might be None if a host has not been associated with a particular job at time of calling. @rtype: string or None @return: Return host uname or None. """ return self._uname @property def total_ram(self): """ Return the total ram of this host machine. Might be None if a host has not been associated with a particular job at time of calling. @rtype: integer or None @return: Return host RAM or None """ return self._total_ram class Task(BaseTask): """ Class to expose information about a task associated with a particular job. This is intended to be instantiated inside of a Job() object and not as a stand alone instance. Usage:: t = Task() t._sql_initialize(task_id) (PK from the db) print t.start_time, t.duration etc """ def __init__(self, db_session): BaseTask.__init__(self) self.session = db_session self._task_id = None self._job_id = None self._task_submit_seq = None self._start_time = None self._duration = None self._exitcode = None self._transformation = None self._executable = None self._arguments = None def _sql_initialize(self, task_id): """ This is the private initialization method that accepts the unique task_id public key from the relational implementation. @type task_id: integer @param task_id: The task_id PK from the relational implemntation. 
""" self._task_id = task_id query = self.session.query(TaskTable).filter(TaskTable.task_id == self._task_id) try: tsk = query.one() self._job_id = tsk.job_id self._task_submit_seq = tsk.task_submit_seq self._start_time = tsk.start_time self._duration = as_float(tsk.duration) self._exitcode = as_integer(tsk.exitcode) self._transformation = as_string(tsk.transformation) self._executable = as_string(tsk.executable) self._arguments = as_string(tsk.arguments) except orm.exc.MultipleResultsFound, e: self.log.error('_sql_initialize', msg='Multiple task results for task_id %s : %s' % (self._task_id, e)) return @property def task_submit_seq(self): """ Return the task submit sequence number from the storage backend as an integer. @rtype: int @return: submit sequence number """ return self._task_submit_seq @property def start_time(self): """ Return start time of this task from the storage backend as a python datetime object (utc). @rtype: python datetime obj (utc) @return: Return task start time """ if self._start_time is not None: return datetime.datetime.utcfromtimestamp(self._start_time) else: return None @property def duration(self): """ Return duration of this task from the storage backend as a float. @rtype: float (from db) @return: Return the duration of this task """ return self._duration @property def exitcode(self): """ Return the exitcode of this task from the storage backend as an integer. @rtype: integer @return: Return the task exitcode """ return self._exitcode @property def transformation(self): """ Return the transformation type of this task from the storage backend. @rtype: string @return: Return task transformation """ return self._transformation @property def executable(self): """ Return the executable invoked by this task from the storage backend. @rtype: string @return: Return the task executable """ return self._executable @property def arguments(self): """ Return the task args from the storage backend. @rtype: string @return: Return the task arguments """ return self._arguments # Discovery class class Discovery(BaseDiscovery, SQLAlchemyInit): """ Class to facilitate pulling the wf_uuid strings from the back end to feed to the Workflow() objects. Usage:: db_conn = 'sqlite:///pegasusMontage.db' d = Discovery(db_conn) for wf_uuid in d.fetch_all(): w = Workflow(db_conn) w.initialize(wf_uuid) etc """ def __init__(self, connString): """ Initialization method. The manditory argument connectionInfo will be defined as appropriate in a subclass (string, dict, etc), and connect to the appropriate back end. """ BaseDiscovery.__init__(self) SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB) pass def fetch_all(self): """ Void method that will return a list of workflow uuids from the back end. @rtype: list @return: Returns a list of wf_uuid strings. """ query = self.session.query(WorkflowTable.wf_uuid).order_by(WorkflowTable.wf_id) ids = [] for row in query.all(): ids.append(as_string(row.wf_uuid)) return ids def time_threshold(self, startTime=None, endTime=None): """ Method to return a list of wf_uuid strings that were submitted within a certain timeframe. The startTime arg should represent the time floor and endTime the time ceiling. If both args are supplied return workflows that were exectued between the two. If only one arg is supplied, use that one as the appropriate floor or ceiling. This is based on the workflowstate start event occurring after the startTime and/or before the endTime. 
@type startTime: python datetime obj (utc) @param startTime: The time "floor" to bound query. @type endTime: python datetime obj (utc) @param endTime: The time "ceiling" to bound query. @rtype: list @return: Returns a list of wf_uuid strings. """ if not startTime and not endTime: return [] retvals = [] querystring = "self.session.query(Workflowstate.wf_id).filter(Workflowstate.state == 'start')" orderstring = '.order_by(Workflowstate.wf_id)' filterstring = '' if startTime: filterstring += ".filter(Workflowstate.timestamp > %s)" % calendar.timegm(startTime.timetuple()) if endTime: filterstring += ".filter(Workflowstate.timestamp < %s)" % calendar.timegm(endTime.timetuple()) fullquery = querystring + filterstring + orderstring query = eval(fullquery) wf_ids = [] for row in query.all(): wf_ids.append(row.wf_id) query = self.session.query(WorkflowTable.wf_uuid).filter(WorkflowTable.wf_id.in_(wf_ids)) for row in query.all(): retvals.append(as_string(row.wf_uuid)) return retvals ### XXX: remove this testing code if __name__ == '__main__': import os, calendar from netlogger.analysis.workflow.api_test import test_workflow_types os.chdir('/Users/monte/Desktop/Pegasus') debug = True db_conn = 'sqlite:///pegasusMontage.db' #db_conn = 'sqlite:///diamond.db' d = Discovery(db_conn) for uuid in d.fetch_all(): w = Workflow(db_conn) #w._edges = False w.initialize(uuid) print w #test_workflow_types(w) pass pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/__init__.py0000644000175000017500000000007311757531137025514 0ustar ryngerynge__all__ = ['datamining', 'modules', 'schema', 'workflow' ] pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/modules/0000755000175000017500000000000011757531667025063 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/modules/mongodb.py0000644000175000017500000001537411757531137027064 0ustar ryngerynge""" Load input in to a MongoDB database. See http://www.mongodb.org/ for details on MongoDB """ # Standard library from datetime import datetime import re import sys import time # Third-party import pymongo from pymongo.connection import Connection from pymongo.errors import ConnectionFailure # NetLogger from netlogger.analysis.modules._base import Analyzer as BaseAnalyzer from netlogger.analysis.modules._base import ConnectionException from netlogger import util from netlogger.nlapi import TS_FIELD, STATUS_FIELD class Analyzer(BaseAnalyzer): """Load into a collection in a MongoDB database. Parameters: - host {string,localhost*}: mongod server host - port {integer,27017*}: mongod server port - database {string,application*}: Name of database to create/use - collection {string,netlogger*}: Name of collection to create/use - user {string,anonymous*}: Name of user to authenticate as - password {string,None*}: Password to authenticate with - indices {field1,field2,...,""*}: Comma-separated list of fields to index. Add a caret ('^') before the field name to make the index "unique". Example: "ts,event,^_hash". - datetime {yes,no,yes*}: If 'yes', convert the timestamp in 'ts' into a datetime object. Otherwise, leave it as a floating-point number. - event_filter {regex,""*}: Regular expression for event name. If it doesn't match (from the start of the event name), the event will not be inserted. Attributes: - connection: MongoDB Connection instance. - database: MongoDB Database instance. - collection: MongoDB Collection instance. 
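Usage (an illustrative sketch; the host/port values and the sample event are hypothetical, and a reachable mongod instance is assumed)::

    a = Analyzer(host='localhost', port=27017,
                 database='application', collection='netlogger',
                 indices='ts,event')
    a.process({'ts': 1333035915.0, 'event': 'my.app.event'})
    a.finish()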
""" def __init__(self, host="localhost", port=27017, database='application', collection='netlogger', indices="", datetime='yes', intvals="", floatvals="", event_filter="", user="", password="", batch=0, perf=None, **kw): BaseAnalyzer.__init__(self, _validate=True, **kw) # map for converting values self._convert = { } # mongo database and collection self.db_name, self.coll_name = database, collection # connect try: self.connection = pymongo.Connection(host=host, port=port) except ConnectionFailure: raise ConnectionException("Couldn't connect to DB " "at %s:%d" % (host, port)) # create/use database, by retrieving it if self._dbg: self.log.debug("init.database_name", value=self.db_name) self.database = self.connection[self.db_name] # if authentication is on, use it if user != "": success = self.database.authenticate(user, password) if not success: raise ConnectionException("Could not authenticate to " "database=%s, collection=%s as user '%s'" % ( self.db_name, self.coll_name, user)) # create/use collection, by retrieving it if self._dbg: self.log.debug("init.collection_name", value=self.coll_name) self.collection = self.database[self.coll_name] # ensure indexes are set index_fields = indices.split(",") for field in index_fields: field = field.strip() if not field or field == "^": continue if self._dbg: self.log.debug("init.index", value=field) if field[0] == '^': unique = True field = field[1:] else: unique = False self.collection.ensure_index(field, unique=unique) # datetime flag self._datetime = util.as_bool(datetime) # Add numeric values to conversion map if intvals.strip(): self._convert.update(dict.fromkeys(intvals.split(','),int)) if floatvals.strip(): self._convert.update(dict.fromkeys(floatvals.split(','),float)) # filter, if given self._event_re = None if event_filter: self._event_re = re.compile(event_filter) # batch, if requested if batch: self._batch = int(batch) self._curbatch = [ ] self._batchlen = 0 else: self._batch = 0 def fix_key_formats(self, data): """Make sure key names are not illegal * cannot have a '.' anywhere will replace with '_' * cannot have $ as first symbol will remove """ fixed_data = { } for key, value in data.items(): key = key.replace('.', '_') if key[0] == '$': key = key[1:] fixed_data[key] = value return fixed_data def process(self, data): """Insert 'data' into Mongo collection. 
""" if self._dbg: self.log.debug("process_data.start") # Apply filter, if there is one if self._event_re is not None: try: m = self._event_re.match(data['event']) except KeyError: raise ValueError("no 'event' field") if m is None: if self._dbg: self.log.debug("process_data.end", msg="filtered out") return # fix keys data = self.fix_key_formats(data) # try to set status to int if STATUS_FIELD in data: try: data[STATUS_FIELD] = int(data[STATUS_FIELD]) except ValueError: self.log.warn("bad_status", value=data[STATUS_FIELD], msg="not integer") # optionally convert timestamp to datetime if self._datetime: ts = data[TS_FIELD] if not isinstance(ts, datetime): data[TS_FIELD] = datetime.utcfromtimestamp(ts) # convert fields for key, func in self._convert.items(): if key in data: try: data[key] = func(data[key]) except ValueError: self.log.warn("bad_value", value=data[key], msg="expected " + str(func)) del data[key] # insert data if self._batch > 0: self._curbatch.append(data) self._batchlen += 1 if self._batchlen > self._batch: self.collection.insert(self._curbatch) self._curbatch = [ ] self._batchlen = 0 else: self.collection.insert(data) if self._dbg: self.log.debug("process_data.end", status=0) def finish(self): if self._batch > 0 and self._batchlen > 0: self.collection.insert(self._curbatch) pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/modules/stampede_loader.py0000644000175000017500000011436111757531137030563 0ustar ryngerynge""" Load input into the Stampede DB schema via a SQLAlchemy interface. This is an nl_load module which MUST be invoked with the command-line pair connString='SQLAlchemy connection string'. Example: nl_parse bp pegasus.db | nl_load stampede_loader connString='sqlite:///pegasusTest.db' The connection string must be of a format that is accepted as the first arg of the SQLAlchemy create_engine() function. The database indicated by the conection string will be create and populated with tables and indexes if it does not exist. If it does exist, it will merely be connected to and the SQLAlchemy object mappings will be initialized. This module does not produce any output other than loading the BP data into the Stampede DB. See http://www.sqlalchemy.org/ for details on SQLAlchemy """ __rcsid__ = "$Id: stampede_loader.py 31116 2012-03-29 15:45:15Z mgoode $" __author__ = "Monte Goode" from netlogger.analysis.schema.schema_check import ErrorStrings, SchemaCheck, SchemaVersionError from netlogger.analysis.schema.stampede_schema import * from netlogger.analysis.modules._base import Analyzer as BaseAnalyzer from netlogger.analysis.modules._base import SQLAlchemyInit, dsn_dialect from netlogger import util import sys import time class Analyzer(BaseAnalyzer, SQLAlchemyInit): """Load into the Stampede SQL schema through SQLAlchemy. Parameters: - connString {string,None*}: SQLAlchemy connection string. The general form of this is 'dialect+driver://username:password@host:port/database'. See the SQLAlchemy docs for details. For sqlite, use 'sqlite:///foo.db' for a relative path and 'sqlite:////path/to/foo.db' (four slashes) for an absolute one. When using MySQL, the general form will work, but the library expects the database to exist (ie: will not issue CREATE DB) but will populate an empty DB with tables/indexes/etc. - mysql_engine {string,None*}: For MySQL, the storage engine. Accepted values include 'InnoDB' and 'MyISAM'. See SQLAlchemy/MySQL documentation for more details. Ignored if connString does not start with 'mysql'. 
""" def __init__(self, connString=None, perf='no', batch='no', mysql_engine=None, **kw): """Init object @type connString: string @param connString: SQLAlchemy connection string - REQUIRED """ BaseAnalyzer.__init__(self, **kw) _kw = { } if connString is None: raise ValueError("connString is required") dialect = dsn_dialect(connString) _kw[dialect] = { } if dialect == 'mysql': # mySQL-specific options if mysql_engine is not None: _kw[dialect]['mysql_engine'] = mysql_engine # This mixin adds a class member "self.session" after initialization. # This is the session handler that the code logic uses for queries # and other DB interaction. The arg "initializeToPegasusDB" is # a function from the stampede_schema module. try: SQLAlchemyInit.__init__(self, connString, initializeToPegasusDB, **_kw) except exceptions.OperationalError, e: self.log.error('init', msg='%s' % ErrorStrings.get_init_error(e)) raise RuntimeError # Check the schema version before proceeding. s_check = SchemaCheck(self.session) if not s_check.check_schema(): raise SchemaVersionError self.log.info('init.start') # "Case" dict to map events to handler methods self.eventMap = { 'stampede.wf.plan' : self.workflow, 'stampede.wf.map.task_job' : self.task_map, 'stampede.static.start' : self.noop, # good 'stampede.static.end' : self.static_end, 'stampede.xwf.start' : self.workflowstate, 'stampede.xwf.end' : self.workflowstate, 'stampede.xwf.map.subwf_job' : self.subwf_map, 'stampede.task.info' : self.task, 'stampede.task.edge' : self.task_edge, 'stampede.job.info' : self.job, 'stampede.job.edge' : self.job_edge, 'stampede.job_inst.pre.start' : self.job_instance, 'stampede.job_inst.pre.term' : self.jobstate, 'stampede.job_inst.pre.end' : self.jobstate, 'stampede.job_inst.submit.start' : self.job_instance, 'stampede.job_inst.submit.end' : self.jobstate, 'stampede.job_inst.held.start' : self.jobstate, 'stampede.job_inst.held.end' : self.jobstate, 'stampede.job_inst.main.start' : self.jobstate, 'stampede.job_inst.main.term' : self.jobstate, 'stampede.job_inst.main.end' : self.job_instance, 'stampede.job_inst.post.start' : self.jobstate, 'stampede.job_inst.post.term' : self.jobstate, 'stampede.job_inst.post.end' : self.job_instance, 'stampede.job_inst.host.info' : self.host, 'stampede.job_inst.image.info' : self.jobstate, 'stampede.job_inst.grid.submit.start' : self.noop, # good 'stampede.job_inst.grid.submit.end' : self.jobstate, 'stampede.job_inst.globus.submit.start' : self.noop, # good 'stampede.job_inst.globus.submit.end' : self.jobstate, 'stampede.inv.start' : self.noop, # good 'stampede.inv.end' : self.invocation, } # Dicts for caching FK lookups self.wf_id_cache = {} self.root_wf_id_cache = {} self.job_id_cache = {} self.job_instance_id_cache = {} self.host_cache = {} self.hosts_written_cache = None # undocumented performance option self._perf = util.as_bool(perf) if self._perf: self._insert_time, self._insert_num = 0, 0 self._start_time = time.time() # flags and state for batching self._batch = util.as_bool(batch) self._flush_every = 10000 self._flush_count = 0 self._last_flush = time.time() # caches for batched events self._batch_cache = { 'batch_events' : [], 'update_events' : [], 'host_map_events' : [] } self._task_map_flush = {} self._task_edge_flush = {} self.log.info('init.end', msg='Batching: %s' % self._batch) pass def process(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Get the BP dict from the controlling process and dispatch to the appropriate method per-event. 
""" self.log.debug('process', msg=linedata) if not self._batch: self.check_connection() try: if self._perf: t = time.time() self.eventMap[linedata['event']](linedata) self._insert_time += (time.time() - t) self._insert_num += 1 else: self.eventMap[linedata['event']](linedata) except KeyError: if linedata['event'].startswith('stampede.job_inst.'): self.log.warn('process', msg='Corner case jobstate event: "%s"' % linedata['event']) self.jobstate(linedata) else: self.log.error('process', msg='no handler for event type "%s" defined' % linedata['event']) except exceptions.IntegrityError, e: # This is raised when an attempted insert violates the # schema (unique indexes, etc). self.log.error('process', msg='Insert failed for event "%s" : %s' % (linedata['event'], e)) self.session.rollback() except exceptions.OperationalError, e: self.log.error('process', msg='Connection seemingly lost - attempting to refresh') self.session.rollback() self.check_connection() self.process(linedata) self.check_flush() pass def linedataToObject(self, linedata, o): """ @type linedata: dict @param linedata: One line of BP data dict-ified. @type o: instance of mapper class from stampede_schema module. @param o: Passed in by the appropriate event handler method. Takes the dict of BP linedata, assigns contents to the class o as attributes, and does any global type massaging like transforming dict strings to numeric types. """ for k,v in linedata.items(): if k == 'level': continue # undot attr = k.replace('.', '_') attr_remap = { # workflow 'xwf_id': 'wf_uuid', 'parent_xwf_id': 'parent_wf_id', # task.info 'task_id': 'abs_task_id', # task.edge 'child_task_id': 'child_abs_task_id', 'parent_task_id': 'parent_abs_task_id', # job.info 'job_id': 'exec_job_id', # job.edge 'child_job_id': 'child_exec_job_id', 'parent_job_id': 'parent_exec_job_id', # xwf.start/end (none) # job_inst.submit.start/job_inst.submit.start/etc 'job_inst_id': 'job_submit_seq', 'js_id': 'jobstate_submit_seq', 'cluster_dur': 'cluster_duration', 'local_dur': 'local_duration', # inv.end 'inv_id': 'task_submit_seq', 'dur': 'remote_duration', } # remap attr names if attr_remap.has_key(attr): attr = attr_remap[attr] # sanitize argv input if attr == 'argv': if v != None: v = v.replace("\\", "\\\\" ) v = v.replace("'", "\\'") pass try: exec("o.%s = '%s'" % (attr,v)) except: self.log.error('linedataToObject', msg='unable to process attribute %s with values: %s' % (k,v)) # global type re-assignments if hasattr(o, 'ts'): # make all timestamp values floats o.ts = float(o.ts) if hasattr(o, 'start_time') and o.start_time != None: o.start_time = float(o.start_time) if hasattr(o, 'cluster_start_time') and o.cluster_start_time != None: o.cluster_start_time = float(o.cluster_start_time) if hasattr(o, 'duration') and o.duration != None: o.duration = float(o.duration) if hasattr(o, 'restart_count') and o.restart_count != None: o.restart_count = int(o.restart_count) return o ############################################# # Methods to handle batching/flushing ############################################# def reset_flush_state(self): """ Reset the internal flust state if batching. """ if self._batch: self.log.debug('reset_flush_state', msg='Resetting flush state') self._flush_count = 0 self._last_flush = time.time() def check_flush(self): """ Check to see if the batch needs to be flushed based on either the number of queued inserts or based on time since last flush. 
""" if not self._batch: return if self._flush_count >= self._flush_every: self.hard_flush() self.log.debug('check_flush', msg='Flush: flush count') return else: self._flush_count += 1 if (time.time() - self._last_flush) > 30: self.hard_flush() self.log.debug('check_flush', msg='Flush: time based') def check_connection(self, sub=False): self.log.debug('check_connection.start') try: self.session.connection().closed except exceptions.OperationalError, e: try: if not self.session.is_active: self.session.rollback() self.log.error('check_connection', msg='Lost connection - attempting reconnect') time.sleep(5) self.session.connection().connect() except exceptions.OperationalError, e: self.check_connection(sub=True) if not sub: self.log.warn('check_connection', msg='Connection re-established') self.log.debug('check_connection.end') def hard_flush(self, batch_flush=True): """ @type batch_flush: boolean @param batch_flush: Defaults to true. Is set to false when the batch commit hits and integrity error. Process queued inserts and flush/commit to the database. If the commit fails due to an integrity error, then method re-calls itself with setting batch_flush to False which causes each insert/object to be committed individually so all the "good" inserts can succeed. This will increase the processing time of the batch with the bad data in it. """ if not self._batch: return self.log.debug('hard_flush.begin', batching=batch_flush) self.check_connection() if self._perf: s = time.time() end_event = [] for event in self._batch_cache['batch_events']: if event.event == 'stampede.xwf.end': end_event.append(event) if batch_flush: self.session.add(event) else: self.individual_commit(event) for event in self._batch_cache['update_events']: if batch_flush: self.session.merge(event) else: self.individual_commit(event, merge=True) try: self.session.commit() except exceptions.IntegrityError, e: self.log.error('batch_flush', msg='Integrity error on batch flush: %s - batch will need to be committed per-event which will take longer' % e) self.session.rollback() self.hard_flush(batch_flush=False) except exceptions.OperationalError, e: self.log.error('batch_flush', msg='Connection problem during commit: %s - reattempting batch' % e) self.session.rollback() self.hard_flush() for host in self._batch_cache['host_map_events']: self.map_host_to_job_instance(host) for ee in end_event: self.flushCaches(ee) end_event = [] # Clear all data structures here. for k in self._batch_cache.keys(): self._batch_cache[k] = [] self.session.commit() self.reset_flush_state() self.log.debug('hard_flush.end', batching=batch_flush) if self._perf: self.log.info('hard_flush.duration', msg='%s' % (time.time() - s)) def individual_commit(self, event, merge=False): """ @type merge: boolean @param merge: Set to true if the row should be a merge rather than a plain insert. This gets called by hard_flush if there is a problem with a batch commit to commit each object individually. """ try: if merge: event.merge_to_db(self.session) else: event.commit_to_db(self.session) self.session.expunge(event) except exceptions.IntegrityError, e: self.log.error('individual_commit', msg='Insert failed for event %s : %s' % (event,e)) self.session.rollback() ############################################# # Methods to handle the various insert events ############################################# def workflow(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a workflow insert event. 
""" wf = self.linedataToObject(linedata, Workflow()) self.log.debug('workflow', msg=wf) wf.timestamp = wf.ts wf.planner_arguments = wf.argv is_root = True if wf.root_xwf_id != wf.wf_uuid: is_root = False wf.root_wf_id = self.wf_uuid_to_id(wf.root_xwf_id) if wf.parent_wf_id is not None: wf.parent_wf_id = self.wf_uuid_to_id(wf.parent_wf_id) # workflow inserts must be explicitly written to db whether # batching or not wf.commit_to_db(self.session) if is_root: wf.root_wf_id = self.wf_uuid_to_id(wf.root_xwf_id) wf.commit_to_db(self.session) if wf.root_wf_id == None: self.log.warn('workflow', msg='Count not determine root_wf_id for event %s' % wf) pass def workflowstate(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a workflowstate insert event. """ wfs = self.linedataToObject(linedata, Workflowstate()) self.log.debug('workflowstate', msg=wfs) state = { 'stampede.xwf.start': 'WORKFLOW_STARTED', 'stampede.xwf.end': 'WORKFLOW_TERMINATED' } wfs.wf_id = self.wf_uuid_to_id(wfs.wf_uuid) wfs.timestamp = wfs.ts wfs.state = state[wfs.event] if self._batch: self._batch_cache['batch_events'].append(wfs) else: wfs.commit_to_db(self.session) if wfs.event == 'stampede.xwf.end': self.flushCaches(wfs) pass def job(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a static job insert event. """ job = self.linedataToObject(linedata, Job()) job.wf_id = self.wf_uuid_to_id(job.wf_uuid) job.clustered = util.as_bool(job.clustered) self.log.debug('job', msg=job) if self._batch: self._batch_cache['batch_events'].append(job) else: job.commit_to_db(self.session) pass def job_edge(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a static job edge insert event. """ je = self.linedataToObject(linedata, JobEdge()) je.wf_id = self.wf_uuid_to_id(je.wf_uuid) self.log.debug('job_edge', msg=je) if self._batch: self._batch_cache['batch_events'].append(je) else: je.commit_to_db(self.session) pass def job_instance(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a job instance insert event. 
""" job_instance = self.linedataToObject(linedata, JobInstance()) self.log.debug('job_instance', msg=job_instance) job_instance.wf_id = self.wf_uuid_to_id(job_instance.wf_uuid) if job_instance.wf_id == None: er = 'No wf_id associated with wf_uuid %s - can not insert job instance %s' \ % (job_instance.wf_uuid, job_instance) self.log.error('job_instance', msg=er) return job_instance.job_id = self.get_job_id(job_instance.wf_id, job_instance.exec_job_id) if not job_instance.job_id: self.log.error('job_instance', msg='Could not determine job_id for job_instance: %s' % job_instance) return if job_instance.event == 'stampede.job_inst.submit.start' or \ job_instance.event == 'stampede.job_inst.pre.start': iid = self.get_job_instance_id(job_instance, quiet=True) if not iid: # explicit insert job_instance.commit_to_db(self.session) # seed the cache noop = self.get_job_instance_id(job_instance) if job_instance.event == 'stampede.job_inst.pre.start': self.jobstate(linedata) return if job_instance.event == 'stampede.job_inst.main.end' or \ job_instance.event == 'stampede.job_inst.post.end': job_instance.job_instance_id = self.get_job_instance_id(job_instance) if self._batch: self._batch_cache['update_events'].append(job_instance) else: job_instance.merge_to_db(self.session) self.jobstate(linedata) pass pass def jobstate(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a jobstate insert event. """ js = self.linedataToObject(linedata, Jobstate()) self.log.debug('jobstate', msg=js) states = { # array maps to status [-1, 0] 'stampede.job_inst.pre.start' : ['PRE_SCRIPT_STARTED', 'PRE_SCRIPT_STARTED'], # statusless 'stampede.job_inst.pre.term' : ['PRE_SCRIPT_TERMINATED', 'PRE_SCRIPT_TERMINATED'], # s-less 'stampede.job_inst.pre.end' : ['PRE_SCRIPT_FAILED', 'PRE_SCRIPT_SUCCESS'], 'stampede.job_inst.submit.end' : ['SUBMIT_FAILED', 'SUBMIT'], 'stampede.job_inst.main.start' : ['EXECUTE', 'EXECUTE'], # s-less 'stampede.job_inst.main.term' : ['JOB_EVICTED', 'JOB_TERMINATED'], 'stampede.job_inst.main.end' : ['JOB_FAILURE', 'JOB_SUCCESS'], 'stampede.job_inst.post.start' : ['POST_SCRIPT_STARTED', 'POST_SCRIPT_STARTED'], # s-less 'stampede.job_inst.post.term' : ['POST_SCRIPT_TERMINATED', 'POST_SCRIPT_TERMINATED'], # s-less 'stampede.job_inst.post.end' : ['POST_SCRIPT_FAILED', 'POST_SCRIPT_SUCCESS'], 'stampede.job_inst.held.start' : ['JOB_HELD', 'JOB_HELD'], # s-less 'stampede.job_inst.held.end' : ['JOB_RELEASED', 'JOB_RELEASED'], # s-less 'stampede.job_inst.image.info' : ['IMAGE_SIZE', 'IMAGE_SIZE'], # s-less 'stampede.job_inst.grid.submit.end' : ['GRID_SUBMIT_FAILED', 'GRID_SUBMIT'], 'stampede.job_inst.globus.submit.end' : ['GLOBUS_SUBMIT_FAILED', 'GLOBUS_SUBMIT'], } if not states.has_key(js.event): # corner case event js.state = js.event.split('.')[2].upper() else: # doctor status-less events to simplify code if not hasattr(js, 'status'): js.status = 0 js.state = states[js.event][int(js.status)+1] js.job_instance_id = self.get_job_instance_id(js) if not js.job_instance_id: self.log.error('jobstate', msg='No job_instance_id for event: %s -%s' % (linedata, js)) return js.timestamp = js.ts if self._batch: self._batch_cache['batch_events'].append(js) else: js.commit_to_db(self.session) pass def invocation(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a invocation insert event. 
""" invocation = self.linedataToObject(linedata, Invocation()) self.log.debug('invocation', msg=invocation) invocation.wf_id = self.wf_uuid_to_id(invocation.wf_uuid) invocation.job_instance_id = self.get_job_instance_id(invocation) if invocation.job_instance_id == None: self.log.error('invocation', msg='Could not determine job_instance_id for invocation: %s' % invocation) return if self._batch: self._batch_cache['batch_events'].append(invocation) else: invocation.commit_to_db(self.session) pass def task(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a task insert event """ task = self.linedataToObject(linedata, Task()) self.log.debug('task', msg=task) task.wf_id = self.wf_uuid_to_id(task.wf_uuid) if self._batch: self._batch_cache['batch_events'].append(task) else: task.commit_to_db(self.session) pass def task_edge(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a task edge insert event """ if not self._task_edge_flush.has_key(linedata['xwf.id']): if self._batch: self.hard_flush() self._task_edge_flush[linedata['xwf.id']] = True te = self.linedataToObject(linedata, TaskEdge()) self.log.debug('task_event', msg=te) te.wf_id = self.wf_uuid_to_id(te.wf_uuid) if self._batch: self._batch_cache['batch_events'].append(te) else: te.commit_to_db(self.session) pass def task_map(self, linedata): """ @type linedata: dict @param linedata: One line of DB data dict-ified Handles a task.map event. Updates a Task table row to include the proper job_id event. """ # Flush previous events to ensure that all the batched # Job table entries are written. if not self._task_map_flush.has_key(linedata['xwf.id']): if self._batch: self.hard_flush() self._task_map_flush[linedata['xwf.id']] = True wf_id = self.wf_uuid_to_id(linedata['xwf.id']) job_id = self.get_job_id(wf_id, linedata['job.id']) if not job_id: self.log.error('task_map', msg='Could not determine job_id for task map: %s' % linedata) return try: task = self.session.query(Task).filter(Task.wf_id == wf_id).filter(Task.abs_task_id == linedata['task.id']).one() task.job_id = job_id except orm.exc.MultipleResultsFound, e: self.log.error('task_map', msg='Multiple task results: cant map task: %s ' % linedata) return except orm.exc.NoResultFound, e: self.log.error('task_map', msg='No task found: cant map task: %s ' % linedata) return if self._batch: # next flush will catch this - no cache pass else: self.session.commit() pass def subwf_map(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a subworkflow job map event. """ self.log.debug('subwf_map', msg=linedata) wf_id = self.wf_uuid_to_id(linedata['xwf.id']) subwf_id = self.wf_uuid_to_id(linedata['subwf.id']) job_id = self.get_job_id(wf_id, linedata['job.id']) try: job_inst = self.session.query(JobInstance).filter(JobInstance.job_id == job_id).filter(JobInstance.job_submit_seq == linedata['job_inst.id']).one() job_inst.subwf_id = subwf_id except orm.exc.MultipleResultsFound, e: self.log.error('subwf_map', msg='Multiple job instance results: cant map subwf: %s ' % linedata) return except orm.exc.NoResultFound, e: self.log.error('subwf_map', msg='No job instance found: cant map subwf: %s ' % linedata) return if self._batch: # next flush will catch this - no cache pass else: self.session.commit() pass def host(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. Handles a host insert event. 
""" host = self.linedataToObject(linedata, Host()) self.log.debug('host', msg=host) if self.hosts_written_cache == None: self.hosts_written_cache = {} query = self.session.query(Host) for row in query.all(): self.hosts_written_cache[(row.wf_id,row.site,row.hostname,row.ip)] = True host.wf_id = self.wf_uuid_to_root_id(host.wf_uuid) # handle inserts into the host table if not self.hosts_written_cache.has_key((host.wf_id,host.site,host.hostname,host.ip)): if self._batch: self._batch_cache['batch_events'].append(host) else: host.commit_to_db(self.session) self.hosts_written_cache[(host.wf_id,host.site,host.hostname,host.ip)] = True # handle mappings if self._batch: self._batch_cache['host_map_events'].append(host) else: self.map_host_to_job_instance(host) pass def static_end(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. This forces a flush after all of the static events have been processed. """ self.log.debug('static_end', msg=linedata) if self._batch: self.hard_flush() pass def noop(self, linedata): """ @type linedata: dict @param linedata: One line of BP data dict-ified. A NOOP method for events that are being ignored. """ self.log.debug('noop', msg=linedata) pass #################################### # DB helper/lookup/caching functions #################################### def wf_uuid_to_id(self, wf_uuid): """ @type wf_uuid: string @param wf_uuid: wf_uuid string from BP logs Attempts to retrieve a workflow wf_id PK/FK from cache. If not in cache, retrieve from st_workflow table in DB and cache. Cuts down on DB queries during insert processing. """ if not self.wf_id_cache.has_key(wf_uuid): query = self.session.query(Workflow).filter(Workflow.wf_uuid == wf_uuid) try: self.wf_id_cache[wf_uuid] = query.one().wf_id except orm.exc.MultipleResultsFound, e: self.log.error('wf_uuid_to_id', msg='Multiple wf_id results for wf_uuid %s : %s' % (wf_uuid, e)) return None except orm.exc.NoResultFound, e: self.log.error('wf_uuid_to_id', msg='No wf_id results for wf_uuid %s : %s' % (wf_uuid, e)) return None pass return self.wf_id_cache[wf_uuid] def wf_uuid_to_root_id(self, wf_uuid): """ @type wf_uuid: string @param wf_uuid: wf_uuid string from BP logs Attempts to retrieve a root workflow wf_id PK/FK from cache. If not in cache, retrieve from st_workflow table in DB and cache. Cuts down on DB queries during insert processing. """ if not self.root_wf_id_cache.has_key(wf_uuid): query = self.session.query(Workflow).filter(Workflow.wf_uuid == wf_uuid) try: self.root_wf_id_cache[wf_uuid] = query.one().root_wf_id except orm.exc.MultipleResultsFound, e: self.log.error('wf_uuid_to_root_id', msg='Multiple wf_id results for wf_uuid %s : %s' % (wf_uuid, e)) return None except orm.exc.NoResultFound, e: self.log.error('wf_uuid_to_root_id', msg='No wf_id results for wf_uuid %s : %s' % (wf_uuid, e)) return None pass return self.root_wf_id_cache[wf_uuid] def get_job_id(self, wf_id, exec_id): """ @type wf_id: int @param wf_id: A workflow id from the workflow table. @type exec_id: string @param exec_id: The exec_job_id for a given job. Gets and caches job_id for job_instance inserts and static table updating. 
""" if not self.job_id_cache.has_key((wf_id, exec_id)): query = self.session.query(Job.job_id).filter(Job.wf_id == wf_id).filter(Job.exec_job_id == exec_id) try: self.job_id_cache[((wf_id, exec_id))] = query.one().job_id except orm.exc.MultipleResultsFound, e: self.log.error('get_job_id', msg='Multiple results found for wf_uuid/exec_job_id: %s/%s' % (wf_id, exec_id)) return None except orm.exc.NoResultFound, e: self.log.error('get_job_id', msg='No results found for wf_uuid/exec_job_id: %s/%s' % (wf_id, exec_id)) return None return self.job_id_cache[((wf_id, exec_id))] def get_job_instance_id(self, o, quiet=False): """ @type o: class instance @param o: Mapper object containing wf_uuid and exec_job_id. Attempts to retrieve a job job_instance_id PK/FK from cache. If not in cache, retrieve from st_job table. """ wf_id = self.wf_uuid_to_id(o.wf_uuid) cached_job_id = self.get_job_id(wf_id, o.exec_job_id) uniqueIdIdx = (cached_job_id, o.job_submit_seq) if not self.job_instance_id_cache.has_key(uniqueIdIdx): query = self.session.query(JobInstance).filter(JobInstance.job_id == cached_job_id).filter(JobInstance.job_submit_seq == o.job_submit_seq) try: self.job_instance_id_cache[uniqueIdIdx] = query.one().job_instance_id except orm.exc.MultipleResultsFound, e: if not quiet: self.log.error('get_job_instance_id', msg='Multple job_instance_id results for tuple %s : %s' % (uniqueIdIdx, e)) return None except orm.exc.NoResultFound, e: if not quiet: self.log.error('get_job_instance_id', msg='No job_instance_id results for tuple %s : %s' % (uniqueIdIdx, e)) return None return self.job_instance_id_cache[uniqueIdIdx] def map_host_to_job_instance(self, host): """ @type host: class instance of stampede_schema.Host @param host: Host object with info from a host event in the log A single job may have multiple (redundant) host events. This checks the cache to see if a job had already had its host_id, and if not, do the proper update and note it in the cache. """ self.log.debug('map_host_to_job_instance', msg=host) wf_id = self.wf_uuid_to_id(host.wf_uuid) cached_job_id = self.get_job_id(wf_id, host.exec_job_id) if not self.host_cache.has_key((cached_job_id, host.job_submit_seq)): if not host.host_id: try: host.host_id = self.session.query(Host.host_id).filter(Host.wf_id == host.wf_id).filter(Host.site == host.site).filter(Host.hostname == host.hostname).filter(Host.ip == host.ip).one().host_id except orm.exc.MultipleResultsFound, e: self.log.error('map_host_to_job_instance', msg='Multiple host_id results for host: %s' % host) job_instance = self.session.query(JobInstance).filter(JobInstance.job_id == cached_job_id).filter(JobInstance.job_submit_seq == host.job_submit_seq).one() job_instance.host_id = host.host_id job_instance.merge_to_db(self.session, batch=self._batch) self.host_cache[(cached_job_id, host.job_submit_seq)] = True pass def flushCaches(self, wfs): """ @type wfs: class instance of stampede_schema.Workflowstate @param wfs: Workflow state object from an end event. Flushes information from the lookup caches after a workflow.end event has been recieved. 
""" self.log.debug('flushCaches', msg='Flushing caches for: %s' % wfs) for k,v in self.wf_id_cache.items(): if k == wfs.wf_uuid: del self.wf_id_cache[k] for k,v in self.root_wf_id_cache.items(): if k == wfs.wf_uuid: del self.root_wf_id_cache[k] for k,v in self.job_instance_id_cache.items(): if k[0] == wfs.wf_id: del self.job_instance_id_cache[k] for k,v in self.host_cache.items(): if k[0] == wfs.wf_uuid: del self.host_cache[k] for k,v in self.job_id_cache.items(): if k[0] == wfs.wf_id: del self.job_id_cache[k] if self._task_map_flush.has_key(wfs.wf_uuid): del self._task_map_flush[wfs.wf_uuid] pass ################ # Cleanup, etc ################ def finish(self): BaseAnalyzer.finish(self) if self._batch: self.log.info('finish', msg='Executing final flush') self.hard_flush() self.disconnect() if self._perf: run_time = time.time() - self._start_time self.log.info("performance", insert_time=self._insert_time, insert_num=self._insert_num, total_time=run_time, run_time_delta=run_time - self._insert_time, mean_time=self._insert_time / self._insert_num) def main(): if os.path.exists('pegasusTest.db'): os.unlink('pegasusTest.db') loader = Analyzer('sqlite:///pegasusTest.db') f = open('pegasus.db.log', 'rU') for line in f.readlines(): rowdict = {} for l in line.strip().split(' '): k,v = l.split('=') rowdict[k] = v loader.process(rowdict) pass if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/modules/_base.py0000644000175000017500000002645211757531137026507 0ustar ryngerynge""" Base for analysis modules. """ from logging import DEBUG import Queue import re import sys import threading import time # from netlogger import util from netlogger.nllog import DoesLogging, TRACE from netlogger.nlapi import TS_FIELD, EVENT_FIELD, HASH_FIELD from netlogger.util import hash_event from netlogger.analysis import schemacfg """ Standard Exceptions for all Analyzers """ class AnalyzerException(Exception): """Common base class. """ pass class ConnectionException(AnalyzerException): def __init__(self, value): self.value = value def __str__(self): return repr(self.value) class PreprocessException(AnalyzerException): pass class ProcessException(AnalyzerException): pass """ Database imports """ # SQLite sqlite = None try: # Python 2.5 import sqlite3 as sqlite except ImportError: try: # Python 2.4 from pysqlite2 import dbapi2 as sqlite except ImportError: pass try: from sqlalchemy import create_engine, MetaData, orm except ImportError: pass def dsn_dialect(s): """Data source name (dsn) dialect.""" dialect = "" m = re.match(r'(.*?)[:+]', s) if m and (len(m.groups()) == 1): dialect = m.group(1) return dialect.lower() """ Database classes """ class DBConnectError(Exception): pass class Connection: NAME = None def __init__(self, dsn=None, database=None, kw=None): """Connect to the database. :Parameter: dsn - DBMS filename (sqlite) or host (others) database - Database inside DBMS, ignored for sqlite kw - Additional keywords On error, raise DBConnectError """ self.connection = None class SQLiteConnection(Connection): NAME = 'sqlite' def __init__(self, dsn=None, database=None, kw=None): self.connection = sqlite.connect(dsn, isolation_level="DEFERRED") """ User-level name for each connection class, from the NAME constant in each class. """ CONNECTION_CLASSMAP = { } for clazz in (SQLiteConnection,): CONNECTION_CLASSMAP[clazz.NAME] = clazz """ Mixin class to provide SQLAlchemy database initialization/mapping. 
Takes a SQLAlchemy connection string and a module function as required arguments. The initialization function takes the db and metadata objects (and optional args) as args, initializes to the appropriate schema and sets "self.session" as a class member for loader classes to interact with the DB with. See: netlogger.analysis.schema.stampede_schema.initializeToPegasusDB For an example of what the intialization function needs to do to setup the schema mappings and the metadata object. This should be __init__'ed in the subclass AFTER the Analyzer superclass gets called. The module netlogger.analysis.modules.stampede_loader shows the use of this to initialize to a DB. """ class SQLAlchemyInit: def __init__(self, connString, initFunction, **kwarg): if not hasattr(self, '_dbg'): # The Analyzer superclass SHOULD have been _init__'ed # already but if not, bulletproof this attr. self._dbg = False self.db = create_engine(connString, echo=self._dbg, pool_recycle=True) self.metadata = MetaData() dialect_kw = kwarg.get(dsn_dialect(connString), {}) initFunction(self.db, self.metadata, kw=dialect_kw) self.metadata.bind = self.db sm = orm.sessionmaker(bind=self.db, autoflush=False, autocommit=False, expire_on_commit=False) self.session = orm.scoped_session(sm) def disconnect(self): self.session.connection().close() self.session.close_all() self.session.bind.dispose() self.db.dispose() """ Base classes """ class Analyzer(DoesLogging): """Base analysis class. Doesn't do much. Parameters: - add_hash {yes,no,no*}: To each input event, add a new field, 'nlhash', which is a probabilistically unique (MD5) hash of all the other fields in the event. - schemata {,None*}: If given, read a simple form of schema from file(s). The schema uses INI format with a [section] for each event name and = describing the type of each field for that event. """ FLUSH_SEC = 5 # time to wait before calling flush() def __init__(self, add_hash="no", _validate=False, schemata=None): """Will be overridden by subclasses to take parameters specific to their function. """ DoesLogging.__init__(self) self._do_preprocess = False # may get set to True, below self.last_flush = time.time() self._validate = _validate # Parameter: add_hash try: self._add_hash = util.as_bool(add_hash) self._do_preprocess = True except ValueError, err: self.log.error("parameter.error", name="add_hash", value=add_hash, msg=err) self._add_hash = False # Parameter: schemata self._schema = None if schemata: schema_files = [s.strip() for s in schemata.split(',')] try: p = schemacfg.SchemaParser(files=schema_files) self._schema = p.get_schema() self._do_preprocess = True except (IOError, ValueError),err: self.log.error("parameter.error", name="schemata", value=schema_files, msg=err) def process(self, data): """Override with logic; 'data' is a dictionary with timestamp, event, and other values. """ pass def _preprocess(self, data): """Called before data is handed to subclass in order to allow standardized massaging / filtering of input data. Returns: - Result of module's process() function, or None if the data was rejected by validation. Exceptions: - ValueError """ if self._schema: self._schema.event(data) if self._add_hash: data[HASH_FIELD] = hash_event(data) return data def flush(self): """Override to flush any intermediate results, e.g. call flush() on open file descriptors. 
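The framework calls this from notify() whenever at least FLUSH_SEC (5) seconds have elapsed since the last flush.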
""" pass def finish(self): """Override with logic if subclass needs a cleanup method, ie: threading or whatnot """ pass def notify(self, data): """Called by framework code to notify loader of new data, which will result in a call to process(data). Do not override this unless you know what you are doing. Args: - data: NetLogger event dictionary Returns: - Whatever the loading code returns, in most cases the return value should be ignored. Exceptions: - PreprocessException (AnalyzerException): Something went wrong during preprocessing - ProcessException (AnalyzerException): Something went wrong during processing """ if self._validate: valid = (TS_FIELD in data and EVENT_FIELD in data) if not valid: raise PreprocessException("Invalid event data in '%s'" % str(data)) if self._do_preprocess: try: data = self._preprocess(data) except Exception, err: raise PreprocessException(str(err)) try: result = self.process(data) except Exception, err: raise ProcessException(str(err)) t = time.time() if t - self.last_flush >= self.FLUSH_SEC: if self._dbg: self.log.debug("flush") self.flush() self.last_flush = t return result class BufferedAnalyzer(Analyzer, threading.Thread): """Threaded class to process input in a buffered fashion. Intended for cases (db inserts, etc) where processing may lag behind input from the infomation broker, et al. """ def __init__(self, *args, **kwargs): Analyzer.__init__(self, *args, **kwargs) threading.Thread.__init__(self) self.daemon = True self.running = False self.finishing = False self.queue = Queue.Queue() def process(self, data): """Get input from controlling process as per usual, put the input into the queue and return immediately. """ if not self.running: self.running = True self.start() # if finish() has been called, stop queueing intput if not self.finishing: self.queue.put(data) def run(self): """Thread method - pull data FIFO style from the queue and pass off to the worker method. """ self.log.info('run.start') while self.running: if not self.queue.empty(): row = self.queue.get() self.process_buffer(row) if sys.version_info >= (2,5): self.queue.task_done() else: time.sleep(0.1) self.log.info('run.end') def process_buffer(self, row): """Override with logic - this is the worker method where the user defines the loading behavior. """ pass def finish(self): """This is called when processing is finished. Waits for any queued data to be processed, and shuts down the processing thread. See nl_load for an example on the appropriate time/place to call. """ self.log.info('finish.begin') if not self.finishing: self.log.info('finish.finishing queue') self.finishing = True while not self.queue.empty(): time.sleep(0.1) self.running = False if self.isAlive(): self.join() #time.sleep(1) self.log.info('finish.end') class Loader: """Abstract class for loading into database-like things. """ def __init__(self, type=None, dsn=None, **kw): """Initialize state. :Parameters: type - Name for type of database dsn - DBMS filename (sqlite) or host (others) kw - Additional connection keywords """ # get connection class try: self.type = type.lower() except AttributeError: raise ValueError("Database type not a string") self.conn_class = CONNECTION_CLASSMAP.get(self.type, None) if self.conn_class is None: raise NotImplementedError("Unknown DB type '%s'" % type) # set server (or file) DSN if dsn is None: if self.conn_class is sqlite: self.dsn = "db.sqlite" else: self.dsn = "localhost" else: self.dsn = dsn # save connection keywords self.conn_kw = kw def connect(self): """Connect to the database. 
Return new connection (also in self._conn.connection) """ self._conn = self.conn_class(self.dsn, self.conn_kw) return self._conn.connection def disconnect(self): """Disconnect, if connected. """ if self._conn: self._conn.connection.close() self._conn = None pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/modules/__init__.py0000644000175000017500000000000011757531137027152 0ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/analysis/schemacfg.py0000644000175000017500000002207511757531137025703 0ustar ryngerynge""" Read and handle simple event "schemas" encoded in a Python configuration file. Format:: [] = [{default-value}] .. The is the target type for the field value. Accepted values for are: int(eger) - int/long-int number float - FP number date - datetime object str - doesn't do anything, but good for documentation and also used in 'drop not mentioned' mode. The "default-value" is optional. Sample schema:: [enter.stage.left] ts = date name = str height = float weight = float age = int gender = str {female} # event: # ts=1602-02-23T12:22:34 event=enter.stage.left name=Hamlet height=70 \ # weight=160 age=20 gender=male Sample usage:: from import SchemaParser, Schema parser = SchemaParser(files=("myschemafile.cfg",)) schema = parser.get_schema() for e in event_list: print "before: %s" % e schema.event(e) print "after: %s" % e """ __author__ = "Dan Gunter " __rcsid__ = "$Id: schemacfg.py 26624 2010-10-18 14:05:03Z dang $" # System imports from datetime import datetime import ConfigParser import re # Local imports from netlogger import nldate from netlogger import nlapi from netlogger import nllog from netlogger import util from netlogger.parsers.base import parseDate def convert_to_date(x): """Convert value to date. Exceptions: - ValueError: if string date is invalid. """ result = x if isinstance(x,float): result = datetime.utcfromtimestamp(x) elif isinstance(x,str): sec = parseDate(x) result = datetime.utcfromtimestamp(sec) return result def drop(x): return None # causes 'x' to be ignored def identity(x): return x class SchemaParser(nllog.DoesLogging): _TYPEFN = { 'int' : int, 'integer' : int, 'float' : float, 'date' : convert_to_date, 'str' : str, # no-op 'string' : str, # no-op '@drop' : drop, # drop the item } # Regular expression for the item value. _SCHEMAVAL = re.compile(""" (?P\w+) # Type of the named item (?:\s+(?P\{.*\}))?\s* # Optional default value, only WS after """, flags=re.VERBOSE) def __init__(self, files=[], **read_kw): """Constructor. Args: - files (str[]): File objects or names passed to read(), if present. Kwargs: - **read_kw: Keywords passed through to read() function Exceptions: If `files` is non-empty, then will raise exceptions just like read(). """ nllog.DoesLogging.__init__(self) self._parser = ConfigParser.RawConfigParser() self._mapping, self._defaults, self._drop = { }, { }, { } for f in files: self.read(f, **read_kw) def read(self, str_or_file): """Read and parse the data. Args: - str_or_file (str|file): A string or file-like object, which must implement readline(). If it is a string attempt to open the file with that name. 
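Example (an illustrative sketch using an in-memory file; the event and field names are hypothetical)::

    from StringIO import StringIO
    p = SchemaParser()
    p.read(StringIO("[my.event]\nts = date\ncount = int\n"))
    schema = p.get_schema()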
Exceptions: - IOError: If a file is specified but can't be opened - ValueError: Bad type specification """ if hasattr(str_or_file, "readline"): fileobj = str_or_file else: fileobj = open(str(str_or_file), 'r') self._parser.readfp(fileobj) name_expr = re.compile("^[0-9a-zA-Z._-]+$") msg = "must be 1 or more of alphanumeric, dash, underline or dot" for sect in self._parser.sections(): # check that section name is legal m = name_expr.match(sect) if m is None: raise ValueError("Event name [%s]: %s" % (sect, msg)) type_map, defaults = { }, { } # process directives (since they apply to all values in loop) try: drop_opt = self._parser.get(sect, '@drop') drop = util.as_bool(drop_opt) except ConfigParser.NoOptionError: drop = False for name, value in self._parser.items(sect): # skip to next, if name is directive if name[0] == '@': continue # check that name is legal m = name_expr.match(name) if m is None: raise ValueError("Field name '%s': %s" % (name, msg)) # extract type and default value, if any m = self._SCHEMAVAL.match(value) if m is None: raise ValueError("Bad value '%s' for field '%s'" % ( value, name)) mgd = m.groupdict() value_type, value_default = mgd['type'], mgd['default'] # set type function try: fn = self._TYPEFN[value_type] # If not dropping, make the 'str' function even cheaper # by skipping the type map if (not drop) and (fn is str): pass # Otherwise, put function into mapping else: type_map[name] = fn except KeyError: raise ValueError("Unknown type '%s' in '%s=%s' " "in section [%s]" % ( value_type, name, value, sect)) # set default value if value_default is not None: s = value_default[1:-1] # strip { } defaults[name] = s self._mapping[sect] = type_map self._defaults[sect] = defaults self._drop[sect] = drop def get_schema(self): """Get the schema so far. Returns: - Schema: The schema as an object. """ return Schema(self._mapping, self._defaults, drop_unknown=self._drop) class Schema(nllog.DoesLogging): """Thin wrapper around a mapping that specifies functions for converting field values for a given event type. Attributes: - mapping: the original mapping """ def __init__(self, mapping, defaults, drop_unknown={}): """Constructor. Args: - mapping (dict): Type mapping. Layout of dictionary is { event-name : { field-name : function, .. }, .. } - defaults (dict): Default values. Layout of dictionary is { event-name : { field-name : value, .. }, .. } - drop_unknown (dict): When processing events, if this is True then drop all event attributes not named in the schema. Key is event-name, value is boolean. """ nllog.DoesLogging.__init__(self) self.mapping = mapping self.defaults = defaults self.drop_unknown = drop_unknown def event(self, event): """Modify input event dictionary, in place, parsing types as specified by the schema. Args: - event (dict): NetLogger event dictionary Exceptions: - ValueError: If the event doesn't have required fields, or there is an error parsing one of the values. """ try: event_name = event[nlapi.EVENT_FIELD] except KeyError: raise ValueError("Bad event, missing required field '%s'" % nlapi.EVENT_FIELD) # Look for type map for this event if self.mapping.has_key(event_name): type_map = self.mapping[event_name] for key in type_map: # Extract value. If not present, look for default value. value = None if key not in event: if key in self.defaults[event_name]: value = self.defaults[event_name][key] else: value = event[key] # If there is a value, apply conversion. 
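# (value is None when the field was absent from the event and had
# no schema default; such fields are simply left untouched)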
if value is not None: if self._trace: self.log.trace("convert.start", key=key) fn = type_map[key] if fn is not None: try: result = fn(value) except ValueError, err: if self._trace: self.log.trace("convert.end", key=key, status=-1, msg=err) raise ValueError("parsing '%s': %s" % (key, err)) if result is not None: event[key] = result if self._trace: self.log.trace("convert.end", key=key, status=0) # Drop unknown by walking event if self.drop_unknown.get(event_name, False): for ekey in event.keys(): if ekey != 'event' and ekey != 'ts' and \ ekey not in type_map: del event[ekey] pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/version.py0000644000175000017500000000104711757531137023621 0ustar ryngerynge## Copyright (c) 2004-2012, The Regents of the University of California, ## through Lawrence Berkeley National Laboratory ## (subject to receipt of any required approvals from ## the U.S. Dept. of Energy). All rights reserved. """ Software version """ NL_VERSION = "4.3.0" NL_CREATE_DATE = "$Date$"[7:-1].strip() NL_COPYRIGHT = """Copyright (c) 2004-2012, The Regents of the University of California, through Lawrence Berkeley National Laboratory (subject to receipt of any required approvals from the U.S. Dept. of Energy). All rights reserved.""" pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/configobj.py0000644000175000017500000025507611757531137024111 0ustar ryngerynge# configobj.py # A config file reader/writer that supports nested sections in config files. # Copyright (C) 2005-2008 Michael Foord, Nicola Larosa # E-mail: fuzzyman AT voidspace DOT org DOT uk # nico AT tekNico DOT net # ConfigObj 4 # http://www.voidspace.org.uk/python/configobj.html # Released subject to the BSD License # Please see http://www.voidspace.org.uk/python/license.shtml # Scripts maintained at http://www.voidspace.org.uk/python/index.shtml # For information about bugfixes, updates and support, please join the # ConfigObj mailing list: # http://lists.sourceforge.net/lists/listinfo/configobj-develop # Comments, suggestions and bug reports welcome. from __future__ import generators import sys INTP_VER = sys.version_info[:2] if INTP_VER < (2, 2): raise RuntimeError("Python v.2.2 or later needed") import os, re compiler = None try: import compiler except ImportError: # for IronPython pass from types import StringTypes from warnings import warn try: from codecs import BOM_UTF8, BOM_UTF16, BOM_UTF16_BE, BOM_UTF16_LE except ImportError: # Python 2.2 does not have these # UTF-8 BOM_UTF8 = '\xef\xbb\xbf' # UTF-16, little endian BOM_UTF16_LE = '\xff\xfe' # UTF-16, big endian BOM_UTF16_BE = '\xfe\xff' if sys.byteorder == 'little': # UTF-16, native endianness BOM_UTF16 = BOM_UTF16_LE else: # UTF-16, native endianness BOM_UTF16 = BOM_UTF16_BE # A dictionary mapping BOM to # the encoding to decode with, and what to set the # encoding attribute to. BOMS = { BOM_UTF8: ('utf_8', None), BOM_UTF16_BE: ('utf16_be', 'utf_16'), BOM_UTF16_LE: ('utf16_le', 'utf_16'), BOM_UTF16: ('utf_16', 'utf_16'), } # All legal variants of the BOM codecs. # TODO: the list of aliases is not meant to be exhaustive, is there a # better way ? BOM_LIST = { 'utf_16': 'utf_16', 'u16': 'utf_16', 'utf16': 'utf_16', 'utf-16': 'utf_16', 'utf16_be': 'utf16_be', 'utf_16_be': 'utf16_be', 'utf-16be': 'utf16_be', 'utf16_le': 'utf16_le', 'utf_16_le': 'utf16_le', 'utf-16le': 'utf16_le', 'utf_8': 'utf_8', 'u8': 'utf_8', 'utf': 'utf_8', 'utf8': 'utf_8', 'utf-8': 'utf_8', } # Map of encodings to the BOM to write. 
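# (the None key makes a UTF-8 BOM the fallback when no encoding is set)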
BOM_SET = { 'utf_8': BOM_UTF8, 'utf_16': BOM_UTF16, 'utf16_be': BOM_UTF16_BE, 'utf16_le': BOM_UTF16_LE, None: BOM_UTF8 } def match_utf8(encoding): return BOM_LIST.get(encoding.lower()) == 'utf_8' # Quote strings used for writing values squot = "'%s'" dquot = '"%s"' noquot = "%s" wspace_plus = ' \r\t\n\v\t\'"' tsquot = '"""%s"""' tdquot = "'''%s'''" try: enumerate except NameError: def enumerate(obj): """enumerate for Python 2.2.""" i = -1 for item in obj: i += 1 yield i, item try: True, False except NameError: True, False = 1, 0 __version__ = '4.5.2' __revision__ = '$Id: configobj.py 23978 2009-10-21 21:43:02Z ksb $' __docformat__ = "restructuredtext en" __all__ = ( '__version__', 'DEFAULT_INDENT_TYPE', 'DEFAULT_INTERPOLATION', 'ConfigObjError', 'NestingError', 'ParseError', 'DuplicateError', 'ConfigspecError', 'ConfigObj', 'SimpleVal', 'InterpolationError', 'InterpolationLoopError', 'MissingInterpolationOption', 'RepeatSectionError', 'ReloadError', 'UnreprError', 'UnknownType', '__docformat__', 'flatten_errors', ) DEFAULT_INTERPOLATION = 'configparser' DEFAULT_INDENT_TYPE = ' ' MAX_INTERPOL_DEPTH = 10 OPTION_DEFAULTS = { 'interpolation': True, 'raise_errors': False, 'list_values': True, 'create_empty': False, 'file_error': False, 'configspec': None, 'stringify': True, # option may be set to one of ('', ' ', '\t') 'indent_type': None, 'encoding': None, 'default_encoding': None, 'unrepr': False, 'write_empty_values': False, } def getObj(s): s = "a=" + s if compiler is None: raise ImportError('compiler module not available') p = compiler.parse(s) return p.getChildren()[1].getChildren()[0].getChildren()[1] class UnknownType(Exception): pass class Builder(object): def build(self, o): m = getattr(self, 'build_' + o.__class__.__name__, None) if m is None: raise UnknownType(o.__class__.__name__) return m(o) def build_List(self, o): return map(self.build, o.getChildren()) def build_Const(self, o): return o.value def build_Dict(self, o): d = {} i = iter(map(self.build, o.getChildren())) for el in i: d[el] = i.next() return d def build_Tuple(self, o): return tuple(self.build_List(o)) def build_Name(self, o): if o.name == 'None': return None if o.name == 'True': return True if o.name == 'False': return False # An undefined Name raise UnknownType('Undefined Name') def build_Add(self, o): real, imag = map(self.build_Const, o.getChildren()) try: real = float(real) except TypeError: raise UnknownType('Add') if not isinstance(imag, complex) or imag.real != 0.0: raise UnknownType('Add') return real+imag def build_Getattr(self, o): parent = self.build(o.expr) return getattr(parent, o.attrname) def build_UnarySub(self, o): return -self.build_Const(o.getChildren()[0]) def build_UnaryAdd(self, o): return self.build_Const(o.getChildren()[0]) _builder = Builder() def unrepr(s): if not s: return s return _builder.build(getObj(s)) class ConfigObjError(SyntaxError): """ This is the base class for all errors that ConfigObj raises. It is a subclass of SyntaxError. """ def __init__(self, msg='', line_number=None, line=''): self.line = line self.line_number = line_number self.msg = msg SyntaxError.__init__(self, msg) class NestingError(ConfigObjError): """ This error indicates a level of nesting that doesn't match. """ class ParseError(ConfigObjError): """ This error indicates that a line is badly written. It is neither a valid ``key = value`` line, nor a valid section marker line. """ class ReloadError(IOError): """ A 'reload' operation failed. This exception is a subclass of ``IOError``. 
""" def __init__(self): IOError.__init__(self, 'reload failed, filename is not set.') class DuplicateError(ConfigObjError): """ The keyword or section specified already exists. """ class ConfigspecError(ConfigObjError): """ An error occured whilst parsing a configspec. """ class InterpolationError(ConfigObjError): """Base class for the two interpolation errors.""" class InterpolationLoopError(InterpolationError): """Maximum interpolation depth exceeded in string interpolation.""" def __init__(self, option): InterpolationError.__init__( self, 'interpolation loop detected in value "%s".' % option) class RepeatSectionError(ConfigObjError): """ This error indicates additional sections in a section with a ``__many__`` (repeated) section. """ class MissingInterpolationOption(InterpolationError): """A value specified for interpolation was missing.""" def __init__(self, option): InterpolationError.__init__( self, 'missing option "%s" in interpolation.' % option) class UnreprError(ConfigObjError): """An error parsing in unrepr mode.""" class InterpolationEngine(object): """ A helper class to help perform string interpolation. This class is an abstract base class; its descendants perform the actual work. """ # compiled regexp to use in self.interpolate() _KEYCRE = re.compile(r"%\(([^)]*)\)s") def __init__(self, section): # the Section instance that "owns" this engine self.section = section def interpolate(self, key, value): def recursive_interpolate(key, value, section, backtrail): """The function that does the actual work. ``value``: the string we're trying to interpolate. ``section``: the section in which that string was found ``backtrail``: a dict to keep track of where we've been, to detect and prevent infinite recursion loops This is similar to a depth-first-search algorithm. """ # Have we been here already? if backtrail.has_key((key, section.name)): # Yes - infinite loop detected raise InterpolationLoopError(key) # Place a marker on our backtrail so we won't come back here again backtrail[(key, section.name)] = 1 # Now start the actual work match = self._KEYCRE.search(value) while match: # The actual parsing of the match is implementation-dependent, # so delegate to our helper function k, v, s = self._parse_match(match) if k is None: # That's the signal that no further interpolation is needed replacement = v else: # Further interpolation may be needed to obtain final value replacement = recursive_interpolate(k, v, s, backtrail) # Replace the matched string with its final value start, end = match.span() value = ''.join((value[:start], replacement, value[end:])) new_search_start = start + len(replacement) # Pick up the next interpolation key, if any, for next time # through the while loop match = self._KEYCRE.search(value, new_search_start) # Now safe to come back here again; remove marker from backtrail del backtrail[(key, section.name)] return value # Back in interpolate(), all we have to do is kick off the recursive # function with appropriate starting values value = recursive_interpolate(key, value, self.section, {}) return value def _fetch(self, key): """Helper function to fetch values from owning section. Returns a 2-tuple: the value, and the section where it was found. """ # switch off interpolation before we try and fetch anything ! 
save_interp = self.section.main.interpolation self.section.main.interpolation = False # Start at section that "owns" this InterpolationEngine current_section = self.section while True: # try the current section first val = current_section.get(key) if val is not None: break # try "DEFAULT" next val = current_section.get('DEFAULT', {}).get(key) if val is not None: break # move up to parent and try again # top-level's parent is itself if current_section.parent is current_section: # reached top level, time to give up break current_section = current_section.parent # restore interpolation to previous value before returning self.section.main.interpolation = save_interp if val is None: raise MissingInterpolationOption(key) return val, current_section def _parse_match(self, match): """Implementation-dependent helper function. Will be passed a match object corresponding to the interpolation key we just found (e.g., "%(foo)s" or "$foo"). Should look up that key in the appropriate config file section (using the ``_fetch()`` helper function) and return a 3-tuple: (key, value, section) ``key`` is the name of the key we're looking for ``value`` is the value found for that key ``section`` is a reference to the section where it was found ``key`` and ``section`` should be None if no further interpolation should be performed on the resulting value (e.g., if we interpolated "$$" and returned "$"). """ raise NotImplementedError() class ConfigParserInterpolation(InterpolationEngine): """Behaves like ConfigParser.""" _KEYCRE = re.compile(r"%\(([^)]*)\)s") def _parse_match(self, match): key = match.group(1) value, section = self._fetch(key) return key, value, section class TemplateInterpolation(InterpolationEngine): """Behaves like string.Template.""" _delimiter = '$' _KEYCRE = re.compile(r""" \$(?: (?P\$) | # Two $ signs (?P[_a-z][_a-z0-9]*) | # $name format {(?P[^}]*)} # ${name} format ) """, re.IGNORECASE | re.VERBOSE) def _parse_match(self, match): # Valid name (in or out of braces): fetch value from section key = match.group('named') or match.group('braced') if key is not None: value, section = self._fetch(key) return key, value, section # Escaped delimiter (e.g., $$): return single delimiter if match.group('escaped') is not None: # Return None for key and section to indicate it's time to stop return None, self._delimiter, None # Anything else: ignore completely, just return it unchanged return None, match.group(), None interpolation_engines = { 'configparser': ConfigParserInterpolation, 'template': TemplateInterpolation, } class Section(dict): """ A dictionary-like object that represents a section in a config file. It does string interpolation if the 'interpolation' attribute of the 'main' object is set to True. Interpolation is tried first from this object, then from the 'DEFAULT' section of this object, next from the parent and its 'DEFAULT' section, and so on until the main object is reached. A Section will behave like an ordered dictionary - following the order of the ``scalars`` and ``sections`` attributes. You can use this to change the order of members. Iteration follows the order: scalars, then sections. 
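
    For example (an illustrative sketch; the key names are made up):

    >>> cfg = ConfigObj(['home = /tmp', 'path = $home/data'],
    ...                 interpolation='template')
    >>> cfg['path']
    '/tmp/data'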
""" def __init__(self, parent, depth, main, indict=None, name=None): """ * parent is the section above * depth is the depth level of this section * main is the main ConfigObj * indict is a dictionary to initialise the section with """ if indict is None: indict = {} dict.__init__(self) # used for nesting level *and* interpolation self.parent = parent # used for the interpolation attribute self.main = main # level of nesting depth of this Section self.depth = depth # purely for information self.name = name # self._initialise() # we do this explicitly so that __setitem__ is used properly # (rather than just passing to ``dict.__init__``) for entry, value in indict.iteritems(): self[entry] = value def _initialise(self): # the sequence of scalar values in this Section self.scalars = [] # the sequence of sections in this Section self.sections = [] # for comments :-) self.comments = {} self.inline_comments = {} # for the configspec self.configspec = {} self._order = [] self._configspec_comments = {} self._configspec_inline_comments = {} self._cs_section_comments = {} self._cs_section_inline_comments = {} # for defaults self.defaults = [] self.default_values = {} def _interpolate(self, key, value): try: # do we already have an interpolation engine? engine = self._interpolation_engine except AttributeError: # not yet: first time running _interpolate(), so pick the engine name = self.main.interpolation if name is True: # note that "if name:" would be incorrect here # backwards-compatibility: interpolation=True means use default name = DEFAULT_INTERPOLATION name = name.lower() # so that "Template", "template", etc. all work class_ = interpolation_engines.get(name, None) if class_ is None: # invalid value for self.main.interpolation self.main.interpolation = False return value else: # save reference to engine so we don't have to do this again engine = self._interpolation_engine = class_(self) # let the engine do the actual work return engine.interpolate(key, value) def __getitem__(self, key): """Fetch the item and do string interpolation.""" val = dict.__getitem__(self, key) if self.main.interpolation and isinstance(val, StringTypes): return self._interpolate(key, val) return val def __setitem__(self, key, value, unrepr=False): """ Correctly set a value. Making dictionary values Section instances. (We have to special case 'Section' instances - which are also dicts) Keys must be strings. Values need only be strings (or lists of strings) if ``main.stringify`` is set. `unrepr`` must be set when setting a value to a dictionary, without creating a new sub-section. """ if not isinstance(key, StringTypes): raise ValueError('The key "%s" is not a string.' % key) # add the comment if not self.comments.has_key(key): self.comments[key] = [] self.inline_comments[key] = '' # remove the entry from defaults if key in self.defaults: self.defaults.remove(key) # if isinstance(value, Section): if not self.has_key(key): self.sections.append(key) dict.__setitem__(self, key, value) elif isinstance(value, dict) and not unrepr: # First create the new depth level, # then create the section if not self.has_key(key): self.sections.append(key) new_depth = self.depth + 1 dict.__setitem__( self, key, Section( self, new_depth, self.main, indict=value, name=key)) else: if not self.has_key(key): self.scalars.append(key) if not self.main.stringify: if isinstance(value, StringTypes): pass elif isinstance(value, (list, tuple)): for entry in value: if not isinstance(entry, StringTypes): raise TypeError('Value is not a string "%s".' 
% entry) else: raise TypeError('Value is not a string "%s".' % value) dict.__setitem__(self, key, value) def __delitem__(self, key): """Remove items from the sequence when deleting.""" dict. __delitem__(self, key) if key in self.scalars: self.scalars.remove(key) else: self.sections.remove(key) del self.comments[key] del self.inline_comments[key] def get(self, key, default=None): """A version of ``get`` that doesn't bypass string interpolation.""" try: return self[key] except KeyError: return default def update(self, indict): """ A version of update that uses our ``__setitem__``. """ for entry in indict: self[entry] = indict[entry] def pop(self, key, *args): """ 'D.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised' """ val = dict.pop(self, key, *args) if key in self.scalars: del self.comments[key] del self.inline_comments[key] self.scalars.remove(key) elif key in self.sections: del self.comments[key] del self.inline_comments[key] self.sections.remove(key) if self.main.interpolation and isinstance(val, StringTypes): return self._interpolate(key, val) return val def popitem(self): """Pops the first (key,val)""" sequence = (self.scalars + self.sections) if not sequence: raise KeyError(": 'popitem(): dictionary is empty'") key = sequence[0] val = self[key] del self[key] return key, val def clear(self): """ A version of clear that also affects scalars/sections Also clears comments and configspec. Leaves other attributes alone : depth/main/parent are not affected """ dict.clear(self) self.scalars = [] self.sections = [] self.comments = {} self.inline_comments = {} self.configspec = {} def setdefault(self, key, default=None): """A version of setdefault that sets sequence if appropriate.""" try: return self[key] except KeyError: self[key] = default return self[key] def items(self): """D.items() -> list of D's (key, value) pairs, as 2-tuples""" return zip((self.scalars + self.sections), self.values()) def keys(self): """D.keys() -> list of D's keys""" return (self.scalars + self.sections) def values(self): """D.values() -> list of D's values""" return [self[key] for key in (self.scalars + self.sections)] def iteritems(self): """D.iteritems() -> an iterator over the (key, value) items of D""" return iter(self.items()) def iterkeys(self): """D.iterkeys() -> an iterator over the keys of D""" return iter((self.scalars + self.sections)) __iter__ = iterkeys def itervalues(self): """D.itervalues() -> an iterator over the values of D""" return iter(self.values()) def __repr__(self): """x.__repr__() <==> repr(x)""" return '{%s}' % ', '.join([('%s: %s' % (repr(key), repr(self[key]))) for key in (self.scalars + self.sections)]) __str__ = __repr__ __str__.__doc__ = "x.__str__() <==> str(x)" # Extra methods - not in a normal dictionary def dict(self): """ Return a deepcopy of self as a dictionary. All members that are ``Section`` instances are recursively turned to ordinary dictionaries - by calling their ``dict`` method. 
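        (In the doctest below, ``a`` is assumed to be an existing ConfigObj
        instance, as in the other doctests in this module.)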
>>> n = a.dict() >>> n == a 1 >>> n is a 0 """ newdict = {} for entry in self: this_entry = self[entry] if isinstance(this_entry, Section): this_entry = this_entry.dict() elif isinstance(this_entry, list): # create a copy rather than a reference this_entry = list(this_entry) elif isinstance(this_entry, tuple): # create a copy rather than a reference this_entry = tuple(this_entry) newdict[entry] = this_entry return newdict def merge(self, indict): """ A recursive update - useful for merging config files. >>> a = '''[section1] ... option1 = True ... [[subsection]] ... more_options = False ... # end of file'''.splitlines() >>> b = '''# File is user.ini ... [section1] ... option1 = False ... # end of file'''.splitlines() >>> c1 = ConfigObj(b) >>> c2 = ConfigObj(a) >>> c2.merge(c1) >>> c2 {'section1': {'option1': 'False', 'subsection': {'more_options': 'False'}}} """ for key, val in indict.items(): if (key in self and isinstance(self[key], dict) and isinstance(val, dict)): self[key].merge(val) else: self[key] = val def rename(self, oldkey, newkey): """ Change a keyname to another, without changing position in sequence. Implemented so that transformations can be made on keys, as well as on values. (used by encode and decode) Also renames comments. """ if oldkey in self.scalars: the_list = self.scalars elif oldkey in self.sections: the_list = self.sections else: raise KeyError('Key "%s" not found.' % oldkey) pos = the_list.index(oldkey) # val = self[oldkey] dict.__delitem__(self, oldkey) dict.__setitem__(self, newkey, val) the_list.remove(oldkey) the_list.insert(pos, newkey) comm = self.comments[oldkey] inline_comment = self.inline_comments[oldkey] del self.comments[oldkey] del self.inline_comments[oldkey] self.comments[newkey] = comm self.inline_comments[newkey] = inline_comment def walk(self, function, raise_errors=True, call_on_sections=False, **keywargs): """ Walk every member and call a function on the keyword and value. Return a dictionary of the return values If the function raises an exception, raise the errror unless ``raise_errors=False``, in which case set the return value to ``False``. Any unrecognised keyword arguments you pass to walk, will be pased on to the function you pass in. Note: if ``call_on_sections`` is ``True`` then - on encountering a subsection, *first* the function is called for the *whole* subsection, and then recurses into it's members. This means your function must be able to handle strings, dictionaries and lists. This allows you to change the key of subsections as well as for ordinary members. The return value when called on the whole subsection has to be discarded. See the encode and decode methods for examples, including functions. .. caution:: You can use ``walk`` to transform the names of members of a section but you mustn't add or delete members. >>> config = '''[XXXXsection] ... XXXXkey = XXXXvalue'''.splitlines() >>> cfg = ConfigObj(config) >>> cfg {'XXXXsection': {'XXXXkey': 'XXXXvalue'}} >>> def transform(section, key): ... val = section[key] ... newkey = key.replace('XXXX', 'CLIENT1') ... section.rename(key, newkey) ... if isinstance(val, (tuple, list, dict)): ... pass ... else: ... val = val.replace('XXXX', 'CLIENT1') ... 
section[newkey] = val >>> cfg.walk(transform, call_on_sections=True) {'CLIENT1section': {'CLIENT1key': None}} >>> cfg {'CLIENT1section': {'CLIENT1key': 'CLIENT1value'}} """ out = {} # scalars first for i in range(len(self.scalars)): entry = self.scalars[i] try: val = function(self, entry, **keywargs) # bound again in case name has changed entry = self.scalars[i] out[entry] = val except Exception: if raise_errors: raise else: entry = self.scalars[i] out[entry] = False # then sections for i in range(len(self.sections)): entry = self.sections[i] if call_on_sections: try: function(self, entry, **keywargs) except Exception: if raise_errors: raise else: entry = self.sections[i] out[entry] = False # bound again in case name has changed entry = self.sections[i] # previous result is discarded out[entry] = self[entry].walk( function, raise_errors=raise_errors, call_on_sections=call_on_sections, **keywargs) return out def decode(self, encoding): """ Decode all strings and values to unicode, using the specified encoding. Works with subsections and list values. Uses the ``walk`` method. Testing ``encode`` and ``decode``. >>> m = ConfigObj(a) >>> m.decode('ascii') >>> def testuni(val): ... for entry in val: ... if not isinstance(entry, unicode): ... print >> sys.stderr, type(entry) ... raise AssertionError, 'decode failed.' ... if isinstance(val[entry], dict): ... testuni(val[entry]) ... elif not isinstance(val[entry], unicode): ... raise AssertionError, 'decode failed.' >>> testuni(m) >>> m.encode('ascii') >>> a == m 1 """ warn('use of ``decode`` is deprecated.', DeprecationWarning) def decode(section, key, encoding=encoding, warn=True): """ """ val = section[key] if isinstance(val, (list, tuple)): newval = [] for entry in val: newval.append(entry.decode(encoding)) elif isinstance(val, dict): newval = val else: newval = val.decode(encoding) newkey = key.decode(encoding) section.rename(key, newkey) section[newkey] = newval # using ``call_on_sections`` allows us to modify section names self.walk(decode, call_on_sections=True) def encode(self, encoding): """ Encode all strings and values from unicode, using the specified encoding. Works with subsections and list values. Uses the ``walk`` method. """ warn('use of ``encode`` is deprecated.', DeprecationWarning) def encode(section, key, encoding=encoding): """ """ val = section[key] if isinstance(val, (list, tuple)): newval = [] for entry in val: newval.append(entry.encode(encoding)) elif isinstance(val, dict): newval = val else: newval = val.encode(encoding) newkey = key.encode(encoding) section.rename(key, newkey) section[newkey] = newval self.walk(encode, call_on_sections=True) def istrue(self, key): """A deprecated version of ``as_bool``.""" warn('use of ``istrue`` is deprecated. Use ``as_bool`` method ' 'instead.', DeprecationWarning) return self.as_bool(key) def as_bool(self, key): """ Accepts a key as input. The corresponding value must be a string or the objects (``True`` or 1) or (``False`` or 0). We allow 0 and 1 to retain compatibility with Python 2.2. If the string is one of ``True``, ``On``, ``Yes``, or ``1`` it returns ``True``. If the string is one of ``False``, ``Off``, ``No``, or ``0`` it returns ``False``. ``as_bool`` is not case sensitive. Any other input will raise a ``ValueError``. 
>>> a = ConfigObj() >>> a['a'] = 'fish' >>> a.as_bool('a') Traceback (most recent call last): ValueError: Value "fish" is neither True nor False >>> a['b'] = 'True' >>> a.as_bool('b') 1 >>> a['b'] = 'off' >>> a.as_bool('b') 0 """ val = self[key] if val is True: return True elif val is False: return False else: try: if not isinstance(val, StringTypes): # TODO: Why do we raise a KeyError here? raise KeyError() else: return self.main._bools[val.lower()] except KeyError: raise ValueError('Value "%s" is neither True nor False' % val) def as_int(self, key): """ A convenience method which coerces the specified value to an integer. If the value is an invalid literal for ``int``, a ``ValueError`` will be raised. >>> a = ConfigObj() >>> a['a'] = 'fish' >>> a.as_int('a') Traceback (most recent call last): ValueError: invalid literal for int(): fish >>> a['b'] = '1' >>> a.as_int('b') 1 >>> a['b'] = '3.2' >>> a.as_int('b') Traceback (most recent call last): ValueError: invalid literal for int(): 3.2 """ return int(self[key]) def as_float(self, key): """ A convenience method which coerces the specified value to a float. If the value is an invalid literal for ``float``, a ``ValueError`` will be raised. >>> a = ConfigObj() >>> a['a'] = 'fish' >>> a.as_float('a') Traceback (most recent call last): ValueError: invalid literal for float(): fish >>> a['b'] = '1' >>> a.as_float('b') 1.0 >>> a['b'] = '3.2' >>> a.as_float('b') 3.2000000000000002 """ return float(self[key]) def restore_default(self, key): """ Restore (and return) default value for the specified key. This method will only work for a ConfigObj that was created with a configspec and has been validated. If there is no default value for this key, ``KeyError`` is raised. """ default = self.default_values[key] dict.__setitem__(self, key, default) if key not in self.defaults: self.defaults.append(key) return default def restore_defaults(self): """ Recursively restore default values to all members that have them. This method will only work for a ConfigObj that was created with a configspec and has been validated. It doesn't delete or modify entries without default values. """ for key in self.default_values: self.restore_default(key) for section in self.sections: self[section].restore_defaults() class ConfigObj(Section): """An object to read, create, and write config files.""" _keyword = re.compile(r'''^ # line start (\s*) # indentation ( # keyword (?:".*?")| # double quotes (?:'.*?')| # single quotes (?:[^'"=].*?) # no quotes ) \s*=\s* # divider (.*) # value (including list values and comments) $ # line end ''', re.VERBOSE) _sectionmarker = re.compile(r'''^ (\s*) # 1: indentation ((?:\[\s*)+) # 2: section marker open ( # 3: section name open (?:"\s*\S.*?\s*")| # at least one non-space with double quotes (?:'\s*\S.*?\s*')| # at least one non-space with single quotes (?:[^'"\s].*?) # at least one non-space unquoted ) # section name close ((?:\s*\])+) # 4: section marker close \s*(\#.*)? # 5: optional comment $''', re.VERBOSE) # this regexp pulls list values out as a single string # or single values and comments # FIXME: this regex adds a '' to the end of comma terminated lists # workaround in ``_handle_value`` _valueexp = re.compile(r'''^ (?: (?: ( (?: (?: (?:".*?")| # double quotes (?:'.*?')| # single quotes (?:[^'",\#][^,\#]*?) # unquoted ) \s*,\s* # comma )* # match all list items ending in a comma (if any) ) ( (?:".*?")| # double quotes (?:'.*?')| # single quotes (?:[^'",\#\s][^,]*?)| # unquoted (?:(? 
1: msg = "Parsing failed with several errors.\nFirst error %s" % info error = ConfigObjError(msg) else: error = self._errors[0] # set the errors attribute; it's a list of tuples: # (error_type, message, line_number) error.errors = self._errors # set the config attribute error.config = self raise error # delete private attributes del self._errors if configspec is None: self.configspec = None else: self._handle_configspec(configspec) def _initialise(self, options=None): if options is None: options = OPTION_DEFAULTS # initialise a few variables self.filename = None self._errors = [] self.raise_errors = options['raise_errors'] self.interpolation = options['interpolation'] self.list_values = options['list_values'] self.create_empty = options['create_empty'] self.file_error = options['file_error'] self.stringify = options['stringify'] self.indent_type = options['indent_type'] self.encoding = options['encoding'] self.default_encoding = options['default_encoding'] self.BOM = False self.newlines = None self.write_empty_values = options['write_empty_values'] self.unrepr = options['unrepr'] self.initial_comment = [] self.final_comment = [] self.configspec = {} # Clear section attributes as well Section._initialise(self) def __repr__(self): return ('ConfigObj({%s})' % ', '.join([('%s: %s' % (repr(key), repr(self[key]))) for key in (self.scalars + self.sections)])) def _handle_bom(self, infile): """ Handle any BOM, and decode if necessary. If an encoding is specified, that *must* be used - but the BOM should still be removed (and the BOM attribute set). (If the encoding is wrongly specified, then a BOM for an alternative encoding won't be discovered or removed.) If an encoding is not specified, UTF8 or UTF16 BOM will be detected and removed. The BOM attribute will be set. UTF16 will be decoded to unicode. NOTE: This method must not be called with an empty ``infile``. Specifying the *wrong* encoding is likely to cause a ``UnicodeDecodeError``. ``infile`` must always be returned as a list of lines, but may be passed in as a single string. """ if ((self.encoding is not None) and (self.encoding.lower() not in BOM_LIST)): # No need to check for a BOM # the encoding specified doesn't have one # just decode return self._decode(infile, self.encoding) if isinstance(infile, (list, tuple)): line = infile[0] else: line = infile if self.encoding is not None: # encoding explicitly supplied # And it could have an associated BOM # TODO: if encoding is just UTF16 - we ought to check for both # TODO: big endian and little endian versions. 
enc = BOM_LIST[self.encoding.lower()] if enc == 'utf_16': # For UTF16 we try big endian and little endian for BOM, (encoding, final_encoding) in BOMS.items(): if not final_encoding: # skip UTF8 continue if infile.startswith(BOM): ### BOM discovered ##self.BOM = True # Don't need to remove BOM return self._decode(infile, encoding) # If we get this far, will *probably* raise a DecodeError # As it doesn't appear to start with a BOM return self._decode(infile, self.encoding) # Must be UTF8 BOM = BOM_SET[enc] if not line.startswith(BOM): return self._decode(infile, self.encoding) newline = line[len(BOM):] # BOM removed if isinstance(infile, (list, tuple)): infile[0] = newline else: infile = newline self.BOM = True return self._decode(infile, self.encoding) # No encoding specified - so we need to check for UTF8/UTF16 for BOM, (encoding, final_encoding) in BOMS.items(): if not line.startswith(BOM): continue else: # BOM discovered self.encoding = final_encoding if not final_encoding: self.BOM = True # UTF8 # remove BOM newline = line[len(BOM):] if isinstance(infile, (list, tuple)): infile[0] = newline else: infile = newline # UTF8 - don't decode if isinstance(infile, StringTypes): return infile.splitlines(True) else: return infile # UTF16 - have to decode return self._decode(infile, encoding) # No BOM discovered and no encoding specified, just return if isinstance(infile, StringTypes): # infile read from a file will be a single string return infile.splitlines(True) return infile def _a_to_u(self, aString): """Decode ASCII strings to unicode if a self.encoding is specified.""" if self.encoding: return aString.decode('ascii') else: return aString def _decode(self, infile, encoding): """ Decode infile to unicode. Using the specified encoding. if is a string, it also needs converting to a list. """ if isinstance(infile, StringTypes): # can't be unicode # NOTE: Could raise a ``UnicodeDecodeError`` return infile.decode(encoding).splitlines(True) for i, line in enumerate(infile): if not isinstance(line, unicode): # NOTE: The isinstance test here handles mixed lists of unicode/string # NOTE: But the decode will break on any non-string values # NOTE: Or could raise a ``UnicodeDecodeError`` infile[i] = line.decode(encoding) return infile def _decode_element(self, line): """Decode element to unicode if necessary.""" if not self.encoding: return line if isinstance(line, str) and self.default_encoding: return line.decode(self.default_encoding) return line def _str(self, value): """ Used by ``stringify`` within validate, to turn non-string values into strings. """ if not isinstance(value, StringTypes): return str(value) else: return value def _parse(self, infile): """Actually parse the config file.""" temp_list_values = self.list_values if self.unrepr: self.list_values = False comment_list = [] done_start = False this_section = self maxline = len(infile) - 1 cur_index = -1 reset_comment = False while cur_index < maxline: if reset_comment: comment_list = [] cur_index += 1 line = infile[cur_index] sline = line.strip() # do we have anything on the line ? 
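            # Illustrative sketch (comment only): the parser distinguishes
            # three line shapes, e.g.:
            #     '# a comment' or ''        -> buffered into comment_list
            #     '[section]' / '[[sub]]'    -> matched by _sectionmarker below
            #     'key = value  # note'      -> matched by _keyword below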
if not sline or sline.startswith('#'): reset_comment = False comment_list.append(line) continue if not done_start: # preserve initial comment self.initial_comment = comment_list comment_list = [] done_start = True reset_comment = True # first we check if it's a section marker mat = self._sectionmarker.match(line) if mat is not None: # is a section line (indent, sect_open, sect_name, sect_close, comment) = mat.groups() if indent and (self.indent_type is None): self.indent_type = indent cur_depth = sect_open.count('[') if cur_depth != sect_close.count(']'): self._handle_error("Cannot compute the section depth at line %s.", NestingError, infile, cur_index) continue if cur_depth < this_section.depth: # the new section is dropping back to a previous level try: parent = self._match_depth(this_section, cur_depth).parent except SyntaxError: self._handle_error("Cannot compute nesting level at line %s.", NestingError, infile, cur_index) continue elif cur_depth == this_section.depth: # the new section is a sibling of the current section parent = this_section.parent elif cur_depth == this_section.depth + 1: # the new section is a child the current section parent = this_section else: self._handle_error("Section too nested at line %s.", NestingError, infile, cur_index) sect_name = self._unquote(sect_name) if parent.has_key(sect_name): self._handle_error('Duplicate section name at line %s.', DuplicateError, infile, cur_index) continue # create the new section this_section = Section( parent, cur_depth, self, name=sect_name) parent[sect_name] = this_section parent.inline_comments[sect_name] = comment parent.comments[sect_name] = comment_list continue # # it's not a section marker, # so it should be a valid ``key = value`` line mat = self._keyword.match(line) if mat is None: # it neither matched as a keyword # or a section marker self._handle_error( 'Invalid line at line "%s".', ParseError, infile, cur_index) else: # is a keyword value # value will include any inline comment (indent, key, value) = mat.groups() if indent and (self.indent_type is None): self.indent_type = indent # check for a multiline value if value[:3] in ['"""', "'''"]: try: (value, comment, cur_index) = self._multiline( value, infile, cur_index, maxline) except SyntaxError: self._handle_error( 'Parse error in value at line %s.', ParseError, infile, cur_index) continue else: if self.unrepr: comment = '' try: value = unrepr(value) except Exception, e: if type(e) == UnknownType: msg = 'Unknown name or type in value at line %s.' else: msg = 'Parse error in value at line %s.' self._handle_error(msg, UnreprError, infile, cur_index) continue else: if self.unrepr: comment = '' try: value = unrepr(value) except Exception, e: if isinstance(e, UnknownType): msg = 'Unknown name or type in value at line %s.' else: msg = 'Parse error in value at line %s.' self._handle_error(msg, UnreprError, infile, cur_index) continue else: # extract comment and lists try: (value, comment) = self._handle_value(value) except SyntaxError: self._handle_error( 'Parse error in value at line %s.', ParseError, infile, cur_index) continue # key = self._unquote(key) if this_section.has_key(key): self._handle_error( 'Duplicate keyword name at line %s.', DuplicateError, infile, cur_index) continue # add the key. 
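                # Illustrative sketch (comment only): by this point the key
                # has been unquoted and the value/comment pair extracted, e.g.:
                #     self._unquote('"my key"')            -> 'my key'
                #     self._handle_value('a, b  # note')   -> (['a', 'b'], '# note')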
# we set unrepr because if we have got this far we will never # be creating a new section this_section.__setitem__(key, value, unrepr=True) this_section.inline_comments[key] = comment this_section.comments[key] = comment_list continue # if self.indent_type is None: # no indentation used, set the type accordingly self.indent_type = '' # preserve the final comment if not self and not self.initial_comment: self.initial_comment = comment_list elif not reset_comment: self.final_comment = comment_list self.list_values = temp_list_values def _match_depth(self, sect, depth): """ Given a section and a depth level, walk back through the sections parents to see if the depth level matches a previous section. Return a reference to the right section, or raise a SyntaxError. """ while depth < sect.depth: if sect is sect.parent: # we've reached the top level already raise SyntaxError() sect = sect.parent if sect.depth == depth: return sect # shouldn't get here raise SyntaxError() def _handle_error(self, text, ErrorClass, infile, cur_index): """ Handle an error according to the error settings. Either raise the error or store it. The error will have occured at ``cur_index`` """ line = infile[cur_index] cur_index += 1 message = text % cur_index error = ErrorClass(message, cur_index, line) if self.raise_errors: # raise the error - parsing stops here raise error # store the error # reraise when parsing has finished self._errors.append(error) def _unquote(self, value): """Return an unquoted version of a value""" if (value[0] == value[-1]) and (value[0] in ('"', "'")): value = value[1:-1] return value def _quote(self, value, multiline=True): """ Return a safely quoted version of a value. Raise a ConfigObjError if the value cannot be safely quoted. If multiline is ``True`` (default) then use triple quotes if necessary. Don't quote values that don't need it. Recursively quote members of a list and return a comma joined list. Multiline is ``False`` for lists. Obey list syntax for empty and single member lists. If ``list_values=False`` then the value is only quoted if it contains a ``\n`` (is multiline) or '#'. If ``write_empty_values`` is set, and the value is an empty string, it won't be quoted. """ if multiline and self.write_empty_values and value == '': # Only if multiline is set, so that it is used for values not # keys, and not values that are part of a list return '' if multiline and isinstance(value, (list, tuple)): if not value: return ',' elif len(value) == 1: return self._quote(value[0], multiline=False) + ',' return ', '.join([self._quote(val, multiline=False) for val in value]) if not isinstance(value, StringTypes): if self.stringify: value = str(value) else: raise TypeError('Value "%s" is not a string.' % value) if not value: return '""' no_lists_no_quotes = not self.list_values and '\n' not in value and '#' not in value need_triple = multiline and ((("'" in value) and ('"' in value)) or ('\n' in value )) hash_triple_quote = multiline and not need_triple and ("'" in value) and ('"' in value) and ('#' in value) check_for_single = (no_lists_no_quotes or not need_triple) and not hash_triple_quote if check_for_single: if not self.list_values: # we don't quote if ``list_values=False`` quot = noquot # for normal values either single or double quotes will do elif '\n' in value: # will only happen if multiline is off - e.g. '\n' in key raise ConfigObjError('Value "%s" cannot be safely quoted.' 
% value) elif ((value[0] not in wspace_plus) and (value[-1] not in wspace_plus) and (',' not in value)): quot = noquot else: quot = self._get_single_quote(value) else: # if value has '\n' or "'" *and* '"', it will need triple quotes quot = self._get_triple_quote(value) if quot == noquot and '#' in value and self.list_values: quot = self._get_single_quote(value) return quot % value def _get_single_quote(self, value): if ("'" in value) and ('"' in value): raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) elif '"' in value: quot = squot else: quot = dquot return quot def _get_triple_quote(self, value): if (value.find('"""') != -1) and (value.find("'''") != -1): raise ConfigObjError('Value "%s" cannot be safely quoted.' % value) if value.find('"""') == -1: quot = tdquot else: quot = tsquot return quot def _handle_value(self, value): """ Given a value string, unquote, remove comment, handle lists. (including empty and single member lists) """ # do we look for lists in values ? if not self.list_values: mat = self._nolistvalue.match(value) if mat is None: raise SyntaxError() # NOTE: we don't unquote here return mat.groups() # mat = self._valueexp.match(value) if mat is None: # the value is badly constructed, probably badly quoted, # or an invalid list raise SyntaxError() (list_values, single, empty_list, comment) = mat.groups() if (list_values == '') and (single is None): # change this if you want to accept empty values raise SyntaxError() # NOTE: note there is no error handling from here if the regex # is wrong: then incorrect values will slip through if empty_list is not None: # the single comma - meaning an empty list return ([], comment) if single is not None: # handle empty values if list_values and not single: # FIXME: the '' is a workaround because our regex now matches # '' at the end of a list if it has a trailing comma single = None else: single = single or '""' single = self._unquote(single) if list_values == '': # not a list value return (single, comment) the_list = self._listvalueexp.findall(list_values) the_list = [self._unquote(val) for val in the_list] if single is not None: the_list += [single] return (the_list, comment) def _multiline(self, value, infile, cur_index, maxline): """Extract the value, where we are in a multiline situation.""" quot = value[:3] newvalue = value[3:] single_line = self._triple_quote[quot][0] multi_line = self._triple_quote[quot][1] mat = single_line.match(value) if mat is not None: retval = list(mat.groups()) retval.append(cur_index) return retval elif newvalue.find(quot) != -1: # somehow the triple quote is missing raise SyntaxError() # while cur_index < maxline: cur_index += 1 newvalue += '\n' line = infile[cur_index] if line.find(quot) == -1: newvalue += line else: # end of multiline, process it break else: # we've got to the end of the config, oops... raise SyntaxError() mat = multi_line.match(line) if mat is None: # a badly formed line raise SyntaxError() (value, comment) = mat.groups() return (newvalue + value, comment, cur_index) def _handle_configspec(self, configspec): """Parse the configspec.""" # FIXME: Should we check that the configspec was created with the # correct settings ? (i.e. ``list_values=False``) if not isinstance(configspec, ConfigObj): try: configspec = ConfigObj(configspec, raise_errors=True, file_error=True, list_values=False) except ConfigObjError, e: # FIXME: Should these errors have a reference # to the already parsed ConfigObj ? 
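                # Illustrative note (comment only): a configspec is itself
                # parsed as a ConfigObj with ``list_values=False``, so a
                # spec line such as, e.g.:
                #     port = integer(0, 65535)
                # is kept verbatim for the validator; a parse failure
                # surfaces as the ConfigspecError raised below.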
raise ConfigspecError('Parsing configspec failed: %s' % e) except IOError, e: raise IOError('Reading configspec failed: %s' % e) self._set_configspec_value(configspec, self) def _set_configspec_value(self, configspec, section): """Used to recursively set configspec values.""" if '__many__' in configspec.sections: section.configspec['__many__'] = configspec['__many__'] if len(configspec.sections) > 1: # FIXME: can we supply any useful information here ? raise RepeatSectionError() if hasattr(configspec, 'initial_comment'): section._configspec_initial_comment = configspec.initial_comment section._configspec_final_comment = configspec.final_comment section._configspec_encoding = configspec.encoding section._configspec_BOM = configspec.BOM section._configspec_newlines = configspec.newlines section._configspec_indent_type = configspec.indent_type for entry in configspec.scalars: section._configspec_comments[entry] = configspec.comments[entry] section._configspec_inline_comments[entry] = configspec.inline_comments[entry] section.configspec[entry] = configspec[entry] section._order.append(entry) for entry in configspec.sections: if entry == '__many__': continue section._cs_section_comments[entry] = configspec.comments[entry] section._cs_section_inline_comments[entry] = configspec.inline_comments[entry] if not section.has_key(entry): section[entry] = {} self._set_configspec_value(configspec[entry], section[entry]) def _handle_repeat(self, section, configspec): """Dynamically assign configspec for repeated section.""" try: section_keys = configspec.sections scalar_keys = configspec.scalars except AttributeError: section_keys = [entry for entry in configspec if isinstance(configspec[entry], dict)] scalar_keys = [entry for entry in configspec if not isinstance(configspec[entry], dict)] if '__many__' in section_keys and len(section_keys) > 1: # FIXME: can we supply any useful information here ? raise RepeatSectionError() scalars = {} sections = {} for entry in scalar_keys: val = configspec[entry] scalars[entry] = val for entry in section_keys: val = configspec[entry] if entry == '__many__': scalars[entry] = val continue sections[entry] = val section.configspec = scalars for entry in sections: if not section.has_key(entry): section[entry] = {} self._handle_repeat(section[entry], sections[entry]) def _write_line(self, indent_string, entry, this_entry, comment): """Write an individual line, for the write method""" # NOTE: the calls to self._quote here handles non-StringType values. 
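        # Illustrative sketch (comment only) of the quoting assumed here:
        #     self._quote('plain')        ->  plain
        #     self._quote('a, b')         ->  "a, b"    (comma forces quotes)
        #     self._quote(['a', 'b'])     ->  a, b      (list joined on output)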
if not self.unrepr: val = self._decode_element(self._quote(this_entry)) else: val = repr(this_entry) return '%s%s%s%s%s' % (indent_string, self._decode_element(self._quote(entry, multiline=False)), self._a_to_u(' = '), val, self._decode_element(comment)) def _write_marker(self, indent_string, depth, entry, comment): """Write a section marker line""" return '%s%s%s%s%s' % (indent_string, self._a_to_u('[' * depth), self._quote(self._decode_element(entry), multiline=False), self._a_to_u(']' * depth), self._decode_element(comment)) def _handle_comment(self, comment): """Deal with a comment.""" if not comment: return '' start = self.indent_type if not comment.startswith('#'): start += self._a_to_u(' # ') return (start + comment) # Public methods def write(self, outfile=None, section=None): """ Write the current ConfigObj as a file tekNico: FIXME: use StringIO instead of real files >>> filename = a.filename >>> a.filename = 'test.ini' >>> a.write() >>> a.filename = filename >>> a == ConfigObj('test.ini', raise_errors=True) 1 """ if self.indent_type is None: # this can be true if initialised from a dictionary self.indent_type = DEFAULT_INDENT_TYPE out = [] cs = self._a_to_u('#') csp = self._a_to_u('# ') if section is None: int_val = self.interpolation self.interpolation = False section = self for line in self.initial_comment: line = self._decode_element(line) stripped_line = line.strip() if stripped_line and not stripped_line.startswith(cs): line = csp + line out.append(line) indent_string = self.indent_type * section.depth for entry in (section.scalars + section.sections): if entry in section.defaults: # don't write out default values continue for comment_line in section.comments[entry]: comment_line = self._decode_element(comment_line.lstrip()) if comment_line and not comment_line.startswith(cs): comment_line = csp + comment_line out.append(indent_string + comment_line) this_entry = section[entry] comment = self._handle_comment(section.inline_comments[entry]) if isinstance(this_entry, dict): # a section out.append(self._write_marker( indent_string, this_entry.depth, entry, comment)) out.extend(self.write(section=this_entry)) else: out.append(self._write_line( indent_string, entry, this_entry, comment)) if section is self: for line in self.final_comment: line = self._decode_element(line) stripped_line = line.strip() if stripped_line and not stripped_line.startswith(cs): line = csp + line out.append(line) self.interpolation = int_val if section is not self: return out if (self.filename is None) and (outfile is None): # output a list of lines # might need to encode # NOTE: This will *screw* UTF16, each line will start with the BOM if self.encoding: out = [l.encode(self.encoding) for l in out] if (self.BOM and ((self.encoding is None) or (BOM_LIST.get(self.encoding.lower()) == 'utf_8'))): # Add the UTF8 BOM if not out: out.append('') out[0] = BOM_UTF8 + out[0] return out # Turn the list to a string, joined with correct newlines newline = self.newlines or os.linesep output = self._a_to_u(newline).join(out) if self.encoding: output = output.encode(self.encoding) if self.BOM and ((self.encoding is None) or match_utf8(self.encoding)): # Add the UTF8 BOM output = BOM_UTF8 + output if not output.endswith(newline): output += newline if outfile is not None: outfile.write(output) else: h = open(self.filename, 'wb') h.write(output) h.close() def validate(self, validator, preserve_errors=False, copy=False, section=None): """ Test the ConfigObj against a configspec. 
It uses the ``validator`` object from *validate.py*. To run ``validate`` on the current ConfigObj, call: :: test = config.validate(validator) (Normally having previously passed in the configspec when the ConfigObj was created - you can dynamically assign a dictionary of checks to the ``configspec`` attribute of a section though). It returns ``True`` if everything passes, or a dictionary of pass/fails (True/False). If every member of a subsection passes, it will just have the value ``True``. (It also returns ``False`` if all members fail). In addition, it converts the values from strings to their native types if their checks pass (and ``stringify`` is set). If ``preserve_errors`` is ``True`` (``False`` is default) then instead of a marking a fail with a ``False``, it will preserve the actual exception object. This can contain info about the reason for failure. For example the ``VdtValueTooSmallError`` indicates that the value supplied was too small. If a value (or section) is missing it will still be marked as ``False``. You must have the validate module to use ``preserve_errors=True``. You can then use the ``flatten_errors`` function to turn your nested results dictionary into a flattened list of failures - useful for displaying meaningful error messages. """ if section is None: if self.configspec is None: raise ValueError('No configspec supplied.') if preserve_errors: # We do this once to remove a top level dependency on the validate module # Which makes importing configobj faster from validate import VdtMissingValue self._vdtMissingValue = VdtMissingValue section = self # spec_section = section.configspec if copy and hasattr(section, '_configspec_initial_comment'): section.initial_comment = section._configspec_initial_comment section.final_comment = section._configspec_final_comment section.encoding = section._configspec_encoding section.BOM = section._configspec_BOM section.newlines = section._configspec_newlines section.indent_type = section._configspec_indent_type if '__many__' in section.configspec: many = spec_section['__many__'] # dynamically assign the configspecs # for the sections below for entry in section.sections: self._handle_repeat(section[entry], many) # out = {} ret_true = True ret_false = True order = [k for k in section._order if k in spec_section] order += [k for k in spec_section if k not in order] for entry in order: if entry == '__many__': continue if (not entry in section.scalars) or (entry in section.defaults): # missing entries # or entries from defaults missing = True val = None if copy and not entry in section.scalars: # copy comments section.comments[entry] = ( section._configspec_comments.get(entry, [])) section.inline_comments[entry] = ( section._configspec_inline_comments.get(entry, '')) # else: missing = False val = section[entry] try: check = validator.check(spec_section[entry], val, missing=missing ) except validator.baseErrorClass, e: if not preserve_errors or isinstance(e, self._vdtMissingValue): out[entry] = False else: # preserve the error out[entry] = e ret_false = False ret_true = False else: try: section.default_values.pop(entry, None) except AttributeError: # For Python 2.2 compatibility try: del section.default_values[entry] except KeyError: pass if hasattr(validator, 'get_default_value'): try: section.default_values[entry] = validator.get_default_value(spec_section[entry]) except KeyError: # No default pass ret_false = False out[entry] = True if self.stringify or missing: # if we are doing type conversion # or the value is a supplied default if 
not self.stringify: if isinstance(check, (list, tuple)): # preserve lists check = [self._str(item) for item in check] elif missing and check is None: # convert the None from a default to a '' check = '' else: check = self._str(check) if (check != val) or missing: section[entry] = check if not copy and missing and entry not in section.defaults: section.defaults.append(entry) # Missing sections will have been created as empty ones when the # configspec was read. for entry in section.sections: # FIXME: this means DEFAULT is not copied in copy mode if section is self and entry == 'DEFAULT': continue if copy: section.comments[entry] = section._cs_section_comments[entry] section.inline_comments[entry] = ( section._cs_section_inline_comments[entry]) check = self.validate(validator, preserve_errors=preserve_errors, copy=copy, section=section[entry]) out[entry] = check if check is False: ret_true = False elif check is True: ret_false = False else: ret_true = False ret_false = False # if ret_true: return True elif ret_false: return False return out def reset(self): """Clear ConfigObj instance and restore to 'freshly created' state.""" self.clear() self._initialise() # FIXME: Should be done by '_initialise', but ConfigObj constructor (and reload) # requires an empty dictionary self.configspec = None # Just to be sure ;-) self._original_configspec = None def reload(self): """ Reload a ConfigObj from file. This method raises a ``ReloadError`` if the ConfigObj doesn't have a filename attribute pointing to a file. """ if not isinstance(self.filename, StringTypes): raise ReloadError() filename = self.filename current_options = {} for entry in OPTION_DEFAULTS: if entry == 'configspec': continue current_options[entry] = getattr(self, entry) configspec = self._original_configspec current_options['configspec'] = configspec self.clear() self._initialise(current_options) self._load(filename, configspec) class SimpleVal(object): """ A simple validator. Can be used to check that all members expected are present. To use it, provide a configspec with all your members in (the value given will be ignored). Pass an instance of ``SimpleVal`` to the ``validate`` method of your ``ConfigObj``. ``validate`` will return ``True`` if all members are present, or a dictionary with True/False meaning present/missing. (Whole missing sections will be replaced with ``False``) """ def __init__(self): self.baseErrorClass = ConfigObjError def check(self, check, member, missing=False): """A dummy check method, always returns the value unchanged.""" if missing: raise self.baseErrorClass() return member # Check / processing functions for options def flatten_errors(cfg, res, levels=None, results=None): """ An example function that will turn a nested dictionary of results (as returned by ``ConfigObj.validate``) into a flat list. ``cfg`` is the ConfigObj instance being checked, ``res`` is the results dictionary returned by ``validate``. (This is a recursive function, so you shouldn't use the ``levels`` or ``results`` arguments - they are used by the function. Returns a list of keys that failed. Each member of the list is a tuple : :: ([list of sections...], key, result) If ``validate`` was called with ``preserve_errors=False`` (the default) then ``result`` will always be ``False``. *list of sections* is a flattened list of sections that the key was found in. If the section was missing then key will be ``None``. If the value (or section) was missing then ``result`` will be ``False``. 
If ``validate`` was called with ``preserve_errors=True`` and a value was present, but failed the check, then ``result`` will be the exception object returned. You can use this as a string that describes the failure. For example *The value "3" is of the wrong type*. >>> import validate >>> vtor = validate.Validator() >>> my_ini = ''' ... option1 = True ... [section1] ... option1 = True ... [section2] ... another_option = Probably ... [section3] ... another_option = True ... [[section3b]] ... value = 3 ... value2 = a ... value3 = 11 ... ''' >>> my_cfg = ''' ... option1 = boolean() ... option2 = boolean() ... option3 = boolean(default=Bad_value) ... [section1] ... option1 = boolean() ... option2 = boolean() ... option3 = boolean(default=Bad_value) ... [section2] ... another_option = boolean() ... [section3] ... another_option = boolean() ... [[section3b]] ... value = integer ... value2 = integer ... value3 = integer(0, 10) ... [[[section3b-sub]]] ... value = string ... [section4] ... another_option = boolean() ... ''' >>> cs = my_cfg.split('\\n') >>> ini = my_ini.split('\\n') >>> cfg = ConfigObj(ini, configspec=cs) >>> res = cfg.validate(vtor, preserve_errors=True) >>> errors = [] >>> for entry in flatten_errors(cfg, res): ... section_list, key, error = entry ... section_list.insert(0, '[root]') ... if key is not None: ... section_list.append(key) ... else: ... section_list.append('[missing]') ... section_string = ', '.join(section_list) ... errors.append((section_string, ' = ', error)) >>> errors.sort() >>> for entry in errors: ... print entry[0], entry[1], (entry[2] or 0) [root], option2 = 0 [root], option3 = the value "Bad_value" is of the wrong type. [root], section1, option2 = 0 [root], section1, option3 = the value "Bad_value" is of the wrong type. [root], section2, another_option = the value "Probably" is of the wrong type. [root], section3, section3b, section3b-sub, [missing] = 0 [root], section3, section3b, value2 = the value "a" is of the wrong type. [root], section3, section3b, value3 = the value "11" is too big. 
[root], section4, [missing] = 0 """ if levels is None: # first time called levels = [] results = [] if res is True: return results if res is False: results.append((levels[:], None, False)) if levels: levels.pop() return results for (key, val) in res.items(): if val == True: continue if isinstance(cfg.get(key), dict): # Go down one level levels.append(key) flatten_errors(cfg[key], val, levels, results) continue results.append((levels[:], key, val)) # # Go up one level if levels: levels.pop() # return results """*A programming language is a medium of expression.* - Paul Graham""" pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/parsers/0000755000175000017500000000000011757531667023247 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/parsers/base.py0000644000175000017500000005527311757531137024537 0ustar ryngerynge""" Common code for NetLogger parsers """ __author__ = 'Dan Gunter ' __rcsid__ = '$Id: base.py 28287 2011-08-18 03:42:53Z dang $' import calendar from netlogger.configobj import ConfigObj, Section import glob import imp from itertools import starmap import os from Queue import Queue, Empty import re from select import select import sys import time # try: from pyparsing import Word, alphanums, CharsNotIn, ZeroOrMore from pyparsing import Group, Literal from pyparsing import StringEnd, White, QuotedString, ParseException from pyparsing import Each, OneOrMore, Optional, oneOf HAVE_PYPARSING = True except ImportError: HAVE_PYPARSING = False # from netlogger import nldate from netlogger.nllog import DoesLogging from netlogger import nlapi from netlogger.nlapi import Log, Level from netlogger.nlapi import TS_FIELD, EVENT_FIELD, HASH_FIELD from netlogger.parsers import nlreadline from netlogger.util import hash_event # Special result code for parsers to return # when they 'on purpose' skip a line LINE_SKIPPED = 999 try: from hashlib import md5 md5_new = md5 except ImportError: import md5 md5_new = md5.new def getGuid(*strings): m5 = md5_new() for s in strings: m5.update(s) t = m5.hexdigest() guid = "%s-%s-%s-%s-%s" % ( t[:8], t[8:12], t[12:16], t[16:20], t[20:]) return guid def autoParseValue(vstr): try: value = int(vstr) except ValueError: try: value = float(vstr) except ValueError: value = vstr return value def parse_ts(ts): "Parse a netlogger timestamp" # until 2033, a '1' in the first place means a float if ts[0] == '1': return float(ts) ts, subs = ts.split('.') subs = float('.' + subs[:-1]) return calendar.timegm(time.strptime(ts, r'%Y-%m-%dT%H:%M:%S')) + subs parseDate = parse_ts def tolerateBlanksAndComments(line=None, error=None, linenum=0): """Callback function fitting the signature of the callback expected by NLBaseParser.parseStream() that re-raises the error unless the line is empty or starts with a hash character, in which case it does nothing. """ if len(line) <= 1 or line[0] == '#': pass else: raise(error) # Parse BP log lines with Regular Expressions class BPError(ValueError): """ Exception class to indicate violations from the logging best practices standard. """ def __init__(self, lineno, msg): """Create error object. Arguments: lineno - The line number on which the error occured. msg - The error message. 
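
        For example (illustrative):

        >>> str(BPError(12, 'unquoted value'))
        'Parser error on line 12: unquoted value'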
""" self.lineno = lineno self.msg = msg def __str__(self): return "Parser error on line %i: %s" % (self.lineno, self.msg) class BPValidationError(BPError): def __init__(self, msg): self.msg = msg def __str__(self): return "Validation error: " + str(self.msg) class BPParseError(BPError): def __init__(self, msg): self.msg = msg def __str__(self): return "Parse error: " + str(self.msg) """ Regular expression that captures names and data """ BP_EXPR = re.compile(r""" (?: \s* # leading whitespace ([0-9a-zA-Z_.\-]+) # Name = (?: # Value: ([^"\s]+) | # a) simple value "((?:[^"] | (?<=\\)")*)" # b) quoted string ) \s* ) """, flags = re.X) """ For validation, regular expression that captures all valid characters, so by simply comparing with string length we can tell if there are invalid characters in the string. """ BP_EXPR_WS = re.compile(r""" (?: (\s*) # leading whitespace ([0-9a-zA-Z_.\-]+) # Name = (?: # Value: ([^"\s]+) | # a) simple value (")((?:[^"] | (?<=\\)")*)(") # b) quoted string ) (\s*) )""", flags=re.X) def _bp_extract(s, as_dict=True, validate=False): """Parse BP log line and extract key=value pairs. If validate is False, this will skip over many types of "junk" data. The only escape sequence recognized is a backslash-escaped double-quote, within a quoted value. Args: s - Input line as_dict - If True, return a dictionary, otherwise, a list of tuples. Note: when OrderedDict (Python 3.1+, 2.7+) becomes common, this will go away. validate - If True, see if there are any 'extra' characters in the string. If there are, raise a BPValidationError. Raises: BPParseError - If a token doesn't contain a recognizable name=value pair, BPValidationError - If (optional) validation fails. Returns: Dictionary or list of tuples (see `as_dict` arg) """ if as_dict: result = { } else: result = [ ] if not validate: for n, v, vq in BP_EXPR.findall(s): # check input if not n: raise BPParseError("Bad key: '{0}'".format(n)) # add to result if vq: v = vq.replace('\\"', '"') if as_dict: result[n] = v else: result.append((n,v)) else: valid_data_len = 0 for ws1, n, v, q1, vq, q2, ws2 in BP_EXPR_WS.findall(s): #print(",".join(["<"+x+">" for x in (ws1, n, v, q1, vq, q2, ws2)])) # check input if not n: raise BPParseError("Bad key: '{0}'".format(n)) valid_data_len += sum(map(len, (ws1, n, v, q1, vq, q2, ws2))) + 1 # 1 for '=' # add to result if vq: v = vq.replace('\\"', '"') if as_dict: result[n] = v else: result.append((n,v)) # check overall input junk_chars = len(s) - valid_data_len if junk_chars != 0: raise BPValidationError("{0:d} junk chars in '{1}'".format(junk_chars, s)) return result class ProcessInterface: """Process interface for a parser """ def process(self, line): """Subclasses must override this method to return a list of dictionaries or formatted log strings (with newlines). If there is an error with the format, they should raise a ValueError, KeyError, or this module's ParseError. If nothing is yet ready, return an empty list or tuple. To cause the caller to stop parsing this log, i.e. nothing will ever be ready, return None. """ pass class BaseParser(ProcessInterface, DoesLogging): """Base class for all other Parser classes in the parser modules. Uses iterator protocol to return one result at a time; where a result is a Best Practices log line (a string). Calls read(file) on the subclass to actually get and parse data. Each read() can return multiple events, or multiple read()s can return one event, transparently to the caller who only sees one log line per iteration. 
If 'unparsed_file' is not None, write all lines that returned an error, or the constant LINE_SKIPPED, to this file. Parameters: - add_hash {yes,no,no*}: To each output event, add a new field, '_hash', which is a probabilistically unique (MD5) hash of all the other fields in the event. """ def __init__(self, input_file, fullname='unknown', unparsed_file=None, parse_date=True, add_hash='no', **kw): """Initialize base parser. Parameters: input_file - File object (must support readline) fullname - For logging, the fully qualified name for the logger (matches 'qualname' in the logging config). unparsed_file - File object to place records that caused a parse exception. parse_date - Whether to parse the ISO date to a number or represent it as a string. **kw - Remaining keyword, value pairs are appended to each line of the log. If the same keyword is in a parsed result, the newer value takes precedence. The exception to this is if the parser returns a string instead of a dictionary, e.g. the 'bp' parser: to avoid O(N*M) behavior, where N is the number of keywords and M is the length of the output string, duplicates are not checked. """ if not input_file: raise ValueError("input file cannot be empty") DoesLogging.__init__(self, fullname) # common parameters self._add_hash = self.boolParam(add_hash) # rest of parameters self._infile = nlreadline.BufferedReadline(input_file) if hasattr(input_file, 'fileno'): self._fake_file = False self._infile_rlist = (input_file.fileno(),) # used in read_line else: # not a real file self._fake_file = True self._infile_rlist = () try: self._offs = self._infile.tell() except IOError: self._offs = 0 self._prev_len, self._saved_len = 0, 0 self._saved = [ ] self._name = fullname self._ufile = unparsed_file self._header_values = { } self._parser = NLSimpleParser(parse_date=parse_date) # Constant to add to each record self._const_nvp = { } # add GUID in env, if present guid = nlapi.getGuid(create=False) if guid: self._const_nvp['guid'] = guid # add user-provided values (can override guid) self._const_nvp.update(kw) # cache string-valued version, will be empty string if kw == {} self._const_nvp_str = ' '.join(["%s=%s" % (k,v) for k,v in self._const_nvp.items()]) self.parse_date = parse_date def close(self): if self._infile: self._infile.close() def getFilename(self): if self._infile: return self._infile.name else: return "" def getOffset(self): """Return the offset of the last entirely parsed line. In the case of a single line that returned multiple items, not all of which have been consumed yet, return the offset at the start of this line. This avoids dropping events, at the expense of possible duplicates. It is best to call flush() first to avoid this issue entirely. """ return self._offs def setOffset(self, offs): """Explicitly set offset. This is not normally necessary, as the next() function will advance self._offs every time all the resulting items from the associated input line have been returned. """ self._infile.seek(offs) self._offs = offs def getParameters(self): """Subclasses should override this method to return a dictionary, with all basic types, representing any additional state that needs to be saved and restored. """ return { } def setParameters(self, param): """Subclasses should override this method to update their state with the contents of the arg 'param', a dictionary. """ pass def setHeaderValues(self, value_dict): """Set a dictionary of header keyword, value pairs.
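        For example (hypothetical header values):

            parser.setHeaderValues({'host': 'node1.example.org'})
            parser.getHeaderValue('host')     # -> 'node1.example.org'
            parser.getHeaderValue('missing')  # -> None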
""" self._header_values = value_dict def getHeaderValue(self, key): """Get value from group named 'key', or None. """ return self._header_values.get(key, None) def __iter__(self): return self def next(self): """ Return one saved or new item. Get new item(s) by reading and parsing the file. Return None if no result, so caller can count how many lines were processed and thus do fair sharing across multiple inputs """ self.updateOffset() # get an item to return if self._saved: # multiple items were returned before, so just return one item = self._saved.pop() # if saved is now empty, then we have processed # all the items from the last readline, so # advance offset by its (saved) length if not self._saved: self._prev_len = self._saved_len else: line = self._read_line() # stop if line is empty if line == '': raise StopIteration item = line.strip() # main processing for the module try: result = self.process(item) except (ValueError, KeyError), E: if self._ufile: self._ufile.write(line) else: self.log.warn("unparsed.event", value=line.strip(), msg=E) result = False # A skipped line means that it will never # be parsed, but there was no error. # Like an error, it is written to the unparsed-events # file (if that exists). if result == LINE_SKIPPED: if self._ufile: self._ufile.write(line) result = False if not result: self._offs += len(line) if result is None: raise StopIteration("EOF") item = None # return this to caller else: item = result[0] if len(result) == 1: # advance offset by this on next call self._prev_len = len(line) else: # don't advance offset until all results are returned self._saved = list(result[1:]) self._saved.reverse() # so we can pop() self._saved_len = len(line) # return the item if item is None: return None else: return self._result(item) def _read_line(self): """Read one line. """ if self._trace: self.log.trace("readline.start") if self._fake_file or select(self._infile_rlist, (), (), 0.1)[0]: line = self._infile.readline() else: line = '' if self._trace: self.log.trace("readline.end", n=len(line)) return line def updateOffset(self): """Advance offset by length previously parsed input. """ self._offs += self._prev_len self._prev_len = 0 # do not add this to offset again def _result(self, item): """Make item into a returnable result. Normalize 'level' and add constant attributes. Also run any postprocessing steps, such as adding a hash. """ # Parse if a string if isinstance(item, str): item = self._parser.parseLine(item) # Normalize the 'level' value if item.has_key('level'): level = item['level'] if hasattr(level, 'upper'): lvlname = item['level'].upper() item['level'] = Level.getLevel(lvlname) # Add constant key, value pairs: do a copy and # reverse update so new values override old ones. if self._const_nvp: _tmp = self._const_nvp.copy() _tmp.update(item) item = _tmp # Do post-processing. if self._add_hash: item[HASH_FIELD] = hash_event(item) # Done return item def flush(self): """Return a list of all saved items, i.e., of all items that were parsed but not returned yet, and clear this list. """ result = [ ] # pending items, returned by parser for item in self._saved: result.append(self._result(item)) self._saved = [ ] self._offs += self._saved_len self._saved_len, self._prev_len = 0, 0 return result def done(self): """Close internal state in parser. Useful if the parser is waiting for something, but hits EOF. """ # items waiting in parser for item in self.finalize(): self._saved.append(item) def finalize(self): """Any events to return at end of parse. 
Default is an empty list. """ return () def boolParam(self, s): """Convert a possibly-string-valued boolean parameter, from the configuration file, into a proper boolean value. """ if isinstance(s, bool): return s if isinstance(s, str): sl = s.lower() if sl in ('yes', 'on', 'true', '1'): return True else: return False return bool(s) def __str__(self): return "%s(%s)" % (self._name, self._infile) class NLBaseParser(BaseParser): def __init__(self, input_file=None, err_cb=None, **kw): """Create a NetLogger parser that implements the BaseParser interface as well as its own API for parsing individual lines and streams with user-supplied callbacks. Arguments: input_file - File to be parsed err_cb - Optional callback on errors. Signature: err_cb(line=, error=, linenum=) If set to a function, the function is called. If set to False, errors are completely ignored. If set to True, errors are appended to err_list. If None (the default), errors are propagated to the caller. These errors are of type BPError. """ self.err_cb = err_cb self.err_list = [ ] if input_file is None: input_file = NullFile() BaseParser.__init__(self, input_file, fullname='NLParser', **kw) def parseLine(self, line): """Return a dictionary corresponding to the name,value pairs in the input 'line'. Raises ValueError if the format is incorrect. """ pass def parseStream(self): """Parse input stream, calling parseLine() for each line. Return: generator function, yielding the result of parseLine() for each line in the input stream. """ for line_num, line in enumerate(self._infile.xreadlines()): try: d = self.parseLine(line) yield d except ValueError,E: if self.err_cb is False: pass else: bpe = BPError(line_num, E) if self.err_cb is True: self.err_list.append(bpe) elif self.err_cb is None: raise bpe else: self.err_cb(line=line, error=bpe, linenum=line_num) class NLSimpleParser(DoesLogging): """Simple, fast, not-too-flexible NL parser. Does *not* inherit from NLBaseParser. This is important to allow the BaseParser to itself parse netlogger input. """ def __init__(self, verify=False, parse_date=True, **kw): DoesLogging.__init__(self) self.verify, self.parse_date = verify, parse_date def parseLine(self, line): """Parse a BP-formatted line. For lines which are completely whitespace, raise BPParseError. """ try: fields = _bp_extract(line, validate=self.verify) except BPError, err: raise BPParseError("BP parse error: " + str(err)) # higher-level verification for key in TS_FIELD, EVENT_FIELD: if not fields.has_key(key): raise BPParseError("missing required key '{0}'".format(key)) # Pre-process date, if requested if self.parse_date: fields[TS_FIELD] = parse_ts(fields[TS_FIELD]) # Done. return fields class NLFastParser(NLSimpleParser, NLBaseParser): """NetLogger parser that does inherit from NLBaseParser. Simpler, faster, less flexible NL parser. * Optionally does some error-checking, but cannot say why a given line is wrong. Note: error-checking takes 50% or so longer. * Uses regular expressions instead of pyparsing. * Observed speedups on order of 50x (YMMV).
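    Illustrative usage (a sketch, not from the original module; StringIO
    stands in for a real file of BP-formatted lines, and the event
    values are hypothetical):

        from StringIO import StringIO
        log = StringIO('ts=2011-08-18T03:42:53.000000Z event=test.begin n=1\n')
        parser = NLFastParser(log, parse_date=False)
        for event in parser:
            print event['event']  # -> test.begin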
""" def __init__(self, *args, **kw): # strip out kw for simple parser simple_parser_kw = { } for key, value in kw.items(): if key in ('verify', 'parse_date', 'strip_quotes'): simple_parser_kw[key] = value for key in simple_parser_kw.keys(): if key != "parse_date": #shared del kw[key] NLSimpleParser.__init__(self, **simple_parser_kw) NLBaseParser.__init__(self, *args, **kw) # implementation of the BaseParser API def process(self, line): return (self.parseLine(line),) if HAVE_PYPARSING: class NLPyParser(NLBaseParser): """pyparsing--based implementation of the NLBaseParser """ notSpace = CharsNotIn(" \n") eq = Literal('=').suppress() value = (QuotedString('"', escChar=chr(92), unquoteResults=False) \ ^ OneOrMore(notSpace)) ts = Group(Literal('ts') + eq + value) event = Group(Literal('event') + eq + value) name = ~oneOf("ts event") + Word(alphanums +'-_.') nv = ZeroOrMore(Group(name + eq + value)) nvp = Each([ts, event, nv]) + White('\n').suppress() + StringEnd() def parseLine(self, line): try: rlist = self.nvp.parseString(line).asList() except ParseException, E: raise ValueError(E) result = {} for a in rlist: if self.parse_date and a[0] == 'ts': result[a[0]] = parse_ts(a[1]) else: result[a[0]] = a[1] return result # implementation of the BaseParser API def process(self, line): return (self.parseLine(line),) else: class NLPyParser: BADNESS = """ Can't use the NLPyParser class because pyparsing is not installed. You can use NLFastParser instead, run 'easy_install pyparsing', or install from http://pyparsing.wikispaces.com/ . """ def __init__(self, *args, **kw): raise NotImplementedError(self.BADNESS) class NullFile: def __init__(self, *args): return def read(self, n): return '' def readline(self): return '' def seek(self, n, mode): pass def tell(self): return 0 def getTimezone(t=None): """Return current timezone as UTC offset, formatted as [+/-]HH:MM """ hr, min, sign = nldate.getLocaltimeOffsetParts(t) return "%s%02d:%02d" % (sign, hr, min) pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/parsers/__init__.py0000644000175000017500000000000011757531137025336 0ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/parsers/nlreadline.py0000644000175000017500000000430211757531137025725 0ustar ryngerynge""" File wrapper that changes the semantics of the readline function to never return a non-empty line without a line terminator. """ __rcsid__ = '$Id: nlreadline.py 22911 2008-06-03 05:06:10Z dang $' __author__ = 'Dan Gunter' class BufferedReadline: """Change semantics of file.readline() to return either a complete line with a newline terminator or an empty line. Partial lines are buffered between calls until the newline is found. 
""" def __init__(self, fileobj): self._f = fileobj self._buf = '' def __getattr__(self, x): """Delegate all public methods except readline().""" if x and x[0] == '_': # look up private methods/vars locally and raise # a normal-looking AttributeError if not found try: return self.__dict__[x] except KeyError: raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, x)) return getattr(self._f, x) def readline(self): """Override readline() to get new semantics.""" if self._f is None: return '' line = self._f.readline() if line == '': # empty lines are returned as before pass elif line[-1] == '\n': # complete lines are returned along with # the buffered data (if any) if self._buf: line = self._buf + line self._buf = '' else: # incomplete lines are buffered, and an empty # line is returned self._buf += line line = '' return line def readlines(self): """Override readlines() so it calls our readline().""" if self._f is None: return '' return [line for line in self.readline()] def xreadlines(self): """Override xreadlines() so it calls our readline().""" while 1: line = self.readline() if line: yield line else: raise StopIteration def close(self): if self._f is not None: self._f.close() self._f = None pegasus-wms_4.0.1+dfsg/lib/pegasus/python/netlogger/__init__.py0000644000175000017500000000000011757531137023657 0ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/javascript/0000755000175000017500000000000011757531667020427 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/lib/pegasus/javascript/protovis-r3.2.js0000644000175000017500000034354311757531137023340 0ustar ryngerynge// fba9dc2 var a;if(!Array.prototype.map)Array.prototype.map=function(b,c){for(var d=this.length,f=new Array(d),g=0;g>>0,f=0;f=d)throw new Error("reduce: no values, no initial value");}for(;f=0&&d=69&&m<100?1900:0)});return"([0-9]+)";case "%Y":q.push(function(m){g=m});return"([0-9]+)";case "%%":q.push(function(){}); return"%"}return n});(f=f.match(o))&&f.forEach(function(n,m){q[m](n)});return new Date(g,h,i,j,l,k)};return c}; pv.Format.time=function(b){function c(f){f=Number(f);switch(b){case "short":if(f>=31536E6)return(f/31536E6).toFixed(1)+" years";else if(f>=6048E5)return(f/6048E5).toFixed(1)+" weeks";else if(f>=864E5)return(f/864E5).toFixed(1)+" days";else if(f>=36E5)return(f/36E5).toFixed(1)+" hours";else if(f>=6E4)return(f/6E4).toFixed(1)+" minutes";return(f/1E3).toFixed(1)+" seconds";case "long":var g=[],h=f%36E5/6E4>>0;g.push(d("0",2,f%6E4/1E3>>0));if(f>=36E5){var i=f%864E5/36E5>>0;g.push(d("0",2,h));if(f>=864E5){g.push(d("0", 2,i));g.push(Math.floor(f/864E5).toFixed())}else g.push(i.toFixed())}else g.push(h.toFixed());return g.reverse().join(":")}}var d=pv.Format.pad;c.format=c;c.parse=function(f){switch(b){case "short":for(var g=/([0-9,.]+)\s*([a-z]+)/g,h,i=0;h=g.exec(f);){var j=parseFloat(h[0].replace(",","")),l=0;switch(h[2].toLowerCase()){case "year":case "years":l=31536E6;break;case "week":case "weeks":l=6048E5;break;case "day":case "days":l=864E5;break;case "hour":case "hours":l=36E5;break;case "minute":case "minutes":l= 6E4;break;case "second":case "seconds":l=1E3;break}i+=j*l}return i;case "long":h=f.replace(",","").split(":").reverse();i=0;if(h.length)i+=parseFloat(h[0])*1E3;if(h.length>1)i+=parseFloat(h[1])*6E4;if(h.length>2)i+=parseFloat(h[2])*36E5;if(h.length>3)i+=parseFloat(h[3])*864E5;return i}};return c}; pv.Format.number=function(){function b(n){if(Infinity>h)n=Math.round(n*i)/i;var 
m=String(Math.abs(n)).split("."),r=m[0];n=n<0?"-":"";if(r.length>d)r=r.substring(r.length-d);if(k&&r.length3)r=r.replace(/\B(?=(?:\d{3})+(?!\d))/g,o);if(!k&&r.lengthd)m=m.substring(m.length-d);n=n[1]?Number("0."+n[1]):0;if(Infinity>h)n=Math.round(n*i)/i;return Math.round(m)+n};b.integerDigits=function(n,m){if(arguments.length){c=Number(n);d=arguments.length>1?Number(m):c;f=c+Math.floor(c/3)*o.length;return this}return[c,d]};b.fractionDigits=function(n,m){if(arguments.length){g= Number(n);h=arguments.length>1?Number(m):g;i=Math.pow(10,h);return this}return[g,h]};b.integerPad=function(n){if(arguments.length){j=String(n);k=/\d/.test(j);return this}return j};b.fractionPad=function(n){if(arguments.length){l=String(n);return this}return l};b.decimal=function(n){if(arguments.length){q=String(n);return this}return q};b.group=function(n){if(arguments.length){o=n?String(n):"";f=c+Math.floor(c/3)*o.length;return this}return o};return b}; pv.map=function(b,c){var d={};return c?b.map(function(f,g){d.index=g;return c.call(d,f)}):b.slice()};pv.repeat=function(b,c){if(arguments.length==1)c=2;return pv.blend(pv.range(c).map(function(){return b}))};pv.cross=function(b,c){for(var d=[],f=0,g=b.length,h=c.length;fc){b.length=d;for(var f=c;fc?1:0}; pv.reverseOrder=function(b,c){return cb?1:0};pv.search=function(b,c,d){if(!d)d=pv.identity;for(var f=0,g=b.length-1;f<=g;){var h=f+g>>1,i=d(b[h]);if(ic)g=h-1;else return h}return-f-1};pv.search.index=function(b,c,d){b=pv.search(b,c,d);return b<0?-b-1:b}; pv.range=function(b,c,d){if(arguments.length==1){c=b;b=0}if(d==undefined)d=1;if((c-b)/d==Infinity)throw new Error("range must be finite");var f=[],g=0,h;if(d<0)for(;(h=b+d*g++)>c;)f.push(h);else for(;(h=b+d*g++)f){f=i;d=h}}return d}; pv.min=function(b,c){if(c==pv.index)return 0;return Math.min.apply(null,c?pv.map(b,c):b)};pv.min.index=function(b,c){if(!b.length)return-1;if(c==pv.index)return 0;if(!c)c=pv.identity;for(var d=0,f=Infinity,g={},h=0;h0?Math.pow(c,Math.floor(pv.log(b,c))):-Math.pow(c,-Math.floor(-pv.log(-b,c)))};pv.logCeil=function(b,c){return b>0?Math.pow(c,Math.ceil(pv.log(b,c))):-Math.pow(c,-Math.ceil(-pv.log(-b,c)))}; (function(){var b=Math.PI/180,c=180/Math.PI;pv.radians=function(d){return b*d};pv.degrees=function(d){return c*d}})();pv.keys=function(b){var c=[];for(var d in b)c.push(d);return c};pv.entries=function(b){var c=[];for(var d in b)c.push({key:d,value:b[d]});return c};pv.values=function(b){var c=[];for(var d in b)c.push(b[d]);return c};pv.dict=function(b,c){for(var d={},f={},g=0;g=94608E6){n=31536E6;t="%Y";p=function(w){w.setFullYear(w.getFullYear()+v)}}else if(u>=7776E6){n=2592E6;t="%m/%Y";p=function(w){w.setMonth(w.getMonth()+v)}}else if(u>=18144E5){n=6048E5;t="%m/%d";p=function(w){w.setDate(w.getDate()+7*v)}}else if(u>=2592E5){n=864E5;t="%m/%d";p=function(w){w.setDate(w.getDate()+v)}}else if(u>=108E5){n=36E5;t="%I:%M %p";p=function(w){w.setHours(w.getHours()+ v)}}else if(u>=18E4){n=6E4;t="%I:%M %p";p=function(w){w.setMinutes(w.getMinutes()+v)}}else if(u>=3E3){n=1E3;t="%I:%M:%S";p=function(w){w.setSeconds(w.getSeconds()+v)}}else{n=1;t="%S.%Qs";p=function(w){w.setTime(w.getTime()+v)}}q=pv.Format.date(t);s=new Date(s);t=[];x(s,n);u=u/n;if(u>10)switch(n){case 36E5:v=u>20?6:3;s.setHours(Math.floor(s.getHours()/v)*v);break;case 2592E6:v=3;s.setMonth(Math.floor(s.getMonth()/v)*v);break;case 6E4:v=u>30?15:u>15?10:5;s.setMinutes(Math.floor(s.getMinutes()/v)*v); break;case 1E3:v=u>90?15:u>60?10:5;s.setSeconds(Math.floor(s.getSeconds()/v)*v);break;case 
1:v=u>1E3?250:u>200?100:u>100?50:u>50?25:5;s.setMilliseconds(Math.floor(s.getMilliseconds()/v)*v);break;default:v=pv.logCeil(u/15,10);if(u/v<2)v/=5;else if(u/v<5)v/=2;s.setFullYear(Math.floor(s.getFullYear()/v)*v);break}for(;;){p(s);if(s>m)break;t.push(new Date(s))}return r?t.reverse():t}arguments.length||(o=10);v=pv.logFloor(u/o,10);n=o/(u/v);if(n<=0.15)v*=10;else if(n<=0.35)v*=5;else if(n<=0.75)v*=2;n=Math.ceil(s/ v)*v;m=Math.floor(m/v)*v;q=pv.Format.number().fractionDigits(Math.max(0,-Math.floor(pv.log(v,10)+0.01)));m=pv.range(n,m+v,v);return r?m.reverse():m};c.tickFormat=function(o){return q(o)};c.nice=function(){if(d.length!=2)return this;var o=d[0],n=d[d.length-1],m=n0;i--)k.push(-g(-j)*i);else{for(;jh[1];l--);return k.slice(j,l)};b.tickFormat=function(h){return h.toPrecision(1)}; b.nice=function(){var h=b.domain();return b.domain(pv.logFloor(h[0],c),pv.logCeil(h[1],c))};b.base=function(h){if(arguments.length){c=Number(h);d=Math.log(c);b.transform(f,g);return this}return c};b.domain.apply(b,arguments);return b.base(10)};pv.Scale.root=function(){var b=pv.Scale.quantitative();b.power=function(c){if(arguments.length){var d=Number(c),f=1/d;b.transform(function(g){return Math.pow(g,f)},function(g){return Math.pow(g,d)});return this}return d};b.domain.apply(b,arguments);return b.power(2)}; pv.Scale.ordinal=function(){function b(g){g in d||(d[g]=c.push(g)-1);return f[d[g]%f.length]}var c=[],d={},f=[];b.domain=function(g,h){if(arguments.length){g=g instanceof Array?arguments.length>1?pv.map(g,h):g:Array.prototype.slice.call(arguments);c=[];for(var i={},j=0;j1?pv.map(g,h):g:Array.prototype.slice.call(arguments); if(typeof f[0]=="string")f=f.map(pv.color);return this}return f};b.split=function(g,h){var i=(h-g)/this.domain().length;f=pv.range(g+i/2,h,i);return this};b.splitFlush=function(g,h){var i=this.domain().length,j=(h-g)/(i-1);f=i==1?[(g+h)/2]:pv.range(g,h+j/2,j);return this};b.splitBanded=function(g,h,i){if(arguments.length<3)i=1;if(i<0){var j=this.domain().length;j=(h-g- -i*j)/(j+1);f=pv.range(g+j,h,j-i);f.band=-i}else{j=(h-g)/(this.domain().length+(1-i));f=pv.range(g+j*(1-i),h,j);f.band=j*i}return this}; b.by=function(g){function h(){return b(g.apply(this,arguments))}for(var i in b)h[i]=b[i];return h};b.domain.apply(b,arguments);return b}; pv.Scale.quantile=function(){function b(i){return h(Math.max(0,Math.min(d,pv.search.index(f,i)-1))/d)}var c=-1,d=-1,f=[],g=[],h=pv.Scale.linear();b.quantiles=function(i){if(arguments.length){c=Number(i);if(c<0){f=[g[0]].concat(g);d=g.length-1}else{f=[];f[0]=g[0];for(var j=1;j<=c;j++)f[j]=g[~~(j*(g.length-1)/c)];d=c-1}return this}return f};b.domain=function(i,j){if(arguments.length){g=i instanceof Array?pv.map(i,j):Array.prototype.slice.call(arguments);g.sort(pv.naturalOrder);b.quantiles(c);return this}return g}; b.range=function(){if(arguments.length){h.range.apply(h,arguments);return this}return h.range()};b.by=function(i){function j(){return b(i.apply(this,arguments))}for(var l in b)j[l]=b[l];return j};b.domain.apply(b,arguments);return b}; pv.histogram=function(b,c){var d=true;return{bins:function(f){var g=pv.map(b,c),h=[];arguments.length||(f=pv.Scale.linear(g).ticks());for(var i=0;i360)j-=360;else if(j<0)j+=360;if(j<60)return i+(h-i)*j/60;if(j<180)return h;if(j<240)return i+(h-i)*(240-j)/60;return i}function c(j){return Math.round(b(j)*255)}var d=this.h,f=this.s,g=this.l;d%=360;if(d<0)d+=360;f=Math.max(0,Math.min(f,1));g=Math.max(0,Math.min(g,1));var h=g<=0.5?g*(1+f):g+f-g*f,i=2*g-h;return pv.rgb(c(d+120),c(d),c(d-120),this.a)}; 
pv.Color.names={aliceblue:"#f0f8ff",antiquewhite:"#faebd7",aqua:"#00ffff",aquamarine:"#7fffd4",azure:"#f0ffff",beige:"#f5f5dc",bisque:"#ffe4c4",black:"#000000",blanchedalmond:"#ffebcd",blue:"#0000ff",blueviolet:"#8a2be2",brown:"#a52a2a",burlywood:"#deb887",cadetblue:"#5f9ea0",chartreuse:"#7fff00",chocolate:"#d2691e",coral:"#ff7f50",cornflowerblue:"#6495ed",cornsilk:"#fff8dc",crimson:"#dc143c",cyan:"#00ffff",darkblue:"#00008b",darkcyan:"#008b8b",darkgoldenrod:"#b8860b",darkgray:"#a9a9a9",darkgreen:"#006400", darkgrey:"#a9a9a9",darkkhaki:"#bdb76b",darkmagenta:"#8b008b",darkolivegreen:"#556b2f",darkorange:"#ff8c00",darkorchid:"#9932cc",darkred:"#8b0000",darksalmon:"#e9967a",darkseagreen:"#8fbc8f",darkslateblue:"#483d8b",darkslategray:"#2f4f4f",darkslategrey:"#2f4f4f",darkturquoise:"#00ced1",darkviolet:"#9400d3",deeppink:"#ff1493",deepskyblue:"#00bfff",dimgray:"#696969",dimgrey:"#696969",dodgerblue:"#1e90ff",firebrick:"#b22222",floralwhite:"#fffaf0",forestgreen:"#228b22",fuchsia:"#ff00ff",gainsboro:"#dcdcdc", ghostwhite:"#f8f8ff",gold:"#ffd700",goldenrod:"#daa520",gray:"#808080",green:"#008000",greenyellow:"#adff2f",grey:"#808080",honeydew:"#f0fff0",hotpink:"#ff69b4",indianred:"#cd5c5c",indigo:"#4b0082",ivory:"#fffff0",khaki:"#f0e68c",lavender:"#e6e6fa",lavenderblush:"#fff0f5",lawngreen:"#7cfc00",lemonchiffon:"#fffacd",lightblue:"#add8e6",lightcoral:"#f08080",lightcyan:"#e0ffff",lightgoldenrodyellow:"#fafad2",lightgray:"#d3d3d3",lightgreen:"#90ee90",lightgrey:"#d3d3d3",lightpink:"#ffb6c1",lightsalmon:"#ffa07a", lightseagreen:"#20b2aa",lightskyblue:"#87cefa",lightslategray:"#778899",lightslategrey:"#778899",lightsteelblue:"#b0c4de",lightyellow:"#ffffe0",lime:"#00ff00",limegreen:"#32cd32",linen:"#faf0e6",magenta:"#ff00ff",maroon:"#800000",mediumaquamarine:"#66cdaa",mediumblue:"#0000cd",mediumorchid:"#ba55d3",mediumpurple:"#9370db",mediumseagreen:"#3cb371",mediumslateblue:"#7b68ee",mediumspringgreen:"#00fa9a",mediumturquoise:"#48d1cc",mediumvioletred:"#c71585",midnightblue:"#191970",mintcream:"#f5fffa",mistyrose:"#ffe4e1", moccasin:"#ffe4b5",navajowhite:"#ffdead",navy:"#000080",oldlace:"#fdf5e6",olive:"#808000",olivedrab:"#6b8e23",orange:"#ffa500",orangered:"#ff4500",orchid:"#da70d6",palegoldenrod:"#eee8aa",palegreen:"#98fb98",paleturquoise:"#afeeee",palevioletred:"#db7093",papayawhip:"#ffefd5",peachpuff:"#ffdab9",peru:"#cd853f",pink:"#ffc0cb",plum:"#dda0dd",powderblue:"#b0e0e6",purple:"#800080",red:"#ff0000",rosybrown:"#bc8f8f",royalblue:"#4169e1",saddlebrown:"#8b4513",salmon:"#fa8072",sandybrown:"#f4a460",seagreen:"#2e8b57", seashell:"#fff5ee",sienna:"#a0522d",silver:"#c0c0c0",skyblue:"#87ceeb",slateblue:"#6a5acd",slategray:"#708090",slategrey:"#708090",snow:"#fffafa",springgreen:"#00ff7f",steelblue:"#4682b4",tan:"#d2b48c",teal:"#008080",thistle:"#d8bfd8",tomato:"#ff6347",turquoise:"#40e0d0",violet:"#ee82ee",wheat:"#f5deb3",white:"#ffffff",whitesmoke:"#f5f5f5",yellow:"#ffff00",yellowgreen:"#9acd32",transparent:pv.Color.transparent=pv.rgb(0,0,0,0)};(function(){var b=pv.Color.names;for(var c in b)b[c]=pv.color(b[c])})(); pv.colors=function(){var b=pv.Scale.ordinal();b.range.apply(b,arguments);return b};pv.Colors={};pv.Colors.category10=function(){var b=pv.colors("#1f77b4","#ff7f0e","#2ca02c","#d62728","#9467bd","#8c564b","#e377c2","#7f7f7f","#bcbd22","#17becf");b.domain.apply(b,arguments);return b}; pv.Colors.category20=function(){var 
b=pv.colors("#1f77b4","#aec7e8","#ff7f0e","#ffbb78","#2ca02c","#98df8a","#d62728","#ff9896","#9467bd","#c5b0d5","#8c564b","#c49c94","#e377c2","#f7b6d2","#7f7f7f","#c7c7c7","#bcbd22","#dbdb8d","#17becf","#9edae5");b.domain.apply(b,arguments);return b}; pv.Colors.category19=function(){var b=pv.colors("#9c9ede","#7375b5","#4a5584","#cedb9c","#b5cf6b","#8ca252","#637939","#e7cb94","#e7ba52","#bd9e39","#8c6d31","#e7969c","#d6616b","#ad494a","#843c39","#de9ed6","#ce6dbd","#a55194","#7b4173");b.domain.apply(b,arguments);return b};pv.ramp=function(){var b=pv.Scale.linear();b.range.apply(b,arguments);return b}; pv.Scene=pv.SvgScene={svg:"http://www.w3.org/2000/svg",xmlns:"http://www.w3.org/2000/xmlns",xlink:"http://www.w3.org/1999/xlink",xhtml:"http://www.w3.org/1999/xhtml",scale:1,events:["DOMMouseScroll","mousewheel","mousedown","mouseup","mouseover","mouseout","mousemove","click","dblclick"],implicit:{svg:{"shape-rendering":"auto","pointer-events":"painted",x:0,y:0,dy:0,"text-anchor":"start",transform:"translate(0,0)",fill:"none","fill-opacity":1,stroke:"none","stroke-opacity":1,"stroke-width":1.5,"stroke-linejoin":"miter"}, css:{font:"10px sans-serif"}}};pv.SvgScene.updateAll=function(b){if(b.length&&b[0].reverse&&b.type!="line"&&b.type!="area"){for(var c=pv.extend(b),d=0,f=b.length-1;f>=0;d++,f--)c[d]=b[f];b=c}this.removeSiblings(this[b.type](b))};pv.SvgScene.create=function(b){return document.createElementNS(this.svg,b)}; pv.SvgScene.expect=function(b,c,d,f){if(b){if(b.tagName=="a")b=b.firstChild;if(b.tagName!=c){c=this.create(c);b.parentNode.replaceChild(c,b);b=c}}else b=this.create(c);for(var g in d){c=d[g];if(c==this.implicit.svg[g])c=null;c==null?b.removeAttribute(g):b.setAttribute(g,c)}for(g in f){c=f[g];if(c==this.implicit.css[g])c=null;if(c==null)b.style.removeProperty(g);else b.style[g]=c}return b}; pv.SvgScene.append=function(b,c,d){b.$scene={scenes:c,index:d};b=this.title(b,c[d]);b.parentNode||c.$g.appendChild(b);return b.nextSibling};pv.SvgScene.title=function(b,c){var d=b.parentNode;if(d&&d.tagName!="a")d=null;if(c.title){if(!d){d=this.create("a");b.parentNode&&b.parentNode.replaceChild(d,b);d.appendChild(b)}d.setAttributeNS(this.xlink,"title",c.title);return d}d&&d.parentNode.replaceChild(b,d);return b}; pv.SvgScene.dispatch=pv.listener(function(b){var c=b.target.$scene;if(c){var d=b.type;switch(d){case "DOMMouseScroll":d="mousewheel";b.wheel=-480*b.detail;break;case "mousewheel":b.wheel=(window.opera?12:1)*b.wheelDelta;break}pv.Mark.dispatch(d,c.scenes,c.index)&&b.preventDefault()}});pv.SvgScene.removeSiblings=function(b){for(;b;){var c=b.nextSibling;b.parentNode.removeChild(b);b=c}};pv.SvgScene.undefined=function(){}; pv.SvgScene.pathBasis=function(){function b(f,g,h,i,j){return{x:f[0]*g.left+f[1]*h.left+f[2]*i.left+f[3]*j.left,y:f[0]*g.top+f[1]*h.top+f[2]*i.top+f[3]*j.top}}var c=[[1/6,2/3,1/6,0],[0,2/3,1/3,0],[0,1/3,2/3,0],[0,1/6,2/3,1/6]],d=function(f,g,h,i){var j=b(c[1],f,g,h,i),l=b(c[2],f,g,h,i);f=b(c[3],f,g,h,i);return"C"+j.x+","+j.y+","+l.x+","+l.y+","+f.x+","+f.y};d.segment=function(f,g,h,i){var j=b(c[0],f,g,h,i),l=b(c[1],f,g,h,i),k=b(c[2],f,g,h,i);f=b(c[3],f,g,h,i);return"M"+j.x+","+j.y+"C"+l.x+","+l.y+ ","+k.x+","+k.y+","+f.x+","+f.y};return d}();pv.SvgScene.curveBasis=function(b){if(b.length<=2)return"";var c="",d=b[0],f=d,g=d,h=b[1];c+=this.pathBasis(d,f,g,h);for(var i=2;i1){j=c[1];h=b[l];l++;f+="C"+(g.left+i.x)+","+(g.top+i.y)+","+(h.left-j.x)+","+(h.top-j.y)+","+h.left+","+h.top;for(g=2;g9){l=3/Math.sqrt(l);f[h]= 
l*i*d[h];f[h+1]=l*j*d[h]}}for(h=0;h2&&(g.interpolate=="basis"||g.interpolate=="cardinal"||g.interpolate=="monotone")?d:c)(k,q-1));k=q-1}}if(!j.length)return f;f=this.expect(f,"path",{"shape-rendering":g.antialias?null:"crispEdges","pointer-events":g.events,cursor:g.cursor,d:"M"+j.join("ZM")+"Z",fill:h.color,"fill-opacity":h.opacity|| null,stroke:i.color,"stroke-opacity":i.opacity||null,"stroke-width":i.opacity?g.lineWidth/this.scale:null});return this.append(f,b,0)}; pv.SvgScene.areaSegment=function(b){var c=b.$g.firstChild,d=b[0],f,g;if(d.interpolate=="basis"||d.interpolate=="cardinal"||d.interpolate=="monotone"){f=[];g=[];for(var h=0,i=b.length;h2&&(d.interpolate=="basis"||d.interpolate=="cardinal"||d.interpolate=="monotone"))switch(d.interpolate){case "basis":h+=this.curveBasis(b);break;case "cardinal":h+=this.curveCardinal(b,d.tension);break;case "monotone":h+=this.curveMonotone(b); break}else for(var i=1;i1)break;return"A"+f+","+f+" 0 0,"+d+" "+c.left+","+c.top;case "step-before":return"V"+c.top+"H"+c.left;case "step-after":return"H"+c.left+"V"+c.top}return"L"+c.left+","+c.top};pv.SvgScene.lineIntersect=function(b,c,d,f){return b.plus(c.times(d.minus(b).dot(f.perp())/c.dot(f.perp())))}; pv.SvgScene.pathJoin=function(b,c,d,f){var g=pv.vector(c.left,c.top);d=pv.vector(d.left,d.top);var h=d.minus(g),i=h.perp().norm(),j=i.times(c.lineWidth/(2*this.scale));c=g.plus(j);var l=d.plus(j),k=d.minus(j);j=g.minus(j);if(b&&b.visible){b=g.minus(b.left,b.top).perp().norm().plus(i);j=this.lineIntersect(g,b,j,h);c=this.lineIntersect(g,b,c,h)}if(f&&f.visible){f=pv.vector(f.left,f.top).minus(d).perp().norm().plus(i);k=this.lineIntersect(d,f,k,h);l=this.lineIntersect(d,f,l,h)}return"M"+c.x+","+c.y+ "L"+l.x+","+l.y+" "+k.x+","+k.y+" "+j.x+","+j.y}; pv.SvgScene.panel=function(b){for(var c=b.$g,d=c&&c.firstChild,f=0;f=2*Math.PI)i=i?"M0,"+j+"A"+j+","+j+" 0 1,1 0,"+-j+"A"+j+","+j+" 0 1,1 0,"+j+"M0,"+i+"A"+i+","+i+" 0 1,1 0,"+-i+"A"+i+","+i+" 0 1,1 0,"+i+"Z":"M0,"+j+"A"+j+","+j+" 0 1,1 0,"+-j+"A"+j+","+j+" 0 1,1 0,"+j+"Z";else{var k=Math.min(f.startAngle,f.endAngle),q=Math.max(f.startAngle,f.endAngle), o=Math.cos(k),n=Math.cos(q);k=Math.sin(k);q=Math.sin(q);i=i?"M"+j*o+","+j*k+"A"+j+","+j+" 0 "+(l1?c:null)}; a.anchor=function(b){function c(g){for(var h=d,i=[];!(f=h.scene);){g=g.parent;i.push({index:g.index,childIndex:h.childIndex});h=h.parent}for(;i.length;){g=i.pop();f=f[g.index].children[g.childIndex]}if(d.hasOwnProperty("index")){i=pv.extend(f[d.index]);i.right=i.top=i.left=i.bottom=0;return[i]}return f}var d=this,f;b||(b="center");return(new pv.Anchor(this)).name(b).def("$mark.anchor",function(){f=this.scene.target=c(this)}).data(function(){return f.map(function(g){return g.data})}).visible(function(){return f[this.index].visible}).left(function(){var g= f[this.index],h=g.width||0;switch(this.name()){case "bottom":case "top":case "center":return g.left+h/2;case "left":return null}return g.left+h}).top(function(){var g=f[this.index],h=g.height||0;switch(this.name()){case "left":case "right":case "center":return g.top+h/2;case "top":return null}return g.top+h}).right(function(){var g=f[this.index];return this.name()=="left"?g.right+(g.width||0):null}).bottom(function(){var g=f[this.index];return this.name()=="top"?g.bottom+(g.height||0):null}).textAlign(function(){switch(this.name()){case "bottom":case "top":case "center":return"center"; case "right":return"right"}return"left"}).textBaseline(function(){switch(this.name()){case "right":case "left":case "center":return"middle";case 
"top":return"top"}return"bottom"})};a.anchorTarget=function(){return this.proto.anchorTarget()};a.margin=function(b){return this.left(b).right(b).top(b).bottom(b)};a.instance=function(b){var c=this.scene||this.parent.instance(-1).children[this.childIndex],d=!arguments.length||this.hasOwnProperty("index")?this.index:b;return c[d<0?c.length-1:d]};a.first=function(){return this.scene[0]}; a.last=function(){return this.scene[this.scene.length-1]};a.sibling=function(){return this.index==0?null:this.scene[this.index-1]};a.cousin=function(){var b=this.parent;return(b=b&&b.sibling())&&b.children?b.children[this.childIndex][this.index]:null}; a.render=function(){function b(i,j,l){i.scale=l;if(j=0;k--){var q=l[k];if(!(q.name in c)){c[q.name]=q;switch(q.name){case "data":f=q;break;case "visible":g=q;break;default:d[q.type].push(q);break}}}while(j=j.proto)}var c={},d=[[],[],[],[]],f,g;b(this);b(this.defaults);d[1].reverse();d[3].reverse();var h=this;do for(var i in h.properties)i in c||d[2].push(c[i]={name:i,type:2,value:null});while(h=h.proto);h=d[0].concat(d[1]);for(i=0;ih.id)d[g.name]={id:0,value:g.type&1?g.value.apply(this,c):g.value}}}d=this.binds.data;d=d.type&1?d.value.apply(this,c):d.value;c.unshift(null); b.length=d.length;for(f=0;f0;k--){n=m[k];n.scale=q;q*=n.scene[n.index].transform.k}if(o.children){k=0;for(m=o.children.length;k=3*Math.PI/2};pv.Wedge.prototype.buildImplied=function(b){if(b.angle==null)b.angle=b.endAngle-b.startAngle;else if(b.endAngle==null)b.endAngle=b.startAngle+b.angle;pv.Mark.prototype.buildImplied.call(this,b)};pv.simulation=function(b){return new pv.Simulation(b)};pv.Simulation=function(b){for(var c=0;c=s,t=q.y>=u;k.leaf=false;switch((t<<1)+x){case 0:k=k.c1||(k.c1=new pv.Quadtree.Node);break;case 1:k=k.c2||(k.c2=new pv.Quadtree.Node);break;case 2:k=k.c3||(k.c3=new pv.Quadtree.Node);break;case 3:k=k.c4||(k.c4=new pv.Quadtree.Node); break}if(x)o=s;else m=s;if(t)n=u;else r=u;c(k,q,o,n,m,r)}var f,g=Number.POSITIVE_INFINITY,h=g,i=Number.NEGATIVE_INFINITY,j=i;for(f=b;f;f=f.next){if(f.xi)i=f.x;if(f.y>j)j=f.y}f=i-g;var l=j-h;if(f>l)j=h+f;else i=g+l;this.xMin=g;this.yMin=h;this.xMax=i;this.yMax=j;this.root=new pv.Quadtree.Node;for(f=b;f;f=f.next)c(this.root,f,g,h,i,j)};pv.Quadtree.Node=function(){this.leaf=true;this.p=this.c4=this.c3=this.c2=this.c1=null};pv.Force={}; pv.Force.charge=function(b){function c(k){function q(m){c(m);k.cn+=m.cn;o+=m.cn*m.cx;n+=m.cn*m.cy}var o=0,n=0;k.cn=0;if(!k.leaf){k.c1&&q(k.c1);k.c2&&q(k.c2);k.c3&&q(k.c3);k.c4&&q(k.c4)}if(k.p){k.cn+=b;o+=b*k.p.x;n+=b*k.p.y}k.cx=o/k.cn;k.cy=n/k.cn}function d(k,q,o,n,m,r){var s=k.cx-q.x,u=k.cy-q.y,x=1/Math.sqrt(s*s+u*u);if(k.leaf&&k.p!=q||(m-o)*xg)x=g;k=k.cn*x*x*x;s=s*k;u=u*k;q.fx+=s;q.fy+=u}}else if(!k.leaf){var t=(o+m)*0.5,p=(n+r)*0.5;k.c1&&d(k.c1,q,o,n,t,p);k.c2&&d(k.c2,q,t,n, m,p);k.c3&&d(k.c3,q,o,p,t,r);k.c4&&d(k.c4,q,t,p,m,r);if(!(xg)x=g;if(k.p&&k.p!=q){k=b*x*x*x;s=s*k;u=u*k;q.fx+=s;q.fy+=u}}}}var f=2,g=1/f,h=500,i=1/h,j=0.9,l={};arguments.length||(b=-40);l.constant=function(k){if(arguments.length){b=Number(k);return l}return b};l.domain=function(k,q){if(arguments.length){f=Number(k);g=1/f;h=Number(q);i=1/h;return l}return[f,h]};l.theta=function(k){if(arguments.length){j=Number(k);return l}return j};l.apply=function(k,q){c(q.root);for(k=k;k;k=k.next)d(q.root, k,q.xMin,q.yMin,q.xMax,q.yMax)};return l};pv.Force.drag=function(b){var c={};arguments.length||(b=0.1);c.constant=function(d){if(arguments.length){b=d;return c}return b};c.apply=function(d){if(b)for(d=d;d;d=d.next){d.fx-=b*d.vx;d.fy-=b*d.vy}};return c}; 
pv.Force.spring=function(b){var c=0.1,d=20,f,g,h={};arguments.length||(b=0.1);h.links=function(i){if(arguments.length){f=i;g=i.map(function(j){return 1/Math.sqrt(Math.max(j.sourceNode.linkDegree,j.targetNode.linkDegree))});return h}return f};h.constant=function(i){if(arguments.length){b=Number(i);return h}return b};h.damping=function(i){if(arguments.length){c=Number(i);return h}return c};h.length=function(i){if(arguments.length){d=Number(i);return h}return d};h.apply=function(){for(var i=0;ig,p=sh){k.c1&&t&&c(k.c1,q,o,n,s,u);k.c2&&p&&c(k.c2,q,s,n,m,u)}if(x){k.c3&&t&&c(k.c3,q,o,u,s,r);k.c4&&p&&c(k.c4,q,s,u,m,r)}}if(k.p&&k.p!=q){o=q.x-k.p.x;n=q.y-k.p.y;m=Math.sqrt(o*o+n*n);r=f+b(k.p);if(mm)m=n}for(var r=0;rc.max?c.max:g.x;if(d)for(g=f;g;g=g.next)g.y=g.yd.max?d.max:g.y};return b};pv.Layout=function(){pv.Panel.call(this)};pv.Layout.prototype=pv.extend(pv.Panel); pv.Layout.prototype.property=function(b,c){if(!this.hasOwnProperty("properties"))this.properties=pv.extend(this.properties);this.properties[b]=true;this.propertyMethod(b,false,pv.Mark.cast[b]=c);return this}; pv.Layout.Network=function(){pv.Layout.call(this);var b=this;this.$id=pv.id();(this.node=(new pv.Mark).data(function(){return b.nodes()}).strokeStyle("#1f77b4").fillStyle("#fff").left(function(c){return c.x}).top(function(c){return c.y})).parent=this;this.link=(new pv.Mark).extend(this.node).data(function(c){return[c.sourceNode,c.targetNode]}).fillStyle(null).lineWidth(function(c,d){return d.linkValue*1.5}).strokeStyle("rgba(0,0,0,.2)");this.link.add=function(c){return b.add(pv.Panel).data(function(){return b.links()}).add(c).extend(this)}; (this.label=(new pv.Mark).extend(this.node).textMargin(7).textBaseline("middle").text(function(c){return c.nodeName||c.nodeValue}).textAngle(function(c){c=c.midAngle;return pv.Wedge.upright(c)?c:c+Math.PI}).textAlign(function(c){return pv.Wedge.upright(c.midAngle)?"left":"right"})).parent=this}; pv.Layout.Network.prototype=pv.extend(pv.Layout).property("nodes",function(b){return b.map(function(c,d){if(typeof c!="object")c={nodeValue:c};c.index=d;c.linkDegree=0;return c})}).property("links",function(b){return b.map(function(c){if(isNaN(c.linkValue))c.linkValue=isNaN(c.value)?1:c.value;return c})});pv.Layout.Network.prototype.reset=function(){this.$id=pv.id();return this}; pv.Layout.Network.prototype.buildProperties=function(b,c){if((b.$id||0)=this.$id)return true;b.$id=this.$id;b.links.forEach(function(c){var d=c.linkValue;(c.sourceNode||(c.sourceNode=b.nodes[c.source])).linkDegree+=d;(c.targetNode||(c.targetNode=b.nodes[c.target])).linkDegree+=d})}; pv.Layout.Hierarchy=function(){pv.Layout.Network.call(this);this.link.strokeStyle("#ccc")};pv.Layout.Hierarchy.prototype=pv.extend(pv.Layout.Network);pv.Layout.Hierarchy.prototype.buildImplied=function(b){if(!b.links)b.links=pv.Layout.Hierarchy.links.call(this);pv.Layout.Network.prototype.buildImplied.call(this,b)};pv.Layout.Hierarchy.links=function(){return this.nodes().filter(function(b){return b.parentNode}).map(function(b){return{sourceNode:b,targetNode:b.parentNode,linkValue:1}})}; pv.Layout.Hierarchy.NodeLink={buildImplied:function(b){function c(m){return m.parentNode?m.depth*(o-q)+q:0}function d(m){return m.parentNode?(m.breadth-0.25)*2*Math.PI:0}function f(m){switch(i){case "left":return m.depth*l;case "right":return l-m.depth*l;case "top":return m.breadth*l;case "bottom":return l-m.breadth*l;case "radial":return l/2+c(m)*Math.cos(m.midAngle)}}function g(m){switch(i){case "left":return m.breadth*k;case "right":return k-m.breadth*k;case 
"top":return m.depth*k;case "bottom":return k- m.depth*k;case "radial":return k/2+c(m)*Math.sin(m.midAngle)}}var h=b.nodes,i=b.orient,j=/^(top|bottom)$/.test(i),l=b.width,k=b.height;if(i=="radial"){var q=b.innerRadius,o=b.outerRadius;if(q==null)q=0;if(o==null)o=Math.min(l,k)/2}for(b=0;bb.dy?0:-Math.PI/2});(this.leaf=(new pv.Mark).extend(this.node).fillStyle(null).strokeStyle(null).visible(function(b){return!b.firstChild})).parent= this;delete this.link};pv.Layout.Treemap.prototype=pv.extend(pv.Layout.Hierarchy).property("round",Boolean).property("paddingLeft",Number).property("paddingRight",Number).property("paddingTop",Number).property("paddingBottom",Number).property("mode",String).property("order",String);a=pv.Layout.Treemap.prototype;a.defaults=(new pv.Layout.Treemap).extend(pv.Layout.Hierarchy.prototype.defaults).mode("squarify").order("ascending");a.padding=function(b){return this.paddingLeft(b).paddingRight(b).paddingTop(b).paddingBottom(b)}; a.$size=function(b){return Number(b.nodeValue)};a.size=function(b){this.$size=pv.functor(b);return this}; a.buildImplied=function(b){function c(r,s,u,x,t,p,v){for(var w=0,y=0;wu)u=v;t+=v}t*=t;s*=s;return Math.max(s*u/t,t/(s*x))}function f(r,s){function u(A){var D=p==y,G=pv.sum(A,o),E=y?n(G/y):0;c(A,G,D,x,t,D?p:E,D?E:v);if(D){t+=E;v-=E}else{x+= E;p-=E}y=Math.min(p,v);return D}var x=r.x+j,t=r.y+k,p=r.dx-j-l,v=r.dy-k-q;if(m!="squarify")c(r.childNodes,r.size,m=="slice"?true:m=="dice"?false:s&1,x,t,p,v);else{var w=[];s=Infinity;var y=Math.min(p,v),z=p*v/r.size;if(!(r.size<=0)){r.visitBefore(function(A){A.size*=z});for(r=r.childNodes.slice();r.length;){var C=r[r.length-1];if(C.size){w.push(C);z=d(w,y);if(z<=s){r.pop();s=z}else{w.pop();u(w);w.length=0;s=Infinity}}else r.pop()}if(u(w))for(s=0;s0){i(l(C,p,v),p,B);A+=B;D+=B}G+=C.mod;A+=y.mod;E+=w.mod;D+=z.mod;C=h(C);y=g(y)}if(C&&!h(z)){z.thread=C;z.mod+=G-D}if(y&&!g(w)){w.thread=y;w.mod+=A-E;v=p}}return v}function g(p){return p.firstChild||p.thread}function h(p){return p.lastChild||p.thread}function i(p,v,w){var y=v.number-p.number;v.change-=w/y;v.shift+=w;p.change+= w/y;v.prelim+=w;v.mod+=w}function j(p){var v=0,w=0;for(p=p.lastChild;p;p=p.previousSibling){p.prelim+=v;p.mod+=v;w+=p.change;v+=p.shift+w}}function l(p,v,w){return p.ancestor.parentNode==v.parentNode?p.ancestor:w}function k(p,v){return(v?1:u+1)/(m=="radial"?p:1)}function q(p){return m=="radial"?p.breadth/r:0}function o(p){switch(m){case "left":return p.depth;case "right":return x-p.depth;case "top":case "bottom":return p.breadth+x/2;case "radial":return x/2+p.depth*Math.cos(q(p))}}function n(p){switch(m){case "left":case "right":return p.breadth+ t/2;case "top":return p.depth;case "bottom":return t-p.depth;case "radial":return t/2+p.depth*Math.sin(q(p))}}if(!pv.Layout.Hierarchy.prototype.buildImplied.call(this,b)){var m=b.orient,r=b.depth,s=b.breadth,u=b.group,x=b.width,t=b.height;b=b.nodes[0];b.visitAfter(function(p,v){p.ancestor=p;p.prelim=0;p.mod=0;p.change=0;p.shift=0;p.number=p.previousSibling?p.previousSibling.number+1:0;p.depth=v});c(b);d(b,-b.prelim,0);b.visitAfter(function(p){p.breadth*=s;p.depth*=r;p.midAngle=q(p);p.x=o(p);p.y=n(p); if(p.firstChild)p.midAngle+=Math.PI;delete p.breadth;delete p.depth;delete p.ancestor;delete p.prelim;delete p.mod;delete p.change;delete p.shift;delete p.number;delete 
p.thread})}};pv.Layout.Indent=function(){pv.Layout.Hierarchy.call(this);this.link.interpolate("step-after")};pv.Layout.Indent.prototype=pv.extend(pv.Layout.Hierarchy).property("depth",Number).property("breadth",Number);pv.Layout.Indent.prototype.defaults=(new pv.Layout.Indent).extend(pv.Layout.Hierarchy.prototype.defaults).depth(15).breadth(15); pv.Layout.Indent.prototype.buildImplied=function(b){function c(i,j,l){i.x=g+l++*f;i.y=h+j++*d;i.midAngle=0;for(i=i.firstChild;i;i=i.nextSibling)j=c(i,j,l);return j}if(!pv.Layout.Hierarchy.prototype.buildImplied.call(this,b)){var d=b.breadth,f=b.depth,g=0,h=0;c(b.nodes[0],1,1)}};pv.Layout.Pack=function(){pv.Layout.Hierarchy.call(this);this.node.radius(function(b){return b.radius}).strokeStyle("rgb(31, 119, 180)").fillStyle("rgba(31, 119, 180, .25)");this.label.textAlign("center");delete this.link}; pv.Layout.Pack.prototype=pv.extend(pv.Layout.Hierarchy).property("spacing",Number).property("order",String);pv.Layout.Pack.prototype.defaults=(new pv.Layout.Pack).extend(pv.Layout.Hierarchy.prototype.defaults).spacing(1).order("ascending");pv.Layout.Pack.prototype.$radius=function(){return 1};pv.Layout.Pack.prototype.size=function(b){this.$radius=typeof b=="function"?function(){return Math.sqrt(b.apply(this,arguments))}:(b=Math.sqrt(b),function(){return b});return this}; pv.Layout.Pack.prototype.buildImplied=function(b){function c(o){var n=pv.Mark.stack;n.unshift(null);for(var m=0,r=o.length;m0.0010}var u=Infinity,x=-Infinity,t=Infinity,p=-Infinity,v,w,y,z,C;v=o[0];v.x=-v.radius;v.y=0;n(v);if(o.length>1){w=o[1];w.x=w.radius;w.y=0;n(w);if(o.length>2){y=o[2];g(v,w,y);n(y);m(v,y);v.p= y;m(y,w);w=v.n;for(var A=3;A0){r(v,z);w=z;A--}else if(D<0){r(z,w);v=z;A--}}}}v=(u+x)/2;w=(t+p)/2;for(A=y=0;Ao.min){o.sim.step(); q=true}q&&d.render()},42)}else for(l=0;lg)g=j;i.size=i.firstChild?pv.sum(i.childNodes,function(l){return l.size}):c.$size.apply(c,(f[0]=i,f))});f.shift();switch(b.order){case "ascending":d.sort(function(i,j){return i.size-j.size});break;case "descending":d.sort(function(i,j){return j.size-i.size});break}var h=1/g;d.minBreadth=0;d.breadth= 0.5;d.maxBreadth=1;d.visitBefore(function(i){for(var j=i.minBreadth,l=i.maxBreadth-j,k=i.firstChild;k;k=k.nextSibling){k.minBreadth=j;k.maxBreadth=j+=k.size/i.size*l;k.breadth=(j+k.minBreadth)/2}});d.visitAfter(function(i,j){i.minDepth=(j-1)*h;i.maxDepth=i.depth=j*h});pv.Layout.Hierarchy.NodeLink.buildImplied.call(this,b)}};pv.Layout.Partition.Fill=function(){pv.Layout.Partition.call(this);pv.Layout.Hierarchy.Fill.constructor.call(this)};pv.Layout.Partition.Fill.prototype=pv.extend(pv.Layout.Partition); pv.Layout.Partition.Fill.prototype.buildImplied=function(b){pv.Layout.Partition.prototype.buildImplied.call(this,b)||pv.Layout.Hierarchy.Fill.buildImplied.call(this,b)};pv.Layout.Arc=function(){pv.Layout.Network.call(this);var b,c,d,f=this.buildImplied;this.buildImplied=function(g){f.call(this,g);c=g.directed;b=g.orient=="radial"?"linear":"polar";d=g.orient=="right"||g.orient=="top"};this.link.data(function(g){var h=g.sourceNode;g=g.targetNode;return d!=(c||h.breadth>1)*f:null}).bottom(function(l,k){return d=="mirror"?k&1?null:(k+1>>1)*-f:(k&1||-1)*(k+1>>1)*f}).fillStyle(function(l,k){return(k&1?h:i)((k>>1)+1)});this.band.add=function(l){return b.add(pv.Panel).extend(c).add(l).extend(this)}};pv.Layout.Horizon.prototype=pv.extend(pv.Layout).property("bands",Number).property("mode",String).property("backgroundStyle",pv.color).property("positiveStyle",pv.color).property("negativeStyle",pv.color); 
pv.Layout.Horizon.prototype.defaults=(new pv.Layout.Horizon).extend(pv.Layout.prototype.defaults).bands(2).mode("offset").backgroundStyle("white").positiveStyle("#1f77b4").negativeStyle("#d62728"); pv.Layout.Rollup=function(){pv.Layout.Network.call(this);var b=this,c,d,f=b.buildImplied;this.buildImplied=function(g){f.call(this,g);c=g.$rollup.nodes;d=g.$rollup.links};this.node.data(function(){return c}).size(function(g){return g.nodes.length*20});this.link.interpolate("polar").eccentricity(0.8);this.link.add=function(g){return b.add(pv.Panel).data(function(){return d}).add(g).extend(this)}};pv.Layout.Rollup.prototype=pv.extend(pv.Layout.Network).property("directed",Boolean); pv.Layout.Rollup.prototype.x=function(b){this.$x=pv.functor(b);return this};pv.Layout.Rollup.prototype.y=function(b){this.$y=pv.functor(b);return this}; pv.Layout.Rollup.prototype.buildImplied=function(b){function c(r){return i[r]+","+j[r]}if(!pv.Layout.Network.prototype.buildImplied.call(this,b)){var d=b.nodes,f=b.links,g=b.directed,h=d.length,i=[],j=[],l=0,k={},q={},o=pv.Mark.stack,n={parent:this};o.unshift(null);for(var m=0;ml.index?l.index+","+d.index:d.index+","+l.index;(o=q[h])||(o=q[h]={sourceNode:d,targetNode:l,linkValue:0,links:[]});o.links.push(f[m]);o.linkValue+=f[m].linkValue}b.$rollup={nodes:pv.values(k),links:pv.values(q)}}}; pv.Layout.Matrix=function(){pv.Layout.Network.call(this);var b,c,d,f,g,h=this.buildImplied;this.buildImplied=function(i){h.call(this,i);b=i.nodes.length;c=i.width/b;d=i.height/b;f=i.$matrix.labels;g=i.$matrix.pairs};this.link.data(function(){return g}).left(function(){return c*(this.index%b)}).top(function(){return d*Math.floor(this.index/b)}).width(function(){return c}).height(function(){return d}).lineWidth(1.5).strokeStyle("#fff").fillStyle(function(i){return i.linkValue?"#555":"#eee"}).parent= this;delete this.link.add;this.label.data(function(){return f}).left(function(){return this.index&1?c*((this.index>>1)+0.5):null}).top(function(){return this.index&1?null:d*((this.index>>1)+0.5)}).textMargin(4).textAlign(function(){return this.index&1?"left":"right"}).textAngle(function(){return this.index&1?-Math.PI/2:0});delete this.node};pv.Layout.Matrix.prototype=pv.extend(pv.Layout.Network).property("directed",Boolean);pv.Layout.Matrix.prototype.sort=function(b){this.$sort=b;return this}; pv.Layout.Matrix.prototype.buildImplied=function(b){if(!pv.Layout.Network.prototype.buildImplied.call(this,b)){var c=b.nodes,d=b.links,f=this.$sort,g=c.length,h=pv.range(g),i=[],j=[],l={};b.$matrix={labels:i,pairs:j};f&&h.sort(function(m,r){return f(c[m],c[r])});for(var k=0;kl)k=null;if(g){if(k&&g.scene==k.scene&&g.index==k.index)return;pv.Mark.dispatch("unpoint",g.scene,g.index)}if(g=k){pv.Mark.dispatch("point",k.scene,k.index);pv.listen(this.root.canvas(),"mouseout",f)}}function f(k){if(g&&!pv.ancestor(this,k.relatedTarget)){pv.Mark.dispatch("unpoint",g.scene,g.index);g=null}}var g,h=null,i=1,j=1,l=arguments.length?b*b:900;d.collapse=function(k){if(arguments.length){h=String(k);switch(h){case "y":i= 1;j=0;break;case "x":i=0;j=1;break;default:j=i=1;break}return d}return h};return d}; pv.Behavior.select=function(){function b(j){g=this.index;f=this.scene;i=this.mouse();h=j;h.x=i.x;h.y=i.y;h.dx=h.dy=0;pv.Mark.dispatch("selectstart",f,g)}function c(){if(f){f.mark.context(f,g,function(){var 
j=this.mouse();h.x=Math.max(0,Math.min(i.x,j.x));h.y=Math.max(0,Math.min(i.y,j.y));h.dx=Math.min(this.width(),Math.max(j.x,i.x))-h.x;h.dy=Math.min(this.height(),Math.max(j.y,i.y))-h.y;this.render()});pv.Mark.dispatch("select",f,g)}}function d(){if(f){pv.Mark.dispatch("selectend",f,g);f=null}}var f, g,h,i;pv.listen(window,"mousemove",c);pv.listen(window,"mouseup",d);return b}; pv.Behavior.resize=function(b){function c(l){h=this.index;g=this.scene;j=this.mouse();i=l;switch(b){case "left":j.x=i.x+i.dx;break;case "right":j.x=i.x;break;case "top":j.y=i.y+i.dy;break;case "bottom":j.y=i.y;break}pv.Mark.dispatch("resizestart",g,h)}function d(){if(g){g.mark.context(g,h,function(){var l=this.mouse();i.x=Math.max(0,Math.min(j.x,l.x));i.y=Math.max(0,Math.min(j.y,l.y));i.dx=Math.min(this.parent.width(),Math.max(l.x,j.x))-i.x;i.dy=Math.min(this.parent.height(),Math.max(l.y,j.y))-i.y; this.render()});pv.Mark.dispatch("resize",g,h)}}function f(){if(g){pv.Mark.dispatch("resizeend",g,h);g=null}}var g,h,i,j;pv.listen(window,"mousemove",d);pv.listen(window,"mouseup",f);return c}; pv.Behavior.pan=function(){function b(){g=this.index;f=this.scene;i=pv.vector(pv.event.pageX,pv.event.pageY);h=this.transform();j=1/(h.k*this.scale);if(l)l={x:(1-h.k)*this.width(),y:(1-h.k)*this.height()}}function c(){if(f){f.mark.context(f,g,function(){var k=h.translate((pv.event.pageX-i.x)*j,(pv.event.pageY-i.y)*j);if(l){k.x=Math.max(l.x,Math.min(0,k.x));k.y=Math.max(l.y,Math.min(0,k.y))}this.transform(k).render()});pv.Mark.dispatch("pan",f,g)}}function d(){f=null}var f,g,h,i,j,l;b.bound=function(k){if(arguments.length){l= Boolean(k);return this}return Boolean(l)};pv.listen(window,"mousemove",c);pv.listen(window,"mouseup",d);return b}; pv.Behavior.zoom=function(b){function c(){var f=this.mouse(),g=pv.event.wheel*b;f=this.transform().translate(f.x,f.y).scale(g<0?1E3/(1E3-g):(1E3+g)/1E3).translate(-f.x,-f.y);if(d){f.k=Math.max(1,f.k);f.x=Math.max((1-f.k)*this.width(),Math.min(0,f.x));f.y=Math.max((1-f.k)*this.height(),Math.min(0,f.y))}this.transform(f).render();pv.Mark.dispatch("zoom",this.scene,this.index)}var d;arguments.length||(b=1/48);c.bound=function(f){if(arguments.length){d=Boolean(f);return this}return Boolean(d)};return c}; pv.Geo=function(){}; pv.Geo.projections={mercator:{project:function(b){return{x:b.lng/180,y:b.lat>85?1:b.lat<-85?-1:Math.log(Math.tan(Math.PI/4+pv.radians(b.lat)/2))/Math.PI}},invert:function(b){return{lng:b.x*180,lat:pv.degrees(2*Math.atan(Math.exp(b.y*Math.PI))-Math.PI/2)}}},"gall-peters":{project:function(b){return{x:b.lng/180,y:Math.sin(pv.radians(b.lat))}},invert:function(b){return{lng:b.x*180,lat:pv.degrees(Math.asin(b.y))}}},sinusoidal:{project:function(b){return{x:pv.radians(b.lng)*Math.cos(pv.radians(b.lat))/Math.PI, y:b.lat/90}},invert:function(b){return{lng:pv.degrees(b.x*Math.PI/Math.cos(b.y*Math.PI/2)),lat:b.y*90}}},aitoff:{project:function(b){var c=pv.radians(b.lng);b=pv.radians(b.lat);var d=Math.acos(Math.cos(b)*Math.cos(c/2));return{x:2*(d?Math.cos(b)*Math.sin(c/2)*d/Math.sin(d):0)/Math.PI,y:2*(d?Math.sin(b)*d/Math.sin(d):0)/Math.PI}},invert:function(b){var c=b.y*Math.PI/2;return{lng:pv.degrees(b.x*Math.PI/2/Math.cos(c)),lat:pv.degrees(c)}}},hammer:{project:function(b){var c=pv.radians(b.lng);b=pv.radians(b.lat); var d=Math.sqrt(1+Math.cos(b)*Math.cos(c/2));return{x:2*Math.SQRT2*Math.cos(b)*Math.sin(c/2)/d/3,y:Math.SQRT2*Math.sin(b)/d/1.5}},invert:function(b){var c=b.x*3;b=b.y*1.5;var 
d=Math.sqrt(1-c*c/16-b*b/4);return{lng:pv.degrees(2*Math.atan2(d*c,2*(2*d*d-1))),lat:pv.degrees(Math.asin(d*b))}}},identity:{project:function(b){return{x:b.lng/180,y:b.lat/90}},invert:function(b){return{lng:b.x*180,lat:b.y*90}}}}; pv.Geo.scale=function(b){function c(m){if(!o||m.lng!=o.lng||m.lat!=o.lat){o=m;m=d(m);n={x:l(m.x),y:k(m.y)}}return n}function d(m){return j.project({lng:m.lng-q.lng,lat:m.lat})}function f(m){m=j.invert(m);m.lng+=q.lng;return m}var g={x:0,y:0},h={x:1,y:1},i=[],j=pv.Geo.projections.identity,l=pv.Scale.linear(-1,1).range(0,1),k=pv.Scale.linear(-1,1).range(1,0),q={lng:0,lat:0},o,n;c.x=function(m){return c(m).x};c.y=function(m){return c(m).y};c.ticks={lng:function(m){var r;if(i.length>1){var s=pv.Scale.linear(); if(m==undefined)m=10;r=s.domain(i,function(u){return u.lat}).ticks(m);m=s.domain(i,function(u){return u.lng}).ticks(m)}else{r=pv.range(-80,81,10);m=pv.range(-180,181,10)}return m.map(function(u){return r.map(function(x){return{lat:x,lng:u}})})},lat:function(m){return pv.transpose(c.ticks.lng(m))}};c.invert=function(m){return f({x:l.invert(m.x),y:k.invert(m.y)})};c.domain=function(m,r){if(arguments.length){i=m instanceof Array?arguments.length>1?pv.map(m,r):m:Array.prototype.slice.call(arguments); if(i.length>1){var s=i.map(function(x){return x.lng}),u=i.map(function(x){return x.lat});q={lng:(pv.max(s)+pv.min(s))/2,lat:(pv.max(u)+pv.min(u))/2};s=i.map(d);l.domain(s,function(x){return x.x});k.domain(s,function(x){return x.y})}else{q={lng:0,lat:0};l.domain(-1,1);k.domain(-1,1)}o=null;return this}return i};c.range=function(m,r){if(arguments.length){if(typeof m=="object"){g={x:Number(m.x),y:Number(m.y)};h={x:Number(r.x),y:Number(r.y)}}else{g={x:0,y:0};h={x:Number(m),y:Number(r)}}l.range(g.x,h.x); k.range(h.y,g.y);o=null;return this}return[g,h]};c.projection=function(m){if(arguments.length){j=typeof m=="string"?pv.Geo.projections[m]||pv.Geo.projections.identity:m;return this.domain(i)}return m};c.by=function(m){function r(){return c(m.apply(this,arguments))}for(var s in c)r[s]=c[s];return r};arguments.length&&c.projection(b);return c}; pegasus-wms_4.0.1+dfsg/pegasus.spec0000644000175000017500000000561211757531137016363 0ustar ryngeryngeName: pegasus Version: 4.0.1cvs Release: 1%{?dist} Summary: Workflow management system for Condor, grids, and clouds Group: Applications/System License: Apache Software License URL: http://pegasus.isi.edu/ Packager: Mats Rynge Source: pegasus-source-%{version}.tar.gz BuildRoot: %{_tmppath}/%{name}-root BuildRequires: ant, ant-apache-regexp, java, gcc, groff, python-devel, gcc-c++, make Requires: java >= 1.6, python >= 2.4, condor >= 7.4, graphviz-gd %define sourcedir %{name}-source-%{version} %description The Pegasus project encompasses a set of technologies that help workflow-based applications execute in a number of different environments including desktops, campus clusters, grids, and now clouds. Scientific workflows allow users to easily express multi-step computations. Once an application is formalized as a workflow the Pegasus Workflow Management Service can map it onto available compute resources and execute the steps in appropriate order. 
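# Example added for illustration, not part of the original spec: with the
# BuildRequires above installed and the Source: tarball placed in the
# SOURCES directory of an rpmbuild tree, a binary build is typically just
#
#   rpmbuild -ba pegasus.spec
#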
%prep
%setup -q -n %{sourcedir}

%build
ant dist

# strip executables
strip dist/pegasus-%{version}/bin/pegasus-invoke
strip dist/pegasus-%{version}/bin/pegasus-cluster
strip dist/pegasus-%{version}/bin/pegasus-kickstart
strip dist/pegasus-%{version}/bin/pegasus-keg

# fix pegasus-config on 64 bit systems
if (echo %{_libdir} | grep lib64); then
    perl -p -i -e 's/^my \$lib.*/my \$lib = "lib64";/' \
        dist/pegasus-%{version}/bin/pegasus-config
fi

%install
rm -Rf %{buildroot}
mkdir -p %{buildroot}/%{_sysconfdir}/%{name}
mkdir -p %{buildroot}/%{_bindir}
mkdir -p %{buildroot}/%{_libdir}
mkdir -p %{buildroot}/%{_datadir}
cp -aR dist/pegasus-%{version}/etc/* %{buildroot}/%{_sysconfdir}/%{name}/
cp -aR dist/pegasus-%{version}/bin/* %{buildroot}/%{_bindir}/
cp -aR dist/pegasus-%{version}/lib/* %{buildroot}/%{_libdir}/
cp -aR dist/pegasus-%{version}/share/* %{buildroot}/%{_datadir}/

# rm unwanted files
rm -f %{buildroot}/%{_bindir}/keg.condor
rm -f %{buildroot}/%{_datadir}/%{name}/java/COPYING.*
rm -f %{buildroot}/%{_datadir}/%{name}/java/EXCEPTIONS.*
rm -f %{buildroot}/%{_datadir}/%{name}/java/LICENSE.*
rm -f %{buildroot}/%{_datadir}/%{name}/java/NOTICE.*

%clean
ant clean
rm -Rf %{buildroot}

%files
%defattr(-,root,root,-)
%config(noreplace) %{_sysconfdir}/%{name}/
%{_bindir}/*
%{_libdir}/%{name}/
%{_datadir}/doc/%{name}
%{_datadir}/man/man1/*
%{_datadir}/%{name}

%changelog
* Tue Feb 7 2012 Mats Rynge 4.0.0cvs-1
- Preparing for 4.0.0
- Added graphviz-gd as dep

* Mon Aug 29 2011 Mats Rynge 3.2.0cvs-1
- Moved to 3.2.0cvs which is FHS compliant

* Fri Jul 22 2011 Doug Strain 3.0.3-2
- Fixing common.pm
- Adding g++ to dependencies

* Wed Jul 20 2011 Doug Strain 3.0.3-1
- Initial creation of spec file
- Installs into /usr/share/pegasus-3.0.3
- Binaries into /usr/bin
pegasus-wms_4.0.1+dfsg/LicenseBlurb.txt0000644000175000017500000000233411757531137017150 0ustar ryngerynge/**
 * Copyright 2007-2011 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
##
# Copyright 2007-2011 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##
pegasus-wms_4.0.1+dfsg/build.xml0000755000175000017500000005222611757531137015667 0ustar ryngerynge Manages Pegasus Architecture ${dist.arch} full ISO timestamp: ${ISODATE.full} JAVA TARGET is ${build.target} JAVA SOURCE is ${build.source} Pegasus Version: ${pegasus.version} ${ISODATE.utc} ${dist.arch}${line.separator} ${ISODATE.utc} ${dist.arch}${line.separator} Copyright © 2011 The University of Southern California.
All Rights Reserved.]]> make.available=${make.available} groff.available=${groff.available} FIXME: This is a stand-in dummy for Java unit testing. FIXME: This is a stand-in dummy for Python unit testing. FIXME: This is a stand-in dummy for Perl testing wrapping into JUnit API. pegasus-wms_4.0.1+dfsg/libexec/0000755000175000017500000000000011757531667015457 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/libexec/pegasus-check-helper.10000755000175000017500000000454511757531137021543 0ustar ryngerynge#!/bin/sh # # This script is staged to fork jobmanager to return a couple of things # $Id: pegasus-check-helper.1 50 2007-05-19 00:48:32Z gmehta $ # PATH=/bin:/usr/bin # install trap for premature exit trap 'echo "FATAL: premature exit"' EXIT # temp file fn=$1 shift # remove filename that gridftp check put there if [ "X$fn" != "X" ]; then /bin/ls "$fn" >> /dev/null 2>&1 if [ -e "$fn" ]; then /bin/rm $fn 2>&1 | sed -e 's/^/DELETE: /' else echo "DELETE: $fn: no such file" fi fi # supposed workdir wd=$1 shift # kickstart location ks=$1 shift # reflect the environment settings echo "ENV: GLOBUS_LOCATION=$GLOBUS_LOCATION" echo "ENV: GLOBUS_TCP_PORT_RANGE=$GLOBUS_TCP_PORT_RANGE" echo "ENV: GLOBUS_TCP_SOURCE_RANGE=$GLOBUS_TCP_SOURCE_RANGE" echo "ENV: LD_LIBRARY_PATH=$LD_LIBRARY_PATH" echo "ENV: HOME=$HOME" # vds-get-sites echo "ENV: app=$app" echo "ENV: data=$data" echo "ENV: tmp=$tmp" echo "ENV: wntmp=$wntmp" echo "ENV: grid3=$grid3" # new in VDT 1.3.8 echo "ENV: GRID3_SITE_NAME=$GRID3_SITE_NAME" echo "ENV: GRID3_BASE_DIR=$GRID3_BASE_DIR" echo "ENV: GRID3_APP_DIR=$GRID3_APP_DIR" echo "ENV: GRID3_DATA_DIR=$GRID3_DATA_DIR" echo "ENV: GRID3_TMP_DIR=$GRID3_TMP_DIR" echo "ENV: GRID3_TMP_WN_DIR=$GRID3_TMP_WN_DIR" echo "ENV: GRID3_JOB_CONTACT=$GRID3_JOB_CONTACT" echo "ENV: GRID3_TRANSFER_CONTACT=$GRID3_TRANSFER_CONTACT" echo "ENV: GRID3_SPONSOR=$GRID3_SPONSOR" echo "ENV: GRID3_SITEINFO=$GRID3_SITEINFO" # wild goose chase sanity check - you never know for i in "$GLOBUS_LOCATION" "$HOME" "$wd" "$app" "$data" "$tmp" "$wntmp" "$grid3"; do if [ "X$i" != "X" ]; then /bin/ls -d "$i" >> /dev/null 2>&1 if [ -d "$i" ]; then echo "DIR: OK $i" else echo "DIR: NOTADIR $i" fi fi done # show some app /bin/date +'DATE: %Y-%m-%dT%H:%M:%S%z' # check out kickstart if [ -x $ks ]; then version=`$ks -V 2>&1` if [ $? -eq 0 ]; then echo "KICKSTART: $version" else echo "KICKSTART: FAILED: $version" fi else echo "KICKSTART: INVALID" fi # obtain remote system arch info if tmpdir=`mktemp -d /tmp/$$.XXXXXX`; then if wget -P $tmpdir 'http://smarty.isi.edu/cgi-bin/cvsweb.cgi/~checkout~/vds/release-tools/getsystem/getsystem' >> /dev/null 2>&1; then echo "SYSINFO: `perl $tmpdir/getsystem`" else echo 'SYSINFO: (no network)' fi rm -rf $tmpdir else echo 'SYSINFO: UNKNOWN' fi # done trap - EXIT echo 'OK' exit 0 pegasus-wms_4.0.1+dfsg/libexec/README0000755000175000017500000000443611757531137016341 0ustar ryngeryngeMINI HOWTO for PEGASUS-INFORMATION PROVIDER. Required: 1) GLOBUS MDS 2) Gram Reporter for jobmanager-* (depending on what jobmanagers u have installed) 3) This directory. INSTALLATION and configuration: a) Install the globus gatekeeper,jobmanagers and MDS as per instructions on the globus website. 
b) Edit the $GLOBUS_LOCATION/etc/grid-info-slapd.conf c) Add this line among the many include lines at the top include <$PEGASUS>/etc/infoproviders/gpgs-pool-info.schema d) Edit the $GLOBUS_LOCATION/etc/grid-info-resource-ldif.conf e) Make sure that GPT postinstall adds the entries for jobmanager-* in the resource-ldif.conf f) At the bottom add these entries (modify as necessary paths in <>) # generate gpgs pool information every 30 secs dn: Gpgs-Software-deployment=Gpgs, Mds-Host-hn= , Mds-Vo-name=local, o=grid objectclass: GlobusTop objectclass: GlobusActiveObject objectclass: GlobusActiveSearch type: exec path: base: gpgs-infoprovider args: -dn Gpgs-Software-deployment=Gpgs,Mds-Host-hn=,Mds-Vo-name=local,o=grid -f cachetime: 30 timelimit: 20 sizelimit: 100 # END Gpgs pool info FORMAT of gpgs.pool.config ##################################### # GPGS POOL CONFIGURATION # ##################################### gpgs.pool.id : (can be one and only one) gpgs.pool.lrc : (can be one or many) #gpgs.pool.lrc : rls://smarty.isi.edu gpgs.pool.gridftp : (can be one or many) #eg : gpgs.pool.gridftp : gsiftp://smarty.isi.edu/smarty/sources/@2.2.4 gpgs.pool.universe : #(Has 2 be atleast vanilla and transfer. can be more then one of each type) #gpgs.pool.universe : vanilla@birdie.isi.edu/jobmanager-condor@2.2.4 #gpgs.pool.universe : transfer@birdie.isi.edu/jobmanager-fork@2.2.4 gpgs.pool.gridlaunch : (one and only one) #gpgs.pool.gridlaunch : /users/bin/grid-launch gpgs.pool.workdir : (one and only one) #gpgs.pool.workdir : /smarty/temp/ gpgs.pool.profile : (can be one or many) #gpgs.pool.profile : env@GLOBUS_LOCATION@/smarty/gt2.2.4 #gpgs.pool.profile : env@JAVA_HOME@/smarty/jdk1.4.1 #gpgs.pool.profile : pgs@PEGASUS@/nfs/asd2/gmehta/pgs pegasus-wms_4.0.1+dfsg/libexec/dot-ps2-to-eps.pl0000755000175000017500000000145111757531137020505 0ustar ryngerynge#!/usr/bin/env perl # # This program fixes the ps2 type output of graphviz's dot and # attempts to make it a better-behaved eps file. 
# # $Id$ # # Usage: dot -Tps2 foo.dot | $0 > foo.eps # use 5.006; use strict; # # skip first line - original PS header written by dot # my $head = ; # # write our own fake PS header with EPS extensions # print "%!PS-Adobe-3.0 EPSF-2.0\n"; # # read rest of file for bounding boxes and page sizes # my (@bb,$bb); while ( ) { if ( /\%\%PageBoundingBox: ([0-9.]+) ([0-9.]+) ([0-9.]+) ([0-9.]+)/ ) { @bb = ( $1, $2, $3, $4 ); $bb = "$1 $2 $3 $4"; } elsif ( m{/PageSize} ) { s{/PageSize \[\S+ [0-9.]+\]}{/PageSize \[$bb[2] $bb[3]\]}; } elsif ( m{^\S+ \S+ \S+ \S+ boxprim clip newpath}o ) { $_ = "$bb boxprim clip newpath\n"; } print ; } pegasus-wms_4.0.1+dfsg/libexec/perl-docs0000755000175000017500000000142611757531137017270 0ustar ryngerynge#!/bin/bash set -e TARGET_DIR=$1 if [ "x$TARGET_DIR" = "x" ]; then echo "Please specify a target dir as first argument" echo "Example: ./libexec/perl-doc dist/pegasus-3.1/docs/html/perl" exit 1 fi mkdir -p $TARGET_DIR/Pegasus/DAX TARGET_DIR=`cd $TARGET_DIR && pwd` cd lib/pegasus/perl/ for i in Pegasus/DAX/*.pm; do pod2html --htmldir /tmp/xxx --podroot $PWD --podpath Pegasus/DAX \ --infile $i --outfile $TARGET_DIR/`basename $i .pm`.html done # flatten the web space cd $TARGET_DIR perl -p -i -e 's:href="/Pegasus/DAX/:href=":g' *.html # remove references to "manpages" perl -p -i -e 's/the ([\w:]+) manpage/\1/g' *.html # provide a nice index.html cp $TARGET_DIR/Factory.html $TARGET_DIR/index.html echo "Perl documentation successfully generated" pegasus-wms_4.0.1+dfsg/libexec/reassemble-chunks0000755000175000017500000000436511757531137021020 0ustar ryngerynge#!/usr/bin/env perl # # Reassembles kickstart feedback channel messages from chunks # # Requires some Perl XML modules to be post-installed. # # $Id: reassemble-chunks 2437 2010-09-21 21:22:37Z voeckler $ # require 5.005; use strict; use XML::Parser::Expat; %main::data = (); # contents @main::stack = (); # written by s_e, read by c_h $main::tail = "\n"; $main::head = "\n" . "" . " " . " " . " " . " " . "]>\n" . "\n"; sub start_element { # purpose: callback for open tag my ($self,$element,%attr) = @_; if ( $element eq 'chunk' ) { push( @main::stack, [ @attr{'channel','start'} ] ); $self->setHandlers( Char => \&content_handler ); } } sub final_element { # purpose: callback for close tag my ($self,$element) = @_; if ( $element eq 'chunk' ) { $self->setHandlers( Char => \&skip_handler ); pop( @main::stack ); } } sub skip_handler { # purpose: generic character handler, ignores text my ($self,$text) = @_; # noop } sub content_handler { # purpose: special character handler, active within chunks my $self = shift; my @tos = @{ $main::stack[ $#main::stack ] }; push( @{$main::data{$tos[0]}{$tos[1]}}, shift() ); } # read contents into $contents $/ = undef; # big gulp mode my $fn = shift || die "Usage: $0 filename"; open( XML, '<' . $fn ) || die "open $fn: $!\n"; my $content = ; close XML; # init XML parser my $xml = new XML::Parser::Expat; $xml->setHandlers( Start => \&start_element, End => \&final_element, Char => \&skip_handler ); # artificially introduce a root element to contain all chunks # and any other data the remote scheduler may have messed into the stream. #$content = "\n" . $content . ""; $content = $main::head . $content . 
$main::tail; $xml->parsestring($content); undef $content; # now produce content sorted by timestamp foreach my $channel ( sort { $a <=> $b } keys %main::data ) { foreach my $time ( sort keys %{$main::data{$channel}} ) { print( join('',@{$main::data{$channel}{$time}}) ); } } pegasus-wms_4.0.1+dfsg/libexec/pegasus-check-helper.20000755000175000017500000000072411757531137021537 0ustar ryngerynge#!/bin/sh # # This script is staged to batch jobmanager to check all Pegasus # transformations. # # $Id: pegasus-check-helper.2 2406 2010-09-17 21:19:34Z voeckler $ # PATH=/bin:/usr/bin # install trap for premature exit trap 'echo "FATAL: premature exit"' EXIT # read list of application filesnames from staged stdin while [ "X$1" != "X" ]; do # app=$1 # shift read app test -x "$app" || echo "ERROR: $app" done # done trap - EXIT echo 'OK' exit 0 pegasus-wms_4.0.1+dfsg/libexec/latex-sample-props0000755000175000017500000002765111757531137021145 0ustar ryngerynge#!/usr/bin/env perl # # Parses $PEGASUS_HOME/etc/sample.properties and generates LaTeX from it # $Id: latex-sample-props 3745 2011-05-12 18:29:41Z voeckler $ # # Usage: latex-sample-props propfn > doc.tex # pdflatex doc # makeindex doc.idx # pdflatex doc # pdflatex doc # # The following markup is required to be adhered to: # # 0. You must now (new) start your document with a title like this: # # # # # TITLE "ALL CAPS TITLE IN QUOTES" # # # # Only after the TITLE was met in the input, the table of contents # will be generated right there. Thus, it is imperative that you # start your document with the TITLE you want to give it. # # 1. Any new section starts like this: # # # # # SECTION "ALL CAPS TITLE IN QUOTES" # # # # Three comment-marked lines, the reserved word SECTION (all caps), # and the section's title inside quotes. Use all caps for the title. # It will be word-wise de-capped into the LaTeX document. # # # # # SUBSECTION "ALL CAPS SUBTITLE IN QUOTES" # # # # See above, except that a sub-section will be generated. # # 2. Item format # # Any item starts out with the key "Property: some.prop", followed # by other such tabular keywords, colon, and their respective values. # The following keywords are approved, parsed, and may have special # meaning: # # Systems the subsystem this applies to, e.g. Pegasus # Type a symbolic description of the type the prop can have # Value[X] For enumeration types, the possible values 1 <= X <= N # Default value assumed in absence of property # See also Cross reference to other props, repeat as necessary! # Example Sample for complex properties # Moved to Cross references the new name of a property (deprecated) # New Name Cross references the new name of a property (preferred) # # After the initial table comes the description. Please use paragraphs # (empty comment lines) as necessary. # # The final section in any item is always the property followed by its # default value in a paragraph of its own. It is followed by an empty (!) # line without comment. This final section is skipped in the output. # # 3. The following markup is permitted inside sample.properties: # # as is verbatim

# visual markup    
# numbered list  
# bullet list
# definitions
# 2-column table & # 3-column table & & # # Note: While and are optional (but should be used for # XHTML), is _not_ optional, and must be used. # use 5.006; use strict; use File::Spec; use Cwd qw(abs_path); use POSIX qw(strftime); die "Set your PEGASUS_HOME environment variable\n" unless exists $ENV{'PEGASUS_HOME'}; my $fn = shift || File::Spec->catfile( $ENV{'PEGASUS_HOME'}, 'etc', 'sample.properties' ); my @stack = (); my %replace = ( 'screen' => "\n\\begin{verbatim}", '/screen' => "\\end{verbatim}\n", 'para' => '', '/para' => '', 'emphasis' => "\\emph{", '/emphasis' => "}", 'orderedlist' => sub { push(@stack,'O'); "\n\\begin{enumerate}\n"; }, '/orderedlist' => sub { die unless pop(@stack) eq 'O'; "\n\\end{enumerate}\n"; }, 'itemizedlist' => sub { push(@stack,'I'); "\n\\begin{itemize}\n"; }, '/itemizedlist' => sub { die unless pop(@stack) eq 'I'; "\n\\end{itemize}\n"; }, 'variablelist' => sub { push(@stack,'D'); "\n\\begin{description}\n"; }, '/variablelist' => sub { die unless pop(@stack) eq 'D'; "\n\\end{description}\n"; }, 'term' => "\n\\item[", '/term' => "] ", 'listitem' => sub { $stack[$#stack] eq 'V' ? '' : "\n\\item "; }, '/listitem' => '', 'varlistentry' => sub { push(@stack,'V'); ''; }, '/varlistentry' => sub { die unless pop(@stack) eq 'V'; ''; }, 'pre' => "\n\\begin{verbatim}", '/pre' => "\\end{verbatim}\n", 'ol' => "\n\\begin{enumerate}\n", '/ol' => "\n\\end{enumerate}\n", 'ul' => "\n\\begin{itemize}\n", '/ul' => "\n\\end{itemize}\n", 'li' => "\n\\item ", '/li' => '', 'i' => "\\emph{", '/i' => "}", 'b' => "\\textbf{", '/b' => "}", 'tt' => "\\texttt{", '/tt' => "}", 'tb2' => "\n\\begin{tabular}{lp{100mm}}\n", '/tb2' => "\n\\end{tabular}\n", 'tb3' => "\n\\begin{tabular}{lll}\n", '/tb3' => "\n\\end{tabular}\n", '/te' => "\\\\\n", 'dl' => "\n\\begin{description}\n", '/dl' => "\n\\end{description}\n", 'dt' => "\n\\item[", '/dt' => "] ", 'dd' => '', '/dd' => '' ); sub lookup($) { my $key = shift; my $result; if ( exists $replace{$key} ) { if ( ref $replace{$key} eq 'CODE' ) { $result = &{$replace{$key}}($key); } else { $result = $replace{$key}; } } else { $result = $key; } $result; } sub escape($) { # purpose: LaTeX escape an arbitrary sentence (imperfect) # paramtr: $x (IN): some raw string # returns: cooked string local $_ = shift; # are we inside a pre? $main::inpre=1 if m{<(pre|screen)>}; $main::inpre=0 if m{}; # escape all TeX special chars (that I can think of, not complete) s/([\$_%{}])/\\$1/g unless $main::inpre; # special escape for 1-line verbatims -- only do once per line s{
<screen>(.*)</screen>
}{\\verb|$1|}; # replace only known items with LaTeX formatting. # unknown "TAG" strings are copied verbatim. s/<([^>]+)>/lookup($1)/eg; # if something looks like a HTTP URL, try to hyperlink it s^((ftp|https?)://\S+)^\\url{$1}^g; # done $_; } sub trim($) { # purpose: Trims redundant whitespace from head and tail of a string # paramtr: $x (IN): some raw string # returns: trimmed string, may be identical to argument, or even empty. local $_ = shift; s/^\s+//; s/\s+$//; $_; } # open file (and fail) before print LaTeX preambles $/ = ''; # activate paragraph mode open( SP, '<' . $fn ) || die "open $fn: $!\n"; # # print LaTeX preamble -- do not interpolate anything here # print << 'EOF'; \documentclass[11pt,letterpaper,headsepline,pagesize,letterpaper,DIV11]{scrartcl} \usepackage[T1]{fontenc} \usepackage{times,verbatim} \usepackage{makeidx} %\usepackage[latin1]{inputenc} % paragraph setup \setlength{\parindent}{0pt} \setlength{\parskip}{0.4\baselineskip plus1pt minus1pt} % not in this country: \frenchspacing % % page setup % \usepackage[automark]{scrpage2} \renewcommand{\headfont}{\normalfont\sffamily} \renewcommand{\pnumfont}{\normalfont\sffamily} \clearscrheadfoot \ohead{\pagemark}\chead{}\ihead{\headmark} \ofoot{}\cfoot{}\ifoot{} \pagestyle{scrheadings} \thispagestyle{empty} % % I hate KOMA page layout, so I fix it my way % %\setlength{\topmargin}{0pt} % headexclude document style is broken \setlength{\textheight}{1.05\textheight} % add n% more length \setlength{\textwidth}{1.03\textwidth} % add n% more width % % my verbatim stuff % \makeatletter \newlength{\myverbatimindent} \setlength{\myverbatimindent}{10mm} \renewcommand{\verbatim@processline}{% \leavevmode\hspace*{\myverbatimindent}\the\verbatim@line\par} \renewcommand{\verbatim@font}{\ttfamily\small\baselineskip10pt} \makeatother % % personal shortcuts % \def\dd{\\\hline} \def\at{\symbol{64}} \def\ul#1{\underline{#1}} \def\rref#1{\ref{#1} (page~\pageref{#1})} % % should be last % \makeindex \usepackage{hyperref} % \begin{document} \subject{The Pegasus Workflow Planner Properties} %% \title{Property Documentation} EOF # which version my $version=$ENV{'PEGASUS_VERSION'}; if ( ! 
defined $version || $version eq '' ) { # didn't find a valid version, ask pegasus-version my $v_v = File::Spec->catfile( $ENV{'PEGASUS_HOME'}, 'bin', 'pegasus-version' ); chomp($version = `$v_v`); } # # print LaTeX preamble, 2nd part, now with interpolation # print << "EOF"; \\author{V~$version automatically generated on} \\date{@{[POSIX::strftime("%Y-%m-%d %H:%M", localtime())]}} %% defer until we hit TITLE in input %% \\maketitle %% \\tableofcontents %% \\clearpage EOF $main::title = 0; my (@line,%line,$k,$v,$i); my $depth = 0; while ( ) { if ( /^\# Property : (.*)/ ) { my $title = $1; if ( $depth == 2 ) { print "\\subsubsection{$title}\n"; } else { print "\\subsection{$title}\n"; } print '\label{sec:', $title, "}\n"; # my @title = split /\./, $title; # print '\index{', join('.',@title[0,1]), "}\n"; # print( '\index{', join('.',@title[0,1]), '!', # join('.',@title[2..$#title]) , "}\n" ) if @title > 2; print '\index{', $title, "}\n"; print "\n"; # break paragraph into lines, and remove comment+space @line = map { trim($_) } map { substr($_,2) } split /[\r\n]/; # rip first section %line = (); print " \\begin{tabular}{|p{20mm}|p{120mm}|}\\hline\n"; #print " \\textsf{Key} & \\textsf{Meaning}\\dd\\hline\n"; for ( $i=0; $i<@line; ++$i ) { # exit at first empty line last if length($line[$i]) < 1; ($k,$v) = map { escape($_) } split( /\s+:\s+/, $line[$i], 2 ); $line{$k} = $v; if ( $k eq 'Property' ) { # ignore - this is already the subsection } elsif ( $k =~ /moved?\s*to/i || $k =~ /see\s*also/i || $k =~ /new\s*name/i ) { # generate cross reference $k = "New name" if $k =~ /moved?\s*to/i; print "\t$k & $v, section~\\rref{sec:$v}\\dd\n"; } else { # default action print "\t$k & $v\\dd\n"; } } print " \\end{tabular}\n"; print "\n"; # print everything but last paragraph for ( ; $i < @line - 2; ++$i ) { print escape($line[$i]), "\n"; } # done with the subsection print "\n\n"; } elsif ( /\# (SECTION|SUBSECTION|TITLE)/ ) { @line = map { trim($_) } map { substr($_,2) } split /[\r\n]/; my $flag = undef; for ( $i=0; $i<@line; ++$i ) { last if ( length($line[$i]) <= 1 && $flag ); if ( $line[$i] =~ /^SECTION "([^\"]+)"/ ) { my @title = map { ucfirst lc } split /\s+/,$1; print "\n\n\\section{@title}\n"; print '\label{sec:', join('',@title), "}\n\n"; $flag = 1; $depth = 1; } elsif ( $line[$i] =~ /^SUBSECTION "([^\"]+)"/ ) { my @title = map { ucfirst lc } split /\s+/,$1; print "\n\n\\subsection{@title}\n"; print '\label{sec:', join('',@title), "}\n\n"; $flag = 1; $depth = 2; } elsif ( $line[$i] =~ /^TITLE "([^\"]+)"/ ) { $main::title += 1; # remember my @title = map { ucfirst lc } split /\s+/,$1; print "\n\n\\title{@title}\n"; print "\\maketitle\n\\tableofcontents\n\\clearpage\n\n"; $flag = 1; $depth = 0; } } for ( ; $i<@line; ++$i ) { print escape($line[$i]), "\n"; } } elsif ( /^\#/ ) { warn "Warning: $.: Found regular textual paragraph, copying\n"; warn "<<<$_>>>\n"; @line = map { trim($_) } map { substr($_,2) } split /[\r\n]/; for ( $i=0; $i<@line; ++$i ) { print escape($line[$i]), "\n"; } } else { warn "Warning: $.: Found uncommented paragraph, ignoring\n"; warn "<<<$_>>>\n"; } } close SP; die "FATAL: You didn't have a TITLE in your input\n" unless $main::title > 0; # # print LaTeX post-amble # print << 'EOF'; \clearpage \addcontentsline{toc}{section}{Index} \printindex \end{document} %%% Local Variables: %%% mode: latex %%% TeX-master: t %%% End: EOF pegasus-wms_4.0.1+dfsg/libexec/statistics/0000755000175000017500000000000011757531667017651 5ustar 
ryngeryngepegasus-wms_4.0.1+dfsg/libexec/statistics/genstats-breakdown0000755000175000017500000001754011757531137023400 0ustar ryngerynge#!/usr/bin/perl use Getopt::Long; use List::Util qw[min max]; use File::Basename qw(dirname basename); use File::Path; use File::Copy; use File::Spec; use Cwd; use POSIX qw(ceil floor); sub usage(;$) { my $ec = shift || 0; my $basename = basename($0); print << "EOF"; Usage: $basename --help --output= Optional arguments: -h|--help Print this help message. -o|--output file Write outputs to specified file. -f|--file input file path which contains newline separated run directory path. -x|--extra Generate individual statistics for each run directory. EOF exit($ec); } sub parse_kick_start_files($$){ ($cur,$dir) =@_; chdir $dir; $samples{$dir} = (); @kickstartFiles = glob "*.out*"; for $file (@kickstartFiles) { if (open(FILE, $file)) { while ($line = ) { chomp $line; if ($line =~ /.* duration=\"([\.0-9]*)\" transformation=\"([-_A-Za-z0-9:\.]*)\" .*/) { $duration = $1; $transformation = $2; if (!defined($samples{$dir}{$transformation})) { $samples{$dir}{$transformation}{'count'} = 0; $samples{$dir}{$transformation}{'mean'} = 0; $samples{$dir}{$transformation}{'diff'} = 0; $samples{$dir}{$transformation}{'min'} = 1e100; $samples{$dir}{$transformation}{'max'} = 0; if (!defined($samples{$allKey}{$transformation})) { $samples{$allKey}{$transformation}{'count'} = 0; $samples{$allKey}{$transformation}{'mean'} = 0; $samples{$allKey}{$transformation}{'diff'} = 0; $samples{$allKey}{$transformation}{'min'} = 1e100; $samples{$allKey}{$transformation}{'max'} = 0; } } $samples{$allKey}{$transformation}{'count'}++; $delta = $duration - $samples{$allKey}{$transformation}{'mean'}; $samples{$allKey}{$transformation}{'mean'} += $delta / $samples{$allKey}{$transformation}{'count'}; $samples{$allKey}{$transformation}{'diff'} += $delta * ($duration - $samples{$allKey}{$transformation}{'mean'}); $min = $samples{$allKey}{$transformation}{'min'}; $samples{$allKey}{$transformation}{'min'} = ($duration < $min) ? $duration : $min; $max = $samples{$allKey}{$transformation}{'max'}; $samples{$allKey}{$transformation}{'max'} = ($duration > $max) ? $duration : $max; $samples{$dir}{$transformation}{'count'}++; $delta = $duration - $samples{$dir}{$transformation}{'mean'}; $samples{$dir}{$transformation}{'mean'} += $delta / $samples{$dir}{$transformation}{'count'}; $samples{$dir}{$transformation}{'diff'} += $delta * ($duration - $samples{$dir}{$transformation}{'mean'}); $min = $samples{$dir}{$transformation}{'min'}; $samples{$dir}{$transformation}{'min'} = ($duration < $min) ? $duration : $min; $max = $samples{$dir}{$transformation}{'max'}; $samples{$dir}{$transformation}{'max'} = ($duration > $max) ? $duration : $max; } } } } chdir $cur; } sub directory_exists(\@$){ # purpose : checks if the directory exists in the directory list. 
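# note, added for clarity: both paths are normalized with
# File::Spec->canonpath before the string comparison below, so purely
# textual differences (extra separators, trailing slashes) do not
# produce false mismatches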
#returns : 1 if directory exists,0 otherwise ($list_ref) = shift; ($check_dir) =shift; foreach $dir (@$list_ref) { if ( (File::Spec->canonpath($dir)) eq (File::Spec->canonpath($check_dir))){ return 1; } } return 0; } $extra = 0; $file = undef; $result = GetOptions( "help|h" => \&usage, "output|o=s" => \$output, "file|f=s" =>\$file, "extra|x" => sub { $extra++; }, ); if(!$file){ if ($#ARGV < 0) { push @ARGV, getcwd(); } } %samples = (); $allKey = "__ALL__"; $samples{$allKey} = (); $cur = getcwd(); for $dir (@ARGV) { parse_kick_start_files($cur ,$dir); } if($file){ @dirs_arr = keys(%samples); $file= File::Spec->rel2abs($file); open CONFIG, "$file" or die "Unable to open file: $file . $! \n"; for () { chomp; s/\#.*//; s/^\s+//; s/\s+$//; next unless length; $dir = $_; #check if the directory exists in the list created by -x option if(!directory_exists(@dirs_arr,$dir)){ parse_kick_start_files($cur ,$dir); } } close CONFIG; } if (defined($output)) { open $oldout, ">&STDOUT" or die "Can't dup STDOUT $!"; open STDOUT, '>', $output or die "Can't redirect STDOUT $!"; } my $submit_dir_count = 0; print "#Legends "; print "\n#Transformation - name of the transformation"; print "\n#Count - the number of times the transformation was executed"; print "\n#Mean(sec.) - the mean of the transformation runtime "; print "\n#Variance(sec.) - the variance of the transformation runtime"; print "\n#Min(sec.) - the minimum transformation runtime value"; print "\n#Max(sec.) - the maximum transformation runtime value"; print "\n#Total(sec.) - the cumulative of transformation runtime \n"; if ($extra) { for $dir (@ARGV) { print sprintf("\n#$dir\n#%-39s % 6s % 12s % 16s % 12s % 12s % 12s\n", "Transformation", "Count", "Mean", "Variance", "Min", "Max", "Total"); for $transformation (keys %{$samples{$dir}}) { $count = $samples{$dir}{$transformation}{'count'}; if ($count == 1) { $denom = 1; } else { $denom = $count - 1; } $out = sprintf("%-40s % 6d % 12.4f % 16.4f % 12.4f % 12.4f % 12.4f\n", $transformation, $count, $samples{$dir}{$transformation}{'mean'}, $samples{$dir}{$transformation}{'diff'} / $denom, $samples{$dir}{$transformation}{'min'}, $samples{$dir}{$transformation}{'max'}, $count * $samples{$dir}{$transformation}{'mean'}); print $out; } ++$submit_dir_count; } } # Read the element from the file and print the breakdown result if($file){ open CONFIG, "$file" or die "Unable to open file: $file . $! 
\n"; for () { chomp; s/\#.*//; s/^\s+//; s/\s+$//; next unless length; $dir = $_; # check if the element exists in the arguments passed through -x option if(!directory_exists(@ARGV,$dir)){ print sprintf("\n#$dir\n#%-39s % 6s % 12s % 16s % 12s % 12s % 12s\n", "Transformation", "Count", "Mean", "Variance", "Min", "Max", "Total"); for $transformation (keys %{$samples{$dir}}) { $count = $samples{$dir}{$transformation}{'count'}; if ($count == 1) { $denom = 1; } else { $denom = $count - 1; } $out = sprintf("%-40s % 6d % 12.4f % 16.4f % 12.4f % 12.4f % 12.4f\n", $transformation, $count, $samples{$dir}{$transformation}{'mean'}, $samples{$dir}{$transformation}{'diff'} / $denom, $samples{$dir}{$transformation}{'min'}, $samples{$dir}{$transformation}{'max'}, $count * $samples{$dir}{$transformation}{'mean'}); print $out; } ++$submit_dir_count; } } close CONFIG; } if ($submit_dir_count >1){ print sprintf("\n#All\n#%-39s % 6s % 12s % 16s % 12s % 12s % 12s\n", "Transformation", "Count", "Mean", "Variance", "Min", "Max", "Total"); for $transformation (keys %{$samples{$allKey}}) { $count = $samples{$allKey}{$transformation}{'count'}; if ($count == 1) { $denom = 1; } else { $denom = $count - 1; } $out = sprintf("%-40s % 6d % 12.4f % 16.4f % 12.4f % 12.4f % 12.4f\n", $transformation, $count, $samples{$allKey}{$transformation}{'mean'}, $samples{$allKey}{$transformation}{'diff'} / $denom, $samples{$allKey}{$transformation}{'min'}, $samples{$allKey}{$transformation}{'max'}, $count * $samples{$allKey}{$transformation}{'mean'}); print $out; } } if (defined($output)) { open STDOUT, '>&', $oldout or die "Can't dup \$oldout $!"; close $oldout; } pegasus-wms_4.0.1+dfsg/libexec/statistics/genstats0000755000175000017500000010003611757531137021417 0ustar ryngerynge#!/usr/bin/env perl use strict; use Getopt::Long; use List::Util qw[min max]; use File::Basename qw(dirname basename fileparse); use File::Path; use File::Copy; use File::Spec; use Switch; my %jobs = (); my %map = (); my %files = (); my $defaultFilenameLength = 40; my $defaultJobNameLength = 40; my $longJobNames = 0; my $longFilenames = 0; sub usage(;$) { my $ec = shift || 0; my $basename = basename($0,'.pl'); print << "EOF"; Usage: $basename --dag= --adax=|--jobstate-log= --output= --help Mandatory arguments: -d|--dag dagname name of the dag file to process -o|--output dir write outputs in given directory Complex arguments: -a|--adax use annotations from dax -l|--jobstate-log log jobstate log to process Optional arguments: -c|--condor pure condor run - no GRID_SUBMIT events -h|--help print this help message and exit -i|--input dir read inputs from given directory EOF exit($ec); } sub processTransfers { my $jobsRef = shift; my $filesRef = shift; my $job = shift; my $link = shift; my ($srcSite, $dstURL, $dstSite); my @files = (); if (open(INPUT, "$job.in")) { # List of transfers # Format: # Comment indicating src site # Source url # Comment indicating dest site # Dest url while ($srcSite = ) { chomp $srcSite; $srcSite =~ s/^#//; #ignore src url ; $dstSite = ; chomp $dstSite; $dstSite =~ s/^#//; $dstURL = ; chomp $dstURL; $dstURL =~ s/.*\///; push(@files, $dstURL); if (length($dstURL) > $defaultFilenameLength) { $longFilenames++; } #Save the source only for stage-in jobs if ($job !~ /stage_out_/) { my %fileParams = (); $filesRef->{$dstURL} = \%fileParams; $filesRef->{$dstURL}->{'src'} = $srcSite; } } close(INPUT); } $jobsRef->{$job}->{$link} = \@files; } sub processFiles { my $jobsRef = shift; my $filesRef = shift; my $job = shift; my $link = shift; my @files = 
(); if (open(INPUT, "$job.$link.lof")) { while (my $file = ) { chomp $file; push(@files, $file); if (length($file) > $defaultFilenameLength) { $longFilenames++; } my %fileParams = (); $filesRef->{$file} = \%fileParams; } close(INPUT); } $jobsRef->{$job}->{$link} = \@files; } sub processDAGManOutput { # # Read dagman.out # Create hash of jobs # Create parent -> child relationships # my $jobsRef = shift; my $mapRef = shift; my $filesRef = shift; my $dagName = shift; my $outputDir = shift; open(DAGIN, "$dagName") || die "$dagName not found\n"; open(DAGOUT, ">$outputDir/dag") || die "Could not open $outputDir/dag\n"; foreach my $line () { chomp($line); if ($line =~ '^JOB') { my @tokens = split(' ', $line); my $job = $tokens[1]; my %jobParams; $jobsRef->{$job} = \%jobParams; if ($job =~ /stage_in_/) { processTransfers($jobsRef, $filesRef, $job, 'in'); # We're not processing the outputs of # the transfer job. } else { processFiles($jobsRef, $filesRef, $job, 'in'); processFiles($jobsRef, $filesRef, $job, 'out'); } my @parents = (); $mapRef->{$job} = \@parents; if (length($job) > $defaultJobNameLength) { $longJobNames++; } print DAGOUT $line, "\n"; }elsif ($line =~ '^SUBDAG') { my @tokens = split(' ', $line); my $job = $tokens[2]; my %jobParams; $jobsRef->{$job} = \%jobParams; my @parents = (); $mapRef->{$job} = \@parents; if (length($job) > $defaultJobNameLength) { $longJobNames++; } print DAGOUT $line, "\n"; }elsif ($line =~ '^PARENT') { my @tokens = split(' ', $line); my $parent = $tokens[1]; my $child = $tokens[3]; push(@{$mapRef->{$child}}, $parent); print DAGOUT $line, "\n"; } } foreach my $job (keys %{$jobsRef}) { foreach my $file (@{$jobsRef->{$job}->{'in'}}) { print DAGOUT "FILE $file INPUT $job\n"; } foreach my $file (@{$jobsRef->{$job}->{'out'}}) { print DAGOUT "FILE $file OUTPUT $job\n"; } } close(DAGOUT); close(DAGIN); } sub processFileStats { # # Read file size information # Exitpost only # my $filesRef = shift; my @statfiles = glob "*.out*"; foreach my $file (@statfiles) { if (open(FILE, $file)) { my $filename; while (my $line = ) { chomp $line; if ($line =~ /.*<\/file>/) { $filename = $1; } elsif ($line =~ //\1/; if (defined $filesRef->{$filename}) { $filesRef->{$filename}->{'size'} = $line; } } } } } } sub processAnnotations { my $jobsRef = shift; my $filesRef = shift; my $adaxName = shift; open(INPUT, "<$adaxName") || die "$adaxName not found\n"; while (my $line = ) { chomp($line); my $job; if ($line =~ //) { $job = "$2_$1"; if (defined($jobsRef->{$job})) { $jobsRef->{$job}->{"ksruntime"} = $3; #$jobsRef->{$job}->{"postscripttime"} = $4; } else { warn "$job not known\n"; } my $siteInfo = `grep pegasus_site "$job.sub"`; chomp $siteInfo; $siteInfo =~ s/\+pegasus_site\s+=\s+\"([^\"]*)\"/\1/; $jobsRef->{$job}->{"site"} = $siteInfo; } elsif ($line =~ //) { my $filename=$1; if (defined($filesRef->{$filename})) { $filesRef->{$filename}->{"size"} = $3; } else { warn "$filename not known\n"; } } } close(INPUT); } sub processJobstateLog { # # Read jobstate.log # Note epochs for job submission, execute and termination # Also note site job was run on # Copy log with "relative" timestamps into OUTPUTDIR/log # # If this is a pure condor (glidein) run, there will not be # any GRID_SUBMIT events. In this case, calculate # condorDelay = EXECUTE - SUBMIT # Calculate condorQLen at the time of an EXECUTE event. 
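# Summary comment added for clarity, derived from the calculations below:
# per job,
#   dagmanDelay   = SUBMIT - max(end of all parent jobs)
#   condorDelay   = GRID_SUBMIT - SUBMIT (or EXECUTE - SUBMIT for pure Condor)
#   resourceDelay = EXECUTE - GRID_SUBMIT (0 if no GRID_SUBMIT event was seen)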
# my $jobsRef = shift; my $mapRef = shift; my $jobstateLog = shift; my $outputDir = shift; my $condorRun = shift; open(JOBSTATE, "$jobstateLog") || die "$jobstateLog not found\n"; open(OUT, ">$outputDir/out") || die "Cannot open $outputDir/out\n"; my $condorQLength = 0; my $foundStart = 0; my $globalStartTime; foreach my $line () { if ($line =~ /INTERNAL/) { if ($line =~ /DAGMAN_([A-Z]*)/) { my @tokens = split(' ', $line); if ($1 eq "STARTED" && !$foundStart) { $foundStart = 1; chomp($line); $globalStartTime = $tokens[0]; } $tokens[0] -= $globalStartTime; print OUT join(' ', @tokens), "\n"; } else { if ($line =~ /DAGMAN_FINISHED ([0-9])+/) { if ($1 != 0) { warn "Workflow execution failed/restarted."; } } print OUT $line; } } else { chomp($line); my @tokens = split(' ', $line); my $job = $tokens[1]; my $event = $tokens[2]; if (defined $jobsRef->{$job}) { $tokens[0] -= $globalStartTime; # Some events need additional processing. switch($event) { case 'SUBMIT' { $jobsRef->{$job}->{'site'} = $tokens[4]; if ($jobsRef->{$job}->{'site'} ne "local") { $condorQLength++; } } case 'EXECUTE' { if (defined($jobs{$job}{'EXECUTE'})) { # For whatever reason, this job was executed twice. # This run may not be good. warn "$job was executed more than once.\n"; } else{ if ($condorRun && $jobsRef->{$job}->{'site'} ne "local") { $jobsRef->{$job}->{'condorQLength'} = $condorQLength; $condorQLength--; } } } case 'GRID_SUBMIT' { # We won't have GRID_SUBMIT events if condorRun is false. # This is made explicit. # Also, if we have GRID_SUBMIT, site won't be local. # This is also mentioned explicitly for readability. if (!$condorRun && $jobsRef->{$job}->{'site'} ne "local") { $jobsRef->{$job}->{'condorQLength'} = $condorQLength; $condorQLength--; } } } $jobsRef->{$job}->{$event} = $tokens[0]; #cumulative run time if($event eq 'JOB_TERMINATED'){ if (defined ($jobsRef->{$job}->{'EXECUTE'}) or defined($jobsRef->{$job}->{'SUBMIT'}) ) { my $exec_runtime_start = (defined ($jobsRef->{$job}->{'EXECUTE'}))? $jobsRef->{$job}->{'EXECUTE'}:$jobsRef->{$job}->{'SUBMIT'}; if(defined($jobsRef->{$job}->{'cumulative_runtime'})){ if(!($jobsRef->{$job}->{'cumulative_runtime'} eq '-')){ $jobsRef->{$job}->{'cumulative_runtime'} += $jobsRef->{$job}->{'JOB_TERMINATED'} - $exec_runtime_start; } }else{ $jobsRef->{$job}->{'cumulative_runtime'} = $jobsRef->{$job}->{'JOB_TERMINATED'} - $exec_runtime_start; } } else { # Mismatch between the tags. warn "Unable to calculate runtime for job '$job'.Absence or mismatch of events JOB_TERMINATED,EXECUTE.\n"; $jobsRef->{$job}->{'cumulative_runtime'} = '-'; } } #cumulative post script time if ( $event eq 'POST_SCRIPT_TERMINATED') { if ( defined($jobsRef->{$job}->{'POST_SCRIPT_STARTED'}) or defined($jobsRef->{$job}->{'JOB_TERMINATED'})) { my $post_start = (defined($jobsRef->{$job}->{'POST_SCRIPT_STARTED'}) )? $jobsRef->{$job}->{'POST_SCRIPT_STARTED'}:$jobsRef->{$job}->{'JOB_TERMINATED'}; if (defined($jobsRef->{$job}->{'cumulative_postscripttime'})){ if(!($jobsRef->{$job}->{'cumulative_postscripttime'} eq '-')){ $jobsRef->{$job}->{'cumulative_postscripttime'} += $jobsRef->{$job}->{'POST_SCRIPT_TERMINATED'} - $post_start; } }else{ $jobsRef->{$job}->{'cumulative_postscripttime'} = $jobsRef->{$job}->{'POST_SCRIPT_TERMINATED'} - $post_start; } }else { # Mismatch between the tags. 
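# (comment added for clarity: this branch fires when POST_SCRIPT_TERMINATED
# arrives without the POST_SCRIPT_STARTED or JOB_TERMINATED event it is
# normally paired with)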
warn "Unable to calculate post script time for job '$job'.Absence or mismatch of events POST_SCRIPT_TERMINATED ,POST_SCRIPT_STARTED/JOB_TERMINATED.\n"; $jobsRef->{$job}->{'cumulative_postscripttime'} = '-'; } } # cumulative resource delay and dagman delay #Find with GRID_SUBMIT if(!$condorRun){ if ( $event eq 'GRID_SUBMIT') { if(defined ( $jobsRef->{$job}->{'SUBMIT'}) ){ if (defined($jobsRef->{$job}->{'cumulative_condorDelay'})){ if(!($jobsRef->{$job}->{'cumulative_condorDelay'} eq '-')){ $jobsRef->{$job}->{'cumulative_condorDelay'} += $jobsRef->{$job}->{'GRID_SUBMIT'} - $jobsRef->{$job}->{'SUBMIT'}; } }else{ $jobsRef->{$job}->{'cumulative_condorDelay'} = $jobsRef->{$job}->{'GRID_SUBMIT'} - $jobsRef->{$job}->{'SUBMIT'}; } }else{ # Mismatch between the tags. warn "Calculate of condor delay may fail for job '$job'.Absence or mismatch of events GRID_SUBMIT , SUBMIT.\n"; $jobsRef->{$job}->{'cumulative_condorDelay'} ='-'; } } } #Find with GLOBUS_SUBMIT if(!$condorRun){ if ( $event eq 'GLOBUS_SUBMIT') { if(defined ( $jobsRef->{$job}->{'SUBMIT'}) ){ if (defined($jobsRef->{$job}->{'cumulative_condorDelay_globus'})){ if(!($jobsRef->{$job}->{'cumulative_condorDelay_globus'} eq '-')){ $jobsRef->{$job}->{'cumulative_condorDelay_globus'} += $jobsRef->{$job}->{'GLOBUS_SUBMIT'} - $jobsRef->{$job}->{'SUBMIT'}; } }else{ $jobsRef->{$job}->{'cumulative_condorDelay_globus'} =$jobsRef->{$job}->{'GLOBUS_SUBMIT'} - $jobsRef->{$job}->{'SUBMIT'}; } }else{ # Mismatch between the tags. warn "Calculate of condor delay may fail for job '$job'.Absence or mismatch of events GLOBUS_SUBMIT , SUBMIT.\n"; $jobsRef->{$job}->{'cumulative_condorDelay_globus'} ='-'; } } } if ($event eq 'EXECUTE'){ if ( defined($jobsRef->{$job}->{'GRID_SUBMIT'}) or defined($jobsRef->{$job}->{'GLOBUS_SUBMIT'})) { my $remote_submit_start = (defined($jobsRef->{$job}->{'GRID_SUBMIT'}) )? $jobsRef->{$job}->{'GRID_SUBMIT'}:$jobsRef->{$job}->{'GLOBUS_SUBMIT'}; if (defined($jobsRef->{$job}->{'cumulative_resourceDelay'})){ if(!($jobsRef->{$job}->{'cumulative_resourceDelay'} eq '-')){ $jobsRef->{$job}->{'cumulative_resourceDelay'} += $jobsRef->{$job}->{'EXECUTE'} - $remote_submit_start; } }else{ $jobsRef->{$job}->{'cumulative_resourceDelay'} = $jobsRef->{$job}->{'EXECUTE'} - $remote_submit_start; } }else{ # Mismatch between the tags. warn "Unable to calculate resource delay for job '$job' .Absence or mismatch of events GRID_SUBMIT/ GLOBUS_SUBMIT , EXECUTE.\n"; $jobsRef->{$job}->{'cumulative_resourceDelay'} ='-'; } if($condorRun){ if(defined ( $jobsRef->{$job}->{'SUBMIT'}) ){ if (defined($jobsRef->{$job}->{'cumulative_condorDelay'})){ if(!($jobsRef->{$job}->{'cumulative_condorDelay'} eq '-')){ $jobsRef->{$job}->{'cumulative_condorDelay'} += $jobsRef->{$job}->{'EXECUTE'} -$jobsRef->{$job}->{'SUBMIT'}; } }else{ $jobsRef->{$job}->{'cumulative_condorDelay'} = $jobsRef->{$job}->{'EXECUTE'} -$jobsRef->{$job}->{'SUBMIT'}; } }else{ # Mismatch between the tags. 
warn "Unable to calculate condor delay for job '$job'.Absence or mismatch of events EXECUTE , SUBMIT.\n"; $jobsRef->{$job}->{'cumulative_condorDelay'} ='-'; } } } } print OUT join(' ', @tokens), "\n"; } } close(OUT); close(JOBSTATE); # # Calculate runtimes for each job # foreach my $job (keys %{$jobsRef}) { if (defined $jobsRef->{$job}->{'JOB_TERMINATED'}) { if (defined $jobsRef->{$job}->{'EXECUTE'}) { $jobsRef->{$job}->{'runtime'} = $jobsRef->{$job}->{'JOB_TERMINATED'} - $jobsRef->{$job}->{'EXECUTE'}; } else { $jobsRef->{$job}->{'runtime'} = $jobsRef->{$job}->{'JOB_TERMINATED'} - $jobsRef->{$job}->{'SUBMIT'}; } }else{ $jobsRef->{$job}->{'runtime'} =0; } # kickstart time is calculated from the kickstart file generated.kickstart time is set to zero if there is no associated kick start file $jobsRef->{$job}->{'ksruntime'} = 0; my @grepLines = `grep -h '&1`; foreach my $line (@grepLines) { chomp($line); my $i1 = index($line, "duration=\""); $line = substr($line, $i1 + 10); my $i2 = index($line, "\""); $line= substr($line, 0, $i2); if ($line =~ /^[\.0-9]*$/) { $jobsRef->{$job}->{'ksruntime'} += $line; if(defined ($jobsRef->{$job}->{'cumulative_ksruntime'})){ $jobsRef->{$job}->{'cumulative_ksruntime'} += $line; }else{ $jobsRef->{$job}->{'cumulative_ksruntime'} = $line; } } } if ($jobsRef->{$job}->{'site'} eq 'local' && $job !~ /register_/ ) { $jobsRef->{$job}->{'seqexec'} = "-"; $jobsRef->{$job}->{'seqexec-delay'} = "-"; $jobsRef->{$job}->{'cumulative_seqexec'} = "-"; $jobsRef->{$job}->{'cumulative_seqexec-delay'} = "-"; } #elsif ($job =~ /stage_in/) { # $jobsRef->{$job}->{'ksruntime'} = 0; #} else { #for clustered jobs try to get seqexec progress at end of .out if( $job =~ /merge_/ ){ my $tail = `tail -n 1 $job.out* 2>&1`; chomp($tail); if( index($tail, "[struct") == 0 ){ my $i1 = index($tail, "duration="); $tail = substr($tail, $i1 + 9); my $i2 = index($tail, ","); $tail=substr( $tail, 0, $i2); if ($tail =~ /^[\.0-9]*$/) { $jobsRef->{$job}->{'seqexec'} = $tail; $jobsRef->{$job}->{'seqexec-delay'} = sprintf( "%.2f", $tail - $jobsRef->{$job}->{'ksruntime'}); } else{ #some problem with the parsing of .out files $jobsRef->{$job}->{'seqexec'} = "-"; $jobsRef->{$job}->{'seqexec-delay'} = "-"; } } my @clusterLines = `grep -h '.*struct.*duration=.*' $job.out* 2>&1`; foreach my $clusterLine (@clusterLines) { chomp($clusterLine); if( index($clusterLine, "[struct") == 0 ){ my $i1 = index($clusterLine, "duration="); $clusterLine = substr($clusterLine, $i1 + 9); my $i2 = index($clusterLine, ","); $clusterLine=substr( $clusterLine, 0, $i2); if ($clusterLine =~ /^[\.0-9]*$/) { if(defined ($jobsRef->{$job}->{'cumulative_seqexec'})){ if(!($jobsRef->{$job}->{'cumulative_seqexec'} eq '-')){ $jobsRef->{$job}->{'cumulative_seqexec'} += sprintf( "%.2f",$clusterLine); } }else{ $jobsRef->{$job}->{'cumulative_seqexec'} = sprintf( "%.2f",$clusterLine); } } else{ #some problem with the parsing of .out files $jobsRef->{$job}->{'cumulative_seqexec'} = "-"; } } } # Find the seqexec-delay after taking the cumulative seqexec time if(defined ($jobsRef->{$job}->{'cumulative_seqexec'}) and defined ($jobsRef->{$job}->{'cumulative_ksruntime'})){ $jobsRef->{$job}->{'cumulative_seqexec-delay'} = sprintf( "%.2f",$jobsRef->{$job}->{'cumulative_seqexec'} - $jobsRef->{$job}->{'cumulative_ksruntime'}); }else{ $jobsRef->{$job}->{'cumulative_seqexec-delay'} = "-"; } }else{ $jobsRef->{$job}->{'seqexec'} = "-"; $jobsRef->{$job}->{'seqexec-delay'} = "-"; $jobsRef->{$job}->{'cumulative_seqexec'} = "-"; 
$jobsRef->{$job}->{'cumulative_seqexec-delay'} = "-"; } } if (defined $jobsRef->{$job}->{'POST_SCRIPT_TERMINATED'}) { if (!defined $jobsRef->{$job}->{'POST_SCRIPT_STARTED'}) { $jobsRef->{$job}->{'POST_SCRIPT_STARTED'} = $jobsRef->{$job}->{'JOB_TERMINATED'}; } $jobsRef->{$job}->{'postscripttime'} = $jobsRef->{$job}->{'POST_SCRIPT_TERMINATED'} - $jobsRef->{$job}->{'POST_SCRIPT_STARTED'}; } else { $jobsRef->{$job}->{'postscripttime'} = 0; } } # # Calculate dagmanDelays and condor Delays # foreach my $child (keys %{$jobsRef}) { my $maxParentEnd = 0; foreach my $parent (@{$mapRef->{$child}}) { my $parentEnd = $jobsRef->{$parent}->{'JOB_TERMINATED'}; if (defined $jobsRef->{$parent}->{'POST_SCRIPT_TERMINATED'}) { $parentEnd = $jobsRef->{$parent}->{'POST_SCRIPT_TERMINATED'}; } if ($parentEnd > $maxParentEnd) { $maxParentEnd = $parentEnd; } } my $dagmanDelay = 0; if(defined $jobsRef->{$child}->{'SUBMIT'}){ $dagmanDelay = $jobsRef->{$child}->{'SUBMIT'} - $maxParentEnd; # dagman delay is not cumulative only name is to match the other values $jobsRef->{$child}->{'cumulative_dagmanDelay'} = $jobsRef->{$child}->{'SUBMIT'} - $maxParentEnd; } my ($condorDelay, $resourceDelay); if (defined $jobsRef->{$child}->{'GRID_SUBMIT'}) { $condorDelay = $jobsRef->{$child}->{'GRID_SUBMIT'} - $jobsRef->{$child}->{'SUBMIT'}; $resourceDelay = $jobsRef->{$child}->{'EXECUTE'} - $jobsRef->{$child}->{'GRID_SUBMIT'}; } elsif ($condorRun) { if (defined $jobsRef->{$child}->{'EXECUTE'}) { # In some cases (pegasus_concat) under glidein (atleast), # there is no EXECUTE event. $condorDelay = $jobsRef->{$child}->{'EXECUTE'} - $jobsRef->{$child}->{'SUBMIT'}; } else { $condorDelay = 0; } $resourceDelay = 0; } else { $condorDelay = 0; $resourceDelay = 0; } $jobsRef->{$child}->{'dagmanDelay'} = $dagmanDelay; $jobsRef->{$child}->{'condorDelay'} = $condorDelay; $jobsRef->{$child}->{'resourceDelay'} = $resourceDelay; # # If condorQLength is not known, set it to 0. # if (!defined($jobsRef->{$child}->{'condorQLength'})) { $jobsRef->{$child}->{'condorQLength'} = 0; } } } sub writeJobs { my $jobsRef = shift; my $outputDir = shift; my $jobsFile = "$outputDir/jobs"; open(JOBS, ">$jobsFile") || die "Could not open $jobsFile\n"; #print JOBS "#Legends \n"; #print JOBS "#Job - the name of the job \n"; #print JOBS "#Site - the site where the job ran \n"; #print JOBS "#Kickstart(sec.) - the actual duration of the job in seconds on the remote compute node \n"; #print JOBS "#Post(sec.) - the postscript time as reported by DAGMan \n"; #print JOBS "#DAGMan(sec.) - the time between the last parent job of a job completes and the job gets submitted \n"; #print JOBS "#CondorQTime(sec.) - the time between submission by DAGMan and the remote Grid submission. It is an estimate of the time spent in the condor q on the submit node \n"; #print JOBS "#Resource(sec.) - the time between the remote Grid submission and start of remote execution . It is an estimate of the time job spent in the remote queue \n"; #print JOBS "#Runtime(sec.) - the time spent on the resource as seen by Condor DAGMan . Is always >=kickstart \n"; #print JOBS "#CondorQLen - the number of outstanding jobs in the queue when this job was released \n"; #print JOBS "#Seqexec(sec.) - the time taken for the completion of a clustered job\n"; #print JOBS "#Seqexec-Delay(sec.) 
- the time difference between the time for the completion of a clustered job and sum of all the individual tasks kickstart time\n\n"; if ($longJobNames) { print JOBS sprintf("#%-79s %20s %8s %8s %8s %13s %8s %8s %7s %13s\n", "Job", "Site", "Kickstart", "Post", "DAGMan", "CondorQTime", "Res", "Runtime", "Seqexec", "Seqexec-Delay"); } else { print JOBS sprintf("#%-39s %20s %8s %8s %8s %13s %8s %8s %7s %13s\n", "Job", "Site", "Kickstart", "Post", "DAGMan", "CondorQTime", "Resource", "Runtime", "Seqexec", "Seqexec-Delay"); } foreach my $job (sort keys %{$jobsRef}) { #Setting default values if (!defined($jobsRef->{$job}->{"site"})) { $jobsRef->{$job}->{"site"} = "local"; } $jobsRef->{$job}->{'cumulative_ksruntime'} = defined($jobsRef->{$job}->{'cumulative_ksruntime'} )?$jobsRef->{$job}->{'cumulative_ksruntime'} :'-'; if (!($jobsRef->{$job}->{'cumulative_ksruntime'} eq '-')){ $jobsRef->{$job}->{'cumulative_ksruntime'} = sprintf( "%.2f",$jobsRef->{$job}->{'cumulative_ksruntime'} ); } $jobsRef->{$job}->{'cumulative_postscripttime'} = defined($jobsRef->{$job}->{'cumulative_postscripttime'} )?$jobsRef->{$job}->{'cumulative_postscripttime'} :'-'; if(!defined($jobsRef->{$job}->{'cumulative_condorDelay'}) or $jobsRef->{$job}->{'cumulative_condorDelay'} eq '-'){ if (defined($jobsRef->{$job}->{'cumulative_condorDelay_globus'})){ $jobsRef->{$job}->{'cumulative_condorDelay'} = $jobsRef->{$job}->{'cumulative_condorDelay_globus'}; } } $jobsRef->{$job}->{'cumulative_dagmanDelay'} = defined($jobsRef->{$job}->{'cumulative_dagmanDelay'})?$jobsRef->{$job}->{'cumulative_dagmanDelay'}:'-'; $jobsRef->{$job}->{'cumulative_condorDelay'} = defined($jobsRef->{$job}->{'cumulative_condorDelay'})?$jobsRef->{$job}->{'cumulative_condorDelay'}:'-'; $jobsRef->{$job}->{'cumulative_resourceDelay'} = defined($jobsRef->{$job}->{'cumulative_resourceDelay'})?$jobsRef->{$job}->{'cumulative_resourceDelay'}:'-'; $jobsRef->{$job}->{'cumulative_runtime'} = defined($jobsRef->{$job}->{'cumulative_runtime'})?$jobsRef->{$job}->{'cumulative_runtime'}:'-'; if ($longJobNames) { print JOBS sprintf("%-80s %20s %8s %8s %8s %13s %8s %8s %7s %13s\n", $job, $jobsRef->{$job}->{'site'}, $jobsRef->{$job}->{'cumulative_ksruntime'}, $jobsRef->{$job}->{'cumulative_postscripttime'}, $jobsRef->{$job}->{'cumulative_dagmanDelay'}, $jobsRef->{$job}->{'cumulative_condorDelay'}, $jobsRef->{$job}->{'cumulative_resourceDelay'}, $jobsRef->{$job}->{'cumulative_runtime'}, $jobsRef->{$job}->{'cumulative_seqexec'}, $jobsRef->{$job}->{'cumulative_seqexec-delay'}); } else { print JOBS sprintf("%-40s %20s %8s %8s %8s %13s %8s %8s %7s %13s\n", $job, $jobsRef->{$job}->{'site'}, $jobsRef->{$job}->{'cumulative_ksruntime'}, $jobsRef->{$job}->{'cumulative_postscripttime'}, $jobsRef->{$job}->{'cumulative_dagmanDelay'}, $jobsRef->{$job}->{'cumulative_condorDelay'}, $jobsRef->{$job}->{'cumulative_resourceDelay'}, $jobsRef->{$job}->{'cumulative_runtime'}, $jobsRef->{$job}->{'cumulative_seqexec'},$jobsRef->{$job}->{'cumulative_seqexec-delay'}); } } close(JOBS); } sub writeFiles { # # Write out files in OUTPUTDIR/files # my $filesRef = shift; my $outputDir = shift; my $filesFile = "$outputDir/files"; open(FILES, ">$filesFile") || die "Could not open $filesFile\n"; if ($longFilenames) { print FILES sprintf("#%-79s %12s %20s\n", "Filename", "Size", "Source"); } else { print FILES sprintf("#%-39s % 12s %20s\n", "Filename", "Size", "Source"); } foreach my $fileName (sort keys %{$filesRef}) { if ($longFilenames) { print FILES sprintf("%-80s % 12d %20s\n", $fileName, 
$filesRef->{$fileName}->{'size'}, $filesRef->{$fileName}->{'src'}); } else { print FILES sprintf("%-40s % 12d %20s\n", $fileName, $filesRef->{$fileName}->{'size'}, $filesRef->{$fileName}->{'src'}); } } close(FILES); } sub writeDAX { my $jobsRef = shift; my $mapRef = shift; my $filesRef = shift; my $outputDir = shift; my $daxFile = "$outputDir/dax"; open(DAX, ">$daxFile") || die "Could not open $daxFile\n"; print DAX << "HERE"; HERE foreach my $file (sort keys %{$filesRef}) { if (defined $filesRef->{$file}->{'used'}) { print DAX "\t{$file}->{'size'}\""; if (defined $filesRef->{$file}->{'src'}) { print DAX " src=\"$filesRef->{$file}->{'src'}\""; } print DAX "/>\n"; } } foreach my $job (sort keys %{$jobsRef}) { my $runtime = $jobsRef->{$job}->{'runtime'}; my $ksruntime = $jobsRef->{$job}->{'ksruntime'}; my $postscripttime = $jobsRef->{$job}->{'postscripttime'}; my $dagmanDelay = $jobsRef->{$job}->{'dagmanDelay'}; my $condorDelay = $jobsRef->{$job}->{'condorDelay'}; my $resourceDelay = $jobsRef->{$job}->{'resourceDelay'}; my $site = $jobsRef->{$job}->{'site'}; my $condorQLength = $jobsRef->{$job}->{'condorQLength'}; print DAX "\t\n"; foreach my $file (@{$jobsRef->{$job}->{'in'}}) { print DAX "\t\t\n"; } foreach my $file (@{$jobsRef->{$job}->{'out'}}) { print DAX "\t\t\n"; } print DAX "\t\n"; } foreach my $child (sort keys %{$jobsRef}) { if (scalar @{$mapRef->{$child}} > 0) { print DAX "\t\n"; foreach my $parent (@{$mapRef->{$child}}) { print DAX "\t\t\n"; } print DAX "\t\n"; } } print DAX "\n"; close(DAX); } my ($adaxName, $condorRun, $dagName, $inputDir, $jobstateLog, $outputDir); my $result = GetOptions( "adax|a=s" => \$adaxName, "condor|c" => sub { $condorRun++; }, "dag|d=s" => \$dagName, "help|h" => \&usage, "input|i=s" => \$inputDir, "jobstate-log|l=s" => \$jobstateLog, "output|o=s" => \$outputDir, ); if (!defined($dagName) || !defined($outputDir)) { usage(); } die "only one of --adax or --jobstate-log can be specified\n" if (defined($adaxName) && defined($jobstateLog)); $outputDir = File::Spec->rel2abs($outputDir); if (! -d $outputDir) { eval {mkpath($outputDir), {verbose => 1}}; if ($@) { die "Could not create $outputDir\n"; } } if (defined $jobstateLog) { $jobstateLog =File::Spec->rel2abs($jobstateLog); } $dagName= File::Spec->rel2abs($dagName); if (defined($inputDir)) { $inputDir= File::Spec->rel2abs($inputDir); chdir $inputDir || die "Cannot chdir to $inputDir\n"; }else{ my ($dagbase,$submit_dir ,$type ) = fileparse( $dagName ,qr{\..*}); chdir $submit_dir || die "Cannot chdir to $submit_dir\n"; } processDAGManOutput(\%jobs,\%map,\%files, $dagName, $outputDir); processFileStats(\%files); if (defined $jobstateLog) { processJobstateLog(\%jobs, \%map, $jobstateLog, $outputDir, $condorRun); } elsif (defined $adaxName) { processAnnotations(\%jobs, \%files, $adaxName); } # # Write out various files. 
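# Comment added for clarity: the writers below produce, inside $outputDir,
# "jobs" (per-job timing table), "files" (file size and source table),
# "dag" (the DAG annotated with FILE lines), and "dax" (a pseudo-DAX
# carrying the measured runtimes); processJobstateLog additionally wrote
# "out", the jobstate log rebased to relative timestamps.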
#
writeJobs(\%jobs, $outputDir);
writeFiles(\%files, $outputDir);

#
# Write out dax in OUTPUTDIR/dax
#
writeDAX(\%jobs, \%map, \%files, $outputDir);

# Copy info.txt if it exists
copy("info.txt", "$outputDir/info.txt");
pegasus-wms_4.0.1+dfsg/libexec/groff.sh0000755000175000017500000000047211757531137017114 0ustar ryngerynge#!/bin/sh
#
# takes a file input and a file output
#
echo "calling groff $1 $2" # >> /dev/tty
case "$2" in
*.ps)
    exec groff -mandoc $1 > $2
    ;;
*.txt)
    exec groff -mandoc -Tlatin1 $1 > $2
    ;;
*.html)
    exec groff -mandoc -Thtml $1 > $2
    ;;
*)
    echo "Illegal usage of script" 1>&2
    exit 1
    ;;
esac
pegasus-wms_4.0.1+dfsg/libexec/docbook-sample-props0000755000175000017500000002504711757531137021445 0ustar ryngerynge#!/usr/bin/env perl
#
# Parses $PEGASUS_HOME/etc/basic.properties and generates docbook from it
# $Id$
#
# Usage: docbook-sample-props [basic.properties] > properties.xml
#
# The following markup is required to be adhered to:
#
# 1. Any new section starts like this:
#
#    #
#    #
#    # SECTION "ALL CAPS TITLE IN QUOTES"
#    #
#    #
#
# Three comment-marked lines, the reserved word SECTION (all caps),
# and the section's title inside quotes. Use all caps for the title.
# It will be word-wise de-capped into the LaTeX document.
#
#    #
#    #
#    # SUBSECTION "ALL CAPS SUBTITLE IN QUOTES"
#    #
#    #
#
# See above, except that a sub-section will be generated.
#
# 2. Item format
#
# Any item starts out with the key "Property: some.prop", followed
# by other such tabular keywords, colon, and their respective values.
# The following keywords are approved, parsed, and may have special
# meaning:
#
#    Systems    the subsystem this applies to, e.g. Pegasus, Chimera
#    Type       a symbolic description of the type the prop can have
#    Value[X]   For enumeration types, the possible values 1 <= X <= N
#    Default    value assumed in absence of property
#    See also   Cross reference to other props, repeat as necessary!
#    Example    Sample for complex properties
#    Moved to   Cross references the new name of a property (deprecated)
#    New Name   Cross references the new name of a property (preferred)
#
# After the initial table comes the description. Please use paragraphs
# (empty comment lines) as necessary.
#
# The final section in any item is always the property followed by its
# default value in a paragraph of its own. It is followed by an empty (!)
# line without comment. This final section is skipped in the output.
#
# 3. The following markup is permitted inside sample.properties.
#    NOTE: the literal tags were stripped during extraction; the tag
#    names below are a plausible reconstruction, not the verbatim
#    original:
#
#    <screen>   as is verbatim
#    <b>        visual markup
#    <ol>       numbered list
#    <ul>       bullet list
#    <dl>       definitions
#    <tb2>      2-column table, columns separated by &
#    <tb3>      3-column table, columns separated by & &
#
use 5.006;
use strict;
use File::Spec;
use File::Temp qw(tempfile);
use POSIX qw(strftime);

die "Set your PEGASUS_HOME environment variable\n"
    unless exists $ENV{'PEGASUS_HOME'};
my $fn = shift ||
    File::Spec->catfile( $ENV{'PEGASUS_HOME'}, 'etc', 'sample.properties' );

# NOTE: the replacement strings in %replace were stripped during
# extraction; the DocBook markup below is a plausible reconstruction.
my %replace = ( 'tb2'  => "<informaltable><tgroup cols=\"2\"><tbody>\n"
              , '/tb2' => "</tbody></tgroup></informaltable>\n"
              , 'tb3'  => "<informaltable><tgroup cols=\"3\"><tbody>\n"
              , '/tb3' => "</tbody></tgroup></informaltable>\n"
              , '/te'  => "</entry></row>\n"
              );

# we only need to escape the basic three, ignoring quot and apos
# (the entity values here were decoded during extraction and are restored)
my %escape = ( '&' => '&amp;', '<' => '&lt;', '>' => '&gt;' );
my $rx_escape = '([' . join('',keys %escape) . '])';

# but on the reverse track, we need to do the right thing.
my %unescape = ( 'amp' => '&', 'lt' => '<', 'gt' => '>',
                 'quot' => '"', 'apos' => "'" );
my $rx_unescape = '&(' . join('|',keys %unescape) . ');';

sub lookup($) {
    my $key = shift;
    exists $replace{$key} ? $replace{$key} : "<$key>";
}

sub escape($) {
    # purpose: XML escape an arbitrary sentence (imperfect)
    # paramtr: $x (IN): some raw string
    # returns: cooked string
    # NOTE: the angle-bracket patterns and some replacement text in this
    # function were stripped during extraction and are reconstructed.
    local $_ = shift;
    # are we inside a screen?
    $main::inpre=1 if m{<screen>};
    $main::inpre=0 if m{</screen>};
    # replace only known items with Docbook formatting.
    # unknown "TAG" strings are copied verbatim.
    s/<([^>]+)>/lookup($1)/eg;
    s{&}{&amp;}g;
    # special escape for 1-line verbatims -- only do once per line
    #s{<screen>\n(.*?)\n</screen>}{<screen>$1</screen>};
    # done
    $_;
}

sub trim($) {
    # purpose: Trims redundant whitespace from head and tail of a string
    # paramtr: $x (IN): some raw string
    # returns: trimmed string, may be identical to argument, or even empty.
    local $_ = shift;
    s/^\s+//;
    s/\s+$//;
    $_;
}

my $__flag = 0;
sub regular($) {
    my $p = shift;
    if ( length($p) > 1 ) {
        $__flag = 0;
        print "$p\n";
    } else {
        # paragraph break (markup reconstructed)
        print "</para>\n<para>\n" unless ( $__flag || $main::inpre );
        $__flag = 1;
    }
}

#
# --- main ---
#
my $tmp = $ENV{'MY_TMP'} ||     # Wei likes MY_TMP, so try that first
    $ENV{TMP} ||                # standard
    $ENV{TEMP} ||               # windows standard
    $ENV{TMPDIR} ||             # also somewhat used
    File::Spec->tmpdir() ||     # OK, this gets used if all above fail
    '/tmp';                     # last resort
my ($tmpfh,$tmpfn) = tempfile( 'tmp-XXXXXX', DIR => $tmp, UNLINK => 1 );
die "FATAL: Unable to create files in $tmp\n" unless defined $tmpfh;
warn "# toc into $tmpfn\n";

# open file (and fail) before printing docbook preambles
$/ = '';                        # activate paragraph mode
open( SP, '<' . $fn ) || die "open $fn: $!\n";

#
# print DOCBOOK preamble -- do not interpolate anything here
#
my $now = POSIX::strftime("%Y-%m-%d %H:%M", localtime());
# NOTE: the original preamble heredoc body was stripped during
# extraction; this is a minimal reconstruction.
print << "EOF";
<?xml version="1.0" encoding="UTF-8"?>
<!-- generated: $now -->
EOF

select $tmpfh;                  # write all regular prints to tmpfh
my %index = ();
my (@line,%line,$k,$v,$i);
my $depth = 0;
my @count = ( 0, 0, 0 );        # section, subsection, subsubsection
my $idbase = "";
# NOTE: every literal DocBook tag printed inside this loop was stripped
# during extraction (including the <SP> filehandle read below); the
# markup is reconstructed as a plausible approximation.
while ( <SP> ) {
    if ( /^\# Property : (.*)/ ) {
        my $title = $1;
        my $secmark = undef;
        if ( $depth == 2 ) {
            $count[2]++;
            $secmark = join('.',@count[0,1,2]);
            print "\n\n<!-- $secmark -->\n<section id=\"$idbase$secmark\">\n";
            print " <title>$title</title>\n";
            print "\n";
        } else {
            $count[1]++;
            $count[2] = 0;
            $secmark = join('.',@count[0,1]);
            print "\n\n<!-- $secmark -->\n<section id=\"$idbase$secmark\">\n";
            print " <title>$title</title>\n";
            print "\n";
        }
        $index{$title} = defined $secmark ? $secmark : 42;
        print "\n";
        # break paragraph into lines, and remove comment+space
        @line = map { trim($_) } map { substr($_,2) } split /[\r\n]/;
        # rip first section
        %line = ();
        print "<informaltable>\n";
        print "<tgroup cols=\"2\">\n";
        print "<tbody>\n";
        for ( $i=0; $i<@line; ++$i ) {
            # exit at first empty line
            last if length($line[$i]) < 1;
            ($k,$v) = map { escape($_) } split( /\s+:\s+/, $line[$i], 2 );
            $line{$k} = $v;
            if ( $k eq 'Property' ) {
                # ignore - this is already the subsection
            } elsif ( $k =~ /moved?\s*to/i || $k =~ /see\s*also/i ||
                      $k =~ /new\s*name/i ) {
                # generate cross reference
                $k = "New name" if $k =~ /moved?\s*to/i;
                print "<row><entry>$k:</entry>";
                print "<entry><link linkend=\"$v\">$v</link></entry></row>\n";
            } else {
                # default action
                print "<row><entry>$k:</entry>";
                print "<entry>$v</entry></row>\n";
            }
        }
        print "</tbody>\n";
        print "</tgroup>\n";
        print "</informaltable>\n";
        print "\n\n";
        # print everything but last paragraph
        print "<para>";
        for ( ; $i < @line - 2; ++$i ) {
            regular( escape($line[$i]) );
        }
        print "</para>\n\n";
        print "\n</section>\n";
    } elsif ( /\# (SECTION|SUBSECTION|TITLE)/ ) {
        @line = map { trim($_) } map { substr($_,2) } split /[\r\n]/;
        my $flag = undef;
        for ( $i=0; $i<@line; ++$i ) {
            last if ( length($line[$i]) <= 1 && $flag );
            if ( $line[$i] =~ /^SECTION "([^\"]+)"/ ) {
                if ($depth >= 2) {
                    print "\n</section>\n";
                }
                if ($depth >= 1) {
                    print "</section>\n\n";
                }
                my @title = map { ucfirst lc } split /\s+/,$1;
                $count[0]++;
                $count[1] = $count[2] = 0;
                my $title = join('',@title);
                print "\n\n<!-- $title -->\n<section id=\"$idbase$title\">\n";
                print " <title>@title</title>\n\n";
                $flag = 1;
                $depth = 1;
            } elsif ( $line[$i] =~ /^SUBSECTION "([^\"]+)"/ ) {
                if ($depth >= 2) {
                    print "\n</section>\n";
                }
                my @title = map { ucfirst lc } split /\s+/,$1;
                my $title = join('',@title);
                $count[1]++;
                $count[2] = 0;
                my $mark = join('.',@count[0,1]);
                print "\n\n<!-- $mark -->\n<section id=\"$idbase$title\">\n";
                print " <title>@title</title>\n\n";
                $flag = 1;
                $depth = 2;
            } elsif ( $line[$i] =~ /^TITLE "([^\"]+)"/ ) {
                my @title = map { ucfirst lc } split /\s+/,$1;
                my $title = join('',@title);
                print "<chapter id=\"$title\">\n";
                print " <title>@title</title>\n\n";
                $idbase = $title; # used for unique ids across the book
                $flag = 1;
                $depth = 0;
            }
        }
        print "<para>";
        for ( ; $i<@line; ++$i ) {
            regular( escape($line[$i]) );
        }
        print "</para>\n\n";
    } elsif ( /^\#/ ) {
        warn "Warning: $.: Found regular textual paragraph, copying\n";
        warn "<<<$_>>>\n";
        print "<para>";
        @line = map { trim($_) } map { substr($_,2) } split /[\r\n]/;
        for ( $i=0; $i<@line; ++$i ) {
            regular( escape($line[$i]) );
        }
        print "</para>\n\n";
    } else {
        warn "Warning: $.: Found uncommented paragraph, ignoring\n";
        warn "<<<$_>>>\n";
    }
}
close SP;
select STDOUT;
$/="\n";
seek( $tmpfh, 0, 0 ) || die "FATAL: seek $tmpfn: $!\n";
while ( <$tmpfh> ) { print STDOUT $_ ; }
close $tmpfh;
unlink $tmpfn if -e $tmpfn;
if ($depth >= 2) {
    print "\n</section>\n";
}
if ($depth >= 1) {
    print "\n</section>
\n"; } # # print Docbook post-amble # print << 'EOF'; EOF pegasus-wms_4.0.1+dfsg/share/0000755000175000017500000000000011757531667015146 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/share/man/0000755000175000017500000000000011757531667015721 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/share/man/man1/0000755000175000017500000000000011757531667016555 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-kickstart.10000644000175000017500000007162111757531137022302 0ustar ryngerynge'\" t .\" Title: pegasus-kickstart .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-KICKSTART" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-kickstart \- run an executable in a more universal environment\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-kickstart\fR [\fB\-n\fR \fItr\fR] [\fB\-N\fR \fIdv\fR] [\fB\-H\fR] [\fB\-R\fR \fIsite\fR] [\fB\-W\fR | \fB\-w\fR \fIdir\fR] [\fB\-L\fR \fIlbl\fR \fB\-T\fR \fIiso\fR] [\fB\-s\fR \fIp\fR | \fI@fn\fR] [\fB\-S\fR \fIp\fR | \fI@fn\fR] [\fB\-i\fR \fIfn\fR] [\fB\-o\fR \fIfn\fR] [\fB\-e\fR \fIfn\fR] [\fB\-X\fR] [\fB\-l\fR \fIfn\fR \fIsz\fR] [\fB\-F\fR] (\fB\-I\fR \fIfn\fR | \fIapp\fR [\fIappflags\fR]) \fBpegasus\-kickstart\fR \fB\-V\fR .fi .SH "DESCRIPTION" .sp The \fBpegasus\-kickstart\fR executable is a light wrapper program which connects the \fIstdin\fR, \fIstdout\fR and \fIstderr\fR file handles for grid jobs on the remote site, and reports back the remote application termination condition\&. .sp Sitting in between the remote scheduler and the executable, it is possible for \fBpegasus\-kickstart\fR to gather additional information about the executable run\-time behavior and resource usage, including the exit status of jobs\&. This information is important for the Pegasus invocation tracking as well as to Condor DAGMan\(cqs awareness of Globus job failures\&. .sp \fBpegasus\-kickstart\fR allows the optional execution of jobs before and after the main application job that run in chained execution with the main application job\&. See section \fBSUBJOBS\fR for details about this feature\&. .sp All jobs with relative path specifications to the application are part of search relative to the current working directory (yes, this is unsafe), and by prepending each component from the \fIPATH\fR environment variable\&. The first match is used\&. Jobs that use absolute pathnames, starting in a slash, are exempt\&. Using an absolute path to your executable is the safe and recommended option\&. 
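.sp
For example, of the two invocations below (paths illustrative), the first depends on the unsafe relative search, while the second names the executable unambiguously:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-kickstart date
$ pegasus\-kickstart /bin/date
.fi
.if n \{\
.RE
.\}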
.sp \fBpegasus\-kickstart\fR rewrites the command line of any job (pre, post and main) with variable substitutions from Unix environment variables\&. See section \fBVARIABLE REWRITING\fR below for details on this feature\&. .sp \fBpegasus\-kickstart\fR provides a temporary named pipe (fifo) for applications that are gridstart\-aware\&. Any data an application writes to the FIFO will be propagated back to the submit host, thus enabling progress meters and other application dependent monitoring\&. See section \fBFEEDBACK CHANNEL\fR below for details on this feature\&. .SH "OPTIONS" .PP \fB\-n\fR \fItr\fR .RS 4 In order to associate the minimal performance information of the job with the invocation records, the jobs needs to carry which \fItransformation\fR was responsible for producing it\&. The format is the textual notation for fully\-qualified definition names, like namespace::name:version, with only the name portion being mandatory\&. .sp There is no default\&. If no value is given, "null" will be reported\&. .RE .PP \fB\-N\fR \fIdv\fR .RS 4 The jobs may carry which instantiation of a transformation was responsible for producing it\&. The format is the textual notation for fully\-qualified definition names, like namespace::name:version, with only the name portion being mandatory\&. .sp There is no default\&. If no value is given, "null" will be reported\&. .RE .PP \fB\-H\fR .RS 4 This option avoids pegasus\-kickstart writing the XML preamble (entity), if you need to combine multiple pegasus\-kickstart records into one document\&. .sp Additionally, if specified, the environment and the resource usage segments will not be written, assuming that a in a concatenated record version, the initial run will have captured those settings\&. .RE .PP \fB\-R\fR \fIsite\fR .RS 4 In order to provide the greater picture, pegasus\-kickstart can reflect the site handle (resource identifier) into its output\&. .sp There is no default\&. If no value is given, the attribute will not be generated\&. .RE .PP \fB\-L\fR \fIlbl\fR, \fB\-T\fR \fIiso\fR .RS 4 These optional arguments denote the workflow label (from DAX) and the workflow\(cqs last modification time (from DAX)\&. The label \fIlbl\fR can be any sensible string of up to 32 characters, but should use C identifier characters\&. The timestamp \fIiso\fR must be an ISO 8601 compliant time\-stamp\&. .RE .PP \fB\-S\fR \fIl=p\fR .RS 4 If stat information on any file is required \fIbefore\fR any jobs were started, logical to physical file mappings to stat can be passed using the \fB\-S\fR option\&. The LFN and PFN are concatenated by an equals (=) sign\&. The LFN is optional: If no equals sign is found, the argument is taken as sole PFN specification without LFN\&. .sp This option may be specified multiple times\&. To reduce and overcome command line length limits, if the argument is prefixed with an at (@) sign, the argument is taken to be a textual file of LFN to PFN mappings\&. The optionality mentioned above applies\&. Each line inside the file argument is the name of a file to stat\&. Comments (#) and empty lines are permitted\&. .sp Each PFN will incur a \fIstatcall\fR record (element) with attribute \fIid\fR set to value \fIinitial\fR\&. The optional \fIlfn\fR attribute is set to the LFN stat\(cqed\&. The filename is part of the \fIstatinfo\fR record inside\&. .sp There is no default\&. 
.RE .PP \fB\-s\fR \fIfn\fR .RS 4 If stat information on any file is required \fIafter\fR all jobs have finished, logical to physical file mappings to stat can be passed using the \fB\-s\fR option\&. The LFN and PFN are concatenated by an equals (=) sign\&. The LFN is optional: If no equals sign is found, the argument is taken as sole PFN specification without LFN\&. .sp This option may be specified multiple times\&. To reduce and overcome commandline length limits, if the argument is prefixed with an at (@) sign, the argument is taken to be a textual file of LFN to PFN mappings\&. The optionality mentioned above applies\&. Each line inside the file argument is the name of a file to stat\&. Comments (#) and empty lines are permitted\&. .sp Each PFN will incur a \fIstatcall\fR record (element) with attribute \fIid\fR set to value \fIfinal\fR\&. The optional \fIlfn\fR attribute is set to the LFN stat\(cqed\&. The filename is part of the \fIstatinfo\fR record inside\&. .sp There is no default\&. .RE .PP \fB\-i\fR \fIfn\fR .RS 4 This option allows \fBpegasus\-kickstart\fR to re\-connect the stdin of the application that it starts\&. Use a single hyphen to share \fIstdin\fR with the one provided to \fBpegasus\-kickstart\fR\&. .sp The default is to connect \fIstdin\fR to \fI/dev/null\fR\&. .RE .PP \fB\-o\fR \fIfn\fR .RS 4 This option allows \fBpegasus\-kickstart\fR to re\-connect the \fIstdout\fR of the application that it starts\&. The mode is used whenever an application produces meaningful results on its \fIstdout\fR that need to be tracked by Pegasus\&. The real \fIstdout\fR of Globus jobs is staged via GASS (GT2) or RFT (GT4)\&. The real \fIstdout\fR is used to propagate the invocation record back to the submit site\&. Use the single hyphen to share the application\(cqs \fIstdout\fR with the one that is provided to \fBpegasus\-kickstart\fR\&. In that case, the output from \fBpegasus\-kickstart\fR will interleave with application output\&. For this reason, such a mode is not recommended\&. .sp In order to provide an un\-captured \fIstdout\fR as part of the results, it is the default to connect the \fIstdout\fR of the application to a temporary file\&. The content of this temporary file will be transferred as payload data in the \fBpegasus\-kickstart\fR results\&. The content size is subject to payload limits, see the \fB\-B\fR option\&. If the content grows large, only an initial portion will become part of the payload\&. If the temporary file grows too large, it may flood the worker node\(cqs temporary space\&. The temporary file will be deleted after \fBpegasus\-kickstart\fR finishes\&. .sp If the filename is prefixed with an exclamation point, the file will be opened in append mode instead of overwrite mode\&. Note that you may need to escape the exclamation point from the shell\&. .sp The default is to connect \fIstdout\fR to a temporary file\&. .RE .PP \fB\-e\fR \fIfn\fR .RS 4 This option allows \fBpegasus\-kickstart\fR to re\-connect the \fIstderr\fR of the application that it starts\&. This option is used whenever an application produces meaningful results on \fIstderr\fR that needs tracking by Pegasus\&. The real \fIstderr\fR of Globus jobs is staged via GASS (GT2) or RFT (GT4)\&. It is used to propagate abnormal behavior from both, \fBpegasus\-kickstart\fR and the application that it starts, though its main use is to propagate application dependent data and heartbeats\&. Use a single hyphen to share \fIstderr\fR with the \fIstderr\fR that is provided to \fBpegasus\-kickstart\fR\&. 
This is the backward compatible behavior\&. .sp In order to provide an un\-captured \fIstderr\fR as part of the results, by default the \fIstderr\fR of the application will be connected to a temporary file\&. Its content is transferred as payload data in the \fBpegasus\-kickstart\fR results\&. If too large, only the an initial portion will become part of the payload\&. If the temporary file grows too large, it may flood the worker node\(cqs temporary space\&. The temporary file will be deleted after \fBpegasus\-kickstart\fR finishes\&. .sp If the filename is prefixed with an exclamation point, the file will be opened in append mode instead of overwrite mode\&. Note that you may need to escape the exclamation point from the shell\&. .sp The default is to connect \fIstderr\fR to a temporary file\&. .RE .PP \fB\-l\fR \fIlogfn\fR .RS 4 allows to append the performance data to the specified file\&. Thus, multiple XML documents may end up in the same file, including their XML preamble\&. \fIstdout\fR is normally used to stream back the results\&. Usually, this is a GASS\-staged stream\&. Use a single hyphen to generate the output on the \fIstdout\fR that was provided to \fBpegasus\-kickstart\fR, the default behavior\&. .sp Default is to append the invocation record onto the provided \fIstdout\fR\&. .RE .PP \fB\-w\fR \fIdir\fR .RS 4 permits the explicit setting of a new working directory once pegasus\-kickstart is started\&. This is useful in a remote scheduling environment, when the chosen working directory is not visible on the job submitting host\&. If the directory does not exist, \fBpegasus\-kickstart\fR will fail\&. This option is mutually exclusive with the \fB\-W\fR \fIdir\fR option\&. .sp Default is to use the working directory that the application was started in\&. This is usually set up by a remote scheduling environment\&. .RE .PP \fB\-W\fR \fIdir\fR .RS 4 permits the explicit creation and setting of a new working directory once pegasus\-kickstart is started\&. This is useful in a remote scheduling environment, when the chosen working directory is not visible on the job submitting host\&. If the directory does not exist, \fBpegasus\-kickstart\fR will attempt to create it, and then change into it\&. Both, creation and directory change may still fail\&. This option is mutually exclusive with the \fB\-w\fR \fIdir\fR option\&. .sp Default is to use the working directory that the application was started in\&. This is usually set up by a remote scheduling environment\&. .RE .PP \fB\-X\fR .RS 4 make an application executable, no matter what\&. It is a work\-around code for a weakness of \fBglobus\-url\-copy\fR which does not copy the permissions of the source to the destination\&. Thus, if an executable is staged\-in using GridFTP, it will have the wrong permissions\&. Specifying the \fB\-X\fR flag will attempt to change the mode to include the necessary x (and r) bits to make the application executable\&. .sp Default is not to change the mode of the application\&. Note that this feature can be misused by hackers, as it is attempted to call chmod on whatever path is specified\&. .RE .PP \fB\-B\fR \fIsz\fR .RS 4 varies the size of the debug output data section\&. If the file descriptors \fIstdout\fR and \fIstderr\fR remain untracked, \fBpegasus\-kickstart\fR tracks that output in temporary files\&. The first few pages from this output is copied into a data section in the output\&. In order to resize the length of the output within reasonable boundaries, this option permits a changes\&. 
Data beyond the size will not be copied, i\&.e\&. is truncated\&. .sp Warning: This is not a cheap way to obtain the stdio file handle data\&. Please use tracked files for that\&. Due to output buffer pre\-allocation, using arbitrary large arguments may result in failures of \fBpegasus\-kickstart\fR itself to allocate the necessary memory\&. .sp The default maximum size of the data section is 262144 byte\&. .RE .PP \fB\-F\fR .RS 4 This flag will issue an explicit \fBfsync()\fR call on kickstart\(cqs own \fIstdout\fR file\&. Typically you won\(cqt need this flag\&. Albeit, certain shared file system situations may improve when adding a flush after the written invocation record\&. .sp The default is to just use kickstart\(cqs NFS alleviation strategy by locking and unlocking \fIstdout\fR\&. .RE .PP \fB\-I\fR \fIfn\fR .RS 4 In this mode, the application name and any arguments to the application are specified inside of file \fIfn\fR\&. The file contains one argument per line\&. Escaping from Globus, Condor and shell meta characters is not required\&. This mode permits to use the maximum possible command line length of the underlying operating system, e\&.g\&. 128k for Linux\&. Using the \fB\-I\fR mode stops any further command line processing of \fBpegasus\-kickstart\fR command lines\&. .sp Default is to use the \fIapp flags\fR mode, where the application is specified explicitly on the command\-line\&. .RE .PP \fIapp\fR .RS 4 The path to the application has to be completely specified\&. The application is a mandatory option\&. .RE .PP \fIappflags\fR .RS 4 Application may or may not have additional flags\&. .RE .SH "RETURN VALUE" .sp \fBpegasus\-kickstart\fR will return the return value of the main job\&. In addition, the error code 127 signals that the call to exec failed, and 126 that reconnecting the stdio failed\&. A job failing with the same exit codes is indistinguishable from \fBpegasus\-kickstart\fR failures\&. .SH "SEE ALSO" .sp pegasus\-plan(1), condor_submit_dag(1), condor_submit(1), getrusage(3c)\&. .sp \m[blue]\fBhttp://pegasus\&.isi\&.edu/wms/docs/schemas/iv\-2\&.1/iv\-2\&.1\&.html\fR\m[] .sp \m[blue]\fBhttp://pegasus\&.isi\&.edu/documentation\fR\m[] .SH "SUBJOBS" .sp Subjobs are a new feature and may have a few wrinkles left\&. .sp In order to allow specific setups and assertion checks for compute nodes, \fBpegasus\-kickstart\fR allows the optional execution of a \fIprejob\fR\&. This \fIprejob\fR is anything that the remote compute node is capable of executing\&. For modern Unix systems, this includes #! scripts interpreter invocations, as long as the x bits on the executed file are set\&. The main job is run if and only if the prejob returned regularly with an exit code of zero\&. .sp With similar restrictions, the optional execution of a \fIpostjob\fR is chained to the success of the main job\&. The postjob will be run, if the main job terminated normally with an exit code of zero\&. .sp In addition, a user may specify a \fIsetup\fR and a \fIcleanup\fR job\&. The \fIsetup\fR job sets up the remote execution environment\&. The \fIcleanup\fR job may tear down and clean\-up after any job ran\&. Failure to run the setup job has no impact on subsequent jobs\&. The cleanup is a job that will even be attempted to run for all failed jobs\&. No job information is passed\&. If you need to invoke multiple setup or clean\-up jobs, bundle them into a script, and invoke the clean\-up script\&. Failure of the clean\-up job is not meant to affect the progress of the remote workflow (DAGMan)\&. 
This may change in the future\&. .sp The setup\-, pre\-, and post\- and cleanup\-job run on the same compute node as the main job to execute\&. However, since they run in separate processes as children of \fBpegasus\-kickstart\fR, they are unable to influence each others nor the main jobs environment settings\&. .sp All jobs and their arguments are subject to variable substitutions as explained in the next section\&. .sp To specify the prejob, insert the the application invocation and any optional commandline argument into the environment variable \fIGRIDSTART_PREJOB\fR\&. If you are invoking from a shell, you might want to use single quotes to protect against the shell\&. If you are invoking from Globus, you can append the RSL string feature\&. From Condor, you can use Condor\(cqs notion of environment settings\&. In Pegasus use the \fIprofile\fR command to set generic scripts that will work on multiple sites, or the transformation catalog to set environment variables in a pool\-specific fashion\&. Please remember that the execution of the main job is chained to the success of the prejob\&. .sp To set up the postjob, use the environment variable \fIGRIDSTART_POSTJOB\fR to point to an application with potential arguments to execute\&. The same restrictions as for the prejob apply\&. Please note that the execution of the post job is chained to the main job\&. .sp To provide the independent setup job, use the environment variable \fIGRIDSTART_SETUP\fR\&. The exit code of the setup job has no influence on the remaining chain of jobs\&. To provide an independent cleanup job, use the environment variable \fIGRIDSTART_CLEANUP\fR to point to an application with possible arguments to execute\&. The same restrictions as for prejob and postjob apply\&. The cleanup is run regardless of the exit status of any other jobs\&. .SH "VARIABLE REWRITING" .sp Variable substitution is a new feature and may have a few wrinkles left\&. .sp The variable substitution employs simple rules from the Bourne shell syntax\&. Simple quoting rules for backslashed characters, double quotes and single quotes are obeyed\&. Thus, in order to pass a dollar sign to as argument to your job, it must be escaped with a backslash from the variable rewriting\&. .sp For pre\- and postjobs, double quotes allow the preservation of whitespace and the insertion of special characters like \ea (alarm), \eb (backspace), \en (newline), \er (carriage return), \et (horizontal tab), and \ev (vertical tab)\&. Octal modes are \fInot\fR allowed\&. Variables are still substituted in double quotes\&. Single quotes inside double quotes have no special meaning\&. .sp Inside single quotes, no variables are expanded\&. The backslash only escapes a single quote or backslash\&. .sp Backticks are not supported\&. .sp Variables are only substituted once\&. You cannot have variables in variables\&. If you need this feature, please request it\&. .sp Outside quotes, arguments from the pre\- and postjob are split on linear whitespace\&. The backslash makes the next character verbatim\&. .sp Variables that are rewritten must start with a dollar sign either outside quotes or inside double quotes\&. The dollar may be followed by a valid identifier\&. A valid identifier starts with a letter or the underscore\&. A valid identifier may contain further letters, digits or underscores\&. The identifier is case sensitive\&. .sp The alternative use is to enclose the identifier inside curly braces\&. 
In this case, almost any character is allowed for the identifier, including whitespace\&. This is the \fIonly\fR curly brace expansion\&. No other Bourne magic involving curly braces is supported\&. .sp One of the advantages of variable substitution is, for example, the ability to specify the application as \fI$HOME/bin/app1\fR in the transformation catalog, and thus to gridstart\&. As long as your home directory on any compute node has a \fIbin\fR directory that contains the application, the transformation catalog does not need to care about the true location of the application path on each pool\&. Even better, an administrator may decide to move your home directory to a different place\&. As long as the compute node is set up correctly, you don\(cqt have to adjust any Pegasus data\&. .sp Mind that variable substitution is an expert feature, as some degree of tricky quoting is required to protect substitutable variables and quotes from Globus, Condor and Pegasus in that order\&. Note that Condor uses the dollar sign for its own variables\&. .sp The variable substitution assumptions for the main job differ slightly from the prejob and postjob for technical reasons\&. The pre\- and postjob command lines are passed as one string\&. However, the main jobs command line is already split into pieces by the time it reaches \fBpegasus\-kickstart\fR\&. Thus, any whitespace on the main job\(cqs command line must be preserved, and further argument splitting avoided\&. .sp It is highly recommended to experiment on the Unix command line with the \fIecho\fR and \fIenv\fR applications to obtain a feeling for the different quoting mechanisms needed to achieve variable substitution\&. .SH "FEEDBACK CHANNEL" .sp A long\-running application may consider to stream back heart beats and other application\-specific monitoring and progress data\&. For this reason, \fBpegasus\-kickstart\fR provides a feedback channel\&. At start\-up, a transient named pipe, also known as FIFO, is created\&. While waiting for started jobs to finish, \fBpegasus\-kickstart\fR will attempt to read from the FIFO\&. By default, any information read will be encapsulated in XML tags, and written to \fIstderr\fR\&. Please note that in a Pegasus, Globus, Condor\-G environment, \fIstderr\fR will be GASS streamed or staged to the submit host\&. At the submit host, an application specific monitor may unpack the data chunks and could for instance visually display them, or aggregate them with other data\&. Please note that \fBpegasus\-kickstart\fR only provides a feedback channel\&. The content and interpretation is up to, and specific for the application\&. .sp In order to make an application gridstart aware, it needs to be able to write to a FIFO\&. The filename can be picked up from the environment variable \fBGRIDSTART_CHANNEL\fR which is provided to all jobs\&. Please note that the application must be prepared to handle the PIPE signal when writing to a FIFO, and must be able to cope with failing write operations\&. .SH "EXAMPLE" .sp You can run the \fBpegasus\-kickstart\fR executable locally to verify that it is functioning well\&. In the initial phase, the format of the performance data may be slightly adjusted\&. .sp .if n \{\ .RS 4 .\} .nf $ env GRIDSTART_PREJOB=\*(Aq/bin/usleep 250000\*(Aq \e\e GRIDSTART_POSTJOB=\*(Aq/bin/date \-u\*(Aq \e\e pegasus\-kickstart \-l xx \e\e$PEGASUS_HOME/bin/keg \-T1 \-o\- $ cat xx \&.\&.\&. 
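# The setup and cleanup jobs use the same environment mechanism, and
# variables like $HOME are substituted on the command line
# (an illustrative continuation, not part of the captured session):
$ env GRIDSTART_SETUP=\*(Aq/bin/date \-u\*(Aq \e\e
GRIDSTART_CLEANUP=\*(Aq/bin/date \-u\*(Aq \e\e
pegasus\-kickstart \-l xx /bin/echo $HOME/bin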
.fi .if n \{\ .RE .\} .sp Please take note a few things in the above example: .sp The output from the postjob is appended to the output of the main job on \fIstdout\fR\&. The output could potentially be separated into different data sections through different temporary files\&. If you truly need the separation, request that feature\&. .sp The log file is reported with a size of zero, because the log file did indeed barely exist at the time the data structure was (re\-) initialized\&. With regular GASS output, it will report the status of the socket file descriptor, though\&. .sp The file descriptors reported for the temporary files are from the perspective of \fBpegasus\-kickstart\fR\&. Since the temporary files have the close\-on\-exec flag set, \fBpegasus\-kickstart\fR\fIs file descriptors are invisible to the job processes\&. Still, the \*(Aqstdio\fR of the job processes are connected to the temporary files\&. .sp Even this output already appears large\&. The output may already be too large to guarantee that the append operation on networked pipes (GASS, NFS) are atomically written\&. .sp The current format of the performance data is as follows: .SH "OUTPUT FORMAT" .sp Refer to \m[blue]\fBhttp://pegasus\&.isi\&.edu/wms/docs/schemas/iv\-2\&.1/iv\-2\&.1\&.html\fR\m[] for an up\-to\-date description of elements and their attributes\&. Check with \m[blue]\fBhttp://pegasus\&.isi\&.edu/documentation\fR\m[] for invocation schemas with a higher version number\&. .SH "RESTRICTIONS" .sp There is no version for the Condor \fIstandard\fR universe\&. It is simply not possible within the constraints of Condor\&. .sp Due to its very nature, \fBpegasus\-kickstart\fR will also prove difficult to port outside the Unix environment\&. .sp Any of the pre\-, main\-, cleanup and postjob are unable to influence one another\(cqs visible environment\&. .sp Do not use a Pegasus transformation with just the name \fInull\fR and no namespace nor version\&. .sp First Condor, and then Unix, place a limit on the length of the command line\&. The additional space required for the gridstart invocation may silently overflow the maximum space, and cause applications to fail\&. If you suspect to work with many argument, try an argument\-file based approach\&. .sp A job failing with exit code 126 or 127 is indistinguishable from \fBpegasus\-kickstart\fR failing with the same exit codes\&. Sometimes, careful examination of the returned data can help\&. .sp If the logfile is collected into a shared file, due to the size of the data, simultaneous appends on a shared filesystem from different machines may still mangle data\&. Currently, file locking is not even attempted, although all data is written atomically from the perspective of \fBpegasus\-kickstart\fR\&. .sp The upper limit of characters of command line characters is currently not checked by \fBpegasus\-kickstart\fR\&. Thus, some variable substitutions could potentially result in a command line that is larger than permissible\&. .sp If the output or error file is opened in append mode, but the application decides to truncate its output file, as in the above example by opening \fI/dev/fd/1\fR inside \fIkeg\fR, the resulting file will still be truncated\&. This is correct behavior, but sometimes not obvious\&. .SH "FILES" .PP \fB/usr/share/pegasus/schema/iv\-2\&.1\&.xsd\fR .RS 4 is the suggested location of the latest XML schema describing the data on the submit host\&. 
.RE .SH "ENVIRONMENT VARIABLES" .PP \fBGRIDSTART_TMP\fR .RS 4 is the hightest priority to look for a temporary directory, if specified\&. This rather special variable was introduced to overcome some peculiarities with the FNAL cluster\&. .RE .PP \fBTMP\fR .RS 4 is the next hightest priority to look for a temporary directory, if specified\&. .RE .PP \fBTEMP\fR .RS 4 is the next priority for an environment variable denoting a temporary files directory\&. .RE .PP \fBTMPDIR\fR .RS 4 is next in the checklist\&. If none of these are found, either the \fIstdio\fR definition \fIP_tmpdir\fR is taken, or the fixed string \fI/tmp\fR\&. .RE .PP \fBGRIDSTART_SETUP\fR .RS 4 contains a string that starts a job to be executed unconditionally before any other jobs, see above for a detailed description\&. .RE .PP \fBGRIDSTART_PREJOB\fR .RS 4 contains a string that starts a job to be executed before the main job, see above for a detailed description\&. .RE .PP \fBGRIDSTART_POSTJOB\fR .RS 4 contains a string that starts a job to be executed conditionally after the main job, see above for a detailed description\&. .RE .PP \fBGRIDSTART_CLEANUP\fR .RS 4 contains a string that starts a job to be executed unconditionally after any of the previous jobs, see above for a detailed description\&. .RE .PP \fBGRIDSTART_CHANNEL\fR .RS 4 is the name of a FIFO for an application\-specific feedback\-channel, see above for a detailed description\&. .RE .SH "HISTORY" .sp As you may have noticed, \fBpegasus\-kickstart\fR had the name \fBkickstart\fR in previous incantations\&. We are slowly moving to the new name to avoid clashes in a larger OS installation setting\&. However, there is no pertinent need to change the internal name, too, as no name clashes are expected\&. .SH "AUTHORS" .sp Michael Milligan .sp Mike Wilde .sp Yong Zhao .sp Jens\-S\&. Vöckler .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu/\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-run.10000644000175000017500000001271511757531137021106 0ustar ryngerynge'\" t .\" Title: pegasus-run .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-RUN" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-run \- executes a workflow that has been planned using *pegasus\-plan*\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-run\fR [\fB\-D\fR\fIproperty=value\fR\&...][\fB\-c\fR \fIpropsfile\fR][\fB\-d\fR \fIlevel\fR] [\fB\-v][\fR\-\-grid*][rundir] .fi .SH "DESCRIPTION" .sp The \fBpegasus\-run\fR command executes a workflow that has been planned using \fBpegasus\-plan\fR\&. 
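.sp
For instance (run directory path illustrative), a planned workflow can be started with:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-run /scratch/dags/run0001
.fi
.if n \{\
.RE
.\}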
.sp
By default, \fBpegasus\-run\fR can be invoked either from within the planned directory, with no options and arguments, or with just the full path to the run directory\&.
\fBpegasus\-run\fR can also be used to resubmit a failed workflow by running the same command again\&.
.SH "OPTIONS"
.sp
By default, \fBpegasus\-run\fR does not require any options or arguments if invoked from within the planned workflow directory\&. If running the command outside the workflow directory, then a full path to the workflow directory needs to be specified\&.
.sp
\fBpegasus\-run\fR takes the following options:
.PP
\fB\-D\fR\fIproperty=value\fR
.RS 4
The \fB\-D\fR option allows an advanced user to override certain properties which influence \fBpegasus\-run\fR\&. One may set several CLI properties by giving this option multiple times\&.
.sp
The \fB\-D\fR option(s) must be the first option on the command line\&. CLI properties take precedence over the file\-based properties of the same key\&.
.sp
See the \fBPROPERTIES\fR section below\&.
.RE
.PP
\fB\-c\fR \fIpropsfile\fR, \fB\-\-conf\fR \fIpropsfile\fR
.RS 4
Provide a property file to override the default Pegasus properties file from the planning directory\&. Ordinary users do not need to use this option unless they specifically want to override several properties\&.
.RE
.PP
\fB\-d\fR \fIlevel\fR, \fB\-\-debug\fR \fIlevel\fR
.RS 4
Set the debug level for the client\&. Default is 0\&.
.RE
.PP
\fB\-v\fR, \fB\-\-verbose\fR
.RS 4
Raises the debug level\&. Each invocation increases the level by 1\&.
.RE
.PP
\fB\-\-grid\fR
.RS 4
Enable grid checks to see if your submit machine is GRID enabled\&.
.RE
.PP
\fIrundir\fR
.RS 4
Is the fully qualified path to the base directory containing the planned workflow DAG and submit files\&. This is optional if the \fBpegasus\-run\fR command is invoked from within the run directory\&.
.RE
.SH "RETURN VALUE"
.sp
If the workflow is submitted for execution, \fBpegasus\-run\fR returns with an exit code of 0\&. However, in case of error, a non\-zero return value indicates problems\&. An error message clearly marks the cause\&.
.SH "FILES"
.sp
The following files are created, opened or written to:
.PP
\fBbraindump\fR
.RS 4
This file is located in the rundir\&. pegasus\-run uses this file to find out paths to several other files, properties configurations etc\&.
.RE
.PP
\fBpegasus\&.?????????\&.properties\fR
.RS 4
This file is located in the rundir\&. pegasus\-run uses this properties file by default to configure its internal settings\&.
.RE
.PP
\fBworkflowname\&.dag\fR
.RS 4
pegasus\-run uses the workflowname\&.dag or workflowname\&.sh file and submits it either to Condor for execution or runs it locally in a shell environment\&.
.RE
.SH "PROPERTIES"
.sp
pegasus\-run reads its properties from several locations\&.
.PP
\fBRUNDIR/pegasus\&.??????????\&.properties\fR
.RS 4
The default location for pegasus\-run to read the properties from\&.
.RE
.PP
\fB\-\-conf propfile\fR
.RS 4
The properties file provided with the \-\-conf option replaces the default properties file used\&.
.RE
.PP
\fB$HOME/\&.pegasusrc\fR
.RS 4
Will be used if neither the default rundir properties nor a \-\-conf properties file are found\&.
.sp
Additionally, properties can be provided individually using the \fB\-Dpropkey\fR=\fIpropvalue\fR option on the command line, before all other options\&.
These properties will override properties provided using either \fB\-\-conf\fR or \fIRUNDIR/pegasus\&.???????\&.properties\fR or the \fI$HOME/\&.pegasusrc\fR\&.
.sp
The merge logic is: CONF PROPERTIES || DEFAULT RUNDIR PROPERTIES || PEGASUSRC, overridden by command line properties\&.
.RE
.SH "ENVIRONMENT VARIABLES"
.PP
\fBPATH\fR
.RS 4
The path variable is used to locate binaries for condor\-submit\-dag, condor\-dagman, condor\-submit, pegasus\-submit\-dag, pegasus\-dagman and pegasus\-monitord\&.
.RE
.SH "SEE ALSO"
.sp
pegasus\-plan(1)
.SH "AUTHORS"
.sp
Gaurang Mehta
.sp
Jens\-S\&. Vöckler
.sp
Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[]
pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-remove.10000644000175000017500000000630111757531137021571 0ustar ryngerynge'\" t
.\" Title: pegasus-remove
.\" Author: [see the "Authors" section]
.\" Generator: DocBook XSL Stylesheets v1.75.2
.\" Date: 02/28/2012
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "PEGASUS\-REMOVE" "1" "02/28/2012" "\ \&" "\ \&"
.\" -----------------------------------------------------------------
.\" * Define some portability stuff
.\" -----------------------------------------------------------------
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.\" http://bugs.debian.org/507673
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.\" -----------------------------------------------------------------
.\" * set default formatting
.\" -----------------------------------------------------------------
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
.ad l
.\" -----------------------------------------------------------------
.\" * MAIN CONTENT STARTS HERE *
.\" -----------------------------------------------------------------
.SH "NAME"
pegasus-remove \- removes a workflow that has been planned and submitted using pegasus\-plan and pegasus\-run
.SH "SYNOPSIS"
.sp
.nf
pegasus\-remove [\fB\-d\fR \fIdagid\fR] [\fB\-v\fR] [\fIrundir\fR]
.fi
.SH "DESCRIPTION"
.sp
The pegasus\-remove command removes a submitted/running workflow that has been planned and submitted using \fBpegasus\-plan\fR and \fBpegasus\-run\fR\&. The command can be invoked either in the planned directory with no options and arguments, or with just the full path to the run directory\&.
.SH "OPTIONS"
.sp
By default, pegasus\-remove does not require any options or arguments if invoked from within the planned workflow directory\&. If running the command outside the workflow directory, then a full path to the workflow directory needs to be specified, or the \fIdagid\fR of the workflow to be removed\&.
.sp
\fBpegasus\-remove\fR takes the following options:
.PP
\fB\-d\fR \fIdagid\fR, \fB\-\-dagid\fR \fIdagid\fR
.RS 4
The workflow dagid to remove\&.
.RE
.PP
\fB\-v\fR, \fB\-\-verbose\fR
.RS 4
Raises the debug level\&. Each invocation increases the level by 1\&.
.RE
.PP
\fIrundir\fR
.RS 4
Is the fully qualified path to the base directory containing the planned workflow DAG and submit files\&. This is optional if the pegasus\-remove command is invoked from within the run directory\&.
.RE
.SH "RETURN VALUE"
.sp
If the workflow is removed successfully, pegasus\-remove returns with an exit code of 0\&. However, in case of error, a non\-zero exit code indicates problems\&. An error message clearly marks the cause\&.
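.sp
For instance, either of the following invocations removes the same workflow (the run directory and dagid are illustrative):
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-remove /scratch/dags/run0001
$ pegasus\-remove \-d 42
.fi
.if n \{\
.RE
.\}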
.SH "FILES" .sp The following files are opened: .PP \fBbraindump\fR .RS 4 This file is located in the rundir\&. pegasus\-remove uses this file to find out paths to several other files\&. .RE .SH "ENVIRONMENT VARIABLES" .PP \fBPATH\fR .RS 4 The path variable is used to locate binary for \fBcondor_rm\fR\&. .RE .SH "SEE ALSO" .sp pegasus\-plan(1), pegasus\-run(1) .SH "AUTHORS" .sp Gaurang Mehta .sp Jens\-S\&. Vöckler .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-submit-dag.10000644000175000017500000000362211757531137022333 0ustar ryngerynge'\" t .\" Title: pegasus-submit-dag .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-SUBMIT\-DAG" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-submit-dag \- Wrapper around *condor_submit_dag*\&. Not to be run by user\&. .SH "DESCRIPTION" .sp The \fBpegasus\-submit\-dag\fR is a wrapper that invokes \fBcondor_submit_dag\fR\&. This is started automatically by \fBpegasus\-run\fR\&. \fBDO NOT USE DIRECTLY\fR .SH "RETURN VALUE" .sp If the workflow is submitted succesfully \fBpegasus\-submit\-dag\fR exits with 0, else exits with non\-zero\&. 
.SH "ENVIRONMENT VARIABLES"
.PP
\fBPATH\fR
.RS 4
The path variable is used to locate the binaries for \fBcondor_submit_dag\fR and \fBpegasus\-dagman\fR\&.
.RE
.SH "SEE ALSO"
.sp
pegasus\-run(1) pegasus\-dagman(1)
.SH "AUTHORS"
.sp
Gaurang Mehta
.sp
Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[]
pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-cluster.10000644000175000017500000003402011757531137021754 0ustar ryngerynge'\" t
.\" Title: pegasus-cluster
.\" Author: [see the "Authors" section]
.\" Generator: DocBook XSL Stylesheets v1.75.2
.\" Date: 02/28/2012
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "PEGASUS\-CLUSTER" "1" "02/28/2012" "\ \&" "\ \&"
.\" -----------------------------------------------------------------
.\" * Define some portability stuff
.\" -----------------------------------------------------------------
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.\" http://bugs.debian.org/507673
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.\" -----------------------------------------------------------------
.\" * set default formatting
.\" -----------------------------------------------------------------
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
.ad l
.\" -----------------------------------------------------------------
.\" * MAIN CONTENT STARTS HERE *
.\" -----------------------------------------------------------------
.SH "NAME"
pegasus-cluster \- run a list of applications
.SH "SYNOPSIS"
.sp
.nf
\fBpegasus\-cluster\fR [\fB\-d\fR] [\fB\-e\fR | \fB\-f\fR] [\fB\-S ec\fR] [\fB\-s fn\fR] [\fB\-R fn\fR] [\fB\-n nr\fR] [\fBinputfile\fR]
.fi
.SH "DESCRIPTION"
.sp
The \fBpegasus\-cluster\fR tool executes a list of applications in the order specified (assuming sequential mode)\&. It is generally used to do horizontal clustering of independent applications, and does not care about any application failures\&. Such failures should be caught by using \fBpegasus\-kickstart\fR to start the applications\&.
.sp
In vertical clustering mode, the \fIhard failure\fR mode is encouraged, ending execution as soon as one application fails\&. When running a complex workflow through \fBpegasus\-cluster\fR, the order of applications in the input file must be topologically sorted\&.
.sp
Applications are usually run using \fBpegasus\-kickstart\fR\&. In the \fBpegasus\-kickstart\fR case, all invocations of \fBpegasus\-kickstart\fR except the first should add the \fBpegasus\-kickstart\fR option \fI\-H\fR to suppress repeating the XML preamble and certain other headers of no interest when repeated\&.
.sp
\fBpegasus\-cluster\fR permits shell\-style quoting\&. One level of quoting is removed from the arguments\&. Please note that \fBpegasus\-kickstart\fR will also remove one level of quoting\&.
.SH "ARGUMENTS"
.PP
\fB\-d\fR
.RS 4
This option increases the debug level\&. Debug messages are generated on \fIstdout\fR\&. By default, debugging is minimal\&.
.RE
.PP
\fB\-e\fR
.RS 4
This flag turns on the old behavior of \fBpegasus\-cluster\fR to always run everything \fIand\fR return success no matter what\&. The \fB\-e\fR flag is mutually exclusive with the \fB\-f\fR flag\&. By default, all applications are executed regardless of failures\&. Any detected application failure results in a non\-zero exit status from \fBpegasus\-cluster\fR\&.
.RE .PP \fB\-f\fR .RS 4 In hard failure mode, as soon as one application fails, either through a non\-zero exit code, or by dying on a signal, further execution is stopped\&. In parallel execution mode, one or more other applications later in the sequence file may have been started already by the time failure is detected\&. \fBPegasus\-cluster\fR will wait for the completion of these applications, but not start new ones\&. The \fB\-f\fR flag is mutually exclusive with the \fB\-e\fR flag\&. By default, all applications are executed regardless of failures\&. Any detected application failure results in a non\-zero exit status from \fBpegasus\-cluster\fR\&. .RE .PP \fB\-h\fR .RS 4 This option prints the help message and exits the program\&. .RE .PP \fB\-s fn\fR .RS 4 This option will send protocol message (for Mei) to the specified file\&. By default, all message are written to \fIstdout\fR \&. .RE .PP \fB\-R fn\fR .RS 4 The progress reporting feature, if turned on, will write one event record whenever an application is started, and one event record whenever an application finished\&. This is to enable tracking of jobs in progress\&. By default, track logs are not written, unless the environment variable \fISEQEXEC_PROGRESS_REPORT\fR is set\&. If set, progress reports are appended to the file pointed to by the environment variable\&. .RE .PP \fB\-S ec\fR .RS 4 This option is a multi\-option, which may be used multiple times\&. For each given non\-zero exit\-code of an application, mark it as a form of success\&. In \fB\-f\fR mode, this means that \fBpegasus\-cluster\fR will not fail when seeing this exit code from any application it runs\&. By default, all non\-zero exit code constitute failure\&. .RE .PP \fB\-n nr\fR .RS 4 This option determines the amount of parallel execution\&. Typically, parallel execution is only recommended on multi\-core systems, and must be deployed rather carefully, i\&.e\&. only completely independent jobs across of whole \fIinputfile\fR should ever be attempted to be run in parallel\&. The argument \fBnr\fR is the number of parallel jobs that should be used\&. In addition to a non\-negative integer, the word \fIauto\fR is also understood\&. When \fIauto\fR is specified, \fBpegasus\-cluster\fR will attempt to automatically determine the number of cores available in the system\&. Strictly sequential execution, as if \fInr\fR was 1, is the default\&. If the environment variable \fISEQEXEC_CPUS\fR is set, it will determine the default number of CPUs\&. .RE .PP \fBinputfile\fR .RS 4 The input file specifies a list of application to run, one per line\&. Comments and empty lines are permitted\&. The comment character is the octothorpe (#), and extends to the end of line\&. By default, \fBpegasus\-cluster\fR uses \fIstdin\fR to read the list of applications to execute\&. .RE .SH "RETURN VALUE" .sp The \fBpegasus\-cluster\fR tool returns 1, if an illegal option was used\&. It returns 2, if the status file from option \fB\-s\fR cannot be opened\&. It returns 3, if the input file cannot be opened\&. It does \fInot\fR return any failure for failed applications in old\-exit \fB\-e\fR mode\&. In \fIdefault\fR and hard failure \fB\-f\fR mode, it will return 5 for true failure\&. The determination of failure is modified by the \fB\-S\fR option\&. .sp All other internal errors being absent, \fBpegasus\-cluster\fR will always return 0 when run without \fB\-f\fR \&. Unlike shell, it will \fInot\fR return the last application\(cqs exit code\&. 
In \fIdefault\fR mode, it will return 5, if any application failed\&. Unlike shell, it will \fInot\fR return the last application\(cqs exit code\&. However, it will execute all applications\&. The determination of failure is modified by the \fB\-S\fR flag\&. In \fB\-f\fR mode, *pegasus\-cluster returns either 0 if all main sequence applications succeeded, or 5 if one failed; or more than one in parallel execution mode\&. It will run only as long as applications were successful\&. As before, the *\-S flag determines what constitutes a failure\&. .sp The \fBpegasus\-cluster\fR application will also create a small summary on \fIstdout\fR for each job, and one for itself, about the success and failure\&. The field \fBfailed\fR reports any exit code that was not zero or a signal of death termination\&. It does \fInot\fR include non\-zero exit codes that were marked as success using the \fB\-S\fR option\&. .SH "TASK SUMMARY" .sp Each task executed by \fBpegasus\-cluster\fR generates a record bracketed by square brackets like this (each entry is broken over two lines for readability): .sp .if n \{\ .RS 4 .\} .nf [seqexec\-task id=1, start="2011\-04\-27T14:31:25\&.340\-07:00", duration=0\&.521, status=0, line=1, pid=18543, app="/bin/usleep"] [seqexec\-task id=2, start="2011\-04\-27T14:31:25\&.342\-07:00", duration=0\&.619, status=0, line=2, pid=18544, app="/bin/usleep"] [seqexec\-task id=3, start="2011\-04\-27T14:31:25\&.862\-07:00", duration=0\&.619, status=0, line=3, pid=18549, app="/bin/usleep"] .fi .if n \{\ .RE .\} .sp Each record is introduced by the string \fIseqexec\-task\fR with the following constituents, where strings are quoted: .PP \fBid\fR .RS 4 This is a numerical value for main sequence application, indicating the application\(cqs place in the sequence file\&. The setup task uses the string \fIsetup\fR , and the cleanup task uses the string \fIcleanup\fR \&. .RE .PP \fBstart\fR .RS 4 is the ISO 8601 time stamp, with millisecond resolution, when the application was started\&. This string is quoted\&. .RE .PP \fBduration\fR .RS 4 is the application wall\-time duration in seconds, with millisecond resolution\&. .RE .PP \fBstatus\fR .RS 4 is the \fIraw\fR exit status as returned by the \fIwait\fR family of system calls\&. Typically, the exit code is found in the high byte, and the signal of death in the low byte\&. Typically, 0 indicates a successful execution, and any other value a problem\&. However, details could differ between systems, and exit codes are only meaningful on the same os and architecture\&. .RE .PP \fBline\fR .RS 4 is the line number where the task was found in the main sequence file\&. Setup\- and cleanup tasks don\(cqt have this attribute\&. .RE .PP \fBpid\fR .RS 4 is the process id under which the application had run\&. .RE .PP \fBapp\fR .RS 4 is the path to the application that was started\&. As with the progress record, any \fBpegasus\-kickstart\fR will be parsed out so that you see the true application\&. 
.RE
.SH "PEGASUS\-CLUSTER SUMMARY"
.sp
The final summary of counts is a record bracketed by square brackets like this (broken over two lines for readability):
.sp
.if n \{\
.RS 4
.\}
.nf
[seqexec\-summary stat="ok", lines=3, tasks=3, succeeded=3, failed=0, extra=0,
 duration=1\&.143, start="2011\-04\-27T14:31:25\&.338\-07:00", pid=18542, app="\&./seqexec"]
.fi
.if n \{\
.RE
.\}
.sp
The record is introduced by the string \fIseqexec\-summary\fR with the following constituents:
.PP
\fBstat\fR
.RS 4
The string
\fIfail\fR
when
\fBpegasus\-cluster\fR
would return with an exit status of 5\&. Concretely, this is any failure in
\fIdefault\fR
mode, and the first failure in
\fB\-f\fR
mode\&. Otherwise, it will always be the string
\fIok\fR, if the record is produced\&.
.RE
.PP
\fBlines\fR
.RS 4
is the stopping line number of the input sequence file, indicating how far processing got\&. In
\fB\-f\fR
mode, up to the number of cores additional lines may already have been parsed\&.
.RE
.PP
\fBtasks\fR
.RS 4
is the number of tasks processed\&.
.RE
.PP
\fBsucceeded\fR
.RS 4
is the number of main sequence jobs that succeeded\&.
.RE
.PP
\fBfailed\fR
.RS 4
is the number of main sequence jobs that failed\&. The failure condition depends on the
\fB\-S\fR
settings, too\&.
.RE
.PP
\fBextra\fR
.RS 4
is 0, 1 or 2, depending on the existence of setup\- and cleanup jobs\&.
.RE
.PP
\fBduration\fR
.RS 4
is the duration in seconds, with millisecond resolution, for which
\fBpegasus\-cluster\fR
ran\&.
.RE
.PP
\fBstart\fR
.RS 4
is the start time of
\fBpegasus\-cluster\fR
as ISO 8601 time stamp\&.
.RE
.SH "SEE ALSO"
.sp
\fBpegasus\-kickstart(1)\fR
.SH "CAVEATS"
.sp
The \fB\-S\fR option sets success codes globally\&. It is not possible to activate success codes only for one specific application, and doing so would break the shell compatibility\&. Due to this global nature, use success codes sparingly, as a last\-resort emergency handler\&. In environments that permit better planning, you should use an application wrapper instead\&.
.SH "EXAMPLE"
.sp
The following shows an example input file to \fBpegasus\-cluster\fR making use of \fBpegasus\-kickstart\fR to track applications\&.
.sp
.if n \{\
.RS 4
.\}
.nf
#
# mkdir
/path/to/pegasus\-kickstart \-R HPC \-n mkdir /bin/mkdir \-m 2755 \-p split\-corpus split\-ne\-corpus
#
# drop\-dian
/path/to/pegasus\-kickstart \-H \-R HPC \-n drop\-dian \-o \*(Aq^f\-new\&.plain\*(Aq /path/to/drop\-dian /path/to/f\-tok\&.plain /path/to/f\-tok\&.NE
#
# split\-corpus
/path/to/pegasus\-kickstart \-H \-R HPC \-n split\-corpus /path/to/split\-seq\-new\&.pl 23 f\-new\&.plain split\-corpus/corpus\&.
#
# split\-corpus
/path/to/pegasus\-kickstart \-H \-R HPC \-n split\-corpus /path/to/split\-seq\-new\&.pl 23 /path/to/f\-tok\&.NE split\-ne\-corpus/corpus\&.
.fi
.if n \{\
.RE
.\}
.SH "ENVIRONMENT VARIABLES"
.sp
A number of environment variables permit you to influence the behavior of \fBpegasus\-cluster\fR during run\-time\&.
.PP
\fBSEQEXEC_PROGRESS_REPORT\fR
.RS 4
If this variable is set, and points to a writable file location, progress report records are appended to the file\&. While care is taken to atomically append records to the log file, in case concurrent instances of
\fBpegasus\-cluster\fR
are running, broken Linux NFS may still garble some content\&.
.RE
.PP
\fBSEQEXEC_CPUS\fR
.RS 4
If this variable is set to a non\-negative integer,
\fBpegasus\-cluster\fR
attempts to use that many CPUs\&. The special value
\fIauto\fR
permits auto\-detection of the number of CPUs available to
\fBpegasus\-cluster\fR
on the system\&.
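.sp
A hypothetical invocation (the input file name is made up):
.sp
.if n \{\
.RS 4
.\}
.nf
$ SEQEXEC_CPUS=auto pegasus\-cluster jobs\&.lst
.fi
.if n \{\
.RE
.\}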
.RE
.PP
\fBSEQEXEC_SETUP\fR
.RS 4
If this variable is set, and contains a single fully\-qualified path to an executable and arguments, this executable will be run before any jobs are started\&. The exit code of this setup job will have no effect upon the main job sequence\&. Success or failure will not be counted towards the summary\&.
.RE
.PP
\fBSEQEXEC_CLEANUP\fR
.RS 4
If this variable is set, and contains a single fully\-qualified path to an executable and arguments, this executable will be run before
\fBpegasus\-cluster\fR
quits\&. Failure of any previous job will have no effect on the ability to run this job\&. The exit code of the cleanup job will have no effect on the overall success or failure state\&. Success or failure will not be counted towards the summary\&.
.RE
.SH "HISTORY"
.sp
As you may have noticed, \fBpegasus\-cluster\fR had the name \fBseqexec\fR in previous incantations\&. We are slowly moving to the new name to avoid clashes in a larger OS installation setting\&. However, there is no pressing need to change the internal name as well, since no name clashes are expected\&.
.SH "AUTHORS"
.sp
Jens\-S\&. Vöckler
.sp
Pegasus \fBhttp://pegasus\&.isi\&.edu/\fR
pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-statistics.10000644000175000017500000001256011757531137022472 0ustar ryngerynge'\" t
.\" Title: pegasus-statistics
.\" Author: [see the "Authors" section]
.\" Generator: DocBook XSL Stylesheets v1.75.2
.\" Date: 02/28/2012
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "PEGASUS\-STATISTICS" "1" "02/28/2012" "\ \&" "\ \&"
.\" -----------------------------------------------------------------
.\" * Define some portability stuff
.\" -----------------------------------------------------------------
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.\" http://bugs.debian.org/507673
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.\" -----------------------------------------------------------------
.\" * set default formatting
.\" -----------------------------------------------------------------
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
.ad l
.\" -----------------------------------------------------------------
.\" * MAIN CONTENT STARTS HERE *
.\" -----------------------------------------------------------------
.SH "NAME"
pegasus-statistics \- A tool to generate statistics about the workflow run\&.
.SH "SYNOPSIS"
.sp
.nf
\fBpegasus\-statistics\fR [\fB\-h\fR|\fB\-\-help\fR] [\fB\-o\fR|\fB\-\-output\fR \fIdir\fR]
                   [\fB\-c\fR|\fB\-\-conf\fR \fIpropfile\fR] [\fB\-s\fR|\fB\-\-statistics\-level\fR \fIlevel\fR]
                   [\fB\-t\fR|\fB\-\-time\-filter\fR \fIfilter\fR] [\fB\-i\fR|\fB\-\-ignore\-db\-inconsistency\fR]
                   [\fB\-v\fR|\fB\-\-verbose\fR] [\fB\-q\fR|\fB\-\-quiet\fR] [\fIsubmitdir\fR]
.fi
.SH "DESCRIPTION"
.sp
pegasus\-statistics generates statistics about the workflow run, such as how many jobs, tasks, and sub\-workflows ran, and how many succeeded or failed\&. It generates job instance statistics such as run time and Condor queue delay\&. It generates invocation statistics information grouped by transformation name\&. It also generates job instance and invocation statistics information grouped by time and host\&.
.SH "OPTIONS"
.PP
\fB\-h\fR, \fB\-\-help\fR
.RS 4
Prints a usage summary with all the available command\-line options\&.
.RE
.PP
\fB\-o\fR \fIdir\fR, \fB\-\-output\fR \fIdir\fR
.RS 4
Writes the output to the given directory\&.
.RE
.PP
\fB\-c\fR \fIpropfile\fR, \fB\-\-conf\fR \fIpropfile\fR
.RS 4
The properties file to use\&. This option overrides all other property files\&.
.RE
.PP
\fB\-s\fR \fIlevel\fR, \fB\-\-statistics\-level\fR \fIlevel\fR
.RS 4
Specifies the statistics information to generate\&. Valid levels are:
\fBall\fR,
\fBsummary\fR,
\fBwf_stats\fR,
\fBjb_stats\fR,
\fBtf_stats\fR, and
\fBti_stats\fR\&. Default is
\fBsummary\fR\&. The output generated by pegasus\-statistics is based on the
\fIlevel\fR
set:
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBall\fR: generates all the statistics information\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBsummary\fR: generates the workflow statistics summary\&. In the case of a hierarchical workflow the summary is across all sub\-workflows\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBwf_stats\fR: generates the workflow statistics information of each individual workflow\&. In the case of a hierarchical workflow, workflow statistics are created for each sub\-workflow\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBjb_stats\fR: generates the job statistics information of each individual workflow\&. In the case of a hierarchical workflow, job statistics are created for each sub\-workflow\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBtf_stats\fR: generates the invocation statistics information of each individual workflow, grouped by transformation name\&. In the case of a hierarchical workflow, transformation statistics are created for each sub\-workflow\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBti_stats\fR: generates the job instance and invocation statistics like total count and runtime grouped by time and host\&.
.RE
.RE
.PP
\fB\-t\fR \fIfilter\fR, \fB\-\-time\-filter\fR \fIfilter\fR
.RS 4
Specifies the time filter to group the time statistics\&. Valid
\fIfilter\fR
values are:
\fBmonth\fR,
\fBweek\fR,
\fBday\fR,
\fBhour\fR\&. Default is
\fBday\fR\&.
.RE
.PP
\fB\-i\fR, \fB\-\-ignore\-db\-inconsistency\fR
.RS 4
Turn off the check for database consistency\&.
.RE
.PP
\fB\-v\fR, \fB\-\-verbose\fR
.RS 4
Increases the log level\&. If omitted, the default level will be set to WARNING\&. When this option is given, the log level is changed to INFO\&. If this option is repeated, the log level will be changed to DEBUG\&.
.RE
.PP
\fB\-q\fR, \fB\-\-quiet\fR
.RS 4
Decreases the log level\&. If omitted, the default level will be set to WARNING\&. When this option is given, the log level is changed to ERROR\&.
.RE
.SH "EXAMPLE"
.sp
Runs pegasus\-statistics and writes the output to the given directory:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-statistics \-o /scratch/statistics /scratch/grid\-setup/run0001
.fi
.if n \{\
.RE
.\}
.SH "AUTHORS"
.sp
Prasanth Thomas
.sp
Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[]
pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-analyzer.10000644000175000017500000003064011757531137022124 0ustar ryngerynge'\" t
.\" Title: pegasus-analyzer
.\" Author: [see the "Authors" section]
.\" Generator: DocBook XSL Stylesheets v1.75.2
.\" Date: 02/28/2012
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "PEGASUS\-ANALYZER" "1" "02/28/2012" "\ \&" "\ \&"
.\" -----------------------------------------------------------------
.\" * Define some portability stuff
.\" -----------------------------------------------------------------
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.\" http://bugs.debian.org/507673
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.\" -----------------------------------------------------------------
.\" * set default formatting
.\" -----------------------------------------------------------------
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
.ad l
.\" -----------------------------------------------------------------
.\" * MAIN CONTENT STARTS HERE *
.\" -----------------------------------------------------------------
.SH "NAME"
pegasus-analyzer \- debugs a workflow\&.
.SH "SYNOPSIS"
.sp
.nf
\fBpegasus\-analyzer\fR [\fB\-\-help\fR|\fB\-h\fR] [\fB\-\-quiet\fR|\fB\-q\fR] [\fB\-\-strict\fR|\fB\-s\fR]
                 [\fB\-\-monitord\fR|\fB\-m\fR|\fB\-t\fR] [\fB\-\-verbose\fR|\fB\-v\fR]
                 [\fB\-\-output\-dir\fR|\fB\-o\fR \fIoutput_dir\fR] [\fB\-\-dag\fR \fIdag_filename\fR]
                 [\fB\-\-dir\fR|\fB\-d\fR|\fB\-i\fR \fIinput_dir\fR] [\fB\-\-print\fR|\fB\-p\fR \fIprint_options\fR]
                 [\fB\-\-debug\-job\fR \fIjob\fR] [\fB\-\-debug\-dir\fR \fIdebug_dir\fR]
                 [\fB\-\-type\fR \fIworkflow_type\fR] [\fB\-\-conf\fR|\fB\-c\fR \fIproperty_file\fR]
                 [\fB\-\-files\fR] [\fB\-\-top\-dir\fR \fIdir_name\fR] [\fIworkflow_directory\fR]
.fi
.SH "DESCRIPTION"
.sp
\fBpegasus\-analyzer\fR is a command\-line utility for parsing the \fIjobstate\&.log\fR file and reporting successful and failed jobs\&. When executed without any options, it will query the \fBSQLite\fR or \fBMySQL\fR database and retrieve failed job information for the particular workflow\&. When invoked with the \fB\-\-files\fR option, it will retrieve information from several log files, isolating jobs that did not complete successfully, and printing their \fIstdout\fR and \fIstderr\fR so that users can get detailed information about their workflow runs\&.
.SH "OPTIONS"
.PP
\fB\-h\fR, \fB\-\-help\fR
.RS 4
Prints a usage summary with all the available command\-line options\&.
.RE
.PP
\fB\-q\fR, \fB\-\-quiet\fR
.RS 4
Only print the output and error filenames instead of their contents\&.
.RE
.PP
\fB\-s\fR, \fB\-\-strict\fR
.RS 4
Get jobs\*(Aq output and error filenames from the job\(cqs submit file\&.
.RE
.PP
\fB\-m\fR, \fB\-t\fR, \fB\-\-monitord\fR
.RS 4
Invoke \fBpegasus\-monitord\fR before analyzing the \fIjobstate\&.log\fR file\&.
Although \fBpegasus\-analyzer\fR can be executed during the workflow execution as well as after the workflow has already completed execution, \fBpegasus\-monitord\fR is always invoked with the \fB\-\-replay\fR option\&. Since multiple instances of \fBpegasus\-monitord\fR should not be executed simultaneously in the same workflow directory, the user should ensure that no other instances of \fBpegasus\-monitord\fR are running\&. If the \fIrun_directory\fR is writable, \fBpegasus\-analyzer\fR will create a \fIjobstate\&.log\fR file there, rotating an older log, if it is found\&. If the \fIrun_directory\fR is not writable (e\&.g\&. when the user debugging the workflow is not the same user that ran the workflow), \fBpegasus\-analyzer\fR will exit and ask the user to provide the \fB\-\-output\-dir\fR option, in order to provide an alternative location for \fBpegasus\-monitord\fR log files\&.
.RE
.PP
\fB\-v\fR, \fB\-\-verbose\fR
.RS 4
Sets the log level for \fBpegasus\-analyzer\fR\&. If omitted, the default \fIlevel\fR will be set to \fIWARNING\fR\&. When this option is given, the log level is changed to \fIINFO\fR\&. If this option is repeated, the log level will be changed to \fIDEBUG\fR\&.
.RE
.PP
\fB\-o\fR \fIoutput_dir\fR, \fB\-\-output\-dir\fR \fIoutput_dir\fR
.RS 4
This option provides an alternative location for all monitoring log files for a particular workflow\&. It is mainly used when a user does not have write privileges to a workflow directory and needs to generate the log files needed by \fBpegasus\-analyzer\fR\&. If this option is used in conjunction with the \fB\-\-monitord\fR option, it will invoke \fBpegasus\-monitord\fR using \fIoutput_dir\fR to store all output files\&. Because workflows can have sub\-workflows, \fBpegasus\-monitord\fR will create its files prepending the workflow \fIwf_uuid\fR to each filename\&. This way, multiple workflow files can be stored in the same directory\&. \fBpegasus\-analyzer\fR has built\-in logic to find the specific \fIjobstate\&.log\fR file by looking at the workflow \fIbraindump\&.txt\fR file first and figuring out the corresponding \fIwf_uuid\fR\&. If \fIoutput_dir\fR does not exist, it will be created\&.
.RE
.PP
\fB\-\-dag\fR \fIdag_filename\fR
.RS 4
In this option, \fIdag_filename\fR specifies the path to the \fIDAG\fR file to use\&. \fBpegasus\-analyzer\fR will get the directory information from the \fIdag_filename\fR\&. This option overrides the \fB\-\-dir\fR option below\&.
.RE
.PP
\fB\-d\fR \fIinput_dir\fR, \fB\-i\fR \fIinput_dir\fR, \fB\-\-dir\fR \fIinput_dir\fR
.RS 4
Makes \fBpegasus\-analyzer\fR look for the \fIjobstate\&.log\fR file in the \fIinput_dir\fR directory\&. If this option is omitted, \fBpegasus\-analyzer\fR will look in the current directory\&.
.RE
.PP
\fB\-p\fR \fIprint_options\fR, \fB\-\-print\fR \fIprint_options\fR
.RS 4
Tells \fBpegasus\-analyzer\fR what extra information it should print for failed jobs\&. \fIprint_options\fR is a comma\-delimited list of options, that include \fIpre\fR, \fIinvocation\fR, and/or \fIall\fR, which activates all printing options\&. With the \fIpre\fR option, \fBpegasus\-analyzer\fR will print the \fIpre\-script\fR information for failed jobs\&. For the \fIinvocation\fR option, \fBpegasus\-analyzer\fR will print the \fIinvocation\fR command, so users can manually run the failed job\&.
.RE
.PP
\fB\-\-debug\-job\fR \fIjob\fR
.RS 4
When given this option, \fBpegasus\-analyzer\fR turns on its \fIdebug_mode\fR, in which it can be used to debug a particular job\&.
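.sp
A hypothetical example (the job name and directories are made up):
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-analyzer \-\-debug\-job findrange_j3 \-\-debug\-dir /tmp/debug /home/user/run1
.fi
.if n \{\
.RE
.\}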
.sp
In this mode, \fBpegasus\-analyzer\fR will create a shell script in the \fIdebug_dir\fR (see below, for specifying it) and copy all necessary files to this local directory and then execute the job locally\&.
.RE
.PP
\fB\-\-debug\-dir\fR \fIdebug_dir\fR
.RS 4
When in \fIdebug_mode\fR, \fBpegasus\-analyzer\fR will create a temporary debug directory\&. Users can give this option in order to specify a particular \fIdebug_dir\fR directory to be used instead\&.
.RE
.PP
\fB\-\-type\fR \fIworkflow_type\fR
.RS 4
With this option, users specify which \fIworkflow_type\fR they want to debug\&. At this moment, the only \fIworkflow_type\fR available is \fBcondor\fR and it is the default value if this option is not specified\&.
.RE
.PP
\fB\-c\fR \fIproperty_file\fR, \fB\-\-conf\fR \fIproperty_file\fR
.RS 4
This option is used to specify an alternative property file, which may contain the path to the database to be used by \fBpegasus\-analyzer\fR\&. If this option is not specified, the config file specified in the \fBbraindump\&.txt\fR file will take precedence\&.
.RE
.PP
\fB\-\-files\fR
.RS 4
This option allows users to run \fBpegasus\-analyzer\fR using the files in the workflow directory instead of the database as the source of information\&. \fBpegasus\-analyzer\fR will output the same information; this option only changes where the data comes from\&.
.RE
.PP
\fB\-\-top\-dir\fR \fIdir_name\fR
.RS 4
This option enables \fBpegasus\-analyzer\fR to show information about sub\-workflows when using the database mode\&. When debugging a top\-level workflow with failures in sub\-workflows, the analyzer will automatically print the command users should use to debug a failed sub\-workflow\&. This allows the analyzer to find the database it needs to access\&.
.RE
.SH "ENVIRONMENT VARIABLES"
.sp
\fBpegasus\-analyzer\fR does not require that any environmental variables be set\&. It locates its required Python modules based on its own location, and therefore should not be moved outside of Pegasus\*(Aq bin directory\&.
.SH "EXAMPLE"
.sp
The simplest way to use \fBpegasus\-analyzer\fR is to go to the \fIrun_directory\fR and invoke the analyzer:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-analyzer \&.
.fi
.if n \{\
.RE
.\}
.sp
which will cause \fBpegasus\-analyzer\fR to print information about the workflow in the current directory\&.
.sp
\fBpegasus\-analyzer\fR output contains a summary, followed by detailed information about each job that either failed, or is in an unknown state\&. Here is the summary section of the output:
.sp
.if n \{\
.RS 4
.\}
.nf
**************************Summary***************************

 Total jobs         :     75 (100\&.00%)
 # jobs succeeded   :     41 (54\&.67%)
 # jobs failed      :      0 (0\&.00%)
 # jobs unsubmitted :     33 (44\&.00%)
 # jobs unknown     :      1 (1\&.33%)
.fi
.if n \{\
.RE
.\}
.sp
\fIjobs_succeeded\fR are jobs that have completed successfully\&. \fIjobs_failed\fR are jobs that have finished, but that did not complete successfully\&. \fIjobs_unsubmitted\fR are jobs that are listed in the \fIdag_file\fR, but no information about them was found in the \fIjobstate\&.log\fR file\&. Finally, \fIjobs_unknown\fR are jobs that have started, but have not reached completion\&.
.sp
After the summary section, \fBpegasus\-analyzer\fR will display information about each job in the \fIjob_failed\fR and \fIjob_unknown\fR categories\&.
.sp
.if n \{\
.RS 4
.\}
.nf
******************Failed jobs\*(Aq details**********************

=======================findrange_j3=========================

 last state: POST_SCRIPT_FAILURE
       site: local
submit file: /home/user/diamond\-submit/findrange_j3\&.sub
output file: /home/user/diamond\-submit/findrange_j3\&.out\&.000
 error file: /home/user/diamond\-submit/findrange_j3\&.err\&.000

\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-Task #1 \- Summary\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-

site        : local
hostname    : server\-machine\&.domain\&.com
executable  : (null)
arguments   : \-a findrange \-T 60 \-i f\&.b2 \-o f\&.c2
error       : 2
working dir :
.fi
.if n \{\
.RE
.\}
.sp
In the example above, the \fIfindrange_j3\fR job has failed, and the analyzer displays information about the job, showing that the job finished with a \fIPOST_SCRIPT_FAILURE\fR, and lists the \fIsubmit\fR, \fIoutput\fR and \fIerror\fR files for this job\&. Whenever \fBpegasus\-analyzer\fR detects that the output file contains a kickstart record, it will display the breakdown containing each task in the job (in this case we only have one task)\&. Because \fBpegasus\-analyzer\fR was not invoked with the \fB\-\-quiet\fR flag, it will also display the contents of the \fIoutput\fR and \fIerror\fR files (or the stdout and stderr sections of the kickstart record), which in this case are both empty\&.
.sp
In the case of \fISUBDAG\fR and \fIsubdax\fR jobs, \fBpegasus\-analyzer\fR will indicate it, and show the command needed for the user to debug that sub\-workflow\&. For example:
.sp
.if n \{\
.RS 4
.\}
.nf
=================subdax_black_ID000009=====================

 last state: JOB_FAILURE
       site: local
submit file: /home/user/run1/subdax_black_ID000009\&.sub
output file: /home/user/run1/subdax_black_ID000009\&.out
 error file: /home/user/run1/subdax_black_ID000009\&.err

 This job contains sub workflows!
 Please run the command below for more information:
 pegasus\-analyzer \-d /home/user/run1/blackdiamond_ID000009\&.000

\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-subdax_black_ID000009\&.out\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-

Executing condor dagman \&.\&.\&.

\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-subdax_black_ID000009\&.err\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-\-
.fi
.if n \{\
.RE
.\}
.sp
tells the user the \fIsubdax_black_ID000009\fR sub\-workflow failed, and that it can be debugged by using the indicated \fBpegasus\-analyzer\fR command\&.
.SH "SEE ALSO"
.sp
pegasus\-status(1), pegasus\-monitord(1), pegasus\-statistics(1)\&.
.SH "AUTHORS"
.sp
Fabio Silva
.sp
Karan Vahi
.sp
Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[]
pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-s3.10000644000175000017500000004637211757531137020625 0ustar ryngerynge'\" t
.\" Title: pegasus-s3
.\" Author: [see the "Author" section]
.\" Generator: DocBook XSL Stylesheets v1.75.2
.\" Date: 02/28/2012
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "PEGASUS\-S3" "1" "02/28/2012" "\ \&" "\ \&"
.\" -----------------------------------------------------------------
.\" * Define some portability stuff
.\" -----------------------------------------------------------------
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.\" http://bugs.debian.org/507673
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.\" -----------------------------------------------------------------
.\" * set default formatting
.\" -----------------------------------------------------------------
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
.ad l
.\" -----------------------------------------------------------------
.\" * MAIN CONTENT STARTS HERE *
.\" -----------------------------------------------------------------
.SH "NAME"
pegasus-s3 \- Upload, download, delete objects in Amazon S3
.SH "SYNOPSIS"
.sp
.nf
\fBpegasus\-s3\fR \fBhelp\fR
\fBpegasus\-s3\fR \fBls\fR [options] \fIURL\fR
\fBpegasus\-s3\fR \fBmkdir\fR [options] \fIURL\&...\fR
\fBpegasus\-s3\fR \fBrmdir\fR [options] \fIURL\&...\fR
\fBpegasus\-s3\fR \fBrm\fR [options] [\fIURL\&...\fR]
\fBpegasus\-s3\fR \fBput\fR [options] \fIFILE\fR \fIURL\fR
\fBpegasus\-s3\fR \fBget\fR [options] \fIURL\fR [\fIFILE\fR]
\fBpegasus\-s3\fR \fBlsup\fR [options] \fIURL\fR
\fBpegasus\-s3\fR \fBrmup\fR [options] \fIURL\fR [\fIUPLOAD\fR]
.fi
.SH "DESCRIPTION"
.sp
\fBpegasus\-s3\fR is a client for the Amazon S3 object storage service and any other storage services that conform to the Amazon S3 API, such as Eucalyptus Walrus\&.
.SH "OPTIONS"
.SS "Global Options"
.PP
\fB\-h\fR, \fB\-\-help\fR
.RS 4
Show help message for subcommand and exit
.RE
.PP
\fB\-d\fR, \fB\-\-debug\fR
.RS 4
Turn on debugging
.RE
.PP
\fB\-v\fR, \fB\-\-verbose\fR
.RS 4
Show progress messages
.RE
.PP
\fB\-C\fR \fIFILE\fR, \fB\-\-conf\fR=\fIFILE\fR
.RS 4
Path to configuration file
.RE
.SS "rm Options"
.PP
\fB\-f\fR, \fB\-\-force\fR
.RS 4
If the URL does not exist, then ignore the error\&.
.RE
.PP
\fB\-F\fR \fIFILE\fR, \fB\-\-file\fR=\fIFILE\fR
.RS 4
File containing a list of URLs to delete
.RE
.SS "put Options"
.PP
\fB\-c\fR \fIX\fR, \fB\-\-chunksize\fR=\fIX\fR
.RS 4
Set the chunk size for multipart uploads to X MB\&. A value of 0 disables multipart uploads\&. The default is 10MB, the min is 5MB and the max is 1024MB\&. This parameter only applies for sites that support multipart uploads (see the multipart_uploads configuration parameter in the \fBCONFIGURATION\fR section)\&. The maximum number of chunks is 10,000, so if you are uploading a large file, then the chunk size is automatically increased to enable the upload\&. Choose smaller values to reduce the impact of transient failures\&.
.RE
.PP
\fB\-p\fR \fIN\fR, \fB\-\-parallel\fR=\fIN\fR
.RS 4
Use N threads to upload \fIFILE\fR in parallel\&. The default value is 0, which disables parallel uploads\&. This parameter is only valid if the site supports multipart uploads and the \fB\-\-chunksize\fR parameter is not 0\&.
.RE
.PP
\fB\-b\fR, \fB\-\-create\-bucket\fR
.RS 4
Create the destination bucket if it does not already exist
.RE
.SS "get Options"
.PP
\fB\-c\fR \fIX\fR, \fB\-\-chunksize\fR=\fIX\fR
.RS 4
Set the chunk size for parallel downloads to X megabytes\&. A value of 0 will avoid chunked reads\&. This option only applies for sites that support ranged downloads (see the ranged_downloads configuration parameter)\&. The default chunk size is 10MB, the min is 1MB and the max is 1024MB\&. Choose smaller values to reduce the impact of transient failures\&.
.RE
.PP
\fB\-p\fR \fIN\fR, \fB\-\-parallel\fR=\fIN\fR
.RS 4
Use N threads to download \fIFILE\fR in parallel\&. The default value is 0, which disables parallel downloads\&. This parameter is only valid if the site supports ranged downloads and the \fB\-\-chunksize\fR parameter is not 0\&.
.RE
.SS "rmup Options"
.PP
\fB\-a\fR, \fB\-\-all\fR
.RS 4
Cancel all uploads for the specified bucket
.RE
.SH "SUBCOMMANDS"
.sp
\fBpegasus\-s3\fR has several subcommands for different storage service operations\&.
.PP
\fBhelp\fR
.RS 4
The help subcommand lists all available subcommands\&.
.RE
.PP
\fBls\fR
.RS 4
The \fBls\fR subcommand lists the contents of a URL\&. If the URL does not contain a bucket, then all the buckets owned by the user are listed\&. If the URL contains a bucket, but no key, then all the keys in the bucket are listed\&. If the URL contains a bucket and a key, then all keys in the bucket that begin with the specified key are listed\&.
.RE
.PP
\fBmkdir\fR
.RS 4
The \fBmkdir\fR subcommand creates one or more buckets\&.
.RE
.PP
\fBrmdir\fR
.RS 4
The \fBrmdir\fR subcommand deletes one or more buckets from the storage service\&. In order to delete a bucket, the bucket must be empty\&.
.RE
.PP
\fBrm\fR
.RS 4
The \fBrm\fR subcommand deletes one or more keys from the storage service\&.
.RE
.PP
\fBput\fR
.RS 4
The \fBput\fR subcommand stores the file specified by FILE in the storage service under the bucket and key specified by URL\&. If the URL contains a bucket, but not a key, then the file name is used as the key\&.
.sp
If a transient failure occurs, then the upload will be retried several times before \fBpegasus\-s3\fR gives up and fails\&.
.sp
The \fBput\fR subcommand can do both chunked and parallel uploads if the service supports multipart uploads (see \fBmultipart_uploads\fR in the \fBCONFIGURATION\fR section)\&. Currently only Amazon S3 supports multipart uploads\&.
.sp
This subcommand will check the size of the file to make sure it can be stored before attempting to store it\&.
.sp
Chunked uploads are useful to reduce the probability of an upload failing\&. If an upload is chunked, then \fBpegasus\-s3\fR issues separate PUT requests for each chunk of the file\&. Specifying smaller chunks (using \fB\-\-chunksize\fR) will reduce the chances of an upload failing due to a transient error\&. Chunk sizes can range from 5 MB to 1GB (chunk sizes smaller than 5 MB produced incomplete uploads on Amazon S3)\&. The maximum number of chunks for any single file is 10,000, so if a large file is being uploaded with a small chunksize, then the chunksize will be increased to fit within the 10,000 chunk limit\&. By default, the file will be split into 10 MB chunks if the storage service supports multipart uploads\&. Chunked uploads can be disabled by specifying a chunksize of 0\&. If the upload is chunked, then each chunk is retried independently under transient failures\&. If any chunk fails permanently, then the upload is aborted\&.
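.sp
For example, chunking could be disabled for a small file like this (the file and bucket names are illustrative):
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 put \-\-chunksize 0 small\&.dat s3://user@amazon/bar/small\&.dat
.fi
.if n \{\
.RE
.\}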
.sp
Parallel uploads can increase performance for services that support multipart uploads\&. In a parallel upload the file is split into N chunks and each chunk is uploaded concurrently by one of M threads in first\-come, first\-served fashion\&. If the chunksize is set to 0, then parallel uploads are disabled\&. If M > N, then the actual number of threads used will be reduced to N\&. The number of threads can be specified using the \-\-parallel argument\&. If \-\-parallel is 0 or 1, then only a single thread is used\&. The default value is 0\&. There is no maximum number of threads, but it is likely that the link will be saturated by 4 threads\&. Very high\-bandwidth, long\-delay links may get better results with up to 8 threads\&.
.sp
Under certain circumstances, when a multipart upload fails it could leave behind data on the server\&. When a failure occurs the \fBput\fR subcommand will attempt to abort the upload\&. If the upload cannot be aborted, then a partial upload may remain on the server\&. To check for partial uploads run the \fBlsup\fR subcommand\&. If you see an upload that failed in the output of \fBlsup\fR, then run the \fBrmup\fR subcommand to remove it\&.
.RE
.PP
\fBget\fR
.RS 4
The \fBget\fR subcommand retrieves an object from the storage service identified by URL and stores it in the file specified by FILE\&. If FILE is not specified, then the key is used as the file name (Note: if the key has slashes, then the file name will be a relative subdirectory, but \fBpegasus\-s3\fR will not create the subdirectory if it does not exist)\&.
.sp
If a transient failure occurs, then the download will be retried several times before \fBpegasus\-s3\fR gives up and fails\&.
.sp
The \fBget\fR subcommand can do both chunked and parallel downloads if the service supports ranged downloads (see \fBranged_downloads\fR in the \fBCONFIGURATION\fR section)\&. Currently only Amazon S3 has good support for ranged downloads\&. Eucalyptus Walrus supports ranged downloads, but the current release, 1\&.6, is inconsistent with the Amazon interface and has a bug that causes ranged downloads to hang in some cases\&. It is recommended that ranged downloads not be used with Eucalyptus until these issues are resolved\&.
.sp
Chunked downloads can be used to reduce the probability of a download failing\&. When a download is chunked, \fBpegasus\-s3\fR issues separate GET requests for each chunk of the file\&. Specifying smaller chunks (using \fB\-\-chunksize\fR) will reduce the chances that a download will fail due to a transient error\&. Chunk sizes can range from 1 MB to 1 GB\&. By default, a download will be split into 10 MB chunks if the site supports ranged downloads\&. Chunked downloads can be disabled by specifying a \fB\-\-chunksize\fR of 0\&. If a download is chunked, then each chunk is retried independently under transient failures\&. If any chunk fails permanently, then the download is aborted\&.
.sp
Parallel downloads can increase performance for services that support ranged downloads\&. In a parallel download, the file to be retrieved is split into N chunks and each chunk is downloaded concurrently by one of M threads in a first\-come, first\-served fashion\&. If the chunksize is 0, then parallel downloads are disabled\&. If M > N, then the actual number of threads used will be reduced to N\&. The number of threads can be specified using the \-\-parallel argument\&. If \-\-parallel is 0 or 1, then only a single thread is used\&. The default value is 0\&.
There is no maximum number of threads, but it is likely that the link will be saturated by 4 threads\&. Very high\-bandwidth, long\-delay links may get better results with up to 8 threads\&.
.RE
.PP
\fBlsup\fR
.RS 4
The \fBlsup\fR subcommand lists active multipart uploads\&. The URL specified should point to a bucket\&. This command is only valid if the site supports multipart uploads\&. The output of this command is a list of keys and upload IDs\&.
.sp
This subcommand is used with \fBrmup\fR to help recover from failures of multipart uploads\&.
.RE
.PP
\fBrmup\fR
.RS 4
The \fBrmup\fR subcommand cancels an active upload\&. The URL specified should point to a bucket, and UPLOAD is the long, complicated upload ID shown by the \fBlsup\fR subcommand\&.
.sp
This subcommand is used with \fBlsup\fR to recover from failures of multipart uploads\&.
.RE
.SH "URL FORMAT"
.sp
All URLs for objects stored in S3 should be specified in the following format:
.sp
.if n \{\
.RS 4
.\}
.nf
s3[s]://USER@SITE[/BUCKET[/KEY]]
.fi
.if n \{\
.RE
.\}
.sp
The protocol part can be \fIs3://\fR or \fIs3s://\fR\&. If \fIs3s://\fR is used, then \fBpegasus\-s3\fR will force the connection to use SSL and override the setting in the configuration file\&. If s3:// is used, then whether the connection uses SSL or not is determined by the value of the \fIendpoint\fR variable in the configuration for the site\&.
.sp
The \fIUSER@SITE\fR part is required, but the \fIBUCKET\fR and \fIKEY\fR parts may be optional depending on the context\&.
.sp
The \fIUSER@SITE\fR portion is referred to as the \(lqidentity\(rq, and the \fISITE\fR portion is referred to as the \(lqsite\(rq\&. Both the identity and the site are looked up in the configuration file (see \fBCONFIGURATION\fR) to determine the parameters to use when establishing a connection to the service\&. The site portion is used to find the host and port, whether to use SSL, and other things\&. The identity portion is used to determine which authentication tokens to use\&. This format is designed to enable users to easily use multiple services with multiple authentication tokens\&. Note that neither the \fIUSER\fR nor the \fISITE\fR portion of the URL has any meaning outside of \fBpegasus\-s3\fR\&. They do not refer to real usernames or hostnames, but are rather handles used to look up configuration values in the configuration file\&.
.sp
The BUCKET portion of the URL is the part between the 3rd and 4th slashes\&. Buckets are part of a global namespace that is shared with other users of the storage service\&. As such, they should be unique\&.
.sp
The KEY portion of the URL is anything after the 4th slash\&. Keys can include slashes, but S3\-like storage services do not have the concept of a directory like regular file systems\&. Instead, keys are treated like opaque identifiers for individual objects\&. So, for example, the keys \fIa/b\fR and \fIa/c\fR have a common prefix, but cannot be said to be in the same \fIdirectory\fR\&.
.sp
Some example URLs are:
.sp
.if n \{\
.RS 4
.\}
.nf
s3://ewa@amazon
s3://juve@skynet/gideon\&.isi\&.edu
s3://juve@magellan/pegasus\-images/centos\-5\&.5\-x86_64\-20101101\&.part\&.1
s3s://ewa@amazon/pegasus\-images/data\&.tar\&.gz
.fi
.if n \{\
.RE
.\}
.SH "CONFIGURATION"
.sp
Each user should specify a configuration file that \fBpegasus\-s3\fR will use to look up connection parameters and authentication tokens\&.
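.sp
For example (the path is illustrative), an alternative configuration file can be selected with the global \fB\-C\fR option:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 ls \-C /path/to/s3\&.conf s3://user@amazon
.fi
.if n \{\
.RE
.\}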
.SS "Search Path"
.sp
This client will look in the following locations, in order, to locate the user\(cqs configuration file:
.sp
.RS 4
.ie n \{\
\h'-04' 1.\h'+01'\c
.\}
.el \{\
.sp -1
.IP " 1." 4.2
.\}
The \-C/\-\-conf argument
.RE
.sp
.RS 4
.ie n \{\
\h'-04' 2.\h'+01'\c
.\}
.el \{\
.sp -1
.IP " 2." 4.2
.\}
The S3CFG environment variable
.RE
.sp
.RS 4
.ie n \{\
\h'-04' 3.\h'+01'\c
.\}
.el \{\
.sp -1
.IP " 3." 4.2
.\}
$HOME/\&.s3cfg
.RE
.sp
If it does not find the configuration file in one of these locations it will fail with an error\&.
.SS "Configuration File Format"
.sp
The configuration file is in INI format and contains two types of entries\&.
.sp
The first type of entry is a site entry, which specifies the configuration for a storage service\&. This entry specifies the service endpoint that \fBpegasus\-s3\fR should connect to for the site, and some optional features that the site may support\&. Here is an example of a site entry for Amazon S3:
.sp
.if n \{\
.RS 4
.\}
.nf
[amazon]
endpoint = http://s3\&.amazonaws\&.com/
.fi
.if n \{\
.RE
.\}
.sp
The other type of entry is an identity entry, which specifies the authentication information for a user at a particular site\&. Here is an example of an identity entry:
.sp
.if n \{\
.RS 4
.\}
.nf
[pegasus@amazon]
access_key = 90c4143642cb097c88fe2ec66ce4ad4e
secret_key = a0e3840e5baee6abb08be68e81674dca
.fi
.if n \{\
.RE
.\}
.sp
It is important to note that user names and site names used are only logical\(emthey do not correspond to actual hostnames or usernames, but are simply used as a convenient way to refer to the services and identities used by the client\&.
.sp
The configuration file should be saved with limited permissions\&. Only the owner of the file should be able to read from it and write to it (i\&.e\&. it should have permissions of 0600 or 0400)\&. If the file has more liberal permissions, then \fBpegasus\-s3\fR will fail with an error message\&. The purpose of this is to prevent the authentication tokens stored in the configuration file from being accessed by other users\&.
.SS "Configuration Variables"
.PP
\fBendpoint\fR (site)
.RS 4
The URL of the web service endpoint\&. If the URL begins with \fIhttps\fR, then SSL will be used\&.
.RE
.PP
\fBmax_object_size\fR (site)
.RS 4
The maximum size of an object in GB (default: 5GB)
.RE
.PP
\fBmultipart_uploads\fR (site)
.RS 4
Does the service support multipart uploads? (True/False, default: False)
.RE
.PP
\fBranged_downloads\fR (site)
.RS 4
Does the service support ranged downloads? (True/False, default: False)
.RE
.PP
\fBaccess_key\fR (identity)
.RS 4
The access key for the identity
.RE
.PP
\fBsecret_key\fR (identity)
.RS 4
The secret key for the identity
.RE
.SS "Example Configuration"
.sp
This is an example configuration that specifies two sites (amazon and magellan) and three identities (pegasus@amazon, juve@magellan, and voeckler@magellan)\&. For the amazon site the maximum object size is 5TB, and the site supports both multipart uploads and ranged downloads, so both uploads and downloads can be done in parallel\&.
.sp
.if n \{\
.RS 4
.\}
.nf
[amazon]
endpoint = https://s3\&.amazonaws\&.com/
max_object_size = 5120
multipart_uploads = True
ranged_downloads = True

[pegasus@amazon]
access_key = 90c4143642cb097c88fe2ec66ce4ad4e
secret_key = a0e3840e5baee6abb08be68e81674dca

[magellan]
# NERSC Magellan is a Eucalyptus site\&. It doesn\*(Aqt support multipart uploads,
# or ranged downloads (the defaults), and the maximum object size is 5GB
# (also the default)
endpoint = https://128\&.55\&.69\&.235:8773/services/Walrus

[juve@magellan]
access_key = quwefahsdpfwlkewqjsdoijldsdf
secret_key = asdfa9wejalsdjfljasldjfasdfa

[voeckler@magellan]
# Each site can have multiple associated identities
access_key = asdkfaweasdfbaeiwhkjfbaqwhei
secret_key = asdhfuinakwjelfuhalsdflahsdl
.fi
.if n \{\
.RE
.\}
.SH "EXAMPLE"
.sp
List all buckets owned by identity \fIuser@amazon\fR:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 ls s3://user@amazon
.fi
.if n \{\
.RE
.\}
.sp
List the contents of bucket \fIbar\fR for identity \fIuser@amazon\fR:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 ls s3://user@amazon/bar
.fi
.if n \{\
.RE
.\}
.sp
List all objects in bucket \fIbar\fR that start with \fIhello\fR:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 ls s3://user@amazon/bar/hello
.fi
.if n \{\
.RE
.\}
.sp
Create a bucket called \fImybucket\fR for identity \fIuser@amazon\fR:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 mkdir s3://user@amazon/mybucket
.fi
.if n \{\
.RE
.\}
.sp
Delete a bucket called \fImybucket\fR:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 rmdir s3://user@amazon/mybucket
.fi
.if n \{\
.RE
.\}
.sp
Upload a file \fIfoo\fR to bucket \fIbar\fR:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 put foo s3://user@amazon/bar/foo
.fi
.if n \{\
.RE
.\}
.sp
Download an object \fIfoo\fR in bucket \fIbar\fR:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 get s3://user@amazon/bar/foo foo
.fi
.if n \{\
.RE
.\}
.sp
Upload a file in parallel with 4 threads and 100MB chunks:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 put \-\-parallel 4 \-\-chunksize 100 foo s3://user@amazon/bar/foo
.fi
.if n \{\
.RE
.\}
.sp
Download an object in parallel with 4 threads and 100MB chunks:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 get \-\-parallel 4 \-\-chunksize 100 s3://user@amazon/bar/foo foo
.fi
.if n \{\
.RE
.\}
.sp
List all partial uploads for bucket \fIbar\fR:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 lsup s3://user@amazon/bar
.fi
.if n \{\
.RE
.\}
.sp
Remove all partial uploads for bucket \fIbar\fR:
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 rmup \-\-all s3://user@amazon/bar
.fi
.if n \{\
.RE
.\}
.SH "RETURN VALUE"
.sp
\fBpegasus\-s3\fR returns a zero exit status if the operation is successful\&. A non\-zero exit status is returned in case of failure\&.
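.sp
For example, the exit status can drive simple error handling in shell scripts (the bucket and key names are illustrative):
.sp
.if n \{\
.RS 4
.\}
.nf
$ pegasus\-s3 get s3://user@amazon/bar/foo foo || echo "download failed"
.fi
.if n \{\
.RE
.\}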
.SH "AUTHOR"
.sp
Gideon Juve
.sp
Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[]
pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-rc-client.10000644000175000017500000002305711757531137022163 0ustar ryngerynge'\" t
.\" Title: pegasus-rc-client
.\" Author: [see the "Authors" section]
.\" Generator: DocBook XSL Stylesheets v1.75.2
.\" Date: 02/28/2012
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "PEGASUS\-RC\-CLIENT" "1" "02/28/2012" "\ \&" "\ \&"
.\" -----------------------------------------------------------------
.\" * Define some portability stuff
.\" -----------------------------------------------------------------
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.\" http://bugs.debian.org/507673
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.\" -----------------------------------------------------------------
.\" * set default formatting
.\" -----------------------------------------------------------------
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
.ad l
.\" -----------------------------------------------------------------
.\" * MAIN CONTENT STARTS HERE *
.\" -----------------------------------------------------------------
.SH "NAME"
pegasus-rc-client \- shell client for replica implementations
.SH "SYNOPSIS"
.sp
.nf
\fBpegasus\-rc\-client\fR [\fB\-D\fR\fIproperty=value\fR[\&...]] [\fB\-V\fR]
                  [\fB\-c\fR \fIfn\fR] [\fB\-p\fR \fIk=v\fR]
                  [[\fB\-f\fR \fIfn\fR]|[\fB\-i\fR|\fB\-d\fR \fIfn\fR]|[\fIcmd\fR [\fIargs\fR]]
.fi
.SH "DESCRIPTION"
.sp
The shell interface to replica catalog implementations is a prototype\&. It determines from various property settings which class implements the replica manager interface, and loads that driver at run\-time\&. Some commands depend on the implementation\&.
.SH "OPTIONS"
.sp
Any option will be displayed with its long options synonym(s)\&.
.PP
\fB\-D\fR\fIproperty=value\fR
.RS 4
The \fB\-D\fR option allows an experienced user to override certain properties which influence the program execution, among them the default location of the user\(cqs properties file and the PEGASUS home location\&. One may set several CLI properties by giving this option multiple times\&. The \fB\-D\fR option(s) must be the first option on the command line\&. A CLI property takes precedence over the properties file property of the same key\&.
.RE
.PP
\fB\-c\fR \fIfn\fR, \fB\-\-conf\fR \fIfn\fR
.RS 4
Path to the property file
.RE
.PP
\fB\-f\fR \fIfn\fR, \fB\-\-file\fR \fIfn\fR
.RS 4
The optional input file argument permits entering non\-interactive bulk mode\&. If this option is not present, replica manager specific commands should be issued on the command\-line\&. The special filename hyphen (\-) can be used to read from pipes\&.
.sp
Default is to use an interactive interface reading from \fIstdin\fR\&.
.RE
.PP
\fB\-i\fR \fIfn\fR, \fB\-\-insert\fR \fIfn\fR
.RS 4
The optional input file argument permits insertion of entries into the Replica Catalog in a bulk mode, wherever supported by the underlying implementation\&. This is highly useful when interfacing with the Globus RLS as the backend, and one wants to insert millions of entries into it\&.
.sp
Each line in the file denotes one mapping of the format \fB<lfn> <pfn> [k=v [\&.\&.]]\fR
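.sp
A hypothetical bulk file might look like this (the LFNs, PFNs, and the site attribute value are made up):
.sp
.if n \{\
.RS 4
.\}
.nf
f\&.a gsiftp://host\&.example\&.org/data/f\&.a site=local
f\&.b gsiftp://host\&.example\&.org/data/f\&.b site=local
.fi
.if n \{\
.RE
.\}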
.RE
.PP
\fB\-d\fR \fIfn\fR, \fB\-\-delete\fR \fIfn\fR
.RS 4
The optional input file argument permits deletion of entries from the Replica Catalog in a bulk mode, wherever supported by the underlying implementation\&. This is highly useful when interfacing with the Globus RLS as the backend, and one wants to delete millions of entries from it\&.
.sp
Each line in the file denotes one mapping of the format: \fB<lfn> <pfn> [k=v [\&.\&.]]\fR
.RE
.PP
\fB\-p\fR \fIk=v\fR, \fB\-\-pref\fR \fIk=v\fR
.RS 4
This option may be specified multiple times\&. Each specification populates instance preferences\&. Preferences control the extent of log information, or the output format string to use in listings\&.
.sp
The keys \fBformat\fR and \fBlevel\fR are recognized as of this writing\&.
.sp
There are no defaults\&.
.RE
.PP
\fIcmd [args]\fR
.RS 4
If not in file\-driven mode, a single command can be specified with its arguments\&.
.sp
Default is to use interactive mode\&.
.RE
.PP
\fB\-V\fR, \fB\-\-version\fR
.RS 4
displays the version of Pegasus you are using\&.
.RE
.SH "RETURN VALUE"
.sp
Regular and planned program terminations will result in an exit code of 0\&. Abnormal termination will result in a non\-zero exit code\&.
.SH "FILES"
.PP
\fB$PEGASUS_HOME/etc/properties\fR
.RS 4
contains the basic properties with all configurable options\&.
.RE
.PP
\fB$HOME/\&.pegasusrc\fR
.RS 4
contains the basic properties with all configurable options\&.
.RE
.PP
\fBpegasus\&.jar\fR
.RS 4
contains all compiled Java bytecode to run the replica manager\&.
.RE
.SH "ENVIRONMENT VARIABLES"
.PP
\fBPEGASUS_HOME\fR
.RS 4
is the suggested base directory of your execution environment\&.
.RE
.PP
\fBJAVA_HOME\fR
.RS 4
should be set and point to a valid location to start the intended Java virtual machine as \fI$JAVA_HOME/bin/java\fR\&.
.RE
.PP
\fBCLASSPATH\fR
.RS 4
should be set to contain all necessary files for the execution environment\&. Please make sure that your \fICLASSPATH\fR includes pointers to the jar files required by the replica implementation\&.
.RE
.SH "PROPERTIES"
.sp
The complete branch of properties \fIpegasus\&.catalog\&.replica\fR including itself is interpreted by the prototype\&. While the \fIpegasus\&.catalog\&.replica\fR property itself steers the backend to connect to, any meaning of branched keys is dependent on the backend\&. The same key may have different meanings for different backends\&.
.PP
\fBpegasus\&.catalog\&.replica\fR
.RS 4
determines the name of the implementing class to load at run\-time\&. If the class resides in \fIorg\&.griphyn\&.common\&.catalog\&.replica\fR no prefix is required\&. Otherwise, the fully qualified class name must be specified\&.
.RE
.PP
\fBpegasus\&.catalog\&.replica\&.url\fR
.RS 4
is used by the RLS|LRC implementations\&. It determines the RLI / LRC url to use\&.
.RE
.PP
\fBpegasus\&.catalog\&.replica\&.file\fR
.RS 4
is used by the SimpleFile implementation\&. It specifies the path to the file to use as the backend for the catalog\&.
.RE
.PP
\fBpegasus\&.catalog\&.replica\&.db\&.driver\fR
.RS 4
is used by a simple RDBMS implementation\&. The string is the fully\-qualified class name of the JDBC driver used by the RDBMS implementer\&.
.RE
.PP
\fBpegasus\&.catalog\&.replica\&.db\&.url\fR
.RS 4
is the JDBC URL to use to connect to the database\&.
.RE
.PP
\fBpegasus\&.catalog\&.replica\&.db\&.user\fR
.RS 4
is used by a simple RDBMS implementation\&.
It constitutes the database user account that contains the \fIRC_LFN\fR and \fIRC_ATTR\fR tables\&.
.RE
.PP
\fBpegasus\&.catalog\&.replica\&.db\&.password\fR
.RS 4
is used by a simple RDBMS implementation\&. It constitutes the password of the database user account that contains the \fIRC_LFN\fR and \fIRC_ATTR\fR tables\&.
.RE
.PP
\fBpegasus\&.catalog\&.replica\&.chunk\&.size\fR
.RS 4
is used by \fBthe pegasus\-rc\-client\fR for the bulk insert and delete operations\&. The value determines the number of lines that are read in at a time, and worked upon together\&.
.RE
.SH "COMMANDS"
.sp
The command line tool provides a simplified shell\-wrappable interface to manage a replica catalog backend\&. The commands can either be specified in a file in bulk mode, in a pipe, or as additional arguments to the invocation\&.
.sp
Note that you must escape special characters from the shell\&.
.PP
\fBhelp\fR
.RS 4
displays a small resume of the commands\&.
.RE
.PP
\fBexit\fR, \fBquit\fR
.RS 4
should only be used in interactive mode to exit the interactive mode\&.
.RE
.PP
\fBclear\fR
.RS 4
drops all contents from the backend\&. Use with special care!
.RE
.PP
\fBinsert <lfn> <pfn> [k=v [\&...]]\fR
.RS 4
inserts a given \fBlfn\fR and \fBpfn\fR, and an optional \fBsite\fR string into the backend\&. If the site is not specified, a \fInull\fR value is inserted for the \fBsite\fR\&.
.RE
.PP
\fBdelete <lfn> <pfn> [k=v [\&...]]\fR
.RS 4
removes a triple of \fBlfn\fR, \fBpfn\fR and, optionally, \fBsite\fR from the replica backend\&. If the site was not specified, all matches of the \fBlfn\fR \fBpfn\fR pairs will be removed, regardless of the \fBsite\fR\&.
.RE
.PP
\fBlookup <lfn> [<lfn> [\&...]]\fR
.RS 4
retrieves one or more mappings for a given \fBlfn\fR from the replica backend\&.
.RE
.PP
\fBremove <lfn> [<lfn> [\&...]]\fR
.RS 4
removes all mappings for each \fBlfn\fR from the replica backend\&.
.RE
.PP
\fBlist [lfn <pattern>] [pfn <pattern>] [<attribute> <pattern>]\fR
.RS 4
obtains all matches from the replica backend\&. If no arguments were specified, all contents of the replica backend are matched\&. You must use the word \fBlfn\fR, \fBpfn\fR or \fB<attribute>\fR before specifying a pattern\&. The pattern is meaningful only to the implementation\&. Thus, a SQL implementation may choose to permit SQL wild\-card characters\&. A memory\-resident service may choose to interpret the pattern as a regular expression\&.
.RE
.PP
\fBset [var [value]]\fR
.RS 4
sets an internal variable that controls the behavior of the front\-end\&. With no arguments, all possible behaviors are displayed\&. With one argument, just the matching behavior is listed\&. With two arguments, the matching behavior is set to the value\&.
.RE
.SH "DATABASE SCHEMA"
.sp
The tables are set up as part of the PEGASUS database setup\&. The files concerned with the database have a suffix \fI\-rc\&.sql\fR\&.
.SH "AUTHORS"
.sp
Karan Vahi
.sp
Gaurang Mehta
.sp
Jens\-S\&.
Vöckler
.sp
Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu/\fR\m[]
pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-plots.10000644000175000017500000001226511757531137021443 0ustar ryngerynge'\" t
.\" Title: pegasus-plots
.\" Author: [see the "Authors" section]
.\" Generator: DocBook XSL Stylesheets v1.75.2
.\" Date: 02/28/2012
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "PEGASUS\-PLOTS" "1" "02/28/2012" "\ \&" "\ \&"
.\" -----------------------------------------------------------------
.\" * Define some portability stuff
.\" -----------------------------------------------------------------
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.\" http://bugs.debian.org/507673
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.\" -----------------------------------------------------------------
.\" * set default formatting
.\" -----------------------------------------------------------------
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
.ad l
.\" -----------------------------------------------------------------
.\" * MAIN CONTENT STARTS HERE *
.\" -----------------------------------------------------------------
.SH "NAME"
pegasus-plots \- A tool to generate graphs and charts to visualize workflow run\&.
.SH "SYNOPSIS"
.sp
.nf
\fBpegasus\-plots\fR [\fB\-h\fR|\fB\-\-help\fR] [\fB\-o\fR|\fB\-\-output\fR \fIoutdir\fR]
              [\fB\-c\fR|\fB\-\-conf\fR \fIpropfile\fR] [\fB\-m\fR|\fB\-\-max\-graph\-nodes\fR \fImax\fR]
              [\fB\-p\fR|\fB\-\-plotting\-level\fR \fIlevel\fR] [\fB\-i\fR|\fB\-\-ignore\-db\-inconsistency\fR]
              [\fB\-v\fR|\fB\-\-verbose\fR] [\fB\-q\fR|\fB\-\-quiet\fR] [\fIsubmitdir\fR]
.fi
.SH "DESCRIPTION"
.sp
pegasus\-plots generates graphs and charts to visualize a workflow run\&. It generates the workflow execution Gantt chart, the job over time chart, the time chart, and the dax and dag graphs\&. It uses the executable \*(Aqdot\*(Aq to generate graphs\&. pegasus\-plots looks for the executable in your path and generates graphs based on its availability\&.
.SH "OPTIONS"
.PP
\fB\-h\fR, \fB\-\-help\fR
.RS 4
Prints a usage summary with all the available command\-line options\&.
.RE
.PP
\fB\-o\fR \fIoutdir\fR, \fB\-\-output\fR \fIoutdir\fR
.RS 4
Writes the output to the given directory
.RE
.PP
\fB\-c\fR \fIpropfile\fR, \fB\-\-conf\fR \fIpropfile\fR
.RS 4
The properties file to use\&. This option overrides all other property files\&.
.RE
.PP
\fB\-m\fR \fImax\fR, \fB\-\-max\-graph\-nodes\fR \fImax\fR
.RS 4
Maximum limit on the number of tasks/jobs in the dax/dag up to which the graph should be generated\&. The default value is 100\&.
.RE
.PP
\fB\-p\fR \fIlevel\fR, \fB\-\-plotting\-level\fR \fIlevel\fR
.RS 4
Specifies the charts and graphs to generate\&. Valid levels are: \fBall\fR, \fBall_charts\fR, \fBall_graphs\fR, \fBdax_graph\fR, \fBdag_graph\fR, \fBgantt_chart\fR, \fBhost_chart\fR, \fBtime_chart\fR, \fBbreakdown_chart\fR\&. Default is \fBall_charts\fR\&. The output generated by \fBpegasus\-plots\fR is based on the \fIlevel\fR set:
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBall\fR: generates all charts and graphs\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBall_charts\fR: generates all charts\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBall_graphs\fR: generates all graphs\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBdax_graph\fR: generates the dax graph\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBdag_graph\fR: generates the dag graph\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBgantt_chart\fR: generates the workflow execution Gantt chart\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBhost_chart\fR: generates the host over time chart\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBtime_chart\fR: generates the time chart which shows the job instance/invocation count and runtime over time\&.
.RE
.sp
.RS 4
.ie n \{\
\h'-04'\(bu\h'+03'\c
.\}
.el \{\
.sp -1
.IP \(bu 2.3
.\}
\fBbreakdown_chart\fR: generates the breakdown chart which shows the invocation count and runtime grouped by transformation name\&.
.RE
.RE
.PP
\fB\-i\fR, \fB\-\-ignore\-db\-inconsistency\fR
.RS 4
Turn off the check for database consistency\&.
.RE
.PP
\fB\-v\fR, \fB\-\-verbose\fR
.RS 4
Increases the log level\&. If omitted, the default level will be set to WARNING\&. When this option is given, the log level is changed to INFO\&. If this option is repeated, the log level will be changed to DEBUG\&.
.RE
.PP
\fB\-q\fR, \fB\-\-quiet\fR
.RS 4
Decreases the log level\&. If omitted, the default level will be set to WARNING\&. When this option is given, the log level is changed to ERROR\&.
.RE
.SH "EXAMPLE"
.sp
Runs pegasus\-plots and writes the output to the given directory:
.sp
.if n \{\
.RS 4
.\}
.nf
pegasus\-plots \-o /scratch/plot /scratch/grid\-setup/run0001
.fi
.if n \{\
.RE
.\}
.SH "AUTHORS"
.sp
Prasanth Thomas
.sp
Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[]
pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-exitcode.10000644000175000017500000001443211757531137022104 0ustar ryngerynge'\" t
.\" Title: pegasus-exitcode
.\" Author: [see the "Authors" section]
.\" Generator: DocBook XSL Stylesheets v1.75.2
.\" Date: 02/28/2012
.\" Manual: \ \&
.\" Source: \ \&
.\" Language: English
.\"
.TH "PEGASUS\-EXITCODE" "1" "02/28/2012" "\ \&" "\ \&"
.\" -----------------------------------------------------------------
.\" * Define some portability stuff
.\" -----------------------------------------------------------------
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.\" http://bugs.debian.org/507673
.\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html
.\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.ie \n(.g .ds Aq \(aq
.el .ds Aq '
.\" -----------------------------------------------------------------
.\" * set default formatting
.\" -----------------------------------------------------------------
.\" disable hyphenation
.nh
.\" disable justification (adjust text to left margin only)
.ad l
.\" -----------------------------------------------------------------
.\" * MAIN CONTENT STARTS HERE *
.\" -----------------------------------------------------------------
.SH "NAME"
pegasus-exitcode \- Checks the stdout/stderr files of a workflow job for any indication that an error occurred in the job\&. This script is intended to be invoked automatically by DAGMan as the POST script of a job\&.
.SH "SYNOPSIS" .sp .nf \fBpegasus\-exitcode\fR [\fB\-h\fR][\fB\-t\fR \fIn\fR][\fB\-r\fR \fIrv\fR][\fB\-n\fR] \fIjob\&.out\fR .fi .SH "DESCRIPTION" .sp \fBpegasus\-exitcode\fR is a utility that examines the STDOUT of a job to determine if the job failed, and renames the STDOUT and STDERR files of a job to preserve them in case the job is retried\&. .sp Pegasus uses \fBpegasus\-exitcode\fR as the DAGMan postscript for all jobs submitted via Globus GRAM\&. This tool exists as a workaround to a known problem with Globus where the exitcodes of GRAM jobs are not returned\&. This is a problem because Pegasus uses the exitcode of a job to determine if the job failed or not\&. .sp In order to get around the exitcode problem, Pegasus wraps all GRAM jobs with Kickstart, which records the exitcode of the job in an XML invocation record, which it writes to the job\(cqs STDOUT\&. The STDOUT is transferred from the execution host back to the submit host when the job terminates\&. After the job terminates, DAGMan runs the job\(cqs postscript, which Pegasus sets to be \fBpegasus\-exitcode\fR\&. \fBpegasus\-exitcode\fR looks at the invocation record generated by kickstart to see if the job succeeded or failed\&. If the invocation record indicates a failure, then \fBpegasus\-exitcode\fR returns a non\-zero result, which indicates to DAGMan that the job has failed\&. If the invocation record indicates that the job succeeded, then \fBpegasus\-exitcode\fR returns 0, which tells DAGMan that the job succeeded\&. .sp \fBpegasus\-exitcode\fR performs several checks to determine whether a job failed or not\&. These checks include: .sp .RS 4 .ie n \{\ \h'-04' 1.\h'+01'\c .\} .el \{\ .sp -1 .IP " 1." 4.2 .\} Is STDOUT empty? If it is empty, then the job failed\&. .RE .sp .RS 4 .ie n \{\ \h'-04' 2.\h'+01'\c .\} .el \{\ .sp -1 .IP " 2." 4.2 .\} Are there any tags with a non\-zero value? If there are, then the job failed\&. Note that, if this is a clustered job, there could be multiple tags, one for each task\&. If any of them are non\-zero, then the job failed\&. .RE .sp .RS 4 .ie n \{\ \h'-04' 3.\h'+01'\c .\} .el \{\ .sp -1 .IP " 3." 4.2 .\} Is there at least one tag with a zero value? There must be at least one successful invocation or the job has failed\&. .RE .sp In addition, \fBpegasus\-exitcode\fR allows the caller to specify the exitcode returned by Condor using the \fB\-\-return\fR argument\&. This can be passed to \fBpegasus\-exitcode\fR in a DAGMan post script by using the $RETURN variable\&. If this value is non\-zero, then \fBpegasus\-exitcode\fR returns a non\-zero result before performing any other checks\&. For GRAM jobs, the value of $RETURN will always be 0 regardless of whether the job failed or not\&. .sp Also, \fBpegasus\-exitcode\fR allows the caller to specify the number of successful tasks it should see using the \fB\-\-tasks\fR argument\&. If \fBpegasus\-exitcode\fR does not see N successful tasks, where N is set by \fB\-\-tasks\fR, then it will return a non\-zero result\&. The default value is 1\&. This can be used to detect failures in clustered jobs where, for any number of reasons, invocation records do not get generated for all the tasks in the clustered job\&. .sp In addition to checking the success/failure of a job, \fBpegasus\-exitcode\fR also renames the STDOUT and STDERR files of the job so that if the job is retried, the STDOUT and STDERR of the previous run are not lost\&. It does this by appending a sequence number to the end of the files\&. 
For example, if the STDOUT file is called "job\&.out", then the first time the job is run \fBpegasus\-exitcode\fR will rename the file to "job\&.out\&.000"\&. If the job is run again, then \fBpegasus\-exitcode\fR sees that "job\&.out\&.000" already exists and renames the file to "job\&.out\&.001"\&. It will continue to rename the file by incrementing the sequence number every time the job is executed\&. .SH "OPTIONS" .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints a usage summary with all the available command\-line options\&. .RE .PP \fB\-t\fR \fIn\fR, \fB\-\-tasks\fR \fIn\fR .RS 4 Number of tasks expected\&. If less than \fIn\fR tasks succeeded, then \fBpegasus\-exitcode\fR will fail with a non\-zero return value\&. This is used in cases where we may not get a Kickstart invocation record for some tasks\&. Normally Seqexec will detect failed Kickstart invocations and fail accordingly\&. .RE .PP \fB\-r\fR \fIrv\fR, \fB\-\-return\fR \fIrv\fR .RS 4 Return value reported by DAGMan\&. This can be specified in the DAG using the $RETURN variable\&. If this is non\-zero, then \fBpegasus\-exitcode\fR immediately fails with a non\-zero return value itself\&. .RE .PP \fB\-n\fR, \fB\-\-no\-rename\fR .RS 4 Don\(cqt rename \fIjob\&.out\fR and \fIjob\&.err\fR to \fI\&.out\&.XXX\fR and \fI\&.err\&.XXX\fR\&. This option is used primarily for testing\&. .RE .SH "AUTHORS" .sp Gideon Juve .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-config.10000644000175000017500000001231111757531137021539 0ustar ryngerynge'\" t .\" Title: pegasus-config .\" Author: [see the "Author" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/29/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-CONFIG" "1" "02/29/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-config \- The authority for where parts of the Pegasus system exist on the filesystem\&. pegasus\-config can be used to find libraries such as the DAX generators\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-config\fR [\fB\-h\fR] [\fB\-\-help\fR] [\fB\-V\fR] [\fB\-\-version\fR] [\fB\-\-noeoln\fR] [\fB\-\-perl\-dump\fR] [\fB\-\-perl\-hash\fR] [\fB\-\-python\-dump\fR] [\fB\-\-sh\-dump\fR] [\fB\-\-bin\fR] [\fB\-\-conf\fR] [\fB\-\-java\fR] [\fB\-\-perl\fR] [\fB\-\-python\fR] [\fB\-\-python\-externals\fR] [\fB\-\-schema\fR] [\fB\-\-classpath\fR] [\fB\-\-local\-site\fR] [\fB\-\-full\-local\fR] .fi .SH "DESCRIPTION" .sp \fBpegasus\-config\fR is used to find locations of Pegasus system components\&. The tool is used internally in Pegasus and by users who need to find paths for DAX generator libraries and schemas\&. 
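.sp For instance (an illustrative invocation; the printed location depends on the local installation), the directory containing the Pegasus binaries can be queried with: .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-config \-\-bin .fi .if n \{\ .RE .\} 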
.SH "OPTIONS" .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints help and exits\&. .RE .PP \fB\-V\fR, \fB\-\-version\fR .RS 4 Prints Pegasus version information .RE .PP \fB\-\-perl\-dump\fR .RS 4 Dumps all settings in perl format as separate variables\&. .RE .PP \fB\-\-perl\-hash\fR .RS 4 Dumps all settings in perl format as single perl hash\&. .RE .PP \fB\-\-python\-dump\fR .RS 4 Dumps all settings in python format\&. .RE .PP \fB\-\-sh\-dump\fR .RS 4 Dumps all settings in shell format\&. .RE .PP \fB\-\-bin\fR .RS 4 Print the directory containing Pegasus binaries\&. .RE .PP \fB\-\-conf\fR .RS 4 Print the directory containing configuration files\&. .RE .PP \fB\-\-java\fR .RS 4 Print the directory containing the jars\&. .RE .PP \fB\-\-perl\fR .RS 4 Print the directory to include into your PERL5LIB\&. .RE .PP \fB\-\-python\fR .RS 4 Print the directory to include into your PYTHONLIB\&. .RE .PP \fB\-\-python\-externals\fR .RS 4 Print the directory to the external Python libraries\&. .RE .PP \fB\-\-schema\fR .RS 4 Print the directory containing schemas\&. .RE .PP \fB\-\-classpath\fR .RS 4 Builds a classpath containing the Pegasus jars\&. .RE .PP \fB\-\-noeoln\fR .RS 4 Do not produce a end\-of\-line after output\&. This is useful when being called from non\-shell backticks in scripts\&. However, order is important for this option: If you intend to use it, specify it first\&. .RE .PP \fB\-\-local\-site\fR [d] .RS 4 Create a site catalog entry for site "local"\&. This is only an XML snippet without root element nor XML headers\&. The optional argument "d" points to the mount point to use\&. If not specified, defaults to the user\(cqs $HOME directory\&. .RE .PP \fB\-\-full\-local\fR [d] .RS 4 Create a complete site catalog with only site "local"\&. The an XML snippet without root element nor XML headers\&. The optional argument "d" points to the mount point to use\&. If not specified, defaults to the user\(cqs $HOME directory\&. .RE .SH "EXAMPLE" .sp To set the PYTHONPATH variable in your shell for using the Python DAX API: .sp .if n \{\ .RS 4 .\} .nf export PYTHONPATH=`pegasus\-config \-\-python` .fi .if n \{\ .RE .\} .sp To set the same path inside Python: .sp .if n \{\ .RS 4 .\} .nf config = subprocess\&.Popen("pegasus\-config \-\-python\-dump", stdout=subprocess\&.PIPE, shell=True)\&.communicate()[0] exec config .fi .if n \{\ .RE .\} .sp To set the PERL5LIB variable in your shell for using the Perl DAX API: .sp .if n \{\ .RS 4 .\} .nf export PERL5LIB=`pegasus\-config \-\-perl` .fi .if n \{\ .RE .\} .sp To set the same path inside Perl: .sp .if n \{\ .RS 4 .\} .nf eval `pegasus\-config \-\-perl\-dump`; die("Unable to eval pegasus\-config output: $@") if $@; .fi .if n \{\ .RE .\} .sp will set variables a number of lexically local\-scoped \fBmy\fR variables with prefix "pegasus_" and expand Perl\(cqs search path for this script\&. 
.sp Alternatively, you can fail early and collect all Pegasus\-related variables into a single global %pegasus variable for convenience: .sp .if n \{\ .RS 4 .\} .nf BEGIN { eval `pegasus\-config \-\-perl\-hash`; die("Unable to eval pegasus\-config output: $@") if $@; } .fi .if n \{\ .RE .\} .SH "AUTHOR" .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-monitord.10000644000175000017500000003761411757531137022140 0ustar ryngerynge'\" t .\" Title: pegasus-monitord .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-MONITORD" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-monitord \- tracks a workflow\(cqs progress, mining information .SH "SYNOPSIS" .sp .nf \fBpegasus\-monitord\fR [\fB\-\-help\fR|\fB\-help\fR] [\fB\-\-verbose\fR|\fB\-v\fR] [\fB\-\-adjust\fR|\fB\-a\fR \fIi\fR] [\fB\-\-foreground\fR|\fB\-N\fR] [\fB\-\-no\-daemon\fR|\fB\-n\fR] [\fB\-\-job\fR|\fB\-j\fR \fIjobstate\&.log file\fR] [\fB\-\-log\fR|\fB\-l\fR \fIlogfile\fR] [\fB\-\-conf\fR \fIproperties file\fR] [\fB\-\-no\-recursive\fR] [\fB\-\-no\-database\fR | \fB\-\-no\-events\fR] [\fB\-\-replay\fR|\fB\-r\fR] [\fB\-\-no\-notifications\fR] [\fB\-\-notifications\-max\fR \fImax_notifications\fR] [\fB\-\-notifications\-timeout\fR \fItimeout\fR] [\fB\-\-sim\fR|\fB\-s\fR \fImillisleep\fR] [\fB\-\-db\-stats\fR] [\fB\-\-skip\-stdout\fR] [\fB\-\-force\fR|\fB\-f\fR] [\fB\-\-socket\fR] [\fB\-\-output\-dir\fR | \fB\-o\fR \fIdir\fR] [\fB\-\-dest\fR|\fB\-d\fR \fIPATH\fR or \fIURL\fR] [\fB\-\-encoding\fR|\fB\-e\fR \fIbp\fR | \fIbson\fR] \fIDAGMan output file\fR .fi .SH "DESCRIPTION" .sp This program follows a workflow, parsing the output of DAGMAN\(cqs dagman\&.out file\&. In addition to generating the jobstate\&.log file, \fBpegasus\-monitord\fR can also be used to mine information from the workflow dag file and jobs\*(Aq submit and output files, and either populate a database or write a NetLogger events file with that information\&. \fBpegasus\-monitord\fR can also perform notifications when tracking a workflow\(cqs progress in real\-time\&. .SH "OPTIONS" .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints a usage summary with all the available command\-line options\&. .RE .PP \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Sets the log level for \fBpegasus\-monitord\fR\&. If omitted, the default \fIlevel\fR will be set to \fIWARNING\fR\&. When this option is given, the log level is changed to \fIINFO\fR\&. If this option is repeated, the log level will be changed to \fIDEBUG\fR\&. 
.sp The log level in \fBpegasus\-monitord\fR can also be adjusted interactively, by sending the \fIUSR1\fR and \fIUSR2\fR signals to the process, respectively for incrementing and decrementing the log level\&. .RE .PP \fB\-a\fR \fIi\fR, \fB\-\-adjust\fR \fIi\fR .RS 4 For adjusting time zone differences by \fIi\fR seconds, default is 0\&. .RE .PP \fB\-N\fR, \fB\-\-foreground\fR .RS 4 Do not daemonize \fBpegasus\-monitord\fR, but otherwise go through the motions as if daemonized\&. This mode is intended for running under Condor\&. .RE .PP \fB\-n\fR, \fB\-\-no\-daemon\fR .RS 4 Do not daemonize \fBpegasus\-monitord\fR, keep it in the foreground (for debugging)\&. .RE .PP \fB\-j\fR \fIjobstate\&.log file\fR, \fB\-\-job\fR \fIjobstate\&.log file\fR .RS 4 Alternative location for the \fIjobstate\&.log\fR file\&. The default is to write a \fIjobstate\&.log\fR in the workflow directory\&. An absolute file name should only be used if the workflow does not have any sub\-workflows, as each sub\-workflow will generate its own \fIjobstate\&.log\fR file\&. If an alternative, non\-absolute, filename is given with this option, \fBpegasus\-monitord\fR will create one file in each workflow (and sub\-workflow) directory with the filename provided by the user with this option\&. If an absolute filename is provided and sub\-workflows are found, a warning message will be printed and \fBpegasus\-monitord\fR will not track any sub\-workflows\&. .RE .PP \fB\-\-log\fR \fIlogfile\fR, \fB\-\-log\-file\fR \fIlogfile\fR .RS 4 Specifies an alternative \fIlogfile\fR to use instead of the \fImonitord\&.log\fR file in the main workflow directory\&. Unlike the \fIjobstate\&.log\fR file above, \fBpegasus\-monitord\fR only generates one \fIlogfile\fR per execution (and not one per workflow and sub\-workflow it tracks)\&. .RE .PP \fB\-\-conf\fR \fIproperties_file\fR .RS 4 is an alternative file containing properties in the \fIkey=value\fR format, and allows users to override values read from the \fIbraindump\&.txt\fR file\&. This option has precedence over the properties file specified in the \fIbraindump\&.txt\fR file\&. Please note that these properties will apply not only to the main workflow, but also to all sub\-workflows found\&. .RE .PP \fB\-\-no\-recursive\fR .RS 4 This option prevents \fBpegasus\-monitord\fR from automatically following any sub\-workflows that are found\&. .RE .PP \fB\-\-nodatabase\fR, \fB\-\-no\-database\fR, \fB\-\-no\-events\fR .RS 4 Turns off generating events (when this option is given, \fBpegasus\-monitord\fR will only generate the jobstate\&.log file)\&. The default is to automatically log information to a SQLite database (see the \fB\-\-dest\fR option below for more details)\&. This option overrides any parameter given by the \fB\-\-dest\fR option\&. .RE .PP \fB\-r\fR, \fB\-\-replay\fR .RS 4 This option is used to replay the output of an already finished workflow\&. It should only be used after the workflow is finished (not necessarily successfully)\&. If a \fIjobstate\&.log\fR file is found, it will be rotated\&. However, when using a database, all previous references to that workflow (and all its sub\-workflows) will be erased from it\&. When outputting to a bp file, the file will be deleted\&. When running in replay mode, \fBpegasus\-monitord\fR will always run with the \fB\-\-no\-daemon\fR option, and any errors will be output directly to the terminal\&. Also, \fBpegasus\-monitord\fR will not process any notifications while in replay mode\&. 
.RE .PP \fB\-\-no\-notifications\fR .RS 4 This option disables notifications completely, making \fBpegasus\-monitord\fR ignore all the \&.notify files for all workflows it tracks\&. .RE .PP \fB\-\-notifications\-max\fR \fImax_notifications\fR .RS 4 This option sets the maximum number of concurrent notifications that \fBpegasus\-monitord\fR will start\&. When the \fImax_notifications\fR limit is reached, \fBpegasus\-monitord\fR will queue notifications and wait for a pending notification script to finish before starting a new one\&. If \fImax_notifications\fR is set to 0, notifications will be disabled\&. .RE .PP \fB\-\-notifications\-timeout\fR \fItimeout\fR .RS 4 Normally, \fBpegasus\-monitord\fR will start a notification script and wait indefinitely for it to finish\&. This option allows users to set up a maximum \fItimeout\fR that \fBpegasus\-monitord\fR will wait for a notification script to finish before terminating it\&. If notification scripts do not finish in a reasonable amount of time, it can cause other notification scripts to be queued due to the maximum number of concurrent scripts allowed by \fBpegasus\-monitord\fR\&. Additionally, until all notification scripts finish, \fBpegasus\-monitord\fR will not terminate\&. .RE .PP \fB\-s\fR \fImillisleep\fR, \fB\-\-sim\fR \fImillisleep\fR .RS 4 This option simulates delays between reads, by sleeping \fImillisleep\fR milliseconds\&. This option is mainly used by developers\&. .RE .PP \fB\-\-db\-stats\fR .RS 4 This option causes the database module to collect and print database statistics at the end of the execution\&. It has no effect if the \fB\-\-no\-database\fR option is given\&. .RE .PP \fB\-\-skip\-stdout\fR .RS 4 This option causes \fBpegasus\-monitord\fR not to populate jobs\*(Aq stdout and stderr into the BP file or the Stampede database\&. It should be used to avoid increasing the database size substantially in cases where jobs are very verbose in their output\&. .RE .PP \fB\-f\fR, \fB\-\-force\fR .RS 4 This option causes \fBpegasus\-monitord\fR to skip checking for another instance of itself already running on the same workflow directory\&. The default behavior prevents two or more \fBpegasus\-monitord\fR instances from starting and running simultaneously (which would cause the bp file and database to be left in an unstable state)\&. This option should only be used when the user knows the previous instance of \fBpegasus\-monitord\fR is \fBNOT\fR running anymore\&. .RE .PP \fB\-\-socket\fR .RS 4 This option causes \fBpegasus\-monitord\fR to start a socket interface that can be used for advanced debugging\&. The port number for connecting to \fBpegasus\-monitord\fR can be found in the \fImonitord\&.sock\fR file in the workflow directory (the file is deleted when \fBpegasus\-monitord\fR finishes)\&. If not already started, the socket interface is also created when \fBpegasus\-monitord\fR receives a \fIUSR1\fR signal\&. .RE .PP \fB\-o\fR \fIdir\fR, \fB\-\-output\-dir\fR \fIdir\fR .RS 4 When this option is given, \fBpegasus\-monitord\fR will create all its output files in the directory specified by \fIdir\fR\&. This option is useful for allowing a user to debug a workflow in a directory where the user does not have write permissions\&. In this case, all files generated by \fBpegasus\-monitord\fR will have the workflow \fIwf_uuid\fR as a prefix so that files from multiple sub\-workflows can be placed in the same directory\&. This option is mainly used by \fBpegasus\-analyzer\fR\&. 
It is important to note that the location for the output BP file or database is not changed by this option and should be set via the \fB\-\-dest\fR option\&. .RE .PP \fB\-d\fR \fIURL\fR \fIparams\fR, \fB\-\-dest\fR \fIURL\fR \fIparams\fR .RS 4 This option allows users to specify the destination for the log events generated by \fBpegasus\-monitord\fR\&. If this option is omitted, \fBpegasus\-monitord\fR will create a SQLite database in the workflow\(cqs run directory with the same name as the workflow, but with a \fI\&.stampede\&.db\fR suffix\&. For an \fIempty\fR scheme, \fIparams\fR are a file path with \fB\-\fR meaning standard output\&. For a \fIx\-tcp\fR scheme, \fIparams\fR are \fITCP_host[:port=14380]\fR\&. For a database scheme, \fIparams\fR are a \fISQLAlchemy engine URL\fR with a database connection string that can be used to specify different database engines\&. Please see the examples section below for more information on how to use this option\&. Note that when using a database engine other than \fBsqlite\fR, the necessary Python database drivers will need to be installed\&. .RE .PP \fB\-e\fR \fIencoding\fR, \fB\-\-encoding\fR \fIencoding\fR .RS 4 This option specifies how to encode log events\&. The two available possibilities are \fIbp\fR and \fIbson\fR\&. If this option is not specified, events will be generated in the \fIbp\fR format\&. .RE .PP \fIDAGMan_output_file\fR .RS 4 The \fIDAGMan_output_file\fR is the only required command\-line argument to \fBpegasus\-monitord\fR and must have the \fI\&.dag\&.dagman\&.out\fR extension\&. .RE .SH "RETURN VALUE" .sp If the workflow could be tracked successfully, \fBpegasus\-monitord\fR returns with an exit code of 0\&. However, in case of error, a non\-zero exit code indicates problems\&. In that case, the \fIlogfile\fR should contain additional information about the error condition\&. .SH "ENVIRONMENT VARIABLES" .sp \fBpegasus\-monitord\fR does not require that any environmental variables be set\&. It locates its required Python modules based on its own location, and therefore should not be moved outside of Pegasus\*(Aq bin directory\&. .SH "EXAMPLES" .sp Usually, \fBpegasus\-monitord\fR is invoked automatically by \fBpegasus\-run\fR and tracks the workflow progress in real\-time, producing the \fIjobstate\&.log\fR file and a corresponding SQLite database\&. When a workflow fails, and is re\-submitted with a rescue DAG, \fBpegasus\-monitord\fR will automatically pick up from where it left off previously and continue the \fIjobstate\&.log\fR file and the database\&. .sp If users need to create the \fIjobstate\&.log\fR file after a workflow is already finished, the \fB\-\-replay | \-r\fR option should be used when running \fBpegasus\-monitord\fR manually\&. For example: .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-monitord \-r diamond\-0\&.dag\&.dagman\&.out .fi .if n \{\ .RE .\} .sp will launch \fBpegasus\-monitord\fR in replay mode\&. In this case, if a \fIjobstate\&.log\fR file already exists, it will be rotated and a new file will be created\&. If a \fIdiamond\-0\&.stampede\&.db\fR SQLite database already exists, \fBpegasus\-monitord\fR will purge all references to the workflow id specified in the \fIbraindump\&.txt\fR file, including all sub\-workflows associated with that workflow id\&. .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-monitord \-r \-\-no\-database diamond\-0\&.dag\&.dagman\&.out .fi .if n \{\ .RE .\} .sp will do the same thing, but without generating any log events\&. 
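.sp If the run directory is not writable, the same replay can be combined with the \fB\-\-output\-dir\fR option described above, placing the files \fBpegasus\-monitord\fR generates (though not the database, see \fB\-\-dest\fR) in a separate directory\&. An illustrative invocation, where the directory name is hypothetical: .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-monitord \-r \-o /tmp/monitord\&.out diamond\-0\&.dag\&.dagman\&.out .fi .if n \{\ .RE .\} 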
.sp .if n \{\ .RS 4 .\} .nf $ pegasus\-monitord \-r \-\-dest `pwd`/diamond\-0\&.bp diamond\-0\&.dag\&.dagman\&.out .fi .if n \{\ .RE .\} .sp will create the file \fIdiamond\-0\&.bp\fR in the current directory, containing NetLogger events with all the workflow data\&. This is in addition to the \fIjobstate\&.log\fR file\&. .sp For using a database, users should provide a database connection string in the format of: .sp .if n \{\ .RS 4 .\} .nf dialect://username:password@host:port/database .fi .if n \{\ .RE .\} .sp Where \fIdialect\fR is the name of the underlying driver (\fImysql\fR, \fIsqlite\fR, \fIoracle\fR, \fIpostgres\fR) and \fIdatabase\fR is the name of the database running on the server at the \fIhost\fR computer\&. .sp If users want to use a different \fISQLite\fR database, \fBpegasus\-monitord\fR requires them to specify the absolute path of the alternate file\&. For example: .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-monitord \-r \-\-dest sqlite:////home/user/diamond_database\&.db diamond\-0\&.dag\&.dagman\&.out .fi .if n \{\ .RE .\} .sp Detailed documentation for all of the supported drivers is available at: \m[blue]\fBhttp://www\&.sqlalchemy\&.org/docs/05/reference/dialects/index\&.html\fR\m[] .sp Additional per\-database options that can be embedded in the connection string are outlined there\&. .sp It is important to note that one will need to have the appropriate database interface library installed; \fISQLAlchemy\fR wraps, for instance, the MySQL interface library, but does not provide a \fIMySQL\fR driver itself\&. The \fBPegasus\fR distribution includes both \fBSQLAlchemy\fR and the \fBSQLite\fR Python driver\&. .sp As a final note, unlike with \fISQLite\fR databases, when using \fBSQLAlchemy\fR with other database servers, e\&.g\&. \fIMySQL\fR or \fIPostgres\fR, the target database needs to exist\&. So, if a user wanted to connect to: .sp .if n \{\ .RS 4 .\} .nf mysql://pegasus\-user:supersecret@localhost:localport/diamond .fi .if n \{\ .RE .\} .sp the user would first need to connect to the server at \fIlocalhost\fR and issue the appropriate create database command before running \fBpegasus\-monitord\fR, as \fBSQLAlchemy\fR will take care of creating the tables and indexes if they do not already exist\&. .SH "SEE ALSO" .sp pegasus\-run(1) .SH "AUTHORS" .sp Gaurang Mehta .sp Fabio Silva .sp Karan Vahi .sp Jens\-S\&. 
Vöckler .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-status.10000644000175000017500000002374411757531137021625 0ustar ryngerynge'\" t .\" Title: pegasus-status .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-STATUS" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-status \- Pegasus workflow\- and run\-time status .SH "SYNOPSIS" .sp .nf \fBpegasus\-status\fR [\fB\-h\fR|\fB\-\-help\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fB\-v\fR|\fB\-\-verbose\fR] [\fB\-d\fR|\fB\-\-debug\fR] [\fB\-w\fR|\fB\-\-watch\fR [\fIs\fR]] [\fB\-L\fR|\fB\-\-[no]legend\fR] [\fB\-c\fR|\fB\-\-[no]color\fR] [\fB\-U\fR|\fB\-\-[no]utf8\fR] [\fB\-Q\fR|\fB\-\-[no]queue\fR] [\fB\-i\fR|\fB\-\-[no]idle\fR] [\fB\-\-[no]held\fR] [\fB\-\-[no]heavy\fR] [\fB\-S\fR|\fB\-\-[no]success\fR] [\fB\-j\fR|\fB\-\-jobtype\fR \fIjt\fR] [\fB\-s\fR|\fB\-\-site\fR \fIsid\fR] [\fB\-u\fR|\fB\-\-user\fR \fIname\fR] { [\fB\-l\fR|\fB\-\-long\fR] | [\fB\-r\fR|\fB\-\-rows\fR] } [\fIrundir\fR] .fi .SH "DESCRIPTION" .sp \fBpegasus\-status\fR shows the current state of the Condor Q and a workflow, depending on settings\&. If no valid run directory could be determined, including the current directory, \fBpegasus\-status\fR will show all jobs of the current user and no workflows\&. If a run directory was specified, or the current directory is a valid run directory, status about the workflow will also be shown\&. .sp Many options will modify the behavior of this program, as will a proper UTF\-8 capable terminal, watch mode, the presence of jobs in the queue, progress in the workflow directory, etc\&. .SH "OPTIONS" .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints a concise help and exits\&. .RE .PP \fB\-V\fR, \fB\-\-version\fR .RS 4 Prints the version information and exits\&. .RE .PP \fB\-w\fR [\fIsec\fR], \fB\-\-watch\fR [\fIsec\fR] .RS 4 This option enables the \fIwatch mode\fR\&. In watch mode, the program repeatedly polls the status sources and shows them in an updating window\&. The optional argument \fIsec\fR to this option determines how often these sources are polled\&. .sp We \fIstrongly\fR recommend not setting this interval too low, as frequent polling will degrade the scheduler performance and increase the host load\&. In watch mode, the terminal size is the limiting factor, and parts of the output may be truncated to fit onto the given terminal\&. .sp Watch mode is disabled by default\&. The \fIsec\fR argument defaults to 60 seconds\&. 
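.sp For example, to refresh the status display every five minutes (an illustrative invocation, mirroring the EXAMPLE section below): .sp .if n \{\ .RS 4 .\} .nf pegasus\-status \-\-watch 300 .fi .if n \{\ .RE .\} 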
.RE .PP \fB\-L\fR, \fB\-\-legend\fR, \fB\-\-nolegend\fR .RS 4 This option shows a legend explaining the columns in the output, or turns off legends\&. .sp By default, legends are turned off to save terminal real estate\&. .RE .PP \fB\-c\fR, \fB\-\-color\fR, \fB\-\-nocolor\fR .RS 4 This option turns on (or off) ANSI color escape sequences in the output\&. The single letter option can only switch on colors\&. .sp By default, colors are turned off, as they will not display well on a terminal with a black background\&. .RE .PP \fB\-U\fR, \fB\-\-utf8\fR, \fB\-\-noutf8\fR .RS 4 This option turns on (or off) the output of Unicode box drawing characters as UTF\-8 encoded sequences\&. The single letter option can only turn on box drawing characters\&. .sp The defaults for this setting depend on the \fILANG\fR environment variable\&. If the variable contains a value ending in something indicating UTF\-8 capabilities, the option is turned on by default\&. It is off otherwise\&. .RE .PP \fB\-Q\fR, \fB\-\-queue\fR, \fB\-\-noqueue\fR .RS 4 This option turns on (or off) the output from parsing Condor Q\&. .sp By default, Condor Q will be parsed for jobs of the current user\&. If a workflow run directory is specified, it will furthermore be limited to jobs only belonging to the workflow\&. .RE .PP \fB\-v\fR, \fB\-\-verbose\fR .RS 4 This option increases the expert level, showing more information about the condor_q state\&. Being an incremental option, two increases are supported\&. .sp Additionally, the signals \fISIGUSR1\fR and \fISIGUSR2\fR will increase and decrease the expert level respectively during run\-time\&. .sp By default, the simplest queue view is enabled\&. .RE .PP \fB\-d\fR, \fB\-\-debug\fR .RS 4 This is an internal debugging tool and should not be used outside the development team\&. As incremental option, it will show Pegasus\-specific ClassAd tuples for each job, more in the second level\&. .sp By default, debug mode is off\&. .RE .PP \fB\-u\fR \fIname\fR, \fB\-\-user\fR \fIname\fR .RS 4 This option permits querying the queue for a different user than the current one\&. This may be of interest, if you are debugging the workflow of another user\&. .sp By default, the current user is assumed\&. .RE .PP \fB\-i\fR, \fB\-\-idle\fR, \fB\-\-noidle\fR .RS 4 With this option, jobs in Condor state \fIidle\fR are omitted from the queue output\&. .sp By default, \fIidle\fR jobs are shown\&. .RE .PP \fB\-\-held\fR, \fB\-\-noheld\fR .RS 4 This option enables or disables showing of the reason a job entered Condor\(cqs \fIheld\fR state\&. The reason will somewhat destroy the screen layout\&. .sp By default, the reason is shown\&. .RE .PP \fB\-\-heavy\fR, \fB\-\-noheavy\fR .RS 4 If the terminal is UTF\-8 capable, and output is to a terminal, this option decides whether to use heavyweight or lightweight line drawing characters\&. .sp By default, heavy lines connect the jobs to workflows\&. .RE .PP \fB\-j\fR \fIjt\fR, \fB\-\-jobtype\fR \fIjt\fR .RS 4 This option filters the Condor jobs shown only to the Pegasus jobtypes given as argument or arguments to this option\&. It is a multi\-option, and may be specified multiple times, and may use comma\-separated lists\&. Use this option with an argument \fIhelp\fR to see all valid and recognized jobtypes\&. .sp By default, all Pegasus jobtypes are shown\&. .RE .PP \fB\-s\fR \fIsite\fR, \fB\-\-site\fR \fIsite\fR .RS 4 This option limits the Condor jobs shown to only those pertaining to the (remote) site \fIsite\fR\&. 
This is a multi\-option, and may be specified multiple times, and may use comma\-separated lists\&. .sp By default, all sites are shown\&. .RE .PP \fB\-l\fR, \fB\-\-long\fR .RS 4 This option will show one line per sub\-DAG, including one line for the workflow\&. If there is only a single DAG pertaining to the \fIrundir\fR, only the total will be shown\&. .sp This option is mutually exclusive with the \fB\-\-rows\fR option\&. If both are specified, the \fB\-\-long\fR option takes precedence\&. .sp By default, only DAG totals (sums) are shown\&. .RE .PP \fB\-r\fR, \fB\-\-rows\fR, \fB\-\-norows\fR .RS 4 This option shows the workflow summary statistics in rows instead of columns\&. This option is useful for sending the statistics in email and later viewing them in a proportional font\&. .sp This option is mutually exclusive with the \fB\-\-long\fR option\&. If both are specified, the \fB\-\-long\fR option takes precedence\&. .sp By default, the summary is shown in columns\&. .RE .PP \fB\-S\fR, \fB\-\-success\fR, \fB\-\-nosuccess\fR .RS 4 This option modifies the previous \fB\-\-long\fR option\&. It will omit (or show) fully successful sub\-DAGs from the output\&. .sp By default, all DAGs are shown\&. .RE .PP \fIrundir\fR .RS 4 This argument shows statistics about the given DAG that runs in \fIrundir\fR\&. To gather proper statistics, \fBpegasus\-status\fR needs to traverse the directory and all sub\-directories\&. This can become an expensive operation on shared filesystems\&. .sp By default, the \fIrundir\fR is assumed to be the current directory\&. If the current directory is not a valid \fIrundir\fR, no DAG statistics will be shown\&. .RE .SH "RETURN VALUE" .sp \fBpegasus\-status\fR will typically return success in regular mode, and the termination signal in watch mode\&. Abnormal behavior will result in a non\-zero exit code\&. .SH "EXAMPLE" .PP \fBpegasus\-status\fR .RS 4 This invocation will parse the Condor Q for the current user and show all her jobs\&. Additionally, if the current directory is a valid Pegasus workflow directory, totals about the DAG in that directory are displayed\&. .RE .PP \fBpegasus\-status \-l rundir\fR .RS 4 As above, but providing a specific Pegasus workflow directory in argument \fIrundir\fR and requesting to itemize sub\-DAGs\&. .RE .PP \fBpegasus\-status \-j help\fR .RS 4 This option will show all permissible job types and exit\&. .RE .PP \fBpegasus\-status \-vvw 300 \-Ll\fR .RS 4 This invocation will parse the queue, print it in high\-expert mode, show legends, itemize DAG statistics of the current working directory, and redraw the terminal every five minutes with updated statistics\&. .RE .SH "RESTRICTIONS" .sp Currently, \fBpegasus\-status\fR only supports a single (optional) run directory\&. If you want to watch multiple run directories, I suggest opening multiple terminals and watching them separately\&. If that is not an option, or deemed too expensive, you can ask \fIpegasus\-support at isi dot edu\fR to extend the program\&. .SH "SEE ALSO" .sp condor_q(1), pegasus\-statistics(1) .SH "AUTHORS" .sp Jens\-S\&. 
Vöckler .sp Gaurang Mehta .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu/\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-dax-validator.10000644000175000017500000000721511757531137023040 0ustar ryngerynge'\" t .\" Title: pegasus-dax-validator .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-DAX\-VALIDA" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-dax-validator \- determines whether a given DAX file is valid\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-dax\-validator\fR \fIdaxfile\fR [\fIverbose\fR] .fi .SH "DESCRIPTION" .sp The \fBpegasus\-dax\-validator\fR is a simple application that determines whether a given DAX file is valid XML\&. For this, it parses the file with as many XML validity checks as the Apache Xerces XML parser framework supports\&. .SH "OPTIONS" .PP \fIdaxfile\fR .RS 4 The location of the file containing the DAX\&. .RE .PP \fIverbose\fR .RS 4 If any kind of second argument was specified, not limited to the string \fIverbose\fR, the verbose output mode is switched on\&. .RE .SH "RETURN VALUE" .sp If the DAX was parsed successfully, or only \fIwarnings\fR were issued, the exit code is 0\&. Any \fIerror\fR or \fIfatal error\fR will result in an exit code of 1\&. .sp Additionally, summary statistics with counts of warnings, errors, and fatal errors will be displayed\&. .SH "EXAMPLE" .sp The following shows the parsing of a DAX file that uses the wrong kind of value for certain enumerations\&. The output shows the errors with the respective line number and column number of the input DAX file, so that one can find and fix them more easily\&. (The lines in the example were broken to fit the manpage format\&.) .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-dax\-validator bd\&.dax ERROR in line 14, col 110: cvc\-enumeration\-valid: Value \*(Aqi386\*(Aq is not facet\-valid with respect to enumeration \*(Aq[x86, x86_64, ppc, ppc_64, ia64, sparcv7, sparcv9, amd64]\*(Aq\&. It must be a value from the enumeration\&. ERROR in line 14, col 110: cvc\-attribute\&.3: The value \*(Aqi386\*(Aq of attribute \*(Aqarch\*(Aq on element \*(Aqexecutable\*(Aq is not valid with respect to its type, \*(AqArchitectureType\*(Aq\&. ERROR in line 14, col 110: cvc\-enumeration\-valid: Value \*(Aqdarwin\*(Aq is not facet\-valid with respect to enumeration \*(Aq[aix, sunos, linux, macosx, windows]\*(Aq\&. It must be a value from the enumeration\&. 
ERROR in line 14, col 110: cvc\-attribute\&.3: The value \*(Aqdarwin\*(Aq of attribute \*(Aqos\*(Aq on element \*(Aqexecutable\*(Aq is not valid with respect to its type, \*(AqOSType\*(Aq\&. 0 warnings, 4 errors, and 0 fatal errors detected\&. .fi .if n \{\ .RE .\} .SH "SEE ALSO" .sp Apache Xerces\-J \m[blue]\fBhttp://xerces\&.apache\&.org/xerces2\-j/\fR\m[] .SH "AUTHORS" .sp Jens\-S\&. Vöckler .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu/\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-invoke.10000644000175000017500000001134111757531137021567 0ustar ryngerynge'\" t .\" Title: pegasus-invoke .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-INVOKE" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-invoke \- invokes a command from a file .SH "SYNOPSIS" .sp .nf \fBpegasus\-invoke\fR ( \fBapp\fR | \fB@fn\fR ) [ \fBarg | *@fn\fR [\&.\&.]] .fi .SH "DESCRIPTION" .sp The \fBpegasus\-invoke\fR tool invokes a single application with as many arguments as your Unix permits (128k characters for Linux)\&. Arguments come from two places: either the command\-line as regular arguments, or a special file, which contains one argument per line\&. .sp The \fBpegasus\-invoke\fR tool became necessary to work around the 4k argument length limit in Condor\&. It also permits the use of arguments inside argument files without worrying about shell, Condor or Globus escape necessities\&. All argument file contents are passed as is, one line per argument entry\&. .SH "ARGUMENTS" .PP \fB\-d\fR .RS 4 This option increases the debug level\&. Currently, only debugging or no debugging is distinguished\&. Debug messages are generated on \fIstdout\fR\&. By default, debugging is disabled\&. .RE .PP \fB\-h\fR .RS 4 This option prints the help message and exits the program\&. .RE .PP \fB\-\-\fR .RS 4 This option stops any option processing\&. It may only be necessary if the application is stated on the command\-line and starts with a hyphen itself\&. .sp The first argument must either be the application to run as fully\-specified location (either absolute, or relative to current wd), or a file containing one argument per line\&. The \fIPATH\fR environment variable is \fBnot\fR used to locate an application\&. Subsequent arguments may be specified explicitly on the command line\&. Any argument that starts with an at (@) sign is taken to introduce a filename, which contains one argument per line\&. The textual file may contain long arguments and filenames\&. 
However, Unices still impose limits on the maximum length of a directory name, and the maximum length of a file name\&. These lengths are not checked, because \fBpegasus\-invoke\fR is oblivious of the application (e\&.g\&. which argument is a filename, and which argument is a mere string resembling a filename)\&. .RE .SH "RETURN VALUE" .sp The \fBpegasus\-invoke\fR tool returns 127 if it was unable to find the application\&. It returns 126 if there was a problem parsing the file\&. All other exit status, including 126 and 127, come from the application\&. .SH "SEE ALSO" .sp \fBpegasus\-kickstart(1)\fR .SH "EXAMPLE" .sp .if n \{\ .RS 4 .\} .nf $ echo "/bin/date" > X $ echo "\-Isec" >> X $ pegasus\-invoke @X 2005\-11\-03T15:07:01\-0600 .fi .if n \{\ .RE .\} .sp Recursion is also possible\&. Please mind not to use circular inclusions\&. Also note how duplicating the initial at (@) sign will escape its meaning as inclusion symbol\&. .sp .if n \{\ .RS 4 .\} .nf $ cat test\&.3 This is test 3 $ cat test\&.2 /bin/echo @test\&.3 @@test\&.3 $ pegasus\-invoke @test\&.2 This is test 3 @test\&.3 .fi .if n \{\ .RE .\} .SH "RESTRICTIONS" .sp While the arguments themselves may contain files with arguments to parse, starting with an at (@) sign as before, the maximum recursion limit is 32 levels of inclusions\&. It is not possible (yet) to use \fIstdin\fR as a source of inclusion\&. .SH "HISTORY" .sp As you may have noticed, \fBpegasus\-invoke\fR had the name \fBinvoke\fR in previous incantations\&. We are slowly moving to the new name to avoid clashes in a larger OS installation setting\&. However, there is no pertinent need to change the internal name, too, as no name clashes are expected\&. .SH "AUTHORS" .sp Mike Wilde .sp Jens\-S\&. Vöckler .sp Pegasus \fBhttp://pegasus\&.isi\&.edu/\fR pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-tc-client.10000644000175000017500000002736311757531137022167 0ustar ryngerynge'\" t .\" Title: pegasus-tc-client .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-TC\-CLIENT" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-tc-client \- A full featured generic client to handle adds, deletes and queries to the Transformation Catalog (TC)\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-tc\-client\fR [\fB\-D\fR\fIproperty=value\fR\&...] 
[\fI\-h\fR] [\fI\-v\fR] [\fI\-V\fR] \fIOPERATION\fR \fITRIGGERS\fR [\fIOPTIONS\fR] .fi .SH "DESCRIPTION" .sp The \fBpegasus\-tc\-client\fR command is a generic client that performs the three basic operations of adding, deleting and querying on any Transformation Catalog that implements the TC API\&. The client implements all the operations supported by the TC API\&. It is up to the TC implementation whether it supports all operations or modes\&. .sp The following 3 operations are supported by the \fBpegasus\-tc\-client\fR\&. One of these operations has to be specified to run the client\&. .PP \fBADD\fR .RS 4 This operation allows the client to add or update entries in the Transformation Catalog\&. Entries can be added one by one on the command line or in bulk by using the \fIBULK\fR Trigger and providing a file with the necessary entries\&. Also Profiles can be added to either the logical transformation or the physical transformation\&. .RE .PP \fBDELETE\fR .RS 4 This operation allows the client to delete entries from the Transformation Catalog\&. Entries can be deleted based on logical transformation, by resource, by transformation type as well as the transformation system information\&. Also Profiles associated with the logical or physical transformation can be deleted\&. .RE .PP \fBQUERY\fR .RS 4 This operation allows the client to query for entries from the Transformation Catalog\&. Queries can be made for printing all the contents of the Catalog or for specific entries, for all the logical transformations or resources etc\&. .RE .sp See the \fBTRIGGERS\fR and \fBVALID COMBINATIONS\fR section for more details\&. .SH "OPERATIONS" .sp To select one of the 3 operations\&. .PP \fB\-a, \-\-add\fR .RS 4 Perform addition operations on the TC\&. .RE .PP \fB\-d, \-\-delete\fR .RS 4 Perform delete operations on the TC\&. .RE .PP \fB\-q, \-\-query\fR .RS 4 Perform query operations on the TC\&. .RE .SH "TRIGGERS" .sp Triggers modify the behavior of an \fBOPERATION\fR\&. For example, if you want to perform a bulk operation you would use a \fIBULK\fR Trigger or if you want to perform an operation on a Logical Transformation then you would use the \fILFN\fR Trigger\&. .sp The following 7 Triggers are available\&. See the \fBVALID COMBINATIONS\fR section for the correct grouping and usage\&. .PP \fB\-B\fR .RS 4 Triggers a bulk operation\&. .RE .PP \fB\-L\fR .RS 4 Triggers an operation on a logical transformation\&. .RE .PP \fB\-P\fR .RS 4 Triggers an operation on a physical transformation\&. .RE .PP \fB\-R\fR .RS 4 Triggers an operation on a resource\&. .RE .PP \fB\-E\fR .RS 4 Triggers an operation on a Profile\&. .RE .PP \fB\-T\fR .RS 4 Triggers an operation on a Type\&. .RE .PP \fB\-S\fR .RS 4 Triggers an operation on a System information\&. .RE .SH "OPTIONS" .sp The following options are applicable for all the operations\&. .PP \fB\-D\fR\fIproperty=value\fR .RS 4 The \-D option allows an experienced user to override certain properties which influence the program execution, among them the default location of the user\(cqs properties file and the PEGASUS home location\&. One may set several CLI properties by giving this option multiple times\&. The \fB\-D\fR option(s) must be the first option on the command line\&. A CLI property takes precedence over the properties file property of the same key\&. .RE .PP \fB\-l, \-\-lfn\fR \fIlogical\fR .RS 4 The logical transformation to be added\&. The format is: \fBNAMESPACE::NAME:VERSION\fR\&. The name is always required, namespace and version are optional\&. 
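.sp For example (illustrative values), \fBexample::keg:1\&.0\fR specifies namespace, name and version, while a plain \fBkeg\fR is also valid\&. 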
.RE .PP \fB\-p, \-\-pfn\fR \fIphysical\fR .RS 4 The physical transformation to be added\&. For INSTALLED executables it is a local file path; for all others it is a URL\&. .RE .PP \fB\-t, \-\-type\fR \fItype\fR .RS 4 The type of physical transformation\&. Valid values are: INSTALLED, STATIC_BINARY, DYNAMIC_BINARY, SCRIPT, SOURCE, PACMAN_PACKAGE\&. .RE .PP \fB\-r, \-\-resource\fR \fIresource\fR .RS 4 The resourceID where the transformation is located\&. .RE .PP \fB\-e, \-\-profile\fR \fIprofiles\fR .RS 4 The profiles for the transformation\&. Multiple profiles of the same namespace can be added simultaneously by separating them with a comma \fB","\fR\&. Each profile section is written as \fBNAMESPACE::KEY=VALUE,KEY2=VALUE2\fR e\&.g\&. ENV::JAVA_HOME=/usr/bin/java2,PEGASUS_HOME=/usr/local/pegasus\&. To add multiple namespaces you need to repeat the \-e option for each namespace\&. e\&.g\&. \-e ENV::JAVA_HOME=/usr/bin/java \-e GLOBUS::JobType=MPI,COUNT=10 .RE .PP \fB\-s, \-\-system\fR \fIsysteminfo\fR .RS 4 The architecture, OS, OS version and glibc, if any, for the executable\&. Each system info is written in the form \fBARCH::OS:OSVER:GLIBC\fR .RE .PP \fB\-v, \-\-verbose\fR .RS 4 Displays the output in verbose mode (Lots of Debugging info)\&. .RE .PP \fB\-V, \-\-version\fR .RS 4 Displays the Pegasus version\&. .RE .PP \fB\-h, \-\-help\fR .RS 4 Generates help\&. .RE .SH "OTHER OPTIONS" .PP \fB\-o, \-\-oldformat\fR .RS 4 Generates the output in the old single line format\&. .RE .PP \fB\-c, \-\-conf\fR .RS 4 Path to the property file\&. .RE .SH "VALID COMBINATIONS" .sp The following are valid combinations of \fBOPERATIONS, TRIGGERS, OPTIONS\fR for the \fBpegasus\-tc\-client\fR\&. .SS "ADD" .PP \fBAdd TC Entry\fR .RS 4 \-a \-l \fIlfn\fR \-p \fIpfn\fR \-t \fItype\fR \-r \fIresource\fR \-s \fIsystem\fR [\-e \fIprofiles\fR\&...] .sp Adds a single entry into the transformation catalog\&. .RE .PP \fBAdd PFN Profile\fR .RS 4 \-a \-P \-E \-p \fIpfn\fR \-t \fItype\fR \-r \fIresource\fR \-e \fIprofiles\fR \&... .sp Adds profiles to a specified physical transformation on a given resource and of a given type\&. .RE .PP \fBAdd LFN Profile\fR .RS 4 \-a \-L \-E \-l \fIlfn\fR \-e \fIprofiles\fR \&... .sp Adds profiles to a specified logical transformation\&. .RE .PP \fBAdd Bulk Entries\fR .RS 4 \-a \-B \-f \fIfile\fR .sp Adds entries in bulk mode by supplying a file containing the entries\&. The format of the file contains 6 columns\&. E\&.g\&. .sp .if n \{\ .RS 4 .\} .nf #RESOURCE LFN PFN TYPE SYSINFO PROFILES # isi NS::NAME:VER /bin/date INSTALLED ARCH::OS:OSVERS:GLIBC NS::KEY=VALUE,KEY=VALUE;NS2::KEY=VALUE,KEY=VALUE .fi .if n \{\ .RE .\} .RE .SS "DELETE" .PP \fBDelete all TC\fR .RS 4 \-d \-BPRELST .sp Deletes the entire contents of the TC\&. .sp \fBWARNING : USE WITH CAUTION\&.\fR .RE .PP \fBDelete by LFN\fR .RS 4 \-d \-L \-l \fIlfn\fR [\-r \fIresource\fR] [\-t \fItype\fR] .sp Deletes entries from the TC for a particular logical transformation and additionally a resource and or type\&. .RE .PP \fBDelete by PFN\fR .RS 4 \-d \-P \-l \fIlfn\fR \-p \fIpfn\fR [\-r \fIresource\fR] [\-t \fItype\fR] .sp Deletes entries from the TC for a given logical and physical transformation and additionally on a particular resource and or of a particular type\&. .RE .PP \fBDelete by Type\fR .RS 4 \-d \-T \-t \fItype\fR [\-r \fIresource\fR] .sp Deletes entries from TC of a specific type and/or on a specific resource\&. .RE .PP \fBDelete by Resource\fR .RS 4 \-d \-R \-r \fIresource\fR .sp Deletes the entries from the TC on a particular resource\&. 
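.sp For example (an illustrative invocation, reusing the sample resource name from the bulk file above): .sp .if n \{\ .RS 4 .\} .nf pegasus\-tc\-client \-d \-R \-r isi .fi .if n \{\ .RE .\} 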
.RE .PP \fBDelete by SysInfo\fR .RS 4 \-d \-S \-s \fIsysinfo\fR .sp Deletes the entries from the TC for a particular system information type\&. .RE .PP \fBDelete Pfn Profile\fR .RS 4 \-d \-P \-E \-p \fIpfn\fR \-r \fIresource\fR \-t \fItype\fR [\-e \fIprofiles\fR \&.\&.] .sp Deletes all or specific profiles associated with a physical transformation\&. .RE .PP \fBDelete Lfn Profile\fR .RS 4 \-d \-L \-E \-l \fIlfn\fR \-e \fIprofiles\fR \&...\&. .sp Deletes all or specific profiles associated with a logical transformation\&. .RE .SS "QUERY" .PP \fBQuery Bulk\fR .RS 4 \-q \-B .sp Queries for all the contents of the TC\&. It produces a file\-format TC which can be added to another TC using the bulk option\&. .RE .PP \fBQuery LFN\fR .RS 4 \-q \-L [\-r \fIresource\fR] [\-t \fItype\fR] .sp Queries the TC for logical transformations and/or on a particular resource and/or of a particular type\&. .RE .PP \fBQuery PFN\fR .RS 4 \-q \-P \-l \fIlfn\fR [\-r \fIresource\fR] [\-t \fItype\fR] .sp Queries the TC for physical transformations for a given logical transformation and/or on a particular resource and/or of a particular type\&. .RE .PP \fBQuery Resource\fR .RS 4 \-q \-R \-l \fIlfn\fR [\-t \fItype\fR] .sp Queries the TC for resources that are registered and/or resources registered for a specific type of transformation\&. .RE .PP \fBQuery LFN Profile\fR .RS 4 \-q \-L \-E \-l \fIlfn\fR .sp Queries for profiles associated with a particular logical transformation\&. .RE .PP \fBQuery Pfn Profile\fR .RS 4 \-q \-P \-E \-p \fIpfn\fR \-r \fIresource\fR \-t \fItype\fR .sp Queries for profiles associated with a particular physical transformation\&. .RE .SH "PROPERTIES" .sp These are the properties you will need to set to use either the \fBFile\fR or \fBDatabase\fR TC\&. .sp For more details please check the \fB$PEGASUS_HOME/etc/sample\&.properties\fR file\&. .PP \fBpegasus\&.catalog\&.transformation\fR .RS 4 Identifies which implementation of the TC will be used\&. If a relative name is used, then the path org\&.griphyn\&.cPlanner\&.tc is prefixed to the name and used as the class name to load\&. The default value is \fBText\fR\&. The other supported mode is \fBFile\fR\&. .RE .PP \fBpegasus\&.catalog\&.transformation\&.file\fR .RS 4 The file path where the text based TC is located\&. By default the path \fB$PEGASUS_HOME/var/tc\&.data\fR is used\&. .RE .SH "FILES" .PP \fB$PEGASUS_HOME/var/tc\&.data\fR .RS 4 is the suggested location for the file corresponding to the Transformation Catalog\&. .RE .PP \fB$PEGASUS_HOME/etc/properties\fR .RS 4 is the location to specify properties that select which Transformation Catalog implementation to use and the implementation\-related \fBPROPERTIES\fR\&. .RE .PP \fBpegasus\&.jar\fR .RS 4 contains all compiled Java bytecode to run the Pegasus planner\&. .RE .SH "ENVIRONMENT VARIABLES" .PP \fBPEGASUS_HOME\fR .RS 4 Path to the PEGASUS installation directory\&. .RE .PP \fBJAVA_HOME\fR .RS 4 Path to the JAVA 1\&.4\&.x installation directory\&. .RE .PP \fBCLASSPATH\fR .RS 4 The classpath should be set to contain all necessary PEGASUS files for the execution environment\&. To automatically add the \fICLASSPATH\fR to your environment, in the \fI$PEGASUS_HOME\fR directory run the script \fIsource setup\-user\-env\&.csh\fR or \fIsource setup\-user\-env\&.sh\fR\&. 
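.sp For example (an illustrative Bourne\-shell session, following the instructions above): .sp .if n \{\ .RS 4 .\} .nf $ cd $PEGASUS_HOME $ source setup\-user\-env\&.sh .fi .if n \{\ .RE .\} 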
.RE .SH "AUTHORS" .sp Gaurang Mehta .sp Karan Vahi .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-dagman.10000644000175000017500000000400311757531137021520 0ustar ryngerynge'\" t .\" Title: pegasus-dagman .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-DAGMAN" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-dagman \- Wrapper around *condor_dagman*\&. Not to be run by user\&. .SH "DESCRIPTION" .sp The \fBpegasus\-dagman\fR is a python wrapper that invokes \fBpegasus\-monitord\fR and \fBcondor_dagman\fR both\&. This is started automatically by \fBpegasus\-submit\-dag\fR and ultimately \fBcondor_submit_dag\fR\&. \fBDO NOT USE DIRECTLY\fR .SH "RETURN VALUE" .sp If the \fBcondor_dagman\fR and \fBpegasus\-monitord\fR exit successfully, \fBpegasus\-dagman\fR exits with 0, else exits with non\-zero\&. 
.SH "ENVIRONMENT VARIABLES" .PP \fBPATH\fR .RS 4 The path variable is used to locate binary for \fBcondor_dagman\fR and \fBpegasus\-monitord\fR .RE .SH "SEE ALSO" .sp pegasus\-run(1) pegasus\-monitord(1) pegasus\-submit\-dag(1) .SH "AUTHORS" .sp Gaurang Mehta .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-tc-converter.10000644000175000017500000001331111757531137022706 0ustar ryngerynge'\" t .\" Title: pegasus-tc-converter .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-TC\-CONVERT" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-tc-converter \- A client to convert transformation catalog from one format to another format\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-tc\-converter\fR [\fB\-D\fR\fIproperty=value\fR\&...] [\fB\-v\fR] [\fB\-q\fR] [\fB\-V\fR] [\fB\-h\fR] [\fB\-I\fR \fIfmt\fR] [\fB\-O\fR \fIfmt\fR] [\fB\-N\fR \fIdbusername\fR] [\fB\-P\fR \fIdbpassword\fR] [\fB\-U\fR \fIdburl\fR] [\fB\-H\fR \fIdbhost\fR] \fB\-i\fR \fIinfile\fR[,\fIinfile\fR,\&...] \fB\-o\fR \fIoutfile\fR .fi .SH "DESCRIPTION" .sp The tc\-convert program is used to convert the transformation catalog from one format to another\&. .sp Currently, the following formats of transformation catalog exist: .PP \fBText\fR .RS 4 This is a easy to read multi line textual format\&. .sp A sample entry in this format looks as follows: .sp .if n \{\ .RS 4 .\} .nf tr example::keg:1\&.0 { site isi { profile env "JAVA_HOME" "/bin/java\&.1\&.6" pfn "/path/to/keg" arch "x86" os "linux" osrelease "fc" osversion "4" type "installed" } } .fi .if n \{\ .RE .\} .RE .PP \fBFile\fR .RS 4 This is a tuple based format which contains 6 columns\&. .sp .if n \{\ .RS 4 .\} .nf RESOURCE LFN PFN TYPE SYSINFO PROFILES .fi .if n \{\ .RE .\} .sp A sample entry in this format looks as follows .sp .if n \{\ .RS 4 .\} .nf isi example::keg:1\&.0 /path/to/keg INSTALLED INTEL32::LINUX:fc_4: env::JAVA_HOME="/bin/java\&.1\&.6" .fi .if n \{\ .RE .\} .RE .PP \fBDatabase\fR .RS 4 Only MySQL is supported for the time being\&. .RE .SH "OPTIONS" .PP \fB\-D\fR\fIproperty=value\fR .RS 4 The \fB\-D\fR option allows an experienced user to override certain properties which influence the program execution, among them the default location of the user\(cqs properties file and the \fBPEGASUS_HOME\fR location\&. One may set several CLI properties by giving this option multiple times\&. .sp The \fB\-D\fR option(s) must be the first option on the command line\&. 
CLI properties take precedence over the file\-based properties of the same key\&. .RE .PP \fB\-I\fR \fIfmt\fR, \fB\-\-iformat\fR \fIfmt\fR .RS 4 The input format of the input files\&. Valid values for the input format are: \fBFile\fR, \fBText\fR, and \fBDatabase\fR\&. .RE .PP \fB\-O\fR \fIfmt\fR, \fB\-\-oformat\fR \fIfmt\fR .RS 4 The output format of the output file\&. Valid values for the output format are: \fBFile\fR, \fBText\fR, and \fBDatabase\fR\&. .RE .PP \fB\-i\fR \fIinfile\fR[,\fIinfile\fR,\&...], \fB\-\-input\fR \fIinfile\fR[,\fIinfile\fR,\&...] .RS 4 The comma separated list of input files that need to be converted to a file in the format specified by the \fB\-\-oformat\fR option\&. .RE .PP \fB\-o\fR \fIoutfile\fR, \fB\-\-output\fR \fIoutfile\fR .RS 4 The output file to which the output needs to be written\&. .RE .SS "Other Options" .PP \fB\-N\fR \fIdbusername\fR, \fB\-\-db\-user\-name\fR \fIdbusername\fR .RS 4 The database user name\&. .RE .PP \fB\-P\fR \fIdbpassword\fR, \fB\-\-db\-user\-pwd\fR \fIdbpassword\fR .RS 4 The database user password\&. .RE .PP \fB\-U\fR \fIdburl\fR, \fB\-\-db\-url\fR \fIdburl\fR .RS 4 The database URL\&. .RE .PP \fB\-H\fR \fIdbhost\fR, \fB\-\-db\-host\fR \fIdbhost\fR .RS 4 The database host\&. .RE .PP \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Increases the verbosity of messages about what is going on\&. By default, all FATAL, ERROR, CONSOLE and WARNING messages are logged\&. .RE .PP \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Decreases the verbosity of messages about what is going on\&. By default, all FATAL, ERROR, CONSOLE and WARNING messages are logged\&. .RE .PP \fB\-V\fR, \fB\-\-version\fR .RS 4 Displays the current version number of the Pegasus Workflow Planner Software\&. .RE .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Displays all the options to the \fBpegasus\-tc\-converter\fR command\&. 
.RE .SH "EXAMPLE" .PP Text to file format conversion .RS 4 .RE .sp .if n \{\ .RS 4 .\} .nf pegasus\-tc\-converter \-i tc\&.data \-I File \-o tc\&.txt \-O Text \-v .fi .if n \{\ .RE .\} .PP File to Database(new) format conversion .RS 4 .RE .sp .if n \{\ .RS 4 .\} .nf pegasus\-tc\-converter \-i tc\&.data \-I File \-N mysql_user \-P mysql_pwd \-U jdbc:mysql://localhost:3306/tc \-H localhost \-O Database \-v .fi .if n \{\ .RE .\} .PP Database (username, password, host, url specified in properties file) to text format conversion .RS 4 .RE .sp .if n \{\ .RS 4 .\} .nf pegasus\-tc\-converter \-I Database \-o tc\&.txt \-O Text \-vvvvv .fi .if n \{\ .RE .\} .SH "AUTHORS" .sp Prasanth Thomas .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-version.10000644000175000017500000000765511757531137021776 0ustar ryngerynge'\" t .\" Title: pegasus-version .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-VERSION" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-version \- print or match the version of the toolkit\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-version\fR [\fB\-D\fR\fIproperty=value\fR] [\fB\-m\fR [\fB\-q\fR]] [\fB\-V\fR] [\fB\-f\fR] [\fB\-l\fR] .fi .SH "DESCRIPTION" .sp This program prints the version string of the currently active Pegasus toolkit on \fIstdout\fR\&. .sp pegasus\-version is a simple command\-line tool that reports the version number of the Pegasus distribution being used\&. In its most basic invocation, it will show the current version of the Pegasus software you have installed: .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-version 3\&.1\&.0cvs .fi .if n \{\ .RE .\} .sp If you want to know more details about the installed version, i\&.e\&. which system it was compiled for and when, use the long or full mode: .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-version \-f 3\&.1\&.0cvs\-x86_64_cent_5\&.6\-20110706191019Z .fi .if n \{\ .RE .\} .SH "OPTIONS" .PP \fB\-D\fR\fIproperty=value\fR .RS 4 The \fB\-D\fR option allows an experienced user to override certain properties which influence the program execution, among them the default location of the user\(cqs properties file and the \fBPEGASUS_HOME\fR location\&. One may set several CLI properties by giving this option multiple times\&. .sp The \fB\-D\fR option(s) must be the first option on the command line\&. CLI properties take precedence over the file\-based properties of the same key\&. 
.RE .PP \fB\-f\fR, \fB\-\-full\fR .RS 4 The \fB\-\-full\fR mode displays internal build metrics, like OS type and libc version, in addition to the version number\&. It appends the build time as a time stamp to the version\&. The time stamp uses ISO 8601 format, and is a UTC stamp\&. .RE .PP \fB\-l\fR, \fB\-\-long\fR .RS 4 This option is an alias for \fB\-\-full\fR\&. .RE .PP \fB\-V\fR, \fB\-\-version\fR .RS 4 Displays the version of the Pegasus planner you are using\&. .RE .PP \fB\-\-verbose\fR .RS 4 is ignored in this tool\&. However, to provide a uniform interface for all tools, the option is recognized and will not trigger an error\&. .RE .SH "RETURN VALUE" .sp The program will usually return with success (0)\&. In match mode, if the internal version does not match the external installation, an exit code of 1 is returned\&. If run\-time errors are detected, an exit code of 2 is returned, 3 for fatal errors\&. .SH "ENVIRONMENT VARIABLES" .PP \fBJAVA_HOME\fR .RS 4 should be set and point to a valid location to start the intended Java virtual machine as \fI$JAVA_HOME/bin/java\fR\&. .RE .SH "EXAMPLE" .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-version 3\&.1\&.0cvs $ pegasus\-version \-f 3\&.1\&.0cvs\-x86_64_cent_5\&.6\-20110706191019Z .fi .if n \{\ .RE .\} .SH "AUTHORS" .sp Jens\-S\&. Vöckler .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-transfer.10000644000175000017500000000500611757531137022121 0ustar ryngerynge'\" t .\" Title: pegasus-transfer .\" Author: [see the "Author" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-TRANSFER" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-transfer \- Handles data transfers in Pegasus workflows\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-transfer\fR [\fB\-h\fR] [\fB\-l\fR \fIlevel\fR] [\fB\-f\fR \fIinputfile\fR] [\fB\-\-max\-attempts\fR \fIattempts\fR] .fi .SH "DESCRIPTION" .sp \fBpegasus\-transfer\fR takes a list of URL pairs, either on stdin or with an input file, determines the correct tool to use for the transfer, and executes the transfer\&. Some of the protocols pegasus\-transfer can handle are GridFTP, SRM, Amazon S3, HTTP, and local cp/symlinking\&. Failed transfers are retried\&. .SH "OPTIONS" .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints a usage summary with all the available command\-line options\&. .RE .PP \fB\-l\fR \fIlevel\fR, \fB\-\-loglevel\fR \fIlevel\fR .RS 4 The debugging output level\&. Valid values are: \fBdebug\fR, \fBinfo\fR, \fBwarning\fR, \fBerror\fR\&. Default value is \fBinfo\fR\&. 
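.sp For example, to raise the output to full debugging while reading URL pairs from \fIstdin\fR: .sp .if n \{\ .RS 4 .\} .nf # "debug" is one of the documented log levels $ pegasus\-transfer \-l debug .fi .if n \{\ .RE .\} 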
.RE .PP \fB\-f\fR \fIinputfile\fR, \fB\-\-file\fR \fIinputfile\fR .RS 4 File with input pairs\&. If not given, stdin will be used\&. .RE .PP \fB\-\-max\-attempts\fR \fIattempts\fR .RS 4 Maximum number of attempts for retrying failed transfers\&. .RE .SH "EXAMPLE" .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-transfer file:///etc/hosts file:///tmp/foo CTRL+D .fi .if n \{\ .RE .\} .SH "AUTHOR" .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-sc-converter.10000644000175000017500000001546011757531137022714 0ustar ryngerynge'\" t .\" Title: pegasus-sc-converter .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-SC\-CONVERT" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-sc-converter \- A client to convert a site catalog from one format to another\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-sc\-converter\fR [\fB\-v\fR] [\fB\-V\fR] [\fB\-h\fR] [\fB\-Dproperty\fR=\fIvalue\fR\&...] [\fB\-I\fR \fIfmt\fR] [\fB\-O\fR \fIfmt\fR] \fB\-i\fR \fIinfile\fR[,\fIinfile\fR,\&...] \fB\-o\fR \fIoutfile\fR .fi .SH "DESCRIPTION" .sp The \fBpegasus\-sc\-converter\fR program is used to convert the site catalog from one format to another\&. .sp Currently, the following formats of site catalog exist\&. .PP \fBText\fR .RS 4 This is an easy\-to\-read, multi\-line textual format\&. .sp A sample entry in this format looks as follows: .sp .if n \{\ .RS 4 .\} .nf site local{ sysinfo "INTEL32::LINUX" gridlaunch "/nfs/software/pegasus/default/bin/kickstart" workdir "/scratch" gridftp "gsiftp://viz\-login\&.isi\&.edu/scratch" "4\&.0\&.4" universe transfer "viz\-login\&.isi\&.edu/jobmanager\-fork" "4\&.0\&.4" universe vanilla "viz\-login\&.isi\&.edu/jobmanager\-pbs" "4\&.0\&.4" lrc "rlsn://smarty\&.isi\&.edu" profile env "GLOBUS_LOCATION" "/nfs/software/globus/default" profile env "LD_LIBRARY_PATH" "/nfs/software/globus/default/lib" profile env "PEGASUS_HOME" "/nfs/software/pegasus/default" } .fi .if n \{\ .RE .\} .RE .PP \fBXML\fR .RS 4 This is equivalent to the \fBText\fR format in XML\&. All information in the Text format can be represented in the XML format and vice versa\&. .sp A sample entry in this format looks as follows: .sp .if n \{\ .RS 4 .\} .nf /nfs/software/globus/default /nfs/software/globus/default/lib /nfs/software/pegasus/default /scratch .fi .if n \{\ .RE .\} .sp This format conforms to the XML schema found at \m[blue]\fBhttp://pegasus\&.isi\&.edu/schema/sc\-2\&.0\&.xsd\fR\m[]\&. .RE .PP \fBXML3\fR .RS 4 This format is a superset of previous formats\&. 
All information that can be described about a site can be expressed in this format\&. In addition, the user has finer\-grained control over the specification of directories and FTP servers that are accessible at the \fBhead node\fR and the \fBworker node\fR\&. .sp A sample entry in this format looks as follows: .sp .if n \{\ .RS 4 .\} .nf /nfs/software/globus/default /nfs/software/globus/default/lib /nfs/software/pegasus/default .fi .if n \{\ .RE .\} .sp This format conforms to the XML schema found at \m[blue]\fBhttp://pegasus\&.isi\&.edu/schema/sc\-3\&.0\&.xsd\fR\m[]\&. .RE .SH "OPTIONS" .PP \fB\-i\fR \fIinfile\fR[,\fIinfile\fR,\&...], \fB\-\-input\fR \fIinfile\fR[,\fIinfile\fR,\&...] .RS 4 The comma separated list of input files that need to be converted to a file in the format specified by the \fB\-\-oformat\fR option\&. .RE .PP \fB\-o\fR \fIoutfile\fR, \fB\-\-output\fR \fIoutfile\fR .RS 4 The output file to which the output needs to be written\&. .RE .SS "Other Options" .PP \fB\-I\fR \fIfmt\fR, \fB\-\-iformat\fR \fIfmt\fR .RS 4 The input format of the input files\&. .sp Valid values for the input format are \fBXML\fR and \fBText\fR\&. .RE .PP \fB\-O\fR \fIfmt\fR, \fB\-\-oformat\fR \fIfmt\fR .RS 4 The output format of the output file\&. .sp The valid value for the output format is \fBXML3\fR\&. .RE .PP \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Increases the verbosity of messages about what is going on\&. .sp By default, all FATAL, ERROR, WARNING and INFO messages are logged\&. .RE .PP \fB\-V\fR, \fB\-\-version\fR .RS 4 Displays the current version number of the Pegasus Workflow Planner Software\&. .RE .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Displays all the options to the \fBpegasus\-sc\-converter\fR command\&. .RE .SH "EXAMPLE" .sp .if n \{\ .RS 4 .\} .nf pegasus\-sc\-converter \-i sites\&.xml \-I XML \-o sites\&.xml\&.new \-O XML3 \-vvvvv .fi .if n \{\ .RE .\} .SH "AUTHORS" .sp Karan Vahi .sp Gaurang Mehta .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/Makefile0000644000175000017500000000231411757531137020205 0ustar ryngerynge# # make documentation # PS2PDF = ps2pdf14 NROFF = groff -mandoc TEXT = -Tlatin1 HTML = -Thtml # use /usr/ucb/install on Solaris INSTALL = install SRC = pegasus-analyzer.1 \ pegasus-cleanup.1 \ pegasus-create-dir.1 \ pegasus-dax-validator.1 \ pegasus-exitcode.1 \ pegasus-monitord.1 \ pegasus-plan.1 \ pegasus-plots.1 \ pegasus-rc-client.1 \ pegasus-remove.1 \ pegasus-run.1 \ pegasus-s3.1 \ pegasus-sc-client.1 \ pegasus-sc-converter.1 \ pegasus-statistics.1 \ pegasus-status.1 \ pegasus-tc-client.1 \ pegasus-tc-converter.1 \ pegasus-transfer.1 \ pegasus-version.1 OUTPS = $(SRC:.1=.ps) OUTPDF = $(SRC:.1=.pdf) OUTTXT = $(SRC:.1=.txt) OUTHTML = $(SRC:.1=.html) %.html : %.1 ; $(NROFF) $(HTML) $< > $@ %.ps : %.1 ; $(NROFF) $< > $@ %.pdf : %.1 ; $(NROFF) $< | $(PS2PDF) > $@ %.txt : %.1 $(NROFF) $(TEXT) $< > $@ all : all-ps all-txt all-html all-ps : $(OUTPS) all-txt : $(OUTTXT) all-html : $(OUTHTML) install: all @test "$PEGASUS_HOME" || exit 1 # $(INSTALL) -m 0644 $(SRC) $(PEGASUS_HOME)/man/man1 $(INSTALL) -m 0644 $(OUTPS) $(PEGASUS_HOME)/man $(INSTALL) -m 0644 $(OUTTXT) $(PEGASUS_HOME)/man $(INSTALL) -m 0644 $(OUTHTML) $(PEGASUS_HOME)/man clean : rm -f $(OUTPS) $(OUTTXT) $(OUTHTML) pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-keg.10000644000175000017500000002110711757531137021043 0ustar ryngerynge'\" t .\" Title: pegasus-keg .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 
02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-KEG" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-keg \- kanonical executable for grids .SH "SYNOPSIS" .sp .nf \fBpegasus\-keg\fR [\-a appname] [\-t interval |\-T interval] [\-l logname] [\-P prefix] [\-o fn [\&.\&.]] [\-i fn [\&.\&.]] [\-G sz] [\-C] [\-e env [\&.\&.]] [\-p parm [\&.\&.]] .fi .SH "DESCRIPTION" .sp The kanonical executable is a stand\-in for regular binaries in a DAG \- but not for their arguments\&. It allows one to trace the shape of the execution of a DAG, and thus is an aid to debugging DAG related issues\&. .sp A key feature of \fBpegasus\-keg\fR is that it can copy any number of input files, including the \fIgenerator\fR case, to any number of output files, including the \fIdatasink\fR case\&. In addition, it records the IPv4 address and hostname of the host it ran upon, the current timestamp, the run time from start till the point of logging the information, the current working directory and some information on the system environment\&. \fBpegasus\-keg\fR will also report all input files, the current output files and any requested string and environment value\&. .SH "ARGUMENTS" .sp The \fB\-e\fR, \fB\-i\fR, \fB\-o\fR and \fB\-p\fR arguments allow lists with an arbitrary number of arguments\&. These options may also occur repeatedly on the command line\&. The file options may be provided with the special filename \- to indicate \fIstdout\fR in append mode for writing, or \fIstdin\fR for reading\&. The \fB\-a\fR, \fB\-l\fR, \fB\-P\fR, \fB\-T\fR and \fB\-t\fR arguments should only occur a single time with a single argument\&. .sp If \fBpegasus\-keg\fR is called without any arguments, it will display its usage and exit with success\&. .PP \fB\-a appname\fR .RS 4 This option allows \fBpegasus\-keg\fR to display a different name as its application name\&. This mode of operation is useful in make\-believe mode\&. The default is the basename of \fIargv[0]\fR\&. .RE .PP \fB\-e env [\&.\&.]\fR .RS 4 This option names any number of environment variables, whose value should be reported as part of the data dump\&. By default, no environment variables are reported\&. .RE .PP \fB\-i infile [\&.\&.]\fR .RS 4 The \fBpegasus\-keg\fR binary can work on any number of input files\&. For each output file, every input file will be opened, and its content copied to the output file\&. Textual input files are assumed\&. Each input line is indented by two spaces\&. The input file content is bracketed between a start and an end section, see below\&. By default, \fBpegasus\-keg\fR operates in \fIgenerator\fR mode\&. 
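.sp For example, a run without any input files exercises the generator mode and writes just the self\-info dump (the output filename below is a placeholder): .sp .if n \{\ .RS 4 .\} .nf # sample\&.out is a placeholder filename $ pegasus\-keg \-o sample\&.out .fi .if n \{\ .RE .\} 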
.RE .PP \fB\-l logfile\fR .RS 4 The \fIlogfile\fR is the name of a file to which the self\-info (see below) is atomically appended\&. The atomic write guarantees that the multi\-line information will not interleave with other processes that simultaneously write to the same file\&. The default is not to use any log file\&. .RE .PP \fB\-o outfile [\&.\&.]\fR .RS 4 \fBpegasus\-keg\fR can work on any number of output files\&. For each output file, every input file will be opened, and its content copied to the output file\&. Textual input files are assumed\&. Each input line is indented by two spaces\&. The input file content is bracketed between a start and an end section, see the 2nd example\&. After all input files are copied, the data dump from this instance of \fBpegasus\-keg\fR is appended to the output file\&. Without output files, \fBpegasus\-keg\fR operates in \fIdata sink\fR mode\&. .RE .PP \fB\-G size\fR .RS 4 If you want \fBpegasus\-keg\fR to generate a lot of output, the generator option will do that for you\&. Just specify how much, in bytes, you want\&. This option is off by default\&. .RE .PP \fB\-C\fR .RS 4 This option causes \fBpegasus\-keg\fR to list all environment variables that start with the prefix \fI\e_CONDOR\fR\&. The option is useful if \fBpegasus\-keg\fR is run as (part of) a Condor job\&. This option is off by default\&. .RE .PP \fB\-p string [\&.\&.]\fR .RS 4 Any number of parameters can be reported, without being specific on their content\&. Effectively, these strings are copied straight from the command line\&. By default, no extra arguments are shown\&. .RE .PP \fB\-P prefix\fR .RS 4 Each line from every input file is indented with a prefix string to visually emphasize the provenance of an input file through multiple instances of \fBpegasus\-keg\fR\&. By default, two spaces are used as prefix string\&. .RE .PP \fB\-t interval\fR .RS 4 The interval is an amount of sleep time that the \fBpegasus\-keg\fR executable is to sleep\&. This can be used to emulate light work without straining the pool resources\&. If used together with the \fB\-T\fR spin option, the sleep interval comes before the spin interval\&. The default is no sleep time\&. .RE .PP \fB\-T interval\fR .RS 4 The interval is an amount of busy spin time during which the \fBpegasus\-keg\fR executable simulates intense computation\&. The simulation is done by random Julia set calculations\&. This option can be used to emulate intense work that strains pool resources\&. If used together with the \fB\-t\fR sleep option, the sleep interval comes before the spin interval\&. The default is no spin time\&. .RE .SH "RETURN VALUE" .sp Execution as planned will return 0\&. Failure to open an input file will return 1; failure to open an output file, including the log file, will return with exit code 2\&. .SH "EXAMPLE" .sp The example shows the bracketing of an input file, and the copy produced on the output file\&. 
For illustration purposes, the output file is connected to \fIstdout\fR: .sp .if n \{\ .RS 4 .\} .nf $ date > xx $ pegasus\-keg \-i xx \-p a b c \-o \- \-\-\- start xx \-\-\-\- Thu May 5 10:55:45 PDT 2011 \-\-\- final xx \-\-\-\- Timestamp Today: 20110505T105552\&.910\-07:00 (1304618152\&.910;0\&.000) Applicationname: pegasus\-keg [3661M] @ 128\&.9\&.xxx\&.xxx (xxx\&.isi\&.edu) Current Workdir: /opt/pegasus/default/bin/pegasus\-keg Systemenvironm\&.: x86_64\-Linux 2\&.6\&.18\-238\&.9\&.1\&.el5 Processor Info\&.: 4 x Intel(R) Core(TM) i5 CPU 750 @ 2\&.67GHz @ 2660\&.068 Load Averages : 0\&.298 0\&.135 0\&.104 Memory Usage MB: 11970 total, 8089 free, 0 shared, 695 buffered Swap Usage MB: 12299 total, 12299 free Filesystem Info: / ext3 62GB total, 20GB avail Filesystem Info: /lfs/balefire ext4 1694GB total, 1485GB avail Filesystem Info: /boot ext2 493MB total, 447MB avail Output Filename: \- Input Filenames: xx Other Arguments: a b c .fi .if n \{\ .RE .\} .SH "RESTRICTIONS" .sp Input files must be textual\&. The behaviour with binary files is unspecified\&. .sp The host address is determined from the primary interface\&. If there is no active interface besides loopback, the host address will default to 0\&.0\&.0\&.0\&. If the host address is within a \fIvirtual private network\fR address range, only \fI(VPN)\fR will be displayed as hostname, and no reverse address lookup will be attempted\&. .sp The \fIprocessor info\fR line is only available on Linux systems\&. The line will be missing on other operating systems\&. Its information assumes symmetric multi\-processing, reflecting the CPU name and speed of the last CPU available in \fI/proc/cpuinfo\fR\&. .sp There is a limit of \fI4 * page size\fR to the output buffer of things that \fBpegasus\-keg\fR can report in its self\-info dump\&. There is no such restriction on the input to output file copy\&. .SH "AUTHORS" .sp Jens\-S\&. Vöckler .sp Mike Wilde .sp Yong Zhao .sp Pegasus \- \m[blue]\fBhttp://pegasus\&.isi\&.edu/\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-create-dir.10000644000175000017500000000423211757531137022314 0ustar ryngerynge'\" t .\" Title: pegasus-create-dir .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-CREATE\-DIR" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-create-dir \- Creates work directories in Pegasus workflows\&. 
.SH "SYNOPSIS" .sp .nf \fBpegasus\-create\-dir\fR [\fB\-h\fR][\fB\-l\fR \fIlevel\fR][\fB\-u\fR \fIURL\fR] .fi .SH "DESCRIPTION" .sp \fBpegasus\-create\-dir\fR creates a directory for the given URL\&. Some of the protocols it can handle are GridFTP, SRM, Amazon S3, HTTP, and file:// (using mkdir)\&. .SH "OPTIONS" .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints a usage summary with all the available command\-line options\&. .RE .PP \fB\-l\fR \fIlevel\fR, \fB\-\-loglevel\fR \fIlevel\fR .RS 4 The debugging output level\&. Valid values are \fIdebug\fR, \fIinfo\fR, \fIwarning\fR, and \fIerror\fR\&. Default value is \fIinfo\fR\&. .RE .PP \fB\-u\fR \fIURL\fR, \fB\-\-url\fR \fIURL\fR .RS 4 Specifies the directory to create\&. .RE .SH "EXAMPLE" .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-create\-dir \-u gsiftp://somehost/some/path .fi .if n \{\ .RE .\} .SH "AUTHORS" .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-sc-client.10000644000175000017500000001075611757531137022166 0ustar ryngerynge'\" t .\" Title: pegasus-sc-client .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-SC\-CLIENT" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-sc-client \- generates a site catalog by querying sources\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-sc\-client\fR [\fB\-D\fR\fIproperty=value\fR\&...] [\fB\-\-source\fR \fIsrc\fR] [\fB\-g\fR|\fB\-\-grid\fR \fIgrid\fR] [\fB\-o\fR|\fB\-\-vo\fR \fIvo\fR] [\fB\-s\fR|\fB\-\-sc\fR \fIscfile\fR] [\fB\-p\fR|\fB\-\-properties\fR \fIpropfile\fR] [\fB\-V\fR|\fB\-\-version\fR] [\fB\-v\fR|\fB\-\-verbose\fR] [\fB\-h\fR|\fB\-\-help\fR] .fi .SH "DESCRIPTION" .sp \fBpegasus\-sc\-client\fR generates site catalog by querying sources like OSGMM, MYSOG, etc\&. .SH "OPTIONS" .PP \fB\-D\fR\fIproperty=value\fR .RS 4 The \fB\-D\fR option allows an experienced user to override certain properties which influence the program execution, among them the default location of the user\(cqs properties file and the \fBPEGASUS_HOME\fR location\&. One may set several CLI properties by giving this option multiple times\&. .sp The \fB\-D\fR option(s) must be the first option on the command line\&. CLI properties take precedence over the file\-based properties of the same key\&. .RE .PP \fB\-\-source\fR \fIsrc\fR .RS 4 the source to query for site information\&. Valid sources are: OSGMM|MYOSG|VORS .RE .PP \fB\-g\fR \fIgrid\fR, \fB\-\-grid\fR \fIgrid\fR .RS 4 the grid for which to generate the site catalog information\&. 
.RE .PP \fB\-o\fR \fIvo\fR, \fB\-\-vo\fR \fIvo\fR .RS 4 The Virtual Organization (VO) to which the user belongs\&. The default VO is LIGO\&. The collector host should be set by default unless overridden by the property \fBpegasus\&.catalog\&.site\&.osgmm\&.collector\&.host\fR according to the following rules: .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ .sp -1 .IP \(bu 2.3 .\} if VO is ligo then collector host queried is ligo\-osgmm\&.renci\&.org .RE .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ .sp -1 .IP \(bu 2.3 .\} if VO is engage then collector host queried is engage\-central\&.renci\&.org .RE .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ .sp -1 .IP \(bu 2.3 .\} for any other VO, engage\-central\&.renci\&.org will be queried and in all the paths the name of the engage VO will be replaced with the name of the VO passed\&. e\&.g if user specifies the VO to be \fBcigi\fR, \fBengage\fR will be replaced by \fBcigi\fR in the directory paths\&. .RE .RE .PP \fB\-s\fR \fIscfile\fR, \fB\-\-sc\fR \fIscfile\fR .RS 4 The path to the created site catalog file .RE .PP \fB\-p\fR \fIpropfile\fR, \fB\-\-properties\fR \fIpropfile\fR .RS 4 Generate a Pegasus properties file containing the SRM properties\&. The properties file is created only if \fB\-\-source\fR is set to \fIOSGMM\fR\&. .RE .PP \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Increases the verbosity of messages about what is going on\&. .RE .PP \fB\-V\fR, \fB\-\-version\fR .RS 4 Displays the current version of Pegasus\&. .RE .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Displays all the options to the \fBpegasus\-sc\-client\fR command\&. .RE .SH "EXAMPLE" .sp Runs \fBpegasus\-sc\-client\fR and generates the site catalog: .sp .if n \{\ .RS 4 .\} .nf $ pegasus\-sc\-client \-\-source OSGMM \-\-sc osg\-sites\&.xml \-\-vo LIGO \-\-grid OSG .fi .if n \{\ .RE .\} .SH "AUTHORS" .sp Prasanth Thomas .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-cleanup.10000644000175000017500000000434611757531137021732 0ustar ryngerynge'\" t .\" Title: pegasus-cleanup .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-CLEANUP" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-cleanup \- Removes files during Pegasus workflows enactment\&. .SH "SYNOPSIS" .sp .nf \fBpegasus\-cleanup\fR [\fB\-h\fR][\fB\-l\fR \fIlevel\fR][\fB\-f\fR \fIurls\fR] .fi .SH "DESCRIPTION" .sp \fBpegasus\-cleanup\fR removes the files associated with the given URL\&. 
Some of the protocols it can handle are GridFTP, SRM, Amazon S3, HTTP, and file://\&. .SH "OPTIONS" .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Prints a usage summary with all the available command\-line options\&. .RE .PP \fB\-l\fR \fIlevel\fR, \fB\-\-loglevel\fR \fIlevel\fR .RS 4 The debugging output level\&. Valid values are \fIdebug\fR, \fIinfo\fR, \fIwarning\fR, and \fIerror\fR\&. Default value is \fIinfo\fR\&. .RE .PP \fB\-f\fR \fIurls\fR, \fB\-\-file\fR \fIurls\fR .RS 4 Specifies the file with URLs to clean up (one per line)\&. If this option is not given the list of URLs will be read from stdin\&. .RE .SH "EXAMPLE" .sp .if n \{\ .RS 4 .\} .nf echo gsiftp://somehost/some/path | pegasus\-cleanup .fi .if n \{\ .RE .\} .SH "AUTHORS" .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/man/man1/pegasus-plan.10000644000175000017500000005531711757531137021241 0ustar ryngerynge'\" t .\" Title: pegasus-plan .\" Author: [see the "Authors" section] .\" Generator: DocBook XSL Stylesheets v1.75.2 .\" Date: 02/28/2012 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "PEGASUS\-PLAN" "1" "02/28/2012" "\ \&" "\ \&" .\" ----------------------------------------------------------------- .\" * Define some portability stuff .\" ----------------------------------------------------------------- .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .\" http://bugs.debian.org/507673 .\" http://lists.gnu.org/archive/html/groff/2009-02/msg00013.html .\" ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ .ie \n(.g .ds Aq \(aq .el .ds Aq ' .\" ----------------------------------------------------------------- .\" * set default formatting .\" ----------------------------------------------------------------- .\" disable hyphenation .nh .\" disable justification (adjust text to left margin only) .ad l .\" ----------------------------------------------------------------- .\" * MAIN CONTENT STARTS HERE * .\" ----------------------------------------------------------------- .SH "NAME" pegasus-plan \- runs Pegasus to generate the executable workflow .SH "SYNOPSIS" .sp .nf \fBpegasus\-plan\fR [\fB\-v\fR] [\fB\-q\fR] [\fB\-V\fR] [\fB\-h\fR] [\fB\-D\fR\fIprop=value\fR\&...]] [\fB\-b\fR prefix] [\fB\-\-conf\fR \fIpropsfile\fR] [\fB\-c\fR \fIcachefile\fR[,\fIcachefile\fR\&...]] [\fB\-C\fR \fIstyle\fR[,\fIstyle\fR\&...]] [\fB\-\-dir\fR \fIdir\fR] [\fB\-\-force\fR] [\fB\-\-force\-replan\fR] [\fB\-\-inherited\-rc\-files\fR] [\fB\-j\fR \fIprefix\fR] [\fB\-n\fR] [\fB\-o\fR \fIsite\fR] [\fB\-s\fR \fIsite1\fR[,\fIsite2\fR\&...]] [\fB\-\-staging\-site\fR s1=ss1[,s2=ss2[\&.\&.]] [\fB\-\-randomdir\fR[=\fIdirname\fR]] [\fB\-\-relative\-dir\fR \fIdir\fR] [\fB\-\-relative\-submit\-dir\fR \fIdir\fR] \fB\-d\fR \fIdaxfile\fR .fi .SH "DESCRIPTION" .sp The \fBpegasus\-plan\fR command takes in as input the DAX and generates an executable workflow usually in form of \fBcondor\fR submit files, which can be submitted to an \fIexecution\fR site for execution\&. .sp As part of generating an executable workflow, the planner needs to discover: .PP \fBdata\fR .RS 4 The Pegasus Workflow Planner ensures that all the data required for the execution of the executable workflow is transferred to the execution site by adding transfer nodes at appropriate points in the DAG\&. This is done by looking up an appropriate \fBReplica Catalog\fR to determine the locations of the input files for the various jobs\&. At present the default replica mechanism used is RLS\&. 
.sp The Pegasus Workflow Planner also tries to reduce the workflow, unless specified otherwise\&. This is done by deleting the jobs whose output files have been found in some location in the Replica Catalog\&. At present no cost metrics are used\&. However, preference is given to a location corresponding to the execution site\&. .sp The planner can also add nodes to transfer all the materialized files to an output site\&. The location on the output site is determined by looking up the site catalog file, the path to which is picked up from the \fBpegasus\&.catalog\&.site\&.file\fR property value\&. .RE .PP \fBexecutables\fR .RS 4 The planner looks up a Transformation Catalog to discover locations of the executables referred to in the executable workflow\&. Users can specify INSTALLED or STAGEABLE executables in the catalog\&. Stageable executables can be used by Pegasus to stage executables to resources where they are not pre\-installed\&. .RE .PP \fBresources\fR .RS 4 The layout of the sites where Pegasus can schedule jobs of a workflow is described in the Site Catalog\&. The planner looks up the site catalog to determine for a site what directories a job can be executed in, what servers to use for staging data in and out, and what jobmanagers (if applicable) can be used for submitting jobs\&. .RE .sp The data and executable locations can now be specified in DAX\(cqes conforming to DAX schema version 3\&.2 or higher\&. .SH "OPTIONS" .sp Any option will be displayed with its long option synonym(s)\&. .PP \fB\-D\fR\fIproperty=value\fR .RS 4 The \fB\-D\fR option allows an experienced user to override certain properties which influence the program execution, among them the default location of the user\(cqs properties file and the PEGASUS home location\&. One may set several CLI properties by giving this option multiple times\&. The \fB\-D\fR option(s) must be the first option on the command line\&. A CLI property takes precedence over the properties file property of the same key\&. .RE .PP \fB\-d\fR \fIfile\fR, \fB\-\-dax\fR \fIfile\fR .RS 4 The DAX is the XML input file that describes an abstract workflow\&. This is a mandatory option\&. .RE .PP \fB\-b\fR \fIprefix\fR, \fB\-\-basename\fR \fIprefix\fR .RS 4 The basename prefix to be used while constructing per\-workflow files like the dagman file (\&.dag file) and other workflow specific files that are created by Condor\&. Usually this prefix is taken from the name attribute specified in the root element of the dax files\&. .RE .PP \fB\-c\fR \fIfile\fR[,\fIfile\fR,\&...], \fB\-\-cache\fR \fIfile\fR[,\fIfile\fR,\&...] .RS 4 A comma separated list of paths to replica cache files that override the results from the replica catalog for a particular LFN\&. .sp Each entry in the cache file describes an LFN, the corresponding PFN, and the associated attributes\&. The pool attribute should be specified for each entry\&. .sp .if n \{\ .RS 4 .\} .nf LFN_1 PFN_1 pool=[site handle 1] LFN_2 PFN_2 pool=[site handle 2] \&.\&.\&. LFN_N PFN_N pool=[site handle N] .fi .if n \{\ .RE .\} .sp To treat the cache files as supplemental replica catalogs set the property \fBpegasus\&.catalog\&.replica\&.cache\&.asrc\fR to true\&. This results in the mappings in the cache files being merged with the mappings in the replica catalog\&. Thus, for a particular LFN both the entries in the cache file and replica catalog are available for replica selection\&. .RE .PP \fB\-C\fR \fIstyle\fR[,\fIstyle\fR,\&...], \fB\-\-cluster\fR \fIstyle\fR[,\fIstyle\fR,\&...] 
.RS 4 Comma\-separated list of clustering styles to apply to the workflow\&. This mode of operation results in clustering of n compute jobs into larger jobs to reduce remote scheduling overhead\&. You can specify a list of clustering techniques to recursively apply them to the workflow\&. For example, this allows you to cluster some jobs in the workflow using horizontal clustering and then use label based clustering on the intermediate workflow to do vertical clustering\&. .sp The clustered jobs can be run at the remote site, either sequentially or by using MPI\&. This can be specified by setting the property \fBpegasus\&.job\&.aggregator\fR\&. The property can be overridden by associating the PEGASUS profile key \fIcollapser\fR either with the transformation in the transformation catalog or the execution site in the site catalog\&. The value specified (to the property or the profile) is the logical name of the transformation that is to be used for clustering jobs\&. Note that clustering will only happen if the corresponding transformations are catalogued in the transformation catalog\&. .sp PEGASUS ships with a clustering executable \fIseqexec\fR that can be found in the \fI$PEGASUS_HOME/bin\fR directory\&. It runs the jobs in the clustered job sequentially on the same node at the remote site\&. .sp In addition, an MPI wrapper \fImpiexec\fR is distributed as source with PEGASUS\&. It can be found in the \fI$PEGASUS_HOME/src/tools/cluster\fR directory\&. The wrapper is run on every MPI node, with the first one acting as the master and the rest as workers\&. The number of instances of \fImpiexec\fR that are invoked is equal to the value of the Globus RSL key \fInodecount\fR\&. The master distributes the smaller constituent jobs to the workers\&. For example, if there were 10 jobs in the clustered job and \fInodecount\fR was 5, then one node acts as master, and the 10 jobs are distributed amongst the 4 slaves on demand\&. The master hands off a job to a slave node as and when it gets free\&. So initially all the 4 nodes are given a single job each, and they are then handed more jobs as they finish, till all the 10 jobs have been executed\&. .sp By default, \fIseqexec\fR is used for clustering jobs unless overridden in the properties or by the pegasus profile key \fIcollapser\fR\&. .sp The following types of clustering styles are currently supported: .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ .sp -1 .IP \(bu 2.3 .\} \fBhorizontal\fR is the style of clustering in which jobs on the same level are aggregated into larger jobs\&. A level of the workflow is defined as the greatest distance of a node from the root of the workflow\&. Clustering occurs only on jobs of the same type, i\&.e\&. they refer to the same logical transformation in the transformation catalog\&. .sp Horizontal clustering can operate in one of two modes\&. a\&. Job count based\&. .sp The granularity of clustering can be specified by associating either the PEGASUS profile key \fIclusters\&.size\fR or the PEGASUS profile key \fIclusters\&.num\fR with the transformation\&. .sp The \fIclusters\&.size\fR key indicates how many jobs need to be clustered into the larger clustered job\&. The clusters\&.num key indicates how many clustered jobs are to be created for a particular level at a particular execution site\&. If both keys are specified for a particular transformation, then the clusters\&.num key value is used to determine the clustering granularity\&. 
.sp .RS 4 .ie n \{\ \h'-04' b.\h'+01'\c .\} .el \{\ .sp -1 .IP " b." 4.2 .\} Runtime based\&. .sp To cluster jobs according to runtimes, the user needs to set one property and two profile keys\&. The property pegasus\&.clusterer\&.preference must be set to the value \fIruntime\fR\&. In addition, the user needs to specify two Pegasus profiles: clusters\&.maxruntime, which specifies the maximum duration for which the clustered job should run, and job\&.runtime, which specifies the duration for which the job that the profile key is associated with runs\&. Ideally, clusters\&.maxruntime should be set in the transformation catalog and job\&.runtime should be set for each job individually\&. .RE .RE .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ .sp -1 .IP \(bu 2.3 .\} \fBlabel\fR is the style of clustering in which you can label the jobs in your workflow\&. The jobs with the same label are put in the same clustered job\&. This allows you to aggregate jobs across levels, or in a manner that is best suited to your application\&. .sp To label the workflow, you need to associate PEGASUS profiles with the jobs in the DAX\&. The profile key to use for labeling the workflow can be set by the property \fIpegasus\&.clusterer\&.label\&.key\fR\&. It defaults to label, meaning if you have a PEGASUS profile key label with jobs, the jobs with the same value for the pegasus profile key label will go into the same clustered job\&. .RE .RE .PP \fB\-\-conf\fR \fIpropfile\fR .RS 4 The path to the properties file that contains the properties the planner needs to use while planning the workflow\&. .RE .PP \fB\-\-dir\fR \fIdir\fR .RS 4 The base directory where you want the output of the Pegasus Workflow Planner, usually Condor submit files, to be generated\&. Pegasus creates a directory structure in this base directory on the basis of username, VO Group and the label of the workflow in the DAX\&. .sp By default the base directory is the directory from which one runs the \fBpegasus\-plan\fR command\&. .RE .PP \fB\-f\fR, \fB\-\-force\fR .RS 4 This bypasses the reduction phase in which the abstract DAG is reduced, on the basis of the locations of the output files returned by the replica catalog\&. This is analogous to a \fBmake\fR style generation of the executable workflow\&. .RE .PP \fB\-\-force\-replan\fR .RS 4 By default, for hierarchical workflows if a DAX job fails, then on job retry the rescue DAG of the associated workflow is submitted\&. This option causes Pegasus to replan the DAX job in case of failure instead\&. .RE .PP \fB\-g\fR, \fB\-\-group\fR .RS 4 The VO Group to which the user belongs\&. .RE .PP \fB\-h\fR, \fB\-\-help\fR .RS 4 Displays all the options to the \fBpegasus\-plan\fR command\&. .RE .PP \fB\-\-inherited\-rc\-files\fR \fIfile\fR[,\fIfile\fR,\&...] .RS 4 A comma separated list of paths to replica files\&. Locations mentioned in these have a lower priority than the locations in the DAX file\&. This option is usually used internally for hierarchical workflows, where the file locations mentioned in the parent (encompassing) workflow DAX are passed to the sub\-workflows corresponding to the DAX jobs\&. .RE .PP \fB\-j\fR \fIprefix\fR, \fB\-\-job\-prefix\fR \fIprefix\fR .RS 4 The job prefix to be applied for constructing the filenames for the job submit files\&. .RE .PP \fB\-n\fR, \fB\-\-nocleanup\fR .RS 4 This results in the generation of a separate cleanup workflow that removes the directories created during the execution of the executable workflow\&. 
The cleanup workflow is to be submitted after the executable workflow has finished\&. .sp If this option is not specified, then Pegasus adds cleanup nodes to the executable workflow itself that clean up files on the remote sites when they are no longer required\&. .RE .PP \fB\-o\fR \fIsite\fR, \fB\-\-output\fR \fIsite\fR .RS 4 The output site where all the materialized data is transferred to\&. .sp By default the \fBmaterialized data\fR remains in the working directory on the \fBexecution\fR site where it was created\&. Only those output files are transferred to an output site for which the transfer attribute is set to true in the DAX\&. .RE .PP \fB\-q\fR, \fB\-\-quiet\fR .RS 4 Decreases the logging level\&. .RE .PP \fB\-r\fR[\fIdirname\fR], \fB\-\-randomdir\fR[=\fIdirname\fR] .RS 4 The Pegasus Workflow Planner adds create directory jobs to the executable workflow that create a directory in which all jobs for that workflow execute on a particular site\&. The directory created is in the working directory (specified in the site catalog with each site)\&. .sp By default, Pegasus duplicates the relative directory structure on the submit host on the remote site\&. The user can specify this option without arguments to create a random, timestamp\-based name for the execution directories that are created by the create dir jobs\&. The user can specify the optional argument to this option to specify the basename of the directory that is to be created\&. .sp The create dir jobs refer to the \fBdirmanager\fR executable that is shipped as part of the PEGASUS worker package\&. The transformation catalog is searched for the transformation named \fBpegasus::dirmanager\fR for all the remote sites where the workflow has been scheduled\&. Pegasus can create a default path for the dirmanager executable, if the \fBPEGASUS_HOME\fR environment variable is associated with the sites in the site catalog as an environment profile\&. .RE .PP \fB\-\-relative\-dir\fR \fIdir\fR .RS 4 The directory relative to the base directory where the executable workflow is to be generated and executed\&. This overrides the default directory structure that Pegasus creates based on username, VO Group and the DAX label\&. .RE .PP \fB\-\-relative\-submit\-dir\fR \fIdir\fR .RS 4 The directory relative to the base directory where the executable workflow is to be generated\&. This overrides the default directory structure that Pegasus creates based on username, VO Group and the DAX label\&. By specifying \fB\-\-relative\-dir\fR and \fB\-\-relative\-submit\-dir\fR you can have a different relative execution directory on the remote site and a different relative submit directory on the submit host\&. .RE .PP \fB\-s\fR \fIsite\fR[,\fIsite\fR,\&...], \fB\-\-sites\fR \fIsite\fR[,\fIsite\fR,\&...] .RS 4 A comma separated list of execution sites on which the workflow is to be executed\&. Each of the sites should have an entry in the site catalog that is being used\&. To run on the submit host, specify the execution site as \fBlocal\fR\&. .sp If this option is not specified, all the sites in the site catalog are picked up as candidates for running the workflow\&. .RE .PP \fB\-\-staging\-site\fR \fIs1=ss1\fR[,s2=ss2[\&.\&.]] .RS 4 A comma separated list of key=value pairs, where the key is the execution site and the value is the staging site for that execution site\&. .sp In case of running on a shared filesystem, the staging site is automatically associated by the planner to be the execution site\&. 
If only a value is specified, then that is taken to be the staging site for all the execution sites\&. e\&.g \fB\-\-staging\-site\fR local means that the planner will use the local site as the staging site for all jobs in the workflow\&. .RE .PP \fB\-s\fR, \fB\-\-submit\fR .RS 4 Submits the generated \fBexecutable workflow\fR using \fBpegasus\-run\fR script in $PEGASUS_HOME/bin directory\&. By default, the Pegasus Workflow Planner only generates the Condor submit files and does not submit them\&. .RE .PP \fB\-v\fR, \fB\-\-verbose\fR .RS 4 Increases the verbosity of messages about what is going on\&. By default, all FATAL, ERROR, CONSOLE and WARN messages are logged\&. The logging hierarchy is as follows: .sp .RS 4 .ie n \{\ \h'-04' 1.\h'+01'\c .\} .el \{\ .sp -1 .IP " 1." 4.2 .\} FATAL .RE .sp .RS 4 .ie n \{\ \h'-04' 2.\h'+01'\c .\} .el \{\ .sp -1 .IP " 2." 4.2 .\} ERROR .RE .sp .RS 4 .ie n \{\ \h'-04' 3.\h'+01'\c .\} .el \{\ .sp -1 .IP " 3." 4.2 .\} CONSOLE .RE .sp .RS 4 .ie n \{\ \h'-04' 4.\h'+01'\c .\} .el \{\ .sp -1 .IP " 4." 4.2 .\} WARN .RE .sp .RS 4 .ie n \{\ \h'-04' 5.\h'+01'\c .\} .el \{\ .sp -1 .IP " 5." 4.2 .\} INFO .RE .sp .RS 4 .ie n \{\ \h'-04' 6.\h'+01'\c .\} .el \{\ .sp -1 .IP " 6." 4.2 .\} CONFIG .RE .sp .RS 4 .ie n \{\ \h'-04' 7.\h'+01'\c .\} .el \{\ .sp -1 .IP " 7." 4.2 .\} DEBUG .RE .sp .RS 4 .ie n \{\ \h'-04' 8.\h'+01'\c .\} .el \{\ .sp -1 .IP " 8." 4.2 .\} TRACE .RE .sp For example, to see the INFO, CONFIG and DEBUG messages additionally, set \fB\-vvv\fR\&. .RE .PP \fB\-V\fR, \fB\-\-version\fR .RS 4 Displays the current version number of the Pegasus Workflow Management System\&. .RE .SH "RETURN VALUE" .sp If the Pegasus Workflow Planner is able to generate an executable workflow successfully, the exitcode will be 0\&. All runtime errors result in an exitcode of 1\&. This is usually in the case when you have misconfigured your catalogs etc\&. In the case of an error occurring while loading a specific module implementation at run time, the exitcode will be 2\&. This is usually due to factory methods failing while loading a module\&. In case of any other error occurring during the running of the command, the exitcode will be 1\&. In most cases, the error message logged should give a clear indication as to where things went wrong\&. .SH "PEGASUS PROPERTIES" .sp This is not an exhaustive list of properties used\&. For the complete description and list of properties refer to \fB$PEGASUS_HOME/doc/advanced\-properties\&.pdf\fR .PP \fBpegasus\&.selector\&.site\fR .RS 4 Identifies what type of site selector you want to use\&. If not specified the default value of \fBRandom\fR is used\&. Other supported modes are \fBRoundRobin\fR and \fBNonJavaCallout\fR that calls out to a external site selector\&. .RE .PP \fBpegasus\&.catalog\&.replica\fR .RS 4 Specifies the type of replica catalog to be used\&. .sp If not specified, then the value defaults to \fBRLS\fR\&. .RE .PP \fBpegasus\&.catalog\&.replica\&.url\fR .RS 4 Contact string to access the replica catalog\&. In case of RLS it is the RLI url\&. .RE .PP \fBpegasus\&.dir\&.exec\fR .RS 4 A suffix to the workdir in the site catalog to determine the current working directory\&. If relative, the value will be appended to the working directory from the site\&.config file\&. If absolute it constitutes the working directory\&. .RE .PP \fBpegasus\&.catalog\&.transformation\fR .RS 4 Specifies the type of transformation catalog to be used\&. One can use either a file based or a database based transformation catalog\&. 
.SH "PEGASUS PROPERTIES" .sp This is not an exhaustive list of properties used\&. For the complete description and list of properties refer to \fB$PEGASUS_HOME/doc/advanced\-properties\&.pdf\fR .PP \fBpegasus\&.selector\&.site\fR .RS 4 Identifies what type of site selector you want to use\&. If not specified the default value of \fBRandom\fR is used\&. Other supported modes are \fBRoundRobin\fR and \fBNonJavaCallout\fR, which calls out to an external site selector\&. .RE .PP \fBpegasus\&.catalog\&.replica\fR .RS 4 Specifies the type of replica catalog to be used\&. .sp If not specified, then the value defaults to \fBRLS\fR\&. .RE .PP \fBpegasus\&.catalog\&.replica\&.url\fR .RS 4 Contact string to access the replica catalog\&. In case of RLS it is the RLI url\&. .RE .PP \fBpegasus\&.dir\&.exec\fR .RS 4 A suffix to the workdir in the site catalog to determine the current working directory\&. If relative, the value will be appended to the working directory from the site\&.config file\&. If absolute, it constitutes the working directory\&. .RE .PP \fBpegasus\&.catalog\&.transformation\fR .RS 4 Specifies the type of transformation catalog to be used\&. One can use either a file based or a database based transformation catalog\&. At present the default is \fBText\fR\&. .RE .PP \fBpegasus\&.catalog\&.transformation\&.file\fR .RS 4 The location of the file to use as the transformation catalog\&. .sp If not specified, then the default location of $PEGASUS_HOME/var/tc\&.data is used\&. .RE .PP \fBpegasus\&.catalog\&.site\fR .RS 4 Specifies the type of site catalog to be used\&. One can use either a text based or an xml based site catalog\&. At present the default is \fBXML3\fR\&. .RE .PP \fBpegasus\&.catalog\&.site\&.file\fR .RS 4 The location of the file to use as a site catalog\&. If not specified, then the default value of $PEGASUS_HOME/etc/sites\&.xml is used in case of the xml based site catalog and $PEGASUS_HOME/etc/sites\&.txt in case of the text based site catalog\&. .RE .PP \fBpegasus\&.data\&.configuration\fR .RS 4 This property sets up Pegasus to run in different environments\&. This can be set to .sp \fBsharedfs\fR If this is set, Pegasus will be set up to execute jobs on the shared filesystem on the execution site\&. This assumes that the head node of a cluster and the worker nodes share a filesystem\&. The staging site in this case is the same as the execution site\&. .sp \fBnonsharedfs\fR If this is set, Pegasus will be set up to execute jobs on an execution site without relying on a shared filesystem between the head node and the worker nodes\&. .sp \fBcondorio\fR If this is set, Pegasus will be set up to run jobs in a pure Condor pool, with the nodes not sharing a filesystem\&. Data is staged to the compute nodes from the submit host using Condor File IO\&. .RE .PP \fBpegasus\&.code\&.generator\fR .RS 4 The code generator to use\&. By default, Condor submit files are generated for the executable workflow\&. Setting this to \fBShell\fR results in Pegasus generating a shell script that can be executed on the submit host\&. .RE .SH "FILES" .PP \fB$PEGASUS_HOME/etc/dax\-3\&.3\&.xsd\fR .RS 4 is the suggested location of the latest DAX schema to produce DAX output\&. .RE .PP \fB$PEGASUS_HOME/etc/sc\-3\&.0\&.xsd\fR .RS 4 is the suggested location of the latest Site Catalog schema that is used to create the XML3 version of the site catalog\&. .RE .PP \fB$PEGASUS_HOME/etc/tc\&.data\&.text\fR .RS 4 is the suggested location for the file corresponding to the Transformation Catalog\&. .RE .PP \fB$PEGASUS_HOME/etc/sites\&.xml3\fR | \fB$PEGASUS_HOME/etc/sites\&.xml\fR .RS 4 is the suggested location for the file containing the site information\&. .RE .PP \fB$PEGASUS_HOME/lib/pegasus\&.jar\fR .RS 4 contains all compiled Java bytecode to run the Pegasus Workflow Planner\&. .RE .SH "SEE ALSO" .sp pegasus\-sc\-client(1), pegasus\-tc\-client(1), pegasus\-rc\-client(1) .SH "AUTHORS" .sp Karan Vahi .sp Gaurang Mehta .sp Pegasus Team \m[blue]\fBhttp://pegasus\&.isi\&.edu\fR\m[] pegasus-wms_4.0.1+dfsg/share/pegasus/0000755000175000017500000000000011757531667016615 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/share/pegasus/schema/0000755000175000017500000000000011757531667020055 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/share/pegasus/schema/dax-3.3.xsd0000644000175000017500000004221511757531137021646 0ustar ryngerynge $Id: dax-3.3.xsd 4495 2011-08-26 00:59:52Z rynge $ defines the tri-state transfer modes. defines the usage of a logical filename. narrowly defines a string subset safe in most file systems as file name (excluding path). defines an ASCII-restricted set of identifiers for graph nodes (jobs, daxes, dags). describes restriction on a version number. defines the valid architecture types. defines the valid OS types.
defines the attribute set decribing system information. defines the legal namespaces of execution profiles. defines notifications. type of the ubiquitous profile element. defines just a filename. logical filename representation. Add "executable" attribute yourself. type for job-like uses elements that refers either to data or to executables. type for transformation uses elements that refers either to data or to executables. common elements in job, dag, and dax nodes. Arguments on the commandline, text interrupted by filenames Execution environment specific data to be passed to lower levels. stand-in for "file", linkage is "input" fixed. stand-in for "file", linkage is "output" fixed. stand-in for "file", linkage is "output" fixed. very simple notifications. data or executable catalog entry type. abstract DAG in XML. A dependency must not be used before all the jobs it references were declared. It is suggested to keep all jobs, all deps and all lfns together, a total of three sections. very simple workflow-level notifications (no warrantees). Optional list of all data files used. The new extension is a kind of included replica catalog. Optional list of all executables. The new extension is a kind of included transformation catalog. repeatable simple notification forwarded to job instantiation. An aggregation of executables as its own item. A job component is one of a true job, an included DAG, or an included DAX. The recursive "adag" is an unsupported place-holder. Job specification in terms of a logical transformation. An already concrete workflow, stored mostly as file reference. A workflow to be planned, stored mostly as file reference. List of dependencies. parent node refering to a job. pegasus-wms_4.0.1+dfsg/share/pegasus/schema/dax-3.0.xsd0000644000175000017500000002760011757531137021644 0ustar ryngerynge defines the tri-state transfer modes. defines the type of files data|executable|pattern defines the usage of a logical filename. defines the legal namespaces of execution profiles. defines just a filename. logical filename representation. Derivation of Plain filename, with added attributes for variable name recording. common elements in job, dag, and dax nodes. Execution environment specific data to be passed to lower levels. Execution environment specific data to be passed to lower levels. Arguments on the commandline, text interrupted by filenames stand-in for "file", linkage is "input" fixed. stand-in for "file", linkage is "output" fixed. stand-in for "file", linkage is "output" fixed. abstract DAG in XML A dependency must not be used before all the jobs it references were declared. It is suggested to keep all jobs, all deps and all lfns together, a total of three sections. List of all filenames used. Job specification in terms of a logical transformation. Level from search in router. Level from search in router. Level from search in router. List of dependencies. parent node refering to a job. pegasus-wms_4.0.1+dfsg/share/pegasus/schema/sc-3.0.xsd0000644000175000017500000003153311757531137021475 0ustar ryngerynge defines the legal namespaces of execution profiles. Different Schedulers on the Grid Different Grid Type's Supported by Pegasus Type of Jobs in the executable workflow this Grid Supports root element aggregating all sites information there is. Describes a single site Each site supports various (usually two) jobmanagers. The universe name is actually the primary key for the jobmanager identification. The contact string is the secondary key for any job manager. 
Administrative profile defaults associated with a site. Information about the shared directory on the Worker Nodes Information about the shared directory on the Worker Nodes Administrative profile defaults associated with a site. Each site may report to multiple LRCs. Administrative profile defaults associated with a site. Describes the fileserver to access data from outside Administrative profile defaults associated with a site. Describes the filesystem layout (shared and local) on a site Information about the local directory on a Node Information about the shared directory on the Cluster Defines the valid architecture types Defines the valid OS types Defines the attributes to define system information Defines the interal view/mount point and its characteristics Defines a directory's external and internal view/mount-point and access parameters pegasus-wms_4.0.1+dfsg/share/pegasus/schema/iv-2.0.xsd0000644000175000017500000006172311757531137021511 0ustar ryngerynge dotted quad notation of a IPv4 host address. restricted version of the 'decimal' type. value for a resource usage Information from the rusage record. Total amount of user time used, in seconds with millisecond fraction. Total amount of system time used, in seconds with millisecond fraction. Number of soft page faults (i.e. those serviced by reclaiming a page from the list of pages awaiting reallocation. Number of hard page faults (i.e. those that required I/O). Number of times a process was swapped out of physical memory. Number of signals delivered. Number of voluntary context switches, i.e. because the process gave up the process before it had to (usually to wait for some resource to be available). Number of involuntary context switches, i.e. a higher priority process became runnable or the current process used up its time slice. Information about a named file or descriptor. Named file in filesystem. A named file may be a regular file like a shell script or executable, or it may be a device file like /dev/null. Only regular files may incur element content. Pre-opened descriptor. Created temporary file. Auto-created feedback FIFO. Kernel inode information. file protection in octal number with leading zero. One page of content from temporary files. This flag will be true, if there is more information than included in the data tag. result from the stat call on a named file or descriptor. Information about a specific job that ran as part of the invocation. Excerpt of the struct rusage returned for the application. Exit status of application, which may be a signal with core dump. Failure to start application. This is an error by gridstart. Controlled application exit, may indicate error. Application died on signal. This should not happen. Inode information about the application that was run. Name and arguments of the executable of application. DEPRECATED - redundant attribute will be dropped soon. Enumerates the argument vector for debugging. Single argument vector entry. record of an application run for a given TR and DV. Optional job to run independently first. Optional job to run before invoking the application. The application itself. Optional job to run after invoking the application. Optional job to run independently last. Record the current working directory at time of execution. Resource usage recorded of gridstart itself. Report architecture information. IA32, IA64, ILP32, LP64 Inode info on shared and outside apps and streams. Environment setting as seen by process. single environment key-value pair. 
Resource limits as imposed by the compute enviroment. Start of application according to host clock. Duration of application run in seconds with microsecond fraction, according to host clock. Host address of the primary interface, as far as discernable. process number of the gridlaunch application itself. real user id on the compute node that started grid launch. real group id of the user that started gridlaunch pegasus-wms_4.0.1+dfsg/share/pegasus/schema/pdax-2.0.xsd0000644000175000017500000000646711757531137022033 0ustar ryngerynge partition DAG in XML partition specification in terms of job making up the partition logical name of the job and logical id, as in the dax child node referring to a job in the partition parent node referring to a job in the partition child node referring to a partition parent node referring to a partition pegasus-wms_4.0.1+dfsg/share/pegasus/schema/iv-2.1.xsd0000644000175000017500000007665211757531137021521 0ustar ryngerynge dotted quad notation of a IPv4 host address. restricted version of the 'decimal' type. value for a resource usage Information from the rusage record. Total amount of user time used, in seconds with millisecond fraction. Total amount of system time used, in seconds with millisecond fraction. Number of soft page faults (i.e. those serviced by reclaiming a page from the list of pages awaiting reallocation. Number of hard page faults (i.e. those that required I/O). Number of times a process was swapped out of physical memory. Number of signals delivered. Number of voluntary context switches, i.e. because the process gave up the process before it had to (usually to wait for some resource to be available). Number of involuntary context switches, i.e. a higher priority process became runnable or the current process used up its time slice. Information about a named file or descriptor. Named file in filesystem. A named file may be a regular file like a shell script or executable, or it may be a device file like /dev/null. Only regular files may incur element content. Pre-opened descriptor. Created temporary file. Auto-created feedback FIFO. Kernel inode information. file protection in octal number with leading zero. One page of content from temporary files. This flag will be true, if there is more information than included in the data tag. result from the stat call on a named file or descriptor. Information about a specific job that ran as part of the invocation. Excerpt of the struct rusage returned for the application. Exit status of application, which may be a signal with core dump. Failure to start application. This is an error by gridstart. Controlled application exit, may indicate error. Application died on signal. This should not happen. Inode information about the application that was run. Name and arguments of the executable of application. DEPRECATED - redundant attribute will be dropped soon. Enumerates the argument vector for debugging. Single argument vector entry. Machine-specific info for Mac OS X. Main memory states. Swap disk states. Time stamp of last boot. Information about installed CPUs. Load average info. Process states count. Machine-specific info for Linux kernels 2.4 and 2.6. Main memory states. Swap disk states. Time stamp of last boot. Information about installed CPUs. Load average info. Process states count. Task/thread state count. Machine-specific info for Solaris 8 or higher. Main memory states. Swap disk states. Time stamp of last boot. Information about installed CPUs. Load average info. Process state count. 
Solaris LWP state count. Machine-unspecific when we cannot identify it. record of an application that was run. Optional job to run independently first. Optional job to run before invoking the application. The application itself. Optional job to run after invoking the application. Optional job to run independently last. Record the current working directory at time of execution. Resource usage recorded of gridstart itself. Report machine-specific characteristics like OS, release, CPUs, etc. snapshot time. Report basic kernel information. IA32, IA64, ILP32, LP64 Inode info on shared and outside apps and streams. Environment setting as seen by process. single environment key-value pair. Resource limits as imposed by the compute enviroment. Start of application according to host clock. Duration of application run in seconds with microsecond fraction, according to host clock. Host address of the primary interface, as far as discernable. process number of the gridlaunch application itself. real user id on the compute node that started grid launch. real group id of the user that started gridlaunch pegasus-wms_4.0.1+dfsg/share/pegasus/schema/dax-3.2.xsd0000644000175000017500000004113311757531137021643 0ustar ryngerynge $Id: dax-3.2.xsd 4495 2011-08-26 00:59:52Z rynge $ defines the tri-state transfer modes. defines the usage of a logical filename. narrowly defines a string subset safe in most file systems as file name (excluding path). defines an ASCII-restricted set of identifiers for graph nodes (jobs, daxes, dags). describes restriction on a version number. defines the valid architecture types. defines the valid OS types. defines the attribute set decribing system information. defines the legal namespaces of execution profiles. defines notifications. Notifications are currently NOT SUPPORTED. type of the ubiquitous profile element. defines just a filename. logical filename representation. Add "executable" attribute yourself. type for job-like uses elements that refers either to data or to executables. type for transformation uses elements that refers either to data or to executables. common elements in job, dag, and dax nodes. Arguments on the commandline, text interrupted by filenames Execution environment specific data to be passed to lower levels. stand-in for "file", linkage is "input" fixed. stand-in for "file", linkage is "output" fixed. stand-in for "file", linkage is "output" fixed. very simple notifications. data or executable catalog entry type. abstract DAG in XML. A dependency must not be used before all the jobs it references were declared. It is suggested to keep all jobs, all deps and all lfns together, a total of three sections. Optional list of all data files used. The new extension is a kind of included replica catalog. Optional list of all executables. The new extension is a kind of included transformation catalog. An aggregation of executables as its own item. A job component is one of a true job, an included DAG, or an included DAX. The recursive "adag" is an unsupported place-holder. Job specification in terms of a logical transformation. An already concrete workflow, stored mostly as file reference. A workflow to be planned, stored mostly as file reference. List of dependencies. parent node refering to a job. 
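The stampede-schema.yang module that follows defines the typedefs used by Pegasus monitoring events, including the uuid and nl_ts patterns. A minimal sketch of checking event values against those two patterns from Python (illustrative only, not part of the package; the looks_like_event helper is hypothetical):

import re

# Patterns copied from the uuid and nl_ts typedefs in stampede-schema.yang,
# anchored here because YANG patterns implicitly match the whole value.
UUID_RE = re.compile(r'^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-'
                     r'[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$')
NL_TS_RE = re.compile(r'^((\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?'
                      r'(Z|[\+\-]\d{2}:\d{2}))|(\d{1,9}(\.\d+)?))$')

def looks_like_event(evt):
    # 'ts' is mandatory in the base-event grouping; 'xwf.id' is an optional uuid
    if not NL_TS_RE.match(str(evt.get('ts', ''))):
        return False
    xwf = evt.get('xwf.id')
    return xwf is None or bool(UUID_RE.match(xwf))

print looks_like_event({'ts': '2012-03-14T12:00:00Z',
                        'xwf.id': 'B7B27B5E-0221-4D90-AF5D-A325C2DD951C'})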
pegasus-wms_4.0.1+dfsg/share/pegasus/schema/stampede-schema.yang0000644000175000017500000006201511757531137023771 0ustar ryngeryngemodule stampede-schema { namespace "http://stampede-project.nsf.gov"; prefix "stmp"; // used when imported import ietf-inet-types { prefix "inet"; } organization "STAMPEDE project. NSF grant OCI-0943705"; contact "Dan Gunter Lawrence Berkeley National Laboratory Email: dkgunter@lbl.gov"; reference "https://confluence.pegasus.isi.edu/display/stampede/"; revision "2012-03-14" { description "Updated schema compatible with Pegasus 4.0 release"; } /* * Derived types * ------------- */ // (Grid) DN typedef distinguished-name { type string; } // UUID // eg: B7B27B5E-0221-4D90-AF5D-A325C2DD951C // Lowercase letters (or a mix) also accepted. typedef uuid { type string { length "36"; pattern "[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}"; } } // Integer representation of boolean value as '0' or '1' typedef intbool { type uint8 { range "0 .. 1"; } } // NetLogger timestamp typedef nl_ts { type string { pattern '(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?' + '(Z|[\+\-]\d{2}:\d{2}))|(\d{1,9}(\.\d+)?)'; } } // Integer Pegasus job types typedef peg_inttype { type uint8 { range "0 .. 11"; } } // Valid Pegasus Job Types as String typedef peg_strtype{ type enumeration { enum "unknown"; enum "compute"; enum "stage-in-tx"; enum "stage-out-tx"; enum "registration"; enum "inter-site-tx"; enum "create-dir"; enum "staged-compute"; //since 3.1 no longer generated enum "cleanup"; enum "chmod"; enum "dax"; enum "dag"; } } //Valid Condor Job States i.e the various ULOG_ events typedef condor_jobstates{ type enumeration{ enum "PRE_SCRIPT_STARTED"; enum "PRE_SCRIPT_TERMINATED"; enum "PRE_SCRIPT_SUCCESS"; enum "PRE_SCRIPT_FAILED"; enum "SUBMIT"; enum "GRID_SUBMIT"; enum "GLOBUS_SUBMIT"; enum "SUBMIT_FAILED"; enum "EXECUTE"; enum "REMOTE_ERROR"; enum "IMAGE_SIZE"; enum "JOB_TERMINATED"; enum "JOB_SUCCESS"; enum "JOB_FAILURE"; enum "JOB_HELD"; enum "JOB_EVICTED"; enum "JOB_RELEASED"; enum "POST_SCRIPT_STARTED"; enum "POST_SCRIPT_TERMINATED"; enum "POST_SCRIPT_SUCCESS"; enum "POST_SCRIPT_FAILED"; } } //Valid Condor Workflow States typedef condor_wfstates{ type enumeration{ enum "WORKFLOW_STARTED"; enum "WORKFLOW_TERMINATED"; } } /* * Standard fields * --------------- */ grouping base-event { description "Common components in all events"; leaf ts { description "Timestamp, ISO8601 or numeric seconds since 1/1/1970"; mandatory true; type nl_ts; } /*leaf event { description "Unique name of event"; type string; mandatory true; }*/ leaf level { description "Severity level of event. " + "Subset of NetLogger BP levels. " + "For '*.end' events, if status is non-zero then level should be Error."; type enumeration { enum Info; enum Error; } } leaf xwf.id { description "DAG workflow id"; type uuid; } } grouping base-job-inst { uses base-event; description "Common components for all job instance events"; leaf job_inst.id { description "Job instance identifier i.e the submit sequence generated by monitord"; type int32; mandatory true; } leaf js.id { description "Jobstate identifier"; type int32; } leaf job.id { description "Identifier for corresponding job in the DAG"; type string; mandatory true; } } grouping sched-job-inst { uses base-job-inst; description "Scheduled job instance"; leaf sched.id { description "Identifier for job in scheduler"; type string; mandatory true; } } /* * Event stream containing NetLogger events. 
* * Naming conventions: * wf = Workflow in DAX (abstract) * xwf = Workflow in DAG * xsubwf = Sub-workflow in DAG * task = DAX node * job = DAG node * job_inst = Runnable (Condor) job * inv = Executable that creates an invocation record */ container events { list event { config false; choice an_event { container stampede.wf.plan { uses base-event; leaf submit.hostname { description "The hostname of the Pegasus submit host"; type inet:host; mandatory true; } leaf dax.label { description "Label for abstract workflow specification"; type string; default "workflow"; } leaf dax.index { description "Index for the DAX"; type string; default "workflow"; } leaf dax.version { description "Version number for DAX"; type string; mandatory true; } leaf dax.file { description "Filename for for the DAX"; type string; mandatory true; } leaf dag.file.name { description "Filename for the DAG"; type string; mandatory true; } leaf planner.version { description "Version string for Pegasus planner, e.g. 3.0.0cvs"; type string; mandatory true; } leaf grid_dn { description "Grid DN of submitter"; type distinguished-name; } leaf user { description "User name of submitter"; type string; } leaf submit.dir { description "Directory path from which workflow was submitted"; type string; mandatory true; } leaf argv { description "All arguments given to planner on command-line"; type string; } leaf parent.xwf.id { description "Parent workflow in DAG, if any"; type uuid; } leaf root.xwf.id { description "Root of workflow hierarchy, in DAG. " + "Use this workflow's UUID if it is the root"; type string; mandatory true; } } container stampede.static.start{ uses base-event; } container stampede.static.end{ uses base-event; } container stampede.xwf.start { uses base-event; leaf restart_count { description "Number of times workflow was restarted (due to failures)"; type uint32; mandatory true; } } container stampede.xwf.end { uses base-event; leaf restart_count { description "Number of times workflow was restarted (due to failures)"; type uint32; mandatory true; } leaf status { description "Status of workflow. 
0=success, -1=failure"; type int16; mandatory true; } } container stampede.task.info { description "Information about task in DAX"; uses base-event; leaf transformation { description "Logical name of the underlying executable"; type string; mandatory true; } leaf argv { description "All arguments given to transformation on command-line"; type string; } leaf type{ description "Type of task"; type peg_inttype; mandatory true; } leaf type_desc { description "String description of task type"; type peg_strtype; mandatory true; } leaf task.id { description "Identifier for this task in the DAX"; type string; mandatory true; } } container stampede.task.edge { description "Represents child/parent relationship between two tasks in DAX"; uses base-event; leaf parent.task.id { description "Parent task"; type string; mandatory true; } leaf child.task.id { description "Child task"; type string; mandatory true; } } container stampede.wf.map.task_job { description "Relates a DAX task to a DAG job."; uses base-event; leaf task.id { description "Identifier for the task in the DAX"; type string; mandatory true; } leaf job.id { description "Identifier for corresponding job in the DAG"; type string; mandatory true; } } container stampede.xwf.map.subwf_job { description "Relates a sub workflow to the corresponding job instance"; uses base-event; leaf subwf.id { description "Sub Workflow Identified / UUID"; type string; mandatory true; } leaf job.id { description "Identifier for corresponding job in the DAG"; type string; mandatory true; } leaf job_inst.id { description "Job instance identifier i.e the submit sequence generated by monitord"; type int32; mandatory true; } } container stampede.job.info { description "A description of a job in the DAG"; uses base-event; leaf job.id { description "Identifier for job in the DAG"; type string; mandatory true; } leaf submit_file { description "Name of file being submitted to the scheduler"; type string; mandatory true; } leaf type{ description "Type of task"; type peg_inttype; mandatory true; } leaf type_desc { description "String description of task type"; type peg_strtype; mandatory true; } leaf clustered { description "Whether job is clustered or not"; type intbool; mandatory true; } leaf max_retries { description "How many retries are allowed for this job before giving up"; type uint32; mandatory true; } leaf task_count { description "Number of DAX tasks for this job. " + "Auxiliary jobs without a task in the DAX will have the value '0'"; type uint32; mandatory true; } leaf executable { description "Program to execute"; type string; mandatory true; } leaf argv { description "All arguments given to executable (on command-line)"; type string; } } container stampede.job.edge { description "Parent/child relationship between two jobs in the DAG"; uses base-event; leaf parent.job.id { description "Parent job"; type string; mandatory true; } leaf child.job.id { description "Child job"; type string; mandatory true; } } container stampede.job_inst.pre.start { description "Start of a prescript for a job instance"; uses base-job-inst; } container stampede.job_inst.pre.term { description "Job prescript is terminated (success or failure not yet known)"; uses base-job-inst; } container stampede.job_inst.pre.end { description "End of a prescript for a job instance"; uses base-job-inst; leaf status { description "Status of prescript. 
0 is success, -1 is error"; type int32; mandatory true; } leaf exitcode{ description "the exitcode with which the prescript exited"; type int32; mandatory true; } } container stampede.job_inst.submit.start { uses sched-job-inst; description "When job instance is going to be submitted. " + "Scheduler job id is not yet known"; } container stampede.job_inst.submit.end { uses sched-job-inst; description "When executable job is submitted"; leaf status { description "Status of workflow. 0=success, -1=failure"; type int16; mandatory true; } } container stampede.job_inst.held.start { uses sched-job-inst; description "When Condor holds the jobs"; } container stampede.job_inst.held.end { uses sched-job-inst; description "When the job is released after being held"; leaf status { description "Status of workflow. 0=success, -1=failure"; type int16; mandatory true; } } container stampede.job_inst.main.start { uses sched-job-inst; description "Start of execution of a scheduler job"; leaf stdin.file { description "Path to file containing standard input of job"; type string; //mandatory true; } leaf stdout.file { description "Path to file containing standard output of job"; type string; mandatory true; } leaf stderr.file { description "Path to file containing standard error of job"; type string; mandatory true; } } container stampede.job_inst.main.term { uses sched-job-inst; description "Job is terminated (success or failure not yet known)"; leaf status { description "Execution status. 0=means job terminated, -1=job was evicted, not terminated"; type int32; mandatory true; } } container stampede.job_inst.main.end { uses sched-job-inst; description "End of main part of scheduler job"; leaf stdin.file { description "Path to file containing standard input of job"; type string; } leaf stdout.file { description "Path to file containing standard output of job"; type string; mandatory true; } leaf stdout.text { description "Text containing output of job"; type string; } leaf stderr.file { description "Path to file containing standard error of job"; type string; mandatory true; } leaf stderr.text { description "Text containing standard error of job"; type string; } leaf user { description "Scheduler's name for user"; type string; } leaf site { description "DAX name for the site at which the job ran"; type string; mandatory true; } leaf work_dir { description "Path to working directory"; type string; } leaf local.dur { description "Duration as seen at the local node"; units "seconds"; type decimal64 { fraction-digits "6"; } } leaf status { description "Execution status. 0=success, -1=failure"; type int32; mandatory true; } leaf exitcode{ description "the exitcode with which the executable exited"; type int32; mandatory true; } leaf multiplier_factor{ description "the multiplier factor for use in statistics"; type int32; mandatory true; } // These values will be set if the job is clustered. leaf cluster.start { description "When the enclosing cluster started"; type nl_ts; } leaf cluster.dur { description "Duration of enclosing cluster"; units "seconds"; type decimal64 { fraction-digits "6"; } } } container stampede.job_inst.post.start { description "Start of a postscript for a job instance"; uses sched-job-inst; } container stampede.job_inst.post.term { description "Job postscript is terminated (success or failure not yet known)"; uses sched-job-inst; } container stampede.job_inst.post.end { description "End of a postscript for a job instance"; uses sched-job-inst; leaf status { description "Status of postscript. 
0 is success, -1=failure"; type int32; mandatory true; } leaf exitcode{ description "the exitcode with which the postscript exited"; type int32; mandatory true; } } container stampede.job_inst.host.info { description "Host information associated with a job instance"; uses base-job-inst; leaf site { description "Site name"; type string; mandatory true; } leaf hostname { description "Host name"; type inet:host; mandatory true; } leaf ip { description "IP address"; type inet:ip-address; mandatory true; } leaf total_memory { description "Total RAM on host"; type uint64; /* mandatory true; */ } leaf uname { description "Operating system name"; type string; /* mandatory true; */ } } container stampede.job_inst.image.info { description "Image size associated with a job instance"; uses base-job-inst; leaf size { description "Image size"; type uint64; } leaf sched.id { description "Identifier for job in scheduler"; type string; mandatory true; } } container stampede.inv.start { description "Start of an invocation"; uses base-event; leaf job_inst.id { description "Job instance identifier i.e the submit sequence generated by monitord"; type int32; mandatory true; } leaf job.id { description "Identifier for corresponding job in the DAG"; type string; mandatory true; } leaf inv.id { description "Identifier for invocation. " + "Sequence number, with -1=prescript and -2=postscript"; type int32; mandatory true; } } container stampede.inv.end { description "End of an invocation"; uses base-event; leaf job_inst.id { description "Job instance identifier i.e the submit sequence generated by monitord"; type int32; mandatory true; } leaf inv.id { description "Identifier for invocation. " + "Sequence number, with -1=prescript and -2=postscript"; type int32; mandatory true; } leaf job.id { description "Identifier for corresponding job in the DAG"; type string; mandatory true; } leaf start_time{ description "The start time of the event"; type nl_ts; } leaf dur { description "Duration of invocation"; units "seconds"; type decimal64 { fraction-digits "6"; } } leaf remote_cpu_time { description "remote CPU time computed as the stime + utime"; units "seconds"; type decimal64 { fraction-digits "6"; } } leaf exitcode{ description "the exitcode with which the executable exited"; type int32; } leaf transformation { description "Transformation associated with this invocation"; type string; mandatory true; } leaf executable { description "Program executed for this invocation"; type string; mandatory true; } leaf argv { description "All arguments given to executable on command-line"; type string; } leaf task.id { description "Identifier for related task in the DAX"; type string; } } } // choice } // event } // events } pegasus-wms_4.0.1+dfsg/share/pegasus/schema/sc-2.0.xsd0000644000175000017500000002443311757531137021475 0ustar ryngerynge defines the legal namespaces of execution profiles. A set of attributes to define a Globus version. root element aggregating all sites information there is. Describes a single site Administrative profile defaults associated with a site. Each site may report to multiple LRCs. Each site may have multiple gridftp servers. Stores the bandwidth informaion related to each gridftp server. The URL (actually, it may be more a URI, but hey, so what) is the access URL to the gridftp server. Each pool may have multiple gridftp servers, or run multiple versions of Globus on different ports. This element is the storage mount point prefix. 
Of course, this may get turned over into other things, augmented by user and system requirements etc. I believe that default works quite well for default Globus setups. Each site supports various (usually two) jobmanagers. The universe name is actually the primary key for the jobmanager identification. The contact string is the secondary key for any job manager. Any pool may have multiple versions of Globus installed, and these versions may have multiple jobmanagers listening on different ports. Each site may have one and only one work directory mount point. This is the path to grid launch. As attributes are optional, each pool can have at most one grid launch path. More does not make sense, unless there are different launchers for different remote schedulers, and a pool supports more than two (fork + one other) remote schedulers. pegasus-wms_4.0.1+dfsg/share/pegasus/schema/dax-2.1.xsd0000755000175000017500000002223111757531137021642 0ustar ryngerynge defines the tri-state transfer modes. defines the type of files data|executable|pattern defines the usage of a logical filename. defines the legal namespaces of execution profiles. defines just a filename. logical filename representation. Derivation of Plain filename, with added attributes for variable name recording. abstract DAG in XML A dependency must not be used before all the jobs it references were declared. It is suggested to keep all jobs, all deps and all lfns together, a total of three sections. List of all filenames used. Job specification in terms of a logical transformation. Arguments on the commandline, text interrupted by filenames Execution environment specific data to be passed to lower levels. stand-in for "filename", linkage is "input" fixed. stand-in for "filename", linkage is "output" fixed. stand-in for "filename", linkage is "output" fixed. Level from search in router. List of dependencies. parent node refering to a job. pegasus-wms_4.0.1+dfsg/share/pegasus/schema/dax-2.0.xsd0000644000175000017500000002617711757531137021653 0ustar ryngerynge defines the tri-state transfer modes. defines the usage of a logical filename. defines the legal namespaces of execution profiles. defines just a filename. logical filename representation. Derivation of Plain filename, with added attributes for variable name recording. abstract DAG in XML A dependency must not be used before all the jobs it references were declared. It is suggested to keep all jobs, all deps and all lfns together, a total of three sections. List of all filenames used. Job specification in terms of a logical transformation. Arguments on the commandline, text interrupted by filenames Execution environment specific data to be passed to lower levels. stand-in for "filename", linkage is "input" fixed. stand-in for "filename", linkage is "output" fixed. stand-in for "filename", linkage is "output" fixed. Level from search in router. List of dependencies. parent node refering to a job. 
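The DAX schema annotations above state that a dependency must not be used before all the jobs it references were declared. A minimal sketch of such an ordering check (illustrative, not part of the package; it assumes the job/child/parent element names and id/ref attributes of the DAX format):

import xml.etree.ElementTree as ET

def check_declaration_order(dax_path):
    # iterparse visits elements in document order, so we can record which
    # job ids have already been declared when a dependency references them
    declared = set()
    for event, elem in ET.iterparse(dax_path, events=('start',)):
        tag = elem.tag.split('}')[-1]      # strip any XML namespace prefix
        if tag == 'job':
            declared.add(elem.get('id'))
        elif tag in ('child', 'parent'):
            ref = elem.get('ref')
            if ref is not None and ref not in declared:
                print 'dependency references undeclared job:', ref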
pegasus-wms_4.0.1+dfsg/share/pegasus/sql/0000755000175000017500000000000011757531667017414 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/share/pegasus/sql/create-my-init.sql0000644000175000017500000000064311757531137022757 0ustar ryngerynge-- -- schema: all -- driver: MySQL 4.0.* -- $Revision: 1653 $ -- CREATE TABLE sequences ( name VARCHAR(32) NOT NULL, currval BIGINT DEFAULT 0, CONSTRAINT pk_sequences PRIMARY KEY(name) ) type=InnoDB; CREATE TABLE pegasus_schema ( name VARCHAR(64) NOT NULL, catalog VARCHAR(16), version FLOAT, creator VARCHAR(32), creation DATETIME, CONSTRAINT pk_pegasus_schema PRIMARY KEY(name) ) type=InnoDB; pegasus-wms_4.0.1+dfsg/share/pegasus/sql/create-my-rc.sql0000644000175000017500000000137611757531137022424 0ustar ryngerynge-- -- schema: org.griphyn.common.catalog.ReplicaCatalog -- driver: MySQL 4.* -- $Revision: 150 $ -- INSERT INTO pegasus_schema VALUES ('JDBCRC','rc','1.2',current_user(),current_timestamp(0)); CREATE TABLE rc_lfn ( id BIGINT DEFAULT NULL auto_increment, lfn VARCHAR(245) NOT NULL, pfn VARCHAR(245) NOT NULL, CONSTRAINT pk_rc_lfn PRIMARY KEY(id), CONSTRAINT sk_rc_lfn UNIQUE(lfn,pfn) ) type=InnoDB; CREATE INDEX ix_rc_lfn ON rc_lfn(lfn); CREATE TABLE rc_attr ( id BIGINT, name VARCHAR(64) NOT NULL, value VARCHAR(255) NOT NULL, CONSTRAINT pk_rc_attr PRIMARY KEY(id,name), CONSTRAINT fk_rc_attr FOREIGN KEY(id) REFERENCES rc_lfn(id) ON DELETE CASCADE ) type=InnoDB; CREATE INDEX ix_rc_attr ON rc_attr(name); pegasus-wms_4.0.1+dfsg/share/pegasus/sql/delete-pg-rc.sql0000644000175000017500000000041211757531137022372 0ustar ryngerynge-- -- schema: org.griphyn.common.catalog.ReplicaCatalog -- driver: PostGreSQL 7.4.* -- $Revision: 150 $ -- DROP INDEX ix_rc_attr; DROP TABLE rc_attr; DROP INDEX ix_rc_lfn; DROP TABLE rc_lfn; DROP SEQUENCE rc_lfn_id; DELETE FROM pegasus_schema WHERE name='JDBCRC'; pegasus-wms_4.0.1+dfsg/share/pegasus/sql/create-my.sql0000644000175000017500000000020511757531137022010 0ustar ryngerynge--- --- schema: all --- driver: MySQL 4.0.* --- $Revision: 2440 $ --- \. create-my-init.sql \. create-my-tc.sql \. create-my-rc.sql pegasus-wms_4.0.1+dfsg/share/pegasus/sql/delete-my-rc.sql0000644000175000017500000000037711757531137022423 0ustar ryngerynge-- -- schema: org.griphyn.common.catalog.ReplicaCatalog -- driver: MySQL 4.* -- $Revision: 150 $ -- DROP INDEX ix_rc_attr ON rc_attr; DROP INDEX ix_rc_lfn ON rc_lfn; DROP TABLE rc_attr; DROP TABLE rc_lfn; DELETE FROM pegasus_schema WHERE name='JDBCRC'; pegasus-wms_4.0.1+dfsg/share/pegasus/sql/README0000644000175000017500000000244111757531137020265 0ustar ryngeryngeThe annotational VDC is the new default VDC. If you have a previous VDC before version 1.3.6, please use the -old.sql files for it. The "real" Documentation ------------------------ Please refer to the Database Admin Guide for the in-depth documentation. Filenames --------- The schema-related filenames follow a pattern: <action>-<db>-<schema>.sql where <action> is either "create" to instantiate a schema or "delete" to drop the schema and all data. The "update" <action> is used to migrate an existing database from an old schema to a new schema. <db> is the database backend for which this flavor of DDL instruction is being issued: my MySQL 4.0.* pg PostGreSQL 7.4.* (we do NOT recommend Pg 8.0.*) sl SQLite 2.* (not supported) <schema> is short for a database schema description. Currently we support: init schema metadata upon which the rest depends tc org.griphyn.common.catalog.transformation.Database rc JDBC generic (uses autoinc) implementation The schemas marked with a "+" are mutually exclusive. The special files: <action>-<db>.sql create, delete, or update a recommended set of schemas (init,tc,rc) for Annotations. Alternatively, the special files: <action>-<db>-old.sql create, delete, or update a recommended set of schemas (init,tc,rc) for chunks.
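-- (Illustrative aside for this edition, not a file in the distribution.)
-- Assuming the JDBCRC replica-catalog schema from create-my-rc.sql above,
-- a logical file name can be registered and resolved like this; the host
-- name and file names are hypothetical:
INSERT INTO rc_lfn(lfn,pfn)
  VALUES ('f.a','gsiftp://host.example.org/data/f.a');
-- attach a site attribute to the new mapping
INSERT INTO rc_attr(id,name,value)
  SELECT id,'pool','local' FROM rc_lfn
   WHERE lfn='f.a' AND pfn='gsiftp://host.example.org/data/f.a';
-- look up all physical replicas of the logical file
SELECT l.pfn, a.value AS pool
  FROM rc_lfn l LEFT JOIN rc_attr a ON a.id=l.id AND a.name='pool'
 WHERE l.lfn='f.a';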
pegasus-wms_4.0.1+dfsg/share/pegasus/sql/delete-my.sql0000644000175000017500000000020511757531137022007 0ustar ryngerynge--- --- schema: all --- driver: MySQL 4.0.* --- $Revision: 2440 $ --- \. delete-my-rc.sql \. delete-my-tc.sql \. delete-my-init.sql pegasus-wms_4.0.1+dfsg/share/pegasus/sql/delete-my-tc.sql0000644000175000017500000000066311757531137022423 0ustar ryngerynge--DROP TABLE IF EXISTS tc_sysinfo; --DROP TABLE IF EXISTS tc_lfnpfnmap; --DROP TABLE IF EXISTS tc_lfnprofile; --DROP TABLE IF EXISTS tc_logicaltx; --DROP TABLE IF EXISTS tc_pfnprofile; --DROP TABLE IF EXISTS tc_physicaltx; DROP TABLE IF EXISTS tc_lfnpfnmap; DROP TABLE IF EXISTS tc_lfnprofile; DROP TABLE IF EXISTS tc_pfnprofile; DROP TABLE IF EXISTS tc_physicaltx; DROP TABLE IF EXISTS tc_logicaltx; DROP TABLE IF EXISTS tc_sysinfo; pegasus-wms_4.0.1+dfsg/share/pegasus/sql/create-pg-tc.sql0000644000175000017500000000645211757531137022407 0ustar ryngerynge-- -- schema : org.griphyn.common.catalog.TransformationCatalog -- driver : PostGreSQL -- $Revision: 150 $ -- Author : Gaurang Mehta gmehta@isi.edu -- -- Table structure for table `tc_sysinfo` -- INSERT INTO pegasus_schema(name,catalog,version) VALUES ('Database','tc','1.3'); CREATE TABLE tc_sysinfo ( id BIGSERIAL PRIMARY KEY, architecture VARCHAR(10) NOT NULL, os VARCHAR(10) NOT NULL, glibc VARCHAR(10) DEFAULT NULL, osversion VARCHAR(10) DEFAULT NULL, UNIQUE(architecture,os,glibc,osversion) ); --COMMENT='Stores the different types of architecture'; -- -- Table structure for table `tc_physicaltx` -- CREATE TABLE tc_physicaltx ( id BIGSERIAL PRIMARY KEY, resourceid VARCHAR(255) NOT NULL, pfn VARCHAR(255) DEFAULT NULL, type VARCHAR(20) NOT NULL DEFAULT 'INSTALLED', --enum('INSTALLED','STATIC_BINARY','DYNAMIC_BINARY','SOURCE','PACMAN_PACKAGE','SCRIPT') NOT NULL DEFAULT 'INSTALLED', archid BIGINT DEFAULT NULL, UNIQUE(resourceid,pfn,type), FOREIGN KEY(archid) REFERENCES tc_sysinfo(id) ON DELETE CASCADE ); --COMMENT='Stores info about the physical transformation'; CREATE INDEX idx_tc_physicaltx ON tc_physicaltx(resourceid); CREATE INDEX idx_tc_physicaltx2 ON tc_physicaltx(type); CREATE INDEX idx_tc_physicaltx3 ON tc_physicaltx(pfn); CREATE INDEX idx_tc_physicaltx4 ON tc_physicaltx(archid); -- -- Table structure for table `tc_logicaltx` -- CREATE TABLE tc_logicaltx ( id BIGSERIAL PRIMARY KEY, namespace VARCHAR(255) NOT NULL, name VARCHAR(255) NOT NULL, version VARCHAR(20) NOT NULL, UNIQUE(namespace,name,version) ); --COMMENT='Stores the info about the logical transformation'; CREATE INDEX idx_tc_logicaltx ON tc_logicaltx(name); -- -- Table structure for table `tc_lfnprofile` -- CREATE TABLE tc_lfnprofile ( namespace VARCHAR(20) NOT NULL DEFAULT 'env', --enum('globus','condor','env','hints','dagman','vds') NOT NULL DEFAULT 'env', name VARCHAR(64) NOT NULL, value TEXT NOT NULL, lfnid BIGINT NOT NULL, PRIMARY KEY(namespace,name,value,lfnid), FOREIGN KEY(lfnid) REFERENCES tc_logicaltx(id) ON DELETE CASCADE ); --COMMENT='Stores the profile information for lfns'; CREATE INDEX idx_tc_lfnprofile ON tc_lfnprofile(lfnid); CREATE INDEX idx_tc_lfnprofile2 ON tc_lfnprofile(namespace); CREATE INDEX idx_tc_lfnprofile3 ON tc_lfnprofile(name); -- -- Table structure for table `tc_lfnpfnmap` -- CREATE TABLE tc_lfnpfnmap ( lfnid BIGINT NOT NULL, pfnid BIGINT NOT
NULL, PRIMARY KEY(lfnid,pfnid), FOREIGN KEY(lfnid) REFERENCES tc_logicaltx(id) ON DELETE CASCADE, FOREIGN KEY(pfnid) REFERENCES tc_physicaltx(id) ON DELETE CASCADE ); --COMMENT='Maps Lfns to Pfns'; CREATE INDEX idx_tc_lfnpfnmap_l ON tc_lfnpfnmap(lfnid); CREATE INDEX idx_tc_lfnpfnmap_p ON tc_lfnpfnmap(pfnid); -- -- Table structure for table `tc_pfnprofile` -- CREATE TABLE tc_pfnprofile ( namespace VARCHAR(20) NOT NULL DEFAULT 'env', --enum('globus','condor','env','hints','dagman','vds') NOT NULL DEFAULT 'env', name VARCHAR(64) NOT NULL, value TEXT NOT NULL, pfnid BIGINT NOT NULL, PRIMARY KEY(namespace,name,value,pfnid), FOREIGN KEY(pfnid) REFERENCES tc_physicaltx(id) ON DELETE CASCADE ); --COMMENT='Stores the profile information for pfns'; CREATE INDEX idx_tc_pfnprofile ON tc_pfnprofile(pfnid); CREATE INDEX idx_tc_pfnprofile2 ON tc_pfnprofile(namespace); CREATE INDEX idx_tc_pfnprofile3 ON tc_pfnprofile(name); pegasus-wms_4.0.1+dfsg/share/pegasus/sql/create-pg-rc.sql0000644000175000017500000000140411757531137022375 0ustar ryngerynge-- -- schema: org.griphyn.common.catalog.ReplicaCatalog -- driver: PostGreSQL 7.4.* -- $Revision: 150 $ -- INSERT INTO pegasus_schema(name,catalog,version) VALUES ('JDBCRC','rc','1.2'); CREATE SEQUENCE rc_lfn_id; CREATE TABLE rc_lfn ( id BIGINT DEFAULT nextval('rc_lfn_id'::text), lfn VARCHAR(255) NOT NULL, pfn VARCHAR(255) NOT NULL, CONSTRAINT pk_rc_lfn PRIMARY KEY(id), CONSTRAINT sk_rc_lfn UNIQUE(lfn,pfn) ); CREATE INDEX ix_rc_lfn ON rc_lfn(lfn); CREATE TABLE rc_attr ( id BIGINT, name VARCHAR(64) NOT NULL, value VARCHAR(255) NOT NULL, CONSTRAINT pk_rc_attr PRIMARY KEY(id,name), CONSTRAINT fk_rc_attr FOREIGN KEY(id) REFERENCES rc_lfn(id) ON DELETE CASCADE ); CREATE INDEX ix_rc_attr ON rc_attr(name); pegasus-wms_4.0.1+dfsg/share/pegasus/sql/delete-pg.sql0000644000175000017500000000021311757531137021767 0ustar ryngerynge--- --- schema: all --- driver: PostGreSQL 7.4.* --- $Revision: 2440 $ --- \i delete-pg-rc.sql \i delete-pg-tc.sql \i delete-pg-init.sql pegasus-wms_4.0.1+dfsg/share/pegasus/sql/create-my-tc.sql0000644000175000017500000000704011757531137022420 0ustar ryngerynge-- MySQL dump 9.10 -- -- Host: localhost Database: txcatalog -- ------------------------------------------------------ -- Server version 4.0.18-standard -- -- DROP ALL THE TABLES IF THEY EXIST -- DROP TABLE IF EXISTS tc_lfnpfnmap; DROP TABLE IF EXISTS tc_lfnprofile; DROP TABLE IF EXISTS tc_pfnprofile; DROP TABLE IF EXISTS tc_physicaltx; DROP TABLE IF EXISTS tc_logicaltx; DROP TABLE IF EXISTS tc_sysinfo; INSERT INTO pegasus_schema VALUES ('Database','tc','1.3',current_user(),current_timestamp(0)); -- -- Table structure for table `tc_sysinfo` -- INSERT INTO sequences VALUES('tc_sysinfo_id_seq',0); CREATE TABLE tc_sysinfo ( id BIGINT NOT NULL auto_increment, architecture VARCHAR(10) NOT NULL default '', os VARCHAR(10) NOT NULL default '', glibc VARCHAR(10) DEFAULT NULL, osversion VARCHAR(10) DEFAULT NULL, PRIMARY KEY(id), UNIQUE KEY unique_arch(architecture,os,glibc,osversion) ) TYPE=InnoDB COMMENT='Stores the different types of architecture'; -- -- Table structure for table `tc_physicaltx` -- INSERT INTO sequences VALUES('tc_physicaltx_id_seq',0); CREATE TABLE tc_physicaltx ( id BIGINT NOT NULL AUTO_INCREMENT, resourceid VARCHAR(255) NOT NULL DEFAULT '', pfn VARCHAR(255) DEFAULT NULL, type VARCHAR(20) NOT NULL DEFAULT 'INSTALLED', -- type enum('INSTALLED','STATIC_BINARY','DYNAMIC_BINARY','SOURCE','PACMAN_PACKAGE','SCRIPT') NOT NULL DEFAULT 'INSTALLED', archid BIGINT DEFAULT NULL, PRIMARY KEY(id), 
UNIQUE KEY unique_physicaltx(resourceid,pfn,type), INDEX (archid), FOREIGN KEY tc_physicaltx(archid) REFERENCES tc_sysinfo(id) ON DELETE CASCADE ) TYPE=InnoDB COMMENT='Stores info about the physical transformation'; -- -- Table structure for table `tc_logicaltx` -- INSERT INTO sequences VALUES('tc_logicaltx_id_seq',0); CREATE TABLE tc_logicaltx ( id BIGINT NOT NULL AUTO_INCREMENT, namespace VARCHAR(255) NOT NULL DEFAULT '', name VARCHAR(255) NOT NULL DEFAULT '', version VARCHAR(20) NOT NULL DEFAULT '', PRIMARY KEY(id), UNIQUE KEY unique_logicaltx(namespace,name,version) ) TYPE=InnoDB COMMENT='Stores the infor about the logical transformation'; -- -- Table structure for table `tc_lfnprofile` -- CREATE TABLE tc_lfnprofile ( -- namespace enum('globus','condor','env','hints','dagman','vds') NOT NULL DEFAULT 'env', namespace VARCHAR(20) NOT NULL DEFAULT 'env', name VARCHAR(64) NOT NULL DEFAULT '', value TEXT NOT NULL DEFAULT '', lfnid BIGINT NOT NULL DEFAULT '0', PRIMARY KEY(namespace,name,value(255),lfnid), INDEX (lfnid), FOREIGN KEY tc_lfnprofile(lfnid) REFERENCES tc_logicaltx(id) ON DELETE CASCADE ) TYPE=InnoDB COMMENT='Stores the profile information for lfns'; -- -- Table structure for table `tc_lfnpfnmap` -- CREATE TABLE tc_lfnpfnmap ( lfnid BIGINT NOT NULL DEFAULT '0', pfnid BIGINT NOT NULL DEFAULT '0', PRIMARY KEY(lfnid,pfnid), INDEX (lfnid), INDEX (pfnid), FOREIGN KEY tc_lfnpfnmap(lfnid) REFERENCES tc_logicaltx(id) ON DELETE CASCADE, FOREIGN KEY tc_lfnpfnmap(pfnid) REFERENCES tc_physicaltx(id) ON DELETE CASCADE ) TYPE=InnoDB COMMENT='Maps Lfns to Pfns'; -- -- Table structure for table `tc_pfnprofile` -- CREATE TABLE tc_pfnprofile ( -- namespace enum('globus','condor','env','hints','dagman','vds') NOT NULL DEFAULT 'env', namespace VARCHAR(20) NOT NULL DEFAULT 'env', name VARCHAR(64) NOT NULL DEFAULT '', value TEXT NOT NULL DEFAULT '', pfnid BIGINT NOT NULL default '0', PRIMARY KEY(namespace,name,value(255),pfnid), INDEX (pfnid), FOREIGN KEY tc_pfnprofile(pfnid) REFERENCES tc_physicaltx(id) ON DELETE CASCADE ) TYPE=InnoDB COMMENT='Stores the profile information for pfns'; pegasus-wms_4.0.1+dfsg/share/pegasus/sql/delete-pg-init.sql0000644000175000017500000000014711757531137022736 0ustar ryngerynge--- --- schema: all --- driver: PostGreSQL 7.4.* --- $Revision: 150 $ --- DROP TABLE pegasus_schema; pegasus-wms_4.0.1+dfsg/share/pegasus/sql/create-pg-init.sql0000644000175000017500000000051611757531137022737 0ustar ryngerynge--- --- schema: all --- driver: PostGreSQL 7.4.* --- $Revision: 150 $ --- CREATE TABLE pegasus_schema ( name VARCHAR(64) NOT NULL, catalog VARCHAR(16), version FLOAT, creator VARCHAR(8) DEFAULT current_user, creation TIMESTAMP WITH TIME ZONE DEFAULT current_timestamp(0), CONSTRAINT pk_pegasus_schema PRIMARY KEY(name) ); pegasus-wms_4.0.1+dfsg/share/pegasus/sql/delete-my-init.sql0000644000175000017500000000017511757531137022756 0ustar ryngerynge--- --- schema: all --- driver: MySQL 4.* --- $Revision: 150 $ --- DROP TABLE pegasus_schema CASCADE; DROP TABLE sequences; pegasus-wms_4.0.1+dfsg/share/pegasus/sql/delete-pg-tc.sql0000644000175000017500000000027011757531137022376 0ustar ryngerynge--DELETES the TC tables DROP TABLE tc_lfnpfnmap; DROP TABLE tc_lfnprofile; DROP TABLE tc_logicaltx; DROP TABLE tc_pfnprofile; DROP TABLE tc_physicaltx; DROP TABLE tc_sysinfo CASCADE; pegasus-wms_4.0.1+dfsg/share/pegasus/sql/schema_tool.py0000755000175000017500000000520711757531137022262 0ustar ryngerynge#!/usr/bin/env python """ A simple command-line tool to query and set schema version 
information in a stampede database. Will also upgrade an existing database to a newer schema version. Will need to be run as a user that has CREATE | ALTER TABLE permissions in the database. Requires a standard SQLALchemy connection string. """ __rcsid__ = "$Id: schema_tool.py 29514 2012-01-23 18:22:03Z mgoode $" __author__ = "Monte Goode" import os import sys import logging import subprocess # use pegasus-config to get basic pegasus settings bin_dir = os.path.join(os.path.normpath(os.path.join(os.path.dirname(sys.argv[0]))), "../../../bin") pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --noeoln --python" lib_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] pegasus_config = os.path.join(bin_dir, "pegasus-config") + " --noeoln --python-externals" lib_ext_dir = subprocess.Popen(pegasus_config, stdout=subprocess.PIPE, shell=True).communicate()[0] # Insert this directory in our search path os.sys.path.insert(0, lib_ext_dir) os.sys.path.insert(0, lib_dir) from netlogger.analysis.schema.schema_check import ConnHandle, SchemaCheck from netlogger.nllog import OptionParser, get_logger, get_root_logger def main(): usage = "%prog {-c | -u} connString='required' mysql_engine='optional'" desc = ' '.join(__doc__.split()) parser = OptionParser(usage=usage, description=desc) parser.add_option('-c', '--check', dest='schema_check', action='store_true', default=False, help="Perform a schema check") parser.add_option('-u', '--upgrade', dest='upgrade', action='store_true', default=False, help="Upgrade database to current version.") options, args = parser.parse_args(sys.argv[1:]) log = get_logger(__file__) if len(args) == 0: parser.print_help() parser.error("Option flag and connection string required.") if log.getEffectiveLevel() >= logging.DEBUG: get_root_logger().setLevel(logging.INFO) num_modes = (0,1)[bool(options.schema_check)] + (0,1)[bool(options.upgrade)] if num_modes > 1: parser.error('Choose only one option flag') init = {} for a in args: k,v = a.split('=') if k in ['connString', 'mysql_engine']: init[k] = v conn = ConnHandle(**init) s_check = SchemaCheck(conn.get_session()) if options.schema_check: log.info('Executing schema check') s_check.check_schema() elif options.upgrade: log.info('Performing upgrade') s_check.upgrade() pass if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/share/pegasus/sql/create-pg.sql0000644000175000017500000000021211757531137021767 0ustar ryngerynge--- --- schema: all --- driver: PostGreSQL 7.4.* --- $Revision: 2440 $ --- \i create-pg-init.sql \i create-pg-tc.sql \i create-pg-rc.sql pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/0000755000175000017500000000000011757531667020630 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/share/pegasus/visualize/summary.pl0000755000175000017500000003673311757531137022671 0ustar ryngerynge#!/usr/bin/env perl # # summarizes a given DAGMan Condor log file # # $Id: summary.pl 2393 2010-09-16 20:33:19Z gmehta $ # require 5.005; use strict; use Fcntl; use DB_File; use POSIX qw(); use Socket; use File::Spec; use File::Basename; use Time::Local; use Getopt::Long; my $year = (localtime())[5]; my ($tag,$job,$mon,$day,$h,$m,$s,@lines); my ($help,$kickstart,%submit,%output); my $result = GetOptions('help' => \$help, 'kickstart' => \$kickstart ); if ( $help || ( @ARGV == 0 && -t STDIN ) ) { print "Usage: $0 [--kickstart] condor.log [..]\n"; print " --kickstart enable kickstart record parsing for true runtime.\n"; exit 1; } BEGIN { $main::dnsfn = File::Spec->catfile( $ENV{HOME}, '.dns' ); 
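# (Explanatory comment added in this edition; not in the original source.)
# Tie the %main::dns hash to an on-disk DB_File database (~/.dns) so that
# host name data used by this script persists as a cache across runs.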
$main::dbref = tie( %main::dns, 'DB_File', $main::dnsfn ) or die "dbfile error on $main::dnsfn: $!\n"; } END { untie %main::dns; undef $main::dbref; } $main::minstamp = 1E300; $main::maxstamp = -$main::minstamp; foreach my $log ( @ARGV ) { local $/="...\n"; if ( open( LOG, "<$log" ) ) { while ( ) { if ( m{^((...)\s+\((\d+)\.\d+\.\d+\)\s+(..)/(..) (..):(..):(..)\s+)} ) { ($tag,$job,$mon,$day,$h,$m,$s) = ($2,$3,$4,$5,$6,$7,$8); substr( $_, 0, length($1), '' ); @lines = grep { length($_) > 0 } split /\n/, $_; process( $ARGV, $tag, $job, timelocal($s,$m,$h,$day,$mon-1,$year), @lines ); } else { warn "$ARGV:$.: illegal log line\n"; } } close LOG; } else { warn "open $log: $!\n"; } my ($job,$sfn); my $dag = substr( $log, 0, rindex($log,'.') ) . '.dag'; $/="\n"; if ( open( DAG, "<$dag" ) ) { while ( ) { # map job id to submit file (to find ks) if ( /^\s*job\s+(\S+)\s+(\S+)/i ) { ($job,$sfn) = ($1,$2); $submit{$job} = $sfn; if ( open( SUB, "<$sfn" ) ) { while ( ) { $output{$job} = $1 if ( /^\s*output\s*=\s*(\S+)/ ); } close SUB; } else { warn "open $sfn: $!\n"; } } } close DAG; } else { warn "open $dag: $!\n"; } } #$/=""; # # FIXME: Collapse jobs into DAG nodes (submit files) # This is for the special occasion that jobs are manually killed (condor_rm), # and thus create a new Condor job to rerun the same DAG node. # sub hsort { # symbolically sort by domainname my $aa = join( '.', reverse split /\./, $a ); my $bb = join( '.', reverse split /\./, $b ); $aa cmp $bb; } my (%replan,%finish,%nack); foreach my $node ( keys %main::node ) { my $n = 0 + @{$main::node{$node}}; # jobs that were replanned my ($job,$host); for ( my $i=0; $i < $n-1; ++$i ) { $job = $main::node{$node}[$i]; $host = $main::db{$job}{'017'}[1]; $replan{$host}++; $nack{$job}=1; # remember replanned job id } # jobs that executed $job = $main::node{$node}[$n-1]; $host = $main::db{$job}{'017'}[1]; $finish{$host}++; } # # failure rate # printf( "%-32s %6s %6s %6s %6s %8s %6s\n", 'JOBMANAGER', 'SUBMIT', 'BEGUN', 'SUCCSS', 'FAILED', 'REPLAN', 'FINISH' ); my (%good,@s,$max,$count,@median,$replan,$finish); foreach my $host ( sort hsort keys %main::host ) { next if length($host) < 1; my @x = @{$main::host{$host}}; my $d = $x[2]-$x[1]; printf( "%-32s %6u %6u %6u %6u %8u %6u\n", $host, $x[2], $x[0], $x[1], $d, $replan{$host}, $finish{$host} ); $replan += $replan{$host}; $finish += $finish{$host}; $s[0] += $x[0]; $s[1] += $x[2]; $s[2] += $x[1]; $good{$host} = $x[1] if $x[1] > 0; $max = $x[1] if $x[1] > $max; $count++; push( @median, $x[1] ); } printf( "%-32s %6s %6s %6s %6s %8s %6s\n", '', '-' x 6, '-' x 6, '-' x 6, '-' x 6, '-' x 6, '-' x 6 ); printf( "%-32s %6u %6u %6u %6u %8u %6u\n", 'TOTAL JOBS', @s[1,0,2], $s[1]-$s[2], $replan, $finish ); print "\n"; # # Marco's detail # sub adjust ($) { my $x = shift; if ( $x >= 0 ) { sprintf( "%6u", $x ); } else { '-'; } } printf( "%-16s %6s %-15s %6s %6s %6s %6s", 'NODENAME', 'JOBID', 'STARTTIME', 'TOTAL', 'SUBMIT', 'PEND', 'RUN' ); printf( " %8s", 'KS' ) if $kickstart; printf( " %s\n", 'GATEKEEPER' ); @s = (); my ($ks,%ks,$ksc); foreach my $node ( sort { $main::node{$a}->[0] <=> $main::node{$b}->[0] } keys %main::node ) { my $flag = undef; foreach my $job ( @{$main::node{$node}} ) { next unless $job; my $host = $main::db{$job}{'017'}[1]; my @ts = ( $main::db{$job}{'000'}[0], $main::db{$job}{'017'}[0], $main::db{$job}{'001'}[0], $main::db{$job}{'005'}[0] ); my @d = ( $ts[3] - $ts[0], $ts[1] - $ts[0], $ts[2] - $ts[1], $ts[3] - $ts[2] ); printf( "%-16s %6s %15s %6s %6s %6s %6s", ($flag ? 
'' : substr($node,-16)), $job, mkiso($ts[0]), adjust($d[0]), adjust($d[1]), adjust($d[2]), adjust($d[3]) ); $flag=1; unless ( $nack{$job} ) { for ( my $i=0; $i<@d; ++$i ) { $s[$i] += $d[$i]; } # read kickstart records if ( $kickstart ) { my $t1 = dirname($node); # remove node name my $t2 = dirname($t1); # my $fn = ( defined $t2 ? "$t2/" : '' ) . # basename($node) . '.out'; my $fn = ( defined $t2 && $t2 ne '/' ? "$t2/" : '' ) . $output{basename($node)}; if ( -s $fn && open( KS, "<$fn" ) ) { while ( ) { next if ( index($_,'invocation ') == -1 ); if ( /duration="([0-9.]+)"/ ) { my $x = $1 + 0.0; $ks{$job} = $x; $ks += $x; printf( " %8.1f", $x ); $ksc++; } last; } close KS; } else { warn "open $fn: $!\n"; } } } else { printf( " %8s", '' ) if $kickstart; } # last column print " $host\n"; } } printf( "%-16s %6s %-15s %6s %6s %6s %6s", '', '', '', '-----', '-----', '------', '------' ); printf( " %8s", '---------' ) if $kickstart; print "\n"; printf( "%-16s %6s %-15s %6u %6u %6u %6u", '', '', 'SUCCESSFUL JOBS', @s ); printf( " %8.1f", $ks ) if $kickstart; print "\n"; my $diff = $main::maxstamp - $main::minstamp; printf( "%-16s %6u %-15s %6.2f %6.2f %6.2f %6.2f", 'Time in DAGMans', $diff, 'speedups', map { $_ / $diff } @s ); printf( " %8.2f", $ks / $diff ) if $kickstart; print "\n"; print "\n"; ## ## job to node mapping ## #printf "%-16s %6s %s\n", 'NODE', 'JOBID', 'SITE'; #foreach my $node ( keys %main::node ) { # # only over-defined nodes # if ( @{$main::node{$node}} > 1 ) { # my $flag = undef; # foreach my $job ( @{$main::node{$node}} ) { # printf( "%-16s %6u %s\n", ( $flag ? '' : $node), $job, # $main::db{$job}{'017'}[1] ); # $flag=1; # } # } #} #print "\n"; # # involvement of successful jobs # printf "%-32s %6s %s\n", 'JOBMANAGER', 'OK', 'INVOLVEMENT'; foreach my $host ( sort { $good{$a} <=> $good{$b} } keys %good ) { printf( "%-32s %6u %s\n", $host, $good{$host}, produce( '#', $good{$host}, 39, $max, median(@median) ) ); } print "\n"; print "# from here, look only at jobs that were not replanned:\n\n"; # # site delays # @median = (); my (%a,%b,%c,%d,%e,%r,%r1,%median,$min,$med,$avg); foreach my $job ( sort keys %main::job ) { # only rates successful jobs that were not replanned next if $nack{$job}; # rate my $host = myhost($main::db{$job}{'017'}[1]); my @ts = ( $main::db{$job}{'000'}[0], $main::db{$job}{'017'}[0], $main::db{$job}{'001'}[0], $main::db{$job}{'005'}[0] ); push( @{$a{$host}}, $ts[1] - $ts[0] ); push( @{$b{$host}}, $ts[2] - $ts[1] ); push( @{$c{$host}}, $ts[3] - $ts[2] ); push( @{$d{$host}}, $ts[3] - $ts[0] ); push( @{$e{$host}}, $ks{$job} ) if $kickstart; } foreach my $host ( keys %d ) { my $a = median( @{$a{$host}} ); my $b = median( @{$b{$host}} ); my $c = median( @{$c{$host}} ); my $d = median( @{$d{$host}} ); push( @{$median[0]}, $a ); push( @{$median[1]}, $b ); push( @{$median[2]}, $c ); push( @{$median[3]}, $d ); if ( $kickstart ) { my $e = median( @{$e{$host}} ); $median{$host} = [ $a, $b, $c, $d, $e ]; push( @{$median[4]}, $e ); $r1{$host} = [ $e, $d-$c, $d+$e-$c, $e / ($d-$c) ]; } else { # no kickstart $median{$host} = [ $a, $b, $c, $d ]; } $r{$host} = [ $b, $c, $b+$c, $c / $b ]; } # # other sections # ($min,$max,$med,$avg) = minmax( @{$median[0]} ); printf "%-32s %6s %s\n", 'JOBMANAGER', '[s]', 'TIME SPENT IN SUBMITTED'; foreach my $host ( sort { $median{$a}->[0] <=> $median{$b}->[0] } keys %median ) { printf( "%-32s %6u %s\n", $host, $median{$host}->[0], produce( '#', $median{$host}->[0], 39, $max, $med ) ); } print "\n"; ($min,$max,$med,$avg) = minmax( @{$median[1]} 
); printf "%-32s %6s %s\n", 'JOBMANAGER', '[s]', 'TIME SPENT IN PENDING'; foreach my $host ( sort { $median{$a}->[1] <=> $median{$b}->[1] } keys %median ) { printf( "%-32s %6u %s\n", $host, $median{$host}->[1], produce( '#', $median{$host}->[1], 39, $max, $med ) ); } print "\n"; ($min,$max,$med,$avg) = minmax( @{$median[2]} ); printf "%-32s %6s %s\n", 'JOBMANAGER', '[s]', 'TIME SPENT IN RUNNING'; foreach my $host ( sort { $median{$a}->[2] <=> $median{$b}->[2] } keys %median ) { printf( "%-32s %6u %s\n", $host, $median{$host}->[2], produce( '#', $median{$host}->[2], 39, $max, $med ) ); } print "\n"; if ( $kickstart ) { ($min,$max,$med,$avg) = minmax( @{$median[4]} ); printf "%-32s %8s %s\n", 'JOBMANAGER', '[s]', 'JOBTIME ACC2 KICKSTART'; foreach my $host ( sort { $median{$a}->[4] <=> $median{$b}->[4] } keys %median ) { printf( "%-32s %8.2f %s\n", $host, $median{$host}->[4], produce( '#', $median{$host}->[4], 39, $max, $med ) ); } print "\n"; } ($min,$max,$med,$avg) = minmax( @{$median[3]} ); printf "%-32s %6s %s\n", 'JOBMANAGER', '[s]', 'TOTAL TIME'; foreach my $host ( sort { $median{$a}->[3] <=> $median{$b}->[3] } keys %median ) { printf( "%-32s %6u %s\n", $host, $median{$host}->[3], produce( '#', $median{$host}->[3], 39, $max, $med ) ); } print "\n"; printf "%-32s %6s %s\n", 'JOBMANAGER', '[s]', 'SUBMITTED+PENDING+RUNNING'; foreach my $host ( sort { $median{$a}->[3] <=> $median{$b}->[3] } keys %median ) { my @x = @{$median{$host}}; $x[3] = $x[0] + $x[1] + $x[2]; my @y = ( $x[0]*39.0/$x[3], $x[1]*39.0/$x[3], $x[2]*39.0/$x[3] ); #printf( "\t%.3f %.3f %.3f %.3f\n", @y, $x[3] ); printf( "%-32s %6.0f %s%s%s\n", $host, $x[3], 's' x $y[0], 'p' x $y[1], 'r' x $y[2] ); } print "\n"; printf "%-32s %6s %s\n", 'JOBMANAGER', 'RATIO', 'RUNNING / PENDING'; foreach my $host ( sort { $r{$a}->[3] <=> $r{$b}->[3] } keys %r ) { my @x = @{$r{$host}}; printf( "%-32s %6.2f %s%s\n", $host, $x[3], 'p' x ( ($x[0] * 39.0 ) / $x[2] ), 'r' x ( ($x[1] * 39.0 ) / $x[2] ) ); } print "\n"; if ( $kickstart ) { printf "%-32s %6s %s\n", 'JOBMANAGER', 'RATIO', 'KICKSTART / (TOTAL-RUNNING)'; foreach my $host ( sort { $r1{$a}->[3] <=> $r1{$b}->[3] } keys %r1 ) { my @x = @{$r1{$host}}; printf( "%-32s %6.2f %s%s\n", $host, $x[3], '.' x ( ($x[0] * 39.0 ) / $x[2] ), 'x' x ( ($x[1] * 39.0 ) / $x[2] ) ); } print "\n"; } exit 0; # # --- helpers # sub produce ($$$$$) { my $ch = shift; my $x = shift; my $scale = shift; my $max = shift; my $med = shift; my $result; if ( $max > 10 * $med ) { # there are extreme values present my $cutoff = 2 * $med; if ( $x < $cutoff ) { # smaller than cutoff, regular scaling $result = $ch x ( ($x * $scale) / $cutoff ); } else { # larger than cutoff, extreme value my $f = ($max+1) / $scale; while ( $x > 0 ) { $result .= '+'; $x -= $f; } } } else { # no extreme values present, simple $result = $ch x ( ($x * $scale) / $max ); } # done $result; } sub minmax { my $min = 1E300; my $max = -1E300; my $med = median(@_); my $sum = 0.0; foreach my $x ( @_ ) { $min = $x if $x < $min; $max = $x if $x > $max; $sum += $x; } my $n = 0 + @_; ( $min, $max, $med, ($n ? 
$sum / $n : undef) ); } sub median { my @x = sort { $a <=> $b } @_; my $n = 0 + @x; if ( ($n & 1) == 1 ) { $x[$n/2]; } else { ($x[$n/2-1]+$x[$n/2])/2; } } sub myhost($) { # purpose: send hostname back and forth through DNS # paramtr: hostname or ipv4 # returns: canonical primary name of host my $host = lc shift; my ($stamp,$value) = split /\#/, $main::dns{$host}; unless ( defined $stamp && $stamp > time() ) { $stamp = time() + 604800; $value = (gethostbyaddr( inet_aton($host), AF_INET ))[0]; $main::dns{$host} = join('#',($stamp,$value)); } $value; } sub mkiso(;$) { my @ts = localtime(shift() || time()); POSIX::strftime( "%Y%m%dT%H%M%S", @ts[0..6]); }
# Condor user-log event codes, for reference:
# Job submitted                 ULOG_SUBMIT                 = 0,
# Job now running               ULOG_EXECUTE                = 1,
# Error in executable           ULOG_EXECUTABLE_ERROR       = 2,
# Job was checkpointed          ULOG_CHECKPOINTED           = 3,
# Job evicted from machine      ULOG_JOB_EVICTED            = 4,
# Job terminated                ULOG_JOB_TERMINATED         = 5,
# Image size of job updated     ULOG_IMAGE_SIZE             = 6,
# Shadow threw an exception     ULOG_SHADOW_EXCEPTION       = 7,
# Generic Log Event             ULOG_GENERIC                = 8,
# Job Aborted                   ULOG_JOB_ABORTED            = 9,
# Job was suspended             ULOG_JOB_SUSPENDED          = 10,
# Job was unsuspended           ULOG_JOB_UNSUSPENDED        = 11,
# Job was held                  ULOG_JOB_HELD               = 12,
# Job was released              ULOG_JOB_RELEASED           = 13,
# Parallel Node executed        ULOG_NODE_EXECUTE           = 14,
# Parallel Node terminated      ULOG_NODE_TERMINATED        = 15,
# POST script terminated        ULOG_POST_SCRIPT_TERMINATED = 16,
# Job Submitted to Globus       ULOG_GLOBUS_SUBMIT          = 17,
# Globus Submit failed          ULOG_GLOBUS_SUBMIT_FAILED   = 18,
# Globus Resource Up            ULOG_GLOBUS_RESOURCE_UP     = 19,
# Globus Resource Down          ULOG_GLOBUS_RESOURCE_DOWN   = 20
sub process ($$$$@) { my $fn = shift; my $tag = shift; my $job = shift; my $stamp = shift; # my $msg = join('', @_); # adjust global stamps $main::minstamp = $stamp if $main::minstamp > $stamp; $main::maxstamp = $stamp if $main::maxstamp < $stamp; if ( $tag eq '000' ) { # SUBMIT my ($host,$port,$node); ($host,$port) = (myhost($1),$2) if ( $_[0] =~ /from host: <((?:\d{1,3}\.){3}\d{1,3}):(\d+)>/ ); $node="$fn/$1" if ( $_[1] =~ /DAG Node: (.+)/ ); $main::db{$job}{$tag} = [ $stamp, $host, $port, $node ]; push( @{$main::node{$node}}, $job ); } elsif ( $tag eq '017' ) { # GLOBUS SUBMIT my ($host,$port,$jm); $jm=$1 if ( $_[1] =~ /\/jobmanager-(.+)/ ); ($host,$port) = (myhost($1),$2) if ( $_[2] =~ m{://([^:/]+):(\d+)} ); $main::db{$job}{$tag} = [ $stamp, $host, $port, $jm || 'fork' ]; $main::host{$host}[2]++; } elsif ( $tag eq '001' ) { # EXECUTE my $host; $host=myhost($1) if ( $_[0] =~ /on host: (.*)/ ); $main::db{$job}{$tag} = [ $stamp, $host ]; $main::host{$host}[0]++; } elsif ( $tag eq '005' || $tag eq '016' ) { # (POST SCRIPT) TERMINATE my $ec; $ec=$1 if ( $_[1] =~ /return value (\d+)/ ); $main::db{$job}{$tag} = [ $stamp, $ec ]; if ( $ec != 0 ) { $main::fail{$job}{$tag}{$stamp}=$ec; } else { # remember good jobs my $host = $main::db{$job}{'001'}[1]; $main::host{$host}[1]++ if ( $tag eq '016' ); $main::job{$job} = 1; } } elsif ( $tag eq '012' ) { # HELD my ($ec,$reason); ($ec,$reason)=($1,$2) if ( $_[1] =~ /Globus error (\d+): (.+)/ ); $main::db{$job}{$tag} = [ $stamp, $ec, $reason ]; $main::fail{$job}{$tag}{$stamp}=$ec if ( $ec != 0 ); } elsif ( $tag eq '013' ) { # RELEASED $main::db{$job}{$tag} = [ $stamp ]; } elsif ( $tag eq '009' ) { # ABORTED (e.g. by condor_rm) push( @{$main::fail{$job}{$tag}}, $stamp ); } else { # AOB } }
pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/README0000644000175000017500000000404311757531137021501 0ustar ryngeryngeThe newly revamped show-run replaces show-chimera. There is one subtle change in commandline options, which should make things easier:

OLD STYLE                NEW STYLE
---------                ---------
-b base (mandatory)      -b base is optional
-l clog (mandatory)      log location is "found"
n.a.                     -D dag file, mandatory

The show-run tool relies on a number of files: the .dag file, the .log file that all your Condor jobs logged into, the .sub files, and the .out files produced by kickstart. The submit files will be found relative to the base directory, from information inside the DAG file. The kickstart output records are expected to be found in the same location.

gensim
------
Produces job and file statistics from execution logs. Outputs are:
 - DAG and DAX annotated with job runtimes, file sizes (if available), and source locations
 - relative jobstate.log
 - job statistics (kickstart runtime, delays due to DAGMan, Condor-G, etc.)
 - file statistics (name, size, initial location)

Usage: gensim --dag=dagname --jobstate|--no-jobstate --jobstate-log=log --output=dir --help

Mandatory arguments:
 -d|--dag dagname       name of the dag file to process
 -o|--output dir        write outputs in given directory
Complex arguments:
 -j|--jobstate          yes, we have jobstate
 --no-jobstate          no, we don't have jobstate
 -l|--jobstate-log log  jobstate log to process
Optional arguments:
 -h|--help              print this help message and exit
 -i|--input dir         read inputs from given directory

gentimes
--------
Produces another form of job runtime statistics. Outputs are: for each transformation, mean runtime (as reported by kickstart) and variance.

Usage: gentimes --help --output=file

Optional arguments:
 -h|--help         Print this help message.
 -o|--output file  Write outputs to specified file.
 -x|--extra        Generate individual statistics for each run directory.
pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/dag2image0000755000175000017500000002016211757531137022367 0ustar ryngerynge#!/usr/bin/env perl # # Convert DAG into image representing the workflow # use 5.006; use strict; use warnings; use Getopt::Long qw(:config no_bundling no_ignore_case); use File::Temp (); use File::Spec; use File::Basename qw(basename dirname); my @relations = (); my @jobs = (); my @postjobs = (); my %color = ( 'local' => [ '"#CCCCFF"', '"#5555FF"' ], 'fork' => [ '"#CCFFCC"', '"#00FF00"' ], 'batch' => [ 'yellow', 'gold' ] ); my $image = ''; my $verbose = 0; my $concentrate = 0; my $size = 3; my $processor = 'dot'; my $nodeshape = 'ellipse'; my $label = 'verbose'; my $arrowhead = 'normal'; my $arrowsize = '1.0'; my $type = 'eps'; my $keep; my $remove; my @extra = (); sub usage { my $basename = basename( $0, '.pl' ); print << "EOF"; Usage: $basename -o imagefile [options] dagfile $basename --output imagefile [options] dagfile Mandatory arguments: dagfile Path to the .dag file for Condor DAGMan. Optional arguments: -o|--output fn Path where to put the output, default is stdout. The use of this option is highly recommended. -t|--type type Output image representation, default eps. -s|--size n 1: 10x8, 2: 17x11, 3: unlimited, default 3. -l|--label style (none,info,submit,verbose) Labelling of the workflow nodes, default is verbose. -c|--color t=a,b For given job type t, use color a for single jobs and color b for multi jobs. You need to dquote \#RGB specs. Supported types are: local, fork, batch.
--processor s GraphViz graph processor, default is dot. --extra 'ins' Add instruction to graph\'s top-level spec. --keep fn If specified, copy GraphViz file to fn. --remove re regular expression of nodes to preclude from graph. --concentrate Apply -Gconcentrate to processor options. --shape shape (circle,ellipse,doublecircle,point,square,triangle) Shape to draw the graph nodes with, default is ellipse. --ahead style (normal,dot,invdot,open,invnormal) Arrow-head drawing, default style is normal. --asize size Arrow size. EOF exit(1); } usage unless @ARGV; my $result = GetOptions( 'help|h' => \&usage , 'color|c=s' => sub { my ($k,$v) = split /[:=]/, $_[1], 2; $k = lc($k); die "ERROR: Illegal job type $k\n" unless exists $color{$k}; $color{$k} = [ split /,/, $v ]; } , 'output|o=s' => \$image , 'type|t=s' => \$type , 'size|s=i' => \$size , 'label|l=s' => sub { $label = lc($_[1]) } , 'processor=s' => \$processor , 'extra=s@' => \@extra , 'keep=s' => \$keep , 'remove=s' => \$remove , 'concentrate' => \$concentrate , 'shape=s' => \$nodeshape , 'ahead=s' => \$arrowhead , 'asize=s' => \$arrowsize ) || die "ERROR: Unknown option in arguments:\n@ARGV\n"; my $dagfile = shift || die "ERROR: The .dag file is a mandatory argument, see --help\n"; die "ERROR: $dagfile does not exist\n" unless -e $dagfile; die "ERROR: $dagfile not readable\n" unless -r _; my $dagdir = dirname(File::Spec->rel2abs($dagfile)); my $tmp = $ENV{TMPDIR} || $ENV{TMP} || $ENV{TEMP} || File::Spec->tmpdir() || '/tmp'; my $dot = new File::Temp( TEMPLATE => 'dotXXXXXX', DIR => $tmp, SUFFIX => '.dot', UNLINK => 1 ); my $dotfile = $dot->filename; open( DAG, "<$dagfile" ) || die "ERROR: open $dagfile $!\n"; sub process_submit($) { # purpose: slurp submit file contents # paramtr: $fn (IN): submit filename # returns: hash of submit command to value # my $subfn = File::Spec->catfile( $dagdir, shift() ); my %result = (); local(*SUB); if ( open( SUB, "<$subfn" ) ) { my ($k,$v); while ( <SUB> ) { next if substr($_,0,1) eq '#'; s/[\r\n\t ]+$//; s/^\s*//; next unless length($_); ($k,$v) = split /\s*=\s*/, $_, 2; $v=substr($v,1,-1) while ( defined $v && length($v) > 2 && ( substr($v,0,1) eq '"' || substr($v,0,1) eq "'" ) ); $k = lc($k) if ( $k =~ /^[a-z]/i ); $result{$k} = $v; } close SUB; } else { warn "Warning: read $subfn: $!\n"; } %result; } # set up counter for statistics my %count = ( dep => 0, job => 0, post => 0, fork => 0, batch => 0, local => 0 ); print $dot "digraph E {\n"; foreach my $e ( @extra ) { print $dot "$e\n"; } if ( $size==2 ) { print $dot " size=\"17.0,11.0\"\n ratio=auto\n"; } elsif ( $size==1 ) { print $dot " size=\"11.5,10.0\"\n ratio=auto\n"; } print $dot " node [shape=$nodeshape fontname=Helvetica]\n"; print $dot " edge [arrowhead=$arrowhead, arrowsize=$arrowsize]\n"; my %remove = (); # which DAG-IDs to remove while ( <DAG> ) { next if /^\#/; # skip comments s/^\s+//; # remove leading whitespace s/[ \r\n\t]+$//; # remove trailing whitespace including CRLF if ( /^PARENT\s/i && /\sCHILD\s/i ) { s/^PARENT\s+//i; my ($parents,$children) = split /\s+CHILD\s+/i, $_, 2; foreach my $parent ( split( /\s+/, $parents ) ) { next if ( exists $remove{$parent} && $remove{$parent} ); foreach my $child ( split( /\s+/, $children ) ) { # one line per link next if ( exists $remove{$child} && $remove{$child} ); my $what = "\"$parent\" -> \"$child\""; $relations[$count{dep}] = $what; $count{dep}++; print $dot " $what\n"; print STDERR "Adding arc $what\n" if $verbose; } } } elsif ( /^JOB\s/i ) { # special job processing my @x = split; my $job =
$jobs[$count{job}] = $x[1]; if ( defined $remove && $x[2] =~ m/$remove/o ) { $remove{$x[1]} = $x[2]; next; } my %submit = process_submit($x[2]); my $tempstring = " \"$job\""; my $templabel = ''; $templabel=basename( $x[2], '.sub' ) if $label eq 'submit'; $templabel=$job if $label eq 'verbose'; # local or remote job? my $multijob = 1; $multijob = ( $submit{executable} =~ /(seq|mpi)exec/ ) if exists $submit{executable}; if ( lc($submit{universe}) eq 'grid' || ! exists $submit{universe} ) { # remote if ( $submit{'grid_resource'} =~ /jobmanager-fork/ ) { $tempstring .= " [color=" . $color{fork}[$multijob] . ", style=filled"; $templabel = "fork-$count{fork}" if $label eq 'info'; $count{fork}++; } else { $tempstring .= " [color=" . $color{batch}[$multijob] . ", style=filled"; $templabel = "batch-$count{fork}" if $label eq 'info'; $count{batch}++; } } elsif ( lc($submit{universe}) eq 'globus' ) { # remote die "Sorry, not supported any more"; } else { # local $tempstring .= " [color=" . $color{local}[$multijob] . ", style=filled"; $templabel = "local-$count{local}" if $label eq 'info'; $count{local}++; } print $dot $tempstring, ', label="', $templabel, "\"]\n"; print STDERR "Adding node $job\n" if $verbose; $count{job}++; } elsif ( /^POSTJOB\s/i ) { $postjobs[$count{post}]=$_; $count{post}++; } } print STDERR "$count{job} jobs, $count{dep} dependencies, $count{post} post scripts\n"; close DAG; print $dot "}\n"; if ( defined $keep ) { open( KEEP, ">$keep" ) || die "ERROR: open $keep: $!\n"; seek( $dot, 0, 0 ) || die "ERROR: seek $dotfile: $!\n"; while ( <$dot> ) { print KEEP ; } close KEEP; } close $dot; print STDERR "Written dot file $dotfile\n"; print STDERR "Generating Image...\n"; my $command = $processor; $command .= " -Gconcentrate" if $concentrate; if ( $type eq 'eps' ) { # eps is not supported by dot. # This is evil trickery to generate LaTeX figures ;-P $command .= " -Tps2 $dotfile"; $command .= ' | perl -pe \'$_="%!PS-Adobe-3.0 EPSF-3.0\n" if ( /^%!PS-Adobe-/ )\''; $command .= " > $image" if $image; } else { # the normal path $command .= " -o$image" if $image; $command .= " -T$type $dotfile"; } $result=''; if ( $image ) { $result = `$command`; } else { system($command); } my $rc = $?; if ( ($rc & 127) > 0 ) { print STDERR $result if $image; die "ERROR: Died on signal", ($rc & 127), "\n"; } elsif ( ($rc >> 8) > 0 ) { print STDERR $result if $image; die "ERROR: Unsuccessful execution: ", $rc >> 8, "\n"; } else { print STDERR "Successful graphics generation\n"; exit 0; } pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/show-run0000755000175000017500000010346111757531137022335 0ustar ryngerynge#!/usr/bin/env perl # # preparse Condor combined log and gridstart output for ploticus drawing. # # $Id: show-run 3190 2010-12-17 23:26:04Z voeckler $ require 5.6.0; use strict; use Socket; use POSIX qw(); use Time::Local; use File::Temp; use File::Basename; use File::Spec; use Getopt::Long qw(:config bundling); use IO::File; use Data::Dumper; use XML::Parser; use XML::Parser::Expat; use lib File::Basename::dirname($0); use Common; sub ungarble($) { # purpose: permit oct, dec and hex input # paramtr: input string # returns: numeric value # remarks: oct() also does hex, bin, ... but not dec my $x = shift; substr($x,0,1) eq '0' ? 
oct($x) : $x+0; } %main::status = ( '000' => [ 'Job submitted', 'ULOG_SUBMIT' ], '001' => [ 'Job executing', 'ULOG_EXECUTE' ], '002' => [ 'Error in executable', 'ULOG_EXECUTABLE_ERROR' ], '003' => [ 'Job was checkpointed', 'ULOG_CHECKPOINTED' ], '004' => [ 'Job evicted from machine', 'ULOG_JOB_EVICTED' ], '005' => [ 'Job terminated', 'ULOG_JOB_TERMINATED' ], '006' => [ 'Image size of job updated', 'ULOG_IMAGE_SIZE' ], '007' => [ 'Shadow threw an exception', 'ULOG_SHADOW_EXCEPTION' ], '008' => [ 'Generic log event', 'ULOG_GENERIC' ], '009' => [ 'Job aborted', 'ULOG_JOB_ABORTED' ], '010' => [ 'Job was suspended', 'ULOG_JOB_SUSPENDED' ], '011' => [ 'Job was unsuspended', 'ULOG_JOB_UNSUSPENDED' ], '012' => [ 'Job was held', 'ULOG_JOB_HELD' ], '013' => [ 'Job was released', 'ULOG_JOB_RELEASED' ], '014' => [ 'Parallel node executed', 'ULOG_NODE_EXECUTE' ], '015' => [ 'Parallel node terminated', 'ULOG_NODE_TERMINATED' ], '016' => [ 'POST script terminated', 'ULOG_POST_SCRIPT_TERMINATED' ], '017' => [ 'Job submitted to Globus', 'ULOG_GLOBUS_SUBMIT' ], '018' => [ 'Globus submit failed', 'ULOG_GLOBUS_SUBMIT_FAILED' ], '019' => [ 'Globus resource up', 'ULOG_GLOBUS_RESOURCE_UP' ], '020' => [ 'Globus resource down', 'ULOG_GLOBUS_RESOURCE_DOWN' ], '021' => [ 'Remote error', 'ULOG_REMOTE_ERROR' ], '022' => [ 'RSC socket lost', 'ULOG_JOB_DISCONNECTED' ], '023' => [ 'RSC socket re-established', 'ULOG_JOB_RECONNECTED' ], '024' => [ 'RSC reconnect failure', 'ULOG_JOB_RECONNECT_FAILED' ], '025' => [ 'Grid machine is up', 'ULOG_GRID_RESOURCE_UP' ], '026' => [ 'Grid machine is down', 'ULOG_GRID_RESOURCE_DOWN' ], '027' => [ 'Job submitted to Grid', 'ULOG_GRID_SUBMIT' ], '028' => [ 'Job Ad information update', 'ULOG_JOB_AD_INFORMATION' ], '029' => [ 'Job state is unknown', 'ULOG_JOB_STATUS_UNKNOWN' ], '030' => [ 'Job state is known', 'ULOG_JOB_STATUS_KNOWN' ], '031' => [ 'Condor-I/O stage-in', 'ULOG_JOB_STAGE_IN' ], '032' => [ 'Condor-I/O stage-out', 'ULOG_JOB_STAGE_OUT' ] ); @main::statmsg = ( 'failed', 'worker', 'stage-in', 'stage-out', 'replica', 'double fault', 'failed worker', 'failed stage-in', 'failed stage-out', 'failed replica' ); my %opts = ( S => 0 ); my $tmpdir = $ENV{TMPDIR} || $ENV{TMP} || $ENV{TEMP} || File::Spec->tmpdir() || '/tmp'; GetOptions( 'help|h|?' => sub { $opts{h}=1 }, 'offset|a=i' => sub { $opts{a}=$_[1] }, 'base|b=s' => sub { $opts{b}=$_[1] }, 'debug|d=o' => sub { $opts{d}=$_[1] }, 'dag|dagfile|D=s' => sub { $opts{D}=$_[1] }, 'eps|e' => sub { warn "Option -e has been deprecated, ignoring\n" }, 'omit|g=o' => sub { $opts{g}=$_[1] }, 'keep|k' => sub { $opts{k}=1 }, 'kickstart|K' => sub { $opts{K}=1 }, 'log|l=s' => sub { $opts{l}=$_[1] }, 'ypic|o=s' => sub { $opts{o}=$_[1] }, 'zpic|O=s' => sub { $opts{O}=$_[1] }, 'ploticus|p' => sub { $opts{p}=1 }, 'png|P' => sub { $opts{P} = 1 }, 'use-site|S=i' => sub { $opts{S}=$_[1]+0 }, 'ytitle|t=s' => sub { $opts{t}=$_[1] }, 'ztitle|T=s' => sub { $opts{T}=$_[1] }, 'tmpdir=s' => \$tmpdir ); if ( exists $opts{h} || ! exists $opts{D} ) { my $base = basename($0,'.pl'); print << "EOF"; Usage: $base [-d level] -D dagfile [-b basedir] [-o ypic] [-O zpic] [-p [-P]] -a|--offset s use the time zone offset in seconds for non-local logs -b|--base dir specifies the dir where DAG was started, default dirname(dagfn) -d|--debug lvl logical OR of the following levels, default is no debugging 0x01:\treport job types and job names correlation. 0x02:\treport data for each job tag. 
0x04:\treport \%dbase data structure composition for ypic 0x08:\treport \%host data structure composition for zpic 0x10:\tdump composed data structure, iff combined with any previous two 0x20:\tshow details on the auto-scaling 0x40:\treport files that were accessed. 0x80:\tadd debug message to XML parser. -D|--dag fn specifies the Condor DAGMan dag file to parse, mandatory option -g|--omit lvl tomit the gridstart gray boxes in the output, default draw 0x01:\tomit gridstart gray boxes from ypic 0x02:\tomit gridstart gray boxes from zpic -k|--keep if set, keep the temporary files in /tmp, and don\'t remove -K|--kickstart parallelize zpic from kickstart intervals, default is Condor -l|--log fn Condor\'s common user log, if in a non-standard location -o|--ypic fn specifies the jobnr picture, default is ypic.eps -O|--zpic fn specifies the host picture, default is zpic.eps -P|--png Use ImageMagick\'s convert to create PNG from EPS files. -p|--ploticus try to post-process with ploticus to EPS, default is not -S|--use-site 0: use gatekeeper host (default) 1: use site handle 2: use reported workernode handle -t|--ytitle t title phrase on top of ypic - and zpic in absence of -T -T|--ztitle t title phrase on top of zpic, default is timestamp/user --tmpdir dir which directory to use for /tmp. Default is complex... EOF exit 0; } if ( $opts{d} ) { foreach my $i ( qw(a b d D e g k K l o O p P t T S) ) { print "## option $i is $opts{$i}\n" if ( defined $opts{$i} && exists $opts{$i} ); } } # globals %main::dag = (); $main::debug = ungarble($opts{d}); $main::year = (localtime())[5]; $main::min = $main::gridmin = 1E12; $main::max = $main::gridmax = -1E12; END { # big clean-up handler unlink( @main::unlink ) if @main::unlink > 0; } my @y=File::Temp->tempfile( 'ydata-XXXXX', DIR => $tmpdir, SUFFIX => '.dat' ); die "ERROR: Unable to create transient ydata file" unless defined $y[0]; my @z=File::Temp->tempfile( 'zdata-XXXXX', DIR => $tmpdir, SUFFIX => '.dat' ); die "ERROR: Unable to create transient zdata file" unless defined $z[0]; my $ypic = $y[1]; my $zpic = $z[1]; push( @main::unlink, $y[1], $z[1] ) unless $opts{k}; $opts{o}=File::Spec->catfile( File::Spec->curdir, "ypic.eps" ) unless $opts{o}; $opts{O}=File::Spec->catfile( File::Spec->curdir, "zpic.eps" ) unless $opts{O}; my $dagfn = $opts{D}; my $basedir = $opts{b} ? $opts{b} : dirname($opts{D}); die "Need a base directory to find submit files\n" unless -d $basedir; # read .dag file to associate job ID <=> submit file my ($subfn,$logfn); open( DAG, "<$dagfn" ) || die "open $dagfn: $!\n"; warn "# reading $dagfn...\n" if ( $main::debug & 0x40 ); while ( ) { $main::dag{$1} = $subfn = $2 if ( /^\s*job\s+(\S+)\s+(\S+)/i ); } close DAG; # read any submit file to find location of logfile common to all submit files $subfn = File::Spec->catfile( $basedir, $subfn ); open( SUB, "<$subfn" ) || die "open $subfn: $!\n"; warn "# reading $subfn...\n" if ( $main::debug & 0x40 ); while ( ) { $logfn = $1 if /^\s*log\s*=\s*(\S+)/; } close SUB; undef $subfn; if ( exists $opts{l} ) { $logfn = $opts{l}; } elsif ( ! -r $logfn ) { my $dagbase = basename( $dagfn ); $dagbase =~ s/(?:\.(?:rescue|dag))+$//; $logfn = File::Spec->catfile( $basedir, $dagbase . 
'.log' ); } $logfn = File::Spec->rel2abs( $logfn, $basedir ) unless -r $logfn; die "Unable to read Condor's common log $logfn\n" unless -r $logfn; print "# dagfn=$dagfn\n# basedir=$basedir\n# logfn=$logfn\n" if $main::debug; sub mytime ($$) { my ($month,$day) = split(/\//,shift()); my ($h,$m,$s) = split(/:/,shift()); my $r = timelocal($s,$m,$h,$day,$month-1,$main::year); $r += $opts{a} if exists $opts{a}; $main::min = $r if $r < $main::min; $main::max = $r if $r > $main::max; $r; } sub graphics($$$$$) { my ($datafn,$outfn,$width,$height,$count,$ok,@arg) = @_; $width = sprintf("%.2f",$width+1); $height = sprintf("%.2f",$height); my $ploticus = find_exec('ploticus') || find_exec('pl'); my $convert = find_exec('convert'); if ( defined $ploticus ) { my $pos = rindex( $datafn, '.' ); my $plsfn = ($pos >= 0 ? substr($datafn,0,$pos) : $datafn) . '.pls'; @arg = ( $ploticus, $plsfn, '-eps' ); # @arg = ( $ploticus, $plsfn, '-jpeg', '-pagesize', "$width,$height" ); push( @arg, '-maxrows', $count+1, '-cpulimit', 600 ) if $count > 9999; warn "# running @arg\n"; # if $main::debug; system { $arg[0] } @arg; if ( $? == 0 ) { my $fn = ($pos >= 0 ? substr($datafn,0,$pos) : $datafn) . '.eps'; @arg = ( '/bin/mv', '-f', $fn, $outfn ); warn "# running @arg\n"; # if $main::debug; system { $arg[0] } @arg; if ( $? == 0 ) { if ( defined $convert ) { @arg = ( $convert, '-density', '96x96', '-background', 'white', '-flatten' ); $fn = basename($outfn,('.eps','.png','.jpeg')); push( @arg, $outfn, File::Spec->catfile( dirname($outfn), $fn.".png" ) ); warn "# running @arg\n"; # if $main::debug; system { $arg[0] } @arg; warn( "Warning: @arg returned ", ($?>>8), '/', ($? & 127), "\n" ) if $?; } } else { warn( "Warning: @arg returned ", ($?>>8), '/', ($? & 127), "\n" ); } } else { warn( "Warning: @arg returned ", ($?>>8), '/', ($? & 127), "\n" ); } } else { warn "# unable to run ploticus: executable not found\n"; } $ok; } sub gridstart ($) { # purpose: parse a provenance tracking record # paramtr: $jobid (IN): job ID to determine the filename from # globals: $main::gridmin, $main::gridmax: min and max ks timestamps # globals: $final: greatest timestamp encountered # returns: array of hash with several important values # host, start, duration, name (TR), raw status, exit my $jobid = shift; my @result = (); my $kfn = File::Spec->catfile( $basedir, $main::dag{$jobid} ); substr( $kfn, -4 ) = '.out'; # s/sub$/out/ # FIXME: t'is a guess my @backup = sort glob( "$kfn.???" 
); # new style @backup = ( $kfn ) if @backup == 0; # old style local($/) = "\n"; foreach my $fn ( @backup ) { log( "reading kickstart $fn" ) if ( $main::debug > 1 ); my @xml = (); if ( -r $fn && -s _ && open( KS, "<$fn" ) ) { my $state = 0; my $tmp = ''; while ( <KS> ) { if ( $state == 0 ) { next unless m{^\s*<invocation\s}; $state = 1; $tmp = $_; } else { $tmp .= $_; if ( m{</invocation>} ) { $state = 0; push( @xml, $tmp ); $tmp = ''; } } } close KS; log( "found ", @xml+0, " invocation records" ) if $main::debug > 1; } else { warn "Warning: Unable to read from $fn\n"; } foreach my $xml ( @xml ) { my @stack = (); my %result = (); my $parser = new XML::Parser::Expat( 'Namespaces' => 1 ) || die "ERROR: Unable to instantiate an XML parser\n"; $parser->setHandlers( 'Start' => sub { my $self = shift; my $element = shift; my %attr = @_; push( @stack, $element ); # warn '# ', join('/',@stack), "\n" if ( $main::debug & 0x80 ); if ( @stack == 1 && $element ne 'invocation' ) { die "ERROR: Wrong kind of XML input file\n"; } elsif ( @stack == 1 && $element eq 'invocation' ) { $result{host} = $attr{hostname} || $attr{hostaddr}; $result{start} = iso2unix( $attr{start} ); $result{duration} = $attr{duration}; $result{final} = $result{start} + $attr{duration}; $result{name} = $attr{transformation}; } elsif ( @stack == 3 && $stack[1] =~ /job$/ && $element eq 'status' ) { $result{raw} = $attr{raw}; } }, 'End' => sub { my $self = shift; my $element = shift; pop(@stack) eq $element; }, 'Char' => sub { 1; } ); my $result; eval { $result = $parser->parse($xml) }; if ( defined $result ) { # generic info post-processing $main::gridmin = $result{start} if $result{start} < $main::gridmin; $main::gridmax = $result{final} if $result{final} > $main::gridmax; push( @result, { %result } ); } else { warn "# unable to parse $fn\n"; } if ( $main::debug & 0x80 ) { warn "## XML from $fn\n"; while ( my ($k,$v) = each %result ) { warn "## $k = $v\n"; } } } } @result; } sub node_type ($) { # purpose: Determine the type of the node # paramtr: $subfn (IN): submit file (node) name # returns: a node type number # 0 unknown/error # 1 worker # 2 stage-in # 3 stage-out # 4 rc job # 5 inter-pool my $result = 0; # to err is default my $subfn = File::Spec->catfile( $basedir, $_[0] ); if ( open( SUB, "<$subfn" ) ) { warn "# reading $subfn...\n" if ( $main::debug & 0x40 ); # search for ClassAd with job type while ( <SUB> ) { $result=$1 if /^\s*\+(?:pegasus|vds)_job_class\s*=\s*(\S+)/; } close SUB; goto GUESS if ( $result <= 0 ); $result=0 if ( $result > 5 ); } else { # make an educated guess GUESS: local($_) = basename($subfn); if ( /^stage_in_/ ) { # stage-in job $result = 2; } elsif ( /^stage_out_/ ) { # stage-out job $result = 3; } elsif ( /^register_/ ) { # replica mgmt job $result = 4; } elsif ( /^stage_inter_/ ) { # inter pool transfer $result = 5; } elsif ( /\S+/ ) { # worker job $result = 1; } } warn "# assigning jobtype $result to $_[0]\n" if ( $main::debug & 0x01 ); $result; } sub read_site ($) { # purpose: Determine the resource handle from info in the submit file # paramtr: $jobid (IN) # returns: a site handle, if possible my $jobid = shift; my $subfn = File::Spec->catfile( $basedir, $main::dag{$jobid} ); my $result; local(*SUB); local($/) = "\n"; if ( open( SUB, "<$subfn" ) ) { warn "# reading $subfn...\n" if ( $main::debug & 0x40 ); while ( <SUB> ) { if ( /^\s*\+(vds_|wf_|pegasus_){1,2}site\s*=\s*(\S+)/i ) { $result = $2; my $first = substr($result,0,1); $result = substr($result,1,-1) if ( $first eq "\'" || $first eq '"' ); last if length($result); } } close SUB; } else { warn "# unable to read $subfn: $!\n"; } warn "#
assigning site=$result\n" if ( $main::debug & 0x01 ); $result; } my (%jobid,$jobid); sub xlate($) { my $id=shift; if ( exists $jobid{$id} ) { $jobid{$id}; } else { $jobid{$id} = ++$jobid; } } sub read_log ($\%\%\%) { my $logfn = shift; # name of logfile my $dbaseref = shift; # ref to dbase hash my $realhost = shift; my $warnings = shift; open( LOG, "<$logfn" ) or die "reading $logfn: $!\n"; ## warn "# reading $logfn...\n" if ( $main::debug & 0x40 ); my ($lastname,%dbase,%host,$tag,%warn,@stat,$ipv4,$node,$site,$rc); $/="\n...\n"; while ( <LOG> ) { if ( /\# DONE/ or eof(LOG) ) { $main::done=1; last unless /\.\.\.[\r\n]*$/; } $_ = substr($_,index($_,"\n")+1) while ( ! /^0/ && length ); next unless length; if ( /^(\d{3}) \((\d+)[0-9.]+\) ([0-9\/]+) ([0-9:]+)/ ) { $tag = $1; my $job = xlate($2); my $tds = mytime($3,$4); if ( $tag eq '000' ) { # submission notice # [ stamp, SH_ipv4, dagnodeid, nodetype ] /((?:\d{1,3}\.){3}\d{1,3})/ ? $ipv4=$1 : undef $ipv4; /DAG Node:\s(\S+)/ ? $node=$1 : undef $node; /pool:(\S+)/ ? $site=$1 : undef $site; $dbase{$job}{$tag} = [ $tds, $ipv4, $node, node_type($node), $site ]; warn "# ($job,$tag)=($tds,$ipv4,$node,$site)\n" if ( $main::debug & 0x02 ); } elsif ( $tag eq '001' ) { # execution notice # [ stamp, GK_host ] my $host; if ( /: gt. (\S+)/ ) { my @a = split /\//, $1; $host = ( @a > 3 ? $a[2] : $a[0] ); $dbase{$job}{$tag} = [ $tds, $host ]; warn "# ($job,$tag)=($tds,$host)\n" if ( $main::debug & 0x02 ); } elsif ( /((?:\d{1,3}\.){3}\d{1,3})/ ) { $host = $1; $dbase{$job}{$tag} = [ $tds, $host ]; warn "# ($job,$tag)=($tds,$host)\n" if ( $main::debug & 0x02 ); } elsif ( /: (\S+)/ ) { $host = $1; # inet_ntoa(inet_aton($1)); $dbase{$job}{$tag} = [ $tds, $host ]; warn "# ($job,$tag)=($tds,$host)\n" if ( $main::debug & 0x02 ); } else { warn "# unable to extract hostname for $tag/$job/$tds!\n"; } } elsif ( $tag eq '017' || $tag eq '027' ) { # job was submitted to Globus or Grid # [ stamp, GK_host ] if ( /RM-Contact:\s+(\S+)/ ) { my @a = split /\//, $1; my $host = ( @a > 3 ? $a[2] : $a[0] ); $dbase{$job}{'017'} = [ $tds, $host ]; warn "# ($job,$tag)=($tds,$host)\n" if ( $main::debug & 0x02 ); } elsif ( /GridResource:\s+\S+\s+(\S+)/ ) { my @a = split /\//, $1; my $host = ( @a > 3 ? $a[2] : $a[0] ); $dbase{$job}{'017'} = [ $tds, $host ]; warn "# ($job,$tag)=($tds,$host)\n" if ( $main::debug & 0x02 ); } else { warn "# unable to extract hostname for $tag/$job/$tds\n"; } } elsif ( $tag eq '005' ) { # result # [ stamp, result, start, duration, WN_ipv4, exitcode [, site ] ] /\(return value (-?\d+)/ ? $rc=$1 : undef $rc; my @y = ($tds,$rc,1E20,0); if ( defined $dbase{$job}{'000'}->[2] ) { my @x = gridstart($dbase{$job}{'000'}->[2]); foreach my $x ( @x ) { if ( $x->{start} >= 1000000000 ) { ## push( @y, @x{'start','duration','host','exit'} ); $y[2] = $x->{start} if $y[2] > $x->{start}; $y[3] += $x->{duration}; $y[4] = $x->{host}; # set failure mode from true exit code $y[1] = $x->{rawexit} == 0 ? 0 : 1 if ( $y[1] == 0 ); $main::duration{total} += $x->{duration}; if ( $y[1] ) { $main::duration{bad} += $x->{duration}; } else { $main::duration{good} += $x->{duration}; } } } } if ( abs($y[2] - 1E20) < 100 ) { # Ayieeh, no sane data, @y is still defaults $y[2] = $tds; $y[3] = 0; } $dbase{$job}{$tag} = [ @y ]; $stat[$y[1] ? 0 : $dbase{$job}{'000'}->[3]]++; warn "# ($job,$tag)=(", join(',',@y), ")\n" if ( $main::debug & 0x02 ); } elsif ( $tag eq '016' ) { # post script -- Euryale /\(return value (-?\d+)/ ?
$rc=$1 : undef $rc; my @y = @{$dbase{$job}{'005'}}; if ( $rc ) { # error case $dbase{$job}{'005'} = [ $y[0], 1 ]; $main::duration{good} -= $y[3]; $main::duration{bad} += $y[3]; $stat[0]++; $stat[$dbase{$job}{'000'}[3]]--; } else { # good case my ($node,$site); if ( defined ($node = $dbase{$job}{'000'}[2]) ) { $site = $dbase{$job}{'000'}[4] || read_site($node); if ( defined $site ) { $dbase{$job}{'001'}[2] = $site; $dbase{$job}{'005'}[7] = $site; } } } warn "# ($job,$tag)=(", join(',',@y), ")\n" if ( $main::debug & 0x02 ); } elsif ( $tag eq '009' ) { # manual job removal? # 009 (1870.000.000) 11/09 17:51:17 Job was aborted by the user. # via condor_rm (by user voeckler) # mark as bad in 005 # [ stamp, result, start, duration, WN_ipv4, exitcode [, site ] ] # mark as bad in 001 # [ stamp, GK_host [, site ] ] my $gk = $dbase{$job}{'017'}[1]; $dbase{$job}{'001'} = [ $tds, $gk || 'unsubmitted', undef ] unless exists $dbase{$job}{'001'}; $dbase{$job}{'005'} = [ $tds, 1, undef, undef, undef, -1 ]; #$main::duration{good} -= $y[3]; #$main::duration{bad} += $y[3]; $stat[0]++; #$stat[$dbase{$job}{'000'}[3]]--; warn "# ($job,$tag)=($tds)\n" if ( $main::debug & 0x02 ); } else { # unknown } $warn{$tag}++; } last if $main::done; } close(LOG); print "# post-processing...\n"; my $sometime = $main::max - $main::min + 10; foreach my $i ( keys %dbase ) { # substract base time foreach my $x ( qw(000 001 005) ) { if ( exists $dbase{$i}->{$x} ) { $dbase{$i}{$x}->[0] -= $main::min; } else { warn "# removing unfinished job $i\n"; delete $dbase{$i}; last; # $dbase{$i}{$x}->[0] = $sometime; } } if ( exists $dbase{$i} && exists $dbase{$i}->{'005'} ) { $dbase{$i}{'005'}->[2] -= $main::min if defined $dbase{$i}{'005'}->[2]; } } foreach my $i ( keys %dbase ) { # prepare stage2 if ( exists $dbase{$i}->{'001'} ) { # my $host = ( defined $dbase{$i}{'005'}->[4] ) ? # $dbase{$i}{'005'}->[4] : # WN_ipv4 # $dbase{$i}{'001'}->[1]; # GK_host # my $host = $dbase{$i}{'001'}->[2] ? # $dbase{$i}{'001'}->[2] : $dbase{$i}{'001'}->[1]; my $host = 'unknown'; if ( $opts{S} == 1 ) { $host = $dbase{$i}{'001'}->[2]; # site handle } elsif ( $opts{S} == 2 ) { $host = $dbase{$i}{'005'}->[4]; # WN host } else { $host = $dbase{$i}{'001'}->[1]; # gatekeeper host } push( @{$host{$host}}, $rc = [ $dbase{$i}{'001'}->[0], $dbase{$i}{'000'}->[0], $dbase{$i}{'005'}->[0], $dbase{$i}{'005'}->[1], # exit code $dbase{$i}{'000'}->[3], # kind of job $dbase{$i}{'005'}->[2], $dbase{$i}{'005'}->[2] + $dbase{$i}{'005'}->[3] ] ); warn "# ypic: (", join(',',@$rc), ")\n" if ( $main::debug & 0x04 ); } } warn "# ypic: ", Data::Dumper->Dump([\%dbase], [qw(%dbase)]), "\n" if ( ($main::debug & 0x14) == 0x14 ); #!!my ($lo,$hi) = 0 ? (0,2) : (5,6); my ($lo,$hi) = $opts{K} ? 
(5,6) : (1,2); my %real; foreach my $host ( sort { $a cmp $b } keys %host ) { my @list = sort { $a->[$lo] <=> $b->[$lo] } @{$host{$host}}; unshift( @{$real{$host}->[0]}, $rc = $list[0] ); warn "# zpic: (", join(',',@$rc), ")\n" if ( $main::debug & 0x08 ); for ( my $i=1; $i<@list; $i++ ) { my ($j,$k); if ( $list[$i]->[$lo] < $list[$i-1]->[$hi] && $list[$i]->[$lo] >= 0 ) { # overlap detected $j++ while ( $list[$i]->[$lo] < $real{$host}->[$j]->[0]->[$hi] ); } if ( $list[$i]->[2] ) { unshift( @{$real{$host}->[$j]}, $rc=$list[$i] ); warn "# zpic: (", join(',',@$rc), ")\n" if ( $main::debug & 0x08 ); } } } warn "# zpic: ", Data::Dumper->Dump([\%host], [qw(%host)]), "\n" if ( ($main::debug & 0x18) == 0x18 ); # return results in call-by-ref vars %{$dbaseref} = %dbase; %{$realhost} = %real; %{$warnings} = %warn; # return stats as result @stat; } sub xtime ($$) { defined $_[0] ? $_[0] : $_[1]; } sub write_ypic_data ($\%) { my $fn = shift; my %dbase = %{shift()}; my @x = sort { $a <=> $b } keys %dbase; my $sometime = time - $main::min; my $df1 = new IO::File ">$fn" || die "open $fn: $!\n"; foreach my $i ( @x ) { my @y = ( xtime($dbase{$i}{'001'}->[0], $sometime), $i - $x[0] + 1, xtime($dbase{$i}{'000'}->[0], $sometime), xtime($dbase{$i}{'005'}->[0], $sometime), $dbase{$i}{'005'}->[1], $dbase{$i}{'000'}->[3] ); $df1->printf( "%5d %5d %5d %5d %d %d", @y ); warn "# ypic: ($i)=(", join(',',@y), ")\n" if ( $main::debug & 0x04 ); if ( my $x = $dbase{$i}{'005'}->[2] ) { $df1->printf( " %9.3f %9.3f\n", $x, $x+$dbase{$i}{'005'}->[3] ); } else { $df1->print(" \"\" \"\"\n"); } } $df1->close(); # return all job ids as result @x; } sub write_ypic_ploticus ($$$$\$\$\$\$\$\$) { my $ypic = shift; my $mint = shift; my $maxt = shift; my $jobs = shift; # number of jobnumbers my ($upperx,$uppery,$xstub,$xtics,$ystub,$ytics) = @_; my $deftitle = default_title($dagfn,$main::min); my $title = $opts{t} || $deftitle; my $pos = rindex($ypic,'.'); my $fn = ($pos >= 0 ? substr($ypic,0,$pos) : $ypic) . 
'.pls'; push( @main::unlink, $fn ) unless $opts{k}; print "# generating plotticus driver $fn...\n"; if ( open( OUT, ">$fn" ) ) { warn "# writing $fn...\n" if ( $main::debug & 0x40 ); # phase 1: basic scaling $$upperx = ($maxt - $mint) / 60.0; warn "# phase0: diff_t=", $$upperx, "\n" if ( ($main::debug & 0x20) == 0x20 ); $$upperx = 4.0 if ( $$upperx < 4 ); $$uppery = $jobs / 13.0; $$uppery = 2.0 if ( $$uppery < 2 ); warn( "# phase1: upperx=", $$upperx, ", uppery=", $$uppery, ", div=", sprintf("%.3f",$$upperx / $$uppery), "\n" ) if ( ($main::debug & 0x20) == 0x20 ); # phase 2: extreme imbalance adjustments $$xtics = 10; $$xstub = 60; while ( $$upperx > 20 ) { warn( "# extreme x-scaling [", join(",",$$upperx,$$xtics,$$xstub), "]\n" ); $$upperx /= 5; $$xtics *= 5; $$xstub *= 5; } $$upperx = 4.0 if ( $$upperx < 4 ); warn( "# phase2a: upperx=", $$upperx, ", uppery=", $$uppery, ", div=", sprintf("%.3f",$$upperx / $$uppery), "\n" ) if ( ($main::debug & 0x20) == 0x20 ); # $$ytics = 1; # $$ystub = 5; # while ( ($$uppery / $$upperx) > 5 ) { # warn "# extreme y-scaling\n"; # $$uppery /= 5; # $$ytics *= 5; # $$ystub *= 5; # } # $$uppery = 2.0 if ( $$uppery < 2 ); # warn "# phase2b: upperx=", $$upperx, ", uppery=", $$uppery,"\n" # if ( ($main::debug & 0x20) == 0x20 ); # phase 3: large number adjustments for ( my $x = $$upperx; $x > 30; $x /= 10 ) { warn "# large number x-adjustment\n"; $$xtics *= 10; $$xstub *= 10; } for ( my $y = $$uppery; $y > 40; $y /= 10 ) { warn "# large number y-adjustment\n"; $$ytics *= 10; $$ystub *= 10; } print OUT << "END" // // generated: $deftitle // #proc getdata file: $ypic #proc areadef rectangle 0.5 0.5 @{[sprintf("%.1f",$$upperx)]} @{[sprintf("%.1f",$$uppery)]} xautorange: datafields=1,3,4 yautorange: datafield=2 frame: width=0.5 color=gray(0.3) title: $title titledetails: align=C style=I // adjust=0,0.2 // size=18 #proc xaxis ticincrement: $$xtics grid: color=rgb(1,0.9,0.8) style=1 dashscale=2 #proc xaxis label: Jobs over Time tics: yes stubs: incremental $$xstub minorticinc: $$xtics grid: color=gray(0.8) #proc yaxis ticincrement: $$ytics grid: color=rgb(1,0.9,0.8) style=1 dashscale=2 #proc yaxis // label: job [#] tics: yes stubs: incremental $$ystub minorticinc: $$ytics grid: color=gray(0.8) #proc legendentry sampletype: color label: Worker job details: green tag: 1 #proc legendentry sampletype: color label: Stage-in job details: skyblue tag: 2 #proc legendentry sampletype: color label: Stage-out job details: lightpurple tag: 3 #proc legendentry sampletype: color label: Replica job details: yellow2 tag: 4 #proc legendentry sampletype: color label: Interpool Xfer details: pink tag: 5 #proc legendentry sampletype: color label: Unknown job details: red tag: 0 #proc legendentry sampletype: symbol label: GridStart info details: shape=square style=outline linecolor=gray(0.4) fillcolor=gray(0.75) // output boxes without errors #proc bars select: \@5 = 0 outline: no barwidth: 0.03 horizontalbars: yes segmentfields: 3 4 locfield: 2 colorfield: 6 tails: 0.03 // output boxes with errors #proc bars select: \@5 != 0 outline: color=redorange width=0.8 barwidth: 0.03 horizontalbars: yes segmentfields: 3 4 locfield: 2 // colorfield: 6 color: red tails: 0.03 END ; if ( ($opts{g} & 0x01) == 0 ) { # print gridstart gray boxes print OUT << "END" // gray gridstart boxes #proc bars select: \@5 = 0 outline: color=gray(0.4) color: gray(0.75) horizontalbars: yes barwidth: 0.04 tails: no segmentfields: 7 8 locfield: 2 END ; } print OUT << "END" // execution cross #proc scatterplot select: \@5 
= 0 xfield: 1 yfield: 2 symbol: shape=square style=spokes linecolor=black #proc legend format: multiline location: min+0.5 max END ; close(OUT); } else { warn "unable to generate $fn: $!\n"; } } sub write_zpic_data ($\%) { my $fn = shift; my %host = %{shift()}; my @host = sort { inet_aton($b) cmp inet_aton($a) } keys %host; my $df1 = new IO::File ">$fn" || die "open $fn: $!\n"; # collect y-axis into extra file my $df2 = new IO::File ">${fn}2" || die "open ${fn}2: $!\n"; push( @main::unlink, "${fn}2" ) unless $opts{k}; my %done; my $count=1; # my $skip = int( POSIX::ceil( @host / 20.0 ) ); foreach my $host ( @host ) { warn "# zpic: processing $host\n" if ( $main::debug & 0x08 ); for ( my $i=0; $i<@{$host{$host}}; $i++ ) { for ( my $j=@{$host{$host}->[$i]}-1; $j >= 0; $j-- ) { my $x = $host{$host}->[$i]->[$j]; print '.', next if ( $x->[5] < 0 && $x->[6] < 0 ); warn "# zpic: ($host,$i,$j)=(", join(',',@$x), ")\n" if ( $main::debug & 0x08 ); if ( defined $x->[1] ) { if ( $done{$host} < $count+$i ) { if ( @{$host{$host}} == 1 ) { $df2->printf( "%4u %s\n", $count+$i, $host ); # ($count % $skip) == 0 ? $host : "" ); } else { $df2->printf( "%4u %s:%d\n", $count+$i, $host, $i ); # ($count % $skip) == 0 ? $host : "" ); } $done{$host} = $count+$i; } $df1->printf( "%5d %4u %5d %5d %d %d", $x->[0], $count+$i, @$x[1..4] ); if ( $x->[5] ) { $df1->printf( " %9.3f %9.3f\n", $x->[5], $x->[6] ); } else { $df1->print( " \"\" \"\"\n" ); } } } } $count += @{$host{$host}}; } $df2->close(); $df1->close(); # return host count as result $count-1; } sub write_zpic_ploticus ($$\$\$\$\$) { my $zpic = shift; my $hostcount = shift; # number of hosts my ($upperx,$uppery,$xstub,$xtics) = @_; my $deftitle = default_title($dagfn,$main::min); my $title = $opts{T} || $opts{t} || $deftitle; my $pos = rindex($zpic,'.'); my $fn = ($pos >= 0 ? substr($zpic,0,$pos) : $zpic) . '.pls'; push( @main::unlink, $fn ) unless $opts{k}; print "# generating plotticus driver $fn...\n"; if ( open( OUT, ">$fn" ) ) { # keep x scaling from last diagram! warn "# writing $fn...\n" if ( $main::debug & 0x40 ); $$uppery = $hostcount / 4.0; #!! 
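# Scaling note (derived from the statements that follow): the canvas
# height starts at one inch per four host rows, is shrunk by factors of 5
# until it is at most 20 inches (with a floor of 2), and the x-axis is
# widened to 8 inches once the accumulated kickstart duration exceeds
# 4 hours (14400 s); e.g. 300 hosts => 75.0 => 15.0 inches after one shrink.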
$$uppery /= 5 while ( $$uppery > 20 ); $$uppery = 2.0 if ( $$uppery < 2 ); $$upperx = 8.0 if $main::duration{total} > 14400; warn "# zpic: upperx=", $$upperx, ", uppery=", $$uppery,"\n" if ( $main::debug & 0x20 ); print OUT << "END" // // generated: $deftitle // #proc getdata file: $zpic #proc areadef rectangle 2.5 0.5 @{[sprintf("%.1f",$$upperx+1.0)]} @{[sprintf("%.1f",$$uppery)]} xautorange: datafields=1,3,4 // yscaletype: categories // ycategories: datafield=2 yrange: 0 @{[$hostcount+1]} frame: width=0.5 color=gray(0.3) title: $title titledetails: align=C style=I // size=18 // align=0,0.2 #proc xaxis ticincrement: $$xtics grid: color=rgb(1,0.9,0.8) style=1 dashscale=2 #proc xaxis label: Hosts over Time tics: yes stubs: incremental $$xstub minorticinc: $$xtics grid: color=gray(0.8) #proc yaxis // stubs: categories selflocatingstubs: file ${zpic}2 grid: color=gray(0.8) #proc legendentry sampletype: color label: Worker job details: green tag: 1 #proc legendentry sampletype: color label: Stage-in job details: skyblue tag: 2 #proc legendentry sampletype: color label: Stage-out job details: lightpurple tag: 3 #proc legendentry sampletype: color label: Replica job details: yellow2 tag: 4 #proc legendentry sampletype: color label: Interpool Xfer details: pink tag: 5 #proc legendentry sampletype: color label: Unknown job details: red tag: 0 #proc legendentry sampletype: symbol label: GridStart info details: shape=square style=outline linecolor=gray(0.4) fillcolor=gray(0.75) // colored boxes for regular jobs #proc bars select: \@5 = 0 outline: no barwidth: 0.06 tails: 0.06 horizontalbars: yes //!! segmentfields: 1 4 segmentfields: 3 4 locfield: 2 colorfield: 6 // red-framed boxes for failed jobs #proc bars select: \@5 != 0 outline: color=redorange width=1.44 barwidth: 0.06 tails: no horizontalbars: yes //!! segmentfields: 1 4 segmentfields: 3 4 locfield: 2 // colorfield: 6 color: red END ; if ( ($opts{g} & 0x02) == 0 ) { # print gridstart gray boxes print OUT << "END" // gray gridstart boxes #proc bars select: \@5 = 0 outline: color=gray(0.4) color: gray(0.75) horizontalbars: yes barwidth: 0.04 tails: no segmentfields: 7 8 locfield: 2 END ; } print OUT << "END" // execution cross #proc scatterplot select: \@5 = 0 xfield: 1 yfield: 2 symbol: shape=square style=spokes linecolor=black #proc legend format: multiline location: max+0.5 max END ; close(OUT); } else { warn "unable to generate $fn: $!\n"; } } # # start main # undef $main::done; my (%dbase,%real,%warn,$upperx,$uppery,$xstub,$xtics,$ystub,$ytics); print "# reading $logfn...\n"; my @stat = read_log($logfn,%dbase,%real,%warn); # is there anything to produce? 
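# @stat holds per-class job counts from read_log(): index 0 counts failed
# jobs, indices 1..4 count successful worker, stage-in, stage-out and
# replica jobs (cf. @main::statmsg). Only classes 0-4 enter the sum, so a
# log containing nothing but inter-pool transfers (class 5) would yield
# "no jobs - no pictures"; e.g. @stat = (2,40,5,5,1) gives $sum = 53.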
my $sum = 0; for ( my $i=0; $i<=4; $i++ ) { $sum += $stat[$i]; } if ( $sum == 0 ) { print "# no jobs - no pictures\n"; } else { print "# generating $ypic...\n"; my @jobs = write_ypic_data($ypic,%dbase); write_ypic_ploticus($ypic,$main::min,$main::max,$#jobs, $upperx,$uppery,$xstub,$xtics,$ystub,$ytics); my @ypic = ($upperx+1,$uppery+0.2); print "# generating $zpic...\n"; $upperx += 1.0; my $hostcount = write_zpic_data($zpic,%real); write_zpic_ploticus($zpic,$hostcount,$upperx,$uppery,$xstub,$xtics); my @zpic = ($upperx+2,$uppery+0.2); # time saver if ( exists $opts{p} ) { graphics($ypic,$opts{o},$ypic[0],$ypic[1],0+@jobs); graphics($zpic,$opts{O},$zpic[0],$zpic[1],$hostcount); } } print "\nTAG COUNT MESSAGE\n"; foreach my $tag ( sort %warn ) { printf( "%3s %5d %s\n", $tag, $warn{$tag}, $main::status{$tag}->[0] ) if exists $warn{$tag}; } print "\nTAG COUNT MESSAGE\n"; for ( my $i=0; $i<=4; $i++ ) { printf( " %2d %5d %s job%s\n", $i, $stat[$i], $main::statmsg[$i], $stat[$i] == 1 ? "" : "s" ); } printf " %5d job%s total\n", $sum, $sum == 1 ? "" : "s"; print "\nSTATISTICS\n"; my $diff = $main::max - $main::min; printf( "Condor: %s .. %s = %lu s\n", unix2iso($main::min), unix2iso($main::max), $diff ); printf( "Launch: %s .. %s = %lu s\n", unix2iso($main::gridmin), unix2iso($main::gridmax), $main::gridmax - $main::gridmin ); printf( "Duration: %.3f good + %.3f bad = %.3f s, speed-up %.2f\n", @main::duration{'good','bad','total'}, $main::duration{total} / $diff );print "\n"; exit 0; pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/dag2image2.pl0000755000175000017500000001641511757531137023071 0ustar ryngerynge#!/usr/bin/env perl # # Convert DAG into image representing the workflow # use 5.006; use strict; use warnings; use Getopt::Long qw(:config bundling no_ignore_case); use File::Temp (); use File::Spec; my $image = ''; my $verbose = 0; my @relations = (); my @jobs = (); my @postjobs = (); my @nodecolors= qw(blueviolet mediumvioletred violet indigo violetred4); my $size=1; my $nodeshape = 'circle'; my $iptxnodecolor = 'skyblue'; my $optxnodecolor = 'gold'; my $registernodecolor = 'orange'; my $nodecolor = 'blueviolet'; my $internodecolor = 'limegreen'; my $cleanupnodecolor = 'red'; my $label = 'none'; my $arrowhead = 'normal'; my $arrowsize = '1.0'; my $type = 'jpeg'; my $keep = 0; sub usage { print STDERR "\n"; print STDERR "Usage: dag2image -o [options] dagfile\n"; print STDERR " dag2image --output [options] dagfile\n"; print STDERR "\n"; print STDERR "MANDATORY ARGUMENTS:\n"; print STDERR " dagfile path to the .dag file\n"; print STDERR "\n"; print STDERR "OPTIONAL ARGUMENTS:\n"; print STDERR " -o|--output \n"; print STDERR "\tWhere to put the produced graphic, default is stdout\n"; print STDERR "\tUse of this option is highly recommended.\n"; print STDERR " -l|--label \n"; print STDERR "\tDefault is none (verbose|none|info)\n"; print STDERR " -a|--arrow \n"; print STDERR "\tDefault is normal (normal|dot|invdot|open|invnormal)\n"; print STDERR " -s|--shape \n"; print STDERR "\tDefault is circle (circle|ellipse|doublecircle|point|square|traingle)\n"; print STDERR " --iptxcolor \n"; print STDERR "\tDefault is skyblue. 
Check graphviz documentation for more.\n"; print STDERR " --optxcolor, see iptxcolor\n"; print STDERR " --nodecolor, see iptxcolor\n"; print STDERR " --regcolor, see iptxcolor\n"; print STDERR " --intertxcolor, see iptxcolor\n"; print STDERR " -t|--type Type of output jpeg png etc, default jpeg\n"; print STDERR " --size Restrict the size 1 = 10x8, 2=17x11 , 3 = unrestricted\n"; print STDERR " --keep if specified, do not remove the dot file\n"; print STDERR "\n"; exit(1); } my $result = GetOptions ( "output|o=s" =>\$image, "label|l=s" =>\$label, "intertxcolor|x=s"=>\$internodecolor, "ahead=s" =>\$arrowhead, "shape|s=s" =>\$nodeshape, "iptxcolor=s" =>\$iptxnodecolor, "optxcolor=s" =>\$optxnodecolor, "nodecolor=s" =>\$nodecolor, "regcolor=s" =>\$registernodecolor, "verbose|v" =>\$verbose, "asize|a=f" =>\$arrowsize, "type|t=s" =>\$type, "size=i" =>\$size, "keep!" => \$keep, "help|h" =>\&usage); my $dagfile = shift || die "ERROR: The .dag file is a mandatory argument, see --help\n"; die "ERROR: $dagfile does not exist\n" unless -e $dagfile; die "ERROR: $dagfile not readable\n" unless -r _; my $tmp = $ENV{TMPDIR} || $ENV{TMP} || $ENV{TEMP} || File::Spec->tmpdir() || '/tmp'; my $dot = new File::Temp( TEMPLATE => 'dotXXXXXX', DIR => $tmp, SUFFIX => '.dot', UNLINK => (! $keep) ); my $dotfile = $dot->filename; open( DAG, "<$dagfile" ) || die "ERROR: Unable to open dag file $dagfile $!\n"; open( DOT, ">$dotfile" ) || die "ERROR: Unable to write to dotfile $dotfile $!\n"; # set up counter for statistics my %count = ( dep => 0, job => 0, post => 0, cleanup=> 0, stagein => 0, stageout => 0, register => 0, exe => 0, inter => 0 ); print DOT "digraph E {\n"; if ( $size==2 ) { print DOT " size=\"17.0,11.0\"\n ratio=auto\n"; } elsif ( $size==1 ) { print DOT " size=\"11.5,10.0\"\n ratio=auto\n"; } print DOT " node [shape=$nodeshape]\n"; print DOT " edge [arrowhead=$arrowhead, arrowsize=$arrowsize]\n"; while ( ) { next if /^\#/; # skip comments s/^\s+//; # remove leading whitespace s/[ \r\n\t]+$//; # remove trailing whitespace including CRLF if ( /^PARENT\s/i && /\sCHILD\s/i ) { s/^PARENT\s+//i; my ($parents,$children) = split /\s+CHILD\s+/i, $_, 2; foreach my $parent ( split( /\s+/, $parents ) ) { foreach my $child ( split( /\s+/, $children ) ) { # one line per link my $what = "$parent -> $child"; $relations[$count{dep}] = $what; $count{dep}++; print DOT " $what\n"; print STDERR "Adding arc $what\n" if $verbose; } } } elsif ( /^JOB\s/i ) { # special job processing my ($templabel, $tempstring) = ''; my $job = $jobs[$count{job}] = (split)[1]; if ( $job =~ /^rc_tx/ || $job =~ /^stage_in/ ) { $tempstring = " $job [color=$iptxnodecolor, style=filled"; if ( $label =~ /info/i ) { $templabel="IP-TX-$count{stagein}"; } elsif ( $label =~ /verbose/i ) { $templabel="$job"; } $count{stagein}++; } elsif ( $job =~ /^new_rc_tx/ || $job =~ /^stage_out/ ) { $tempstring = " $job [color=$optxnodecolor, style=filled"; if ( $label =~ /info/i ) { $templabel="OP-TX-$count{stageout}"; } elsif ( $label =~ /verbose/i ) { $templabel="$job"; } $count{stageout}++; } elsif ( $job =~ /^new_rc_register/ || $job =~ /^register/ ) { $tempstring = " $job [color=$registernodecolor, style=filled"; if ( $label =~ /info/i ) { $templabel="RLS_REG-$count{register}"; } elsif ( $label =~ /verbose/i ) { $templabel="$job"; } $count{register}++; } elsif ( $job =~ /^inter_tx/ ) { $tempstring = " $job [color=$internodecolor, style=filled"; if ( $label =~ /info/i ) { $templabel="INTER-TX-$count{inter}"; } elsif ( $label =~ /verbose/i ) { $templabel="$job"; } 
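# (job classification in this script is purely name-based: rc_tx_*/stage_in_*
# map to input transfers, new_rc_tx_*/stage_out_* to output transfers,
# register_* to replica registration, inter_tx_* to inter-pool transfers,
# clean_up_* to cleanup, and everything else is drawn as a compute node)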
$count{inter}++; } elsif ( $job =~ /^clean_up/ ) { $tempstring = " $job [color=$cleanupnodecolor, style=filled"; if ( $label =~ /info/i ) { $templabel="CLEANUP-$count{cleanup}"; } elsif ( $label =~ /verbose/i ) { $templabel="$job"; } $count{cleanup}++; } else { $tempstring = " $job [color=$nodecolor, style=filled"; if ( $label =~ /info/i ) { $templabel="Exec-$count{exe}"; } elsif ( $label =~ /verbose/i ) { $templabel="$job"; } $count{exe}++; } print DOT $tempstring, ', label="', $templabel, "\"]\n"; print STDERR "Adding node $job\n" if $verbose; $count{job}++; } elsif ( /^POSTJOB\s/i ) { $postjobs[$count{post}]=$_; $count{post}++; } } print STDERR "$count{job} jobs, $count{dep} dependencies, $count{post} post scripts\n"; close DAG; print DOT "}\n"; close DOT; print STDERR "Written dot file $dotfile\n"; print STDERR "Generating Image...\n"; my $command="dot -Gconcentrate"; if ( $type eq 'eps' ) { # eps is not supported by dot. # This is evil trickery to generate LaTeX figures ;-P $command .= " -Tps2 $dotfile"; $command .= ' | perl -pe \'$_="%!PS-Adobe-3.0 EPSF-3.0\n" if ( /^%!PS-Adobe-/ )\''; $command .= " > $image" if $image; } else { # the normal path $command .= " -o$image" if $image; $command .= " -T$type $dotfile"; } $result=''; if ( $image ) { $result = `$command`; } else { system($command); } my $rc = $?; if ( ($rc & 127) > 0 ) { print STDERR $result if $image; die "ERROR: Died on signal", ($rc & 127), "\n"; } elsif ( ($rc >> 8) > 0 ) { print STDERR $result if $image; die "ERROR: Unsuccessful execution: ", $rc >> 8, "\n"; } else { print STDERR "Successful graphics generation\n"; exit 0; } pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/fix-dot-eps.pl0000755000175000017500000000115011757531137023314 0ustar ryngerynge#!/usr/bin/env perl # # This program fixes the ps2 type output of graphviz's dot and # tries to make it well-behaved eps # use 5.006; use strict; my $head = ; # ignore original header print "%!PS-Adobe-3.0 EPSF-2.0\n"; # write fake EPSF header my (@bb,$bb); while ( <> ) { if ( /\%\%PageBoundingBox: ([0-9.]+) ([0-9.]+) ([0-9.]+) ([0-9.]+)/ ) { @bb = ( $1, $2, $3, $4 ); $bb = "$1 $2 $3 $4"; } elsif ( m{/PageSize} ) { s{/PageSize \[\S+ [0-9.]+\]}{/PageSize \[$bb[2] $bb[3]\]}; } elsif ( m{^\S+ \S+ \S+ \S+ boxprim clip newpath}o ) { $_ = "$bb boxprim clip newpath\n"; } print ; } pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/Common.pm0000644000175000017500000001340511757531137022411 0ustar ryngerynge#!/usr/bin/env perl # # common functionality used in the various log processors # # $Id: Common.pm 2558 2010-10-12 18:47:53Z voeckler $ # package Common; use 5.006; use strict; use subs qw(log); # replace Perl's math log with logging use Exporter; our @ISA = qw(Exporter); # create the function prototypes for type coercion sub log; # { } sub unix2iso(;$); # { } sub iso2unix($); # { } sub find_exec($;@); # { } sub read_submit_file($); # { } sub default_title($;$); # { } sub ploticus_version($); # { } # create the export lists our $VERSION = '0.1'; our %EXPORT_TAGS = (); our @EXPORT = qw(log unix2iso iso2unix find_exec read_submit_file default_title ploticus_version); our @EXPORT_OK = qw($VERSION); $VERSION=$1 if ( '$Revision: 2558 $' =~ /Revision:\s+([0-9.]+)/o ); $__PACKAGE__::prefix = undef; # # --- start ----------------------------------------------------- # use Carp; use POSIX qw(strftime modf); use Time::Local qw(timegm timelocal); use Time::HiRes qw(gettimeofday); use File::Basename; use File::Spec; sub utc2iso(;$) { # purpose: converts a UTC timestamp into an ISO timestamp for 
logging. # paramtr: $now (opt. IN): timestamp to convert, or current time # returns: ISO 8601 dense-formatted string representing the timestamp. my (@now); if ( @_ > 0 ) { @now = reverse POSIX::modf( shift() ); $now[1] *= 1E6; } else { # no argument @now = Time::HiRes::gettimeofday(); } my @tm = localtime($now[0]); sprintf( "%4d%02d%02dT%02d%02d%02d.%03d", $tm[5]+1900, $tm[4]+1, $tm[3], $tm[2], $tm[1], $tm[0], $now[1] / 1000 ); } sub log { # purpose: print whatever onto log stream STDERR with timestamp prefix # This log is a kind of debug log, containing lotsa infos # paramtr: any number of parameters # returns: - my $prefix = unix2iso() . sprintf( ' %7s ', "[$$]" ); print STDERR $prefix, @_, "\n"; # yes, permit buffering } sub unix2iso (;$) { # purpose: Convert a UTC timestamp into ISO 8601 notation # paramtr: $stamp (opt. IN): UTC seconds; defaults to current time # returns: an ISO 8601 compliant timestamp string # my $stamp = shift || time(); my $offset = int($stamp) - timelocal( (gmtime($stamp))[0..5] ); my @stamp = localtime($stamp); my $result = sprintf( "%04d-%02d-%02dT%02d:%02d:%02d", $stamp[5]+1900, $stamp[4]+1, $stamp[3], $stamp[2], $stamp[1], $stamp[0] ); $result .= ( ( $offset >= 0 ) ? '+' : '-' ); $offset = abs($offset); $result .= sprintf( "%02d:%02d", $offset / 3600, ($offset % 3600) / 60 ); } sub iso2unix ($) { # purpose: Convert an ISO 8601 timestamp into UTC seconds since epoch # paramtr: $stamp (IN): an ISO 8601 compliant time to convert # returns: UTC seconds since epoch # local $_ = shift; die unless /(\d{4})-?(\d{2})-?(\d{2})T(\d{2}):?(\d{2}):?(\d{2})/; my $stamp = timegm($6,$5,$4,$3,$2-1,$1-1900); die unless /\.(\d+)([-+])(\d{2}):?(\d{2})$/; my ($fraction,$pm,$offset) = ("0.$1",$2,$3*3600+$4*60); $stamp += $fraction; $stamp += (( $pm eq '-' ) ? $offset : -$offset); } sub find_exec($;@) { # purpose: determine location of given binary in $PATH # paramtr: $program (IN): basename of application to look for # @additional (opt. IN): additional paths to check (e.g. ".") # returns: fully qualified path to binary, undef if not found # my $program = shift; local($_); foreach ( (File::Spec->path, @_) ) { my $fs = File::Spec->catfile( $_, $program ); return $fs if -x $fs; } undef; } sub read_submit_file($) { # purpose: reads the submit file and extracts commands as k-v pairs # paramtr: $subfn (IN): path to submit file # globals: $main::debug (IN): If set, log the open read etc. # returns: hash representation of Condor submit file, with lc commands # my $subfn = shift; my %result = (); log( "reading sub file $subfn" ) if $main::debug > 1; local(*SUB); my ($k,$v); if ( open( SUB, "<$subfn" ) ) { while ( <SUB> ) { next if substr($_,0,1) eq '#'; s/[\r\n\t ]+$//; s/^\s*//; next unless length($_); ($k,$v) = split /\s*=\s*/, $_, 2; $v=substr($v,1,-1) if ( substr($v,0,1) eq '"' or substr($v,0,1) eq "'" ); $k = lc($k) if ( $k =~ /^[a-z]/i ); $result{$k} = $v; } close SUB; } else { warn "Warning: open $subfn: $!, ignoring\n"; } %result; } sub default_title($;$) { # purpose: Generate a default title string for a diagram # paramtr: $dagfn (IN): some filename to base title upon # $start (opt. IN): a UTC seconds stamp, default DAG file mtime # returns: title string my $dagfn = shift; my @stat = stat( $dagfn ); my $start = shift; # may be undef if ( @stat == 0 ) { # use current time if stat failed $start = time() unless defined $start; } else { # use DAG file mtime $start = $stat[9] unless defined $start; } my $result = POSIX::strftime( '%Y-%m-%d %H:%M%z', localtime($start) ); $result .= ' by ' . 
scalar( getpwuid($stat[4]) ); my @dir = split /\//, dirname(File::Spec->rel2abs($dagfn)); if ( @dir >= 2 ) { $result .= ' [' . join('/',@dir[$#dir-1,$#dir]) . ']'; } elsif ( @dir == 1 ) { $result .= ' [' . $dir[$#dir] . ']'; } $result; } sub ploticus_version($) { # purpose: determine the version number of your ploticus installation # paramtr: $app (IN): Full path to ploticus # returns: a version number as major * 1000 + minor, undef on failures # my $exe = shift; my $result = undef; local(*P); if ( open( P, "$exe -version|" ) ) { if ( defined ($_ = <P>
) ) { $result = ($1*1000 + $2) if /ploticus\s+(\d+)\.(\d+)/; } close P; } $result; } # must 1; __END__ pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/dax2dot0000777000175000017500000000000011757531137023354 2dag2dotustar ryngeryngepegasus-wms_4.0.1+dfsg/share/pegasus/visualize/dag2dot0000755000175000017500000002056511757531137022072 0ustar ryngerynge#!/usr/bin/env python import sys import os import xml.sax.handler import xml.sax from optparse import OptionParser COLORS = [ "#1b9e77", "#d95f02", "#7570b3", "#e7298a", "#66a61e", "#e6ab02", "#a6761d", "#666666", "#8dd3c7", "#bebada", "#fb8072", "#80b1d3", "#fdb462", "#b3de69", "#fccde5", "#d9d9d9", "#bc80bd", "#ccebc5", "#ffed6f", "#ffffb3" ] class Job: def __init__(self): self.id = None self.xform = None self.label = None self.level = 0 self.parents = [] self.children = [] class DAXHandler(xml.sax.handler.ContentHandler): """ This is a DAX file parser """ def __init__(self): self.dag = {} def startElement(self, name, attrs): if name in ["job","dax","dag"]: job = Job() job.id = attrs.get("id") if job.id is None: raise Exception("Invalid DAX: attribute 'id' missing") if name == "job": job.xform = attrs.get("name") if job.xform is None: raise Exception("Invalid DAX: job name missing for job %s" % job.id) ns = attrs.get("namespace") version = attrs.get("version") if ns is not None: job.xform = ns + "::" + job.xform if version is not None: job.xform = job.xform + ":" + version elif name == "dax": job.xform = "pegasus::dax" else: job.xform = "pegasus::dag" job.label = attrs.get("node-label") if job.label is None: job.label = attrs.get("file") if job.label is None: job.label = job.xform self.dag[job.id] = job elif name == "child": self.lastchild = attrs.get("ref") elif name == "parent": if self.lastchild is None: raise Exception("Invalid DAX: <parent> outside <child>") pid = attrs.get("ref") child = self.dag[self.lastchild] parent = self.dag[pid] child.parents.append(parent) parent.children.append(child) def endElement(self, name): if name == "child": self.lastchild = None def parse_daxfile(fname): """ Parse DAG from a Pegasus DAX file. """ handler = DAXHandler() parser = xml.sax.make_parser() parser.setContentHandler(handler) f = open(fname,"r") parser.parse(f) f.close() return handler.dag def parse_xform_name(path): """ Parse the transformation name from a submit script. Usually the transformation is in a special classad called '+pegasus_wf_xformation'. For special pegasus jobs (create_dir, etc.) set the name manually. """ # Handle special cases fname = os.path.basename(path) if fname.startswith("create_dir_"): return "pegasus::create_dir" if fname.startswith("stage_in_"): return "pegasus::stage_in" if fname.startswith("stage_out_"): return "pegasus::stage_out" if fname.startswith("stage_inter_"): return "pegasus::stage_inter" if fname.startswith("stage_worker_"): return "pegasus::stage_worker" if fname.startswith("register_"): return "pegasus::register" if fname.startswith("clean_up_"): return "pegasus::clean_up" # Get it from the submit file if os.path.isfile(path): f = open(path,'r') for line in f.readlines(): if '+pegasus_wf_xformation' in line: return line.split('"')[1] return None def parse_dagfile(fname): """ Parse a DAG from a dagfile. 
""" dagdir = os.path.dirname(fname) dag = {} lastchild = None f = open(fname,'r') for line in f.readlines(): line = line.strip() if line.startswith("JOB"): rec = line.split() job = Job() if len(rec) < 3: raise Exception("Invalid line:",line) job.id = rec[1] # Job id subfile = rec[2] # submit script if not os.path.isabs(subfile): subfile = os.path.join(dagdir,subfile) job.xform = parse_xform_name(subfile) if job.xform is None: # Otherwise just use the ID job.xform = job.id job.label = job.id dag[job.id] = job elif line.startswith("PARENT"): rec = line.split() if len(rec) < 4: raise Exception("Invalid line:",line) p = dag[rec[1]] c = dag[rec[3]] p.children.append(c) c.parents.append(p) f.close() return dag def remove_xforms(dag, xforms): """ Remove transformations in the DAG by name """ if len(xforms) == 0: return for id in dag.keys(): job = dag[id] if job.xform in xforms: print "Removing %s" % job.id for p in job.parents: p.children.remove(job) for c in job.children: c.parents.remove(job) del dag[id] def inv_reachable(a, b): """ Is a reachable from b using reverse edges? Reverse edges are used because it is a little more efficient than forward edges assuming that a node is more likely to have children than parents. Does a BFS using the child->parent edges instead of the parent->child edges. """ fifo = [a] while len(fifo) > 0: n = fifo.pop() for p in n.parents: if p == b: return True fifo.append(p) return False def simplify(dag): """ Simplify a DAG by removing redundant edges. Redundant edges are edges that go from a grandparent to a grandchild. In other words, they are edges that, if removed, do not change the dependencies in the workflow. We want to remove these because they clutter up the diagram and make it hard to read. """ # Find roots roots = [] for id in dag: j = dag[id] if len(j.parents) == 0: roots.append(j) # Assign surrogate root root = Job() root.level = 0 for j in roots: root.children.append(j) # Label all levels of the workflow (BFS) fifo = [root] while len(fifo) > 0: n = fifo.pop() for c in n.children: fifo.append(c) c.level = max(c.level, n.level + 1) # Eliminate any redundant edges (BFS) fifo = [root] while len(fifo) > 0: n = fifo.pop() children = n.children[:] for c in children: fifo.append(c) dist = c.level - n.level if dist > 1: c.parents.remove(n) if inv_reachable(c, n): sys.stderr.write( "Removing redunant edge: %s -> %s\n" % (n.id, c.id)) n.children.remove(c) else: c.parents.append(n) return dag def emit_dot(dag, use_xforms=False, outfile="/dev/stdout", width=8.0, height=10.0): """ Write a DOT-formatted diagram. use_xforms: Use transformation names instead of job names outfile: The file name to write the diagam out to. 
""" next_color = 0 # Keep track of next color xforms = {} # Keep track of transformation names to assign colors out = open(outfile,'w') out.write("""digraph dag { size="%s,%s" ratio=fill node [shape=ellipse, style=filled] edge [arrowhead=normal, arrowsize=1.0] \n""" % (width,height)) for id in dag: j = dag[id] if use_xforms: label = j.xform else: label = j.label if j.xform not in xforms: xforms[j.xform] = next_color next_color += 1 # Just in case we run out of colors next_color = min(len(COLORS)-1, next_color) color = xforms[j.xform] out.write('\t"%s" [color="%s",label="%s"]\n' % (j.id,COLORS[color],label)) out.write('\n') for id in dag: j = dag[id] for c in j.children: out.write('\t"%s" -> "%s"\n' % (j.id,c.id)) out.write("}\n") out.close() def main(): usage = "%prog [options] DAGFILE" description = """Parses DAGFILE and generates a DOT-formatted graphical representation of the DAG. DAGFILE can be a Condor DAGMan file, or a Pegasus DAX file.""" parser = OptionParser(usage=usage,description=description) parser.add_option("-s", "--nosimplify", action="store_false", dest="simplify", default=True, help="Do not simplify the graph by removing redundant edges") parser.add_option("-x", "--xforms", action="store_true", dest="xforms", default=False, help="Use transformation names as labels instead of the default label") parser.add_option("-o", "--output", action="store", dest="outfile", metavar="FILE", default="/dev/stdout", help="Write output to FILE [default: stdout]") parser.add_option("-r", "--remove", action="append", dest="remove", metavar="XFORM", default=[], help="Remove jobs from the workflow by transformation name") parser.add_option("-W", "--width", action="store", dest="width", default="8.0", help="Width of the digraph [default: 8.0]") parser.add_option("-H", "--height", action="store", dest="height", default="10.0", help="Height of the digraph [default: 10.0]") (options, args) = parser.parse_args() if len(args) < 1: parser.error("Please specify DAGFILE") if len(args) > 1: parser.error("Invalid argument") dagfile = args[0] if dagfile.endswith(".dag"): dag = parse_dagfile(dagfile) else: dag = parse_daxfile(dagfile) remove_xforms(dag, options.remove) if options.simplify: dag = simplify(dag) emit_dot(dag, options.xforms, options.outfile, options.width, options.height) if __name__ == '__main__': main() pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/ant2dot.sh0000755000175000017500000000023711757531137022534 0ustar ryngerynge#!/bin/sh # # Constructs the most common use case # dir=`dirname $0`; antfile=$1; perl $dir/ant2dot.pl $antfile > build.dot && dot -Tpng build.dot > build.png pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/show-id0000755000175000017500000006346211757531137022133 0ustar ryngerynge#!/usr/bin/env perl # # creates one timeline diagram, job over time # use 5.006; use strict; use subs qw(log); use File::Spec; use File::Basename; use File::Temp qw(tempfile); use Time::Local; use Time::HiRes qw(); use POSIX qw(floor ceil); use Data::Dumper; use Getopt::Long qw(:config bundling no_ignore_case); use lib File::Basename::dirname($0); use Common; $main::version = 'unknown'; $_ = '$Revision: 3322 $'; # don't edit $main::version = $1 if /Revision:\s+([0-9.]+)/o; # globals $main::debug = 1; # debug mode $main::duration = 0.0; # duration of workflow $main::min = $main::kmin = 1E20; $main::max = $main::kmax = 0; $main::adjust = 0; # adjustment for non-local timestamp sub usage { my $base = basename($0,'.pl'); print "Usage: $base [options] dot-dag-file\n\n"; print "Mandatory arguments:\n"; 
print " dot-dat-file path to the DAGMan input file.\n"; print "\n"; print "Optional arguments:\n"; print " -l|--log fn Force Condor common log file, ignore submit's log entry.\n"; print " -j|--jobstate In case you lost your common log, use jobstate.log.\n"; print " -?|--help print usage information and exit.\n"; print " -d|--debug increase debug level as often as specified.\n"; print " -V|--version print version information and exit.\n"; print " -T|--title s use this title string on the diagram.\n"; print " -a|--adjust s adjust non-local time stamp by this many seconds.\n"; print " -w|--width f set paper width to the size f in inches.\n"; print " --duration f set the diagram's duration to f (instead of setting width).\n"; print " -h|--height f set paper height to size f in inches.\n"; print " -l|--limit re limit jobs only to those matching the regexp.\n"; print " --special re for all jobs matching re, also protocol data.\n"; print " --pass k[=v] pass an option to ploticus.\n"; print " -s|-sort i 0: use topological sort order (default).\n"; print " sort by {1:SUBMIT,2:EXECUTE,3:TERMINATE} time.\n"; print " --eps fn Put the resulting EPS picture into the indicated file\n"; print " --png fn Put the resulting PNG picture into the indicated file\n"; print "\n"; exit(1); } sub max { if ( @_ == 2 ) { $_[0] < $_[1] ? $_[1] : $_[0]; } else { my @x = sort { $a <=> $b } @_; $x[$#x]; } } sub find_max($) { my $x = shift; max( $x->{'001'}, $x->{'017'} || $x->{'027'}, $x->{'000'}, $x->{'005'} || $x->{'009'} ); } sub min { if ( @_ == 2 ) { $_[0] > $_[1] ? $_[1] : $_[0]; } else { my @x = sort { $a <=> $b } @_; for ( my $i=0; $i<@x; ++$i ) { return $x[$i] if $x[$i]; } } } sub find_min($) { my $x = shift; min( $x->{'001'}, $x->{'017'} || $x->{'027'}, $x->{'000'}, $x->{'005'} || $x->{'009'} ); } sub mytime ($$) { # purpose: Convert Condor's log file timestamp into UTC seconds # paramtr: $date (IN): date portion # $time (IN): time portion # globals: $main::adjust (IN): difference by which to adjust non-local time # returns: UTC seconds since epoch # warning: Since the log file time stamp is not zoned, this may be off! # my ($month,$day) = split(/\//,shift()); my ($h,$m,$s) = split(/:/,shift()); # year fix $main::year = (localtime())[5] unless defined $main::year; my $r = timelocal($s,$m,$h,$day,$month-1,$main::year); $r += $main::adjust if $main::adjust; $main::min = $r if $r < $main::min; $main::max = $r if $r > $main::max; $r; } sub idsort { # purpose: sort by ID rather than jobname if ( $a =~/ID0*(\d+)/ ) { my $aa = $1; if ( $b =~ /ID0*(\d+)/ ) { my $bb = $1; $aa <=> $bb; } else { $a cmp $b; } } else { $a cmp $b; } } sub slurp_dagfile($;$) { # purpose: read all jobs from a dag file, and determine submit file names etc. # paramtr: $dagfn (IN): location of the dag file # $force (opt. 
IN): Overwrite for log file location # returns: [0]: name of common log file, extracted from submit files # [1]: hashref { dagjobid => $subfn } # [2]: hashref { dagjobid => $outfn } # my $dagfn = shift; my $force = shift; # may be undef my $dagdir = File::Spec->rel2abs( dirname($dagfn) ); my $count = 0; my %result = (); my %output = (); my $logfn = $force; # may still be undef # protect the DAG variable, and safely open DAG file local(*DAG); open( DAG, "<$dagfn" ) || die "ERROR: open $dagfn: $!\n"; # slurp all jobs from DAG file, and derive submit filenames log( "reading dag file $dagfn" ) if $main::debug; my ($subfn,%submit,@x,$mylog,$myout,$myapp,%parent,%child); while ( <DAG> ) { chomp; if ( /^\s*job/i ) { $count++; @x = split; $result{$x[1]} = $subfn = ( substr($x[2],0,1) eq '/' ? $x[2] : File::Spec->catfile( $dagdir, $x[2] ) ); # check Condor common log sanity %submit = read_submit_file( $subfn ); ($mylog,$myout,$myapp) = map { substr($_,0,1) eq '/' ? $_ : File::Spec->catfile( $dagdir, $_ ) } @submit{'log','output','executable'}; unless ( defined $force ) { # no pre-specified log file location if ( defined $mylog ) { if ( defined $logfn ) { die( "ERROR: Mis-match in Condor common log:\n", "\$mylog=$mylog\n\$logfn=$logfn\n" ) unless ( $mylog eq $logfn ); } else { $logfn = $mylog; } } else { warn "Warning: $subfn may be broken, ignoring\n"; } } # remember output file, if we are in kickstart mode if ( defined $myout && defined $myapp && $myapp =~ /(kickstart|seqexec)/ ) { $output{$x[1]} = $myout; } } elsif ( /^\s*parent/i ) { s/^\s*parent\s+//i; my ($p,$c) = split /\s+child\s+/i; my @p = split /\s+/, $p; my @c = split /\s+/, $c; foreach $p ( @p ) { foreach $c ( @c ) { $parent{$p}{$c} = 1; $child{$c}{$p} = 1; } } } } # done close DAG; log( "found $count job records" ) if $main::debug; # determine start nodes (no incoming edges) my %start = map { $_ => 1 } keys %result; foreach my $p ( keys %parent ) { foreach my $c ( keys %{$parent{$p}} ) { delete $start{$c}; } } # compute topological sort order my %topo = (); my @topo = (); my @q = sort keys %start; while ( @q ) { my $n = shift(@q); push( @topo, $n ) unless exists $topo{$n}; $topo{$n} = 1; foreach my $x ( sort idsort keys %{$parent{$n}} ) { delete $parent{$n}{$x}; delete $child{$x}{$n}; push( @q, $x ) if ( keys %{$child{$x}} == 0 ); } } ($logfn,\%result,\%output,@topo); } sub slurp_logfile($) { # purpose: read all event records from the Condor common log # paramtr: $logfn (IN): location of the condor common log # returns: # my $logfn = shift; my %result = (); # collects job tracks my %condor = (); # maps condor jobs to dagman ids my $count = 0; # use paragraph mode local $/ = "\n...\n"; local(*LOG); open( LOG, "<$logfn" ) || die "ERROR: open $logfn: $!\n"; # slurp one multi-line event at a time log( "reading log file $logfn" ) if $main::debug; my ($tag,$condorjob,$tds); while ( <LOG> ) { if ( /^(\d{3})\s+\((\d+)[0-9.]+\)\s+([0-9\/]+)\s+([0-9:]+)/ ) { ($tag,$condorjob,$tds) = ($1,$2,mytime($3,$4)); $count++; if ( $tag eq '000' ) { # SUBMIT event if ( /DAG Node: ([^\n]+)/ ) { $condor{$condorjob} = $1; push ( @{$result{$1}}, { '000' => $tds } ); # open record } else { warn "# SUBMIT event without DAG node translation\n"; } } elsif ( exists $condor{$condorjob} ) { my $n = @{$result{$condor{$condorjob}}} - 1; if ( $tag eq '001' ) { # EXECUTE event $result{$condor{$condorjob}}->[$n]->{$tag} = $tds; } elsif ( $tag eq '005' ) { # FINISH event $result{$condor{$condorjob}}->[$n]->{$tag} = $tds; } elsif ( $tag eq '009' ) { # JOB_ABORT event 
$result{$condor{$condorjob}}->[$n]->{$tag} = $tds; } elsif ( $tag eq '012' || $tag eq '013' ) { # JOB_HELD and JOB_RELEASED event $result{$condor{$condorjob}}->[$n]->{$tag} = $tds; } elsif ( $tag eq '016' ) { # POST_SCRIPT event $result{$condor{$condorjob}}->[$n]->{$tag} = $tds; } elsif ( $tag eq '017' || $tag eq '027' ) { # GLOBUS_SUBMIT event, GRID_SUBMIT event $result{$condor{$condorjob}}->[$n]->{$tag} = $tds; } else { warn "# Skipping unknown event with tag $tag\n"; } } else { warn "# Cannot map Condor job $condorjob ($tag) to a DAG job id\n"; } } } # done close LOG; log( "found $count event records" ) if $main::debug; %result; } my %submit_event = ( SUBMIT => '000', EXECUTE => '001', EXECUTABLE_ERROR => '002', CHECKPOINTED => '003', JOB_EVICTED => '004', JOB_TERMINATED => '005', IMAGE_SIZE => '006', SHADOW_EXCEPTION => '007', GENERIC => '008', JOB_ABORTED => '009', JOB_SUSPENDED => '010', JOB_UNSUSPENDED => '011', JOB_HELD => '012', JOB_RELEASED => '013', NODE_EXECUTE => '014', NODE_TERMINATED => '015', POST_SCRIPT_TERMINATED => '016', GLOBUS_SUBMIT => '017', GLOBUS_SUBMIT_FAILED => '018', GLOBUS_RESOURCE_UP => '019', GLOBUS_RESOURCE_DOWN => '020', REMOTE_ERROR => '021', JOB_DISCONNECTED => '022', JOB_RECONNECTED => '023', JOB_RECONNECT_FAILED => '024', GRID_RESOURCE_UP => '025', GRID_RESOURCE_DOWN => '026', GRID_SUBMIT => '027', JOB_AD_INFORMATION => '028', JOB_STATUS_UNKNOWN => '029', JOB_STATUS_KNOWN => '030', JOB_STAGE_IN => '031', JOB_STAGE_OUT => '032' ); sub slurp_jobstate($) { # purpose: read all event records from the jobstate.log file # paramtr: $jsfn (IN): location of the jobstate.log file # globals: $main::min (IO): update smallest state timestamp # $main::max (IO): update largest state timestamp # $main::year (OUT): updated (old common log didn't have year) # returns: {jobid} -> [ re-run# ] -> { submit event } => time stamp # my $fn = shift; my %result = (); # collects job tracks my %good = map { $_ => 1 } qw(SUBMIT EXECUTE FINISH JOB_ABORTED JOB_HELD JOB_RELEASED POST_SCRIPT JOB_TERMINATED GLOBUS_SUBMIT GRID_SUBMIT); my $count = 0; local(*LOG); open( LOG, "<$fn" ) || die "ERROR: open $fn: $!\n"; # slurp one line (event) at a time log( "reading log file $fn" ) if $main::debug; my ($tag,$job,$condorjob,$tds,@x); while ( <LOG> ) { chomp ; @x = split; ($tds,$job,$tag,$condorjob) = @x; # mytime $tds += $main::adjust if $main::adjust; unless ( /MONITORD_/ ) { # exclude monitord messages - it may run after the fact! $main::min = $tds if $tds < $main::min; $main::max = $tds if $tds > $main::max; } (undef,undef,undef,undef,undef,$main::year) = localtime($tds); next if $tag eq '***'; $count++; my $n = exists $result{$job} ? 
@{$result{$job}}-1 : 0; if ( exists $good{$tag} ) { if ( $submit_event{$tag} == 0 && exists $result{$job}->[$n]->{ $submit_event{$tag} } ) { # start new cycle with new SUBMIT event (focus on retries) # FIXME: This ignores other problems like RECONNECT_FAILED ++$n; } $result{$job}->[$n]->{ $submit_event{$tag} } = $tds; } else { warn( "# Skipping unknown event $tag (", $submit_event{$tag}, ")\n" ) if exists $submit_event{$tag}; } } # done close LOG; log( "found $count event records" ) if $main::debug; log( "$main::max - $main::min = ", $main::max-$main::min ) if $main::debug > 2; %result; } sub read_kickstart($;$) { # purpose: determine duration from kickstart output # paramtr: $fn (IN): location of kickstart file # returns: ( [ start, finish, duration, fail-flag ] ) tuples, or empty vector # my $fn = shift; my $match = shift || 0; my @result = (); local(*KS); my @backup = sort glob( "$fn.???" ); @backup = ( $fn ) if @backup == 0; foreach my $kickfn ( @backup ) { log( "reading kickstart $kickfn" ) if ( $main::debug > 1 || $match ); if ( open( KS, "<$kickfn" ) ) { my @x = (); my ($kss,$ksf,$d); while ( <KS> ) { if ( /^\s*<invocation\s/ ) { $kss = iso2unix($1) if /start=\"([^\"]+)\"/; $ksf = $kss+($d=$1) if /duration=\"([^\"]+)\"/; $main::kmin = $kss if $main::kmin > $kss; $main::kmax = $ksf if $main::kmax < $ksf; push( @x, [ $kss, $ksf, $d, 0 ] ); } elsif ( /^\s+<status raw=\"(-?\d+)\">/ ) { $x[$#x][3] |= ($1 != 0); } } close KS; log( "found ", @x+0, " invocation records" ) if $main::debug > 1; if ( @x > 0 ) { foreach my $x ( @x ) { push( @result, [ @{$x} ] ) if ( defined $x->[0] && $x->[0] > 0 ); } } } else { warn "Warning: open $kickfn: $!, ignoring\n"; } } log( "found ", @result+0, " total records" ) if $match; @result; } # global variables my $logfn; # Condor common log file overwrite my $adjustment = 0; # time zone adjustment (@#~! Condor) my $nounlink = 0; # remove temporary files later my ($title,$special,$width,$height,@pass,$limit); my ($minimum,$maximum) = ($main::kmin,$main::kmax); my $duration = 0; my $sort = 0; my ($jobstate,$outeps,$outpng); GetOptions( 'help|?' => \&usage, 'debug|d+' => \$main::debug, 'log|l=s' => \$logfn, 'png=s' => \$outpng, 'eps=s' => \$outeps, 'title|T=s' => \$title, 'jobstate|j=s' => \$jobstate, 'version|V' => sub { print "$main::version\n"; exit(0); }, 'adjust|a=i' => \$main::adjust, 'special=s' => \$special, 'width|w=f' => \$width, 'height|h=f' => \$height, 'duration=f' => \$duration, 'limit|l=s' => \$limit, 'pass=s' => \@pass, 'keep|k!' 
=> \$nounlink, 'sort|s=i' => \$sort ); BEGIN { $main::start = Time::HiRes::time() } END { log("done after ", sprintf('%.3f s', Time::HiRes::time() - $main::start ) ) if $main::debug } # determine dag filename to find submit files and kickstart records my $dagfn = shift || die "ERROR: Need the name of the DAG file\n"; die "ERROR: No DAGMan file $dagfn\n" unless -r $dagfn; my $dagdir = File::Spec->rel2abs( dirname($dagfn) ); # sanity check: find apps first, and fail early my %app = (); foreach my $app ( qw(ploticus convert) ) { $app{$app} = find_exec($app) || die "ERROR: Unable to locate $app\n"; } # determine all submit file locations my ($submit,$output,@sort); ($logfn,$submit,$output,@sort) = slurp_dagfile( $dagfn, $logfn ); die "ERROR: Unable to derive the name of a common Condor logfile\n" unless ( defined $logfn && length($logfn) ); if ( defined $jobstate ) { die "ERROR: No jobstate log $jobstate\n" unless -r $jobstate; } else { die "ERROR: No Condor common log $logfn\n" unless -r $logfn; } die "ERROR: Empty Condor common log $logfn\n" unless -s _; my %events; if ( defined $jobstate ) { %events = slurp_jobstate($jobstate); } else { %events = slurp_logfile($logfn); } # data file generation my $tmp = $ENV{TMPDIR} || $ENV{TMP} || File::Spec->tmpdir() || '/tmp'; my $data = new File::Temp( TEMPLATE => 'show-XXXXXX', DIR => $tmp, SUFFIX => '.dat', UNLINK => (! $nounlink) ); die "FATAL: Unable to create transient data file in $tmp" unless defined $data; log( "writing regular data into ", $data->filename ) if $main::debug; my $kick = new File::Temp( TEMPLATE => 'show-XXXXXX', DIR => $tmp, SUFFIX => '.dat', UNLINK => (! $nounlink) ); die "FATAL: Unable to create transient kick file in $tmp" unless defined $kick; log( "writing kickstart data into ", $kick->filename ) if $main::debug; # new ways to sort job order sub figure($$\%) { my $id = shift; my $sort = shift; my $eref = shift; if ( $sort == 1 ) { min( map { $_->{'000'} } grep { defined $_ && exists $_->{'000'} } @{$eref->{$id}} ); } elsif ( $sort == 2 ) { min( map { $_->{'001'} } grep { defined $_ && exists $_->{'001'} } @{$eref->{$id}} ); } elsif ( $sort == 3 ) { max( map { $_->{'005'} } grep { defined $_ && exists $_->{'005'} } @{$eref->{$id}} ); } else { undef; } } # use different sort order, if requested (default: topo sort) if ( $sort ) { @sort = map { $_->[0] } sort { $b->[1] <=> $a->[1] } map { [ $_ => figure($_,$sort,%events) ] } keys %events; } my $total = 0.0; my ($n,$r1,$r2) = (1,0,0); foreach my $dagjob ( @sort ) { my $id = basename( $submit->{$dagjob}, '.sub' ); next unless defined $events{$dagjob}; if ( defined $limit && $id !~ /$limit/o ) { log( "Job $id does not match /$limit/, skipping" ); next; } my $maxindex = @{$events{$dagjob}} - 1; # my $stamp = find_min( $events{$dagjob}->[0] ); # $minimum = $stamp if $minimum > $stamp; # $stamp = find_max( $events{$dagjob}->[$maxindex] ); # $maximum = $stamp if $maximum < $stamp; $minimum = $main::min; $maximum = $main::max; for ( my $cycle=0; $cycle <= $maxindex; ++$cycle ) { my $x = $events{$dagjob}->[$cycle]; my $exe = $x->{'001'}; my $grid = $x->{'017'} || $x->{'027'}; # Globus SUBMIT event my $submit = $x->{'000'}; # Condor SUBMIT event my $finish = $x->{'005'} || $x->{'009'}; my $match = ( defined $special ? 
$id =~ /$special/ : 0 ); my @kick = (); if ( $cycle == $maxindex ) { my $kickfn = $output->{$dagjob}; @kick = read_kickstart( $kickfn, $match ) if defined $kickfn; } unless ( defined $grid && $grid >= $minimum && $grid <= $maximum ) { # I hope *this* change does not break grid displays log( "adjusting grid start to Condor submit of $dagjob" ); $grid=$submit; # no-grid submit -> use Condor submit? } unless ( defined $exe && $exe >= $minimum && $exe <= $maximum ) { log( "adjusting executable to minimum $dagjob" ); $exe=$minimum; } # $id =~ s/-/_/g; if ( defined $submit && $submit >= $minimum && defined $finish && $finish <= $maximum ) { printf $data "\"%s\" %.3f %.3f %.3f %.3f\n", $id, $submit - $minimum, $finish - $minimum, $grid - $minimum, $exe - $minimum; printf STDERR "DEBUG: data \"%s\" %.3f %.3f %.3f %.3f\n", $id, $submit - $minimum, $finish - $minimum, $grid - $minimum, $exe - $minimum if $match; $r1++; } else { warn "# Unable to determine interval for $dagjob, cycle $cycle\n"; } foreach my $x ( @kick ) { $total += $x->[2]; printf $kick "\"%s\" %.3f %.3f %d\n", $id, $x->[0] - $minimum, $x->[1] - $minimum, $x->[3]; printf STDERR "DEBUG: kick \"%s\" %.3f %.3f %d\n", $id, $x->[0] - $minimum, $x->[1] - $minimum, $x->[3] if $match; $r2++; } } ++$n; } my $rescale_flag = 0; # scale by min/max if 0, kmin/kmax if true log( "sequential execution sum is ", sprintf('%.3f seconds', $total ), sprintf( ' (%.1f days)', $total / 86400.0 ) ) if $main::debug; if ( $minimum < $maximum ) { $main::duration = $maximum - $minimum; log( "dag worked for $main::duration seconds" ) if $main::debug; if ( $main::kmin > 1E9 && $main::kmin - $minimum < -10 ) { log( "Warning: Kickstart lower boundary before workflow, adjusting" ); $minimum = int(floor($main::kmin)); $rescale_flag |= 1; } if ( $main::kmax > 1E9 && $maximum - $main::kmax < +10 ) { log( "Warning: Kickstart upper boundary after workflow, adjusting" ); $maximum = int(ceil($main::kmax)); $rescale_flag |= 2; } if ( $rescale_flag ) { $main::duration = $maximum - $minimum; log( "Adjusted workflow duration is $main::duration seconds" ) if $main::debug; } log( "speed-up of ", sprintf('%.1f', $total / $main::duration ) ); } else { die "No good timestamps, very suspicious, giving up.\n"; } if ( $duration > 0 && $maximum - $minimum < $duration ) { log( "Using user-specified max-duration $duration" ); $maximum = $minimum + ( $main::duration = $duration ); } # auto-scaling for x-axis my ($xstubs,$xtics,$xlabel,$xdivider,$mywidth); if ( $main::duration <= 60 ) { $xstubs = 5; $xtics = 1; $mywidth = 8.0; $xlabel = 's'; $xdivider = 1; } elsif ( $main::duration <= 600 ) { $xstubs = 60; $xtics = 10; $mywidth = 8.0; $xlabel = 'min'; $xdivider = 60; } elsif ( $main::duration <= 3600 ) { $xstubs = 600; $xtics = 60; $mywidth = 8.0; $xlabel = 'min'; $xdivider = 60; } elsif ( $main::duration <= 14400 ) { $xstubs = 1800; $xtics = 600; $mywidth = max( 8.0, $main::duration / 900 ); $xlabel = 'min'; $xdivider = 60; } elsif ( $main::duration <= 43200 ) { $xstubs = 3600; $xtics = 600; $mywidth = max( 8.0, $main::duration / 3600 ); $xlabel = 'h'; $xdivider = 3600; } elsif ( $main::duration <= 86400 ) { $xstubs = 7200; $xtics = 1200; $mywidth = max( 8.0, $main::duration / 7200 ); $xlabel = 'h'; $xdivider = 3600; } elsif ( $main::duration <= 86400*2 ) { $xstubs = 14400; $xtics = 3600; $mywidth = max( 8.0, $main::duration / 14400 ); $xlabel = 'h'; $xdivider = 3600; } elsif ( $main::duration <= 86400*8 ) { $xstubs = 86400; $xtics = 14400; $mywidth = max( 8.0, $main::duration / 86400 ); 
$xlabel = 'd'; $xdivider = 86400; } elsif ( $main::duration <= 86400*60 ) { $xstubs = 604800; $xtics = 86400; $mywidth = max( 8.0, $main::duration / 604800 ); $xlabel = 'week'; $xdivider = 604800; } else { die( "ERROR: $main::duration s workflow is just too long!", "Extend this perl script with sensible larger data\n\t" ); } while ( $mywidth > 24.0 ) { warn "Warning: picture too wide, halving width\n"; $mywidth /= 2.0; } $width=$mywidth unless defined $width && $width > 1.0; log( 'width=', sprintf("%.1f",$width) ); log( "xstubs=$xstubs, xticks=$xtics" ) if $main::debug > 1; # ploticus file generation $title = default_title($dagfn,$minimum) unless ( defined $title ); my $myheight = ( $n <= 51 ? 5.0 : $n / 10 ); $height=$myheight unless defined $height && $height > 2.0; # too high (too much data)? $height /= 2.0 while ( $height > 200 ); log( "n=$n, height=$height, title=\"$title\"" ) if $main::debug > 1; my $scaleduration = int($main::duration)+1; $scaleduration = int( ceil($main::duration / 3600) * 3600.0 ) if $main::duration > 7200; my $selflocatingstubs = ''; for ( my $x = 0; $x < $scaleduration; $x += $xstubs ) { $selflocatingstubs .= sprintf "\t%d %.1f\n", $x, $x / $xdivider; } my $plot = substr( $data->filename, 0, -4 ) . '.pls'; log( "writing commands into $plot" ) if $main::debug; open( PLOT, ">$plot" ) || die "open $plot: $!\n"; # areadef.xautorange: datafields=2,3 print PLOT << "END" // // generated: @{[scalar localtime]} // #proc getdata file: @{[$data->filename]} #proc categories axis: y datafield: 1 listsize: @{[$n * 2]} roundrobin: yes comparemethod: exact #proc areadef rectangle: 0 0 $width $height xrange: 0 $scaleduration yrange: categories frame: width=0.5 color=gray(0.3) title: $title titledetails: align=C style=I adjust=0,0.2 #proc xaxis ticincrement: $xtics grid: color=rgb(1,0.9,0.8) style=1 dashscale=2 #proc xaxis label: time [$xlabel] tics: yes selflocatingstubs: text $selflocatingstubs minorticinc: $xtics grid: color=gray(0.8) #proc yaxis ticincrement: 1 grid: color=rgb(1,0.9,0.8) style=1 dashscale=2 #proc yaxis // label: DAGMan job id labeldetails: adjust=-0.5 stubs: categories grid: color=gray(0.8) tics: yes minorticinc: 1 #proc bars outline: no barwidth: 0.03 horizontalbars: yes segmentfields: 2 3 locfield: 1 color: green tails: 0.03 #proc legendentry sampletype: color label: Condor job duration details: green #proc bars select: \@4 > 0 && \@5 > 0 outline: color=yellow color: yellow2 horizontalbars: yes barwidth: 0.04 segmentfields: 4 5 locfield: 1 tails: no #proc legendentry sampletype: color label: Perceived queuing delay details: yellow2 #proc getdata file: @{[$kick->filename]} #proc bars select: \@2 > 0 && \@4 == 0 outline: color=gray(0.4) horizontalbars: yes barwidth: 0.06 segmentfields: 2 3 locfield: 1 color: gray(0.75) tails: no #proc legendentry sampletype: color label: True job duration details: gray(0.75) #proc bars select: \@2 > 0 && \@4 > 0 outline: color=red horizontalbars: yes barwidth: 0.06 segmentfields: 2 3 locfield: 1 color: rgb(1.0,0.5,0.5) tails: no #proc getdata file: @{[$data->filename]} #proc scatterplot select: \@5 > 0 xfield: 5 yfield: 1 symbol: shape=square style=spokes linecolor=black #proc legendentry sampletype: symbol label: Condor App Start details: shape=square style=spokes linecolor=black #proc legend format: across location: min+0.5 max+0.2 END ; close PLOT; END { unlink("$plot") unless $nounlink } my $dagbase = basename( $dagfn ); $dagbase =~ s/(?:\.(?:rescue|dag))+$//; $dagbase =~ s/-\d+$//; #my $epsfn = File::Spec->catfile( 
$dagdir, $dagbase . '.eps' ); #my $pngfn = File::Spec->catfile( $dagdir, $dagbase . '.png' ); my $epsfn = defined $outeps ? $outeps : $dagbase . '.eps'; my $pngfn = defined $outpng ? $outpng : $dagbase . '.png'; my @extra = (); foreach ( @pass ) { my ($opt,$val) = split /=/, $_, 2; #/; push( @extra, "-$opt" ); push( @extra, $val ) if defined $val; } my $maxrows = max($r1,$r2); my @arg = ( $app{ploticus}, $plot, '-eps', '-o', $epsfn ); push( @arg, '-maxrows', $maxrows, '-cpulimit', 600 ) if $maxrows > 10000; push( @arg, @extra ) if @extra; log( "@arg" ) if $main::debug; system { $arg[0] } @arg; if ( $? == 0 ) { @arg = ( $app{convert}, '-density', '96x96', '-background', 'white', '-flatten', $epsfn, $pngfn ); log( "@arg" ) if $main::debug; system { $arg[0] } @arg; if ( $? == 0 ) { # all is well, both executions were proper exit 0; } else { # ImageMagick failed warn( "ERROR: @arg returned ", ($?>>8), '/', ($? & 127), "\n" ); exit 1; } } else { warn( "ERROR: @arg returned ", ($?>>8), '/', ($? & 127), "\n" ); exit 1; } pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/show-job0000755000175000017500000005346111757531137022307 0ustar ryngerynge#!/usr/bin/env perl # # creates diagrams from jobstate log file, see --help for usage info. # use 5.006; use strict; use File::Spec; use File::Basename; use File::Temp qw(tempfile); use Time::Local; use Data::Dumper; use Getopt::Long qw(:config bundling no_ignore_case); $main::version = 'unknown'; $_ = '$Revision: 3291 $'; # don't edit $main::version = $1 if /Revision:\s+([0-9.]+)/o; sub usage { my $base = basename($0,'.pl'); print "Usage: $base [options] dot-dag-file\n\n"; print "Mandatory arguments:\n"; print " dot-dag-file path to the DAGMan input file. This path is also\n"; print " used to derive the location of the jobstate.log.\n"; print "\n"; print "Optional arguments:\n"; print " -h|--help print usage information and exit.\n"; print " -V|--version print version information and exit.\n"; print " -k|--keep don't unlink temporary data files.\n"; print " -a|--adjust s adjust time information by s seconds.\n"; print " -c|--color-file the file mapping job transformation names to color.\n"; print " -t|--title the title to be assigned to generated graph. Replaces the default title.\n"; print " -s|--show-jobnames whether to show the jobnames on right y axis \n"; print "\n"; exit(1); } # global variables my $adjustment = 0; # time zone adjustment (@#~! Condor) my $nounlink = 0; # remove temporary files later my $color_fn ; # the path to the file containing color scheme for jobs my $user_title; # the user provided title to be applied to the graph my $show_jobnames; #whether to display jobnames on the right y axis GetOptions( 'help|h' => \&usage, 'version|V' => sub { print "$main::version\n"; exit(0); }, "keep|k!" => \$nounlink, "show-jobnames|s!" 
=> \$show_jobnames, "adjust|a=i" => \$adjustment, "color-file|c=s" => \$color_fn, "title|t=s" => \$user_title ); # determine dag filename to find kickstart records my $dagfn = shift || die "ERROR: Need the name of the DAG file\n"; die "ERROR: No DAGMan file $dagfn\n" unless -r $dagfn; my $dagdir = File::Spec->rel2abs( dirname($dagfn) ); # determine job log my $joblog = shift || File::Spec->catfile( $dagdir, 'jobstate.log' ); die "ERROR: No jobstate file $joblog\n" unless -r $joblog; # globals my $dpo; # used later as $diff+1 $main::duration = 0.0; $main::min = $main::kmin = 1E20; $main::max = $main::kmax = 0; $main::failure = 0; %main::fail = (); %main::order = (); # records earliest timestamp for jobid %main::state = ( UN_READY => -1, PRE_SCRIPT_STARTED => 0, PRE_SCRIPT_SUCCESS => 1, PRE_SCRIPT_FAILURE => 1, SUBMIT => 2, GLOBUS_SUBMIT => 3, GRID_SUBMIT => 3, EXECUTE => 4, IMAGE_SIZE => 4, JOB_TERMINATED => 5, JOB_ABORTED => 5, JOB_SUCCESS => 5, #success and failure go to JOB_TERMINATED JOB_FAILURE => 5, POST_SCRIPT_STARTED => 6, POST_SCRIPT_SUCCESS => 7, POST_SCRIPT_TERMINATED => -1, POST_SCRIPT_FAILURE => 7 ); #ASSIGN DIFFERENT COLORS TO DIFFERENT JOB #THE HASH IS POPULATED AT RUNTIME by --color-file option %main::color = ( "pegasus::dirmanager" => "lavender", "pegasus::pegasus-transfer" => "magenta", "pegasus::rc-client" => "powderblue2", "unknown" => "gray(0.1)", ); sub load_job_colors( $ ){ # purpose: load the color scheme file. # paramtr: path to file. # fileformat: each line in file is complete_transformation_name color # e.g. genome::solsanger:1.0 blue my $color_fn = shift; if ( -r $color_fn && -s $color_fn && open( CS, "<$color_fn" ) ) { my ($job, $color); while ( <CS> ) { chomp; #work on non empty strings if( $_ ){ ($job,$color)=split /\s+/, $_ ; #print("For job $job color coding is $color \n"); $main::color{$job}=$color; } } } else{ die "Unable to open color file $color_fn"; } } sub find_job_out_file($){ # purpose: determine the latest .out file to pick up for a job # paramtr: $program (IN): the job name # # returns: name for the .out file to load my $job = shift; my ($out,$new_out); for( my $i = 0; $i < 1000; $i++){ $new_out = sprintf( "%s.out.%03d",$job,$i); #try and open the file to detect if( open( KS, "<$new_out" ) ){ close KS; $out = $new_out; next; } else{ #break last; } } #if no job.out.xxx exists try for .out $out = sprintf( "%s.out", $job ) unless defined( $out ); #print "Correct out file to pick is $out\n"; return $out; } sub find_exec($;$) { # purpose: determine location of given binary in $PATH # paramtr: $program (IN): executable basename to look for # $curdir (opt. IN): if true, logically also check '.' # returns: fully qualified path to binary, undef if not found my $program = shift; my $curdir = shift; foreach my $dir ( File::Spec->path ) { my $fs = File::Spec->catfile( $dir, $program ); return $fs if -x $fs; } if ( defined $curdir && $curdir ) { my $fs = File::Spec->catfile( File::Spec->curdir(), $program ); return $fs if -x $fs; } undef; } sub unix2iso (;$) { my $stamp = shift || time(); my $offset = int($stamp) - timelocal( (gmtime($stamp))[0..5] ); my @stamp = localtime($stamp); my $result = sprintf( "%04d-%02d-%02dT%02d:%02d:%02d", $stamp[5]+1900, $stamp[4]+1, $stamp[3], $stamp[2], $stamp[1], $stamp[0] ); $result .= ( ( $offset >= 0 ) ? 
'+' : '-' ); $offset = abs($offset); $result .= sprintf( "%02d:%02d", $offset / 3600, ($offset % 3600) / 60 ); } sub iso2unix ($) { local $_ = shift; die unless /(\d{4})-?(\d{2})-?(\d{2})T(\d{2}):?(\d{2}):?(\d{2})/; my $stamp = timegm($6,$5,$4,$3,$2-1,$1-1900); die unless /\.(\d+)([-+])(\d{2}):?(\d{2})$/; my ($fraction,$pm,$offset) = ("0.$1",$2,$3*3600+$4*60); $stamp += $fraction; $stamp += (( $pm eq '-' ) ? $offset : -$offset); } sub default_title($) { my $dagfn = shift; my @stat = stat($dagfn); my $result; if ( @stat > 0 ) { $result = unix2iso($stat[9]) . ' by ' . getpwuid($stat[4]); } else { $result = unix2iso() . ' by ' . getpwuid($>); } $result; } sub slurp_dagfile($) { my $dagfn = shift; my $dagdir = File::Spec->rel2abs( dirname($dagfn) ); my %result = (); open( DAG, "<$dagfn" ) || die "ERROR: open $dagfn: $!\n"; my @x; while ( <DAG> ) { next unless /^job/i; chomp; @x = split; $result{$x[1]} = ( substr($x[2],0,1) eq '/' ) ? $x[2] : File::Spec->catfile( $dagdir, $x[2] ); } close DAG; %result; } sub slurp_jobstate($) { my $fn = shift; open( JOB, "<$fn" ) || die "ERROR: Unable to read jobstate from $fn: $!\n"; # stamp jobid state condorid sitehandle walltime # * 1113521855 ID000017 UN_READY - - - # x 1113521856 ID000017 PRE_SCRIPT_STARTED - - - # x 1113521990 ID000017 PRE_SCRIPT_SUCCESS - - - # 1113521996 ID000017 SUBMIT 1633.0 - - # * 1113522089 ID000017 GLOBUS_SUBMIT 1633.0 griodine 120 # 1113522114 ID000017 EXECUTE 1633.0 griodine 120 # 1113522224 ID000017 JOB_TERMINATED 1633.0 griodine 120 # x 1113522224 ID000017 POST_SCRIPT_STARTED - griodine 120 # x 1113522426 ID000017 POST_SCRIPT_TERMINATED 1633.0 griodine 120 # x 1113522426 ID000017 POST_SCRIPT_SUCCESS - griodine 120 my (%result,@x); while ( <JOB> ) { my ($stamp,$jobid,$state,$condorid,$site) = split; #print( "[DEBUG] $stamp $jobid $state $condorid $site\n" ); if ( $stamp !~ /^\d+$/ && $stamp > 0 ) { warn "Warning: Ignoring illegal input line $_"; next; } $stamp += $adjustment; # $main::min = $stamp if $main::min > $stamp; # $main::max = $stamp if $main::max < $stamp; if ( $jobid eq 'INTERNAL' ) { $main::min = $stamp if ( $condorid eq 'DAGMAN_STARTED' && $main::min > $stamp ); $main::max = $stamp if ( $condorid eq 'DAGMAN_FINISHED' && $main::max < $stamp ); } else { $main::min = $stamp if $main::min > $stamp; $main::max = $stamp if $main::max < $stamp; if ( exists $main::state{$state} ) { if ( $state eq 'PRE_SCRIPT_FAILURE' ) { push( @{$main::fail{$jobid}}, [ $result{$jobid}[0], $stamp ] ); $main::failure++; } elsif ( $state eq 'POST_SCRIPT_FAILURE' ) { push( @{$main::fail{$jobid}}, [ $result{$jobid}[6], $stamp ] ); $main::failure++; } elsif ( $state eq 'JOB_ABORTED' ) { push( @{$main::fail{$jobid}}, [ $result{$jobid}[2], $stamp ] ); $main::failure++; } elsif( $state eq 'JOB_FAILURE' ){ #display between EXECUTE and JOB_FAILURE push( @{$main::fail{$jobid}}, [ $result{$jobid}[4], $stamp ] ); $main::failure++; } if ( $main::state{$state} >= 0 ) { $main::order{$jobid} = 1E20 unless exists $main::order{$jobid}; $main::order{$jobid} = $stamp if $main::order{$jobid} > $stamp; $result{$jobid}[$main::state{$state}] = $stamp; } } else { warn "Warning: $jobid: Ignoring state $state\n"; } } } close JOB; %result; } sub gen_data($$$\%) { my $dfn = shift; my $y = shift || die; my $n = shift; # number of jobs my $p = shift; die unless ref $p eq 'HASH'; warn "# Ploticus Data File generated is $dfn\n" unless !$nounlink; my $nn = $n+1; # try auto-guess x scale my ($xstubs,$xtics,$width); my $diff = $main::max - $main::min; if ( $diff <= 60 ) { 
$xstubs = 5; $xtics = 1; $width = 8.0; } elsif ( $diff <= 3600 ) { $xstubs = 600; $xtics = 60; $width = 8.0; } elsif ( $diff <= 14400 ) { $xstubs = 1800; $xtics = 600; $width = ( $diff < 7200 ? 8.0 : $diff / 900 ); } elsif ( $diff <= 43200 ) { $xstubs = 3600; $xtics = 600; $width = $diff / 3600; } elsif ( $diff <= 86400 ) { $xstubs = 7200; $xtics = 1200; $width = $diff / 7200; } elsif ( $diff <= 345600 ) { $xstubs = 28800; $xtics = 4800; $width = $diff / 28800; } else { die "ERROR: $diff s workflow is just too long!\n"; } my $title = defined( $user_title )? $user_title : default_title($dagfn); my $divisor = (defined $show_jobnames )? 10 : 20; #for show-jobnames we need names on right y axis to be displayed correctly. my $height = ( $n <= 101 ? 5.0 : $n / $divisor ); warn "# xstubs=$xstubs, xticks=$xtics, width=$width, height=$height\n"; my $cfn = substr( $dfn, 0, -4 ) . "-$y.pls"; #print "CFN file is $cfn \n"; open( OUT, ">$cfn" ) || die "ERROR: open $cfn: $!\n"; print OUT << "END"; // // generated: @{[scalar localtime]} // #proc getdata file: $dfn #proc areadef rectangle: 0 0 $width $height xautorange: datafields=3,5,10,12 yautorange: datafield=$y frame: width=0.5 color=gray(0.3) title: $title titledetails: align=C style=B size=14 #proc xaxis ticincrement: $xtics grid: color=rgb(1,0.9,0.8) style=1 dashscale=2 #proc xaxis label: jobs over time ( in seconds ) tics: yes stubs: incremental $xstubs minorticinc: $xtics grid: color=gray(0.8) #proc yaxis ticincrement: 1 grid: color=rgb(1,0.9,0.8) style=1 dashscale=2 #proc yaxis tics: yes stubs: incremental 5 minorticinc: 1 grid: color=gray(0.8) #proc bars select: \@3 != $dpo outline: no barwidth: 0.03 horizontalbars: yes segmentfields: 3 4 locfield: $y color: darkblue tails: 0.03 #proc legendentry sampletype: color label: pre script details: darkblue #proc bars outline: no barwidth: 0.03 horizontalbars: yes segmentfields: 5 8 locfield: $y color: yellow tails: 0.03 #proc legendentry sampletype: color label: condor job details: yellow #proc bars select: \@9 != $dpo outline: no barwidth: 0.03 horizontalbars: yes segmentfields: 9 10 locfield: $y color: purple tails: 0.03 #proc legendentry sampletype: color label: post script details: purple #proc bars select: \@6 != $dpo outline: color=orange color: lightorange horizontalbars: yes barwidth: 0.04 tails: no segmentfields: 6 7 locfield: $y #proc legendentry sampletype: color label: Q delay details: lightorange #proc scatterplot select: \@7 != $dpo xfield: 7 yfield: $y symbol: shape=square style=spokes linecolor=black END ; # taken care of in for loop below #proc bars # select: \@11 != $dpo # outline: color=gray(0.4) # color: gray(0.75) # horizontalbars: yes # barwidth: 0.06 # tails: no # segmentfields: 11 12 # locfield: $y # ##proc legendentry # sampletype: color # label: job duration # details: gray(0.75) # #END # ; #generate legends for specific type of jobs foreach my $job (keys %main::color) { my $color = $main::color{$job}; # do something with $key and $value print "job $job has color $color\n"; print OUT << "END"; #proc bars select: \@\@13 = $color outline: color=$color color: $color horizontalbars: yes barwidth: 0.06 tails: no segmentfields: 11 12 locfield: $y #proc legendentry sampletype: color label: $job details: $color END ; } # deal with failures here (optional output) if ( %main::fail > 0 ) { print OUT "#proc getdata\n"; my $flag; foreach my $jobid ( keys %main::fail ) { foreach my $item ( @{$main::fail{$jobid}} ) { print OUT ( $flag ? 
"\t" : "data:\t" ); $flag=1; print "$main::min\n"; print "$p->{$jobid} $item->[0] $item->[1]\n"; for my $timestamp ($p->{$jobid}) { print "$timestamp:"; } printf OUT " %d", $p->{$jobid}; printf OUT " %d", $item->[0] - $main::min; printf OUT " %d\n", $item->[1] - $main::min; } } print OUT << "END"; #proc bars outline: no barwidth: 0.03 horizontalbars: yes segmentfields: 2 3 locfield: 1 color: red tails: 0.03 #proc legendentry sampletype: color label: failed script details: red END ; } if ( $y == 1 ) { print OUT "#proc legend\n"; print OUT " format: multiline\n"; print OUT " location: max-0.5 max\n\n"; } else { #this is when we are creating graph sorted by start time print OUT "#proc legend\n"; print OUT " format: singlerow\n"; print OUT " extent: 8.0 \n"; #location is upper left #print OUT " location: min+0.5 max\n\n"; # change by Karan. Want in lower right always. #print OUT " location: max-1 min+3\n\n"; #location is below the x axis print OUT " location: min+0.5 min-0.5\n\n"; if (defined $show_jobnames ){ #generate right hand y axis that lists the jobnames #using the proc categories print OUT << "END"; #proc categories axis: y listsize: $nn comparemethod: exact categories: END ; #print out the names for categories in descending order accd #to start time of the jobs i.e descending numeric sort on value my %phash = %$p; my $tic_count = $n; foreach my $key (sort { $phash{$b} <=> $phash{$a} } keys %phash) { if($key =~ m/subdax_.*ID(.*)/) { #for subdax jobs we want to shorten the names. #my $short_name = "subdax...ID$1"; #for time being , we do no tinkering. my $short_name = "$tic_count:$key"; #print "Shortened subworkflow name for $key is $short_name \n"; printf OUT " $short_name\n"; } else{ #print the whole name printf OUT " $tic_count:$key\n"; } $tic_count--; } print OUT << "END"; #proc areadef rectangle: 0 0 $width $height xautorange: datafields=3,5,10,12 yautorange: categories frame: width=0.5 color=gray(0.3) #proc yaxis tics: yes stubs: usecategories minorticinc: 1 grid: color=gray(0.8) location: max stubdetails: adjust=4.8,0 color=redorange END ; }#end of if defined show_jobnames } close OUT; $cfn; } # sanity check: find apps first, and fail early my %app = (); foreach my $app ( qw(ploticus) ) { $app{$app} = (find_exec($app) || find_exec(qw(pl)) ) or die "ERROR: Unable to locate $app\n"; } foreach my $app ( qw(convert) ) { $app{$app} = find_exec($app); if( ! defined $app{$app} ){ warn "WARNING: convert not available. Only eps file will be generated.\n"; } } my %submit = slurp_dagfile($dagfn); my %job = slurp_jobstate($joblog); #load the color file if( defined $color_fn ){ load_job_colors( $color_fn ); } warn( "# min=$main::min @{[unix2iso($main::min)]}\n", "# max=$main::max @{[unix2iso($main::max)]}\n", "# diff=", $main::max-$main::min, "\n" ); my @keylist = sort keys %job; # sort by jobid -- Euryale only? 
my $count = 1; my %keylist = map { $_ => $count++ } @keylist; $count = 1; my %start = map { $_ => $count++ } sort { $main::order{$a} <=> $main::order{$b} } @keylist; # sort { $job{$a}[0] <=> $job{$b}[0] } @keylist; my $jsize = length( sprintf( "%d", $#keylist ) ); my $diff = $main::max - $main::min; $dpo = $diff + 1; my $dsize = length( sprintf( "%d", $diff ) ); my ($dfh,$dfn) = tempfile( 'sj-XXXXXX', SUFFIX => '.dat', DIR => File::Spec->tmpdir() ); die "ERROR: Unable to create temporary file\n" unless defined $dfh; for ( my $j=0; $j < @keylist; ++$j ) { my $jobid = $keylist[$j]; printf $dfh "%*d %*d", $jsize, $j, $jsize, $start{$jobid}; # id y pss psf s gs ex t pss psf ksb ksf # ------- --------- ------- ------- for ( my $i=0; $i < @{$job{$jobid}}; ++$i ) { if ( defined $job{$jobid}[$i] && $job{$jobid}[$i] > 0 ) { printf $dfh " %*d", $dsize, $job{$jobid}[$i] - $main::min; } else { printf $dfh " %*d", $dsize, $dpo; } } #print "\n $jobid "; #for my $timestamp (@{$job{$jobid}}) { # print "$timestamp : "; #} my $ksfound; #tracks whether valid kickstart output found #check to see if the number of elements in array holding timestamps from jobstate are equal to 8 if ( @{$job{$jobid}} == 8 ) { #my $kfn = substr( $submit{$jobid}, 0, -4 ) . '.out'; my $kfn = find_job_out_file( substr( $submit{$jobid}, 0, -4 ) ); if ( -r $kfn && -s _ && open( KS, "<$kfn" ) ) { my ($kss,$ksf,$d, $txn); while ( <KS> ) { next unless /<invocation\s/; $ksfound = "true"; $kss = iso2unix($1) if /start=\"([^\"]+)\"/; $ksf = $kss+($d=$1) if /duration=\"([^\"]+)\"/; #get the transformation name $txn = $1 if /transformation=\"([^\"]+)\"/; $main::duration += $d if $d; $main::kmin = $kss if $main::kmin > $kss; $main::kmax = $ksf if $main::kmax < $ksf; $kss -= $main::min; $ksf -= $main::min; if ( $kss > -100 && $ksf > -10 ) { printf $dfh " %*d", $dsize, $kss; printf $dfh " %*d", $dsize, $ksf; printf $dfh " %s", defined($main::color{$txn})?$main::color{$txn}:$main::color{"unknown"}; #print the jobid also in the end. 
useful for postmortem analysis printf $dfh " %s", $jobid; } else { warn "Warning: kickstart duration out of range\n"; printf $dfh " %*d %*d", $dsize, $dpo, $dsize, $dpo; } last; } close KS; } else { warn "Warning: Not reading kickstart $kfn: $!\n"; printf $dfh " %*d %*d", $dsize, $dpo, $dsize, $dpo; } } if( @{$job{$jobid}} != 8 || !defined( $ksfound ) ) { warn "Warning: job $jobid is underspecified\n"; #added by karan jan 28,2010 #most probably missing the postscript started and finished events #take kickstart start and finish as EXECUTE and JOB_TERMINATED my ($pss, $psf, $kss,$ksf,$txn); $kss = $job{$jobid}[4]; $ksf = $job{$jobid}[5]; #subtract the dag start time as we plot from when dag started $kss -= $main::min; $ksf -= $main::min; #print "For $jobid Execute $kss JOB_TERMINATED $ksf\n"; #assign postscript start and finish to JOB_TERMINATED if #POSTSCRIPT_STARTED is not defined and printout to .dat file printf $dfh " %*d %*d", $dsize, $ksf, $dsize,$ksf unless defined( $job{$jobid}[6] ) ; #print out kickstart start and end time printf $dfh " %*d %*d", $dsize, $kss, $dsize, $ksf; #determine the transformation name from submit file my $sub = $submit{$jobid}; #try and open the file to detect if( open( SF, "<$sub" ) ){ while( <SF> ){ next unless /\+pegasus_wf_xformation\s/; my($key,$value)=split /=/, $_ ; #strip out the enclosing quotes if($value =~ m/\"(.*)\"/) { my $txn = $1; #print "transformation name is $txn \n"; printf $dfh " %s", defined($main::color{$txn})?$main::color{$txn}:$main::color{"unknown"}; #print the jobid also in the end. useful for postmortem analysis printf $dfh " %s", $jobid; }else{ print "WARNING: Unable to determine transformation name from $value \n"; } } close SF; }else { print "WARNING: Unable to open submit file $sub \n"; } } print $dfh "\n"; } close $dfh; my $n = 0 + @keylist; my (@arg); for ( my $y=1; $y <= 2; ++$y ) { warn "# running y=$y...\n"; my $dagbase = basename( $dagfn ); $dagbase =~ s/(?:\.(?:rescue|dag))+$//; $dagbase =~ s/-\d+$//; my $epsfn = File::Spec->catfile( $dagdir, $dagbase . "-$y.eps" ); my $jpgfn = File::Spec->catfile( $dagdir, $dagbase . "-$y.png" ); my $cfn = $y == 1 ? 
gen_data( $dfn, $y, $n, %keylist ) : gen_data( $dfn, $y, $n, %start ); @arg = ( $app{ploticus}, $cfn, '-eps', '-o', $epsfn ); push( @arg, '-maxrows', $count+1, '-cpulimit', 600 ) if $n > 4999; warn "# @arg\n"; system( @arg ) == 0 || warn( join(' ',@arg), ": $?\n" ); #only convert to png if convert is available if( $app{convert} ){ @arg = ( $app{convert}, '-density', '96x96', '-background', 'white', '-flatten', $epsfn, $jpgfn ); warn "# @arg\n"; system( @arg ) == 0 || warn( join(' ',@arg), ": $?\n" ); } unlink $cfn unless $nounlink; } # statistics print "\n"; printf( "number of jobs: %d\n", $count ); printf( "number of script failures: %d\n", $main::failure ); printf( "sequential duration of jobs: %.0f s\n", $main::duration ); printf( "total workflow duration: %d s (speed-up %.1f)\n\n", $diff, $main::duration / $diff ); unlink $dfn unless $nounlink; exit 0; pegasus-wms_4.0.1+dfsg/share/pegasus/visualize/ant2dot.pl0000755000175000017500000000251211757531137022533 0ustar ryngerynge#!/usr/bin/env perl # # converts the dependencies in an ant build file into graphviz format # $Id: ant2dot.pl 4494 2011-08-26 00:27:39Z rynge $ # use 5.005; use warnings; use diagnostics; use strict; use XML::Parser::Expat; my $buildfn = shift || 'build.xml'; my $parser = new XML::Parser::Expat() || die "ERROR: Unable to instantiate XML parser"; print 'digraph E {', "\n"; print ' size="16.0,11.0"', "\n"; print ' ratio = fill', "\n"; print ' node [fontname="Courier",shape=rectangle, color=lightblue, style=filled]', "\n"; my (%result,@stack,@deps,$name) = (); $parser->setHandlers( 'Start' => sub { my $self = shift; my $element = shift; my %attr = @_; if ( $element eq 'target' ) { # <target name="xxx" depends="y1,y2..." ...> push( @stack, $attr{name} ); $name = '"' . $attr{name} . '"'; print " $name\n"; if ( exists $attr{depends} ) { foreach my $dep ( split /,/, $attr{depends} ) { print " $name->\"$dep\"\n"; } } } elsif ( $element eq 'antcall' ) { # <antcall target="y"/> $name = '"' . $stack[$#stack] . 
'"'; print " $name->\"", $attr{target}, "\" [ color=blue ]\n"; } }, 'End' => sub { my $self = shift; my $element = shift; pop(@stack) if $element eq 'target'; } ); $parser->parsefile($buildfn); print "}\n"; ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/notification/��������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�021303� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/notification/jabber��������������������������������������������0000755�0001750�0001750�00000010444�11757531137�022451� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/usr/bin/env python """ Pegasus utility for sending workflow notifications over jabber """ ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. ## __author__ = "Mats Rynge <rynge@isi.edu>" import warnings warnings.simplefilter("ignore") import os import sys import time import optparse # --- functions ----------------------------------------------------------------------- def usage(parser): print "" print "Usage: This tool is used by pegasus-monitord to send event notifications" print "over jabber. A set of environment variables are expected to be set:" print " PEGASUS_EVENT" print " PEGASUS_EVENT_TIMESTAMP_ISO" print " PEGASUS_JOBID" print " PEGASUS_STATUS (only for end events)" print "" parser.print_help() print "" myexit(1) def validate_env_var(key): if not key in os.environ: raise RuntimeError(key + " is not defined in the current environment") def myexit(rc): """ system exit without a stack trace - silly python """ try: sys.exit(rc) except SystemExit: sys.exit(rc) # --- main ---------------------------------------------------------------------------- try: import xmpp except ImportError: print >> sys.stderr, "The Python xmpp library is missing. On RHEL and Debian based systems," print >> sys.stderr, "please install the python-xmpp package." myexit(1) # Configure command line option parser parser = optparse.OptionParser() parser.add_option("-i", "--jabberid", action = "store", dest = "jabber_id", help = "Your jabber id. 
pegasus-wms_4.0.1+dfsg/share/pegasus/notification/ 0000755 0001750 0001750 00000000000 11757531667 5 ustar rynge rynge
pegasus-wms_4.0.1+dfsg/share/pegasus/notification/jabber 0000755 0001750 0001750 00000010444 11757531137 0 ustar rynge rynge
#!/usr/bin/env python
"""
Pegasus utility for sending workflow notifications over jabber
"""
##
# Copyright 2007-2011 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##

__author__ = "Mats Rynge <rynge@isi.edu>"

import warnings
warnings.simplefilter("ignore")

import os
import sys
import time
import optparse

# --- functions -----------------------------------------------------------------------

def usage(parser):
    print ""
    print "Usage: This tool is used by pegasus-monitord to send event notifications"
    print "over jabber. A set of environment variables are expected to be set:"
    print " PEGASUS_EVENT"
    print " PEGASUS_EVENT_TIMESTAMP_ISO"
    print " PEGASUS_JOBID"
    print " PEGASUS_STATUS (only for end events)"
    print ""
    parser.print_help()
    print ""
    myexit(1)

def validate_env_var(key):
    if not key in os.environ:
        raise RuntimeError(key + " is not defined in the current environment")

def myexit(rc):
    """ system exit without a stack trace - silly python """
    try:
        sys.exit(rc)
    except SystemExit:
        sys.exit(rc)

# --- main ----------------------------------------------------------------------------

try:
    import xmpp
except ImportError:
    print >> sys.stderr, "The Python xmpp library is missing. On RHEL and Debian based systems,"
    print >> sys.stderr, "please install the python-xmpp package."
    myexit(1)

# Configure command line option parser
parser = optparse.OptionParser()
parser.add_option("-i", "--jabberid", action = "store", dest = "jabber_id",
                  help = "Your jabber id. Example: user@jabberhost.com")
parser.add_option("-p", "--password", action = "store", dest = "password",
                  help = "Your jabber password")
parser.add_option("-s", "--host", action = "store", dest = "host",
                  help = "Jabber host, if different from the host in your jabber id."
                       + " For Google talk, set this to talk.google.com")
parser.add_option("-r", "--recipient", action = "store", dest = "recipient",
                  help = "Jabber id of the recipient. Not necessary if you want to send to your own jabber id")

# Parse command line options
(options, args) = parser.parse_args()

if options.jabber_id == None:
    print >> sys.stderr, "You have to provide a jabber id"
    usage(parser)
jabber_id = options.jabber_id

if options.password == None:
    print >> sys.stderr, "You have to provide a password"
    usage(parser)
password = options.password

host = options.host

recipient = options.recipient
if recipient == None:
    recipient = jabber_id

try:
    validate_env_var("PEGASUS_EVENT")
    validate_env_var("PEGASUS_EVENT_TIMESTAMP_ISO")
    validate_env_var("PEGASUS_JOBID")
except RuntimeError, err:
    print >> sys.stderr, err
    usage(parser)

msg = " === Pegasus Workflow Event ===\n" \
    + "Time: " + os.environ['PEGASUS_EVENT_TIMESTAMP_ISO'] + "\n" \
    + "Job id: " + os.environ['PEGASUS_JOBID'] + "\n" \
    + "Event: " + os.environ['PEGASUS_EVENT'] + "\n"
if 'PEGASUS_STATUS' in os.environ:
    msg = msg \
        + "Status: " + os.environ['PEGASUS_STATUS'] + "\n"

jid = xmpp.protocol.JID(jabber_id)
cl = xmpp.Client(jid.getDomain(), debug = [])

if host == None:
    host = jid.getDomain()

con = cl.connect((host, 5223), use_srv=False)
if not con:
    con = cl.connect((host, 5222), use_srv=False)
    if not con:
        print >> sys.stderr, "Unable to connect to " + host
        myexit(1)

auth = cl.auth(jid.getNode(), password, resource = "Pegasus")
if not auth:
    print >> sys.stderr, "Unable to authenticate with " + host
    myexit(1)

mid = cl.send(xmpp.protocol.Message(recipient, msg))
time.sleep(1)  # some servers will not send the message if you disconnect immediately after sending

cl.disconnect()

myexit(0)
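# A sketch of a manual test invocation (all values are illustrative; the
# flags and environment variables are the ones documented in usage()
# above, and a reachable jabber account is assumed):
#
#   PEGASUS_EVENT=start \
#   PEGASUS_EVENT_TIMESTAMP_ISO=2012-05-01T12:00:00Z \
#   PEGASUS_JOBID=ID0000001 \
#   ./jabber -i user@jabberhost.com -p secret -r admin@jabberhost.com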
pegasus-wms_4.0.1+dfsg/share/pegasus/notification/email 0000755 0001750 0001750 00000012676 11757531137 0 ustar rynge rynge
#!/usr/bin/env python
"""
Pegasus utility for sending workflow notifications over email
"""
##
# Copyright 2007-2011 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##

import os
import sys
import optparse
import smtplib
import subprocess

__author__ = "Mats Rynge <rynge@isi.edu>"

# --- functions -----------------------------------------------------------------------

def usage(parser):
    print ""
    print "Usage: This tool is used by pegasus-monitord to send event notifications"
    print "over email. A set of environment variables are expected to be set:"
    print " PEGASUS_BIN_DIR"
    print " PEGASUS_SUBMIT_DIR"
    print " PEGASUS_EVENT"
    print " PEGASUS_EVENT_TIMESTAMP_ISO"
    print " PEGASUS_JOBID"
    print " PEGASUS_STATUS (only for end events)"
    print ""
    parser.print_help()
    print ""
    myexit(1)

def validate_env_var(key):
    if not key in os.environ:
        raise RuntimeError(key + " is not defined in the current environment")

def send_using_smtp(sender, recipient, msg):
    server = smtplib.SMTP('localhost')
    server.sendmail(sender, recipient, msg)
    server.quit()

def send_using_sendmail(sender, recipient, msg):
    p = os.popen("/usr/sbin/sendmail -t", "w")
    p.write(msg)
    rc = p.close()
    if rc is not None and rc >> 8:
        raise RuntimeError("Sendmail exit status: %d" % (rc >> 8))

def backticks(cmd_line):
    """ what would a python program be without some perl love? """
    return subprocess.Popen(cmd_line, shell=True,
                            stdout=subprocess.PIPE).communicate()[0]

def myexit(rc):
    """ system exit without a stack trace - silly python """
    try:
        sys.exit(rc)
    except SystemExit:
        sys.exit(rc)

# --- main ----------------------------------------------------------------------------

# Configure command line option parser
parser = optparse.OptionParser()
parser.add_option("-t", "--to", action = "append", dest = "to_address",
                  help = "The To: email address. Defines the recipient(s) for the notification.")
parser.add_option("-f", "--from", action = "store", dest = "from_address",
                  help = "The From: email address. Defaults to the required To: address.")
parser.add_option("-r", "--report", action = "store", dest = "report",
                  help = "Include workflow report. Valid values are: none pegasus-analyzer pegasus-status (default)")

# Parse command line options
(options, args) = parser.parse_args()

if options.to_address == None:
    print >> sys.stderr, "You have to provide a To: email address"
    usage(parser)
to_address = ','.join(options.to_address)

from_address = options.to_address[0]
if options.from_address != None:
    from_address = options.from_address

report = "pegasus-status"
if options.report != None:
    report = str.lower(options.report)

try:
    validate_env_var("PEGASUS_BIN_DIR")
    validate_env_var("PEGASUS_SUBMIT_DIR")
    validate_env_var("PEGASUS_EVENT")
    validate_env_var("PEGASUS_EVENT_TIMESTAMP_ISO")
    validate_env_var("PEGASUS_JOBID")
except RuntimeError, err:
    print >> sys.stderr, err
    usage(parser)

subject = "** Pegasus Notification - " + os.environ['PEGASUS_JOBID'] + " - " \
        + os.environ['PEGASUS_EVENT'] + " **"

msg = "From: " + from_address + "\r\n" \
    + "To: " + to_address + "\r\n" \
    + "Subject: " + subject + "\r\n" \
    + "\r\n" \
    + "***** Pegasus Workflow Event ****\r\n" \
    + "\r\n" \
    + "Time: " + os.environ['PEGASUS_EVENT_TIMESTAMP_ISO'] + "\r\n" \
    + "Workflow: " + os.environ['PEGASUS_SUBMIT_DIR'] + "\r\n" \
    + "Job id: " + os.environ['PEGASUS_JOBID'] + "\r\n" \
    + "Event: " + os.environ['PEGASUS_EVENT'] + "\r\n"
if 'PEGASUS_STATUS' in os.environ:
    msg = msg \
        + "Status: " + os.environ['PEGASUS_STATUS'] + "\r\n"

# pegasus-status report
if report == "pegasus-status":
    msg = msg + "\r\n\r\npegasus-status:\r\n\r\n"
    try:
        out = backticks(os.environ['PEGASUS_BIN_DIR'] + "/pegasus-status --noutf8 --nocolor"
                      + " --noqueue"
                      + " " + os.environ['PEGASUS_SUBMIT_DIR'] + " 2>&1")
        msg = msg + out
    except Exception, err:
        print >> sys.stderr, "Error running pegasus-status: " + str(err)

# pegasus-analyzer report
if report == "pegasus-analyzer":
    try:
        msg = msg + "\r\n\r\npegasus-analyzer:\r\n\r\n"
        out = backticks(os.environ['PEGASUS_BIN_DIR'] + "/pegasus-analyzer -d "
                      + os.environ['PEGASUS_SUBMIT_DIR'] + " 2>&1")
        msg = msg + out
    except Exception, err:
        print >> sys.stderr, "Error running pegasus-analyzer: " + str(err)

# try to send using smtp first, and if that does not work, fall back to sendmail
try:
    send_using_smtp(from_address, to_address, msg)
except Exception, e1:
    try:
        send_using_sendmail(from_address, to_address, msg)
    except Exception, e2:
        print >> sys.stderr, "Unable to send email:\n", e1, "\n", e2
        myexit(1)

myexit(0)
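# A sketch of how pegasus-monitord might invoke this script (all values
# are illustrative; the flags and environment variables are the ones
# documented in usage() above):
#
#   PEGASUS_BIN_DIR=/usr/bin \
#   PEGASUS_SUBMIT_DIR=/home/user/runs/run0001 \
#   PEGASUS_EVENT=at_end \
#   PEGASUS_EVENT_TIMESTAMP_ISO=2012-05-01T12:00:00Z \
#   PEGASUS_JOBID=ID0000001 \
#   PEGASUS_STATUS=0 \
#   ./email --to user@example.com --report pegasus-analyzer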
pegasus-wms_4.0.1+dfsg/share/pegasus/java/ (directory)
pegasus-wms_4.0.1+dfsg/share/pegasus/plots/ (directory)
pegasus-wms_4.0.1+dfsg/share/pegasus/plots/images/ (directory)
pegasus-wms_4.0.1+dfsg/share/pegasus/plots/images/protovis/ (directory)
[binary PNG image data omitted; this directory contains the protovis plot
assets right-fade.png, return.png, jobstates_all.png, down-fade.png,
left.png, zoom-out.png, up.png, down.png, zoom-reset.png, and
jobstates.png, the last of which is truncated at the end of this section]
D B""ڈ!b!.7 Fx$9d#yiC"-}D^" YC1(JFPIT FMP[ ݉1h2zA*"z>A, ##Qhc0Lهb0VLf3Y$0Vºc1}c ؛N3v#qr8]5 Ke q5&Cs'KVxo|> _oA`@p$Pq B "a0EXcS󡋢K+kDNJ/AKH@D}+}?:CbF c :Ό0v3N02321`gaejgzɴ@$%Db1XG|@|K\a&1+2[3012dbB"b²%:K?+=$+uk)k (M͑-[=[4;]݂={5$ IdF'$#=$Md)59MD#sssxp(1Ή<yssK˄+(W#271w wek<<<<'yn:&V>#e]{ŏW/  l <4 ,l"  ,!l"!\,)<//b%/R)'.*%..zYXXXؼxx+ z m".eI)IOL[RRRR RcDi#*aLL̀,*!"[*/iʅʕ uGL&9o)~UWQ:ԥ|N JJUYURa5ZZwu9@ $ {L_Z4F-qZeZdm'c:8S4;:qt)MKo j@1476mxpHĈbTe4i,f`\cD$WSeSi鲙^vs6,> v wo-E--,ilKn:i5j-`o]g=oeצӖͮϷspr֎obn;㝝K?t\}]]p{.ãc3sKko^PfOvOؑcd=;{vuחŗ{}7nGJeگo̿68 `& 0/SAP^tAp~LQHa\YaVag–k÷"<#.GElb =HfPctcNli5H8r57^:pDaBiJG=l{&&Ml|>ґ*z ub}>}ibiҦo�ÁOӕzl=$phn`Πefe9=zђYY 7{|\x񭜠'*rQ#'N^cK{o@ `艹B3E EEv%%%CN?/5-\_vl<|¸3kgCϾVyJ_Pǹjxkk~FՎ_pYUWW_mo%K͍ 9/g_+W>_}uoH(k"5eDn&ݜrkٻyŦU;"wJr=vm^vj;|;^?z0a#GLutiy'7{5zj<mٯ<3:?6d4tGO;<qy1ctE/Jxzn, ·oɼ<9~w|wu{b?lLHXISݴ˙?ORg2})*7o^Si߷~[Y]T_XrZz3r ʅUծ5ϵO_2Z7m7Ƕ"��0� z�)���pA2*1X)l7./Jhe8xX<~pxs^fU@JQ(L8A$I4MxDdTygJs2ӔTTSh(i4h5iZFLL y,-6,md NϜ\\^9ཹ~N]Y)~d�@ `P,TwZBl~$r|eޤ}i?:.}a L#dz{?a䍼SEO˔z)U1uҨ*jFke._lʷk\׍n5el<Jw[]6{.;w?~֥-#гxތN >go>:P~rֱo2Fsכ$Lνpcc꧝3|3GfϝȷR ދƖUWW76�� P ӈrC!N*'?ҝ/e(d,cz@\cgMc$aF)\xȼz|]B_E>?/qL* -%!'[&,*Юd,үZnAACn>af24]f f9&]*γɲʹ˴ru,v:\z-Cc 2ۏ8d%ߖW)*KeI M FDEFDERCcBiaqĤ={ғ3S-Wv}gfjq:]y9K' O*+S<X2uz\YjѪs52l5\i2⯟qifBHm;~wc;^zu=i{|ޚgKJ]n~=rWBc*ozMO| M}T254+>K7?xz*Vs<1Ќ0!Hz1fa, 'CkΰD&*0$#c9d9mSxxS#EDEuPx+--5!!#+HOvC@^^K_˔t+SU*uj4jjNh6ў9z610Y2la.g٢2dOV5ց66l(o*)NN$Z&ܯxsd;;w6pQd`B&C OEKɢ)œ^%S쓢ʖm}?=$`ס]]<8ڑ #9'fsN,-|[_Vtzt_!vʤzO֬]Swksң+W^ϸ4rsYťu;m{jtS֓}6C^>w}͘h3_,nnm~[�kT�x�8/�D8t� γ07P ס>""#D*VdYDQEO8 atXS^l+v ۏ³w/7 ΄+]==JF?шI:QĬb:Ŷ~E!k9s|<uۍ{W%^~QA>B1boDDņ%<$y%IHGhɬ^ pN1HINiQJ:GͳZ):2z,z+  ]1>gRbzyEmVT`NvZNl .W;'v;v'p$-h1D6/tp$sut^jV\bDTW1 Kg8:}b@n|E%oK?2>]SuF}獟\ghb~nĽ+}8X7r3ѣx޾0z#go6 ?r%.k\SYſ�$K$ȇK aCg$Dv- &h ^C_ap/>f+ bpqCxI|~`Hc;@DI?ќɚiH|Ee5 ٥I;( G)#(2m^ +@PA@Q+19qF)V#RҜ/deirNJ l K+׫R=FSp״2QUГ՗113R20104s1ea-۪ԺSN<.Ʈnw^{oa3`Woy?iW\!Ea"#ꢿǨĶKL'kI}O)7pl>*v<2g2wɑӅ{JΖꕽ{Vry H]Cnjq.q{^N]=O |}㹓S/>~~1G|Ee7WͬX� VB2Š"T m8AP;4-CПY/ˀfap|pC@"Бu_2$3 10% ,/ ٽHB\VR<i^+!B¶"fbf^R12i9r wfTpjEuu/፵LL+^[p[zm+zoni7x靋k{ϫ޼>Y۷vOy/8r<t66R)::K/ܟj^t:=r*5W|BRщUe-gLT9Vy}!kC%|cɫmoLh&Tֻ3NG=O<U Zs KDr?Mte|I~^r?ЅsܗZYRʲ:fvc{}ĆF/_!z6e737̶*�bT��a2Z :^�3�� PV�@󿽑|wkA���bKGD������ pHYs�� �� ����tIME'u6x�� �IDATx}yxE[=Gp -(좢޸wWE}9uWE ʊ "K8 \ 7 Bb rUNgrDcRtWwo[A�D$Xc,V>qkzOB~hkVylU^KWy 8:�@Ϲ<N ƘWPݻ Gū P*(((^UPPxUAAA񪂂Uū B? 
AB"Xι^y]*oIAחfc@5PB@`k0"*XAR� a/U$RȪ@Jky >*o yrǤ}{G40!2c4M[ti߾} r.^ @^3g}ݭZڻwE>f͚)^UPh@@#Gʯs΍뮻Ѐ 4رc3!d׮]eee GJu3f;vlRRҖ-[۷khū K?͛;6xW] yfU٫ >8'iС͛7y_1&PCu]oyc;5tOVPP%u`Z8NJ)Bjꡇڰaҁ#Gl6ɽ7o+8ʷ 䪙 iӦc^{҂4áxUA@ٔ?~ƍyyy r`)^UPh, �0>}|l544tƌݺu0aQPidG}tҥEo �ڵkce*(4,q>cBiVLW~}[k]6;;2!A5j\T&[VALJiNN… O:E);vl̙q0Ƭ,jL2~PQPh8URIڷoOٳ75tP]eB뺕E8� ð.`'*1ɜ3i (Ph8�x.\cz<9灼J)}$lΨKqiN1J*4f(..>UW]k.�ȑ#U-Iάʩ^5%u7ӥըF),,GEEQJժ�,&._|…۷oѣGN6oޜ֩SR K^n7El T͝O4iƍ̙r"##NC!1ek4ٺuƍg̘5(]vEDD4L_N1r}СCG< tҞ={6XWB5YwofffXM/^|}뫕kӫKǏbŊ1RcbbNtߥ?SoVii韧,ڴiO2D.QJ۵k`Ꚏ;wܼy?.%U0 LpTTTvV*غuaQ7mڴpcǎ1 p:-8p 6!%ֲlTG0]|Ǭ7UUk΄yi rΫ<5'aÆѣG81FٸqcHHH='>>g4h4Ѿ}(jj:YAnRtXA_!C 6UjeW* tM&M~6|)!G(+֭[5w ͇&?V7)"n:Ƨd?Jh9RrG^fMRRR֭n7o^jjjrd!Df͎=t:KKKp$uJjUm(էw;wl6 �gڵȮ:uÇ fvK!j.HS<JkJ9`2%Pҡ}@~KFe]qrgrs2]cƚTIN8?�SRBB2uDqNp ycR(eqCJ]v?^7Dx<v]i[o 8):2SP!$99Ag*r!f#FdffvAJ'~xHfrGvstMtc=C\.i_;vVDt.PhѢ3g[woh9v#F\.!9W^ݻwM1yuֽ|u-׆oZݻӯfBceTk[\\CBB(͛74iRrrr$%4b;DGEWk�@6m\.dְ7'eqRJ u16gΜ>}]_aÆQFp8Ν۷o_9*EʍQݭ[7�u===w].*???\_J_ T-W{'//2]@Ib/,,OVZղeˠONNJL<z(Lpiv8b75HdɒǏOJJJKK۽{={§&M6_WBBGz_~U)6izN:M:UV2@p8xaj\{Ç'j|1bĈ\9FEEׯ_/a5Fϥs(??Æ G7OJ$.6.+++22T$Vr%U?~7oX9`).k5 %V>G&&&'oi J9z7:th=M6;'O޼y?^zIqSt+zJi۶m΀(ӱv2ɷ$(**;voc 3_y9ry+^pkf۷ۢ"y~;%%%33Slڿݧzii˖-ݻT5>>.E뭷ޚo?ձcǤhi1o۶2jUfΜv,Y+eM@SN1/㡡Gm6z'vm3fxg-RIԼl۽^UYpS\ 0v]. (.. ܀pm|Lf1'߿]vѣ5kV/M6={6--m̘1Æ {GTSc,11~W&BX۶mO>t8VuӜf^hhhzzɓG5jԨ_~YV7L@뺜w&L.0,AN3{7mԥK#G~3B… 'O q)U;vӦMVU2Mu}l;}M uaaa`۷7 kIBy?z|nEEEN5' ȑ#ꫯNMMMMMׯ߶m*f~2d�DFF۷W^{ޱcGu6VO?ݲeK|1mcjV6ҿ^T毸ÇGDFH3fwQS&Ne˖s9s}SL1UmpᒒӉJǎ *\�8!$>>>22 L8q"%%@~իWmfyby7ڷoƠ7o̘1]v-..N/^W~̙_ϙ3ٳ)))v +4>xޢ"͗9Bl߾=77:w]=z$'v~w&N4o|̘1#GرcfjDiæ0u{eeeEEE_XbE6V@NW?~ڮ]s h8۷oߣ>* Zl?o1u!lxgK 7ۧk4GMTr9rEL%0 :L8`X�vQ|)]B0R?>�8C$eˆ u,%!S)))qqqgxXx}DM/k<y0ؾ}-bT1F_cf+yvͪ7ӷdMؽ{_~9k֬:x<Yg8YW^yebbbXeRNK;LW!VuB'O{ۙ3g^p!&&fԩAO4wMIIӧcZ FPܸ!_0rY9t]`lL(о=�TW&Y */`\.͛k&8s ]MBY|![놂|NnSL9r?~:c߾7pàAwD1oAn8xІ.((=zzWپXAuGOι P6%dҮm(c:u8++-(˞}m#1%O||ٳeMٙg|E5fAI={<3رcsss}^{|fU=^ϦMVZ&?wD:x9焐nuֵknl eD~~~^^^vvEEE/b&IԩSwuWZZ[nc軺PuN~>Z4#3s~Сݻwݻ7#3cϞ={-,, 9;w;tt-\nwy͚5qqq&L֭=;P!&`߶m!DSJm6irnݺ*"? :t__K/cϞ=UZ=@yaaaN>\ NtʼP|||[W1\ƏѣCCC;r bcc?#B<0yӧ7kWr ڮVNu&m0ڴiw&osݻwO6mѢEn*idl޽󟥾C 3> ,ꪫ Nk6p 7!7߼/#c7vuذaA_NАOfԨQ�0hРT� ;駎;꺮IA 役-LСCѳe˖#Fګi&&&+ҩzDDD|||݆,YeRmriʧzQ{1;v n큼""ʬc!,23]J6W$ k2QKJJKc,[SSSKKKW^m>.0dw)tMg1RRRzݢEGc* ה!eyݓK/t 7Xn�<*n[V{<RRbԦ Ze*$I&˯=� 0'N7nmfᤔ#G!ÇCBBdeٛoY\O?411';wg͚UOiCYYق ڵkrԓGal6ۗ_~i:TaG@`ii5PB#]oRޒ �o}ꫯvm>`曗^z)%%%22Rr  egrN>=rȨ(t%޳g\as.dq2RZVV�;w9Lҹs^w7%i9L4Mu۹s-ZT+Wc'O^lڣG7 ˫/yn*ŬI?hРg>C;wή?Ou5RK￿ܾ}'OZy͌&$$n޼9~5|MiiBӧO@II<o~:TxG?30C-Zgm۶-Wu+ &=;vVG'BѣGWIBi ɤQ￟oL81,, �222N8ѱc:讈xر1cDDD\oD|ܹLD|衇d0Zoٲe|B|<FLT*bSpl܂Itbd\fY [܇e[nѢ_|ӧO;weƚ}8iΜ91 s:1W ,Tǎ>} �`]t.ޘ1cbX2{<M|gGM lWy)Sl63*}ܭiZHHHǎͼ]]4_VG||:sl;bĈG7K79mӉ/zi#G|ǿ;kvrPBr-!9?vɇXZZZ5qr:䜗rx~ʕ /MB-�6mZvzѷoߞ={n۶M6Ӵk׮sεy*{H^04:@>kK0M'xw ;|ȰC;u}WZ=�6&J{�ТE !7 ҁLYϟ3g0>}Zt )fc6Zm3ּH9Չep7 xiL_}USLٿpyUn%iIƠ储ֻ Hyr^K(mR0IIN_?y{?>TlHFx<T̬; 9UܪʳJ~+<822riii6<%V 2~>~ܹs'O<}?kقΨ2KGu\2*O"F�_{{С7lelICVtMyyygZo\& gQofϜ, n.]ڵKZAJ�صkW.]@)6ma>hpr%�p뭷.Ydʔ)ҥd-U MM2Ν;iӦxݓJJJ+9˚O)>l_/5(ê\_322|(�$''k#·FRgFv֭[k׎1f/X לa(44 ^e˖Ǐ/~뭷(w_u?9r_~Ӷm[c֯Z@[+W[;v @ qԩ+sʱԄ|+V={ݻ{yWoڴiӦMIIIU9 �Gϝؾ}*jͥr$/FUXXhx Ø3gu]D1 yO?p³g !<^ωO<쳲>|Z PҖpW_}գGN:AU$=ǎ0aBFF8p}71WU돭|oqJp)--l{裏 Vun+CkZҍ7xZ-[\lٺuTj1FE}t8+ڒeiӆJ(9zA6mtB9rw{R/'w^oЪ񤤤\(:=)zBەl0?^d "4/Q)M .ru{<ӧ%1WpW_m9o޼W^y1(jHرc>}$%%u޽O>=ztS#ݻwǃw!n:td^N]6O8s:<!!A:wr0JyC-kIYTTTNN΁LڷoRSVvggg3 5kvGN 6fBrrr811Aέψ#꫖-[&$$l^{}+֊+ϟ}^z%&&]yJnR0Iq̘1W6312,e˞}qoذ!;;{-QQ5lkON.;6mZBBB|${>*Xtttl\uM`y?U^oxD㉹"&:*:֣[.m?W^zJ65vBdeeu)??/><hpǎ+wE߿f! 
7лwo_{O:?~W9r`FQQŋeW:.D,,,\vW˯ 9|s_J.xTym"xꩧNXBB°w�� �IDAT)S\y~ P-)lG+fϞm߻w:VXp!�ٳ16xNeG4ƈ!C z#y6YWRRҥKҲ2U\R<}O>DYjR000wIjVXssĄN:%$$$%%u)))))))---WӿI[6mJNN6\( ;sL-K嶨!]nn޽{\`ݵkWVd ;h'NTߦNhzB^xa3.\8~RM2e֭YK.9rٳgcUlxޱc^xv:eʔK֮]?A%q/^X68Y;wź/pzG `NM+ֆVh8jC Z>..nB-I񥥥&]͛7߯ X*l6ʕ+M\zzzk/Y&\SJm֯_?iI\NeW1?9rD6 �YA"l躦iWҨ<yRYUM̓3Κ5^8zh\\f[~5\�WKj"biiDžf۶m\TWޑ�S-=%pcwرc7nvwiӦNix}اzj…ǏG Ø6mĉ111qʔ)ڵ ׯ~eI3Vaҥ111>… GxY̙3@K,MUfϞ-o :::pݳf͚1cΝ;M=>=1XHHڱc%,_SQhN`@KTk׮EǏ=:99ĉUO{-[6iU_\e.0 .\0~#<ү ֮][VV6{V`3"!?^TTԳgPB۷o(kevt]kɒA[Tѿrɀn/k6mڴ9s8Ǧ<ğ;taÆ-]O>)5T#ZJM5;ic;v]WLtݚa{:;jU$ 8}?O?~mVvos՟s.#""7lk׮ FaۜܜΝ;/^6?ٸqرct钔߹sgzU+~]wIGQ~~~lllVVV۶m/3'.MoW_}={BCC,5o޼VZM4^s۷momݺubbbӤBhrN'><tPس>ۻw%r D+ëU0رcϟ??|p5kRSS_Kbz"Y-�lok;1t?`({~aC٪OVk׮_~|8M6zaaaqk8о\ڐ&RO8##c޼ysΕNj-EcMW=WPO="\KoG3gѣ;w,jHbȐ!V2 #;;{̘1ݺu;vu-F")..^b\ޓWe?__o5$Ք_}RN.RݱcM4c^0?0X=G޹?[Ӄ ݻz1c|oO<1uT�XZju]͚55kVhh;3s, 5/LJJj֬nB̝;gϞU:!… ⋼R{XrIgBŋsssرcɒ%˗/Vol#9? {Sn馧z{ZBaԲ]fggT7scƙF5BȾ}(t4$$Df2EGG_ĵ߻g̙3VZBf_5hּ%ۿhhAnnW_}uwIImOOO_pCn 4hP|tsժUv駟NII ( VhzLGaaٳg[n-deeuҥP4BCCo;nᡇk1~娎Fg߿Į r(.+xvrW_16<ȳxWѣΞ=�<fk߾}B3fH\VNn;..N)|R:gΜ[n%666--4]֕.F=Ќ3FQm2\߽{wD@~(((1bYXSy3-I],YҪU+?5nܸvZccc$ѷ'x<%,YDƢUM\t,Iڵk޼y<̷~+;To/**JLL3gkVE~BO:uv355uTt5`k[qB}ݗѧOgN:5++yAa7Nh "}߇JMr'55ddd 6 �F_~_-[P&"""N<yasݻ###!~e缬z.\hݺL970 ! 8BeѣGY&%%EN zO6/^{?Wvu~BӵUbٲeO>ĉ;v/##caaa[||zvSJ뮿o;vgLصkO?v#F;wN·-Zo߾RtMn ;V~RARڴis-[L4i-6N%7)LU%eYYf7uMr<f޽֭z7pÀ\SϖأG&'':|/^Ps"D䇒wȑ#{طoݻPm$STTKvwhѢ֭[?s1bDϞ= U>pSTwչs'OKKK'Ol323t%z<k \9|_|Ѥ3Xjjꉓ'ke1o_~ЇߣGBHHHȰaV\Yyьٳg?s[nz/UhL5kVBB™3g fٳk׮} Sxdffgƍȑ#yoh\4o֭={4 $$uy XeKL@?Sv333[ncǎHw6l":4%%G={ׯ_e}9<ŴMu/X ))i޼yNs֬YV~)..={L0rrsf̘ѫW/sJܮ[o555u֬Y=\JJʴijf^V>|8�n:99gϞ=zزeK= IIIٳgǰX6ܷo_ffΝ;3226yڵkK̘BTq>l~灹Vu:+?^˵}]6m/_XUmmyWϟkHCgcǎÈ;x!dff ٜ:}:"<� 5k@Jn:yK4 G)--1cFJJJxx\EbN)50??_:Y׷ߑ#GFWi&IquM[n!]4`&X7UV`W5wn;:::,,,hIKjb_}աCu]mڴIKK׿^bvFiӦ:=.kڴiow; x f7|s7$5 �233o&_Trҥ>";vW^LR j 6lݺ5..nG} "{g̘}v(P &<gΜ<={u6x`K*s:ݺu[`yٳg_-Z;w {zү:11qy3|ӧ_Ϟ=zj_OcСC%ALrJMMݔ&2dƌ'->;w<|M^|EƘigB) *3Z=zt JMM^z :Xqw[k%mv;K8iiiz~}N0o zDQTT;k֬29g>))).^`5k�[6FRrgu*ZqL˓ .GDD$''g˪C:\qԩSIIIrON&O,өo߾`Y1Τ__uUO=THHy-O~Wnܸ1v#N:瞛9sf^^^YYYIIM(WpDK /w�<^:�Vw%33Qޟ'URZwv]4?ret�h֬L'(232'&(w%/}ܹ7lZ;zhy1o߾𐐐{om۶mxxxDD,(o ZҡY- B_ne )iuRB.OK zُ<ȝwQ@Mӧo\ �x/bAue;%{HI{m} ]{U]tɗ\MHDKٙݔ%r9 Jr+9hʕIIIG?ϟTKq̙[ӴN:@>}s...NNN~Wvd?=<OWДXc颶ne!X%>~|w<xwޅ;v޽օ~zuc겂]d2SQPQ%kI4ѿƤtƚ/jU.]V3((̶0+((^,U7 *hT{\5gzܯ,VhrDe:QUUɨX~+]_t-2\V[7',&�5fB/yi_SY%0 %Tj}k<1gl_Kc%e.i|cZ7B4Senzi6"U@4G'~\=}rU_g.uIJ1jdZk|%G&"zdK"K$.d*~IXmmj^qڳ,QMyp&TeSb5D򏯣7ҁ~iXAAWUū W*(((^UPPPxUAAA񪂂Uū W*((^UPPPxUAA񪂂Uū W*((^UPPPxUAA񪂂Uū W*(((^UPPxUAA񪂂Uū W*(((^UPPyUaymy?ϯ5˹k %AmyRJsBHqf~& bS_W`&~] MӪ%%0Ƞ6il=$5BkGtιd]C~k�!1�cR~{% OgI(z9*^Ep~(I0:O6r+GYOT[VY`̼�}Fusj4GH)U" !L0)ߪI¨zOri8p�@UV={vY %<g]`ʕ�pUWu-Y:;"0tML{Сq�hG.]tEqS.m6�Xt"lDfm *5F)u䫃| 8ЂJnw\V8k9GD^땟$:NB)-++r&U܋O+r$͙3'44 x<͓`}yϝ;7ZH4Mj(?^s>o<ŨM̙��\.$?ߚz}~Q+c(ENiM%t:O<)S' @BPͱu"rsrs4M3o0̈xɐR rrs8u- fd  Qx@ƅhۦ~s��_߯C(�:Ӵ.W/@�ОK�@�� ��pQpQ�4 mZp: { Rj :t�( @Ͼ׵iC)ݟ]�@]bbBրcbb�r+ ` hUV3пM!}ʰH�l~W7 Vqh<4zͪiz~*]UB?䓰0�p4䴺!N hk[z3߭r8/`Q "B!ݶU{7Xd(n"Zyh4bW\iXl�Q|� FvG3'"Cnv\(DFF&zyڋT�># ZHٟsEF&> �DԗSڵ?wiQaq3�@bV3J7kə7$:2� 5U� ��2_`zJ]0�>ydDDT[B A;G0&9.*2�HDD{ 0?FFF" p)� kG�UJ p8'O~�&M,2ܾ)-][Iw) �3�ߡ FGBƏM2*�h&TGh#<�cƌ*PQd )MWLOOvnXuE"nNOm#u'Yw-K.v|�;w>_Oڐ@}�BX+8 2}+lEӹ==�o^NV[Ȟ Dn�c\]ԑ#5!@p5j'_rXvRҕįuCK:n HU�@qqg�*ש^"M1q;=4 BҊQFcPQ8駓r2unq:t|\5" l4McK* ��qNu Ԩ5΋Pge( oZ'@�A3ts &m}JHt`k�^|\JA/g=@IⳊ�Q@�@4 @A4!4K\  RЀ'@4* � )q_Ҋn �,CmqMPA$pmN4-aBpM  !:�ᾇV (@5:(4MJQs9Aj:cL t]GDOHf*brNTLBjPљ`%"''/e"rp؝08rБ HZP@r@_ r$$T&s �QLF@,QfN~� :cPpD5S( = P4-J&P�AF�PBSIR�pR`R�M#t(l )mv�ШFr"�LF@X₀R!k@t!aj ve4!"�HPTP@#APFP 2 D�"& `�Z^��(!B"#m!rD@ :! 
5p!�^CP.�" @  A" krb'DPɫ:5 4�@HMwԁPhWci)vGE?P-XB�9r*?Rv(pB\mt ¦@0&�O1=4B�MD@�؅mԙPZ @�!HAW�/kK̬l �9٠xh6jg\&1 Cdﮣm NB8D# @ 9kۚF jf*U8"!,*(v�� �IDATqYT/� @#*n(<JH1h T%((� &xIbFM@B 5Zi|3O3{R(u]4KT K|4 ՁqG!A@P\z6nf2 :h20C(P6΁ۄaB])z%W%*g D!4FA zj�t6bD8flP���u T0Uۼ"$ O? Bm:=-ׁH GuʓSA.kKVpE`wA8З,% LݎRV*3҄8o!J M >b]0�)p'v7/PPY%RZ*@`Bc bG�ǎT*-Z@qj6!ۀ�@fFk+FC_pRJ0~S?F+˳  h7hY�@~2Ԫ (%�H t DB#EEuCe!]64 "gcp^WxZ_ZAs�5ߜ/ _�"!2Jl@Po poz�R%>;I*Lu mZD�?H`) B?wB#3JvVAv2` 0n7iP�A ʢU<F*} ЩF -U͟Sk%@? d@ը (�yRA"0( 6 /%Dy@?~ܤ vk6S)!�N)!!QM /PXRRRVVxcS *v!G4"HC?!<苼 "ټ'VTubB#OE�J52vC�`Yg�sosIX5c* (kjcn-,((.*++xap$'@$ 82J&S�r֋ĊZEJ)!HPH])HėE>UrLe܀pGF6 |TQ)RK@%r4ʼ27uB=ne镣52X#sΙac0JJ@ Hy?g6ˈ *P,2k~Uu %*cbRUhtf82qhA'॥ep1 L|ȹL^CRV1'bmd$z _?i"@/Uv~ 2' @.EP֌XtaOgHѸI�@%_JXW% !pv@ H�n: 0 "r[?"*&?kPD -BD>/!\A$86`@ BI� $/_JqVW �JҗFH\punJJDŸc][3 p!C`rτTm#ۢޣ~gPۨf`/R}L՗L#j_O҂�̯˽Ъ JZ8!@|VL_ktUk#W5�}E�TA)r.(�\̓y/  QyϿ#|0ݢuԼr7=hm[XMҶD> Dȅ@naTD6I"j@4J?C hՉ?i>EN"D� fFBɭo>"hg+J �@s5Xs \%SQvnЦ2I�8gNƈTh@(p)\l*_B.!A#(ѫO0.Jl-5C2$2RlpV+EE6ۀ߈H"�>uSРuB@DF@2 |Vުmc""$OHjz+>\(!.P 5UeH �:q@)�+~)�h\ |w+)R]Zn>�@ЉϱE&Kh= 2O) 4� D*]?~$ؠ썆 bMѭR1i8!3`�6^bOX^;?0BSJf&fy[*/329Q `Ӏ pjg9 UGHR"�GP?P %$HF@!R`fZF?}3q1`hv!�i҃kd./#f2qZ:B�4{T[9 mY)M"`&o k_!Ŀ̄@@"@#BC鴜Y ٠~ !(; 6BBu-'TDT%8"d ݌;z.ب۩pWjUxY*$^b!3H(qT=BGj'J@t�v ΀`+xgThrU Aq PYe�SB8 G*h CfxB N5MZ7?BJMh' 6 (jHEKCa ќ s( mq{Qe�aa\JA A�." Jv;RS`(40Bg xx6@YτoNST[s/G]{8� h"5u 5ft] �REijTFh(7xLx^�f9`AJ$ : $.4#?9\e6Da@E@SP HT %@l 5B~vuP!()ĉ]K=IPBVő C1jq0� RD"jAMs{뺦kdc@(Ft졺3T¢DwY˲Rv Fuþbaxa0qq(|^17x<e{<nDp �@qi9mYj"aeӘ pVBlg!KA3LiM jd@>@B"PkB)ޛUj 2Aa 'nv=޾{Z}kVzP[ZP@DЖy<C@ȜJ*5s^k}9ԐSTN>]~LLp<cTLh_0sƌB\.Ap)!RL+&XbtP(v[-BMZ Tʼn8笵mLaWha4*GZ֭J)kKu{@֓Q~5JS1Z,sI2[UH�Dؑf"mw}y~.y7J)ϙ@11)*aRwvvuu(Jr̅B! C=^zJkuds_*s. QTz5 #LGGvhh蠃ȫAuO\-眱wq\ V[;H3NR*j*X}|.ZkJ JR}ԩAL:5 (DJ;Y9s΄$�hT)1;uxjR@gg'3 ([y z:!18k^Ū1~Wu|ע9\O<SG5jk=[ODŽa 0 ںEDk١',"JH㸧ZE_̉ Ū8EZvwwGQ4:::eʔZV/PÓ,$;1â~'轁s>a5]5J9Uea�AX. ðX*y( kq<cƌA" Dv6g- VkmV+YcbuL�y16 >1NDM㱚=C7Vӫ1&q]*0TJL2000cbX*͉˺|]l{l\[AV0ϢC2ZĹ8,29ڴ_9B75³7()4\~O"ޝUf&&_>0Q (aXλk򖷄a8Al5֬[Z;88W.Xk &S}N%-[x≏<Hooo"Ÿ=l5 _ďft!Z&* -6$իYKԱJ� !uPZ{pWviA`/D3Nc_~p .ېueⷩ1?iX\lٻn[gk՚EQhU:?8Y)jVK֞{c<7FcifRJbqgկ~ED^z??{v y^iN2ehh(Vz nՂ̘!:gΜN:o㘙3vƖ7m;=ܳ`"2:m \w`5cA-k>1[P__\}l:HZy]Rihhȧ={Ei_oKw33w}^=۶m9sf Bb2x6?я7WUƁZu)}XOSI<胵bZz /:ceLze{u/_`GGǏ\K7nreߔ_~T*@k׮(ؾ} OOc1[lCCCw[X�P_JD|{x衇C;2&xj.2ƫFr]xSNY �snR,_zKYxԩOOie_z-34Oݱj-Yk*.W]uŬY8 "z_w_aBZ7/x,y]:眑ry֭aZks>"c>3<G?z3… /^:~]ts=NJDmmm�>%:馛bGx.\û*7kKV.]:<<,"W^qe lgY5\>Wte_ʎѻ ֻB#6"ܖGD^zu}k_ϮGr8-1m:T%K,Y#\{GusɮW\ +ˍ1B}VuR%}YŨ{;OMj5KkܺwBhܫXA3KoHGGGaf\rG]7z}ů1jR|<eyh.*).c5mKz;SyAPbXeX*[ wV+{M]տ؇Z{kYS̾>zU^fkNjUrN*`u'u+\?2[_Z(O=MR׽j Ⱥ{mV \; )ЮǖrK.91`(&x˯Nr .FHC[E?is d]\_ҒI)�9]~#>um7E[_'BHwkE2 D$Ǟ{Ʌ'T&KP`J䕒ҽAgR_?!�1b89LL [@ܧ ~!@ܿWMS0vNMX'˝<sZR\ +ɲIF�Bl @.M«H!鏖 *rBP@d_z<\^7Qvxy )bY`"(]T" F*U *^(pa 4l:dhp@ZZ)@YR- (>V PXpȉ+kOSA8S d `rAlCq~@0clk6�8ga,hmD��"Yi0ǚZMV2lrו*�G0D#3^5 &1! Gp�(*�Hy S� W6 L4Zև,؂s%R|9$2SD\@q9`PA(v€EaZ W/5 ɠ1ZysUk@J0M GCg~H 5!``Y!Zu!"| `SD?UA$E<Δ˾-.i5-f)}=5lN4ɋ")8GH%)8P@DCe {wn55 ൪FE$ B�qD Q,_;p f΃W Ayt)}Z|hc@Wc}z0,L)nC{;BBf`?oǴiФ)qErpUTP?qnjP8UrG.Y-67U1uPkp�)ld4wC.,X�giXoPr}?@ F+//mqӏ92ջ_^C|;/.= A|9S"/p `"-աKO !�b D�@ abt�2ЈY?uyZij0cb�D+~k. 
@ѫiN%1�2m|[vɊ SƩH:1JaC_{ :AsTsaXPTi Ed eSL HDXhH-(@նZHu"6(vt~Ug#8dJ<p@e!p ֓iV& 0AKn"ÌXuS'E�ሽo}wrd~yq:v%Zx翋+{ۋGdx}9 SQZN*7 g �4t|h Aq>йmQgH݋O=㘥pY5ARלxM #y{W*l<l @>qggTF TX[k%.FGAX 1kauFijl}-oG[ot.H�v?K]_*A!pFA,5Y]wTqm;IOgj<r#8Bm \)7#Q1g޳G^x+.K;ccGܵh{+/?O n6RӏZO}O Zl%"S%=+p"ؑpsajj*A@΁ p-xx©8`6T a `mVLXoo^~ԤsfuΉ #߷xۯ/]^(uEׅ?8#u;nig9 BP�)jѨ1iAJQ@bİ5XweE[ � rxj< `8hVZ+ձJ ' k.7gg_h"ͬb,+mQ:b3}l؈nD{v˿w|?A0Aj& mHJnD�b2H��T<v{WI릟yu $J:A@ &cA8U:[27xnHR)8(@:fn]w@]3 sqaӡ;( Zck7*s`O# \V=U0[*^|RQD`2BNE:4r b_!eQMzt�� �IDATW1 TcjҔN<z?_Wp.ܼ=$V �DJ1,+sRRӜ4(�*O-+)ň mzT Br+lH �D6�tTsERsi̷"uno-Ysݿ�q".l:Oz@UW?JR`L5޺۶1ddMI(RsٟJ$�#Q: =q-"ؚ" w_KBlKBqX[7m]BJNkjEQ;_Eo)ei>%bc=14QX+/ςBXD*pmŽ=z17gX%BG#O-l[X\=Mn7_V_K\riXqVZ`WjOC-i[WZnimÅ-Կ[n0RY8pg3 cs'sڂ.mNp#'<άT*9?p.(b(vS~`"w?{~lYW_Kq{{8t«"X+P%R¾ѧ p)\GGGsYr}z :N �cq Xë_SBDpRN풿6#}PHC LjZ u]& j'X0DEP#T5N)RSyט�H sIF3j<(IjX�C]*FiX(`'YJ\ &q)VV;>wre_VxUX3jy e૑vw5}B�XD3IqL8+]Cs_8 XoF&LXE BNC(%Eub*4[wQ`e*e?wu|;P*5 Фֈ󬅔xUK/BDijlb?\/W򕑑 Ҋ0:(�ÛZJ`E` t)ǢVvj+7se?SSU؁mmmcqjU)3Cx]t6ALlHTf6ņ;({ڐNR09_. /Kp%6R0m'ث]0|)\pXe$ifK f'Vݠ$US9_dٕb\rS�&Y+(I3\&M;=_Zse?-d&$\,;n]| H̖tqFޒ5\ruu_+NPߒ eFXmUJ̆9׫Xm i(o^e\cΰޱ\r$hWǖrɱ7AQH]* A%ѓ-/Ws%.)XZ] orKVi{'ܜ_aK=ի�%IC"{9\rB<K.8xoYO_Yhk2Oy-~j7gVޕg-XA,(>mCQp01!\sV 0''CQ'(sr\٨WXRf~[B5wWz. N"4 K)rC -S΂-@ظP)?U�@MĞ:ڍ=%^\v<6Ϗ0B]"rʹ�V?ⷼ bv@AsqΙ=+ SLp1%S+5)1OU2:8>z"7 ݕH,;8aũwcoaL`Ir3_%E#^ ZG Nfg9QGEuYroW�E~VF^n$�(pI!U�hV^8Uk֒,xSlkZIChiH?5.P=clzO瀵N&%18}Ȩ{0jojT"*Tj p(k ;jAu T3 (\e^VC2@T*mBR8`f̞s01'ru3hR1 li6OFҳύy8PrÙV?1Ír%i:uKή-ͯrX $IE5�Ieg E�VeWb`AP_{Sh̯mN< JPҍy1LPw N(BƟWkں:kgޓc�1_Ɖó99�kV@nIk [_Z1Z5J%eʫ-lR(2Dgz@Ok‘4?>gWs{mOm֡l0MP-t!W]Ip2ƁIDj::tk *5=5Z9vW[wr9!v�b� ]jvPb :R]%6s*3CXbB v;n4bU�Wth�FL|8s)he"9xPf]%fH�"HU|zOSb9M.ky?u+Lae+(85@0+fB8Ab7B%bFT$`mJ "  )pFʵ%W-X)tc8wIE@7҄FW4sDQYF_I,!#NccKhX4J %KWdxm֡kM~ 8jqr@ [a�8D a a@�.�a~E)B@X S*05 ˬ~|S ""iX`,HJ3muU@,0*ۡD*DP 0 gV8qDKO+@z? t0yTsTJnU``k  PC]vnٱP8&bhD{s~;q1*˟]vuEalPSʋ2P~['�7òxDFGk=HE"gOӃCժ[@N�8( Wvβ8A 3:a}ԡ:;!Ұ V<;-ΞpKR &kDy3 $*9Nj1 "A  bRB!jpJ%<[}#E{f_8(D! d_f"𖳔01#,&f&Rp1 VtQUq0#fn)3�/z/<(>R8XY h  nw5t;C/>>ͧpQ'z(ax薇~} �``j�plvPmU�p `({,Z 0�"b"D=]?Y(5\]eu&� 0Uppnk�fDT C"DPYpYA,CІ`�W1-}[1'10-P!lq8B7\aG4bH[KFfZRB qpDYzI}k$EiulɺKN<`]C)۟MԿqXF#wT]∃fmWoXw2b+7o&5XS޹es]x;Q,n}^Fo\- ?x7::\<_|1&,:jI /8eas<Գ #O^r2¶D%C[MXu)7_mO'Jz^z⡊:-1^Y7 L9#N{':7?voyqPʣk}ء/|y x`Ix/Jnv˺5S0eg殾z:.[["PFwa3GoG{6}m6gᡧbxI#B;nھneOgOsݽ0c|۶l<l�E%~ ~c\U 3}K]"��[rHyl+qM\]pXuca&T~8}>$ںaV֬W)-2VbZ,](Qzח>?xXzo.nx|C7̚ื_n{얣:og6<q^lz^<,>hcN̙xUto?܏N֡0;0 Wc`%58`knN8/>zٕ+.㸷qꙧ/ oo>[AuKп l72X}e{8b__;ԏ}bZɼp%mZ~os>戻 F4ji51�mYS}ʋ͞}?"箻qD^|޶rxʹ]oM]m^d_G(ubCqզuuK""((Я]-!Qǜ:z3N9 ĕU nxS (NOά^␅"ăwv<XhGv}j+VUp9Gu|oǗQs<p!38n '^tԛރvYp'sb{z`u1Gi? nt'v8`}17l|wցiUXa*>kg!瞝9k6mQwVX{' 2|-ĦkW,yкS훟{%U=ÎJb381t6rf[~؉oo+T(™S (S}-qzFisO;ơtXZbU9\ZV,],۶zO~v0~O?SU\�@UqyoO } sjomߴ' NS8䳻OZ:zOW9sݺV?o.W.1Rn].2m5QRMf 0E_g88T  l}  Xl׌I1sBtρ'kP%T/ћ\R^ &Q׻`#?j똻tksCwB[(R 5pb@ڃio(!lߍ/aa*@05@`8*`}.#Wo۰,LFP 3ξ˟n2%_6}Z (h u!3Iߋحϯ, \.O9ieW��FlTT6�+b>:2xD0q<2SW:LG['&]X! ǟ:0̘9s* uՀOJ@#R�oƁؼU@dxy &"8 [[t"? TBlt�Db>E�Վ6/8@l݄i3EpyG u}-_6mۺW:\1Յr)[ ںXGƅ1uz2 ; KVJAX8ӟmT m3|J-fTjud3frH!lC5FșXh  x/D=k/>4дPxlސ}iVWޏETU[Vc!';6bx#g":ȱ"sJ3܃<{qx3\m-<@s(rR�am׾!�X²/( bDk3Ăq�kg=l'0+VuY 8*AHw!_Z P}eJ'!! 
K*z龙XP֧.{n={Q˝s xPT{)sVbx}ՓPbqT*mJg x/^|zL<$^D x#0+}ƛ=lbs׮[Q g]s%HbFu8 `@ :01D0gկ<|^pTB +NZ_z}Uq*!|֣W_)Ι{Ǟnn7N(po.P1$ ._cތbʨ*hV0 a`31O>+UU<A}N XS҆6DGK+kB]fPmwm7Y*қ޶_xbkʼ4 X#]PBX6S7\uǓό ~RLV}|?/7v?HG/wu>GݘG_󳟴wt=FB{ڇ2Xp]Yw{7wSѣO~LbhACx XvOCSg͙siN qN|?i "hztt w_Z843ފ63E9⳾ǽ swΆ+W8lɒ\ax=6wgQGAyqQ%hm4"*  EA%@C�xYR0qR`x;}S ` J[&EZBD 5 `6('%~&1{wu K(uB xClģp!ʁFGTGeT0AO6V,3XA*t2 6U5;`A@e$NEX%cpl [EW=B�M*@ *C`A;ěW|}7u-Zthۣhh1"LZ[RjJ)KeUnG;(r`.xΙ"ŀ1 �)P�wtp23 _34X8 m:.4J{'>3lB p@B3:D"�¤)ž:2Qؕ,i렘K](u٨thAD=e8 R`e,nwrQZݨ"`K.t�XT*mmHR R" .5tNuΥ^8X!8D@Aa`Kϯ޴w U,l^xuI,FԞ(ٴV5JC?Dkyԑu4S V)2|8['f*9 H3K2tpL@)+b@99PN;h/jP щQ& _s�NcQJSXpq&XA(qU80CSһ�g4bNr2,H `"V c)iL(@A,T6�+<%%} � ؍pG? UNA�rֲzMQ!=d85[oc� A_zR 8+€ڇq.,jv\YIY1 X% gwd "qBB 35I68V3Pr�i(Ɖk%I�"��VqPC$pD3j&؈fp-L1$3I'yj"XB9hEuH #N=fk\lE@ N Ӧ8K{2q+{~!zs>V3-g9P|{^$5 t;HSJ^ZcW)apU:LlmN )Ɣ*>]R7)v<EY09-$MV"−L e,]4h&@ L�<%�8E@7A+M  b0N!v (̀-@, < PC͇<(t%$xSX"}_xHmKHKM껼9<$oR$`ЫdشN4aP2:bM֨ N;O>aŤj&i3&Lڜj9huЬ[%pQ^`I��IDATBq=kYW)W#9�Yk Xe Prؑ?/pY<aoZشw86pV{?Z`u".C/1-:FR("n]*'=T~&&iBWHչFUÖu�JsnvLހ~z XQҍ Ji=.9)!P!"/Utm 0&IGΤ:]w'G&692L7k)`lⴀ+Pځ88�${x&.)nilz5 NЈ!%L3@^g~>YS^54mP.oJ( WceJh?P hj*:Ji7M; B0ɶs4m DdBzt_Eۘ kinjԟ?QOi MNVD@�$Sn$Q%W0y K("( 0T"7v)2R6@{s (jdh(5>y͕hqC&IL7$8CIy=wy:"џu!sۧI&jf`@Z-I)yC;]6z^Uۗ^ݶm[OONs- >Ev&NZIJuuv1C\%̬ L04nn4@6edS}[f~1Wi3b;-.!eb'!)GHzE5; uMdZkWw�TW[62#UxRm6IU;׍-"i~q ?|niajCͷ,QP:UǍ:K=<{܇LΐS j振jiJӋiu!T5\,<1 \gJcvi,yS_ҘgōeUKAYC<[Auv^+X&L̹97G|}1QV?dPMp2 ~Ӂ.K;-խFdD&gܿKߛJrw+<>2Fn' ̻m쓃[D(�ǵs#:1鹿S]vQØd ٿU..|<i]|krS4"7"Ƭ3/ƛ:&%M&۾޳̛U@c[9[fMnؿBٻO�v_ƪJlCA l6O4Wl# F%{1`fU=.%v@l"uzΆDŽ|v$+;y %$RP X `M# zf~@mNn=r%7Vy֕P{1aݛPP׍ Zpu箄re4uJ/e(~M|fs}s.9Vw #ɿ�@{.re?=3%H#vDp6Շ635iT�PB"b6 |1BpB T ^EL47E7 `cFW$P^Lp$,SE9XOM0ܞcy&Jq]卥WuYͬԄgMT8:8'֛F;W'tZ8ݤb#HJ)Ox).o*N6Rp�%Rll�>d<]I#IK�SvaHH.NLYӢg,Jޱubq\ҫ*"k_x.1>JpP8뢨֕BQ&fDUؘNTKNS‚ӖV1&sُdj!,пw9sO?}fLgp&4?W B-pj> "L|м?%ns?BB~ 3:syc`U䢯~\7X@o|ݷ>@DZ&ϕ@h%괮Ɣk¡A"F ^z=SO_>Kf1@JD؆j` "ˡ1b( Nf$7c]yJ6 /,[7_@[Uaǝ�ֈ?3Ϻ QA5g?ҳj;>rG?wLEcXՉ'}ףwO]uOn ~R.QQ.;漿+~x\ S{{s`#!._|g_/۾'m[CT[z=3#;}9X%5 <J�TW98<zt?]ӿ]q[oskz?|3o/~+Cp6}|ksPOO58\?wN=-o qČb ̇/>bú[.n@ò?~?<?ɽ_'j!s%粟U@dxx8,(O&0 iOlogw䓷7pH0o!|Om[^|y,]㖼O?n,NysϛބiSO=bd9w)'eyTh:XXÊ@ QF-]yާt‰>Ǐt;"8s ̗WǣYs9g?pǝɧ]tD0>J4�3fN_!ۮ̷m/?r1l 1۠Q{pqgtd{=@u q[ּkݽmm>Kً8H_<5qAႿv*>#D za-qީB{I?엿v 78 ,eZcUL΍lyNbGկ|هq s؍,Z|3&.v,<k[ cƗWs0/Z `VՍS+&@T " 4>1K{/^ T-4S*ǞTOp-7pHʽ=b|ps|dK y.J=}̟ [8ls?aw[ozb@vǭKN? DG_kdH9'o6oVtwa׭3Ow@SX5#�L0XK%Bdl[.ŕꝟSz(Jo|+."t!mxe1KNӦo11YG9K)HA7K.V gioK<쳎=,h:]<S_xG?Zi/L9q H9(w>/ZUa>9Ѣ MRl3[ؤ &M"-E [iզA-٦A?DAk!3iqs{Y {{s= R(Ma=N}tȇl #;^?xOO>CAt{A`ޡȦ Ë>3楟~99dp>{DqN9lö^pO=^^ػ﷟{d uSk9?sg rA\[KV@13b1!\dx 0cQF�Y5JH@>ƍfgW1 n<LM@ P�|ʠ92 @zo` /<{_ms84Ͼo\bTx6 6Fo;iWNPx"JD"ūIh|]gv&+j|MLӖ:h@ SO.Z1DKs>�kܫ?&J1WI! 
s* ǫK0Ҝ+o3i?Ij}&LҤ}1Mk=xiMv6ʀ�ePbF `$%Of+ƀ9nEbu{yS $P{1^F' z *p x4Q4 ĥ͏;&DE-O8 f3fRKY 3FY:OJ`AAŬsN˾UTcu^NW^MU)R59f̸ 78O�<[XeuMw cպ T >okMyj5esr[Pi(x]<PU%rpsq0SWBvnvٓZ�nRt WHJ|{?rD$!�D2GlVcu+PETt4j0Gvli0 ӪaV 0i0 ӪaU0aZ5 'Ch����IENDB`���������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/plots/images/protovis/right.png��������������������������������0000644�0001750�0001750�00000003307�11757531137�024726� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������PNG  ��� IHDR��� ��� ���szz��IDATx]UWk{ܙaDaPHqK۠V4FOT5MZcM(I3~XԄҴXL-t{9{a@)ak [2 >[ݱY|uQ}:_z~rͷ%{xz:_�kǤ*Kt՛98ԻO?<1ܼnU_gV/oxy/JPn<y#pvx�UXFIJ"Jc[֬ްG=>߿vUлA|w9t5] "@R`˗ w:Or��n.gpQcA tq. �.==q$b D.�Xš\޿~�t.H5&y-Af Hf:7VI)L$7EUQCi*XUpM,۶ |kG6~c�p*bY"D,qU*a�k2 C &9UtX ,#:0cݎ$�t˲?J+)=R #i1FТE @*=X s-҅rOw<~he�!*PQv*j4&:lTA4!8hP] _ѽo~~}+K3ekK?1D],�4Q\ATbZ *=B$0J%"7v#l]2<}dr C0S .P0Q'.*F�P]CTsPqPg zto};pIh`i+aPPШʄQ3clj;lOj;6ٶ!Tř&EVc]t4ky;\�@4F@B@\qLڌMDԕP">8\DWNIhhX[Cge� W*ǧxk+PWO*PQŕ.ĕ$D+U5wIɞp|fW@ȩgO^ � *P"rBǚch@H�)@bR�q*A8(z^~7G\tZHT00QS!5*Ebj"TP)Ne@Bj e*@ͨ* x8z: .9hDQOϼZtn Q%�J`v* SR�8#axf(YE�VliYD򬇴2RIH]=!&Z@JДLӏs?7eB8�Mp_kQ;˹X.(.eH! .,;d>=p%̸sCㅋe1[s-!@S@.O$jyFݵFƮ;;n˞oTN^%{`')})h`aэcnrc>|~oōn||y/wIg{~CyHRyoSmoE]X۷\>س8~Gxezw7 }Ufז?`jHذe^[*빩seMo?:_(�'cc����IENDB`�������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/plots/images/protovis/zoom-in.png������������������������������0000644�0001750�0001750�00000002766�11757531137�025211� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������PNG  ��� IHDR��� ��� ���szz��IDATxڵlSUOkndnRe0CDFP0%50(Pe0G),sh-!1ֲ{}^6Nrrn{Ϲ{SM/Cej:#+¨(>QTTt!?C"`S�Pmdnj 0e06Шk,0DӀ0(?ytnY06)�Vqav$(' h4u?]kŠon->�/CPPr`:  c{qqzsc�\@oo\̪3 (k$'< EffCׇl:BM0֗]sihڕ֜~�x4 ~!ݯhzw$<C}hOfxױ-ŭ{b�,q-ef4(eG�vw·R)a {Rc�:;;wػ@@̔Ay>R|L #WI}C @WWq|(kJSqa=H$H@Qoxԃ_tFQ19�v-9| @{{{7H*?>#+�V&&&͸a[[[?lQ FF�q``�pG\M`𾳉~GÁʔNe"z9_]Μ9gμSGjE�e�---tx*TAL@5r%0]eE� t*d2Ib4 9`x`7wӫw:uiƯ/fdd@NNNdzv;TvQkK! 
⚚PSzz,$NK{D9x ᪋߹g[hllaFG@2Y8*8v-xu ZF}V& @@M4Yg~ uxb!-5nw_u#i�b+6סϋOdž<N:)`vf:e'pwJ?muuu9~BW+**BKw}sӜVW:& m7/۷*�nf _fTLϛC/thJ3 C2 ۂw.,7>Y]^'(D/AŖqOW1�BzCF2ӿz]3@pg<?6%EF[sdܻ /'j#O>:%@MMJSGˋyhj~2,;MO�(}gS;]~3dN&q]#ܫҩldѫC 7,t]/Ge6ȍ�鍄% ._:_RBoE%�FV%LY`g},x6LȑvD7lE FIR$����IENDB`����������pegasus-wms_4.0.1+dfsg/share/pegasus/plots/images/protovis/new-window.png���������������������������0000644�0001750�0001750�00000003136�11757531137�025707� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������PNG  ��� IHDR��� ��� ���szz���bKGD������ pHYs���H���H�Fk>��� vpAg��� ��� ���uIDATXõߋ?9ٙݙMvt7jr+#!&E)BJ ^^iF *"UhZAn6i&w;y^33I3˜y{8:~mY+R}`gYq_=&�[rO?yTy>gߐd5C @/t}tC/V4@p =tC I�b$^ܥpp8y{.;v K |QO]-a?Y|fM"/}֣:E?뗈 e!3 #z̽O<bV|31Rv@ڻ]:RfFN%HVϒ$xeΗ=Vtնl;{ܓqX]]emmzĉ4(|A!GH3̑:\vmݼ'i%έ$>!}e_ȢbKA NfNbs SMj$!Zm Q;&*'.nNҲ0Č}t4H y{I b<cGe dqJ, `Q]S&Al>ΥϾ^kɁ-p56&[Ã/ftؽ~v!9MD$`f�`p[O&mG?]hmgy /"_}NˏqTq^xw9cO (!HCW*.V1OW3>v7{Q?KI@O:lQDa}CDH( '"<FΑ#+rA9>Ĺs @OF!~(p C͜ 8Z4🩪9 h8fLb=KHV +B0E6nF$|4?_Zė#6Wyq08"ɲTQ3KG,:-F0DLUE9qnѭwU4Ӳ jujSnࠡs`bCM"%VyQJa=)$!C=N+ps]ML1LM9ソ#i^dPn 8Ä�AD97sF:.4ղ*Ll䁻Nc~pN&lLPdܻqUCq[̔`m_܆⊩b6G?ŧ {(Bl#>�* rb ֬eU;^<xjM'bF Ԕ fUEQL4[&N:zJѲ9iYL7���.zTXtcreate-date��x320552 14222620��BZq���.zTXtmodify-date��x320552 14222620��B8I]����IENDB`����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/plots/images/protovis/new-window-press.png���������������������0000644�0001750�0001750�00000004351�11757531137�027041� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������PNG  ��� IHDR��� ��� ���szz���gAMA��BO��iCCPICC Profile��xTkA6n"Zkx"IYhE6bk Ed3In6&*Ezd/JZE(ޫ(b-nL~7}ov� r4 Ril|Bj� A4%UN$As{z[V{wwҶ@G*q Y<ߡ)t9Nyx+=Y"|@5-MS%@H8qR>׋infObN~N>! ?F?aĆ=5`5_M'Tq. 
VJp8dasZHOLn}&wVQygE0  HPEaP@<14r?#{2u$jtbDA{6=Q<("qCA*Oy\V;噹sM^|vWGyz?W15s-_̗)UKuZ17ߟl;=..s7VgjHUO^gc)1&v!.K `m)m$``/]?[xF QT*d4o(/lșmSqens}nk~8X<R5 vz)Ӗ9R,bRPCRR%eKUbvؙn9BħJeRR~NցoEx��� pHYs�� �� ���IDATX WKoU>3fl'v\%E$ b RW, a $e"AA)T!%mqNy}Ijr=X%ʫ~vVWQ:^vO_XY+rYi;ʍ7V[zB td% #iw$"!!}ٌL:m"5jv2dRiI:p0̎X$1DNScm[!{^c}#ad'lzI$X{P:/őSOWk!􁆤0i=}f߽tçN` qh$Bmiw|?dFG%Xbs]‚,V˺_ssz@6I&lJ|cKH-iC)dHӑ%YVq\HDB<_}{m@ *FV#ZkcCV ͊ čMY7L!nF:y1=`5d82udqiY͖$T+ٖG29)94sD`O@=oHl~;(SR[h�pZX5`=dDA?h5k(~H3ozTlSwsOjtz~!+eT< T\RI,C \bP$=Uqtx?csu̬On'U9 gawܰsWd)ݫYt=<NO\GN.9o4`ՈB=fͤ$"B1.?٨0Jsݜ፡B -حp` ^^IՒ B҄ s.ـ f-wo=73#޺*>4)I%ta5"owX&۶g^G5qj~<x0k_1`+(qŊdu8x=;,3>p#$~��su"?H}麛6_�3.%2n®vfQ6&0�b7/,<@@C$NTtL cA/c02{!w>OB[fP=<aT_x@CcZBKI"P#h[x�إd<AM!SӷE5P;cRb84<dH@@1?lpZtwTK2Zqˬp_DܽYŗ.V*G*H؍Q10o0ֲI$@�}z}ν;V\:(䉻E㯭>b@ oHlő}"1Tٳ`w8k9qN����IENDB`���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/plots/images/common/�������������������������������������������0000755�0001750�0001750�00000000000�11757531667�022513� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/plots/images/common/not_available.jpg��������������������������0000644�0001750�0001750�00000540632�11757531137�026017� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge�������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������JFIF��H�H��}ICC_PROFILE��XKCMS���mntrRGB Lab �� ��2�acspMSFT����KODAsRGB�������������������KODA�����������������������������������������������A2B0��D��4bXYZ��x���bTRC��nj�� B2A0��Ϙ�jcprt����Gdmnd�L���gdmdd����kgXYZ� ���gTRC��nj�� lumi�4���wtpt�H���desc�\���zrXYZ����rTRC��nj�� vued���Fview�4���$mft2�������������������������������������������g6Q O ( -WkI\ !#$X%&()A*x+,./I0{124 576d789;<6=^>?@ACD:E\F}GHIJLM5NQOmPQRSTVWX3YIZ_[t\]^_`abdef'g6hEiSj`knlzmnopqrstuvwxyz|}~ !$')*,-....-,*)&$! ٢ѣʤ¥wlaVK?3' �عʺpaQ@0ɶʣˑ~kXD1 ն֡׋v`J4{cJ2�hM3vZ>"uX:���g6Q O ( -WkI\ !#$X%&()A*x+,./I0{124 576d789;<6=^>?@ACD:E\F}GHIJLM5NQOmPQRSTVWX3YIZ_[t\]^_`abdef'g6hEiSj`knlzmnopqrstuvwxyz|}~ !$')*,-....-,*)&$! ٢ѣʤ¥wlaVK?3' �عʺpaQ@0ɶʣˑ~kXD1 ն֡׋v`J4{cJ2�hM3vZ>"uX:���g6Q O ( -WkI\ !#$X%&()A*x+,./I0{124 576d789;<6=^>?@ACD:E\F}GHIJLM5NQOmPQRSTVWX3YIZ_[t\]^_`abdef'g6hEiSj`knlzmnopqrstuvwxyz|}~ !$')*,-....-,*)&$! 
٢ѣʤ¥wlaVK?3' �عʺpaQ@0ɶʣˑ~kXD1 ն֡׋v`J4{cJ2�hM3vZ>"uX:�v%ZkXb:#X ,PYWHͦA%;5$U24+?-1޶L'Z8Y ?feF*rL 0v :yq}{|ssujHQ7a#XP\ nHx%@+E 9d1/27 #*=*#CeI1P±&loƅ}sA|)xEr~6i4"L`%IW)\N-5F>2JU>57K46d<.BB'IGʹ}MS})]gb)iˍG*l+pY{H-uCq/zhG1pE^4WU7qM;|Dq?k<D.3H,Nx$AS`Xݸ$8bѝx8dk9vf:4i;Emrzg<rIp>wg`@}]C`bTFjKI֑BM*:YQ1V$)Zҫ!_G^@H_HuaIc`IfJjyLNoTpN tf|P!z] RSU^JX#A[8_S0Bcҡ'h& WYW@Z1W[W]X`DYpcZgx[lbo"]qe_w\!a}Rd{Igf@qjL7n.q|&+f<UcfkU fV䡼fX^g}Zth,]4Ki`'j.dwkirnAm,ndotd[:q@zrQsHvq>?Wyt6N|v-kusPuQ9uRvS9vzTw WAJwZ+yx]Iyav{,fma|kc~qJZVw<P}rG>Lpc5,KӄLM3.N7OQT^W)QZit^vcl[hb"n8Ys#tO`z.F׀=KGnGLHsǔQIP۔J�L7jN\b+QȖT+ʗW~\u m`kebk+XpOvEBݿC1CCuMC\yDzEJFHנK&MQ0T}Yt?]jba7Bh#W%mN0R>Mf>~?̲?ٲ@%AmCEEgGWJ.3N85R|ʸ^V6s^Zi޻-_`VeV9ȳ: ƍ:f:;ŻB<޶˜>LX@ËBD.Dٗ>G؎\KB9O{SOr}RWh\_v5+k5p˄5X�6@$*6d7|ѯ942:˯҆<_? AyԉDԍpHN\yL,{ פPhqTh0#0q1 ~11˘V23Ǫ3¸42"5#7[ 9n;ݞkU>#AӌE\~IAz)GMpɆhՉ:xcnZd [SKID$ I=&63- /W3(:!@GJ£KMƪ }~VA vc8lvceZl2R]"J<'Ba-5:2ū3N8,�>~X$DJľyQ +tw=Rz~ C}tv"4j$a'ˍX+eO/xG3z?]8ʥ7o=/CO(#H NfT/+im+o{,q-u&|.y^s�0~gi{3$`5V902N<ޖdEe@<EZ4J\,O $TGDY:gH:qi C:jЍt;mp<p{N> uaq?zh@A.^D;UGLJCN/; Rr2W*\["J`|n`HaݣI3cIduJfJiKm<z3MTqpO vg&Q|]S~T\V?K1YX>B4\ŕ9f`0dr(ThѨ W\X]|Xa^X`TYnbZEeれ[\iy,\nJo^askf`Vy\bSAe/?Jh@kB#7n/0rz&fWjgX1hgSYgZSh*\dh_+ibzjfox/l0jnmpe#ou[q{R7tHHv?y6}=-vRqev*SvXSبSvU5wVwXxP[ĉy;_3zZc:w;{gm}Fld/rZZ*x1Q9}~MG܆>5{#MCNHiNDODžQ(sSFUևXz\`vK<dlʌic@o!YtPBzFܕ<-=HHúcI8IijJ�KӨdMvOR*\UF SX~șx]u^akޜRfbU kXΟqOQ wEqDpDgD߹E$F{eHI'eL,N2R"&U}ߩ Ytt`^jcalhWsnuNd?K"?@ @%AسpBӯDnqNFoHܘ+K\|OC}R|Vs[j s`t`eV::Ư;CA;B<„=5?F@C nEf/HKaO|?SrɏXi&]n_5͉ 64ˢ6w:7Cd7ў89SI;=ҿ=qO?ABhEyըH턀ְL{-Pq*Uh@,1I>:1ЌJ1Ιd2=˴23�4ڼU6DA7>:<x??UBeAE郞LIzHxNpJ}< T { q gp@^_DV$HN<zF#HA?;),8�/>05s);º"B&H7Oiy*}\yJqyoݐVfJ픷]?!eT%Ş5L0*h D/i<K44:G=-7@ %EXL�R(6 v|!+}N"#Jvu%l'lc*MZ^-Q1[H5ܠ@:8?|T0Dr)J-t!OѺZ6U&-s@.<u\.w6</y}1}th2ށj5an7ƌmX6:"O;>wFBj=F5KS,-P6%UVZ_g;la7<!m;<oAw=Fq>Ft|X?xrAV}diCCs_EKVH܎6MjL$DDOȚ[;Si3[Xb+\?"a^J2e¤,J|f؜\JhzK]jNL"lZM.p6zNtfqwP2y9gR6~^qTRUWHVKZTB]: ac1ce\6(iS X_Y`8Yba.Yc?Zje:[>h{+\Ql$y]pupF_Ku\fa:z];cwSfJhތArl8}ovy/s+' gZ'gZh"[hy\h^iah4jdkhbxllo,nqepgw\r|RtI]w@*zv7}ٕ.0vTvU8IwUwIWwX@x>Zޓx]8y` zdw|QiJn!}n:ds[yNQMH>>k5ׅOcYO鲓P3QRVTW,]ZCM]w masvem Kjcp2Z uPA{G/=ߔJQJķKL?.LWxMO'N_PSOVRZ^*u=bl'gbylYfrOxdF*EYE٤ F2¤7F]uG𯶤IU<K"dM_'zPqNS=eJV~oZt_yk3@daaiX"o5N@uP @߳#A2$HAһ2B C˴+EGɵ?IaLOzS}/WsúJ\UjCa9`mfrW2;+;@<Fka<‘=*>b'@ÓAب2CĻF|Is]LͅaP|BȊTrY<iVP^_@6ͬP7b7jɛ7hѩ89.:ζ{ҍ<^f>Fӓ@k=C2F<IMx{WQqkV+hkj2]w2TЬ2κ3 34=57d8z:ʧ=/?C|FJhznNq�RE[Y.vz-l^czۢCZ]R-"J'CB7,:2{3+8K+>De$D`NJ>P3ЌH}* sr ,i"Әp`& UW)͠N.F2C>76<޲&/BZ'H (MRSæ% ~%i&r'FyE)3So+P f -E\1S4K8B=:oAլ2nFڱk*L?"QsW9 1{41|_2%}׉H34@vx5冮l7ca:Z={P@ޚ0H&D?Hǥ7'M<d.Q&V#\(r>Kr>sݗ?u*?w#8@y}A}Kt[CljEaIHAWJېNNZEQ=Uj4Y,%^A#bL'k$*LolsLm7MHn{NqSO tm|1PXx6rQ|iS쁍_V9V4X݌L[֒iC_#`;ba2Rf_)jЪP!oZpd4iZeZe[Ugp[i)\l=]oz_sq7`x7gb}I^%dʂTgJKnj|BHm29Jp0ut='h]ͭi*^}i`_MGi`j2bWjdkglkFysnoqoot!fbqyF\s~SjvJx@{7~ɗ.wWϲCwXexYCxOZ)-x[y@]y`[zc{gDxE}Gkn~pJe6uw[{�R7HՇv?>6gR ʆRS8 T%dUxևW?qY3\TɊ _>cw)gmldqZwXQ}'Gk/>\|L\M$MNj1OQYSVUXęz[_vdQlmic"nMY sP+yFyGcGĽ-H6H赵IjKDWMæhO4Q֐ҧTǨX|~\|uV`keb jX^p]O�B>ėB(BnC~ DtWSEG2/I"KzN@YQuϸXU}~Y$t]jFbgagW”=3<¥=}º=ī>` ?'mK@6àA CNzđEgh2GJɎNQ|Us KZXi_'`Ѭ8<ѻ8~89HƢ9WN:�ҙ<#=n?A̟ԦDjmGiSJ˄YN{؃Rr+W'h3VҎ334E&4Y5+6ԽL86Z9:;Jp>K!A.D/GK_z Oq:!wl|ri)!~_$ԧV(NT-F1)>�6δ62<$d.A$'GsMZS`ǰl"L)"F#Ɩ*$w&nD(d+[/cR2ȧ/I6)A;B9k@fl1E)J".PV=Jf*+u ~,.h-|.;s&0a(i2`5{V8ӠM< EN@<E'4Iβ,Nѷ]$T.Y}~56c677T8uHy69Go;f�>:\@cSdD*JiGAK9#O0Tn(Y; ^C Az3tB{B|)C}ljSDE9vzFņulHcRJYMPP!GT p>W6[ƩA-`%cdENqȦO%rOvs1OuPw5Qy}R}tSTrjVSwa3XUW[N{]E\a*<jdD3hw+ l"\i\j?]kM]{lݖ^nz^q&r_t2|a"wrb|hd_pfUi0Lk֐Cxnޖ:or.1uât(jbЮjcpxk$d-EkweRkfli mkkۃnyo,zos 
qqMwlgts|A]u*wTswKzA|8!/y-\#yR\y]Sy^Rz(_za{dd|Eg}Zjy+~no+s"fx\}[S.I@bk74U߷kVXV诵HWǪDYZ؉\ޓoe_ObSif]wꍵjnh7odېt[QymQҕ Het?O喢PXPյ4QS;R*T,VXrb[Jh{^)bevfm=dkGcpMZ'uP{FG4`J>mwJK K3LOMݦOKJQƚTQ^WOTZ^u.bl"gbRlY ,r�OJD^EvEsشF FǵH{IkKoM-BPiaSJW}<[t_^kdaiWC?lɕU?sj@ Ê@ùASB[ NCWĹE`>GlIژIƝLi|OG~S|ɤWs[j c``G:73W:wNi:%҆;<ү;¬<սV3>Ӓ?AbEԕC>F$eItLhKP{T(raXi T5a5U%r546Q6f7ux%8z9;a=?;BtE^HSfLzPq#oe$$ݣ&y'%o0)�e,>\F/ڬSC3J7B<(%9@1F/*-Kg"PĖVa*ř+$o,+˜9,̝l} .8s?07i2u`5KV8M<PE@@l<<D4I,N$S Y[)1Ǔ2=ct2˕;3 4ޘMwR6m8bd;Z='QAYHE ?I187M/TRS'LWH.o\v; 3;;̏D< %=|? _r@ǕLiEB阳_EqVkH_MKKDbOY;SY036W*\9"a WF,1F{Fك”GlH@䂺I^Hy,JΌ;oLeN\qQ9STIWI^A�ZѦU8<^b/bʰy'Dg. RjyJMRyRzÚ9Sn{7T}[UV4v|WKlYw3cL[Y^;Pv`Ę4GEc֝a>?g75ej,nѭ~$5_|pԫ_qr_r-`PsO`t3aw 2by}c|tLePjga%i%WkNHn'E q?;tO3wͤv*=mhmIixm}j!mk&nBlnnwoq]pt{qwrusv{hu6 _Sw9UyLn| {C&~ܕ90{$a{Hb{sb%{c |d՝|f}Mhӌ~)k8nz_|rpځvgJ{i]hT;݅JϊPAA}8HqZB[[\g2@]_!ӋEa$ncfVj xBmokrPeܒpw\OZ|%R͖}I\ڇ'@T" Tn+T\UVX!əY͚#\+=^܊Ebew9innd]r[<xQ}P}HM NNHN:OZާ1PEAQS }U#ͩ(WZo]ά awvcSeljcVnnYɲCt,PDZH ňnH\HhIJ~J^(K9(L Ny3PѸKSD"VGY~>]u:akfNb.k5X2B\CBXBņxCxħD3QE2;F~ťHm)J_LuDžO3bRUbU}ʆYt^jAba<Ο*=̼<=bʔX=eӂ>Ӻ?d@cByCѨdE HtKR״NظR+|zV"s 'Zqi7y57ц-7ϖG8^̵l89:Ҿ4<%W=ů? A_D*GJN{qBRr- ..>|.٨./ȨP1�ue2ૅk5gb7ǯX:(O>xFBk>EF35KT-P7%UWť)ZO3r䞼3d@4m5@w6n2x8n:�e,<no[?J;RBOIF:@J@!8XN0S;'Xb ]<JM9pǡ;9՛P:N; <Q|=|"r:?F`hAw^D UG[LJtCN2;RE2V*M[I":`+OAHAⓖBIA BH CϖEvFmH}cJɡZ MtPP~uGS⬅>W6'[-_%edGKwLxKݞLL)M\Nb|jOrrQW\iSQ_UqVXML[MC^R:bD2&f0)ja~!BVÁCrVѣ>WFyW~XWY1܂ZMGy%[1o]^e_[`\ZaRdFIg2@jl7m猪.q&5cxacCx秷c~ycz}da{ݒYe}`fg1v~h+ljNcIlGYnPUqiG sp=w4ze,poapBp_Pptp:pqנq2sqt֐Crw/sy}t|tGv5jw愎ay׉�W| N&~Dҁ?;;2}gD}hQ}h׬~3i~jԟRlh2nnspMs{wSrbH{3hυ}_=ބ$ULCiUB 9`XV`a8 aa\cdu[fNh֎kXnz<Ar1pvCg']z]=TUJA8YAYaZ -Zr[\ٛM^`VcAaf6%ixmko8aqev6\{RPI,R٨BR޿\SASڷT$UyWi YO[}^R oatŭe�wZhmذ>mDdKqZvQ5L*<LtԶL" MW=>N!O-Pָ\RET]VؑܺvYǻn]~`vdlAi cmYwhFʭyFQȏōF(ŭG¥GѽHƸ>nJK#YMzOȲRbɍUgʊX~Iˬ\t`k[`e*a-@-+<@iIN@$jA"ԓAõBeC%rE+FdrH+KbN)ؼQMtپT}"Xs)\j4:{Ӳ:":;;Y7`;P<g='?@ BzD?NG>J#Mb\P|*Tr8 8q 8vR9 :{<1Sq>h@F^qBUE~KIr:CM@-:zQcN2UÖ)Z!_vʂ<= d=}>-9?(@~G@{tjB+SjD?baFWIrN\LӶbE_Pl<TZ4Xb+]#aǴAiB ӜYBqRCC/E+wCFum{HcJ1ZMM PP.GS?�W6R[-` %dĈgHuII]k5I:YJbKǞ{7M(qvNޣ.gP^,SVTVGKY4Bw\9``0dh(|h /Q?yQΒFR='RSDDT1ڀ%UdvxVJlXc ZԡTY]JP8`Gc.=f5jIR,rnA#[`\ԥ\[^\‹4]Wf^"_)|H`s}rbchcߘ__fXUh}QLk?CNnM:Aq1^u?W(g^WgǩgāMRh'hHiF؋j'ւkAFy l(on4vep(\Lr94RtIsw[#@9zT7#}.4swx/sw8txb7tYy(tzDuW{Òqv}dwx.vyl{.cI} Y,P?F߄/=4~nπoWoή.ApEqrtƐNw-y}ʅ|tD Nj:aWN D;`2fܺOg8 rg*hQ׎iG;bjlAHnWozp؅?s{ؓw#rS"zh _,UNL#ZB_4!_?_%n`[akIb)d):"fhkŸk)nRzqpzug z#]x~S䃱Jn#Wت9XBSX|Y,̪Y=[q\|^B묝`i@eb<Segi=xlo qe~u~[pz=RbsQQYĸQ R.+R<S깘U4ֺVӤ X͛P[& ]Խ`.d{w phWmldoqZJsJXK1Kv_L.ǝMNErWOQsS~,V>Y!\b7`�u_clIhCbՆD8ՕDrէDE"ȰEp#F%lGI�:J/LiN[*Qn TGX})[tm_k8>)OF>]ңV>зo>?@MAH;XBD <:EݩTH�zJwBBMDG'Ph+S|NWsUBWSB&CWCDIEóUx GKn,I0JdnKt5ZNfQQH^Tu?qX%f6\%t.6`mȩ%dF.%F|uF۬ԗGniHB>I_\z6JϰpZLfN\Q:STwJWWJLATZҿV8^Ž/b'|g/i<Jn;J K`KL\ކbMf |NrPh`hRi_XTIJUWwLZCw]߻:aI1e)Riy PzPӣ,Q>QR=҉S.Tgv3U먖lnWªbYxY2\j_O_=Fba=eӺ4iH, m(#XV筰XDX֚YB`YWZJ[˟Lz]%Sp^ʣg-`v]bTeJhn}AkM8oI/rʼg'agaƩb&7b0c 㒑cȕ7^dev2g]uliLbk}YGmcOoFr׬=[u4ZyhW+lZrLlѬlAm mn#>�no|.q@rr•dht_Cv;Uxڠ LJ{kHB~@9ЁW50wjJx ưrxM3xݥxѝvyz9ċ{т|7Ey} o~^e܀ڒ\CR("IO?uJ6̈́_vv{wN&wP:xڣ8z0O{}g,vlcKލoYP2'FÔ=nXn=tnWnّoRp.}qXwrٙ)tWv+y}Ɩ|tA'jagWcMCDef*f&gٟhgӟh8j_ؠl"XnDkfpɅ3ns{ˤvrFzh~_W�ULL ^ ^b[^]_:_e`bH}OcVeꕱhFkon"zqpHuwfy]^~!SϺV+VͺWI%W¿K Xq=PY^Z \E]Z`%,(be,hxihlnpeXZt[OdOLP4 Ps7QDuR"S<,TʪVXDXbZŒ]`Zd vgmkсkc,Hб:I.LIo̴hIɐאJiUK2L8jMO(cPS0YUo۠XIܜ[~ݺ_kucOl BBeB{CNΡ C<D}EFm4H#I3@KNL&P-TW\}[t=LL6M?8MõN?O|~YP̸tlRhjTZ`VWLYAM\74D_;cǦ3fʤ*ik!OݱsP PpP}Q-R"SϵKv>UWlfW2bY`Y[O^XFla=be_ 4i!'+m)j#`S}S٥T+T}OU&cV ]#W6xJX&ntZmd\[^|Qa HRd?6g6IkQ-oȵ$X¨Xg~YAGYDZI[ \1]{@]qq_,ga%^cZT~esK hú{Ak콷8o]#0 
sĵ']_6_衂`&ܣ`ia5+aԤGbͥ!dZu`eakgGaiVaXWkRNnTEqC<oty3swV*h/5h]lhfhrihCij^Ijʃlz(m\ppnfpͦ]rSuUHJ&x@z7~..qr$ӯrT2"rģ8s ,sbtg'#u]vuwlMyib{Y } OF%Ŭ<ㅳ3|܇|Ҳ}*/+}l}̉M~N~эㅟڐ|rychх?_/,UWL BgM9|~~~ʶ%jk~݁r/͂/Dyyo|Jeۑ\?RšI6$"?ԕu!uAvEΕsv̱w xyZH{}j +v}l�bcM/YHP)ŕFmm]-mKYn/n簡o⩘cq(rEt^v*y}ç|t>ja=W{$MWdxle$ίeqٯe軝f#1gh"j4ldrn5gTp)]s{vr<zpho~X_#U ] m]G1]rS]^_|"`}bckeŕh(|j[‹myqfplu'fƣy8]HU͂ Un V>VoŠjW˧Wѻ^X۴[Z+e[ťgm] _bejhxG1l@nӐp%e7$Nbѱ2NDNͻ_O3ʛنOfټP|&Qs]RT(PSU!X ܮZs݇]2}ހ`D-ߙcvg_m>vGGMGfHEϐH˵It>JY KzoLܳ kN>PzR7gUN@GX1D[f~a^uRV򹝱W-ѨWuWqXwY_Zyz[۽p]`f_]EaSdl.JYgWǑA'j'8#n/Nq&YsYԩZ, Z{J[1\׆ ]|?^frZ`haa^d$eU:f¥KiBl9pʫ0sͲ'\ͳC�]{]EA]*^=Ñw_` }aOtb۹jKdV`fҽnViAMkVD6o;rQ2%u8)]afoaa֮b5abcy݊FdmeUvglhͶc'jѹYmPo<Fr%=uu@4ny-Lj+g-gkg\hL+hʩԖivjUkoOz.lƯpen` fp>Y\rcSgtϸIw@zz7}‰.o5o_oI4o¢pRqp_qr~ustucjwYaIy W{IN*}ǵ7Dɀ;G2sx2nxWxzxʙ{y0-y zoY{S߃n|ky}pDYfc !\4S)WIܱ@X7SRtĂ/ِ3^tR$N<r=!uЈl$6{byXێ OP_Eޒ䬧<eع&܇O+QbB%y؋㐵ׅ|rjdahĖ_ ݜUL4B;}&T}Ӽs~~B~I,Ɓ\ٛdؕ'ɂ)Cymo{8eڢ\<amRnrI%tÃtuC= uGv[w; x`駕yΛ{B{}l *vvlBcOYpP$l+lfzlm_$monnoMpr[tc{v(|y}|t<jqauWwc�d"Ȑdd7d-ge_3f)#g0(oh{pj%œkm`n(cEp Ns{|vr2zBhO~_ ͐\͠\4ͳ\rk\]]p8^΅_>`JaaǦczФeqh^jʃHnmyԠq6pZtfsTiہTےTۭU-UǥVamNWHDܦXlLYҭݘ[5]x_bTe;hqx*knM!MN]MyMЧNQNNO7P߻�R.UwSxU&WxkZ.H\XA_ZcUva?BarnabbqcTdHezwf>m/hĽc`jwYliP&oʒFry=u}4y6+cc̻ۮdidbide%foPgxhinj`dl'[n)QqqcHtV>wms5z@,fOfKf(dg+gh?i"j@ykpm=f>o#\qPÖRsI~ve@3y7|.jWyj`jk Dk̙l,mtn|/o^rWp껲hr^t̿U-w%)KyBW|ǘ9&ʗ0oЮӸUop+Epy=p5qrMsJ(<tuou_kw_ayX<{ͽ$N~GEI<2vvܨ*w k4wQЧwdxE,x0yu {yQ|Xo}eA\ŶRI ?q6z~"T; {wءXepҤԦT} 3t4y\jy `ʼnW#"MD((:׈T1sѸ͈Bј2%㉘Ɡ.׊Mԝy~7o͏fmS\r\dRٕIVD?풹Ԏђ/“)9t2gڑ` gٕ͖ՖK*uWl㞆bXXO$ǨEw )#Wv"$ǟkLءͅ~{ݒr^A^h՘_UyK|ŕ|¥ }l5}۪p~~+\-X# Ƃ$Cyco{Ϗ)eڳh\;0;RTsgt_~tG~t\u6 u~v0x/&y>{}m *vql&cQ}Y>k?̊Pkr@ekǮÅl ôlmJIn:Ĵoia9pܢrlƘtgwv'yy}ɟ|t:j_^aЇbLЗc%CЩc^cȑd7 *dtveʷf갞MhJiAӈktQn`9pAs{mvr*ؽzhZ3*[(b;[^NU[:{\*ޯ\]I^ߴ`96acmeVgEj8Tmyq pISN8Sy֗ SԶ"SDTksULUÎVѼ_X Y_[D]I_b.e~h@xkk k;l@Äl횬mZ|n.7Lo9"}ap@strȓis_uVx+LzC0}O90mmϳmmnGInoZI)p( gq,~rj ts|ju`wWAyM|qDI=w;K 1op ?pRopq+qÓ^rrsot¯uvlwųbGyNjX{ɚO~gE#U<E 3 sIzstsܺt& tjju$,uՋvսցwwy_nz0dK|Z~:PaȔG!>(4xe0x]xMxyb]yz׎*{z|)z}껫pskg9i]T>SJ,ƽ@NJɗ7~~~E[+}z Ěm߂ ~(t`.pj׹`ڈW/ܾbM;)D($:ӆ"yBgцIkʦ·n{W`Oʫ쉸߂R׮x)bnۍer[mlQГ|HK>ӟdC ϷH*�cxM<tۇ}xPusƕj R&`Z$V,M!mCr뾽Ɨ&ʘ`<ӕ~>Ykܝy?ʢ$o@ew\6ŪR֭I )A]14DǏ ǫŤۛϓF�+ə0uk竞b=PX2Oޮ$֮څ~Y�`ïŇF%񱥋ЎβÅq{ƒrU$Zhs_ dUn~{{z|$Ŧ|||FC}Ở~M* ĂBy[ oz¿eNt\;re s%sNɛ?smt ǫtŻu1evxVɁy:{u}o )vllՉcTjTjjͰ kUIkzԁlOl(m՜o9(pBr{גtktv&vy}ڛ|t9j"bշ/b,@b]Yb~cʳcōd{vHewf/h iנWwkҗzCn]-p7sv{`vr#Y٘Z!ZPZX[ϐ,[h\Q]A^gN_ɯ~aj6cNiex=gz.j*=myunjvǭv/ձXvwxvpwnAx(ɒyaz2^y{̋o} e~|\2ARY7I]?װ6\wœwŵ2wޱx?+x}uy0%yǦ~zy{|z}7ʯp~fͱ]5SрIhӱ@07MyN#yqz ÛzQުz>e{<™-{n|F}O{*Ȋrh+l˞^bjwTσK#"A-8g|q}[}0;}r2}~Tk~< }Ss΃iD`+1gVx\̄LCikS:?Pav\߁ǻ݂"R؃A߅ 9vDwln:9b3X%dONtE̒�a<jߵȆ$ݶ^',(&a֌m4\ylڼo_eՐ\RtCŝH疮P?vٮֽh2Të[Ǐz"ِ,L}CI_sDiTf_*VP8]L~+C9æÀݧ!vS8/zwvڨ}\|֘�|ʫO+ޮx-nHdbxZۡJ'Q9hGמoܟ :J{-֠]L+A0dn*y놴}sjig`h(VY]%Lוǻđ41Kl-)hnpYn#."+y n:o_ݤez\FRd\Ǐ΍6w*esԏf Ojŵ瑩'x爑w3uwkмdb'XG,˿A(cwǿׅv.O=PN&͊’yfă{ųrL Whȋh_!zs2z=E{,˽c{yː{H|b}E~?r͒)E0AyTozӲeסrװr)rYϜrLs<s`؃t`uROv{w}yy6{k}p )vhlihiiգiӚjЖjj̃jhRk\l mop_rztnqv%ty}|}t7a#a4يa`׳ab3=buxc=dIe-fwLg iigkŗ~8nZ$p -sl{(0JNrq̩iT0�αϒ 1Оvl5<Cb]X1֦N؞En<r ʐʵE@4˱Eͅ솰w6mѧcCHYO׏FLhH<⃗m.ȌAȱPt?ɳJRˇHo x Pn'^dO*qZ1LPߑsVGQ׎=genńԆŪWU<=1ƴMJO K~"yiroe珰Ϊ\!БRrҩHޖJ?iQ;݊o\x؋–$ 6_"{А~q h :^Y T>K NAičʏ9ݼy*֬VvξΒ Јi~ltՖB ka<W(M̚DZŋɵ=Ŗ"ɖdǷ^H츍Q͙ւ˼Kx\nd̟O[8PQhWGԝM0qM|aEͪ~'+^p ɳ1յ9|r¤ui� _EսU"KNȗs&qN'K{֮jꨋ.vpe#9׊˩a4 'wX)m̭ҳgdzZ`SPʦ ǖ"<HbɹΟUv3 Rcٲa|ȵ/sii_Ȱ]V / BPYAK|j{úR3<ǘX7UD֜ɝvx Io5e["ϼ3MH<ʔhƥė׍tC+*ŕ>*Ƹ}u RوyR~v5uak1bϱұЅԃ0u{ŻUbТ$z | [ҷӁj]u{֣rEShyz z7ѹzzu%zZ{aĥܠ|D|d}GBޅÞ:=4@yNozHqHUq;׈eqeՆ}qҋr΃rts/u_tu &;vIwqyr4{c}q)v*h}6hDDhrZiֹzibiOjȮ(kO8lMm~Msn pwrntpov$sy}u D@BшLSDk1׆V|j5r|*hZ؞^Õ?>U] 
KaAۋHcσWϵb嚙[ы<WJK}3ksHշid0_UИګL*ܫBz:͕ЍͶv鵕J3ΙDkǑ͐З�ѐ~Ҵt6jT"Յ`3VMOC ʿq&IH!}1q}3⑽okA^Duutkna:ԶX*=֝N~wزD퓒Γ8nKǽ){4ȶfhCFJyw!mxcg=Z)Y3PyXF1bK<j_S˜4ԙAYꠥ`ģT%Ņƕ,z4sEp_f̼\ǣsSIu9;X*$}R! fg՚A͡؇)"}ißs<PiϦ6` OVQ˛Lĵfd۵Ʒ˥!i_?$Q_f2)ߩ+}4NwumcZ0GP㬹ȖҮO4"jªLm}Ҳ ɵ{ݳhr!dkhb^TwͺĵDŽǦuER_已[g˹=Z7ŻX]wkmcc4Y%7?L7MnvN91סœUÃĎ|Žr5i)ȕ_xɒ^ɣ%ɷNUՔzj?dDʏ3կfsӧ <l ͔ ΉMϠ�xܢUo?eeԧԶȋDД䋃 aEX Վƴy^¬e׏UQ2͈e3~U6uOܛkJX3i9wਂ{ۃIs܅[ѩ1㥉ɗs^Ti{畒r?ixSuyٙyCןyԮyвzL˱*zſx{|L}zH7r8y@y p/޹pN*ps]pج=q�hqtWr rX?s8tε)vNwkya1{[}r���� �0�@�P�`�p��������� 0@P`p� 0@P`p� 0@P`p� 0@P`p� 0@P`p� 0@P`p� 0@P`p� 0@P`p   ! 1 A Q a q   ! 1 A Q a q   ! 1 A Q a q   ! 1 A Q a q   ! 1 A Q a q !1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq"2BRbr"2BRbr"2BRbr"2BRbr"2BRbr"2BRbr   " 2 B R b r !!!"!2!B!R!b!r!!!!!!!!"""""2"B"R"b"r""""""""###"#2#B#R#b#r########$$$"$2$B$R$b$r$$$$$$$$%%%"%2%B%R%b%r%%%%%%%%&&&"&2&B&R&b&r&&&&&&&&'''"'2'B'R'b'r''''''''((("(2(B(R(b(r(((((((()))")2)B)R)b)r))))))))***"*2*B*R*b*r********+++#+3+C+S+c+s++++++++,,,#,3,C,S,c,s,,,,,,,,---#-3-C-S-c-s--------...#.3.C.S.c.s........///#/3/C/S/c/s////////000#030C0S0c0s00000000111#131C1S1c1s11111111222#232C2S2c2s22222222333#333C3S3c3s33333333444#434C4S4c4s44444444555#535C5S5c5s55555555666#636C6S6c6s66666666777#737C7S7c7s77777777888#838C8S8c8s88888888999#939C9S9c9s99999999:::#:3:C:S:c:s::::::::;;;#;3;C;S;c;s;;;;;;;;<<<$<4<D<T<d<t<<<<<<<<===$=4=D=T=d=t========>>>$>4>D>T>d>t>>>>>>>>???$?4?D?T?d?t????????@@@$@4@D@T@d@t@@@@@@@@AAA$A4ADATAdAtAAAAAAAABBB$B4BDBTBdBtBBBBBBBBCCC$C4CDCTCdCtCCCCCCCCDDD$D4DDDTDdDtDDDDDDDDEEE$E4EDETEdEtEEEEEEEEFFF$F4FDFTFdFtFFFFFFFFGGG$G4GDGTGdGtGGGGGGGGHHH$H4HDHTHdHtHHHHHHHHIII$I4IDITIdItIIIIIIIIJJJ$J4JDJTJdJtJJJJJJJJKKK$K4KDKTKdKtKKKKKKKKLLL$L4LDLTLdLtLLLLLLLLMMM%M5MEMUMeMuMMMMMMMMNNN%N5NENUNeNuNNNNNNNNOOO%O5OEOUOeOuOOOOOOOOPPP%P5PEPUPePuPPPPPPPPQQQ%Q5QEQUQeQuQQQQQQQQRRR%R5RERUReRuRRRRRRRRSSS%S5SESUSeSuSSSSSSSSTTT%T5TETUTeTuTTTTTTTTUUU%U5UEUUUeUuUUUUUUUUVVV%V5VEVUVeVuVVVVVVVVWWW%W5WEWUWeWuWWWWWWWWXXX%X5XEXUXeXuXXXXXXXXYYY%Y5YEYUYeYuYYYYYYYYZZZ%Z5ZEZUZeZuZZZZZZZZ[[[%[5[E[U[e[u[[[[[[[[\\\%\5\E\U\e\u\\\\\\\\]]]%]5]E]U]e]u]]]]]]]]^^^&^6^F^V^f^v^^^^^^^^___&_6_F_V_f_v________```&`6`F`V`f`v````````aaa&a6aFaVafavaaaaaaaabbb&b6bFbVbfbvbbbbbbbbccc&c6cFcVcfcvccccccccddd&d6dFdVdfdvddddddddeee&e6eFeVefeveeeeeeeefff&f6fFfVfffvffffffffggg&g6gFgVgfgvgggggggghhh&h6hFhVhfhvhhhhhhhhiii&i6iFiVifiviiiiiiiijjj&j6jFjVjfjvjjjjjjjjkkk&k6kFkVkfkvkkkkkkkklll&l6lFlVlflvllllllllmmm&m6mFmVmfmvmmmmmmmmnnn&n6nFnVnfnvnnnnnnnnooo'o7oGoWogowooooooooppp'p7pGpWpgpwppppppppqqq'q7qGqWqgqwqqqqqqqqrrr'r7rGrWrgrwrrrrrrrrsss's7sGsWsgswssssssssttt't7tGtWtgtwttttttttuuu'u7uGuWuguwuuuuuuuuvvv'v7vGvWvgvwvvvvvvvvwww'w7wGwWwgwwwwwwwwwwxxx'x7xGxWxgxwxxxxxxxxyyy'y7yGyWygywyyyyyyyyzzz'z7zGzWzgzwzzzzzzzz{{{'{7{G{W{g{w{{{{{{{{|||'|7|G|W|g|w||||||||}}}'}7}G}W}g}w}}}}}}}}~~~'~7~G~W~g~w~~~~~~~~'7GWgw(8HXhxȀ؀(8HXhxȁ؁(8HXhxȂ؂(8HXhxȃ؃(8HXhxȄ؄(8HXhxȅ؅(8HXhxȆ؆(8HXhxȇ؇(8HXhxȈ؈(8HXhxȉ؉(8HXhxȊ؊(8HXhxȋ؋(8HXhxȌ،(8HXhxȍ؍(8HXhxȎ؎(8HXhxȏ؏(8HXhxȐؐ)9IYiyɑّ )9IYiyɒْ )9IYiyɓٓ )9IYiyɔٔ )9IYiyɕٕ )9IYiyɖٖ )9IYiyɗٗ )9IYiyɘ٘ )9IYiyəٙ )9IYiyɚٚ )9IYiyɛٛ )9IYiyɜٜ )9IYiyɝٝ )9IYiyɞٞ )9IYiyɟٟ )9IYiyɠ٠ )9IYiyɡ١ *:JZjzʢڢ *:JZjzʣڣ *:JZjzʤڤ *:JZjzʥڥ *:JZjzʦڦ *:JZjzʧڧ *:JZjzʨڨ *:JZjzʩک *:JZjzʪڪ *:JZjzʫګ *:}ICC_PROFILE�JZjzʬڬ *:JZjzʭڭ *:JZjzʮڮ *:JZjzʯگ *:JZjzʰڰ *:JZjzʱڱ *:JZjzʲڲ *;K[k{˳۳ +;K[k{˴۴ +;K[k{˵۵ +;K[k{˶۶ +;K[k{˷۷ +;K[k{˸۸ +;K[k{˹۹ +;K[k{˺ۺ +;K[k{˻ۻ +;K[k{˼ۼ +;K[k{˽۽ +;K[k{˾۾ +;K[k{˿ۿ +;K[k{ +;K[k{ +;K[k{‹›«» +;K[k{ËÛëû +;L\l|ČĜĬļ ,<L\l|ŌŜŬż ,<L\l|ƌƜƬƼ ,<L\l|njǜǬǼ ,<L\l|ȌȜȬȼ ,<L\l|Ɍɜɬɼ ,<L\l|ʌʜʬʼ ,<L\l|ˌ˜ˬ˼ ,<L\l|̜̬̼̌ ,<L\l|͌ͬ͜ͼ ,<L\l|ΌΜάμ 
,<L\l|όϜϬϼ ,<L\l|ЌМЬм ,<L\l|ьќѬѼ ,<L\l|ҌҜҬҼ ,<L\l|ӌӜӬӼ ,<L\l|ԌԜԬԼ ,<L]m}Ս՝խս -=M]m}֍ֽ֭֝ -=M]m}׍ם׭׽ -=M]m}؍؝حؽ -=M]m}ٍٝ٭ٽ -=M]m}ڍڝڭڽ -=M]m}ۍ۝ۭ۽ -=M]m}܍ܝܭܽ -=M]m}ݍݝݭݽ -=M]m}ލޝޭ޽ -=M]m}ߍߝ߽߭ -=M]m} -=M]m} -=M]m} -=M]m} -=M]m} -=M]m} -=M]n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n~.>N^n/?O_o/?O_o/?O_o/?O_o/?O_o/?O_o/?O_o���� �0�@�P�`�p��������� 0@P`p� 0@P`p� 0@P`p� 0@P`p� 0@P`p� 0@P`p� 0@P`p� 0@P`p   ! 1 A Q a q   ! 1 A Q a q   ! 1 A Q a q   ! 1 A Q a q   ! 1 A Q a q !1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq!1AQaq"2BRbr"2BRbr"2BRbr"2BRbr"2BRbr"2BRbr   " 2 B R b r !!!"!2!B!R!b!r!!!!!!!!"""""2"B"R"b"r""""""""###"#2#B#R#b#r########$$$"$2$B$R$b$r$$$$$$$$%%%"%2%B%R%b%r%%%%%%%%&&&"&2&B&R&b&r&&&&&&&&'''"'2'B'R'b'r''''''''((("(2(B(R(b(r(((((((()))")2)B)R)b)r))))))))***"*2*B*R*b*r********+++#+3+C+S+c+s++++++++,,,#,3,C,S,c,s,,,,,,,,---#-3-C-S-c-s--------...#.3.C.S.c.s........///#/3/C/S/c/s////////000#030C0S0c0s00000000111#131C1S1c1s11111111222#232C2S2c2s22222222333#333C3S3c3s33333333444#434C4S4c4s44444444555#535C5S5c5s55555555666#636C6S6c6s66666666777#737C7S7c7s77777777888#838C8S8c8s88888888999#939C9S9c9s99999999:::#:3:C:S:c:s::::::::;;;#;3;C;S;c;s;;;;;;;;<<<$<4<D<T<d<t<<<<<<<<===$=4=D=T=d=t========>>>$>4>D>T>d>t>>>>>>>>???$?4?D?T?d?t????????@@@$@4@D@T@d@t@@@@@@@@AAA$A4ADATAdAtAAAAAAAABBB$B4BDBTBdBtBBBBBBBBCCC$C4CDCTCdCtCCCCCCCCDDD$D4DDDTDdDtDDDDDDDDEEE$E4EDETEdEtEEEEEEEEFFF$F4FDFTFdFtFFFFFFFFGGG$G4GDGTGdGtGGGGGGGGHHH$H4HDHTHdHtHHHHHHHHIII$I4IDITIdItIIIIIIIIJJJ$J4JDJTJdJtJJJJJJJJKKK$K4KDKTKdKtKKKKKKKKLLL$L4LDLTLdLtLLLLLLLLMMM%M5MEMUMeMuMMMMMMMMNNN%N5NENUNeNuNNNNNNNNOOO%O5OEOUOeOuOOOOOOOOPPP%P5PEPUPePuPPPPPPPPQQQ%Q5QEQUQeQuQQQQQQQQRRR%R5RERUReRuRRRRRRRRSSS%S5SESUSeSuSSSSSSSSTTT%T5TETUTeTuTTTTTTTTUUU%U5UEUUUeUuUUUUUUUUVVV%V5VEVUVeVuVVVVVVVVWWW%W5WEWUWeWuWWWWWWWWXXX%X5XEXUXeXuXXXXXXXXYYY%Y5YEYUYeYuYYYYYYYYZZZ%Z5ZEZUZeZuZZZZZZZZ[[[%[5[E[U[e[u[[[[[[[[\\\%\5\E\U\e\u\\\\\\\\]]]%]5]E]U]e]u]]]]]]]]^^^&^6^F^V^f^v^^^^^^^^___&_6_F_V_f_v________```&`6`F`V`f`v````````aaa&a6aFaVafavaaaaaaaabbb&b6bFbVbfbvbbbbbbbbccc&c6cFcVcfcvccccccccddd&d6dFdVdfdvddddddddeee&e6eFeVefeveeeeeeeefff&f6fFfVfffvffffffffggg&g6gFgVgfgvgggggggghhh&h6hFhVhfhvhhhhhhhhiii&i6iFiVifiviiiiiiiijjj&j6jFjVjfjvjjjjjjjjkkk&k6kFkVkfkvkkkkkkkklll&l6lFlVlflvllllllllmmm&m6mFmVmfmvmmmmmmmmnnn&n6nFnVnfnvnnnnnnnnooo'o7oGoWogowooooooooppp'p7pGpWpgpwppppppppqqq'q7qGqWqgqwqqqqqqqqrrr'r7rGrWrgrwrrrrrrrrsss's7sGsWsgswssssssssttt't7tGtWtgtwttttttttuuu'u7uGuWuguwuuuuuuuuvvv'v7vGvWvgvwvvvvvvvvwww'w7wGwWwgwwwwwwwwwwxxx'x7xGxWxgxwxxxxxxxxyyy'y7yGyWygywyyyyyyyyzzz'z7zGzWzgzwzzzzzzzz{{{'{7{G{W{g{w{{{{{{{{|||'|7|G|W|g|w||||||||}}}'}7}G}W}g}w}}}}}}}}~~~'~7~G~W~g~w~~~~~~~~'7GWgw(8HXhxȀ؀(8HXhxȁ؁(8HXhxȂ؂(8HXhxȃ؃(8HXhxȄ؄(8HXhxȅ؅(8HXhxȆ؆(8HXhxȇ؇(8HXhxȈ؈(8HXhxȉ؉(8HXhxȊ؊(8HXhxȋ؋(8HXhxȌ،(8HXhxȍ؍(8HXhxȎ؎(8HXhxȏ؏(8HXhxȐؐ)9IYiyɑّ )9IYiyɒْ )9IYiyɓٓ )9IYiyɔٔ )9IYiyɕٕ )9IYiyɖٖ )9IYiyɗٗ )9IYiyɘ٘ )9IYiyəٙ )9IYiyɚٚ )9IYiyɛٛ )9IYiyɜٜ )9IYiyɝٝ )9IYiyɞٞ )9IYiyɟٟ )9IYiyɠ٠ )9IYiyɡ١ *:JZjzʢڢ *:JZjzʣڣ *:JZjzʤڤ *:JZjzʥڥ *:JZjzʦڦ *:JZjzʧڧ *:JZjzʨڨ *:JZjzʩک *:JZjzʪڪ *:JZjzʫګ *:JZjzʬڬ *:JZjzʭڭ *:JZjzʮڮ *:JZjzʯگ *:JZjzʰڰ *:JZjzʱڱ *:JZjzʲڲ *;K[k{˳۳ +;K[k{˴۴ +;K[k{˵۵ +;K[k{˶۶ +;K[k{˷۷ +;K[k{˸۸ +;K[k{˹۹ +;K[k{˺ۺ +;K[k{˻ۻ +;K[k{˼ۼ +;K[k{˽۽ +;K[k{˾۾ +;K[k{˿ۿ +;K[k{ +;K[k{ +;K[k{‹›«» +;K[k{ËÛëû +;L\l|ČĜĬļ ,<L\l|ŌŜŬż ,<L\l|ƌƜƬƼ ,<L\l|njǜǬǼ ,<L\l|ȌȜȬȼ ,<L\l|Ɍɜɬɼ ,<L\l|ʌʜʬʼ ,<L\l|ˌ˜ˬ˼ ,<L\l|̜̬̼̌ ,<L\l|͌ͬ͜ͼ ,<L\l|ΌΜάμ ,<L\l|όϜϬϼ ,<L\l|ЌМЬм ,<L\l|ьќѬѼ ,<L\l|ҌҜҬҼ ,<L\l|ӌӜӬӼ ,<L\l|ԌԜԬԼ ,<L]m}Ս՝խս -=M]m}֍ֽ֭֝ -=M]m}׍ם׭׽ -=M]m}؍؝حؽ 
�������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������j? -g ,@KN H!<")##$%&s'A( ()*M++,r-"-.{/$/0n112K23445>56`67{8899:$:;+;<-<=)=>">??@@|@AfABMBC/CDD}DEWEF/FGGlGH=HI IpIJ9JK�KcKL&LLMGMNNcNOO{OP2PPQBQQRNRRSVSTTZTUUZUVVWVVWPWWXFXXY9YYZ(ZwZ[[c[[\L\\]2]~]^^a^^_A__``h``aBaabbabbc6c}cd dOddeedeef1fuffg@ggh hKhhiiSiijjXjjkkZkkllYllmmUmmnnNnnooEooop9pvppq*qfqqrrUrrss@s{sst*tdttuuKuuuv/vhvvwwJwwwx*xaxxyy?yvyyzzQzzz{*{_{{||6|k||} }@}u}}~~G~{~~KMLI{߃Cu؄ ;lυ�1bÅ$UFvՈ4dÈ!P݊ ;iƊ#Q܌ 7eIvэ+Xߏ 8eBoǐJv͑%P{Ғ(S~ԓ*TԔ)S}ҕ&Pz͖!JtǗCl9bݚ.WК!Iq9a؝�(OwƝ<c؟�'MtŸ6]РCjܢ)Ou 2W}ȣ9^Τ>cҥAeԦBfӧ@eѨ=aͩ8\Ǫ2Uy)Lo٬Beͭ5Xz'Jlӯ:\~°(Jlѱ6Xz޳�!Cdȳ ,MnѴ4Uvٵ;\|޶@`"Ccø$Ddù#Ccº!@`޻<[{ؼ6Utѽ.Mlɾ%Dbݿ9Wv,Ji�<Zx–´,JhÆã8Vtđį%C`~śŸ.KhƆƣ5Ronjǩ�:WtȑȮ!>Zwɔɰ#?\xʕʱ#@\x˕˱">Zv̯̓;Ws͏ͫ6RnΊΥ0LgσϞϺ (D_{Жб:Uqьѧ/JeҁҜҷ#>YtӏӪ0KeԀԛԶ ;VpՋե*E_y֮֔2Lgׁכ׶8Sm؇ءػ #=Wqً٥ٿ '@Ztڎڨ(B\uۏ۩(B[u܎ܨ'@Zsݍݦݿ $>Wpމޢ޻ 9Rk߄ߝ߶3Le~,E]v #<Tm�1Jb{ &>Wo1Iaz #;Sk+C[s2Iay7Nf~ #:Ri%=Tk&=Ul%=Tk #;Ri 7Ne|2I`v,CYp$;Qh~2H_u(>Tj2H_u%;Qg}-CYo3I_u #8Ndy&<Qg})>Si~)?Ti)>Si~'<Qf|��text����Copyright (c) 1996 Eastman Kodak Company, All Rights Reserved.��desc������� Internal RGB�����������������ز@����� � ��������������������� ����� ������������desc�������sRGB���������s�R�G�B����sRGB����������������������������������������������������������������XYZ ������b����XYZ �����_ �d���lXYZ ��������,��Wdesc������� Internal RGB KODAK sRGB Display�����������������ز@�����h �@ ���������������������@ �����@ �������������XYZ ������o��8��desc�������ND65 monitor with typical user interface, gray surround, and roughly 1.8 gamma��������O�D�6�5� �m�o�n�i�t�o�r� �w�i�t�h� �t�y�p�i�c�a�l� �u�s�e�r� �i�n�t�e�r�f�a�c�e�,� �g�r�a�y� �s�u�r�r�o�u�n�d�,� �a�n�d� �r�o�u�g�h�l�y� �1�.�8� �g�a�m�m�a����CD65 monitor with typical user interface, gray surround, and roughl���view������T������0��33��7����C��C��`�`��������������  �@��� �����!U 7w"#1AQa$2q&W��������������E������!1AQRa"246Tuvq#B3SVb� ��?�yP7+Iz&;b^۬誥^rq)+"p;r, H$"=/.vi)Lܻ,ʶ]a*RXqryԧ � Ԫ/ѨTwvBT*J-*517WITi%&Ĥ-KY,]]dk/{CPGb7n%|/nD;+c/r_:vCPGa77 }dk/;A!Ө{R0| IJwܗN=QwMľoieleY;KNHtԨL;_7oò2,w%Sh$:ujT~q/spa[xN)Ӵ:*?S鸗𿹸[0쯌K'\Ӵ:*?S鸗𿹸[1߳`�y<U <n~yQR0T6WzS;L9w \㳨#y)L6?+@|Wp/~i7и4"in^o7t Av318]T15d41J+y@z,22 pSqt(&! Q9D *,kFcri1 \iɌ#38hSẲTi�*M TJeW+/4h͹pd"J&dU#D)NEb) SûR)$@AX7p"#սIz˾:x!TX=|_I%a!!!!!!!"J5QmQz&EUMdZ-q%$) ХZ G,o˸^ii[n6i7 JyОWxM®+L]12J؎IOɜRΑycZ.ۏ7G(JMˈ[+YKGUʃ߾ Iٱm6din~Zaf#a4][6UPEe&ٳ4 96Ve%;.˴J/Oǿd)eW+RbIjj+iiiiiiiii;�&+7Z1!SmDXQvwo>DrDBv>m gI$uqȺ!9m##bzj_Q1@wH8C,fnIP A[є.ӥ fc~ɾR~VĒW+]xSFfݰ7P5a3l[JYs%fa&VMC(:밓iٵ%Î<tc8! JIP&׸Շ'eK!:*컭*F�xwmW`m-iDFĞTrt:6fC=9rd[UAc ~e8q;I!H|dR3-a EJ�FjPS͵!!)IhF&;yaVUj8 [Df[,QW*NP$<f.&^52)Ё�UD`:]צg[Re@G*m$+*nl@6jj]a/&QCcOWYؐtJ)|>mys m+J+PAK- Pv~Rd*tkŬQf%KKkd4us@-jH<+ 6cz;+BIm6JSxq~뒉iie;{˫j/2av2�Y�j6Kv]iҁ S30,7jLI-ME{ ! !庫 wTlX]0rTEG9Q@D8;5nj(qCt&qFB\J32/G*/W6=pBXV(Deha� }^!/rO-L]7Pͪ/b n*IJNvmH'�V>x~|z5v+YTQZ֐PG*˴E@;yY~_a2RPZ`.\u\lJ; И4*,!j9I(q5TT  ૱"b6L ٣$HD!(|��użuEn8-jQRnI'{t%%)H H�6ZFwS7v"V- _ee,Ǫj6Kv]iҁ S30,7jLI-ME{ ! !D@ "#wp�x}~_~[#:猢Y?e1y+دf."#WbV@YDRAd ;T1 x.O~4 Pnr�bR 1W&10v6+N**4 ZZ菏tq{r϶ͻա2HŶjF2C.*딫$ n $!~bRQb*T%h\Qy$B7bM#4I-0gR9nEp Eþ1 �`@@>@C죝! ! 
pegasus-wms_4.0.1+dfsg/share/pegasus/plots/images/common/home.png 0000644 0001750 0001750 00000021300 11757531137 024135 0 ustar rynge rynge
[binary PNG image data omitted]
pegasus-wms_4.0.1+dfsg/share/pegasus/plots/images/common/download.jpg 0000644 0001750 0001750 00000001740 11757531137 025016 0 ustar rynge rynge
[binary JPEG image data omitted]
pegasus-wms_4.0.1+dfsg/share/pegasus/plots/css/ 0000755 0001750 0001750 00000000000 11757531667 020546 5 ustar rynge rynge
pegasus-wms_4.0.1+dfsg/share/pegasus/plots/css/default.css 0000644 0001750 0001750 00000000437 11757531137 022700 0 ustar rynge rynge
.header {
    text-align: center;
}

.left {
    float: left;
    padding: 0% 2% 0% 0%;
    width: 5%;
}

.middle {
    top: 10px;
    margin-left: 5%;
    margin-right: 1%;
}

.right {
    float: right;
    padding: 0% 0% 0% 2%;
    width: 5%;
}

.footer {
    padding-top: 2%;
    text-align: center;
    clear: both;
}
���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/common.sh������������������������������������������������������0000755�0001750�0001750�00000003560�11757531137�020440� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/bash # # Common structure to all shell wrappers. This file # is sourced from the shell wrapper scripts. # $Id: common.sh 4471 2011-08-24 19:15:43Z rynge $ # # If JAVA_HOME is not set, try some system defaults. This is useful for # RPMs and DEBs which have explicit Java dependencies if [ "X${JAVA_HOME}" = "X" ]; then for TARGET in \ /usr/lib/jvm/java-1.6.0-openjdk-1.6.0.0.x86_64/jre \ /usr/lib/jvm/java-1.6.0-openjdk-1.6.0.0/jre \ /usr/lib/jvm/java-6-openjdk/jre \ ; do if [ -e "${TARGET}" -a -x "${TARGET}/bin/java" ]; then JAVA_HOME="${TARGET}" export JAVA_HOME break fi done # macos if [ "X${JAVA_HOME}" = "X" -a -x /usr/libexec/java_home ]; then JAVA_HOME=`/usr/libexec/java_home -version 1.6` fi fi # Find Java if [ "X${JAVA_HOME}" != "X" ]; then JAVA="${JAVA_HOME}/bin/java" fi if [ ! -x "${JAVA}" ]; then JAVA="`which java`" fi if [ ! -e "${JAVA}" ]; then echo "ERROR: java not found. Please set JAVA_HOME or PATH." exit 1 fi JAVA_VERSION=`${JAVA} -mx128m -version 2>&1 | awk '/^java version/ {gsub(/"/,""); print $3}'` if [ `echo ${JAVA_VERSION} | cut -c1,3` -lt 16 ]; then echo "ERROR: Java 1.6 or later required. Please set JAVA_HOME or PATH to point to a newer Java." exit 1 fi addon='' while [ true ]; do case "$1" in -[XD][_a-zA-Z]*) addon="$addon $1" shift ;; -D) shift addon="$addon -D$1" shift ;; *) break ;; esac done # set no_heap_setup to anything, if you do NOT want heap setup # FIXME: What about a user specifying their own values, but not # using the env vars? Will JRE take the first or last found? 
if [ "X$no_heap_setup" = "X" ]; then test "X${JAVA_HEAPMAX}" = "X" || addon="$addon -Xmx${JAVA_HEAPMAX}m" test "X${JAVA_HEAPMIN}" = "X" || addon="$addon -Xms${JAVA_HEAPMIN}m" fi ������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/������������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�020433� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-condorio/�������������������������0000755�0001750�0001750�00000000000�11757531667�026137� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-condorio/BlackDiamondDAX.java�����0000644�0001750�0001750�00000010241�11757531137�031635� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */

import edu.isi.pegasus.planner.dax.*;

public class BlackDiamondDAX {

    /**
     * Create an example DIAMOND DAX
     * @param args
     */
    public static void main(String[] args) {
        if (args.length != 2) {
            // usage message now names this class instead of the ADAG helper class
            System.out.println("Usage: java BlackDiamondDAX <pegasus_bin_dir> <filename.dax>");
            System.exit(1);
        }

        try {
            Diamond(args[0]).writeToFile(args[1]);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static ADAG Diamond(String pegasus_bin_dir) throws Exception {
        java.io.File cwdFile = new java.io.File(".");
        String cwd = cwdFile.getCanonicalPath();

        ADAG dax = new ADAG("blackdiamond");

        File fa = new File("f.a");
        fa.addPhysicalFile("file://" + cwd + "/f.a", "local");
        dax.addFile(fa);

        File fb1 = new File("f.b1");
        File fb2 = new File("f.b2");
        File fc1 = new File("f.c1");
        File fc2 = new File("f.c2");
        File fd = new File("f.d");
        fd.setRegister(true);

        Executable preprocess = new Executable("pegasus", "preprocess", "4.0");
        preprocess.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX);
        preprocess.setInstalled(false);
        preprocess.addPhysicalFile("file://" + pegasus_bin_dir + "/pegasus-keg", "local");

        Executable findrange = new Executable("pegasus", "findrange", "4.0");
        findrange.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX);
        findrange.setInstalled(false);
        findrange.addPhysicalFile("file://" + pegasus_bin_dir + "/pegasus-keg", "local");

        Executable analyze = new Executable("pegasus", "analyze", "4.0");
        analyze.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX);
        analyze.setInstalled(false);
        analyze.addPhysicalFile("file://" + pegasus_bin_dir + "/pegasus-keg", "local");

        dax.addExecutable(preprocess).addExecutable(findrange).addExecutable(analyze);

        // Add a preprocess job
        Job j1 = new Job("j1", "pegasus", "preprocess", "4.0");
        j1.addArgument("-a preprocess -T 60 -i ").addArgument(fa);
        j1.addArgument("-o ").addArgument(fb1);
        j1.addArgument(" ").addArgument(fb2);
        j1.uses(fa, File.LINK.INPUT);
        j1.uses(fb1, File.LINK.OUTPUT);
        j1.uses(fb2, File.LINK.OUTPUT);
        dax.addJob(j1);

        // Add left Findrange job
        Job j2 = new Job("j2", "pegasus", "findrange", "4.0");
        j2.addArgument("-a findrange -T 60 -i ").addArgument(fb1);
        j2.addArgument("-o ").addArgument(fc1);
        j2.uses(fb1, File.LINK.INPUT);
        j2.uses(fc1, File.LINK.OUTPUT);
        dax.addJob(j2);

        // Add right Findrange job
        Job j3 = new Job("j3", "pegasus", "findrange", "4.0");
        j3.addArgument("-a findrange -T 60 -i ").addArgument(fb2);
        j3.addArgument("-o ").addArgument(fc2);
        j3.uses(fb2, File.LINK.INPUT);
        j3.uses(fc2, File.LINK.OUTPUT);
        dax.addJob(j3);

        // Add analyze job
        Job j4 = new Job("j4", "pegasus", "analyze", "4.0");
        j4.addArgument("-a analyze -T 60 -i ").addArgument(fc1);
        j4.addArgument(" ").addArgument(fc2);
        j4.addArgument("-o ").addArgument(fd);
        j4.uses(fc1, File.LINK.INPUT);
        j4.uses(fc2, File.LINK.INPUT);
        j4.uses(fd, File.LINK.OUTPUT);
        dax.addJob(j4);

        dax.addDependency("j1", "j2");
        dax.addDependency("j1", "j3");
        dax.addDependency("j2", "j4");
        dax.addDependency("j3", "j4");

        return dax;
    }
}
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-condorio/pegasusrc 0000644 0001750 0001750 00000000307 11757531137 030046
0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink=false pegasus.data.configuration = condorio �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-condorio/submit�������������������0000755�0001750�0001750�00000003410�11757531137�027356� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/bash set -e TOPDIR=`pwd` # pegasus bin directory is needed to find keg BIN_DIR=`pegasus-config --bin` # generate the input file echo "This is sample input to KEG" >f.a # build the dax generator CLASSPATH=`pegasus-config --classpath` export CLASSPATH=".:$CLASSPATH" javac BlackDiamondDAX.java # generate the dax java BlackDiamondDAX $BIN_DIR blackdiamond.dax # create the site catalog cat >sites.xml <<EOF <?xml version="1.0" encoding="UTF-8"?> <sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0"> <site handle="local" arch="x86" os="LINUX"> <head-fs> <scratch> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/work"/> <internal-mount-point mount-point="$TOPDIR/work"/> </shared> </scratch> <storage> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/> <internal-mount-point mount-point="$TOPDIR/outputs"/> </shared> </storage> </head-fs> </site> <site handle="condorpool" arch="x86" os="LINUX"> <head-fs> <scratch /> <storage /> </head-fs> <profile namespace="pegasus" key="style" >condor</profile> <profile namespace="condor" key="universe" >vanilla</profile> </site> </sitecatalog> EOF # plan and submit the workflow pegasus-plan \ --conf pegasusrc \ --sites condorpool \ --dir work \ --output local \ --dax blackdiamond.dax \ --submit ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/shell-blackdiamond/�����������������������������������0000755�0001750�0001750�00000000000�11757531667�024150� 5����������������������������������������������������������������������������������������������������ustar 
rynge rynge
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/shell-blackdiamond/blackdiamond.py 0000755 0001750 0001750 00000005505 11757531137 027132 0 ustar rynge rynge
#!/usr/bin/env python

from Pegasus.DAX3 import *
import os
import sys

# guard against a missing PEGASUS_HOME argument (sys.argv[1] is used below)
if len(sys.argv) != 2:
    print "Usage: %s PEGASUS_HOME" % (sys.argv[0])
    sys.exit(1)

# Create a DAX
diamond = ADAG("diamond")

# Add input file to the DAX-level replica catalog
a = File("f.a")
a.addPFN(PFN("file://" + os.getcwd() + "/f.a", "local"))
diamond.addFile(a)

# Add executables to the DAX-level replica catalog
# In this case the binary is keg, which is shipped with Pegasus
e_preprocess = Executable(namespace="diamond", name="preprocess", version="2.0", os="linux", arch="x86_64")
e_preprocess.addPFN(PFN("file://" + sys.argv[1] + "/bin/keg", "local"))
diamond.addExecutable(e_preprocess)

e_findrange = Executable(namespace="diamond", name="findrange", version="2.0", os="linux", arch="x86_64")
e_findrange.addPFN(PFN("file://" + sys.argv[1] + "/bin/keg", "local"))
diamond.addExecutable(e_findrange)

e_analyze = Executable(namespace="diamond", name="analyze", version="2.0", os="linux", arch="x86_64")
e_analyze.addPFN(PFN("file://" + sys.argv[1] + "/bin/keg", "local"))
diamond.addExecutable(e_analyze)

# Add transformations to the DAX-level transformation catalog
#t_preprocess = Transformation(e_preprocess)
#diamond.addTransformation(t_preprocess)

#t_findrange = Transformation(e_findrange)
#diamond.addTransformation(t_findrange)

#t_analyze = Transformation(e_analyze)
#diamond.addTransformation(t_analyze)

# Create some logical file names (f.a was already created above)
b1 = File("f.b1")
b2 = File("f.b2")
c1 = File("f.c1")
c2 = File("f.c2")
d = File("f.d")

# Add a preprocess job
preprocess = Job(namespace="diamond",name="preprocess",version="2.0")
preprocess.addArguments("-a preprocess","-T60","-i",a,"-o",b1,b2)
preprocess.uses(a,link=Link.INPUT)
preprocess.uses(b1,link=Link.OUTPUT, register=False)
preprocess.uses(b2,link=Link.OUTPUT, register=False)
diamond.addJob(preprocess)

# Add left Findrange job
frl = Job(namespace="diamond",name="findrange",version="2.0")
frl.addArguments("-a findrange","-T60","-i",b1,"-o",c1)
frl.uses(b1,link=Link.INPUT)
frl.uses(c1,link=Link.OUTPUT, register=False)
diamond.addJob(frl)

# Add right Findrange job
frr = Job(namespace="diamond",name="findrange",version="2.0")
frr.addArguments("-a findrange","-T60","-i",b2,"-o",c2)
frr.uses(b2,link=Link.INPUT)
frr.uses(c2,link=Link.OUTPUT, register=False)
diamond.addJob(frr)

# Add Analyze job
analyze = Job(namespace="diamond",name="analyze",version="2.0")
analyze.addArguments("-a analyze","-T60","-i",c1,c2,"-o",d)
analyze.uses(c1,link=Link.INPUT)
analyze.uses(c2,link=Link.INPUT)
analyze.uses(d,link=Link.OUTPUT, register=False)
diamond.addJob(analyze)

# Add control-flow dependencies
diamond.addDependency(Dependency(parent=preprocess, child=frl))
diamond.addDependency(Dependency(parent=preprocess, child=frr))
diamond.addDependency(Dependency(parent=frl, child=analyze))
diamond.addDependency(Dependency(parent=frr, child=analyze))

# Write the DAX to stdout
diamond.writeXML(sys.stdout)
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/shell-blackdiamond/pegasusrc 0000644 0001750 0001750 00000000331 11757531137 026054 0 ustar rynge rynge
# tell pegasus to generate shell version of
# the workflow
pegasus.code.generator = Shell

pegasus.catalog.site=XML3
pegasus.catalog.site.file=sites.xml
pegasus.dir.useTimestamp=true
pegasus.dir.storage.deep=false
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/shell-blackdiamond/submit 0000755 0001750 0001750 00000004013 11757531137 025367 0 ustar rynge rynge
#!/bin/bash

set -e

TOPDIR=`pwd`

# figure out where Pegasus is installed
export PEGASUS_HOME=`which pegasus-plan | sed 's:/bin/pegasus-plan::'`
if [ "x$PEGASUS_HOME" = "x" ]; then
    echo "Unable to determine location of your Pegasus install"
    echo "Please make sure pegasus-plan is in your path"
    exit 1
fi

# generate the input file
echo "This is sample input file" >f.a

# generate the dax
export PYTHONPATH=$PEGASUS_HOME/lib/python
./blackdiamond.py $PEGASUS_HOME >blackdiamond.dax

# create the site catalog
cat >sites.xml <<EOF
<?xml version="1.0" encoding="UTF-8"?>
<sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0">
    <site handle="local" arch="x86_64" os="LINUX">
        <grid type="gt2" contact="localhost/jobmanager-fork" scheduler="Fork" jobtype="auxillary"/>
        <grid type="gt2" contact="localhost/jobmanager-fork" scheduler="Fork" jobtype="compute"/>
        <head-fs>
            <scratch>
                <shared>
                    <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/>
                    <internal-mount-point mount-point="$TOPDIR/outputs" free-size="100G" total-size="30G"/>
                </shared>
            </scratch>
            <storage>
                <shared>
                    <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/>
                    <internal-mount-point mount-point="$TOPDIR/outputs" free-size="100G" total-size="30G"/>
                </shared>
            </storage>
        </head-fs>
        <replica-catalog type="LRC" url="rlsn://dummyValue.url.edu" />
        <profile namespace="env" key="PEGASUS_HOME" >$PEGASUS_HOME</profile>
    </site>
</sitecatalog>
EOF

# plan and submit the workflow
pegasus-plan \
    --conf pegasusrc \
    --sites local \
    --dir work \
    --output local \
    --dax blackdiamond.dax \
    --submit
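Once the submit script above has planned and submitted the workflow, the run can be tracked with the same tools the example READMEs point to. A minimal monitoring sketch -- the submit directory path below is hypothetical; pegasus-plan prints the real, timestamped one at submit time:

SUBMIT_DIR="work/USER/pegasus/diamond/RUN_TIMESTAMP"   # hypothetical path; use the one pegasus-plan printed
pegasus-status $SUBMIT_DIR          # poll workflow progress
pegasus-analyzer -d $SUBMIT_DIR     # check for failures once the run has finished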
���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-sharedfs/�������������������������0000755�0001750�0001750�00000000000�11757531667�026122� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-sharedfs/BlackDiamondDAX.java�����0000644�0001750�0001750�00000010241�11757531137�031620� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */

import edu.isi.pegasus.planner.dax.*;

public class BlackDiamondDAX {

    /**
     * Create an example DIAMOND DAX
     * @param args
     */
    public static void main(String[] args) {
        if (args.length != 2) {
            // usage message now names this class instead of the ADAG helper class
            System.out.println("Usage: java BlackDiamondDAX <pegasus_bin_dir> <filename.dax>");
            System.exit(1);
        }

        try {
            Diamond(args[0]).writeToFile(args[1]);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static ADAG Diamond(String pegasus_bin_dir) throws Exception {
        java.io.File cwdFile = new java.io.File(".");
        String cwd = cwdFile.getCanonicalPath();

        ADAG dax = new ADAG("blackdiamond");

        File fa = new File("f.a");
        fa.addPhysicalFile("file://" + cwd + "/f.a", "local");
        dax.addFile(fa);

        File fb1 = new File("f.b1");
        File fb2 = new File("f.b2");
        File fc1 = new File("f.c1");
        File fc2 = new File("f.c2");
        File fd = new File("f.d");
        fd.setRegister(true);

        Executable preprocess = new Executable("pegasus", "preprocess", "4.0");
        preprocess.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX);
        preprocess.setInstalled(false);
        preprocess.addPhysicalFile("file://" + pegasus_bin_dir + "/pegasus-keg", "local");

        Executable findrange = new Executable("pegasus", "findrange", "4.0");
        findrange.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX);
        findrange.setInstalled(false);
        findrange.addPhysicalFile("file://" + pegasus_bin_dir + "/pegasus-keg", "local");

        Executable analyze = new Executable("pegasus", "analyze", "4.0");
        analyze.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX);
        analyze.setInstalled(false);
        analyze.addPhysicalFile("file://" + pegasus_bin_dir + "/pegasus-keg", "local");

        dax.addExecutable(preprocess).addExecutable(findrange).addExecutable(analyze);

        // Add a preprocess job
        Job j1 = new Job("j1", "pegasus", "preprocess", "4.0");
        j1.addArgument("-a preprocess -T 60 -i ").addArgument(fa);
        j1.addArgument("-o ").addArgument(fb1);
        j1.addArgument(" ").addArgument(fb2);
        j1.uses(fa, File.LINK.INPUT);
        j1.uses(fb1, File.LINK.OUTPUT);
        j1.uses(fb2, File.LINK.OUTPUT);
        dax.addJob(j1);

        // Add left Findrange job
        Job j2 = new Job("j2", "pegasus", "findrange", "4.0");
        j2.addArgument("-a findrange -T 60 -i ").addArgument(fb1);
        j2.addArgument("-o ").addArgument(fc1);
        j2.uses(fb1, File.LINK.INPUT);
        j2.uses(fc1, File.LINK.OUTPUT);
        dax.addJob(j2);

        // Add right Findrange job
        Job j3 = new Job("j3", "pegasus", "findrange", "4.0");
        j3.addArgument("-a findrange -T 60 -i ").addArgument(fb2);
        j3.addArgument("-o ").addArgument(fc2);
        j3.uses(fb2, File.LINK.INPUT);
        j3.uses(fc2, File.LINK.OUTPUT);
        dax.addJob(j3);

        // Add analyze job
        Job j4 = new Job("j4", "pegasus", "analyze", "4.0");
        j4.addArgument("-a analyze -T 60 -i ").addArgument(fc1);
        j4.addArgument(" ").addArgument(fc2);
        j4.addArgument("-o ").addArgument(fd);
        j4.uses(fc1, File.LINK.INPUT);
        j4.uses(fc2, File.LINK.INPUT);
        j4.uses(fd, File.LINK.OUTPUT);
        dax.addJob(j4);

        dax.addDependency("j1", "j2");
        dax.addDependency("j1", "j3");
        dax.addDependency("j2", "j4");
        dax.addDependency("j3", "j4");

        return dax;
    }
}
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-sharedfs/pegasusrc 0000644 0001750 0001750 00000000357 11757531137 030036
0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink=false pegasus.transfer.worker.package = true pegasus.data.configuration = sharedfs ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-sharedfs/submit�������������������0000755�0001750�0001750�00000004433�11757531137�027347� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/bash set -e TOPDIR=`pwd` # pegasus bin directory is needed to find keg BIN_DIR=`pegasus-config --bin` # generate the input file echo "This is sample input to KEG" >f.a # build the dax generator CLASSPATH=`pegasus-config --classpath` export CLASSPATH=".:$CLASSPATH" javac BlackDiamondDAX.java # generate the dax java BlackDiamondDAX $BIN_DIR blackdiamond.dax # create the site catalog cat >sites.xml <<EOF <?xml version="1.0" encoding="UTF-8"?> <sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0"> <site handle="local" arch="x86" os="LINUX"> <head-fs> <scratch> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/work"/> <internal-mount-point mount-point="$TOPDIR/work"/> </shared> </scratch> <storage> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/> <internal-mount-point mount-point="$TOPDIR/outputs"/> </shared> </storage> </head-fs> </site> <site handle="condorpool" arch="x86" os="LINUX"> <head-fs> <scratch> <shared> <file-server protocol="gsiftp" url="gsiftp://obelix.isi.edu" mount-point="/nfs/ccg3/scratch"/> <internal-mount-point mount-point="/nfs/ccg3/scratch"/> </shared> </scratch> <storage /> </head-fs> <profile namespace="pegasus" key="style" >condor</profile> <profile namespace="condor" key="universe" >vanilla</profile> <profile namespace="condor" key="requirements" >TARGET.FileSystemDomain != ""</profile> <profile namespace="condor" key="should_transfer_files">Yes</profile> <profile namespace="condor" key="when_to_transfer_output">ON_EXIT</profile> </site> </sitecatalog> EOF # plan and submit the workflow pegasus-plan \ --conf pegasusrc \ --sites condorpool \ --dir work \ --output local \ --dax blackdiamond.dax \ --submit 
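Before pointing the sharedfs example at a real pool, it can be worth confirming that a plain vanilla-universe job matches and runs there. A hedged sanity-check sketch that mirrors the condorpool profiles in the sites.xml above -- the pool-test.sub file name and the /bin/hostname payload are assumptions, not part of the example:

# minimal Condor job using the same universe/requirements/transfer settings
# as the condorpool site profiles above (file names are hypothetical)
cat >pool-test.sub <<EOT
universe                = vanilla
executable              = /bin/hostname
requirements            = TARGET.FileSystemDomain != ""
should_transfer_files   = YES
when_to_transfer_output = ON_EXIT
output                  = pool-test.out
error                   = pool-test.err
log                     = pool-test.log
queue
EOT
condor_submit pool-test.sub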
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-java/ 0000755 0001750 0001750 00000000000 11757531667 024705 5 ustar rynge rynge
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-java/README 0000644 0001750 0001750 00000002561 11757531137 025561 0 ustar rynge rynge
This is a sample workflow with 4 nodes, laid out in a diamond shape:

       a
      / \
     b   c
      \ /
       d

The binary for the nodes is a simple "mock application" named "keg"
("canonical example for the grid") which reads input files designated
by arguments, writes them back onto output files, and produces on
STDOUT a summary of where and when it was run. Keg ships with Pegasus
in the bin directory.

This example assumes you have access to a cluster with Globus
installed. A pre-ws gatekeeper and a gridftp server are required. You
also need Globus and Pegasus installed, both on the machine you are
submitting from, and the cluster. Condor needs to be installed and
running on your submit machine. See the Pegasus manual.

This example ships with a "submit" script which will build the site
catalog. When you create your own workflows, such a submit script is
not needed if you want to maintain the catalogs manually.

To test the example, edit the "submit" script and change the cluster
config to the setup and install locations for your cluster. Then run:

  ./submit

The workflow should now be submitted, and in the output you should see
a submit dir location for the instance. With that directory you can
monitor the workflow with:

  pegasus-status [submitdir]

Once the workflow is done, you can make sure it was successful with:

  pegasus-analyzer -d [submitdir]
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-java/BlackDiamondDAX.java 0000644 0001750 0001750 00000010346 11757531137 030411 0 ustar rynge rynge
/**
 * Copyright 2007-2008 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

import edu.isi.pegasus.planner.dax.*;

public class BlackDiamondDAX {

    /**
     * Create an example DIAMOND DAX
     * @param args
     */
    public static void main(String[] args) {
        if (args.length != 3) {
            // usage message now names this class instead of the ADAG helper class
            System.out.println("Usage: java BlackDiamondDAX <site_handle> <pegasus_location> <filename.dax>");
            System.exit(1);
        }

        try {
            Diamond(args[0], args[1]).writeToFile(args[2]);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    private static ADAG Diamond(String site_handle, String pegasus_location) throws Exception {
        java.io.File cwdFile = new java.io.File(".");
        String cwd = cwdFile.getCanonicalPath();

        ADAG dax = new ADAG("blackdiamond");

        File fa = new File("f.a");
        fa.addPhysicalFile("file://" + cwd + "/f.a", "local");
        dax.addFile(fa);

        File fb1 = new File("f.b1");
        File fb2 = new File("f.b2");
        File fc1 = new File("f.c1");
        File fc2 = new File("f.c2");
        File fd = new File("f.d");
        fd.setRegister(true);

        Executable preprocess = new Executable("pegasus", "preprocess", "4.0");
        preprocess.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX);
        preprocess.setInstalled(true);
        preprocess.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", site_handle);

        Executable findrange = new Executable("pegasus", "findrange", "4.0");
        findrange.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX);
        findrange.setInstalled(true);
        findrange.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", site_handle);

        Executable analyze = new Executable("pegasus", "analyze", "4.0");
        analyze.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX);
        analyze.setInstalled(true);
        analyze.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", site_handle);

        dax.addExecutable(preprocess).addExecutable(findrange).addExecutable(analyze);

        // Add a preprocess job
        Job j1 = new Job("j1", "pegasus", "preprocess", "4.0");
        j1.addArgument("-a preprocess -T 60 -i ").addArgument(fa);
        j1.addArgument("-o ").addArgument(fb1);
        j1.addArgument(" ").addArgument(fb2);
        j1.uses(fa, File.LINK.INPUT);
        j1.uses(fb1, File.LINK.OUTPUT);
        j1.uses(fb2, File.LINK.OUTPUT);
        dax.addJob(j1);

        // Add left Findrange job
        Job j2 = new Job("j2", "pegasus", "findrange", "4.0");
        j2.addArgument("-a findrange -T 60 -i ").addArgument(fb1);
        j2.addArgument("-o ").addArgument(fc1);
        j2.uses(fb1, File.LINK.INPUT);
        j2.uses(fc1, File.LINK.OUTPUT);
        dax.addJob(j2);

        // Add right Findrange job
        Job j3 = new Job("j3", "pegasus", "findrange", "4.0");
        j3.addArgument("-a findrange -T 60 -i ").addArgument(fb2);
        j3.addArgument("-o ").addArgument(fc2);
        j3.uses(fb2, File.LINK.INPUT);
        j3.uses(fc2, File.LINK.OUTPUT);
        dax.addJob(j3);

        // Add analyze job
        Job j4 = new Job("j4", "pegasus", "analyze", "4.0");
        j4.addArgument("-a analyze -T 60 -i ").addArgument(fc1);
        j4.addArgument(" ").addArgument(fc2);
        j4.addArgument("-o ").addArgument(fd);
        j4.uses(fc1, File.LINK.INPUT);
        j4.uses(fc2, File.LINK.INPUT);
        j4.uses(fd, File.LINK.OUTPUT);
        dax.addJob(j4);

        dax.addDependency("j1", "j2");
        dax.addDependency("j1", "j3");
        dax.addDependency("j2", "j4");
        dax.addDependency("j3", "j4");

        return dax;
    }
}
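The READMEs describe keg as reading the input files named by its arguments, writing them back onto the output files, and printing a where/when summary on STDOUT; the DAX generators above pass it -a/-T/-i/-o arguments. A hedged sketch of running the preprocess step by hand, outside any workflow, using the same pegasus-config lookup the submit scripts use:

BIN_DIR=`pegasus-config --bin`              # locate pegasus-keg, as the submit scripts do
echo "This is sample input to KEG" >f.a     # the same input the submit scripts generate
$BIN_DIR/pegasus-keg -a preprocess -T 60 -i f.a -o f.b1 f.b2
# f.b1 and f.b2 should now carry the f.a contents; the run summary appears on STDOUT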
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-java/pegasusrc 0000644 0001750 0001750 00000000237 11757531137 026616 0 ustar rynge rynge
pegasus.catalog.site=XML3
pegasus.catalog.site.file=sites.xml
pegasus.dir.useTimestamp=true
pegasus.dir.storage.deep=false
pegasus.condor.logs.symlink=false
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-java/submit 0000755 0001750 0001750 00000007324 11757531137 026132 0 ustar rynge rynge
#!/bin/bash

set -e

#######################################################################
#
# Settings
#
CLUSTER_NAME="Obelix"
CLUSTER_HOSTNAME="obelix.isi.edu"
CLUSTER_SCHEDULER="condor"
CLUSTER_WORK_DIR="/nfs/ccg3/scratch"
CLUSTER_PEGASUS_HOME="/ccg/software/pegasus/dev/trunk"
CLUSTER_GLOBUS_LOCATION="/ccg/software/globus/default"
#######################################################################

TOPDIR=`pwd`

# pegasus bin directory is needed to find keg
BIN_DIR=`pegasus-config --bin`

# make sure we know where Globus is installed
if [ "x$GLOBUS_LOCATION" = "x" ]; then
    echo "Please set GLOBUS_LOCATION to the location of your Globus install"
    exit 1
fi

# generate the input file
echo "This is sample input to KEG" >f.a

# build the dax generator
CLASSPATH=`pegasus-config --classpath`
export CLASSPATH=".:$CLASSPATH"
javac BlackDiamondDAX.java

# generate the dax
java BlackDiamondDAX $CLUSTER_NAME $CLUSTER_PEGASUS_HOME blackdiamond.dax

# create the site catalog
cat >sites.xml <<EOF
<?xml version="1.0" encoding="UTF-8"?>
<sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0">
    <site handle="local" arch="x86" os="LINUX">
        <grid type="gt2" contact="localhost/jobmanager-fork" scheduler="Fork" jobtype="auxillary"/>
        <grid type="gt2" contact="localhost/jobmanager-fork" scheduler="unknown" jobtype="compute"/>
        <head-fs>
            <scratch>
                <shared>
                    <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/>
                    <internal-mount-point mount-point="$TOPDIR/work/outputs" free-size="100G" total-size="30G"/>
                </shared>
            </scratch>
            <storage>
                <shared>
                    <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/>
                    <internal-mount-point mount-point="$TOPDIR/work/outputs" free-size="100G" total-size="30G"/>
                </shared>
            </storage>
        </head-fs>
        <replica-catalog type="LRC" url="rlsn://dummyValue.url.edu" />
        <profile namespace="env" key="PEGASUS_BIN" >$BIN_DIR</profile>
        <profile namespace="env" key="GLOBUS_LOCATION" >$GLOBUS_LOCATION</profile>
    </site>
    <site handle="$CLUSTER_NAME" arch="x86" os="LINUX">
        <grid type="gt2" contact="$CLUSTER_HOSTNAME/jobmanager-fork" scheduler="Fork" jobtype="auxillary"/>
        <grid type="gt2" contact="$CLUSTER_HOSTNAME/jobmanager-$CLUSTER_SCHEDULER" scheduler="unknown" jobtype="compute"/>
        <head-fs>
            <scratch>
                <shared>
                    <file-server protocol="gsiftp" url="gsiftp://$CLUSTER_HOSTNAME" mount-point="$CLUSTER_WORK_DIR"/>
                    <internal-mount-point mount-point="$CLUSTER_WORK_DIR"/>
                </shared>
            </scratch>
            <storage>
                <shared>
                    <file-server protocol="gsiftp" url="gsiftp://$CLUSTER_HOSTNAME" mount-point="$CLUSTER_WORK_DIR"/>
                    <internal-mount-point mount-point="$CLUSTER_WORK_DIR"/>
                </shared>
            </storage>
        </head-fs>
        <replica-catalog type="LRC" url="rlsn://dummyValue.url.edu" />
        <profile namespace="env" key="PEGASUS_HOME" >$CLUSTER_PEGASUS_HOME</profile>
        <profile namespace="env" key="GLOBUS_LOCATION" >$CLUSTER_GLOBUS_LOCATION</profile>
    </site>
</sitecatalog>
EOF

# plan and submit the workflow
pegasus-plan \
    --conf pegasusrc \
    --sites $CLUSTER_NAME \
    --dir work \
    --output local \
    --dax blackdiamond.dax \
    --submit
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-python/ 0000755 0001750 0001750 00000000000 11757531667 025305 5 ustar rynge rynge
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-python/README 0000644 0001750 0001750 00000002561 11757531137 026161 0 ustar rynge rynge
This is a sample workflow with 4 nodes, laid out in a diamond shape:

       a
      / \
     b   c
      \ /
       d

The binary for the nodes is a simple "mock application" named "keg"
("canonical example for the grid") which reads input files designated
by arguments, writes them back onto output files, and produces on
STDOUT a summary of where and when it was run. Keg ships with Pegasus
in the bin directory.

This example assumes you have access to a cluster with Globus
installed. A pre-ws gatekeeper and a gridftp server are required. You
also need Globus and Pegasus installed, both on the machine you are
submitting from, and the cluster. Condor needs to be installed and
running on your submit machine. See the Pegasus manual.
This example ships with a "submit" script which will build the site catalog. When you create your own workflows, such a submit script is not needed if you want to maintain the catalogs manually. To test the example, edit the "submit" script and change the cluster config to the setup and install locations for your cluster. Then run: ./submit The workflow should now be submitted, and in the output you should see a submit dir location for the instance. With that directory you can monitor the workflow with: pegasus-status [submitdir] Once the workflow is done, you can make sure it was successful with: pegasus-analyzer -d [submitdir]
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-python/blackdiamond.py 0000755 0001750 0001750 00000005241 11757531137 030264 0 ustar rynge rynge
#!/usr/bin/env python from Pegasus.DAX3 import * import sys import os if len(sys.argv) != 2: print "Usage: %s PEGASUS_HOME" % (sys.argv[0]) sys.exit(1) # Create an abstract dag diamond = ADAG("diamond") # Add input file to the DAX-level replica catalog a = File("f.a") a.addPFN(PFN("file://" + os.getcwd() + "/f.a", "local")) diamond.addFile(a) # Add executables to the DAX-level transformation catalog # In this case the binary is keg, which is shipped with Pegasus, so we use # the remote PEGASUS_HOME to build the path.
e_preprocess = Executable(namespace="diamond", name="preprocess", version="4.0", os="linux", arch="x86_64", installed=True) e_preprocess.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "TestCluster")) diamond.addExecutable(e_preprocess) e_findrange = Executable(namespace="diamond", name="findrange", version="4.0", os="linux", arch="x86_64", installed=True) e_findrange.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "TestCluster")) diamond.addExecutable(e_findrange) e_analyze = Executable(namespace="diamond", name="analyze", version="4.0", os="linux", arch="x86_64", installed=True) e_analyze.addPFN(PFN("file://" + sys.argv[1] + "/bin/pegasus-keg", "TestCluster")) diamond.addExecutable(e_analyze) # Add a preprocess job preprocess = Job(namespace="diamond", name="preprocess", version="4.0") b1 = File("f.b1") b2 = File("f.b2") preprocess.addArguments("-a preprocess","-T60","-i",a,"-o",b1,b2) preprocess.uses(a, link=Link.INPUT) preprocess.uses(b1, link=Link.OUTPUT) preprocess.uses(b2, link=Link.OUTPUT) diamond.addJob(preprocess) # Add left Findrange job frl = Job(namespace="diamond", name="findrange", version="4.0") c1 = File("f.c1") frl.addArguments("-a findrange","-T60","-i",b1,"-o",c1) frl.uses(b1, link=Link.INPUT) frl.uses(c1, link=Link.OUTPUT) diamond.addJob(frl) # Add right Findrange job frr = Job(namespace="diamond", name="findrange", version="4.0") c2 = File("f.c2") frr.addArguments("-a findrange","-T60","-i",b2,"-o",c2) frr.uses(b2, link=Link.INPUT) frr.uses(c2, link=Link.OUTPUT) diamond.addJob(frr) # Add Analyze job analyze = Job(namespace="diamond", name="analyze", version="4.0") d = File("f.d") analyze.addArguments("-a analyze","-T60","-i",c1,c2,"-o",d) analyze.uses(c1, link=Link.INPUT) analyze.uses(c2, link=Link.INPUT) analyze.uses(d, link=Link.OUTPUT, register=True) diamond.addJob(analyze) # Add control-flow dependencies diamond.addDependency(Dependency(parent=preprocess, child=frl)) diamond.addDependency(Dependency(parent=preprocess, child=frr)) diamond.addDependency(Dependency(parent=frl, child=analyze)) diamond.addDependency(Dependency(parent=frr, child=analyze)) # Write the DAX to stdout diamond.writeXML(sys.stdout)
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-python/pegasusrc 0000644 0001750 0001750 00000000176 11757531137 027220 0 ustar rynge rynge
pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-python/submit 0000755 0001750 0001750 00000006666 11757531137 026534 0 ustar rynge rynge
#!/bin/bash set -e ################################################################################## # # Settings # CLUSTER_HOSTNAME="obelix.isi.edu" CLUSTER_SCHEDULER="condor" CLUSTER_WORK_DIR="/nfs/ccg1/90-day-scratch" CLUSTER_PEGASUS_HOME="/ccg/software/pegasus/dev/trunk" CLUSTER_GLOBUS_LOCATION="/ccg/software/globus/default" ################################################################################## TOPDIR=`pwd` # figure out where Pegasus is installed export PEGASUS_HOME=`which pegasus-plan | sed 's:/bin/pegasus-plan::'` if [ "x$PEGASUS_HOME" = "x" ]; then echo "Unable to determine location of your Pegasus install" echo "Please make sure pegasus-plan is in your path" exit 1 fi if [ "x$GLOBUS_LOCATION" = "x" ]; then echo "Please set GLOBUS_LOCATION to the location of your Globus install" exit 1 fi # generate the input file echo "This is sample input to KEG" >f.a # generate the dax export PYTHONPATH=`pegasus-config --python` ./blackdiamond.py $CLUSTER_PEGASUS_HOME >blackdiamond.dax # create the site catalog cat >sites.xml <<EOF <?xml version="1.0" encoding="UTF-8"?> <sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0"> <site handle="local" arch="x86_64" os="LINUX"> <grid type="gt2" contact="localhost/jobmanager-fork" scheduler="Fork" jobtype="auxillary"/> <head-fs> <scratch> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/scratch"/> <internal-mount-point mount-point="$TOPDIR/scratch" free-size="100G" total-size="30G"/> </shared> </scratch> <storage> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/> <internal-mount-point mount-point="$TOPDIR/outputs" free-size="100G" total-size="30G"/> </shared> </storage> </head-fs> <replica-catalog type="LRC" url="rlsn://dummyValue.url.edu" /> <profile namespace="env" key="PEGASUS_HOME" >$PEGASUS_HOME</profile> <profile namespace="env" key="GLOBUS_LOCATION" >$GLOBUS_LOCATION</profile> </site> <site handle="TestCluster" arch="x86_64" os="LINUX"> <grid type="gt2" contact="$CLUSTER_HOSTNAME/jobmanager-fork" scheduler="Fork" jobtype="auxillary"/> <grid type="gt2" contact="$CLUSTER_HOSTNAME/jobmanager-$CLUSTER_SCHEDULER" scheduler="unknown" jobtype="compute"/> <head-fs> <scratch> <shared> <file-server protocol="gsiftp" url="gsiftp://$CLUSTER_HOSTNAME" mount-point="$CLUSTER_WORK_DIR"/> <internal-mount-point mount-point="$CLUSTER_WORK_DIR"/> </shared> </scratch> <storage> </storage> </head-fs> <replica-catalog type="LRC"
url="rlsn://dummyValue.url.edu" /> <profile namespace="env" key="PEGASUS_HOME" >$CLUSTER_PEGASUS_HOME</profile> <profile namespace="env" key="GLOBUS_LOCATION" >$CLUSTER_GLOBUS_LOCATION</profile> </site> </sitecatalog> EOF # plan and submit the workflow pegasus-plan \ --conf pegasusrc \ --sites TestCluster \ --dir work \ --output local \ --dax blackdiamond.dax \ --submit ��������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/local-recursive/��������������������������������������0000755�0001750�0001750�00000000000�11757531667�023532� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/local-recursive/README��������������������������������0000644�0001750�0001750�00000002226�11757531137�024404� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������This directory contains an example of multi-level recursive workflows. The workflows are simplistic, run on the local Condor (assumed), and each workflow runs a 'pegasus-keg' in sleep mode plus its sub-workflow (except for the bottom leaf, of course). To create, plan and run the workflow, please make sure that you have a valid Condor on your system that you can submit jobs into. Simply run the shell script "likethis.sh". As optional argument you can specify the number of levels depth (suggest to stick between 2 and 16). If you do not specify anything, the depth is 3. The script will create a 'work' directory for the workflows, a 'conf' directory with the dynamically generated Pegasus catalogs and configuration file, and an 'output' directory. You can watch the progress of the workflow using 'pegasus-status', i.e. pegasus-status -vvl work/<pathtotoplevelwf> -w Once the workflow is done, you are free to run more, or poke into the directories. It will give you an idea how multilple levels of workflows can be nested. The abstract workflow descriptions are in the top-level directory ($PWD). To clean up everything, run the "clean.sh" shell script. 
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/local-recursive/deepthought.pl 0000644 0001750 0001750 00000004207 11757531137 026402 0 ustar rynge rynge
#!/usr/bin/env perl # # creation script for nested workflows of multiple levels # $Id: deepthought.pl 4543 2011-09-27 18:23:33Z voeckler $ # use 5.006; use strict; use IO::Handle; use Cwd; use File::Spec; use File::Basename; use POSIX (); BEGIN { eval `pegasus-config --perl-hash`; die "Unable to eval pegasus-config: $@" if $@; } use Pegasus::DAX::Factory qw(:all); use Pegasus::Common qw(find_exec); my $depth = shift || die "Usage: $0 depth [sleeptime]"; my $sleep = shift || 600; # 10 minutes for now my %hash = ( namespace => 'deepthought' ); my $keg = find_exec( 'pegasus-keg', $pegasus{bin} ); die "FATAL: Unable to find a \'pegasus-keg\'\n" unless defined $keg; my @os = POSIX::uname(); $os[2] =~ s/^(\d+(\.\d+(\.\d+)?)?).*/$1/; $os[4] =~ s/i.86/x86/; sub add_keg_job($) { my $adag = shift; my $app = newExecutable( %hash , name => 'sleep' , installed => 'true' , arch => $os[4] , os => lc($^O) ); $app->addProfile( 'dagman', 'RETRY', '3' ); $app->addProfile( 'dagman', 'POST.SCOPE', 'all' ); $app->addPFN( newPFN( url => "file://$keg", site => 'local' ) ); $adag->addExecutable($app); my $job = newJob( %hash, name => 'sleep' ); $job->addArgument( '-t', $sleep ); $adag->addJob($job); } # # level=1: always there # my $adag = newADAG( name => 'level-1' ); add_keg_job($adag); open( OUT, ">level-1.dax" ) || die "open level-1.dax: $!\n"; $adag->toXML( \*OUT, '', undef ); close OUT; my ($dax); for ( my $level=2; $level <= $depth; ++$level ) { $adag = newADAG( name => "level-$level" ); my $fn = 'level-' . ($level-1) . '.dax'; $dax = newDAX( %hash, file => $fn ); $dax->addArgument( '--sites', 'local', '-vvv', '--nocleanup', '--force', '--output', 'local' ); $adag->addJob($dax); my $file = newFile( name => $fn ); $file->addPFN( newPFN( url => 'file://' .
Cwd::abs_path($fn) , site => 'local' ) ); $adag->addFile($file); add_keg_job($adag); open( OUT, ">level-$level.dax" ) || die "open $fn: level-$level.dax: $!\n"; $adag->toXML( \*OUT, '', undef ); close OUT; }
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/local-recursive/clean.sh 0000755 0001750 0001750 00000000214 11757531137 025140 0 ustar rynge rynge
#!/bin/sh # # clean up everything for dist # $Id: clean.sh 4245 2011-07-29 00:27:15Z voeckler $ # exec rm -rf work output conf level-*.dax
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/local-recursive/likethis.sh 0000755 0001750 0001750 00000002626 11757531137 025703 0 ustar rynge rynge
#!/bin/sh # # submitter for nested workflows # $Id: likethis.sh 4245 2011-07-29 00:27:15Z voeckler $ # if [ "X$1" = 'X' ]; then depth=3 else depth=$(( $1 + 0 )) fi # # OVERWRITE conf CONTENTS # test -d conf && rm -rf conf mkdir -p conf work output date +"# file-based replica catalog: %FT%T%z" > conf/rc.data cp /dev/null conf/tc.text pegasus-config --full-local $PWD/output > conf/sites.xml cat <<EOF > conf/properties pegasus.catalog.replica=SimpleFile pegasus.catalog.replica.file=conf/rc.data pegasus.catalog.site=XML3 pegasus.catalog.site.file=conf/sites.xml pegasus.catalog.transformation=Text pegasus.catalog.transformation.file=conf/tc.text pegasus.dir.useTimestamp=false pegasus.dir.storage.deep=false pegasus.monitord.events=false pegasus.monitord.output=file:///tmp/throwaway.tmp pegasus.data.configuration=Condor EOF fstype=`stat -f -L -c %T $PWD` if [ "X$fstype" = 'Xnfs' ]; then echo "pegasus.condor.logs.symlink=true" >> conf/properties else echo "pegasus.condor.logs.symlink=false" >> conf/properties fi # # create workflow of workflows # perl deepthought.pl $depth || exit 42 # # plan and run # base=`date +"%Y%m%dT%H%M"` test -d work/$base && rm -rf work/$base dax="level-${depth}.dax" pegasus-plan \ --conf $PWD/conf/properties \ -vvv \ --dir work \ --relative-submit-dir $base \ --sites local \ --output local \ --dax $dax \ --nocleanup \ --submit
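Note that deepthought.pl can also be run standalone to just generate the nested DAX files, without planning or submitting anything. A sketch, assuming the Pegasus perl modules resolve via pegasus-config as in the script above; the optional second argument overrides the default 600-second sleep:

  perl deepthought.pl 3 60   # writes level-1.dax, level-2.dax, level-3.dax;
                             # each level-N embeds level-(N-1).dax as a DAX job next to a keg sleep job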
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-nonsharedfs/ 0000755 0001750 0001750 00000000000 11757531667 026635 5 ustar rynge rynge
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-nonsharedfs/BlackDiamondDAX.java 0000644 0001750 0001750 00000010357 11757531137 032343 0 ustar rynge rynge
/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import edu.isi.pegasus.planner.dax.*; public class BlackDiamondDAX { /** * Create an example DIAMOND DAX * @param args */ public static void main(String[] args) { if (args.length != 3) { System.out.println("Usage: java BlackDiamondDAX <hostname> <pegasus_bin_dir> <filename.dax>"); System.exit(1); } try { Diamond(args[0], args[1]).writeToFile(args[2]); } catch (Exception e) { e.printStackTrace(); } } private static ADAG Diamond(String hostname, String pegasus_bin_dir) throws Exception { java.io.File cwdFile = new java.io.File ("."); String cwd = cwdFile.getCanonicalPath(); ADAG dax = new ADAG("blackdiamond"); File fa = new File("f.a"); fa.addPhysicalFile("gsiftp://" + hostname + cwd + "/f.a", "local"); dax.addFile(fa); File fb1 = new File("f.b1"); File fb2 = new File("f.b2"); File fc1 = new File("f.c1"); File fc2 = new File("f.c2"); File fd = new File("f.d"); fd.setRegister(true); Executable preprocess = new Executable("pegasus", "preprocess", "4.0"); preprocess.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); preprocess.setInstalled(false); preprocess.addPhysicalFile("gsiftp://" + hostname + pegasus_bin_dir + "/pegasus-keg", "local"); Executable findrange = new Executable("pegasus", "findrange", "4.0"); findrange.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); findrange.setInstalled(false); findrange.addPhysicalFile("gsiftp://" + hostname + pegasus_bin_dir + "/pegasus-keg", "local"); Executable analyze = new Executable("pegasus", "analyze", "4.0"); analyze.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); analyze.setInstalled(false); analyze.addPhysicalFile("gsiftp://" + hostname + pegasus_bin_dir + "/pegasus-keg", "local"); dax.addExecutable(preprocess).addExecutable(findrange).addExecutable(analyze); // Add a preprocess job Job j1 = new Job("j1", "pegasus",
"preprocess", "4.0"); j1.addArgument("-a preprocess -T 60 -i ").addArgument(fa); j1.addArgument("-o ").addArgument(fb1); j1.addArgument(" ").addArgument(fb2); j1.uses(fa, File.LINK.INPUT); j1.uses(fb1, File.LINK.OUTPUT); j1.uses(fb2, File.LINK.OUTPUT); dax.addJob(j1); // Add left Findrange job Job j2 = new Job("j2", "pegasus", "findrange", "4.0"); j2.addArgument("-a findrange -T 60 -i ").addArgument(fb1); j2.addArgument("-o ").addArgument(fc1); j2.uses(fb1, File.LINK.INPUT); j2.uses(fc1, File.LINK.OUTPUT); dax.addJob(j2); // Add right Findrange job Job j3 = new Job("j3", "pegasus", "findrange", "4.0"); j3.addArgument("-a findrange -T 60 -i ").addArgument(fb2); j3.addArgument("-o ").addArgument(fc2); j3.uses(fb2, File.LINK.INPUT); j3.uses(fc2, File.LINK.OUTPUT); dax.addJob(j3); // Add analyze job Job j4 = new Job("j4", "pegasus", "analyze", "4.0"); j4.addArgument("-a analyze -T 60 -i ").addArgument(fc1); j4.addArgument(" ").addArgument(fc2); j4.addArgument("-o ").addArgument(fd); j4.uses(fc1, File.LINK.INPUT); j4.uses(fc2, File.LINK.INPUT); j4.uses(fd, File.LINK.OUTPUT); dax.addJob(j4); dax.addDependency("j1", "j2"); dax.addDependency("j1", "j3"); dax.addDependency("j2", "j4"); dax.addDependency("j3", "j4"); return dax; } } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-nonsharedfs/pegasusrc�������������0000644�0001750�0001750�00000000312�11757531137�030540� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink=false pegasus.data.configuration = nonsharedfs ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/condor-blackdiamond-nonsharedfs/submit����������������0000755�0001750�0001750�00000003575�11757531137�030070� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/bash set -e TOPDIR=`pwd` # pegasus bin directory is needed to find keg BIN_DIR=`pegasus-config --bin` # generate the input file echo "This is sample input to KEG" >f.a # hostname for gridftp server HOSTNAME=`hostname -f` # build the dax generator CLASSPATH=`pegasus-config --classpath` export CLASSPATH=".:$CLASSPATH" javac BlackDiamondDAX.java # generate the dax java BlackDiamondDAX $HOSTNAME $BIN_DIR blackdiamond.dax # create the site catalog cat >sites.xml <<EOF <?xml version="1.0" 
encoding="UTF-8"?> <sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0"> <site handle="local" arch="x86" os="LINUX"> <head-fs> <scratch> <shared> <file-server protocol="gsiftp" url="gsiftp://$HOSTNAME" mount-point="$TOPDIR/work"/> <internal-mount-point mount-point="$TOPDIR/work"/> </shared> </scratch> <storage> <shared> <file-server protocol="gsiftp" url="gsiftp://$HOSTNAME" mount-point="$TOPDIR/outputs"/> <internal-mount-point mount-point="$TOPDIR/outputs"/> </shared> </storage> </head-fs> </site> <site handle="condorpool" arch="x86" os="LINUX"> <head-fs> <scratch /> <storage /> </head-fs> <profile namespace="pegasus" key="style" >condor</profile> <profile namespace="condor" key="universe" >vanilla</profile> </site> </sitecatalog> EOF # plan and submit the workflow pegasus-plan \ --conf pegasusrc \ --sites condorpool \ --staging-site local \ --dir work \ --output local \ --dax blackdiamond.dax \ --submit �����������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/glideinwms-montage/�����������������������������������0000755�0001750�0001750�00000000000�11757531667�024225� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/glideinwms-montage/pegasusrc��������������������������0000644�0001750�0001750�00000000530�11757531137�026132� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������ pegasus.catalog.replica=SimpleFile pegasus.catalog.replica.file=rc pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.catalog.transformation=Text pegasus.catalog.transformation.file=tc pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.condor.logs.symlink=false pegasus.data.configuration = condorio ������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/glideinwms-montage/submit�����������������������������0000755�0001750�0001750�00000006147�11757531137�025456� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/bash set -e set -x TOP_DIR=`pwd` DEGREES=1.0 MONTAGE_HOME=/ccg/software/montage/Montage_v3.3_patched_2 # unique directory for this run RUN_ID=`/bin/date +'%F_%H%M%S'` RUN_DIR=$TOP_DIR/work/$RUN_ID mkdir -p $RUN_DIR echo echo "Work directory: $RUN_DIR" echo cd $RUN_DIR cp 
$TOP_DIR/pegasusrc . # create the transformation catalog (tc) echo echo "Creating the transformation catalog..." for BINARY in `(cd $MONTAGE_HOME/bin && ls)`; do name=$BINARY:3.3 if [ "${BINARY}" = "mFitplane" ] || [ "$BINARY" = "mDiff" ]; then name=$BINARY fi cat >>tc <<EOF tr $name { site local { pfn "$MONTAGE_HOME/bin/$BINARY" arch "x86_64" os "linux" type "STAGEABLE" profile pegasus "clusters.size" "20" profile env "MONTAGE_BIN" "." } } EOF done echo echo "Creating the site catalog..." cat >sites.xml <<EOF <?xml version="1.0" encoding="UTF-8"?> <sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0"> <site handle="local" arch="x86_64" os="LINUX"> <grid type="gt2" contact="localhost/jobmanager-fork" scheduler="Fork" jobtype="auxillary"/> <head-fs> <scratch> <shared> <file-server protocol="file" url="file://" mount-point="$RUN_DIR/scratch"/> <internal-mount-point mount-point="$RUN_DIR/scratch"/> </shared> </scratch> <storage> <shared> <file-server protocol="file" url="file://" mount-point="$RUN_DIR/outputs"/> </shared> </storage> </head-fs> <replica-catalog type="LRC" url="rlsn://dummyValue.url.edu" /> <profile namespace="env" key="MONTAGE_HOME" >$MONTAGE_HOME</profile> <profile namespace="env" key="PEGASUS_HOME" >/usr</profile> </site> <site handle="glideinwms" arch="x86_64" os="LINUX"> <head-fs> <scratch /> <storage /> </head-fs> <replica-catalog type="LRC" url="rlsn://dummyValue.url.edu" /> <profile namespace="pegasus" key="style">condor</profile> <profile namespace="condor" key="universe">vanilla</profile> <profile namespace="condor" key="requirements" >(Target.Arch == "X86_64")</profile> </site> </sitecatalog> EOF echo echo "Running mDAG (finding input images, generating DAX, ...)..." mDAG 2mass j M17 $DEGREES $DEGREES 0.0002777778 . "file://$RUN_DIR" "gsiftp://$RUN_DIR/inputs" echo echo "Adding input images to the replica catalog..." echo " " `cat cache.list | wc -l` "images found" cat cache.list | grep -v ".fits " | sed 's/ipac_cluster/local/' >rc cat url.list | sed 's/ http:.*ref=/ http:\/\/obelix.isi.edu\/irsa-cache/' >>rc echo echo "Planning and submitting the workflow..." 
pegasus-plan \ --conf pegasusrc \ --sites glideinwms \ --dir work \ --output local \ --nocleanup \ --dax dag.xml \ --cluster horizontal \ --submit
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/notifications/ 0000755 0001750 0001750 00000000000 11757531667 023304 5 ustar rynge rynge
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/notifications/blackdiamond.py 0000755 0001750 0001750 00000006222 11757531137 026263 0 ustar rynge rynge
#!/usr/bin/env python from Pegasus.DAX3 import * import sys import os if len(sys.argv) != 2: print "Usage: %s PEGASUS_BIN" % (sys.argv[0]) sys.exit(1) # Create an abstract dag diamond = ADAG("diamond") # dax level notifications diamond.invoke('all', os.getcwd() + "/my-notify.sh") # Add input file to the DAX-level replica catalog a = File("f.a") a.addPFN(PFN("file://" + os.getcwd() + "/f.a", "local")) diamond.addFile(a) # Add executables to the DAX-level transformation catalog # In this case the binary is keg, which is shipped with Pegasus, so we use # the local Pegasus bin directory to build the path.
e_preprocess = Executable(namespace="diamond", name="preprocess", version="4.0", os="linux", arch="x86_64", installed=True) e_preprocess.addPFN(PFN("file://" + sys.argv[1] + "/pegasus-keg", "local")) diamond.addExecutable(e_preprocess) e_findrange = Executable(namespace="diamond", name="findrange", version="4.0", os="linux", arch="x86_64", installed=True) e_findrange.addPFN(PFN("file://" + sys.argv[1] + "/pegasus-keg", "local")) diamond.addExecutable(e_findrange) e_analyze = Executable(namespace="diamond", name="analyze", version="4.0", os="linux", arch="x86_64", installed=True) e_analyze.addPFN(PFN("file://" + sys.argv[1] + "/nonexistant", "local")) diamond.addExecutable(e_analyze) # Add a preprocess job preprocess = Job(namespace="diamond", name="preprocess", version="4.0") b1 = File("f.b1") b2 = File("f.b2") preprocess.addArguments("-a preprocess","-T60","-i",a,"-o",b1,b2) preprocess.uses(a, link=Link.INPUT) preprocess.uses(b1, link=Link.OUTPUT) preprocess.uses(b2, link=Link.OUTPUT) # job level notifications - in this case for all events preprocess.invoke('all', os.getcwd() + "/my-notify.sh") diamond.addJob(preprocess) # Add left Findrange job frl = Job(namespace="diamond", name="findrange", version="4.0") c1 = File("f.c1") frl.addArguments("-a findrange","-T60","-i",b1,"-o",c1) frl.uses(b1, link=Link.INPUT) frl.uses(c1, link=Link.OUTPUT) # job level notifications - in this case for at_end events frl.invoke('at_end', os.getcwd() + "/my-notify.sh") diamond.addJob(frl) # Add right Findrange job frr = Job(namespace="diamond", name="findrange", version="4.0") c2 = File("f.c2") frr.addArguments("-a findrange","-T60","-i",b2,"-o",c2) frr.uses(b2, link=Link.INPUT) frr.uses(c2, link=Link.OUTPUT) # job level notifications - in this case for at_end events frr.invoke('at_end', os.getcwd() + "/my-notify.sh") diamond.addJob(frr) # Add Analyze job analyze = Job(namespace="diamond", name="analyze", version="4.0") d = File("f.d") analyze.addArguments("-a analyze","-T60","-i",c1,c2,"-o",d) analyze.uses(c1, link=Link.INPUT) analyze.uses(c2, link=Link.INPUT) analyze.uses(d, link=Link.OUTPUT, register=True) # job level notifications - in this case for at_end events analyze.invoke('at_end', os.getcwd() + "/my-notify.sh") diamond.addJob(analyze) # Add control-flow dependencies diamond.addDependency(Dependency(parent=preprocess, child=frl)) diamond.addDependency(Dependency(parent=preprocess, child=frr)) diamond.addDependency(Dependency(parent=frl, child=analyze)) diamond.addDependency(Dependency(parent=frr, child=analyze)) # Write the DAX to stdout diamond.writeXML(sys.stdout)
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/notifications/README.txt 0000644 0001750 0001750 00000000421 11757531137 024770 0 ustar rynge rynge
Note that this workflow is set up to fail!
This is to show the on_error notifications. This example uses a wrapper script around the notification tools shipped with Pegasus. By using a wrapper you can add extra logic for when and how to deliver the notifications.
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/notifications/my-notify.sh 0000755 0001750 0001750 00000001212 11757531137 025562 0 ustar rynge rynge
#!/bin/bash # Pegasus ships with a couple of basic notification tools. Below # we show how to notify via email and gtalk. # all notifications will be sent to email # change $USER to your full email address $PEGASUS_SHARE_DIR/notification/email -t $USER # this sends notifications about failed jobs to gtalk. # note that you can also set which events to trigger on in your DAX. # set jabberid to your gmail address, and put in your # password # uncomment to enable #if [ "x$PEGASUS_STATUS" != "x" -a "$PEGASUS_STATUS" != "0" ]; then # $PEGASUS_SHARE_DIR/notification/jabber --jabberid FIXME@gmail.com --password FIXME --host talk.google.com #fi
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/notifications/pegasusrc 0000644 0001750 0001750 00000000175 11757531137 025216 0 ustar rynge rynge
pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/notifications/submit 0000755 0001750 0001750 00000002731 11757531137 024530 0 ustar rynge rynge
#!/bin/bash set -e
TOPDIR=`pwd` # pegasus bin directory is needed to find keg BIN_DIR=`pegasus-config --bin` # generate the input file echo "This is sample input to KEG" >f.a # generate the dax export PYTHONPATH=`pegasus-config --python` ./blackdiamond.py $BIN_DIR >blackdiamond.dax # create the site catalog cat >sites.xml <<EOF <?xml version="1.0" encoding="UTF-8"?> <sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0"> <site handle="local" arch="x86_64" os="LINUX"> <head-fs> <scratch> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/work"/> <internal-mount-point mount-point="$TOPDIR/work"/> </shared> </scratch> <storage> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/> <internal-mount-point mount-point="$TOPDIR/outputs"/> </shared> </storage> </head-fs> <profile namespace="env" key="PEGASUS_BIN" >$BIN_DIR</profile> </site> </sitecatalog> EOF # plan and submit the workflow pegasus-plan \ --conf pegasusrc \ --sites local \ --dir work \ --output local \ --dax blackdiamond.dax \ --submit
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/recursive/ 0000755 0001750 0001750 00000000000 11757531667 022442 5 ustar rynge rynge
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/recursive/recursive.dax 0000644 0001750 0001750 00000003160 11757531137 025137 0 ustar rynge rynge
<?xml version="1.0" encoding="UTF-8"?> <!-- generated: 2007-10-25T17:11:15-07:00 --> <!-- generated by: vahi [??]
--> <adag xmlns="http://pegasus.isi.edu/schema/DAX" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/DAX http://pegasus.isi.edu/schema/dax-2.1.xsd" version="2.1" count="1" index="0" name="daxfile" jobCount="4" fileCount="0" childCount="3"> <!-- part 1: list of all referenced files (may be empty) --> <!-- part 2: definition of all jobs (at least one) --> <job id="ID0000001" namespace="pegasus" name="pegasus-plan" version="2.0"> <profile namespace="pegasus" key="type">recursive</profile> <argument>-Dpegasus.user.properties=/lfs1/work/conf/properties --dax /lfs1/work/dax1 -s sdsc -o local --nocleanup --force --rescue 1 --cluster horizontal -vvvvv --dir ./dag_1 </argument> </job> <job id="ID0000002" namespace="pegasus" name="pegasus-plan" version="2.0"> <profile namespace="pegasus" key="type">recursive</profile> <argument>-Dpegasus.user.properties=/lfs1/work/conf/properties --dax /lfs1/work/dax2 -s ncsa -o local --nocleanup --force --rescue 1 --cluster horizontal -vvvvv --dir ./dag_2 </argument> </job> <job id="ID0000003" namespace="pegasus" name="pegasus-plan" version="2.0"> <profile namespace="pegasus" key="type">recursive</profile> <argument>-Dpegasus.user.properties=/lfs1/work/conf/properties --dax /lfs1/work/dax3 -s tacc -o local --nocleanup --force --rescue 1 --cluster horizontal -vvvvv --dir ./dag_3 </argument> </job> <!-- part 3: list of control-flow dependencies (may be empty) --> </adag>
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/galactic-plane/ 0000755 0001750 0001750 00000000000 11757531667 023277 5 ustar rynge rynge
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/galactic-plane/example.config 0000644 0001750 0001750 00000001626 11757531137 026116 0 ustar rynge rynge
[main] mode: full notification_email: rynge@isi.edu [tiles] survey: 2mass band: j min_lon: -5.0 max_lon: 5.0 min_lat: -5.0 max_lat: 5.0 size: 5.0 overlap: 1.0 [local] work_dir: /scratch/rynge/galactic-plane/clemson montage_location: /home/rynge/software/Montage_v3.3_mats [cluster] name: Clemson gatekeeper: osg-gw.clemson.edu storage_proto: gsiftp storage_url: gsiftp://osg-gw.clemson.edu storage_mount: /common1/osg/data/engage/tmp/rynge scheduler: condor work_dir: /common1/osg/data/engage/tmp/rynge pegasus_home: /common1/osg/app/engage/rynge/pegasus/3.1.0cvs globus_location: /common1/osg/grid/globus montage_location:
/common1/osg/app/engage/rynge/montage/3.3_mats [output] name: CCG gatekeeper: obelix.isi.edu gridftp_server: obelix.isi.edu output_dir: /nfs/ccg3/projects/ipac/galactic-plane/outputs
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/galactic-plane/local-tile-setup 0000755 0001750 0001750 00000000265 11757531137 026403 0 ustar rynge rynge
#!/bin/bash set -e TILE_ID=$1 . $TILE_ID.params START_DIR=`pwd` echo "Job started in $START_DIR" mkdir -p $TILE_WORK_DIR cd $TILE_WORK_DIR tar xzf $START_DIR/$TILE_ID.tar.gz
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/galactic-plane/remote-extra-cleanup 0000755 0001750 0001750 00000000160 11757531137 027253 0 ustar rynge rynge
#!/bin/bash set -e TILE_ID=$1 . $TILE_ID.params START_DIR=`pwd` rm -rf tile-setup/$TILE_ID rm -rf $TILE_ID
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/galactic-plane/remote-tile-setup 0000755 0001750 0001750 00000002113 11757531137 026576 0 ustar rynge rynge
#!/bin/bash set -e MODE=$1 TILE_ID=$2 . $TILE_ID.params WORK_DIR=`pwd`/tile-setup/$TILE_ID rm -rf $WORK_DIR mkdir -p $WORK_DIR cd $WORK_DIR # generate dag for the tile $MONTAGE_HOME/bin/mDAGGalacticPlane $SURVEY $BAND $CENTER_LON $CENTER_LAT $TILE_SIZE $TILE_SIZE 0.0002777778 . "gsiftp://$WF_MANAGER_HOST$TILE_WORK_DIR" "gsiftp://$WF_MANAGER_HOST$TILE_WORK_DIR/inputs" echo echo "Number of images for this tile: "`cat images.tbl | grep http | wc -l` echo # add the inputs to the rc echo cd $WORK_DIR cat cache.list | grep -v ".fits " >rc.data cat url.list | sed 's/ http:.*ref=/ http:\/\/obelix.isi.edu\/irsa-cache/' >>rc.data if [ "$MODE" = "prefetch" ]; then echo "Prefetching data..."
cat rc.data | grep obelix | sed 's/.*http:/http:/' | sed 's/fits\.gz.*/fits.gz/' >prefetch.list cat prefetch.list | while read URL; do echo " ... $URL" wget -q -O /dev/null $URL done fi # prepare tarball that we can give to the local setup script tar czf ../../$TILE_ID.tar.gz *.hdr *.list *.data *.tbl *.xml cd ../../ echo "Tarball created." pwd ls -l $TILE_ID.tar.gz
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/galactic-plane/galactic-plane 0000755 0001750 0001750 00000046157 11757531137 026072 0 ustar rynge rynge
#!/usr/bin/env python """ Creates an uber workflow over Montage to generate tiles for the galactic plane Usage: galactic-plane [options] """ ## # Copyright 2007-2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
## import os import re import sys import errno import logging import optparse import tempfile import subprocess import math import time import socket import string import ConfigParser # set PEGASUS_HOME - transition from Pegasus 2.4 pegasus_home = None paths = string.split(os.environ["PATH"], ":") for path in paths: if os.path.isfile(os.path.join(path, "pegasus-plan")): pegasus_home = os.path.normpath(os.path.join(path, "..")) os.environ["PEGASUS_HOME"] = pegasus_home break if pegasus_home == None: raise RuntimeError("pegasus-plan not found in the PATH") sys.path.append(os.getenv("PEGASUS_HOME") + "/lib/python") from Pegasus.DAX3 import * __author__ = "Mats Rynge <rynge@isi.edu>" # --- settings ------------------------------------------------------------------------ notification_email = None monitord_output = None config = ConfigParser.ConfigParser() config.read(sys.argv[1]) mode = config.get('main', 'mode') if config.has_option('main', 'notification_email'): notification_email = config.get('main', 'notification_email') if config.has_option('main', 'monitord_output'): monitord_output = config.get('main', 'monitord_output') survey = config.get('tiles', 'survey') band = config.get('tiles', 'band') min_lon = config.getfloat('tiles', 'min_lon') max_lon = config.getfloat('tiles', 'max_lon') min_lat = config.getfloat('tiles', 'min_lat') max_lat = config.getfloat('tiles', 'max_lat') tile_size = config.getfloat('tiles', 'size') tile_overlap = config.getfloat('tiles', 'overlap') local_work_dir = config.get('local', 'work_dir') local_montage_location = config.get('local', 'montage_location') cluster_name = config.get('cluster', 'name') cluster_gatekeeper = config.get('cluster', 'gatekeeper') cluster_storage_proto = config.get('cluster', 'storage_proto') cluster_storage_url = config.get('cluster', 'storage_url') cluster_storage_mount = config.get('cluster', 'storage_mount') cluster_scheduler = config.get('cluster', 'scheduler') cluster_work_dir = config.get('cluster', 'work_dir') cluster_pegasus_home = config.get('cluster', 'pegasus_home') cluster_globus_location = config.get('cluster', 'globus_location') cluster_montage_location = config.get('cluster', 'montage_location') output_name = config.get('output', 'name') output_gatekeeper = config.get('output', 'gatekeeper') output_gridftp_server = config.get('output', 'gridftp_server') output_dir = config.get('output', 'output_dir') # --- classes ------------------------------------------------------------------------- class Tile: center_lon = 0.0 center_lat = 0.0 size = 1.0 def __init__(self, center_lon, center_lat, size): self.center_lon = center_lon self.center_lat = center_lat self.size = size # --- global variables ---------------------------------------------------------------- local_galacticplane_location = os.path.dirname(os.path.realpath( __file__ )) local_hostname = socket.getfqdn() run_id = "" work_dir = "" gp_files = [] gp_jobs = [] gp_relations = [] # --- functions ----------------------------------------------------------------------- def myexec(cmd_line): sys.stdout.flush() p = subprocess.Popen(cmd_line + " 2>&1", shell=True) stdoutdata, stderrdata = p.communicate() r = p.returncode if r != 0: raise RuntimeError("Command '%s' failed with error code %s" % (cmd_line, r)) def create_work_dir(): global run_id global work_dir lt = time.localtime(time.time()) run_id = "galactic-plane-%04d%02d%02d-%02d%02d%02d" % (lt[0], lt[1], lt[2], lt[3], lt[4], lt[5]) work_dir = "%s/%s" % (local_work_dir, run_id) print "Work dir is: " + work_dir 
os.makedirs(work_dir) def add_tile(mode, uberdax, tile_id, lon, lat): tile_work_dir = "%s/tiles/%s" % (work_dir, tile_id) # parameters file pf = open("%s/%s.params" % (work_dir, tile_id), 'w') pf.write("export TILE_ID=\"%s\"\n" % (tile_id)) pf.write("export CLUSTER_NAME=\"%s\"\n" % (cluster_name)) pf.write("export WF_MANAGER_HOST=\"%s\"\n" % (local_hostname)) pf.write("export TILE_WORK_DIR=\"%s\"\n" % (tile_work_dir)) pf.write("export SURVEY=\"%s\"\n" % (survey)) pf.write("export BAND=\"%s\"\n" % (band)) pf.write("export CENTER_LON=\"%f\"\n" % (lon)) pf.write("export CENTER_LAT=\"%f\"\n" % (lat)) pf.write("export TILE_SIZE=\"%f\"\n" % (tile_size)) pf.close() # params input file params = File("%s.params" % (tile_id)) params.addPFN(PFN("gsiftp://%s%s/%s.params" % (local_hostname, work_dir, tile_id), "local")) uberdax.addFile(params) mdagtar = File("%s.tar.gz" % (tile_id)) remote_tile_setup = Job(namespace="gp", name="remote_tile_setup", version="1.0", id="rts-%s"%(tile_id)) remote_tile_setup.addArguments(mode) remote_tile_setup.addArguments(tile_id) remote_tile_setup.addProfile(Profile("dagman", "CATEGORY", "remote_tile_setup")) remote_tile_setup.uses(params, link=Link.INPUT, register=False) remote_tile_setup.uses(mdagtar, link=Link.OUTPUT, register=False, transfer=True) # email notifications if notification_email != None: remote_tile_setup.invoke('on_error', pegasus_home + "/libexec/notification/email -t " + notification_email) uberdax.addJob(remote_tile_setup) if mode == "prefetch": return local_tile_setup = Job(namespace="gp", name="local_tile_setup", version="1.0", id="lts-%s"%(tile_id)) local_tile_setup.addArguments(tile_id) local_tile_setup.addProfile(Profile("hints", "executionPool", "local")) local_tile_setup.uses(params, link=Link.INPUT, register=False) local_tile_setup.uses(mdagtar, link=Link.INPUT, register=False) uberdax.addJob(local_tile_setup) uberdax.depends(parent=remote_tile_setup, child=local_tile_setup) # dax file subdax_file = File("%s.dax" % (tile_id)) subdax_file.addPFN(PFN("file://%s/dag.xml" % (tile_work_dir), "local")) uberdax.addFile(subdax_file) subwf = DAX("%s.dax" % (tile_id), id="sub-%s" % (tile_id)) subwf.addArguments("-Dpegasus.catalog.replica.file=%s/rc.data" % (tile_work_dir), "-Dpegasus.catalog.site.file=%s/sites.xml" % (work_dir), "-Dpegasus.transfer.links=true", "--cluster", "horizontal", "--sites", cluster_name, "--basename", tile_id, "--force", "--force-replan", "--output", output_name) subwf.addProfile(Profile("dagman", "CATEGORY", "subworkflow")) subwf.uses(subdax_file, link=Link.INPUT, register=False) # email notifications if notification_email != None: subwf.invoke('at_end', pegasus_home + "/libexec/notification/email -t " + notification_email) uberdax.addDAX(subwf) uberdax.depends(parent=local_tile_setup, child=subwf) remote_extra_cleanup = Job(namespace="gp", name="remote_extra_cleanup", version="1.0", id="rec-%s"%(tile_id)) remote_extra_cleanup.addArguments(tile_id) remote_extra_cleanup.uses(params, link=Link.INPUT, register=False) uberdax.addJob(remote_extra_cleanup) uberdax.depends(parent=subwf, child=remote_extra_cleanup) def generate_pegasus_rc(mode): rc = open(work_dir + "/pegasusrc", "w") rc.write("pegasus.catalog.replica=SimpleFile\n") rc.write("pegasus.catalog.replica.file=%s/rc.data\n" % (work_dir)) rc.write("pegasus.catalog.site=XML3\n") rc.write("pegasus.catalog.site.file=%s/sites.xml\n" % (work_dir)) rc.write("pegasus.catalog.transformation=File\n") rc.write("pegasus.catalog.transformation.file=%s/tc.data\n" % (work_dir)) 
rc.write("pegasus.clusterer.job.aggregator.seqexec.firstjobfail=true\n") rc.write("pegasus.file.cleanup.scope=deferred\n") rc.write("pegasus.dir.useTimestamp=true\n") rc.write("pegasus.dir.storage.deep=false\n") rc.write("pegasus.condor.logs.symlink=false\n") rc.write("pegasus.stagein.clusters=10\n") rc.write("pegasus.stageout.clusters=1000\n") rc.write("pegasus.transfer.stagein.remote.sites=%s\n" % (cluster_name)) rc.write("condor.periodic_release=2\n") rc.write("condor.periodic_remove=2\n") rc.write("dagman.maxpre=5\n") rc.write("dagman.retry=2\n") if mode == "prefetch": rc.write("dagman.remote_tile_setup.maxjobs=25\n") else: rc.write("dagman.remote_tile_setup.maxjobs=15\n") rc.write("dagman.subworkflow.maxjobs=35\n") if monitord_output != None: rc.write("pegasus.monitord.output=%s\n" % (monitord_output)) rc.close() def generate_sc(): sc = open(work_dir + "/sites.xml", 'w') sc.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n") sc.write("<sitecatalog xmlns=\"http://pegasus.isi.edu/schema/sitecatalog\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd\" version=\"3.0\">\n") sc.write(" <site handle=\"local\" arch=\"x86\" os=\"LINUX\">\n") sc.write(" <grid type=\"gt2\" contact=\"localhost/jobmanager-fork\" scheduler=\"Fork\" jobtype=\"auxillary\"/>\n") sc.write(" <grid type=\"gt2\" contact=\"localhost/jobmanager-condor\" scheduler=\"Condor\" jobtype=\"compute\"/>\n") sc.write(" <head-fs>\n") sc.write(" <scratch>\n") sc.write(" <shared>\n") sc.write(" <file-server protocol=\"gsiftp\" url=\"gsiftp://%s\" mount-point=\"%s/scratch\"/>\n" % (local_hostname, work_dir)) sc.write(" <internal-mount-point mount-point=\"%s/scratch\" free-size=\"100G\" total-size=\"30G\"/>\n" % (work_dir)) sc.write(" </shared>\n") sc.write(" </scratch>\n") sc.write(" <storage>\n") sc.write(" <shared>\n") sc.write(" <file-server protocol=\"gsiftp\" url=\"gsiftp://%s\" mount-point=\"%s/storage\"/>\n" % (local_hostname, work_dir)) sc.write(" <internal-mount-point mount-point=\"%s/storage\" free-size=\"100G\" total-size=\"30G\"/>\n" % (work_dir)) sc.write(" </shared>\n") sc.write(" </storage>\n") sc.write(" </head-fs>\n") sc.write(" <replica-catalog type=\"LRC\" url=\"rlsn://dummyValue.url.edu\" />\n") sc.write(" <profile namespace=\"env\" key=\"PEGASUS_HOME\" >%s</profile>\n" % (os.environ["PEGASUS_HOME"])) sc.write(" <profile namespace=\"env\" key=\"GLOBUS_LOCATION\" >%s</profile>\n" % (os.environ["GLOBUS_LOCATION"])) sc.write(" <profile namespace=\"env\" key=\"PATH\" >%s:%s/bin:%s</profile>\n" %(local_galacticplane_location, local_montage_location, os.environ["PATH"])) sc.write(" </site>\n") sc.write(" <site handle=\"%s\" arch=\"x86\" os=\"LINUX\">\n" %(cluster_name)) sc.write(" <grid type=\"gt2\" contact=\"%s/jobmanager-fork\" scheduler=\"Fork\" jobtype=\"auxillary\"/>\n" % (cluster_gatekeeper)) sc.write(" <grid type=\"gt2\" contact=\"%s/jobmanager-%s\" scheduler=\"unknown\" jobtype=\"compute\"/>\n" % (cluster_gatekeeper, cluster_scheduler)) sc.write(" <head-fs>\n") sc.write(" <scratch>\n") sc.write(" <shared>\n") sc.write(" <file-server protocol=\"%s\" url=\"%s\" mount-point=\"%s\"/>\n" % (cluster_storage_proto, cluster_storage_url, cluster_storage_mount)) sc.write(" <internal-mount-point mount-point=\"%s\"/>\n" % (cluster_work_dir)) sc.write(" </shared>\n") sc.write(" </scratch>\n") sc.write(" <storage>\n") sc.write(" <shared>\n") sc.write(" <file-server protocol=\"%s\" url=\"%s\" mount-point=\"%s\"/>\n" % 
(cluster_storage_proto, cluster_storage_url, cluster_storage_mount)) sc.write(" <internal-mount-point mount-point=\"%s\"/>\n" % (cluster_work_dir)) sc.write(" </shared>\n") sc.write(" </storage>\n") sc.write(" </head-fs>\n") sc.write(" <replica-catalog type=\"LRC\" url=\"rlsn://dummyValue.url.edu\" />\n") sc.write(" <profile namespace=\"pegasus\" key=\"style\">glideinwms</profile>\n") sc.write(" <profile namespace=\"condor\" key=\"+DESIRED_Sites\">\"%s\"</profile>\n" % (cluster_name)) sc.write(" <profile namespace=\"condor\" key=\"x509userproxy\">/tmp/x509up_u1031</profile>\n") sc.write(" <profile namespace=\"env\" key=\"PEGASUS_HOME\" >%s</profile>\n" % (cluster_pegasus_home)) sc.write(" <profile namespace=\"env\" key=\"GLOBUS_LOCATION\" >%s</profile>\n" % (cluster_globus_location)) sc.write(" <profile namespace=\"env\" key=\"MONTAGE_HOME\" >%s</profile>\n" %(cluster_montage_location)) sc.write(" </site>\n") sc.write(" <site handle=\"%s\" arch=\"x86\" os=\"LINUX\">\n" %(output_name)) sc.write(" <grid type=\"gt2\" contact=\"%s/jobmanager-fork\" scheduler=\"Fork\" jobtype=\"auxillary\"/>\n" % (output_gatekeeper)) sc.write(" <grid type=\"gt2\" contact=\"%s/jobmanager-fork\" scheduler=\"unknown\" jobtype=\"compute\"/>\n" % (output_gatekeeper)) sc.write(" <head-fs>\n") sc.write(" <scratch>\n") sc.write(" <shared>\n") sc.write(" <file-server protocol=\"gsiftp\" url=\"gsiftp://%s\" mount-point=\"%s/%s\"/>\n" % (output_gridftp_server, output_dir, run_id)) sc.write(" <internal-mount-point mount-point=\"%s/%s\"/>\n" % (output_dir, run_id)) sc.write(" </shared>\n") sc.write(" </scratch>\n") sc.write(" <storage>\n") sc.write(" <shared>\n") sc.write(" <file-server protocol=\"gsiftp\" url=\"gsiftp://%s\" mount-point=\"%s/%s\"/>\n" % (output_gridftp_server, output_dir, run_id)) sc.write(" <internal-mount-point mount-point=\"%s/%s\"/>\n" % (output_dir, run_id)) sc.write(" </shared>\n") sc.write(" </storage>\n") sc.write(" </head-fs>\n") sc.write(" <replica-catalog type=\"LRC\" url=\"rlsn://dummyValue.url.edu\" />\n") sc.write(" </site>\n") sc.write("</sitecatalog>\n") sc.close() def generate_tc(): # tc needs to be in old format to work with montage tc = open(work_dir + "/tc.data", 'w') tc.write("%s gp::remote_tile_setup:1.0 gsiftp://%s%s/remote-tile-setup STATIC_BINARY INTEL32::LINUX condor::priority=100\n" % (cluster_name, local_hostname, local_galacticplane_location)) tc.write("%s gp::remote_extra_cleanup:1.0 gsiftp://%s%s/remote-extra-cleanup STATIC_BINARY INTEL32::LINUX condor::priority=1000\n" % (cluster_name, local_hostname, local_galacticplane_location)) tc.write("local gp::local_tile_setup:1.0 %s/local-tile-setup INSTALLED INTEL32::LINUX\n" % (local_galacticplane_location)) # for the sub workflows tc.write("%s pegasus::dirmanager %s/bin/pegasus-dirmanager INSTALLED INTEL32::LINUX\n" % (cluster_name, cluster_pegasus_home)) tc.write("%s pegasus::kickstart %s/bin/kickstart INSTALLED INTEL32::LINUX\n" % (cluster_name, cluster_pegasus_home)) tc.write("%s pegasus::seqexec %s/bin/seqexec INSTALLED INTEL32::LINUX\n" % (cluster_name, cluster_pegasus_home)) tc.write("%s pegasus::cleanup %s/bin/pegasus-cleanup INSTALLED INTEL32::LINUX\n" % (cluster_name, cluster_pegasus_home)) tc.write("%s pegasus::pegasus-transfer %s/bin/pegasus-transfer INSTALLED INTEL32::LINUX\n" % (cluster_name, cluster_pegasus_home)) # list all montage binaries for binary in os.listdir(local_montage_location + "/bin/"): cluster = 100 if binary == "mProject": cluster = 50 tc.write("%s %s:3.3 %s/bin/%s INSTALLED INTEL32::LINUX
PEGASUS::clusters.size=%d\n" % (cluster_name, binary, cluster_montage_location, binary, cluster)) tc.close() def main(): create_work_dir() # find the center, and use that as a starting point for our calculations # this is so that the tiles will overshoot equally on each boundary clon = (max_lon + min_lon) / 2.0 clat = (max_lat + min_lat) / 2.0 print "Center of the tiled area is: %f, %f" % (clon, clat) # spacing between tiles spacing = float(tile_size - tile_overlap) print "Spacing between the tiles will be %f" % (spacing) # tiles needed tiles_hori = int(math.ceil((max_lon - min_lon) / spacing)) tiles_vert = int(math.ceil((max_lat - min_lat) / spacing)) print "%d tiles needed horizontally" %(tiles_hori) print "%d tiles needed vertically" %(tiles_vert) print "Total number of tiles: %d" % (tiles_vert * tiles_hori) # uber dax uberdax = ADAG("gp") # start from top left, and move down in rows start_lon = clon - spacing * (tiles_hori / 2.0) + (spacing / 2) start_lat = clat + spacing * (tiles_vert / 2.0) - (spacing / 2) tile_id = 0 for ny in range(0, tiles_vert): for nx in range(0, tiles_hori): lon = start_lon + (nx * spacing) lat = start_lat - (ny * spacing) tile_id = "tile_%+06.0f_%+06.0f" % (lat * 100, lon * 100) tile_id = tile_id.replace("+", "_") print " adding tile " + tile_id add_tile(mode, uberdax, tile_id, lon, lat) generate_pegasus_rc(mode) generate_sc() generate_tc() daxfile = open(work_dir + "/gp.dax", "w") uberdax.writeXML(daxfile) daxfile.close() print "Planning and submitting the uberdax..." os.chdir(work_dir) os.environ["JAVA_HEAPMAX"] = "512" cmd = "pegasus-plan --conf pegasusrc --relative-dir " + run_id + " --sites " + cluster_name + " --dir . --output local --dax gp.dax --nocleanup --submit 2>&1 | tee pegasus-plan.out" myexec(cmd) # --- main ---------------------------------------------------------------------------- main()
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/README.txt
Descriptions of these workflows can be found in the Pegasus documentation under the chapter named "Example Workflows".
Please see: http://pegasus.isi.edu/wms/ �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-rosetta/�����������������������������������������0000755�0001750�0001750�00000000000�11757531667�023037� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-rosetta/design.resfile���������������������������0000644�0001750�0001750�00000001337�11757531137�025657� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#karyopherin flex bb design protocol karyopherin_design_topsail_peptext.cc gen 2 resfile ALLAAxc EX 1 EX 2 USE_INPUT_SC start #fix aa for peptide, fix rotamer for conserved aa at the Cterm of peptide (KRXKLX) 1 A PIKAA D 2 A PIKAA E 3 A PIKAA I 4 A PIKAA M 5 A PIKAA K 6 A PIKAA E 7 A PIKAA I 8 A PIKAA E 9 A PIKAA R 10 A PIKAA E 11 A PIKAA S # 12 A PIKAA K 12 A NATRO # 13 A PIKAA R 13 A NATRO 14 A PIKAA I # 15 A PIKAA K 15 A NATRO # 16 A PIKAA L 16 A NATRO 17 A PIKAA N #fix rotamer for the residues on the protein that is interacting with the conserved residues on the peptide 84 C NATRO 88 C NATRO 97 C NATRO 123 C NATRO 126 C NATRO 130 C NATRO 134 C NATRO 168 C NATRO 207 C NATRO �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-rosetta/repack.resfile���������������������������0000644�0001750�0001750�00000001245�11757531137�025651� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#karyopherin flex bb design protocol karyopherin_design_topsail_peptext.cc gen 2 REPACK resfile NATAA EX 1 EX 2 USE_INPUT_SC start #fix aa for peptide, fix rotamer for conserved aa at the Cterm of peptide (KRXKLX) 1 A PIKAA D 2 A PIKAA E 3 A PIKAA I 4 A PIKAA M 5 A PIKAA K 6 A PIKAA E 7 A PIKAA I 8 A PIKAA E 9 A PIKAA R 10 A PIKAA E 11 A PIKAA S 12 A NATRO 13 A NATRO 14 A PIKAA I 15 A NATRO 16 A NATRO 17 A PIKAA N #fix rotamer for the residues on the protein that is interacting with the conserved residues on the peptide 84 C NATRO 88 C NATRO 97 C NATRO 123 C NATRO 126 C NATRO 130 C NATRO 134 C NATRO 168 C NATRO 207 C NATRO 
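(A short note on the resfile syntax used above -- a hedged summary, not part of the shipped example: the lines before "start" set packer defaults -- ALLAAxc allows design to any amino acid except cysteine, NATAA keeps the native amino acid and only repacks the side chain, EX 1/EX 2 enable extra chi1/chi2 rotamer sampling, and USE_INPUT_SC includes the input side-chain conformation. Lines after "start" override the default per residue, for example:
5 A PIKAA K    # residue 5 on chain A may only be designed to lysine
12 A NATRO     # residue 12 on chain A keeps its native residue and rotamer
PIKAA restricts design to the listed amino acids, while NATRO freezes both identity and rotamer.)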
�����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-rosetta/RosettaDAX.java��������������������������0000644�0001750�0001750�00000013756�11757531137�025664� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.util.ArrayList; import java.util.List; import edu.isi.pegasus.planner.dax.*; public class RosettaDAX { public void constructDAX(String daxfile){ try{ java.io.File cwdFile = new java.io.File ("."); String cwd = cwdFile.getCanonicalPath(); // construct a dax object ADAG dax = new ADAG("rosetta"); // executables and transformations // including this in the dax is a new feature in // 3.0. 
Earlier you had a standalone transformation catalog Executable exe = new Executable("rosetta.exe"); // the executable is not installed on the remote sites, so // pick it up from the local file system exe.setInstalled(false); exe.addPhysicalFile("file://" + cwd + "/rosetta.exe", "local"); // cluster the jobs together to lessen the grid overhead exe.addProfile("pegasus", "clusters.size", "3"); // the dag needs to know about the executable to handle // transferring dax.addExecutable(exe); // all jobs depend on the flatfile databases List<File> inputs = new ArrayList<File>(); recursiveAddToFileCollection(inputs, "minirosetta_database", "Rosetta Database"); dax.addFiles(inputs); // for replica catalog // and some top level files File f1 = new File("design.resfile", File.LINK.INPUT); f1.addPhysicalFile("file://" + cwd + "/design.resfile", "local"); dax.addFile(f1); inputs.add(f1); // dependency for the job File f2 = new File("repack.resfile", File.LINK.INPUT); f2.addPhysicalFile("file://" + cwd + "/repack.resfile", "local"); dax.addFile(f2); inputs.add(f2); // dependency for the job java.io.File pdbDir = new java.io.File("pdbs/"); String pdbs[] = pdbDir.list(); for (int i = 0; i < pdbs.length; i++) { java.io.File pdb = new java.io.File("pdbs/" + pdbs[i]); if (pdb.isFile()) { Job j = createJobFromPDB(dax, pdb, inputs); dax.addJob(j); } } //write DAX to file dax.writeToFile(daxfile); } catch (Exception e) { e.printStackTrace(); } } /* * This adds all the files in a directory to a set which can be used for job * data dependencies */ private void recursiveAddToFileCollection(List<File> list, String dir, String desc) { try { java.io.File d = new java.io.File(dir); String items[] = d.list(); for (int i = 0; i < items.length; i++) { if (items[i].substring(0,1).equals(".")) { continue; } java.io.File f = new java.io.File(dir + "/" + items[i]); if (f.isFile()) { // File found, let's add it to the list File input = new File(dir + "/" + items[i], File.LINK.INPUT); input.addPhysicalFile("file://" + f.getAbsolutePath(), "local"); list.add(input); } else { recursiveAddToFileCollection(list, f.getPath(), desc); } } } catch (Exception e) { e.printStackTrace(); } } private Job createJobFromPDB(ADAG dax, java.io.File pdb, List<File> inputs) { Job job = null; try { String id = pdb.getName(); id = id.replaceAll("\\.pdb", ""); job = new Job(id, "rosetta.exe"); // general rosetta inputs (database, design, ...)
job.uses(inputs, File.LINK.INPUT); // input pdb file File pdbFile = new File(pdb.getName()); pdbFile.addPhysicalFile("file://" + pdb.getAbsolutePath(), "local"); job.uses(pdbFile, File.LINK.INPUT); // the job uses the file dax.addFile(pdbFile); // the dax needs to know about it to handle transfers // outputs File outFile = new File(pdb.getName() + ".score.sc"); job.uses(outFile, File.LINK.OUTPUT); // the job uses the file // add the arguments to the job job.addArgument(" -in:file:s "); job.addArgument(pdbFile); job.addArgument(" -out:prefix " + pdb.getName() + "."); job.addArgument(" -database ./minirosetta_database"); job.addArgument(" -linmem_ig 10"); job.addArgument(" -nstruct 1"); job.addArgument(" -pert_num 2"); job.addArgument(" -inner_num 1"); job.addArgument(" -jd2::ntrials 1"); } catch (Exception e) { e.printStackTrace(); return null; } return job; } /** * Usage : RosettaDAX daxfile * * @param args the arguments passed */ public static void main(String[] args) { RosettaDAX daxgen = new RosettaDAX(); if (args.length == 1) { daxgen.constructDAX(args[0]); } else { System.out.println("Usage: RosettaDAX <outputdaxfile>"); } } } ������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-rosetta/pegasusrc��������������������������������0000644�0001750�0001750�00000000302�11757531137�024741� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������ pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.transfer.worker.package = true pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false pegasus.stagein.clusters=8 ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-rosetta/submit�����������������������������������0000755�0001750�0001750�00000010225�11757531137�024260� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/bash set -e ####################################################################### # # Settings # CLUSTER_NAME="CCG" CLUSTER_HOSTNAME="obelix.isi.edu" CLUSTER_GATEKEEPER_PORT="2119" CLUSTER_GRID_TYPE="gt5" CLUSTER_SCHEDULER="condor" CLUSTER_WORK_DIR="/nfs/ccg1/90-day-scratch" CLUSTER_GLOBUS_LOCATION="/ccg/software/globus/default" ####################################################################### TOPDIR=`pwd` # figure out where Pegasus is installed export PEGASUS_BIN_DIR=`pegasus-config --bin` if [ "x$PEGASUS_BIN_DIR" = "x" ]; then echo "Please make sure pegasus-plan is in your path" exit 1 fi # download rosetta binary - this is to save space in the Pegasus distribution if [ ! 
-e rosetta.exe ]; then wget http://pegasus.isi.edu/wms/example-workflows/rosetta/rosetta.exe chmod 755 rosetta.exe fi # do we have the required minirosetta_database? if [ ! -e minirosetta_database ]; then wget http://pegasus.isi.edu/wms/example-workflows/rosetta/minirosetta_database.tar.gz tar xzf minirosetta_database.tar.gz rm minirosetta_database.tar.gz fi # what about the required pdbs? if [ ! -e pdbs ]; then wget http://pegasus.isi.edu/wms/example-workflows/rosetta/pdbs.tar.gz tar xzf pdbs.tar.gz rm pdbs.tar.gz fi # build the dax generator export CLASSPATH=.:`pegasus-config --classpath` javac RosettaDAX.java # generate the dax java RosettaDAX dax.xml # site catalog cat >sites.xml <<EOF <?xml version="1.0" encoding="UTF-8"?> <sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0"> <site handle="local" arch="x86" os="LINUX"> <grid type="gt2" contact="localhost/jobmanager-fork" scheduler="Fork" jobtype="auxillary"/> <head-fs> <scratch> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/> <internal-mount-point mount-point="$TOPDIR/outputs" free-size="100G" total-size="30G"/> </shared> </scratch> <storage> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/> <internal-mount-point mount-point="$TOPDIR/outputs" free-size="100G" total-size="30G"/> </shared> </storage> </head-fs> <replica-catalog type="LRC" url="rlsn://dummyValue.url.edu" /> <profile namespace="env" key="PEGASUS_BIN_DIR" >$PEGASUS_BIN_DIR</profile> <profile namespace="env" key="GLOBUS_LOCATION" >$GLOBUS_LOCATION</profile> <profile namespace="env" key="LD_LIBRARY_PATH" >$GLOBUS_LOCATION/lib</profile> </site> <site handle="$CLUSTER_NAME" arch="x86" os="LINUX"> <grid type="$CLUSTER_GRID_TYPE" contact="$CLUSTER_HOSTNAME:$CLUSTER_GATEKEEPER_PORT/jobmanager-fork" scheduler="Fork" jobtype="auxillary"/> <grid type="$CLUSTER_GRID_TYPE" contact="$CLUSTER_HOSTNAME:$CLUSTER_GATEKEEPER_PORT/jobmanager-$CLUSTER_SCHEDULER" scheduler="unknown" jobtype="compute"/> <head-fs> <scratch> <shared> <file-server protocol="gsiftp" url="gsiftp://$CLUSTER_HOSTNAME" mount-point="$CLUSTER_WORK_DIR"/> <internal-mount-point mount-point="$CLUSTER_WORK_DIR"/> </shared> </scratch> <storage> <shared> <file-server protocol="gsiftp" url="gsiftp://$CLUSTER_HOSTNAME" mount-point="$CLUSTER_WORK_DIR"/> <internal-mount-point mount-point="$CLUSTER_WORK_DIR"/> </shared> </storage> </head-fs> <replica-catalog type="LRC" url="rlsn://dummyValue.url.edu" /> <profile namespace="env" key="GLOBUS_LOCATION" >$CLUSTER_GLOBUS_LOCATION</profile> </site> </sitecatalog> EOF echo echo echo "Planning and submitting the workflow..." 
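# a note on the pegasus-plan invocation below: --conf points at the pegasusrc
# properties file shipped with the example, --sites picks the execution site
# defined in the generated sites.xml, --cluster horizontal groups jobs on the
# same workflow level (honoring the clusters.size profile set in the DAX),
# --dir names the submit directory, --output names the site that receives the
# final outputs, and --submit hands the planned workflow straight to pegasus-run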
pegasus-plan \ --conf pegasusrc \ --sites $CLUSTER_NAME \ --cluster horizontal \ --dir work \ --output local \ --dax dax.xml \ --submit ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-perl/�������������������������������0000755�0001750�0001750�00000000000�11757531667�024726� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-blackdiamond-perl/blackdiamond.pl����������������0000644�0001750�0001750�00000007222�11757531137�027666� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/usr/bin/env perl # use 5.006; use strict; use IO::Handle; use Cwd; use File::Spec; use File::Basename; use Sys::Hostname; use POSIX (); BEGIN { eval `pegasus-config --perl-hash`; die "Unable to eval pegasus-config: $@" if $@; } use Pegasus::DAX::Factory qw(:all); use constant NS => 'diamond'; my $adag = newADAG( name => NS ); my $job1 = newJob( namespace => NS, name => 'preprocess', version => '2.0' ); my $job2 = newJob( namespace => NS, name => 'findrange', version => '2.0' ); my $job3 = newJob( namespace => NS, name => 'findrange', version => '2.0' ); my $job4 = newJob( namespace => NS, name => 'analyze', version => '2.0' ); # create "f.a" locally my $fn = "f.a"; open( F, ">$fn" ) || die "FATAL: Unable to open $fn: $!\n"; my @now = gmtime(); printf F "%04u-%02u-%02u %02u:%02u:%02uZ\n", $now[5]+1900, $now[4]+1, @now[3,2,1,0]; close F; my $file = newFile( name => 'f.a' ); $file->addPFN( newPFN( url => 'file://' . Cwd::abs_path($fn), site => 'local' ) ); $adag->addFile($file); # follow this path, if we know how to find 'pegasus-keg' my $keg = File::Spec->catfile( $pegasus{bin}, 'pegasus-keg' ); if ( -x $keg ) { my @os = POSIX::uname(); # $os[2] =~ s/^(\d+(\.\d+(\.\d+)?)?).*/$1/; ## create a proper osversion $os[4] =~ s/i.86/x86/; # add Executable instances to DAX-included TC. This will only work, # if we know how to access the keg executable. HOWEVER, for a grid # workflow, these entries are not used, and you need to # [1] install the work tools remotely # [2] create a TC with the proper entries if ( -x $keg ) { for my $j ( $job1, $job2, $job4 ) { my $app = newExecutable( namespace => $j->namespace, name => $j->name, version => $j->version, installed => 'false', arch => $os[4], os => lc($^O) ); $app->addProfile( 'globus', 'maxtime', '2' ); $app->addProfile( 'dagman', 'RETRY', '3' ); $app->addPFN( newPFN( url => "file://$keg", site => 'local' ) ); $adag->addExecutable($app); } } } else { die "Hmmm, where is pegasus-keg? 
I thought it was \"$keg\", giving up for now.\n"; } my %hash = ( link => LINK_OUT, register => 'false', transfer => 'true' ); my $fna = newFilename( name => $file->name, link => LINK_IN ); my $fnb1 = newFilename( name => 'f.b1', %hash ); my $fnb2 = newFilename( name => 'f.b2', %hash ); $job1->addArgument( '-a', $job1->name, '-T60', '-i', $fna, '-o', $fnb1, $fnb2 ); $adag->addJob($job1); my $fnc1 = newFilename( name => 'f.c1', %hash ); $fnb1->link( LINK_IN ); $job2->addArgument( '-a', $job2->name, '-T60', '-i', $fnb1, '-o', $fnc1 ); $adag->addJob($job2); my $fnc2 = newFilename( name => 'f.c2', %hash ); $fnb2->link( LINK_IN ); $job3->addArgument( '-a', $job3->name, '-T60', '-i', $fnb2, '-o', $fnc2 ); $adag->addJob($job3); # a convenience function -- you can specify multiple dependents $adag->addDependency( $job1, $job2, $job3 ); my $fnd = newFilename( name => 'f.d', %hash ); $fnc1->link( LINK_IN ); $fnc2->link( LINK_IN ); $job4->separator(''); # just to show the difference wrt default $job4->addArgument( '-a ', $job4->name, ' -T60 -i ', $fnc1, ' ', $fnc2, ' -o ', $fnd ); $adag->addJob($job4); # this is a convenience function adding parents to a child. # it is clearer than overloading addDependency $adag->addInverse( $job4, $job2, $job3 ); # workflow level notification in case of failure # refer to Pegasus::DAX::Invoke for details my $user = $ENV{USER} || $ENV{LOGNAME} || scalar getpwuid($>); $adag->invoke( INVOKE_ON_ERROR, "/bin/mailx -s 'blackdiamond failed' $user" ); my $xmlns = shift; $adag->toXML( \*STDOUT, '', $xmlns ); ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-montage/�����������������������������������������0000755�0001750�0001750�00000000000�11757531667�023010� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-montage/pegasusrc��������������������������������0000644�0001750�0001750�00000000416�11757531137�024720� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������ pegasus.catalog.replica=SimpleFile pegasus.catalog.replica.file=rc pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.catalog.transformation=Text pegasus.catalog.transformation.file=tc pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false 
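# the three catalog properties above point at flat files that the submit
# script generates at run time: rc maps logical file names to physical URLs,
# sites.xml describes the local and compute sites, and tc maps transformation
# names to the Montage binaries installed on the cluster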
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/grid-montage/submit
#!/bin/bash set -e # note - this requires Montage 3.2beta6 or higher ####################################################################### # # Settings # DEGREES=2.0 LOCAL_PEGASUS_HOME="/ccg/software/pegasus/dev/trunk" LOCAL_GLOBUS_LOCATION="/ccg/software/globus/default" LOCAL_MONTAGE_LOCATION="/ccg/software/montage/Montage_v3.2_beta6_mats" LOCAL_GLOBUS_TCP_PORT_RANGE="40000,50000" CLUSTER_NAME="CCG" CLUSTER_HOSTNAME="obelix.isi.edu" CLUSTER_GATEKEEPER_TYPE="gt5" CLUSTER_GATEKEEPER_PORT="2119" CLUSTER_SCHEDULER="condor" CLUSTER_WORK_DIR="/nfs/ccg1/90-day-scratch" CLUSTER_PEGASUS_HOME="/ccg/software/pegasus/dev/trunk" CLUSTER_GLOBUS_LOCATION="/ccg/software/globus/default" CLUSTER_MONTAGE_LOCATION="/ccg/software/montage/Montage_v3.2_beta6_mats" # this will bundle N jobs together JOB_CLUSTERS_SIZE="25" ####################################################################### TOP_DIR=`pwd` LOCAL_HOSTNAME=`hostname -f` export PATH=$LOCAL_MONTAGE_LOCATION/bin:$PATH # unique directory for this run RUN_ID=`/bin/date +'%F_%H%M%S'` RUN_DIR=`pwd`/work/$RUN_ID echo "Work directory: $RUN_DIR" mkdir -p $RUN_DIR cd $RUN_DIR cp $TOP_DIR/pegasusrc . # create the transformation catalog (tc) echo echo "Creating the transformation catalog..." for BINARY in `(cd $LOCAL_MONTAGE_LOCATION/bin && ls)`; do cat >>tc <<EOF tr $BINARY:3.0 { site $CLUSTER_NAME { pfn "$CLUSTER_MONTAGE_LOCATION/bin/$BINARY" arch "x86" os "linux" type "INSTALLED" profile pegasus "clusters.size" "$JOB_CLUSTERS_SIZE" } } EOF done # site catalog echo echo "Creating the site catalog..."
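# the catalog below defines two sites: "local", the submit host, which runs
# auxiliary jobs and holds the work and output directories, and
# "$CLUSTER_NAME", the compute cluster, reached through its GRAM gatekeeper
# with data staged over GridFTP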
cat >sites.xml <<EOF <?xml version="1.0" encoding="UTF-8"?> <sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0"> <site handle="local" arch="x86" os="LINUX"> <grid type="gt2" contact="localhost/jobmanager-fork" scheduler="Fork" jobtype="auxillary"/> <head-fs> <scratch> <shared> <file-server protocol="file" url="file://" mount-point="$RUN_DIR/work"/> <internal-mount-point mount-point="$RUN_DIR/work"/> </shared> </scratch> <storage> <shared> <file-server protocol="file" url="file://" mount-point="$RUN_DIR/outputs"/> <internal-mount-point mount-point="$RUN_DIR/outputs"/> </shared> </storage> </head-fs> <replica-catalog type="LRC" url="rlsn://dummyValue.url.edu" /> <profile namespace="env" key="PEGASUS_HOME" >$LOCAL_PEGASUS_HOME</profile> <profile namespace="env" key="GLOBUS_LOCATION" >$LOCAL_GLOBUS_LOCATION</profile> <profile namespace="env" key="GLOBUS_TCP_PORT_RANGE" >$LOCAL_GLOBUS_TCP_PORT_RANGE</profile> </site> <site handle="$CLUSTER_NAME" arch="x86" os="LINUX"> <grid type="$CLUSTER_GATEKEEPER_TYPE" contact="$CLUSTER_HOSTNAME:$CLUSTER_GATEKEEPER_PORT/jobmanager-fork" scheduler="Fork" jobtype="auxillary"/> <grid type="$CLUSTER_GATEKEEPER_TYPE" contact="$CLUSTER_HOSTNAME:$CLUSTER_GATEKEEPER_PORT/jobmanager-$CLUSTER_SCHEDULER" scheduler="unknown" jobtype="compute"/> <head-fs> <scratch> <shared> <file-server protocol="gsiftp" url="gsiftp://$CLUSTER_HOSTNAME" mount-point="$CLUSTER_WORK_DIR"/> <internal-mount-point mount-point="$CLUSTER_WORK_DIR"/> </shared> </scratch> <storage> </storage> </head-fs> <replica-catalog type="LRC" url="rlsn://dummyValue.url.edu" /> <profile namespace="env" key="PEGASUS_HOME" >$CLUSTER_PEGASUS_HOME</profile> <profile namespace="env" key="GLOBUS_LOCATION" >$CLUSTER_GLOBUS_LOCATION</profile> <profile namespace="env" key="MONTAGE_HOME" >$CLUSTER_MONTAGE_LOCATION</profile> </site> </sitecatalog> EOF echo echo "Running mDAG (finding input images, generating DAX, ...)..." mDAG 2mass j M17 $DEGREES $DEGREES 0.0002777778 . "gsiftp://$LOCAL_HOSTNAME$RUN_DIR" "gsiftp://$LOCAL_HOSTNAME$RUN_DIR/inputs" echo echo "Adding input images to the replica catalog..." echo " " `cat cache.list | wc -l` "images found" cat cache.list | grep -v ".fits " >rc cat url.list | sed 's/ http:.*ref=/ http:\/\/obelix.isi.edu\/irsa-cache/' >>rc echo echo "Planning and submitting the workflow..." 
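# a note on the replica catalog assembled above: each line in rc maps a
# logical file name to a physical URL plus a site attribute; an illustrative
# (made-up) entry would look like
#   2mass-atlas-990502s-j1400244.fits gsiftp://host.example.edu/data/j1400244.fits pool="ipac"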
pegasus-plan \ --conf pegasusrc \ --sites $CLUSTER_NAME \ --cluster horizontal \ --dir work \ --output local \ --dax dag.xml \ --submit \ 2>&1 | tee pegasus-plan.out ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/local-blackdiamond/�����������������������������������0000755�0001750�0001750�00000000000�11757531667�024133� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/examples/local-blackdiamond/BlackDiamondDAX.java.template������0000644�0001750�0001750�00000011330�11757531137�031443� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ import edu.isi.pegasus.planner.dax.*; import edu.isi.pegasus.planner.dax.Invoke.WHEN; public class BlackDiamondDAX { /** * Create an example DIAMOND DAX * @param args */ public static void main(String[] args) { if (args.length != 3) { System.out.println("Usage: java BlackDiamondDAX <site_handle> <pegasus_location> <filename.dax>"); System.exit(1); } try { Diamond(args[0], args[1]).writeToFile(args[2]); } catch (Exception e) { e.printStackTrace(); } } private static ADAG Diamond(String site_handle, String pegasus_location) throws Exception { java.io.File cwdFile = new java.io.File ("."); String cwd = cwdFile.getCanonicalPath(); ADAG dax = new ADAG("blackdiamond"); dax.addNotification(WHEN.start,"NOTIFYME"); dax.addNotification(WHEN.at_end,"NOTIFYME"); File fa = new File("f.a"); fa.addPhysicalFile("file://" + cwd + "/f.a", "local"); dax.addFile(fa); File fb1 = new File("f.b1"); File fb2 = new File("f.b2"); File fc1 = new File("f.c1"); File fc2 = new File("f.c2"); File fd = new File("f.d"); fd.setRegister(true); Executable preprocess = new Executable("pegasus", "preprocess", "4.0"); preprocess.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); preprocess.setInstalled(true); preprocess.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", site_handle); Executable findrange = new Executable("pegasus", "findrange", "4.0"); findrange.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); findrange.setInstalled(true); findrange.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", site_handle); Executable analyze = new Executable("pegasus", "analyze", "4.0"); analyze.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); analyze.setInstalled(true); analyze.addPhysicalFile("file://" + pegasus_location + "/bin/pegasus-keg", site_handle); dax.addExecutable(preprocess).addExecutable(findrange).addExecutable(analyze); // Add a preprocess job Job j1 = new Job("j1", "pegasus", "preprocess", "4.0"); j1.addArgument("-a preprocess -T 60 -i ").addArgument(fa); j1.addArgument("-o ").addArgument(fb1); j1.addArgument(" ").addArgument(fb2); j1.uses(fa, File.LINK.INPUT); j1.uses(fb1, File.LINK.OUTPUT); j1.uses(fb2, File.LINK.OUTPUT); j1.addNotification(WHEN.start,"NOTIFYME"); j1.addNotification(WHEN.at_end,"NOTIFYME"); dax.addJob(j1); // Add left Findrange job Job j2 = new Job("j2", "pegasus", "findrange", "4.0"); j2.addArgument("-a findrange -T 60 -i ").addArgument(fb1); j2.addArgument("-o ").addArgument(fc1); j2.uses(fb1, File.LINK.INPUT); j2.uses(fc1, File.LINK.OUTPUT); j2.addNotification(WHEN.start,"NOTIFYME"); j2.addNotification(WHEN.at_end,"NOTIFYME"); dax.addJob(j2); // Add right Findrange job Job j3 = new Job("j3", "pegasus", "findrange", "4.0"); j3.addArgument("-a findrange -T 60 -i ").addArgument(fb2); j3.addArgument("-o ").addArgument(fc2); j3.uses(fb2, File.LINK.INPUT); j3.uses(fc2, File.LINK.OUTPUT); j3.addNotification(WHEN.start,"NOTIFYME"); j3.addNotification(WHEN.at_end,"NOTIFYME"); dax.addJob(j3); // Add analyze job Job j4 = new Job("j4", "pegasus", "analyze", "4.0"); j4.addArgument("-a analyze -T 60 -i ").addArgument(fc1); j4.addArgument(" ").addArgument(fc2); j4.addArgument("-o ").addArgument(fd); j4.uses(fc1, File.LINK.INPUT); j4.uses(fc2, File.LINK.INPUT); j4.uses(fd, File.LINK.OUTPUT); j4.addNotification(WHEN.start,"NOTIFYME"); j4.addNotification(WHEN.at_end,"NOTIFYME"); dax.addJob(j4); dax.addDependency("j1", "j2"); dax.addDependency("j1", "j3"); dax.addDependency("j2", "j4"); dax.addDependency("j3", "j4"); return dax; } }
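// Usage sketch: the accompanying submit script first substitutes NOTIFYME
// with a real notification command, then compiles against the Pegasus jars
// from `pegasus-config --classpath`:
//   sed -e "s|NOTIFYME|$NOTIFY|g" BlackDiamondDAX.java.template > BlackDiamondDAX.java
//   javac BlackDiamondDAX.java
//   java BlackDiamondDAX local $PEGASUS_HOME blackdiamond.dax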
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/local-blackdiamond/notifications
# Pegasus ships with a couple of basic notification tools. Below # we show how to notify via email and gtalk. # all notifications will be sent to email # change $USER to your full email address export NOTIFY="$PEGASUS_HOME/libexec/notification/email -t $USER" # this sends notifications about failed jobs to gtalk. # note that you can also set which events to trigger on in your DAX. # set jabberid to your jabber username/address, and put in your # password and jabber host. # uncomment to enable jabber #export NOTIFY="$PEGASUS_HOME/libexec/notification/jabber --jabberid USER@gmail.com --password PASSWORD --host talk.google.com"
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/local-blackdiamond/pegasusrc
pegasus.catalog.site=XML3 pegasus.catalog.site.file=sites.xml pegasus.dir.useTimestamp=true pegasus.dir.storage.deep=false
pegasus-wms_4.0.1+dfsg/share/pegasus/examples/local-blackdiamond/submit
#!/bin/bash set -e TOPDIR=`pwd` # figure out where Pegasus is installed export PEGASUS_HOME=`which pegasus-plan | sed 's:/bin/pegasus-plan::'` if [ "x$PEGASUS_HOME" = "x" ]; then echo "Unable to
determine location of your Pegasus install" echo "Please make sure pegasus-plan is in your path" exit 1 fi # generate the input file echo "This is sample input to KEG" >f.a # build the dax generator export CLASSPATH=.:`pegasus-config --classpath` source notifications #NOTIFY="$PEGASUS_HOME/libexec/notification/email -t $USER" echo $NOTIFY sed -e "s|NOTIFYME|$NOTIFY|g" BlackDiamondDAX.java.template > BlackDiamondDAX.java javac BlackDiamondDAX.java # generate the dax java BlackDiamondDAX local $PEGASUS_HOME blackdiamond.dax # create the site catalog cat >sites.xml <<EOF <?xml version="1.0" encoding="UTF-8"?> <sitecatalog xmlns="http://pegasus.isi.edu/schema/sitecatalog" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://pegasus.isi.edu/schema/sitecatalog http://pegasus.isi.edu/schema/sc-3.0.xsd" version="3.0"> <site handle="local" arch="x86" os="LINUX"> <head-fs> <scratch> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/work"/> <internal-mount-point mount-point="$TOPDIR/work"/> </shared> </scratch> <storage> <shared> <file-server protocol="file" url="file://" mount-point="$TOPDIR/outputs"/> <internal-mount-point mount-point="$TOPDIR/outputs"/> </shared> </storage> </head-fs> <profile namespace="env" key="PEGASUS_HOME" >$PEGASUS_HOME</profile> </site> </sitecatalog> EOF # plan and submit the workflow pegasus-plan \ --conf pegasusrc \ --sites local \ --dir work \ --output local \ --dax blackdiamond.dax \ --submit ��������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/sh/������������������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�017227� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/sh/shell-runner-functions.sh�����������������������������������0000755�0001750�0001750�00000013271�11757531137�024206� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/bash # # Common shell script that is sourced by Pegasus generated shell script, # when the SHELL code generator is used. # $Id: shell-runner-functions.sh 4596 2011-10-25 18:19:33Z vahi $ # check_predefined_variables() { #purpose: checks for variables that need to be predefined. 
# The variables are PEGASUS_SUBMIT_DIR and JOBSTATE_LOG if [ "X${PEGASUS_SUBMIT_DIR}" = "X" ]; then echo "ERROR: Set your PEGASUS_SUBMIT_DIR variable" 1>&2 exit 1 fi if [ "X${JOBSTATE_LOG}" = "X" ]; then echo "ERROR: Set your JOBSTATE_LOG variable" 1>&2 exit 1 fi } create_jobstate_log_entry() { # purpose: creates a jobstate log entry # paramtr: $jobname (IN): the name of the job to execute # $state (IN): the state in which the job is # # # returns: the entry for the jobstate.log file in ENTRY variable #1239666049 create_dir_blackdiamond_0_isi_viz SUBMIT 3758.0 isi_viz - jobstate=$JOBSTATE_LOG jobname=$1 state=$2 iso_date=`date "+%s"` ENTRY="$iso_date $jobname $state - local" echo $ENTRY >> $jobstate } execute_job() { # purpose: executes a job in a subshell # paramtr: $jobname (IN): the name of the job to execute # $dir (IN): the directory in which to execute the job # $exec (IN): the executable to be invoked # $args (IN): the arg string for the executable # $stdin (IN): the stdin for the job. Pass "" if no stdin. # ... (IN): key=value pair for the environment # variable to set for the job # # returns: #sanity check check_predefined_variables if [ $# -lt 5 ] ; then echo "execute_job requires at a minimum 5 arguments" exit 1 fi jobname=$1 dir=$2 exec=$3 args=$4 stdin=$5 shift 5 create_jobstate_log_entry $jobname SUBMIT create_jobstate_log_entry $jobname EXECUTE #execute each job in a sub shell #we dont want environment being clobbered ( cd $dir #go through all the environment variables passed #as arguments and set them in the environment for #the executable to be invoked while [ $1 ]; do env=$1 #echo "Env passed is $env" key=`echo $env | awk -F"=" '{print $1}'`; value=`echo $env | awk -F"=" '{print $2}'`; export $key=$value #echo "key is $key value is $value" shift done; echo "Executing JOB $exec $args" jobout="${PEGASUS_SUBMIT_DIR}/${jobname}.out" joberr="${PEGASUS_SUBMIT_DIR}/${jobname}.err" if [ "X${stdin}" = "X" ]; then #execute the job without setting the stdin $exec $args 1> $jobout 2> $joberr else #execute the job with the stdin connected $exec $args 0<$stdin 1> $jobout 2> $joberr fi ) status=$? echo "JOB $jobname Returned with $status" return $status #exitcode $status } execute_post_script() { # purpose: executes a postscript in a subshell # paramtr: $jobname (IN): the name of the job to execute # $dir (IN): the directory in which to execute the job # $exec (IN): the executable to be invoked # $args (IN): the arg string for the executable # $stdin (IN): the stdin for the job. Pass "" if no stdin. # ...
(IN): key=value pair for the environment # variable to set for the job # # returns: #sanity check check_predefined_variables if [ $# -lt 5 ] ; then echo "execute_post_script requires at a minimum 5 arguments" exit 1 fi jobname=$1 dir=$2 exec=$3 args=$4 stdin=$5 shift 5 create_jobstate_log_entry $jobname POST_SCRIPT_STARTED #execute each job in a sub shell #we dont want environment being clobbered ( cd $dir #go through all the environment variables passed #as arguments and set them in the environment for #the executable to be invoked while [ $1 ]; do env=$1 #echo "Env passed is $env" key=`echo $env | awk -F"=" '{print $1}'`; value=`echo $env | awk -F"=" '{print $2}'`; export $key=$value #echo "key is $key value is $value" shift done; echo "Executing POSTSCRIPT $exec $args" jobout="${PEGASUS_SUBMIT_DIR}/${jobname}.post.out" joberr="${PEGASUS_SUBMIT_DIR}/${jobname}.post.err" if [ "X${stdin}" = "X" ]; then #execute the job without setting the stdin $exec $args 1> $jobout 2> $joberr else #execute the job with the stdin connected $exec $args 0<$stdin 1> $jobout 2> $joberr fi ) status=$? echo "POSTSCRIPT FOR JOB $jobname Returned with $status" return $status #exitcode $status } check_exitcode() { # purpose: checks a job exitcode and creates appropriate jobstate entries. # On error exits the program # paramtr: $jobname (IN): the name of the job # $prefix (IN): prefix to be applied for jobstate events. # Can be JOB|POST_SCRIPT|PRE_SCRIPT # $status (IN): the status with which job executed #sanity check check_predefined_variables jobstate=$JOBSTATE_LOG jobname=$1 prefix=$2 status=$3 create_jobstate_log_entry $jobname ${prefix}_TERMINATED if [ $status -ne 0 ] ; then create_jobstate_log_entry $jobname ${prefix}_FAILURE echo "INTERNAL *** SHELL_SCRIPT_FINISHED $status ***" >> $jobstate echo "ERROR: ${prefix} $jobname failed with status $status" 1>&2 exit $status else create_jobstate_log_entry $jobname ${prefix}_SUCCESS fi return } #if [ "X${PEGASUS_HOME}" = "X" ]; then # echo "ERROR: Set your PEGASUS_HOME variable" 1>&2 # exit 1 #fi
pegasus-wms_4.0.1+dfsg/share/pegasus/sh/pegasus-lite-common.sh
#!/bin/bash ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
## # # This file contains a set of common bash functions to be used by # Pegasus Lite jobs # # Author: Mats Rynge <rynge@isi.edu> # function pegasus_lite_log() { TS=`/bin/date +'%F %H:%M:%S'` echo "$TS: $1" 1>&2 } function pegasus_lite_worker_package() { # many ways of providing worker package if pegasus_lite_internal_wp_shipped || pegasus_lite_internal_wp_in_env || pegasus_lite_internal_wp_download; then return 0 fi return 1 } function pegasus_lite_internal_wp_shipped() { # was the job shipped with a Pegasus worker package? if ls $pegasus_lite_start_dir/pegasus-worker-*.tar.gz >/dev/null 2>&1; then pegasus_lite_log "The job contained a Pegasus worker package" tar xzf $pegasus_lite_start_dir/pegasus-worker-*.tar.gz rm -f $pegasus_lite_start_dir/pegasus-worker-*.tar.gz unset PEGASUS_HOME export PATH=${pegasus_lite_work_dir}/pegasus-${pegasus_lite_full_version}/bin:$PATH return 0 fi return 1 } function pegasus_lite_internal_wp_in_env() { old_path=$PATH # use PEGASUS_HOME if set if [ "x$PEGASUS_HOME" != "x" ]; then PATH="$PEGASUS_HOME/bin:$PATH" export PATH fi # is there already a pegasus install in our path? detected_pegasus_bin=`which pegasus-config 2>/dev/null || /bin/true` if [ "x$detected_pegasus_bin" != "x" ]; then detected_pegasus_bin=`dirname $detected_pegasus_bin` # does the version match? if $detected_pegasus_bin/pegasus-config --version 2>/dev/null | grep -E "^${pegasus_lite_version_major}\.${pegasus_lite_version_minor}\." >/dev/null 2>/dev/null; then pegasus_lite_log "Using existing Pegasus binaries in $detected_pegasus_bin" return 0 else pegasus_lite_log "Pegasus binaries in $detected_pegasus_bin do not match Pegasus version used for current workflow" fi fi # back out env changes unset PEGASUS_HOME PATH=$old_path export PATH return 1 } function pegasus_lite_internal_wp_download() { # fall back - download a worker package from pegasus.isi.edu os=rhel major=5 arch=`uname -m` if [ $arch != "x86_64" ]; then arch="x86" fi if [ -e /etc/redhat-release ]; then os=rhel major=`cat /etc/redhat-release | sed 's/.*release //' | sed 's/[\. ].*//'` else if [ -e /etc/debian_version ]; then os=deb major=`cat /etc/debian_version | sed 's/\..*//'` fi fi url="http://download.pegasus.isi.edu/wms/download/${pegasus_lite_version_major}" url="${url}.${pegasus_lite_version_minor}" if echo ${pegasus_lite_version_patch} | grep cvs >/dev/null 2>/dev/null; then url="${url}/nightly" fi url="${url}/pegasus-worker" url="${url}-${pegasus_lite_version_major}.${pegasus_lite_version_minor}.${pegasus_lite_version_patch}" url="${url}-${arch}_${os}_${major}.tar.gz" pegasus_lite_log "Downloading Pegasus worker package from $url" wget -q -O pegasus-worker.tar.gz "$url" tar xzf pegasus-worker.tar.gz rm -f pegasus-worker.tar.gz unset PEGASUS_HOME export PATH="${pegasus_lite_work_dir}/pegasus-${pegasus_lite_full_version}/bin:$PATH" } function pegasus_lite_setup_work_dir() { # remember where we started from pegasus_lite_start_dir=`pwd` if [ "x$pegasus_lite_work_dir" != "x" ]; then pegasus_lite_log "Not creating a new work directory as it is already set to $pegasus_lite_work_dir" return fi targets="$PEGASUS_WN_TMP $_CONDOR_SCRATCH_DIR $OSG_WN_TMP $TG_NODE_SCRATCH $TG_CLUSTER_SCRATCH $SCRATCH $TMPDIR $TMP /tmp" unset TMPDIR if [ "x$PEGASUS_WN_TMP_MIN_SPACE" = "x" ]; then PEGASUS_WN_TMP_MIN_SPACE=1000000 fi for d in $targets; do pegasus_lite_log "Checking $d for potential use as work space... " # does the target exist? if [ !
-e $d ]; then pegasus_lite_log " Workdir: $d does not exist" continue fi # make sure there is enough available diskspace cd $d free=`df -kP . | awk '{if (NR==2) print $4}'` if [ "x$free" == "x" -o $free -lt $PEGASUS_WN_TMP_MIN_SPACE ]; then pegasus_lite_log " Workdir: not enough disk space available in $d" continue fi if touch $d/.dirtest.$$ >/dev/null 2>&1; then rm -f $d/.dirtest.$$ >/dev/null 2>&1 d=`mktemp -d $d/pegasus.XXXXXX` export pegasus_lite_work_dir=$d export pegasus_lite_work_dir_created=1 pegasus_lite_log " Work dir is $d - $free kB available" cd $pegasus_lite_work_dir return 0 fi pegasus_lite_log " Workdir: not allowed to write to $d" done return 1 } function pegasus_lite_init() { pegasus_lite_full_version=${pegasus_lite_version_major}.${pegasus_lite_version_minor}.${pegasus_lite_version_patch} # announce version - we do this so pegasus-exitcode and other tools # can tell the job was a PegasusLite job echo "PegasusLite: version ${pegasus_lite_full_version}" 1>&2 # for S3CFG, axpand to include local path if needed if [ "x$S3CFG" != "x" ]; then if ! (echo $S3CFG | grep "^/") >/dev/null 2>&1; then S3CFG=`pwd`"/$S3CFG" pegasus_lite_log "Expanded \$S3CFG to $S3CFG" fi chmod 0600 $S3CFG fi # for irodsEnvFile, axpand to include local path if needed if [ "x$irodsEnvFile" != "x" ]; then if ! (echo $irodsEnvFile | grep "^/") >/dev/null 2>&1; then irodsEnvFile=`pwd`"/$irodsEnvFile" pegasus_lite_log "Expanded \$irodsEnvFile to $irodsEnvFile" fi chmod 0600 $irodsEnvFile fi # for ssh private key, axpand to include local path if needed if [ "x$SSH_PRIVATE_KEY" != "x" ]; then if ! (echo $SSH_PRIVATE_KEY | grep "^/") >/dev/null 2>&1; then SSH_PRIVATE_KEY=`pwd`"/$SSH_PRIVATE_KEY" pegasus_lite_log "Expanded \$SSH_PRIVATE_KEY to $SSH_PRIVATE_KEY" fi chmod 0600 $SSH_PRIVATE_KEY fi } function pegasus_lite_exit() { rc=$? if [ "x$rc" = "x" ]; then rc=0 fi if [ $rc != 0 ]; then pegasus_lite_log "FAILURE: Last command exited with $rc" fi if [ "x$pegasus_lite_work_dir_created" = "x1" ]; then cd / rm -rf $pegasus_lite_work_dir pegasus_lite_log "$pegasus_lite_work_dir cleaned up" fi echo "PegasusLite: exitcode $rc" 1>&2 exit $rc } ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/share/pegasus/sh/pegasus-lite-local.sh���������������������������������������0000755�0001750�0001750�00000005234�11757531137�023254� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/bash ## # Copyright 2007-2011 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. 
#  You may obtain a copy of the License at
#
#  http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing,
#  software distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
##
#
# This is a launcher script for pegasus lite local jobs
#
# Author: Karan Vahi <vahi@isi.edu>
# Author: Mats Rynge <rynge@isi.edu>
#

set -e

check_predefined_variables() {
    # purpose: checks for variables that need to be predefined.
    #          Currently only _CONDOR_SCRATCH_DIR is required.
    if [ "X${_CONDOR_SCRATCH_DIR}" = "X" ]; then
	echo "ERROR: _CONDOR_SCRATCH_DIR was not set" 1>&2
	exit 1
    fi
}

check_predefined_variables

dir=$_CONDOR_SCRATCH_DIR
if [ "${_PEGASUS_EXECUTE_IN_INITIAL_DIR}" = "true" ]; then
    dir=$_PEGASUS_INITIAL_DIR
fi

# sanity check on arguments
if [ $# -lt 1 ] ; then
    echo "pegasus-lite-local requires the path to an executable followed by its arguments"
    exit 1
fi

executable=$1
cd $dir
shift
args=$@

# transfer any input files if required
if [ "X${_PEGASUS_TRANSFER_INPUT_FILES}" != "X" ]; then
    # split files on ,
    IFS=, read -a FILES <<< "$_PEGASUS_TRANSFER_INPUT_FILES"
    for file in "${FILES[@]}"; do
	#echo "FILES NEED TO BE TRANSFERRED $file"
	if [[ $file == /* ]] ; then
	    # file path is absolute (starts with /)
	    cp $file $dir
	else
	    # file is relative -- grab it from the initial dir
	    # check for initialdir
	    if [ "X${_PEGASUS_INITIAL_DIR}" = "X" ]; then
		echo "ERROR: _PEGASUS_INITIAL_DIR not populated" 1>&2
		exit 1
	    fi
	    file=$_PEGASUS_INITIAL_DIR/$file
	    cp $file $dir
	fi
    done
fi

# execute the executable with the args
if [ "X${_PEGASUS_CONNECT_STDIN}" = "X" ]; then
    # don't connect stdin
    $executable "$@"
else
    # cat is used to connect our stdin to the executable's stdin
    cat - | $executable "$@"
fi

# transfer any output files back to the Pegasus initial dir
if [ "X${_PEGASUS_TRANSFER_OUTPUT_FILES}" != "X" ]; then
    # check for initialdir
    if [ "X${_PEGASUS_INITIAL_DIR}" = "X" ]; then
	echo "ERROR: _PEGASUS_INITIAL_DIR not populated" 1>&2
	exit 1
    fi
    outputdir=$_PEGASUS_INITIAL_DIR

    # split files on ,
    IFS=, read -a FILES <<< "$_PEGASUS_TRANSFER_OUTPUT_FILES"
    for file in "${FILES[@]}"; do
	#echo "FILES NEED TO BE TRANSFERRED $file"
	cp $file $outputdir
    done
fi
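The launcher above is driven entirely by environment variables that are
normally set in the Condor submit file. A minimal sketch of invoking it by
hand, assuming a scratch directory exists; the paths and file names
(/tmp/scratch, inputs.txt, out.txt) are illustrative only:

    # hypothetical manual invocation of the launcher above
    export _CONDOR_SCRATCH_DIR=/tmp/scratch
    export _PEGASUS_INITIAL_DIR=$HOME/run0001
    export _PEGASUS_TRANSFER_INPUT_FILES=inputs.txt    # comma-separated list
    export _PEGASUS_TRANSFER_OUTPUT_FILES=out.txt
    pegasus-lite-local.sh /bin/sort inputs.txt

Relative input files are resolved against _PEGASUS_INITIAL_DIR, and outputs
are copied back there after the executable finishes.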
pegasus-wms_4.0.1+dfsg/debian/rules

#!/usr/bin/make -f
# -*- makefile -*-

# Uncomment this to turn on verbose mode.
#export DH_VERBOSE=1

build: build-stamp

build-stamp:
	dh_testdir
	ant dist
	rm -f dist/*.tar.gz
	mv dist/pegasus-* dist/pegasus
	touch build-stamp

install: build
	dh_testdir
	dh_testroot
	dh_prep
	dh_installdirs

	# /etc/pegasus
	mkdir -p debian/pegasus/etc/pegasus
	cp -a dist/pegasus/etc/* debian/pegasus/etc/pegasus/

	# /usr/bin
	mkdir -p debian/pegasus/usr
	cp -a dist/pegasus/bin debian/pegasus/usr/
	strip debian/pegasus/usr/bin/pegasus-cluster
	strip debian/pegasus/usr/bin/pegasus-invoke
	strip debian/pegasus/usr/bin/pegasus-keg
	strip debian/pegasus/usr/bin/pegasus-kickstart

	# /usr/share/
	mkdir -p debian/pegasus/usr
	cp -a dist/pegasus/share debian/pegasus/usr/
	rm -f debian/pegasus/usr/share/pegasus/java/COPYING.*
	rm -f debian/pegasus/usr/share/pegasus/java/EXCEPTIONS.*
	rm -f debian/pegasus/usr/share/pegasus/java/LICENSE.*
	rm -f debian/pegasus/usr/share/pegasus/java/NOTICE.*

	# /usr/lib
	mkdir -p debian/pegasus/usr
	cp -a dist/pegasus/lib debian/pegasus/usr/
	strip debian/pegasus/usr/lib/pegasus/externals/python/pysqlite2/_sqlite.so

	# remove stuff we don't need
	rm -rf debian/pegasus/usr/lib/pegasus/externals/python/boto/tests

# Build architecture-independent files here.
binary-indep: build install
	# We have nothing to do by default.

# Build architecture-dependent files here.
binary-arch: build install
	dh_testdir
	dh_testroot
	dh_installchangelogs
	dh_installdocs
	dh_installman
	dh_compress
	dh_fixperms
	dh_installdeb
	dh_shlibdeps
	dh_gencontrol
	dh_md5sums
	dh_builddeb

binary: binary-indep binary-arch

clean:
	dh_testdir
	dh_testroot
	rm -f build-stamp
	ant clean
	rm -f src/externals/._boto*
	rm -rf dist
	dh_clean

%:
	echo "dh $@"
	exit 1

.PHONY: build binary-indep binary-arch binary install clean

pegasus-wms_4.0.1+dfsg/debian/docs

LicenseBlurb.txt
README

pegasus-wms_4.0.1+dfsg/debian/patches/
pegasus-wms_4.0.1+dfsg/debian/changelog

pegasus (4.0.1cvs-1) unstable; urgency=low

  * Initial packaging of Pegasus 4.0.1

 -- Mats Rynge <mats@rynge.net>  Wed, 29 Feb 2012 16:31:15 -0800

pegasus (4.0.0-1) unstable; urgency=low

  * Initial FHS packaging

 -- Mats Rynge <mats@rynge.net>  Tue, 28 Feb 2012 19:54:27 -0800

pegasus-wms_4.0.1+dfsg/debian/source/format

3.0 (quilt)

pegasus-wms_4.0.1+dfsg/debian/copyright

This work was packaged for Debian by:

    Mats Rynge <mats@rynge.net> on Mon, 22 Aug 2011 09:11:11 -0700

It was downloaded from:

    http://pegasus.isi.edu

Upstream Author(s):

    Pegasus Development Team <pegasus-support@isi.edu>

Copyright:
    Copyright 2007-2011 University Of Southern California

License:

    Apache version 2.0, see "/usr/share/common-licenses/Apache-2.0"

The Debian packaging is:

    Copyright 2007-2011 University Of Southern California

and is licensed under the Apache version 2.0, see
"/usr/share/common-licenses/Apache-2.0"

pegasus-wms_4.0.1+dfsg/debian/compat

7

pegasus-wms_4.0.1+dfsg/debian/control

Source: pegasus
Section: science
Priority: extra
Maintainer: Mats Rynge <mats@rynge.net>
Build-Depends: debhelper (>= 7), ant, openjdk-6-jdk, perl, python
Standards-Version: 3.9.1
Homepage: http://pegasus.isi.edu

Package: pegasus
Architecture: any
Depends: openjdk-6-jre, perl, python, ${shlibs:Depends}, ${misc:Depends}
Recommends: condor, globus
Description: Workflow management system for Condor, grids, and clouds
 The Pegasus project encompasses a set of technologies that help
 workflow-based applications execute in a number of different
 environments, including desktops, campus clusters, grids, and now
 clouds. Scientific workflows allow users to easily express multi-step
 computations, for example: retrieve data from a database, reformat the
 data, and run an analysis. Once an application is formalized as a
 workflow, the Pegasus Workflow Management Service can map it onto
 available compute resources and execute the steps in the appropriate
 order.
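Given the rules and control files above, the package is built with the
standard Debian tooling; the following is an illustrative sketch only
(dpkg-buildpackage and its flags are standard Debian tools, not part of this
source tree):

    # from the top of the unpacked pegasus-wms_4.0.1+dfsg/ tree
    dpkg-buildpackage -us -uc    # drives debian/rules build, binary-arch, etc.

The build-stamp target runs "ant dist" to produce the dist/pegasus tree that
the install target then copies into the debian/pegasus staging area.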
����������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/�������������������������������������������������������������������������0000755�0001750�0001750�00000000000�11757551532�014624� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/ShowProperties.java������������������������������������������������������0000644�0001750�0001750�00000016277�11757531137�020500� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * Globus Toolkit Public License (GTPL) * * Copyright (c) 1999 University of Chicago and The University of * Southern California. All Rights Reserved. * * 1) The "Software", below, refers to the Globus Toolkit (in either * source-code, or binary form and accompanying documentation) and a * "work based on the Software" means a work based on either the * Software, on part of the Software, or on any derivative work of * the Software under copyright law: that is, a work containing all * or a portion of the Software either verbatim or with * modifications. Each licensee is addressed as "you" or "Licensee." * * 2) The University of Southern California and the University of * Chicago as Operator of Argonne National Laboratory are copyright * holders in the Software. The copyright holders and their third * party licensors hereby grant Licensee a royalty-free nonexclusive * license, subject to the limitations stated herein and * U.S. Government license rights. * * 3) A copy or copies of the Software may be given to others, if you * meet the following conditions: * * a) Copies in source code must include the copyright notice and * this license. * * b) Copies in binary form must include the copyright notice and * this license in the documentation and/or other materials * provided with the copy. * * 4) All advertising materials, journal articles and documentation * mentioning features derived from or use of the Software must * display the following acknowledgement: * * "This product includes software developed by and/or derived from * the Globus project (http://www.globus.org/)." * * In the event that the product being advertised includes an intact * Globus distribution (with copyright and license included) then * this clause is waived. * * 5) You are encouraged to package modifications to the Software * separately, as patches to the Software. * * 6) You may make modifications to the Software, however, if you * modify a copy or copies of the Software or any portion of it, * thus forming a work based on the Software, and give a copy or * copies of such work to others, either in source code or binary * form, you must meet the following conditions: * * a) The Software must carry prominent notices stating that you * changed specified portions of the Software. 
* * b) The Software must display the following acknowledgement: * * "This product includes software developed by and/or derived * from the Globus Project (http://www.globus.org/) to which the * U.S. Government retains certain rights." * * 7) You may incorporate the Software or a modified version of the * Software into a commercial product, if you meet the following * conditions: * * a) The commercial product or accompanying documentation must * display the following acknowledgment: * * "This product includes software developed by and/or derived * from the Globus Project (http://www.globus.org/) to which the * U.S. Government retains a paid-up, nonexclusive, irrevocable * worldwide license to reproduce, prepare derivative works, and * perform publicly and display publicly." * * b) The user of the commercial product must be given the following * notice: * * "[Commercial product] was prepared, in part, as an account of * work sponsored by an agency of the United States Government. * Neither the United States, nor the University of Chicago, nor * University of Southern California, nor any contributors to * the Globus Project or Globus Toolkit nor any of their employees, * makes any warranty express or implied, or assumes any legal * liability or responsibility for the accuracy, completeness, or * usefulness of any information, apparatus, product, or process * disclosed, or represents that its use would not infringe * privately owned rights. * * IN NO EVENT WILL THE UNITED STATES, THE UNIVERSITY OF CHICAGO * OR THE UNIVERSITY OF SOUTHERN CALIFORNIA OR ANY CONTRIBUTORS * TO THE GLOBUS PROJECT OR GLOBUS TOOLKIT BE LIABLE FOR ANY * DAMAGES, INCLUDING DIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL * DAMAGES RESULTING FROM EXERCISE OF THIS LICENSE AGREEMENT OR * THE USE OF THE [COMMERCIAL PRODUCT]." * * 8) LICENSEE AGREES THAT THE EXPORT OF GOODS AND/OR TECHNICAL DATA * FROM THE UNITED STATES MAY REQUIRE SOME FORM OF EXPORT CONTROL * LICENSE FROM THE U.S. GOVERNMENT AND THAT FAILURE TO OBTAIN SUCH * EXPORT CONTROL LICENSE MAY RESULT IN CRIMINAL LIABILITY UNDER U.S. * LAWS. * * 9) Portions of the Software resulted from work developed under a * U.S. Government contract and are subject to the following license: * the Government is granted for itself and others acting on its * behalf a paid-up, nonexclusive, irrevocable worldwide license in * this computer software to reproduce, prepare derivative works, and * perform publicly and display publicly. * * 10) The Software was prepared, in part, as an account of work * sponsored by an agency of the United States Government. Neither * the United States, nor the University of Chicago, nor The * University of Southern California, nor any contributors to the * Globus Project or Globus Toolkit, nor any of their employees, * makes any warranty express or implied, or assumes any legal * liability or responsibility for the accuracy, completeness, or * usefulness of any information, apparatus, product, or process * disclosed, or represents that its use would not infringe privately * owned rights. * * 11) IN NO EVENT WILL THE UNITED STATES, THE UNIVERSITY OF CHICAGO OR * THE UNIVERSITY OF SOUTHERN CALIFORNIA OR ANY CONTRIBUTORS TO THE * GLOBUS PROJECT OR GLOBUS TOOLKIT BE LIABLE FOR ANY DAMAGES, * INCLUDING DIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES * RESULTING FROM EXERCISE OF THIS LICENSE AGREEMENT OR THE USE OF * THE SOFTWARE. 
* * END OF LICENSE */ import java.io.*; import java.util.*; /** * Displays single specific values or all values from the current * system properties. This class solely exists to check the currently * active property values. */ public class ShowProperties { /** * Runs the main program. If no arguments were specified, all property * values, sorted by their key, will be shown. Otherwise, only property * values of matching keys are shown. * * @param args are the commandline interface arguments. */ public static void main( String[] args ) { Properties sp = System.getProperties(); if ( args.length > 0 ) { // each component for ( int i=0; i<args.length; ++i ) { System.out.println( sp.getProperty(args[i]) ); } } else { // all components for ( Iterator i=(new TreeSet(sp.keySet())).iterator(); i.hasNext(); ) { String key = (String) i.next(); System.out.println( key + '=' + ((String) sp.get(key)) ); } } } } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/externals/���������������������������������������������������������������0000755�0001750�0001750�00000000000�11757551516�016633� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/externals/Makefile�������������������������������������������������������0000644�0001750�0001750�00000001773�11757531137�020300� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������ifndef ${prefix} prefix = $(PEGASUS_HOME) endif ifndef ${libdir} libdir = ${prefix}/lib endif PySQLite=pysqlite-2.6.3 BOTO=boto-2.0b3-24047c8 SQLAlchemy=SQLAlchemy-0.7.6 all: ${libdir}/python/pysqlite2 ${libdir}/python/sqlalchemy ${libdir}/python/boto ${libdir}/python/pysqlite2: @rm -rf $(PySQLite) tar xzf $(PySQLite).tar.gz (cd $(PySQLite) && \ python setup.py build_static && \ cp -R build/lib*/pysqlite2 ${libdir}/python/ \ ) @rm -rf $(PySQLite) ${libdir}/python/sqlalchemy: @rm -rf $(SQLAlchemy) tar xzf $(SQLAlchemy).tar.gz (cd $(SQLAlchemy) && \ python setup.py build && \ cp -R build/lib*/sqlalchemy ${libdir}/python/ \ ) @rm -rf $(SQLAlchemy) ${libdir}/python/boto: @rm -rf $(BOTO) tar xzf $(BOTO).tar.gz (cd $(BOTO) && \ python setup.py build && \ rm -rf ${prefix}/lib/python/boto && \ cp -R build/lib*/boto ${libdir}/python/ \ ) @rm -rf $(BOTO) clean: rm -rf touch.pysqlite pysqlite-2.6.0 rm -rf touch.sqlalchemy $(SQLAlchemy) rm -rf touch.boto $(BOTO) .boto* �����pegasus-wms_4.0.1+dfsg/src/GTPL���������������������������������������������������������������������0000644�0001750�0001750�00000011750�11757531137�015320� 0����������������������������������������������������������������������������������������������������ustar 
�rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������Globus Toolkit Public License Version 2, July 31, 2003 Copyright 1999-2003 University of Chicago and The University of Southern California. All rights reserved. This software referred to as the Globus Toolkit software ("Software") includes voluntary contributions made to the Globus Project collaboration. Persons and entities that have made voluntary contributions are hereinafter referred to as "Contributors." This Globus Toolkit Public License is referred to herein as "the GTPL." For more information on the Globus Project, please see http://www.globus.org/. Permission is granted for the installation, use, reproduction, modification, display, performance and redistribution of this Software, with or without modification, in source and binary forms. Permission is granted for the installation, use, reproduction, modification, display, performance and redistribution of user files, manuals, and training and demonstration slides ("Documentation") distributed with or specifically designated as distributed under the GTPL. Any exercise of rights under the GTPL is subject to the following conditions: 1. Redistributions of this Software, with or without modification, must reproduce the GTPL in: (1) the Software, or (2) the Documentation or some other similar material which is provided with the Software (if any). 2. The Documentation, alone or if included with a redistribution of the Software, must include the following notice: "This product includes material developed by the Globus Project (http://www.globus.org/)." Alternatively, if that is where third-party acknowledgments normally appear, this acknowledgment must be reproduced in the Software itself. 3. Globus Toolkit and Globus Project are trademarks of the University of Chicago. Any trademarks of the University of Chicago or the University of Southern California may not be used to endorse or promote software, or products derived therefrom, and except as expressly provided herein may not be affixed to modified redistributions of this Software or Documentation except with prior written approval, obtainable at the discretion of the trademark owner from info@globus.org. 4. To the extent that patent claims licensable by the University of Southern California and/or by the University of Chicago (as Operator of Argonne National Laboratory) are necessarily infringed by the use or sale of the Software, you and your transferees are granted a non-exclusive, worldwide, royalty-free license under such patent claims, with the rights to make, use, sell, offer to sell, import and otherwise transfer the Software in source code and object code form. This patent license shall not apply to Documentation or to any other software combinations which include the Software. No hardware per se is licensed hereunder. If you or any subsequent transferee (a "Recipient") institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Software infringes such Recipient's patent(s), then such Recipient's rights granted under the patent license above shall terminate as of the date such litigation is filed. 5. 
DISCLAIMER SOFTWARE AND DOCUMENTATION ARE PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, OF SATISFACTORY QUALITY, AND FITNESS FOR A PARTICULAR PURPOSE OR USE ARE DISCLAIMED. THE COPYRIGHT HOLDERS AND CONTRIBUTORS MAKE NO REPRESENTATION THAT THE SOFTWARE, DOCUMENTATION, MODIFICATIONS, ENHANCEMENTS OR DERIVATIVE WORKS THEREOF, WILL NOT INFRINGE ANY PATENT, COPYRIGHT, TRADEMARK, TRADE SECRET OR OTHER PROPRIETARY RIGHT. 6. LIMITATION OF LIABILITY THE COPYRIGHT HOLDERS AND CONTRIBUTORS SHALL HAVE NO LIABILITY TO LICENSEE OR OTHER PERSONS FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, CONSEQUENTIAL, EXEMPLARY, OR PUNITIVE DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES, LOSS OF USE, DATA OR PROFITS, OR BUSINESS INTERRUPTION, HOWEVER CAUSED AND ON ANY THEORY OF CONTRACT, WARRANTY, TORT (INCLUDING NEGLIGENCE), PRODUCT LIABILITY OR OTHERWISE, ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE OR DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 7. The Globus Project may publish revised and/or new versions of the GTPL from time to time. Each version will be given a distinguishing version number. Once Software or Documentation has been published under a particular version of the GTPL, you may always continue to use it under the terms of that version. You may also choose to use such Software or Documentation under the terms of any subsequent version of the GTPL published by the Globus Project. No one other than the Globus Project has the right to modify the terms of the GTPL. Globus Toolkit Public License 7-31-03 ������������������������pegasus-wms_4.0.1+dfsg/src/tools/�������������������������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�015773� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/cluster/�����������������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�017454� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/cluster/mypopen.h��������������������������������������������������0000644�0001750�0001750�00000002121�11757531137�021300� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _MYPOPEN_H #define _MYPOPEN_H #include <sys/types.h> typedef struct { pid_t child; /* pid of process that runs things */ int readfd; /* fd to read output from process */ } PipeCmd; extern int exec_cmd( char* cmd, char* envp[], char* buffer, size_t blen ); extern int exec_cmd2( char* cmd, char* buffer, size_t blen ); extern int mysystem( const char* tag, char* argv[], char* envp[], int fd_input, int fd_output, int fd_error ); #endif /* _MYPOPEN_H */ �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/cluster/Makefile.Darwin��������������������������������������������0000644�0001750�0001750�00000003044�11757531137�022330� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������# Filename: Makefile.LINUX # Version Developer Date Change # ------- --------------- ------- ----------------------- # 1.1 Mei-Hui Su 20Sep04 Added into montage v3.0 .SUFFIXES: .SUFFIXES: .c .o #ISI dc/condor MPICC_LSF = /cluster/mpich/mpich-1.2.5.3/bin/mpicc MPICC_CONDOR = /nfs/asd2/pegasus/software/linux/mpi/mpich-1.2.5-gt3.0.2/bin/mpicc #SDSC teragrid MPICC_PBS = /usr/local/apps/mpich-vmi-intel/bin/mpicc #UC teragrid #MPICC_PBS = /soft/mpich-gm-1.2.5..10-intel-r2/bin/mpicc #CALTECH teragrid #MPICC_PBS = /usr/local/mpich/mpich-gm-1.2.5..10-intel-r2/bin/mpicc #NCSA teragrid #MPICC_PBS = /usr/local/mpich/mpich-gm-1.2.5..10-intel-r2/bin/mpicc MPICC = $(MPICC_CONDOR) CC = cc -D_FILE_OFFSET_BITS=64 CFLAGS = -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -g -I. 
LIBS = -static -lm .c.o: $(CC) $(CFLAGS) -c $*.c all: seqexec seqexec: seqexec.o mypopen.o $(CC) -o seqexec seqexec.o mypopen.o $(LIBS) mpiexec: mpiexec.c mypopen.c mympi.c $(MPICC) $(CFLAGS) -c -o mypopenMPI.o mypopen.c ;\ $(MPICC) $(CFLAGS) -c -o mympiMPI.o mympi.c ;\ $(MPICC) $(CFLAGS) -o mpiexec mpiexec.c mypopenMPI.o mympiMPI.o $(LIBS) -lpthread mpiexec.condor: mpiexec.c mypopen.c $(MPICC_CONDOR) $(CFLAGS) -c -o mypopenMPI_condor.o mypopen.c ;\ $(MPICC_CONDOR) $(CFLAGS) -c -o mympiMPI_condor.o mympi.c ;\ $(MPICC_CONDOR) $(CFLAGS) -o mpiexec.condor mpiexec.c mypopenMPI_condor.o mympiMPI_condor.o $(LIBS) -lpthread install: cp seqexec ../../bin clean: rm -f seqexec mpiexec *.o ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/cluster/mympi.h����������������������������������������������������0000644�0001750�0001750�00000001616�11757531137�020754� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. * */ /* * Author : Mei-hui Su * Revision : $REVISION$ */ #ifndef mympi_H #define mympi_H extern int *init_my_list(int); extern void reset_idle_node(int); extern void set_idle_node(int); extern int next_idle_node(); extern void free_my_list(); extern int all_idling(); #endif ������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/cluster/mei.c������������������������������������������������������0000644�0001750�0001750�00000007351�11757531137�020370� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 *
 */

/*
 * Module: seqexec.c
 * Author : Mei-hui Su
 * Revision : $REVISION$
 */

#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <string.h>
#include <time.h>

#include "mypopen.h"

#define MAXSTR 4096

char *fileName();

extern char *optarg;
extern int optind, opterr;

int debug;
FILE *fstatus;

/*******************************************************************/
/*                                                                 */
/*  mGenericExec                                                   */
/*                                                                 */
/*  Runs some commands from some input file or stdin sequentially  */
/*                                                                 */
/*******************************************************************/

int main(int argc, char **argv, char *envp[])
{
    int    c, count, failed;
    char   fname [MAXSTR];
    char   cmd   [MAXSTR];
    char   msg   [MAXSTR];
    int    status;
    char   rline [1024];
    long   result;
    FILE   *fin;
    FILE   *fout = stdout;
    time_t start_time;
    time_t end_time;
    double elapsed_time;

    time(&start_time);

    /***************************************/
    /* Process the command-line parameters */
    /***************************************/

    debug = 0;
    fstatus = stdout;

    while ((c = getopt(argc, argv, "ds:")) != EOF)
    {
	switch (c)
	{
	    case 'd':
		debug = 1;
		break;

	    case 's':
		if((fstatus = fopen(optarg, "w+")) == (FILE *)NULL)
		{
		    printf("[struct stat=\"ERROR\", msg=\"Cannot open status file: %s\"]\n", optarg);
		    exit(1);
		}
		break;

	    default:
		printf("[struct stat=\"ERROR\", msg=\"Usage: %s [-d] [-s statusfile] [inputfile]\"]\n", argv[0]);
		exit(1);
		break;
	}
    }

    if (!((argc - optind != 1) || (argc == optind)))
    {
	printf("[struct stat=\"ERROR\", msg=\"Usage: %s [-d] [-s statusfile] [inputfile]\"]\n", argv[0]);
	exit(1);
    }

    if(optind != argc) {
	strcpy(fname, argv[optind]);
	fin = fopen(fname, "r");
	if(fin == (FILE *)NULL)
	{
	    fprintf(fstatus, "[struct stat=\"ERROR\", msg=\"Can't open command file.\"]\n");
	    exit(1);
	}
    } else fin=stdin;

    /************************************************/
    /* Read the commands and call each sequentially */
    /************************************************/

    count = 0;
    failed = 0;
    while(fgets(cmd, MAXSTR, fin) != (char *)NULL)
    {
	result = -1;
	status=exec_cmd(cmd, envp, rline, sizeof(rline));
	++count;
	if ( status == -1 ) {
	    fprintf(fstatus, "[struct stat=\"ERROR\", msg=\"%s\"]\n", rline);
	    failed++;
	}
    }

    if(fin != stdin)
	fclose(fin);

    time(&end_time);
    /* difftime(t1, t0) returns t1 - t0; pass the end time first so the
       elapsed time comes out positive */
    elapsed_time=difftime(end_time,start_time);

    fprintf(fstatus, "[struct stat=\"OK\", count=%d, failed=%d, time=%.0f, stime=(%s)]\n",
	    count, failed, elapsed_time, ctime(&start_time));
    fflush(fstatus);
    exit(0);
}
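The driver above reads one command per line and reports a summary in the
"[struct stat=...]" format. mei.c appears to be an earlier standalone variant
of the seqexec tool; assuming it is compiled to a seqexec-style binary, a
hypothetical invocation looks like this (the name commands.txt is illustrative
only):

    # run three commands sequentially, writing the status summary to status.txt
    printf '/bin/date\n/bin/hostname\n/bin/uptime\n' > commands.txt
    seqexec -s status.txt commands.txt
    # or feed commands on stdin:
    printf '/bin/date\n' | seqexec

Per the getopt string "ds:" in the source, -d enables debug output and -s
redirects the status records to a file; with no input file argument, commands
are read from stdin.

pegasus-wms_4.0.1+dfsg/src/tools/cluster/Makefile.LINUX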
Version Developer Date Change # ------- --------------- ------- ----------------------- # 1.1 Mei-Hui Su 20Sep04 Added into montage v3.0 .SUFFIXES: .SUFFIXES: .c .o #ISI dc/condor MPICC_LSF = /cluster/mpich/mpich-1.2.5.3/bin/mpicc MPICC_CONDOR = /nfs/asd2/pegasus/software/linux/mpi/mpich-1.2.5-gt3.0.2/bin/mpicc #SDSC teragrid MPICC_PBS = /usr/local/apps/mpich-vmi-intel/bin/mpicc #UC teragrid #MPICC_PBS = /soft/mpich-gm-1.2.5..10-intel-r2/bin/mpicc #CALTECH teragrid #MPICC_PBS = /usr/local/mpich/mpich-gm-1.2.5..10-intel-r2/bin/mpicc #NCSA teragrid #MPICC_PBS = /usr/local/mpich/mpich-gm-1.2.5..10-intel-r2/bin/mpicc MPICC = $(MPICC_CONDOR) CC = cc -D_FILE_OFFSET_BITS=64 CFLAGS = -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -g -I. LIBS = -static -lnsl -lm .c.o: $(CC) $(CFLAGS) -c $*.c all: seqexec seqexec: seqexec.o mypopen.o $(CC) -o seqexec seqexec.o mypopen.o $(LIBS) mpiexec: mpiexec.c mypopen.c mympi.c $(MPICC) $(CFLAGS) -c -o mypopenMPI.o mypopen.c ;\ $(MPICC) $(CFLAGS) -c -o mympiMPI.o mympi.c ;\ $(MPICC) $(CFLAGS) -o mpiexec mpiexec.c mypopenMPI.o mympiMPI.o $(LIBS) -lpthread mpiexec.condor: mpiexec.c mypopen.c $(MPICC_CONDOR) $(CFLAGS) -c -o mypopenMPI_condor.o mypopen.c ;\ $(MPICC_CONDOR) $(CFLAGS) -c -o mympiMPI_condor.o mympi.c ;\ $(MPICC_CONDOR) $(CFLAGS) -o mpiexec.condor mpiexec.c mypopenMPI_condor.o mympiMPI_condor.o $(LIBS) -lpthread install: cp seqexec ../../bin clean: rm -f seqexec mpiexec *.o ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/cluster/make.SunOS�������������������������������������������������0000755�0001750�0001750�00000000265�11757531137�021320� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/sh gmake distclean gmake EXTRACFLAGS='$(V7FLAGS)' seqexec mv seqexec seqexec.sparcv7 gmake clean gmake EXTRACFLAGS='$(V9FLAGS)' seqexec mv seqexec seqexec.sparcv9 gmake clean�������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/cluster/mympi.c����������������������������������������������������0000644�0001750�0001750�00000005062�11757531137�020746� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public 
License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. * */ /* * Author : Mei-hui Su * Revision : $REVISION$ */ #include <stdio.h> #include <stdlib.h> #include <unistd.h> #include <string.h> #include <math.h> #include <sys/types.h> #include <pthread.h> #include "mympi.h" #define FREE 0 #define NOT_FREE 1 /* master node is not tracked in the node_list, so be careful with off by one thing */ static int *node_list=NULL; static int numproc; static int pidx; static int free_cnt; pthread_mutex_t m_mutex; /* our mutex */ pthread_cond_t m_cond; /* the condition variable */ int *init_my_list(int num) { int i; numproc=num; pidx=0; node_list = (int *) malloc( sizeof(int) * numproc); for(i=0;i<numproc;i++) node_list[i]=FREE; free_cnt=(numproc); pthread_mutex_init( &m_mutex, NULL ); pthread_cond_init( &m_cond, NULL ); return node_list; } void free_my_list() { if(node_list==NULL) return; free(node_list); pthread_mutex_destroy( &m_mutex ); pthread_cond_destroy( &m_cond ); node_list=NULL; } int next_idle_node() { int idx=-1; int oidx; pthread_mutex_lock(&m_mutex); if(free_cnt!=0) { while(1) { pidx=(pidx+1) % numproc; if(node_list[pidx]==FREE) { node_list[pidx]=NOT_FREE; free_cnt--; idx=pidx; break; } } } pthread_mutex_unlock(&m_mutex); pthread_cond_signal(&m_cond); if(idx == -1) return -1; oidx=idx+1; return oidx; } void reset_idle_node(int oidx) { int idx=oidx-1; pthread_mutex_lock(&m_mutex); node_list[idx]=NOT_FREE; free_cnt--; pthread_mutex_unlock(&m_mutex); } void set_idle_node(int oidx) { int idx=oidx-1; pthread_mutex_lock(&m_mutex); node_list[idx]=FREE; free_cnt++; pthread_mutex_unlock(&m_mutex); pthread_cond_signal(&m_cond); } int all_idling() { int ret=0; pthread_mutex_lock(&m_mutex); if(numproc == free_cnt) ret=1; pthread_mutex_unlock(&m_mutex); return ret; } ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/cluster/Makefile���������������������������������������������������0000644�0001750�0001750�00000007225�11757531137�021112� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������# # Makefile # $Id: Makefile 4537 2011-09-27 17:02:53Z voeckler $ # INSTALL = install RM = rm -f CC = gcc CXX = g++ SYSTEM = $(shell uname -s | tr '[a-z]' '[A-Z]' | tr -d '_ -/') VERSION = $(shell uname -r) MARCH = $(shell uname -m | tr '[A-Z]' '[a-z]') MAJOR = $(firstword $(subst ., ,$(VERSION))) LOADLIBES = -lm ifndef ${prefix} prefix = 
$(PEGASUS_HOME) endif ifndef ${datadir} datadir = $(prefix)/share endif NROFF = groff -mandoc TEXT = -Tutf8 HTML = -Thtml ifeq (SUNOS,${SYSTEM}) ifeq (5,${MAJOR}) CC = cc CXX = CC V7FLAGS = -xtarget=generic V9FLAGS = -xtarget=ultra -xarch=v9 CFLAGS = -dalign -ftrap=%none -fsimple -xlibmil $(EXTRACFLAGS) CFLAGS := -DSOLARIS $(CFLAGS) -xO4 -D__EXTENSIONS__=1 LDFLAGS += $(EXTRACFLAGS) INSTALL = /usr/ucb/install LOADLIBES += -lnsl else # old Solaris 1 not supported endif endif ifeq (AIX,${SYSTEM}) CXX = xlC CC = xlc endif ifeq (LINUX,${SYSTEM}) ifeq (ia64,${MARCH}) # # Linux IA64 # CFLAGS = -Wall -O2 -DMARCH_IA64=1 -ggdb # SDSC teragrid MPICC_PBS = /usr/local/apps/mpich-vmi-intel/bin/mpicc # UC teragrid #MPICC_PBS = /soft/mpich-gm-1.2.5..10-intel-r2/bin/mpicc # CALTECH teragrid #MPICC_PBS = /usr/local/mpich/mpich-gm-1.2.5..10-intel-r2/bin/mpicc # NCSA teragrid #MPICC_PBS = /usr/local/mpich/mpich-gm-1.2.5..10-intel-r2/bin/mpicc else ifeq (x86_64,${MARCH}) # # Linux AMD64 # CFLAGS = -Wall -O2 -m64 -ggdb MPICC_PBS = /sys/software/mpich/bin/mpicc else ifeq (armv7l, ${MARCH}) CFLAGS = -Wall -O2 -ggdb else # # Linux IA32 # CFLAGS = -Wall -O2 -march=i686 -ggdb #LDFLAGS += -static #ISI dc/condor MPICC_LSF = /cluster/mpich/mpich-1.2.5.3/bin/mpicc MPICC_CONDOR = /nfs/asd2/pegasus/software/linux/mpi/mpich-1.2.5-gt3.0.2/bin/mpicc endif endif endif endif # # === [3] ======================================================= rules section # There is no need to change things below this line. CFLAGS += -D${SYSTEM} -DMAJOR=${MAJOR} LD = $(CC) -D_FILE_OFFSET_BITS=64 CC += -D_FILE_OFFSET_BITS=64 CFLAGS += -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE GENDOC = seqexec.html seqexec.txt seqexec.ps ifeq (,${MPICC}) MPICC = $(MPICC_CONDOR) endif %.html : %.1 ; $(NROFF) $(HTML) $< > $@ %.ps : %.1 ; $(NROFF) $< > $@ %.txt : %.1 ; $(NROFF) $(TEXT) $< > $@ %.o : %.c $(CC) $(CFLAGS) $< -c -o $@ all: seqexec $(GENDOC) job.o: job.c job.h #mei.o: mei.c mypopen.h #mpiexec.o: mpiexec.c mypopen.h mympi.h #mympi.o: mympi.c mympi.h #mypopen.o: mypopen.c mypopen.h mysystem.o: mysystem.c tools.h report.h mysystem.h parser.o: parser.c parser.h tools.h report.o: report.c tools.h report.h seqexec.o: seqexec.c tools.h parser.h report.h mysystem.h job.h tools.o: tools.c tools.h try-cpus.o: try-cpus.c seqexec: seqexec.o tools.o parser.o report.o mysystem.o job.o $(LD) $(LDFLAGS) $^ -o $@ $(LOADLIBES) try-cpus: try-cpus.o $(LD) $(LDFLAGS) $^ -o $@ $(LOADLIBES) mpiexec: mpiexec.c mypopen.c mympi.c $(MPICC) $(CFLAGS) -c -o mypopenMPI.o mypopen.c $(MPICC) $(CFLAGS) -c -o mympiMPI.o mympi.c $(MPICC) $(LDFLAGS) -o mpiexec mpiexec.c mypopenMPI.o mympiMPI.o $(LOADLIBES) -lpthread mpiexec.condor: mpiexec.c mypopen.c $(MPICC_CONDOR) $(CFLAGS) -c -o mypopenMPI_condor.o mypopen.c ;\ $(MPICC_CONDOR) $(CFLAGS) -c -o mympiMPI_condor.o mympi.c ;\ $(MPICC_CONDOR) $(LDFLAGS) -o mpiexec.condor mpiexec.c mypopenMPI_condor.o mympiMPI_condor.o $(LOADLIBES) -lpthread install: seqexec $(INSTALL) -m 0755 $^ $(prefix)/bin install.man: seqexec.1 $(INSTALL) -m 0644 $^ $(datadir)/man/man1 install.doc: $(GENDOC) $(INSTALL) -m 0644 $(GENDOC) $(datadir)/man install.all: install install.man install.doc doc: $(GENDOC) clean: $(RM) *.o $(GENDOC) core core.* distclean: clean $(RM) seqexec mpiexec try-cpus mpiexec.condor 
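A quick sketch of driving the Makefile above; the MPI compiler path and
install prefix are illustrative (the MPICC_* defaults in the Makefile point at
site-specific installs that will not exist elsewhere):

    # build the sequential wrapper and its formatted docs
    make seqexec doc

    # build the MPI wrapper with your own mpicc
    make mpiexec MPICC=/usr/bin/mpicc

    # install into a Pegasus tree
    make install prefix=$PEGASUS_HOME

Note that seqexec here links seqexec.o with tools.o, parser.o, report.o,
mysystem.o and job.o, while the older per-platform makefiles (Makefile.LINUX,
Makefile.SunOS, Makefile.Darwin) still build it from seqexec.o and mypopen.o.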
pegasus-wms_4.0.1+dfsg/src/tools/cluster/Makefile.SunOS

# Filename: Makefile.SunOS
# Version      Developer        Date     Change
# -------      ---------------  -------  -----------------------
#  1.1         Mei-Hui Su       20Sep04  Added into montage v3.0

.SUFFIXES:
.SUFFIXES: .c .o

#ISI dc/condor
MPICC_LSF = /cluster/mpich/mpich-1.2.5.3/bin/mpicc
MPICC_CONDOR = /nfs/asd2/pegasus/software/linux/mpi/mpich-1.2.5-gt3.0.2/bin/mpicc
#SDSC teragrid
MPICC_PBS = /usr/local/apps/mpich-vmi-intel/bin/mpicc
#UC teragrid
#MPICC_PBS = /soft/mpich-gm-1.2.5..10-intel-r2/bin/mpicc
#CALTECH teragrid
#MPICC_PBS = /usr/local/mpich/mpich-gm-1.2.5..10-intel-r2/bin/mpicc
#NCSA teragrid
#MPICC_PBS = /usr/local/mpich/mpich-gm-1.2.5..10-intel-r2/bin/mpicc

MPICC = $(MPICC_CONDOR)

CC = cc -D_FILE_OFFSET_BITS=64
CFLAGS = -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -g -I.
LIBS = -static -lsocket -lnsl -lm

.c.o:
	$(CC) $(CFLAGS) -c $*.c

all: seqexec

seqexec: seqexec.o mypopen.o
	$(CC) -o seqexec seqexec.o mypopen.o $(LIBS)

mpiexec: mpiexec.c mypopen.c mympi.c
	$(MPICC) $(CFLAGS) -c -o mypopenMPI.o mypopen.c ;\
	$(MPICC) $(CFLAGS) -c -o mympiMPI.o mympi.c ;\
	$(MPICC) $(CFLAGS) -o mpiexec mpiexec.c mypopenMPI.o mympiMPI.o $(LIBS) -lpthread

mpiexec.condor: mpiexec.c mypopen.c
	$(MPICC_CONDOR) $(CFLAGS) -c -o mypopenMPI_condor.o mypopen.c ;\
	$(MPICC_CONDOR) $(CFLAGS) -c -o mympiMPI_condor.o mympi.c ;\
	$(MPICC_CONDOR) $(CFLAGS) -o mpiexec.condor mpiexec.c mypopenMPI_condor.o mympiMPI_condor.o $(LIBS) -lpthread

install:
	cp seqexec ../../bin

clean:
	rm -f seqexec mpiexec *.o

pegasus-wms_4.0.1+dfsg/src/tools/cluster/mpiexec.c
This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. * */ /* * Module: mpiexec.c * Author : Mei-hui Su * Revision : $REVISION$ */ #include <stdio.h> #include <stdlib.h> #include <unistd.h> #include <string.h> #include <math.h> #include <mpi.h> #include <time.h> #include "mypopen.h" #include "mympi.h" #define MAXSTR 4096 #define CMD_TAG 1 #define RESULT_TAG 2 #define EXIT_TAG 3 void exit_panic(int); FILE *parse_cmd_line(int,char**); int run_it(char *, char*[], char *); extern char *optarg; extern int optind, opterr; int debug; FILE *fstatus; /*******************************************************************/ /* */ /* mGenericExecMPI */ /* */ /* Runs some commands from an input file/or stdin using MPI */ /* */ /*******************************************************************/ int main(int argc, char **argv, char *envp[]) { int count, failed; char cmd [MAXSTR]; FILE *fin; MPI_Status mstatus; char mpistatus[MAXSTR]; int myid; int numprocs; MPI_Init(&argc, &argv); MPI_Comm_size(MPI_COMM_WORLD, &numprocs); MPI_Comm_rank(MPI_COMM_WORLD, &myid); debug = 0; fstatus = stderr; /************************************************/ /* Read the corresponding commands and run it */ /************************************************/ count = 0; failed = 0; fin=parse_cmd_line(argc, argv); if (myid == 0) { /* I am the master */ int free_idx; int pause_for_result=0; int ret; int more_cmd=1; char *save_cmd=NULL; int pflag; time_t start_time; time_t end_time; double elapsed_time; time(&start_time); /* special case, I am the only one */ if(numprocs==1) { while((save_cmd=fgets(cmd, MAXSTR, fin)) != (char *)NULL) { run_it(cmd, envp, mpistatus); count++; if(strstr( mpistatus, "[struct stat=\"ABORT\"") != NULL || strstr( mpistatus, "[struct stat=\"ERROR\"") != NULL) { fprintf(fstatus, "%s\n", mpistatus); failed++; } } } else { init_my_list(numprocs-1); /* counting just the slaves */ while(1) { while(more_cmd) { if((save_cmd!=NULL) || (save_cmd=fgets(cmd, MAXSTR, fin)) != (char *)NULL) { free_idx=next_idle_node(); if(free_idx!= -1) { /* send it out */ MPI_Send(cmd, /* buff */ strlen(cmd)+1, /* count */ MPI_CHAR, /* type */ free_idx, /* dest */ CMD_TAG, /* tag */ MPI_COMM_WORLD); /* comm */ save_cmd=NULL; } else { pause_for_result=1; break; } } else { /* no more command */ int i; for(i=1; i< numprocs; i++) { MPI_Send(NULL, /* buff */ 0, /* count */ MPI_CHAR, /* type */ i, /* dest */ EXIT_TAG, /* tag */ MPI_COMM_WORLD); /* comm */ } more_cmd=0; break; } } if(more_cmd==0) { if(all_idling()) /* all is done */ break; else /* just waiting for the result */ pause_for_result=1; } while(1) { pflag=0; if(pause_for_result) { ret=MPI_Probe( MPI_ANY_SOURCE, RESULT_TAG, MPI_COMM_WORLD, &mstatus ); pause_for_result=0; pflag=1; } else { ret=MPI_Iprobe( MPI_ANY_SOURCE, RESULT_TAG, MPI_COMM_WORLD, &pflag, &mstatus ); } if(ret != MPI_SUCCESS || pflag == 0) break; MPI_Recv(mpistatus, /* buff */ MAXSTR, /* count */ MPI_CHAR, /* type */ MPI_ANY_SOURCE, /* source */ RESULT_TAG, /* tag */ MPI_COMM_WORLD, /* comm */ &mstatus); /* status */ free_idx=mstatus.MPI_SOURCE; set_idle_node(free_idx); count++; if(strstr( mpistatus, "[struct stat=\"ABORT\"") != NULL || strstr( 
mpistatus, "[struct stat=\"ERROR\"") != NULL) { fprintf(fstatus, "%s\n", mpistatus); failed++; } } } } time(&end_time); elapsed_time=difftime(start_time,end_time); fprintf(fstatus, "[struct stat=\"OK\", count=%d, failed=%d, time=%.0f]\n", count, failed, elapsed_time); fflush(fstatus); } else { /* I am the slave */ int more_cmd=0; int done=0; while(1) { int ret; ret=MPI_Probe(0, MPI_ANY_TAG, MPI_COMM_WORLD, &mstatus); if(mstatus.MPI_TAG == CMD_TAG) more_cmd=1; else if(mstatus.MPI_TAG == EXIT_TAG) { done=1; } if(more_cmd) { MPI_Recv(cmd, /* buff */ MAXSTR, /* count */ MPI_CHAR, /* type */ 0, /* source */ CMD_TAG, /* tag */ MPI_COMM_WORLD, /* comm */ &mstatus); /* status */ run_it(cmd, envp, mpistatus); MPI_Send(mpistatus, /* buff */ strlen(mpistatus)+1, /* count */ MPI_CHAR, /* type */ 0, /* dest */ RESULT_TAG, /* tag */ MPI_COMM_WORLD); /* comm */ more_cmd=0; continue; } if(done) { MPI_Recv(cmd, /* buff */ MAXSTR, /* count */ MPI_CHAR, /* type */ 0, /* source */ EXIT_TAG, /* tag */ MPI_COMM_WORLD, /* comm */ &mstatus); /* status */ break; } } } MPI_Finalize(); free_my_list(); if(fin!=stdin) fclose(fin); exit(0); } void exit_panic(int e) { MPI_Finalize(); free_my_list(); exit(e); } /* Process the command-line parameters */ FILE *parse_cmd_line(int argc, char **argv) { char fname [MAXSTR]; int c; FILE *fin; while ((c = getopt(argc, argv, "ds:")) != -1) { switch (c) { case 'd': debug = 1; break; case 's': if((fstatus = fopen(optarg, "w+")) == (FILE *)NULL) { fprintf(fstatus,"[struct stat=\"ERROR\", msg=\"Cannot open status file: %s\"]\n", optarg); exit_panic(1); } break; default: fprintf(fstatus,"[struct stat=\"ERROR\", msg=\"Usage: %s [-d] [-s statusfile] [inputfile]\"]\n", argv[0]); exit_panic(1); break; } } if (!((optind==argc) || (argc - optind == 1))) { fprintf(fstatus,"[struct stat=\"ERROR\", msg=\"Usage: %s [-d] [-s statusfile] [inputfile]\"]\n", argv[0]); exit_panic(1); } if(optind != argc) { strcpy(fname, argv[optind]); fin = fopen(fname, "r"); if(fin == (FILE *)NULL) { fprintf(fstatus, "[struct stat=\"ERROR\", msg=\"Can't open command file.\"]\n"); fflush(fstatus); exit_panic(1); } } else fin=stdin; return fin; } int run_it(char *cmd, char *envp[], char *mpistatus) { char rline[125]; int status=0; status=exec_cmd(cmd, envp, rline, sizeof(rline)); if(status== -1) { sprintf(mpistatus, "[struct stat=\"ERROR\", msg=\"%s\"]\n", rline); return 1; } sprintf(mpistatus, "[struct stat=\"OK\"]\n" ); return 0; } �����������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/cluster/mypopen.c��������������������������������������������������0000644�0001750�0001750�00000017611�11757531137�021305� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). 
pegasus-wms_4.0.1+dfsg/src/tools/cluster/mypopen.c 0000644 0001750 0001750 00000017611 11757531137 021305 0 ustar rynge rynge
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include <stdlib.h>
#include <unistd.h>
#include <stdio.h>
#include <string.h>
#include <memory.h>
#include <errno.h>
#include <sys/wait.h>
#include <signal.h>

#include "mypopen.h"

int mysystem( const char* tag, char* argv[], char* envp[],
              int fd_input, int fd_output, int fd_error )
/* purpose: fork off a command and reconnect its stdio
 * paramtr: tag (IN): Some name to use while logging.
 *          argv (IN): the true argv[] vector for execve
 *          envp (IN): the true envp[] vector for execve
 *          fd_input (IN): if >= 0, connect stdin to this FD
 *          fd_output (IN): if >= 0, connect stdout to this FD
 *          fd_error (IN): if >= 0, connect stderr to this FD
 * returns: >=0: see status in wait(2) and waitpid(2)
 *          special child exit code 126: unable to connect FDs
 *          special child exit code 127: unable to exec command
 *          -1: error setting up
 * globals: modifies errno.
 */
{
  struct sigaction ignore, saveintr, savequit;
  sigset_t childmask, savemask;
  pid_t child;
  int saverr, status = -1;

  ignore.sa_handler = SIG_IGN;
  sigemptyset( &ignore.sa_mask );
  ignore.sa_flags = 0;
  if ( sigaction( SIGINT, &ignore, &saveintr ) < 0 )
    return status;
  if ( sigaction( SIGQUIT, &ignore, &savequit ) < 0 ) {
    sigaction( SIGINT, &saveintr, NULL );
    return status;
  }

  sigemptyset( &childmask );
  sigaddset( &childmask, SIGCHLD );
  if ( sigprocmask( SIG_BLOCK, &childmask, &savemask ) < 0 ) {
    sigaction( SIGINT, &saveintr, NULL );
    sigaction( SIGQUIT, &savequit, NULL );
    return status;
  }

  if ( (child=fork()) < 0 ) {
    /* no more process table space */
    fprintf( stderr, "Error: %s fork: %s\n", tag, strerror(errno) );
    status = -1;
  } else if ( child == (pid_t) 0 ) {
    /* connect jobs stdio; dup2 returns the new descriptor on success,
     * so only -1 indicates failure */
    if ( fd_input >= 0 && fd_input != STDIN_FILENO ) {
      if ( dup2( fd_input, STDIN_FILENO ) == -1 ) _exit(126);
    }
    if ( fd_output >= 0 && fd_output != STDOUT_FILENO ) {
      if ( dup2( fd_output, STDOUT_FILENO ) == -1 ) _exit(126);
    }
    if ( fd_error >= 0 && fd_error != STDERR_FILENO ) {
      if ( dup2( fd_error, STDERR_FILENO ) == -1 ) _exit(126);
    }

    /* undo signal handlers */
    sigaction( SIGINT, &saveintr, NULL );
    sigaction( SIGQUIT, &savequit, NULL );
    sigprocmask( SIG_SETMASK, &savemask, NULL );

    execve( argv[0], argv, envp );
    _exit(127); /* executed in child process */
  } else {
    /* parent */
    while ( waitpid( child, &status, 0 ) < 0 ) {
      if ( errno != EINTR ) {
        saverr = errno;
        fprintf( stderr, "Error: %s waitpid: %s\n", tag, strerror(saverr) );
        errno = saverr;
        status = -1;
        break;
      }
    }

    /* sanity check */
    saverr = errno;
    if ( kill( child, 0 ) == 0 )
      fprintf( stderr, "Warning: %s's job %d is still running!\n", tag, child );
    errno = saverr;
  }

  /* save any errors before anybody overwrites this */
  saverr = errno;

  /* ignore errors on these, too. */
  sigaction( SIGINT, &saveintr, NULL );
  sigaction( SIGQUIT, &savequit, NULL );
  sigprocmask( SIG_SETMASK, &savemask, NULL );

  /* finalize */
  errno = saverr;
  return status;
}

PipeCmd* mypopen( const char* tag, char* argv[], char* envp[] )
/* purpose: fork off a command and capture its stderr and stdout.
 * warning: does not use /bin/sh -c internally.
 * paramtr: tag (IN): some short tag to name the app
 *          argv (IN): the true argv[] vector for execve
 *          envp (IN): the true envp[] vector for execve
 * returns: a structure which contains information about the child process.
 *          it will return NULL on failure.
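 * example: a minimal call sketch (editor's illustration; the argv shown is
 *          hypothetical, and environ is the usual process environment):
 *            char* argv[] = { (char*) "/bin/date", NULL };
 *            PipeCmd* pc = mypopen( "date", argv, environ );
 *            if ( pc != NULL ) {
 *              char buf[256];
 *              read( pc->readfd, buf, sizeof(buf) );  // combined stdout+stderr
 *              int status = mypclose( pc );           // child's exit status
 *            }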
 */
{
  pid_t child;
  int pfds[2];
  PipeCmd* result = NULL;

  /* create communication with subprocess */
  if ( pipe(pfds) == -1 ) {
    fprintf( stderr, "Error: %s create pipe: %s\n", tag, strerror(errno) );
    return result;
  }

  /* prepare for fork */
  fflush( stdout );
  fflush( stderr );

  /* popen(): spawn child process to execute the given application */
  if ( (child=fork()) == (pid_t) -1 ) {
    /* unable to fork */
    fprintf( stderr, "Error: %s fork: %s\n", tag, strerror(errno) );
    return result;
  } else if ( child == 0 ) {
    /* child - redirect stdout and stderr onto communication channel */
    close(pfds[0]);
    if ( dup2( pfds[1], STDOUT_FILENO ) == -1 ) _exit(126);
    if ( dup2( pfds[1], STDERR_FILENO ) == -1 ) _exit(126);
    close(pfds[1]);
    execve( argv[0], argv, envp );
    _exit(127); /* if you reach this, exec failed */
  }

  /* parent */
  close(pfds[1]);

  /* prepare result */
  if ( (result = (PipeCmd*) malloc( sizeof(PipeCmd) )) != NULL ) {
    result->child = child;
    result->readfd = pfds[0];
  }
  return result;
}

extern char** environ;

int mypclose( PipeCmd* po )
/* purpose: free the data structure and all associated resources.
 * paramtr: po (IO): is a valid pipe open structure.
 * returns: process exit status, or -1 for invalid po structure.
 */
{
  int status = -1;

  /* sanity check */
  if ( po != NULL ) {
    /* close fd early to send SIGPIPE */
    close(po->readfd);

    /* wait for child */
    while ( waitpid( po->child, &status, 0 ) == -1 ) {
      if ( errno == EINTR || errno == EAGAIN ) continue;
      fprintf( stderr, "Error: waiting for child %d: %s\n",
               po->child, strerror(errno) );
      status = -1;
      break; /* a persistent error (e.g. ECHILD) must not spin forever */
    }

    /* done with memory piece */
    free( (void*) po );
  }
  return status;
}

int pipe_out_cmd( const char* tag, char* argv[], char* envp[],
                  char* buffer, size_t blen )
/* purpose: fork off a command and capture its stderr and stdout
 * paramtr: tag (IN): some short tag to name the app
 *          argv (IN): the true argv[] vector for execve
 *          envp (IN): the true envp[] vector for execve
 *          buffer (OUT): area to store output into. Will be cleared
 *          blen (IN): length of the area that is usable to us.
* returns: -1 for regular failure, exit code from application otherwise */ { ssize_t rsize, wsize = 0; PipeCmd* cmd = mypopen( tag, argv, envp ); /* prepare */ if ( cmd == NULL ) return -1; else memset( buffer, 0, blen ); /* read result(s) */ while ( (rsize=read( cmd->readfd, buffer+wsize, blen-wsize )) > 0 && wsize < blen ) { wsize += rsize; } /* done with it */ return mypclose(cmd); } int chop_up_cmd(char *argvstr, char* argv[], int cnt) { int i=0; char *tmpstr; char *tmpptr; char *ptr; if ( argvstr==NULL ) return 0; /* skip '\n', for some reason there is a \n */ for ( i=0; i<strlen(argvstr); i++ ) if ( argvstr[i] == '\n' ) argvstr[i]='\0'; tmpstr=(char *) malloc(sizeof(char*)*(strlen(argvstr)+1)); tmpptr=tmpstr; i=0; if ( (ptr=strtok_r(argvstr," ",&tmpstr)) != NULL ) { argv[i]=ptr; i++; } else { free((void*) tmpptr); return 0; } while (i<cnt && (ptr=strtok_r(NULL," ",&tmpstr))!= NULL) { argv[i]=ptr; i++; } argv[i]=NULL; free(tmpptr); return i; } int exec_cmd( char* cmd, char* envp[], char* buffer, size_t blen ) { char *argv[125]; char *argvstr; int result = -1; argvstr=strdup(cmd); if ( chop_up_cmd(argvstr,argv,125) != 0 ) result = pipe_out_cmd( "mGenericExec", argv, envp, buffer, blen ); free((void*) argvstr); return result; } int exec_cmd2( char* cmd, char* buffer, size_t blen ) { return exec_cmd( cmd, environ, buffer, blen ); } �����������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pin/���������������������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�016561� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pin/pin.pl���������������������������������������������������������0000755�0001750�0001750�00000006640�11757531137�017705� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/usr/bin/env perl # # internet file pin service # require 5.005; use strict; use Socket; use POSIX qw(:sys_wait_h strftime); use Time::HiRes qw(); use GDBM_File; my $port = 7876; my $dbfile = 'pin.db'; my $proto = getprotobyname('tcp') || die "unable to determine protocol number for TCP: $!\n"; socket( SOCKFD, PF_INET, SOCK_STREAM, $proto ) || die "socket( PF_INET, SOCK_STREAM ): $!\n"; bind( SOCKFD, pack_sockaddr_in($port,INADDR_ANY) ) || die "bind($port): $!\n"; setsockopt( SOCKFD, SOL_SOCKET, SO_REUSEADDR, pack("i",1) ) || die "setsockopt( SO_REUSEADDR ): $!\n"; listen( SOCKFD, 128 ) || die "listen: $!\n"; $SIG{INT} = sub { $main::terminate=1 }; $SIG{TERM} = sub { $main::terminate=1 }; $SIG{CHLD} = \&REAPER; sub logit { printf STDERR "%.3f @_\n", Time::HiRes::time(); } sub REAPER { my $pid; while ( ($pid = waitpid(-1,WNOHANG)) > 0 ) { my $status = $?; my $rc = $status >> 8; logit( "$pid reaped with status $rc" ); } $SIG{CHLD} = \&REAPER; } sub serve_pin (*$$) { local(*FD) = shift; my $dbase = shift; my ($url,$stamp) = split( 
/\s+/,shift(),2 ); my $now = time(); logit( "[$$] in PIN" ); if ( exists $dbase->{$url} ) { if ( $dbase->{$url} > $now ) { # file is pinned, treat as extension print FD "201 Pin adjusted.\r\n"; } else { # expired pin, treat as new print FD "200 File pinned.\r\n"; } } else { print FD "200 File pinned.\r\n"; } $dbase->{$url} = $now + $stamp; } sub serve_unpin (*$$) { local(*FD) = shift; my $dbase = shift; my $url = shift; logit( "[$$] in UNPIN" ); if ( exists $dbase->{$url} ) { delete $dbase->{$url}; print FD "200 Pin removed.\r\n"; } else { print FD "401 No such URL.\r\n"; } } sub serve_stat (*$$) { local(*FD) = shift; my $dbase = shift; my $url = shift; logit( "[$$] in STAT" ); if ( exists $dbase->{$url} ) { my $diff = $dbase->{$url} - time(); print FD "200 $diff remaining.\r\n"; } else { print FD "401 No such URL.\r\n"; } } sub serve_quit (*$$) { local(*FD) = shift; logit( "[$$] in QUIT" ); print FD "200 Good-bye.\r\n"; } my %callout = ( 'PIN' => \&serve_pin, 'UNPIN' => \&serve_unpin, 'STAT' => \&serve_stat, 'QUIT' => \&serve_quit ); sub serve (*) { local(*FD) = shift; # connect to pin database my %dbase; if ( tie( %dbase, 'GDBM_File', $dbfile, GDBM_WRCREAT, 0644 ) ) { logit( "[$$] database opened" ); } else { print FD "501 Internal error.\r\n"; die "tie $dbfile: $!\n"; } while ( <FD> ) { s/[\r\n]+$//; # safer than chomp my ($cmd,$remainder) = split(/\s+/,$_,2); if ( exists $callout{$cmd} ) { no strict; $callout{$cmd}->( FD, \%dbase, $remainder ); } else { logit( "[$$] illegal command: $cmd" ); print FD "400 Illegal instruction.\r\n"; } last if $cmd eq 'QUIT'; } logit( "[$$] done" ); untie( %dbase ); } while ( ! $main::terminate ) { my $paddr = accept( CLIENT, SOCKFD ); next unless defined $paddr; my ($port,$host) = sockaddr_in($paddr); logit( "connection from @{[inet_ntoa($host)]}:$port" ); my $pid = fork(); if ( $pid == -1 ) { # failure die "unable to fork: $!\n"; } elsif ( $pid > 0 ) { # parent close( CLIENT ); logit( "forking off $pid" ); } else { # child close( SOCKFD ); select( CLIENT ); $|=1; serve( CLIENT ); close( CLIENT ); exit(0); } } exit 0; ������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/free/��������������������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�016714� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/free/util.h��������������������������������������������������������0000644�0001750�0001750�00000002670�11757531137�020037� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#ifndef _UTIL_H
#define _UTIL_H

#include <sys/types.h>

#if ! defined(MAX)
#define MAX(a,b) ((a) > (b) ? (a) : (b))
#endif

#if ! defined(MIN)
#define MIN(a,b) ((a) < (b) ? (a) : (b))
#endif

extern void show( unsigned megs );
/* purpose: Create the necessary command line information on stdout.
 * paramtr: megs (IN): is the size in MB of true RAM of the host
 * environ: VDS_JAVA_HEAPMAX: maximum size of heap in MB, or 0 for don't set
 *          VDS_JAVA_HEAPMIN: minimum size of heap in MB, or 0 for don't set
 */

extern void help( int argc, char* argv[], const char* mainid );
/* purpose: Check for the presence of -h, -? or --help, and help.
 * paramtr: argc (IN): see main()
 *          argv (IN): see main()
 *          mainid (IN): main's RCS Id string
 * returns: only in the absence of options
 */

#endif /* _UTIL_H */
pegasus-wms_4.0.1+dfsg/src/tools/free/util.c 0000644 0001750 0001750 00000004414 11757531137 020030 0 ustar rynge rynge
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include "util.h"

static const char* RCS_ID = "$Id: util.c 50 2007-05-19 00:48:32Z gmehta $";

void show( unsigned megs )
/* purpose: Create the necessary command line information on stdout.
 * paramtr: megs (IN): is the size in MB of true RAM of the host
 * environ: VDS_JAVA_HEAPMAX: maximum size of heap in MB, or 0 for don't set
 *          VDS_JAVA_HEAPMIN: minimum size of heap in MB, or 0 for don't set
 */
{
  unsigned min = -1u;
  unsigned max = -1u;
  char* env_max = getenv("VDS_JAVA_HEAPMAX");
  char* env_min = getenv("VDS_JAVA_HEAPMIN");
  if ( env_min != NULL ) min = strtoul( env_min, 0, 0 );
  if ( env_max != NULL ) max = strtoul( env_max, 0, 0 );

  if ( min == -1u ) min = MAX( 64, megs >> 3 );
  if ( max == -1u ) max = MIN( megs, max ); /* caps the heap at physical RAM */

  if ( min > 0 ) printf( " -Xms%um", min );
  if ( max > 0 ) printf( " -Xmx%um", max );
  putchar( '\n' );
}

void help( int argc, char* argv[], const char* mainid )
/* purpose: Check for the presence of -h, -? or --help, and help.
* paramtr: argc (IN): see main() * argv (IN): see main() * mainid (IN): main's RCS Id string * returns: only in the absence of options */ { if ( argc > 1 ) { if ( argv[1][0] == '-' ) { if ( ((argv[1][1] == 'h' || argv[1][1] == '?') && argv[1][2] == 0 ) || ( argv[1][1] == '-' && strcmp(argv[1]+2,"help") == 0 ) ) { puts( "Provide Java 1.4 with appropriate memory settings.\n" ); puts( mainid ); puts( RCS_ID ); } else { fprintf( stderr, "Illegal option \"%s\"\n", argv[1] ); } } else { fprintf( stderr, "Illegal argument \"%s\"\n", argv[1] ); } exit(1); } } ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/free/lnx-free.c����������������������������������������������������0000644�0001750�0001750�00000002645�11757531137�020577� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <stdio.h> #include <string.h> #include "util.h" static const char* RCS_ID = "$Id: lnx-free.c 50 2007-05-19 00:48:32Z gmehta $"; int main( int argc, char* argv[] ) { char line[1024]; FILE* mem; unsigned megs = 0u; help( argc, argv, RCS_ID ); #ifndef DEBUG if ( (mem = popen( "/usr/bin/free -m", "r" )) == NULL ) { perror( "open /usr/bin/free" ); return 1; } #else if ( (mem = fopen( "free.txt", "r" )) == NULL ) { perror( "fopen free.txt" ); return 1; } #endif while ( fgets( line, sizeof(line), mem ) != NULL ) { if ( strncmp( line, "Mem", 3 ) == 0 ) { if ( sscanf( line, "%*s %u", &megs ) > 0 ) break; } } #ifndef DEBUG pclose(mem); #else fclose(mem); #endif /* Grrrrr!!!!! 
*/ if ( megs > 1900 ) megs = 1024; show( megs ); return 0; } �������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/free/Makefile������������������������������������������������������0000644�0001750�0001750�00000004404�11757531137�020346� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������# # Makefile # INSTALL = install # use /usr/ucb/install on Solaris STRIP = strip CC = gcc LD = gcc CXX = g++ RM = rm -f SYSTEM = $(shell uname -s | tr '[a-z]' '[A-Z]' | tr -d '_ -/') VERSION = $(shell uname -r) MARCH = $(shell uname -m | tr '[A-Z]' '[a-z]') MAJOR = $(firstword $(subst ., ,$(VERSION))) CFLAGS += -O LOADLIBES += -lm ifndef ${prefix} prefix = $(PEGASUS_HOME) endif ifeq (SUNOS,${SYSTEM}) ifeq (5,${MAJOR}) # use these for the SUN CC compiler CC = cc LD = $(CC) ## SPARCv7 CFLAGS = -dalign -ftrap=%none -fsimple -xlibmil -xtarget=generic ## SPARCv9 #CFLAGS = -dalign -ftrap=%none -fsimple -xlibmil -xtarget=ultra -xarch=v9 CFLAGS := -DSOLARIS $(CFLAGS) -xO4 -D__EXTENSIONS__=1 LOADLIBES += -lnsl -lsocket INSTALL = /usr/ucb/install PRODUCT = sol-free SOURCE = $(PRODUCT) else # old Solaris 1 not supported! endif # on Solaris use this link string for gcc: # gcc -Wl,-Bstatic xx.o -lstdc++ -lm -lnsl -lsocket -Wl,-Bdynamic -ldl -o xx endif ifeq (IRIX64,${SYSTEM}) # The regular 64bit Irix stuff is just too slow, use n32! SYSTEM := IRIX endif ifeq (IRIX,${SYSTEM}) CC = cc -n32 -mips3 -r4000 LD = $(CC) OPT_NORM = -O3 -IPA -LNO:opt=1 endif ifeq (LINUX,${SYSTEM}) ## determine highest version of all installed libc's. #LIBCVER = $(shell /bin/ls /lib/libc.so.? | \ # awk -F'.' '{ if (m<$$3) m=$$3;} END { print m} ') ifeq (ia64,${MARCH}) CFLAGS = -Wall -O2 -ggdb PRODUCT = lnx-ia64-free SOURCE = lnx-free else CFLAGS = -Wall -O2 -march=i686 -ggdb #LDFLAGS += -static PRODUCT = lnx-free SOURCE = $(PRODUCT) endif endif ifeq (AIX,${SYSTEM}) CC = xlc PRODUCT = aix-free SOURCE = $(PRODUCT) endif # # === [3] ======================================================= rules section # There is no need to change things below this line. 
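# Worked example (editor's illustration; the uname outputs are hypothetical):
# on a 32-bit Linux host, `uname -s` = Linux gives SYSTEM = LINUX,
# `uname -r` = 2.6.18 gives VERSION = 2.6.18 and MAJOR = 2, and
# `uname -m` = i686 gives MARCH = i686, so the LINUX branch above selects
# PRODUCT = lnx-free and SOURCE = lnx-free.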
CFLAGS += -D${SYSTEM} -DMAJOR=${MAJOR} # -DSOCKLEN=${SOCKLEN} %.o : %.c $(CC) $(CFLAGS) $< -c -o $@ all : $(PRODUCT) $(PRODUCT): $(SOURCE).o util.o $(LD) $(LDFLAGS) $(CFLAGS) $^ -o $@ $(LOADLIBES) $(SOURCE).o: $(SOURCE).c util.o: util.c util.h install: $(PRODUCT) $(INSTALL) -m 0755 $(PRODUCT) $(prefix)/bin install.man: @echo "no manpage to install" install.doc: @echo "no documentation to install" install.all: install # install.man install.doc clean: $(RM) $(PRODUCT).o util.o distclean: clean $(RM) $(PRODUCT) ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/free/aix-free.c����������������������������������������������������0000644�0001750�0001750�00000002467�11757531137�020561� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <stdio.h> #include "util.h" static const char* RCS_ID = "$Id: aix-free.c 50 2007-05-19 00:48:32Z gmehta $"; int main( int argc, char* argv[] ) { char line[1024]; FILE* mem; unsigned long kilos; unsigned megs; help( argc, argv, RCS_ID ); /* lsattr -E -l sys0 -a realmem */ /* realmem 3137536 Amount of usable physical memory in Kbytes False */ if ( (mem = popen( "/usr/sbin/lsattr -E -l sys0 -a realmem", "r" )) == NULL ) { perror( "open /usr/sbin/lsattr" ); return 1; } while ( fgets( line, sizeof(line), mem ) != NULL ) { if ( sscanf( line, "realmem %lu Amount", &kilos ) > 0 ) break; } pclose(mem); megs = kilos >> 10; show( megs ); return 0; } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/free/sol-free.c����������������������������������������������������0000644�0001750�0001750�00000002304�11757531137�020563� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <stdio.h> #include "util.h" static const char* RCS_ID = "$Id: sol-free.c 50 2007-05-19 00:48:32Z gmehta $"; int main( int argc, char* argv[] ) { char line[1024]; FILE* mem; unsigned megs; help( argc, argv, RCS_ID ); if ( (mem = popen( "/usr/sbin/prtconf", "r" )) == NULL ) { perror( "open /usr/sbin/prtconf" ); return 1; } while ( fgets( line, sizeof(line), mem ) != NULL ) { if ( sscanf( line, "Memory size: %u Megabytes", &megs ) > 0 ) break; } pclose(mem); /* Grrrrr!!!!! */ if ( megs > 4095 ) megs = 4095; show( megs ); return 0; } ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-invoke/����������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�020733� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-invoke/test.1����������������������������������������������0000644�0001750�0001750�00000000020�11757531137�021754� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/bin/date -Isec ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-invoke/make.SunOS������������������������������������������0000755�0001750�0001750�00000000340�11757531137�022571� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/sh gmake distclean gmake EXTRACFLAGS='$(V7FLAGS)' pegasus-invoke mv pegasus-invoke pegasus-invoke.sparcv7 gmake clean gmake EXTRACFLAGS='$(V9FLAGS)' pegasus-invoke mv pegasus-invoke pegasus-invoke.sparcv9 gmake clean 
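# Usage sketch (editor's illustration): run this helper from the
# pegasus-invoke source directory on a Solaris host; it drives gmake twice,
# once per instruction-set target, to produce both SPARC variants:
#   ./make.SunOS && ls pegasus-invoke.sparcv7 pegasus-invoke.sparcv9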
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-invoke/Makefile 0000644 0001750 0001750 00000004131 11757531137 022362 0 ustar rynge rynge
#
# Makefile
#
INSTALL = install # use /usr/ucb/install on Solaris
RM = rm -f
CC = gcc
LD = $(CC)
SYSTEM = $(shell uname -s | tr '[a-z]' '[A-Z]' | tr -d '_ -/')
VERSION = $(shell uname -r)
MARCH = $(shell uname -m | tr '[A-Z]' '[a-z]')
MAJOR = $(firstword $(subst ., ,$(VERSION)))
MINOR = $(strip $(word 2,$(subst ., ,$(VERSION))))

ifndef ${prefix}
prefix = $(PEGASUS_HOME)
endif

ifndef ${datadir}
datadir = $(prefix)/share
endif

ifeq (SUNOS,${SYSTEM})
ifeq (5,${MAJOR})
# use these for the SUN CC compiler
CC = cc
LD = $(CC)
## SPARCv7
V7FLAGS = -xtarget=generic
V9FLAGS = -xtarget=ultra -xarch=v9
CFLAGS = -dalign -ftrap=%none -fsimple -xlibmil $(EXTRACFLAGS)
#EXTRACFLAGS = $(V7FLAGS)
CFLAGS := -DSOLARIS $(CFLAGS) -xO4 -D__EXTENSIONS__=1
LOADLIBES += -lnsl -lsocket
INSTALL = /usr/ucb/install
else
# old Solaris 1 not supported!
endif
endif

ifeq (IRIX64,${SYSTEM})
# The regular 64bit Irix stuff is just too slow, use n32!
SYSTEM := IRIX
endif

ifeq (IRIX,${SYSTEM})
CC = cc -n32 -mips3 -r4000
LD = $(CC)
OPT_NORM = -O3 -IPA -LNO:opt=1
endif

ifeq (AIX,${SYSTEM})
CXX = xlC_r
CC = xlc_r
endif

ifeq (LINUX,${SYSTEM})
ifeq (ia64,${MARCH})
CFLAGS = -Wall -O2 -ggdb
else
ifeq (x86_64,${MARCH})
CFLAGS = -Wall -O2 -ggdb -m64
else
ifeq (armv7l,${MARCH})
CFLAGS = -Wall -O2 -ggdb
else
CFLAGS = -Wall -O2 -march=i686 -ggdb
#EXTRA_LDFLAGS += -static
endif
endif
endif
#CFLAGS += -D__USE_POSIX=199309
endif

#
# === [3] ======================================================= rules section
# There is no need to change things below this line.
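# Worked example (editor's illustration; the kernel version is hypothetical):
# with `uname -r` = 2.6.18 the assignments above yield VERSION = 2.6.18,
# MAJOR = 2 and MINOR = 6, so the rules section below compiles with
# -DLINUX -DMAJOR=2 -DMINOR=6 on a Linux host.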
CFLAGS += -D${SYSTEM} -DMAJOR=${MAJOR} -DMINOR=${MINOR}
#CFLAGS += -D_POSIX_C_SOURCE=199506 -D__USE_XOPEN_EXTENDED

%.o : %.c
	$(CC) $(CFLAGS) $< -c -o $@

all : pegasus-invoke

pegasus-invoke: pegasus-invoke.o main.o
	$(LD) $(LDFLAGS) $(CFLAGS) $(OBJS) $^ -o $@ $(LOADLIBES)

pegasus-invoke.o: pegasus-invoke.c pegasus-invoke.h
main.o: main.c

install: pegasus-invoke
	$(INSTALL) -m 0755 pegasus-invoke $(prefix)/bin

clean:
	$(RM) *.o core core.*

distclean: clean
	$(RM) pegasus-invoke
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-invoke/test.2 0000755 0001750 0001750 00000000033 11757531137 021764 0 ustar rynge rynge
/bin/echo @test.3 @@test.3
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-invoke/main.c 0000644 0001750 0001750 00000006003 11757531137 022012 0 ustar rynge rynge
#include <sys/types.h>
#include <stdio.h>
#include <unistd.h>
#include <errno.h>
#include <stdlib.h>
#include <string.h>
#include <limits.h>

#include "pegasus-invoke.h"

static const char* RCS_ID = "$Id: main.c 4562 2011-10-04 16:36:33Z voeckler $";

int debug = 0;

static void helpMe( const char* arg0 )
/* purpose: print invocation quick help with currently set parameters
 * paramtr: arg0 (IN): argument vector element 0 from main
 */
{
  const char* p = strrchr( arg0, '/' );
  if ( p ) ++p;
  else p = arg0;

  puts( RCS_ID );
  printf( "Usage:\t%s [options] (app | @fn) [arg | @fn [..]]\n\n", p );
  printf( "Optional arguments:\n"
          " -d\tincrease debug level, show what is going on.\n"
          " -h\tprint this help message and exit.\n"
          " --\tend options processing.\n"
          "\n"
          "Mandatory arguments (mutually exclusive):\n"
          " app\tname of the application to run w/o relying on PATH.\n"
          " @fn\tname of a file with one argument per line, app as 1st.\n"
          "\n"
          "Further options (inclusive, repetitive, any order):\n"
          " arg\tcommand-line argument\n"
          " @fn\tname of file with one argument per line.\n"
          "\n" );
}

static int log10(
long x ) { int result; if ( x == 0 ) return 1; else for ( result=0; x > 0; ++result ) x/=10; return result; } static long limit( int name, long defValue ) { long result = sysconf( name ); if ( result <= 0 ) result = defValue; return result; } int main( int argc, char* argv[], char* envp[] ) { size_t i, j, size, total, capacity = argc; char** arg = malloc( capacity * sizeof(char*) ); int width, keeploop = 1; long maxArgSize = limit( _SC_ARG_MAX, ARG_MAX ); /* show help, if invoked empty */ if ( argc == 1 ) { helpMe(argv[0]); return 127; } /* parse options to invoke without disturbing app options. */ for ( i=1; i<argc && argv[i][0]=='-' && keeploop; ++i ) { switch ( argv[i][1] ) { case 'd': for ( j=1; argv[i][j]=='d'; ++j ) debug++; break; case 'h': helpMe(argv[0]); return 0; case '-': keeploop = 0; break; default: fprintf( stderr, "Illegal argument %zd: %s", i, argv[i] ); helpMe(argv[0]); return 127; } } /* parse rest of command line */ for ( size=0; i < argc; ++i ) { if ( add_arg( argv[i], &arg, &size, &capacity, 1 ) == -1 ) { /* check for errors */ fprintf( stderr, "Problems with argument %zd: %s\n", i, strerror(errno) ); } } /* finalize argument array */ arg[size] = NULL; /* determine size */ width = log10(size); for ( total=size, i=0; i<size; ++i ) { if ( debug ) printf( "# %*zd: %s\n", width, i, arg[i] ); total += strlen(arg[i]); } if ( debug ) printf( "# length=%zd, limit=%ld\n", total, maxArgSize ); /* warn about system limits */ if ( total >= maxArgSize ) { fprintf( stderr, "Warning: command-line length (%zd) exceeds system limit (%ld)\n", total, maxArgSize ); } /* run program */ execve( arg[0], arg, envp ); /* only reached in case of error */ perror( arg[0] ); return 127; } �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-invoke/test.3����������������������������������������������0000755�0001750�0001750�00000000017�11757531137�021767� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������This is test 3 �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-invoke/pegasus-invoke.h������������������������������������0000644�0001750�0001750�00000004702�11757531137�024037� 0����������������������������������������������������������������������������������������������������ustar 
�rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _PEGASUS_INVOKE_H #define _PEGASUS_INVOKE_H #include <sys/types.h> #ifndef ARG_MAX #ifdef POSIX_ARG_MAX #define ARG_MAX (POSIX_ARG_MAX) #else #define ARG_MAX (4096) #endif #endif extern int append_arg( char* data, char*** arg, size_t* index, size_t* capacity ); /* purpose: adds a string to a list of arguments * This is a low-level function, use add_arg instead. * paramtr: data (IN): string to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * returns: 0 means ok, -1 means error, see errno * warning: Always creates a strdup of data */ extern int expand_arg( const char* fn, char*** arg, size_t* index, size_t* capacity, int level ); /* purpose: adds the contents of a file, line by line, to an argument vector * This is a low-level function, use add_arg instead. * paramtr: fn (IN): name of file with contents to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * level (IN): level of recursion * returns: 0 means ok, -1 means error, see errno */ extern int add_arg( char* data, char*** arg, size_t* index, size_t* capacity, int level ); /* purpose: sorts a given full argument string, whether to add or extend * This is the high-level interface to previous functions. * paramtr: data (IN): string to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * level (IN): level of recursion, use 1 * returns: 0 means ok, -1 means error, see errno */ #endif /* _PEGASUS_INVOKE_H */ ��������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-invoke/pegasus-invoke.c������������������������������������0000644�0001750�0001750�00000013375�11757531137�024040� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <errno.h> #include <stdio.h> #include <string.h> #include <stdlib.h> #include "pegasus-invoke.h" static const char* RCS_ID = "$Id: pegasus-invoke.c 4562 2011-10-04 16:36:33Z voeckler $"; extern int debug; int append_arg( char* data, char*** arg, size_t* index, size_t* capacity ) /* purpose: adds a string to a list of arguments * This is a low-level function, use add_arg instead. * paramtr: data (IN): string to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * returns: 0 means ok, -1 means error, see errno * warning: Always creates a strdup of data */ { if ( debug ) fprintf( stderr, "# data=%p arg=%p index=%zd cap=%zd: \"%s\"\n", data, *arg, *index, *capacity, data ); if ( *index >= *capacity ) { *capacity <<= 1; if ( debug > 1 ) fputs( "# realloc\n", stderr ); *arg = realloc( *arg, *capacity * sizeof(char*) ); if ( *arg == NULL ) return -1; /* re-calloc: init new space with NULL */ memset( *arg + *index, 0, sizeof(char*) * (*capacity - *index) ); } (*arg)[(*index)++] = data ? strdup(data) : NULL; return 0; } static char* merge( char* s1, char* s2 ) /* purpose: merge two strings and return the result * paramtr: s1 (IN): first string, may be NULL * s2 (IN): second string, must not be NULL * returns: merge of strings into newly allocated area. * NULL, if the allocation failed. */ { if ( s1 == NULL ) { return strdup(s2); } else { size_t len = strlen(s1) + strlen(s2) + 2; char* temp = (char*) malloc(len); if ( temp == NULL ) return NULL; strncpy( temp, s1, len ); strncat( temp, " ", len ); strncat( temp, s2, len ); return temp; } } int expand_arg( const char* fn, char*** arg, size_t* index, size_t* capacity, int level ) /* purpose: adds the contents of a file, line by line, to an argument vector * This is a low-level function, use add_arg instead. 
* paramtr: fn (IN): name of file with contents to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * level (IN): level of recursion * returns: 0 means ok, -1 means error, see errno */ { FILE* f; char line[4096]; size_t len; char* cmd, *save = NULL; unsigned long lineno = 0ul; if ( level >= 32 ) { fprintf( stderr, "ERROR: Nesting too deep (%d levels), " "circuit breaker triggered!\n", level ); errno = EMLINK; return -1; } if ( (f = fopen( fn, "r" )) == NULL ) { /* error while opening file for reading */ return -1; } while ( fgets( line, sizeof(line), f ) ) { ++lineno; /* check for skippable line */ if ( line[0] == 0 || line[0] == '\r' || line[0] == '\n' ) continue; /* check for unterminated line (larger than buffer) */ len = strlen(line); if ( line[len-1] != '\r' && line[len-1] != '\n' ) { /* read buffer was too small, save and append */ char* temp = merge( save, line ); if ( temp == NULL ) { /* error while merging strings */ int saverr = errno; fclose(f); if ( save != NULL ) free((void*) save); errno = saverr; return -1; } if ( save != NULL ) free((void*) save); save = temp; lineno--; continue; } else { /* remove terminating character(s) */ while ( len > 0 && (line[len-1] == '\r' || line[len-1] == '\n') ) { line[len-1] = 0; len--; } } /* final assembly of argument */ if ( save != NULL ) { /* assemble merged line */ cmd = merge( save, line ); free((void*) save); save = NULL; if ( cmd == NULL ) { /* error while merging strings */ int saverr = errno; fclose(f); errno = saverr; return -1; } } else { /* no overlong lines */ cmd = line; } if ( debug ) { printf( "# %s:%lu: %s\n", fn, lineno, cmd ); } if ( (len=strlen(cmd)) > 0 ) { /* recursion here */ if ( add_arg( cmd, arg, index, capacity, level+1 ) == -1 ) { int saverr = errno; fclose(f); if ( cmd != line ) free((void*) cmd); errno = saverr; return -1; } } /* done with this argument */ if ( cmd != line ) free((void*) cmd); } fclose(f); return 0; } int add_arg( char* s, char*** arg, size_t* index, size_t* capacity, int level ) /* purpose: sorts a given full argument string, whether to add or extend * This is the high-level interface to previous functions. 
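* example: the dispatch rules implemented below (editor's illustration;
*          args.txt is a hypothetical file name):
*            add_arg( "-v", ... )        appends the literal string "-v"
*            add_arg( "@args.txt", ... ) expands args.txt, one line per argument
*            add_arg( "@@literal", ... ) appends the literal string "@literal"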
* paramtr: s (IN): string to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * level (IN): level of recursion, use 1 * returns: 0 means ok, -1 means error, see errno */ { if ( s[0] == '@' && s[1] != 0 ) { if ( s[1] == '@' ) { return append_arg( s+1, arg, index, capacity ); } else { return expand_arg( s+1, arg, index, capacity, level+1 ); } } else { return append_arg( s, arg, index, capacity ); } } �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/�������������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�020206� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/pegasus-keg.cc�����������������������������������������0000644�0001750�0001750�00000046133�11757531137�022727� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <sys/types.h> #include <ctype.h> #include <errno.h> #include <math.h> #include <stdarg.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <sys/time.h> #include <time.h> #include <unistd.h> #include <sys/utsname.h> #include <sys/socket.h> #include <sys/stat.h> #include <fcntl.h> #include <netinet/in.h> #include <arpa/inet.h> #include <sys/ioctl.h> #include <net/if.h> #include <netdb.h> #ifdef HAS_SYS_SOCKIO #include <sys/sockio.h> #endif #ifdef MACHINE_SPECIFIC #ifdef DARWIN #include "darwin.hh" extern char** environ; #endif // DARWIN #if defined(SUNOS) || defined(SOLARIS) #include "sunos.hh" #endif // SUNOS || SOLARIS #ifdef LINUX #include "basic.hh" #include "linux.hh" #endif // LINUX #endif // MACHINE_SPECIFIC static const char* RCS_ID = "$Id: pegasus-keg.cc 4536 2011-09-27 01:25:24Z voeckler $"; static char output[4096]; static char pattern[] = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz\r\n"; class DirtyVector { // using malloc/free instead of new/delete to avoid linking // in new libgcc_s and libstdc++ public: typedef const char* CharP; DirtyVector() :m_save(0),m_size(0),m_capacity(0) { } ~DirtyVector() { if ( m_capacity ) free( static_cast<void*>(m_save) ); } size_t size() const { return m_size; } const char* operator[]( unsigned i ) const { return ( i <= m_size ? m_save[i] : 0 ); } const char* at( unsigned i ) const { return ( i <= m_size ? 
m_save[i] : 0 ); } void push_back( const char* s ) { if ( m_size+1 >= m_capacity ) { if ( m_capacity ) m_capacity <<= 1; else m_capacity = 5; CharP* newsave = static_cast<CharP*>( malloc(m_capacity*sizeof(CharP)) ); memset( newsave, 0, m_capacity*sizeof(CharP) ); memcpy( newsave, m_save, m_size*sizeof(CharP) ); free( static_cast<void*>(m_save) ); m_save = newsave; } m_save[m_size] = s; m_size++; } private: CharP* m_save; size_t m_size; size_t m_capacity; }; double now( void ) // purpose: determine the current time in microsecond resolution // returns: the current timestamp as seconds w/ us fraction, or -1.0 { int timeout = 0; struct timeval now = { -1, 0 }; while ( gettimeofday( &now, 0 ) == -1 && timeout < 10 ) timeout++; return ( now.tv_sec + now.tv_usec / 1E6 ); } template <class T> inline T sqr( T x ) // purpose: function to calculate the square // paramtr: x (IN): value to square // returns: the square of the value // warning: might run into exception, if x is too large { return x*x; } size_t fractal( double x, double y, double a, double b, size_t max ) // purpose: calculate z := z^2 + c repeatedly // paramtr: x (IN): real part of z // y (IN): imag part of z // a (IN): real part of c // b (IN): imag part of c // max (IN): maximum number of iterations // returns: iterations until |z| >= 2.0 or maximum iterations reached // abbrev1: since |z| = sqrt(x^2 + y^2) squaring both sides of |z| >= 2.0 // will result in x^2 + y^2 >= 4.0 // abbrev2: z := z^2 + c = (x^2 - y^2 + a) + i(2xy + b) // squares of x and y are kept for the loop condition { double xx; // temp double qx = sqr(x); double qy = sqr(y); size_t n = 0; for ( ; n < max && qx+qy < 4.0; ++n ) { xx = qx - qy + a; qy = sqr((y=2.0 * x * y + b)); qx = sqr((x=xx)); } return n; } unsigned long spin( unsigned long interval ) { double stop = now() + interval; unsigned long count = 0; seed48( (unsigned short*) ((void*) &stop) ); double julia_x = 1.0-2.0*drand48(); double julia_y = 1.0-2.0*drand48(); do { for ( int i=0; i<16; ++i ) fractal( 1.0-2.0*drand48(), 1.0-2.0*drand48(), julia_x, julia_y, 1024 ); ++count; } while ( now() < stop ); return count; } char* append( char* buffer, size_t capacity, const char* fmt, ... ) { va_list ap; char line[4096]; // many systems don't like alloca() va_start( ap, fmt ); vsnprintf( line, 4096-4, fmt, ap ); va_end(ap); return strncat( buffer, line, capacity ); } char* append( char* buffer, size_t capacity, char ch ) { size_t size = strlen(buffer); if ( size+1 < capacity ) { buffer[size] = ch; buffer[size+1] = '\0'; } return buffer; } static int debug( const char* fmt, ... 
static int debug( const char* fmt, ... )
{
  int result;
  va_list ap;
  char buffer[4096];
  int saverr = errno;

  va_start( ap, fmt );
  vsnprintf( buffer, sizeof(buffer), fmt, ap );
  va_end( ap );

  result = write( STDERR_FILENO, buffer, strlen(buffer) );
  errno = saverr;
  return result;
}

static unsigned long vpn_network[5] = { 0, 0, 0, 0, 0 };
static unsigned long vpn_netmask[5] = { 0, 0, 0, 0, 0 };

static void singleton_init( void )
{
  /* singleton init */
  if ( vpn_network[0] == 0ul ) {
    vpn_network[0] = inet_addr("127.0.0.0");   /* loopbacknet */
    vpn_network[1] = inet_addr("10.0.0.0");    /* class A VPN net */
    vpn_network[2] = inet_addr("172.16.0.0");  /* class B VPN nets */
    vpn_network[3] = inet_addr("192.168.0.0"); /* class C VPN nets */
    vpn_network[4] = inet_addr("169.254.0.0"); /* link-local autoconfig */
  }

  /* singleton init */
  if ( vpn_netmask[0] == 0ul ) {
    vpn_netmask[0] = inet_addr("255.0.0.0");   /* loopbackmask */
    vpn_netmask[1] = inet_addr("255.0.0.0");   /* class A mask */
    vpn_netmask[2] = inet_addr("255.240.0.0"); /* class B VPN mask */
    vpn_netmask[3] = inet_addr("255.255.0.0"); /* class C VPN mask */
    vpn_netmask[4] = inet_addr("255.254.0.0"); /* link-local mask */
  }
}

struct ifreq* primary_interface( void )
/* purpose: obtain the primary interface information
 * returns: a newly-allocated structure containing the interface info,
 *          or NULL to indicate an error. */
{
#if defined(SIOCGLIFNUM)
  struct lifnum ifnr;
#endif
  struct sockaddr_in sa;
  struct ifconf ifc;
  struct ifreq result, primary;
  struct ifreq* ifrcopy = NULL;
  char *ptr, *buf = 0;
  int lastlen, len, sockfd, flag = 0;

  /*
   * phase 0: init
   */
  memset( &result, 0, sizeof(result) );
  memset( &primary, 0, sizeof(primary) );
  singleton_init();

  /* create a socket */
  if ( (sockfd = socket( AF_INET, SOCK_DGRAM, 0 )) == -1 ) {
    debug( "ERROR: socket DGRAM: %d: %s\n", errno, strerror(errno) );
    return ifrcopy;
  }

  /*
   * phase 1: guestimate size of buffer necessary to contain all interface
   * information records.
   */
#if defined(SIOCGLIFNUM)
  /* API exists to determine the correct buffer size */
  memset( &ifnr, 0, sizeof(ifnr) );
  ifnr.lifn_family = AF_INET;
  if ( ioctl( sockfd, SIOCGLIFNUM, &ifnr ) < 0 ) {
    debug( "ERROR: ioctl SIOCGLIFNUM: %d: %s\n", errno, strerror(errno) );
    if ( errno != EINVAL ) {
      close(sockfd);
      return ifrcopy;
    }
  } else {
    len = lastlen = ifnr.lifn_count * sizeof(struct ifreq);
  }
#else /* does not have SIOCGLIFNUM */
  /* determine by repetitive guessing a buffer size */
  lastlen = len = 4 * sizeof(struct ifreq); /* 1st guesstimate */
#endif
  /* POST CONDITION: some buffer size determined */

  /* FIXME: Missing upper bound */
  for (;;) {
    /* guestimate correct buffer length */
    buf = (char*) malloc(len);
    memset( buf, 0, len );
    ifc.ifc_len = len;
    ifc.ifc_buf = buf;
    if ( ioctl( sockfd, SIOCGIFCONF, &ifc ) < 0 ) {
      debug( "WARN: ioctl SIOCGIFCONF: %d: %s\n", errno, strerror(errno) );
      if ( errno != EINVAL || lastlen != 0 ) {
        close(sockfd);
        return ifrcopy;
      }
    } else {
      if ( ifc.ifc_len == lastlen ) break; /* success */
      lastlen = ifc.ifc_len;
    }
    len <<= 1;
    free((void*) buf);
  }
  /* POST CONDITION: Now the buffer contains list of all interfaces */
  /*
   * phase 2: walk interface list until a good interface is reached
   */
  /* Notice: recycle meaning of "len" in here */
  for ( ptr = buf; ptr < buf + ifc.ifc_len; ) {
    struct ifreq* ifr = (struct ifreq*) ptr;
    len = sizeof(*ifr);
    ptr += len;

    /* interested in IPv4 interfaces only */
    if ( ifr->ifr_addr.sa_family != AF_INET )
      continue;

    memcpy( &sa, &(ifr->ifr_addr), sizeof(struct sockaddr_in) );

    /* Do not use localhost aka loopback interfaces. While loopback
     * interfaces traditionally start with "lo", this is not mandatory.
     * It is safer to check that the address is in the 127.0.0.0 class A
     * network. */
    if ( (sa.sin_addr.s_addr & vpn_netmask[0]) == vpn_network[0] )
      continue;

    /* prime candidate - check, if interface is UP */
    result = *ifr;
    ioctl( sockfd, SIOCGIFFLAGS, &result );

    /* interface is up - our work is done. Or is it? */
    if ( (result.ifr_flags & IFF_UP) ) {
      if ( ! flag ) {
        /* remember first found primary interface */
        primary = result;
        flag = 1;
      }

      /* check for VPNs */
      if ( (sa.sin_addr.s_addr & vpn_netmask[1]) != vpn_network[1] &&
           (sa.sin_addr.s_addr & vpn_netmask[2]) != vpn_network[2] &&
           (sa.sin_addr.s_addr & vpn_netmask[3]) != vpn_network[3] ) {
        flag = 2;
        break;
      }
    }
  }

  /* check for loop exceeded - if yes, fall back on first primary */
  if ( flag == 1 && ptr >= buf + ifc.ifc_len )
    result = primary;

  /* clean up */
  free((void*) buf);
  close(sockfd);

  /* create a freshly allocated copy */
  ifrcopy = (struct ifreq*) malloc( sizeof(struct ifreq) );
  memcpy( ifrcopy, &result, sizeof(struct ifreq) );
  return ifrcopy;
}

struct in_addr whoami( char* buffer, size_t size )
/* purpose: copy the primary interface's IPv4 dotted quad into the given buffer
 * paramtr: buffer (IO): start of buffer
 *          size (IN): maximum capacity the buffer is willing to accept
 * returns: the address of the primary interface; the dotted quad is left
 *          in the modified buffer. */
{
  /* enumerate interfaces, and guess primary one */
  struct sockaddr_in sa;
  struct ifreq* ifr = primary_interface();
  if ( ifr != NULL ) {
    memcpy( &sa, &(ifr->ifr_addr), sizeof(struct sockaddr) );
    strncpy( buffer, inet_ntoa(sa.sin_addr), size );
    free((void*) ifr);
  } else {
    /* error while trying to determine address of primary interface */
    memset( &sa, 0, sizeof(sa) );
#if 0 /* future lab */
    strncpy( buffer, "xsi:null", size );
#else /* for now */
    strncpy( buffer, "0.0.0.0", size );
#endif
  }
  return sa.sin_addr;
}
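/*
 * Illustrative only: how whoami() is typically consumed (cf. identify()
 * below). The buffer receives the dotted quad; the return value is the
 * raw address. KEG_WHOAMI_DEMO is a hypothetical guard, never defined.
 */
#ifdef KEG_WHOAMI_DEMO
static void whoami_demo()
{
  char ifaddr[20];
  struct in_addr me = whoami( ifaddr, sizeof(ifaddr) );
  printf( "primary interface: %s (raw 0x%08lx)\n",
          ifaddr, (unsigned long) me.s_addr );
}
#endif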
void identify( char* result, size_t size,
               const char* arg0, double start,
               bool condor, const DirtyVector iox[4],
               const char* outfn )
{
  size_t linsize = getpagesize();
  char* line = static_cast<char*>( malloc(linsize) );

  // phase 2: where am i
  struct utsname uts;
  char* release = const_cast<char*>("");
  char* machine = const_cast<char*>("");
  char* sysname = const_cast<char*>("");
  char* hostname = getenv("HOSTNAME");
  if ( uname(&uts) >= 0 ) {
    release = strdup( uts.release );
    machine = strdup( uts.machine );
    sysname = strdup( uts.sysname );
    if ( hostname == 0 ) hostname = uts.nodename;
  }

  // who am i
  char ifaddr[20];
  struct in_addr me = whoami( ifaddr, sizeof(ifaddr) );

  singleton_init();
  if ( (me.s_addr & vpn_netmask[1]) == vpn_network[1] ||
       (me.s_addr & vpn_netmask[2]) == vpn_network[2] ||
       (me.s_addr & vpn_netmask[3]) == vpn_network[3] ) {
    // private network, no lookup
    hostname = static_cast<char*>( malloc(32) );
    sprintf( hostname, "%s (VPN)", ifaddr );
  } else {
    struct hostent* h = ( me.s_addr == 0ul || me.s_addr == 0xFFFFFFFFul ) ?
      gethostbyname(hostname) :
      gethostbyaddr( (const char*) &me.s_addr, sizeof(me.s_addr), AF_INET );

    if ( h ) {
      struct in_addr ipv4;
      hostname = static_cast<char*>( malloc(strlen(h->h_name)+20) );
      memcpy( &ipv4.s_addr, h->h_addr, 4 );
      sprintf( hostname, "%s (%s)", inet_ntoa(ipv4), h->h_name );
    } else {
      hostname = static_cast<char*>( malloc(strlen(ifaddr)+2) );
      strcpy( hostname, ifaddr );
    }
  }

  // timestamp stuff
  double integral, that = now();
  modf( start, &integral );
  time_t intint = (time_t) integral;

  // determine timezone offset
  struct tm tm0, tm1;
  memcpy( &tm0, gmtime(&intint), sizeof(struct tm) );
  memcpy( &tm1, localtime(&intint), sizeof(struct tm) );
  tm0.tm_isdst = tm1.tm_isdst;
  time_t offset = intint - mktime(&tm0);
  int hours = offset / 3600;
#if defined (__GNUC__)
  int minutes = ( abs(offset) % 3600 ) / 60;  // was abs(offset) % 60, which yields seconds, not minutes
#else
  // Solaris has overloading ambiguity between std::abs(int|double) problems
  int minutes = (( offset < 0 ? -offset : offset ) % 3600) / 60;
#endif

  // time stamp ISO format
  strftime( line, linsize, "%Y%m%dT%H%M%S", &tm1 );
  char ms[8];
  snprintf( ms, sizeof(ms), "%.3f", start - floor(start) );
  append( result, size, "Timestamp Today: %s%s%+03d:%02d (%.3f;%.3f)\n",
          line, ms+1, hours, minutes, start, that-start );

  // phase 1: Say hi
#ifdef HAS_SVNVERSION
  append( result, size, "Applicationname: %s [v%s] @ %s\n",
          arg0, HAS_SVNVERSION, hostname );
#else
  append( result, size, "Applicationname: %s @ %s\n", arg0, hostname );
#endif // HAS_SVNVERSION

  if ( getcwd( line, linsize ) == 0 ) strcpy( line, "(n.a.)" );
  append( result, size, "Current Workdir: %s\n", line );

  // phase 2: this machine?
  append( result, size, "Systemenvironm.: %s-%s %s\n", machine, sysname, release );

#ifdef MACHINE_SPECIFIC
  pegasus_cpuinfo( result, size );
  pegasus_loadavg( result, size );
  pegasus_meminfo( result, size );
  pegasus_statfs( result, size );
#endif // MACHINE_SPECIFIC

  if ( condor ) {
    for ( char** p = environ; *p; p++ ) {
      if ( strncmp( *p, "_CONDOR", 7 ) == 0 ) {
        append( result, size, "Condor Variable: %s\n", *p );
      }
    }
  }

  append( result, size, "Output Filename: %s\n", outfn );

  if ( iox[1].size() ) {
    append( result, size, "Input Filenames:" );
    for ( unsigned j=0; j<iox[1].size(); ++j ) {
      append( result, size, ' ' );
      strncat( result, iox[1][j], size - strlen(result) - 1 );
    }
    append( result, size, '\n' );
  }

  if ( iox[0].size() ) {
    append( result, size, "Other Arguments:" );
    for ( unsigned j=0; j<iox[0].size(); ++j ) {
      append( result, size, ' ' );
      strncat( result, iox[0][j], size - strlen(result) - 1 );
    }
    append( result, size, '\n' );
  }

  for ( unsigned j=0; j<iox[3].size(); ++j ) {
    append( result, size, "Environmentvar.: %s=", iox[3][j] );
    char* e = getenv(iox[3][j]);
    if ( e && *e ) strncat( result, e, size - strlen(result) - 1 );
    append( result, size, '\n' );
  }

  free( static_cast<void*>(line) );
}
void helpMe( const char* ptr, unsigned long timeout, unsigned long spinout,
             const char* prefix )
{
  puts( RCS_ID );
  printf( "Usage:\t%s [-a appname] [(-t|-T) thinktime] [-l fn] [-o fn [..]]\n"
          "\t[-i fn [..] | -G size] [-e env [..]] [-p p [..]] [-P ps]\n", ptr );
  printf( " -a app\tset name of application to something else, default %s\n", ptr );
  printf( " -t to\tsleep for 'to' seconds during execution, default %lu\n", timeout );
  printf( " -T to\tspin for 'to' seconds during execution, default %lu\n", spinout );
  printf( " -l fn\tappend own information atomically to a logfile\n" );
  printf( " -o ..\tenumerate space-separated list of output files to create\n" );
  printf( " -i ..\tenumerate space-separated list of input files to read and copy\n" );
  printf( " -G sz\tuse the generated size pattern instead of input files\n" );
  printf( " -p ..\tenumerate space-separated parameters to mention\n" );
  printf( " -e ..\tenumerate space-separated environment values to print\n" );
  printf( " -C\tprint all environment variables starting with _CONDOR\n" );
  printf( " -P ps\tprefix input file lines with 'ps', default \"%s\"\n", prefix );
}

int main( int argc, char* argv[] )
{
  int state = 0;
  bool condor = false;
  unsigned long timeout = 0;
  unsigned long spinout = 0;
  unsigned long gensize = 0;
  DirtyVector iox[4];

  // when did we start
  double start = now();
  char* prefix = strdup(" ");

  // determine base name of input file
  char* logfile = 0;
  char* ptr = 0;
  if ( (ptr=strrchr(argv[0],'/')) == 0 ) ptr = argv[0];
  else ptr++;

  // complain, if no parameters were given
  if ( argc == 1 ) {
    helpMe( ptr, timeout, spinout, prefix );
    return 0;
  }

  // prepare generator pattern
  for ( size_t i=0; i<sizeof(output); i++ )
    output[i] = pattern[i & 63];

  for ( int i=1; i<argc; ++i ) {
    char* s = argv[i];
    if ( s[0] == '-' && s[1] != 0 ) {
      if ( strchr( "iotTGaepPlC", s[1] ) != NULL ) {
        switch (s[1]) {
        case 'i': state = 1; break;
        case 'o': state = 2; break;
        case 'e': state = 3; break;
        case 'a': state = 10; break;
        case 't': state = 11; break;
        case 'l': state = 12; break;
        case 'T': state = 13; break;
        case 'p': state = 0; break;
        case 'P': state = 14; break;
        case 'G': state = 15; break;
        case 'C': condor = true; continue;
        }
        s += 2;
      }
    }
    if ( strlen(s) == 0 ) continue;
    if ( state >= 10 ) {
      switch ( state ) {
      case 10: ptr = s; break;
      case 11: timeout = strtoul(s,0,10); break;
      case 12: logfile = s; break;
      case 13: spinout = strtoul(s,0,10); break;
      case 14: free( static_cast<void*>(prefix) ); prefix = strdup(s); break;
      case 15: gensize = strtoul(s,0,10); break;
      }
      state = 0;
    } else {
      iox[state].push_back(s);
    }
  }
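  /*
   * Illustrative only (not part of the original source): typical
   * invocations of the state-machine parser above. File names are made up.
   *
   *   pegasus-keg -t 5 -i in1.txt in2.txt -o out.txt -p alpha beta
   *       sleep 5s, copy both inputs into out.txt, mention two parameters
   *   pegasus-keg -T 10 -G 1048576 -o big.dat -l keg.log
   *       spin 10s of CPU, generate a 1 MB pattern file, append a report
   */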
  // thinktime
  if ( timeout ) sleep(timeout);
  if ( spinout ) spin(spinout);

  // output buffer
  size_t bufsize = getpagesize() << 4;
  if ( bufsize < 16384 ) bufsize = 16384;
  char* buffer = static_cast<char*>( malloc(bufsize) );

#ifndef MIN
#define MIN(a,b) ((a) < (b) ? (a) : (b))
#endif // MIN

  // all input, all output files
  FILE *out, *in;
  for ( unsigned i=0; i<iox[2].size(); ++i ) {
    out = ( iox[2][i][0] == '-' && iox[2][i][1] == '\0' ) ?
      fdopen( STDOUT_FILENO, "a" ) : fopen( iox[2][i], "w" );
    if ( out ) {
      if ( gensize > 0 ) {
        unsigned long xsize = gensize-1; // final LF counts
        while ( xsize > 0 ) {
          ssize_t wsize = fwrite( output, sizeof(char),
                                  MIN(xsize,sizeof(output)), out );
          if ( wsize > 0 ) xsize -= wsize;
          else break;
        }
        fputc( '\n', out );
      } else {
        // copy input files
        for ( unsigned j=0; j<iox[1].size(); ++j ) {
          in = ( iox[1][j][0] == '-' && iox[1][j][1] == '\0' ) ?
            fdopen( STDIN_FILENO, "r" ) : fopen( iox[1][j], "r" );
          fprintf( out, "--- start %s ----\n", iox[1][j] );
          if ( in ) {
            while ( fgets( buffer, bufsize, in ) ) {
              fputs( prefix, out );
              fputs( buffer, out );
            }
            fclose(in);
          } else {
            fprintf( out, "  open \"%s\": %d: %s\n",
                     iox[1][j], errno, strerror(errno) );
          }
          fprintf( out, "--- final %s ----\n", iox[1][j] );
        }
      }

      // create buffer, and fill with content
      memset( buffer, 0, bufsize );
      identify( buffer, bufsize, ptr, start, condor, iox, iox[2][i] );
      fputs( buffer, out );
      fclose(out);
    } else {
      fprintf( stderr, "open(%s): %s\n", iox[2][i], strerror(errno) );
      free( static_cast<void*>(buffer) );
      return 2;
    }
  }

  // append atomically to logfile
  if ( logfile != 0 ) {
    int fd = -1;
    if ( (fd=open( logfile, O_WRONLY | O_CREAT | O_APPEND, 0666 )) == -1 ) {
      fprintf( stderr, "WARNING: open(%s): %s\n", logfile, strerror(errno) );
    } else {
      memset( buffer, 0, bufsize );
      identify( buffer, bufsize, ptr, start, condor, iox, logfile );
      append( buffer, bufsize, '\n' );
      write( fd, buffer, strlen(buffer) ); // single write keeps the append atomic
      close(fd);
    }
  }

  free( static_cast<void*>(buffer) );
  return 0;
}
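/*
 * Illustrative only: the logfile branch above relies on O_APPEND plus a
 * single write(2) so that concurrent keg instances do not interleave
 * records. A minimal standalone sketch of the same pattern (the file name
 * is made up; KEG_APPEND_DEMO is a hypothetical guard, never defined):
 */
#ifdef KEG_APPEND_DEMO
static void append_demo()
{
  int fd = open( "shared.log", O_WRONLY | O_CREAT | O_APPEND, 0666 );
  if ( fd != -1 ) {
    const char rec[] = "one whole record in one write\n";
    write( fd, rec, strlen(rec) );  // O_APPEND: seek-to-end and write are atomic
    close( fd );
  }
}
#endif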
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/linux.cc

/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include "linux.hh"

static const char* RCS_ID = "$Id: linux.cc 5035 2012-02-28 02:23:51Z voeckler $";

#include <fstab.h>
#include <sys/statvfs.h>
#include <sys/sysinfo.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include "basic.hh"

void pegasus_statfs( char* buffer, size_t capacity )
{
  if ( setfsent() ) {
    struct fstab* mtab;
    char line[1024];
    while ( (mtab = getfsent()) ) {
      struct statvfs vfs;
      /* Linux mount points may use [1] device, [2] label, [3] uuid,
       * thus checking fs_spec for slash becomes futile.
       * Checking mount point for slash instead. */
      if ( mtab->fs_file[0] == '/' && statvfs( mtab->fs_file, &vfs ) != -1 ) {
        if ( vfs.f_bsize > 0 && vfs.f_blocks > 0 ) {
          char total[16], avail[16];
          unsigned long long size = vfs.f_frsize;
          smart_units( total, sizeof(total), (size * vfs.f_blocks) );
          smart_units( avail, sizeof(avail), (size * vfs.f_bavail) );
          snprintf( line, sizeof(line),
                    "Filesystem Info: %-24s %s %s total, %s avail\n",
                    mtab->fs_file, mtab->fs_vfstype, total, avail );
          strncat( buffer, line, capacity - strlen(buffer) - 1 );
        }
      }
    }
  }
}

static struct sysinfo si;
static int init_flag = 1;

void pegasus_loadavg( char* buffer, size_t capacity )
{
  if ( init_flag == 1 ) init_flag = sysinfo(&si);
  if ( init_flag == 0 ) {
    char line[128];
    /* sysinfo loads are fixed-point, scaled by 2^16 == 65536 -- the first
     * divisor used to read 65535.0, a typo */
    snprintf( line, sizeof(line), "Load Averages : %.3f %.3f %.3f\n",
              si.loads[0] / 65536.0,
              si.loads[1] / 65536.0,
              si.loads[2] / 65536.0 );
    strncat( buffer, line, capacity - strlen(buffer) - 1 );
  }
}

void pegasus_meminfo( char* buffer, size_t capacity )
{
  if ( init_flag == 1 ) init_flag = sysinfo(&si);
  if ( init_flag == 0 ) {
    char line[128];
    unsigned long long pagesize = si.mem_unit;
    unsigned long ram_total = megs(si.totalram * pagesize);
    unsigned long ram_free = megs(si.freeram * pagesize);
    unsigned long ram_shared = megs(si.sharedram * pagesize);
    unsigned long ram_buffer = megs(si.bufferram * pagesize);
    unsigned long swap_total = megs(si.totalswap * pagesize);
    unsigned long swap_free = megs(si.freeswap * pagesize);

    snprintf( line, sizeof(line),
              "Memory Usage MB: %lu total, %lu free, %lu shared, %lu buffered\n",
              ram_total, ram_free, ram_shared, ram_buffer );
    strncat( buffer, line, capacity - strlen(buffer) - 1 );

    snprintf( line, sizeof(line),
              "Swap Usage MB: %lu total, %lu free\n",
              swap_total, swap_free );
    strncat( buffer, line, capacity - strlen(buffer) - 1 );
  }
}
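/*
 * Illustrative only: the 65536 divisor above is the kernel's fixed-point
 * load scale (load averages are shifted left by SI_LOAD_SHIFT == 16). An
 * equivalent, self-describing conversion -- assuming the shift constant,
 * whose header visibility varies by libc -- would be:
 *
 *   double one_minute = si.loads[0] / (double)(1UL << 16);
 */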
static const char* cpu_info = 0;

void pegasus_cpuinfo( char* buffer, size_t capacity )
{
  // fill cache once, if empty
  if ( cpu_info == 0 ) {
    unsigned n_cpu = 0;
    char* cpu_speed = 0;
    char* model_name = 0;
    char line[1024];

    // open /proc/cpuinfo to read
    FILE* proc = fopen( "/proc/cpuinfo", "r" );
    if ( proc == 0 ) return;

    // FIXME: This assumes SMP for now
    bool within = false;
    while ( fgets( line, sizeof(line), proc ) ) {
      within = true;
      if ( strncasecmp( line, "processor", 9 ) == 0 ) n_cpu++;
#ifdef MARCH_IA64
      if ( model_name == 0 && strncasecmp( line, "family", 6 ) == 0 ) {
#else
      if ( model_name == 0 && strncasecmp( line, "model name", 10 ) == 0 ) {
#endif
        line[strlen(line)-1] = '\0';
        char* s = strchr(line,':');
        if ( s ) model_name = strdup(s+2);
      }
      if ( cpu_speed == 0 && strncasecmp( line, "cpu mhz", 7 ) == 0 ) {
        line[strlen(line)-1] = '\0';
        char* s = strchr(line,':');
        if ( s ) cpu_speed = strdup(s+2);
      }
    }
    fclose(proc);

    if ( within ) {
      size_t cpu_size = 256;
      char* dynamic = static_cast<char*>( malloc(cpu_size) );
      snprintf( dynamic, cpu_size, "Processor Info.: %u x %s @ %s\n",
                n_cpu,  // %u, since n_cpu is unsigned (was %d)
                model_name ? model_name : "[unknown]",
                cpu_speed ? cpu_speed : "[unknown]" );
      cpu_info = const_cast<const char*>( dynamic );
    } else {
      cpu_info = "";
    }
  }

  // append information to buffer, if we got this far
  if ( cpu_info && *cpu_info )
    strncat( buffer, cpu_info, capacity - strlen(buffer) - 1 );
}

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/sunos-swap.hh

/* GTPL license header -- same text as in linux.cc above */

#ifndef _PEGASUS_SUNOS_SWAP_HH
#define _PEGASUS_SUNOS_SWAP_HH

extern int gather_sunos_swap( unsigned long long* total,
                              unsigned long long* free );
/* purpose: collect swap information from solaris
 * warning: This compilation unit MUST be compiled WITHOUT LFS support!
 * paramtr: total (OUT): total size of all swapping
 *          free (OUT): free size of all swapping */

#endif // _PEGASUS_SUNOS_SWAP_HH
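/*
 * Illustrative only: all pegasus_* collectors share the same contract --
 * the caller provides a NUL-terminated buffer plus its total capacity, and
 * each collector strncat()s one or more "Label: value" lines onto it:
 *
 *   char report[4096] = "";
 *   pegasus_cpuinfo( report, sizeof(report) );
 *   pegasus_meminfo( report, sizeof(report) );
 *   fputs( report, stdout );
 */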
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/darwin.hh

/* GTPL license header -- same text as in linux.cc above */

#ifndef _PEGASUS_DARWIN_HH
#define _PEGASUS_DARWIN_HH

#define _DARWIN_FEATURE_64_BIT_INODE 1

#include <sys/types.h>

extern void pegasus_statfs( char* buffer, size_t capacity );
extern void pegasus_loadavg( char* buffer, size_t capacity );
extern void pegasus_meminfo( char* buffer, size_t capacity );
extern void pegasus_cpuinfo( char* buffer, size_t capacity );

#endif // _PEGASUS_DARWIN_HH
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/basic.cc

/* GTPL license header -- same text as in linux.cc above */

#include "basic.hh"

static const char* RCS_ID = "$Id: basic.cc 2774 2010-11-09 01:52:38Z voeckler $";

#include <stdio.h>
#include <string.h>

void smart_units( char* buffer, size_t capacity, unsigned long long int value )
{
  if ( value < 8192ull ) {
    unsigned long t = (unsigned long) value;
    snprintf( buffer, capacity, "%5luB", t );
  } else if ( value < 8388608ull ) {        /* < 8 MB: print kB */
    snprintf( buffer, capacity, "%5lukB", kils(value) );
  } else if ( value < 8589934592ull ) {     /* < 8 GB: print MB */
    snprintf( buffer, capacity, "%5luMB", megs(value) );
  } else if ( value < 8796093022208ull ) {  /* < 8 TB: print GB */
    snprintf( buffer, capacity, "%5luGB", gigs(value) );
  } else {
    snprintf( buffer, capacity, "%5luTB", ters(value) );
  }
}

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/README

#README FOR KEG
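/*
 * Illustrative only: sample smart_units() output under the thresholds
 * above (each unit switches over at 8192 of the previous one):
 *
 *   4096       -> " 4096B"
 *   16384      -> "   16kB"   (16384 >> 10)
 *   1073741824 -> " 1024MB"   (2^30 >> 20)
 */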
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/basic.hh

/* GTPL license header -- same text as in linux.cc above */

#ifndef _PEGASUS_BASIC_HH
#define _PEGASUS_BASIC_HH

inline unsigned long int kils( unsigned long long int x ) { return (x >> 10); }
inline unsigned long int megs( unsigned long long int x ) { return (x >> 20); }
inline unsigned long int gigs( unsigned long long int x ) { return (x >> 30); }
inline unsigned long int ters( unsigned long long int x ) { return (x >> 40); }

#include <sys/types.h>

extern void smart_units( char* buffer, size_t capacity,
                         unsigned long long int value );

#endif // _PEGASUS_BASIC_HH
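/*
 * Illustrative only: the helpers above are binary-unit conversions by bit
 * shift, i.e. megs(x) == x / 2^20, truncated. For example,
 * megs(3ull << 30) == 3072, since 3 GiB is 3072 MiB.
 */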
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/darwin.cc

/* GTPL license header -- same text as in linux.cc above */

#include "darwin.hh"

static const char* RCS_ID = "$Id: darwin.cc 2773 2010-11-09 01:50:17Z voeckler $";

#include <ctype.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <sys/param.h>
#include <sys/ucred.h>
#include <sys/mount.h>
#include "basic.hh"
#include <sys/sysctl.h>
#include <mach/mach.h>
#include <mach/host_info.h>
#include <mach/vm_statistics.h>

void pegasus_statfs( char* buffer, size_t capacity )
{
  struct statfs* mtab;
  int i, n = getmntinfo( &mtab, MNT_NOWAIT ); // possibly inaccurate
  char line[1024];
  if ( n > 1 ) {
    for ( i=0; i<n; ++i ) {
      if ( mtab[i].f_mntfromname[0] == '/' && mtab[i].f_blocks > 0 ) {
        char total[16], avail[16];
        unsigned long long size = mtab[i].f_bsize;
        smart_units( total, sizeof(total), (size * mtab[i].f_blocks) );
        smart_units( avail, sizeof(avail), (size * mtab[i].f_bavail) );
        snprintf( line, sizeof(line),
                  "Filesystem Info: %-24s %s %s total, %s avail\n",
                  mtab[i].f_mntonname, mtab[i].f_fstypename, total, avail );
        strncat( buffer, line, capacity - strlen(buffer) - 1 );
      }
    }
  }
}

void pegasus_loadavg( char* buffer, size_t capacity )
{
  struct loadavg l;
  size_t len = sizeof(l);
  if ( sysctlbyname( "vm.loadavg", &l, &len, NULL, 0 ) == 0 ) {
    float load[3];
    for ( int i=0; i<3; ++i )
      load[i] = l.ldavg[i] / ((float) l.fscale);
    char line[128];
    snprintf( line, sizeof(line), "Load Averages : %.3f %.3f %.3f\n",
              load[0], load[1], load[2] );
    strncat( buffer, line, capacity - strlen(buffer) - 1 );
  }
}

void pegasus_meminfo( char* buffer, size_t capacity )
{
  char line[128];
  unsigned long long pagesize = getpagesize();
  vm_statistics_data_t vm;
  mach_msg_type_number_t ic = HOST_VM_INFO_COUNT;
  struct xsw_usage s;
  unsigned long long ram_total;
  size_t len = sizeof(ram_total);

  if ( sysctlbyname( "hw.memsize", &ram_total, &len, NULL, 0 ) == 0 ) {
    host_statistics( mach_host_self(), HOST_VM_INFO, (host_info_t) &vm, &ic );
    unsigned long ram_avail = megs( pagesize * vm.free_count );
    unsigned long ram_active = megs( pagesize * vm.active_count );
    unsigned long ram_inactive = megs( pagesize * vm.inactive_count );
    unsigned long ram_wired = megs( pagesize * vm.wire_count );
    snprintf( line, sizeof(line),
              "Memory Usage MB: %lu total, %lu avail, %lu active, %lu inactive, %lu wired\n",
              megs(ram_total), ram_avail, ram_active, ram_inactive, ram_wired );
    strncat( buffer, line, capacity - strlen(buffer) - 1 );
  }

  len = sizeof(s);
  if ( sysctlbyname( "vm.swapusage", &s, &len, NULL, 0 ) == 0 ) {
    unsigned long swap_total = megs( s.xsu_total );
    unsigned long swap_avail = megs( s.xsu_avail );
    snprintf( line, sizeof(line), "Swap Usage MB: %lu total, %lu free\n",
              swap_total, swap_avail );
    strncat( buffer, line, capacity - strlen(buffer) - 1 );
  }
}
void pegasus_cpuinfo( char* buffer, size_t capacity )
{
  int i;
  size_t len;
  char model[128];
  unsigned short cpu_online = 0;

  len = sizeof(i);
  if ( sysctlbyname( "hw.activecpu", &i, &len, NULL, 0 ) == 0 )
    cpu_online = i;

  char cpu_model[80];
  len = sizeof(model);
  if ( sysctlbyname( "machdep.cpu.brand_string", model, &len, NULL, 0 ) == 0 ) {
    /* squeeze repeated whitespace out of the brand string */
    char* s = model;
    char* d = cpu_model;
    while ( *s && d - cpu_model < (ssize_t) sizeof(cpu_model) ) {
      while ( *s && ! isspace(*s) ) *d++ = *s++;
      if ( *s && *s == ' ' ) *d++ = *s++;
      while ( *s && isspace(*s) ) ++s;
    }
    *d = 0;
  } else {
    memset( cpu_model, 0, sizeof(cpu_model) );
  }

  char line[256];
  snprintf( line, sizeof(line), "Processor Info.: %hu x %s\n",
            cpu_online, cpu_model );
  strncat( buffer, line, capacity - strlen(buffer) - 1 );
}

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/linux.hh

/* GTPL license header -- same text as in linux.cc above */

#ifndef _PEGASUS_LINUX_HH
#define _PEGASUS_LINUX_HH

#include <sys/types.h>

extern void pegasus_statfs( char* buffer, size_t capacity );
extern void pegasus_loadavg( char* buffer, size_t capacity );
extern void pegasus_meminfo( char* buffer, size_t capacity );
extern void pegasus_cpuinfo( char* buffer, size_t capacity );

#endif // _PEGASUS_LINUX_HH
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/sunos-swap.cc

/* GTPL license header -- same text as in linux.cc above */

#include <sys/types.h>
#include <sys/stat.h>
#include <sys/swap.h>
#include <memory.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include "sunos-swap.hh"

static const char* RCS_ID = "$Id: sunos-swap.cc 2742 2010-11-01 18:55:50Z voeckler $";

int gather_sunos_swap( unsigned long long* total, unsigned long long* free )
/* purpose: collect swap information from solaris
 * warning: This compilation unit MUST be compiled WITHOUT LFS support!
 * paramtr: total (OUT): total size of all swapping
 *          free (OUT): free size of all swapping */
{
  unsigned long long pagesize = getpagesize();
  int num = swapctl( SC_GETNSWP, 0 );

  *total = *free = 0ull;
  if ( num > 0 ) {
    size_t size = (num+1) * sizeof(swapent_t) + sizeof(swaptbl_t);
    swaptbl_t* table = static_cast<swaptbl_t*>( ::malloc(size) );
    char dummy[80];
    int i;

    // we don't care for the path, so init all to the same
    memset( table, 0, size );
    for ( i=0; i<num+1; ++i ) table->swt_ent[i].ste_path = dummy;
    table->swt_n = num+1;

    if ( swapctl( SC_LIST, table ) > 0 ) {
      for ( i=0; i<num; ++i ) {
        // only pages that are not in the process of being deleted
        if ( (table->swt_ent[i].ste_flags & (ST_INDEL | ST_DOINGDEL)) == 0 ) {
          *total += ( table->swt_ent[i].ste_pages * pagesize );
          *free  += ( table->swt_ent[i].ste_free * pagesize );
        }
      }
    }
    ::free( static_cast<void*>(table) );
    return num;
  } else {
    return 0;
  }
}
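/*
 * Illustrative only: how a caller might consume gather_sunos_swap() (cf.
 * sunos.cc below); megs() is from basic.hh. A hypothetical, Solaris-only
 * driver:
 *
 *   unsigned long long total, free;
 *   if ( gather_sunos_swap( &total, &free ) > 0 )
 *     printf( "swap: %lu MB total, %lu MB free\n", megs(total), megs(free) );
 */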
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/make.SunOS

#!/bin/sh
gmake distclean
gmake EXTRACXXFLAGS='$(V7FLAGS)' pegasus-keg
mv pegasus-keg pegasus-keg.sparcv7
gmake clean
gmake EXTRACXXFLAGS='$(V9FLAGS)' pegasus-keg
mv pegasus-keg pegasus-keg.sparcv9
gmake clean

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/sunos.hh

/* GTPL license header -- same text as in linux.cc above */

#ifndef _PEGASUS_SUNOS_HH
#define _PEGASUS_SUNOS_HH

#include <sys/types.h>

extern void pegasus_statfs( char* buffer, size_t capacity );
extern void pegasus_loadavg( char* buffer, size_t capacity );
extern void pegasus_meminfo( char* buffer, size_t capacity );
extern void pegasus_cpuinfo( char* buffer, size_t capacity );

#endif // _PEGASUS_SUNOS_HH

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/Makefile

#
# Makefile
#
INSTALL = install
STRIP = strip
RM = rm -f
CXX = g++ -ffor-scope
CXXFLAGS = -O
LD = $(CXX)
LOADLIBES = -lm
LOADLIBES_VANILLA =
LOADLIBES_CONDOR =
SYSTEM = $(shell uname -s | tr '[a-z]' '[A-Z]' | tr -d '_ -/')
VERSION = $(shell uname -r)
MARCH = $(shell uname -m | tr '[A-Z]' '[a-z]')
MAJOR = $(firstword $(subst ., ,$(VERSION)))
CONDOR = condor_compile
SOCKIO = $(shell /bin/ls /usr/include/sys/sockio.h 2>/dev/null)
CONDOR_LOCATION = $(shell condor_config_val RELEASE_DIR 2>/dev/null)
EXTRA_OBJ = basic.o
EXTRA_INC =
LFS_CFLAGS = -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE
LFS_LDFLAGS =

ifndef ${prefix}
prefix = $(PEGASUS_HOME)
endif

ifndef ${datadir}
datadir = $(prefix)/share
endif

ifeq (DARWIN,${SYSTEM})
#SDK=/Developer/SDKs/MacOSX10.6.sdk
SDK=$(lastword $(sort $(wildcard /Developer/SDKs/MacOSX10.*.sdk)))
ifneq (,${SDK})
CXXFLAGS = -g -pipe -arch x86_64 -arch i686 -nostdinc \
	-B${SDK}/usr/include/gcc -B${SDK}/usr/lib/gcc \
	-isystem${SDK}/usr/include -F${SDK}/System/Library/Frameworks
LDFLAGS = -arch x86_64 -arch i686 -Wl,-syslibroot,${SDK}
endif
EXTRA_OBJ += darwin.o
EXTRA_INC = darwin.hh
CXXFLAGS += -DMACHINE_SPECIFIC=darwin
endif

ifeq (SUNOS,${SYSTEM})
ifeq (5,${MAJOR})
# use these for the SUN CC compiler
# on Solaris use this link string with gcc
# gcc -Wl,-Bstatic pegasus-keg.o -lstdc++ -lm -lnsl -lsocket -Wl,-Bdynamic -ldl -o pegasus-keg
CXX = CC
## SPARCv7
LFS_CFLAGS = $(shell getconf LFS_CFLAGS 2>>/dev/null)
LFS_LDFLAGS = $(shell getconf LFS_LDFLAGS 2>>/dev/null)
V7FLAGS = -xtarget=generic
V9FLAGS = -xtarget=ultra -xarch=v9
CXXFLAGS = '-library=%none,Cstd,Crun' -dalign -ftrap=%none -fsimple -xlibmil $(EXTRACXXFLAGS)
#EXTRACXXFLAGS = $(V7FLAGS)
LD := $(CXX) $(EXTRACXXFLAGS)
CXXFLAGS := -DSOLARIS $(CXXFLAGS) -xO4 -D__EXTENSIONS__=1
CXXFLAGS += -DMACHINE_SPECIFIC=sunos
EXTRA_OBJ += sunos.o sunos-swap.o
EXTRA_INC = sunos.hh
LOADLIBES_VANILLA = -lkstat -lnsl -lsocket
LOADLIBES_CONDOR = -lkstat -lnsl -lsocket
INSTALL = /usr/ucb/install
else
# old Solaris 1 not supported!
endif
endif

ifeq (IRIX64,${SYSTEM})
# The regular 64bit Irix stuff is just too slow, use n32!
SYSTEM := IRIX
endif

ifeq (IRIX,${SYSTEM})
CXX = CC -n32 -mips3 -r4000
LD = $(CXX)
OPT_NORM = -O3 -IPA -LNO:opt=1
endif

ifeq (AIX,${SYSTEM})
CXX = xlC
CC = xlc
endif

ifeq (LINUX,${SYSTEM})
ifeq (ia64,${MARCH})
CXXFLAGS = -Wall -O2 -DMARCH_IA64=1 -ggdb
LD = $(CXX)
else
ifeq (x86_64,${MARCH})
CXXFLAGS = -Wall -O2 -m64 -ggdb
LD = gcc -m64
else
ifeq (armv7l,${MARCH})
CXXFLAGS = -Wall -O2 -ggdb
else
CXXFLAGS = -Wall -O2 -march=i686 -ggdb
#LDFLAGS += -static
endif
endif
LOADLIBES_VANILLA = -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic
LOADLIBES_CONDOR = -lstdc++
LD = gcc
endif
CXXFLAGS += -DMACHINE_SPECIFIC=linux
EXTRA_OBJ += linux.o
EXTRA_INC = linux.hh
LFS_CFLAGS = $(shell getconf LFS_CFLAGS 2>>/dev/null)
LFS_LDFLAGS = $(shell getconf LFS_LDFLAGS 2>>/dev/null)
endif

#
# === [3] ======================================================= rules section
# There is no need to change things below this line.
CXXFLAGS += -D${SYSTEM} -DMAJOR=${MAJOR} # -DSOCKLEN=${SOCKLEN}

ifneq (,${SOCKIO})
CXXFLAGS += -DHAS_SYS_SOCKIO=1
endif

##ifneq (,${STATVFS})
##CXXFLAGS += -DHAS_SYS_STATVFS=1
##endif

CONDOR_LD = $(CONDOR) $(LD)
CONDOR_CXX = $(CONDOR) $(CXX)

SVNVERSION = $(shell svnversion . 2>> /dev/null)
ifneq (,${SVNVERSION})
CXXFLAGS += -DHAS_SVNVERSION=\"${SVNVERSION}\"
endif

# add large file support
ifneq (,${LFS_CFLAGS})
NOLFS_CFLAGS := $(CXXFLAGS)
CXXFLAGS += $(LFS_CFLAGS)
endif
ifneq (,${LFS_LDFLAGS})
LDFLAGS += $(LFS_LDFLAGS)
endif

%.o : %.cc
	$(CXX) $(CXXFLAGS) $< -c -o $@
%.co : %.cc
	$(CONDOR_CXX) $(CXXFLAGS) $< -c -o $@

TARGET = pegasus-keg
INST_TG = install-keg
ifneq (,${CONDOR_LOCATION})
ifneq (ia64,${MARCH})
ifneq (x86_64,${MARCH})
ifneq (DARWIN,${SYSTEM})
TARGET += pegasus-keg.condor
INST_TG += install-condor
endif
endif
endif
endif

all : $(TARGET)

EXTRA_COBJ = $(EXTRA_OBJ:.o=.co)

pegasus-keg.o: pegasus-keg.cc $(EXTRA_INC)
pegasus-keg: pegasus-keg.o $(EXTRA_OBJ)
	$(LD) $(LDFLAGS) $^ -o $@ $(LOADLIBES_VANILLA) $(LOADLIBES)

pegasus-keg.condor: pegasus-keg.co $(EXTRA_COBJ)
	$(CONDOR_LD) $(LDFLAGS) $^ -o $@ $(LOADLIBES_CONDOR) $(LOADLIBES)

pegasus-keg.co: pegasus-keg.cc $(EXTRA_INC)
	$(CONDOR_CXX) $(CXXFLAGS) $< -c -o $@

basic.o: basic.cc basic.hh
basic.co: basic.cc basic.hh
darwin.o: darwin.cc darwin.hh basic.hh
darwin.co: darwin.cc darwin.hh basic.hh
linux.o: linux.cc linux.hh basic.hh
linux.co: linux.cc linux.hh basic.hh
sunos.o: sunos.cc sunos.hh basic.hh
sunos.co: sunos.cc sunos.hh basic.hh

sunos-swap.o: sunos-swap.cc sunos-swap.hh
	$(CXX) $(NOLFS_CFLAGS) $< -c -o $@
sunos-swap.co: sunos-swap.cc sunos-swap.hh
	$(CONDOR_CXX) $(NOLFS_CFLAGS) $< -c -o $@

install-keg: pegasus-keg
	$(INSTALL) -m 0755 pegasus-keg $(prefix)/bin

install-condor: pegasus-keg.condor
	$(INSTALL) -m 0755 pegasus-keg.condor $(prefix)/bin

install: $(INST_TG)

# shell && / || reordered: the old "cmd || echo failed && echo ok" always
# printed "test ok" regardless of the outcome
test check: pegasus-keg
	@./pegasus-keg -o /dev/fd/1 && echo "test ok" || echo "test failed"

clean:
	$(RM) pegasus-keg.o pegasus-keg.co core core.* $(EXTRA_OBJ) $(EXTRA_COBJ)

distclean: clean
	$(RM) pegasus-keg pegasus-keg.condor
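# Illustrative only (not part of the original Makefile): typical usage,
# assuming PEGASUS_HOME (or prefix=) points at the install tree:
#
#   make                        # builds pegasus-keg (and .condor where supported)
#   make check                  # smoke test against /dev/fd/1
#   make install prefix=$HOME   # installs into $HOME/bin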
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/date.txt

2010-12-08T13:52:53,368212000-0800

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-keg/sunos.cc

/* GTPL license header -- same text as in linux.cc above */
#include "sunos.hh"
#include "sunos-swap.hh"

static const char* RCS_ID = "$Id: sunos.cc 2773 2010-11-09 01:50:17Z voeckler $";

#include <stdio.h>
#include <sys/vfstab.h>
#include <sys/statvfs.h>
#include <string.h>
#include "basic.hh"
#include <stdlib.h>
#include <unistd.h>
#include <sys/stat.h>

#if defined(_LP64) || _FILE_OFFSET_BITS != 64
#include <sys/swap.h>
#endif

#include <sys/sysinfo.h>
#include <sys/param.h> /* FSCALE */
#include <kstat.h>
#include <sys/loadavg.h>

void pegasus_statfs( char* buffer, size_t capacity )
{
  FILE* vfstab = fopen( VFSTAB, "r" );
  if ( vfstab ) {
    struct vfstab mtab;
    char line[1024];
    while ( getvfsent(vfstab,&mtab) == 0 ) {
      struct statvfs vfs;
      if ( mtab.vfs_special[0] == '/' && statvfs( mtab.vfs_mountp, &vfs ) != -1 ) {
        if ( vfs.f_bsize > 0 && vfs.f_blocks > 0 ) {
          char total[16], avail[16];
          unsigned long long size = vfs.f_frsize;
          smart_units( total, sizeof(total), (size * vfs.f_blocks) );
          smart_units( avail, sizeof(avail), (size * vfs.f_bavail) );
          snprintf( line, sizeof(line),
                    "Filesystem Info: %-24s %s %s total, %s avail\n",
                    mtab.vfs_mountp, mtab.vfs_fstype, total, avail );
          strncat( buffer, line, capacity - strlen(buffer) - 1 );
        }
      }
    }
    fclose(vfstab);
  }
}

static int load_flag = -2;
static double load_avg[3];

void pegasus_loadavg( char* buffer, size_t capacity )
{
  if ( load_flag == -2 ) {
    load_flag = getloadavg( load_avg, 3 );
  }
  if ( load_flag >= 0 ) {
    char line[128];
    snprintf( line, sizeof(line), "Load Averages : %.3f %.3f %.3f\n",
              load_avg[0], load_avg[1], load_avg[2] );
    strncat( buffer, line, capacity - strlen(buffer) - 1 );
  }
}

static int swap_flag = -1;
static unsigned long long swap_total;
static unsigned long long swap_free;
static unsigned long long ram_total;
static unsigned long long ram_free;

void pegasus_meminfo( char* buffer, size_t capacity )
{
  if ( ram_total == 0 ) {
    unsigned long long pagesize = getpagesize();
    long tmp;
    if ( (tmp=sysconf(_SC_PHYS_PAGES)) != -1 ) ram_total = (tmp * pagesize);
    if ( (tmp=sysconf(_SC_AVPHYS_PAGES)) != -1 ) ram_free = (tmp * pagesize);
  }
  if ( ram_total > 0 ) {
    char line[128];
    snprintf( line, sizeof(line), "Memory Usage MB: %lu total, %lu free\n",
              megs(ram_total), megs(ram_free) );
    strncat( buffer, line, capacity - strlen(buffer) - 1 );
  }

  if ( swap_flag == -1 ) {
    swap_flag = gather_sunos_swap( &swap_total, &swap_free );
  }
  if ( swap_flag > 0 ) {
    char line[128];
    snprintf( line, sizeof(line), "Swap Usage MB: %lu total, %lu free\n",
              megs(swap_total), megs(swap_free) );
    strncat( buffer, line, capacity - strlen(buffer) - 1 );
  }
}

static void assign( void* dst, size_t size, kstat_named_t* knp )
{
  switch ( size ) {
  case 4: // 32 bit target
    switch ( knp->data_type ) {
    case KSTAT_DATA_INT32:
    case KSTAT_DATA_UINT32:
      *((uint32_t*) dst) = knp->value.ui32;
      break;
    case KSTAT_DATA_INT64:
    case KSTAT_DATA_UINT64:
      // mask fixed to 8 F's (32 bits); it used to have nine
      *((uint32_t*) dst) = (uint32_t) ( knp->value.ui64 & 0xFFFFFFFFull );
      break;
    }
    break;
  case 8: // 64 bit target
    switch ( knp->data_type ) {
    case KSTAT_DATA_INT32:
      *((int64_t*) dst) = knp->value.i32;
      break;
    case KSTAT_DATA_UINT32:
      *((uint64_t*) dst) = knp->value.ui32;
      break;
    case KSTAT_DATA_INT64:
    case KSTAT_DATA_UINT64:
      *((uint64_t*) dst) = knp->value.ui64;
      break;
    }
    break;
  }
}

struct SunOSMachineInfo {
  unsigned short cpu_count;
  unsigned short cpu_online;
  unsigned long megahertz;
  char brand_id[20];
  char cpu_type[20];
  char model_name[20];
};

static struct SunOSMachineInfo info;
static int init_flag = -1;
static int initMachine( struct SunOSMachineInfo* p )
{
  kstat_ctl_t* kc;
  int result = -1;

  memset( p, 0, sizeof(struct SunOSMachineInfo) );

  /* access kernel statistics API
   * run /usr/sbin/kstat -p to see most of the things available. */
  if ( (kc = kstat_open()) != NULL ) {
    kstat_t* ksp;
    size_t j;

    // iterate over kernel statistics chain, module by module
    for ( ksp = kc->kc_chain; ksp != NULL; ksp = ksp->ks_next ) {
      if ( strcmp( ksp->ks_module, "cpu_info" ) == 0 ) {
        kstat_read( kc, ksp, NULL );

        /*
         * module == "cpu_info"
         */
        p->cpu_count++;
        for ( j=0; j < ksp->ks_ndata; ++j ) {
          kstat_named_t* knp = ((kstat_named_t*) ksp->ks_data) + j;
          if ( strcmp( knp->name, "state" ) == 0 ) {
            if ( (knp->data_type == KSTAT_DATA_CHAR &&
                  strcmp( knp->value.c, "on-line" ) == 0) ||
                 (knp->data_type == KSTAT_DATA_STRING &&
                  strcmp( KSTAT_NAMED_STR_PTR(knp), "on-line" ) == 0) )
              p->cpu_online++;
          } else if ( strcmp( knp->name, "clock_MHz" ) == 0 ) {
            assign( &p->megahertz, sizeof(p->megahertz), knp );
          } else if ( strcmp( knp->name, "brand" ) == 0 ) {
            strncpy( p->brand_id,
                     ( knp->data_type == KSTAT_DATA_STRING ?
                       KSTAT_NAMED_STR_PTR(knp) : knp->value.c ),
                     sizeof(p->brand_id) );
          } else if ( strcmp( knp->name, "cpu_type" ) == 0 ) {
            strncpy( p->cpu_type,
                     ( knp->data_type == KSTAT_DATA_STRING ?
                       KSTAT_NAMED_STR_PTR(knp) : knp->value.c ),
                     sizeof(p->cpu_type) );
          } else if ( strcmp( knp->name, "implementation" ) == 0 ) {
            strncpy( p->model_name,
                     ( knp->data_type == KSTAT_DATA_STRING ?
                       KSTAT_NAMED_STR_PTR(knp) : knp->value.c ),
                     sizeof(p->model_name) );
          }
        } /* for j */
      } /* module == "cpu_info" */
    } /* for */
    kstat_close(kc);
    result = 0;
  }
  return result;
}

void pegasus_cpuinfo( char* buffer, size_t capacity )
{
  if ( init_flag == -1 ) init_flag = initMachine( &info );
  if ( init_flag != -1 ) {
    char line[1024];
    snprintf( line, sizeof(line), "Processor Info.: %d x %s [%s] @ %lu\n",
              info.cpu_online, info.model_name, info.cpu_type,
              info.megahertz ); // %lu, since megahertz is unsigned long (was %ld)
    strncat( buffer, line, capacity - strlen(buffer) - 1 );
  }
}
pegasus-wms_4.0.1+dfsg/src/tools/wfprof/list.h

#ifndef WFPROF_LIST_H
#define WFPROF_LIST_H

#define LIST_PREPEND(head,add) \
do { \
  (add)->next = head; \
  if (head) { \
    (add)->prev = (head)->prev; \
    (head)->prev = (add); \
  } else { \
    (add)->prev = (add); \
  } \
  (head) = (add); \
} while (0)

#define LIST_APPEND(head,add) \
do { \
  if (head) { \
    (add)->prev = (head)->prev; \
    (head)->prev->next = (add); \
    (head)->prev = (add); \
    (add)->next = NULL; \
  } else { \
    (head) = (add); \
    (head)->prev = (head); \
    (head)->next = NULL; \
  } \
} while (0)

#define LIST_DELETE(head,del) \
do { \
  if ((del)->prev == (del)) { \
    (head) = NULL; \
  } else if ((del) == (head)) { \
    (del)->next->prev = (del)->prev; \
    (head) = (del)->next; \
  } else { \
    (del)->prev->next = (del)->next; \
    if ((del)->next) { \
      (del)->next->prev = (del)->prev; \
    } else { \
      (head)->prev = (del)->prev; \
    } \
  } \
} while (0)

#define LIST_FOREACH(head,el) \
  for (el = head; el; el = el->next)

#endif /* WFPROF_LIST_H */
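/*
 * Illustrative only (not part of list.h): the macros above implement an
 * intrusive doubly-linked list where head->prev doubles as the tail
 * pointer and the last element's next is NULL. Any struct with next/prev
 * members works; "node" below is a made-up example type, and
 * WFPROF_LIST_DEMO is a hypothetical guard, never defined.
 */
#ifdef WFPROF_LIST_DEMO
#include <stdio.h>
typedef struct node { int v; struct node *next, *prev; } node;
static void list_demo(void) {
  node a = { 1 }, b = { 2 }, *head = NULL, *el;
  LIST_APPEND(head, &a);      /* head: a */
  LIST_APPEND(head, &b);      /* head: a b */
  LIST_FOREACH(head, el)
    printf("%d\n", el->v);    /* prints 1 then 2 */
  LIST_DELETE(head, &a);      /* head: b */
}
#endif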
*/ #define PTRACE_SETOPTIONS 0x4200 #define PTRACE_GETEVENTMSG 0x4201 #define PTRACE_GETSIGINFO 0x4202 #define PTRACE_SETSIGINFO 0x4203 #endif /* Prior to version 2.7 glibc did not have these */ #if __GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ < 7) /* options set using PTRACE_SETOPTIONS */ #define PTRACE_O_TRACESYSGOOD 0x00000001 #define PTRACE_O_TRACEFORK 0x00000002 #define PTRACE_O_TRACEVFORK 0x00000004 #define PTRACE_O_TRACECLONE 0x00000008 #define PTRACE_O_TRACEEXEC 0x00000010 #define PTRACE_O_TRACEVFORKDONE 0x00000020 #define PTRACE_O_TRACEEXIT 0x00000040 #define PTRACE_O_MASK 0x0000007f /* Wait extended result codes for the above trace options. */ #define PTRACE_EVENT_FORK 1 #define PTRACE_EVENT_VFORK 2 #define PTRACE_EVENT_CLONE 3 #define PTRACE_EVENT_EXEC 4 #define PTRACE_EVENT_VFORK_DONE 5 #define PTRACE_EVENT_EXIT 6 #endif #include "child.h" #include "syscall.h" char XFORM[BUFSIZ] = "none"; double get_time() { struct timeval tv; gettimeofday(&tv, NULL); return tv.tv_sec + ((double)tv.tv_usec / 1e6); } void print_header() { fprintf(stderr, "xform pid ppid exe lstart lstop tstart tstop vmpeak rsspeak utime stime wtime cutime cstime\n"); } void print_report(child_t *item) { /* Don't report threads */ if (item->tgid != item->pid) return; fprintf(stderr, "%s %d %d %s %d %d %lf %lf %d %d %lf %lf %lf %lf %lf\n", XFORM, item->pid, item->ppid, item->exe, item->lstart, item->lstop, item->tstart, item->tstop, item->vmpeak, item->rsspeak, item->utime, item->stime, item->tstop - item->tstart, item->cutime, item->cstime); } int main(int argc, char **argv) { pid_t cpid; child_t *child; int i, status, event, lclock; char *PEGASUS_HOME; char kickstart[BUFSIZ]; struct user_regs_struct regs; /* check for kickstart in local dir */ sprintf(kickstart, "./kickstart"); if (access(kickstart, X_OK) < 0) { /* check for PEGASUS_HOME env var */ PEGASUS_HOME = getenv("PEGASUS_HOME"); if (PEGASUS_HOME == NULL) { fprintf(stderr, "Please set PEGASUS_HOME\n"); exit(1); } /* check for kickstart in $PEGASUS_HOME/bin */ sprintf(kickstart, "%s/bin/kickstart", PEGASUS_HOME); if (access(kickstart, X_OK) < 0) { fprintf(stderr, "cannot execute kickstart: %s\n", kickstart); exit(1); } } /* Get transformation name if possible */ for (i=0; i<argc; i++) { if (strcmp(argv[i], "-n") == 0) { strcpy(XFORM, argv[i+1]); break; } } /* Fork kickstart */ cpid = fork(); if (cpid < 0) { perror("fork"); exit(1); } else if(cpid == 0) { if (ptrace(PTRACE_TRACEME, 0, NULL, NULL) < 0) { perror("PTRACE_TRACEME"); exit(1); } dup2(1, 2); /* redirect stderr to stdout */ argv[0] = "kickstart"; execv(kickstart, argv); _exit(0); } else { /* initialize logical clock */ lclock = 0; print_header(); while (1) { /* __WALL is needed so that we can wait on threads too */ cpid = waitpid(0, &status, __WALL); /* find the child */ child = find_child(cpid); /* if not found, then it is new, so add it */ if (child == NULL) { child = add_child(cpid); child->tstart = get_time(); child->lstart = lclock++; if (ptrace(PTRACE_SETOPTIONS, cpid, NULL, PTRACE_O_TRACESYSGOOD|PTRACE_O_TRACEEXIT| PTRACE_O_TRACEFORK|PTRACE_O_TRACEVFORK| PTRACE_O_TRACECLONE)) { perror("PTRACE_SETOPTIONS"); exit(1); } } /* child exited */ if (WIFEXITED(status)) { remove_child(cpid); if (no_children()) break; } /* child was stopped */ if (WIFSTOPPED(status)) { /* Because of a special event we wanted to see */ if(WSTOPSIG(status) == SIGTRAP) { event = status >> 16; if (event == PTRACE_EVENT_EXIT) { child->tstop = get_time(); child->lstop = lclock++; /* fill in exe name */ if 
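/* At PTRACE_EVENT_EXIT the process still has a /proc entry, so this is the
 * last safe point to harvest its accounting data before it is reaped.
 * Presumably (as in the older pprof.c kept under old/) read_exeinfo()
 * resolves /proc/<pid>/exe, read_meminfo() scans /proc/<pid>/status for
 * VmPeak and VmHWM, and read_statinfo() reads /proc/<pid>/stat; only the
 * prototypes (child.h) are visible here. */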
(read_exeinfo(child) < 0) { perror("read_exeinfo"); exit(1); } /* fill in memory info */ if (read_meminfo(child) < 0) { perror("read_meminfo"); exit(1); } /* fill in stat info */ if (read_statinfo(child) < 0) { perror("read_statinfo"); exit(1); } /* print stats */ print_report(child); } if (ptrace(PTRACE_SYSCALL, cpid, NULL, NULL)) { perror("PTRACE_SYSCALL event"); exit(1); } } /* Because of a system call */ else if(WSTOPSIG(status) == (SIGTRAP|0x80)) { if (ptrace(PTRACE_GETREGS, cpid, NULL, ®s)) { perror("PTRACE_GETREGS"); exit(1); } if (child->insyscall) { child->sc_rval = SC_RVAL(regs); int (*handler)(child_t *c) = syscalls[child->sc_nr].handler; if (handler) handler(child); child->insyscall = 0; } else { child->sc_nr = SC_NR(regs); child->sc_args[0] = SC_ARG0(regs); child->sc_args[1] = SC_ARG1(regs); child->sc_args[2] = SC_ARG2(regs); child->sc_args[3] = SC_ARG3(regs); child->sc_args[4] = SC_ARG4(regs); child->sc_args[5] = SC_ARG5(regs); child->insyscall = 1; } if (ptrace(PTRACE_SYSCALL, cpid, NULL, NULL)) { perror("PTRACE_SYSCALL syscall"); exit(1); } } /* Because it got a signal */ else { /* pass the signal on to the child */ if (ptrace(PTRACE_SYSCALL, cpid, 0, WSTOPSIG(status))) { perror("PTRACE_SYSCALL signal"); exit(1); } } } } } return 0; } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/wfprof/syscall_64.h������������������������������������������������0000644�0001750�0001750�00000025235�11757531137�021431� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������{"read", handle_read}, /*0*/ {"write", handle_write}, /*1*/ {"open", handle_open}, /*2*/ {"close", handle_close}, /*3*/ {"stat", handle_none}, /*4*/ {"fstat", handle_none}, /*5*/ {"lstat", handle_none}, /*6*/ {"poll", handle_none}, /*7*/ {"lseek", handle_none}, /*8*/ {"mmap", handle_none}, /*9*/ {"mprotect", handle_none}, /*10*/ {"munmap", handle_none}, /*11*/ {"brk", handle_none}, /*12*/ {"rt_sigaction", handle_none}, /*13*/ {"rt_sigprocmask", handle_none}, /*14*/ {"rt_sigreturn", handle_none}, /*15*/ {"ioctl", handle_none}, /*16*/ {"pread64", handle_none}, /*17*/ {"pwrite64", handle_none}, /*18*/ {"readv", handle_none}, /*19*/ {"writev", handle_none}, /*20*/ {"access", handle_none}, /*21*/ {"pipe", handle_none}, /*22*/ {"select", handle_none}, /*23*/ {"sched_yield", handle_none}, /*24*/ {"mremap", handle_none}, /*25*/ {"msync", handle_none}, /*26*/ {"mincore", handle_none}, /*27*/ {"madvise", handle_none}, /*28*/ {"shmget", handle_none}, /*29*/ {"shmat", handle_none}, /*30*/ {"shmctl", handle_none}, /*31*/ {"dup", handle_none}, /*32*/ {"dup2", handle_none}, /*33*/ {"pause", handle_none}, /*34*/ {"nanosleep", handle_none}, /*35*/ {"getitimer", handle_none}, /*36*/ {"alarm", handle_none}, /*37*/ {"setitimer", handle_none}, /*38*/ {"getpid", handle_none}, /*39*/ {"sendfile", handle_none}, /*40*/ {"socket", handle_none}, /*41*/ {"connect", handle_none}, /*42*/ {"accept", handle_none}, /*43*/ {"sendto", handle_none}, /*44*/ {"recvfrom", handle_none}, /*45*/ {"sendmsg", handle_none}, /*46*/ {"recvmsg", 
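/* This table is indexed by x86-64 syscall number: at syscall-entry stops
 * wfprof.c records SC_NR(regs) in child->sc_nr, and at the matching exit
 * stop it invokes syscalls[child->sc_nr].handler when that is non-NULL,
 * so each entry's position must match the kernel's numbering. */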
handle_none}, /*47*/ {"shutdown", handle_none}, /*48*/ {"bind", handle_none}, /*49*/ {"listen", handle_none}, /*50*/ {"getsockname", handle_none}, /*51*/ {"getpeername", handle_none}, /*52*/ {"socketpair", handle_none}, /*53*/ {"setsockopt", handle_none}, /*54*/ {"getsockopt", handle_none}, /*55*/ {"clone", handle_none}, /*56*/ {"fork", handle_none}, /*57*/ {"vfork", handle_none}, /*58*/ {"execve", handle_none}, /*59*/ {"exit", handle_none}, /*60*/ {"wait4", handle_none}, /*61*/ {"kill", handle_none}, /*62*/ {"uname", handle_none}, /*63*/ {"semget", handle_none}, /*64*/ {"semop", handle_none}, /*65*/ {"semctl", handle_none}, /*66*/ {"shmdt", handle_none}, /*67*/ {"msgget", handle_none}, /*68*/ {"msgsnd", handle_none}, /*69*/ {"msgrcv", handle_none}, /*70*/ {"msgctl", handle_none}, /*71*/ {"fcntl", handle_none}, /*72*/ {"flock", handle_none}, /*73*/ {"fsync", handle_none}, /*74*/ {"fdatasync", handle_none}, /*75*/ {"truncate", handle_none}, /*76*/ {"ftruncate", handle_none}, /*77*/ {"getdents", handle_none}, /*78*/ {"getcwd", handle_none}, /*79*/ {"chdir", handle_none}, /*80*/ {"fchdir", handle_none}, /*81*/ {"rename", handle_none}, /*82*/ {"mkdir", handle_none}, /*83*/ {"rmdir", handle_none}, /*84*/ {"creat", handle_none}, /*85*/ {"link", handle_none}, /*86*/ {"unlink", handle_none}, /*87*/ {"symlink", handle_none}, /*88*/ {"readlink", handle_none}, /*89*/ {"chmod", handle_none}, /*90*/ {"fchmod", handle_none}, /*91*/ {"chown", handle_none}, /*92*/ {"fchown", handle_none}, /*93*/ {"lchown", handle_none}, /*94*/ {"umask", handle_none}, /*95*/ {"gettimeofday", handle_none}, /*96*/ {"getrlimit", handle_none}, /*97*/ {"getrusage", handle_none}, /*98*/ {"sysinfo", handle_none}, /*99*/ {"times", handle_none}, /*100*/ {"ptrace", handle_none}, /*101*/ {"getuid", handle_none}, /*102*/ {"syslog", handle_none}, /*103*/ {"getgid", handle_none}, /*104*/ {"setuid", handle_none}, /*105*/ {"setgid", handle_none}, /*106*/ {"geteuid", handle_none}, /*107*/ {"getegid", handle_none}, /*108*/ {"setpgid", handle_none}, /*109*/ {"getppid", handle_none}, /*110*/ {"getpgrp", handle_none}, /*111*/ {"setsid", handle_none}, /*112*/ {"setreuid", handle_none}, /*113*/ {"setregid", handle_none}, /*114*/ {"getgroups", handle_none}, /*115*/ {"setgroups", handle_none}, /*116*/ {"setresuid", handle_none}, /*117*/ {"getresuid", handle_none}, /*118*/ {"setresgid", handle_none}, /*119*/ {"getresgid", handle_none}, /*120*/ {"getpgid", handle_none}, /*121*/ {"setfsuid", handle_none}, /*122*/ {"setfsgid", handle_none}, /*123*/ {"getsid", handle_none}, /*124*/ {"capget", handle_none}, /*125*/ {"capset", handle_none}, /*126*/ {"rt_sigpending", handle_none}, /*127*/ {"rt_sigtimedwait", handle_none}, /*128*/ {"rt_sigqueueinfo", handle_none}, /*129*/ {"rt_sigsuspend", handle_none}, /*130*/ {"sigaltstack", handle_none}, /*131*/ {"utime", handle_none}, /*132*/ {"mknod", handle_none}, /*133*/ {"uselib", handle_none}, /*134*/ {"personality", handle_none}, /*135*/ {"ustat", handle_none}, /*136*/ {"statfs", handle_none}, /*137*/ {"fstatfs", handle_none}, /*138*/ {"sysfs", handle_none}, /*139*/ {"getpriority", handle_none}, /*140*/ {"setpriority", handle_none}, /*141*/ {"sched_setparam", handle_none}, /*142*/ {"sched_getparam", handle_none}, /*143*/ {"sched_setscheduler", handle_none}, /*144*/ {"sched_getscheduler", handle_none}, /*145*/ {"sched_get_priority_max", handle_none}, /*146*/ {"sched_get_priority_min", handle_none}, /*147*/ {"sched_rr_get_interval", handle_none}, /*148*/ {"mlock", handle_none}, /*149*/ {"munlock", handle_none}, 
/*150*/ {"mlockall", handle_none}, /*151*/ {"munlockall", handle_none}, /*152*/ {"vhangup", handle_none}, /*153*/ {"modify_ldt", handle_none}, /*154*/ {"pivot_root", handle_none}, /*155*/ {"_sysctl", handle_none}, /*156*/ {"prctl", handle_none}, /*157*/ {"arch_prctl", handle_none}, /*158*/ {"adjtimex", handle_none}, /*159*/ {"setrlimit", handle_none}, /*160*/ {"chroot", handle_none}, /*161*/ {"sync", handle_none}, /*162*/ {"acct", handle_none}, /*163*/ {"settimeofday", handle_none}, /*164*/ {"mount", handle_none}, /*165*/ {"umount2", handle_none}, /*166*/ {"swapon", handle_none}, /*167*/ {"swapoff", handle_none}, /*168*/ {"reboot", handle_none}, /*169*/ {"sethostname", handle_none}, /*170*/ {"setdomainname", handle_none}, /*171*/ {"iopl", handle_none}, /*172*/ {"ioperm", handle_none}, /*173*/ {"create_module", handle_none}, /*174*/ {"init_module", handle_none}, /*175*/ {"delete_module", handle_none}, /*176*/ {"get_kernel_syms", handle_none}, /*177*/ {"query_module", handle_none}, /*178*/ {"quotactl", handle_none}, /*179*/ {"nfsservctl", handle_none}, /*180*/ {"getpmsg", handle_none}, /*181*/ {"putpmsg", handle_none}, /*182*/ {"afs_syscall", handle_none}, /*183*/ {"tuxcall", handle_none}, /*184*/ {"security", handle_none}, /*185*/ {"gettid", handle_none}, /*186*/ {"readahead", handle_none}, /*187*/ {"setxattr", handle_none}, /*188*/ {"lsetxattr", handle_none}, /*189*/ {"fsetxattr", handle_none}, /*190*/ {"getxattr", handle_none}, /*191*/ {"lgetxattr", handle_none}, /*192*/ {"fgetxattr", handle_none}, /*193*/ {"listxattr", handle_none}, /*194*/ {"llistxattr", handle_none}, /*195*/ {"flistxattr", handle_none}, /*196*/ {"removexattr", handle_none}, /*197*/ {"lremovexattr", handle_none}, /*198*/ {"fremovexattr", handle_none}, /*199*/ {"tkill", handle_none}, /*200*/ {"time", handle_none}, /*201*/ {"futex", handle_none}, /*202*/ {"sched_setaffinity", handle_none}, /*203*/ {"sched_getaffinity", handle_none}, /*204*/ {"set_thread_area", handle_none}, /*205*/ {"io_setup", handle_none}, /*206*/ {"io_destroy", handle_none}, /*207*/ {"io_getevents", handle_none}, /*208*/ {"io_submit", handle_none}, /*209*/ {"io_cancel", handle_none}, /*210*/ {"get_thread_area", handle_none}, /*211*/ {"lookup_dcookie", handle_none}, /*212*/ {"epoll_create", handle_none}, /*213*/ {"epoll_ctl_old", handle_none}, /*214*/ {"epoll_wait_old", handle_none}, /*215*/ {"remap_file_pages", handle_none}, /*216*/ {"getdents64", handle_none}, /*217*/ {"set_tid_address", handle_none}, /*218*/ {"restart_syscall", handle_none}, /*219*/ {"semtimedop", handle_none}, /*220*/ {"fadvise64", handle_none}, /*221*/ {"timer_create", handle_none}, /*222*/ {"timer_settime", handle_none}, /*223*/ {"timer_gettime", handle_none}, /*224*/ {"timer_getoverrun", handle_none}, /*225*/ {"timer_delete", handle_none}, /*226*/ {"clock_settime", handle_none}, /*227*/ {"clock_gettime", handle_none}, /*228*/ {"clock_getres", handle_none}, /*229*/ {"clock_nanosleep", handle_none}, /*230*/ {"exit_group", handle_none}, /*231*/ {"epoll_wait", handle_none}, /*232*/ {"epoll_ctl", handle_none}, /*233*/ {"tgkill", handle_none}, /*234*/ {"utimes", handle_none}, /*235*/ {"vserver", handle_none}, /*236*/ {"mbind", handle_none}, /*237*/ {"set_mempolicy", handle_none}, /*238*/ {"get_mempolicy", handle_none}, /*239*/ {"mq_open", handle_none}, /*240*/ {"mq_unlink", handle_none}, /*241*/ {"mq_timedsend", handle_none}, /*242*/ {"mq_timedreceive", handle_none}, /*243*/ {"mq_notify", handle_none}, /*244*/ {"mq_getsetattr", handle_none}, /*245*/ {"kexec_load", handle_none}, /*246*/ 
{"waitid", handle_none}, /*247*/ {"add_key", handle_none}, /*248*/ {"request_key", handle_none}, /*249*/ {"keyctl", handle_none}, /*250*/ {"ioprio_set", handle_none}, /*251*/ {"ioprio_get", handle_none}, /*252*/ {"inotify_init", handle_none}, /*253*/ {"inotify_add_watch", handle_none}, /*254*/ {"inotify_rm_watch", handle_none}, /*255*/ {"migrate_pages", handle_none}, /*256*/ {"openat", handle_none}, /*257*/ {"mkdirat", handle_none}, /*258*/ {"mknodat", handle_none}, /*259*/ {"fchownat", handle_none}, /*260*/ {"futimesat", handle_none}, /*261*/ {"newfstatat", handle_none}, /*262*/ {"unlinkat", handle_none}, /*263*/ {"renameat", handle_none}, /*264*/ {"linkat", handle_none}, /*265*/ {"symlinkat", handle_none}, /*266*/ {"readlinkat", handle_none}, /*267*/ {"fchmodat", handle_none}, /*268*/ {"faccessat", handle_none}, /*269*/ {"pselect6", handle_none}, /*270*/ {"ppoll", handle_none}, /*271*/ {"unshare", handle_none}, /*272*/ {"set_robust_list", handle_none}, /*273*/ {"get_robust_list", handle_none}, /*274*/ {"splice", handle_none}, /*275*/ {"tee", handle_none}, /*276*/ {"sync_file_range", handle_none}, /*277*/ {"vmsplice", handle_none}, /*278*/ {"move_pages", handle_none}, /*279*/ {"utimensat", handle_none}, /*280*/ {"epoll_pwait", handle_none}, /*281*/ {"signalfd", handle_none}, /*282*/ {"timerfd_create", handle_none}, /*283*/ {"eventfd", handle_none}, /*284*/ {"fallocate", handle_none}, /*285*/ {"timerfd_settime", handle_none}, /*286*/ {"timerfd_gettime", handle_none}, /*287*/ {"paccept", handle_none}, /*288*/ {"signalfd4", handle_none}, /*289*/ {"eventfd2", handle_none}, /*290*/ {"epoll_create1", handle_none}, /*291*/ {"dup3", handle_none}, /*292*/ {"pipe2", handle_none}, /*293*/ {"inotify_init1", handle_none}, /*294*/ �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/wfprof/child.h�����������������������������������������������������0000644�0001750�0001750�00000002426�11757531137�020526� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#ifndef WFPROF_CHILD_H #define WFPROF_CHILD_H #include <sys/types.h> /* pid_t */ #define MAX_NAME 1024 #define SC_ARGS 6 typedef struct _child_t { pid_t pid; /* child's process ID */ pid_t ppid; /* parent process ID */ pid_t tgid; /* thread group ID */ char exe[MAX_NAME]; /* exe name */ int lstart; /* logical clock start time */ int lstop; /* logical clock stop time */ int vmpeak; /* peak virtual memory usage */ int rsspeak; /* peak physical memory usage */ double utime; /* time spent in user mode */ double stime; /* time spent in kernel mode */ double cutime; /* time waited-on children were in user mode */ double cstime; /* time waited-on children were in kernel mode */ double tstart; /* start time (seconds from epoch) */ double tstop; /* stop time (seconds from epoch) */ int insyscall; /* in a system call? 
 */
    int sc_nr;             /* system call number */
    long sc_args[SC_ARGS]; /* system call arguments */
    long sc_rval;          /* system call return value */
    struct _child_t *next;
    struct _child_t *prev;
} child_t;

child_t *find_child(pid_t pid);
child_t *add_child(pid_t pid);
void remove_child(pid_t pid);
int no_children();
int read_exeinfo(child_t *c);
int read_meminfo(child_t *c);
int read_statinfo(child_t *c);

#endif /* WFPROF_CHILD_H */
pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/
pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/memstats
#!/usr/bin/env python
#
# Copyright 2009 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
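#
# memstats consumes the per-process records that wfprof/pprof write to
# stderr, one space-separated line per process:
#
#   xform pid ppid exe lstart lstop tstart tstop vmpeak rsspeak utime
#   stime wtime cutime cstime
#
# A hypothetical record (values invented purely for illustration):
#
#   convert 4242 4241 /usr/bin/convert 0 1 1244.1 1250.8 102400 51200
#   3.1 0.4 6.7 0.0 0.0
#
# vmpeak and rsspeak arrive in kB; process_invocation() below divides
# them by 1024 to report MB.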
"""Parses the output of pprof and produces summary memory statistics.""" __author__ = "Gideon Juve <juve@usc.edu>" __all__ = [] __version__ = "1.0" import sys, os, re from stats import Variable from analysis import Analysis, sorteditems class Event: def __init__(self,op,time,rec): self.op = op self.time = time self.rec = rec class Executable: def __init__(self,xform,name): self.xform = xform self.name = name self.vmpeak = Variable() self.rsspeak = Variable() class Transformation: def __init__(self,name): self.name = name self.vmpeak = Variable() self.rsspeak = Variable() class MemAnalysis(Analysis): def __init__(self): self.exes = {} self.xforms = {} self.file_re = re.compile("\.err(\.[0-9]{3})?$") def print_stats(self): print ",,vmpeak,,,,,,rsspeak" print "transformation,executable,count,min,max,avg,stddev,sum,count,min,max,avg,stddev,sum" for exe in sorteditems(self.exes): print "%s,%s,%s,%s" % (exe.xform,exe.name,exe.vmpeak,exe.rsspeak) print "\n" print ",vmpeak,,,,,,rsspeak" print "transformation,count,min,max,avg,stddev,sum,count,min,max,avg,stddev,sum" for xform in sorteditems(self.xforms): print "%s,%s,%s" % (xform.name,xform.vmpeak,xform.rsspeak) def is_datafile(self, file): return self.file_re.search(file) is not None def process_datafile(self, file): f = open(file, 'r') line = f.readline() while line: if 'xform' in line: self.process_invocation(f) line = f.readline() f.close() def process_invocation(self, file): events = {} # Read lines line = file.readline() while line: # Skip lines if 'xform' in line: file.seek(len(line) * -1, os.SEEK_CUR) break if 'WARNING' in line: line = file.readline() continue if 'PTRACE_' in line: line = file.readline() continue #xform pid ppid exe lstart lstop tstart tstop vmpeak rsspeak utime stime wtime cutime cstime tok = line.split(' ') if len(tok) != 15: continue xfn = tok[0] exn = os.path.basename(tok[3]) start = int(tok[4]) stop = int(tok[5]) vmpeak = float(tok[8])/1024 rsspeak = float(tok[9])/1024 if xfn in events: tline = events[xfn] else: tline = [] events[xfn] = tline if xfn in self.xforms: xform = self.xforms[xfn] else: xform = Transformation(xfn) self.xforms[xfn] = xform id = xfn+"$"+exn if id in self.exes: exe = self.exes[id] else: exe = Executable(xfn,exn) self.exes[id] = exe tline.append(Event('add',start,(vmpeak,rsspeak))) tline.append(Event('sub',stop,(vmpeak,rsspeak))) exe.vmpeak.update(vmpeak) exe.rsspeak.update(rsspeak) line = file.readline() # Process events for each xform for xfn in events: tline = events[xfn] tline.sort(lambda x,y: x.time - y.time) vmpeak = 0 rsspeak = 0 maxpeak = 0 maxhwm = 0 for event in tline: if event.op == 'add': vmpeak = vmpeak + event.rec[0] rsspeak = rsspeak + event.rec[1] elif event.op == 'sub': vmpeak = vmpeak - event.rec[0] rsspeak = rsspeak - event.rec[1] maxpeak = max(maxpeak, vmpeak) maxhwm = max(maxhwm, rsspeak) xform = self.xforms[xfn] xform.vmpeak.update(maxpeak) xform.rsspeak.update(maxhwm) if __name__ == '__main__': MemAnalysis().analyze() ������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/analysis.py���������������������������������������������0000644�0001750�0001750�00000003421�11757531137�022241� 0����������������������������������������������������������������������������������������������������ustar 
#!/usr/bin/env python
#
# Copyright 2009 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__author__ = "Gideon Juve <juve@usc.edu>"
__all__ = ["Analysis"]
__version__ = "1.0"

import sys, os, re

def sorteditems(d):
    # sort by key and return the values (renamed arg avoids shadowing dict)
    keys = d.keys()
    keys.sort()
    return map(d.get, keys)

class Analysis:
    def __init__(self):
        pass

    def print_stats(self):
        pass

    def process_datafile(self, file):
        # stub; subclasses override this and it is always called with a path
        pass

    def is_datafile(self, file):
        return True

    def process_file(self, file):
        if self.is_datafile(file):
            self.process_datafile(file)

    def process_dir(self, dir):
        for file in os.listdir(dir):
            path = os.path.join(dir, file)
            if os.path.isdir(path):
                self.process_dir(path)
            elif os.path.isfile(path):
                self.process_file(path)

    def process_arg(self, arg):
        if arg == '-h' or arg == '-help' or arg == '--help':
            print "Usage: %s [PATH...] [< LIST_OF_PATHS]" % sys.argv[0]
            sys.exit(1)
        elif os.path.isdir(arg):
            self.process_dir(arg)
        elif os.path.isfile(arg):
            self.process_file(arg)
        else:
            print "Unrecognized argument: %s" % arg
            sys.exit(1)

    def analyze(self):
        if len(sys.argv) == 1:
            for arg in sys.stdin.readlines():
                self.process_arg(arg[:-1])
        else:
            for arg in sys.argv[1:]:
                self.process_arg(arg)
        self.print_stats()
pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/README
These are the old versions for reference when creating the new one.
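
A rough usage sketch (the run directory path is illustrative): pprof wraps
a job the way kickstart does and writes its per-process table to stderr,
which usually ends up in the job's .err file; the analysis scripts then
walk a run directory, or read a list of paths from stdin, and summarize
every .err file they find:

    find /path/to/rundir -name '*.err*' | ./memstats
    ./runstats /path/to/rundir

Each script prints CSV summary tables per executable and per
transformation.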
������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/ioprof��������������������������������������������������0000755�0001750�0001750�00000042265�11757531137�021301� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/usr/bin/env python # # Copyright 2009 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """This program traces the I/O of an application and prints a summary of files accessed and various I/O operations. It wraps the application in strace and parses the strace output. You use it like this: $ ioprof ls -al That will trace all the I/O performed by the 'ls' command and print a summary of all the files opened and how much I/O was performed on each of them. This is meant as a replacement for kickstart in Pegasus so that we can collect I/O statistics of our workflow applications. As such it is a drop-in replacement for kickstart. You just set gridstart="/path/to/ioprof" in your site catalog instead of gridstart="/path/to/kickstart". As long as the job has PEGASUS_HOME in its environment ioprof will use kickstart to invoke strace so you will still get an invocation record. OUTPUT: Kickstart writes all of its output to stdout and ioprof writes all its output to stderr. ioprof output consists of a table with: XFORM - The name of the transformation (via Kickstart args) or 'None' PID - The process ID EXE - The executable name passed to execve() FNAME - The file name BREAD - Number of bytes read NREAD - Number of read()s performed BWRITE - Number of bytes written NWRITE - Number of write()s performed NSEEK - Number of lseek()s performed MODE - Mode passed to open() FLAGS - Flags passed to open() ERRORS: ioprof does not currently keep track of statistics on I/O errors, but this is planned for the future. Currently all failed I/O syscalls are ignored. SHARED LIBRARIES: ioprof does not report I/O performed on shared libraries. This includes anything in /lib, /lib64, /usr/lib, /usr/lib64, etc. It also tries to ignore files called 'lib*.so.*', python files (*.py, *.pyc), and files in /etc, /usr, /sys, /dev, and /proc. Note that it tries to track all open file descriptors, but it doesn't collect or report statistics on everything. STDIN/STDOUT/STDERR: Yes, it traces I/O on stdin/stdout/stderr. PIPES: Yes, it does trace I/O performed on pipes. 
SOCKETS: Yes, it traces I/O performed on sockets. FORK/CLONE/VFORK: Yes, it traces children and reports their I/O separately. However, it is not currently able to figure out exactly which descriptors are inherited by children. For example, it doesn't keep track of files opened with O_CLOEXEC, so it may think that a child has an open descriptor when it actually does not (this probably won't make a difference in practice because a child won't successfully read and write from/to a descriptor it doesn't have). THREADS: Yes, it can deal with threads on Linux created using clone. It assumes that if clone() is called with CLONE_FILES, then the child is a thread that shares its descriptors with its parent. Note that, although threads are technically different processes under Linux, their I/O is not reported separately by ioprof. """ __author__ = "Gideon Juve <juve@usc.edu>" __all__ = [] __version__ = "1.0" import sys if not hasattr(sys, 'version_info') or \ tuple(sys.version_info[:2]) < (2,4): sys.stderr.write('Please use Python 2.4 or later.\n') sys.exit(1) import os, re, signal, time from subprocess import Popen, call try: from uuid import uuid4 except: # use uuidgen if we can if os.path.isfile('/usr/bin/uuidgen'): import commands def uuid4(): return commands.getoutput('/usr/bin/uuidgen') else: raise # TODO Accept arguments on the command line or in an environment variable # TODO Optionally write report in XML # TODO Track error statistics (e.g. number of failed close, etc.) # TODO Make kickstart optional # TODO Make SYSCALLS table a dictionary and use it to map the regexes and parse_* functions # TODO Implement and test handlers for currently unhandled syscalls # TODO Add an argument to optionally ignore certain file patterns # TODO Add arguments to optionally ignore sockets, pipes, and stdio # TODO Add a feature so that if you CTRL^c ioprof prints out its current stats before exiting # TODO Keep track of O_CLOEXEC and its various aliases (fcntl may set this too) MODULE = sys.modules[__name__] # system calls to trace SYSCALLS = [ 'open', 'openat', 'creat', 'close', 'read', 'write', 'lseek', 'dup', 'dup2', 'vfork', 'clone', 'execve', 'pipe', 'fcntl', # only F_DUPFD 'fcntl64', # likewise 'socket', 'socketpair', 'accept', # creates a client fd 'bind', # just for addr info 'connect', # just for addr info 'recvfrom', 'recvmsg', # These could create descriptors but # are not handled yet because I haven't # seen them in anything #'pread', # not on 2.6.18 'pread64', #'pwrite', # not on 2.6.18 'pwrite64', 'readv', 'writev', 'fork', # aliased to clone on linux? 
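    # The names above and below are joined into strace's filter expression
    # when the tracer is launched further down, roughly:
    #
    #   strace -o <fifo> -f -F -s -x -e open,openat,...,epoll_create app
    #
    # so only these calls ever appear in the log that ioprof parses.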
'signalfd', 'mq_open', 'eventfd', 'epoll_create' #'timerfd_create' ] # List of valid kickstart arguments # The flag indicates if they have a value KICKSTART_ARGS = { '-i':True, # asi '-o':True, # aso '-e':True, # ase '-l':True, # log '-n':True, # xid '-N':True, # did '-R':True, # res '-B':True, # sz '-L':True, # lbl '-T':True, # iso '-H':False, '-I':True, # fn '-V':False, '-X':False, '-w':True, # cwd '-W':True, # cwd '-S':True, # l=p '-s':True # l=p } # Files to ignore if they are opened by the app IGNORE = [ re.compile(r'^/lib(64)?/'), re.compile(r'^/usr/'), re.compile(r'^/sys/'), re.compile(r'^/proc/'), re.compile(r'^/etc/'), re.compile(r'^/dev/'), re.compile(r'^/selinux/'), re.compile(r'\.pyc?$'), re.compile(r'/lib[^/]+\.so'), re.compile(r'\.jar$') ] # Process map PIDS = {} class ParseError(Exception): pass # Stats about files class FileStat: def __init__(self,fname): self.fname = fname self.flags = None self.mode = None self.nread = 0 self.bread = 0 self.nwrite = 0 self.bwrite = 0 self.nseek = 0 # Stats about processes class PidStat: def __init__(self,pid): self.ppid = None self.pid = pid self.thread = False self.exe = None self.files = [] self.fds = {} # Is the fd open in the pid? def check_open(self,fd): if not fd in self.fds: raise ParseError('unknown fd %d for pid %d\n'%(fd,self.pid)) # Regular expressions to match each system call match_syscall = re.compile(r'^(\d+)\s+(\w+)\(.*\)\s+=\s+([-0-9?]+)') match_signal = re.compile(r'^(\d+)\s+---.*---') match_clone = re.compile(r'^\d+\s+clone\(.*, flags=(.+), .*\)\s+=\s+(-?\d+)') match_vfork = re.compile(r'^\d+\s+vfork\(.*\)\s+=\s+(-?\d+)') match_execve = re.compile(r'^\d+\s+execve\("(.+)".*\)\s+=\s+(-?\d+)') match_open = re.compile(r'^\d+\s+open\("(.+)", ([^,)]+),? ?(.+)?\)\s+=\s+(-?\d+)') match_openat = re.compile(r'^\d+\s+openat\(.+, "(.+)", ([^,)]+),? 
?(.+)?\)\s+=\s+(-?\d+)') match_close = re.compile(r'^\d+\s+close\((\d+)\)\s+=\s+(-?\d+)') match_read = re.compile(r'^\d+\s+read\((\d+), ".*"\.?\.?\.?, \d+\)\s+=\s+(-?\d+)') match_write = re.compile(r'^\d+\s+write\((\d+), ".*"\.?\.?\.?, \d+\)\s+=\s+(-?\d+)') match_lseek = re.compile(r'^\d+\s+lseek\((\d+), (-?\d+), (\w+)\)\s+=\s+(-?\d+)') match_dup = re.compile(r'^\d+\s+dup\((\d+)\)\s+=\s+(-?\d+)') match_dup2 = re.compile(r'^\d+\s+dup2\((\d+), (\d+)\)\s+=\s+(-?\d+)') match_pipe = re.compile(r'^\d+\s+pipe\(\[(\d+), (\d+)\]\)\s+=\s+(-?\d+)') match_fcntl = re.compile(r'^\d+\s+fcntl\((\d+), ([^,]+)(, .+)?\)\s+=\s+(-?\d+)') match_fcntl64 = re.compile(r'^\d+\s+fcntl64\((\d+), ([^,]+)(, .+)?\)\s+=\s+(-?\d+)') match_socket = re.compile(r'^\d+\s+socket\([^\)]+\)\s+=\s+(-?\d+)') match_socketpair = re.compile(r'^\d+\s+socketpair\([^\[]+\[(\d+), (\d+)\]\)\s+=\s+(-?\d+)') match_writev = re.compile(r'^\d+\s+writev\((\d+),.*\)\s+=\s+(-?\d+)') match_readv = re.compile(r'^\d+\s+readv\((\d+),.*\)\s+=\s+(-?\d+)') match_accept = re.compile(r'^\d+\s+accept\(.*,\s({.*}),.*\)\s+=\s+(-?\d+)') match_bind = re.compile(r'^\d+\s+bind\((\d+),\s({.*}),.*\)\s+=\s+(-?\d+)') match_connect = re.compile(r'^\d+\s+connect\((\d+),\s({.*}),.*\)') match_recvmsg = re.compile(r'^\d+\s+recvmsg\((\d+),.*\)\s+=\s+(-?\d+)') match_recvfrom = re.compile(r'^\d+\s+recvfrom\((\d+),.*\)\s+=\s+(-?\d+)') def usage(): sys.stderr.write('Usage: %s [kickstart args] app [app args]\n' % sys.argv[0]) sys.exit(1) # Search PATH for an executable def which(exe): # Search PATH for exe PATH = os.getenv('PATH', '/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin') PATH = PATH.split(':') for d in PATH: abspath = os.path.join(d,exe) if os.path.isfile(abspath): return abspath return None # not found! # Create process if it doesn't exist def check_pid(pid): if not type(pid).__name__ == 'int': raise TypeError('pid must be an int') if not pid in PIDS: p = PidStat(pid) PIDS[pid] = p if len(PIDS) == 1: # Set up std* for first process stdin = FileStat('<stdin>') stdout = FileStat('<stdout>') stderr = FileStat('<stderr>') p.files = p.files + [stdin, stdout, stderr] p.fds[0] = stdin p.fds[1] = stdout p.fds[2] = stderr return PIDS[pid] # Return true if we should ignore this file def should_ignore(fname): for i in IGNORE: if i.search(fname): return True return False # Parse a system call def parse_syscall(call,proc,line): try: re = getattr(MODULE,'match_'+call) m = re.match(line) if not m: raise ParseError(call+':\n'+line) record = m.groups() parsefn = getattr(MODULE,'parse_'+call) apply(parsefn,[proc,record]) except AttributeError: raise ParseError('Unhandled system call: %s\n%s'%(call,line)) def parse_clone(p,rec): flags = rec[0] child = int(rec[1]) c = check_pid(child) c.ppid = p.pid c.exe = p.exe # In case no execve if 'CLONE_THREAD' in flags: # Child is a thread c.thread = True if 'CLONE_FILES' in flags: # Parent and child have same descriptor table c.fds = p.fds c.files = p.files else: # Child gets copy of descriptor table for fd in p.fds: if not fd in c.fds: fname = p.fds[fd].fname file = FileStat(fname) c.fds[fd] = file if should_ignore(fname): continue c.files.append(file) parse_vfork = parse_clone parse_fork = parse_clone def parse_execve(p,rec): exe = rec[0] if p.thread: raise ParseException('pid %d is a thread, but it did execve' % p.pid) p.exe = exe def parse_open(p,rec): fname = rec[0] flags = rec[1] mode = rec[2] fd = int(rec[3]) file = FileStat(fname) file.mode = mode file.flags = flags p.fds[fd] = file if should_ignore(fname): return 
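    # Note the ordering above: an ignored file is still registered in
    # p.fds, so later read/write/close calls on its descriptor resolve
    # without a ParseError, but it never reaches p.files, the list the
    # final report is printed from.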
p.files.append(file) parse_openat = parse_open def parse_close(p,rec): fd = int(rec[0]) if fd in p.fds: del p.fds[fd] else: # if you get this its probably because of some untraced # socket operation like recvmsg that I am not handling sys.stderr.write("WARNING: fd %d not open in pid %d\n"%(fd,p.pid)) def parse_dup(p,rec): old = int(rec[0]) new = int(rec[1]) p.check_open(old) p.fds[new] = p.fds[old] parse_dup2 = parse_dup def parse_read(p,rec): fd = int(rec[0]) read = int(rec[1]) p.check_open(fd) file = p.fds[fd] file.bread = file.bread + read file.nread = file.nread + 1 parse_readv = parse_read def parse_write(p,rec): fd = int(rec[0]) written = int(rec[1]) p.check_open(fd) file = p.fds[fd] file.bwrite = file.bwrite + written file.nwrite = file.nwrite + 1 parse_writev = parse_write def parse_lseek(p,rec): fd = int(rec[0]) offset = int(rec[1]) whence = rec[2] p.check_open(fd) file = p.fds[fd] file.nseek = file.nseek + 1 def parse_pipe(p,rec): read = int(rec[0]) write = int(rec[1]) readf = FileStat('<pipe>') p.fds[read] = readf p.files.append(readf) writef = FileStat('<pipe>') p.fds[write] = writef p.files.append(writef) def parse_fcntl(p,rec): fd = int(rec[0]) cmd = rec[1] newfd = int(rec[3]) # Only if its creating a new fd if 'F_DUPFD' in cmd: p.check_open(fd) p.fds[newfd] = p.fds[fd] parse_fcntl64 = parse_fcntl def parse_socket(p,rec): fd = int(rec[0]) sock = FileStat('<socket>') p.fds[fd] = sock p.files.append(sock) def parse_socketpair(p,rec): sv0 = int(rec[0]) sv1 = int(rec[1]) sock0 = FileStat('<socket>') sock1 = FileStat('<socket>') p.fds[sv0] = sock0 p.fds[sv1] = sock1 p.files = p.files + [sock0, sock1] def parse_bind(p,rec): fd = int(rec[0]) sock = p.fds[fd] sock.flags = rec[1] sock.mode = 'bind' def parse_connect(p,rec): fd = int(rec[0]) addr = rec[1] sock = p.fds[fd] sock.flags = addr sock.mode = 'connect' def parse_accept(p,rec): addr = rec[0] fd = int(rec[1]) sock = FileStat('<socket>') sock.flags = addr sock.mode = 'accept' p.fds[fd] = sock p.files.append(sock) def parse_recvmsg(p,rec): fd = int(rec[0]) bread = int(rec[1]) sock = p.fds[fd] sock.bread += bread sock.nread += 1 parse_recvfrom = parse_recvmsg def join_lines(unfinished,resumed): unfinished = unfinished[0:unfinished.find('<unfinished')] resumed = resumed[resumed.rfind('>')+1:] joined = " ".join((unfinished+resumed).split()) return joined.replace(' )',')').replace('( ','(') # Parse strace output from log file def parse_strace_output(log): for line in open(log): line = line[:-1] # Correct unfinished lines if '...>' in line: pid = int(line.split(None,1)[0]) p = check_pid(pid) p.unfinished = line continue if '<...' in line: pid = int(line.split(None,1)[0]) p = check_pid(pid) line = join_lines(p.unfinished,line) # Is line a system call? m = match_syscall.match(line) if m: pid = int(m.group(1)) call = m.group(2) ret = m.group(3) # skip unknown lines if '?' in ret: continue result = int(m.group(3)) # skip failed calls except connect # connect is a special case because if the socket is # set to non-blocking, then connect will return -1 with # errno = EINPROGRESS. So just ignore errors on connect. if result < 0 and not call == 'connect': continue p = check_pid(pid) parse_syscall(call,p,line) continue # Is line a signal? if match_signal.match(line): continue # Don't know what it is? log it! 
        raise ParseError('Unrecognized strace line:\n'+line)

def main():
    # Look for kickstart in local directory
    KICKSTART = './kickstart'
    if not (os.path.isfile(KICKSTART) and os.access(KICKSTART, os.X_OK)):
        # Check for PEGASUS_HOME
        PEGASUS_HOME = os.getenv('PEGASUS_HOME')
        if not PEGASUS_HOME:
            sys.stderr.write('Please set PEGASUS_HOME\n')
            sys.exit(1)
        # Look for kickstart in $PEGASUS_HOME/bin
        KICKSTART = os.path.join(PEGASUS_HOME,'bin','kickstart')
        if not (os.path.isfile(KICKSTART) and os.access(KICKSTART, os.X_OK)):
            sys.stderr.write('Kickstart not found: %s\n' % KICKSTART)
            sys.exit(1)

    # Check for strace
    STRACE = which('strace')
    if not STRACE:
        sys.stderr.write('strace not found: %s\n' % STRACE)
        sys.exit(1)
    if not os.access(STRACE, os.X_OK):
        sys.stderr.write('strace is not executable: %s\n' % STRACE)
        sys.exit(1)

    # Separate kickstart arguments from normal arguments
    XFORM = None
    args = sys.argv[1:]
    if len(args) == 0:
        usage()
    kargs = []
    while len(args)>0:
        arg = args[0]
        if arg in KICKSTART_ARGS:
            # Get the transformation name if available
            if args[0] == '-n':
                XFORM = args[1]
            kargs.append(args.pop(0))
            if KICKSTART_ARGS[arg]:
                kargs.append(args.pop(0))
        else:
            break
    if len(args) == 0:
        usage()

    # Generate a random name for the fifo
    TMPDIR = os.getenv('TMPDIR','/tmp')
    fifo = os.path.join(TMPDIR,'strace-'+str(uuid4()))

    # Make a fifo for strace to write to
    os.mkfifo(fifo, 0600)

    # Construct complete argument string
    myargs = [KICKSTART] + kargs + \
        [STRACE,'-o',fifo,'-f','-F','-s','-x','-e',','.join(SYSCALLS)] + args

    # Launch kickstart + strace + application
    p = Popen(myargs)

    # Make sure it didn't fail immediately
    time.sleep(0.5)
    if p.poll() is not None:
        os.unlink(fifo)
        sys.exit(p.wait())

    try:
        try:
            # parse strace output
            parse_strace_output(fifo)
        except Exception:
            # Kill kickstart, no need to continue
            os.kill(p.pid, signal.SIGTERM)
            raise
    finally:
        # Remove the fifo
        os.unlink(fifo)

    # Wait for kickstart
    result = p.wait()

    # Print stats if result OK
    if result == 0:
        print >> sys.stderr,'xform','pid','exe','file','bread','nread','bwrite','nwrite','nseek','mode','flags'
        keys = PIDS.keys()
        keys.sort()
        for pid in keys:
            p = PIDS[pid]
            # Skip threads
            if p.thread:
                continue
            for file in p.files:
                print >> sys.stderr,XFORM,pid,p.exe,file.fname,file.bread,file.nread,file.bwrite,file.nwrite,file.nseek,file.mode,file.flags

    # Report the status from kickstart
    sys.exit(result)

if __name__ == '__main__':
    main()
pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/runstats
#!/usr/bin/env python
#
# Copyright 2009 University Of Southern California
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Parses the output of pprof and produces summary runtime statistics.""" __author__ = "Gideon Juve <juve@usc.edu>" __all__ = [] __version__ = "1.0" import sys, os, re from stats import Variable from analysis import Analysis, sorteditems class Executable: def __init__(self,xform,name): self.xform = xform self.name = name self.runtime = Variable() class Transformation: def __init__(self,name): self.name = name self.runtime = Variable() class RuntimeAnalysis(Analysis): def __init__(self): self.exes = {} self.xforms = {} self.file_re = re.compile("\.err(\.[0-9]{3})?$") def print_stats(self): print ",,runtime" print "transformation,executable,count,min,max,avg,stddev,sum" for exe in sorteditems(self.exes): print "%s,%s,%s" % (exe.xform,exe.name,exe.runtime) print "\n" print ",runtime" print "transformation,count,min,max,avg,stddev,sum" for xform in sorteditems(self.xforms): print "%s,%s" % (xform.name,xform.runtime) def is_datafile(self, file): return self.file_re.search(file) is not None def process_datafile(self,file): f = open(file,'r') line = f.readline() while line: if 'xform' in line: self.process_invocation(f) line = f.readline() f.close() def process_invocation(self,file): xfn = None xform_start = 1.0e100 xform_stop = 0.0 # Read lines line = file.readline() while line: # Skip lines if 'xform' in line: file.seek(len(line) * -1, os.SEEK_CUR) break if 'WARNING' in line: line = file.readline() continue if 'PTRACE_' in line: line = file.readline() continue #xform pid ppid exe lstart lstop tstart tstop vmpeak rsspeak utime stime wtime cutime cstime tok = line.split(' ') if len(tok) != 15: line = file.readline() continue if xfn is not None: if xfn != tok[0]: raise Exception("Transformation changed") else: xfn = tok[0] exn = os.path.basename(tok[3]) exe_start = float(tok[6]) exe_stop = float(tok[7]) id = xfn+"$"+exn if id in self.exes: exe = self.exes[id] else: exe = Executable(xfn,exn) self.exes[id] = exe exe.runtime.update(exe_stop - exe_start) xform_start = min(exe_start, xform_start) xform_stop = max(exe_stop, xform_stop) line = file.readline() #end of loop if xfn in self.xforms: xform = self.xforms[xfn] else: xform = Transformation(xfn) self.xforms[xfn] = xform xform.runtime.update(xform_stop - xform_start) if __name__ == '__main__': RuntimeAnalysis().analyze() ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/iostats�������������������������������������������������0000755�0001750�0001750�00000010060�11757531137�021455� 0����������������������������������������������������������������������������������������������������ustar 
�rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/usr/bin/env python # # Copyright 2009 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use self file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Parses the output of ioprof and produces summary IO statistics.""" __author__ = "Gideon Juve <juve@usc.edu>" __all__ = [] __version__ = "1.0" import sys, os, re from stats import Variable from analysis import Analysis, sorteditems class Process: def __init__(self,exe,pid): self.exe = exe self.pid = pid self.mread = 0.0 self.mwrite = 0.0 class Executable: def __init__(self,xform,name): self.xform = xform self.name = name self.mread = Variable() self.mwrite = Variable() class Transformation: def __init__(self,name): self.name = name self.mread = Variable() self.mwrite = Variable() class IOAnalysis(Analysis): def __init__(self): self.exes = {} self.xforms = {} self.file_re = re.compile("\.err(\.[0-9]{3})?$") def print_stats(self): print ",,mread,,,,,,mwrite" print "transformation,executable,count,min,max,avg,stddev,sum,count,min,max,avg,stddev,sum" for exe in sorteditems(self.exes): print "%s,%s,%s,%s" % (exe.xform,exe.name,exe.mread,exe.mwrite) print "\n" print ",mread,,,,,,mwrite" print "transformation,count,min,max,avg,stddev,sum,count,min,max,avg,stddev,sum" for xform in sorteditems(self.xforms): print "%s,%s,%s" % (xform.name,xform.mread,xform.mwrite) def update_xform(self,xform_name,mread,mwrite): # Update transformation stats if xform_name in self.xforms: xform = self.xforms[xform_name] else: xform = Transformation(xform_name) self.xforms[xform_name] = xform xform.mread.update(mread) xform.mwrite.update(mwrite) def update_exe(self,xform_name,exe_name,mread,mwrite): id = xform_name+"$"+exe_name if id in self.exes: exe = self.exes[id] else: exe = Executable(xform_name,exe_name) self.exes[id] = exe exe.mread.update(mread) exe.mwrite.update(mwrite) def is_datafile(self, file): return self.file_re.search(file) is not None def process_datafile(self,file): f = open(file,'r') line = f.readline() while line: if 'xform' in line: self.process_transformation(f) line = f.readline() f.close() def process_transformation(self,file): xform_name = None xform_mread = 0.0 xform_mwrite = 0.0 lpid = None line = file.readline() while line: # New transformation encountered if 'xform' in line: file.seek(len(line) * -1, os.SEEK_CUR) break # Skip non-data lines if 'WARNING' in line: line = file.readline() continue #xform pid exe file bread nread bwrite nwrite nseek mode flags tok = line.split(' ',9) if len(tok) != 10: continue xform = tok[0] pid = tok[1] exe = os.path.basename(tok[2]) fname = tok[3] mread = float(tok[4])/(1024*1024) mwrite = float(tok[6])/(1024*1024) # if new process was encountered if pid != lpid: if lpid is not None: self.update_exe(xform_name,exe_name,exe_mread,exe_mwrite) # process the old one exe_mread = 0.0 exe_mwrite = 0.0 lpid = pid 
xform_name = xform exe_name = exe # Skip non-files if fname in ['<stdin>','<stdout>','<stderr>','<socket>','<pipe>']: line = file.readline() continue # add up everything exe_mread += mread exe_mwrite += mwrite xform_mread += mread xform_mwrite += mwrite line = file.readline() #end of loop # process last exe if lpid is not None: self.update_exe(xform_name,exe_name,exe_mread,exe_mwrite) # process xform self.update_xform(xform_name,xform_mread,xform_mwrite) if __name__ == '__main__': IOAnalysis().analyze() ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/pprof.c�������������������������������������������������0000644�0001750�0001750�00000024750�11757531137�021346� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * Copyright 2009 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* This is a kickstart-like wrapper that collects memory and CPU usage info * about a transformation and all of its child processes. Each child (and * grandchild) is traced using ptrace. When the child is about to exit the * tracing proces (this process) looks it up in the /proc file system and * determines: what the maximum virtual memory size was (vmpeak), what the * maximum physical memory size was (rsspeak), how much time the process * spent in the kernel (stime), how much time the process spent in user * mode (utime) and how much wall-clock time elapsed between when the * process was launched and when it exited (wtime). The data is written to * stderr (because kickstart writes everything to stdout) and the child is * allowed to exit. * * NOTE: * This wrapper won't work if the transformation requires any executable to * be notified when one of its children stops (i.e. some process needs to * wait() for a child to get a SIGSTOP and then deliver a SIGCONT). See also * the man page for ptrace(). 
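 *
 * Where the numbers come from: vmpeak and rsspeak are the VmPeak and
 * VmHWM lines of /proc/<pid>/status (in kB); utime, stime, cutime and
 * cstime are clock-tick counts from /proc/<pid>/stat divided by
 * sysconf(_SC_CLK_TCK); wtime is tstop - tstart, both timestamps taken
 * with gettimeofday() when the pid is first and last seen by waitpid().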
*/ #include <sys/ptrace.h> #include <sys/wait.h> #include <unistd.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <sys/time.h> /* Check kernel version */ #include <linux/version.h> #if LINUX_VERSION_CODE < KERNEL_VERSION(2,5,46) #error "Linux 2.5.46 or greater is required" #endif /* Prior to version 2.5 glibc did not have these */ #if __GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ < 5) /* 0x4200-0x4300 are reserved for architecture-independent additions. */ #define PTRACE_SETOPTIONS 0x4200 #define PTRACE_GETEVENTMSG 0x4201 #define PTRACE_GETSIGINFO 0x4202 #define PTRACE_SETSIGINFO 0x4203 #endif /* Prior to version 2.7 glibc did not have these */ #if __GLIBC__ < 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ < 7) /* options set using PTRACE_SETOPTIONS */ #define PTRACE_O_TRACESYSGOOD 0x00000001 #define PTRACE_O_TRACEFORK 0x00000002 #define PTRACE_O_TRACEVFORK 0x00000004 #define PTRACE_O_TRACECLONE 0x00000008 #define PTRACE_O_TRACEEXEC 0x00000010 #define PTRACE_O_TRACEVFORKDONE 0x00000020 #define PTRACE_O_TRACEEXIT 0x00000040 #define PTRACE_O_MASK 0x0000007f /* Wait extended result codes for the above trace options. */ #define PTRACE_EVENT_FORK 1 #define PTRACE_EVENT_VFORK 2 #define PTRACE_EVENT_CLONE 3 #define PTRACE_EVENT_EXEC 4 #define PTRACE_EVENT_VFORK_DONE 5 #define PTRACE_EVENT_EXIT 6 #endif char XFORM[BUFSIZ] = "none"; typedef struct _pidlist_t { int pid; int ppid; int tgid; char exe[BUFSIZ]; int lstart; /* logical clock start time */ int lstop; /* logical clock stop time */ int vmpeak; /* peak virtuam memory size */ int rsspeak; /* peak physical memory usage */ double utime; /* time spent in user mode */ double stime; /* time spent in kernel mode */ double cutime; /* time waited on children were in user mode */ double cstime; /* time waited on children were in kernel mode */ double tstart; /* start time in seconds from epoch */ double tstop; /* stop time in seconds from epoch */ struct _pidlist_t *next; struct _pidlist_t *prev; } pidlist_t; pidlist_t *PIDS = NULL; pidlist_t *lookup(pid_t pid) { pidlist_t *cur; for (cur = PIDS; cur!=NULL; cur = cur->next) { if (cur->pid == pid) return cur; } return NULL; } pidlist_t *add_pid(pid_t pid) { pidlist_t *new; pidlist_t *cur; new = malloc(sizeof(pidlist_t)); new->pid = pid; new->next = NULL; new->prev = NULL; if (PIDS == NULL) { PIDS = new; } else { for (cur = PIDS; cur->next!=NULL; cur = cur->next); cur->next = new; new->prev = cur; } return new; } void remove_pid(pid_t pid) { pidlist_t *del; for (del = PIDS; del != NULL; del = del->next) { if (del->pid == pid) break; } if (del == NULL) return; if (del->prev != NULL) del->prev->next = del->next; if (del->next != NULL) del->next->prev = del->prev; if (del->next == NULL && del->prev == NULL) PIDS = NULL; free(del); } double get_time() { struct timeval tv; gettimeofday(&tv, NULL); return tv.tv_sec + ((double)tv.tv_usec / 1e6); } int read_exe(pidlist_t *item) { char link[BUFSIZ]; int size; sprintf(link,"/proc/%d/exe", item->pid); size = readlink(link,item->exe,BUFSIZ); if (size >= 0 && size < BUFSIZ) item->exe[size] = '\0'; return size; } int startswith(const char *line, const char *tok) { return strncmp(line, tok, strlen(tok)) == 0; } int read_meminfo(pidlist_t *item) { char statf[BUFSIZ], line[BUFSIZ]; FILE *f; sprintf(statf,"/proc/%d/status", item->pid); f = fopen(statf,"r"); while (fgets(line, BUFSIZ, f) != NULL) { if (startswith(line, "PPid")) { sscanf(line,"PPid:%d\n",&(item->ppid)); } else if (startswith(line, "Tgid")) { sscanf(line,"Tgid:%d\n",&(item->tgid)); } 
else if (startswith(line,"VmPeak")) { sscanf(line,"VmPeak:%d kB\n",&(item->vmpeak)); } else if (startswith(line,"VmHWM")) { sscanf(line,"VmHWM:%d kB\n",&(item->rsspeak)); } } if (ferror(f)) { fclose(f); return -1; } return fclose(f); } int read_statinfo(pidlist_t *item) { char statf[BUFSIZ]; FILE *f; unsigned long utime, stime; long cutime, cstime; long clocks; sprintf(statf,"/proc/%d/stat", item->pid); f = fopen(statf,"r"); fscanf(f, "%*d %*s %*c %*d %*d %*d %*d %*d %*u %*u %*u %*u %*u %lu %lu %ld %ld", &utime, &stime, &cutime, &cstime); /* Adjust by number of clock ticks per second */ clocks = sysconf(_SC_CLK_TCK); item->utime = ((double)utime) / clocks; item->stime = ((double)stime) / clocks; item->cutime = ((double)cutime) / clocks; item->cstime = ((double)cstime) / clocks; if (ferror(f)) { fclose(f); return -1; } return fclose(f); } void print_header() { fprintf(stderr, "xform pid ppid exe lstart lstop tstart tstop vmpeak rsspeak utime stime wtime cutime cstime\n"); } void print_report(pidlist_t *item) { /* Don't report threads */ if (item->tgid != item->pid) return; fprintf(stderr, "%s %d %d %s %d %d %lf %lf %d %d %lf %lf %lf %lf %lf\n", XFORM, item->pid, item->ppid, item->exe, item->lstart, item->lstop, item->tstart, item->tstop, item->vmpeak, item->rsspeak, item->utime, item->stime, item->tstop - item->tstart, item->cutime, item->cstime); } int main(int argc, char **argv) { pid_t cpid; pidlist_t *child; int i, status, event, lclock; char *PEGASUS_HOME; char kickstart[BUFSIZ]; /* check for kickstart in local dir */ sprintf(kickstart, "./kickstart"); if (access(kickstart, X_OK) < 0) { /* check for PEGASUS_HOME env var */ PEGASUS_HOME = getenv("PEGASUS_HOME"); if (PEGASUS_HOME == NULL) { fprintf(stderr, "Please set PEGASUS_HOME\n"); exit(1); } /* check for kickstart in $PEGASUS_HOME/bin */ sprintf(kickstart, "%s/bin/kickstart", PEGASUS_HOME); if (access(kickstart, X_OK) < 0) { fprintf(stderr, "cannot execute kickstart: %s\n", kickstart); exit(1); } } /* Get transformation name if possible */ for (i=0; i<argc; i++) { if (strcmp(argv[i], "-n") == 0) { strcpy(XFORM, argv[i+1]); break; } } /* Fork kickstart */ cpid = fork(); if (cpid < 0) { perror("fork"); exit(1); } else if(cpid == 0) { if (ptrace(PTRACE_TRACEME, 0, NULL, NULL) < 0) { perror("PTRACE_TRACEME"); exit(1); } dup2(1, 2); /* redirect stderr to stdout */ argv[0] = "kickstart"; execv(kickstart, argv); _exit(0); } else { /* initialize logical clock */ lclock = 0; print_header(); while (1) { /* __WALL is needed so that we can wait on threads too */ cpid = waitpid(0, &status, __WALL); /* find the child */ child = lookup(cpid); /* if not found, then it is new, so add it */ if (child == NULL) { child = add_pid(cpid); child->tstart = get_time(); child->lstart = lclock++; if (ptrace(PTRACE_SETOPTIONS, cpid, NULL, PTRACE_O_TRACEEXIT|PTRACE_O_TRACEFORK| PTRACE_O_TRACEVFORK|PTRACE_O_TRACECLONE)) { perror("PTRACE_SETOPTIONS"); exit(1); } } /* child exited */ if (WIFEXITED(status)) { remove_pid(cpid); if (PIDS == NULL) break; } /* child was stopped */ if (WIFSTOPPED(status)) { /* because of an event we wanted to see */ if(WSTOPSIG(status) == SIGTRAP) { event = status >> 16; if (event == PTRACE_EVENT_EXIT) { child->tstop = get_time(); child->lstop = lclock++; /* fill in exe name */ if (read_exe(child) < 0) { perror("read_exe"); exit(1); } /* fill in memory info */ if (read_meminfo(child) < 0) { perror("read_meminfo"); exit(1); } if (read_statinfo(child) < 0) { perror("read_statinfo"); exit(1); } /* print stats */ print_report(child); } /* 
tell child to continue */ if ( ptrace(PTRACE_CONT, cpid, NULL, NULL)) { perror("PTRACE_CONT"); exit(1); } } /* because it got a signal */ else { /* pass the signal on to the child */ if (ptrace(PTRACE_CONT, cpid, 0, WSTOPSIG(status))) { perror("PTRACE_CONT"); exit(1); } } } } } return 0; }

pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/utilstats

#!/usr/bin/env python # # Copyright 2009 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Parses the output of pprof and produces summary utilization statistics.""" __author__ = "Gideon Juve <juve@usc.edu>" __all__ = [] __version__ = "1.0" import sys, os, re from stats import Variable from analysis import Analysis, sorteditems class Executable: def __init__(self,xform,name): self.xform = xform self.name = name self.util = Variable() class Transformation: def __init__(self,name): self.name = name self.util = Variable() class UtilizationAnalysis(Analysis): def __init__(self): self.exes = {} self.xforms = {} self.file_re = re.compile("\.err(\.[0-9]{3})?$") def print_stats(self): print ",,util" print "transformation,executable,count,min,max,avg,stddev,sum" for exe in sorteditems(self.exes): print "%s,%s,%s" % (exe.xform,exe.name,exe.util) print "\n" print ",util" print "transformation,count,min,max,avg,stddev,sum" for xform in sorteditems(self.xforms): print "%s,%s" % (xform.name,xform.util) def is_datafile(self, file): return self.file_re.search(file) is not None def process_datafile(self,file): f = open(file,'r') line = f.readline() while line: if 'xform' in line: self.process_invocation(f) line = f.readline() f.close() def process_invocation(self,file): xfn = None tstart = 1.0e100 tstop = 0.0 sum_utime = 0.0 sum_stime = 0.0 # Read lines line = file.readline() while line: # Skip lines if 'xform' in line: file.seek(len(line) * -1, os.SEEK_CUR) break if 'WARNING' in line: line = file.readline() continue if 'PTRACE_' in line: line = file.readline() continue #xform pid ppid exe lstart lstop tstart tstop vmpeak rsspeak utime stime wtime cutime cstime tok = line.split(' ') if len(tok) != 15: line = file.readline() continue if xfn is not None: if xfn != tok[0]: raise Exception("Transformation changed") else: xfn = tok[0] exn = os.path.basename(tok[3]) tstart = min(float(tok[6]), tstart) tstop = max(float(tok[7]), tstop) utime = float(tok[10]) stime = float(tok[11]) wtime = float(tok[12]) id = xfn+"$"+exn if id in self.exes: exe = self.exes[id] else: exe = Executable(xfn,exn) self.exes[id] = exe util = (utime + stime) / wtime exe.util.update(util) sum_utime += utime
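# user and kernel time accumulate separately; their sum over the wall-clock span yields the per-invocation utilization computed below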
sum_stime += stime line = file.readline() #end of loop if xfn in self.xforms: xform = self.xforms[xfn] else: xform = Transformation(xfn) self.xforms[xfn] = xform xform.util.update((sum_utime+sum_stime)/(tstop-tstart)) if __name__ == '__main__': UtilizationAnalysis().analyze()

pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/Makefile

CC = gcc
CFLAGS = -Wall
TARGET = pprof

all: $(TARGET)

pprof: pprof.o

clean:
	rm -f *.o $(TARGET)

pegasus-wms_4.0.1+dfsg/src/tools/wfprof/old/stats.py

#!/usr/bin/env python # # Copyright 2010 University Of Southern California # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.
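# The Variable class below keeps running min/max/mean plus the sum of
# squared deviations (M2) using Welford's online algorithm, so the standard
# deviation comes out of a single pass without storing any samples. A
# minimal usage sketch (hypothetical values):
#
#   v = Variable()
#   for x in [0.25, 0.50, 0.75]:
#       v.update(x)
#   print v    # -> 3,0.250000,0.750000,0.500000,0.250000,1.500000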
"""This module contains utilities for computing summary statistics""" __author__ = "Gideon Juve <juve@usc.edu>" __all__ = ["Variable"] __version__ = "1.0" from math import sqrt class Variable: def __init__(self): self.n = 0 self.max = 0 self.min = 1e100 self.mean = 0 self.M2 = 0 self.sum = 0 def update(self, x): self.min = min(self.min, x) self.max = max(self.max, x) self.n += 1 delta = x - self.mean self.mean = self.mean + (delta/self.n) self.M2 = self.M2 + (delta*(x-self.mean)) self.sum += x def stddev(self): if self.n <= 1: return 0.0 else: return sqrt(self.M2/(self.n - 1)) def variance(self): if self.n <= 1: return 0.0 else: return self.M2/(self.n - 1) def __str__(self): return "%d,%f,%f,%f,%f,%f" % (self.n, self.min, self.max, self.mean, self.stddev(), self.sum) ����������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/wfprof/syscall_32.h������������������������������������������������0000644�0001750�0001750�00000027317�11757531137�021427� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������{"restart_syscall", handle_none}, /*0*/ {"exit", handle_none}, /*1*/ {"fork", handle_none}, /*2*/ {"read", handle_none}, /*3*/ {"write", handle_none}, /*4*/ {"open", handle_none}, /*5*/ {"close", handle_none}, /*6*/ {"waitpid", handle_none}, /*7*/ {"creat", handle_none}, /*8*/ {"link", handle_none}, /*9*/ {"unlink", handle_none}, /*10*/ {"execve", handle_none}, /*11*/ {"chdir", handle_none}, /*12*/ {"time", handle_none}, /*13*/ {"mknod", handle_none}, /*14*/ {"chmod", handle_none}, /*15*/ {"lchown", handle_none}, /*16*/ {"break", handle_none}, /*17*/ {"oldstat", handle_none}, /*18*/ {"lseek", handle_none}, /*19*/ {"getpid", handle_none}, /*20*/ {"mount", handle_none}, /*21*/ {"umount", handle_none}, /*22*/ {"setuid", handle_none}, /*23*/ {"getuid", handle_none}, /*24*/ {"stime", handle_none}, /*25*/ {"ptrace", handle_none}, /*26*/ {"alarm", handle_none}, /*27*/ {"oldfstat", handle_none}, /*28*/ {"pause", handle_none}, /*29*/ {"utime", handle_none}, /*30*/ {"stty", handle_none}, /*31*/ {"gtty", handle_none}, /*32*/ {"access", handle_none}, /*33*/ {"nice", handle_none}, /*34*/ {"ftime", handle_none}, /*35*/ {"sync", handle_none}, /*36*/ {"kill", handle_none}, /*37*/ {"rename", handle_none}, /*38*/ {"mkdir", handle_none}, /*39*/ {"rmdir", handle_none}, /*40*/ {"dup", handle_none}, /*41*/ {"pipe", handle_none}, /*42*/ {"times", handle_none}, /*43*/ {"prof", handle_none}, /*44*/ {"brk", handle_none}, /*45*/ {"setgid", handle_none}, /*46*/ {"getgid", handle_none}, /*47*/ {"signal", handle_none}, /*48*/ {"geteuid", handle_none}, /*49*/ {"getegid", handle_none}, /*50*/ {"acct", handle_none}, /*51*/ {"umount2", handle_none}, /*52*/ {"lock", handle_none}, /*53*/ {"ioctl", handle_none}, /*54*/ {"fcntl", handle_none}, /*55*/ {"mpx", handle_none}, /*56*/ {"setpgid", handle_none}, /*57*/ {"ulimit", handle_none}, /*58*/ {"oldolduname", handle_none}, /*59*/ {"umask", handle_none}, /*60*/ {"chroot", handle_none}, /*61*/ {"ustat", handle_none}, /*62*/ {"dup2", handle_none}, /*63*/ {"getppid", handle_none}, /*64*/ {"getpgrp", handle_none}, /*65*/ {"setsid", handle_none}, /*66*/ {"sigaction", handle_none}, /*67*/ {"sgetmask", handle_none}, /*68*/ {"ssetmask", 
handle_none}, /*69*/ {"setreuid", handle_none}, /*70*/ {"setregid", handle_none}, /*71*/ {"sigsuspend", handle_none}, /*72*/ {"sigpending", handle_none}, /*73*/ {"sethostname", handle_none}, /*74*/ {"setrlimit", handle_none}, /*75*/ {"getrlimit", handle_none}, /*76*/ {"getrusage", handle_none}, /*77*/ {"gettimeofday", handle_none}, /*78*/ {"settimeofday", handle_none}, /*79*/ {"getgroups", handle_none}, /*80*/ {"setgroups", handle_none}, /*81*/ {"select", handle_none}, /*82*/ {"symlink", handle_none}, /*83*/ {"oldlstat", handle_none}, /*84*/ {"readlink", handle_none}, /*85*/ {"uselib", handle_none}, /*86*/ {"swapon", handle_none}, /*87*/ {"reboot", handle_none}, /*88*/ {"readdir", handle_none}, /*89*/ {"mmap", handle_none}, /*90*/ {"munmap", handle_none}, /*91*/ {"truncate", handle_none}, /*92*/ {"ftruncate", handle_none}, /*93*/ {"fchmod", handle_none}, /*94*/ {"fchown", handle_none}, /*95*/ {"getpriority", handle_none}, /*96*/ {"setpriority", handle_none}, /*97*/ {"profil", handle_none}, /*98*/ {"statfs", handle_none}, /*99*/ {"fstatfs", handle_none}, /*100*/ {"ioperm", handle_none}, /*101*/ {"socketcall", handle_none}, /*102*/ {"syslog", handle_none}, /*103*/ {"setitimer", handle_none}, /*104*/ {"getitimer", handle_none}, /*105*/ {"stat", handle_none}, /*106*/ {"lstat", handle_none}, /*107*/ {"fstat", handle_none}, /*108*/ {"olduname", handle_none}, /*109*/ {"iopl", handle_none}, /*110*/ {"vhangup", handle_none}, /*111*/ {"idle", handle_none}, /*112*/ {"vm86old", handle_none}, /*113*/ {"wait4", handle_none}, /*114*/ {"swapoff", handle_none}, /*115*/ {"sysinfo", handle_none}, /*116*/ {"ipc", handle_none}, /*117*/ {"fsync", handle_none}, /*118*/ {"sigreturn", handle_none}, /*119*/ {"clone", handle_none}, /*120*/ {"setdomainname", handle_none}, /*121*/ {"uname", handle_none}, /*122*/ {"modify_ldt", handle_none}, /*123*/ {"adjtimex", handle_none}, /*124*/ {"mprotect", handle_none}, /*125*/ {"sigprocmask", handle_none}, /*126*/ {"create_module", handle_none}, /*127*/ {"init_module", handle_none}, /*128*/ {"delete_module", handle_none}, /*129*/ {"get_kernel_syms", handle_none}, /*130*/ {"quotactl", handle_none}, /*131*/ {"getpgid", handle_none}, /*132*/ {"fchdir", handle_none}, /*133*/ {"bdflush", handle_none}, /*134*/ {"sysfs", handle_none}, /*135*/ {"personality", handle_none}, /*136*/ {"afs_syscall", handle_none}, /*137*/ {"setfsuid", handle_none}, /*138*/ {"setfsgid", handle_none}, /*139*/ {"_llseek", handle_none}, /*140*/ {"getdents", handle_none}, /*141*/ {"_newselect", handle_none}, /*142*/ {"flock", handle_none}, /*143*/ {"msync", handle_none}, /*144*/ {"readv", handle_none}, /*145*/ {"writev", handle_none}, /*146*/ {"getsid", handle_none}, /*147*/ {"fdatasync", handle_none}, /*148*/ {"_sysctl", handle_none}, /*149*/ {"mlock", handle_none}, /*150*/ {"munlock", handle_none}, /*151*/ {"mlockall", handle_none}, /*152*/ {"munlockall", handle_none}, /*153*/ {"sched_setparam", handle_none}, /*154*/ {"sched_getparam", handle_none}, /*155*/ {"sched_setscheduler", handle_none}, /*156*/ {"sched_getscheduler", handle_none}, /*157*/ {"sched_yield", handle_none}, /*158*/ {"sched_get_priority_max", handle_none}, /*159*/ {"sched_get_priority_min", handle_none}, /*160*/ {"sched_rr_get_interval", handle_none}, /*161*/ {"nanosleep", handle_none}, /*162*/ {"mremap", handle_none}, /*163*/ {"setresuid", handle_none}, /*164*/ {"getresuid", handle_none}, /*165*/ {"vm86", handle_none}, /*166*/ {"query_module", handle_none}, /*167*/ {"poll", handle_none}, /*168*/ {"nfsservctl", handle_none}, /*169*/ 
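/* entries are kept in syscall-number order, one per i386 system call, so a tracer can index this table directly with SC_NR(regs) */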
{"setresgid", handle_none}, /*170*/ {"getresgid", handle_none}, /*171*/ {"prctl", handle_none}, /*172*/ {"rt_sigreturn", handle_none}, /*173*/ {"rt_sigaction", handle_none}, /*174*/ {"rt_sigprocmask", handle_none}, /*175*/ {"rt_sigpending", handle_none}, /*176*/ {"rt_sigtimedwait", handle_none}, /*177*/ {"rt_sigqueueinfo", handle_none}, /*178*/ {"rt_sigsuspend", handle_none}, /*179*/ {"pread64", handle_none}, /*180*/ {"pwrite64", handle_none}, /*181*/ {"chown", handle_none}, /*182*/ {"getcwd", handle_none}, /*183*/ {"capget", handle_none}, /*184*/ {"capset", handle_none}, /*185*/ {"sigaltstack", handle_none}, /*186*/ {"sendfile", handle_none}, /*187*/ {"getpmsg", handle_none}, /*188*/ {"putpmsg", handle_none}, /*189*/ {"vfork", handle_none}, /*190*/ {"ugetrlimit", handle_none}, /*191*/ {"mmap2", handle_none}, /*192*/ {"truncate64", handle_none}, /*193*/ {"ftruncate64", handle_none}, /*194*/ {"stat64", handle_none}, /*195*/ {"lstat64", handle_none}, /*196*/ {"fstat64", handle_none}, /*197*/ {"lchown32", handle_none}, /*198*/ {"getuid32", handle_none}, /*199*/ {"getgid32", handle_none}, /*200*/ {"geteuid32", handle_none}, /*201*/ {"getegid32", handle_none}, /*202*/ {"setreuid32", handle_none}, /*203*/ {"setregid32", handle_none}, /*204*/ {"getgroups32", handle_none}, /*205*/ {"setgroups32", handle_none}, /*206*/ {"fchown32", handle_none}, /*207*/ {"setresuid32", handle_none}, /*208*/ {"getresuid32", handle_none}, /*209*/ {"setresgid32", handle_none}, /*210*/ {"getresgid32", handle_none}, /*211*/ {"chown32", handle_none}, /*212*/ {"setuid32", handle_none}, /*213*/ {"setgid32", handle_none}, /*214*/ {"setfsuid32", handle_none}, /*215*/ {"setfsgid32", handle_none}, /*216*/ {"pivot_root", handle_none}, /*217*/ {"mincore", handle_none}, /*218*/ {"madvise", handle_none}, /*219*/ {"getdents64", handle_none}, /*220*/ {"fcntl64", handle_none}, /*221*/ {"SYS_222", handle_none}, /*222*/ {"SYS_223", handle_none}, /*223*/ {"gettid", handle_none}, /*224*/ {"readahead", handle_none}, /*225*/ {"setxattr", handle_none}, /*226*/ {"lsetxattr", handle_none}, /*227*/ {"fsetxattr", handle_none}, /*228*/ {"getxattr", handle_none}, /*229*/ {"lgetxattr", handle_none}, /*230*/ {"fgetxattr", handle_none}, /*231*/ {"listxattr", handle_none}, /*232*/ {"llistxattr", handle_none}, /*233*/ {"flistxattr", handle_none}, /*234*/ {"removexattr", handle_none}, /*235*/ {"lremovexattr", handle_none}, /*236*/ {"fremovexattr", handle_none}, /*237*/ {"tkill", handle_none}, /*238*/ {"sendfile64", handle_none}, /*239*/ {"futex", handle_none}, /*240*/ {"sched_setaffinity", handle_none}, /*241*/ {"sched_getaffinity", handle_none}, /*242*/ {"set_thread_area", handle_none}, /*243*/ {"get_thread_area", handle_none}, /*244*/ {"io_setup", handle_none}, /*245*/ {"io_destroy", handle_none}, /*246*/ {"io_getevents", handle_none}, /*247*/ {"io_submit", handle_none}, /*248*/ {"io_cancel", handle_none}, /*249*/ {"fadvise64", handle_none}, /*250*/ {"SYS_251", handle_none}, /*251*/ {"exit_group", handle_none}, /*252*/ {"lookup_dcookie", handle_none}, /*253*/ {"epoll_create", handle_none}, /*254*/ {"epoll_ctl", handle_none}, /*255*/ {"epoll_wait", handle_none}, /*256*/ {"remap_file_pages", handle_none}, /*257*/ {"set_tid_address", handle_none}, /*258*/ {"timer_create", handle_none}, /*259*/ {"timer_settime", handle_none}, /*260*/ {"timer_gettime", handle_none}, /*261*/ {"timer_getoverrun", handle_none}, /*262*/ {"timer_delete", handle_none}, /*263*/ {"clock_settime", handle_none}, /*264*/ {"clock_gettime", handle_none}, /*265*/ {"clock_getres", 
handle_none}, /*266*/ {"clock_nanosleep", handle_none}, /*267*/ {"statfs64", handle_none}, /*268*/ {"fstatfs64", handle_none}, /*269*/ {"tgkill", handle_none}, /*270*/ {"utimes", handle_none}, /*271*/ {"fadvise64_64", handle_none}, /*272*/ {"vserver", handle_none}, /*273*/ {"mbind", handle_none}, /*274*/ {"get_mempolicy", handle_none}, /*275*/ {"set_mempolicy", handle_none}, /*276*/ {"mq_open", handle_none}, /*277*/ {"mq_unlink", handle_none}, /*278*/ {"mq_timedsend", handle_none}, /*279*/ {"mq_timedreceive", handle_none}, /*280*/ {"mq_notify", handle_none}, /*281*/ {"mq_getsetattr", handle_none}, /*282*/ {"kexec_load", handle_none}, /*283*/ {"waitid", handle_none}, /*284*/ {"setaltroot", handle_none}, /*285*/ {"add_key", handle_none}, /*286*/ {"request_key", handle_none}, /*287*/ {"keyctl", handle_none}, /*288*/ {"ioprio_set", handle_none}, /*289*/ {"ioprio_get", handle_none}, /*290*/ {"inotify_init", handle_none}, /*291*/ {"inotify_add_watch", handle_none}, /*292*/ {"inotify_rm_watch", handle_none}, /*293*/ {"migrate_pages", handle_none}, /*294*/ {"openat", handle_none}, /*295*/ {"mkdirat", handle_none}, /*296*/ {"mknodat", handle_none}, /*297*/ {"fchownat", handle_none}, /*298*/ {"futimesat", handle_none}, /*299*/ {"fstatat64", handle_none}, /*300*/ {"unlinkat", handle_none}, /*301*/ {"renameat", handle_none}, /*302*/ {"linkat", handle_none}, /*303*/ {"symlinkat", handle_none}, /*304*/ {"readlinkat", handle_none}, /*305*/ {"fchmodat", handle_none}, /*306*/ {"faccessat", handle_none}, /*307*/ {"pselect6", handle_none}, /*308*/ {"ppoll", handle_none}, /*309*/ {"unshare", handle_none}, /*310*/ {"set_robust_list", handle_none}, /*311*/ {"get_robust_list", handle_none}, /*312*/ {"splice", handle_none}, /*313*/ {"sync_file_range", handle_none}, /*314*/ {"tee", handle_none}, /*315*/ {"vmsplice", handle_none}, /*316*/ {"move_pages", handle_none}, /*317*/ {"getcpu", handle_none}, /*318*/ {"epoll_pwait", handle_none}, /*319*/ {"utimensat", handle_none}, /*320*/ {"signalfd", handle_none}, /*321*/ {"timerfd_create", handle_none}, /*322*/ {"eventfd", handle_none}, /*323*/ {"fallocate", handle_none}, /*324*/ {"timerfd_settime", handle_none}, /*325*/ {"timerfd_gettime", handle_none}, /*326*/ {"signalfd4", handle_none}, /*327*/ {"eventfd2", handle_none}, /*328*/ {"epoll_create1", handle_none}, /*329*/ {"dup3", handle_none}, /*330*/ {"pipe2", handle_none}, /*331*/ {"inotify_init1", handle_none}, /*332*/

pegasus-wms_4.0.1+dfsg/src/tools/wfprof/syscall.c

#include <stdio.h> #include "syscall.h" /* On i386 we have to do something special because there * is only one system call, socketcall, for sockets.
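 * A handler for it would typically demultiplex on the first argument
 * (SYS_SOCKET, SYS_ACCEPT, and friends from <linux/net.h>) before
 * deciding how to account for the descriptor involved.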
*/ #ifdef __i386__ # include <linux/net.h> /* for SYS_SOCKET, etc subcalls */ #endif /* System call handler table */ const struct syscallent syscalls[] = { #ifdef __i386__ # include "syscall_32.h" #else # include "syscall_64.h" #endif }; int handle_open(child_t *c) { fprintf(stderr, "PID %d: open %ld\n", c->pid, c->sc_rval); return 0; } int handle_close(child_t *c) { fprintf(stderr, "PID %d: close %ld = %ld\n", c->pid, c->sc_args[0], c->sc_rval); return 0; } int handle_read(child_t *c) { fprintf(stderr, "PID %d: read %ld = %ld\n", c->pid, c->sc_args[0], c->sc_rval); return 0; } int handle_write(child_t *c) { fprintf(stderr, "write\n"); return 0; }

pegasus-wms_4.0.1+dfsg/src/tools/wfprof/Makefile

CC = gcc
CFLAGS = -Wall
TARGET = wfprof

all: $(TARGET)

wfprof: wfprof.o syscall.o child.o

clean:
	rm -f *.o $(TARGET)

pegasus-wms_4.0.1+dfsg/src/tools/wfprof/child.c

#include <stdlib.h> #include <stdio.h> #include <string.h> #include <unistd.h> #include "child.h" #include "list.h" static child_t *CHILDREN = NULL; child_t *find_child(pid_t pid) { child_t *cur; LIST_FOREACH(CHILDREN, cur) { if (cur->pid == pid) return cur; } return NULL; } child_t *add_child(pid_t pid) { child_t *new; new = malloc(sizeof(child_t)); new->pid = pid; new->ppid = 0; new->tgid = 0; new->insyscall = 0; new->sc_nr = -1; new->next = NULL; new->prev = NULL; LIST_APPEND(CHILDREN, new); return new; } void remove_child(pid_t pid) { child_t *del; del = find_child(pid); if (del == NULL) return; LIST_DELETE(CHILDREN, del); free(del); } int no_children() { return (CHILDREN == NULL) ?
1 : 0; } int read_exeinfo(child_t *c) { char link[128]; int size; sprintf(link, "/proc/%d/exe", c->pid); size = readlink(link, c->exe, MAX_NAME); if (size >= 0 && size < MAX_NAME) c->exe[size] = '\0'; return size; } int startswith(const char *line, const char *tok) { return strncmp(line, tok, strlen(tok)) == 0; } int read_meminfo(child_t *c) { char statf[128], line[BUFSIZ]; FILE *f; sprintf(statf, "/proc/%d/status", c->pid); f = fopen(statf, "r"); while (fgets(line, BUFSIZ, f) != NULL) { if (startswith(line, "PPid")) { sscanf(line,"PPid:%d\n",&(c->ppid)); } else if (startswith(line, "Tgid")) { sscanf(line,"Tgid:%d\n",&(c->tgid)); } else if (startswith(line,"VmPeak")) { sscanf(line,"VmPeak:%d kB\n",&(c->vmpeak)); } else if (startswith(line,"VmHWM")) { sscanf(line,"VmHWM:%d kB\n",&(c->rsspeak)); } } if (ferror(f)) { fclose(f); return -1; } return fclose(f); } int read_statinfo(child_t *c) { char statf[128]; FILE *f; unsigned long utime, stime; long cutime, cstime; long clocks; sprintf(statf,"/proc/%d/stat", c->pid); f = fopen(statf,"r"); fscanf(f, "%*d %*s %*c %*d %*d %*d %*d %*d %*u %*u %*u %*u %*u %lu %lu %ld %ld", &utime, &stime, &cutime, &cstime); /* Adjust by number of clock ticks per second */ clocks = sysconf(_SC_CLK_TCK); c->utime = ((double)utime) / clocks; c->stime = ((double)stime) / clocks; c->cutime = ((double)cutime) / clocks; c->cstime = ((double)cstime) / clocks; if (ferror(f)) { fclose(f); return -1; } return fclose(f); }

pegasus-wms_4.0.1+dfsg/src/tools/wfprof/syscall.h

#ifndef WFPROF_SYSCALL_H #define WFPROF_SYSCALL_H #include "child.h" /* Macros to inspect user_regs_struct */ #ifdef __i386__ #define SC_NR(r) r.orig_eax #define SC_ARG0(r) r.ebx #define SC_ARG1(r) r.ecx #define SC_ARG2(r) r.edx #define SC_ARG3(r) r.esi #define SC_ARG4(r) r.edi #define SC_ARG5(r) r.ebp #define SC_RVAL(r) r.eax #else #define SC_NR(r) r.orig_rax #define SC_ARG0(r) r.rdi #define SC_ARG1(r) r.rsi #define SC_ARG2(r) r.rdx #define SC_ARG3(r) r.rcx #define SC_ARG4(r) r.r8 #define SC_ARG5(r) r.r9 #define SC_RVAL(r) r.rax #endif struct syscallent { const char *name; int (*handler)(child_t *c); }; extern const struct syscallent syscalls[]; #define handle_none 0 int handle_open(child_t *c); int handle_openat(child_t *c); int handle_creat(child_t *c); int handle_close(child_t *c); int handle_read(child_t *c); int handle_write(child_t *c); int handle_lseek(child_t *c); int handle_dup(child_t *c); int handle_dup2(child_t *c); int handle_pipe(child_t *c); int handle_fcntl(child_t *c); /* only F_DUPFD */ int handle_readv(child_t *c); int handle_writev(child_t *c); int handle_pread64(child_t *c); int handle_pwrite64(child_t *c); int handle_mq_open(child_t *c); int handle_sendfile(child_t *c); int handle_epoll_create(child_t *c); /* since linux 2.5.44 */ int handle_signalfd(child_t *c); /* since linux 2.6.22 */ int handle_eventfd(child_t *c); /* since linux 2.6.22 */ int
handle_timerfd_create(child_t *c); /* since linux 2.6.25 */ #ifdef __i386__ int handle_socketcall(child_t *c); int handle__llseek(child_t *c); /* same as lseek64? */ int handle_pread(child_t *c); int handle_pwrite(child_t *c); int handle_fcntl64(child_t *c); int handle_sendfile64(child_t *c); #endif #ifdef __amd64__ int handle_socket(child_t *c); int handle_accept(child_t *c); int handle_socketpair(child_t *c); #endif #endif /* WFPROF_SYSCALL_H */

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/tools.c

/** * Copyright 2007-2010 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <errno.h> #include <math.h> #include <stdio.h> #include <string.h> #include <unistd.h> #include "tools.h" static const char* RCS_ID = "$Id: tools.c 4537 2011-09-27 17:02:53Z voeckler $"; ssize_t writen( int fd, const char* buffer, ssize_t n, unsigned restart ) /* purpose: write all n bytes in buffer, if possible at all * paramtr: fd (IN): filedescriptor open for writing * buffer (IN): bytes to write (must be at least n byte long) * n (IN): number of bytes to write * restart (IN): if true, try to restart write at max that often * returns: n, if everything was written, or * [0..n-1], if some bytes were written, but then failed, * < 0, if some error occurred. */ { int start = 0; while ( start < n ) { int size = write( fd, buffer+start, n-start ); if ( size < 0 ) { if ( restart && errno == EINTR ) { restart--; continue; } return size; } else { start += size; } } return n; } ssize_t showerr( const char* fmt, ... ) { char line[MAXSTR]; va_list ap; va_start( ap, fmt ); vsnprintf( line, sizeof(line), fmt, ap ); va_end(ap); /* (almost) atomic write */ return writen( STDOUT_FILENO, line, strlen(line), 3 ); } double timespec( struct timeval* tv ) { return ( tv->tv_sec + tv->tv_usec / 1E6 ); } double now( time_t* when ) /* purpose: obtains a UTC timestamp with microsecond resolution. * paramtr: when (opt.
OUT): where to save integral seconds into. * returns: the timestamp, or -1.0 if it was completely impossible. */ { int timeout = 0; struct timeval t = { -1, 0 }; while ( gettimeofday( &t, NULL ) == -1 && timeout < 10 ) timeout++; if ( when != NULL ) *when = t.tv_sec; return timespec(&t); } char* isodate( time_t seconds, char* buffer, size_t size ) /* purpose: formats ISO 8601 timestamp into given buffer (simplified) * paramtr: seconds (IN): time stamp * buffer (OUT): where to put the results * size (IN): capacity of buffer * returns: pointer to start of buffer for convenience. */ { struct tm zulu = *gmtime(&seconds); struct tm local = *localtime(&seconds); zulu.tm_isdst = local.tm_isdst; { time_t distance = (seconds - mktime(&zulu)) / 60; int hours = distance / 60; int minutes = distance < 0 ? -distance % 60 : distance % 60; size_t len = strftime( buffer, size, "%Y-%m-%dT%H:%M:%S", &local ); snprintf( buffer+len, size-len, "%+03d:%02d", hours, minutes ); } return buffer; } char* iso2date( double seconds_wf, char* buffer, size_t size ) /* purpose: formats ISO 8601 timestamp into given buffer (simplified) * paramtr: seconds_wf (IN): time stamp with fractional seconds (millis) * buffer (OUT): where to put the results * size (IN): capacity of buffer * returns: pointer to start of buffer for convenience. */ { char millis[8]; double integral, fractional = modf(seconds_wf,&integral); time_t seconds = (time_t) integral; struct tm zulu = *gmtime(&seconds); struct tm local = *localtime(&seconds); zulu.tm_isdst = local.tm_isdst; snprintf( millis, sizeof(millis), "%.3f", fractional ); { time_t distance = (seconds - mktime(&zulu)) / 60; int hours = distance / 60; int minutes = distance < 0 ? -distance % 60 : distance % 60; size_t len = strftime( buffer, size, "%Y-%m-%dT%H:%M:%S", &local ); snprintf( buffer+len, size-len, "%s%+03d:%02d", millis+1, hours, minutes ); } return buffer; }

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/statinfo.c

/** * Copyright 2007-2011 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
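 *
 * This file provides myaccess(), an executability test against the
 * effective user and group ids, and find_executable(), a PATH-style
 * lookup that pegasus-cluster uses to resolve bare application names.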
*/ #include <sys/types.h> #include <sys/stat.h> #include <errno.h> #include <paths.h> #include <string.h> #include <stdlib.h> #include <unistd.h> #include "statinfo.h" static const char* RCS_ID = "$Id: statinfo.c 4638 2011-11-07 20:18:54Z voeckler $"; int myaccess( const char* path ) /* purpose: check a given file for being accessible and executable * under the currently effective user and group id. * paramtr: path (IN): current path to check * returns: 0 if the file is accessible, -1 for not */ { /* sanity check */ if ( path && *path ) { struct stat st; if ( stat(path,&st) == 0 && S_ISREG(st.st_mode) ) { /* stat on file succeeded, and it is a regular file */ if ( ( st.st_uid == geteuid() && (S_IXUSR & st.st_mode) == S_IXUSR ) || ( st.st_gid == getegid() && (S_IXGRP & st.st_mode) == S_IXGRP ) || ( (S_IXOTH & st.st_mode) == S_IXOTH ) ) { /* all is well, app is executable and accessible */ return 0; } else { return -1; } } else { /* stat call failed, or file is not a regular file */ return -1; } } else { /* illegal filename string (empty or NULL) */ return -1; } } char* find_executable( const char* fn ) /* purpose: check the executable filename and correct it if necessary * paramtr: fn (IN): current knowledge of filename * returns: newly allocated fqpn of executable, or NULL if not found * globals: this will muck up the value in 'errno'. */ { char* s, *path, *t = NULL; /* sanity check */ if ( fn == NULL || *fn == '\0' ) return NULL; /* don't touch absolute paths */ if ( *fn == '/' ) { if ( myaccess(fn) == 0 ) return strdup(fn); else return NULL; } #if 0 /* try reaching executable from CWD */ if ( myaccess(fn) == 0 ) return strdup(fn); #endif /* continue only if there is a PATH to check */ if ( (s=getenv("PATH")) == NULL ) { #ifdef _PATH_DEFPATH path = strdup(_PATH_DEFPATH); #else return NULL; #endif /* _PATH_DEFPATH */ } else { /* yes, there is a PATH variable */ path = strdup(s); } /* tokenize to compare */ for ( s=strtok(path,":"); s; s=strtok(NULL,":") ) { size_t len = strlen(fn) + strlen(s) + 2; t = (char*) malloc(len); strncpy( t, s, len ); strncat( t, "/", len ); strncat( t, fn, len ); if ( myaccess(t) == 0 ) break; else { free((void*) t); t = NULL; } } /* some or no matches found */ free((void*) path); return t; }

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/test.ks.usleep

/opt/pegasus/latest/bin/kickstart /bin/usleep 500000
/opt/pegasus/latest/bin/kickstart /bin/usleep 450000
/opt/pegasus/latest/bin/kickstart /bin/usleep 400000
/opt/pegasus/latest/bin/kickstart /bin/usleep 350000
/opt/pegasus/latest/bin/kickstart /bin/usleep 300000
/opt/pegasus/latest/bin/kickstart /bin/usleep 250000
/opt/pegasus/latest/bin/kickstart /bin/usleep 200000
/opt/pegasus/latest/bin/kickstart /bin/usleep 150000
/opt/pegasus/latest/bin/kickstart /bin/usleep 100000
/opt/pegasus/latest/bin/kickstart /bin/usleep 50000

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/check.sh

#!/bin/bash
#
# $Id: check.sh 4860 2012-01-31 02:10:35Z voeckler $
# attempt to test various toggles and dials
#
test ./pegasus-cluster || exit 42
false=`type -P false`
true=`type -P true`

cat <<EOF > check.1
$false
$false
$false
$false
EOF

cat <<EOF > check.2
$true
$true
$true
$true
EOF

cat <<EOF > check.3
$true
$true
$false
$true
EOF

for parallel in 1 2; do
  for mode in '' '-f' '-e'; do
    for i in 1 2 3; do
      arg="./pegasus-cluster -n $parallel $mode check.${i}"
      echo ""
      echo "### $arg ###"
      $arg
      echo "# \$?=$?"
    done
    echo "-----------------------------------------------------"
  done
  echo "====================================================="
done

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/pegasus-cluster.c

/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved.
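 *
 * pegasus-cluster (historically "seqexec") reads one application
 * invocation per input line and keeps up to -n of them running
 * concurrently, recording exit status and timing for every task.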
* */ #include <sys/types.h> #include <errno.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <time.h> #include <sys/time.h> #include <sys/resource.h> #include <sys/wait.h> #include <fcntl.h> #include <unistd.h> #include "tools.h" #include "parser.h" #include "report.h" #include "mysystem.h" #include "job.h" #include "statinfo.h" static const char* RCS_ID = "$Id: pegasus-cluster.c 4860 2012-01-31 02:10:35Z voeckler $"; #ifndef APPLICATION_NAME #define APPLICATION_NAME "seqexec" #endif /* APPLICATION_NAME */ extern char *optarg; extern int optind, opterr, optopt; int debug = 0; int progress = -1; char* application = APPLICATION_NAME; static char success[257]; static void helpMe( const char* programname, int rc ) /* purpose: write help message and exit * paramtr: programname (IN): application of the program (us) * rc (IN): exit code to exit with * returns: procedure does not return. */ { printf( "%s\nUsage:\t%s [options] [inputfile]\n\n", RCS_ID, programname ); printf( "Optional arguments:\n" " -d\tIncrease debug mode.\n" " -s fn\tProtocol anything to given status file, default stdout.\n" " -R fn\tRecords progress into the given file, see also SEQEXEC_PROGRESS_REPORT.\n" " -S ec\tMulti-option: Mark non-zero exit-code ec as success.\n" " -n nr\tNumber of CPUs to use, defaults to 1, string 'auto' permitted.\n" " input\tFile with list of applications and args to execute, default stdin.\n\n" "Execution control and exit code:\n" "\tExecute everything but return success only if all were successful.\n" " -e\tExecute everything (old default mode) and always return success.\n" " -f\tFail hard on first error (non-zero exit code or signal death).\n" "\tOption -e and -f are mutually exclusive.\n" ); exit(rc); } static int processors( void ) { long config = #ifdef _SC_NPROCESSORS_CONF sysconf( _SC_NPROCESSORS_CONF ) #else 1 #endif ; long online = #ifdef _SC_NPROCESSORS_ONLN sysconf( _SC_NPROCESSORS_ONLN ) #else 1 #endif ; if ( config <= 0 ) config = 1; if ( online <= 0 ) online = 1; return config < online ? 
config : online; } static void parseCommandline( int argc, char* argv[], int* fail_hard, int* old_mode, int* cpus ) { char *s, *ptr = strrchr( argv[0], '/' ); int option, tmp; /* exit code 0 is always good, just in case */ memset( success, 0, sizeof(success) ); success[0] = 1; *cpus = 1; if ( ptr == NULL ) ptr = argv[0]; else ptr++; application = ptr; /* default progress report location from environment variable */ if ( (s = getenv("SEQEXEC_PROGRESS_REPORT")) != NULL ) { if ( (progress = open( s, O_WRONLY | O_APPEND | O_CREAT, 0666 )) == -1 ) { showerr( "%s: open progress %s: %d: %s\n", application, s, errno, strerror(errno) ); } } /* default parallelism from environment variable */ if ( (s = getenv("SEQEXEC_CPUS")) != NULL ) { if ( strcasecmp( s, "auto" ) == 0 ) *cpus = processors(); else *cpus = atoi(s); if ( *cpus < 0 ) *cpus = 1; } opterr = 0; while ( (option = getopt( argc, argv, "R:S:defhn:s:" )) != -1 ) { switch ( option ) { case 'R': if ( progress != -1 ) close(progress); if ( (progress = open( optarg, O_WRONLY | O_APPEND | O_CREAT, 0666 )) == -1 ) { showerr( "%s: open progress %s: %d: %s\n", application, optarg, errno, strerror(errno) ); } break; case 'S': tmp = atoi(optarg); if ( tmp > 0 && tmp < sizeof(success) ) success[tmp] = 1; else showerr( "%s: Ignoring unreasonable success code %d\n", application, tmp ); break; case 'd': debug++; break; case 'e': *old_mode = 1; *fail_hard = 0; break; case 'f': *fail_hard = 1; *old_mode = 0; break; case 'n': if ( strcasecmp( optarg, "auto" ) == 0 ) *cpus = processors(); else *cpus = atoi(optarg); if ( *cpus < 0 ) *cpus = 1; break; case 's': if ( freopen( optarg, "w", stdout ) == NULL ) { showerr( "%s: open status %s: %d: %s\n", application, optarg, errno, strerror(errno) ); exit(2); } break; case 'h': case '?': helpMe( ptr, 0 ); break; default: helpMe( ptr, 1 ); break; } } if ( (argc - optind) > 1 ) helpMe( ptr, 1 ); if ( argc != optind ) { if ( (freopen( argv[optind], "r", stdin )) == NULL ) { showerr( "%s: open input %s: %d: %s\n", application, argv[optind], errno, strerror(errno) ); exit(3); } } } static char* merge( char* s1, char* s2, int use_space ) /* purpose: merge two strings and return the result * paramtr: s1 (IN): first string, may be NULL * s2 (IN): second string, must not be null * use_space (IN): flag, if true, separate by space * returns: merge of strings into newly allocated area. */ { if ( s1 == NULL ) { return strdup(s2); } else { size_t l1 = strlen(s1); size_t l2 = strlen(s2); size_t len = l1 + l2 + 2; char* temp = (char*) malloc(len); strncpy( temp, s1, len ); if ( use_space ) strncat( temp, " ", len ); strncat( temp, s2, len ); return temp; } } pid_t wait_for_child( Jobs* jobs, int* status ) { struct rusage usage; Signals save; int saverr; double final; size_t slot; pid_t child = ((pid_t) -1); /* FIXME: Not sure, if I need these. I take it to mean: While I am * blocked in kernel wait()ing for children, do not interrupt ^C me, * and do not send me SIGCHLD since I am inside wait() anyways. Do * send the signals to the children, though (which hopefully exit.) */ save_signals(&save); errno = 0; /* we rely later on wait4 results */ while ( (child = wait4( ((pid_t) 0), status, 0, &usage )) < 0 ) { saverr = errno; perror( "wait4" ); errno = saverr; if ( errno != EINTR ) { *status = -1; break; } } saverr = errno; final = now(NULL); /* FIXME: see above, end bracket. 
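 * In effect this follows the classic system(3) recipe: SIGINT and
 * SIGQUIT are ignored and SIGCHLD is blocked in the parent for the
 * duration of the wait4(), and everything is restored right below.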
*/ restore_signals(&save); /* find child that has finished */ for ( slot=0; slot < jobs->cpus; ++slot ) { if ( jobs->jobs[slot].child == child ) break; } if ( slot == jobs->cpus ) { /* reaped child not found, not good */ showerr( "%s: process %d (status %d) is not a known child, ignoring.\n", application, child, *status ); } else { /* free slot and report */ char date[32]; Job* j = (jobs->jobs) + slot; /* 20110419 PM-364: new requirement */ showout( "[%s-task id=%lu, start=\"%s\", duration=%.3f, status=%d, " "line=%lu, pid=%d, app=\"%s\"]\n", APPLICATION_NAME, j->count, iso2date( j->start, date, sizeof(date) ), (final - j->start), *status, j->lineno, child, j->argv[ find_application(j->argv) ] ); /* progress report at finish of job */ if ( progress != -1 ) report( progress, j->start, (final - j->start), *status, j->argv, &usage, NULL #ifndef MONOTONICALLY_INCREASING , j->count #endif /* MONOTONICALLY_INCREASING */ ); /* free reported job */ job_done(j); } errno = saverr; return child; } void run_independent_task( char* cmd, char* envp[], unsigned long* extra, const char* special ) { if ( cmd != NULL ) { #ifndef USE_SYSTEM_SYSTEM size_t len; int appc, other; char** appv; if ( (appc = interpreteArguments( cmd, &appv )) > 0 ) { /* determine full path to application according to PATH */ char* fqpn = find_executable( appv[0] ); if ( fqpn ) { /* found a FQPN, exchange first item in argument vector */ free((void*) appv[0]); appv[0] = fqpn; } other = mysystem( appv, envp, special ); if ( other || debug ) showerr( "%s: %s returned %d/%d\n", application, special, (other >> 8), (other & 127) ); for ( len=0; len<appc; len++ ) free((void*) appv[len]); free((void*) appv); } else { /* unparsable cleanup argument string */ showerr( "%s: unparsable %s string, ignoring\n", application, special ); } #else int other = system( cmd ); if ( other || debug ) showerr( "%s: %s returned %d/%d\n", application, special, (other >> 8), (other & 127) ); #endif /* USE_SYSTEM_SYSTEM */ (*extra)++; } } int isafailure( int status ) { /* FIXME: On systems with exit codes outside 0..256 this may core dump! */ return ( WIFEXITED(status) && success[ WEXITSTATUS(status) ] == 1 ) ? 0 : 1; } void massage_failure( int fail_hard, int current_ec, int* collect_ec ) { if ( fail_hard ) { /* only propagate first failure in hard-fail mode */ if ( ! ( *collect_ec && isafailure(*collect_ec) ) ) *collect_ec = current_ec; } else { /* always retain last exit code in no-hard-fail mode */ *collect_ec = current_ec; } } int main( int argc, char* argv[], char* envp[] ) { size_t len; char line[MAXSTR]; int other, exitstatus, status = 0; int slot, cpus, fail_hard = 0, old_mode = 0; char* cmd; char* save = NULL; unsigned long total = 0; unsigned long failure = 0; unsigned long lineno = 0; unsigned long extra = 0; time_t when; Jobs jobs; double diff, start = now(&when); parseCommandline( argc, argv, &fail_hard, &old_mode, &cpus ); /* progress report finish */ if ( progress != -1 ) { report( progress, start, 0.0, -1, argv, NULL, NULL #ifndef MONOTONICALLY_INCREASING , 0ul #endif /* MONOTONICALLY_INCREASING */ ); } /* allocate job management memory */ if ( jobs_init( &jobs, cpus ) == -1 ) { showerr( "%s: out of memory: %d: %s\n", application, errno, strerror(errno) ); return 42; } /* since we will create multiple concurrent processes, let's create a * process group to order them by. 
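 * (so that, for example, a single kill(0, SIGTERM) issued here would
 * reach every child at once)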
*/ if ( setpgid( 0, 0 ) == -1 ) { showerr( "%s: unable to become process group leader: %d: %s (ignoring)\n", application, errno, strerror(errno) ); } /* NEW: unconditionally run a setup job */ run_independent_task( getenv("SEQEXEC_SETUP"), envp, &extra, "setup" ); /* Read the commands and call each sequentially */ while ( fgets(line,sizeof(line),stdin) != (char*) NULL ) { ++lineno; /* check for skippable line */ if ( line[0] == 0 || /* empty line */ line[0] == '\r' || /* CR */ line[0] == '\n' || /* LF */ line[0] == '#' /* comment */ ) continue; /* check for unterminated line (line larger than buffer) */ len = strlen(line); if ( line[len-1] != '\r' && line[len-1] != '\n' ) { /* read buffer was too small, save and append */ char* temp = merge( save, line, 0 ); if ( save != NULL ) free((void*) save); save = temp; lineno--; showerr( "%s: continuation line %lu\n", application, lineno ); continue; } else { /* remove line termination character(s) */ do { line[len-1] = 0; len--; } while ( len > 0 && (line[len-1] == '\r' || line[len-1] == '\n') ); } /* Assemble command. * FIXME: barf if commandline becomes too long, see _SC_ARG_MAX. */ if ( save != NULL ) { cmd = merge( save, line, 0 ); free((void*) save); save = NULL; } else { cmd = line; } /* find a free slot */ while ( (slot = jobs_first_slot( &jobs, EMPTY )) == jobs.cpus ) { /* wait for any child to finish */ if ( debug ) showerr( "%s: %d slot%s busy, wait()ing\n", application, jobs.cpus, ( jobs.cpus == 1 ? "" : "s" ) ); wait_for_child( &jobs, &other ); if ( errno == 0 && isafailure(other) ) failure++; massage_failure( fail_hard, other, &status ); } /* post-condition: there is a free slot; slot number in "slot" */ /* found free slot */ if ( fail_hard && status && isafailure(status) ) { /* we are in failure mode already, skip starting new stuff */ } else if ( slot < jobs.cpus ) { /* there is a free slot. Spawn and continue */ Signals save; Job* j = jobs.jobs + slot; if ( (j->argc = interpreteArguments( cmd, &(j->argv) )) > 0 ) { /* determine full path to application according to PATH */ char* fqpn = find_executable( j->argv[0] ); if ( fqpn ) { /* found a FQPN, exchange first item in argument vector */ free((void*) j->argv[0]); j->argv[0] = fqpn; } total++; j->envp = envp; /* for now */ j->lineno = lineno; /* WARNING: Must propagate "save" to start_child() */ save_signals( &save ); if ( (j->child = fork()) == ((pid_t) -1) ) { /* fork error, bad */ showerr( "%s: fork: %d: %s\n", application, errno, strerror(errno) ); failure++; job_done( j ); } else if ( j->child == ((pid_t) 0) ) { /* child code */ start_child( j->argv, j->envp, &save ); return 127; /* never reached, just in case */ } else { /* parent code */ j->count = total; j->state = RUNNING; j->start = now( &(j->when) ); } /* END BRACKET */ restore_signals( &save ); } else { /* error parsing args */ if ( debug ) showerr( "%s: error parsing arguments on line %lu, ignoring\n", application, lineno ); } } else { /* no free slots, wait for children to finish */ showerr( "%s: %s:%d THIS SHOULD NOT HAPPEN! (ignoring)\n", application, __FILE__, __LINE__ ); } if ( cmd != line ) free((void*) cmd); /* fail hard mode, if requested */ if ( fail_hard && status && isafailure(status) ) break; } /* wait for all children */ while ( (slot=jobs_in_state( &jobs, EMPTY )) < jobs.cpus ) { /* wait for any child to finish */ size_t n = jobs.cpus - slot; if ( debug ) showerr( "%s: %d task%s remaining\n", application, n, (n == 1 ? 
"" : "s" ) ); wait_for_child( &jobs, &other ); if ( errno == 0 && isafailure(other) ) failure++; massage_failure( fail_hard, other, &status ); } /* NEW: unconditionally run a clean-up job */ run_independent_task( getenv("SEQEXEC_CLEANUP"), envp, &extra, "cleanup" ); /* compute if seqexec should return any form of failure */ if ( old_mode ) { exitstatus = 0; } else if ( fail_hard ) { exitstatus = (status && isafailure(status)) ? 5 : 0; } else { exitstatus = failure ? 5 : 0; } /* provide final statistics */ jobs_done( &jobs ); diff = now(NULL) - start; showout( "[%s-summary stat=\"%s\", lines=%lu, tasks=%lu, succeeded=%lu, failed=%lu, " "extra=%lu, duration=%.3f, start=\"%s\", pid=%d, app=\"%s\"]\n", APPLICATION_NAME, exitstatus ? "fail" : "ok", lineno, total, total-failure, failure, extra, diff, iso2date(start,line,sizeof(line)), getpid(), argv[0] ); fflush(stdout); /* just in case */ exit( exitstatus ); } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/try-cpus.c�����������������������������������������0000644�0001750�0001750�00000001076�11757531137�023047� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <errno.h> #include <stdio.h> #include <unistd.h> static int processors( void ) { long config = #ifdef _SC_NPROCESSORS_CONF sysconf( _SC_NPROCESSORS_CONF ) #else 1 #endif ; long online = #ifdef _SC_NPROCESSORS_ONLN sysconf( _SC_NPROCESSORS_ONLN ) #else 1 #endif ; if ( config <= 0 ) config = 1; if ( online <= 0 ) online = 1; return config < online ? config : online; } int main( int argc, char* argv[] ) { int cpus = processors(); printf( "found %d processor%s\n", cpus, ( cpus == 1 ? "" : "s" ) ); return 0; } ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/parser.h�������������������������������������������0000644�0001750�0001750�00000001663�11757531137�022564� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2010 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef _PARSER_H #define _PARSER_H extern size_t interpreteArguments( char* cmd, char*** argv ); /* purpose: removes one layer of quoting and escaping, shell-style * paramtr: cmd (IO): commandline to split * paramtr: argv (OUT): argv[] vector, newly allocated vector * returns: argc */ #endif /* _PARSER_H */ �����������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/make.SunOS�����������������������������������������0000755�0001750�0001750�00000000345�11757531137�022764� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/sh gmake distclean gmake EXTRACFLAGS='$(V7FLAGS)' pegasus-cluster mv pegasus-cluster pegasus-cluster.sparcv7 gmake clean gmake EXTRACFLAGS='$(V9FLAGS)' pegasus-cluster mv pegasus-cluster pegasus-cluster.sparcv9 gmake clean�������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/mysystem.c�����������������������������������������0000644�0001750�0001750�00000010612�11757531137�023147� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2010 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ #include <sys/wait.h> #include <errno.h> #include <stdio.h> #include <string.h> #include <unistd.h> #include <fcntl.h> #include "tools.h" #include "report.h" #include "mysystem.h" static const char* RCS_ID = "$Id: mysystem.c 4537 2011-09-27 17:02:53Z voeckler $"; extern int debug; extern int progress; extern char* application; int save_signals( Signals* save ) { struct sigaction ignore; sigset_t childmask; ignore.sa_handler = SIG_IGN; sigemptyset( &ignore.sa_mask ); ignore.sa_flags = 0; if ( sigaction( SIGINT, &ignore, &(save->intr) ) < 0 ) return -1; if ( sigaction( SIGQUIT, &ignore, &(save->quit) ) < 0 ) return -1; sigemptyset( &childmask ); sigaddset( &childmask, SIGCHLD ); if ( sigprocmask( SIG_BLOCK, &childmask, &(save->mask) ) < 0 ) return -1; return 0; } int restore_signals( Signals* save ) { int result = 0; /* count errors on these, but use them all */ if ( sigaction( SIGINT, &(save->intr), NULL ) < 0 ) result++; if ( sigaction( SIGQUIT, &(save->quit), NULL ) < 0 ) result++; if ( sigprocmask( SIG_SETMASK, &(save->mask), NULL ) < 0 ) result++; return result; } void start_child( char* argv[], char* envp[], Signals* save ) /* purpose: start a child process with stdin connected to /dev/null * paramtr: argv (IN): argument vector, NULL terminated * envp (IN): environment vector, NULL terminated * save (IN): if not NULL, saved signals to restore * returns: DOES NOT RETURN */ { int null = open( "/dev/null", O_RDONLY ); if ( null != -1 ) { if ( dup2( null, STDIN_FILENO ) == -1 && debug ) showerr( "%s: dup2 stdin: %d: %s\n", application, errno, strerror(errno) ); } else { if ( debug ) showerr( "%s: open /dev/null: %d: %s\n", application, errno, strerror(errno) ); } /* undo signal handlers */ if ( save ) restore_signals( save ); execve( argv[0], (char* const*) argv, envp ); showerr( "%s: exec %s: %d: %s\n", application, argv[0], errno, strerror(errno) ); _exit(127); /* never reached unless error */ } int mysystem( char* argv[], char* envp[], const char* special ) /* purpose: implement system(3c) call w/o calling the shell * paramtr: argv (IN): NULL terminated argument vector * envp (IN): NULL terminated environment vector * special (IN): set for setup/cleanup jobs * returns: exit status from wait() family */ { char date[32]; struct rusage usage; Signals save; pid_t child; time_t when, then; double diff, start = now(&when); int saverr = 0; int status = -1; save_signals( &save ); if ( (child=fork()) < 0 ) { /* no more process table space */ return -1; } else if ( child == (pid_t) 0 ) { /* child */ start_child( argv, envp, &save ); } else { /* parent */ /* wait for child */ while ( wait4( child, &status, 0, &usage ) < 0 ) { if ( errno != EINTR ) { status = -1; break; } } /* remember why/how we did exit */ saverr = errno; /* sanity check */ if ( kill( child, 0 ) == 0 ) showerr( "Warning: job %d (%s) is still running!\n", child, argv[0] ); } /* ignore errors on these, too. */ restore_signals( &save ); /* 20110419 PM-364: new requirement */ diff = now(&then) - start; showout( "[seqexec-task id=%s, start=\"%s\", duration=%.3f, status=%d, " "pid=%d, app=\"%s\"]\n", special ? 
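             /* "special" labels setup/cleanup invocations; fall back to
                "(unknown)" when no such label was set */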
special : "(unknown)", iso2date( start, date, sizeof(date) ), diff, status, child, argv[ find_application(argv) ] ); /* progress report finish */ if ( progress != -1 ) { report( progress, start, diff, status, argv, &usage, special #ifndef MONOTONICALLY_INCREASING , -1ul #endif /* MONOTONICALLY_INCREASING */ ); } errno = saverr; return status; } ����������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/tools.h��������������������������������������������0000644�0001750�0001750�00000005520�11757531137�022424� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2010 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef _TOOLS_H #define _TOOLS_H #include <sys/types.h> #include <stdarg.h> #include <time.h> #include <sys/time.h> #ifndef MAXSTR #define MAXSTR 4096 #endif extern ssize_t writen( int fd, const char* buffer, ssize_t n, unsigned restart ); /* purpose: write all n bytes in buffer, if possible at all * paramtr: fd (IN): filedescriptor open for writing * buffer (IN): bytes to write (must be at least n byte long) * n (IN): number of bytes to write * restart (IN): if true, try to restart write at max that often * returns: n, if everything was written, or * [0..n-1], if some bytes were written, but then failed, * < 0, if some error occurred. */ extern ssize_t showerr( const char* fmt, ... ); /* purpose: printf-like error using (hopefully) atomic writes * paramtr: see printf() * returns: number of bytes written, -1 for error */ /* as long as showerr() reports on stdout, we can use this macro */ #ifndef showout #define showout showerr #endif extern double timespec( struct timeval* tv ); /* purpose: convert a timeval into float seconds with microseconds. * paramtr: tv (IN): pointer to struct timeval to convert * returns: seconds as float with microseconds. * warning: anything below millisecond resolution is unrealistic. */ extern double now( time_t* when ); /* purpose: obtains an UTC timestamp with microsecond resolution. * paramtr: when (opt. OUT): where to save integral seconds into. * returns: the timestamp, or -1.0 if it was completely impossible. */ extern char* isodate( time_t seconds, char* buffer, size_t size ); /* purpose: formats ISO 8601 timestamp into given buffer (simplified) * paramtr: seconds (IN): time stamp * buffer (OUT): where to put the results * size (IN): capacity of buffer * returns: pointer to start of buffer for convenience. 
*/ extern char* iso2date( double seconds_wf, char* buffer, size_t size ); /* purpose: formats ISO 8601 timestamp into given buffer (simplified) * paramtr: seconds_wf (IN): time stamp with fractional seconds (millis) * buffer (OUT): where to put the results * size (IN): capacity of buffer * returns: pointer to start of buffer for convenience. */ #endif /* _TOOLS_H */ ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/mysystem.h�����������������������������������������0000644�0001750�0001750�00000003714�11757531137�023161� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2010 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef _MYSYSTEM_H #define _MYSYSTEM_H #include <sys/types.h> #include <signal.h> typedef struct { struct sigaction intr; struct sigaction quit; sigset_t mask; } Signals; extern int save_signals( Signals* save ); /* purpose: ignore SIG{INT,QUIT} and block SIGCHLD * paramtr: save (IO): place to store original signal vectors * returns: 0 on success, -1 on failure. 
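 * example: the usual pairing, mirroring what mysystem() does in
 *          mysystem.c:
 *            Signals save;
 *            if ( save_signals( &save ) == 0 ) {
 *              ... fork() and wait for the child ...
 *              restore_signals( &save );
 *            }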
*/ extern int restore_signals( Signals* save ); /* purpose: restore previously save signals * paramtr: save (IN): previously saved signal state * returns: 0 on success, count of errors on failure * warning: errno may be useless to check in this case */ extern void start_child( char* argv[], char* envp[], Signals* save ); /* purpose: start a child process with stdin connected to /dev/null * paramtr: argv (IN): argument vector, NULL terminated * envp (IN): environment vector, NULL terminated * save (IN): if not NULL, saved signals to restore * returns: DOES NOT RETURN */ extern int mysystem( char* argv[], char* envp[], const char* special ); /* purpose: implement system(3c) call w/o calling the shell * paramtr: argv (IN): NULL terminated argument vector * envp (IN): NULL terminated environment vector * special (IN): set for setup/cleanup jobs * returns: exit status from wait() family */ #endif /* _MYSYSTEM_H */ ����������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/job.h����������������������������������������������0000644�0001750�0001750�00000004544�11757531137�022043� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _JOB_H #define _JOB_H #include <sys/types.h> typedef enum { EMPTY, RUNNING, } JobState; typedef struct { int argc; /* number of arguments */ char** argv; /* argument vector */ char** envp; /* environment settings -- future lab per job */ JobState state; /* where in the life cycle of a job are we */ pid_t child; /* pid of process -- when in state running */ double start; /* starting time */ time_t when; /* start time_t */ unsigned long count; /* copy from job counter */ unsigned long lineno; /* copy from lineno */ } Job; extern void job_done( Job* job ); /* purpose: free up the argv vector * paramtr: job (IO): job to free and initialize to 0 * warning: does not touch envp (for now) */ typedef struct { Job* jobs; size_t cpus; } Jobs; extern int jobs_init( Jobs* jobs, int cpus ); /* purpose: Initialize maintainance data structure * paramtr: jobs (IO): pointer to Jobs data structure * cpus (IN): how many job slots to allocate * returns: 0 on success, -1 on error. 
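 * example: a minimal lifecycle sketch (slot count of 4 assumed):
 *            Jobs jobs;
 *            if ( jobs_init( &jobs, 4 ) == 0 ) {
 *              size_t slot = jobs_first_slot( &jobs, EMPTY );
 *              if ( slot < jobs.cpus ) { ... start a task in that slot ... }
 *              jobs_done( &jobs );
 *            }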
*/ extern void jobs_done( Jobs* jobs ); /* purpose: d'tor for Jobs structure * paramtr: jobs (IO): pointer to Jobs data structure */ extern size_t jobs_in_state( Jobs* jobs, JobState state ); /* purpose: count number of jobs having a certain job state * paramtr: jobs (IN): pointer to maintenance structure * state (IN): job state to compare to * returns: count */ extern size_t jobs_first_slot( Jobs* jobs, JobState state ); /* purpose: find first slot of a job with state state * paramtr: jobs (IN): pointer to maintanance structure * state (IN): job state to search * returns: 0 .. cpus-1: valid job slot * cpus: no such slot found */ #endif /* _JOB_H */ ������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/job.c����������������������������������������������0000644�0001750�0001750�00000005143�11757531137�022032� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2010 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <sys/types.h> #include <stdlib.h> #include <string.h> #include "job.h" static const char* RCS_ID = "$Id: job.c 4537 2011-09-27 17:02:53Z voeckler $"; void job_done( Job* job ) /* purpose: free up the argv vector * paramtr: job (IO): job to free and initialize to 0 * warning: does not touch envp (for now) */ { if ( job ) { int i; for ( i=0; i<job->argc; ++i ) { if ( job->argv[i] ) { free((void*) job->argv[i]); job->argv[i] = NULL; } } free((void*) job->argv); memset( job, 0, sizeof(Job) ); } } int jobs_init( Jobs* jobs, int cpus ) /* purpose: Initialize maintainance data structure * paramtr: jobs (IO): pointer to Jobs data structure * cpus (IN): how many job slots to allocate * returns: 0 on success, -1 on error. */ { if ( jobs ) { jobs->cpus = cpus; return ( (jobs->jobs = calloc( sizeof(Job), cpus )) == NULL ) ? 
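           /* calloc() zero-fills the slot array, so every Job starts
              in state EMPTY (the first enumerator, value 0) */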
-1 : 0; } else { return -1; } } void jobs_done( Jobs* jobs ) /* purpose: d'tor for Jobs structure * paramtr: jobs (IO): pointer to Jobs data structure */ { if ( jobs ) { if ( jobs->jobs ) free((void*) jobs->jobs); memset( jobs, 0, sizeof(Jobs) ); } } size_t jobs_in_state( Jobs* jobs, JobState state ) /* purpose: count number of jobs having a certain job state * paramtr: jobs (IN): pointer to maintenance structure * state (IN): job state to compare to * returns: count */ { size_t i, result = 0; for ( i=0; i < jobs->cpus; ++i ) { if ( jobs->jobs[i].state == state ) result++; } return result; } size_t jobs_first_slot( Jobs* jobs, JobState state ) /* purpose: find first slot of a job with state state * paramtr: jobs (IN): pointer to maintanance structure * state (IN): job state to search * returns: 0 .. cpus-1: valid job slot * cpus: no such slot found */ { size_t result; for ( result=0; result < jobs->cpus; ++result ) if ( jobs->jobs[result].state == state ) return result; return result; /* == jobs->cpus */ } �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/Makefile�������������������������������������������0000644�0001750�0001750�00000004425�11757531137�022556� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������# # Makefile # $Id: Makefile 5027 2012-02-27 23:59:50Z voeckler $ # INSTALL = install RM = rm -f CC = gcc CXX = g++ SYSTEM = $(shell uname -s | tr '[a-z]' '[A-Z]' | tr -d '_ -/') VERSION = $(shell uname -r) MARCH = $(shell uname -m | tr '[A-Z]' '[a-z]') MAJOR = $(firstword $(subst ., ,$(VERSION))) LOADLIBES = -lm ifndef ${prefix} prefix = $(PEGASUS_HOME) endif ifndef ${datadir} datadir = $(prefix)/share endif ifeq (SUNOS,${SYSTEM}) ifeq (5,${MAJOR}) CC = cc CXX = CC V7FLAGS = -xtarget=generic V9FLAGS = -xtarget=ultra -xarch=v9 CFLAGS = -dalign -ftrap=%none -fsimple -xlibmil $(EXTRACFLAGS) CFLAGS := -DSOLARIS $(CFLAGS) -xO4 -D__EXTENSIONS__=1 LDFLAGS += $(EXTRACFLAGS) INSTALL = /usr/ucb/install LOADLIBES += -lnsl else # old Solaris 1 not supported endif endif ifeq (AIX,${SYSTEM}) CXX = xlC CC = xlc endif ifeq (LINUX,${SYSTEM}) CFLAGS := -Wall -O2 -ggdb ifeq (ia64,${MARCH}) # # Linux IA64 # CFLAGS += -DMARCH_IA64=1 else ifeq (x86_64,${MARCH}) # # Linux AMD64 # CFLAGS += -m64 else ifeq (armv7l, ${MARCH}) # nothing to do else # # Linux IA32 # CFLAGS += -march=i686 # LDFLAGS += -static endif endif endif endif # # === [3] ======================================================= rules section # There is no need to change things below this line. 
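# Hypothetical invocations (not part of the original file):
#   make                               # build pegasus-cluster with the flags above
#   make try-cpus                      # build the small CPU-count probe
#   make prefix=$PEGASUS_HOME install  # copy pegasus-cluster into $(prefix)/bin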
CFLAGS += -D${SYSTEM} -DMAJOR=${MAJOR} LD = $(CC) -D_FILE_OFFSET_BITS=64 CC += -D_FILE_OFFSET_BITS=64 CFLAGS += -D_LARGEFILE_SOURCE -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE %.html : %.1 ; $(NROFF) $(HTML) $< > $@ %.ps : %.1 ; $(NROFF) $< > $@ %.txt : %.1 ; $(NROFF) $(TEXT) $< > $@ %.o : %.c $(CC) $(CFLAGS) $< -c -o $@ all: pegasus-cluster job.o: job.c job.h mysystem.o: mysystem.c tools.h report.h mysystem.h parser.o: parser.c parser.h tools.h report.o: report.c tools.h report.h pegasus-cluster.o: pegasus-cluster.c tools.h parser.h report.h mysystem.h \ job.h statinfo.h statinfo.o: statinfo.c statinfo.h tools.o: tools.c tools.h try-cpus.o: try-cpus.c pegasus-cluster: pegasus-cluster.o tools.o parser.o report.o mysystem.o \ job.o statinfo.o $(LD) $(LDFLAGS) $^ -o $@ $(LOADLIBES) try-cpus: try-cpus.o $(LD) $(LDFLAGS) $^ -o $@ $(LOADLIBES) install: pegasus-cluster $(INSTALL) -m 0755 $^ $(prefix)/bin check: pegasus-cluster bash check.sh clean: $(RM) *.o core core.* check.? distclean: clean $(RM) pegasus-cluster try-cpus �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/test.usleep����������������������������������������0000644�0001750�0001750�00000000275�11757531137�023313� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/bin/usleep 500000 /bin/usleep 450000 /bin/usleep 400000 /bin/usleep 350000 /bin/usleep 300000 /bin/usleep 250000 /bin/usleep 200000 /bin/usleep 150000 /bin/usleep 100000 /bin/usleep 50000 �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/statinfo.h�����������������������������������������0000644�0001750�0001750�00000002366�11757531137�023120� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2011 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ #ifndef _STATINFO_H #define _STATINFO_H extern int myaccess( const char* path ); /* purpose: check a given file for being accessible and executable * under the currently effective user and group id. * paramtr: path (IN): current path to check * returns: 0 if the file is accessible, -1 for not */ extern char* find_executable( const char* fn ); /* purpose: check the executable filename and correct it if necessary * paramtr: fn (IN): current knowledge of filename * returns: newly allocated fqpn of path to exectuble, or NULL if not found * globals: this will muck up the value in 'errno'. */ #endif /* _STATINFO_H */ ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/report.c�������������������������������������������0000644�0001750�0001750�00000023363�11757531137�022577� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2010 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <sys/types.h> #include <errno.h> #include <fcntl.h> #include <stdlib.h> #include <stdio.h> #include <string.h> #include <sys/poll.h> #include <sys/utsname.h> #include <unistd.h> #include "tools.h" #include "report.h" static const char* RCS_ID = "$Id: report.c 4640 2011-11-07 21:32:00Z voeckler $"; static char* identifier; struct utsname uname_cache; static char* create_identifier( void ) { char buffer[128]; if ( uname(&uname_cache) != -1 ) { char* s = strchr( uname_cache.nodename, '.' ); if ( s != NULL ) *s = 0; snprintf( buffer, sizeof(buffer), "%s:%d", uname_cache.nodename, getpid() ); } else { fprintf( stderr, "uname: %d: %s\n", errno, strerror(errno) ); memset( &uname_cache, 0, sizeof(uname_cache) ); snprintf( buffer, sizeof(buffer), "unknown:%d", getpid() ); } return (identifier = strdup(buffer)); } int find_application( char* argv[] ) /* purpose: find start of argv excluding kickstart * paramtr: argv (IN): invocation argument vector * returns: start of argv. Returns 0 if unsure. */ { int i, flag = 0; #if 0 /* FIXME: Maybe use this starting with 3.3? 
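   Note: suffix-matching the shorter "kickstart" string below also
   catches a "pegasus-kickstart" basename, so the stricter test can
   stay disabled without losing coverage.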
*/ static const char* ks = "pegasus-kickstart"; static const size_t kslen = 17; /* strlen(ks); */ #else /* backward compatible - for now */ static const char* ks = "kickstart"; static const size_t kslen = 9; /* strlen(ks); */ #endif for ( i=0 ; argv[i]; ++i ) { char* s = argv[i]; size_t slen = strlen(s); /* detect presence of kickstart */ if ( i == 0 && slen>=kslen && strcmp( s+slen-kslen, ks ) == 0 ) { flag = 1; continue; } if ( flag ) { /* in kickstart mode, skip options of kickstart */ if ( s[0] == '-' && strchr( "ioelnNRBLTIwWSs", s[1] ) != NULL ) { /* option with argument */ if ( s[2] == 0 ) ++i; continue; } else if ( s[0] == '-' && strchr( "FHVX", s[1] ) != NULL ) { /* option without argument */ continue; } else { flag = 0; } } if ( ! flag ) { /* found */ return ( argv[i] == NULL ? 0 : i ); } } /* invalid arguments? */ return 0; } static size_t append_argument( char* msg, size_t size, size_t len, char* argv[] ) /* purpose: append invocation to logging buffer, but skip kickstart * paramtr: msg (IO): initialized buffer to append to * size (IN): capacity of buffer * len (IN): current length of the buffer * argv (IN): invocation argument vector * returns: new length of modified buffer */ { size_t slen; int i, flag = 0; static const char* ks = "kickstart"; char* extra[3] = { NULL, NULL, NULL }; for ( i=0 ; argv[i]; ++i ) { char* s = argv[i]; slen = strlen(s); /* detect presence of kickstart */ if ( i == 0 && strcmp( s+slen-strlen(ks), ks ) == 0 ) { flag = 1; continue; } if ( flag ) { /* in kickstart mode, skip options of kickstart */ if ( s[0] == '-' && strchr( "ioelnNRBLTIwWSs", s[1] ) != NULL ) { /* option with argument */ switch ( s[1] ) { case 'i': if ( s[2] == 0 ) extra[0] = argv[++i]; else extra[0] = &s[2]; break; case 'o': if ( s[2] == 0 ) extra[1] = argv[++i]; else extra[1] = &s[2]; break; case 'e': if ( s[2] == 0 ) extra[2] = argv[++i]; else extra[2] = &s[2]; break; default: if ( s[2] == 0 ) ++i; break; } continue; } else if ( s[0] == '-' && strchr( "HVX", s[1] ) != NULL ) { /* option without argument */ continue; } else { flag = 0; } } if ( ! flag ) { /* in regular mode, add argument to output */ if ( len + slen + 1 > size ) { /* continuation dots */ static const char* dots = " ..."; if ( len < size-strlen(dots)-1 ) { strncat( msg+len, dots, size-len ); len += strlen(dots); } break; } /* append argument */ strncat( msg+len, " ", size-len ); strncat( msg+len, s, size-len ); len += slen + 1; } } /* simulate stdio redirection */ for ( i=0; i<3; ++i ) { if ( extra[i] != NULL ) { int skip = 0; char* s = extra[i]; if ( len + (slen=strlen(s)) + 4 < size ) { switch ( i ) { case 0: strncat( msg+len, " < ", size-len ); break; case 1: strncat( msg+len, " > ", size-len ); break; case 2: strncat( msg+len, " 2> ", size-len ); break; } skip = ( *s == '!' 
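                /* a leading '!' or '^' is only a marker; skip it so
                   that just the plain filename is echoed */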
|| *s == '^' ); strncat( msg+len, s+skip, size-len ); len += slen + 3 + ( i == 2 ) - skip; } else { break; } } } return len; } static int lockit( int fd, short cmd, short type ) /* purpose: fill in POSIX lock structure and attempt lock (or unlock) * paramtr: fd (IN): which file descriptor to lock * cmd (IN): F_SETLK, F_GETLK, F_SETLKW * type (IN): F_WRLCK, F_RDLCK, F_UNLCK * returns: result from fcntl call */ { struct flock l; memset( &l, 0, sizeof(l) ); l.l_type = type; /* full file */ l.l_whence = SEEK_SET; l.l_start = 0; l.l_len = 0; /* run it */ return fcntl( fd, cmd, &l ); } static int mytrylock( int fd ) /* purpose: Try to lock the file * paramtr: fd (IN): open file descriptor * returns: -1: fatal error while locking the file, file not locked * 0: all backoff attempts failed, file is not locked * 1: file is locked */ { int backoff = 50; /* milliseconds, increasing */ int retries = 10; /* 2.2 seconds total */ while ( lockit( fd, F_SETLK, F_WRLCK ) == -1 ) { if ( errno != EACCES && errno != EAGAIN ) return -1; if ( --retries == 0 ) return 0; backoff += 50; poll( NULL, 0, backoff ); } return 1; } ssize_t report( int progress, double start, double duration , int status, char* argv[], struct rusage* use , const char* special #ifndef MONOTONICALLY_INCREASING , size_t taskid #endif /* MONOTONICALLY_INCREASING */ ) /* purpose: report what has just finished. * paramtr: progress (IN): file description open for writing * start (IN): start time (no millisecond resolution) * duration (IN): duration with millisecond resolution * status (IN): return value from wait() family * argv (IN): NULL-delimited argument vector of app * use (IN): resource usage from wait4() call * special (IN): set for setup/cleanup jobs. * taskid (IN): task number from input file. * returns: number of bytes written onto "progress" */ { #ifdef MONOTONICALLY_INCREASING static unsigned long counter = 0; #endif /* MONOTONICALLY_INCREASING */ int save, locked; char date[32]; size_t len, size = getpagesize(); char* msg = (char*) malloc( size<<1 ); ssize_t wsize = -1; /* sanity checks */ if ( progress == -1 || argv == NULL ) return 0; /* singleton */ if ( identifier == NULL ) identifier = create_identifier(); /* message start */ if ( status == -1 && duration == 0.0 && use == NULL ) { /* report of seqexec itself */ snprintf( msg, size, "%s %s %lu 0/0 START" , iso2date(start,date,sizeof(date)) , identifier #ifdef MONOTONICALLY_INCREASING , counter++ #else , 0ul #endif /* MONOTONICALLY_INCREASING */ ); } else if ( special != NULL ) { /* report from setup/cleanup invocations */ snprintf( msg, size, "%s %s %s %d/%d %.3f" , iso2date(start,date,sizeof(date)) , identifier , special , (status >> 8), (status & 127), duration ); } else { /* report from child invocations */ snprintf( msg, size, "%s %s %lu %d/%d %.3f" , iso2date(start,date,sizeof(date)) , identifier #ifdef MONOTONICALLY_INCREASING , counter++ #else , taskid #endif /* MONOTONICALLY_INCREASING */ , (status >> 8), (status & 127), duration ); } /* add program arguments */ len = append_argument( msg, size-2, strlen(msg), argv ); /* optionally add uname (seqexec) or rusage (children) info */ if ( status == -1 && duration == 0.0 && use == NULL ) { /* report uname info for seqexec itself */ snprintf( msg+len, size-len, " ### sysname=%s machine=%s release=%s", uname_cache.sysname, uname_cache.machine, uname_cache.release ); len += strlen(msg+len); } else if ( use != NULL ) { double utime = use->ru_utime.tv_sec + use->ru_utime.tv_usec / 1E6; double stime = use->ru_stime.tv_sec + 
use->ru_stime.tv_usec / 1E6; snprintf( msg+len, size-len, " ### utime=%.3f stime=%.3f minflt=%ld majflt=%ld" #ifndef linux /* Linux is broken and does not fill in these values */ " maxrss=%ld idrss=%ld inblock=%ld oublock=%ld" " nswap=%ld nsignals=%ld nvcws=%ld nivcsw=%ld" #endif ,utime, stime, use->ru_minflt, use->ru_majflt #ifndef linux /* Linux is broken and does not fill in these values */ ,use->ru_maxrss, use->ru_idrss, use->ru_inblock, use->ru_oublock, use->ru_nswap, use->ru_nsignals, use->ru_nvcsw, use->ru_nivcsw #endif ); len += strlen(msg+len); } /* terminate line */ strncat( msg+len, "\n", size-len ); /* Atomic append -- will still garble on Linux NFS */ /* Warning: Fcntl-locking may block in syscall on broken Linux kernels */ locked = mytrylock( progress ); wsize = write( progress, msg, len+1 ); save = errno; if ( locked==1 ) lockit( progress, F_SETLK, F_UNLCK ); free((void*) msg ); errno = save; return wsize; } �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/parser.c�������������������������������������������0000644�0001750�0001750�00000012526�11757531137�022557� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2010 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include <ctype.h> #include <stdio.h> #include <stdlib.h> #include <string.h> #include <unistd.h> #include "parser.h" #include "tools.h" static const char* RCS_ID = "$Id: parser.c 4838 2012-01-27 00:53:44Z voeckler $"; extern int debug; /* create finite state automaton to remove one level of quoting in the * same manner as a shell. 
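 * Example (a sketch of the intended behaviour, per the table below):
 * the input line
 *   echo 'a b' "c\"d" e\ f
 * splits into the four arguments
 *   echo | a b | c"d | e f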
This means in particular, in case you are not * aware (see man of your shell): * * o backslashes are meaningless inside single quotes * o single quotes are meaningless inside double quotes * * | sq | dq | bs | lws | otr | EOS * ----+-----+-----+-----+-----+-----+----- * 0 | 3,- | 4,- | 2,- | 0,- | 1,s | F,- * 1 | 3,- | 4,- | 2,- | 0,F | 1,s | F,F * 2 | 1,s | 1,s | 1,s | 1,s | 1,s | E1 * 3 | 1,- | 3,s | 3,s | 3,s | 3,s | E2 * 4 | 4,s | 1,- | 5,- | 4,s | 4,s | E3 * 5 | 5,s | 5,s | 5,s | 5,s | 5,s | E1 * ----+----------------------------------- * 6 | F | final state, done with success * 7 | E1 | error: premature end of string * 8 | E2 | error: missing single quote * 9 | E3 | error: missing double quote */ static char c_state[6][6] = { { 3, 4, 2, 0, 1, 6 }, /* 0: skip linear whitespace */ { 3, 4, 2, 0, 1, 6 }, /* 1: gobble unquoted nonspaces */ { 1, 1, 1, 1, 1, 7 }, /* 2: unquoted backslash */ { 1, 3, 3, 3, 3, 8 }, /* 3: single quote mode */ { 4, 1, 5, 4, 4, 9 }, /* 4: double quote mode */ { 4, 4, 4, 4, 4, 7 } }; /* 5: double quote backslash mode */ static char c_action[6][6] = { { 0, 0, 0, 0, 1, 0 }, /* 0: skip linear whitespace */ { 0, 0, 0, 2, 1, 2 }, /* 1: gobble unquoted nonspaces */ { 1, 1, 1, 1, 1, 0 }, /* 2: unquoted backslash */ { 0, 1, 1, 1, 1, 0 }, /* 3: single quote mode */ { 1, 0, 0, 1, 1, 0 }, /* 4: double quote mode */ { 1, 1, 1, 1, 1, 0 } }; /* 5: double quote backslash mode */ static int charclass( char input ) { if ( input == 0 ) return 5; else switch ( input ) { case '\'': return 0; case '"' : return 1; case '\\': return 2; case ' ' : return 3; case '\t': return 3; default: return 4; } } typedef struct s_node { char* data; struct s_node* next; } t_node; char* format_printable( char* buf, size_t size, char ch ) { if ( isprint(ch) ) { snprintf( buf, size, "\"%c\"", ch ); } else { snprintf( buf, size, "0x%02x", (unsigned char) ch ); } return buf; } size_t interpreteArguments( char* cmd, char*** argv ) /* purpose: removes one layer of quoting and escaping, shell-style * paramtr: cmd (IO): commandline to split * paramtr: argv (OUT): argv[] vector, newly allocated vector * returns: argc */ { t_node* head = NULL; t_node* tail = NULL; char* s = cmd; size_t capacity = getpagesize(); size_t size = 0; size_t argc = 0; char* store = (char*) malloc( capacity ); int class, state = 0; char ch, temp[8]; while ( state < 6 ) { if ( (class = charclass((ch=*s))) != 5 ) s++; if ( debug > 2 ) showerr( "[debug state=\"%d\" class=\"%d\" ch=%s]\n", state, class, format_printable(temp,sizeof(temp),ch) ); /* handle action */ switch ( c_action[state][class] ) { case 0: /* noop */ break; case 1: /* save char */ if ( size+1 >= capacity ) { /* need to increate buffer to accomodate longer string */ size_t c = capacity << 1; char* t = (char*) malloc(c); memcpy( t, store, size ); free((void*) store ); capacity = c; store = t; } /* put character into buffer, and finish the C-string */ store[size++] = ch; store[size] = 0; break; case 2: /* finalize this argument */ if ( head == NULL && tail == NULL ) { /* initially */ head = tail = (t_node*) malloc( sizeof(t_node) ); } else { /* later */ tail->next = (t_node*) malloc( sizeof(t_node) ); tail = tail->next; } /* copy string so far into data section, and reset string */ tail->data = strdup( store ); tail->next = NULL; size = 0; store[size] = 0; /* counts number of arguments in vector we later must allocate */ argc++; break; default: /* must not happen - FIXME: Complain bitterly */ break; } /* advance state */ state = c_state[state][class]; } /* FIXME: What if 
state is not 6 ? */ if ( state != 6 ) { showerr( "[syntax-error state=\"%d\" argc=\"%zu\" cmd=\"%s\"]\n", state, argc, cmd ); free((void*) store); return 0; } /* create result vector from list while freeing list */ *argv = (char**) calloc( sizeof(char*), argc+1 ); for ( size=0; head != NULL; ) { (*argv)[size++] = head->data; tail = head; head = head->next; free((void*) tail); } /* finalize argument vector */ (*argv)[size] = NULL; free((void*) store); return argc; } ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-cluster/report.h�������������������������������������������0000644�0001750�0001750�00000003363�11757531137�022602� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/** * Copyright 2007-2010 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #ifndef _REPORT_H #define _REPORT_H #include <sys/types.h> #include <time.h> #include <sys/resource.h> extern int find_application( char* argv[] ); /* purpose: find start of argv excluding kickstart * paramtr: argv (IN): invocation argument vector * returns: start of argv. Returns 0 if unsure. */ extern ssize_t report( int progress, double start, double duration , int status, char* argv[], struct rusage* use , const char* special #ifndef MONOTONICALLY_INCREASING , size_t taskid #endif /* MONOTONICALLY_INCREASING */ ); /* purpose: report what has just finished. * paramtr: progress (IN): file description open for writing * start (IN): start time (no millisecond resolution) * duration (IN): duration with millisecond resolution * status (IN): return value from wait() family * argv (IN): NULL-delimited argument vector of app * use (IN): resource usage from wait4() call * special (IN): set for setup/cleanup jobs. * taskid (IN): task number from input file. 
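 *          (this parameter exists only when the compile-time flag
 *          MONOTONICALLY_INCREASING is not defined; otherwise the
 *          implementation numbers reports with an internal counter)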
* returns: number of bytes written onto "progress" */ #endif /* _REPORT_H */ �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/�������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�021437� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/tools.c������������������������������������������0000644�0001750�0001750�00000026644�11757531137�022747� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "tools.h" #include <stdlib.h> #include <stdio.h> #include <string.h> #include <stdarg.h> #include <sys/time.h> #include <sys/types.h> #include <sys/stat.h> #include <unistd.h> static const char* RCS_ID = "$Id: tools.c 307 2007-09-21 18:03:47Z voeckler $"; void full_append( char* buffer, const size_t size, size_t* len, const char* msg, size_t msglen ) /* purpose: append a binary message to the buffer while maintaining length information. * paramtr: buffer (IO): buffer area to put strings into * size (IN): capacity of buffer * len (IO): current end of buffer, updated on return * msg (IN): message to append to buffer * mlen (IN): length of message area to append * returns: nada */ { if ( *len + msglen + 1 < size ) { /* JSV 20070921: msglen may be smaller than strlen(msg) ! */ strncat( buffer + *len, msg, msglen ); *len += msglen; } else { strncat( buffer + *len, msg, size - *len - 1 ); *len += strlen( buffer + *len ); } } #if 0 void append( char* buffer, const size_t size, size_t* len, const char* msg ) /* purpose: append a string to the buffer while maintaining length information. 
* paramtr: buffer (IO): buffer area to put strings into * size (IN): capacity of buffer * len (IO): current end of buffer, updated on return * msg (IN): message to append to buffer * returns: nada */ { full_append( buffer, size, len, msg, strlen(msg) ); } #endif static const char* iso88591lookup[256] = { "", "", "", "", "", "", "", "", "", "\t", "\n", "", "", "\r", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", " ", "!", """, "#", "$", "%", "&", "'", "(", ")", "*", "+", ",", "-", ".", "/", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", ":", ";", "<", "=", ">", "?", "@", "A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "[", "\\", "]", "^", "_", "`", "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "{", "|", "}", "~", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "", "" }; void xmlquote( char* buffer, const size_t size, size_t* len, const char* msg, size_t msglen ) /* purpose: append a possibly binary message to the buffer while XML * quoting and maintaining buffer length information. * paramtr: buffer (IO): buffer area to put strings into * size (IN): capacity of buffer * len (IO): current end of buffer, updated on return * msg (IN): message to append to buffer * mlen (IN): length of message area to append * returns: nada */ { size_t i, tsize = size-2; for ( i=0; i < msglen; ++i ) { append( buffer, tsize, len, iso88591lookup[ (unsigned char) msg[i] ] ); } } void myprint( char* buffer, const size_t size, size_t* len, const char* fmt, ... ) /* purpose: format a string at the end of a buffer while maintaining length information. * paramtr: buffer (IO): buffer area to put strings into * size (IN): capacity of buffer * len (IO): current end of buffer, updated on return * fmt (IN): printf compatible format * ... (IN): parameters to format * returns: nada */ { va_list ap; va_start( ap, fmt ); vsnprintf( buffer + *len, size - *len, fmt, ap ); *len += strlen(buffer + *len); va_end(ap); } size_t mydatetime( char* buffer, const size_t size, size_t* offset, int isLocal, int isExtended, time_t seconds, long micros ) /* purpose: append an ISO timestamp to a buffer * paramtr: buffer (IO): buffer area to store things into * size (IN): capacity of buffer * offset (IO): current position of end of meaningful buffer * isLocal (IN): flag, if 0 use UTC, otherwise use local time * isExtd (IN): flag, if 0 use concise format, otherwise extended * seconds (IN): tv_sec part of timeval * micros (IN): if negative, don't show micros. 
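 *          example (argument values assumed): with isLocal=0 and
 *          isExtended=1,
 *            mydatetime( buf, sizeof(buf), &len, 0, 1, tv.tv_sec, tv.tv_usec )
 *          appends a UTC stamp of the form "2012-05-30T10:15:00.123Z".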
* returns: number of characters added */ { char line[32]; size_t len; struct tm zulu; memcpy( &zulu, gmtime(&seconds), sizeof(struct tm) ); if ( isLocal ) { /* local time requires that we state the offset */ int hours, minutes; time_t distance; struct tm local; memcpy( &local, localtime(&seconds), sizeof(struct tm) ); zulu.tm_isdst = local.tm_isdst; distance = seconds - mktime(&zulu); hours = distance / 3600; minutes = abs(distance) % 60; strftime( line, sizeof(line), isExtended ? "%Y-%m-%dT%H:%M:%S" : "%Y%m%dT%H%M%S", &local ); len = strlen(line); if ( micros < 0 ) myprint( line, sizeof(line), &len, "%+03d:%02d", hours, minutes ); else myprint( line, sizeof(line), &len, isExtended ? ".%03ld%+03d:%02d" : ".%03ld%+03d%02d", micros / 1000, hours, minutes ); } else { /* zulu time aka UTC */ strftime( line, sizeof(line), isExtended ? "%Y-%m-%dT%H:%M:%S" : "%Y%m%dT%H%M%S", &zulu ); len = strlen(line); if ( micros < 0 ) append( line, sizeof(line), &len, "Z" ); else myprint( line, sizeof(line), &len, ".%03ldZ", micros / 1000 ); } append( buffer, size, offset, line ); return len; } double mymaketime( struct timeval t ) /* purpose: convert a structured timeval into seconds with fractions. * paramtr: t (IN): a timeval as retured from gettimeofday(). * returns: the number of seconds with microsecond fraction. */ { return ( t.tv_sec + t.tv_usec / 1E6 ); } void now( struct timeval* t ) /* purpose: capture a point in time with microsecond extension * paramtr: t (OUT): where to store the captured time */ { int timeout = 0; t->tv_sec = -1; t->tv_usec = 0; while ( gettimeofday( t, 0 ) == -1 && timeout < 10 ) timeout++; } static int isDir( const char* tmp ) /* purpose: Check that the given dir exists and is writable for us * paramtr: tmp (IN): designates a directory location * returns: true, if tmp exists, isa dir, and writable */ { struct stat st; if ( stat( tmp, &st ) == 0 && S_ISDIR(st.st_mode) ) { /* exists and isa directory */ if ( (geteuid() != st.st_uid || (st.st_mode & S_IWUSR) == 0) && (getegid() != st.st_gid || (st.st_mode & S_IWGRP) == 0) && ((st.st_mode & S_IWOTH) == 0) ) { /* not writable to us */ return 0; } else { /* yes, writable dir for us */ return 1; } } else { /* location does not exist, or is not a directory */ return 0; } } const char* getTempDir( void ) /* purpose: determine a suitable directory for temporary files. * warning: remote schedulers may chose to set a different TMP.. * returns: a string with a temporary directory, may still be NULL. */ { char* tempdir = getenv("GRIDSTART_TMP"); if ( tempdir != NULL && isDir(tempdir) ) return tempdir; tempdir = getenv("TMP"); if ( tempdir != NULL && isDir(tempdir) ) return tempdir; tempdir = getenv("TEMP"); if ( tempdir != NULL && isDir(tempdir) ) return tempdir; tempdir = getenv("TMPDIR"); if ( tempdir != NULL && isDir(tempdir) ) return tempdir; #ifdef P_tmpdir /* in stdio.h */ tempdir = P_tmpdir; if ( tempdir != NULL && isDir(tempdir) ) return tempdir; #endif tempdir = "/tmp"; if ( isDir(tempdir) ) return tempdir; tempdir = "/var/tmp"; if ( isDir(tempdir) ) return tempdir; /* whatever we have by now is it - may still be NULL */ return tempdir; } char* sizer( char* buffer, size_t capacity, size_t vsize, const void* value ) /* purpose: format an unsigned integer of less-known size. 
Note that * 64bit ints on 32bit systems need %llu, but 64/64 uses %lu * paramtr: buffer (IO): area to output into * capacity (IN): extent of the buffer to store things into * vsize (IN): size of the value * value (IN): value to format * returns: buffer */ { switch ( vsize ) { case 2: snprintf( buffer, capacity, "%hu", *((const short unsigned*) value) ); break; case 4: if ( sizeof(long) == 4 ) snprintf( buffer, capacity, "%lu", *((const long unsigned*) value) ); else snprintf( buffer, capacity, "%u", *((const unsigned*) value) ); break; case 8: if ( sizeof(long) == 4 ) { snprintf( buffer, capacity, "%llu", *((const long long unsigned*) value) ); } else { snprintf( buffer, capacity, "%lu", *((const long unsigned*) value) ); } break; default: snprintf( buffer, capacity, "unknown" ); break; } return buffer; } ��������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/meminfo.h����������������������������������������0000644�0001750�0001750�00000005323�11757531137�023235� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _MEMINFO_H #define _MEMINFO_H #include <sys/types.h> #include <time.h> #include <sys/types.h> #include <sys/time.h> typedef struct { /* private */ struct timeval stamp; /* point of time that app was started */ unsigned long n; /* running average helper */ /* the following info is in pages */ unsigned long size; /* memory (virtual) size */ unsigned long rss; /* resident set (present) size */ unsigned long share; /* amount of sharable memory */ unsigned long trs; /* executable (and lib) size */ unsigned long lrs; /* local (usually zero) size */ unsigned long drs; /* data PLUS stack size */ unsigned long dirty; /* dirty (need to write) pages */ } MemInfo; extern void initMemInfo( MemInfo* mem, pid_t pid ); /* purpose: initialize the data structure from process status * paramtr: mem (OUT): initialized memory block * pid (IN): process id to use for initialization. */ extern void maxMemInfo( MemInfo* max, const MemInfo* add ); /* purpose: keeps the maximum found for the memory info. * paramtr: max (IO): adjusted to the maximum * add (IN): look for maxima here */ extern void avgMemInfo( MemInfo* avg, const MemInfo* add ); /* purpose: keeps a running average * paramtr: max (IO): keeping the running average * avg (IN): new values to add to average */ extern int printXMLMemInfo( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const MemInfo* mem ); /* purpose: format the status information into the given buffer as XML. 
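 *          example (sketch, element name assumed): a call like
 *            printXMLMemInfo( b, sizeof(b), &len, 2, "usage", &mem )
 *          appends one <usage .../> element at indentation level 2.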
* paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * mem (IN): job status info to xml format. * returns: number of characters put into buffer (buffer length) */ extern void deleteMemInfo( MemInfo* mem ); /* purpose: destructor * paramtr: mem (IO): valid MemInfo structure to destroy. */ #endif /* _MEMINFO_H */ �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/parse.h������������������������������������������0000644�0001750�0001750�00000004476�11757531137�022725� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _PARSE_H #define _PARSE_H typedef struct _Node { const char* data; struct _Node* next; } Node; extern size_t countNodes( const Node* head ); /* purpose: count the number of element in list * paramtr: head (IN): start of the list. * returns: number of elements in list. */ extern void deleteNodes( Node* head ); /* purpose: clean up the created list and free its memory. * paramtr: head (IO): start of the list. */ extern Node* parseCommandLine( const char* line, int* state ); /* purpose: parse a commandline into a list of arguments while * obeying single quotes, double quotes and replacing * environment variable names. * paramtr: line (IN): commandline to parse * state (IO): start state to begin, final state on exit * state==32 is ok, state>32 is an error condition which * lead to a premature exit in parsing. * returns: A (partial on error) list of split arguments. */ extern Node* parseArgVector( int argc, char* const* argv, int* state ); /* purpose: parse an already split commandline into a list of arguments while * ONLY translating environment variable names that are not prohibited * from translation by some form of quoting (not double quotes, though). * paramtr: argc (IN): number of arguments in the argument vector * argv (IN): argument vector to parse * state (IO): start state to begin, final state on exit * state==32 is ok, state>32 is an error condition which * lead to a premature exit in parsing. * returns: A (partial on error) list of split arguments. The argument number * stays the same, but environment variables were translated. 
*/ #endif /* _PARSE_H */ ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/useinfo.h����������������������������������������0000644�0001750�0001750�00000003400�11757531137�023245� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _USEINFO_H #define _USEINFO_H #include <sys/types.h> #include <sys/resource.h> #ifdef SOLARIS #ifndef HAS_USAGE_MEM #define HAS_USAGE_MEM 1 #endif #ifndef HAS_USAGE_IO #define HAS_USAGE_IO 1 #endif #ifndef HAS_USAGE_MSG #define HAS_USAGE_MSG 1 #endif #endif /* SOLARIS */ extern int printXMLUseInfo( char* buffer, size_t size, size_t* len, size_t indent, const char* id, const struct rusage* use ); /* purpose: format the rusage record into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * id (IN): object identifier to use as element tag name. * use (IN): struct rusage info * returns: number of characters put into buffer (buffer length) */ extern void addUseInfo( struct rusage* sum, const struct rusage* summand ); /* purpose: add a given rusage record to an existing one * paramtr: sum (IO): initialized rusage record to add to * summand (IN): values to add to * returns: sum += summand; */ #endif /* _USEINFO_H */ ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/fail.c�������������������������������������������0000644�0001750�0001750�00000001760�11757531137�022512� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <sys/types.h> #include <signal.h> #include <unistd.h> #include <stdlib.h> static const char* RCS_ID = "$Id: fail.c 50 2007-05-19 00:48:32Z gmehta $"; int main( int argc, char* argv[] ) { /* no args, die right now */ if ( argc < 2 ) return 1; /* send myself the given signal */ kill( getpid(), atoi(argv[1]) ); /* for all ignored signals */ return 127; } ����������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/zio.h��������������������������������������������0000644�0001750�0001750�00000002402�11757531137�022377� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _ZIO_H #define _ZIO_H #include <sys/types.h> #ifndef GZIP_PATH #define GZIP_PATH "/bin/gzip" #endif /* GZIP_PATH */ int zopen( const char* pathname, int flags, mode_t mode ); /* purpose: open a file, but put gzip into the io-path * paramtr: pathname (IN): file to read or create * flags (IN): if O_RDONLY, use gunzip on file * if O_WRONLY, use gzip on file * mode (IN): file mode, see open(2) * returns: -1 in case of error, or an open file descriptor */ int zclose( int fd ); /* purpose: close a file that has a gzip in its io path * returns: process status from gzip */ #endif /* _ZIO_H */ ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/statinfo.c���������������������������������������0000644�0001750�0001750�00000054261�11757531137�023432� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <sys/param.h> #include <limits.h> #include <errno.h> #include <stdlib.h> #include <stdio.h> #include <string.h> #include <stdlib.h> #include <unistd.h> #include <fcntl.h> #include <grp.h> #include <pwd.h> #include "mynss.h" #include "debug.h" #include "statinfo.h" #include "tools.h" static const char* RCS_ID = "$Id: statinfo.c 4535 2011-09-26 22:14:19Z voeckler $"; extern int isExtended; /* timestamp format concise or extended */ extern int isLocal; /* timestamp time zone, UTC or local */ int make_application_executable = 0; size_t data_section_size = 262144ul; int myaccess( const char* path ) /* purpose: check a given file for being accessible and executable * under the currently effective user and group id. * paramtr: path (IN): current path to check * globals: make_application_executable (IN): if true, chmod to exec * returns: 0 if the file is accessible, -1 for not */ { /* sanity check */ if ( path && *path ) { struct stat st; if ( stat(path,&st) == 0 && S_ISREG(st.st_mode) ) { /* stat on file succeeded, and it is a regular file */ if ( make_application_executable ) { mode_t mode = st.st_mode; if ( st.st_uid == geteuid() ) mode |= (S_IXUSR | S_IRUSR); if ( st.st_gid == getegid() ) mode |= (S_IXGRP | S_IRGRP); mode |= (S_IXOTH | S_IROTH); chmod( path, mode ); /* update stat record */ if ( stat(path,&st) != 0 || ! S_ISREG(st.st_mode) ) { /* restat'ing the file now failed */ return -1; } } if ( ( st.st_uid == geteuid() && (S_IXUSR & st.st_mode) == S_IXUSR ) || ( st.st_gid == getegid() && (S_IXGRP & st.st_mode) == S_IXGRP ) || ( (S_IXOTH & st.st_mode) == S_IXOTH ) ) { /* all is well, app is executable and accessible */ return 0; } else { return -1; } } else { /* stat call failed, or file is not a regular file */ return -1; } } else { /* illegal filename string (empty or NULL) */ return -1; } } char* findApp( const char* fn ) /* purpose: check the executable filename and correct it if necessary * paramtr: fn (IN): current knowledge of filename * returns: newly allocated fqpn of path to exectuble, or NULL if not found */ { char* s, *path, *t = NULL; /* sanity check */ if ( fn == NULL || *fn == '\0' ) return NULL; /* don't touch absolute paths */ if ( *fn == '/' ) { if ( myaccess(fn) == 0 ) return strdup(fn); else return NULL; } /* try from CWD */ if ( myaccess(fn) == 0 ) return strdup(fn); /* continue only if there is a PATH to check */ if ( (s=getenv("PATH")) == NULL ) return NULL; else path = strdup(s); /* tokenize to compare */ for ( s=strtok(path,":"); s; s=strtok(NULL,":") ) { size_t len = strlen(fn) + strlen(s) + 2; t = (char*) malloc(len); strncpy( t, s, len ); strncat( t, "/", len ); strncat( t, fn, len ); if ( myaccess(t) == 0 ) break; else { free((void*) t); t = NULL; } } /* some or no matches found */ free((void*) path); return t; } int forcefd( const StatInfo* info, int fd ) /* purpose: force open a file on a certain fd * paramtr: info (IN): is the StatInfo of the file to connect to (fn or fd) * the mode for potential open() is determined from this, too. * fd (IN): is the file descriptor to plug onto. If this fd is * the same as the descriptor in info, nothing will be done. * returns: 0 if all is well, or fn was NULL or empty. 
* 1 if opening a filename failed, * 2 if dup2 call failed */ { /* is this a regular file with name, or is this a descriptor to copy from? */ int isHandle = ( info->source == IS_HANDLE || info->source == IS_TEMP ); int mode = info->file.descriptor; /* openmode for IS_FILE */ /* initialize the newHandle variable by opening regular files, or copying the fd */ int newfd = isHandle ? info->file.descriptor : ( ( (mode & O_ACCMODE) == O_RDONLY ) ? open( info->file.name, mode ) : /* FIXME: as long as stdout/stderr is shared between jobs, * we must always use append mode. Truncation happens during * initialization of the shared stdio. */ open( info->file.name, mode | O_APPEND, 0666 ) ); /* this should only fail in the open() case */ if ( newfd == -1 ) return 1; /* create a duplicate of the new fd onto the given (stdio) fd. This operation * is guaranteed to close the given (stdio) fd first, if open. */ if ( newfd != fd ) { /* FIXME: Does dup2 guarantee noop for newfd==fd on all platforms ? */ if ( dup2( newfd, fd ) == -1 ) return 2; } /* if we opened a file, we need to close it again. */ if ( ! isHandle ) close(newfd); return 0; } int initStatInfoAsTemp( StatInfo* statinfo, char* pattern ) /* purpose: Initialize a stat info buffer with a temporary file * paramtr: statinfo (OUT): the newly initialized buffer * pattern (IO): is the input pattern to mkstemp(), will be modified! * returns: a value of -1 indicates an error */ { int result = mkstemp(pattern); memset( statinfo, 0, sizeof(StatInfo) ); if ( result == -1 ) { /* mkstemp has failed, au weia! */ statinfo->source = IS_INVALID; statinfo->error = errno; } else { /* try to ensure append mode for the file, because it is shared * between jobs. If the SETFL operation fails, well there is nothing * we can do about that. */ int flags = fcntl( result, F_GETFL ); if ( flags != -1 ) fcntl( result, F_SETFL, flags | O_APPEND ); /* this file descriptor is NOT to be passed to the jobs? So far, the * answer is true. We close this fd on exec of sub jobs, so it will * be invisible to them. */ flags = fcntl( result, F_GETFD ); if ( flags != -1 ) fcntl( result, F_SETFD, flags | FD_CLOEXEC ); /* the return is the chosen filename as well as the opened descriptor. * we *could* unlink the filename right now, and be truly private, but * users may want to look into the log files of long persisting operations. */ statinfo->source = IS_TEMP; statinfo->file.descriptor = result; statinfo->file.name = strdup(pattern); errno = 0; result = fstat( result, &statinfo->info ); statinfo->error = errno; } return result; } int initStatInfoAsFifo( StatInfo* statinfo, char* pattern, const char* key ) /* purpose: Initialize a stat info buffer associated with a named pipe * paramtr: statinfo (OUT): the newly initialized buffer * pattern (IO): is the input pattern to mkstemp(), will be modified! * key (IN): is the environment key at which to store the filename * returns: a value of -1 indicates an error */ { int result = -1; char* race = strdup(pattern); memset( statinfo, 0, sizeof(StatInfo) ); RETRY: if ( (result = mkstemp(pattern)) == -1 ) { /* mkstemp has failed, au weia! */ statinfo->source = IS_INVALID; statinfo->error = errno; debugmsg( "Warning! Invalid FIFO: mkstemp failed: %d: %s\n", errno, strerror(errno) ); } else { /* create a FIFO instead of a regular tmp file. */ /* we could have used mktemp() right away, mkstemp() is NOT safer here! 
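   * (What actually guards against collisions is the sequence below: the
   * mkstemp() descriptor is closed, the name unlinked, and mkfifo() is
   * the atomic step -- on EEXIST the saved pattern is restored and the
   * whole dance retried.)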
*/ close( result ); unlink( pattern ); if ( (result = mkfifo( pattern, 0660 )) == -1 ) { if ( errno == EEXIST ) { /* filename was taken, restore pattern and retry */ strcpy( pattern, race ); goto RETRY; } else { /* other errors are treated as more fatal */ statinfo->source = IS_INVALID; statinfo->error = errno; debugmsg( "Warning! Invalid FIFO: mkfifo failed: %d: %s\n", errno, strerror(errno) ); } } else { /* open in non-blocking mode for reading. * WARNING: DO NOT open in O_RDONLY or suffer the consequences. * You must open in O_RDWR to avoid having to deal with EOF * whenever the clients drop to zero. */ if ( (result = open( pattern, O_RDWR | O_NONBLOCK )) == -1 ) { statinfo->source = IS_INVALID; statinfo->error = errno; debugmsg( "Warning! Invalid FIFO: open failed: %d: %s\n", errno, strerror(errno) ); } else { /* this file descriptor is NOT to be passed to the jobs? So far, * the answer is true. We close this fd on exec of sub jobs, so * it will be invisible to them. */ int flags = fcntl( result, F_GETFD ); if ( flags != -1 ) fcntl( result, F_SETFD, flags | FD_CLOEXEC ); /* the return is the chosen filename as well as the opened * descriptor. We cannot unlink the filename right now. */ statinfo->source = IS_FIFO; statinfo->file.descriptor = result; statinfo->file.name = strdup(pattern); /* fix environment */ if ( key != NULL ) { size_t size = strlen(key) + strlen(pattern) + 2; char* temp = (char*) malloc(size); memset( temp, 0, size-- ); strncpy( temp, key, size ); strncat( temp, "=", size ); strncat( temp, pattern, size ); if ( putenv( temp ) == -1 ) debugmsg( "Warning: Unable to putenv %s: %d: %s\n", key, errno, strerror(errno) ); /* do not free this string here nor now */ } errno = 0; result = fstat( result, &statinfo->info ); statinfo->error = errno; } } } free((void*) race); return result; } static int preserveFile( const char* fn ) /* purpose: preserve the given file by renaming it with a backup extension. * paramtr: fn (IN): name of the file * returns: 0: ok; -1: error, check errno */ { int i, fd = open( fn, O_RDONLY ); if ( fd != -1 ) { /* file exists, do something */ size_t size = strlen(fn)+8; char* newfn = malloc(size); close(fd); strncpy( newfn, fn, size ); for ( i=0; i<1000; ++i ) { snprintf( newfn + size-8, 8, ".%03d", i ); if ( (fd = open( newfn, O_RDONLY )) == -1 ) { if ( errno == ENOENT ) break; else return -1; } close(fd); } if ( i < 1000 ) { return rename( fn, newfn ); } else { /* too many backups */ errno = EEXIST; return -1; } } else { /* file does not exist, nothing to backup */ errno = 0; return 0; } } int initStatInfoFromName( StatInfo* statinfo, const char* filename, int openmode, int flag ) /* purpose: Initialize a stat info buffer with a filename to point to * paramtr: statinfo (OUT): the newly initialized buffer * filename (IN): the filename to memorize (deep copy) * openmode (IN): are the fcntl O_* flags to later open calls * flag (IN): bit#0 truncate: whether to reset the file size to zero * bit#1 defer op: whether to defer opening the file for now * bit#2 preserve: whether to backup existing target file * returns: the result of the stat() system call on the provided file */ { int result = -1; memset( statinfo, 0, sizeof(StatInfo) ); statinfo->source = IS_FILE; statinfo->file.descriptor = openmode; statinfo->file.name = strdup(filename); if ( (flag & 0x01) == 1 ) { /* FIXME: As long as we use shared stdio for stdout and stderr, we need * to explicitely truncate (and create) file to zero, if not appending. 
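   * (Worked example of the flag bits: flag==0x05 renames an existing
   * target via preserveFile() and truncates immediately; flag==0x03
   * postpones the truncation until updateStatInfo() runs, recorded in
   * statinfo->deferred.)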
*/ if ( (flag & 0x02) == 0 ) { int fd; if ( (flag & 0x04) == 4 ) preserveFile( filename ); fd = open( filename, (openmode & O_ACCMODE) | O_CREAT | O_TRUNC, 0666 ); if ( fd != -1 ) close(fd); } else { statinfo->deferred = 1 | (flag & 0x04); } } /* POST-CONDITION: statinfo->deferred == 1, iff (flag & 3) == 3 */ errno = 0; result = stat( filename, &statinfo->info ); statinfo->error = errno; /* special case, read the start of file (for magic) */ if ( (flag & 0x02) == 0 && result != -1 && S_ISREG(statinfo->info.st_mode) && statinfo->info.st_size > 0 ) { int fd = open( filename, O_RDONLY ); if ( fd != -1 ) { read( fd, (char*) statinfo->client.header, sizeof(statinfo->client.header) ); close(fd); } } return result; } int updateStatInfo( StatInfo* statinfo ) /* purpose: update existing and initialized statinfo with latest info * paramtr: statinfo (IO): stat info pointer to update * returns: the result of the stat() or fstat() system call. */ { int result = -1; if ( statinfo->source == IS_FILE && (statinfo->deferred & 1) == 1 ) { /* FIXME: As long as we use shared stdio for stdout and stderr, we need * to explicitely truncate (and create) file to zero, if not appending. */ int fd; if ( (statinfo->deferred & 4) == 4 ) preserveFile( statinfo->file.name ); fd = open( statinfo->file.name, (statinfo->file.descriptor & O_ACCMODE) | O_CREAT | O_TRUNC, 0666 ); if ( fd != -1 ) close(fd); /* once only */ statinfo->deferred &= ~1; /* remove deferred bit */ statinfo->deferred |= 2; /* mark as having gone here */ } if ( statinfo->source == IS_FILE || statinfo->source == IS_HANDLE || statinfo->source == IS_TEMP || statinfo->source == IS_FIFO ) { errno = 0; result = statinfo->source == IS_FILE ? stat( statinfo->file.name, &(statinfo->info) ) : fstat( statinfo->file.descriptor, &(statinfo->info) ); statinfo->error = errno; if ( result != -1 && statinfo->source == IS_FILE && S_ISREG(statinfo->info.st_mode) && statinfo->info.st_size > 0 ) { int fd = open( statinfo->file.name, O_RDONLY ); if ( fd != -1 ) { read( fd, (char*) statinfo->client.header, sizeof(statinfo->client.header) ); close(fd); } } } return result; } int initStatInfoFromHandle( StatInfo* statinfo, int descriptor ) /* purpose: Initialize a stat info buffer with a filename to point to * paramtr: statinfo (OUT): the newly initialized buffer * descriptor (IN): the handle to attach to * returns: the result of the fstat() system call on the provided handle */ { int result = -1; memset( statinfo, 0, sizeof(StatInfo) ); statinfo->source = IS_HANDLE; statinfo->file.descriptor = descriptor; errno = 0; result = fstat( descriptor, &statinfo->info ); statinfo->error = errno; return result; } int addLFNToStatInfo( StatInfo* info, const char* lfn ) /* purpose: optionally replaces the LFN field with the specified LFN * paramtr: info (IO): stat info pointer to update * lfn (IN): LFN to store, use NULL to free * returns: errno in case of error, 0 if OK. 
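 * (Sketch: addLFNToStatInfo( &si, "f.a" ) stores a private copy of the
 * logical file name for the XML output; addLFNToStatInfo( &si, NULL )
 * merely releases a previously stored LFN.)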
*/ { /* sanity check */ if ( info->source == IS_INVALID ) return EINVAL; if ( info->lfn != NULL ) free((void*) info->lfn ); if ( lfn == NULL ) info->lfn = NULL; else if ( (info->lfn = strdup(lfn)) == NULL ) return ENOMEM; return 0; } size_t printXMLStatInfo( char* buffer, const size_t size, size_t* len, size_t indent, const char* tag, const char* id, const StatInfo* info ) /* purpose: XML format a stat info record into a given buffer * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level of tag * tag (IN): name of element to generate * id (IN): id attribute, use NULL to not generate * info (IN): stat info to print. * returns: number of characters put into buffer (buffer length) */ { char* real = NULL; /* sanity check */ if ( info->source == IS_INVALID ) return *len; /* start main tag */ myprint( buffer, size, len, "%*s<%s error=\"%d\"", indent, "", tag, info->error ); if ( id != NULL ) myprint( buffer, size, len, " id=\"%s\"", id ); if ( info->lfn != NULL ) myprint( buffer, size, len, " lfn=\"%s\"", info->lfn ); append( buffer, size, len, ">\n" ); /* NEW: ignore "file not found" error for "gridstart" */ if ( id != NULL && info->error == 2 && strcmp( id, "gridstart" ) == 0 ) myprint( buffer, size, len, "%*s<!-- ignore above error -->\n", indent+2, "" ); /* either a <name> or <descriptor> sub element */ switch ( info->source ) { case IS_TEMP: /* preparation for <temporary> element */ /* late update for temp files */ errno = 0; if ( fstat( info->file.descriptor, (struct stat*) &info->info ) != -1 && ( ((StatInfo*) info)->error = errno) == 0 ) { /* obtain header of file */ #if 0 /* implementation alternative 1: use a new filetable kernel structure */ int fd = open( info->file.name, O_RDONLY ); if ( fd != -1 ) { read( fd, (char*) info->client.header, sizeof(info->client.header) ); close(fd); } #else /* implementation alternative 2: share the kernel filetable structure */ int fd = dup( info->file.descriptor ); if ( fd != -1 ) { if ( lseek( fd, 0, SEEK_SET ) != -1 ) read( fd, (char*) info->client.header, sizeof(info->client.header) ); close(fd); } #endif } myprint( buffer, size, len, "%*s<temporary name=\"%s\" descriptor=\"%d\"/>\n", indent+2, "", info->file.name, info->file.descriptor ); break; case IS_FIFO: /* <fifo> element */ myprint( buffer, size, len, "%*s<fifo name=\"%s\" descriptor=\"%d\" count=\"%u\" rsize=\"%u\" wsize=\"%u\"/>\n", indent+2, "", info->file.name, info->file.descriptor, info->client.fifo.count, info->client.fifo.rsize, info->client.fifo.wsize ); break; case IS_FILE: /* <file> element */ #if 0 /* some debug info - for now */ myprint( buffer, size, len, "%*s<!-- deferred flag: %d -->\n", indent+2, "", info->deferred ); #endif #ifdef HAS_REALPATH_EXT real = realpath( info->file.name, NULL ); #endif /* HAS_REALPATH_EXT */ myprint( buffer, size, len, "%*s<file name=\"%s\"", indent+2, "", real ? 
             real : info->file.name );
#ifdef HAS_REALPATH_EXT
    if ( real ) free((void*) real);
#endif /* HAS_REALPATH_EXT */
    if ( info->error == 0 &&
         S_ISREG(info->info.st_mode) && info->info.st_size > 0 ) {
      /* optional hex information */
      size_t i, end = sizeof(info->client.header);
      if ( info->info.st_size < end ) end = info->info.st_size;

      append( buffer, size, len, ">" );
      for ( i=0; i<end; ++i )
        myprint( buffer, size, len, "%02X", info->client.header[i] );
      append( buffer, size, len, "</file>\n" );
    } else {
      append( buffer, size, len, "/>\n" );
    }
    break;

  case IS_HANDLE: /* <descriptor> element */
    myprint( buffer, size, len, "%*s<descriptor number=\"%u\"/>\n",
             indent+2, "", info->file.descriptor );
    break;

  default: /* this must not happen! */
    myprint( buffer, size, len,
             "%*s<!-- ERROR: No valid file info available -->\n",
             indent+2, "" );
    break;
  }

  if ( info->error == 0 && info->source != IS_INVALID ) {
    /* <stat> subrecord */
    char my[32];
    struct passwd* user = wrap_getpwuid( info->info.st_uid );
    struct group* group = wrap_getgrgid( info->info.st_gid );
    myprint( buffer, size, len, "%*s<statinfo mode=\"0%o\"",
             indent+2, "", info->info.st_mode );

    /* Grmblftz, are we in 32bit, 64bit LFS on 32bit, or 64bit on 64 */
    sizer( my, sizeof(my), sizeof(info->info.st_size), &info->info.st_size );
    myprint( buffer, size, len, " size=\"%s\"", my );

    sizer( my, sizeof(my), sizeof(info->info.st_ino), &info->info.st_ino );
    myprint( buffer, size, len, " inode=\"%s\"", my );

    sizer( my, sizeof(my), sizeof(info->info.st_nlink), &info->info.st_nlink );
    myprint( buffer, size, len, " nlink=\"%s\"", my );

    sizer( my, sizeof(my), sizeof(info->info.st_blksize), &info->info.st_blksize );
    myprint( buffer, size, len, " blksize=\"%s\"", my );

    /* st_blocks is new in iv-1.8 */
    sizer( my, sizeof(my), sizeof(info->info.st_blocks), &info->info.st_blocks );
    myprint( buffer, size, len, " blocks=\"%s\"", my );

    append( buffer, size, len, " mtime=\"" );
    mydatetime( buffer, size, len, isLocal, isExtended, info->info.st_mtime, -1 );
    append( buffer, size, len, "\" atime=\"" );
    mydatetime( buffer, size, len, isLocal, isExtended, info->info.st_atime, -1 );
    append( buffer, size, len, "\" ctime=\"" );
    mydatetime( buffer, size, len, isLocal, isExtended, info->info.st_ctime, -1 );

    /* cast explicitly: the width of uid_t and gid_t varies per platform */
    myprint( buffer, size, len, "\" uid=\"%lu\"",
             (unsigned long) info->info.st_uid );
    if ( user ) myprint( buffer, size, len, " user=\"%s\"", user->pw_name );
    myprint( buffer, size, len, " gid=\"%lu\"",
             (unsigned long) info->info.st_gid );
    if ( group ) myprint( buffer, size, len, " group=\"%s\"", group->gr_name );

    append( buffer, size, len, "/>\n" );
  }

  /* data section from stdout and stderr of application */
  if ( info->source == IS_TEMP && info->error == 0 &&
       info->info.st_size && data_section_size > 0 ) {
    size_t dsize = data_section_size;
    size_t fsize = info->info.st_size;
    myprint( buffer, size, len, "%*s<data%s", indent+2, "",
             ( fsize > dsize ? " truncated=\"true\"" : "" ) );
    if ( fsize > 0 ) {
      char* data = (char*) malloc(dsize+1);
      int fd = dup(info->file.descriptor);
      append( buffer, size, len, ">" );
      if ( fd != -1 ) {
        /* rewind the shared temp file before reading it back */
        if ( lseek( fd, 0, SEEK_SET ) != -1 ) {
          ssize_t rsize = read( fd, data, dsize );
          xmlquote( buffer, size, len, data, rsize );
        }
        close(fd);
      }
      append( buffer, size, len, "</data>\n" );
      free((void*) data);
    } else {
      append( buffer, size, len, "/>\n" );
    }
  }

  myprint( buffer, size, len, "%*s</%s>\n", indent, "", tag );
  return *len;
}

void deleteStatInfo( StatInfo* statinfo )
/* purpose: cleans up and invalidates the structure after being done.
 * paramtr: statinfo (IO): record to clean up.
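 * (Note: for IS_TEMP and IS_FIFO sources this also closes the open
 * descriptor and unlinks the backing file, so invoke it only once per
 * record.)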
*/ { #ifdef EXTRA_DEBUG debugmsg( "# deleteStatInfo(%p)\n", statinfo ); #endif if ( statinfo->source == IS_FILE || statinfo->source == IS_TEMP || statinfo->source == IS_FIFO ) { if ( statinfo->source == IS_TEMP || statinfo->source == IS_FIFO ) { close( statinfo->file.descriptor ); unlink( statinfo->file.name ); } if ( statinfo->file.name ) { free( (void*) statinfo->file.name ); statinfo->file.name = NULL; /* avoid double free */ } } /* invalidate */ statinfo->source = IS_INVALID; } �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/testme�������������������������������������������0000755�0001750�0001750�00000000033�11757531137�022652� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/sh echo test exit 0 �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/getif.h������������������������������������������0000644�0001750�0001750�00000003555�11757531137�022706� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _GETIF_H #define _GETIF_H #include <sys/types.h> #include <arpa/inet.h> #include <netinet/in.h> #include <sys/socket.h> #include <net/if.h> extern int getif_debug; /* enable debugging code paths */ extern int interface_list( struct ifconf* ifc ); /* purpose: returns the list of interfaces * paramtr: ifc (IO): initializes structure with buffer and length * returns: sockfd for further queries, or -1 to indicate an error. 
* warning: caller must free memory in ifc.ifc_buf * caller must close sockfd (result value) */ extern struct ifreq* primary_interface(); /* purpose: obtain the primary interface information * returns: a newly-allocated structure containing the interface info, * or NULL to indicate an error. */ extern void whoami( char* abuffer, size_t asize, char* ibuffer, size_t isize ); /* purpose: copy the primary interface's IPv4 dotted quad into the given buffer * paramtr: abuffer (OUT): start of buffer to put IPv4 dotted quad * asize (IN): maximum capacity the abuffer is willing to accept * ibuffer (OUT): start of buffer to put the primary if name * isize (IN): maximum capacity the ibuffer is willing to accept * returns: the modified buffers. */ #endif /* _GETIF_H */ ���������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine.h����������������������������������������0000644�0001750�0001750�00000003532�11757531137�023207� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _MACHINE_H #define _MACHINE_H #include <sys/types.h> typedef struct { /* virtual method table */ void* (*ctor)( void ); int (*show)( char*, size_t, size_t*, size_t, const char*, const void* ); void (*dtor)( void* ); /* mutable object data */ void* data; } MachineInfo; extern void initMachineInfo( MachineInfo* machine ); /* purpose: initialize the data structure. * paramtr: machine (OUT): initialized MachineInfo structure. */ extern int printXMLMachineInfo( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const MachineInfo* machine ); /* purpose: format the job information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * machine (IN): machine info to print. * returns: number of characters put into buffer (buffer length) */ extern void deleteMachineInfo( MachineInfo* machine ); /* purpose: destructor * paramtr: machine (IO): valid MachineInfo structure to destroy. 
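 * (Typical lifecycle sketch, using only declarations from this header
 * and a caller-provided buffer:
 *   MachineInfo m; initMachineInfo( &m );
 *   printXMLMachineInfo( buf, size, &len, 2, "machine", &m );
 *   deleteMachineInfo( &m );)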
*/ #endif /* _MACHINE_H */ ����������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/getif.c������������������������������������������0000644�0001750�0001750�00000023332�11757531137�022674� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "debug.h" #include "getif.h" #include <errno.h> #include <string.h> #include <fcntl.h> #include <stdio.h> #include <string.h> #include <stdlib.h> #include <unistd.h> #include <sys/utsname.h> #include <sys/socket.h> #include <netinet/in.h> #include <arpa/inet.h> #include <sys/ioctl.h> #ifdef HAS_SYS_SOCKIO #include <sys/sockio.h> #endif static const char* RCS_ID = "$Id: getif.c 2218 2010-07-28 18:43:17Z voeckler $"; int getif_debug = 0; /* enable debugging code paths */ static unsigned long vpn_network[6] = { 0, 0, 0, 0, 0 }; static unsigned long vpn_netmask[6] = { 0, 0, 0, 0, 0 }; static void singleton_init( void ) { /* singleton init */ if ( vpn_network[0] == 0ul ) { vpn_network[0] = inet_addr("127.0.0.0"); /* loopbacknet */ vpn_network[1] = inet_addr("10.0.0.0"); /* class A VPN net */ vpn_network[2] = inet_addr("172.16.0.0"); /* class B VPN nets */ vpn_network[3] = inet_addr("192.168.0.0"); /* class C VPN nets */ vpn_network[4] = inet_addr("169.254.0.0"); /* link-local junk */ vpn_network[5] = inet_addr("0.0.0.0"); /* no address */ } /* singleton init */ if ( vpn_netmask[0] == 0ul ) { vpn_netmask[0] = inet_addr("255.0.0.0"); /* loopbackmask */ vpn_netmask[1] = inet_addr("255.0.0.0"); /* class A mask */ vpn_netmask[2] = inet_addr("255.240.0.0"); /* class B VPN mask */ vpn_netmask[3] = inet_addr("255.255.0.0"); /* class C VPN mask */ vpn_netmask[4] = inet_addr("255.254.0.0"); /* link-local junk */ vpn_netmask[5] = inet_addr("255.255.255.255"); /* no mask */ } } int interface_list( struct ifconf* ifc ) /* purpose: returns the list of interfaces * paramtr: ifc (IO): initializes structure with buffer and length * returns: sockfd for further queries, or -1 to indicate an error. * warning: caller must free memory in ifc.ifc_buf * caller must close sockfd (result value) */ { #if defined(SIOCGLIFNUM) struct lifnum ifnr; #endif char *buf = 0; int lastlen, len, sockfd = 0; /* create a socket */ if ( (sockfd = socket( AF_INET, SOCK_DGRAM, 0 )) == -1 ) { int saverr = errno; debugmsg( "ERROR: socket DGRAM: %d: %s\n", errno, strerror(errno) ); errno = saverr; return -1; } /* * phase 1: guestimate size of buffer necessary to contain all interface * information records. 
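   * (Without SIOCGLIFNUM the loop below starts from a small guess and
   * doubles the buffer -- len <<= 1 -- until SIOCGIFCONF reports the
   * same length twice, a common portable way to detect truncation.)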
*/ #if defined(SIOCGLIFNUM) /* API exists to determine the correct buffer size */ if ( getif_debug ) debugmsg( "DEBUG: SIOCGLIFNUM ioctl supported\n" ); memset( &ifnr, 0, sizeof(ifnr) ); ifnr.lifn_family = AF_INET; if ( ioctl( sockfd, SIOCGLIFNUM, &ifnr ) < 0 ) { debugmsg( "ERROR: ioctl SIOCGLIFNUM: %d: %s\n", errno, strerror(errno) ); if ( errno != EINVAL ) { int saverr = errno; close(sockfd); errno = saverr; return -1; } } else { len = lastlen = ifnr.lifn_count * sizeof(struct ifreq); } #else /* does not have SIOCGLIFNUM */ /* determine by repetitive guessing a buffer size */ if ( getif_debug ) debugmsg( "DEBUG: SIOCGLIFNUM ioctl *not* supported\n" ); lastlen = len = 3.5 * sizeof(struct ifreq); /* 1st guesstimate */ #endif /* POST CONDITION: some buffer size determined */ /* FIXME: Missing upper bound */ for (;;) { /* guestimate correct buffer length */ if ( getif_debug ) debugmsg( "DEBUG: lastlen=%d, len=%d\n", lastlen, len ); buf = (char*) malloc(len); memset( buf, 0, len ); ifc->ifc_len = len; ifc->ifc_buf = buf; if ( ioctl( sockfd, SIOCGIFCONF, ifc ) < 0 ) { debugmsg( "WARN: ioctl SIOCGIFCONF: %d: %s\n", errno, strerror(errno) ); if ( errno != EINVAL || lastlen != 0 ) { int saverr = errno; close(sockfd); errno = saverr; return -1; } } else { if ( ifc->ifc_len == lastlen ) break; /* success */ if ( getif_debug ) debugmsg( "DEBUG: size mismatch, next round\n" ); lastlen = ifc->ifc_len; } len <<= 1; free((void*) buf); } /* POST CONDITION: Now the buffer contains list of all interfaces */ if ( getif_debug ) { debugmsg( "DEBUG: correct buffer length %d\n", ifc->ifc_len ); hexdump( ifc->ifc_buf, ifc->ifc_len ); } return sockfd; } struct ifreq* primary_interface( void ) /* purpose: obtain the primary interface information * returns: a newly-allocated structure containing the interface info, * or NULL to indicate an error. */ { struct sockaddr_in sa; struct ifconf ifc; struct ifreq result, primary; struct ifreq* ifrcopy = NULL; char *ptr; int sockfd, flag = 0; /* * phase 0: init */ memset( &result, 0, sizeof(result) ); memset( &primary, 0, sizeof(primary) ); singleton_init(); /* * phase 1: obtain list of interfaces */ if ( (sockfd=interface_list( &ifc )) == -1 ) return NULL; /* * phase 2: walk interface list until a good interface is reached */ /* Notice: recycle meaning of "len" in here */ for ( ptr = ifc.ifc_buf; ptr < ifc.ifc_buf + ifc.ifc_len; ) { struct ifreq* ifr = (struct ifreq*) ptr; #ifndef _SIZEOF_ADDR_IFREQ size_t len = sizeof(*ifr); #if 0 /* Linux does not support (struct sockaddr).sa_len */ size_t len = sizeof(ifr->ifr_name) + ( ifr->ifr_addr.sa_len > sizeof(struct sockaddr) ? ifr->ifr_addr.sa_len : sizeof(struct sockaddr) ); #endif #else size_t len = _SIZEOF_ADDR_IFREQ(*ifr); #endif /* _SIZEOF_ADDR_IFREQ */ if ( getif_debug ) debugmsg( "DEBUG: stepping by %d\n", len ); ptr += len; /* report current entry's interface name */ if ( getif_debug ) debugmsg( "DEBUG: interface %s\n", ifr->ifr_name ); /* interested in IPv4 interfaces only */ if ( ifr->ifr_addr.sa_family != AF_INET ) { if ( getif_debug ) debugmsg( "DEBUG: interface %s has wrong family, skipping\n", ifr->ifr_name ); continue; } memcpy( &sa, &(ifr->ifr_addr), sizeof(struct sockaddr_in) ); if ( getif_debug ) debugmsg( "DEBUG: address %s\n", inet_ntoa(sa.sin_addr) ); /* Do not use localhost aka loopback interfaces. While loopback * interfaces traditionally start with "lo", this is not mandatory. * It is safer to check that the address is in the 127.0.0.0 class A * network. 
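     * (Worked example: for 127.0.0.1, s_addr & vpn_netmask[0] masks with
     * 255.0.0.0 and yields 127.0.0.0 == vpn_network[0], so the interface
     * is skipped regardless of its name.)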
*/ if ( (sa.sin_addr.s_addr & vpn_netmask[0]) == vpn_network[0] ) { if ( getif_debug ) debugmsg( "DEBUG: interface is localhost, skipping\n" ); continue; } /* prime candidate - check, if interface is UP */ result = *ifr; if ( ioctl( sockfd, SIOCGIFFLAGS, &result ) < 0 ) { if ( getif_debug ) debugmsg( "DEBUG: ioctl SIOCGIFFLAGS %s: %s\n", ifr->ifr_name, strerror(errno) ); } /* interface is up - our work is done. Or is it? */ if ( (result.ifr_flags & IFF_UP) ) { if ( ! flag ) { /* remember first found primary interface */ if ( getif_debug ) debugmsg( "DEBUG: first primary interface %s\n", ifr->ifr_name ); primary = result; flag = 1; } /* check for VPNs */ if ( (sa.sin_addr.s_addr & vpn_netmask[1]) == vpn_network[1] || (sa.sin_addr.s_addr & vpn_netmask[2]) == vpn_network[2] || (sa.sin_addr.s_addr & vpn_netmask[3]) == vpn_network[3] || (sa.sin_addr.s_addr & vpn_netmask[4]) == vpn_network[4] || (sa.sin_addr.s_addr & vpn_netmask[5]) == vpn_network[5] ) { if ( getif_debug ) debugmsg( "DEBUG: interface has VPN or bad address, trying next\n" ); } else { if ( getif_debug ) debugmsg( "DEBUG: interface is good\n" ); flag = 2; break; } } else { if ( getif_debug ) debugmsg( "DEBUG: interface is down\n" ); } } /* check for loop exceeded - if yes, fall back on first primary */ if ( flag == 1 && ptr >= ifc.ifc_buf + ifc.ifc_len ) { if ( getif_debug ) debugmsg( "DEBUG: no better interface found, falling back\n" ); result = primary; } /* clean up */ free((void*) ifc.ifc_buf); close(sockfd); /* create a freshly allocated copy */ ifrcopy = (struct ifreq*) malloc( sizeof(struct ifreq) ); memcpy( ifrcopy, &result, sizeof(struct ifreq) ); return ifrcopy; } void whoami( char* abuffer, size_t asize, char* ibuffer, size_t isize ) /* purpose: copy the primary interface's IPv4 dotted quad into the given buffer * paramtr: abuffer (OUT): start of buffer to put IPv4 dotted quad * asize (IN): maximum capacity the abuffer is willing to accept * ibuffer (OUT): start of buffer to put the primary if name * isize (IN): maximum capacity the ibuffer is willing to accept * returns: the modified buffers. 
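 * (Hypothetical caller sketch: char addr[16], name[IFNAMSIZ];
 * whoami( addr, sizeof(addr), name, sizeof(name) ); on failure the
 * buffers come back as "0.0.0.0" and "(none)" respectively.)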
*/ { /* enumerate interfaces, and guess primary one */ struct ifreq* ifr = primary_interface(); if ( ifr != NULL ) { struct sockaddr_in sa; if ( abuffer ) { memcpy( &sa, &(ifr->ifr_addr), sizeof(struct sockaddr) ); strncpy( abuffer, inet_ntoa(sa.sin_addr), asize ); } if ( ibuffer ) { strncpy( ibuffer, ifr->ifr_name, isize ); } free((void*) ifr); } else { /* error while trying to determine address of primary interface */ #if 0 /* future lab */ if ( abuffer ) strncpy( abuffer, "xsi:null", asize ); if ( ibuffer ) strncpy( ibuffer, "xsi:null", isize ); #else /* for now */ if ( abuffer ) strncpy( abuffer, "0.0.0.0", asize ); if ( ibuffer ) strncpy( ibuffer, "(none)", isize ); #endif } } ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/msave.c������������������������������������������0000644�0001750�0001750�00000026714�11757531137�022720� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ #include "machine.h" #include "debug.h" #include "tools.h" #include <sys/types.h> #include <sys/utsname.h> #include <ctype.h> #include <math.h> #ifdef LINUX #include <sys/sysinfo.h> #endif /* LINUX */ #ifdef DARWIN #include <sys/sysctl.h> #endif /* DARWIN */ #ifdef SUNOS #include <sys/loadavg.h> #endif /* SOLARIS */ #include <string.h> #include <stdio.h> #include <time.h> static const char* RCS_ID = "$Id$"; extern int isExtended; /* timestamp format concise or extended */ extern int isLocal; /* timestamp time zone, UTC or local */ static size_t mystrlen( const char* s, size_t max ) { /* array version */ size_t i = 0; while ( i < max && s[i] ) ++i; return i; } static char* mytolower( char* s, size_t max ) { /* array version */ size_t i; for ( i=0; i < max && s[i]; ++i ) s[i] = tolower(s[i]); return s; } #ifdef SOLARIS static void gather_sunos_loadavg( MachineInfo* machine ) { double load[3]; if ( getloadavg( load, 3 ) != -1 ) { int i; for ( i=0; i<3; ++i ) { machine->load[i] = load[i]; } } } #endif /* SOLARIS */ /* -------------------------------------------------------------- */ #ifdef DARWIN static void gather_darwin_cpuinfo( MachineInfo* machine ) { size_t len; unsigned long freq; char model[128]; len = sizeof(machine->cpu_count); if ( sysctlbyname( "hw.ncpu", &machine->cpu_count, &len, NULL, 0 ) != 0 ) machine->cpu_count = -1; len = sizeof(machine->cpu_online); if ( sysctlbyname( "hw.availcpu", &machine->cpu_online, &len, NULL, 0 ) != 0 ) machine->cpu_online = -1; len = sizeof(freq); if ( sysctlbyname( "hw.cpufrequency", &freq, &len, NULL, 0 ) == 0 ) machine->megahertz = freq / 1000000; len = sizeof(machine->vendor_id); if ( sysctlbyname( "machdep.cpu.vendor", machine->vendor_id, &len, NULL, 0 ) != 0 ) memset( machine->vendor_id, 0, sizeof(machine->vendor_id) ); len = sizeof(model); if ( sysctlbyname( "machdep.cpu.brand_string", model, &len, NULL, 0 ) == 0 ) { char* s = model; char* d = machine->model_name; while ( *s && d - machine->model_name < sizeof(machine->model_name) ) { while ( *s && ! isspace(*s) ) *d++ = *s++; if ( *s && *s == ' ' ) *d++ = *s++; while ( *s && isspace(*s) ) ++s; } *d = 0; } else { memset( machine->model_name, 0, sizeof(machine->model_name) ); } } static void gather_darwin_uptime( MachineInfo* machine ) { size_t len = sizeof(machine->boottime); if ( sysctlbyname( "kern.boottime", &machine->boottime, &len, NULL, 0 ) == -1 ) machine->idletime = 0.0; } static void gather_darwin_loadavg( MachineInfo* machine ) { struct loadavg l; size_t len = sizeof(l); if ( sysctlbyname( "vm.loadavg", &l, &len, NULL, 0 ) == 0 ) { int i; for ( i=0; i<3; ++i ) machine->load[i] = l.ldavg[i] / ((float) l.fscale); } } static void gather_darwin_meminfo( MachineInfo* machine ) { struct xsw_usage s; size_t len = sizeof(s); if ( sysctlbyname( "vm.swapusage", &s, &len, NULL, 0 ) == 0 ) { machine->swap_total = s.xsu_avail; machine->swap_free = s.xsu_used; } len = sizeof(machine->ram_total); if ( sysctlbyname( "hw.memsize", &machine->ram_total, &len, NULL, 0 ) == -1 ) machine->ram_total = 0; } #endif /* DARWIN */ /* -------------------------------------------------------------- */ #ifdef LINUX static void gather_proc_cpuinfo( MachineInfo* machine ) { FILE* f = fopen( "/proc/cpuinfo", "r" ); if ( f != NULL ) { char line[256]; while ( fgets( line, 256, f ) ) { if ( *(machine->vendor_id) == 0 && strncmp( line, "vendor_id", 9 ) == 0 ) { char* s = strchr( line, ':' )+1; char* d = machine->vendor_id; while ( *s && isspace(*s) ) ++s; while ( *s && ! 
isspace(*s) && d - machine->vendor_id < sizeof(machine->vendor_id) ) *d++ = *s++; *d = 0; } else if ( *(machine->model_name) == 0 && strncmp( line, "model name", 10 ) == 0 ) { char* s = strchr( line, ':' )+2; char* d = machine->model_name; while ( *s && d - machine->model_name < sizeof(machine->model_name) ) { while ( *s && ! isspace(*s) ) *d++ = *s++; if ( *s && *s == ' ' ) *d++ = *s++; while ( *s && isspace(*s) ) ++s; } *d = 0; } else if ( machine->megahertz == 0.0 && strncmp( line, "cpu MHz", 7 ) == 0 ) { char* s = strchr( line, ':' )+2; float mhz; sscanf( s, "%f", &mhz ); machine->megahertz = (unsigned long) mhz; } else if ( strncmp( line, "processor ", 10 ) == 0 ) { machine->cpu_count += 1; } } fclose(f); } } static void gather_proc_loadavg( MachineInfo* machine ) { FILE* f = fopen( "/proc/loadavg", "r" ); if ( f != NULL ) { int maxpid; fscanf( f, "%f %f %f %d/%d %d", &(machine->load[0]), &(machine->load[1]), &(machine->load[2]), &(machine->pid_running), &(machine->pid_total), &maxpid ); fclose(f); } } static void gather_proc_uptime( MachineInfo* machine ) { FILE* f = fopen( "/proc/uptime", "r" ); if ( f != NULL ) { double uptime, r, sec; struct timeval tv; now( &tv ); fscanf( f, "%lf %lf", &uptime, &(machine->idletime) ); fclose(f); r = ( tv.tv_sec + tv.tv_usec * 1E-6 ) - uptime; machine->boottime.tv_sec = sec = (time_t) floor(r); machine->boottime.tv_usec = (time_t) floor(1E6 * (r - sec)); } } #endif /* LINUX */ /* -------------------------------------------------------------- */ void initMachineInfo( MachineInfo* machine ) /* purpose: initialize the data structure. * paramtr: machine (OUT): initialized MachineInfo structure. */ { #ifdef LINUX struct sysinfo si; #endif uint64_t pagesize; long ppages; /* some paths may not use it */ /* extra sanity check */ if ( machine == NULL ) return ; memset( machine, 0, sizeof(MachineInfo) ); now( &machine->now ); if ( uname( &machine->uname ) == -1 ) { memset( &machine->uname, 0, sizeof(machine->uname) ); } else { /* remove mixed case */ mytolower( machine->uname.sysname, SYS_NMLN ); mytolower( machine->uname.nodename, SYS_NMLN ); mytolower( machine->uname.machine, SYS_NMLN ); } pagesize = machine->pagesize = getpagesize(); #ifndef LINUX #if defined(_SC_PHYS_PAGES) if ( (ppages = sysconf(_SC_PHYS_PAGES)) != -1 ) machine->ram_total = pagesize * ppages; #endif #if defined(_SC_AVPHYS_PAGES) if ( (ppages = sysconf(_SC_AVPHYS_PAGES)) != -1 ) machine->ram_free = pagesize * ppages; #endif #endif #if defined(_SC_NPROCESSORS_CONF) if ( (ppages = sysconf(_SC_NPROCESSORS_CONF)) != -1 ) machine->cpu_count = ppages; #endif #if defined(_SC_NPROCESSORS_ONLN) if ( (ppages = sysconf(_SC_NPROCESSORS_ONLN)) != -1 ) machine->cpu_online = ppages; #endif #ifdef DARWIN gather_darwin_meminfo( machine ); gather_darwin_uptime( machine ); gather_darwin_loadavg( machine ); gather_darwin_cpuinfo( machine ); #endif /* DARWIN */ #ifdef LINUX /* obtain some memory information */ if ( sysinfo(&si) != -1 ) { pagesize = si.mem_unit; machine->ram_total = si.totalram * pagesize; machine->ram_free = si.freeram * pagesize; machine->swap_total = si.totalswap * pagesize; machine->swap_free = si.freeswap * pagesize; } gather_proc_uptime(machine); gather_proc_loadavg(machine); gather_proc_cpuinfo(machine); #endif /* LINUX */ } int printXMLMachineInfo( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const MachineInfo* machine ) /* purpose: format the job information into the given buffer as XML. 
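 *          (Despite the wording above, this element describes the machine:
 *           the body below emits a <machine> root with <uname> and <ram>
 *           children, plus <boot>, <swap>, <cpu> and <load> where the
 *           platform supports them.)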
* paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * machine (IN): machine info to print. * returns: number of characters put into buffer (buffer length) */ { /* <machine> open tag */ myprint( buffer, size, len, "%*s<%s start=\"", indent, "", tag ); mydatetime( buffer, size, len, isLocal, isExtended, machine->now.tv_sec, machine->now.tv_usec ); append( buffer, size, len, "\">\n" ); /* <uname> */ myprint( buffer, size, len, "%*s<uname system=\"", indent+2, "" ); full_append( buffer, size, len, machine->uname.sysname, mystrlen(machine->uname.sysname,SYS_NMLN) ); append( buffer, size, len, "\" nodename=\"" ); full_append( buffer, size, len, machine->uname.nodename, mystrlen(machine->uname.nodename,SYS_NMLN) ); append( buffer, size, len, "\" release=\"" ); full_append( buffer, size, len, machine->uname.release, mystrlen(machine->uname.release,SYS_NMLN) ); append( buffer, size, len, "\" machine=\"" ); full_append( buffer, size, len, machine->uname.machine, mystrlen(machine->uname.machine,SYS_NMLN) ); append( buffer, size, len, "\">" ); full_append( buffer, size, len, machine->uname.version, mystrlen(machine->uname.version,SYS_NMLN) ); append( buffer, size, len, "</uname>\n" ); /* <ram .../> tag */ myprint( buffer, size, len, "%*s<ram page-size=\"%lu\" total=\"%lu\"", indent+2, "", machine->pagesize, machine->ram_total ); if ( machine->ram_free ) myprint( buffer, size, len, " free=\"%lu\"", machine->ram_free ); append( buffer, size, len, "/>\n" ); #if defined(LINUX) || defined(DARWIN) /* <boot> element */ myprint( buffer, size, len, "%*s<boot", indent+2, "" ); if ( machine->idletime > 0.0 ) myprint( buffer, size, len, " idle=\"%.3f\"", machine->idletime ); append( buffer, size, len, ">" ); mydatetime( buffer, size, len, isLocal, isExtended, machine->boottime.tv_sec, machine->boottime.tv_usec ); append( buffer, size, len, "</boot>\n" ); #endif /* LINUX || DARWIN */ #if defined(LINUX) || defined(DARWIN) /* <swap .../> tag */ myprint( buffer, size, len, "%*s<swap total=\"%lu\" free=\"%lu\"/>\n", indent+2, "", machine->swap_total, machine->swap_free ); #endif #if defined(_SC_NPROCESSORS_CONF) || defined(DARWIN) /* <cpu> element */ myprint( buffer, size, len, "%*s<cpu count=\"%u\" speed=\"%.0f\" vendor=\"%s\">%s</cpu>\n", indent+2, "", machine->cpu_count, machine->megahertz + 0.5, machine->vendor_id, machine->model_name ); #endif #if defined(LINUX) || defined(DARWIN) /* <load .../> tag */ myprint( buffer, size, len, "%*s<load min1=\"%.2f\" min5=\"%.2f\" min15=\"%.2f\"" #ifdef LINUX " running=\"%u\" total-pid=\"%u\"" #endif "/>\n", indent+2, "", machine->load[0], machine->load[1], machine->load[2] #ifdef LINUX , machine->pid_running, machine->pid_total #endif ); #endif /* </machine> close tag */ myprint( buffer, size, len, "%*s</%s>\n", indent, "", tag ); return *len; } void deleteMachineInfo( MachineInfo* machine ) /* purpose: destructor * paramtr: machine (IO): valid MachineInfo structure to destroy. 
 */
{
#ifdef EXTRA_DEBUG
  debugmsg( "# deleteMachineInfo(%p)\n", machine );
#endif
  machine->pagesize = 0;
}
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/event.c
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include <sys/types.h>
#include <sys/time.h>
#include <sys/poll.h>
#include <fcntl.h>
#include <errno.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include <stdio.h>

#include "rwio.h"
#include "debug.h"
#include "tools.h"
#include "event.h"
#include "mysignal.h"

static const char* RCS_ID = "$Id: event.c 4535 2011-09-26 22:14:19Z voeckler $";

static sig_atomic_t seen_sigpipe; /* defaults to 0 */

static SIGRETTYPE sig_pipe( int signo )
{
  seen_sigpipe = 1;
}

extern int isExtended;
extern int isLocal;

ssize_t send_message( int outfd, char* msg, ssize_t msize, unsigned channel )
/* purpose: sends an XML-encoded message chunk back to the application
 * paramtr: outfd (IN): output file descriptor, writable (STDERR_FILENO)
 *          msg (IN): pointer to message
 *          msize (IN): length of message content
 *          channel (IN): which channel to send upon (0 - app)
 */
{
  int locked;
#if 0
  size_t i;
#endif
  size_t len = 0;
  size_t size = msize + 256;
  char* buffer = (char*) malloc(size);
  struct timeval t;
  if ( buffer == NULL ) {
    errno = ENOMEM;
    return -1;
  }

  now( &t );
  myprint( buffer, size, &len,
           "<chunk channel=\"%u\" size=\"%ld\" start=\"", channel, msize );
  mydatetime( buffer, size, &len, isLocal, isExtended, t.tv_sec, t.tv_usec );
  append( buffer, size, &len, "\"><![CDATA[" );
#if 1
  full_append( buffer, size, &len, msg, msize );
#else
  for ( i=0; i<msize; ++i ) {
    switch ( msg[i] ) {
    case '\'': append( buffer, size, &len, "&apos;" ); break;
    case '"': append( buffer, size, &len, "&quot;" ); break;
    case '>': append( buffer, size, &len, "&gt;" ); break;
    case '&': append( buffer, size, &len, "&amp;" ); break;
    case '<': append( buffer, size, &len, "&lt;" ); break;
    default:
      if ( len < size ) {
        buffer[len++] = msg[i];
        buffer[len] = 0;
      }
    }
  }
#endif
  append( buffer, size, &len, "]]></chunk>\n" );

  /* atomic write, bracketed by POSIX locks (also forces NFS updates) */
  locked = mytrylock(outfd);
  msize = writen( outfd, buffer, len, 3 );
  if ( locked==1 ) lockit( outfd, F_SETLK, F_UNLCK );

  free( (void*) buffer );
  return msize;
}

#ifdef MUST_USE_SELECT_NOT_POLL
int poll_via_select( struct pollfd* fds, unsigned nfds, long timeout )
/* purpose: emulate poll() through select() <yikes!>
 * warning: this is an incomplete and very simplified emulation!
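 *          Known gaps: only POLLIN/POLLRDNORM are mapped to the read and
 *          except sets, write events are silently dropped, and descriptors
 *          at or above FD_SETSIZE cannot be handled by select() at all.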
 * paramtr: see poll() arguments -- however, this only handles read events!
 * returns: return value from select()
 */
{
  struct timeval tv = { timeout / 1000, (timeout % 1000) * 1000 };
  fd_set rfds, efds;
  unsigned i;
  int status;
  int max = 0;

  FD_ZERO( &rfds );
  FD_ZERO( &efds );
  for ( i = 0; i < nfds; ++i ) {
    if ( fds[i].events & ( POLLIN | POLLRDNORM ) && fds[i].fd != -1 ) {
      FD_SET( fds[i].fd, &rfds );
      FD_SET( fds[i].fd, &efds );
      if ( fds[i].fd >= max ) max = fds[i].fd+1;
      fds[i].revents = 0;
    }
  }

  if ( (status = select( max, &rfds, NULL, NULL, &tv )) > 0 ) {
    for ( i = 0; i < nfds; ++i ) {
      if ( fds[i].fd != -1 ) {
        if ( FD_ISSET( fds[i].fd, &rfds ) ) fds[i].revents |= POLLIN;
        if ( FD_ISSET( fds[i].fd, &efds ) ) fds[i].revents |= POLLERR;
      }
    }
  }

  return status;
}
#endif /* MUST_USE_SELECT_NOT_POLL */

int eventLoop( int outfd, StatInfo* fifo, volatile sig_atomic_t* terminate )
/* purpose: copy from input file(s) to output fd while not interrupted.
 * paramtr: outfd (IN): output file descriptor, ready for writing.
 *          fifo (IO): contains input fd, and maintains statistics.
 *          terminate (IN): volatile flag, set in signal handlers.
 * returns: -1 in case of error, 0 for o.k.
 *          -3 for a severe interruption of poll()
 */
{
  size_t count, bufsize = getpagesize();
  int timeout = 30000;
  int result = 0;
  int saverr, status = 0;
  int mask = POLLIN | POLLERR | POLLHUP | POLLNVAL;
  char* rbuffer;
  struct pollfd pfds;
  struct sigaction old_pipe, new_pipe;

  /* sanity checks first */
  if ( outfd == -1 || fifo->source != IS_FIFO ) return 0;

  /* prepare poll fds */
  pfds.fd = fifo->file.descriptor;
  pfds.events = POLLIN;

  /* become aware of SIGPIPE for write failures */
  memset( &new_pipe, 0, sizeof(new_pipe) );
  memset( &old_pipe, 0, sizeof(old_pipe) );
  new_pipe.sa_handler = sig_pipe;
  sigemptyset( &new_pipe.sa_mask );
#ifdef SA_INTERRUPT
  new_pipe.sa_flags |= SA_INTERRUPT; /* SunOS, obsoleted by POSIX */
#endif
  seen_sigpipe = 0; /* ATLAS 20050331: clear previous failures */
  if ( sigaction( SIGPIPE, &new_pipe, &old_pipe ) < 0 ) return -1;

  /* allocate read buffer */
  if ( (rbuffer = (char*) malloc( bufsize )) == NULL ) return -1;

#ifdef DEBUG_EVENTLOOP
  fputs( "# starting event loop\n", stderr );
#endif /* DEBUG_EVENTLOOP */

  /* poll (may have been interrupted by SIGCHLD) */
  for ( count=0; 1; count++ ) {
    /* race condition possible, thus we MUST time out */
    /* However, we MUST transfer everything that is waiting */
    if ( *terminate || seen_sigpipe ) {
      timeout = 0;
    } else if ( count < 5 ) {
      timeout = 200;
    } else if ( count < 15 ) {
      timeout = 1000;
    } else {
      timeout = 30000;
    }

    pfds.revents = 0;
#ifdef DEBUG_EVENTLOOP
    debugmsg( "# tm=%d, s_sp=%d, calling poll([%d:%x:%x],%d,%d)\n",
              *terminate, seen_sigpipe,
              pfds.fd, pfds.events, pfds.revents, 1, timeout );
#endif /* DEBUG_EVENTLOOP */
    errno = 0;
#ifdef MUST_USE_SELECT_NOT_POLL
    status = poll_via_select( &pfds, 1, timeout );
#else
    status = poll( &pfds, 1, timeout );
#endif /* MUST_USE_SELECT_NOT_POLL */
    saverr = errno;
#ifdef DEBUG_EVENTLOOP
    debugmsg( "# poll() returned %d [errno=%d: %s] [%d:%x:%x]\n",
              status, saverr, strerror(saverr),
              pfds.fd, pfds.events, pfds.revents );
#endif /* DEBUG_EVENTLOOP */
    errno = saverr;

    if ( status == -1 ) {
      /* poll ERR */
      if ( errno != EINTR ) {
        /* not an interruption */
        result = -3;
        break;
      }
    } else if ( status == 0 ) {
      /* timeout -- only exit, if we were wrapping up anyway!
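       * The ladder above shrinks the timeout to 0 ms once *terminate or
       * seen_sigpipe is set, so any pending data is drained and the loop
       * exits; otherwise it backs off from 200 ms over 1 s to a 30 s
       * steady state.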
*/ if ( timeout == 0 ) break; } else if ( status > 0 ) { /* poll OK */ if ( (pfds.revents & mask) > 0 ) { ssize_t rsize = read( pfds.fd, rbuffer, bufsize-1 ); if ( rsize == -1 ) { /* ERR */ if ( errno != EINTR ) { result = -1; break; } } else if ( rsize == 0 ) { /* EOF */ result = 0; break; } else { /* data */ ssize_t wsize; rbuffer[rsize] = '\0'; if ( (wsize = send_message( outfd, rbuffer, rsize, 1 )) == -1 ) { /* we'll be unable to send anything further */ result = -1; break; } else { /* update statistics */ fifo->client.fifo.count++; fifo->client.fifo.rsize += rsize; fifo->client.fifo.wsize += wsize; } } } /* if pfds mask */ } /* if status > 0 */ } /* forever */ sigaction( SIGPIPE, &old_pipe, NULL ); free( (void*) rbuffer ); return result; } ������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/jobinfo.h����������������������������������������0000644�0001750�0001750�00000006147�11757531137�023236� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _JOBINFO_H #define _JOBINFO_H #include <time.h> #include <sys/types.h> #include <sys/time.h> #include <sys/resource.h> #include "statinfo.h" #ifdef USE_MEMINFO #include "meminfo.h" #endif /* USE_MEMINFO */ typedef struct { /* private */ int isValid; /* 0: uninitialized, 1:valid, 2:app not found */ char* copy; /* buffer for argument separation */ /* public */ char* const* argv; /* application executable and arguments */ int argc; /* application CLI number of arguments */ StatInfo executable; /* stat() info for executable, if available */ struct timeval start; /* point of time that app was started */ struct timeval finish; /* point of time that app was reaped */ pid_t child; /* pid of process that ran application */ int status; /* raw exit status of application */ int saverr; /* errno for status < 0 */ char* prefix; /* prefix to error message for status < 0 */ struct rusage use; /* rusage record from reaping application status */ #ifdef USE_MEMINFO MemInfo peakmem; /* maximum memory usage during lifetime */ #endif /* USE_MEMINFO */ } JobInfo; extern void initJobInfo( JobInfo* jobinfo, int argc, char* const* argv ); /* purpose: initialize the data structure with defaults. * paramtr: appinfo (OUT): initialized memory block * argc (IN): adjusted argument count * argv (IN): adjusted argument vector to point to app. 
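 * example: minimal pairing with deleteJobInfo() below (an illustrative
 *          sketch only; the argument strings are invented):
 *            char* const args[] = { "/bin/date", "-u", NULL };
 *            JobInfo job;
 *            initJobInfo( &job, 2, args );
 *            ...start and reap the job...
 *            deleteJobInfo( &job );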
*/ extern void initJobInfoFromString( JobInfo* jobinfo, const char* commandline ); /* purpose: initialize the data structure with default * paramtr: jobinfo (OUT): initialized memory block * commandline (IN): commandline concatenated string to separate */ extern int printXMLJobInfo( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const JobInfo* job ); /* purpose: format the job information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * job (IN): job info to print. * returns: number of characters put into buffer (buffer length) */ extern void deleteJobInfo( JobInfo* jobinfo ); /* purpose: destructor * paramtr: runinfo (IO): valid JobInfo structure to destroy. */ #endif /* _JOBINFO_H */ �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine.c����������������������������������������0000644�0001750�0001750�00000005224�11757531137�023202� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "machine.h" static const char* RCS_ID = "$Id$"; #ifdef LINUX #define __MFLAG 1 #include "machine/linux.h" #endif /* LINUX */ #ifdef DARWIN #define __MFLAG 2 #include "machine/darwin.h" #endif /* DARWIN */ #ifdef SUNOS #define __MFLAG 3 #include "machine/sunos.h" #include <memory.h> #endif /* SUNOS */ #ifndef __MFLAG #include "machine/basic.h" #endif /* unknown */ #ifdef EXTRA_DEBUG #include <stdio.h> #endif /* EXTRA_DEBUG */ #include <string.h> #include "debug.h" void initMachineInfo( MachineInfo* machine ) /* purpose: initialize the data structure. * paramtr: machine (OUT): initialized MachineInfo structure. 
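 * example: typical lifecycle of the three entry points in this file
 *          (an illustrative sketch; the buffer size is arbitrary):
 *            MachineInfo mi;
 *            char buf[4096]; size_t blen = 0;
 *            initMachineInfo( &mi );
 *            printXMLMachineInfo( buf, sizeof(buf), &blen, 2, "machine", &mi );
 *            deleteMachineInfo( &mi );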
*/ { /* initialize virtual method table */ #ifdef __MFLAG machine->ctor = initMachine; machine->show = printMachine; machine->dtor = deleteMachine; #else machine->ctor = initBasicMachine; machine->show = printBasicMachine; machine->dtor = deleteBasicMachine; #endif /* __MFLAG */ /* call constructor on data */ machine->data = machine->ctor(); } int printXMLMachineInfo( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const MachineInfo* machine ) /* purpose: format the job information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * machine (IN): machine info to print. * returns: number of characters put into buffer (buffer length) */ { /* sanity check */ if ( machine && machine->show && machine->data ) machine->show( buffer, size, len, indent, tag, machine->data ); return *len; } void deleteMachineInfo( MachineInfo* machine ) /* purpose: destructor * paramtr: machine (IO): valid MachineInfo structure to destroy. */ { #ifdef EXTRA_DEBUG debugmsg( "# deleteMachineInfo(%p)\n", machine ); #endif machine->dtor( machine->data ); memset( machine, 0, sizeof(MachineInfo) ); } ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/invoke.h�����������������������������������������0000644�0001750�0001750�00000004474�11757531137�023104� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _INVOKE_H #define _INVOKE_H #include <sys/types.h> extern int append_arg( char* data, char*** arg, size_t* index, size_t* capacity ); /* purpose: adds a string to a list of arguments * This is a low-level function, use add_arg instead. 
* paramtr: data (IN): string to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * returns: 0 means ok, -1 means error, see errno * warning: Always creates a strdup of data */ extern int expand_arg( const char* fn, char*** arg, size_t* index, size_t* capacity, int level ); /* purpose: adds the contents of a file, line by line, to an argument vector * This is a low-level function, use add_arg instead. * paramtr: fn (IN): name of file with contents to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * level (IN): level of recursion * returns: 0 means ok, -1 means error, see errno */ extern int add_arg( char* data, char*** arg, size_t* index, size_t* capacity, int level ); /* purpose: sorts a given full argument string, whether to add or extend * This is the high-level interface to previous functions. * paramtr: data (IN): string to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * level (IN): level of recursion, use 1 * returns: 0 means ok, -1 means error, see errno */ #endif /* _INVOKE_H */ ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/show-if.c����������������������������������������0000644�0001750�0001750�00000002470�11757531137�023152� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ #include <sys/types.h> #include <ctype.h> #include <errno.h> #include <stdio.h> #include <time.h> #include <sys/time.h> #include <sys/resource.h> #include <sys/wait.h> #include <signal.h> #include <unistd.h> #include <fcntl.h> #include <string.h> #include <stdlib.h> #include "getif.h" static const char* RCS_ID = "$Id: show-if.c 2217 2010-07-28 18:30:19Z voeckler $"; int main( int argc, char* argv[] ) { int result = 0; char abuffer[128], ibuffer[128]; if ( argc > 1 ) { getif_debug = atoi(argv[1]); } else { getif_debug = -1; } whoami( abuffer, sizeof(abuffer), ibuffer, sizeof(ibuffer) ); printf( "primary interface %s has address %s\n", ibuffer, abuffer ); return result; } ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/test-ascii.c�������������������������������������0000644�0001750�0001750�00000000306�11757531137�023637� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <stdio.h> int main( int argc, char* argv[] ) { int i; for ( i=0; i<256; ++i ) { printf( "i=%-3d (%02x) %c\n", i, i, i ); if ( (i & 15) == 15 ) putchar('\n'); } return 0; } ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/�����������������������������������������������0000755�0001750�0001750�00000000000�11757531667�021702� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/alarmme.c��������������������������������������0000644�0001750�0001750�00000000614�11757531137�023455� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <sys/types.h> #include <signal.h> #include <stdlib.h> #include <unistd.h> #include <stdio.h> volatile sig_atomic_t boo; static void sig_alarm( int signo ) { boo = 1; } int main( int argc, char* argv[] ) { if ( signal( SIGALRM, sig_alarm ) == SIG_ERR ) { perror( "signal(SIGALRM)" ); return 1; } alarm(1); while ( ! 
boo ) { usleep(50000); } return 0; } ��������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/exitcodes.sh�����������������������������������0000644�0001750�0001750�00000000452�11757531137�024216� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/sh # # test all exit codes from 0 .. 127 # # $Id: exitcodes.sh 4923 2012-02-14 20:17:18Z voeckler $ # TEMPFILE=`mktemp` || exit 1 i=0 while [[ $i -lt 128 ]]; do ../pegasus-kickstart /bin/sh -c "exit $i" > $TEMPFILE fgrep '<status ' $TEMPFILE i=$(( $i + 1 )) done rm $TEMPFILE ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/fifo5nolf.sh�����������������������������������0000755�0001750�0001750�00000000626�11757531137�024124� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/sh # # $Id: fifo5nolf.sh 4927 2012-02-14 21:27:39Z voeckler $ # # sanity check if [ "X$GRIDSTART_CHANNEL" = 'X' ]; then echo 'Warning: no feedback channel - using stdout' 1>&2 GRIDSTART_CHANNEL=/dev/fd/1 test -w $GRIDSTART_CHANNEL || exit 42 fi i=0 while [[ $i -lt 5 ]]; do i=$(( $i + 1 )) /bin/echo -n "testing #$i and <&>... " >> $GRIDSTART_CHANNEL sleep 1 done exit 0 ����������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/threadme.c�������������������������������������0000644�0001750�0001750�00000001516�11757531137�023632� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <sys/types.h> #include <stdio.h> #include <string.h> #include <unistd.h> #include <pthread.h> #include <stdlib.h> void* doit( void* x ) { long id = ((long) x); flockfile(stdout); printf( "I am thread %ld\n", id ); funlockfile(stdout); return NULL; } int main( int argc, char* argv[] ) { long i; int status, max = ( argc > 1 ? 
atoi(argv[1]) : 3 ); pthread_t* t = calloc( max, sizeof(pthread_t) ); for ( i=0; i<max; ++i ) { if ( (status = pthread_create( t+i, NULL, doit, (void*) i )) ) { fprintf( stderr, "Error: pthread_create(%ld): %s\n", i, strerror(status) ); } } for ( i=0; i<max; ++i ) { if ( (status = pthread_join( t[i], NULL )) ) { fprintf( stderr, "Error: pthread_join(%ld): %s\n", i, strerror(status) ); } } free((void*) t); return 0; } ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/threadach.c������������������������������������0000644�0001750�0001750�00000002236�11757531137�023764� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <sys/types.h> #include <stdio.h> #include <string.h> #include <unistd.h> #include <pthread.h> #include <stdlib.h> #include <time.h> void* doit( void* x ) { long id = ((long) x); usleep(50000 + (rand() & 0x1FFFF) ); flockfile(stdout); printf( "I am thread %ld\n", id ); funlockfile(stdout); return NULL; } int main( int argc, char* argv[] ) { long i; int status, max = ( argc > 1 ? atoi(argv[1]) : 3 ); pthread_t* t = calloc( max, sizeof(pthread_t) ); pthread_attr_t attr; srand( time(NULL) ); status = pthread_attr_init( &attr ); if ( status ) { fprintf( stderr, "Error: pthread_attr_init: %s\n", strerror(status) ); return 1; } status = pthread_attr_setdetachstate( &attr, PTHREAD_CREATE_DETACHED ); if ( status ) { fprintf( stderr, "Error: pthread_attr_setdetachstate: %s\n", strerror(status) ); return 1; } for ( i=0; i<max; ++i ) { if ( (status = pthread_create( t+i, &attr, doit, (void*) i )) ) { fprintf( stderr, "Error: pthread_create(%ld): %s\n", i, strerror(status) ); } } sleep(1); free((void*) t); pthread_attr_destroy(&attr); return 0; } ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/dienow.c���������������������������������������0000644�0001750�0001750�00000000413�11757531137�023321� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <sys/types.h> #include <signal.h> #include <stdlib.h> #include <unistd.h> int main( int argc, char* argv[] ) { int which = argc > 1 ? 
atoi(argv[1]) : SIGTERM; if ( which < 0 || which > 64 ) which = SIGTERM; kill( getpid(), which ); return 127; } �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/run-programs.sh��������������������������������0000644�0001750�0001750�00000002162�11757531137�024663� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/sh # # run a number of tests # if ! make all; then echo "FATAL: Unable to create all helper programs" 1>&2 exit 1 fi if perl -MXML::Twig -le 'print $XML::Twig::VERSION' 2>&1 >> /dev/null; then xml_grep=`type -p xml_grep` else xml_grep=':' fi OUTFILE=`mktemp` || exit 1 ERRFILE=`mktemp` || exit 1 trap 'rm -f $OUTFILE $ERRFILE' 0 kickstart=../pegasus-kickstart for try in hello forkme grandfather threadme threadach alarmme \ fifo5lf.sh fifo5nolf.sh; do echo '+---------------------------------------------------------+' printf "| %-55s |\n" "`date -Ins` $try" echo '+---------------------------------------------------------+' $kickstart $try > $OUTFILE 2> $ERRFILE rc=$? echo "kickstart itself returned with exit code $rc" $xml_grep --nowrap 'invocation/statcall[@id="stdout"]/data' $OUTFILE $xml_grep --nowrap 'invocation/statcall[@id="stderr"]/data' $OUTFILE $xml_grep --nowrap 'invocation/mainjob/status' $OUTFILE if [ -s $ERRFILE ]; then # something happened echo "--- &< stderr &< ---" cat $ERRFILE echo "--- &< stderr &< ---" fi echo '' done ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/grandfather.c����������������������������������0000644�0001750�0001750�00000000764�11757531137�024332� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <sys/types.h> #include <stdio.h> #include <unistd.h> #include <sys/wait.h> int main( int argc, char* argv[] ) { switch ( fork() ) { case -1: perror( "fork" ); return 1; case 0: switch( fork() ) { case -1: perror("fork"); return 2; case 0: puts( "grand-child" ); return 0; default: puts( "child" ); return ( wait(NULL) == -1 ); } default: puts( "parent" ); return ( wait(NULL) == -1 ); } return 0; } ������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/forkme.c���������������������������������������0000644�0001750�0001750�00000000505�11757531137�023321� 0����������������������������������������������������������������������������������������������������ustar 
�rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <sys/types.h> #include <stdio.h> #include <unistd.h> #include <sys/wait.h> int main( int argc, char* argv[] ) { switch ( fork() ) { case -1: perror( "fork" ); return 1; case 0: puts( "child" ); return 0; default: puts( "parent" ); return ( wait(NULL) == -1 ); } return 0; } �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/fifo5lf.sh�������������������������������������0000755�0001750�0001750�00000000621�11757531137�023562� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/sh # # $Id: fifo5lf.sh 4927 2012-02-14 21:27:39Z voeckler $ # # sanity check if [ "X$GRIDSTART_CHANNEL" = 'X' ]; then echo 'Warning: no feedback channel - using stdout' 1>&2 GRIDSTART_CHANNEL=/dev/fd/1 test -w $GRIDSTART_CHANNEL || exit 42 fi i=0 while [[ $i -lt 5 ]]; do i=$(( $i + 1 )) /bin/echo "testing #$i and <&>... " >> $GRIDSTART_CHANNEL sleep 1 done exit 0 ���������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/Makefile���������������������������������������0000644�0001750�0001750�00000004311�11757531137�023331� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������CC = gcc LD = $(CC) SYSTEM = $(shell uname -s | tr '[a-z]' '[A-Z]' | tr -d '_ -/') MARCH = $(shell uname -m | tr '[A-Z]' '[a-z]') LFS_CFLAGS = -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE LFS_LDFLAGS = ifeq (DARWIN,${SYSTEM}) SDK=$(lastword $(sort $(wildcard /Developer/SDKs/MacOSX10.*.sdk))) ifneq (,${SDK}) CFLAGS := -g -pipe -arch x86_64 -arch i686 -nostdinc \ -B${SDK}/usr/include/gcc -B${SDK}/usr/lib/gcc \ -isystem${SDK}/usr/include -F${SDK}/System/Library/Frameworks LDFLAGS := -arch x86_64 -arch i686 -Wl,-syslibroot,${SDK} CFLAGS += -DMACHINE_SPECIFIC=darwin endif endif ifeq (SUNOS,${SYSTEM}) CC = cc V7FLAGS = -xtarget=generic V9FLAGS = -xtarget=ultra -xarch=v9 CFLAGS := -DSOLARIS '-library=%none,Cstd,Crun' -dalign -ftrap=%none -fsimple -xlibmil $(EXTRACFLAGS) -xO4 -D__EXTENSIONS__=1 LD := $(CC) $(EXTRACFLAGS) LFS_CFLAGS = $(shell getconf LFS_CFLAGS 2>>/dev/null) LFS_LDFLAGS = $(shell getconf LFS_LDFLAGS 2>>/dev/null) CFLAGS += -DMACHINE_SPECIFIC=sunos endif ifeq (LINUX,${SYSTEM}) CFLAGS = -Wall -ggdb -O ifeq (ia64,${MARCH}) CFLAGS += -m64 # is this true? 
endif
ifeq (x86_64,${MARCH})
CFLAGS += -m64
endif
ifeq (armv7l,${MARCH})
endif
ifeq (i386,${MARCH})
CFLAGS += -m32
endif
ifeq (i686,${MARCH})
CFLAGS += -m32
endif
LFS_CFLAGS = $(shell getconf LFS_CFLAGS 2>>/dev/null)
LFS_LDFLAGS = $(shell getconf LFS_LDFLAGS 2>>/dev/null)
CFLAGS += -DMACHINE_SPECIFIC=linux
endif

#
# no changes
#
CFLAGS += -D${SYSTEM} $(LFS_CFLAGS) -DMARCH=${MARCH}
LDFLAGS += $(LFS_LDFLAGS)

OBJS = alarmme.o dienow.o hello.o forkme.o \
	grandfather.o threadme.o threadach.o
APPS = $(OBJS:%.o=%)

%.o : %.c
	$(CC) $(CFLAGS) $< -c -o $@

% : %.o
	$(LD) $(LDFLAGS) $< -o $@ $(LOADLIBES)

all: $(APPS)

debug.o: debug.c debug.h

alarmme: alarmme.o
	$(LD) $(LDFLAGS) $< -o $@ $(LOADLIBES)
dienow: dienow.o
	$(LD) $(LDFLAGS) $< -o $@ $(LOADLIBES)
hello: hello.o
	$(LD) $(LDFLAGS) $< -o $@ $(LOADLIBES)
forkme: forkme.o
	$(LD) $(LDFLAGS) $< -o $@ $(LOADLIBES)
grandfather: grandfather.o
	$(LD) $(LDFLAGS) $< -o $@ $(LOADLIBES)
threadme: threadme.o
	$(LD) $(LDFLAGS) $^ -o $@ $(LOADLIBES) -lpthread
threadach: threadach.o
	$(LD) $(LDFLAGS) $^ -o $@ $(LOADLIBES) -lpthread

clean:
	$(RM) $(OBJS)

distclean: clean
	$(RM) $(APPS)

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/hello.c

#include <stdio.h>

int main( int argc, char* argv[] )
{
  puts( "Hello world" );
  return 0;
}

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/t/failself.sh

#!/bin/sh
#
# $Id: failself.sh 4924 2012-02-14 20:17:38Z voeckler $
#
if [ "X$1" = 'X' ]; then
    exit 1
else
    kill -$1 $$
fi
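
# usage note: "failself.sh TERM" makes the script send the named signal to
# itself, so a caller (presumably the kickstart tests) can observe a
# signal-terminated child; without an argument it merely exits 1.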
����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/�����������������������������������������0000755�0001750�0001750�00000000000�11757531667�023043� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/sol-print-swap.c�������������������������0000644�0001750�0001750�00000006267�11757531137�026111� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <sys/types.h> #include <sys/stat.h> #include <sys/swap.h> #include <stdio.h> #include <stdlib.h> #include <unistd.h> #define MAXSTRSIZE 80 char* sizer( char* buffer, size_t capacity, size_t vsize, const void* value ) /* purpose: format an unsigned integer of less-known size. Note that * 64bit ints on 32bit systems need %llu, but 64/64 uses %lu * paramtr: buffer (IO): area to output into * capacity (IN): extent of the buffer to store things into * vsize (IN): size of the value * value (IN): value to format * warning: only for 32bit and 64bit platforms * returns: buffer */ { switch ( vsize ) { case 2: snprintf( buffer, capacity, "%hu", *((const short unsigned*) value) ); break; case 4: if ( sizeof(long) == 4 ) snprintf( buffer, capacity, "%lu", *((const long unsigned*) value) ); else snprintf( buffer, capacity, "%u", *((const unsigned*) value) ); break; case 8: if ( sizeof(long) == 4 ) { snprintf( buffer, capacity, "%llu", *((const long long unsigned*) value) ); } else { snprintf( buffer, capacity, "%lu", *((const long unsigned*) value) ); } break; default: snprintf( buffer, capacity, "unknown" ); break; } return buffer; } int main( int argc, char* argv[] ) { swaptbl_t *s; int i, n, num; char b[32], *strtab; /* string table for path names */ again: if ((num = swapctl(SC_GETNSWP, 0)) == -1) { perror("swapctl: GETNSWP"); exit(1); } if (num == 0) { fprintf(stderr, "No Swap Devices Configured\n"); exit(2); } /* allocate swaptable for num+1 entries */ if ((s = (swaptbl_t *) malloc(num * sizeof(swapent_t) + sizeof(struct swaptable))) == (void *) 0) { fprintf(stderr, "Malloc Failed\n"); exit(3); } /* allocate num+1 string holders */ if ((strtab = (char *) malloc((num + 1) * MAXSTRSIZE)) == (void *) 0) { fprintf(stderr, "Malloc Failed\n"); exit(3); } /* initialize string pointers */ for (i = 0; i < (num + 1); i++) { s->swt_ent[i].ste_path = strtab + (i * MAXSTRSIZE); } s->swt_n = num + 1; if ((n = swapctl(SC_LIST, s)) < 0) { perror("swapctl"); exit(1); } if (n > num) { /* more were added */ free(s); free(strtab); goto again; } for (i = 0; i < n; i++) { double tmp; struct swapent* e = s->swt_ent+i; printf( "DEVICE 
%s\n", e->ste_path ); printf( "\tstarting block for swapping : %s\n", sizer( b, 32, sizeof(e->ste_start), &e->ste_start ) ); tmp = e->ste_length / ( 1048576.0 / 512 ); printf( "\tlength of swap area in blocks: %s (%.1f MB)\n", sizer( b, 32, sizeof(e->ste_length), &e->ste_length ), tmp ); tmp = e->ste_pages / ( 1048576.0 / getpagesize() ); printf( "\tnumbers of pages for swapping: %s (%.1f MB)\n", sizer( b, 32, sizeof(e->ste_pages), &e->ste_pages ), tmp ); tmp = e->ste_free / ( 1048576.0 / getpagesize() ); printf( "\tnumbers of ste_pages free : %s (%.1f MB)\n", sizer( b, 32, sizeof(e->ste_free), &e->ste_free ), tmp ); } return 0; } �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/sunos.c����������������������������������0000644�0001750�0001750�00000022213�11757531137�024346� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "basic.h" #include "sunos.h" #include "sunos-swap.h" #include "../tools.h" #include "../debug.h" #include <unistd.h> #include <string.h> #include <stdio.h> #include <stdlib.h> #include <sys/stat.h> #if defined(_LP64) || _FILE_OFFSET_BITS != 64 #include <sys/swap.h> #endif #include <sys/sysinfo.h> #include <sys/param.h> /* FSCALE */ #include <kstat.h> static const char* RCS_ID = "$Id$"; extern int isExtended; /* timestamp format concise or extended */ extern int isLocal; /* timestamp time zone, UTC or local */ static void assign( void* dst, size_t size, kstat_named_t* knp ) { switch ( size ) { case 4: // 32 bit target switch ( knp->data_type ) { case KSTAT_DATA_INT32: case KSTAT_DATA_UINT32: *((uint32_t*) dst) = knp->value.ui32; break; case KSTAT_DATA_INT64: case KSTAT_DATA_UINT64: *((uint32_t*) dst) = (uint32_t) ( knp->value.ui64 & 0xFFFFFFFFFull ); break; } break; case 8: // 64 bit target switch ( knp->data_type ) { case KSTAT_DATA_INT32: *((int64_t*) dst) = knp->value.i32; break; case KSTAT_DATA_UINT32: *((uint64_t*) dst) = knp->value.ui32; break; case KSTAT_DATA_INT64: case KSTAT_DATA_UINT64: *((uint64_t*) dst) = knp->value.ui64; break; } break; } } void* initMachine( void ) /* purpose: initialize the data structure. * returns: initialized MachineSunosInfo structure. 
*/ { uint64_t pagesize = getpagesize(); kstat_ctl_t* kc; /* kernel statistics context handle */ MachineSunosInfo* p = (MachineSunosInfo*) malloc(sizeof(MachineSunosInfo)); /* extra sanity check */ if ( p == NULL ) { fputs( "initMachine c'tor failed\n", stderr ); return NULL; } else memset( p, 0, sizeof(MachineSunosInfo) ); /* name of this provider -- overwritten by importers */ p->basic = initBasicMachine(); p->basic->provider = "sunos"; gather_sunos_swap( &p->swap_total, &p->swap_free ); gather_sunos_proc( &p->ps_total, &p->ps_good, &p->ps_thr_active, &p->ps_thr_zombie, &p->ps_size, &p->ps_rss ); /* access kernel statistics API * run /usr/sbin/kstat -p to see most of the things available. */ if ( (kc = kstat_open()) != NULL ) { kstat_t* ksp; size_t j; /* iterate over kernel statistics chain, module by module */ for ( ksp = kc->kc_chain; ksp != NULL; ksp = ksp->ks_next ) { if ( strcmp( ksp->ks_module, "cpu_stat" ) == 0 ) { /* * module == "cpu_stat" */ cpu_stat_t cpu; if ( kstat_read( kc, ksp, &cpu ) != -1 ) { int i; cpu_sysinfo_t* si = &cpu.cpu_sysinfo; for ( i=0; i<CPU_STATES; ++i ) p->cpu_state[i] += si->cpu[i]; } } else if ( strcmp( ksp->ks_module, "cpu_info" ) == 0 ) { kstat_read( kc, ksp, NULL ); /* * module == "cpu_info" */ p->cpu_count++; for ( j=0; j < ksp->ks_ndata; ++j ) { kstat_named_t* knp = ((kstat_named_t*) ksp->ks_data) + j; if ( strcmp( knp->name, "state" ) == 0 ) { if ( knp->data_type == KSTAT_DATA_CHAR && strcmp( knp->value.c, "on-line") == 0 || knp->data_type == KSTAT_DATA_STRING && strcmp( KSTAT_NAMED_STR_PTR(knp), "on-line" ) == 0 ) p->cpu_online++; } else if ( strcmp( knp->name, "clock_MHz" ) == 0 ) { assign( &p->megahertz, sizeof(p->megahertz), knp ); } else if ( strcmp( knp->name, "brand" ) == 0 ) { strncpy( p->brand_id, ( knp->data_type == KSTAT_DATA_STRING ? KSTAT_NAMED_STR_PTR(knp) : knp->value.c ), sizeof(p->brand_id) ); } else if ( strcmp( knp->name, "cpu_type" ) == 0 ) { strncpy( p->cpu_type, ( knp->data_type == KSTAT_DATA_STRING ? KSTAT_NAMED_STR_PTR(knp) : knp->value.c ), sizeof(p->cpu_type) ); } else if ( strcmp( knp->name, "implementation" ) == 0 ) { strncpy( p->model_name, ( knp->data_type == KSTAT_DATA_STRING ? 
KSTAT_NAMED_STR_PTR(knp) : knp->value.c ), sizeof(p->model_name) ); } } /* for j */ } else if ( strcmp( ksp->ks_module, "unix" ) == 0 ) { if ( strcmp( ksp->ks_name, "system_misc" ) == 0 ) { double scale = (FSCALE); kstat_read( kc, ksp, NULL ); /* * module == "unix" && name == "system_misc" */ for ( j=0; j < ksp->ks_ndata; ++j ) { kstat_named_t* knp = ((kstat_named_t*) ksp->ks_data) + j; if ( strcmp( knp->name, "avenrun_1min" ) == 0 ) { p->load[0] = knp->value.ui32 / scale; } else if ( strcmp( knp->name, "avenrun_5min" ) == 0 ) { p->load[1] = knp->value.ui32 / scale; } else if ( strcmp( knp->name, "avenrun_15min" ) == 0 ) { p->load[2] = knp->value.ui32 / scale; } else if ( strcmp( knp->name, "boot_time" ) == 0 ) { p->boottime = (time_t) knp->value.ui32; } else if ( strcmp( knp->name, "nproc" ) == 0 ) { p->pid_total = knp->value.ui32; } } /* for j */ } else if ( strcmp( ksp->ks_name, "system_pages" ) == 0 ) { /* * module == "unix" && name == "system_pages" */ kstat_read( kc, ksp, NULL ); for ( j=0; j < ksp->ks_ndata; ++j ) { kstat_named_t* knp = ((kstat_named_t*) ksp->ks_data) + j; if ( strcmp( knp->name, "physmem" ) == 0 ) { assign( &p->ram_avail, sizeof(p->ram_avail), knp ); p->ram_avail *= pagesize; } else if ( strcmp( knp->name, "freemem" ) == 0 ) { assign( &p->ram_free, sizeof(p->ram_free), knp ); p->ram_free *= pagesize; } } } } /* module == "unix" */ } /* for */ kstat_close(kc); } return p; } int printMachine( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const void* data ) /* purpose: format the information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * data (IN): MachineSunosInfo info to print. 
* returns: number of characters put into buffer (buffer length) */ { char b[2][32]; const MachineSunosInfo* ptr = (const MachineSunosInfo*) data; /* sanity check */ if ( ptr == NULL ) return *len; /* start basic info */ startBasicMachine( buffer, size, len, indent+2, tag, ptr->basic ); /* <ram> element */ myprint( buffer, size, len, "%*s<ram", indent+2, "" ); #ifdef _SC_PHYS_PAGES myprint( buffer, size, len, " total=\"%s\"", sizer( b[0], 32, sizeof(ptr->basic->ram_total), &(ptr->basic->ram_total) ) ); #endif /* _SC_PHYS_PAGES */ myprint( buffer, size, len, " avail=\"%s\" free=\"%s\"/>\n", sizer( b[0], 32, sizeof(ptr->ram_avail), &(ptr->ram_avail) ), sizer( b[1], 32, sizeof(ptr->ram_free), &(ptr->ram_free) ) ); /* <swap> element -- only in 64bit environments */ myprint( buffer, size, len, "%*s<swap total=\"%s\" free=\"%s\"/>\n", indent+2, "", sizer( b[0], 32, sizeof(ptr->swap_total), &(ptr->swap_total) ), sizer( b[1], 32, sizeof(ptr->swap_free), &(ptr->swap_free) ) ); /* <boot> element */ myprint( buffer, size, len, "%*s<boot>", indent+2, "" ); mydatetime( buffer, size, len, isLocal, isExtended, ptr->boottime, -1 ); append( buffer, size, len, "</boot>\n" ); /* <cpu> element */ myprint( buffer, size, len, "%*s<cpu count=\"%hu\" online=\"%hu\" speed=\"%lu\"" " type=\"%s\" brand=\"%s\">%s</cpu>\n", indent+2, "", ptr->cpu_count, ptr->cpu_online, ptr->megahertz, ptr->cpu_type, ptr->brand_id, ptr->model_name ); /* load average data */ myprint( buffer, size, len, "%*s<load min1=\"%.2f\" min5=\"%.2f\" min15=\"%.2f\"/>\n", indent+2, "", ptr->load[0], ptr->load[1], ptr->load[2] ); /* <proc> element */ myprint( buffer, size, len, "%*s<proc total=\"%u\" found=\"%u\" size=\"%s\" rss=\"%s\"/>\n", indent+2, "", ptr->pid_total, ptr->ps_good, sizer( b[0], 32, sizeof(ptr->ps_size), &ptr->ps_size ), sizer( b[1], 32, sizeof(ptr->ps_rss), &ptr->ps_rss ) ); /* <lwp> element */ myprint( buffer, size, len, "%*s<lwp active=\"%u\" zombie=\"%u\"/>\n", indent+2, "", ptr->ps_thr_active, ptr->ps_thr_zombie ); /* finish tag */ finalBasicMachine( buffer, size, len, indent+2, tag, ptr->basic ); return *len; } void deleteMachine( void* data ) /* purpose: destructor * paramtr: data (IO): valid MachineSunosInfo structure to destroy. 
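 * note: reached through the vtable as machine->dtor(machine->data) from
 *       deleteMachineInfo() in machine.c; it releases the embedded basic
 *       record before freeing the SunOS-specific structure itself.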
 */
{
  MachineSunosInfo* ptr = (MachineSunosInfo*) data;

#ifdef EXTRA_DEBUG
  fprintf( stderr, "# deleteSunosMachineInfo(%p)\n", data );
#endif

  if ( ptr ) {
    deleteBasicMachine( ptr->basic );
    free((void*) ptr);
  }
}

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/darwin-showmib.c

#include <sys/sysctl.h>
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>   /* getpid() */
#include "../debug.h"

int main( int argc, char* argv[] )
{
  int i, mib[4];

  if ( argc == 1 || argc > 5 ) {
    fprintf( stderr, "Usage: %s mib0 [mib1 [mib2 [mib3]]]\n", argv[0] );
    return 0;
  } else {
    size_t len, m = argc - 1;
    for ( i=1; i<argc; ++i )
      mib[i-1] = strcmp(argv[i],"self") ? atoi( argv[i] ) : getpid();

    if ( sysctl( mib, m, NULL, &len, NULL, 0 ) == -1 ) {
      fprintf( stderr, "sysctl %d", mib[0] );
      for ( i=1; i<m; ++i ) fprintf( stderr, ":%d", mib[i] );
      fprintf( stderr, ": %s\n", strerror(errno) );
      return 1;
    } else {
      void* buffer = malloc(len);
      if ( sysctl( mib, m, buffer, &len, NULL, 0 ) == -1 ) {
        fprintf( stderr, "sysctl %d", mib[0] );
        for ( i=1; i<m; ++i ) fprintf( stderr, ":%d", mib[i] );
        fprintf( stderr, ": %s\n", strerror(errno) );
        free(buffer);
        return 1;
      } else {
        hexdump( buffer, len );
      }
      free(buffer);
    }
  }
  return 0;
}

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/darwin-thr.c

#include <sys/types.h>
#include <sys/wait.h>
#include <mach/task.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <mach/mach.h>
#include <mach/host_info.h>
#include <mach/mach_host.h>
#include <mach/mach_vm.h>
#include <mach/vm_statistics.h>
#include <math.h>        /* fmod */
#include <sys/sysctl.h>  /* FSCALE */

natural_t basic_info( mach_port_t port )
{
  host_basic_info_data_t bi;
  mach_msg_type_number_t ic = HOST_BASIC_INFO_COUNT;
  host_info( port, HOST_BASIC_INFO, (host_info_t) &bi, &ic );
  puts(
"\nHOST_BASIC_INFO" ); printf( "max number of CPUs possible %d\n", bi.max_cpus ); printf( "number of CPUs now available %d\n", bi.avail_cpus ); printf( "size of memory in bytes (2GB cap) %lu\n", bi.memory_size ); printf( "actual size of physical memory %llu\n", bi.max_mem ); printf( "CPU type %d\n", bi.cpu_type ); printf( "CPU sub-type %d\n", bi.cpu_subtype ); printf( "CPU thread-type %d\n", bi.cpu_threadtype ); printf( "available physical CPUs %d\n", bi.physical_cpu ); printf( "maximum physical CPUs %d\n", bi.physical_cpu_max ); printf( "available logical CPUs %d\n", bi.logical_cpu ); printf( "maximum logical CPUs %d\n", bi.logical_cpu_max ); return bi.max_cpus; } void host_load_info( mach_port_t port ) { static const char* state[3] = { "user", "system", "idle" }; int i; host_load_info_data_t li; mach_msg_type_number_t ic = HOST_LOAD_INFO_COUNT; host_statistics( port, HOST_LOAD_INFO, (host_info_t) &li, &ic ); puts( "\nHOST_LOAD_INFO" ); for ( i=0; i<3; ++i ) { printf( "state %-6s average number of runnable processes divided by CPUs %ld\n", state[i], li.avenrun[i] ); printf( "state %-6s processing resources avail. to new threads (mach factor) %ld\n", state[i], li.mach_factor[i] ); } } static const char* cpu_state[CPU_STATE_MAX] = { "user", "system", "idle", "nice" }; void host_cpu_load_info( mach_port_t port, long ticks, natural_t cpus ) { int i; double up, sum = 0.0; host_cpu_load_info_data_t li; mach_msg_type_number_t ic = HOST_CPU_LOAD_INFO_COUNT; host_statistics( port, HOST_CPU_LOAD_INFO, (host_info_t) &li, &ic ); puts( "\nHOST_CPU_LOAD_INFO" ); for ( i=0; i<CPU_STATE_MAX; ++i ) sum += li.cpu_ticks[i]; for ( i=0; i<CPU_STATE_MAX; ++i ) printf( "tick sum in state %-6s %10ld (%.1f %%)\n", cpu_state[i], li.cpu_ticks[i], (100.0 * li.cpu_ticks[i]) / sum ); /* * Note: the difference between boot time and uptime is the * amount that the system was put to sleep (laptop lid close) */ up = (sum / (ticks * cpus) ) ; printf( "uptime %.0fd%02.0f:%02.0f:%05.2f\n", up / 86400, fmod(up,86400) / 3600, fmod(up,3600) / 60, fmod(up,60) ); } void host_vm_info( mach_port_t port ) { vm_statistics_data_t vm; mach_msg_type_number_t ic = HOST_VM_INFO_COUNT; double f = getpagesize() / 1048576.0; host_statistics( port, HOST_VM_INFO, (host_info_t) &vm, &ic ); puts( "\nHOST_VM_INFO" ); printf( "free %8ld %7.2f MB\n", vm.free_count, vm.free_count * f ); printf( "active %8ld %7.2f MB\n", vm.active_count, vm.active_count * f ); printf( "inactive %8ld %7.2f MB\n", vm.inactive_count, vm.inactive_count * f ); printf( "wired down %8ld %7.2f MB\n", vm.wire_count, vm.wire_count * f ); printf( "# of zero fill pages %ld\n", vm.zero_fill_count ); printf( "# of reactivated pages %ld\n", vm.reactivations ); printf( "# of requests from a pager (pageins) %ld\n", vm.pageins ); printf( "# of pageouts %ld\n", vm.pageouts ); printf( "# of times vm_fault was called %ld\n", vm.faults ); printf( "# of copy-on-write faults %ld\n", vm.cow_faults ); printf( "object cache lookups %ld (%.0f %% hit-rate)\n", vm.lookups, (100.0 * vm.hits) / vm.lookups ); } #ifdef CPUINFO void cpu_info( host_name_port_t port, long ticks ) /* * warning: This method requires sudo privileges for host_get_host_priv_port() */ { host_priv_t host_priv; kern_return_t kr; processor_port_array_t processor_list; natural_t i, j, processor_count, info_count; puts( "\nHOST_PROCESSORS" ); if ( (kr = host_get_host_priv_port( port, &host_priv )) != KERN_SUCCESS ) { mach_error( "host_get_host_priv_port:", kr ); return ; } if ( (kr = host_processors( host_priv, &processor_list, 
&processor_count )) != KERN_SUCCESS ) { mach_error( "host_processors:", kr ); return ; } for ( i=0; i < processor_count; ++i ) { processor_basic_info_data_t bi; processor_cpu_load_info_data_t li; double up, sum = 0.0; info_count = PROCESSOR_BASIC_INFO_COUNT; if ( (kr=processor_info( processor_list[i], PROCESSOR_BASIC_INFO, &port, (processor_info_t) &bi, &info_count )) == KERN_SUCCESS ) { printf( "CPU: slot %d%s, %srunning, type %d, subtype %d\n", bi.slot_num, ( bi.is_master ? " (master)" : "" ), ( bi.running ? "" : "not " ), bi.cpu_type, bi.cpu_subtype ); } info_count = PROCESSOR_CPU_LOAD_INFO_COUNT; if ( (kr=processor_info( processor_list[i], PROCESSOR_CPU_LOAD_INFO, &port, (processor_info_t)&li, &info_count)) == KERN_SUCCESS ) { sum = 0.0; for ( j=0; j<CPU_STATE_MAX; ++j ) sum += li.cpu_ticks[j]; for ( j=0; j<CPU_STATE_MAX; ++j ) printf( " %s %ld (%.1f %%)", cpu_state[j], li.cpu_ticks[j], (100.0 * li.cpu_ticks[j]) / sum ); up = (sum / ticks); printf( "\n uptime %.0fd%02.0f:%02.0f:%05.2f\n", up / 86400, fmod(up,86400) / 3600, fmod(up,3600) / 60, fmod(up,60) ); } } /* for */ vm_deallocate( mach_task_self(), (vm_address_t) processor_list, processor_count * sizeof(processor_t*) ); } #endif /* CPUINFO */ int main( int argc, char* argv[] ) { mach_port_t self = mach_host_self(); long ticks = sysconf(_SC_CLK_TCK); natural_t cpus = basic_info( self ); host_load_info( self ); host_cpu_load_info( self, ticks, cpus ); host_vm_info( self ); #ifdef CPUINFO cpu_info( self, ticks ); #endif return 0; } �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/linux.c����������������������������������0000644�0001750�0001750�00000040051�11757531137�024336� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/
#include "basic.h"
#include "linux.h"
#include "../tools.h"
#include "../debug.h"
#include <ctype.h>
#include <errno.h>
#include <math.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>
#include <dirent.h>
#include <signal.h> /* signal names */
#include <sys/sysinfo.h>

static const char* RCS_ID = "$Id$";

extern int isExtended; /* timestamp format concise or extended */
extern int isLocal;    /* timestamp time zone, UTC or local */

static uint64_t unscale( unsigned long value, char scale )
{
  uint64_t result = value;
  switch ( scale ) {
  case 'B': /* just bytes */ break;
  case 'k': result <<= 10; break;
  case 'M': result <<= 20; break;
  case 'G': result <<= 30; break;
  }
  return result;
}

static void parse_status_file( const char* fn, LinuxStatus* status )
{
  char line[256];
  FILE* f;

  if ( (f = fopen( fn, "r" )) ) {
    while ( fgets( line, sizeof(line), f ) ) {
      if ( strncmp( line, "State:", 6 ) == 0 ) {
        char* s = line+7;
        while ( *s && isspace(*s) ) ++s;
        switch ( *s ) {
        case 'R': status->state[S_RUNNING]++; break;
        case 'S': status->state[S_SLEEPING]++; break;
        case 'D': status->state[S_WAITING]++; break;
        case 'T': status->state[S_STOPPED]++; break;
        case 'Z': status->state[S_ZOMBIE]++; break;
        default: status->state[S_OTHER]++; break;
        }
      } else if ( line[0] == 'V' ) {
        unsigned long value;
        char scale[8]; /* %4s stores up to 4 chars plus NUL -- scale[4] overflowed */
        if ( strncmp( line, "VmSize:", 7 ) == 0 ) {
          char* s = line+8;
          while ( *s && isspace(*s) ) ++s;
          sscanf( s, "%lu %4s", &value, scale );
          status->size += unscale( value, scale[0] );
        } else if ( strncmp( line, "VmRSS:", 6 ) == 0 ) {
          char* s = line+7;
          while ( *s && isspace(*s) ) ++s;
          sscanf( s, "%lu %4s", &value, scale );
          status->rss += unscale( value, scale[0] );
        }
      }
    }
    fclose(f);
#ifdef DEBUG_PROCFS
  } else {
    fprintf( stderr, "open %s: %s\n", fn, strerror(errno) );
#endif
  }
}

#if 0 /*** currently unused ***/
static void addtoinfo( LinuxStatus* io, LinuxStatus* summand )
{
  LinuxState i;

  io->size += summand->size;
  io->rss += summand->rss;
  io->total += summand->total;
  for ( i=0; i < MAX_STATE; ++i ) {
    io->state[i] += summand->state[i];
  }
}
#endif /*** unused ***/

void gather_linux_proc26( LinuxStatus* procs, LinuxStatus* tasks )
/* purpose: collect proc information on Linux 2.6 kernel
 * paramtr: procs (OUT): aggregation on process level
 *          tasks (OUT): aggregation on task level
 */
{
  struct dirent* dp;
  struct dirent* dt;
  DIR* taskdir;
  DIR* procdir;

  /* assume procfs is mounted at /proc */
  if ( (procdir=opendir("/proc")) ) {
    char procinfo[128];
    while ( (dp = readdir(procdir)) ) {
      /* real proc files start with digit in 2.6 */
      if ( isdigit(dp->d_name[0]) ) {
        procs->total++;
        snprintf( procinfo, sizeof(procinfo), "/proc/%s/task", dp->d_name );
        if ( (taskdir=opendir(procinfo)) ) {
          while ( (dt = readdir(taskdir)) ) {
            if ( isdigit(dt->d_name[0]) ) {
              char taskinfo[128];
              tasks->total++;
              snprintf( taskinfo, sizeof(taskinfo), "%s/%s/status",
                        procinfo, dt->d_name );
              parse_status_file( taskinfo, tasks );
            }
          }
          closedir(taskdir);
#ifdef DEBUG_PROCFS
        } else {
          fprintf( stderr, "opendir %s: %s\n", procinfo, strerror(errno) );
#endif
        }
        snprintf( procinfo, sizeof(procinfo), "/proc/%s/status", dp->d_name );
        parse_status_file( procinfo, procs );
      }
    }
    closedir(procdir);
#ifdef DEBUG_PROCFS
  } else {
    perror( "opendir /proc" );
#endif
  }
}
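/*
 * Usage sketch (not part of the original file): parse_status_file() can be
 * pointed at any status pseudo-file; aggregating over /proc/self/status is
 * a handy smoke test.  Guarded out of the build; the function name is an
 * illustrative assumption, the LinuxStatus fields come from linux.h above.
 */
#ifdef EXAMPLE_SELF_STATUS
static void self_status_example( void )
{
  LinuxStatus self;
  memset( &self, 0, sizeof(self) );
  parse_status_file( "/proc/self/status", &self );
  printf( "self: vmsize=%llu rss=%llu\n",
          (unsigned long long) self.size, (unsigned long long) self.rss );
}
#endif /* EXAMPLE_SELF_STATUS */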
static void parse_stat_file( const char* fn, LinuxStatus* proc, LinuxStatus* task )
{
  char line[256];
  FILE* f;

  if ( (f = fopen( fn, "r" )) ) {
    if ( fgets( line, sizeof(line), f ) ) {
      pid_t pid, ppid;
      char state;
      unsigned long flags, vmsize, text, stack;
      signed long rss;
      int exitsignal, notatask = 0;

      sscanf( line,
              "%d %*s %c %d %*d %*d %*d %*d %lu %*u "    /*  1 - 10 */
              "%*u %*u %*u %*u %*u %*d %*d %*d %*d %*d " /* 11 - 20 */
              "%*d %*u %lu %ld %*u %lu %*u %lu %*u %*u " /* 21 - 30 */
              "%*u %*u %*u %*u %*u %*u %*u %d %*d %*d "  /* 31 - 40 */
#if 0
              "%*d %*d %*d %*d %*d"                      /* 41 - 45 */
#endif
              , &pid, &state, &ppid, &flags
              , &vmsize, &rss, &text, &stack
              , &exitsignal /* SIGCHLD == normal process
                             * SIGRTxx == threaded task */
              );
      rss *= getpagesize();

      if ( exitsignal == SIGCHLD ) {
        /* regular process */
        notatask = 1;
      } else if ( exitsignal == SIGRTMIN ) {
        /* Do we need to check ancient LinuxThreads, which on 2.0 kernels
         * were forced to use SIGUSR1 and SIGUSR2 for communication? */
        /* regular thread */
        notatask = 0;
      } else if ( exitsignal == 0 ) {
        if ( text == 0 && stack == 0 ) {
          /* kernel magic task -- count as process */
          notatask = 1;
        } else {
          /* thread manager task -- count as thread except (init) */
          notatask = ( ppid == 0 );
        }
      }

      switch ( state ) {
      case 'R': task->state[S_RUNNING]++;
        if ( notatask ) proc->state[S_RUNNING]++;
        break;
      case 'S': task->state[S_SLEEPING]++;
        if ( notatask ) proc->state[S_SLEEPING]++;
        break;
      case 'D': task->state[S_WAITING]++;
        if ( notatask ) proc->state[S_WAITING]++;
        break;
      case 'T': task->state[S_STOPPED]++;
        if ( notatask ) proc->state[S_STOPPED]++;
        break;
      case 'Z': task->state[S_ZOMBIE]++;
        if ( notatask ) proc->state[S_ZOMBIE]++;
        break;
      default: task->state[S_OTHER]++;
        if ( notatask ) proc->state[S_OTHER]++;
        break;
      }

      task->size += vmsize; if ( notatask ) proc->size += vmsize;
      task->rss += rss; if ( notatask ) proc->rss += rss;
      task->total++; if ( notatask ) proc->total++;
    }
    fclose(f);
#ifdef DEBUG_PROCFS
  } else {
    fprintf( stderr, "open %s: %s\n", fn, strerror(errno) );
#endif
  }
}

void gather_linux_proc24( LinuxStatus* procs, LinuxStatus* tasks )
/* purpose: collect proc information on Linux 2.4 kernel
 * paramtr: procs (OUT): aggregation on process level
 *          tasks (OUT): aggregation on task level
 * grmblftz Linux uses multiple schemes for threads/tasks.
 */
{
  struct dirent* dp;
  DIR* procdir;

  /* assume procfs is mounted at /proc */
  if ( (procdir=opendir("/proc")) ) {
    char procinfo[128];
    while ( (dp = readdir(procdir)) ) {
      /* real processes start with digit, tasks *may* start with dot-digit */
      if ( isdigit(dp->d_name[0]) ||
           ( dp->d_name[0] == '.' && isdigit(dp->d_name[1]) ) ) {
        snprintf( procinfo, sizeof(procinfo), "/proc/%s/stat", dp->d_name );
        parse_stat_file( procinfo, procs, tasks );
      }
    }
    closedir(procdir);
#ifdef DEBUG_PROCFS
  } else {
    perror( "opendir /proc" );
#endif
  }
}

void gather_loadavg( float load[3] )
/* purpose: collect load averages
 * primary: provide functionality for monitoring
 * paramtr: load (OUT): array of 3 floats
 */
{
  FILE* f = fopen( "/proc/loadavg", "r" );
  if ( f != NULL ) {
    fscanf( f, "%f %f %f", load+0, load+1, load+2 );
    fclose(f);
  }
}
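/*
 * Usage sketch (not part of the original file): gather_loadavg() fills the
 * same three values that /proc/loadavg and uptime(1) report.  Guarded out
 * so it does not affect the build; the function name is illustrative only.
 */
#ifdef EXAMPLE_LOADAVG
static void print_loadavg_example( void )
{
  float load[3] = { 0.0f, 0.0f, 0.0f };
  gather_loadavg( load );
  printf( "load averages: %.2f (1 min) %.2f (5 min) %.2f (15 min)\n",
          load[0], load[1], load[2] );
}
#endif /* EXAMPLE_LOADAVG */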
void gather_meminfo( uint64_t* ram_total, uint64_t* ram_free,
                     uint64_t* ram_shared, uint64_t* ram_buffer,
                     uint64_t* swap_total, uint64_t* swap_free )
/* purpose: collect system-wide memory usage
 * primary: provide functionality for monitoring
 * paramtr: ram_total (OUT): all RAM
 *          ram_free (OUT): free RAM
 *          ram_shared (OUT): unused?
 *          ram_buffer (OUT): RAM used for buffers by kernel
 *          swap_total (OUT): all swap space
 *          swap_free (OUT): free swap space
 */
{
  struct sysinfo si; /* remaining information */

  if ( sysinfo(&si) != -1 ) {
    uint64_t pagesize = si.mem_unit;
    *ram_total  = si.totalram  * pagesize;
    *ram_free   = si.freeram   * pagesize;
    *ram_shared = si.sharedram * pagesize;
    *ram_buffer = si.bufferram * pagesize;
    *swap_total = si.totalswap * pagesize;
    *swap_free  = si.freeswap  * pagesize;
  }
}

static void gather_proc_uptime( struct timeval* boottime, double* idletime )
{
  FILE* f = fopen( "/proc/uptime", "r" );
  if ( f != NULL ) {
    double uptime, r, sec;
    struct timeval tv;
    now( &tv );
    fscanf( f, "%lf %lf", &uptime, idletime );
    fclose(f);
    r = ( tv.tv_sec + tv.tv_usec * 1E-6 ) - uptime;
    boottime->tv_sec = sec = (time_t) floor(r);
    boottime->tv_usec = (time_t) floor(1E6 * (r - sec));
  }
}

static void gather_proc_cpuinfo( MachineLinuxInfo* machine )
{
  FILE* f = fopen( "/proc/cpuinfo", "r" );
  if ( f != NULL ) {
    char line[256];
    while ( fgets( line, 256, f ) ) {
      if ( *(machine->vendor_id) == 0 &&
           strncmp( line, "vendor_id", 9 ) == 0 ) {
        char* s = strchr( line, ':' )+1;
        char* d = machine->vendor_id;
        while ( *s && isspace(*s) ) ++s;
        /* leave room for the terminating NUL */
        while ( *s && ! isspace(*s) &&
                d - machine->vendor_id < sizeof(machine->vendor_id) - 1 )
          *d++ = *s++;
        *d = 0;
      } else if ( *(machine->model_name) == 0 &&
                  strncmp( line, "model name", 10 ) == 0 ) {
        char* s = strchr( line, ':' )+2;
        char* d = machine->model_name;
        /* bound every write, leaving room for the terminating NUL */
        while ( *s && d - machine->model_name < sizeof(machine->model_name) - 1 ) {
          while ( *s && ! isspace(*s) &&
                  d - machine->model_name < sizeof(machine->model_name) - 1 )
            *d++ = *s++;
          if ( *s && *s == ' ' &&
               d - machine->model_name < sizeof(machine->model_name) - 1 )
            *d++ = *s++;
          while ( *s && isspace(*s) ) ++s;
        }
        *d = 0;
      } else if ( machine->megahertz == 0.0 &&
                  strncmp( line, "cpu MHz", 7 ) == 0 ) {
        char* s = strchr( line, ':' )+2;
        float mhz;
        sscanf( s, "%f", &mhz );
        machine->megahertz = (unsigned long) (mhz + 0.5);
      } else if ( strncmp( line, "processor", 9 ) == 0 ) {
        machine->cpu_count += 1;
      }
    }
    fclose(f);
  }
}

static unsigned long extract_version( const char* release )
/* purpose: extract a.b.c version from release string, ignoring extra junk
 * paramtr: release (IN): pointer to kernel release string (with junk)
 * returns: integer representation of a version
 *          version := major * 1,000,000 + minor * 1,000 + patch
 */
{
  unsigned major = 0;
  unsigned minor = 0;
  unsigned patch = 0;
  sscanf( release, "%u.%u.%u", &major, &minor, &patch );
  return major * 1000000ul + minor * 1000 + patch;
}

/*
 * --------------------------------------------------------------
 */
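/*
 * Sanity sketch for extract_version() (not part of the original file):
 * the encoding is major*1,000,000 + minor*1,000 + patch, and anything
 * after the third numeric component is ignored, so the range checks in
 * initMachine() below compare plain integers.  Guarded out of the build;
 * the function name is illustrative only.
 */
#ifdef EXAMPLE_VERSION_CHECK
#include <assert.h>
static void extract_version_selfcheck( void )
{
  assert( extract_version( "2.6.18-194.el5" ) == 2006018ul );
  assert( extract_version( "3.2.0-4-amd64" ) == 3002000ul );
}
#endif /* EXAMPLE_VERSION_CHECK */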
void* initMachine( void )
/* purpose: initialize the data structure.
 * returns: initialized MachineLinuxInfo structure.
 */
{
  unsigned long version;
  MachineLinuxInfo* p = (MachineLinuxInfo*) malloc(sizeof(MachineLinuxInfo));

  /* extra sanity check */
  if ( p == NULL ) {
    fputs( "initMachine c'tor failed\n", stderr );
    return NULL;
  } else memset( p, 0, sizeof(MachineLinuxInfo) );

  /* name of this provider -- overwritten by importers */
  p->basic = initBasicMachine();
  p->basic->provider = "linux";

  gather_meminfo( &p->ram_total, &p->ram_free, &p->ram_shared,
                  &p->ram_buffer, &p->swap_total, &p->swap_free );
  gather_loadavg( p->load );
  gather_proc_cpuinfo( p );
  gather_proc_uptime( &p->boottime, &p->idletime );

  version = extract_version( p->basic->uname.release );
  /* PM-571: We are safe including 3.2 series */
  if ( version >= 2006000 && version <= 3002999 ) {
    gather_linux_proc26( &p->procs, &p->tasks );
  } else if ( version >= 2004000 && version <= 2004999 ) {
    gather_linux_proc24( &p->procs, &p->tasks );
  } else {
    fprintf( stderr, "Info: Kernel v%lu.%lu.%lu is not supported for proc stats gathering\n",
             version / 1000000, (version % 1000000) / 1000, version % 1000 );
  }

  return p;
}

int printMachine( char* buffer, size_t size, size_t* len, size_t indent,
                  const char* tag, const void* data )
/* purpose: format the information into the given buffer as XML.
 * paramtr: buffer (IO): area to store the output in
 *          size (IN): capacity of character area
 *          len (IO): current position within area, will be adjusted
 *          indent (IN): indentation level
 *          tag (IN): name to use for element tags.
 *          data (IN): MachineLinuxInfo info to print.
 * returns: number of characters put into buffer (buffer length)
 */
{
  static const char* c_state[MAX_STATE] =
    { "running", "sleeping", "waiting", "stopped", "zombie", "other" };
  char b[4][32];
  const MachineLinuxInfo* ptr = (const MachineLinuxInfo*) data;
  LinuxState s;

  /* sanity check */
  if ( ptr == NULL ) return *len;

  /* start basic info */
  startBasicMachine( buffer, size, len, indent+2, tag, ptr->basic );

  /* <ram .../> tag */
  myprint( buffer, size, len,
           "%*s<ram total=\"%s\" free=\"%s\" shared=\"%s\" buffer=\"%s\"/>\n",
           indent+2, "",
           sizer( b[0], 32, sizeof(ptr->ram_total), &(ptr->ram_total) ),
           sizer( b[1], 32, sizeof(ptr->ram_free), &(ptr->ram_free) ),
           sizer( b[2], 32, sizeof(ptr->ram_shared), &(ptr->ram_shared) ),
           sizer( b[3], 32, sizeof(ptr->ram_buffer), &(ptr->ram_buffer) ) );

  /* <swap .../> tag */
  myprint( buffer, size, len,
           "%*s<swap total=\"%s\" free=\"%s\"/>\n", indent+2, "",
           sizer( b[0], 32, sizeof(ptr->swap_total), &(ptr->swap_total) ),
           sizer( b[1], 32, sizeof(ptr->swap_free), &(ptr->swap_free) ) );

  /* <boot> element */
  myprint( buffer, size, len, "%*s<boot idle=\"%.3f\">", indent+2, "",
           ptr->idletime );
  mydatetime( buffer, size, len, isLocal, isExtended,
              ptr->boottime.tv_sec, ptr->boottime.tv_usec );
  append( buffer, size, len, "</boot>\n" );

  /* <cpu> element */
  myprint( buffer, size, len,
           "%*s<cpu count=\"%hu\" speed=\"%lu\" vendor=\"%s\">%s</cpu>\n",
           indent+2, "", ptr->cpu_count, ptr->megahertz,
           ptr->vendor_id, ptr->model_name );

  /* <load> element */
  myprint( buffer, size, len,
           "%*s<load min1=\"%.2f\" min5=\"%.2f\" min15=\"%.2f\"/>\n",
           indent+2, "", ptr->load[0], ptr->load[1], ptr->load[2] );

  if ( ptr->procs.total && ptr->tasks.total ) {
    /* <proc> element */
    myprint( buffer, size, len, "%*s<proc total=\"%u\"", indent+2, "",
             ptr->procs.total );
    for ( s=S_RUNNING; s<=S_OTHER; ++s ) {
      if ( ptr->procs.state[s] )
        myprint( buffer, size, len, " %s=\"%hu\"",
                 c_state[s], ptr->procs.state[s] );
    }
    myprint( buffer, size, len, " vmsize=\"%s\" rss=\"%s\"/>\n",
             sizer( b[0], 32, sizeof(ptr->procs.size), &ptr->procs.size ),
             sizer( b[1], 32, sizeof(ptr->procs.rss), &ptr->procs.rss ) );

    /* <task> element */
    myprint( buffer, size, len, "%*s<task total=\"%u\"", indent+2, "",
             ptr->tasks.total );
    for ( s=S_RUNNING; s<=S_OTHER; ++s ) {
      if ( ptr->tasks.state[s] )
        myprint( buffer, size, len, " %s=\"%hu\"",
                 c_state[s], ptr->tasks.state[s] );
    }
#if 0
    /* does not make sense for threads, since they share memory */
    myprint( buffer, size, len, " vmsize=\"%s\" rss=\"%s\"/>\n",
             sizer( b[0], 32, sizeof(ptr->tasks.size), &ptr->tasks.size ),
             sizer( b[1], 32, sizeof(ptr->tasks.rss), &ptr->tasks.rss ) );
#else
    append( buffer, size, len, "/>\n" );
#endif
  }

  /* finish tag */
  finalBasicMachine( buffer, size, len, indent+2, tag, ptr->basic );

  return *len;
}
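/*
 * Lifecycle sketch (not part of the original file): how the three entry
 * points of this provider compose.  Kickstart itself drives these callbacks
 * from its machine-info layer; the buffer size, stdout use, and function
 * name here are illustrative assumptions, and the prototypes are assumed
 * to be in scope via the headers included above.
 */
#ifdef EXAMPLE_MACHINE_DUMP
static void machine_dump_example( void )
{
  char buffer[8192];
  size_t len = 0;
  void* m = initMachine();
  if ( m != NULL ) {
    printMachine( buffer, sizeof(buffer), &len, 2, "machine", m );
    fwrite( buffer, 1, len, stdout );
    deleteMachine( m );
  }
}
#endif /* EXAMPLE_MACHINE_DUMP */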
&ptr->procs.size ), sizer( b[1], 32, sizeof(ptr->procs.rss), &ptr->procs.rss ) ); /* <task> element */ myprint( buffer, size, len, "%*s<task total=\"%u\"", indent+2, "", ptr->tasks.total ); for ( s=S_RUNNING; s<=S_OTHER; ++s ) { if ( ptr->tasks.state[s] ) myprint( buffer, size, len, " %s=\"%hu\"", c_state[s], ptr->tasks.state[s] ); } #if 0 /* does not make sense for threads, since they share memory */ myprint( buffer, size, len, " vmsize=\"%s\" rss=\"%s\"/>\n", sizer( b[0], 32, sizeof(ptr->tasks.size), &ptr->tasks.size ), sizer( b[1], 32, sizeof(ptr->tasks.rss), &ptr->tasks.rss ) ); #else append( buffer, size, len, "/>\n" ); #endif } /* finish tag */ finalBasicMachine( buffer, size, len, indent+2, tag, ptr->basic ); return *len; } void deleteMachine( void* data ) /* purpose: destructor * paramtr: data (IO): valid MachineLinuxInfo structure to destroy. */ { MachineLinuxInfo* ptr = (MachineLinuxInfo*) data; #ifdef EXTRA_DEBUG fprintf( stderr, "# deleteLinuxMachineInfo(%p)\n", data ); #endif if ( ptr ) { deleteBasicMachine( ptr->basic ); free((void*) ptr); } } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/sol-kstat.txt����������������������������0000644�0001750�0001750�00002033056�11757531137�025526� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������audiocs:0:audiocs:class controller audiocs:0:audiocs:crtime 116.0824768 audiocs:0:audiocs:hard 0 audiocs:0:audiocs:multiple_service 0 audiocs:0:audiocs:snaptime 8464512.3536208 audiocs:0:audiocs:soft 0 audiocs:0:audiocs:spurious 0 audiocs:0:audiocs:watchdog 0 caps:0:lockedmem_project_0:class project_caps caps:0:lockedmem_project_0:crtime 69.262928 caps:0:lockedmem_project_0:snaptime 8464512.3543884 caps:0:lockedmem_project_0:usage 2490368 caps:0:lockedmem_project_0:value 18446744073709551615 caps:0:lockedmem_project_0:zonename global caps:0:lockedmem_project_3:class project_caps caps:0:lockedmem_project_3:crtime 7700910.4664434 caps:0:lockedmem_project_3:snaptime 8464512.3547684 caps:0:lockedmem_project_3:usage 0 caps:0:lockedmem_project_3:value 18446744073709551615 caps:0:lockedmem_project_3:zonename global caps:0:lockedmem_zone_0:class zone_caps caps:0:lockedmem_zone_0:crtime 69.2630424 caps:0:lockedmem_zone_0:snaptime 8464512.355134 caps:0:lockedmem_zone_0:usage 2490368 caps:0:lockedmem_zone_0:value 18446744073709551615 caps:0:lockedmem_zone_0:zonename global caps:0:swapresv_zone_0:class zone_caps caps:0:swapresv_zone_0:crtime 69.263055 caps:0:swapresv_zone_0:snaptime 8464512.3555954 caps:0:swapresv_zone_0:usage 79896576 caps:0:swapresv_zone_0:value 18446744073709551615 caps:0:swapresv_zone_0:zonename global conskbd:0:activity:class misc conskbd:0:activity:crtime 84.4719678 conskbd:0:activity:idle_sec 140742 conskbd:0:activity:snaptime 8464512.3560052 
consms:0:activity:class misc consms:0:activity:crtime 84.4775058 consms:0:activity:idle_sec 140746 consms:0:activity:snaptime 8464512.356315 cpu:0:intrstat:class misc cpu:0:intrstat:crtime 82.766642 cpu:0:intrstat:level-1-count 1054632106 cpu:0:intrstat:level-1-time 6127438052090 cpu:0:intrstat:level-10-count 846444289 cpu:0:intrstat:level-10-time 6537902760843 cpu:0:intrstat:level-11-count 0 cpu:0:intrstat:level-11-time 0 cpu:0:intrstat:level-12-count 4 cpu:0:intrstat:level-12-time 166363 cpu:0:intrstat:level-13-count 311189 cpu:0:intrstat:level-13-time 218807019 cpu:0:intrstat:level-14-count 858448188 cpu:0:intrstat:level-14-time 4216362111494 cpu:0:intrstat:level-15-count 0 cpu:0:intrstat:level-15-time 0 cpu:0:intrstat:level-2-count 0 cpu:0:intrstat:level-2-time 0 cpu:0:intrstat:level-3-count 0 cpu:0:intrstat:level-3-time 0 cpu:0:intrstat:level-4-count 65089522 cpu:0:intrstat:level-4-time 556848559853 cpu:0:intrstat:level-5-count 11 cpu:0:intrstat:level-5-time 87571 cpu:0:intrstat:level-6-count 15 cpu:0:intrstat:level-6-time 472231 cpu:0:intrstat:level-7-count 0 cpu:0:intrstat:level-7-time 0 cpu:0:intrstat:level-8-count 0 cpu:0:intrstat:level-8-time 0 cpu:0:intrstat:level-9-count 2539589723 cpu:0:intrstat:level-9-time 4174845210930 cpu:0:intrstat:snaptime 8464512.3567392 cpu:0:sys:bawrite 829 cpu:0:sys:bread 830 cpu:0:sys:bwrite 5865 cpu:0:sys:canch 343 cpu:0:sys:class misc cpu:0:sys:cpu_load_intr 52 cpu:0:sys:cpu_nsec_idle 8412812345501659 cpu:0:sys:cpu_nsec_intr 3242042018128600 cpu:0:sys:cpu_nsec_kernel 35851385881471 cpu:0:sys:cpu_nsec_user 15785939451470 cpu:0:sys:cpu_ticks_idle 841281234 cpu:0:sys:cpu_ticks_kernel 3585138 cpu:0:sys:cpu_ticks_user 1578593 cpu:0:sys:cpu_ticks_wait 0 cpu:0:sys:cpumigrate 16737418 cpu:0:sys:crtime 82.7665792 cpu:0:sys:idlethread 271360429 cpu:0:sys:intr 5364515047 cpu:0:sys:intrblk 9412 cpu:0:sys:intrthread 4505755666 cpu:0:sys:inv_swtch 784638 cpu:0:sys:iowait 0 cpu:0:sys:lread 294769 cpu:0:sys:lwrite 29973 cpu:0:sys:mdmint 0 cpu:0:sys:modload 119 cpu:0:sys:modunload 0 cpu:0:sys:msg 0 cpu:0:sys:mutex_adenters 1063657 cpu:0:sys:namei 12640989 cpu:0:sys:nthreads 223821 cpu:0:sys:outch 2577554 cpu:0:sys:phread 175 cpu:0:sys:phwrite 0 cpu:0:sys:procovf 0 cpu:0:sys:pswitch 554310394 cpu:0:sys:rawch 10538 cpu:0:sys:rcvint 0 cpu:0:sys:readch 1430464179 cpu:0:sys:rw_rdfails 51 cpu:0:sys:rw_wrfails 124 cpu:0:sys:sema 0 cpu:0:sys:snaptime 8464512.3585422 cpu:0:sys:syscall 142365970 cpu:0:sys:sysexec 14286 cpu:0:sys:sysfork 89080 cpu:0:sys:sysread 15368158 cpu:0:sys:sysvfork 916 cpu:0:sys:syswrite 8209071 cpu:0:sys:trap 42200687 cpu:0:sys:ufsdirblk 22122 cpu:0:sys:ufsiget 17640 cpu:0:sys:ufsinopage 0 cpu:0:sys:ufsipage 0 cpu:0:sys:wait_ticks_io 0 cpu:0:sys:writech 736780774 cpu:0:sys:xcalls 63577549 cpu:0:sys:xmtint 0 cpu:0:vm:anonfree 0 cpu:0:vm:anonpgin 0 cpu:0:vm:anonpgout 0 cpu:0:vm:as_fault 26791736 cpu:0:vm:class misc cpu:0:vm:cow_fault 15493953 cpu:0:vm:crtime 82.7665992 cpu:0:vm:dfree 6678 cpu:0:vm:execfree 1 cpu:0:vm:execpgin 6526 cpu:0:vm:execpgout 1 cpu:0:vm:fsfree 6677 cpu:0:vm:fspgin 21632 cpu:0:vm:fspgout 8348 cpu:0:vm:hat_fault 0 cpu:0:vm:kernel_asflt 0 cpu:0:vm:maj_fault 12471 cpu:0:vm:pgfrec 569757 cpu:0:vm:pgin 13549 cpu:0:vm:pgout 5567 cpu:0:vm:pgpgin 28158 cpu:0:vm:pgpgout 8349 cpu:0:vm:pgrec 571428 cpu:0:vm:pgrrun 0 cpu:0:vm:pgswapin 0 cpu:0:vm:pgswapout 0 cpu:0:vm:prot_fault 14402980 cpu:0:vm:rev 0 cpu:0:vm:scan 0 cpu:0:vm:snaptime 8464512.3614326 cpu:0:vm:softlock 3480 cpu:0:vm:swapin 0 cpu:0:vm:swapout 0 cpu:0:vm:zfod 1049715 
cpu:1:intrstat:class misc cpu:1:intrstat:crtime 85.1925 cpu:1:intrstat:level-1-count 858452491 cpu:1:intrstat:level-1-time 4588436286322 cpu:1:intrstat:level-10-count 3 cpu:1:intrstat:level-10-time 4927 cpu:1:intrstat:level-11-count 0 cpu:1:intrstat:level-11-time 0 cpu:1:intrstat:level-12-count 0 cpu:1:intrstat:level-12-time 0 cpu:1:intrstat:level-13-count 1369449 cpu:1:intrstat:level-13-time 920443404 cpu:1:intrstat:level-14-count 866910705 cpu:1:intrstat:level-14-time 1864107874414 cpu:1:intrstat:level-15-count 0 cpu:1:intrstat:level-15-time 0 cpu:1:intrstat:level-2-count 0 cpu:1:intrstat:level-2-time 0 cpu:1:intrstat:level-3-count 0 cpu:1:intrstat:level-3-time 0 cpu:1:intrstat:level-4-count 1823 cpu:1:intrstat:level-4-time 63267778 cpu:1:intrstat:level-5-count 0 cpu:1:intrstat:level-5-time 0 cpu:1:intrstat:level-6-count 51714075 cpu:1:intrstat:level-6-time 828236173900 cpu:1:intrstat:level-7-count 0 cpu:1:intrstat:level-7-time 0 cpu:1:intrstat:level-8-count 0 cpu:1:intrstat:level-8-time 0 cpu:1:intrstat:level-9-count 2539328135 cpu:1:intrstat:level-9-time 4608974912542 cpu:1:intrstat:snaptime 8464512.3631614 cpu:1:sys:bawrite 6045 cpu:1:sys:bread 788 cpu:1:sys:bwrite 18856 cpu:1:sys:canch 195 cpu:1:sys:class misc cpu:1:sys:cpu_load_intr 55 cpu:1:sys:cpu_nsec_idle 8398320186364915 cpu:1:sys:cpu_nsec_intr 1783610242822200 cpu:1:sys:cpu_nsec_kernel 55211362247994 cpu:1:sys:cpu_nsec_user 10895625900891 cpu:1:sys:cpu_ticks_idle 839832018 cpu:1:sys:cpu_ticks_kernel 5521136 cpu:1:sys:cpu_ticks_user 1089562 cpu:1:sys:cpu_ticks_wait 0 cpu:1:sys:cpumigrate 16614511 cpu:1:sys:crtime 85.192457 cpu:1:sys:idlethread 303851394 cpu:1:sys:intr 4317776681 cpu:1:sys:intrblk 548 cpu:1:sys:intrthread 3449496527 cpu:1:sys:inv_swtch 961705 cpu:1:sys:iowait 0 cpu:1:sys:lread 457212 cpu:1:sys:lwrite 39350 cpu:1:sys:mdmint 0 cpu:1:sys:modload 57 cpu:1:sys:modunload 0 cpu:1:sys:msg 0 cpu:1:sys:mutex_adenters 874201 cpu:1:sys:namei 16115653 cpu:1:sys:nthreads 349857 cpu:1:sys:outch 1563962 cpu:1:sys:phread 104 cpu:1:sys:phwrite 0 cpu:1:sys:procovf 0 cpu:1:sys:pswitch 625354454 cpu:1:sys:rawch 13789 cpu:1:sys:rcvint 0 cpu:1:sys:readch 1034305088 cpu:1:sys:rw_rdfails 58 cpu:1:sys:rw_wrfails 90 cpu:1:sys:sema 0 cpu:1:sys:snaptime 8464512.3647716 cpu:1:sys:syscall 222830456 cpu:1:sys:sysexec 13152 cpu:1:sys:sysfork 124146 cpu:1:sys:sysread 12537231 cpu:1:sys:sysvfork 784 cpu:1:sys:syswrite 11551411 cpu:1:sys:trap 19590968 cpu:1:sys:ufsdirblk 26374 cpu:1:sys:ufsiget 20822 cpu:1:sys:ufsinopage 0 cpu:1:sys:ufsipage 0 cpu:1:sys:wait_ticks_io 0 cpu:1:sys:writech 872522029 cpu:1:sys:xcalls 69038456 cpu:1:sys:xmtint 0 cpu:1:vm:anonfree 0 cpu:1:vm:anonpgin 0 cpu:1:vm:anonpgout 0 cpu:1:vm:as_fault 12642751 cpu:1:vm:class misc cpu:1:vm:cow_fault 6104716 cpu:1:vm:crtime 85.1924764 cpu:1:vm:dfree 0 cpu:1:vm:execfree 0 cpu:1:vm:execpgin 5878 cpu:1:vm:execpgout 0 cpu:1:vm:fsfree 0 cpu:1:vm:fspgin 12742 cpu:1:vm:fspgout 0 cpu:1:vm:hat_fault 0 cpu:1:vm:kernel_asflt 0 cpu:1:vm:maj_fault 10565 cpu:1:vm:pgfrec 477448 cpu:1:vm:pgin 11476 cpu:1:vm:pgout 0 cpu:1:vm:pgpgin 18620 cpu:1:vm:pgpgout 0 cpu:1:vm:pgrec 477448 cpu:1:vm:pgrrun 4 cpu:1:vm:pgswapin 0 cpu:1:vm:pgswapout 0 cpu:1:vm:prot_fault 6174305 cpu:1:vm:rev 0 cpu:1:vm:scan 55292 cpu:1:vm:snaptime 8464512.3675202 cpu:1:vm:softlock 1955 cpu:1:vm:swapin 0 cpu:1:vm:swapout 0 cpu:1:vm:zfod 795469 cpu_info:0:cpu_info0:brand UltraSPARC-III cpu_info:0:cpu_info0:chip_id 0 cpu_info:0:cpu_info0:class misc cpu_info:0:cpu_info0:clock_MHz 750 cpu_info:0:cpu_info0:core_id 0 
cpu_info:0:cpu_info0:cpu_fru hc:///component=Slot 0 cpu_info:0:cpu_info0:cpu_type sparcv9 cpu_info:0:cpu_info0:crtime 82.766564 cpu_info:0:cpu_info0:device_ID 1119577710918 cpu_info:0:cpu_info0:fpu_type sparcv9 cpu_info:0:cpu_info0:implementation UltraSPARC-III (portid 0 impl 0x14 ver 0x34 clock 750 MHz) cpu_info:0:cpu_info0:snaptime 8464512.36932 cpu_info:0:cpu_info0:state on-line cpu_info:0:cpu_info0:state_begin 1211478887 cpu_info:1:cpu_info1:brand UltraSPARC-III cpu_info:1:cpu_info1:chip_id 1 cpu_info:1:cpu_info1:class misc cpu_info:1:cpu_info1:clock_MHz 750 cpu_info:1:cpu_info1:core_id 1 cpu_info:1:cpu_info1:cpu_fru hc:///component=Slot 1 cpu_info:1:cpu_info1:cpu_type sparcv9 cpu_info:1:cpu_info1:crtime 85.1902356 cpu_info:1:cpu_info1:device_ID 1121495638242 cpu_info:1:cpu_info1:fpu_type sparcv9 cpu_info:1:cpu_info1:implementation UltraSPARC-III (portid 1 impl 0x14 ver 0x34 clock 750 MHz) cpu_info:1:cpu_info1:snaptime 8464512.370142 cpu_info:1:cpu_info1:state on-line cpu_info:1:cpu_info1:state_begin 1211478889 cpu_stat:0:cpu_stat0:anonfree 0 cpu_stat:0:cpu_stat0:anonpgin 0 cpu_stat:0:cpu_stat0:anonpgout 0 cpu_stat:0:cpu_stat0:as_fault 26791736 cpu_stat:0:cpu_stat0:bawrite 829 cpu_stat:0:cpu_stat0:bread 830 cpu_stat:0:cpu_stat0:bwrite 5865 cpu_stat:0:cpu_stat0:canch 343 cpu_stat:0:cpu_stat0:class misc cpu_stat:0:cpu_stat0:cow_fault 15493953 cpu_stat:0:cpu_stat0:cpumigrate 16737419 cpu_stat:0:cpu_stat0:crtime 82.7666318 cpu_stat:0:cpu_stat0:dfree 6678 cpu_stat:0:cpu_stat0:execfree 1 cpu_stat:0:cpu_stat0:execpgin 6526 cpu_stat:0:cpu_stat0:execpgout 1 cpu_stat:0:cpu_stat0:fileovf 0 cpu_stat:0:cpu_stat0:fsfree 6677 cpu_stat:0:cpu_stat0:fspgin 21632 cpu_stat:0:cpu_stat0:fspgout 8348 cpu_stat:0:cpu_stat0:hat_fault 0 cpu_stat:0:cpu_stat0:idle 841281235 cpu_stat:0:cpu_stat0:idlethread 271360430 cpu_stat:0:cpu_stat0:inodeovf 0 cpu_stat:0:cpu_stat0:intr 1069547763 cpu_stat:0:cpu_stat0:intrblk 9412 cpu_stat:0:cpu_stat0:intrthread 210788380 cpu_stat:0:cpu_stat0:inv_swtch 784638 cpu_stat:0:cpu_stat0:iowait 0 cpu_stat:0:cpu_stat0:kernel 3585138 cpu_stat:0:cpu_stat0:kernel_asflt 0 cpu_stat:0:cpu_stat0:lread 294769 cpu_stat:0:cpu_stat0:lwrite 29973 cpu_stat:0:cpu_stat0:maj_fault 12471 cpu_stat:0:cpu_stat0:mdmint 0 cpu_stat:0:cpu_stat0:modload 119 cpu_stat:0:cpu_stat0:modunload 0 cpu_stat:0:cpu_stat0:msg 0 cpu_stat:0:cpu_stat0:mutex_adenters 1063657 cpu_stat:0:cpu_stat0:namei 12640989 cpu_stat:0:cpu_stat0:nthreads 223821 cpu_stat:0:cpu_stat0:outch 2577554 cpu_stat:0:cpu_stat0:pgfrec 569757 cpu_stat:0:cpu_stat0:pgin 13549 cpu_stat:0:cpu_stat0:pgout 5567 cpu_stat:0:cpu_stat0:pgpgin 28158 cpu_stat:0:cpu_stat0:pgpgout 8349 cpu_stat:0:cpu_stat0:pgrec 571428 cpu_stat:0:cpu_stat0:pgrrun 0 cpu_stat:0:cpu_stat0:pgswapin 0 cpu_stat:0:cpu_stat0:pgswapout 0 cpu_stat:0:cpu_stat0:phread 175 cpu_stat:0:cpu_stat0:phwrite 0 cpu_stat:0:cpu_stat0:physio 0 cpu_stat:0:cpu_stat0:procovf 0 cpu_stat:0:cpu_stat0:prot_fault 14402980 cpu_stat:0:cpu_stat0:pswitch 554310396 cpu_stat:0:cpu_stat0:rawch 10538 cpu_stat:0:cpu_stat0:rcvint 0 cpu_stat:0:cpu_stat0:readch 1430464179 cpu_stat:0:cpu_stat0:rev 0 cpu_stat:0:cpu_stat0:rw_rdfails 51 cpu_stat:0:cpu_stat0:rw_wrfails 124 cpu_stat:0:cpu_stat0:scan 0 cpu_stat:0:cpu_stat0:sema 0 cpu_stat:0:cpu_stat0:snaptime 8464512.3709422 cpu_stat:0:cpu_stat0:softlock 3480 cpu_stat:0:cpu_stat0:swap 0 cpu_stat:0:cpu_stat0:swapin 0 cpu_stat:0:cpu_stat0:swapout 0 cpu_stat:0:cpu_stat0:syscall 142365970 cpu_stat:0:cpu_stat0:sysexec 14286 cpu_stat:0:cpu_stat0:sysfork 89080 cpu_stat:0:cpu_stat0:sysread 
15368158 cpu_stat:0:cpu_stat0:sysvfork 916 cpu_stat:0:cpu_stat0:syswrite 8209071 cpu_stat:0:cpu_stat0:trap 42200687 cpu_stat:0:cpu_stat0:ufsdirblk 22122 cpu_stat:0:cpu_stat0:ufsiget 17640 cpu_stat:0:cpu_stat0:ufsinopage 0 cpu_stat:0:cpu_stat0:ufsipage 0 cpu_stat:0:cpu_stat0:user 1578593 cpu_stat:0:cpu_stat0:wait 0 cpu_stat:0:cpu_stat0:wait_io 0 cpu_stat:0:cpu_stat0:wait_pio 0 cpu_stat:0:cpu_stat0:wait_swap 0 cpu_stat:0:cpu_stat0:writech 736780774 cpu_stat:0:cpu_stat0:xcalls 63577549 cpu_stat:0:cpu_stat0:xmtint 0 cpu_stat:0:cpu_stat0:zfod 1049715 cpu_stat:1:cpu_stat1:anonfree 0 cpu_stat:1:cpu_stat1:anonpgin 0 cpu_stat:1:cpu_stat1:anonpgout 0 cpu_stat:1:cpu_stat1:as_fault 12642752 cpu_stat:1:cpu_stat1:bawrite 6045 cpu_stat:1:cpu_stat1:bread 788 cpu_stat:1:cpu_stat1:bwrite 18856 cpu_stat:1:cpu_stat1:canch 195 cpu_stat:1:cpu_stat1:class misc cpu_stat:1:cpu_stat1:cow_fault 6104716 cpu_stat:1:cpu_stat1:cpumigrate 16614511 cpu_stat:1:cpu_stat1:crtime 85.1924908 cpu_stat:1:cpu_stat1:dfree 0 cpu_stat:1:cpu_stat1:execfree 0 cpu_stat:1:cpu_stat1:execpgin 5878 cpu_stat:1:cpu_stat1:execpgout 0 cpu_stat:1:cpu_stat1:fileovf 0 cpu_stat:1:cpu_stat1:fsfree 0 cpu_stat:1:cpu_stat1:fspgin 12742 cpu_stat:1:cpu_stat1:fspgout 0 cpu_stat:1:cpu_stat1:hat_fault 0 cpu_stat:1:cpu_stat1:idle 839832018 cpu_stat:1:cpu_stat1:idlethread 303851394 cpu_stat:1:cpu_stat1:inodeovf 0 cpu_stat:1:cpu_stat1:intr 22809390 cpu_stat:1:cpu_stat1:intrblk 548 cpu_stat:1:cpu_stat1:intrthread 3449496531 cpu_stat:1:cpu_stat1:inv_swtch 961705 cpu_stat:1:cpu_stat1:iowait 0 cpu_stat:1:cpu_stat1:kernel 5521136 cpu_stat:1:cpu_stat1:kernel_asflt 0 cpu_stat:1:cpu_stat1:lread 457212 cpu_stat:1:cpu_stat1:lwrite 39350 cpu_stat:1:cpu_stat1:maj_fault 10565 cpu_stat:1:cpu_stat1:mdmint 0 cpu_stat:1:cpu_stat1:modload 57 cpu_stat:1:cpu_stat1:modunload 0 cpu_stat:1:cpu_stat1:msg 0 cpu_stat:1:cpu_stat1:mutex_adenters 874201 cpu_stat:1:cpu_stat1:namei 16115653 cpu_stat:1:cpu_stat1:nthreads 349857 cpu_stat:1:cpu_stat1:outch 1563962 cpu_stat:1:cpu_stat1:pgfrec 477448 cpu_stat:1:cpu_stat1:pgin 11476 cpu_stat:1:cpu_stat1:pgout 0 cpu_stat:1:cpu_stat1:pgpgin 18620 cpu_stat:1:cpu_stat1:pgpgout 0 cpu_stat:1:cpu_stat1:pgrec 477448 cpu_stat:1:cpu_stat1:pgrrun 4 cpu_stat:1:cpu_stat1:pgswapin 0 cpu_stat:1:cpu_stat1:pgswapout 0 cpu_stat:1:cpu_stat1:phread 104 cpu_stat:1:cpu_stat1:phwrite 0 cpu_stat:1:cpu_stat1:physio 0 cpu_stat:1:cpu_stat1:procovf 0 cpu_stat:1:cpu_stat1:prot_fault 6174305 cpu_stat:1:cpu_stat1:pswitch 625354454 cpu_stat:1:cpu_stat1:rawch 13789 cpu_stat:1:cpu_stat1:rcvint 0 cpu_stat:1:cpu_stat1:readch 1034305088 cpu_stat:1:cpu_stat1:rev 0 cpu_stat:1:cpu_stat1:rw_rdfails 58 cpu_stat:1:cpu_stat1:rw_wrfails 90 cpu_stat:1:cpu_stat1:scan 55292 cpu_stat:1:cpu_stat1:sema 0 cpu_stat:1:cpu_stat1:snaptime 8464512.3751486 cpu_stat:1:cpu_stat1:softlock 1955 cpu_stat:1:cpu_stat1:swap 0 cpu_stat:1:cpu_stat1:swapin 0 cpu_stat:1:cpu_stat1:swapout 0 cpu_stat:1:cpu_stat1:syscall 222830466 cpu_stat:1:cpu_stat1:sysexec 13152 cpu_stat:1:cpu_stat1:sysfork 124146 cpu_stat:1:cpu_stat1:sysread 12537231 cpu_stat:1:cpu_stat1:sysvfork 784 cpu_stat:1:cpu_stat1:syswrite 11551412 cpu_stat:1:cpu_stat1:trap 19590970 cpu_stat:1:cpu_stat1:ufsdirblk 26374 cpu_stat:1:cpu_stat1:ufsiget 20822 cpu_stat:1:cpu_stat1:ufsinopage 0 cpu_stat:1:cpu_stat1:ufsipage 0 cpu_stat:1:cpu_stat1:user 1089563 cpu_stat:1:cpu_stat1:wait 0 cpu_stat:1:cpu_stat1:wait_io 0 cpu_stat:1:cpu_stat1:wait_pio 0 cpu_stat:1:cpu_stat1:wait_swap 0 cpu_stat:1:cpu_stat1:writech 872530221 cpu_stat:1:cpu_stat1:xcalls 69038456 
cpu_stat:1:cpu_stat1:xmtint 0 cpu_stat:1:cpu_stat1:zfod 795470 dls:0:dls_stat:class net dls:0:dls_stat:crtime 84.0830104 dls:0:dls_stat:snaptime 8464512.3792934 dls:0:dls_stat:soft_ring_pkt_drop 0 ecpp:0:ecpp0:backchan 0 ecpp:0:ecpp0:class misc ecpp:0:ecpp0:crtime 7784068.8028824 ecpp:0:ecpp0:ctx_cf 0 ecpp:0:ecpp0:ctx_obytes 0 ecpp:0:ecpp0:ctxpio_obytes 0 ecpp:0:ecpp0:diag_obytes 0 ecpp:0:ecpp0:ecp_ibytes 0 ecpp:0:ecpp0:ecp_obytes 0 ecpp:0:ecpp0:epp_ibytes 0 ecpp:0:ecpp0:epp_obytes 0 ecpp:0:ecpp0:iomode 0 ecpp:0:ecpp0:isr_reattempt_high 0 ecpp:0:ecpp0:joblen 0 ecpp:0:ecpp0:mode 0 ecpp:0:ecpp0:nib_ibytes 0 ecpp:0:ecpp0:phase 0 ecpp:0:ecpp0:snaptime 8464512.3796306 ecpp:0:ecpp0:state 0 ecpp:0:ecpp0:to_ctx 0 ecpp:0:ecpp0:to_diag 0 ecpp:0:ecpp0:to_ecp 0 ecpp:0:ecpp0:to_epp 0 ecpp:0:ecpp0:to_nib 0 ecpp:0:ecpp0:xfer_tout 0 ecpp:0:ecppc0:class controller ecpp:0:ecppc0:crtime 7784068.8028486 ecpp:0:ecppc0:hard 0 ecpp:0:ecppc0:multiple_service 0 ecpp:0:ecppc0:snaptime 8464512.3812542 ecpp:0:ecppc0:soft 0 ecpp:0:ecppc0:spurious 0 ecpp:0:ecppc0:watchdog 0 eri:0:eri0:allocbfail 0 eri:0:eri0:bad_pkts 0 eri:0:eri0:brdcstrcv 66275348 eri:0:eri0:brdcstxmt 39802 eri:0:eri0:class net eri:0:eri0:collisions 0 eri:0:eri0:crtime 93.5027306 eri:0:eri0:defer_timer_exp 0 eri:0:eri0:drop 0 eri:0:eri0:excessive_coll 0 eri:0:eri0:first_coll 0 eri:0:eri0:ierrors 0 eri:0:eri0:ifspeed 100000000 eri:0:eri0:inits 2 eri:0:eri0:ipackets 68361334 eri:0:eri0:ipackets64 68361334 eri:0:eri0:jabber 0 eri:0:eri0:late_coll 0 eri:0:eri0:link_duplex 2 eri:0:eri0:link_up 1 eri:0:eri0:multircv 24 eri:0:eri0:multixmt 0 eri:0:eri0:no_free_rx_desc 0 eri:0:eri0:no_tmds 0 eri:0:eri0:nocanput 0 eri:0:eri0:nocarrier 1 eri:0:eri0:norcvbuf 0 eri:0:eri0:noxmtbuf 0 eri:0:eri0:obytes 496670924 eri:0:eri0:obytes64 496670924 eri:0:eri0:oerrors 0 eri:0:eri0:opackets 2263824 eri:0:eri0:opackets64 2263824 eri:0:eri0:parity_error 0 eri:0:eri0:pause_off_cnt 0 eri:0:eri0:pause_on_cnt 0 eri:0:eri0:pause_rcv_cnt 0 eri:0:eri0:pause_time_cnt 0 eri:0:eri0:pci_data_parity_err 0 eri:0:eri0:pci_det_parity_err 0 eri:0:eri0:pci_error_interrupt 0 eri:0:eri0:pci_rcvd_master_abort 0 eri:0:eri0:pci_rcvd_target_abort 0 eri:0:eri0:pci_signal_system_err 0 eri:0:eri0:pci_signal_target_abort 0 eri:0:eri0:peak_attempt_cnt 0 eri:0:eri0:pmcap 4 eri:0:eri0:rbytes 730811550 eri:0:eri0:rbytes64 5025778846 eri:0:eri0:runt 0 eri:0:eri0:rx_align_err 0 eri:0:eri0:rx_code_viol_err 0 eri:0:eri0:rx_corr 0 eri:0:eri0:rx_crc_err 0 eri:0:eri0:rx_hang 0 eri:0:eri0:rx_inits 1 eri:0:eri0:rx_length_err 0 eri:0:eri0:rx_overflow 0 eri:0:eri0:rxtag_error 0 eri:0:eri0:snaptime 8464512.3818398 eri:0:eri0:toolong_pkts 0 eri:0:eri0:tx_hang 0 eri:0:eri0:tx_inits 1 eri:0:eri0:txmac_maxpkt_err 0 eri:0:eri0:txmac_urun 0 eri:0:eri0:unknown_fatal 0 fssnap:0:highwater:class misc fssnap:0:highwater:crtime 7784069.7194418 fssnap:0:highwater:highwater 0 fssnap:0:highwater:snaptime 8464512.3859856 icmp:0:rawip:class mib2 icmp:0:rawip:crtime 84.2123906 icmp:0:rawip:inCksumErrs 0 icmp:0:rawip:inDatagrams 0 icmp:0:rawip:inErrors 0 icmp:0:rawip:outDatagrams 0 icmp:0:rawip:outErrors 0 icmp:0:rawip:snaptime 8464512.3863138 ip:0:icmp:class mib2 ip:0:icmp:crtime 83.9778114 ip:0:icmp:inAddrMaskReps 0 ip:0:icmp:inAddrMasks 4 ip:0:icmp:inBadRedirects 0 ip:0:icmp:inChksumErrs 0 ip:0:icmp:inDestUnreachs 110 ip:0:icmp:inEchoReps 3 ip:0:icmp:inEchos 830 ip:0:icmp:inErrors 0 ip:0:icmp:inFragNeeded 0 ip:0:icmp:inMsgs 979 ip:0:icmp:inOverFlows 0 ip:0:icmp:inParmProbs 0 ip:0:icmp:inRedirects 0 ip:0:icmp:inSrcQuenchs 0 
ip:0:icmp:inTimeExcds 8 ip:0:icmp:inTimestampReps 0 ip:0:icmp:inTimestamps 16 ip:0:icmp:inUnknowns 2 ip:0:icmp:outAddrMaskReps 4 ip:0:icmp:outAddrMasks 0 ip:0:icmp:outDestUnreachs 1125 ip:0:icmp:outDrops 2 ip:0:icmp:outEchoReps 830 ip:0:icmp:outEchos 0 ip:0:icmp:outErrors 0 ip:0:icmp:outFragNeeded 0 ip:0:icmp:outMsgs 1959 ip:0:icmp:outParmProbs 0 ip:0:icmp:outRedirects 0 ip:0:icmp:outSrcQuenchs 0 ip:0:icmp:outTimeExcds 0 ip:0:icmp:outTimestampReps 0 ip:0:icmp:outTimestamps 0 ip:0:icmp:snaptime 8464512.3869344 ip:0:ip:addrEntrySize 100 ip:0:ip:class mib2 ip:0:ip:crtime 83.9777826 ip:0:ip:defaultTTL 255 ip:0:ip:forwDatagrams 0 ip:0:ip:forwProhibits 0 ip:0:ip:forwarding 2 ip:0:ip:fragCreates 0 ip:0:ip:fragFails 0 ip:0:ip:fragOKs 0 ip:0:ip:inAddrErrors 0 ip:0:ip:inCksumErrs 0 ip:0:ip:inDelivers 2291017 ip:0:ip:inDiscards 7 ip:0:ip:inErrs 0 ip:0:ip:inHdrErrors 1 ip:0:ip:inIPv6 0 ip:0:ip:inReceives 10156614 ip:0:ip:inUnknownProtos 2 ip:0:ip:ipsecInFailed 0 ip:0:ip:ipsecInSucceeded 0 ip:0:ip:memberEntrySize 28 ip:0:ip:netToMediaEntrySize 120 ip:0:ip:noPorts 7856445 ip:0:ip:outDiscards 0 ip:0:ip:outIPv6 0 ip:0:ip:outNoRoutes 0 ip:0:ip:outRequests 2223694 ip:0:ip:outSwitchIPv6 0 ip:0:ip:rawipInOverflows 0 ip:0:ip:reasmDuplicates 0 ip:0:ip:reasmFails 0 ip:0:ip:reasmOKs 0 ip:0:ip:reasmPartDups 0 ip:0:ip:reasmReqds 0 ip:0:ip:reasmTimeout 60 ip:0:ip:routeEntrySize 156 ip:0:ip:routingDiscards 0 ip:0:ip:snaptime 8464512.3886896 ip:0:ip:udpInCksumErrs 0 ip:0:ip:udpInOverflows 170 ip:0:ip6stat:class net ip:0:ip6stat:crtime 83.9778502 ip:0:ip6stat:ip6_frag_mdt_addpdescfail 0 ip:0:ip6stat:ip6_frag_mdt_allocd 0 ip:0:ip6stat:ip6_frag_mdt_allocfail 0 ip:0:ip6stat:ip6_frag_mdt_discarded 0 ip:0:ip6stat:ip6_frag_mdt_pkt_out 0 ip:0:ip6stat:ip6_in_sw_cksum 0 ip:0:ip6stat:ip6_out_sw_cksum 0 ip:0:ip6stat:ip6_tcp_in_full_hw_cksum_err 0 ip:0:ip6stat:ip6_tcp_in_part_hw_cksum_err 0 ip:0:ip6stat:ip6_tcp_in_sw_cksum_err 0 ip:0:ip6stat:ip6_tcp_out_sw_cksum_bytes 0 ip:0:ip6stat:ip6_udp_fanmb 0 ip:0:ip6stat:ip6_udp_fannorm 0 ip:0:ip6stat:ip6_udp_fast_path 0 ip:0:ip6stat:ip6_udp_in_full_hw_cksum_err 0 ip:0:ip6stat:ip6_udp_in_part_hw_cksum_err 0 ip:0:ip6stat:ip6_udp_in_sw_cksum_err 0 ip:0:ip6stat:ip6_udp_out_sw_cksum_bytes 0 ip:0:ip6stat:ip6_udp_slow_path 0 ip:0:ip6stat:snaptime 8464512.390953 ip:0:ipdrop:ah_bad_auth 0 ip:0:ipdrop:ah_bad_length 0 ip:0:ipdrop:ah_bad_v4_opts 0 ip:0:ipdrop:ah_bad_v6_hdrs 0 ip:0:ipdrop:ah_bytes_expire 0 ip:0:ipdrop:ah_crypto_failed 0 ip:0:ipdrop:ah_early_replay 0 ip:0:ipdrop:ah_no_sa 0 ip:0:ipdrop:ah_nomem 0 ip:0:ipdrop:ah_replay 0 ip:0:ipdrop:class net ip:0:ipdrop:crtime 83.9782216 ip:0:ipdrop:esp_bad_auth 0 ip:0:ipdrop:esp_bad_padding 0 ip:0:ipdrop:esp_bad_padlen 0 ip:0:ipdrop:esp_bytes_expire 0 ip:0:ipdrop:esp_crypto_failed 0 ip:0:ipdrop:esp_early_replay 0 ip:0:ipdrop:esp_icmp 0 ip:0:ipdrop:esp_no_sa 0 ip:0:ipdrop:esp_nomem 0 ip:0:ipdrop:esp_replay 0 ip:0:ipdrop:ip_ipsec_not_loaded 0 ip:0:ipdrop:sadb_acquire_nomem 0 ip:0:ipdrop:sadb_acquire_timeout 0 ip:0:ipdrop:sadb_acquire_toofull 0 ip:0:ipdrop:sadb_inlarval_replace 0 ip:0:ipdrop:sadb_inlarval_timeout 0 ip:0:ipdrop:snaptime 8464512.3920822 ip:0:ipdrop:spd_ah_badid 0 ip:0:ipdrop:spd_ah_innermismatch 0 ip:0:ipdrop:spd_ahesp_diffid 0 ip:0:ipdrop:spd_bad_ahalg 0 ip:0:ipdrop:spd_bad_espaalg 0 ip:0:ipdrop:spd_bad_espealg 0 ip:0:ipdrop:spd_bad_selfencap 0 ip:0:ipdrop:spd_esp_badid 0 ip:0:ipdrop:spd_esp_innermismatch 0 ip:0:ipdrop:spd_evil_frag 0 ip:0:ipdrop:spd_explicit 0 ip:0:ipdrop:spd_got_ah 0 ip:0:ipdrop:spd_got_clear 0 ip:0:ipdrop:spd_got_esp 0 
ip:0:ipdrop:spd_got_secure 0 ip:0:ipdrop:spd_got_selfencap 0 ip:0:ipdrop:spd_loopback_mismatch 0 ip:0:ipdrop:spd_malformed_frag 0 ip:0:ipdrop:spd_malformed_packet 0 ip:0:ipdrop:spd_max_frags 0 ip:0:ipdrop:spd_no_policy 0 ip:0:ipdrop:spd_nomem 0 ip:0:ipdrop:spd_overlap_frag 0 ip:0:ipdrop:tcp_clear 0 ip:0:ipdrop:tcp_ipsec_alloc 0 ip:0:ipdrop:tcp_mismatch 0 ip:0:ipdrop:tcp_secure 0 ip:0:ipsec_stat:ah_stat_in_discards 0 ip:0:ipsec_stat:ah_stat_in_requests 0 ip:0:ipsec_stat:ah_stat_lookup_failure 0 ip:0:ipsec_stat:class net ip:0:ipsec_stat:crtime 83.9782688 ip:0:ipsec_stat:esp_stat_in_discards 0 ip:0:ipsec_stat:esp_stat_in_requests 0 ip:0:ipsec_stat:esp_stat_lookup_failure 0 ip:0:ipsec_stat:sadb_acquire_maxpackets 4 ip:0:ipsec_stat:sadb_acquire_qhiwater 0 ip:0:ipsec_stat:snaptime 8464512.3948456 ip:0:ipstat:class net ip:0:ipstat:crtime 83.9778354 ip:0:ipstat:ip_classify_fail 0 ip:0:ipstat:ip_conn_flputbq 0 ip:0:ipstat:ip_conn_walk_drain 0 ip:0:ipstat:ip_db_ref 0 ip:0:ipstat:ip_frag_mdt_addpdescfail 0 ip:0:ipstat:ip_frag_mdt_allocd 0 ip:0:ipstat:ip_frag_mdt_allocfail 0 ip:0:ipstat:ip_frag_mdt_discarded 0 ip:0:ipstat:ip_frag_mdt_pkt_out 0 ip:0:ipstat:ip_in_sw_cksum 8073240 ip:0:ipstat:ip_input_multi_squeue 0 ip:0:ipstat:ip_ipoptions 0 ip:0:ipstat:ip_ire_arp_timer_expired 7053 ip:0:ipstat:ip_ire_pmtu_timer_expired 14107 ip:0:ipstat:ip_ire_redirect_timer_expired 141072 ip:0:ipstat:ip_multimblk3 0 ip:0:ipstat:ip_multimblk4 0 ip:0:ipstat:ip_multipkttcp 0 ip:0:ipstat:ip_notaligned1 0 ip:0:ipstat:ip_notaligned2 0 ip:0:ipstat:ip_opt 0 ip:0:ipstat:ip_out_sw_cksum 0 ip:0:ipstat:ip_tcp_fast_path 0 ip:0:ipstat:ip_tcp_in_full_hw_cksum_err 0 ip:0:ipstat:ip_tcp_in_part_hw_cksum_err 0 ip:0:ipstat:ip_tcp_in_sw_cksum_err 0 ip:0:ipstat:ip_tcp_input_error 0 ip:0:ipstat:ip_tcp_out_sw_cksum_bytes 0 ip:0:ipstat:ip_tcp_slow_path 0 ip:0:ipstat:ip_tcpoptions 0 ip:0:ipstat:ip_tcppullup 0 ip:0:ipstat:ip_trash_ire_reclaim_calls 0 ip:0:ipstat:ip_trash_ire_reclaim_success 0 ip:0:ipstat:ip_udp_fanmb 217926 ip:0:ipstat:ip_udp_fannorm 34 ip:0:ipstat:ip_udp_fanothers 7856450 ip:0:ipstat:ip_udp_fast_path 247393 ip:0:ipstat:ip_udp_in_full_hw_cksum_err 0 ip:0:ipstat:ip_udp_in_part_hw_cksum_err 0 ip:0:ipstat:ip_udp_in_sw_cksum_err 0 ip:0:ipstat:ip_udp_input_err 0 ip:0:ipstat:ip_udp_out_sw_cksum_bytes 0 ip:0:ipstat:ip_udp_rput_local 0 ip:0:ipstat:ip_udp_slow_path 8074351 ip:0:ipstat:ipsec_fanout_proto 0 ip:0:ipstat:ipsec_proto_ahesp 0 ip:0:ipstat:snaptime 8464512.3954488 ipf:0:inbound:acct 0 ipf:0:inbound:bad frag state alloc 0 ipf:0:inbound:bad ip pkt 0 ipf:0:inbound:bad pkt state alloc 0 ipf:0:inbound:block 0 ipf:0:inbound:block, logged 0 ipf:0:inbound:cachehit 0 ipf:0:inbound:class net ipf:0:inbound:crtime 7784070.2008212 ipf:0:inbound:dropped:pps ceiling 0 ipf:0:inbound:ip upd. fail 0 ipf:0:inbound:ipv6 pkt 0 ipf:0:inbound:logged 0 ipf:0:inbound:new frag state compl. 
pkt 0 ipf:0:inbound:new frag state kept 0 ipf:0:inbound:new pkt kept state 0 ipf:0:inbound:nomatch 0 ipf:0:inbound:nomatch, logged 0 ipf:0:inbound:pass 0 ipf:0:inbound:pass, logged 0 ipf:0:inbound:pullup nok 0 ipf:0:inbound:pullup ok 0 ipf:0:inbound:return sent 0 ipf:0:inbound:short 0 ipf:0:inbound:skip 0 ipf:0:inbound:snaptime 8464512.3979976 ipf:0:inbound:src != route 0 ipf:0:inbound:tcp cksum bad 0 ipf:0:inbound:ttl invalid 0 ipf:0:outbound:acct 0 ipf:0:outbound:bad frag state alloc 0 ipf:0:outbound:bad ip pkt 0 ipf:0:outbound:bad pkt state alloc 0 ipf:0:outbound:block 0 ipf:0:outbound:block, logged 0 ipf:0:outbound:cachehit 0 ipf:0:outbound:class net ipf:0:outbound:crtime 7784070.2008548 ipf:0:outbound:dropped:pps ceiling 0 ipf:0:outbound:ip upd. fail 0 ipf:0:outbound:ipv6 pkt 0 ipf:0:outbound:logged 0 ipf:0:outbound:new frag state compl. pkt 0 ipf:0:outbound:new frag state kept 0 ipf:0:outbound:new pkt kept state 0 ipf:0:outbound:nomatch 0 ipf:0:outbound:nomatch, logged 0 ipf:0:outbound:pass 0 ipf:0:outbound:pass, logged 0 ipf:0:outbound:pullup nok 0 ipf:0:outbound:pullup ok 0 ipf:0:outbound:return sent 0 ipf:0:outbound:short 0 ipf:0:outbound:skip 0 ipf:0:outbound:snaptime 8464512.3993982 ipf:0:outbound:src != route 0 ipf:0:outbound:tcp cksum bad 0 ipf:0:outbound:ttl invalid 0 ipsecah:0:ah_stat:acquire_requests 0 ipsecah:0:ah_stat:bad_auth 0 ipsecah:0:ah_stat:bytes_expired 0 ipsecah:0:ah_stat:class net ipsecah:0:ah_stat:crtime 87.574641 ipsecah:0:ah_stat:crypto_async 0 ipsecah:0:ah_stat:crypto_failures 0 ipsecah:0:ah_stat:crypto_sync 0 ipsecah:0:ah_stat:good_auth 0 ipsecah:0:ah_stat:in_accelerated 0 ipsecah:0:ah_stat:keysock_in 0 ipsecah:0:ah_stat:noaccel 0 ipsecah:0:ah_stat:num_aalgs 0 ipsecah:0:ah_stat:out_accelerated 0 ipsecah:0:ah_stat:out_discards 0 ipsecah:0:ah_stat:out_requests 0 ipsecah:0:ah_stat:replay_early_failures 0 ipsecah:0:ah_stat:replay_failures 0 ipsecah:0:ah_stat:snaptime 8464512.400908 ipsecesp:0:esp_stat:acquire_requests 0 ipsecesp:0:esp_stat:bad_auth 0 ipsecesp:0:esp_stat:bad_decrypt 0 ipsecesp:0:esp_stat:bad_padding 0 ipsecesp:0:esp_stat:bytes_expired 0 ipsecesp:0:esp_stat:class net ipsecesp:0:esp_stat:crtime 87.5749822 ipsecesp:0:esp_stat:crypto_async 0 ipsecesp:0:esp_stat:crypto_failures 0 ipsecesp:0:esp_stat:crypto_sync 0 ipsecesp:0:esp_stat:good_auth 0 ipsecesp:0:esp_stat:in_accelerated 0 ipsecesp:0:esp_stat:keysock_in 0 ipsecesp:0:esp_stat:noaccel 0 ipsecesp:0:esp_stat:num_aalgs 0 ipsecesp:0:esp_stat:num_ealgs 0 ipsecesp:0:esp_stat:out_accelerated 0 ipsecesp:0:esp_stat:out_discards 0 ipsecesp:0:esp_stat:out_requests 0 ipsecesp:0:esp_stat:replay_early_failures 0 ipsecesp:0:esp_stat:replay_failures 0 ipsecesp:0:esp_stat:snaptime 8464512.4019838 iscsi:0:iscsi_hba_1:_alias iscsi:0:iscsi_hba_1:_cntr_sess 0 iscsi:0:iscsi_hba_1:_name iscsi:0:iscsi_hba_1:class iscsi_hba iscsi:0:iscsi_hba_1:crtime 85.6923334 iscsi:0:iscsi_hba_1:snaptime 8464512.4030928 kcf:0:framework_stats:class crypto kcf:0:framework_stats:crtime 83.91988 kcf:0:framework_stats:idle threads in pool 0 kcf:0:framework_stats:max requests in gswq 4194304 kcf:0:framework_stats:max threads in pool 2 kcf:0:framework_stats:maxalloc for HW taskq 2097152 kcf:0:framework_stats:min threads in pool 1 kcf:0:framework_stats:minalloc for HW taskq 64 kcf:0:framework_stats:requests in gswq 0 kcf:0:framework_stats:snaptime 8464512.4035194 kcf:0:framework_stats:threads for HW taskq 8 kcf:0:framework_stats:total threads in pool 0 kcf:0:md5_provider_stats:class crypto kcf:0:md5_provider_stats:crtime 83.9199636 
kcf:0:md5_provider_stats:kcf_ops_failed 0 kcf:0:md5_provider_stats:kcf_ops_passed 0 kcf:0:md5_provider_stats:kcf_ops_returned_busy 0 kcf:0:md5_provider_stats:kcf_ops_total 0 kcf:0:md5_provider_stats:snaptime 8464512.4041706 kcf:0:sha1_provider_stats:class crypto kcf:0:sha1_provider_stats:crtime 83.9368628 kcf:0:sha1_provider_stats:kcf_ops_failed 0 kcf:0:sha1_provider_stats:kcf_ops_passed 0 kcf:0:sha1_provider_stats:kcf_ops_returned_busy 0 kcf:0:sha1_provider_stats:kcf_ops_total 0 kcf:0:sha1_provider_stats:snaptime 8464512.4045882 kcf:0:swrand_provider_stats:class crypto kcf:0:swrand_provider_stats:crtime 83.9200496 kcf:0:swrand_provider_stats:kcf_ops_failed 0 kcf:0:swrand_provider_stats:kcf_ops_passed 0 kcf:0:swrand_provider_stats:kcf_ops_returned_busy 0 kcf:0:swrand_provider_stats:kcf_ops_total 0 kcf:0:swrand_provider_stats:snaptime 8464512.4050004 kssl:0:kssl_stats:class crypto kssl:0:kssl_stats:crtime 86.6381102 kssl:0:kssl_stats:kssl_alloc_fails 0 kssl:0:kssl_stats:kssl_appdata_record_ins 0 kssl:0:kssl_stats:kssl_appdata_record_outs 0 kssl:0:kssl_stats:kssl_bad_pre_master_secret 0 kssl:0:kssl_stats:kssl_compute_mac_failure 0 kssl:0:kssl_stats:kssl_fallback_connections 0 kssl:0:kssl_stats:kssl_fatal_alerts 0 kssl:0:kssl_stats:kssl_full_handshakes 0 kssl:0:kssl_stats:kssl_internal_errors 0 kssl:0:kssl_stats:kssl_no_suite_found 0 kssl:0:kssl_stats:kssl_proxy_fallback_failed 0 kssl:0:kssl_stats:kssl_record_decrypt_failure 0 kssl:0:kssl_stats:kssl_resumed_sessions 0 kssl:0:kssl_stats:kssl_sid_cache_hits 0 kssl:0:kssl_stats:kssl_sid_cache_lookups 0 kssl:0:kssl_stats:kssl_sid_uncached 0 kssl:0:kssl_stats:kssl_verify_mac_failure 0 kssl:0:kssl_stats:kssl_warning_alerts 0 kssl:0:kssl_stats:snaptime 8464512.4054546 lgrp:0:lgrp0:alloc fail 100184 lgrp:0:lgrp0:class misc lgrp:0:lgrp0:cpus 2 lgrp:0:lgrp0:crtime 85.1891746 lgrp:0:lgrp0:default policy 0 lgrp:0:lgrp0:load average 32032 lgrp:0:lgrp0:lwp migrations 0 lgrp:0:lgrp0:next-touch policy 0 lgrp:0:lgrp0:pages avail 1022289 lgrp:0:lgrp0:pages failed to mark 0 lgrp:0:lgrp0:pages failed to migrate from 0 lgrp:0:lgrp0:pages failed to migrate to 0 lgrp:0:lgrp0:pages free 980894 lgrp:0:lgrp0:pages installed 1048576 lgrp:0:lgrp0:pages marked for migration 0 lgrp:0:lgrp0:pages migrated from 0 lgrp:0:lgrp0:pages migrated to 0 lgrp:0:lgrp0:random policy 0 lgrp:0:lgrp0:round robin policy 0 lgrp:0:lgrp0:snaptime 8464512.4065474 lgrp:0:lgrp0:span process policy 0 lgrp:0:lgrp0:span psrset policy 0 lo:0:lo0:class net lo:0:lo0:crtime 91.072893 lo:0:lo0:ipackets 186623 lo:0:lo0:opackets 186623 lo:0:lo0:snaptime 8464512.4077472 nfs:0:nfs4_callback_stats:callbacks 0 nfs:0:nfs4_callback_stats:cb_dispatch 0 nfs:0:nfs4_callback_stats:cb_getattr 0 nfs:0:nfs4_callback_stats:cb_illegal 0 nfs:0:nfs4_callback_stats:cb_null 0 nfs:0:nfs4_callback_stats:cb_recall 0 nfs:0:nfs4_callback_stats:claim_cur 0 nfs:0:nfs4_callback_stats:claim_cur_ok 0 nfs:0:nfs4_callback_stats:class misc nfs:0:nfs4_callback_stats:crtime 114.1534968 nfs:0:nfs4_callback_stats:deleg_recover 0 nfs:0:nfs4_callback_stats:delegaccept_r 0 nfs:0:nfs4_callback_stats:delegaccept_rw 0 nfs:0:nfs4_callback_stats:delegations 0 nfs:0:nfs4_callback_stats:delegreturn 0 nfs:0:nfs4_callback_stats:recall_failed 0 nfs:0:nfs4_callback_stats:recall_trunc 0 nfs:0:nfs4_callback_stats:return_limit_addmap 0 nfs:0:nfs4_callback_stats:return_limit_write 0 nfs:0:nfs4_callback_stats:snaptime 8464512.4082166 nfs:0:nfs_client:badcalls 2 nfs:0:nfs_client:calls 204510 nfs:0:nfs_client:class misc nfs:0:nfs_client:clgets 204509 
nfs:0:nfs_client:cltoomany 12 nfs:0:nfs_client:crtime 114.146025 nfs:0:nfs_client:snaptime 8464512.4092248 nfs:0:nfs_server:badcalls 0 nfs:0:nfs_server:calls 0 nfs:0:nfs_server:class misc nfs:0:nfs_server:crtime 114.1535226 nfs:0:nfs_server:snaptime 8464512.4097896 nfs:0:rfsproccnt_v2:class misc nfs:0:rfsproccnt_v2:create 0 nfs:0:rfsproccnt_v2:crtime 114.1535514 nfs:0:rfsproccnt_v2:getattr 0 nfs:0:rfsproccnt_v2:link 0 nfs:0:rfsproccnt_v2:lookup 0 nfs:0:rfsproccnt_v2:mkdir 0 nfs:0:rfsproccnt_v2:null 0 nfs:0:rfsproccnt_v2:read 0 nfs:0:rfsproccnt_v2:readdir 0 nfs:0:rfsproccnt_v2:readlink 0 nfs:0:rfsproccnt_v2:remove 0 nfs:0:rfsproccnt_v2:rename 0 nfs:0:rfsproccnt_v2:rmdir 0 nfs:0:rfsproccnt_v2:root 0 nfs:0:rfsproccnt_v2:setattr 0 nfs:0:rfsproccnt_v2:snaptime 8464512.4102272 nfs:0:rfsproccnt_v2:statfs 0 nfs:0:rfsproccnt_v2:symlink 0 nfs:0:rfsproccnt_v2:wrcache 0 nfs:0:rfsproccnt_v2:write 0 nfs:0:rfsproccnt_v3:access 0 nfs:0:rfsproccnt_v3:class misc nfs:0:rfsproccnt_v3:commit 0 nfs:0:rfsproccnt_v3:create 0 nfs:0:rfsproccnt_v3:crtime 114.153618 nfs:0:rfsproccnt_v3:fsinfo 0 nfs:0:rfsproccnt_v3:fsstat 0 nfs:0:rfsproccnt_v3:getattr 0 nfs:0:rfsproccnt_v3:link 0 nfs:0:rfsproccnt_v3:lookup 0 nfs:0:rfsproccnt_v3:mkdir 0 nfs:0:rfsproccnt_v3:mknod 0 nfs:0:rfsproccnt_v3:null 0 nfs:0:rfsproccnt_v3:pathconf 0 nfs:0:rfsproccnt_v3:read 0 nfs:0:rfsproccnt_v3:readdir 0 nfs:0:rfsproccnt_v3:readdirplus 0 nfs:0:rfsproccnt_v3:readlink 0 nfs:0:rfsproccnt_v3:remove 0 nfs:0:rfsproccnt_v3:rename 0 nfs:0:rfsproccnt_v3:rmdir 0 nfs:0:rfsproccnt_v3:setattr 0 nfs:0:rfsproccnt_v3:snaptime 8464512.4112542 nfs:0:rfsproccnt_v3:symlink 0 nfs:0:rfsproccnt_v3:write 0 nfs:0:rfsproccnt_v4:access 0 nfs:0:rfsproccnt_v4:class misc nfs:0:rfsproccnt_v4:close 0 nfs:0:rfsproccnt_v4:commit 0 nfs:0:rfsproccnt_v4:compound 0 nfs:0:rfsproccnt_v4:create 0 nfs:0:rfsproccnt_v4:crtime 114.1537088 nfs:0:rfsproccnt_v4:delegpurge 0 nfs:0:rfsproccnt_v4:delegreturn 0 nfs:0:rfsproccnt_v4:getattr 0 nfs:0:rfsproccnt_v4:getfh 0 nfs:0:rfsproccnt_v4:illegal 0 nfs:0:rfsproccnt_v4:link 0 nfs:0:rfsproccnt_v4:lock 0 nfs:0:rfsproccnt_v4:lockt 0 nfs:0:rfsproccnt_v4:locku 0 nfs:0:rfsproccnt_v4:lookup 0 nfs:0:rfsproccnt_v4:lookupp 0 nfs:0:rfsproccnt_v4:null 0 nfs:0:rfsproccnt_v4:nverify 0 nfs:0:rfsproccnt_v4:open 0 nfs:0:rfsproccnt_v4:open_confirm 0 nfs:0:rfsproccnt_v4:open_downgrade 0 nfs:0:rfsproccnt_v4:openattr 0 nfs:0:rfsproccnt_v4:putfh 0 nfs:0:rfsproccnt_v4:putpubfh 0 nfs:0:rfsproccnt_v4:putrootfh 0 nfs:0:rfsproccnt_v4:read 0 nfs:0:rfsproccnt_v4:readdir 0 nfs:0:rfsproccnt_v4:readlink 0 nfs:0:rfsproccnt_v4:release_lockowner 0 nfs:0:rfsproccnt_v4:remove 0 nfs:0:rfsproccnt_v4:rename 0 nfs:0:rfsproccnt_v4:renew 0 nfs:0:rfsproccnt_v4:reserved 0 nfs:0:rfsproccnt_v4:restorefh 0 nfs:0:rfsproccnt_v4:savefh 0 nfs:0:rfsproccnt_v4:secinfo 0 nfs:0:rfsproccnt_v4:setattr 0 nfs:0:rfsproccnt_v4:setclientid 0 nfs:0:rfsproccnt_v4:setclientid_confirm 0 nfs:0:rfsproccnt_v4:snaptime 8464512.4124522 nfs:0:rfsproccnt_v4:verify 0 nfs:0:rfsproccnt_v4:write 0 nfs:0:rfsreqcnt_v2:class misc nfs:0:rfsreqcnt_v2:create 0 nfs:0:rfsreqcnt_v2:crtime 114.1535372 nfs:0:rfsreqcnt_v2:getattr 23 nfs:0:rfsreqcnt_v2:link 0 nfs:0:rfsreqcnt_v2:lookup 0 nfs:0:rfsreqcnt_v2:mkdir 0 nfs:0:rfsreqcnt_v2:null 0 nfs:0:rfsreqcnt_v2:read 0 nfs:0:rfsreqcnt_v2:readdir 0 nfs:0:rfsreqcnt_v2:readlink 0 nfs:0:rfsreqcnt_v2:remove 0 nfs:0:rfsreqcnt_v2:rename 0 nfs:0:rfsreqcnt_v2:rmdir 0 nfs:0:rfsreqcnt_v2:root 0 nfs:0:rfsreqcnt_v2:setattr 0 nfs:0:rfsreqcnt_v2:snaptime 8464512.4145768 nfs:0:rfsreqcnt_v2:statfs 2 
nfs:0:rfsreqcnt_v2:symlink 0 nfs:0:rfsreqcnt_v2:wrcache 0 nfs:0:rfsreqcnt_v2:write 0 nfs:0:rfsreqcnt_v3:access 9284 nfs:0:rfsreqcnt_v3:class misc nfs:0:rfsreqcnt_v3:commit 1217 nfs:0:rfsreqcnt_v3:create 341 nfs:0:rfsreqcnt_v3:crtime 114.1535988 nfs:0:rfsreqcnt_v3:fsinfo 55 nfs:0:rfsreqcnt_v3:fsstat 60 nfs:0:rfsreqcnt_v3:getattr 156523 nfs:0:rfsreqcnt_v3:link 22 nfs:0:rfsreqcnt_v3:lookup 24982 nfs:0:rfsreqcnt_v3:mkdir 16 nfs:0:rfsreqcnt_v3:mknod 0 nfs:0:rfsreqcnt_v3:null 0 nfs:0:rfsreqcnt_v3:pathconf 0 nfs:0:rfsreqcnt_v3:read 5656 nfs:0:rfsreqcnt_v3:readdir 6 nfs:0:rfsreqcnt_v3:readdirplus 1993 nfs:0:rfsreqcnt_v3:readlink 24 nfs:0:rfsreqcnt_v3:remove 432 nfs:0:rfsreqcnt_v3:rename 144 nfs:0:rfsreqcnt_v3:rmdir 15 nfs:0:rfsreqcnt_v3:setattr 521 nfs:0:rfsreqcnt_v3:snaptime 8464512.4155734 nfs:0:rfsreqcnt_v3:symlink 335 nfs:0:rfsreqcnt_v3:write 2318 nfs:0:rfsreqcnt_v4:access 0 nfs:0:rfsreqcnt_v4:class misc nfs:0:rfsreqcnt_v4:close 0 nfs:0:rfsreqcnt_v4:commit 0 nfs:0:rfsreqcnt_v4:compound 180 nfs:0:rfsreqcnt_v4:create 0 nfs:0:rfsreqcnt_v4:crtime 114.1536856 nfs:0:rfsreqcnt_v4:delegpurge 0 nfs:0:rfsreqcnt_v4:delegreturn 0 nfs:0:rfsreqcnt_v4:getattr 0 nfs:0:rfsreqcnt_v4:getfh 0 nfs:0:rfsreqcnt_v4:link 0 nfs:0:rfsreqcnt_v4:lock 0 nfs:0:rfsreqcnt_v4:lockt 0 nfs:0:rfsreqcnt_v4:locku 0 nfs:0:rfsreqcnt_v4:lookup 0 nfs:0:rfsreqcnt_v4:lookupp 0 nfs:0:rfsreqcnt_v4:null 0 nfs:0:rfsreqcnt_v4:nverify 0 nfs:0:rfsreqcnt_v4:open 0 nfs:0:rfsreqcnt_v4:open_confirm 0 nfs:0:rfsreqcnt_v4:open_downgrade 0 nfs:0:rfsreqcnt_v4:openattr 0 nfs:0:rfsreqcnt_v4:putfh 0 nfs:0:rfsreqcnt_v4:putpubfh 0 nfs:0:rfsreqcnt_v4:putrootfh 180 nfs:0:rfsreqcnt_v4:read 0 nfs:0:rfsreqcnt_v4:readdir 0 nfs:0:rfsreqcnt_v4:readlink 0 nfs:0:rfsreqcnt_v4:remove 0 nfs:0:rfsreqcnt_v4:rename 0 nfs:0:rfsreqcnt_v4:renew 0 nfs:0:rfsreqcnt_v4:reserved 0 nfs:0:rfsreqcnt_v4:restorefh 0 nfs:0:rfsreqcnt_v4:savefh 0 nfs:0:rfsreqcnt_v4:secinfo 0 nfs:0:rfsreqcnt_v4:setattr 0 nfs:0:rfsreqcnt_v4:setclientid 0 nfs:0:rfsreqcnt_v4:setclientid_confirm 0 nfs:0:rfsreqcnt_v4:snaptime 8464512.416765 nfs:0:rfsreqcnt_v4:verify 0 nfs:0:rfsreqcnt_v4:write 0 nfs:1:mntinfo:class misc nfs:1:mntinfo:crtime 115.4030446 nfs:1:mntinfo:lookup_deviate 0 nfs:1:mntinfo:lookup_rtxcur 0 nfs:1:mntinfo:lookup_srtt 0 nfs:1:mntinfo:mik_acdirmax 60 nfs:1:mntinfo:mik_acdirmin 30 nfs:1:mntinfo:mik_acregmax 60 nfs:1:mntinfo:mik_acregmin 3 nfs:1:mntinfo:mik_curread 32768 nfs:1:mntinfo:mik_curserver nitro nfs:1:mntinfo:mik_curwrite 32768 nfs:1:mntinfo:mik_failover 0 nfs:1:mntinfo:mik_flags 535557 nfs:1:mntinfo:mik_noresponse 0 nfs:1:mntinfo:mik_proto tcp nfs:1:mntinfo:mik_remap 0 nfs:1:mntinfo:mik_retrans 5 nfs:1:mntinfo:mik_secmod 1 nfs:1:mntinfo:mik_timeo 600 nfs:1:mntinfo:mik_vers 3 nfs:1:mntinfo:read_deviate 0 nfs:1:mntinfo:read_rtxcur 0 nfs:1:mntinfo:read_srtt 0 nfs:1:mntinfo:snaptime 8464512.4187268 nfs:1:mntinfo:write_deviate 0 nfs:1:mntinfo:write_rtxcur 0 nfs:1:mntinfo:write_srtt 0 nfs:1:nfs1:class nfs nfs:1:nfs1:crtime 115.4030316 nfs:1:nfs1:nread 132930 nfs:1:nfs1:nwritten 0 nfs:1:nfs1:rcnt 0 nfs:1:nfs1:reads 21 nfs:1:nfs1:rlastupdate 8304470.7480992 nfs:1:nfs1:rlentime 0.0490654 nfs:1:nfs1:rtime 0.0490654 nfs:1:nfs1:snaptime 8464512.4202546 nfs:1:nfs1:wcnt 0 nfs:1:nfs1:wlastupdate 0 nfs:1:nfs1:wlentime 0 nfs:1:nfs1:writes 0 nfs:1:nfs1:wtime 0 nfs:2:mntinfo:class misc nfs:2:mntinfo:crtime 123.591357 nfs:2:mntinfo:lookup_deviate 2 nfs:2:mntinfo:lookup_rtxcur 1 nfs:2:mntinfo:lookup_srtt 2 nfs:2:mntinfo:mik_acdirmax 60 nfs:2:mntinfo:mik_acdirmin 30 nfs:2:mntinfo:mik_acregmax 60 
nfs:2:mntinfo:mik_acregmin 3 nfs:2:mntinfo:mik_curread 8192 nfs:2:mntinfo:mik_curserver for volume management (/vol) nfs:2:mntinfo:mik_curwrite 512 nfs:2:mntinfo:mik_failover 0 nfs:2:mntinfo:mik_flags 85 nfs:2:mntinfo:mik_noresponse 0 nfs:2:mntinfo:mik_proto - nfs:2:mntinfo:mik_remap 0 nfs:2:mntinfo:mik_retrans 5 nfs:2:mntinfo:mik_secmod 21982 nfs:2:mntinfo:mik_timeo 350 nfs:2:mntinfo:mik_vers 2 nfs:2:mntinfo:read_deviate 0 nfs:2:mntinfo:read_rtxcur 0 nfs:2:mntinfo:read_srtt 0 nfs:2:mntinfo:snaptime 8464512.4210848 nfs:2:mntinfo:write_deviate 0 nfs:2:mntinfo:write_rtxcur 0 nfs:2:mntinfo:write_srtt 0 nfs:2:nfs2:class nfs nfs:2:nfs2:crtime 123.5913454 nfs:2:nfs2:nread 0 nfs:2:nfs2:nwritten 0 nfs:2:nfs2:rcnt 0 nfs:2:nfs2:reads 0 nfs:2:nfs2:rlastupdate 0 nfs:2:nfs2:rlentime 0 nfs:2:nfs2:rtime 0 nfs:2:nfs2:snaptime 8464512.4223944 nfs:2:nfs2:wcnt 0 nfs:2:nfs2:wlastupdate 0 nfs:2:nfs2:wlentime 0 nfs:2:nfs2:writes 0 nfs:2:nfs2:wtime 0 nfs:86:mntinfo:class misc nfs:86:mntinfo:crtime 8463822.253289 nfs:86:mntinfo:lookup_deviate 0 nfs:86:mntinfo:lookup_rtxcur 0 nfs:86:mntinfo:lookup_srtt 0 nfs:86:mntinfo:mik_acdirmax 60 nfs:86:mntinfo:mik_acdirmin 30 nfs:86:mntinfo:mik_acregmax 60 nfs:86:mntinfo:mik_acregmin 3 nfs:86:mntinfo:mik_curread 32768 nfs:86:mntinfo:mik_curserver nitro nfs:86:mntinfo:mik_curwrite 32768 nfs:86:mntinfo:mik_failover 0 nfs:86:mntinfo:mik_flags 535557 nfs:86:mntinfo:mik_noresponse 0 nfs:86:mntinfo:mik_proto tcp nfs:86:mntinfo:mik_remap 0 nfs:86:mntinfo:mik_retrans 5 nfs:86:mntinfo:mik_secmod 1 nfs:86:mntinfo:mik_timeo 600 nfs:86:mntinfo:mik_vers 3 nfs:86:mntinfo:read_deviate 0 nfs:86:mntinfo:read_rtxcur 0 nfs:86:mntinfo:read_srtt 0 nfs:86:mntinfo:snaptime 8464512.4231872 nfs:86:mntinfo:write_deviate 0 nfs:86:mntinfo:write_rtxcur 0 nfs:86:mntinfo:write_srtt 0 nfs:86:nfs86:class nfs nfs:86:nfs86:crtime 8463822.2532712 nfs:86:nfs86:nread 684853 nfs:86:nfs86:nwritten 493111 nfs:86:nfs86:rcnt 0 nfs:86:nfs86:reads 132 nfs:86:nfs86:rlastupdate 8464512.4133806 nfs:86:nfs86:rlentime 1.2154618 nfs:86:nfs86:rtime 1.2003328 nfs:86:nfs86:snaptime 8464512.4245794 nfs:86:nfs86:wcnt 0 nfs:86:nfs86:wlastupdate 8464512.4096498 nfs:86:nfs86:wlentime 0.002374 nfs:86:nfs86:writes 61 nfs:86:nfs86:wtime 0.002207 nfs_acl:0:aclproccnt_v2:access 0 nfs_acl:0:aclproccnt_v2:class misc nfs_acl:0:aclproccnt_v2:crtime 114.1535738 nfs_acl:0:aclproccnt_v2:getacl 0 nfs_acl:0:aclproccnt_v2:getattr 0 nfs_acl:0:aclproccnt_v2:getxattrdir 0 nfs_acl:0:aclproccnt_v2:null 0 nfs_acl:0:aclproccnt_v2:setacl 0 nfs_acl:0:aclproccnt_v2:snaptime 8464512.4254308 nfs_acl:0:aclproccnt_v3:class misc nfs_acl:0:aclproccnt_v3:crtime 114.1536648 nfs_acl:0:aclproccnt_v3:getacl 0 nfs_acl:0:aclproccnt_v3:getxattrdir 0 nfs_acl:0:aclproccnt_v3:null 0 nfs_acl:0:aclproccnt_v3:setacl 0 nfs_acl:0:aclproccnt_v3:snaptime 8464512.4259316 nfs_acl:0:aclproccnt_v4:class misc nfs_acl:0:aclproccnt_v4:crtime 114.1537372 nfs_acl:0:aclproccnt_v4:getacl 0 nfs_acl:0:aclproccnt_v4:null 0 nfs_acl:0:aclproccnt_v4:setacl 0 nfs_acl:0:aclproccnt_v4:snaptime 8464512.4263338 nfs_acl:0:aclreqcnt_v2:access 0 nfs_acl:0:aclreqcnt_v2:class misc nfs_acl:0:aclreqcnt_v2:crtime 114.1535622 nfs_acl:0:aclreqcnt_v2:getacl 0 nfs_acl:0:aclreqcnt_v2:getattr 1 nfs_acl:0:aclreqcnt_v2:getxattrdir 0 nfs_acl:0:aclreqcnt_v2:null 0 nfs_acl:0:aclreqcnt_v2:setacl 0 nfs_acl:0:aclreqcnt_v2:snaptime 8464512.426693 nfs_acl:0:aclreqcnt_v3:class misc nfs_acl:0:aclreqcnt_v3:crtime 114.1536334 nfs_acl:0:aclreqcnt_v3:getacl 539 nfs_acl:0:aclreqcnt_v3:getxattrdir 0 nfs_acl:0:aclreqcnt_v3:null 0 
nfs_acl:0:aclreqcnt_v3:setacl 0 nfs_acl:0:aclreqcnt_v3:snaptime 8464512.4271826 nfs_acl:0:aclreqcnt_v4:class misc nfs_acl:0:aclreqcnt_v4:crtime 114.1537254 nfs_acl:0:aclreqcnt_v4:getacl 0 nfs_acl:0:aclreqcnt_v4:null 0 nfs_acl:0:aclreqcnt_v4:setacl 0 nfs_acl:0:aclreqcnt_v4:snaptime 8464512.4276562 pcis:0:counters:class bus pcis:0:counters:crtime 69.294229 pcis:0:counters:pcr 0 pcis:0:counters:pic0 4695035 pcis:0:counters:pic1 4695035 pcis:0:counters:snaptime 8464512.4281184 pcis:0:pic0:E*_slow_cycles_per_64 432 pcis:0:pic0:E*_slow_transitions 416 pcis:0:pic0:class bus pcis:0:pic0:clear_pic 18446744073709551119 pcis:0:pic0:crtime 69.2919594 pcis:0:pic0:dvma_const_rd 32 pcis:0:pic0:dvma_const_wr 48 pcis:0:pic0:dvma_cycles 80 pcis:0:pic0:dvma_rd_buf_timeout 336 pcis:0:pic0:dvma_rd_rtry_nonstc 384 pcis:0:pic0:dvma_rd_rtry_stc 352 pcis:0:pic0:dvma_stream_buf_mis 64 pcis:0:pic0:dvma_stream_rd 0 pcis:0:pic0:dvma_stream_wr 16 pcis:0:pic0:dvma_tlb_misses 256 pcis:0:pic0:dvma_wd_xfr 96 pcis:0:pic0:dvma_wr_rtry_nonstc 400 pcis:0:pic0:dvma_wr_rtry_stc 368 pcis:0:pic0:interrupts 272 pcis:0:pic0:pio_cycles 112 pcis:0:pic0:pio_reads 304 pcis:0:pic0:pio_writes 320 pcis:0:pic0:saf_inter_nack 288 pcis:0:pic0:snaptime 8464512.428494 pcis:0:pic1:E*_slow_cycles_per_64 55296 pcis:0:pic1:E*_slow_transitions 53248 pcis:0:pic1:class bus pcis:0:pic1:clear_pic 18446744073709488127 pcis:0:pic1:crtime 69.2919826 pcis:0:pic1:dvma_const_rd 4096 pcis:0:pic1:dvma_const_wr 6144 pcis:0:pic1:dvma_cycles 10240 pcis:0:pic1:dvma_rd_buf_timeout 43008 pcis:0:pic1:dvma_rd_rtry_nonstc 49152 pcis:0:pic1:dvma_rd_rtry_stc 45056 pcis:0:pic1:dvma_stream_buf_mis 8192 pcis:0:pic1:dvma_stream_rd 0 pcis:0:pic1:dvma_stream_wr 2048 pcis:0:pic1:dvma_tlb_misses 32768 pcis:0:pic1:dvma_wd_xfr 12288 pcis:0:pic1:dvma_wr_rtry_nonstc 51200 pcis:0:pic1:dvma_wr_rtry_stc 47104 pcis:0:pic1:interrupts 34816 pcis:0:pic1:pio_cycles 14336 pcis:0:pic1:pio_reads 38912 pcis:0:pic1:pio_writes 40960 pcis:0:pic1:saf_inter_nack 36864 pcis:0:pic1:snaptime 8464512.4296828 pcis:1:counters:class bus pcis:1:counters:crtime 69.2987662 pcis:1:counters:pcr 0 pcis:1:counters:pic0 4127259 pcis:1:counters:pic1 4127259 pcis:1:counters:snaptime 8464512.4309518 pcisch:0:fm:acc_err 0 pcisch:0:fm:class misc pcisch:0:fm:crtime 69.2940478 pcisch:0:fm:dma_err 0 pcisch:0:fm:erpt_dropped 0 pcisch:0:fm:fm_cache_full 0 pcisch:0:fm:fm_cache_grew 0 pcisch:0:fm:snaptime 8464512.4313746 pcisch:1:fm:acc_err 0 pcisch:1:fm:class misc pcisch:1:fm:crtime 69.2986256 pcisch:1:fm:dma_err 0 pcisch:1:fm:erpt_dropped 0 pcisch:1:fm:fm_cache_full 0 pcisch:1:fm:fm_cache_grew 0 pcisch:1:fm:snaptime 8464512.4318508 poll:0:pollstats:class misc poll:0:pollstats:crtime 65.9026986 poll:0:pollstats:pollcachehit 52194519 poll:0:pollstats:pollcachemiss 500257 poll:0:pollstats:pollcachephit 108291 poll:0:pollstats:polllistmiss 33383033 poll:0:pollstats:snaptime 8464512.4323422 portfs:0:Event Ports:class misc portfs:0:Event Ports:crtime 89.7146444 portfs:0:Event Ports:ports 1 portfs:0:Event Ports:snaptime 8464512.4328024 rds:0:rds_kstat:class misc rds:0:rds_kstat:crtime 87.7843326 rds:0:rds_kstat:rds_enobufs 0 rds:0:rds_kstat:rds_ewouldblocks 0 rds:0:rds_kstat:rds_failovers 0 rds:0:rds_kstat:rds_nports 0 rds:0:rds_kstat:rds_nsessions 0 rds:0:rds_kstat:rds_port_quota 0 rds:0:rds_kstat:rds_port_quota_adjusted 0 rds:0:rds_kstat:rds_post_recv_buf_called 0 rds:0:rds_kstat:rds_rx_bytes 0 rds:0:rds_kstat:rds_rx_errors 0 rds:0:rds_kstat:rds_rx_pkts 0 rds:0:rds_kstat:rds_rx_pkts_pending 0 rds:0:rds_kstat:rds_stalls_ignored 0 
rds:0:rds_kstat:rds_stalls_recvd 0 rds:0:rds_kstat:rds_stalls_sent 0 rds:0:rds_kstat:rds_stalls_triggered 0 rds:0:rds_kstat:rds_tx_acks 0 rds:0:rds_kstat:rds_tx_bytes 0 rds:0:rds_kstat:rds_tx_errors 0 rds:0:rds_kstat:rds_tx_pkts 0 rds:0:rds_kstat:rds_unstalls_recvd 0 rds:0:rds_kstat:rds_unstalls_sent 0 rds:0:rds_kstat:rds_unstalls_triggered 0 rds:0:rds_kstat:snaptime 8464512.43312 rootnex:-1:fm:acc_err 0 rootnex:-1:fm:class misc rootnex:-1:fm:crtime 69.2663826 rootnex:-1:fm:dma_err 0 rootnex:-1:fm:erpt_dropped 0 rootnex:-1:fm:fm_cache_full 0 rootnex:-1:fm:fm_cache_grew 0 rootnex:-1:fm:snaptime 8464512.4345318 saf:0:counters:class bus saf:0:counters:crtime 69.2942134 saf:0:counters:pcr 0 saf:0:counters:pic0 0 saf:0:counters:pic1 0 saf:0:counters:snaptime 8464512.4350338 saf:0:pic0:class bus saf:0:pic0:clear_pic 18446744073709551119 saf:0:pic0:crtime 69.2920034 saf:0:pic0:csr_pios 192 saf:0:pic0:dvma_reads 288 saf:0:pic0:dvma_writes 304 saf:0:pic0:interrupts 160 saf:0:pic0:merge_buffer 144 saf:0:pic0:pcia_pios 224 saf:0:pic0:pcib_pios 240 saf:0:pic0:saf_bus_cycles 16 saf:0:pic0:saf_data_in_cycles 336 saf:0:pic0:saf_data_out_cycles 352 saf:0:pic0:saf_frn_coherent_cmds 48 saf:0:pic0:saf_frn_coherent_hits 64 saf:0:pic0:saf_frn_io_cmds 112 saf:0:pic0:saf_frn_io_hits 128 saf:0:pic0:saf_my_coherent_cmds 80 saf:0:pic0:saf_my_coherent_hits 96 saf:0:pic0:saf_orq_full_cycles 320 saf:0:pic0:saf_pause_asserted_cycles 32 saf:0:pic0:saf_pause_seen_cycles 272 saf:0:pic0:snaptime 8464512.4353984 saf:0:pic0:upa_pios 208 saf:0:pic1:class bus saf:0:pic1:clear_pic 18446744073709488127 saf:0:pic1:crtime 69.292025 saf:0:pic1:csr_pios 24576 saf:0:pic1:dvma_reads 36864 saf:0:pic1:dvma_writes 38912 saf:0:pic1:interrupts 20480 saf:0:pic1:merge_buffer 18432 saf:0:pic1:pcia_pios 28672 saf:0:pic1:pcib_pios 30720 saf:0:pic1:saf_bus_cycles 2048 saf:0:pic1:saf_data_in_cycles 43008 saf:0:pic1:saf_data_out_cycles 45056 saf:0:pic1:saf_frn_coherent_cmds 6144 saf:0:pic1:saf_frn_coherent_hits 8192 saf:0:pic1:saf_frn_io_cmds 14336 saf:0:pic1:saf_frn_io_hits 16384 saf:0:pic1:saf_my_coherent_cmds 10240 saf:0:pic1:saf_my_coherent_hits 12288 saf:0:pic1:saf_orq_full_cycles 40960 saf:0:pic1:saf_pause_asserted_cycles 4096 saf:0:pic1:saf_pause_seen_cycles 34816 saf:0:pic1:snaptime 8464512.4365852 saf:0:pic1:upa_pios 26624 sctp:0:sctp:class mib2 sctp:0:sctp:crtime 83.9812266 sctp:0:sctp:sctpAborted 0 sctp:0:sctp:sctpActiveEstab 0 sctp:0:sctp:sctpChecksumError 0 sctp:0:sctp:sctpCurrEstab 0 sctp:0:sctp:sctpFragUsrMsgs 0 sctp:0:sctp:sctpInAck 0 sctp:0:sctp:sctpInAckUnsent 0 sctp:0:sctp:sctpInClosed 0 sctp:0:sctp:sctpInCtrlChunks 0 sctp:0:sctp:sctpInDupAck 0 sctp:0:sctp:sctpInInvalidCookie 0 sctp:0:sctp:sctpInOrderChunks 0 sctp:0:sctp:sctpInSCTPPkts 0 sctp:0:sctp:sctpInUnorderChunks 0 sctp:0:sctp:sctpListenDrop 0 sctp:0:sctp:sctpMaxAssocs -1 sctp:0:sctp:sctpMaxInitRetr 8 sctp:0:sctp:sctpOutAck 0 sctp:0:sctp:sctpOutAckDelayed 0 sctp:0:sctp:sctpOutCtrlChunks 0 sctp:0:sctp:sctpOutFastRetrans 0 sctp:0:sctp:sctpOutOfBlue 0 sctp:0:sctp:sctpOutOrderChunks 0 sctp:0:sctp:sctpOutSCTPPkts 0 sctp:0:sctp:sctpOutUnorderChunks 0 sctp:0:sctp:sctpOutWinProbe 0 sctp:0:sctp:sctpOutWinUpdate 0 sctp:0:sctp:sctpPassiveEstab 0 sctp:0:sctp:sctpReasmUsrMsgs 0 sctp:0:sctp:sctpRetransChunks 0 sctp:0:sctp:sctpRtoAlgorithm 2 sctp:0:sctp:sctpRtoInitial 3000 sctp:0:sctp:sctpRtoMax 60000 sctp:0:sctp:sctpRtoMin 1000 sctp:0:sctp:sctpShutdowns 0 sctp:0:sctp:sctpTimHearBeatDrop 0 sctp:0:sctp:sctpTimHearBeatProbe 0 sctp:0:sctp:sctpTimRetrans 0 sctp:0:sctp:sctpTimRetransDrop 0 
sctp:0:sctp:sctpValCookieLife 60000 sctp:0:sctp:snaptime 8464512.4378922 sd:6:sd6:class disk sd:6:sd6:crtime 123.3637194 sd:6:sd6:nread 42 sd:6:sd6:nwritten 0 sd:6:sd6:rcnt 0 sd:6:sd6:reads 4 sd:6:sd6:rlastupdate 0 sd:6:sd6:rlentime 0 sd:6:sd6:rtime 0 sd:6:sd6:snaptime 8464512.4402182 sd:6:sd6:wcnt 0 sd:6:sd6:wlastupdate 0 sd:6:sd6:wlentime 0 sd:6:sd6:writes 8 sd:6:sd6:wtime 0 sderr:6:sd6,err:Device Not Ready 0 sderr:6:sd6,err:Hard Errors 0 sderr:6:sd6,err:Illegal Request 2 sderr:6:sd6,err:Media Error 0 sderr:6:sd6,err:No Device 0 sderr:6:sd6,err:Predictive Failure Analysis 0 sderr:6:sd6,err:Product DVD-ROM SD-M1401Revision sderr:6:sd6,err:Recoverable 0 sderr:6:sd6,err:Revision 1007 sderr:6:sd6,err:Serial No sderr:6:sd6,err:Size 0 sderr:6:sd6,err:Soft Errors 2 sderr:6:sd6,err:Transport Errors 0 sderr:6:sd6,err:Vendor TOSHIBA sderr:6:sd6,err:class device_error sderr:6:sd6,err:crtime 123.36373 sderr:6:sd6,err:snaptime 8464512.4410308 ssd:0:ssd0:class disk ssd:0:ssd0:crtime 82.3763626 ssd:0:ssd0:nread 63417877 ssd:0:ssd0:nwritten 4856320 ssd:0:ssd0:rcnt 0 ssd:0:ssd0:reads 5693 ssd:0:ssd0:rlastupdate 8463952.4317522 ssd:0:ssd0:rlentime 117.6357416 ssd:0:ssd0:rtime 34.1992796 ssd:0:ssd0:snaptime 8464512.441965 ssd:0:ssd0:wcnt 0 ssd:0:ssd0:wlastupdate 8463952.421981 ssd:0:ssd0:wlentime 6.3680184 ssd:0:ssd0:writes 2704 ssd:0:ssd0:wtime 1.890274 ssd:0:ssd0,c:class partition ssd:0:ssd0,c:crtime 82.4533928 ssd:0:ssd0,c:nread 63389184 ssd:0:ssd0,c:nwritten 4856320 ssd:0:ssd0,c:rcnt 0 ssd:0:ssd0,c:reads 5580 ssd:0:ssd0,c:rlastupdate 8463952.4317524 ssd:0:ssd0,c:rlentime 117.6347482 ssd:0:ssd0,c:rtime 34.1983976 ssd:0:ssd0,c:snaptime 8464512.4427528 ssd:0:ssd0,c:wcnt 0 ssd:0:ssd0,c:wlastupdate 8463952.4219814 ssd:0:ssd0,c:wlentime 6.3684234 ssd:0:ssd0,c:writes 2671 ssd:0:ssd0,c:wtime 1.8906388 ssd:1:ssd1:class disk ssd:1:ssd1:crtime 82.2915014 ssd:1:ssd1:nread 230092957 ssd:1:ssd1:nwritten 323200512 ssd:1:ssd1:rcnt 0 ssd:1:ssd1:reads 16463 ssd:1:ssd1:rlastupdate 8464459.9045616 ssd:1:ssd1:rlentime 3413.8506906 ssd:1:ssd1:rtime 880.4000218 ssd:1:ssd1:snaptime 8464512.443572 ssd:1:ssd1:wcnt 0 ssd:1:ssd1:wlastupdate 8464459.9006248 ssd:1:ssd1:wlentime 191.5863476 ssd:1:ssd1:writes 128260 ssd:1:ssd1:wtime 53.4398284 ssd:1:ssd1,a:class partition ssd:1:ssd1,a:crtime 82.3697246 ssd:1:ssd1,a:nread 230067096 ssd:1:ssd1,a:nwritten 323159552 ssd:1:ssd1,a:rcnt 0 ssd:1:ssd1,a:reads 16448 ssd:1:ssd1,a:rlastupdate 8464459.9045618 ssd:1:ssd1,a:rlentime 3413.7452716 ssd:1:ssd1,a:rtime 880.359547 ssd:1:ssd1,a:snaptime 8464512.4443694 ssd:1:ssd1,a:wcnt 0 ssd:1:ssd1,a:wlastupdate 8464459.9006252 ssd:1:ssd1,a:wlentime 191.5895104 ssd:1:ssd1,a:writes 128250 ssd:1:ssd1,a:wtime 53.4416648 ssd:1:ssd1,b:class partition ssd:1:ssd1,b:crtime 82.3697356 ssd:1:ssd1,b:nread 24576 ssd:1:ssd1,b:nwritten 40960 ssd:1:ssd1,b:rcnt 0 ssd:1:ssd1,b:reads 3 ssd:1:ssd1,b:rlastupdate 124.8685288 ssd:1:ssd1,b:rlentime 0.0896426 ssd:1:ssd1,b:rtime 0.0896426 ssd:1:ssd1,b:snaptime 8464512.4451556 ssd:1:ssd1,b:wcnt 0 ssd:1:ssd1,b:wlastupdate 124.8459718 ssd:1:ssd1,b:wlentime 0.0001144 ssd:1:ssd1,b:writes 5 ssd:1:ssd1,b:wtime 0.0001144 ssd:1:ssd1,c:class partition ssd:1:ssd1,c:crtime 82.3697442 ssd:1:ssd1,c:nread 0 ssd:1:ssd1,c:nwritten 0 ssd:1:ssd1,c:rcnt 0 ssd:1:ssd1,c:reads 0 ssd:1:ssd1,c:rlastupdate 0 ssd:1:ssd1,c:rlentime 0 ssd:1:ssd1,c:rtime 0 ssd:1:ssd1,c:snaptime 8464512.4459332 ssd:1:ssd1,c:wcnt 0 ssd:1:ssd1,c:wlastupdate 0 ssd:1:ssd1,c:wlentime 0 ssd:1:ssd1,c:writes 0 ssd:1:ssd1,c:wtime 0 ssderr:0:ssd0,err:Device Not Ready 0 
ssderr:0:ssd0,err:Hard Errors 0 ssderr:0:ssd0,err:Illegal Request 0 ssderr:0:ssd0,err:Media Error 0 ssderr:0:ssd0,err:No Device 0 ssderr:0:ssd0,err:Predictive Failure Analysis 0 ssderr:0:ssd0,err:Product ST336704FSUN36G Revision ssderr:0:ssd0,err:Recoverable 0 ssderr:0:ssd0,err:Revision 0726 ssderr:0:ssd0,err:Serial No 0119D20C6Y ssderr:0:ssd0,err:Size 36418595328 ssderr:0:ssd0,err:Soft Errors 0 ssderr:0:ssd0,err:Transport Errors 0 ssderr:0:ssd0,err:Vendor SEAGATE ssderr:0:ssd0,err:class device_error ssderr:0:ssd0,err:crtime 82.3763924 ssderr:0:ssd0,err:snaptime 8464512.4468418 ssderr:1:ssd1,err:Device Not Ready 0 ssderr:1:ssd1,err:Hard Errors 0 ssderr:1:ssd1,err:Illegal Request 0 ssderr:1:ssd1,err:Media Error 0 ssderr:1:ssd1,err:No Device 0 ssderr:1:ssd1,err:Predictive Failure Analysis 0 ssderr:1:ssd1,err:Product ST336704FSUN36G Revision ssderr:1:ssd1,err:Recoverable 0 ssderr:1:ssd1,err:Revision 0726 ssderr:1:ssd1,err:Serial No 0119D20CWB ssderr:1:ssd1,err:Size 36418595328 ssderr:1:ssd1,err:Soft Errors 0 ssderr:1:ssd1,err:Transport Errors 0 ssderr:1:ssd1,err:Vendor SEAGATE ssderr:1:ssd1,err:class device_error ssderr:1:ssd1,err:crtime 82.291514 ssderr:1:ssd1,err:snaptime 8464512.4477148 streams:0:strstat:bcwaits 0 streams:0:strstat:bufcalls 0 streams:0:strstat:class net streams:0:strstat:crtime 69.2823992 streams:0:strstat:freebs 0 streams:0:strstat:qhelps 0 streams:0:strstat:qremoved 0 streams:0:strstat:qwr_outer 1 streams:0:strstat:rservice 18844820 streams:0:strstat:snaptime 8464512.4486048 streams:0:strstat:sqenables 29 streams:0:strstat:sqremoved 0 streams:0:strstat:sqtoomany 0 streams:0:strstat:stenables 818274 streams:0:strstat:strwaits 29 streams:0:strstat:syncqservice 29 streams:0:strstat:taskqfails 99 tcp:0:tcp:activeOpens 1761 tcp:0:tcp:attemptFails 7 tcp:0:tcp:class mib2 tcp:0:tcp:connTableSize 56 tcp:0:tcp:connTableSize6 84 tcp:0:tcp:crtime 83.979241 tcp:0:tcp:currEstab 4 tcp:0:tcp:estabResets 78 tcp:0:tcp:halfOpenDrop 0 tcp:0:tcp:inAckBytes 317067157 tcp:0:tcp:inAckSegs 1029569 tcp:0:tcp:inAckUnsent 0 tcp:0:tcp:inClosed 19 tcp:0:tcp:inDataDupBytes 55131 tcp:0:tcp:inDataDupSegs 1966 tcp:0:tcp:inDataInorderBytes 213876245 tcp:0:tcp:inDataInorderSegs 1177754 tcp:0:tcp:inDataPartDupBytes 0 tcp:0:tcp:inDataPartDupSegs 0 tcp:0:tcp:inDataPastWinBytes 0 tcp:0:tcp:inDataPastWinSegs 0 tcp:0:tcp:inDataUnorderBytes 22707 tcp:0:tcp:inDataUnorderSegs 370 tcp:0:tcp:inDupAck 99254 tcp:0:tcp:inSegs 1736461 tcp:0:tcp:inWinProbe 0 tcp:0:tcp:inWinUpdate 0 tcp:0:tcp:listenDrop 0 tcp:0:tcp:listenDropQ0 0 tcp:0:tcp:maxConn -1 tcp:0:tcp:outAck 528013 tcp:0:tcp:outAckDelayed 208566 tcp:0:tcp:outControl 206648 tcp:0:tcp:outDataBytes 401627105 tcp:0:tcp:outDataSegs 1155369 tcp:0:tcp:outFastRetrans 0 tcp:0:tcp:outRsts 4560 tcp:0:tcp:outSackRetransSegs 0 tcp:0:tcp:outSegs 1492704 tcp:0:tcp:outUrg 28 tcp:0:tcp:outWinProbe 0 tcp:0:tcp:outWinUpdate 3 tcp:0:tcp:passiveOpens 97628 tcp:0:tcp:retransBytes 845886 tcp:0:tcp:retransSegs 4974 tcp:0:tcp:rtoAlgorithm 4 tcp:0:tcp:rtoMax 60000 tcp:0:tcp:rtoMin 400 tcp:0:tcp:rttNoUpdate 928820 tcp:0:tcp:rttUpdate 2295 tcp:0:tcp:snaptime 8464512.4497824 tcp:0:tcp:timKeepalive 2392 tcp:0:tcp:timKeepaliveDrop 1 tcp:0:tcp:timKeepaliveProbe 25 tcp:0:tcp:timRetrans 4481 tcp:0:tcp:timRetransDrop 172 tcp:0:tcpstat: tcp:0:tcpstat:class net tcp:0:tcpstat:crtime 83.9791546 tcp:0:tcpstat:snaptime 8464512.4526156 tcp:0:tcpstat:tcp_ack_timer_cnt 0 tcp:0:tcpstat:tcp_clean_death_nondetached 713 tcp:0:tcpstat:tcp_detach_non_time_wait 0 tcp:0:tcpstat:tcp_detach_time_wait 0 
tcp:0:tcpstat:tcp_eager_blowoff_calls 0 tcp:0:tcpstat:tcp_eager_blowoff_q 0 tcp:0:tcpstat:tcp_eager_blowoff_q0 0 tcp:0:tcpstat:tcp_eager_err1 0 tcp:0:tcpstat:tcp_eager_err2 0 tcp:0:tcpstat:tcp_eager_has_listener1 0 tcp:0:tcpstat:tcp_eagerfree2 0 tcp:0:tcpstat:tcp_eagerfree3 0 tcp:0:tcpstat:tcp_eagerfree4 0 tcp:0:tcpstat:tcp_eagerfree5 0 tcp:0:tcpstat:tcp_flwctl_on 268 tcp:0:tcpstat:tcp_found_eager 0 tcp:0:tcpstat:tcp_found_eager_binding1 0 tcp:0:tcpstat:tcp_found_eager_bound1 0 tcp:0:tcpstat:tcp_fusion_aborted 0 tcp:0:tcpstat:tcp_fusion_backenabled 62 tcp:0:tcpstat:tcp_fusion_flowctl 62 tcp:0:tcpstat:tcp_fusion_putnext 116 tcp:0:tcpstat:tcp_fusion_rrw_busy 0 tcp:0:tcpstat:tcp_fusion_rrw_msgcnt 171656 tcp:0:tcpstat:tcp_fusion_rrw_plugged 0 tcp:0:tcpstat:tcp_fusion_unfusable 0 tcp:0:tcpstat:tcp_fusion_unqualified 0 tcp:0:tcpstat:tcp_fusion_urg 0 tcp:0:tcpstat:tcp_in_ack_unsent_drop 0 tcp:0:tcpstat:tcp_ip_ire_send 178 tcp:0:tcpstat:tcp_ip_output 0 tcp:0:tcpstat:tcp_ip_send 0 tcp:0:tcpstat:tcp_ire_null 1704 tcp:0:tcpstat:tcp_ire_null1 0 tcp:0:tcpstat:tcp_listen_badflags 0 tcp:0:tcpstat:tcp_listendrop 0 tcp:0:tcpstat:tcp_listendropq0 0 tcp:0:tcpstat:tcp_mdt_addpdescfail 0 tcp:0:tcpstat:tcp_mdt_allocd 0 tcp:0:tcpstat:tcp_mdt_allocfail 0 tcp:0:tcpstat:tcp_mdt_conn_halted1 0 tcp:0:tcpstat:tcp_mdt_conn_halted2 0 tcp:0:tcpstat:tcp_mdt_conn_halted3 0 tcp:0:tcpstat:tcp_mdt_conn_resumed1 0 tcp:0:tcpstat:tcp_mdt_conn_resumed2 0 tcp:0:tcpstat:tcp_mdt_discarded 0 tcp:0:tcpstat:tcp_mdt_legacy_all 0 tcp:0:tcpstat:tcp_mdt_legacy_ret 0 tcp:0:tcpstat:tcp_mdt_legacy_small 0 tcp:0:tcpstat:tcp_mdt_linked 0 tcp:0:tcpstat:tcp_mdt_pkt_out 0 tcp:0:tcpstat:tcp_mdt_pkt_out_v4 0 tcp:0:tcpstat:tcp_mdt_pkt_out_v6 0 tcp:0:tcpstat:tcp_no_listener 36273 tcp:0:tcpstat:tcp_not_hard_bound 0 tcp:0:tcpstat:tcp_open_alloc 0 tcp:0:tcpstat:tcp_open_detached_alloc 0 tcp:0:tcpstat:tcp_out_sw_cksum 0 tcp:0:tcpstat:tcp_out_sw_cksum_bytes 0 tcp:0:tcpstat:tcp_push_timer_cnt 0 tcp:0:tcpstat:tcp_reinit_calls 714 tcp:0:tcpstat:tcp_reinput_syn 0 tcp:0:tcpstat:tcp_rput_time_wait 0 tcp:0:tcpstat:tcp_rput_v6_error 0 tcp:0:tcpstat:tcp_rsrv_calls 1 tcp:0:tcpstat:tcp_sock_fallback 64 tcp:0:tcpstat:tcp_time_wait 0 tcp:0:tcpstat:tcp_time_wait_fail 0 tcp:0:tcpstat:tcp_time_wait_reap 0 tcp:0:tcpstat:tcp_time_wait_success 24 tcp:0:tcpstat:tcp_time_wait_syn 0 tcp:0:tcpstat:tcp_timeout_cached_alloc 0 tcp:0:tcpstat:tcp_timeout_calls 0 tcp:0:tcpstat:tcp_timeout_cancel_reqs 0 tcp:0:tcpstat:tcp_timeout_canceled 0 tcp:0:tcpstat:tcp_timer_fire_early 2 tcp:0:tcpstat:tcp_timer_fire_miss 115 tcp:0:tcpstat:tcp_timermp_freed 0 tcp:0:tcpstat:tcp_timewait_syn_fail 0 tcp:0:tcpstat:tcp_wrong_queue 0 tcp:0:tcpstat:tcp_wrong_rq 0 tcp:0:tcpstat:tcp_wsrv_called 0 tcp:0:tcpstat:tcp_zcopy_backoff 0 tcp:0:tcpstat:tcp_zcopy_disable 0 tcp:0:tcpstat:tcp_zcopy_off 0 tcp:0:tcpstat:tcp_zcopy_on 0 tcp:0:tcpstat_g:class net tcp:0:tcpstat_g:crtime 83.9785722 tcp:0:tcpstat_g:snaptime 8464512.4570328 tcp:0:tcpstat_g:tcp_freelist_cleanup 983 tcp:0:tcpstat_g:tcp_timermp_allocdblfail 0 tcp:0:tcpstat_g:tcp_timermp_alloced 0 tcp:0:tcpstat_g:tcp_timermp_allocfail 0 udp:0:udp:class mib2 udp:0:udp:crtime 84.1477758 udp:0:udp:entry6Size 64 udp:0:udp:entrySize 40 udp:0:udp:inDatagrams 465157 udp:0:udp:inErrors 0 udp:0:udp:outDatagrams 516955 udp:0:udp:outErrors 0 udp:0:udp:snaptime 8464512.4575122 udp:0:udpstat:class net udp:0:udpstat:crtime 84.1477622 udp:0:udpstat:snaptime 8464512.458018 udp:0:udpstat:udp_drain 16 udp:0:udpstat:udp_in_pktinfo 0 udp:0:udpstat:udp_in_recvdstaddr 158641 
udp:0:udpstat:udp_in_recvdstopts 0 udp:0:udpstat:udp_in_recvhoplimit 0 udp:0:udpstat:udp_in_recvhopopts 0 udp:0:udpstat:udp_in_recvif 0 udp:0:udpstat:udp_in_recvopts 0 udp:0:udpstat:udp_in_recvpktinfo 0 udp:0:udpstat:udp_in_recvrtdstopts 0 udp:0:udpstat:udp_in_recvrthdr 0 udp:0:udpstat:udp_in_recvslla 0 udp:0:udpstat:udp_in_recvtclass 0 udp:0:udpstat:udp_in_recvttl 0 udp:0:udpstat:udp_in_recvucred 6 udp:0:udpstat:udp_ip_ire_send 28 udp:0:udpstat:udp_ip_send 0 udp:0:udpstat:udp_ire_null 5011 udp:0:udpstat:udp_out_err_notconn 0 udp:0:udpstat:udp_out_err_output 0 udp:0:udpstat:udp_out_err_tudr 0 udp:0:udpstat:udp_out_opt 4 udp:0:udpstat:udp_out_sw_cksum 0 udp:0:udpstat:udp_out_sw_cksum_bytes 0 udp:0:udpstat:udp_rrw_busy 0 udp:0:udpstat:udp_rrw_msgcnt 81440 udp:0:udpstat:udp_sock_fallback 0 ufs:0:inode_cache:cache allocs 19575 ufs:0:inode_cache:cache frees 0 ufs:0:inode_cache:class ufs ufs:0:inode_cache:crtime 69.290559 ufs:0:inode_cache:hits 18887 ufs:0:inode_cache:kmem allocs 19575 ufs:0:inode_cache:kmem frees 0 ufs:0:inode_cache:lookup idles 0 ufs:0:inode_cache:maxsize 129797 ufs:0:inode_cache:maxsize reached 19580 ufs:0:inode_cache:misses 19575 ufs:0:inode_cache:pushes at close 0 ufs:0:inode_cache:puts at backlist 750 ufs:0:inode_cache:puts at frontlist 6423 ufs:0:inode_cache:queues to free 4610 ufs:0:inode_cache:scans 3402043093 ufs:0:inode_cache:size 19575 ufs:0:inode_cache:snaptime 8464512.4595268 ufs:0:inode_cache:thread idles 0 ufs:0:inode_cache:vget idles 0 ufs_log:0:deltastats:ab0_deltas 59 ufs_log:0:deltastats:ab0_rolled 41 ufs_log:0:deltastats:allocblk_deltas 1685 ufs_log:0:deltastats:allocblk_rolled 286 ufs_log:0:deltastats:bitmap_deltas 6877 ufs_log:0:deltastats:bitmap_rolled 3416 ufs_log:0:deltastats:class ufs ufs_log:0:deltastats:crtime 69.2907636 ufs_log:0:deltastats:dir_deltas 12230 ufs_log:0:deltastats:dir_rolled 2936 ufs_log:0:deltastats:fbiwrite_deltas 114 ufs_log:0:deltastats:fbiwrite_rolled 106 ufs_log:0:deltastats:inode_deltas 234530 ufs_log:0:deltastats:inode_rolled 129960 ufs_log:0:deltastats:quota_deltas 0 ufs_log:0:deltastats:quota_rolled 0 ufs_log:0:deltastats:shadow_deltas 0 ufs_log:0:deltastats:shadow_rolled 0 ufs_log:0:deltastats:snaptime 8464512.460663 ufs_log:0:deltastats:suminfo_deltas 0 ufs_log:0:deltastats:suminfo_rolled 0 ufs_log:0:deltastats:superblock_deltas 6459 ufs_log:0:deltastats:superblock_rolled 2882 ufs_log:0:logstats:class ufs ufs_log:0:logstats:crtime 69.2907412 ufs_log:0:logstats:log_master_reads 0 ufs_log:0:logstats:log_reads 2 ufs_log:0:logstats:log_reads_inmem 75775 ufs_log:0:logstats:log_roll_reads 958 ufs_log:0:logstats:log_roll_writes 89107 ufs_log:0:logstats:log_writes 23944 ufs_log:0:logstats:master_reads 20912 ufs_log:0:logstats:master_writes 17825 ufs_log:0:logstats:snaptime 8464512.4618974 unix:0:Hex0x30002167588_minor_1:align 1 unix:0:Hex0x30002167588_minor_1:alloc 0 unix:0:Hex0x30002167588_minor_1:alloc_fail 0 unix:0:Hex0x30002167588_minor_1:buf_avail 0 unix:0:Hex0x30002167588_minor_1:buf_constructed 0 unix:0:Hex0x30002167588_minor_1:buf_inuse 0 unix:0:Hex0x30002167588_minor_1:buf_max 0 unix:0:Hex0x30002167588_minor_1:buf_size 1 unix:0:Hex0x30002167588_minor_1:buf_total 0 unix:0:Hex0x30002167588_minor_1:chunk_size 1 unix:0:Hex0x30002167588_minor_1:class kmem_cache unix:0:Hex0x30002167588_minor_1:crtime 93.55928 unix:0:Hex0x30002167588_minor_1:depot_alloc 0 unix:0:Hex0x30002167588_minor_1:depot_contention 0 unix:0:Hex0x30002167588_minor_1:depot_free 0 unix:0:Hex0x30002167588_minor_1:empty_magazines 0 
unix:0:Hex0x30002167588_minor_1:free 0 unix:0:Hex0x30002167588_minor_1:full_magazines 0 unix:0:Hex0x30002167588_minor_1:hash_lookup_depth 0 unix:0:Hex0x30002167588_minor_1:hash_rescale 0 unix:0:Hex0x30002167588_minor_1:hash_size 64 unix:0:Hex0x30002167588_minor_1:magazine_size 15 unix:0:Hex0x30002167588_minor_1:slab_alloc 0 unix:0:Hex0x30002167588_minor_1:slab_create 0 unix:0:Hex0x30002167588_minor_1:slab_destroy 0 unix:0:Hex0x30002167588_minor_1:slab_free 0 unix:0:Hex0x30002167588_minor_1:slab_size 64 unix:0:Hex0x30002167588_minor_1:snaptime 8464512.465238 unix:0:Hex0x30002167588_minor_1:vmem_source 58 unix:0:Hex0x300022cd648_minor_1:align 1 unix:0:Hex0x300022cd648_minor_1:alloc 0 unix:0:Hex0x300022cd648_minor_1:alloc_fail 0 unix:0:Hex0x300022cd648_minor_1:buf_avail 0 unix:0:Hex0x300022cd648_minor_1:buf_constructed 0 unix:0:Hex0x300022cd648_minor_1:buf_inuse 0 unix:0:Hex0x300022cd648_minor_1:buf_max 0 unix:0:Hex0x300022cd648_minor_1:buf_size 1 unix:0:Hex0x300022cd648_minor_1:buf_total 0 unix:0:Hex0x300022cd648_minor_1:chunk_size 1 unix:0:Hex0x300022cd648_minor_1:class kmem_cache unix:0:Hex0x300022cd648_minor_1:crtime 93.5590648 unix:0:Hex0x300022cd648_minor_1:depot_alloc 0 unix:0:Hex0x300022cd648_minor_1:depot_contention 0 unix:0:Hex0x300022cd648_minor_1:depot_free 0 unix:0:Hex0x300022cd648_minor_1:empty_magazines 0 unix:0:Hex0x300022cd648_minor_1:free 0 unix:0:Hex0x300022cd648_minor_1:full_magazines 0 unix:0:Hex0x300022cd648_minor_1:hash_lookup_depth 0 unix:0:Hex0x300022cd648_minor_1:hash_rescale 0 unix:0:Hex0x300022cd648_minor_1:hash_size 64 unix:0:Hex0x300022cd648_minor_1:magazine_size 15 unix:0:Hex0x300022cd648_minor_1:slab_alloc 0 unix:0:Hex0x300022cd648_minor_1:slab_create 0 unix:0:Hex0x300022cd648_minor_1:slab_destroy 0 unix:0:Hex0x300022cd648_minor_1:slab_free 0 unix:0:Hex0x300022cd648_minor_1:slab_size 64 unix:0:Hex0x300022cd648_minor_1:snaptime 8464512.466793 unix:0:Hex0x300022cd648_minor_1:vmem_source 57 unix:0:IP_INJECT_QUEUE_IN:class taskq unix:0:IP_INJECT_QUEUE_IN:crtime 83.966481 unix:0:IP_INJECT_QUEUE_IN:executed 0 unix:0:IP_INJECT_QUEUE_IN:maxtasks 0 unix:0:IP_INJECT_QUEUE_IN:nactive 1 unix:0:IP_INJECT_QUEUE_IN:nalloc 0 unix:0:IP_INJECT_QUEUE_IN:priority 60 unix:0:IP_INJECT_QUEUE_IN:snaptime 8464512.468327 unix:0:IP_INJECT_QUEUE_IN:tasks 0 unix:0:IP_INJECT_QUEUE_IN:threads 1 unix:0:IP_INJECT_QUEUE_IN:totaltime 0 unix:0:IP_INJECT_QUEUE_OUT:class taskq unix:0:IP_INJECT_QUEUE_OUT:crtime 83.9664346 unix:0:IP_INJECT_QUEUE_OUT:executed 0 unix:0:IP_INJECT_QUEUE_OUT:maxtasks 0 unix:0:IP_INJECT_QUEUE_OUT:nactive 1 unix:0:IP_INJECT_QUEUE_OUT:nalloc 0 unix:0:IP_INJECT_QUEUE_OUT:priority 60 unix:0:IP_INJECT_QUEUE_OUT:snaptime 8464512.468927 unix:0:IP_INJECT_QUEUE_OUT:tasks 0 unix:0:IP_INJECT_QUEUE_OUT:threads 1 unix:0:IP_INJECT_QUEUE_OUT:totaltime 0 unix:0:IP_NIC_EVENT_QUEUE:class taskq unix:0:IP_NIC_EVENT_QUEUE:crtime 83.9665398 unix:0:IP_NIC_EVENT_QUEUE:executed 8 unix:0:IP_NIC_EVENT_QUEUE:maxtasks 1 unix:0:IP_NIC_EVENT_QUEUE:nactive 1 unix:0:IP_NIC_EVENT_QUEUE:nalloc 0 unix:0:IP_NIC_EVENT_QUEUE:priority 60 unix:0:IP_NIC_EVENT_QUEUE:snaptime 8464512.4695106 unix:0:IP_NIC_EVENT_QUEUE:tasks 8 unix:0:IP_NIC_EVENT_QUEUE:threads 1 unix:0:IP_NIC_EVENT_QUEUE:totaltime 38600 unix:0:NFS_idmap_cache:align 8 unix:0:NFS_idmap_cache:alloc 0 unix:0:NFS_idmap_cache:alloc_fail 0 unix:0:NFS_idmap_cache:buf_avail 0 unix:0:NFS_idmap_cache:buf_constructed 0 unix:0:NFS_idmap_cache:buf_inuse 0 unix:0:NFS_idmap_cache:buf_max 0 unix:0:NFS_idmap_cache:buf_size 48 unix:0:NFS_idmap_cache:buf_total 0 
unix:0:NFS_idmap_cache:chunk_size 48 unix:0:NFS_idmap_cache:class kmem_cache unix:0:NFS_idmap_cache:crtime 114.1532922 unix:0:NFS_idmap_cache:depot_alloc 0 unix:0:NFS_idmap_cache:depot_contention 0 unix:0:NFS_idmap_cache:depot_free 0 unix:0:NFS_idmap_cache:empty_magazines 0 unix:0:NFS_idmap_cache:free 0 unix:0:NFS_idmap_cache:full_magazines 0 unix:0:NFS_idmap_cache:hash_lookup_depth 0 unix:0:NFS_idmap_cache:hash_rescale 0 unix:0:NFS_idmap_cache:hash_size 0 unix:0:NFS_idmap_cache:magazine_size 15 unix:0:NFS_idmap_cache:slab_alloc 0 unix:0:NFS_idmap_cache:slab_create 0 unix:0:NFS_idmap_cache:slab_destroy 0 unix:0:NFS_idmap_cache:slab_free 0 unix:0:NFS_idmap_cache:slab_size 8192 unix:0:NFS_idmap_cache:snaptime 8464512.4702526 unix:0:NFS_idmap_cache:vmem_source 23 unix:0:anon_cache:align 16 unix:0:anon_cache:alloc 24912979 unix:0:anon_cache:alloc_fail 0 unix:0:anon_cache:buf_avail 54231 unix:0:anon_cache:buf_constructed 54125 unix:0:anon_cache:buf_inuse 9989 unix:0:anon_cache:buf_max 64220 unix:0:anon_cache:buf_size 48 unix:0:anon_cache:buf_total 64220 unix:0:anon_cache:chunk_size 48 unix:0:anon_cache:class kmem_cache unix:0:anon_cache:crtime 69.2855846 unix:0:anon_cache:depot_alloc 1351314 unix:0:anon_cache:depot_contention 8 unix:0:anon_cache:depot_free 1354926 unix:0:anon_cache:empty_magazines 125 unix:0:anon_cache:free 24906602 unix:0:anon_cache:full_magazines 3608 unix:0:anon_cache:hash_lookup_depth 0 unix:0:anon_cache:hash_rescale 0 unix:0:anon_cache:hash_size 0 unix:0:anon_cache:magazine_size 15 unix:0:anon_cache:slab_alloc 64114 unix:0:anon_cache:slab_create 380 unix:0:anon_cache:slab_destroy 0 unix:0:anon_cache:slab_free 0 unix:0:anon_cache:slab_size 8192 unix:0:anon_cache:snaptime 8464512.4716578 unix:0:anon_cache:vmem_source 23 unix:0:anonmap_cache:align 8 unix:0:anonmap_cache:alloc 4877389 unix:0:anonmap_cache:alloc_fail 0 unix:0:anonmap_cache:buf_avail 2475 unix:0:anonmap_cache:buf_constructed 2443 unix:0:anonmap_cache:buf_inuse 1462 unix:0:anonmap_cache:buf_max 3937 unix:0:anonmap_cache:buf_size 64 unix:0:anonmap_cache:buf_total 3937 unix:0:anonmap_cache:chunk_size 64 unix:0:anonmap_cache:class kmem_cache unix:0:anonmap_cache:crtime 69.2856008 unix:0:anonmap_cache:depot_alloc 220801 unix:0:anonmap_cache:depot_contention 0 unix:0:anonmap_cache:depot_free 220967 unix:0:anonmap_cache:empty_magazines 10 unix:0:anonmap_cache:free 4876093 unix:0:anonmap_cache:full_magazines 162 unix:0:anonmap_cache:hash_lookup_depth 0 unix:0:anonmap_cache:hash_rescale 0 unix:0:anonmap_cache:hash_size 0 unix:0:anonmap_cache:magazine_size 15 unix:0:anonmap_cache:slab_alloc 3905 unix:0:anonmap_cache:slab_create 31 unix:0:anonmap_cache:slab_destroy 0 unix:0:anonmap_cache:slab_free 0 unix:0:anonmap_cache:slab_size 8192 unix:0:anonmap_cache:snaptime 8464512.4730532 unix:0:anonmap_cache:vmem_source 23 unix:0:arc_buf_hdr_t:align 8 unix:0:arc_buf_hdr_t:alloc 0 unix:0:arc_buf_hdr_t:alloc_fail 0 unix:0:arc_buf_hdr_t:buf_avail 0 unix:0:arc_buf_hdr_t:buf_constructed 0 unix:0:arc_buf_hdr_t:buf_inuse 0 unix:0:arc_buf_hdr_t:buf_max 0 unix:0:arc_buf_hdr_t:buf_size 144 unix:0:arc_buf_hdr_t:buf_total 0 unix:0:arc_buf_hdr_t:chunk_size 144 unix:0:arc_buf_hdr_t:class kmem_cache unix:0:arc_buf_hdr_t:crtime 97.139398 unix:0:arc_buf_hdr_t:depot_alloc 0 unix:0:arc_buf_hdr_t:depot_contention 0 unix:0:arc_buf_hdr_t:depot_free 0 unix:0:arc_buf_hdr_t:empty_magazines 0 unix:0:arc_buf_hdr_t:free 0 unix:0:arc_buf_hdr_t:full_magazines 0 unix:0:arc_buf_hdr_t:hash_lookup_depth 0 unix:0:arc_buf_hdr_t:hash_rescale 0 
unix:0:arc_buf_hdr_t:hash_size 0 unix:0:arc_buf_hdr_t:magazine_size 7 unix:0:arc_buf_hdr_t:slab_alloc 0 unix:0:arc_buf_hdr_t:slab_create 0 unix:0:arc_buf_hdr_t:slab_destroy 0 unix:0:arc_buf_hdr_t:slab_free 0 unix:0:arc_buf_hdr_t:slab_size 8192 unix:0:arc_buf_hdr_t:snaptime 8464512.4744398 unix:0:arc_buf_hdr_t:vmem_source 23 unix:0:arc_buf_t:align 8 unix:0:arc_buf_t:alloc 0 unix:0:arc_buf_t:alloc_fail 0 unix:0:arc_buf_t:buf_avail 0 unix:0:arc_buf_t:buf_constructed 0 unix:0:arc_buf_t:buf_inuse 0 unix:0:arc_buf_t:buf_max 0 unix:0:arc_buf_t:buf_size 40 unix:0:arc_buf_t:buf_total 0 unix:0:arc_buf_t:chunk_size 40 unix:0:arc_buf_t:class kmem_cache unix:0:arc_buf_t:crtime 97.1394162 unix:0:arc_buf_t:depot_alloc 0 unix:0:arc_buf_t:depot_contention 0 unix:0:arc_buf_t:depot_free 0 unix:0:arc_buf_t:empty_magazines 0 unix:0:arc_buf_t:free 0 unix:0:arc_buf_t:full_magazines 0 unix:0:arc_buf_t:hash_lookup_depth 0 unix:0:arc_buf_t:hash_rescale 0 unix:0:arc_buf_t:hash_size 0 unix:0:arc_buf_t:magazine_size 15 unix:0:arc_buf_t:slab_alloc 0 unix:0:arc_buf_t:slab_create 0 unix:0:arc_buf_t:slab_destroy 0 unix:0:arc_buf_t:slab_free 0 unix:0:arc_buf_t:slab_size 8192 unix:0:arc_buf_t:snaptime 8464512.475903 unix:0:arc_buf_t:vmem_source 23 unix:0:as_cache:align 8 unix:0:as_cache:alloc 255170 unix:0:as_cache:alloc_fail 0 unix:0:as_cache:buf_avail 134 unix:0:as_cache:buf_constructed 112 unix:0:as_cache:buf_inuse 46 unix:0:as_cache:buf_max 180 unix:0:as_cache:buf_size 224 unix:0:as_cache:buf_total 180 unix:0:as_cache:chunk_size 224 unix:0:as_cache:class kmem_cache unix:0:as_cache:crtime 69.2827032 unix:0:as_cache:depot_alloc 14506 unix:0:as_cache:depot_contention 0 unix:0:as_cache:depot_free 14524 unix:0:as_cache:empty_magazines 1 unix:0:as_cache:free 255142 unix:0:as_cache:full_magazines 14 unix:0:as_cache:hash_lookup_depth 0 unix:0:as_cache:hash_rescale 0 unix:0:as_cache:hash_size 0 unix:0:as_cache:magazine_size 7 unix:0:as_cache:slab_alloc 158 unix:0:as_cache:slab_create 5 unix:0:as_cache:slab_destroy 0 unix:0:as_cache:slab_free 0 unix:0:as_cache:slab_size 8192 unix:0:as_cache:snaptime 8464512.4772702 unix:0:as_cache:vmem_source 23 unix:0:authdes_cache_handle:align 8 unix:0:authdes_cache_handle:alloc 0 unix:0:authdes_cache_handle:alloc_fail 0 unix:0:authdes_cache_handle:buf_avail 0 unix:0:authdes_cache_handle:buf_constructed 0 unix:0:authdes_cache_handle:buf_inuse 0 unix:0:authdes_cache_handle:buf_max 0 unix:0:authdes_cache_handle:buf_size 80 unix:0:authdes_cache_handle:buf_total 0 unix:0:authdes_cache_handle:chunk_size 80 unix:0:authdes_cache_handle:class kmem_cache unix:0:authdes_cache_handle:crtime 114.1389814 unix:0:authdes_cache_handle:depot_alloc 0 unix:0:authdes_cache_handle:depot_contention 0 unix:0:authdes_cache_handle:depot_free 0 unix:0:authdes_cache_handle:empty_magazines 0 unix:0:authdes_cache_handle:free 0 unix:0:authdes_cache_handle:full_magazines 0 unix:0:authdes_cache_handle:hash_lookup_depth 0 unix:0:authdes_cache_handle:hash_rescale 0 unix:0:authdes_cache_handle:hash_size 0 unix:0:authdes_cache_handle:magazine_size 7 unix:0:authdes_cache_handle:slab_alloc 0 unix:0:authdes_cache_handle:slab_create 0 unix:0:authdes_cache_handle:slab_destroy 0 unix:0:authdes_cache_handle:slab_free 0 unix:0:authdes_cache_handle:slab_size 8192 unix:0:authdes_cache_handle:snaptime 8464512.4787318 unix:0:authdes_cache_handle:vmem_source 23 unix:0:authkern_cache:align 8 unix:0:authkern_cache:alloc 275940 unix:0:authkern_cache:alloc_fail 0 unix:0:authkern_cache:buf_avail 111 unix:0:authkern_cache:buf_constructed 17 
unix:0:authkern_cache:buf_inuse 2 unix:0:authkern_cache:buf_max 113 unix:0:authkern_cache:buf_size 72 unix:0:authkern_cache:buf_total 113 unix:0:authkern_cache:chunk_size 72 unix:0:authkern_cache:class kmem_cache unix:0:authkern_cache:crtime 114.138947 unix:0:authkern_cache:depot_alloc 100 unix:0:authkern_cache:depot_contention 0 unix:0:authkern_cache:depot_free 103 unix:0:authkern_cache:empty_magazines 1 unix:0:authkern_cache:free 275941 unix:0:authkern_cache:full_magazines 0 unix:0:authkern_cache:hash_lookup_depth 0 unix:0:authkern_cache:hash_rescale 0 unix:0:authkern_cache:hash_size 0 unix:0:authkern_cache:magazine_size 7 unix:0:authkern_cache:slab_alloc 19 unix:0:authkern_cache:slab_create 1 unix:0:authkern_cache:slab_destroy 0 unix:0:authkern_cache:slab_free 0 unix:0:authkern_cache:slab_size 8192 unix:0:authkern_cache:snaptime 8464512.4801842 unix:0:authkern_cache:vmem_source 23 unix:0:authloopback_cache:align 8 unix:0:authloopback_cache:alloc 71229 unix:0:authloopback_cache:alloc_fail 0 unix:0:authloopback_cache:buf_avail 113 unix:0:authloopback_cache:buf_constructed 15 unix:0:authloopback_cache:buf_inuse 0 unix:0:authloopback_cache:buf_max 113 unix:0:authloopback_cache:buf_size 72 unix:0:authloopback_cache:buf_total 113 unix:0:authloopback_cache:chunk_size 72 unix:0:authloopback_cache:class kmem_cache unix:0:authloopback_cache:crtime 114.1389622 unix:0:authloopback_cache:depot_alloc 46 unix:0:authloopback_cache:depot_contention 0 unix:0:authloopback_cache:depot_free 49 unix:0:authloopback_cache:empty_magazines 1 unix:0:authloopback_cache:free 71232 unix:0:authloopback_cache:full_magazines 0 unix:0:authloopback_cache:hash_lookup_depth 0 unix:0:authloopback_cache:hash_rescale 0 unix:0:authloopback_cache:hash_size 0 unix:0:authloopback_cache:magazine_size 7 unix:0:authloopback_cache:slab_alloc 15 unix:0:authloopback_cache:slab_create 1 unix:0:authloopback_cache:slab_destroy 0 unix:0:authloopback_cache:slab_free 0 unix:0:authloopback_cache:slab_size 8192 unix:0:authloopback_cache:snaptime 8464512.4816288 unix:0:authloopback_cache:vmem_source 23 unix:0:biostats:buffer_cache_hits 751980 unix:0:biostats:buffer_cache_lookups 753654 unix:0:biostats:buffers_locked_by_someone 75 unix:0:biostats:class misc unix:0:biostats:crtime 65.9026494 unix:0:biostats:duplicate_buffers_found 0 unix:0:biostats:new_buffer_requests 0 unix:0:biostats:snaptime 8464512.4830306 unix:0:biostats:waits_for_buffer_allocs 0 unix:0:bp_map_114688:align 16384 unix:0:bp_map_114688:alloc 0 unix:0:bp_map_114688:alloc_fail 0 unix:0:bp_map_114688:buf_avail 0 unix:0:bp_map_114688:buf_constructed 0 unix:0:bp_map_114688:buf_inuse 0 unix:0:bp_map_114688:buf_max 0 unix:0:bp_map_114688:buf_size 114688 unix:0:bp_map_114688:buf_total 0 unix:0:bp_map_114688:chunk_size 114688 unix:0:bp_map_114688:class kmem_cache unix:0:bp_map_114688:crtime 66.0106866 unix:0:bp_map_114688:depot_alloc 0 unix:0:bp_map_114688:depot_contention 0 unix:0:bp_map_114688:depot_free 0 unix:0:bp_map_114688:empty_magazines 0 unix:0:bp_map_114688:free 0 unix:0:bp_map_114688:full_magazines 0 unix:0:bp_map_114688:hash_lookup_depth 0 unix:0:bp_map_114688:hash_rescale 0 unix:0:bp_map_114688:hash_size 64 unix:0:bp_map_114688:magazine_size 1 unix:0:bp_map_114688:slab_alloc 0 unix:0:bp_map_114688:slab_create 0 unix:0:bp_map_114688:slab_destroy 0 unix:0:bp_map_114688:slab_free 0 unix:0:bp_map_114688:slab_size 524288 unix:0:bp_map_114688:snaptime 8464512.4835644 unix:0:bp_map_114688:vmem_source 29 unix:0:bp_map_131072:align 16384 unix:0:bp_map_131072:alloc 0 
unix:0:bp_map_131072:alloc_fail 0 unix:0:bp_map_131072:buf_avail 0 unix:0:bp_map_131072:buf_constructed 0 unix:0:bp_map_131072:buf_inuse 0 unix:0:bp_map_131072:buf_max 0 unix:0:bp_map_131072:buf_size 131072 unix:0:bp_map_131072:buf_total 0 unix:0:bp_map_131072:chunk_size 131072 unix:0:bp_map_131072:class kmem_cache unix:0:bp_map_131072:crtime 66.010703 unix:0:bp_map_131072:depot_alloc 0 unix:0:bp_map_131072:depot_contention 0 unix:0:bp_map_131072:depot_free 0 unix:0:bp_map_131072:empty_magazines 0 unix:0:bp_map_131072:free 0 unix:0:bp_map_131072:full_magazines 0 unix:0:bp_map_131072:hash_lookup_depth 0 unix:0:bp_map_131072:hash_rescale 0 unix:0:bp_map_131072:hash_size 64 unix:0:bp_map_131072:magazine_size 1 unix:0:bp_map_131072:slab_alloc 0 unix:0:bp_map_131072:slab_create 0 unix:0:bp_map_131072:slab_destroy 0 unix:0:bp_map_131072:slab_free 0 unix:0:bp_map_131072:slab_size 524288 unix:0:bp_map_131072:snaptime 8464512.4849316 unix:0:bp_map_131072:vmem_source 29 unix:0:bp_map_16384:align 16384 unix:0:bp_map_16384:alloc 18981 unix:0:bp_map_16384:alloc_fail 0 unix:0:bp_map_16384:buf_avail 32 unix:0:bp_map_16384:buf_constructed 5 unix:0:bp_map_16384:buf_inuse 0 unix:0:bp_map_16384:buf_max 32 unix:0:bp_map_16384:buf_size 16384 unix:0:bp_map_16384:buf_total 32 unix:0:bp_map_16384:chunk_size 16384 unix:0:bp_map_16384:class kmem_cache unix:0:bp_map_16384:crtime 66.0103536 unix:0:bp_map_16384:depot_alloc 30 unix:0:bp_map_16384:depot_contention 0 unix:0:bp_map_16384:depot_free 35 unix:0:bp_map_16384:empty_magazines 1 unix:0:bp_map_16384:free 18986 unix:0:bp_map_16384:full_magazines 2 unix:0:bp_map_16384:hash_lookup_depth 0 unix:0:bp_map_16384:hash_rescale 0 unix:0:bp_map_16384:hash_size 64 unix:0:bp_map_16384:magazine_size 1 unix:0:bp_map_16384:slab_alloc 5 unix:0:bp_map_16384:slab_create 1 unix:0:bp_map_16384:slab_destroy 0 unix:0:bp_map_16384:slab_free 0 unix:0:bp_map_16384:slab_size 524288 unix:0:bp_map_16384:snaptime 8464512.4863666 unix:0:bp_map_16384:vmem_source 29 unix:0:bp_map_32768:align 16384 unix:0:bp_map_32768:alloc 1447 unix:0:bp_map_32768:alloc_fail 0 unix:0:bp_map_32768:buf_avail 16 unix:0:bp_map_32768:buf_constructed 10 unix:0:bp_map_32768:buf_inuse 0 unix:0:bp_map_32768:buf_max 16 unix:0:bp_map_32768:buf_size 32768 unix:0:bp_map_32768:buf_total 16 unix:0:bp_map_32768:chunk_size 32768 unix:0:bp_map_32768:class kmem_cache unix:0:bp_map_32768:crtime 66.0103704 unix:0:bp_map_32768:depot_alloc 126 unix:0:bp_map_32768:depot_contention 0 unix:0:bp_map_32768:depot_free 136 unix:0:bp_map_32768:empty_magazines 1 unix:0:bp_map_32768:free 1457 unix:0:bp_map_32768:full_magazines 7 unix:0:bp_map_32768:hash_lookup_depth 0 unix:0:bp_map_32768:hash_rescale 0 unix:0:bp_map_32768:hash_size 64 unix:0:bp_map_32768:magazine_size 1 unix:0:bp_map_32768:slab_alloc 10 unix:0:bp_map_32768:slab_create 1 unix:0:bp_map_32768:slab_destroy 0 unix:0:bp_map_32768:slab_free 0 unix:0:bp_map_32768:slab_size 524288 unix:0:bp_map_32768:snaptime 8464512.487767 unix:0:bp_map_32768:vmem_source 29 unix:0:bp_map_49152:align 16384 unix:0:bp_map_49152:alloc 0 unix:0:bp_map_49152:alloc_fail 0 unix:0:bp_map_49152:buf_avail 0 unix:0:bp_map_49152:buf_constructed 0 unix:0:bp_map_49152:buf_inuse 0 unix:0:bp_map_49152:buf_max 0 unix:0:bp_map_49152:buf_size 49152 unix:0:bp_map_49152:buf_total 0 unix:0:bp_map_49152:chunk_size 49152 unix:0:bp_map_49152:class kmem_cache unix:0:bp_map_49152:crtime 66.010616 unix:0:bp_map_49152:depot_alloc 0 unix:0:bp_map_49152:depot_contention 0 unix:0:bp_map_49152:depot_free 0 
unix:0:bp_map_49152:empty_magazines 0 unix:0:bp_map_49152:free 0 unix:0:bp_map_49152:full_magazines 0 unix:0:bp_map_49152:hash_lookup_depth 0 unix:0:bp_map_49152:hash_rescale 0 unix:0:bp_map_49152:hash_size 64 unix:0:bp_map_49152:magazine_size 1 unix:0:bp_map_49152:slab_alloc 0 unix:0:bp_map_49152:slab_create 0 unix:0:bp_map_49152:slab_destroy 0 unix:0:bp_map_49152:slab_free 0 unix:0:bp_map_49152:slab_size 524288 unix:0:bp_map_49152:snaptime 8464512.4891378 unix:0:bp_map_49152:vmem_source 29 unix:0:bp_map_65536:align 16384 unix:0:bp_map_65536:alloc 0 unix:0:bp_map_65536:alloc_fail 0 unix:0:bp_map_65536:buf_avail 0 unix:0:bp_map_65536:buf_constructed 0 unix:0:bp_map_65536:buf_inuse 0 unix:0:bp_map_65536:buf_max 0 unix:0:bp_map_65536:buf_size 65536 unix:0:bp_map_65536:buf_total 0 unix:0:bp_map_65536:chunk_size 65536 unix:0:bp_map_65536:class kmem_cache unix:0:bp_map_65536:crtime 66.0106344 unix:0:bp_map_65536:depot_alloc 0 unix:0:bp_map_65536:depot_contention 0 unix:0:bp_map_65536:depot_free 0 unix:0:bp_map_65536:empty_magazines 0 unix:0:bp_map_65536:free 0 unix:0:bp_map_65536:full_magazines 0 unix:0:bp_map_65536:hash_lookup_depth 0 unix:0:bp_map_65536:hash_rescale 0 unix:0:bp_map_65536:hash_size 64 unix:0:bp_map_65536:magazine_size 1 unix:0:bp_map_65536:slab_alloc 0 unix:0:bp_map_65536:slab_create 0 unix:0:bp_map_65536:slab_destroy 0 unix:0:bp_map_65536:slab_free 0 unix:0:bp_map_65536:slab_size 524288 unix:0:bp_map_65536:snaptime 8464512.490658 unix:0:bp_map_65536:vmem_source 29 unix:0:bp_map_81920:align 16384 unix:0:bp_map_81920:alloc 0 unix:0:bp_map_81920:alloc_fail 0 unix:0:bp_map_81920:buf_avail 0 unix:0:bp_map_81920:buf_constructed 0 unix:0:bp_map_81920:buf_inuse 0 unix:0:bp_map_81920:buf_max 0 unix:0:bp_map_81920:buf_size 81920 unix:0:bp_map_81920:buf_total 0 unix:0:bp_map_81920:chunk_size 81920 unix:0:bp_map_81920:class kmem_cache unix:0:bp_map_81920:crtime 66.010652 unix:0:bp_map_81920:depot_alloc 0 unix:0:bp_map_81920:depot_contention 0 unix:0:bp_map_81920:depot_free 0 unix:0:bp_map_81920:empty_magazines 0 unix:0:bp_map_81920:free 0 unix:0:bp_map_81920:full_magazines 0 unix:0:bp_map_81920:hash_lookup_depth 0 unix:0:bp_map_81920:hash_rescale 0 unix:0:bp_map_81920:hash_size 64 unix:0:bp_map_81920:magazine_size 1 unix:0:bp_map_81920:slab_alloc 0 unix:0:bp_map_81920:slab_create 0 unix:0:bp_map_81920:slab_destroy 0 unix:0:bp_map_81920:slab_free 0 unix:0:bp_map_81920:slab_size 524288 unix:0:bp_map_81920:snaptime 8464512.4921324 unix:0:bp_map_81920:vmem_source 29 unix:0:bp_map_98304:align 16384 unix:0:bp_map_98304:alloc 0 unix:0:bp_map_98304:alloc_fail 0 unix:0:bp_map_98304:buf_avail 0 unix:0:bp_map_98304:buf_constructed 0 unix:0:bp_map_98304:buf_inuse 0 unix:0:bp_map_98304:buf_max 0 unix:0:bp_map_98304:buf_size 98304 unix:0:bp_map_98304:buf_total 0 unix:0:bp_map_98304:chunk_size 98304 unix:0:bp_map_98304:class kmem_cache unix:0:bp_map_98304:crtime 66.010669 unix:0:bp_map_98304:depot_alloc 0 unix:0:bp_map_98304:depot_contention 0 unix:0:bp_map_98304:depot_free 0 unix:0:bp_map_98304:empty_magazines 0 unix:0:bp_map_98304:free 0 unix:0:bp_map_98304:full_magazines 0 unix:0:bp_map_98304:hash_lookup_depth 0 unix:0:bp_map_98304:hash_rescale 0 unix:0:bp_map_98304:hash_size 64 unix:0:bp_map_98304:magazine_size 1 unix:0:bp_map_98304:slab_alloc 0 unix:0:bp_map_98304:slab_create 0 unix:0:bp_map_98304:slab_destroy 0 unix:0:bp_map_98304:slab_free 0 unix:0:bp_map_98304:slab_size 524288 unix:0:bp_map_98304:snaptime 8464512.4935102 unix:0:bp_map_98304:vmem_source 29 unix:0:callout_taskq:class taskq 
unix:0:callout_taskq:crtime 69.2734612 unix:0:callout_taskq:executed 183471669 unix:0:callout_taskq:maxtasks 4 unix:0:callout_taskq:nactive 4 unix:0:callout_taskq:nalloc 0 unix:0:callout_taskq:priority 99 unix:0:callout_taskq:snaptime 8464512.4948762 unix:0:callout_taskq:tasks 183471669 unix:0:callout_taskq:threads 2 unix:0:callout_taskq:totaltime 3084176832200 unix:0:cbstats:L2 0 unix:0:cbstats:alloc 0 unix:0:cbstats:asked 0 unix:0:cbstats:class misc unix:0:cbstats:crtime 66.6585176 unix:0:cbstats:delete 0 unix:0:cbstats:grow 0 unix:0:cbstats:maxlist 0 unix:0:cbstats:maxreq 0 unix:0:cbstats:new 0 unix:0:cbstats:run 0 unix:0:cbstats:runouts 0 unix:0:cbstats:snaptime 8464512.4954698 unix:0:ce_queue:cancelled 0 unix:0:ce_queue:class errorq unix:0:ce_queue:commit_fail 0 unix:0:ce_queue:committed 0 unix:0:ce_queue:crtime 69.2660544 unix:0:ce_queue:dispatched 0 unix:0:ce_queue:dropped 0 unix:0:ce_queue:logged 0 unix:0:ce_queue:reserve_fail 0 unix:0:ce_queue:reserved 0 unix:0:ce_queue:snaptime 8464512.4961432 unix:0:ch_private_cache:align 512 unix:0:ch_private_cache:alloc 2 unix:0:ch_private_cache:alloc_fail 0 unix:0:ch_private_cache:buf_avail 6 unix:0:ch_private_cache:buf_constructed 0 unix:0:ch_private_cache:buf_inuse 2 unix:0:ch_private_cache:buf_max 8 unix:0:ch_private_cache:buf_size 8208 unix:0:ch_private_cache:buf_total 8 unix:0:ch_private_cache:chunk_size 8704 unix:0:ch_private_cache:class kmem_cache unix:0:ch_private_cache:crtime 85.189267 unix:0:ch_private_cache:depot_alloc 0 unix:0:ch_private_cache:depot_contention 0 unix:0:ch_private_cache:depot_free 0 unix:0:ch_private_cache:empty_magazines 0 unix:0:ch_private_cache:free 0 unix:0:ch_private_cache:full_magazines 0 unix:0:ch_private_cache:hash_lookup_depth 0 unix:0:ch_private_cache:hash_rescale 0 unix:0:ch_private_cache:hash_size 64 unix:0:ch_private_cache:magazine_size 1 unix:0:ch_private_cache:slab_alloc 2 unix:0:ch_private_cache:slab_create 1 unix:0:ch_private_cache:slab_destroy 0 unix:0:ch_private_cache:slab_free 0 unix:0:ch_private_cache:slab_size 73728 unix:0:ch_private_cache:snaptime 8464512.4968082 unix:0:ch_private_cache:vmem_source 8 unix:0:ciputctrl_cache:align 64 unix:0:ciputctrl_cache:alloc 0 unix:0:ciputctrl_cache:alloc_fail 0 unix:0:ciputctrl_cache:buf_avail 0 unix:0:ciputctrl_cache:buf_constructed 0 unix:0:ciputctrl_cache:buf_inuse 0 unix:0:ciputctrl_cache:buf_max 0 unix:0:ciputctrl_cache:buf_size 128 unix:0:ciputctrl_cache:buf_total 0 unix:0:ciputctrl_cache:chunk_size 128 unix:0:ciputctrl_cache:class kmem_cache unix:0:ciputctrl_cache:crtime 69.2822836 unix:0:ciputctrl_cache:depot_alloc 0 unix:0:ciputctrl_cache:depot_contention 0 unix:0:ciputctrl_cache:depot_free 0 unix:0:ciputctrl_cache:empty_magazines 0 unix:0:ciputctrl_cache:free 0 unix:0:ciputctrl_cache:full_magazines 0 unix:0:ciputctrl_cache:hash_lookup_depth 0 unix:0:ciputctrl_cache:hash_rescale 0 unix:0:ciputctrl_cache:hash_size 0 unix:0:ciputctrl_cache:magazine_size 7 unix:0:ciputctrl_cache:slab_alloc 0 unix:0:ciputctrl_cache:slab_create 0 unix:0:ciputctrl_cache:slab_destroy 0 unix:0:ciputctrl_cache:slab_free 0 unix:0:ciputctrl_cache:slab_size 8192 unix:0:ciputctrl_cache:snaptime 8464512.498205 unix:0:ciputctrl_cache:vmem_source 23 unix:0:client_handle4_cache:align 8 unix:0:client_handle4_cache:alloc 1 unix:0:client_handle4_cache:alloc_fail 0 unix:0:client_handle4_cache:buf_avail 253 unix:0:client_handle4_cache:buf_constructed 0 unix:0:client_handle4_cache:buf_inuse 1 unix:0:client_handle4_cache:buf_max 254 unix:0:client_handle4_cache:buf_size 32 
unix:0:client_handle4_cache:buf_total 254 unix:0:client_handle4_cache:chunk_size 32 unix:0:client_handle4_cache:class kmem_cache unix:0:client_handle4_cache:crtime 114.1532178 unix:0:client_handle4_cache:depot_alloc 0 unix:0:client_handle4_cache:depot_contention 0 unix:0:client_handle4_cache:depot_free 0 unix:0:client_handle4_cache:empty_magazines 0 unix:0:client_handle4_cache:free 0 unix:0:client_handle4_cache:full_magazines 0 unix:0:client_handle4_cache:hash_lookup_depth 0 unix:0:client_handle4_cache:hash_rescale 0 unix:0:client_handle4_cache:hash_size 0 unix:0:client_handle4_cache:magazine_size 15 unix:0:client_handle4_cache:slab_alloc 1 unix:0:client_handle4_cache:slab_create 1 unix:0:client_handle4_cache:slab_destroy 0 unix:0:client_handle4_cache:slab_free 0 unix:0:client_handle4_cache:slab_size 8192 unix:0:client_handle4_cache:snaptime 8464512.4995786 unix:0:client_handle4_cache:vmem_source 23 unix:0:client_handle_cache:align 8 unix:0:client_handle_cache:alloc 12 unix:0:client_handle_cache:alloc_fail 0 unix:0:client_handle_cache:buf_avail 242 unix:0:client_handle_cache:buf_constructed 0 unix:0:client_handle_cache:buf_inuse 12 unix:0:client_handle_cache:buf_max 254 unix:0:client_handle_cache:buf_size 32 unix:0:client_handle_cache:buf_total 254 unix:0:client_handle_cache:chunk_size 32 unix:0:client_handle_cache:class kmem_cache unix:0:client_handle_cache:crtime 114.1460068 unix:0:client_handle_cache:depot_alloc 0 unix:0:client_handle_cache:depot_contention 0 unix:0:client_handle_cache:depot_free 0 unix:0:client_handle_cache:empty_magazines 0 unix:0:client_handle_cache:free 0 unix:0:client_handle_cache:full_magazines 0 unix:0:client_handle_cache:hash_lookup_depth 0 unix:0:client_handle_cache:hash_rescale 0 unix:0:client_handle_cache:hash_size 0 unix:0:client_handle_cache:magazine_size 15 unix:0:client_handle_cache:slab_alloc 12 unix:0:client_handle_cache:slab_create 1 unix:0:client_handle_cache:slab_destroy 0 unix:0:client_handle_cache:slab_free 0 unix:0:client_handle_cache:slab_size 8192 unix:0:client_handle_cache:snaptime 8464512.5011856 unix:0:client_handle_cache:vmem_source 23 unix:0:clnt_clts_endpnt_cache:align 8 unix:0:clnt_clts_endpnt_cache:alloc 2 unix:0:clnt_clts_endpnt_cache:alloc_fail 0 unix:0:clnt_clts_endpnt_cache:buf_avail 90 unix:0:clnt_clts_endpnt_cache:buf_constructed 0 unix:0:clnt_clts_endpnt_cache:buf_inuse 2 unix:0:clnt_clts_endpnt_cache:buf_max 92 unix:0:clnt_clts_endpnt_cache:buf_size 88 unix:0:clnt_clts_endpnt_cache:buf_total 92 unix:0:clnt_clts_endpnt_cache:chunk_size 88 unix:0:clnt_clts_endpnt_cache:class kmem_cache unix:0:clnt_clts_endpnt_cache:crtime 114.1383618 unix:0:clnt_clts_endpnt_cache:depot_alloc 0 unix:0:clnt_clts_endpnt_cache:depot_contention 0 unix:0:clnt_clts_endpnt_cache:depot_free 0 unix:0:clnt_clts_endpnt_cache:empty_magazines 0 unix:0:clnt_clts_endpnt_cache:free 0 unix:0:clnt_clts_endpnt_cache:full_magazines 0 unix:0:clnt_clts_endpnt_cache:hash_lookup_depth 0 unix:0:clnt_clts_endpnt_cache:hash_rescale 0 unix:0:clnt_clts_endpnt_cache:hash_size 0 unix:0:clnt_clts_endpnt_cache:magazine_size 7 unix:0:clnt_clts_endpnt_cache:slab_alloc 2 unix:0:clnt_clts_endpnt_cache:slab_create 1 unix:0:clnt_clts_endpnt_cache:slab_destroy 0 unix:0:clnt_clts_endpnt_cache:slab_free 0 unix:0:clnt_clts_endpnt_cache:slab_size 8192 unix:0:clnt_clts_endpnt_cache:snaptime 8464512.5025848 unix:0:clnt_clts_endpnt_cache:vmem_source 23 unix:0:cred_cache:align 8 unix:0:cred_cache:alloc 15900 unix:0:cred_cache:alloc_fail 0 unix:0:cred_cache:buf_avail 67 
unix:0:cred_cache:buf_constructed 31 unix:0:cred_cache:buf_inuse 71 unix:0:cred_cache:buf_max 138 unix:0:cred_cache:buf_size 172 unix:0:cred_cache:buf_total 138 unix:0:cred_cache:chunk_size 176 unix:0:cred_cache:class kmem_cache unix:0:cred_cache:crtime 69.2623686 unix:0:cred_cache:depot_alloc 15 unix:0:cred_cache:depot_contention 0 unix:0:cred_cache:depot_free 21 unix:0:cred_cache:empty_magazines 1 unix:0:cred_cache:free 15835 unix:0:cred_cache:full_magazines 2 unix:0:cred_cache:hash_lookup_depth 0 unix:0:cred_cache:hash_rescale 0 unix:0:cred_cache:hash_size 0 unix:0:cred_cache:magazine_size 7 unix:0:cred_cache:slab_alloc 102 unix:0:cred_cache:slab_create 3 unix:0:cred_cache:slab_destroy 0 unix:0:cred_cache:slab_free 0 unix:0:cred_cache:slab_size 8192 unix:0:cred_cache:snaptime 8464512.5039784 unix:0:cred_cache:vmem_source 23 unix:0:crypto_session_cache:align 8 unix:0:crypto_session_cache:alloc 0 unix:0:crypto_session_cache:alloc_fail 0 unix:0:crypto_session_cache:buf_avail 0 unix:0:crypto_session_cache:buf_constructed 0 unix:0:crypto_session_cache:buf_inuse 0 unix:0:crypto_session_cache:buf_max 0 unix:0:crypto_session_cache:buf_size 96 unix:0:crypto_session_cache:buf_total 0 unix:0:crypto_session_cache:chunk_size 96 unix:0:crypto_session_cache:class kmem_cache unix:0:crypto_session_cache:crtime 161.7552976 unix:0:crypto_session_cache:depot_alloc 0 unix:0:crypto_session_cache:depot_contention 0 unix:0:crypto_session_cache:depot_free 0 unix:0:crypto_session_cache:empty_magazines 0 unix:0:crypto_session_cache:free 0 unix:0:crypto_session_cache:full_magazines 0 unix:0:crypto_session_cache:hash_lookup_depth 0 unix:0:crypto_session_cache:hash_rescale 0 unix:0:crypto_session_cache:hash_size 0 unix:0:crypto_session_cache:magazine_size 7 unix:0:crypto_session_cache:slab_alloc 0 unix:0:crypto_session_cache:slab_create 0 unix:0:crypto_session_cache:slab_destroy 0 unix:0:crypto_session_cache:slab_free 0 unix:0:crypto_session_cache:slab_size 8192 unix:0:crypto_session_cache:snaptime 8464512.5053666 unix:0:crypto_session_cache:vmem_source 23 unix:0:cyclic_id_cache:align 8 unix:0:cyclic_id_cache:alloc 10 unix:0:cyclic_id_cache:alloc_fail 0 unix:0:cyclic_id_cache:buf_avail 117 unix:0:cyclic_id_cache:buf_constructed 0 unix:0:cyclic_id_cache:buf_inuse 10 unix:0:cyclic_id_cache:buf_max 127 unix:0:cyclic_id_cache:buf_size 64 unix:0:cyclic_id_cache:buf_total 127 unix:0:cyclic_id_cache:chunk_size 64 unix:0:cyclic_id_cache:class kmem_cache unix:0:cyclic_id_cache:crtime 69.2736878 unix:0:cyclic_id_cache:depot_alloc 0 unix:0:cyclic_id_cache:depot_contention 0 unix:0:cyclic_id_cache:depot_free 0 unix:0:cyclic_id_cache:empty_magazines 0 unix:0:cyclic_id_cache:free 0 unix:0:cyclic_id_cache:full_magazines 0 unix:0:cyclic_id_cache:hash_lookup_depth 0 unix:0:cyclic_id_cache:hash_rescale 0 unix:0:cyclic_id_cache:hash_size 0 unix:0:cyclic_id_cache:magazine_size 15 unix:0:cyclic_id_cache:slab_alloc 10 unix:0:cyclic_id_cache:slab_create 1 unix:0:cyclic_id_cache:slab_destroy 0 unix:0:cyclic_id_cache:slab_free 0 unix:0:cyclic_id_cache:slab_size 8192 unix:0:cyclic_id_cache:snaptime 8464512.5068098 unix:0:cyclic_id_cache:vmem_source 23 unix:0:dev_info_node_cache:align 8 unix:0:dev_info_node_cache:alloc 357 unix:0:dev_info_node_cache:alloc_fail 0 unix:0:dev_info_node_cache:buf_avail 15 unix:0:dev_info_node_cache:buf_constructed 2 unix:0:dev_info_node_cache:buf_inuse 225 unix:0:dev_info_node_cache:buf_max 240 unix:0:dev_info_node_cache:buf_size 480 unix:0:dev_info_node_cache:buf_total 240 unix:0:dev_info_node_cache:chunk_size 
480 unix:0:dev_info_node_cache:class kmem_cache unix:0:dev_info_node_cache:crtime 66.623312 unix:0:dev_info_node_cache:depot_alloc 0 unix:0:dev_info_node_cache:depot_contention 0 unix:0:dev_info_node_cache:depot_free 2 unix:0:dev_info_node_cache:empty_magazines 0 unix:0:dev_info_node_cache:free 134 unix:0:dev_info_node_cache:full_magazines 0 unix:0:dev_info_node_cache:hash_lookup_depth 0 unix:0:dev_info_node_cache:hash_rescale 0 unix:0:dev_info_node_cache:hash_size 0 unix:0:dev_info_node_cache:magazine_size 3 unix:0:dev_info_node_cache:slab_alloc 227 unix:0:dev_info_node_cache:slab_create 15 unix:0:dev_info_node_cache:slab_destroy 0 unix:0:dev_info_node_cache:slab_free 0 unix:0:dev_info_node_cache:slab_size 8192 unix:0:dev_info_node_cache:snaptime 8464512.5081962 unix:0:dev_info_node_cache:vmem_source 23 unix:0:directio_buf_cache:align 8 unix:0:directio_buf_cache:alloc 0 unix:0:directio_buf_cache:alloc_fail 0 unix:0:directio_buf_cache:buf_avail 0 unix:0:directio_buf_cache:buf_constructed 0 unix:0:directio_buf_cache:buf_inuse 0 unix:0:directio_buf_cache:buf_max 0 unix:0:directio_buf_cache:buf_size 272 unix:0:directio_buf_cache:buf_total 0 unix:0:directio_buf_cache:chunk_size 272 unix:0:directio_buf_cache:class kmem_cache unix:0:directio_buf_cache:crtime 69.2906706 unix:0:directio_buf_cache:depot_alloc 0 unix:0:directio_buf_cache:depot_contention 0 unix:0:directio_buf_cache:depot_free 0 unix:0:directio_buf_cache:empty_magazines 0 unix:0:directio_buf_cache:free 0 unix:0:directio_buf_cache:full_magazines 0 unix:0:directio_buf_cache:hash_lookup_depth 0 unix:0:directio_buf_cache:hash_rescale 0 unix:0:directio_buf_cache:hash_size 0 unix:0:directio_buf_cache:magazine_size 3 unix:0:directio_buf_cache:slab_alloc 0 unix:0:directio_buf_cache:slab_create 0 unix:0:directio_buf_cache:slab_destroy 0 unix:0:directio_buf_cache:slab_free 0 unix:0:directio_buf_cache:slab_size 8192 unix:0:directio_buf_cache:snaptime 8464512.5095944 unix:0:directio_buf_cache:vmem_source 23 unix:0:dld_ctl_1:align 1 unix:0:dld_ctl_1:alloc 0 unix:0:dld_ctl_1:alloc_fail 0 unix:0:dld_ctl_1:buf_avail 0 unix:0:dld_ctl_1:buf_constructed 0 unix:0:dld_ctl_1:buf_inuse 0 unix:0:dld_ctl_1:buf_max 0 unix:0:dld_ctl_1:buf_size 1 unix:0:dld_ctl_1:buf_total 0 unix:0:dld_ctl_1:chunk_size 1 unix:0:dld_ctl_1:class kmem_cache unix:0:dld_ctl_1:crtime 84.0831506 unix:0:dld_ctl_1:depot_alloc 0 unix:0:dld_ctl_1:depot_contention 0 unix:0:dld_ctl_1:depot_free 0 unix:0:dld_ctl_1:empty_magazines 0 unix:0:dld_ctl_1:free 0 unix:0:dld_ctl_1:full_magazines 0 unix:0:dld_ctl_1:hash_lookup_depth 0 unix:0:dld_ctl_1:hash_rescale 0 unix:0:dld_ctl_1:hash_size 64 unix:0:dld_ctl_1:magazine_size 15 unix:0:dld_ctl_1:slab_alloc 0 unix:0:dld_ctl_1:slab_create 0 unix:0:dld_ctl_1:slab_destroy 0 unix:0:dld_ctl_1:slab_free 0 unix:0:dld_ctl_1:slab_size 64 unix:0:dld_ctl_1:snaptime 8464512.5112114 unix:0:dld_ctl_1:vmem_source 52 unix:0:dld_str_cache:align 8 unix:0:dld_str_cache:alloc 1 unix:0:dld_str_cache:alloc_fail 0 unix:0:dld_str_cache:buf_avail 29 unix:0:dld_str_cache:buf_constructed 1 unix:0:dld_str_cache:buf_inuse 0 unix:0:dld_str_cache:buf_max 29 unix:0:dld_str_cache:buf_size 272 unix:0:dld_str_cache:buf_total 29 unix:0:dld_str_cache:chunk_size 272 unix:0:dld_str_cache:class kmem_cache unix:0:dld_str_cache:crtime 84.0832292 unix:0:dld_str_cache:depot_alloc 0 unix:0:dld_str_cache:depot_contention 0 unix:0:dld_str_cache:depot_free 1 unix:0:dld_str_cache:empty_magazines 0 unix:0:dld_str_cache:free 2 unix:0:dld_str_cache:full_magazines 0 unix:0:dld_str_cache:hash_lookup_depth 
0 unix:0:dld_str_cache:hash_rescale 0 unix:0:dld_str_cache:hash_size 0 unix:0:dld_str_cache:magazine_size 3 unix:0:dld_str_cache:slab_alloc 1 unix:0:dld_str_cache:slab_create 1 unix:0:dld_str_cache:slab_destroy 0 unix:0:dld_str_cache:slab_free 0 unix:0:dld_str_cache:slab_size 8192 unix:0:dld_str_cache:snaptime 8464512.5126706 unix:0:dld_str_cache:vmem_source 23 unix:0:dls_cache:align 8 unix:0:dls_cache:alloc 0 unix:0:dls_cache:alloc_fail 0 unix:0:dls_cache:buf_avail 0 unix:0:dls_cache:buf_constructed 0 unix:0:dls_cache:buf_inuse 0 unix:0:dls_cache:buf_max 0 unix:0:dls_cache:buf_size 160 unix:0:dls_cache:buf_total 0 unix:0:dls_cache:chunk_size 160 unix:0:dls_cache:class kmem_cache unix:0:dls_cache:crtime 84.0829802 unix:0:dls_cache:depot_alloc 0 unix:0:dls_cache:depot_contention 0 unix:0:dls_cache:depot_free 0 unix:0:dls_cache:empty_magazines 0 unix:0:dls_cache:free 0 unix:0:dls_cache:full_magazines 0 unix:0:dls_cache:hash_lookup_depth 0 unix:0:dls_cache:hash_rescale 0 unix:0:dls_cache:hash_size 0 unix:0:dls_cache:magazine_size 7 unix:0:dls_cache:slab_alloc 0 unix:0:dls_cache:slab_create 0 unix:0:dls_cache:slab_destroy 0 unix:0:dls_cache:slab_free 0 unix:0:dls_cache:slab_size 8192 unix:0:dls_cache:snaptime 8464512.5140552 unix:0:dls_cache:vmem_source 23 unix:0:dls_link_cache:align 8 unix:0:dls_link_cache:alloc 0 unix:0:dls_link_cache:alloc_fail 0 unix:0:dls_link_cache:buf_avail 0 unix:0:dls_link_cache:buf_constructed 0 unix:0:dls_link_cache:buf_inuse 0 unix:0:dls_link_cache:buf_max 0 unix:0:dls_link_cache:buf_size 368 unix:0:dls_link_cache:buf_total 0 unix:0:dls_link_cache:chunk_size 368 unix:0:dls_link_cache:class kmem_cache unix:0:dls_link_cache:crtime 84.0830906 unix:0:dls_link_cache:depot_alloc 0 unix:0:dls_link_cache:depot_contention 0 unix:0:dls_link_cache:depot_free 0 unix:0:dls_link_cache:empty_magazines 0 unix:0:dls_link_cache:free 0 unix:0:dls_link_cache:full_magazines 0 unix:0:dls_link_cache:hash_lookup_depth 0 unix:0:dls_link_cache:hash_rescale 0 unix:0:dls_link_cache:hash_size 0 unix:0:dls_link_cache:magazine_size 3 unix:0:dls_link_cache:slab_alloc 0 unix:0:dls_link_cache:slab_create 0 unix:0:dls_link_cache:slab_destroy 0 unix:0:dls_link_cache:slab_free 0 unix:0:dls_link_cache:slab_size 8192 unix:0:dls_link_cache:snaptime 8464512.5154282 unix:0:dls_link_cache:vmem_source 23 unix:0:dls_vlan_cache:align 8 unix:0:dls_vlan_cache:alloc 0 unix:0:dls_vlan_cache:alloc_fail 0 unix:0:dls_vlan_cache:buf_avail 0 unix:0:dls_vlan_cache:buf_constructed 0 unix:0:dls_vlan_cache:buf_inuse 0 unix:0:dls_vlan_cache:buf_max 0 unix:0:dls_vlan_cache:buf_size 72 unix:0:dls_vlan_cache:buf_total 0 unix:0:dls_vlan_cache:chunk_size 72 unix:0:dls_vlan_cache:class kmem_cache unix:0:dls_vlan_cache:crtime 84.0830234 unix:0:dls_vlan_cache:depot_alloc 0 unix:0:dls_vlan_cache:depot_contention 0 unix:0:dls_vlan_cache:depot_free 0 unix:0:dls_vlan_cache:empty_magazines 0 unix:0:dls_vlan_cache:free 0 unix:0:dls_vlan_cache:full_magazines 0 unix:0:dls_vlan_cache:hash_lookup_depth 0 unix:0:dls_vlan_cache:hash_rescale 0 unix:0:dls_vlan_cache:hash_size 0 unix:0:dls_vlan_cache:magazine_size 7 unix:0:dls_vlan_cache:slab_alloc 0 unix:0:dls_vlan_cache:slab_create 0 unix:0:dls_vlan_cache:slab_destroy 0 unix:0:dls_vlan_cache:slab_free 0 unix:0:dls_vlan_cache:slab_size 8192 unix:0:dls_vlan_cache:snaptime 8464512.516869 unix:0:dls_vlan_cache:vmem_source 23 unix:0:dmu_buf_impl_t:align 8 unix:0:dmu_buf_impl_t:alloc 0 unix:0:dmu_buf_impl_t:alloc_fail 0 unix:0:dmu_buf_impl_t:buf_avail 0 unix:0:dmu_buf_impl_t:buf_constructed 0 
unix:0:dmu_buf_impl_t:buf_inuse 0 unix:0:dmu_buf_impl_t:buf_max 0 unix:0:dmu_buf_impl_t:buf_size 192 unix:0:dmu_buf_impl_t:buf_total 0 unix:0:dmu_buf_impl_t:chunk_size 192 unix:0:dmu_buf_impl_t:class kmem_cache unix:0:dmu_buf_impl_t:crtime 97.1376814 unix:0:dmu_buf_impl_t:depot_alloc 0 unix:0:dmu_buf_impl_t:depot_contention 0 unix:0:dmu_buf_impl_t:depot_free 0 unix:0:dmu_buf_impl_t:empty_magazines 0 unix:0:dmu_buf_impl_t:free 0 unix:0:dmu_buf_impl_t:full_magazines 0 unix:0:dmu_buf_impl_t:hash_lookup_depth 0 unix:0:dmu_buf_impl_t:hash_rescale 0 unix:0:dmu_buf_impl_t:hash_size 0 unix:0:dmu_buf_impl_t:magazine_size 7 unix:0:dmu_buf_impl_t:slab_alloc 0 unix:0:dmu_buf_impl_t:slab_create 0 unix:0:dmu_buf_impl_t:slab_destroy 0 unix:0:dmu_buf_impl_t:slab_free 0 unix:0:dmu_buf_impl_t:slab_size 8192 unix:0:dmu_buf_impl_t:snaptime 8464512.518268 unix:0:dmu_buf_impl_t:vmem_source 23 unix:0:dnlc_space_cache:align 8 unix:0:dnlc_space_cache:alloc 102 unix:0:dnlc_space_cache:alloc_fail 0 unix:0:dnlc_space_cache:buf_avail 237 unix:0:dnlc_space_cache:buf_constructed 0 unix:0:dnlc_space_cache:buf_inuse 102 unix:0:dnlc_space_cache:buf_max 339 unix:0:dnlc_space_cache:buf_size 24 unix:0:dnlc_space_cache:buf_total 339 unix:0:dnlc_space_cache:chunk_size 24 unix:0:dnlc_space_cache:class kmem_cache unix:0:dnlc_space_cache:crtime 69.2818442 unix:0:dnlc_space_cache:depot_alloc 0 unix:0:dnlc_space_cache:depot_contention 0 unix:0:dnlc_space_cache:depot_free 0 unix:0:dnlc_space_cache:empty_magazines 0 unix:0:dnlc_space_cache:free 0 unix:0:dnlc_space_cache:full_magazines 0 unix:0:dnlc_space_cache:hash_lookup_depth 0 unix:0:dnlc_space_cache:hash_rescale 0 unix:0:dnlc_space_cache:hash_size 0 unix:0:dnlc_space_cache:magazine_size 15 unix:0:dnlc_space_cache:slab_alloc 102 unix:0:dnlc_space_cache:slab_create 1 unix:0:dnlc_space_cache:slab_destroy 0 unix:0:dnlc_space_cache:slab_free 0 unix:0:dnlc_space_cache:slab_size 8192 unix:0:dnlc_space_cache:snaptime 8464512.5196356 unix:0:dnlc_space_cache:vmem_source 23 unix:0:dnlcstats:class misc unix:0:dnlcstats:crtime 69.2818642 unix:0:dnlcstats:dir_add_abort 0 unix:0:dnlcstats:dir_add_max 0 unix:0:dnlcstats:dir_add_no_memory 0 unix:0:dnlcstats:dir_cached_current 8 unix:0:dnlcstats:dir_cached_total 8 unix:0:dnlcstats:dir_entries_cached_current 9312 unix:0:dnlcstats:dir_fini_purge 0 unix:0:dnlcstats:dir_hits 2458 unix:0:dnlcstats:dir_misses 36102 unix:0:dnlcstats:dir_reclaim_any 0 unix:0:dnlcstats:dir_reclaim_last 0 unix:0:dnlcstats:dir_remove_entry_fail 0 unix:0:dnlcstats:dir_remove_space_fail 0 unix:0:dnlcstats:dir_start_no_memory 0 unix:0:dnlcstats:dir_update_fail 0 unix:0:dnlcstats:double_enters 7413 unix:0:dnlcstats:enters 90768 unix:0:dnlcstats:hits 177790196 unix:0:dnlcstats:misses 54802 unix:0:dnlcstats:negative_cache_hits 885742 unix:0:dnlcstats:pick_free 0 unix:0:dnlcstats:pick_heuristic 0 unix:0:dnlcstats:pick_last 0 unix:0:dnlcstats:purge_all 1 unix:0:dnlcstats:purge_fs1 0 unix:0:dnlcstats:purge_total_entries 53188 unix:0:dnlcstats:purge_vfs 231 unix:0:dnlcstats:purge_vp 263 unix:0:dnlcstats:snaptime 8464512.5213346 unix:0:dnode_t:align 8 unix:0:dnode_t:alloc 0 unix:0:dnode_t:alloc_fail 0 unix:0:dnode_t:buf_avail 0 unix:0:dnode_t:buf_constructed 0 unix:0:dnode_t:buf_inuse 0 unix:0:dnode_t:buf_max 0 unix:0:dnode_t:buf_size 648 unix:0:dnode_t:buf_total 0 unix:0:dnode_t:chunk_size 648 unix:0:dnode_t:class kmem_cache unix:0:dnode_t:crtime 97.1377156 unix:0:dnode_t:depot_alloc 0 unix:0:dnode_t:depot_contention 0 unix:0:dnode_t:depot_free 0 unix:0:dnode_t:empty_magazines 0 
unix:0:dnode_t:free 0 unix:0:dnode_t:full_magazines 0 unix:0:dnode_t:hash_lookup_depth 0 unix:0:dnode_t:hash_rescale 0 unix:0:dnode_t:hash_size 0 unix:0:dnode_t:magazine_size 3 unix:0:dnode_t:slab_alloc 0 unix:0:dnode_t:slab_create 0 unix:0:dnode_t:slab_destroy 0 unix:0:dnode_t:slab_free 0 unix:0:dnode_t:slab_size 8192 unix:0:dnode_t:snaptime 8464512.523153 unix:0:dnode_t:vmem_source 23 unix:0:dtrace_state_cache:align 64 unix:0:dtrace_state_cache:alloc 0 unix:0:dtrace_state_cache:alloc_fail 0 unix:0:dtrace_state_cache:buf_avail 0 unix:0:dtrace_state_cache:buf_constructed 0 unix:0:dtrace_state_cache:buf_inuse 0 unix:0:dtrace_state_cache:buf_max 0 unix:0:dtrace_state_cache:buf_size 2048 unix:0:dtrace_state_cache:buf_total 0 unix:0:dtrace_state_cache:chunk_size 2048 unix:0:dtrace_state_cache:class kmem_cache unix:0:dtrace_state_cache:crtime 113.9732348 unix:0:dtrace_state_cache:depot_alloc 0 unix:0:dtrace_state_cache:depot_contention 0 unix:0:dtrace_state_cache:depot_free 0 unix:0:dtrace_state_cache:empty_magazines 0 unix:0:dtrace_state_cache:free 0 unix:0:dtrace_state_cache:full_magazines 0 unix:0:dtrace_state_cache:hash_lookup_depth 0 unix:0:dtrace_state_cache:hash_rescale 0 unix:0:dtrace_state_cache:hash_size 64 unix:0:dtrace_state_cache:magazine_size 3 unix:0:dtrace_state_cache:slab_alloc 0 unix:0:dtrace_state_cache:slab_create 0 unix:0:dtrace_state_cache:slab_destroy 0 unix:0:dtrace_state_cache:slab_free 0 unix:0:dtrace_state_cache:slab_size 8192 unix:0:dtrace_state_cache:snaptime 8464512.5245528 unix:0:dtrace_state_cache:vmem_source 23 unix:0:dv_node_cache:align 8 unix:0:dv_node_cache:alloc 284 unix:0:dv_node_cache:alloc_fail 0 unix:0:dv_node_cache:buf_avail 24 unix:0:dv_node_cache:buf_constructed 0 unix:0:dv_node_cache:buf_inuse 244 unix:0:dv_node_cache:buf_max 268 unix:0:dv_node_cache:buf_size 120 unix:0:dv_node_cache:buf_total 268 unix:0:dv_node_cache:chunk_size 120 unix:0:dv_node_cache:class kmem_cache unix:0:dv_node_cache:crtime 82.4584338 unix:0:dv_node_cache:depot_alloc 4 unix:0:dv_node_cache:depot_contention 0 unix:0:dv_node_cache:depot_free 6 unix:0:dv_node_cache:empty_magazines 2 unix:0:dv_node_cache:free 42 unix:0:dv_node_cache:full_magazines 0 unix:0:dv_node_cache:hash_lookup_depth 0 unix:0:dv_node_cache:hash_rescale 0 unix:0:dv_node_cache:hash_size 0 unix:0:dv_node_cache:magazine_size 7 unix:0:dv_node_cache:slab_alloc 244 unix:0:dv_node_cache:slab_create 4 unix:0:dv_node_cache:slab_destroy 0 unix:0:dv_node_cache:slab_free 0 unix:0:dv_node_cache:slab_size 8192 unix:0:dv_node_cache:snaptime 8464512.52593 unix:0:dv_node_cache:vmem_source 23 unix:0:ebus_nexus_enum_tq:class taskq unix:0:ebus_nexus_enum_tq:crtime 69.294988 unix:0:ebus_nexus_enum_tq:executed 0 unix:0:ebus_nexus_enum_tq:maxtasks 0 unix:0:ebus_nexus_enum_tq:nactive 1 unix:0:ebus_nexus_enum_tq:nalloc 0 unix:0:ebus_nexus_enum_tq:priority 60 unix:0:ebus_nexus_enum_tq:snaptime 8464512.5273912 unix:0:ebus_nexus_enum_tq:tasks 0 unix:0:ebus_nexus_enum_tq:threads 1 unix:0:ebus_nexus_enum_tq:totaltime 0 unix:0:exacct_object_cache:align 8 unix:0:exacct_object_cache:alloc 0 unix:0:exacct_object_cache:alloc_fail 0 unix:0:exacct_object_cache:buf_avail 0 unix:0:exacct_object_cache:buf_constructed 0 unix:0:exacct_object_cache:buf_inuse 0 unix:0:exacct_object_cache:buf_max 0 unix:0:exacct_object_cache:buf_size 40 unix:0:exacct_object_cache:buf_total 0 unix:0:exacct_object_cache:chunk_size 40 unix:0:exacct_object_cache:class kmem_cache unix:0:exacct_object_cache:crtime 85.1890728 unix:0:exacct_object_cache:depot_alloc 0 
unix:0:exacct_object_cache:depot_contention 0 unix:0:exacct_object_cache:depot_free 0 unix:0:exacct_object_cache:empty_magazines 0 unix:0:exacct_object_cache:free 0 unix:0:exacct_object_cache:full_magazines 0 unix:0:exacct_object_cache:hash_lookup_depth 0 unix:0:exacct_object_cache:hash_rescale 0 unix:0:exacct_object_cache:hash_size 0 unix:0:exacct_object_cache:magazine_size 15 unix:0:exacct_object_cache:slab_alloc 0 unix:0:exacct_object_cache:slab_create 0 unix:0:exacct_object_cache:slab_destroy 0 unix:0:exacct_object_cache:slab_free 0 unix:0:exacct_object_cache:slab_size 8192 unix:0:exacct_object_cache:snaptime 8464512.528001 unix:0:exacct_object_cache:vmem_source 23 unix:0:fcp1_cache:align 8 unix:0:fcp1_cache:alloc 189700 unix:0:fcp1_cache:alloc_fail 0 unix:0:fcp1_cache:buf_avail 26 unix:0:fcp1_cache:buf_constructed 26 unix:0:fcp1_cache:buf_inuse 2 unix:0:fcp1_cache:buf_max 28 unix:0:fcp1_cache:buf_size 1168 unix:0:fcp1_cache:buf_total 28 unix:0:fcp1_cache:chunk_size 1168 unix:0:fcp1_cache:class kmem_cache unix:0:fcp1_cache:crtime 77.2902224 unix:0:fcp1_cache:depot_alloc 36576 unix:0:fcp1_cache:depot_contention 0 unix:0:fcp1_cache:depot_free 36587 unix:0:fcp1_cache:empty_magazines 0 unix:0:fcp1_cache:free 189709 unix:0:fcp1_cache:full_magazines 7 unix:0:fcp1_cache:hash_lookup_depth 0 unix:0:fcp1_cache:hash_rescale 0 unix:0:fcp1_cache:hash_size 64 unix:0:fcp1_cache:magazine_size 3 unix:0:fcp1_cache:slab_alloc 28 unix:0:fcp1_cache:slab_create 4 unix:0:fcp1_cache:slab_destroy 0 unix:0:fcp1_cache:slab_free 0 unix:0:fcp1_cache:slab_size 8192 unix:0:fcp1_cache:snaptime 8464512.5293798 unix:0:fcp1_cache:vmem_source 23 unix:0:fctl_cache:align 8 unix:0:fctl_cache:alloc 4 unix:0:fctl_cache:alloc_fail 0 unix:0:fctl_cache:buf_avail 72 unix:0:fctl_cache:buf_constructed 2 unix:0:fctl_cache:buf_inuse 0 unix:0:fctl_cache:buf_max 72 unix:0:fctl_cache:buf_size 112 unix:0:fctl_cache:buf_total 72 unix:0:fctl_cache:chunk_size 112 unix:0:fctl_cache:class kmem_cache unix:0:fctl_cache:crtime 69.2996564 unix:0:fctl_cache:depot_alloc 0 unix:0:fctl_cache:depot_contention 0 unix:0:fctl_cache:depot_free 2 unix:0:fctl_cache:empty_magazines 0 unix:0:fctl_cache:free 6 unix:0:fctl_cache:full_magazines 0 unix:0:fctl_cache:hash_lookup_depth 0 unix:0:fctl_cache:hash_rescale 0 unix:0:fctl_cache:hash_size 0 unix:0:fctl_cache:magazine_size 7 unix:0:fctl_cache:slab_alloc 2 unix:0:fctl_cache:slab_create 1 unix:0:fctl_cache:slab_destroy 0 unix:0:fctl_cache:slab_free 0 unix:0:fctl_cache:slab_size 8192 unix:0:fctl_cache:snaptime 8464512.531 unix:0:fctl_cache:vmem_source 23 unix:0:fdb_cache:align 8 unix:0:fdb_cache:alloc 0 unix:0:fdb_cache:alloc_fail 0 unix:0:fdb_cache:buf_avail 0 unix:0:fdb_cache:buf_constructed 0 unix:0:fdb_cache:buf_inuse 0 unix:0:fdb_cache:buf_max 0 unix:0:fdb_cache:buf_size 104 unix:0:fdb_cache:buf_total 0 unix:0:fdb_cache:chunk_size 104 unix:0:fdb_cache:class kmem_cache unix:0:fdb_cache:crtime 69.2857228 unix:0:fdb_cache:depot_alloc 0 unix:0:fdb_cache:depot_contention 0 unix:0:fdb_cache:depot_free 0 unix:0:fdb_cache:empty_magazines 0 unix:0:fdb_cache:free 0 unix:0:fdb_cache:full_magazines 0 unix:0:fdb_cache:hash_lookup_depth 0 unix:0:fdb_cache:hash_rescale 0 unix:0:fdb_cache:hash_size 0 unix:0:fdb_cache:magazine_size 7 unix:0:fdb_cache:slab_alloc 0 unix:0:fdb_cache:slab_create 0 unix:0:fdb_cache:slab_destroy 0 unix:0:fdb_cache:slab_free 0 unix:0:fdb_cache:slab_size 8192 unix:0:fdb_cache:snaptime 8464512.532492 unix:0:fdb_cache:vmem_source 23 unix:0:file_cache:align 8 unix:0:file_cache:alloc 5083523 
unix:0:file_cache:alloc_fail 0 unix:0:file_cache:buf_avail 194 unix:0:file_cache:buf_constructed 109 unix:0:file_cache:buf_inuse 386 unix:0:file_cache:buf_max 580 unix:0:file_cache:buf_size 56 unix:0:file_cache:buf_total 580 unix:0:file_cache:chunk_size 56 unix:0:file_cache:class kmem_cache unix:0:file_cache:crtime 69.282172 unix:0:file_cache:depot_alloc 1356 unix:0:file_cache:depot_contention 0 unix:0:file_cache:depot_free 1365 unix:0:file_cache:empty_magazines 2 unix:0:file_cache:free 5083146 unix:0:file_cache:full_magazines 5 unix:0:file_cache:hash_lookup_depth 0 unix:0:file_cache:hash_rescale 0 unix:0:file_cache:hash_size 0 unix:0:file_cache:magazine_size 15 unix:0:file_cache:slab_alloc 495 unix:0:file_cache:slab_create 4 unix:0:file_cache:slab_destroy 0 unix:0:file_cache:slab_free 0 unix:0:file_cache:slab_size 8192 unix:0:file_cache:snaptime 8464512.5338822 unix:0:file_cache:vmem_source 23 unix:0:flk_edges:align 8 unix:0:flk_edges:alloc 0 unix:0:flk_edges:alloc_fail 0 unix:0:flk_edges:buf_avail 0 unix:0:flk_edges:buf_constructed 0 unix:0:flk_edges:buf_inuse 0 unix:0:flk_edges:buf_max 0 unix:0:flk_edges:buf_size 48 unix:0:flk_edges:buf_total 0 unix:0:flk_edges:chunk_size 48 unix:0:flk_edges:class kmem_cache unix:0:flk_edges:crtime 69.285683 unix:0:flk_edges:depot_alloc 0 unix:0:flk_edges:depot_contention 0 unix:0:flk_edges:depot_free 0 unix:0:flk_edges:empty_magazines 0 unix:0:flk_edges:free 0 unix:0:flk_edges:full_magazines 0 unix:0:flk_edges:hash_lookup_depth 0 unix:0:flk_edges:hash_rescale 0 unix:0:flk_edges:hash_size 0 unix:0:flk_edges:magazine_size 15 unix:0:flk_edges:slab_alloc 0 unix:0:flk_edges:slab_create 0 unix:0:flk_edges:slab_destroy 0 unix:0:flk_edges:slab_free 0 unix:0:flk_edges:slab_size 8192 unix:0:flk_edges:snaptime 8464512.535359 unix:0:flk_edges:vmem_source 23 unix:0:flushmeter:class hat unix:0:flushmeter:crtime 65.902658 unix:0:flushmeter:f_ctx 0 unix:0:flushmeter:f_page 0 unix:0:flushmeter:f_partial 0 unix:0:flushmeter:f_region 0 unix:0:flushmeter:f_segment 0 unix:0:flushmeter:f_usr 0 unix:0:flushmeter:snaptime 8464512.5367354 unix:0:fm:class misc unix:0:fm:crtime 66.6665582 unix:0:fm:erpt-dropped 0 unix:0:fm:erpt-set-failed 0 unix:0:fm:fmri-set-failed 0 unix:0:fm:payload-set-failed 0 unix:0:fm:snaptime 8464512.5372422 unix:0:fm_ereport_queue:cancelled 0 unix:0:fm_ereport_queue:class errorq unix:0:fm_ereport_queue:commit_fail 0 unix:0:fm_ereport_queue:committed 0 unix:0:fm_ereport_queue:crtime 66.6586094 unix:0:fm_ereport_queue:dispatched 0 unix:0:fm_ereport_queue:dropped 0 unix:0:fm_ereport_queue:logged 0 unix:0:fm_ereport_queue:reserve_fail 0 unix:0:fm_ereport_queue:reserved 0 unix:0:fm_ereport_queue:snaptime 8464512.5376588 unix:0:fnode_cache:align 32 unix:0:fnode_cache:alloc 341 unix:0:fnode_cache:alloc_fail 0 unix:0:fnode_cache:buf_avail 35 unix:0:fnode_cache:buf_constructed 3 unix:0:fnode_cache:buf_inuse 7 unix:0:fnode_cache:buf_max 42 unix:0:fnode_cache:buf_size 176 unix:0:fnode_cache:buf_total 42 unix:0:fnode_cache:chunk_size 192 unix:0:fnode_cache:class kmem_cache unix:0:fnode_cache:crtime 87.8316598 unix:0:fnode_cache:depot_alloc 0 unix:0:fnode_cache:depot_contention 0 unix:0:fnode_cache:depot_free 2 unix:0:fnode_cache:empty_magazines 0 unix:0:fnode_cache:free 336 unix:0:fnode_cache:full_magazines 0 unix:0:fnode_cache:hash_lookup_depth 0 unix:0:fnode_cache:hash_rescale 0 unix:0:fnode_cache:hash_size 0 unix:0:fnode_cache:magazine_size 7 unix:0:fnode_cache:slab_alloc 10 unix:0:fnode_cache:slab_create 1 unix:0:fnode_cache:slab_destroy 0 
unix:0:fnode_cache:slab_free 0 unix:0:fnode_cache:slab_size 8192 unix:0:fnode_cache:snaptime 8464512.5382528 unix:0:fnode_cache:vmem_source 23 unix:0:fp1_cache:align 8 unix:0:fp1_cache:alloc 6 unix:0:fp1_cache:alloc_fail 0 unix:0:fp1_cache:buf_avail 10 unix:0:fp1_cache:buf_constructed 2 unix:0:fp1_cache:buf_inuse 1 unix:0:fp1_cache:buf_max 11 unix:0:fp1_cache:buf_size 728 unix:0:fp1_cache:buf_total 11 unix:0:fp1_cache:chunk_size 728 unix:0:fp1_cache:class kmem_cache unix:0:fp1_cache:crtime 77.2667234 unix:0:fp1_cache:depot_alloc 0 unix:0:fp1_cache:depot_contention 0 unix:0:fp1_cache:depot_free 1 unix:0:fp1_cache:empty_magazines 0 unix:0:fp1_cache:free 6 unix:0:fp1_cache:full_magazines 0 unix:0:fp1_cache:hash_lookup_depth 0 unix:0:fp1_cache:hash_rescale 0 unix:0:fp1_cache:hash_size 0 unix:0:fp1_cache:magazine_size 3 unix:0:fp1_cache:slab_alloc 3 unix:0:fp1_cache:slab_create 1 unix:0:fp1_cache:slab_destroy 0 unix:0:fp1_cache:slab_free 0 unix:0:fp1_cache:slab_size 8192 unix:0:fp1_cache:snaptime 8464512.5397306 unix:0:fp1_cache:vmem_source 23 unix:0:fpu_info:class misc unix:0:fpu_info:crtime 69.2662772 unix:0:fpu_info:fpu_sim_fabsd 0 unix:0:fpu_info:fpu_sim_fabsq 0 unix:0:fpu_info:fpu_sim_fabss 0 unix:0:fpu_info:fpu_sim_faddd 0 unix:0:fpu_info:fpu_sim_faddq 0 unix:0:fpu_info:fpu_sim_fadds 2903 unix:0:fpu_info:fpu_sim_fcmpd 0 unix:0:fpu_info:fpu_sim_fcmped 0 unix:0:fpu_info:fpu_sim_fcmpeq 0 unix:0:fpu_info:fpu_sim_fcmpes 0 unix:0:fpu_info:fpu_sim_fcmpq 0 unix:0:fpu_info:fpu_sim_fcmps 0 unix:0:fpu_info:fpu_sim_fdivd 0 unix:0:fpu_info:fpu_sim_fdivq 0 unix:0:fpu_info:fpu_sim_fdivs 2903 unix:0:fpu_info:fpu_sim_fdmulx 0 unix:0:fpu_info:fpu_sim_fdtoi 2 unix:0:fpu_info:fpu_sim_fdtox 0 unix:0:fpu_info:fpu_sim_fitod 0 unix:0:fpu_info:fpu_sim_fitoq 0 unix:0:fpu_info:fpu_sim_fitos 0 unix:0:fpu_info:fpu_sim_fmaddd 0 unix:0:fpu_info:fpu_sim_fmadds 0 unix:0:fpu_info:fpu_sim_fmovcc 0 unix:0:fpu_info:fpu_sim_fmovd 0 unix:0:fpu_info:fpu_sim_fmovq 0 unix:0:fpu_info:fpu_sim_fmovr 0 unix:0:fpu_info:fpu_sim_fmovs 0 unix:0:fpu_info:fpu_sim_fmsubd 0 unix:0:fpu_info:fpu_sim_fmsubs 0 unix:0:fpu_info:fpu_sim_fmuld 0 unix:0:fpu_info:fpu_sim_fmulq 0 unix:0:fpu_info:fpu_sim_fmuls 2902 unix:0:fpu_info:fpu_sim_fnegd 0 unix:0:fpu_info:fpu_sim_fnegq 0 unix:0:fpu_info:fpu_sim_fnegs 0 unix:0:fpu_info:fpu_sim_fnmaddd 0 unix:0:fpu_info:fpu_sim_fnmadds 0 unix:0:fpu_info:fpu_sim_fnmsubd 0 unix:0:fpu_info:fpu_sim_fnmsubs 0 unix:0:fpu_info:fpu_sim_fqtoi 0 unix:0:fpu_info:fpu_sim_fqtox 0 unix:0:fpu_info:fpu_sim_fsmuld 0 unix:0:fpu_info:fpu_sim_fsqrtd 0 unix:0:fpu_info:fpu_sim_fsqrtq 0 unix:0:fpu_info:fpu_sim_fsqrts 0 unix:0:fpu_info:fpu_sim_fstoi 0 unix:0:fpu_info:fpu_sim_fstox 0 unix:0:fpu_info:fpu_sim_fsubd 0 unix:0:fpu_info:fpu_sim_fsubq 0 unix:0:fpu_info:fpu_sim_fsubs 0 unix:0:fpu_info:fpu_sim_fxtod 150 unix:0:fpu_info:fpu_sim_fxtoq 0 unix:0:fpu_info:fpu_sim_fxtos 0 unix:0:fpu_info:fpu_sim_invalid 0 unix:0:fpu_info:snaptime 8464512.5412026 unix:0:fpu_traps:class misc unix:0:fpu_traps:crtime 69.2662688 unix:0:fpu_traps:fpu_ieee_traps 0 unix:0:fpu_traps:fpu_unfinished_traps 8860 unix:0:fpu_traps:fpu_unimplemented 0 unix:0:fpu_traps:snaptime 8464512.5440126 unix:0:glm0_cache:align 8 unix:0:glm0_cache:alloc 18 unix:0:glm0_cache:alloc_fail 0 unix:0:glm0_cache:buf_avail 15 unix:0:glm0_cache:buf_constructed 0 unix:0:glm0_cache:buf_inuse 4 unix:0:glm0_cache:buf_max 19 unix:0:glm0_cache:buf_size 416 unix:0:glm0_cache:buf_total 19 unix:0:glm0_cache:chunk_size 416 unix:0:glm0_cache:class kmem_cache unix:0:glm0_cache:crtime 123.0786306 
unix:0:glm0_cache:depot_alloc 0 unix:0:glm0_cache:depot_contention 0 unix:0:glm0_cache:depot_free 1 unix:0:glm0_cache:empty_magazines 0 unix:0:glm0_cache:free 15 unix:0:glm0_cache:full_magazines 0 unix:0:glm0_cache:hash_lookup_depth 0 unix:0:glm0_cache:hash_rescale 0 unix:0:glm0_cache:hash_size 0 unix:0:glm0_cache:magazine_size 3 unix:0:glm0_cache:slab_alloc 4 unix:0:glm0_cache:slab_create 1 unix:0:glm0_cache:slab_destroy 0 unix:0:glm0_cache:slab_free 0 unix:0:glm0_cache:slab_size 8192 unix:0:glm0_cache:snaptime 8464512.5443976 unix:0:glm0_cache:vmem_source 23 unix:0:glm_nexus_enum_tq:class taskq unix:0:glm_nexus_enum_tq:crtime 123.0615516 unix:0:glm_nexus_enum_tq:executed 0 unix:0:glm_nexus_enum_tq:maxtasks 0 unix:0:glm_nexus_enum_tq:nactive 1 unix:0:glm_nexus_enum_tq:nalloc 0 unix:0:glm_nexus_enum_tq:priority 60 unix:0:glm_nexus_enum_tq:snaptime 8464512.545842 unix:0:glm_nexus_enum_tq:tasks 0 unix:0:glm_nexus_enum_tq:threads 1 unix:0:glm_nexus_enum_tq:totaltime 0 unix:0:hal0_cache:align 8 unix:0:hal0_cache:alloc 1 unix:0:hal0_cache:alloc_fail 0 unix:0:hal0_cache:buf_avail 17 unix:0:hal0_cache:buf_constructed 1 unix:0:hal0_cache:buf_inuse 0 unix:0:hal0_cache:buf_max 17 unix:0:hal0_cache:buf_size 464 unix:0:hal0_cache:buf_total 17 unix:0:hal0_cache:chunk_size 464 unix:0:hal0_cache:class kmem_cache unix:0:hal0_cache:crtime 86.0010524 unix:0:hal0_cache:depot_alloc 0 unix:0:hal0_cache:depot_contention 0 unix:0:hal0_cache:depot_free 1 unix:0:hal0_cache:empty_magazines 0 unix:0:hal0_cache:free 2 unix:0:hal0_cache:full_magazines 0 unix:0:hal0_cache:hash_lookup_depth 0 unix:0:hal0_cache:hash_rescale 0 unix:0:hal0_cache:hash_size 0 unix:0:hal0_cache:magazine_size 3 unix:0:hal0_cache:slab_alloc 1 unix:0:hal0_cache:slab_create 1 unix:0:hal0_cache:slab_destroy 0 unix:0:hal0_cache:slab_free 0 unix:0:hal0_cache:slab_size 8192 unix:0:hal0_cache:snaptime 8464512.5464526 unix:0:hal0_cache:vmem_source 23 unix:0:hci1394_nexus_enum_tq:class taskq unix:0:hci1394_nexus_enum_tq:crtime 85.9538036 unix:0:hci1394_nexus_enum_tq:executed 0 unix:0:hci1394_nexus_enum_tq:maxtasks 0 unix:0:hci1394_nexus_enum_tq:nactive 1 unix:0:hci1394_nexus_enum_tq:nalloc 0 unix:0:hci1394_nexus_enum_tq:priority 60 unix:0:hci1394_nexus_enum_tq:snaptime 8464512.5479066 unix:0:hci1394_nexus_enum_tq:tasks 0 unix:0:hci1394_nexus_enum_tq:threads 1 unix:0:hci1394_nexus_enum_tq:totaltime 0 unix:0:id32_cache:align 32 unix:0:id32_cache:alloc 30 unix:0:id32_cache:alloc_fail 0 unix:0:id32_cache:buf_avail 251 unix:0:id32_cache:buf_constructed 19 unix:0:id32_cache:buf_inuse 3 unix:0:id32_cache:buf_max 254 unix:0:id32_cache:buf_size 32 unix:0:id32_cache:buf_total 254 unix:0:id32_cache:chunk_size 32 unix:0:id32_cache:class kmem_cache unix:0:id32_cache:crtime 66.0100292 unix:0:id32_cache:depot_alloc 0 unix:0:id32_cache:depot_contention 0 unix:0:id32_cache:depot_free 2 unix:0:id32_cache:empty_magazines 0 unix:0:id32_cache:free 29 unix:0:id32_cache:full_magazines 0 unix:0:id32_cache:hash_lookup_depth 0 unix:0:id32_cache:hash_rescale 0 unix:0:id32_cache:hash_size 0 unix:0:id32_cache:magazine_size 15 unix:0:id32_cache:slab_alloc 22 unix:0:id32_cache:slab_create 1 unix:0:id32_cache:slab_destroy 0 unix:0:id32_cache:slab_free 0 unix:0:id32_cache:slab_size 8192 unix:0:id32_cache:snaptime 8464512.5485038 unix:0:id32_cache:vmem_source 28 unix:0:ip_minor_arena_1:align 1 unix:0:ip_minor_arena_1:alloc 381672 unix:0:ip_minor_arena_1:alloc_fail 0 unix:0:ip_minor_arena_1:buf_avail 128 unix:0:ip_minor_arena_1:buf_constructed 114 unix:0:ip_minor_arena_1:buf_inuse 64 
unix:0:ip_minor_arena_1:buf_max 192 unix:0:ip_minor_arena_1:buf_size 1 unix:0:ip_minor_arena_1:buf_total 192 unix:0:ip_minor_arena_1:chunk_size 1 unix:0:ip_minor_arena_1:class kmem_cache unix:0:ip_minor_arena_1:crtime 83.9662186 unix:0:ip_minor_arena_1:depot_alloc 595 unix:0:ip_minor_arena_1:depot_contention 0 unix:0:ip_minor_arena_1:depot_free 603 unix:0:ip_minor_arena_1:empty_magazines 2 unix:0:ip_minor_arena_1:free 381616 unix:0:ip_minor_arena_1:full_magazines 5 unix:0:ip_minor_arena_1:hash_lookup_depth 0 unix:0:ip_minor_arena_1:hash_rescale 1 unix:0:ip_minor_arena_1:hash_size 256 unix:0:ip_minor_arena_1:magazine_size 15 unix:0:ip_minor_arena_1:slab_alloc 178 unix:0:ip_minor_arena_1:slab_create 3 unix:0:ip_minor_arena_1:slab_destroy 0 unix:0:ip_minor_arena_1:slab_free 0 unix:0:ip_minor_arena_1:slab_size 64 unix:0:ip_minor_arena_1:snaptime 8464512.5498966 unix:0:ip_minor_arena_1:vmem_source 50 unix:0:ipcl_conn_cache:align 64 unix:0:ipcl_conn_cache:alloc 281482 unix:0:ipcl_conn_cache:alloc_fail 0 unix:0:ipcl_conn_cache:buf_avail 19 unix:0:ipcl_conn_cache:buf_constructed 7 unix:0:ipcl_conn_cache:buf_inuse 26 unix:0:ipcl_conn_cache:buf_max 45 unix:0:ipcl_conn_cache:buf_size 496 unix:0:ipcl_conn_cache:buf_total 45 unix:0:ipcl_conn_cache:chunk_size 512 unix:0:ipcl_conn_cache:class kmem_cache unix:0:ipcl_conn_cache:crtime 83.9662618 unix:0:ipcl_conn_cache:depot_alloc 24 unix:0:ipcl_conn_cache:depot_contention 0 unix:0:ipcl_conn_cache:depot_free 28 unix:0:ipcl_conn_cache:empty_magazines 2 unix:0:ipcl_conn_cache:free 281460 unix:0:ipcl_conn_cache:full_magazines 0 unix:0:ipcl_conn_cache:hash_lookup_depth 0 unix:0:ipcl_conn_cache:hash_rescale 0 unix:0:ipcl_conn_cache:hash_size 0 unix:0:ipcl_conn_cache:magazine_size 3 unix:0:ipcl_conn_cache:slab_alloc 33 unix:0:ipcl_conn_cache:slab_create 3 unix:0:ipcl_conn_cache:slab_destroy 0 unix:0:ipcl_conn_cache:slab_free 0 unix:0:ipcl_conn_cache:slab_size 8192 unix:0:ipcl_conn_cache:snaptime 8464512.5513636 unix:0:ipcl_conn_cache:vmem_source 23 unix:0:ipcl_tcpconn_cache:align 64 unix:0:ipcl_tcpconn_cache:alloc 101226 unix:0:ipcl_tcpconn_cache:alloc_fail 0 unix:0:ipcl_tcpconn_cache:buf_avail 113 unix:0:ipcl_tcpconn_cache:buf_constructed 113 unix:0:ipcl_tcpconn_cache:buf_inuse 40 unix:0:ipcl_tcpconn_cache:buf_max 153 unix:0:ipcl_tcpconn_cache:buf_size 1704 unix:0:ipcl_tcpconn_cache:buf_total 153 unix:0:ipcl_tcpconn_cache:chunk_size 1728 unix:0:ipcl_tcpconn_cache:class kmem_cache unix:0:ipcl_tcpconn_cache:crtime 83.966279 unix:0:ipcl_tcpconn_cache:depot_alloc 1524 unix:0:ipcl_tcpconn_cache:depot_contention 0 unix:0:ipcl_tcpconn_cache:depot_free 1563 unix:0:ipcl_tcpconn_cache:empty_magazines 5 unix:0:ipcl_tcpconn_cache:free 101225 unix:0:ipcl_tcpconn_cache:full_magazines 35 unix:0:ipcl_tcpconn_cache:hash_lookup_depth 0 unix:0:ipcl_tcpconn_cache:hash_rescale 0 unix:0:ipcl_tcpconn_cache:hash_size 64 unix:0:ipcl_tcpconn_cache:magazine_size 3 unix:0:ipcl_tcpconn_cache:slab_alloc 153 unix:0:ipcl_tcpconn_cache:slab_create 17 unix:0:ipcl_tcpconn_cache:slab_destroy 0 unix:0:ipcl_tcpconn_cache:slab_free 0 unix:0:ipcl_tcpconn_cache:slab_size 16384 unix:0:ipcl_tcpconn_cache:snaptime 8464512.5528404 unix:0:ipcl_tcpconn_cache:vmem_source 23 unix:0:ipp_action:align 64 unix:0:ipp_action:alloc 0 unix:0:ipp_action:alloc_fail 0 unix:0:ipp_action:buf_avail 0 unix:0:ipp_action:buf_constructed 0 unix:0:ipp_action:buf_inuse 0 unix:0:ipp_action:buf_max 0 unix:0:ipp_action:buf_size 368 unix:0:ipp_action:buf_total 0 unix:0:ipp_action:chunk_size 384 unix:0:ipp_action:class kmem_cache 
unix:0:ipp_action:crtime 66.1446024 unix:0:ipp_action:depot_alloc 0 unix:0:ipp_action:depot_contention 0 unix:0:ipp_action:depot_free 0 unix:0:ipp_action:empty_magazines 0 unix:0:ipp_action:free 0 unix:0:ipp_action:full_magazines 0 unix:0:ipp_action:hash_lookup_depth 0 unix:0:ipp_action:hash_rescale 0 unix:0:ipp_action:hash_size 0 unix:0:ipp_action:magazine_size 3 unix:0:ipp_action:slab_alloc 0 unix:0:ipp_action:slab_create 0 unix:0:ipp_action:slab_destroy 0 unix:0:ipp_action:slab_free 0 unix:0:ipp_action:slab_size 8192 unix:0:ipp_action:snaptime 8464512.5542314 unix:0:ipp_action:vmem_source 23 unix:0:ipp_mod:align 64 unix:0:ipp_mod:alloc 0 unix:0:ipp_mod:alloc_fail 0 unix:0:ipp_mod:buf_avail 0 unix:0:ipp_mod:buf_constructed 0 unix:0:ipp_mod:buf_inuse 0 unix:0:ipp_mod:buf_max 0 unix:0:ipp_mod:buf_size 304 unix:0:ipp_mod:buf_total 0 unix:0:ipp_mod:chunk_size 320 unix:0:ipp_mod:class kmem_cache unix:0:ipp_mod:crtime 66.1344252 unix:0:ipp_mod:depot_alloc 0 unix:0:ipp_mod:depot_contention 0 unix:0:ipp_mod:depot_free 0 unix:0:ipp_mod:empty_magazines 0 unix:0:ipp_mod:free 0 unix:0:ipp_mod:full_magazines 0 unix:0:ipp_mod:hash_lookup_depth 0 unix:0:ipp_mod:hash_rescale 0 unix:0:ipp_mod:hash_size 0 unix:0:ipp_mod:magazine_size 3 unix:0:ipp_mod:slab_alloc 0 unix:0:ipp_mod:slab_create 0 unix:0:ipp_mod:slab_destroy 0 unix:0:ipp_mod:slab_free 0 unix:0:ipp_mod:slab_size 8192 unix:0:ipp_mod:snaptime 8464512.5555976 unix:0:ipp_mod:vmem_source 23 unix:0:ipp_packet:align 64 unix:0:ipp_packet:alloc 0 unix:0:ipp_packet:alloc_fail 0 unix:0:ipp_packet:buf_avail 0 unix:0:ipp_packet:buf_constructed 0 unix:0:ipp_packet:buf_inuse 0 unix:0:ipp_packet:buf_max 0 unix:0:ipp_packet:buf_size 64 unix:0:ipp_packet:buf_total 0 unix:0:ipp_packet:chunk_size 64 unix:0:ipp_packet:class kmem_cache unix:0:ipp_packet:crtime 66.1446172 unix:0:ipp_packet:depot_alloc 0 unix:0:ipp_packet:depot_contention 0 unix:0:ipp_packet:depot_free 0 unix:0:ipp_packet:empty_magazines 0 unix:0:ipp_packet:free 0 unix:0:ipp_packet:full_magazines 0 unix:0:ipp_packet:hash_lookup_depth 0 unix:0:ipp_packet:hash_rescale 0 unix:0:ipp_packet:hash_size 0 unix:0:ipp_packet:magazine_size 15 unix:0:ipp_packet:slab_alloc 0 unix:0:ipp_packet:slab_create 0 unix:0:ipp_packet:slab_destroy 0 unix:0:ipp_packet:slab_free 0 unix:0:ipp_packet:slab_size 8192 unix:0:ipp_packet:snaptime 8464512.5570272 unix:0:ipp_packet:vmem_source 23 unix:0:ipsec_actions:align 8 unix:0:ipsec_actions:alloc 1959 unix:0:ipsec_actions:alloc_fail 0 unix:0:ipsec_actions:buf_avail 113 unix:0:ipsec_actions:buf_constructed 5 unix:0:ipsec_actions:buf_inuse 0 unix:0:ipsec_actions:buf_max 113 unix:0:ipsec_actions:buf_size 72 unix:0:ipsec_actions:buf_total 113 unix:0:ipsec_actions:chunk_size 72 unix:0:ipsec_actions:class kmem_cache unix:0:ipsec_actions:crtime 83.977907 unix:0:ipsec_actions:depot_alloc 0 unix:0:ipsec_actions:depot_contention 0 unix:0:ipsec_actions:depot_free 2 unix:0:ipsec_actions:empty_magazines 0 unix:0:ipsec_actions:free 1961 unix:0:ipsec_actions:full_magazines 0 unix:0:ipsec_actions:hash_lookup_depth 0 unix:0:ipsec_actions:hash_rescale 0 unix:0:ipsec_actions:hash_size 0 unix:0:ipsec_actions:magazine_size 7 unix:0:ipsec_actions:slab_alloc 5 unix:0:ipsec_actions:slab_create 1 unix:0:ipsec_actions:slab_destroy 0 unix:0:ipsec_actions:slab_free 0 unix:0:ipsec_actions:slab_size 8192 unix:0:ipsec_actions:snaptime 8464512.558414 unix:0:ipsec_actions:vmem_source 23 unix:0:ipsec_info:align 8 unix:0:ipsec_info:alloc 6482 unix:0:ipsec_info:alloc_fail 0 unix:0:ipsec_info:buf_avail 23 
unix:0:ipsec_info:buf_constructed 5 unix:0:ipsec_info:buf_inuse 0 unix:0:ipsec_info:buf_max 23 unix:0:ipsec_info:buf_size 352 unix:0:ipsec_info:buf_total 23 unix:0:ipsec_info:chunk_size 352 unix:0:ipsec_info:class kmem_cache unix:0:ipsec_info:crtime 83.977957 unix:0:ipsec_info:depot_alloc 0 unix:0:ipsec_info:depot_contention 0 unix:0:ipsec_info:depot_free 3 unix:0:ipsec_info:empty_magazines 0 unix:0:ipsec_info:free 6485 unix:0:ipsec_info:full_magazines 0 unix:0:ipsec_info:hash_lookup_depth 0 unix:0:ipsec_info:hash_rescale 0 unix:0:ipsec_info:hash_size 0 unix:0:ipsec_info:magazine_size 3 unix:0:ipsec_info:slab_alloc 5 unix:0:ipsec_info:slab_create 1 unix:0:ipsec_info:slab_destroy 0 unix:0:ipsec_info:slab_free 0 unix:0:ipsec_info:slab_size 8192 unix:0:ipsec_info:snaptime 8464512.559947 unix:0:ipsec_info:vmem_source 23 unix:0:ipsec_policy:align 8 unix:0:ipsec_policy:alloc 0 unix:0:ipsec_policy:alloc_fail 0 unix:0:ipsec_policy:buf_avail 0 unix:0:ipsec_policy:buf_constructed 0 unix:0:ipsec_policy:buf_inuse 0 unix:0:ipsec_policy:buf_max 0 unix:0:ipsec_policy:buf_size 72 unix:0:ipsec_policy:buf_total 0 unix:0:ipsec_policy:chunk_size 72 unix:0:ipsec_policy:class kmem_cache unix:0:ipsec_policy:crtime 83.9779424 unix:0:ipsec_policy:depot_alloc 0 unix:0:ipsec_policy:depot_contention 0 unix:0:ipsec_policy:depot_free 0 unix:0:ipsec_policy:empty_magazines 0 unix:0:ipsec_policy:free 0 unix:0:ipsec_policy:full_magazines 0 unix:0:ipsec_policy:hash_lookup_depth 0 unix:0:ipsec_policy:hash_rescale 0 unix:0:ipsec_policy:hash_size 0 unix:0:ipsec_policy:magazine_size 7 unix:0:ipsec_policy:slab_alloc 0 unix:0:ipsec_policy:slab_create 0 unix:0:ipsec_policy:slab_destroy 0 unix:0:ipsec_policy:slab_free 0 unix:0:ipsec_policy:slab_size 8192 unix:0:ipsec_policy:snaptime 8464512.5614238 unix:0:ipsec_policy:vmem_source 23 unix:0:ipsec_selectors:align 8 unix:0:ipsec_selectors:alloc 0 unix:0:ipsec_selectors:alloc_fail 0 unix:0:ipsec_selectors:buf_avail 0 unix:0:ipsec_selectors:buf_constructed 0 unix:0:ipsec_selectors:buf_inuse 0 unix:0:ipsec_selectors:buf_max 0 unix:0:ipsec_selectors:buf_size 80 unix:0:ipsec_selectors:buf_total 0 unix:0:ipsec_selectors:chunk_size 80 unix:0:ipsec_selectors:class kmem_cache unix:0:ipsec_selectors:crtime 83.9779276 unix:0:ipsec_selectors:depot_alloc 0 unix:0:ipsec_selectors:depot_contention 0 unix:0:ipsec_selectors:depot_free 0 unix:0:ipsec_selectors:empty_magazines 0 unix:0:ipsec_selectors:free 0 unix:0:ipsec_selectors:full_magazines 0 unix:0:ipsec_selectors:hash_lookup_depth 0 unix:0:ipsec_selectors:hash_rescale 0 unix:0:ipsec_selectors:hash_size 0 unix:0:ipsec_selectors:magazine_size 7 unix:0:ipsec_selectors:slab_alloc 0 unix:0:ipsec_selectors:slab_create 0 unix:0:ipsec_selectors:slab_destroy 0 unix:0:ipsec_selectors:slab_free 0 unix:0:ipsec_selectors:slab_size 8192 unix:0:ipsec_selectors:snaptime 8464512.562858 unix:0:ipsec_selectors:vmem_source 23 unix:0:ire_cache:align 8 unix:0:ire_cache:alloc 12058 unix:0:ire_cache:alloc_fail 0 unix:0:ire_cache:buf_avail 19 unix:0:ire_cache:buf_constructed 11 unix:0:ire_cache:buf_inuse 25 unix:0:ire_cache:buf_max 44 unix:0:ire_cache:buf_size 368 unix:0:ire_cache:buf_total 44 unix:0:ire_cache:chunk_size 368 unix:0:ire_cache:class kmem_cache unix:0:ire_cache:crtime 83.9662942 unix:0:ire_cache:depot_alloc 1675 unix:0:ire_cache:depot_contention 0 unix:0:ire_cache:depot_free 1680 unix:0:ire_cache:empty_magazines 3 unix:0:ire_cache:free 12038 unix:0:ire_cache:full_magazines 1 unix:0:ire_cache:hash_lookup_depth 0 unix:0:ire_cache:hash_rescale 0 
unix:0:ire_cache:hash_size 0 unix:0:ire_cache:magazine_size 3 unix:0:ire_cache:slab_alloc 36 unix:0:ire_cache:slab_create 2 unix:0:ire_cache:slab_destroy 0 unix:0:ire_cache:slab_free 0 unix:0:ire_cache:slab_size 8192 unix:0:ire_cache:snaptime 8464512.5642614 unix:0:ire_cache:vmem_source 23 unix:0:ire_gw_secattr_cache:align 64 unix:0:ire_gw_secattr_cache:alloc 0 unix:0:ire_gw_secattr_cache:alloc_fail 0 unix:0:ire_gw_secattr_cache:buf_avail 0 unix:0:ire_gw_secattr_cache:buf_constructed 0 unix:0:ire_gw_secattr_cache:buf_inuse 0 unix:0:ire_gw_secattr_cache:buf_max 0 unix:0:ire_gw_secattr_cache:buf_size 32 unix:0:ire_gw_secattr_cache:buf_total 0 unix:0:ire_gw_secattr_cache:chunk_size 64 unix:0:ire_gw_secattr_cache:class kmem_cache unix:0:ire_gw_secattr_cache:crtime 83.9812562 unix:0:ire_gw_secattr_cache:depot_alloc 0 unix:0:ire_gw_secattr_cache:depot_contention 0 unix:0:ire_gw_secattr_cache:depot_free 0 unix:0:ire_gw_secattr_cache:empty_magazines 0 unix:0:ire_gw_secattr_cache:free 0 unix:0:ire_gw_secattr_cache:full_magazines 0 unix:0:ire_gw_secattr_cache:hash_lookup_depth 0 unix:0:ire_gw_secattr_cache:hash_rescale 0 unix:0:ire_gw_secattr_cache:hash_size 0 unix:0:ire_gw_secattr_cache:magazine_size 15 unix:0:ire_gw_secattr_cache:slab_alloc 0 unix:0:ire_gw_secattr_cache:slab_create 0 unix:0:ire_gw_secattr_cache:slab_destroy 0 unix:0:ire_gw_secattr_cache:slab_free 0 unix:0:ire_gw_secattr_cache:slab_size 8192 unix:0:ire_gw_secattr_cache:snaptime 8464512.5656304 unix:0:ire_gw_secattr_cache:vmem_source 23 unix:0:iscsi_SLP:class taskq unix:0:iscsi_SLP:crtime 85.7524188 unix:0:iscsi_SLP:executed 0 unix:0:iscsi_SLP:maxtasks 0 unix:0:iscsi_SLP:nactive 1 unix:0:iscsi_SLP:nalloc 0 unix:0:iscsi_SLP:priority 60 unix:0:iscsi_SLP:snaptime 8464512.567011 unix:0:iscsi_SLP:tasks 0 unix:0:iscsi_SLP:threads 1 unix:0:iscsi_SLP:totaltime 0 unix:0:iscsi_SendTarget:class taskq unix:0:iscsi_SendTarget:crtime 85.7523704 unix:0:iscsi_SendTarget:executed 0 unix:0:iscsi_SendTarget:maxtasks 0 unix:0:iscsi_SendTarget:nactive 1 unix:0:iscsi_SendTarget:nalloc 0 unix:0:iscsi_SendTarget:priority 60 unix:0:iscsi_SendTarget:snaptime 8464512.5676886 unix:0:iscsi_SendTarget:tasks 0 unix:0:iscsi_SendTarget:threads 1 unix:0:iscsi_SendTarget:totaltime 0 unix:0:iscsi_Static:class taskq unix:0:iscsi_Static:crtime 85.7523066 unix:0:iscsi_Static:executed 0 unix:0:iscsi_Static:maxtasks 0 unix:0:iscsi_Static:nactive 1 unix:0:iscsi_Static:nalloc 0 unix:0:iscsi_Static:priority 60 unix:0:iscsi_Static:snaptime 8464512.568268 unix:0:iscsi_Static:tasks 0 unix:0:iscsi_Static:threads 1 unix:0:iscsi_Static:totaltime 0 unix:0:iscsi_iSNS:class taskq unix:0:iscsi_iSNS:crtime 85.7524616 unix:0:iscsi_iSNS:executed 0 unix:0:iscsi_iSNS:maxtasks 0 unix:0:iscsi_iSNS:nactive 1 unix:0:iscsi_iSNS:nalloc 0 unix:0:iscsi_iSNS:priority 60 unix:0:iscsi_iSNS:snaptime 8464512.5688446 unix:0:iscsi_iSNS:tasks 0 unix:0:iscsi_iSNS:threads 1 unix:0:iscsi_iSNS:totaltime 0 unix:0:iscsi_nexus_enum_tq:class taskq unix:0:iscsi_nexus_enum_tq:crtime 85.6917532 unix:0:iscsi_nexus_enum_tq:executed 0 unix:0:iscsi_nexus_enum_tq:maxtasks 0 unix:0:iscsi_nexus_enum_tq:nactive 1 unix:0:iscsi_nexus_enum_tq:nalloc 0 unix:0:iscsi_nexus_enum_tq:priority 60 unix:0:iscsi_nexus_enum_tq:snaptime 8464512.569425 unix:0:iscsi_nexus_enum_tq:tasks 0 unix:0:iscsi_nexus_enum_tq:threads 1 unix:0:iscsi_nexus_enum_tq:totaltime 0 unix:0:ism_blk_cache:align 512 unix:0:ism_blk_cache:alloc 0 unix:0:ism_blk_cache:alloc_fail 0 unix:0:ism_blk_cache:buf_avail 0 unix:0:ism_blk_cache:buf_constructed 0 
unix:0:ism_blk_cache:buf_inuse 0 unix:0:ism_blk_cache:buf_max 0 unix:0:ism_blk_cache:buf_size 272 unix:0:ism_blk_cache:buf_total 0 unix:0:ism_blk_cache:chunk_size 512 unix:0:ism_blk_cache:class kmem_cache unix:0:ism_blk_cache:crtime 66.2390626 unix:0:ism_blk_cache:depot_alloc 0 unix:0:ism_blk_cache:depot_contention 0 unix:0:ism_blk_cache:depot_free 0 unix:0:ism_blk_cache:empty_magazines 0 unix:0:ism_blk_cache:free 0 unix:0:ism_blk_cache:full_magazines 0 unix:0:ism_blk_cache:hash_lookup_depth 0 unix:0:ism_blk_cache:hash_rescale 0 unix:0:ism_blk_cache:hash_size 0 unix:0:ism_blk_cache:magazine_size 3 unix:0:ism_blk_cache:slab_alloc 0 unix:0:ism_blk_cache:slab_create 0 unix:0:ism_blk_cache:slab_destroy 0 unix:0:ism_blk_cache:slab_free 0 unix:0:ism_blk_cache:slab_size 8192 unix:0:ism_blk_cache:snaptime 8464512.5700578 unix:0:ism_blk_cache:vmem_source 8 unix:0:ism_ment_cache:align 8 unix:0:ism_ment_cache:alloc 0 unix:0:ism_ment_cache:alloc_fail 0 unix:0:ism_ment_cache:buf_avail 0 unix:0:ism_ment_cache:buf_constructed 0 unix:0:ism_ment_cache:buf_inuse 0 unix:0:ism_ment_cache:buf_max 0 unix:0:ism_ment_cache:buf_size 32 unix:0:ism_ment_cache:buf_total 0 unix:0:ism_ment_cache:chunk_size 32 unix:0:ism_ment_cache:class kmem_cache unix:0:ism_ment_cache:crtime 66.2390746 unix:0:ism_ment_cache:depot_alloc 0 unix:0:ism_ment_cache:depot_contention 0 unix:0:ism_ment_cache:depot_free 0 unix:0:ism_ment_cache:empty_magazines 0 unix:0:ism_ment_cache:free 0 unix:0:ism_ment_cache:full_magazines 0 unix:0:ism_ment_cache:hash_lookup_depth 0 unix:0:ism_ment_cache:hash_rescale 0 unix:0:ism_ment_cache:hash_size 0 unix:0:ism_ment_cache:magazine_size 15 unix:0:ism_ment_cache:slab_alloc 0 unix:0:ism_ment_cache:slab_create 0 unix:0:ism_ment_cache:slab_destroy 0 unix:0:ism_ment_cache:slab_free 0 unix:0:ism_ment_cache:slab_size 8192 unix:0:ism_ment_cache:snaptime 8464512.5715344 unix:0:ism_ment_cache:vmem_source 23 unix:0:isns_reg_query_taskq:class taskq unix:0:isns_reg_query_taskq:crtime 85.6924246 unix:0:isns_reg_query_taskq:executed 0 unix:0:isns_reg_query_taskq:maxtasks 0 unix:0:isns_reg_query_taskq:nactive 1 unix:0:isns_reg_query_taskq:nalloc 0 unix:0:isns_reg_query_taskq:priority 60 unix:0:isns_reg_query_taskq:snaptime 8464512.5729142 unix:0:isns_reg_query_taskq:tasks 0 unix:0:isns_reg_query_taskq:threads 1 unix:0:isns_reg_query_taskq:totaltime 0 unix:0:isns_scn_taskq:class taskq unix:0:isns_scn_taskq:crtime 85.6924718 unix:0:isns_scn_taskq:executed 0 unix:0:isns_scn_taskq:maxtasks 0 unix:0:isns_scn_taskq:nactive 1 unix:0:isns_scn_taskq:nalloc 0 unix:0:isns_scn_taskq:priority 60 unix:0:isns_scn_taskq:snaptime 8464512.5735994 unix:0:isns_scn_taskq:tasks 0 unix:0:isns_scn_taskq:threads 1 unix:0:isns_scn_taskq:totaltime 0 unix:0:kcf_areq_cache:align 64 unix:0:kcf_areq_cache:alloc 0 unix:0:kcf_areq_cache:alloc_fail 0 unix:0:kcf_areq_cache:buf_avail 0 unix:0:kcf_areq_cache:buf_constructed 0 unix:0:kcf_areq_cache:buf_inuse 0 unix:0:kcf_areq_cache:buf_max 0 unix:0:kcf_areq_cache:buf_size 288 unix:0:kcf_areq_cache:buf_total 0 unix:0:kcf_areq_cache:chunk_size 320 unix:0:kcf_areq_cache:class kmem_cache unix:0:kcf_areq_cache:crtime 83.919309 unix:0:kcf_areq_cache:depot_alloc 0 unix:0:kcf_areq_cache:depot_contention 0 unix:0:kcf_areq_cache:depot_free 0 unix:0:kcf_areq_cache:empty_magazines 0 unix:0:kcf_areq_cache:free 0 unix:0:kcf_areq_cache:full_magazines 0 unix:0:kcf_areq_cache:hash_lookup_depth 0 unix:0:kcf_areq_cache:hash_rescale 0 unix:0:kcf_areq_cache:hash_size 0 unix:0:kcf_areq_cache:magazine_size 3 
unix:0:kcf_areq_cache:slab_alloc 0 unix:0:kcf_areq_cache:slab_create 0 unix:0:kcf_areq_cache:slab_destroy 0 unix:0:kcf_areq_cache:slab_free 0 unix:0:kcf_areq_cache:slab_size 8192 unix:0:kcf_areq_cache:snaptime 8464512.57419 unix:0:kcf_areq_cache:vmem_source 23 unix:0:kcf_context_cache:align 64 unix:0:kcf_context_cache:alloc 0 unix:0:kcf_context_cache:alloc_fail 0 unix:0:kcf_context_cache:buf_avail 0 unix:0:kcf_context_cache:buf_constructed 0 unix:0:kcf_context_cache:buf_inuse 0 unix:0:kcf_context_cache:buf_max 0 unix:0:kcf_context_cache:buf_size 112 unix:0:kcf_context_cache:buf_total 0 unix:0:kcf_context_cache:chunk_size 128 unix:0:kcf_context_cache:class kmem_cache unix:0:kcf_context_cache:crtime 83.9193244 unix:0:kcf_context_cache:depot_alloc 0 unix:0:kcf_context_cache:depot_contention 0 unix:0:kcf_context_cache:depot_free 0 unix:0:kcf_context_cache:empty_magazines 0 unix:0:kcf_context_cache:free 0 unix:0:kcf_context_cache:full_magazines 0 unix:0:kcf_context_cache:hash_lookup_depth 0 unix:0:kcf_context_cache:hash_rescale 0 unix:0:kcf_context_cache:hash_size 0 unix:0:kcf_context_cache:magazine_size 7 unix:0:kcf_context_cache:slab_alloc 0 unix:0:kcf_context_cache:slab_create 0 unix:0:kcf_context_cache:slab_destroy 0 unix:0:kcf_context_cache:slab_free 0 unix:0:kcf_context_cache:slab_size 8192 unix:0:kcf_context_cache:snaptime 8464512.575569 unix:0:kcf_context_cache:vmem_source 23 unix:0:kcf_sreq_cache:align 64 unix:0:kcf_sreq_cache:alloc 0 unix:0:kcf_sreq_cache:alloc_fail 0 unix:0:kcf_sreq_cache:buf_avail 0 unix:0:kcf_sreq_cache:buf_constructed 0 unix:0:kcf_sreq_cache:buf_inuse 0 unix:0:kcf_sreq_cache:buf_max 0 unix:0:kcf_sreq_cache:buf_size 48 unix:0:kcf_sreq_cache:buf_total 0 unix:0:kcf_sreq_cache:chunk_size 64 unix:0:kcf_sreq_cache:class kmem_cache unix:0:kcf_sreq_cache:crtime 83.9192816 unix:0:kcf_sreq_cache:depot_alloc 0 unix:0:kcf_sreq_cache:depot_contention 0 unix:0:kcf_sreq_cache:depot_free 0 unix:0:kcf_sreq_cache:empty_magazines 0 unix:0:kcf_sreq_cache:free 0 unix:0:kcf_sreq_cache:full_magazines 0 unix:0:kcf_sreq_cache:hash_lookup_depth 0 unix:0:kcf_sreq_cache:hash_rescale 0 unix:0:kcf_sreq_cache:hash_size 0 unix:0:kcf_sreq_cache:magazine_size 15 unix:0:kcf_sreq_cache:slab_alloc 0 unix:0:kcf_sreq_cache:slab_create 0 unix:0:kcf_sreq_cache:slab_destroy 0 unix:0:kcf_sreq_cache:slab_free 0 unix:0:kcf_sreq_cache:slab_size 8192 unix:0:kcf_sreq_cache:snaptime 8464512.5769412 unix:0:kcf_sreq_cache:vmem_source 23 unix:0:keysock_1:align 1 unix:0:keysock_1:alloc 0 unix:0:keysock_1:alloc_fail 0 unix:0:keysock_1:buf_avail 0 unix:0:keysock_1:buf_constructed 0 unix:0:keysock_1:buf_inuse 0 unix:0:keysock_1:buf_max 0 unix:0:keysock_1:buf_size 1 unix:0:keysock_1:buf_total 0 unix:0:keysock_1:chunk_size 1 unix:0:keysock_1:class kmem_cache unix:0:keysock_1:crtime 87.7192104 unix:0:keysock_1:depot_alloc 0 unix:0:keysock_1:depot_contention 0 unix:0:keysock_1:depot_free 0 unix:0:keysock_1:empty_magazines 0 unix:0:keysock_1:free 0 unix:0:keysock_1:full_magazines 0 unix:0:keysock_1:hash_lookup_depth 0 unix:0:keysock_1:hash_rescale 0 unix:0:keysock_1:hash_size 64 unix:0:keysock_1:magazine_size 15 unix:0:keysock_1:slab_alloc 0 unix:0:keysock_1:slab_create 0 unix:0:keysock_1:slab_destroy 0 unix:0:keysock_1:slab_free 0 unix:0:keysock_1:slab_size 64 unix:0:keysock_1:snaptime 8464512.5784086 unix:0:keysock_1:vmem_source 54 unix:0:kmem_alloc_112:align 8 unix:0:kmem_alloc_112:alloc 3346 unix:0:kmem_alloc_112:alloc_fail 0 unix:0:kmem_alloc_112:buf_avail 244 unix:0:kmem_alloc_112:buf_constructed 192 
unix:0:kmem_alloc_112:buf_inuse 116 unix:0:kmem_alloc_112:buf_max 360 unix:0:kmem_alloc_112:buf_size 112 unix:0:kmem_alloc_112:buf_total 360 unix:0:kmem_alloc_112:chunk_size 112 unix:0:kmem_alloc_112:class kmem_cache unix:0:kmem_alloc_112:crtime 65.9500234 unix:0:kmem_alloc_112:depot_alloc 47 unix:0:kmem_alloc_112:depot_contention 0 unix:0:kmem_alloc_112:depot_free 76 unix:0:kmem_alloc_112:empty_magazines 2 unix:0:kmem_alloc_112:free 3259 unix:0:kmem_alloc_112:full_magazines 25 unix:0:kmem_alloc_112:hash_lookup_depth 0 unix:0:kmem_alloc_112:hash_rescale 0 unix:0:kmem_alloc_112:hash_size 0 unix:0:kmem_alloc_112:magazine_size 7 unix:0:kmem_alloc_112:slab_alloc 308 unix:0:kmem_alloc_112:slab_create 5 unix:0:kmem_alloc_112:slab_destroy 0 unix:0:kmem_alloc_112:slab_free 0 unix:0:kmem_alloc_112:slab_size 8192 unix:0:kmem_alloc_112:snaptime 8464512.5797768 unix:0:kmem_alloc_112:vmem_source 23 unix:0:kmem_alloc_1152:align 64 unix:0:kmem_alloc_1152:alloc 14465512 unix:0:kmem_alloc_1152:alloc_fail 0 unix:0:kmem_alloc_1152:buf_avail 210 unix:0:kmem_alloc_1152:buf_constructed 204 unix:0:kmem_alloc_1152:buf_inuse 392 unix:0:kmem_alloc_1152:buf_max 602 unix:0:kmem_alloc_1152:buf_size 1152 unix:0:kmem_alloc_1152:buf_total 602 unix:0:kmem_alloc_1152:chunk_size 1152 unix:0:kmem_alloc_1152:class kmem_cache unix:0:kmem_alloc_1152:crtime 65.9501738 unix:0:kmem_alloc_1152:depot_alloc 21408 unix:0:kmem_alloc_1152:depot_contention 0 unix:0:kmem_alloc_1152:depot_free 21476 unix:0:kmem_alloc_1152:empty_magazines 12 unix:0:kmem_alloc_1152:free 14465188 unix:0:kmem_alloc_1152:full_magazines 65 unix:0:kmem_alloc_1152:hash_lookup_depth 0 unix:0:kmem_alloc_1152:hash_rescale 1 unix:0:kmem_alloc_1152:hash_size 512 unix:0:kmem_alloc_1152:magazine_size 3 unix:0:kmem_alloc_1152:slab_alloc 596 unix:0:kmem_alloc_1152:slab_create 86 unix:0:kmem_alloc_1152:slab_destroy 0 unix:0:kmem_alloc_1152:slab_free 0 unix:0:kmem_alloc_1152:slab_size 8192 unix:0:kmem_alloc_1152:snaptime 8464512.5812246 unix:0:kmem_alloc_1152:vmem_source 23 unix:0:kmem_alloc_12288:align 64 unix:0:kmem_alloc_12288:alloc 210 unix:0:kmem_alloc_12288:alloc_fail 0 unix:0:kmem_alloc_12288:buf_avail 5 unix:0:kmem_alloc_12288:buf_constructed 4 unix:0:kmem_alloc_12288:buf_inuse 11 unix:0:kmem_alloc_12288:buf_max 16 unix:0:kmem_alloc_12288:buf_size 12288 unix:0:kmem_alloc_12288:buf_total 16 unix:0:kmem_alloc_12288:chunk_size 12288 unix:0:kmem_alloc_12288:class kmem_cache unix:0:kmem_alloc_12288:crtime 65.9502858 unix:0:kmem_alloc_12288:depot_alloc 5 unix:0:kmem_alloc_12288:depot_contention 0 unix:0:kmem_alloc_12288:depot_free 10 unix:0:kmem_alloc_12288:empty_magazines 2 unix:0:kmem_alloc_12288:free 204 unix:0:kmem_alloc_12288:full_magazines 1 unix:0:kmem_alloc_12288:hash_lookup_depth 0 unix:0:kmem_alloc_12288:hash_rescale 0 unix:0:kmem_alloc_12288:hash_size 64 unix:0:kmem_alloc_12288:magazine_size 1 unix:0:kmem_alloc_12288:slab_alloc 15 unix:0:kmem_alloc_12288:slab_create 8 unix:0:kmem_alloc_12288:slab_destroy 0 unix:0:kmem_alloc_12288:slab_free 0 unix:0:kmem_alloc_12288:slab_size 24576 unix:0:kmem_alloc_12288:snaptime 8464512.5827608 unix:0:kmem_alloc_12288:vmem_source 23 unix:0:kmem_alloc_128:align 64 unix:0:kmem_alloc_128:alloc 5373430 unix:0:kmem_alloc_128:alloc_fail 0 unix:0:kmem_alloc_128:buf_avail 359 unix:0:kmem_alloc_128:buf_constructed 334 unix:0:kmem_alloc_128:buf_inuse 271 unix:0:kmem_alloc_128:buf_max 630 unix:0:kmem_alloc_128:buf_size 128 unix:0:kmem_alloc_128:buf_total 630 unix:0:kmem_alloc_128:chunk_size 128 unix:0:kmem_alloc_128:class kmem_cache 
unix:0:kmem_alloc_128:crtime 65.9500346 unix:0:kmem_alloc_128:depot_alloc 6665 unix:0:kmem_alloc_128:depot_contention 0 unix:0:kmem_alloc_128:depot_free 6715 unix:0:kmem_alloc_128:empty_magazines 2 unix:0:kmem_alloc_128:free 5373209 unix:0:kmem_alloc_128:full_magazines 46 unix:0:kmem_alloc_128:hash_lookup_depth 0 unix:0:kmem_alloc_128:hash_rescale 0 unix:0:kmem_alloc_128:hash_size 0 unix:0:kmem_alloc_128:magazine_size 7 unix:0:kmem_alloc_128:slab_alloc 605 unix:0:kmem_alloc_128:slab_create 10 unix:0:kmem_alloc_128:slab_destroy 0 unix:0:kmem_alloc_128:slab_free 0 unix:0:kmem_alloc_128:slab_size 8192 unix:0:kmem_alloc_128:snaptime 8464512.5841534 unix:0:kmem_alloc_128:vmem_source 23 unix:0:kmem_alloc_1344:align 64 unix:0:kmem_alloc_1344:alloc 1565 unix:0:kmem_alloc_1344:alloc_fail 0 unix:0:kmem_alloc_1344:buf_avail 10 unix:0:kmem_alloc_1344:buf_constructed 8 unix:0:kmem_alloc_1344:buf_inuse 14 unix:0:kmem_alloc_1344:buf_max 24 unix:0:kmem_alloc_1344:buf_size 1344 unix:0:kmem_alloc_1344:buf_total 24 unix:0:kmem_alloc_1344:chunk_size 1344 unix:0:kmem_alloc_1344:class kmem_cache unix:0:kmem_alloc_1344:crtime 65.950188 unix:0:kmem_alloc_1344:depot_alloc 0 unix:0:kmem_alloc_1344:depot_contention 0 unix:0:kmem_alloc_1344:depot_free 5 unix:0:kmem_alloc_1344:empty_magazines 0 unix:0:kmem_alloc_1344:free 1556 unix:0:kmem_alloc_1344:full_magazines 1 unix:0:kmem_alloc_1344:hash_lookup_depth 0 unix:0:kmem_alloc_1344:hash_rescale 0 unix:0:kmem_alloc_1344:hash_size 64 unix:0:kmem_alloc_1344:magazine_size 3 unix:0:kmem_alloc_1344:slab_alloc 22 unix:0:kmem_alloc_1344:slab_create 4 unix:0:kmem_alloc_1344:slab_destroy 0 unix:0:kmem_alloc_1344:slab_free 0 unix:0:kmem_alloc_1344:slab_size 8192 unix:0:kmem_alloc_1344:snaptime 8464512.5855348 unix:0:kmem_alloc_1344:vmem_source 23 unix:0:kmem_alloc_16:align 8 unix:0:kmem_alloc_16:alloc 37050707 unix:0:kmem_alloc_16:alloc_fail 0 unix:0:kmem_alloc_16:buf_avail 566 unix:0:kmem_alloc_16:buf_constructed 260 unix:0:kmem_alloc_16:buf_inuse 6038 unix:0:kmem_alloc_16:buf_max 6604 unix:0:kmem_alloc_16:buf_size 16 unix:0:kmem_alloc_16:buf_total 6604 unix:0:kmem_alloc_16:chunk_size 16 unix:0:kmem_alloc_16:class kmem_cache unix:0:kmem_alloc_16:crtime 65.949925 unix:0:kmem_alloc_16:depot_alloc 33407 unix:0:kmem_alloc_16:depot_contention 0 unix:0:kmem_alloc_16:depot_free 33427 unix:0:kmem_alloc_16:empty_magazines 38 unix:0:kmem_alloc_16:free 37044689 unix:0:kmem_alloc_16:full_magazines 16 unix:0:kmem_alloc_16:hash_lookup_depth 0 unix:0:kmem_alloc_16:hash_rescale 0 unix:0:kmem_alloc_16:hash_size 0 unix:0:kmem_alloc_16:magazine_size 15 unix:0:kmem_alloc_16:slab_alloc 6298 unix:0:kmem_alloc_16:slab_create 13 unix:0:kmem_alloc_16:slab_destroy 0 unix:0:kmem_alloc_16:slab_free 0 unix:0:kmem_alloc_16:slab_size 8192 unix:0:kmem_alloc_16:snaptime 8464512.586919 unix:0:kmem_alloc_16:vmem_source 23 unix:0:kmem_alloc_160:align 8 unix:0:kmem_alloc_160:alloc 3490774 unix:0:kmem_alloc_160:alloc_fail 0 unix:0:kmem_alloc_160:buf_avail 85 unix:0:kmem_alloc_160:buf_constructed 41 unix:0:kmem_alloc_160:buf_inuse 665 unix:0:kmem_alloc_160:buf_max 750 unix:0:kmem_alloc_160:buf_size 160 unix:0:kmem_alloc_160:buf_total 750 unix:0:kmem_alloc_160:chunk_size 160 unix:0:kmem_alloc_160:class kmem_cache unix:0:kmem_alloc_160:crtime 65.9500458 unix:0:kmem_alloc_160:depot_alloc 14767 unix:0:kmem_alloc_160:depot_contention 0 unix:0:kmem_alloc_160:depot_free 14774 unix:0:kmem_alloc_160:empty_magazines 13 unix:0:kmem_alloc_160:free 3490116 unix:0:kmem_alloc_160:full_magazines 3 
unix:0:kmem_alloc_160:hash_lookup_depth 0 unix:0:kmem_alloc_160:hash_rescale 0 unix:0:kmem_alloc_160:hash_size 0 unix:0:kmem_alloc_160:magazine_size 7 unix:0:kmem_alloc_160:slab_alloc 706 unix:0:kmem_alloc_160:slab_create 15 unix:0:kmem_alloc_160:slab_destroy 0 unix:0:kmem_alloc_160:slab_free 0 unix:0:kmem_alloc_160:slab_size 8192 unix:0:kmem_alloc_160:snaptime 8464512.5883808 unix:0:kmem_alloc_160:vmem_source 23 unix:0:kmem_alloc_1600:align 64 unix:0:kmem_alloc_1600:alloc 3326 unix:0:kmem_alloc_1600:alloc_fail 0 unix:0:kmem_alloc_1600:buf_avail 9 unix:0:kmem_alloc_1600:buf_constructed 8 unix:0:kmem_alloc_1600:buf_inuse 6 unix:0:kmem_alloc_1600:buf_max 15 unix:0:kmem_alloc_1600:buf_size 1600 unix:0:kmem_alloc_1600:buf_total 15 unix:0:kmem_alloc_1600:chunk_size 1600 unix:0:kmem_alloc_1600:class kmem_cache unix:0:kmem_alloc_1600:crtime 65.9502024 unix:0:kmem_alloc_1600:depot_alloc 15 unix:0:kmem_alloc_1600:depot_contention 0 unix:0:kmem_alloc_1600:depot_free 19 unix:0:kmem_alloc_1600:empty_magazines 1 unix:0:kmem_alloc_1600:free 3324 unix:0:kmem_alloc_1600:full_magazines 0 unix:0:kmem_alloc_1600:hash_lookup_depth 0 unix:0:kmem_alloc_1600:hash_rescale 0 unix:0:kmem_alloc_1600:hash_size 64 unix:0:kmem_alloc_1600:magazine_size 3 unix:0:kmem_alloc_1600:slab_alloc 14 unix:0:kmem_alloc_1600:slab_create 3 unix:0:kmem_alloc_1600:slab_destroy 0 unix:0:kmem_alloc_1600:slab_free 0 unix:0:kmem_alloc_1600:slab_size 8192 unix:0:kmem_alloc_1600:snaptime 8464512.5897538 unix:0:kmem_alloc_1600:vmem_source 23 unix:0:kmem_alloc_16384:align 8192 unix:0:kmem_alloc_16384:alloc 215 unix:0:kmem_alloc_16384:alloc_fail 0 unix:0:kmem_alloc_16384:buf_avail 0 unix:0:kmem_alloc_16384:buf_constructed 0 unix:0:kmem_alloc_16384:buf_inuse 27 unix:0:kmem_alloc_16384:buf_max 27 unix:0:kmem_alloc_16384:buf_size 16384 unix:0:kmem_alloc_16384:buf_total 27 unix:0:kmem_alloc_16384:chunk_size 16384 unix:0:kmem_alloc_16384:class kmem_cache unix:0:kmem_alloc_16384:crtime 65.9503118 unix:0:kmem_alloc_16384:depot_alloc 1 unix:0:kmem_alloc_16384:depot_contention 0 unix:0:kmem_alloc_16384:depot_free 5 unix:0:kmem_alloc_16384:empty_magazines 0 unix:0:kmem_alloc_16384:free 192 unix:0:kmem_alloc_16384:full_magazines 0 unix:0:kmem_alloc_16384:hash_lookup_depth 0 unix:0:kmem_alloc_16384:hash_rescale 0 unix:0:kmem_alloc_16384:hash_size 64 unix:0:kmem_alloc_16384:magazine_size 1 unix:0:kmem_alloc_16384:slab_alloc 27 unix:0:kmem_alloc_16384:slab_create 27 unix:0:kmem_alloc_16384:slab_destroy 0 unix:0:kmem_alloc_16384:slab_free 0 unix:0:kmem_alloc_16384:slab_size 16384 unix:0:kmem_alloc_16384:snaptime 8464512.5912088 unix:0:kmem_alloc_16384:vmem_source 23 unix:0:kmem_alloc_192:align 64 unix:0:kmem_alloc_192:alloc 1561921 unix:0:kmem_alloc_192:alloc_fail 0 unix:0:kmem_alloc_192:buf_avail 42 unix:0:kmem_alloc_192:buf_constructed 22 unix:0:kmem_alloc_192:buf_inuse 126 unix:0:kmem_alloc_192:buf_max 168 unix:0:kmem_alloc_192:buf_size 192 unix:0:kmem_alloc_192:buf_total 168 unix:0:kmem_alloc_192:chunk_size 192 unix:0:kmem_alloc_192:class kmem_cache unix:0:kmem_alloc_192:crtime 65.950057 unix:0:kmem_alloc_192:depot_alloc 16911 unix:0:kmem_alloc_192:depot_contention 0 unix:0:kmem_alloc_192:depot_free 16915 unix:0:kmem_alloc_192:empty_magazines 6 unix:0:kmem_alloc_192:free 1561799 unix:0:kmem_alloc_192:full_magazines 1 unix:0:kmem_alloc_192:hash_lookup_depth 0 unix:0:kmem_alloc_192:hash_rescale 0 unix:0:kmem_alloc_192:hash_size 0 unix:0:kmem_alloc_192:magazine_size 7 unix:0:kmem_alloc_192:slab_alloc 148 unix:0:kmem_alloc_192:slab_create 4 
unix:0:kmem_alloc_192:slab_destroy 0 unix:0:kmem_alloc_192:slab_free 0 unix:0:kmem_alloc_192:slab_size 8192 unix:0:kmem_alloc_192:snaptime 8464512.5926574 unix:0:kmem_alloc_192:vmem_source 23 unix:0:kmem_alloc_2048:align 64 unix:0:kmem_alloc_2048:alloc 277742 unix:0:kmem_alloc_2048:alloc_fail 0 unix:0:kmem_alloc_2048:buf_avail 111 unix:0:kmem_alloc_2048:buf_constructed 109 unix:0:kmem_alloc_2048:buf_inuse 77 unix:0:kmem_alloc_2048:buf_max 188 unix:0:kmem_alloc_2048:buf_size 2048 unix:0:kmem_alloc_2048:buf_total 188 unix:0:kmem_alloc_2048:chunk_size 2048 unix:0:kmem_alloc_2048:class kmem_cache unix:0:kmem_alloc_2048:crtime 65.9502168 unix:0:kmem_alloc_2048:depot_alloc 10512 unix:0:kmem_alloc_2048:depot_contention 0 unix:0:kmem_alloc_2048:depot_free 10549 unix:0:kmem_alloc_2048:empty_magazines 4 unix:0:kmem_alloc_2048:free 277702 unix:0:kmem_alloc_2048:full_magazines 34 unix:0:kmem_alloc_2048:hash_lookup_depth 0 unix:0:kmem_alloc_2048:hash_rescale 1 unix:0:kmem_alloc_2048:hash_size 256 unix:0:kmem_alloc_2048:magazine_size 3 unix:0:kmem_alloc_2048:slab_alloc 186 unix:0:kmem_alloc_2048:slab_create 47 unix:0:kmem_alloc_2048:slab_destroy 0 unix:0:kmem_alloc_2048:slab_free 0 unix:0:kmem_alloc_2048:slab_size 8192 unix:0:kmem_alloc_2048:snaptime 8464512.594106 unix:0:kmem_alloc_2048:vmem_source 23 unix:0:kmem_alloc_224:align 8 unix:0:kmem_alloc_224:alloc 341289 unix:0:kmem_alloc_224:alloc_fail 0 unix:0:kmem_alloc_224:buf_avail 128 unix:0:kmem_alloc_224:buf_constructed 107 unix:0:kmem_alloc_224:buf_inuse 376 unix:0:kmem_alloc_224:buf_max 504 unix:0:kmem_alloc_224:buf_size 224 unix:0:kmem_alloc_224:buf_total 504 unix:0:kmem_alloc_224:chunk_size 224 unix:0:kmem_alloc_224:class kmem_cache unix:0:kmem_alloc_224:crtime 65.9500684 unix:0:kmem_alloc_224:depot_alloc 14774 unix:0:kmem_alloc_224:depot_contention 0 unix:0:kmem_alloc_224:depot_free 14790 unix:0:kmem_alloc_224:empty_magazines 2 unix:0:kmem_alloc_224:free 340929 unix:0:kmem_alloc_224:full_magazines 13 unix:0:kmem_alloc_224:hash_lookup_depth 0 unix:0:kmem_alloc_224:hash_rescale 0 unix:0:kmem_alloc_224:hash_size 0 unix:0:kmem_alloc_224:magazine_size 7 unix:0:kmem_alloc_224:slab_alloc 483 unix:0:kmem_alloc_224:slab_create 14 unix:0:kmem_alloc_224:slab_destroy 0 unix:0:kmem_alloc_224:slab_free 0 unix:0:kmem_alloc_224:slab_size 8192 unix:0:kmem_alloc_224:snaptime 8464512.5955154 unix:0:kmem_alloc_224:vmem_source 23 unix:0:kmem_alloc_24:align 8 unix:0:kmem_alloc_24:alloc 1840367 unix:0:kmem_alloc_24:alloc_fail 0 unix:0:kmem_alloc_24:buf_avail 451 unix:0:kmem_alloc_24:buf_constructed 354 unix:0:kmem_alloc_24:buf_inuse 7007 unix:0:kmem_alloc_24:buf_max 7458 unix:0:kmem_alloc_24:buf_size 24 unix:0:kmem_alloc_24:buf_total 7458 unix:0:kmem_alloc_24:chunk_size 24 unix:0:kmem_alloc_24:class kmem_cache unix:0:kmem_alloc_24:crtime 65.949936 unix:0:kmem_alloc_24:depot_alloc 38520 unix:0:kmem_alloc_24:depot_contention 0 unix:0:kmem_alloc_24:depot_free 38546 unix:0:kmem_alloc_24:empty_magazines 130 unix:0:kmem_alloc_24:free 1833386 unix:0:kmem_alloc_24:full_magazines 22 unix:0:kmem_alloc_24:hash_lookup_depth 0 unix:0:kmem_alloc_24:hash_rescale 0 unix:0:kmem_alloc_24:hash_size 0 unix:0:kmem_alloc_24:magazine_size 15 unix:0:kmem_alloc_24:slab_alloc 7361 unix:0:kmem_alloc_24:slab_create 22 unix:0:kmem_alloc_24:slab_destroy 0 unix:0:kmem_alloc_24:slab_free 0 unix:0:kmem_alloc_24:slab_size 8192 unix:0:kmem_alloc_24:snaptime 8464512.596896 unix:0:kmem_alloc_24:vmem_source 23 unix:0:kmem_alloc_24576:align 8192 unix:0:kmem_alloc_24576:alloc 137 
unix:0:kmem_alloc_24576:alloc_fail 0 unix:0:kmem_alloc_24576:buf_avail 3 unix:0:kmem_alloc_24576:buf_constructed 3 unix:0:kmem_alloc_24576:buf_inuse 9 unix:0:kmem_alloc_24576:buf_max 12 unix:0:kmem_alloc_24576:buf_size 24576 unix:0:kmem_alloc_24576:buf_total 12 unix:0:kmem_alloc_24576:chunk_size 24576 unix:0:kmem_alloc_24576:class kmem_cache unix:0:kmem_alloc_24576:crtime 65.9503318 unix:0:kmem_alloc_24576:depot_alloc 13 unix:0:kmem_alloc_24576:depot_contention 0 unix:0:kmem_alloc_24576:depot_free 15 unix:0:kmem_alloc_24576:empty_magazines 4 unix:0:kmem_alloc_24576:free 130 unix:0:kmem_alloc_24576:full_magazines 0 unix:0:kmem_alloc_24576:hash_lookup_depth 0 unix:0:kmem_alloc_24576:hash_rescale 0 unix:0:kmem_alloc_24576:hash_size 64 unix:0:kmem_alloc_24576:magazine_size 1 unix:0:kmem_alloc_24576:slab_alloc 12 unix:0:kmem_alloc_24576:slab_create 12 unix:0:kmem_alloc_24576:slab_destroy 0 unix:0:kmem_alloc_24576:slab_free 0 unix:0:kmem_alloc_24576:slab_size 24576 unix:0:kmem_alloc_24576:snaptime 8464512.5982682 unix:0:kmem_alloc_24576:vmem_source 23 unix:0:kmem_alloc_256:align 64 unix:0:kmem_alloc_256:alloc 19613843 unix:0:kmem_alloc_256:alloc_fail 0 unix:0:kmem_alloc_256:buf_avail 171 unix:0:kmem_alloc_256:buf_constructed 160 unix:0:kmem_alloc_256:buf_inuse 294 unix:0:kmem_alloc_256:buf_max 465 unix:0:kmem_alloc_256:buf_size 256 unix:0:kmem_alloc_256:buf_total 465 unix:0:kmem_alloc_256:chunk_size 256 unix:0:kmem_alloc_256:class kmem_cache unix:0:kmem_alloc_256:crtime 65.9500792 unix:0:kmem_alloc_256:depot_alloc 4438 unix:0:kmem_alloc_256:depot_contention 0 unix:0:kmem_alloc_256:depot_free 4461 unix:0:kmem_alloc_256:empty_magazines 6 unix:0:kmem_alloc_256:free 19613572 unix:0:kmem_alloc_256:full_magazines 20 unix:0:kmem_alloc_256:hash_lookup_depth 0 unix:0:kmem_alloc_256:hash_rescale 0 unix:0:kmem_alloc_256:hash_size 0 unix:0:kmem_alloc_256:magazine_size 7 unix:0:kmem_alloc_256:slab_alloc 454 unix:0:kmem_alloc_256:slab_create 15 unix:0:kmem_alloc_256:slab_destroy 0 unix:0:kmem_alloc_256:slab_free 0 unix:0:kmem_alloc_256:slab_size 8192 unix:0:kmem_alloc_256:snaptime 8464512.5997386 unix:0:kmem_alloc_256:vmem_source 23 unix:0:kmem_alloc_2688:align 64 unix:0:kmem_alloc_2688:alloc 27879 unix:0:kmem_alloc_2688:alloc_fail 0 unix:0:kmem_alloc_2688:buf_avail 27 unix:0:kmem_alloc_2688:buf_constructed 26 unix:0:kmem_alloc_2688:buf_inuse 90 unix:0:kmem_alloc_2688:buf_max 117 unix:0:kmem_alloc_2688:buf_size 2688 unix:0:kmem_alloc_2688:buf_total 117 unix:0:kmem_alloc_2688:chunk_size 2688 unix:0:kmem_alloc_2688:class kmem_cache unix:0:kmem_alloc_2688:crtime 65.9502322 unix:0:kmem_alloc_2688:depot_alloc 13 unix:0:kmem_alloc_2688:depot_contention 0 unix:0:kmem_alloc_2688:depot_free 23 unix:0:kmem_alloc_2688:empty_magazines 3 unix:0:kmem_alloc_2688:free 27799 unix:0:kmem_alloc_2688:full_magazines 6 unix:0:kmem_alloc_2688:hash_lookup_depth 0 unix:0:kmem_alloc_2688:hash_rescale 0 unix:0:kmem_alloc_2688:hash_size 64 unix:0:kmem_alloc_2688:magazine_size 3 unix:0:kmem_alloc_2688:slab_alloc 116 unix:0:kmem_alloc_2688:slab_create 39 unix:0:kmem_alloc_2688:slab_destroy 0 unix:0:kmem_alloc_2688:slab_free 0 unix:0:kmem_alloc_2688:slab_size 8192 unix:0:kmem_alloc_2688:snaptime 8464512.601215 unix:0:kmem_alloc_2688:vmem_source 23 unix:0:kmem_alloc_32:align 8 unix:0:kmem_alloc_32:alloc 8114832 unix:0:kmem_alloc_32:alloc_fail 0 unix:0:kmem_alloc_32:buf_avail 1598 unix:0:kmem_alloc_32:buf_constructed 1414 unix:0:kmem_alloc_32:buf_inuse 12372 unix:0:kmem_alloc_32:buf_max 13970 unix:0:kmem_alloc_32:buf_size 32 
unix:0:kmem_alloc_32:buf_total 13970 unix:0:kmem_alloc_32:chunk_size 32 unix:0:kmem_alloc_32:class kmem_cache unix:0:kmem_alloc_32:crtime 65.9499466 unix:0:kmem_alloc_32:depot_alloc 313181 unix:0:kmem_alloc_32:depot_contention 3 unix:0:kmem_alloc_32:depot_free 313278 unix:0:kmem_alloc_32:empty_magazines 142 unix:0:kmem_alloc_32:free 8102557 unix:0:kmem_alloc_32:full_magazines 93 unix:0:kmem_alloc_32:hash_lookup_depth 0 unix:0:kmem_alloc_32:hash_rescale 0 unix:0:kmem_alloc_32:hash_size 0 unix:0:kmem_alloc_32:magazine_size 15 unix:0:kmem_alloc_32:slab_alloc 13786 unix:0:kmem_alloc_32:slab_create 55 unix:0:kmem_alloc_32:slab_destroy 0 unix:0:kmem_alloc_32:slab_free 0 unix:0:kmem_alloc_32:slab_size 8192 unix:0:kmem_alloc_32:snaptime 8464512.6025948 unix:0:kmem_alloc_32:vmem_source 23 unix:0:kmem_alloc_320:align 64 unix:0:kmem_alloc_320:alloc 158210 unix:0:kmem_alloc_320:alloc_fail 0 unix:0:kmem_alloc_320:buf_avail 21 unix:0:kmem_alloc_320:buf_constructed 7 unix:0:kmem_alloc_320:buf_inuse 129 unix:0:kmem_alloc_320:buf_max 150 unix:0:kmem_alloc_320:buf_size 320 unix:0:kmem_alloc_320:buf_total 150 unix:0:kmem_alloc_320:chunk_size 320 unix:0:kmem_alloc_320:class kmem_cache unix:0:kmem_alloc_320:crtime 65.9500914 unix:0:kmem_alloc_320:depot_alloc 30936 unix:0:kmem_alloc_320:depot_contention 0 unix:0:kmem_alloc_320:depot_free 30940 unix:0:kmem_alloc_320:empty_magazines 3 unix:0:kmem_alloc_320:free 158085 unix:0:kmem_alloc_320:full_magazines 1 unix:0:kmem_alloc_320:hash_lookup_depth 0 unix:0:kmem_alloc_320:hash_rescale 0 unix:0:kmem_alloc_320:hash_size 0 unix:0:kmem_alloc_320:magazine_size 3 unix:0:kmem_alloc_320:slab_alloc 136 unix:0:kmem_alloc_320:slab_create 6 unix:0:kmem_alloc_320:slab_destroy 0 unix:0:kmem_alloc_320:slab_free 0 unix:0:kmem_alloc_320:slab_size 8192 unix:0:kmem_alloc_320:snaptime 8464512.6042218 unix:0:kmem_alloc_320:vmem_source 23 unix:0:kmem_alloc_32768:align 8192 unix:0:kmem_alloc_32768:alloc 10597 unix:0:kmem_alloc_32768:alloc_fail 0 unix:0:kmem_alloc_32768:buf_avail 4 unix:0:kmem_alloc_32768:buf_constructed 4 unix:0:kmem_alloc_32768:buf_inuse 62 unix:0:kmem_alloc_32768:buf_max 66 unix:0:kmem_alloc_32768:buf_size 32768 unix:0:kmem_alloc_32768:buf_total 66 unix:0:kmem_alloc_32768:chunk_size 32768 unix:0:kmem_alloc_32768:class kmem_cache unix:0:kmem_alloc_32768:crtime 65.9503536 unix:0:kmem_alloc_32768:depot_alloc 3029 unix:0:kmem_alloc_32768:depot_contention 0 unix:0:kmem_alloc_32768:depot_free 3034 unix:0:kmem_alloc_32768:empty_magazines 4 unix:0:kmem_alloc_32768:free 10540 unix:0:kmem_alloc_32768:full_magazines 2 unix:0:kmem_alloc_32768:hash_lookup_depth 0 unix:0:kmem_alloc_32768:hash_rescale 0 unix:0:kmem_alloc_32768:hash_size 64 unix:0:kmem_alloc_32768:magazine_size 1 unix:0:kmem_alloc_32768:slab_alloc 66 unix:0:kmem_alloc_32768:slab_create 66 unix:0:kmem_alloc_32768:slab_destroy 0 unix:0:kmem_alloc_32768:slab_free 0 unix:0:kmem_alloc_32768:slab_size 32768 unix:0:kmem_alloc_32768:snaptime 8464512.6056062 unix:0:kmem_alloc_32768:vmem_source 23 unix:0:kmem_alloc_384:align 64 unix:0:kmem_alloc_384:alloc 372732 unix:0:kmem_alloc_384:alloc_fail 0 unix:0:kmem_alloc_384:buf_avail 18 unix:0:kmem_alloc_384:buf_constructed 7 unix:0:kmem_alloc_384:buf_inuse 276 unix:0:kmem_alloc_384:buf_max 294 unix:0:kmem_alloc_384:buf_size 384 unix:0:kmem_alloc_384:buf_total 294 unix:0:kmem_alloc_384:chunk_size 384 unix:0:kmem_alloc_384:class kmem_cache unix:0:kmem_alloc_384:crtime 65.9501026 unix:0:kmem_alloc_384:depot_alloc 976 unix:0:kmem_alloc_384:depot_contention 0 
unix:0:kmem_alloc_384:depot_free 980 unix:0:kmem_alloc_384:empty_magazines 18 unix:0:kmem_alloc_384:free 372460 unix:0:kmem_alloc_384:full_magazines 0 unix:0:kmem_alloc_384:hash_lookup_depth 0 unix:0:kmem_alloc_384:hash_rescale 0 unix:0:kmem_alloc_384:hash_size 0 unix:0:kmem_alloc_384:magazine_size 3 unix:0:kmem_alloc_384:slab_alloc 283 unix:0:kmem_alloc_384:slab_create 14 unix:0:kmem_alloc_384:slab_destroy 0 unix:0:kmem_alloc_384:slab_free 0 unix:0:kmem_alloc_384:slab_size 8192 unix:0:kmem_alloc_384:snaptime 8464512.6069836 unix:0:kmem_alloc_384:vmem_source 23 unix:0:kmem_alloc_40:align 8 unix:0:kmem_alloc_40:alloc 675091 unix:0:kmem_alloc_40:alloc_fail 0 unix:0:kmem_alloc_40:buf_avail 411 unix:0:kmem_alloc_40:buf_constructed 231 unix:0:kmem_alloc_40:buf_inuse 10145 unix:0:kmem_alloc_40:buf_max 10556 unix:0:kmem_alloc_40:buf_size 40 unix:0:kmem_alloc_40:buf_total 10556 unix:0:kmem_alloc_40:chunk_size 40 unix:0:kmem_alloc_40:class kmem_cache unix:0:kmem_alloc_40:crtime 65.9499584 unix:0:kmem_alloc_40:depot_alloc 8118 unix:0:kmem_alloc_40:depot_contention 0 unix:0:kmem_alloc_40:depot_free 8136 unix:0:kmem_alloc_40:empty_magazines 21 unix:0:kmem_alloc_40:free 664964 unix:0:kmem_alloc_40:full_magazines 14 unix:0:kmem_alloc_40:hash_lookup_depth 0 unix:0:kmem_alloc_40:hash_rescale 0 unix:0:kmem_alloc_40:hash_size 0 unix:0:kmem_alloc_40:magazine_size 15 unix:0:kmem_alloc_40:slab_alloc 10376 unix:0:kmem_alloc_40:slab_create 52 unix:0:kmem_alloc_40:slab_destroy 0 unix:0:kmem_alloc_40:slab_free 0 unix:0:kmem_alloc_40:slab_size 8192 unix:0:kmem_alloc_40:snaptime 8464512.6084454 unix:0:kmem_alloc_40:vmem_source 23 unix:0:kmem_alloc_4096:align 64 unix:0:kmem_alloc_4096:alloc 23045 unix:0:kmem_alloc_4096:alloc_fail 0 unix:0:kmem_alloc_4096:buf_avail 11 unix:0:kmem_alloc_4096:buf_constructed 11 unix:0:kmem_alloc_4096:buf_inuse 49 unix:0:kmem_alloc_4096:buf_max 60 unix:0:kmem_alloc_4096:buf_size 4096 unix:0:kmem_alloc_4096:buf_total 60 unix:0:kmem_alloc_4096:chunk_size 4096 unix:0:kmem_alloc_4096:class kmem_cache unix:0:kmem_alloc_4096:crtime 65.9502482 unix:0:kmem_alloc_4096:depot_alloc 10062 unix:0:kmem_alloc_4096:depot_contention 0 unix:0:kmem_alloc_4096:depot_free 10075 unix:0:kmem_alloc_4096:empty_magazines 10 unix:0:kmem_alloc_4096:free 23009 unix:0:kmem_alloc_4096:full_magazines 9 unix:0:kmem_alloc_4096:hash_lookup_depth 0 unix:0:kmem_alloc_4096:hash_rescale 0 unix:0:kmem_alloc_4096:hash_size 64 unix:0:kmem_alloc_4096:magazine_size 1 unix:0:kmem_alloc_4096:slab_alloc 60 unix:0:kmem_alloc_4096:slab_create 30 unix:0:kmem_alloc_4096:slab_destroy 0 unix:0:kmem_alloc_4096:slab_free 0 unix:0:kmem_alloc_4096:slab_size 8192 unix:0:kmem_alloc_4096:snaptime 8464512.609839 unix:0:kmem_alloc_4096:vmem_source 23 unix:0:kmem_alloc_448:align 64 unix:0:kmem_alloc_448:alloc 9187902 unix:0:kmem_alloc_448:alloc_fail 0 unix:0:kmem_alloc_448:buf_avail 129 unix:0:kmem_alloc_448:buf_constructed 114 unix:0:kmem_alloc_448:buf_inuse 1113 unix:0:kmem_alloc_448:buf_max 1242 unix:0:kmem_alloc_448:buf_size 448 unix:0:kmem_alloc_448:buf_total 1242 unix:0:kmem_alloc_448:chunk_size 448 unix:0:kmem_alloc_448:class kmem_cache unix:0:kmem_alloc_448:crtime 65.9501142 unix:0:kmem_alloc_448:depot_alloc 63808 unix:0:kmem_alloc_448:depot_contention 0 unix:0:kmem_alloc_448:depot_free 63847 unix:0:kmem_alloc_448:empty_magazines 3 unix:0:kmem_alloc_448:free 9186828 unix:0:kmem_alloc_448:full_magazines 35 unix:0:kmem_alloc_448:hash_lookup_depth 0 unix:0:kmem_alloc_448:hash_rescale 0 unix:0:kmem_alloc_448:hash_size 0 
unix:0:kmem_alloc_448:magazine_size 3 unix:0:kmem_alloc_448:slab_alloc 1227 unix:0:kmem_alloc_448:slab_create 69 unix:0:kmem_alloc_448:slab_destroy 0 unix:0:kmem_alloc_448:slab_free 0 unix:0:kmem_alloc_448:slab_size 8192 unix:0:kmem_alloc_448:snaptime 8464512.6112848 unix:0:kmem_alloc_448:vmem_source 23 unix:0:kmem_alloc_48:align 8 unix:0:kmem_alloc_48:alloc 1015000 unix:0:kmem_alloc_48:alloc_fail 0 unix:0:kmem_alloc_48:buf_avail 377 unix:0:kmem_alloc_48:buf_constructed 289 unix:0:kmem_alloc_48:buf_inuse 16523 unix:0:kmem_alloc_48:buf_max 16900 unix:0:kmem_alloc_48:buf_size 48 unix:0:kmem_alloc_48:buf_total 16900 unix:0:kmem_alloc_48:chunk_size 48 unix:0:kmem_alloc_48:class kmem_cache unix:0:kmem_alloc_48:crtime 65.9499692 unix:0:kmem_alloc_48:depot_alloc 2659 unix:0:kmem_alloc_48:depot_contention 0 unix:0:kmem_alloc_48:depot_free 2680 unix:0:kmem_alloc_48:empty_magazines 227 unix:0:kmem_alloc_48:free 998498 unix:0:kmem_alloc_48:full_magazines 18 unix:0:kmem_alloc_48:hash_lookup_depth 0 unix:0:kmem_alloc_48:hash_rescale 0 unix:0:kmem_alloc_48:hash_size 0 unix:0:kmem_alloc_48:magazine_size 15 unix:0:kmem_alloc_48:slab_alloc 16812 unix:0:kmem_alloc_48:slab_create 100 unix:0:kmem_alloc_48:slab_destroy 0 unix:0:kmem_alloc_48:slab_free 0 unix:0:kmem_alloc_48:slab_size 8192 unix:0:kmem_alloc_48:snaptime 8464512.612681 unix:0:kmem_alloc_48:vmem_source 23 unix:0:kmem_alloc_512:align 64 unix:0:kmem_alloc_512:alloc 254658 unix:0:kmem_alloc_512:alloc_fail 0 unix:0:kmem_alloc_512:buf_avail 2149 unix:0:kmem_alloc_512:buf_constructed 2135 unix:0:kmem_alloc_512:buf_inuse 101 unix:0:kmem_alloc_512:buf_max 2250 unix:0:kmem_alloc_512:buf_size 512 unix:0:kmem_alloc_512:buf_total 2250 unix:0:kmem_alloc_512:chunk_size 512 unix:0:kmem_alloc_512:class kmem_cache unix:0:kmem_alloc_512:crtime 65.9501252 unix:0:kmem_alloc_512:depot_alloc 54267 unix:0:kmem_alloc_512:depot_contention 0 unix:0:kmem_alloc_512:depot_free 54979 unix:0:kmem_alloc_512:empty_magazines 4 unix:0:kmem_alloc_512:free 255269 unix:0:kmem_alloc_512:full_magazines 708 unix:0:kmem_alloc_512:hash_lookup_depth 0 unix:0:kmem_alloc_512:hash_rescale 0 unix:0:kmem_alloc_512:hash_size 0 unix:0:kmem_alloc_512:magazine_size 3 unix:0:kmem_alloc_512:slab_alloc 2236 unix:0:kmem_alloc_512:slab_create 150 unix:0:kmem_alloc_512:slab_destroy 0 unix:0:kmem_alloc_512:slab_free 0 unix:0:kmem_alloc_512:slab_size 8192 unix:0:kmem_alloc_512:snaptime 8464512.6142074 unix:0:kmem_alloc_512:vmem_source 23 unix:0:kmem_alloc_56:align 8 unix:0:kmem_alloc_56:alloc 8639540 unix:0:kmem_alloc_56:alloc_fail 0 unix:0:kmem_alloc_56:buf_avail 373 unix:0:kmem_alloc_56:buf_constructed 246 unix:0:kmem_alloc_56:buf_inuse 12387 unix:0:kmem_alloc_56:buf_max 12760 unix:0:kmem_alloc_56:buf_size 56 unix:0:kmem_alloc_56:buf_total 12760 unix:0:kmem_alloc_56:chunk_size 56 unix:0:kmem_alloc_56:class kmem_cache unix:0:kmem_alloc_56:crtime 65.94998 unix:0:kmem_alloc_56:depot_alloc 3271 unix:0:kmem_alloc_56:depot_contention 0 unix:0:kmem_alloc_56:depot_free 3291 unix:0:kmem_alloc_56:empty_magazines 345 unix:0:kmem_alloc_56:free 8627173 unix:0:kmem_alloc_56:full_magazines 16 unix:0:kmem_alloc_56:hash_lookup_depth 0 unix:0:kmem_alloc_56:hash_rescale 0 unix:0:kmem_alloc_56:hash_size 0 unix:0:kmem_alloc_56:magazine_size 15 unix:0:kmem_alloc_56:slab_alloc 12633 unix:0:kmem_alloc_56:slab_create 88 unix:0:kmem_alloc_56:slab_destroy 0 unix:0:kmem_alloc_56:slab_free 0 unix:0:kmem_alloc_56:slab_size 8192 unix:0:kmem_alloc_56:snaptime 8464512.6156142 unix:0:kmem_alloc_56:vmem_source 23 unix:0:kmem_alloc_64:align 
64 unix:0:kmem_alloc_64:alloc 11932415 unix:0:kmem_alloc_64:alloc_fail 0 unix:0:kmem_alloc_64:buf_avail 1981 unix:0:kmem_alloc_64:buf_constructed 1964 unix:0:kmem_alloc_64:buf_inuse 4369 unix:0:kmem_alloc_64:buf_max 6350 unix:0:kmem_alloc_64:buf_size 64 unix:0:kmem_alloc_64:buf_total 6350 unix:0:kmem_alloc_64:chunk_size 64 unix:0:kmem_alloc_64:class kmem_cache unix:0:kmem_alloc_64:crtime 65.9499906 unix:0:kmem_alloc_64:depot_alloc 14574 unix:0:kmem_alloc_64:depot_contention 0 unix:0:kmem_alloc_64:depot_free 14707 unix:0:kmem_alloc_64:empty_magazines 149 unix:0:kmem_alloc_64:free 11928179 unix:0:kmem_alloc_64:full_magazines 129 unix:0:kmem_alloc_64:hash_lookup_depth 0 unix:0:kmem_alloc_64:hash_rescale 0 unix:0:kmem_alloc_64:hash_size 0 unix:0:kmem_alloc_64:magazine_size 15 unix:0:kmem_alloc_64:slab_alloc 6333 unix:0:kmem_alloc_64:slab_create 50 unix:0:kmem_alloc_64:slab_destroy 0 unix:0:kmem_alloc_64:slab_free 0 unix:0:kmem_alloc_64:slab_size 8192 unix:0:kmem_alloc_64:snaptime 8464512.6170098 unix:0:kmem_alloc_64:vmem_source 23 unix:0:kmem_alloc_640:align 64 unix:0:kmem_alloc_640:alloc 786 unix:0:kmem_alloc_640:alloc_fail 0 unix:0:kmem_alloc_640:buf_avail 20 unix:0:kmem_alloc_640:buf_constructed 15 unix:0:kmem_alloc_640:buf_inuse 76 unix:0:kmem_alloc_640:buf_max 96 unix:0:kmem_alloc_640:buf_size 640 unix:0:kmem_alloc_640:buf_total 96 unix:0:kmem_alloc_640:chunk_size 640 unix:0:kmem_alloc_640:class kmem_cache unix:0:kmem_alloc_640:crtime 65.9501366 unix:0:kmem_alloc_640:depot_alloc 25 unix:0:kmem_alloc_640:depot_contention 0 unix:0:kmem_alloc_640:depot_free 32 unix:0:kmem_alloc_640:empty_magazines 2 unix:0:kmem_alloc_640:free 717 unix:0:kmem_alloc_640:full_magazines 3 unix:0:kmem_alloc_640:hash_lookup_depth 0 unix:0:kmem_alloc_640:hash_rescale 0 unix:0:kmem_alloc_640:hash_size 0 unix:0:kmem_alloc_640:magazine_size 3 unix:0:kmem_alloc_640:slab_alloc 91 unix:0:kmem_alloc_640:slab_create 8 unix:0:kmem_alloc_640:slab_destroy 0 unix:0:kmem_alloc_640:slab_free 0 unix:0:kmem_alloc_640:slab_size 8192 unix:0:kmem_alloc_640:snaptime 8464512.618386 unix:0:kmem_alloc_640:vmem_source 23 unix:0:kmem_alloc_768:align 64 unix:0:kmem_alloc_768:alloc 355 unix:0:kmem_alloc_768:alloc_fail 0 unix:0:kmem_alloc_768:buf_avail 14 unix:0:kmem_alloc_768:buf_constructed 6 unix:0:kmem_alloc_768:buf_inuse 16 unix:0:kmem_alloc_768:buf_max 30 unix:0:kmem_alloc_768:buf_size 768 unix:0:kmem_alloc_768:buf_total 30 unix:0:kmem_alloc_768:chunk_size 768 unix:0:kmem_alloc_768:class kmem_cache unix:0:kmem_alloc_768:crtime 65.950148 unix:0:kmem_alloc_768:depot_alloc 1 unix:0:kmem_alloc_768:depot_contention 0 unix:0:kmem_alloc_768:depot_free 4 unix:0:kmem_alloc_768:empty_magazines 0 unix:0:kmem_alloc_768:free 342 unix:0:kmem_alloc_768:full_magazines 0 unix:0:kmem_alloc_768:hash_lookup_depth 0 unix:0:kmem_alloc_768:hash_rescale 0 unix:0:kmem_alloc_768:hash_size 0 unix:0:kmem_alloc_768:magazine_size 3 unix:0:kmem_alloc_768:slab_alloc 22 unix:0:kmem_alloc_768:slab_create 3 unix:0:kmem_alloc_768:slab_destroy 0 unix:0:kmem_alloc_768:slab_free 0 unix:0:kmem_alloc_768:slab_size 8192 unix:0:kmem_alloc_768:snaptime 8464512.6198234 unix:0:kmem_alloc_768:vmem_source 23 unix:0:kmem_alloc_8:align 8 unix:0:kmem_alloc_8:alloc 42660482 unix:0:kmem_alloc_8:alloc_fail 0 unix:0:kmem_alloc_8:buf_avail 1714 unix:0:kmem_alloc_8:buf_constructed 1642 unix:0:kmem_alloc_8:buf_inuse 6422 unix:0:kmem_alloc_8:buf_max 8136 unix:0:kmem_alloc_8:buf_size 8 unix:0:kmem_alloc_8:buf_total 8136 unix:0:kmem_alloc_8:chunk_size 8 unix:0:kmem_alloc_8:class kmem_cache 
unix:0:kmem_alloc_8:crtime 65.9499144 unix:0:kmem_alloc_8:depot_alloc 123354 unix:0:kmem_alloc_8:depot_contention 1 unix:0:kmem_alloc_8:depot_free 123466 unix:0:kmem_alloc_8:empty_magazines 16 unix:0:kmem_alloc_8:free 42654172 unix:0:kmem_alloc_8:full_magazines 108 unix:0:kmem_alloc_8:hash_lookup_depth 0 unix:0:kmem_alloc_8:hash_rescale 0 unix:0:kmem_alloc_8:hash_size 0 unix:0:kmem_alloc_8:magazine_size 15 unix:0:kmem_alloc_8:slab_alloc 8064 unix:0:kmem_alloc_8:slab_create 8 unix:0:kmem_alloc_8:slab_destroy 0 unix:0:kmem_alloc_8:slab_free 0 unix:0:kmem_alloc_8:slab_size 8192 unix:0:kmem_alloc_8:snaptime 8464512.621485 unix:0:kmem_alloc_8:vmem_source 23 unix:0:kmem_alloc_80:align 8 unix:0:kmem_alloc_80:alloc 1600379 unix:0:kmem_alloc_80:alloc_fail 0 unix:0:kmem_alloc_80:buf_avail 2053 unix:0:kmem_alloc_80:buf_constructed 2037 unix:0:kmem_alloc_80:buf_inuse 3098 unix:0:kmem_alloc_80:buf_max 5151 unix:0:kmem_alloc_80:buf_size 80 unix:0:kmem_alloc_80:buf_total 5151 unix:0:kmem_alloc_80:chunk_size 80 unix:0:kmem_alloc_80:class kmem_cache unix:0:kmem_alloc_80:crtime 65.9500016 unix:0:kmem_alloc_80:depot_alloc 41691 unix:0:kmem_alloc_80:depot_contention 5 unix:0:kmem_alloc_80:depot_free 41984 unix:0:kmem_alloc_80:empty_magazines 135 unix:0:kmem_alloc_80:free 1597574 unix:0:kmem_alloc_80:full_magazines 290 unix:0:kmem_alloc_80:hash_lookup_depth 0 unix:0:kmem_alloc_80:hash_rescale 0 unix:0:kmem_alloc_80:hash_size 0 unix:0:kmem_alloc_80:magazine_size 7 unix:0:kmem_alloc_80:slab_alloc 5135 unix:0:kmem_alloc_80:slab_create 51 unix:0:kmem_alloc_80:slab_destroy 0 unix:0:kmem_alloc_80:slab_free 0 unix:0:kmem_alloc_80:slab_size 8192 unix:0:kmem_alloc_80:snaptime 8464512.6228918 unix:0:kmem_alloc_80:vmem_source 23 unix:0:kmem_alloc_8192:align 8192 unix:0:kmem_alloc_8192:alloc 1017414 unix:0:kmem_alloc_8192:alloc_fail 0 unix:0:kmem_alloc_8192:buf_avail 368 unix:0:kmem_alloc_8192:buf_constructed 368 unix:0:kmem_alloc_8192:buf_inuse 1809 unix:0:kmem_alloc_8192:buf_max 2177 unix:0:kmem_alloc_8192:buf_size 8192 unix:0:kmem_alloc_8192:buf_total 2177 unix:0:kmem_alloc_8192:chunk_size 8192 unix:0:kmem_alloc_8192:class kmem_cache unix:0:kmem_alloc_8192:crtime 65.9502662 unix:0:kmem_alloc_8192:depot_alloc 92926 unix:0:kmem_alloc_8192:depot_contention 0 unix:0:kmem_alloc_8192:depot_free 93294 unix:0:kmem_alloc_8192:empty_magazines 430 unix:0:kmem_alloc_8192:free 1015973 unix:0:kmem_alloc_8192:full_magazines 366 unix:0:kmem_alloc_8192:hash_lookup_depth 0 unix:0:kmem_alloc_8192:hash_rescale 2 unix:0:kmem_alloc_8192:hash_size 1024 unix:0:kmem_alloc_8192:magazine_size 1 unix:0:kmem_alloc_8192:slab_alloc 2177 unix:0:kmem_alloc_8192:slab_create 2177 unix:0:kmem_alloc_8192:slab_destroy 0 unix:0:kmem_alloc_8192:slab_free 0 unix:0:kmem_alloc_8192:slab_size 8192 unix:0:kmem_alloc_8192:snaptime 8464512.6243816 unix:0:kmem_alloc_8192:vmem_source 23 unix:0:kmem_alloc_896:align 64 unix:0:kmem_alloc_896:alloc 504 unix:0:kmem_alloc_896:alloc_fail 0 unix:0:kmem_alloc_896:buf_avail 7 unix:0:kmem_alloc_896:buf_constructed 7 unix:0:kmem_alloc_896:buf_inuse 11 unix:0:kmem_alloc_896:buf_max 18 unix:0:kmem_alloc_896:buf_size 896 unix:0:kmem_alloc_896:buf_total 18 unix:0:kmem_alloc_896:chunk_size 896 unix:0:kmem_alloc_896:class kmem_cache unix:0:kmem_alloc_896:crtime 65.9501596 unix:0:kmem_alloc_896:depot_alloc 18 unix:0:kmem_alloc_896:depot_contention 0 unix:0:kmem_alloc_896:depot_free 21 unix:0:kmem_alloc_896:empty_magazines 1 unix:0:kmem_alloc_896:free 496 unix:0:kmem_alloc_896:full_magazines 0 unix:0:kmem_alloc_896:hash_lookup_depth 0 
unix:0:kmem_alloc_896:hash_rescale 0 unix:0:kmem_alloc_896:hash_size 0 unix:0:kmem_alloc_896:magazine_size 3 unix:0:kmem_alloc_896:slab_alloc 18 unix:0:kmem_alloc_896:slab_create 2 unix:0:kmem_alloc_896:slab_destroy 0 unix:0:kmem_alloc_896:slab_free 0 unix:0:kmem_alloc_896:slab_size 8192 unix:0:kmem_alloc_896:snaptime 8464512.625897 unix:0:kmem_alloc_896:vmem_source 23 unix:0:kmem_alloc_96:align 8 unix:0:kmem_alloc_96:alloc 2371617 unix:0:kmem_alloc_96:alloc_fail 0 unix:0:kmem_alloc_96:buf_avail 998 unix:0:kmem_alloc_96:buf_constructed 921 unix:0:kmem_alloc_96:buf_inuse 514 unix:0:kmem_alloc_96:buf_max 1512 unix:0:kmem_alloc_96:buf_size 96 unix:0:kmem_alloc_96:buf_total 1512 unix:0:kmem_alloc_96:chunk_size 96 unix:0:kmem_alloc_96:class kmem_cache unix:0:kmem_alloc_96:crtime 65.9500122 unix:0:kmem_alloc_96:depot_alloc 10488 unix:0:kmem_alloc_96:depot_contention 0 unix:0:kmem_alloc_96:depot_free 10621 unix:0:kmem_alloc_96:empty_magazines 18 unix:0:kmem_alloc_96:free 2371236 unix:0:kmem_alloc_96:full_magazines 129 unix:0:kmem_alloc_96:hash_lookup_depth 0 unix:0:kmem_alloc_96:hash_rescale 0 unix:0:kmem_alloc_96:hash_size 0 unix:0:kmem_alloc_96:magazine_size 7 unix:0:kmem_alloc_96:slab_alloc 1435 unix:0:kmem_alloc_96:slab_create 18 unix:0:kmem_alloc_96:slab_destroy 0 unix:0:kmem_alloc_96:slab_free 0 unix:0:kmem_alloc_96:slab_size 8192 unix:0:kmem_alloc_96:snaptime 8464512.6272744 unix:0:kmem_alloc_96:vmem_source 23 unix:0:kmem_bufctl_audit_cache:align 8 unix:0:kmem_bufctl_audit_cache:alloc 0 unix:0:kmem_bufctl_audit_cache:alloc_fail 0 unix:0:kmem_bufctl_audit_cache:buf_avail 0 unix:0:kmem_bufctl_audit_cache:buf_constructed 0 unix:0:kmem_bufctl_audit_cache:buf_inuse 0 unix:0:kmem_bufctl_audit_cache:buf_max 0 unix:0:kmem_bufctl_audit_cache:buf_size 128 unix:0:kmem_bufctl_audit_cache:buf_total 0 unix:0:kmem_bufctl_audit_cache:chunk_size 128 unix:0:kmem_bufctl_audit_cache:class kmem_cache unix:0:kmem_bufctl_audit_cache:crtime 65.949253 unix:0:kmem_bufctl_audit_cache:depot_alloc 0 unix:0:kmem_bufctl_audit_cache:depot_contention 0 unix:0:kmem_bufctl_audit_cache:depot_free 0 unix:0:kmem_bufctl_audit_cache:empty_magazines 0 unix:0:kmem_bufctl_audit_cache:free 0 unix:0:kmem_bufctl_audit_cache:full_magazines 0 unix:0:kmem_bufctl_audit_cache:hash_lookup_depth 0 unix:0:kmem_bufctl_audit_cache:hash_rescale 0 unix:0:kmem_bufctl_audit_cache:hash_size 0 unix:0:kmem_bufctl_audit_cache:magazine_size 7 unix:0:kmem_bufctl_audit_cache:slab_alloc 0 unix:0:kmem_bufctl_audit_cache:slab_create 0 unix:0:kmem_bufctl_audit_cache:slab_destroy 0 unix:0:kmem_bufctl_audit_cache:slab_free 0 unix:0:kmem_bufctl_audit_cache:slab_size 8192 unix:0:kmem_bufctl_audit_cache:snaptime 8464512.628651 unix:0:kmem_bufctl_audit_cache:vmem_source 13 unix:0:kmem_bufctl_cache:align 8 unix:0:kmem_bufctl_cache:alloc 15549 unix:0:kmem_bufctl_cache:alloc_fail 0 unix:0:kmem_bufctl_cache:buf_avail 45 unix:0:kmem_bufctl_cache:buf_constructed 0 unix:0:kmem_bufctl_cache:buf_inuse 15549 unix:0:kmem_bufctl_cache:buf_max 15594 unix:0:kmem_bufctl_cache:buf_size 24 unix:0:kmem_bufctl_cache:buf_total 15594 unix:0:kmem_bufctl_cache:chunk_size 24 unix:0:kmem_bufctl_cache:class kmem_cache unix:0:kmem_bufctl_cache:crtime 65.9492434 unix:0:kmem_bufctl_cache:depot_alloc 0 unix:0:kmem_bufctl_cache:depot_contention 0 unix:0:kmem_bufctl_cache:depot_free 0 unix:0:kmem_bufctl_cache:empty_magazines 0 unix:0:kmem_bufctl_cache:free 0 unix:0:kmem_bufctl_cache:full_magazines 0 unix:0:kmem_bufctl_cache:hash_lookup_depth 0 unix:0:kmem_bufctl_cache:hash_rescale 0 
unix:0:kmem_bufctl_cache:hash_size 0 unix:0:kmem_bufctl_cache:magazine_size 15 unix:0:kmem_bufctl_cache:slab_alloc 15549 unix:0:kmem_bufctl_cache:slab_create 46 unix:0:kmem_bufctl_cache:slab_destroy 0 unix:0:kmem_bufctl_cache:slab_free 0 unix:0:kmem_bufctl_cache:slab_size 8192 unix:0:kmem_bufctl_cache:snaptime 8464512.630182 unix:0:kmem_bufctl_cache:vmem_source 13 unix:0:kmem_magazine_1:align 8 unix:0:kmem_magazine_1:alloc 2215 unix:0:kmem_magazine_1:alloc_fail 0 unix:0:kmem_magazine_1:buf_avail 325 unix:0:kmem_magazine_1:buf_constructed 0 unix:0:kmem_magazine_1:buf_inuse 2215 unix:0:kmem_magazine_1:buf_max 2540 unix:0:kmem_magazine_1:buf_size 16 unix:0:kmem_magazine_1:buf_total 2540 unix:0:kmem_magazine_1:chunk_size 16 unix:0:kmem_magazine_1:class kmem_cache unix:0:kmem_magazine_1:crtime 65.9491394 unix:0:kmem_magazine_1:depot_alloc 0 unix:0:kmem_magazine_1:depot_contention 0 unix:0:kmem_magazine_1:depot_free 0 unix:0:kmem_magazine_1:empty_magazines 0 unix:0:kmem_magazine_1:free 0 unix:0:kmem_magazine_1:full_magazines 0 unix:0:kmem_magazine_1:hash_lookup_depth 0 unix:0:kmem_magazine_1:hash_rescale 0 unix:0:kmem_magazine_1:hash_size 0 unix:0:kmem_magazine_1:magazine_size 15 unix:0:kmem_magazine_1:slab_alloc 2215 unix:0:kmem_magazine_1:slab_create 5 unix:0:kmem_magazine_1:slab_destroy 0 unix:0:kmem_magazine_1:slab_free 0 unix:0:kmem_magazine_1:slab_size 8192 unix:0:kmem_magazine_1:snaptime 8464512.631558 unix:0:kmem_magazine_1:vmem_source 13 unix:0:kmem_magazine_143:align 64 unix:0:kmem_magazine_143:alloc 0 unix:0:kmem_magazine_143:alloc_fail 0 unix:0:kmem_magazine_143:buf_avail 0 unix:0:kmem_magazine_143:buf_constructed 0 unix:0:kmem_magazine_143:buf_inuse 0 unix:0:kmem_magazine_143:buf_max 0 unix:0:kmem_magazine_143:buf_size 1152 unix:0:kmem_magazine_143:buf_total 0 unix:0:kmem_magazine_143:chunk_size 1152 unix:0:kmem_magazine_143:class kmem_cache unix:0:kmem_magazine_143:crtime 65.9492246 unix:0:kmem_magazine_143:depot_alloc 0 unix:0:kmem_magazine_143:depot_contention 0 unix:0:kmem_magazine_143:depot_free 0 unix:0:kmem_magazine_143:empty_magazines 0 unix:0:kmem_magazine_143:free 0 unix:0:kmem_magazine_143:full_magazines 0 unix:0:kmem_magazine_143:hash_lookup_depth 0 unix:0:kmem_magazine_143:hash_rescale 0 unix:0:kmem_magazine_143:hash_size 0 unix:0:kmem_magazine_143:magazine_size 3 unix:0:kmem_magazine_143:slab_alloc 0 unix:0:kmem_magazine_143:slab_create 0 unix:0:kmem_magazine_143:slab_destroy 0 unix:0:kmem_magazine_143:slab_free 0 unix:0:kmem_magazine_143:slab_size 8192 unix:0:kmem_magazine_143:snaptime 8464512.6329282 unix:0:kmem_magazine_143:vmem_source 13 unix:0:kmem_magazine_15:align 64 unix:0:kmem_magazine_15:alloc 11518 unix:0:kmem_magazine_15:alloc_fail 0 unix:0:kmem_magazine_15:buf_avail 30 unix:0:kmem_magazine_15:buf_constructed 0 unix:0:kmem_magazine_15:buf_inuse 11499 unix:0:kmem_magazine_15:buf_max 11529 unix:0:kmem_magazine_15:buf_size 128 unix:0:kmem_magazine_15:buf_total 11529 unix:0:kmem_magazine_15:chunk_size 128 unix:0:kmem_magazine_15:class kmem_cache unix:0:kmem_magazine_15:crtime 65.9491716 unix:0:kmem_magazine_15:depot_alloc 1 unix:0:kmem_magazine_15:depot_contention 0 unix:0:kmem_magazine_15:depot_free 3 unix:0:kmem_magazine_15:empty_magazines 0 unix:0:kmem_magazine_15:free 21 unix:0:kmem_magazine_15:full_magazines 0 unix:0:kmem_magazine_15:hash_lookup_depth 0 unix:0:kmem_magazine_15:hash_rescale 0 unix:0:kmem_magazine_15:hash_size 0 unix:0:kmem_magazine_15:magazine_size 7 unix:0:kmem_magazine_15:slab_alloc 11499 unix:0:kmem_magazine_15:slab_create 183 
unix:0:kmem_magazine_15:slab_destroy 0 unix:0:kmem_magazine_15:slab_free 0 unix:0:kmem_magazine_15:slab_size 8192 unix:0:kmem_magazine_15:snaptime 8464512.6343022 unix:0:kmem_magazine_15:vmem_source 13 unix:0:kmem_magazine_3:align 16 unix:0:kmem_magazine_3:alloc 5209 unix:0:kmem_magazine_3:alloc_fail 0 unix:0:kmem_magazine_3:buf_avail 75 unix:0:kmem_magazine_3:buf_constructed 0 unix:0:kmem_magazine_3:buf_inuse 5005 unix:0:kmem_magazine_3:buf_max 5080 unix:0:kmem_magazine_3:buf_size 32 unix:0:kmem_magazine_3:buf_total 5080 unix:0:kmem_magazine_3:chunk_size 32 unix:0:kmem_magazine_3:class kmem_cache unix:0:kmem_magazine_3:crtime 65.9491508 unix:0:kmem_magazine_3:depot_alloc 10 unix:0:kmem_magazine_3:depot_contention 0 unix:0:kmem_magazine_3:depot_free 12 unix:0:kmem_magazine_3:empty_magazines 7 unix:0:kmem_magazine_3:free 206 unix:0:kmem_magazine_3:full_magazines 0 unix:0:kmem_magazine_3:hash_lookup_depth 0 unix:0:kmem_magazine_3:hash_rescale 0 unix:0:kmem_magazine_3:hash_size 0 unix:0:kmem_magazine_3:magazine_size 15 unix:0:kmem_magazine_3:slab_alloc 5005 unix:0:kmem_magazine_3:slab_create 20 unix:0:kmem_magazine_3:slab_destroy 0 unix:0:kmem_magazine_3:slab_free 0 unix:0:kmem_magazine_3:slab_size 8192 unix:0:kmem_magazine_3:snaptime 8464512.6358518 unix:0:kmem_magazine_3:vmem_source 13 unix:0:kmem_magazine_31:align 64 unix:0:kmem_magazine_31:alloc 46 unix:0:kmem_magazine_31:alloc_fail 0 unix:0:kmem_magazine_31:buf_avail 16 unix:0:kmem_magazine_31:buf_constructed 0 unix:0:kmem_magazine_31:buf_inuse 46 unix:0:kmem_magazine_31:buf_max 62 unix:0:kmem_magazine_31:buf_size 256 unix:0:kmem_magazine_31:buf_total 62 unix:0:kmem_magazine_31:chunk_size 256 unix:0:kmem_magazine_31:class kmem_cache unix:0:kmem_magazine_31:crtime 65.949182 unix:0:kmem_magazine_31:depot_alloc 0 unix:0:kmem_magazine_31:depot_contention 0 unix:0:kmem_magazine_31:depot_free 0 unix:0:kmem_magazine_31:empty_magazines 0 unix:0:kmem_magazine_31:free 0 unix:0:kmem_magazine_31:full_magazines 0 unix:0:kmem_magazine_31:hash_lookup_depth 0 unix:0:kmem_magazine_31:hash_rescale 0 unix:0:kmem_magazine_31:hash_size 0 unix:0:kmem_magazine_31:magazine_size 7 unix:0:kmem_magazine_31:slab_alloc 46 unix:0:kmem_magazine_31:slab_create 2 unix:0:kmem_magazine_31:slab_destroy 0 unix:0:kmem_magazine_31:slab_free 0 unix:0:kmem_magazine_31:slab_size 8192 unix:0:kmem_magazine_31:snaptime 8464512.63723 unix:0:kmem_magazine_31:vmem_source 13 unix:0:kmem_magazine_47:align 64 unix:0:kmem_magazine_47:alloc 0 unix:0:kmem_magazine_47:alloc_fail 0 unix:0:kmem_magazine_47:buf_avail 0 unix:0:kmem_magazine_47:buf_constructed 0 unix:0:kmem_magazine_47:buf_inuse 0 unix:0:kmem_magazine_47:buf_max 0 unix:0:kmem_magazine_47:buf_size 384 unix:0:kmem_magazine_47:buf_total 0 unix:0:kmem_magazine_47:chunk_size 384 unix:0:kmem_magazine_47:class kmem_cache unix:0:kmem_magazine_47:crtime 65.949193 unix:0:kmem_magazine_47:depot_alloc 0 unix:0:kmem_magazine_47:depot_contention 0 unix:0:kmem_magazine_47:depot_free 0 unix:0:kmem_magazine_47:empty_magazines 0 unix:0:kmem_magazine_47:free 0 unix:0:kmem_magazine_47:full_magazines 0 unix:0:kmem_magazine_47:hash_lookup_depth 0 unix:0:kmem_magazine_47:hash_rescale 0 unix:0:kmem_magazine_47:hash_size 0 unix:0:kmem_magazine_47:magazine_size 3 unix:0:kmem_magazine_47:slab_alloc 0 unix:0:kmem_magazine_47:slab_create 0 unix:0:kmem_magazine_47:slab_destroy 0 unix:0:kmem_magazine_47:slab_free 0 unix:0:kmem_magazine_47:slab_size 8192 unix:0:kmem_magazine_47:snaptime 8464512.638601 unix:0:kmem_magazine_47:vmem_source 13 
unix:0:kmem_magazine_63:align 64 unix:0:kmem_magazine_63:alloc 0 unix:0:kmem_magazine_63:alloc_fail 0 unix:0:kmem_magazine_63:buf_avail 0 unix:0:kmem_magazine_63:buf_constructed 0 unix:0:kmem_magazine_63:buf_inuse 0 unix:0:kmem_magazine_63:buf_max 0 unix:0:kmem_magazine_63:buf_size 512 unix:0:kmem_magazine_63:buf_total 0 unix:0:kmem_magazine_63:chunk_size 512 unix:0:kmem_magazine_63:class kmem_cache unix:0:kmem_magazine_63:crtime 65.9492036 unix:0:kmem_magazine_63:depot_alloc 0 unix:0:kmem_magazine_63:depot_contention 0 unix:0:kmem_magazine_63:depot_free 0 unix:0:kmem_magazine_63:empty_magazines 0 unix:0:kmem_magazine_63:free 0 unix:0:kmem_magazine_63:full_magazines 0 unix:0:kmem_magazine_63:hash_lookup_depth 0 unix:0:kmem_magazine_63:hash_rescale 0 unix:0:kmem_magazine_63:hash_size 0 unix:0:kmem_magazine_63:magazine_size 3 unix:0:kmem_magazine_63:slab_alloc 0 unix:0:kmem_magazine_63:slab_create 0 unix:0:kmem_magazine_63:slab_destroy 0 unix:0:kmem_magazine_63:slab_free 0 unix:0:kmem_magazine_63:slab_size 8192 unix:0:kmem_magazine_63:snaptime 8464512.6400694 unix:0:kmem_magazine_63:vmem_source 13 unix:0:kmem_magazine_7:align 32 unix:0:kmem_magazine_7:alloc 7780 unix:0:kmem_magazine_7:alloc_fail 0 unix:0:kmem_magazine_7:buf_avail 1676 unix:0:kmem_magazine_7:buf_constructed 1559 unix:0:kmem_magazine_7:buf_inuse 3912 unix:0:kmem_magazine_7:buf_max 5588 unix:0:kmem_magazine_7:buf_size 64 unix:0:kmem_magazine_7:buf_total 5588 unix:0:kmem_magazine_7:chunk_size 64 unix:0:kmem_magazine_7:class kmem_cache unix:0:kmem_magazine_7:crtime 65.9491612 unix:0:kmem_magazine_7:depot_alloc 143 unix:0:kmem_magazine_7:depot_contention 0 unix:0:kmem_magazine_7:depot_free 248 unix:0:kmem_magazine_7:empty_magazines 134 unix:0:kmem_magazine_7:free 3973 unix:0:kmem_magazine_7:full_magazines 103 unix:0:kmem_magazine_7:hash_lookup_depth 0 unix:0:kmem_magazine_7:hash_rescale 0 unix:0:kmem_magazine_7:hash_size 0 unix:0:kmem_magazine_7:magazine_size 15 unix:0:kmem_magazine_7:slab_alloc 5471 unix:0:kmem_magazine_7:slab_create 44 unix:0:kmem_magazine_7:slab_destroy 0 unix:0:kmem_magazine_7:slab_free 0 unix:0:kmem_magazine_7:slab_size 8192 unix:0:kmem_magazine_7:snaptime 8464512.6414466 unix:0:kmem_magazine_7:vmem_source 13 unix:0:kmem_magazine_95:align 64 unix:0:kmem_magazine_95:alloc 0 unix:0:kmem_magazine_95:alloc_fail 0 unix:0:kmem_magazine_95:buf_avail 0 unix:0:kmem_magazine_95:buf_constructed 0 unix:0:kmem_magazine_95:buf_inuse 0 unix:0:kmem_magazine_95:buf_max 0 unix:0:kmem_magazine_95:buf_size 768 unix:0:kmem_magazine_95:buf_total 0 unix:0:kmem_magazine_95:chunk_size 768 unix:0:kmem_magazine_95:class kmem_cache unix:0:kmem_magazine_95:crtime 65.9492142 unix:0:kmem_magazine_95:depot_alloc 0 unix:0:kmem_magazine_95:depot_contention 0 unix:0:kmem_magazine_95:depot_free 0 unix:0:kmem_magazine_95:empty_magazines 0 unix:0:kmem_magazine_95:free 0 unix:0:kmem_magazine_95:full_magazines 0 unix:0:kmem_magazine_95:hash_lookup_depth 0 unix:0:kmem_magazine_95:hash_rescale 0 unix:0:kmem_magazine_95:hash_size 0 unix:0:kmem_magazine_95:magazine_size 3 unix:0:kmem_magazine_95:slab_alloc 0 unix:0:kmem_magazine_95:slab_create 0 unix:0:kmem_magazine_95:slab_destroy 0 unix:0:kmem_magazine_95:slab_free 0 unix:0:kmem_magazine_95:slab_size 8192 unix:0:kmem_magazine_95:snaptime 8464512.6428258 unix:0:kmem_magazine_95:vmem_source 13 unix:0:kmem_slab_cache:align 8 unix:0:kmem_slab_cache:alloc 3914 unix:0:kmem_slab_cache:alloc_fail 0 unix:0:kmem_slab_cache:buf_avail 1 unix:0:kmem_slab_cache:buf_constructed 0 unix:0:kmem_slab_cache:buf_inuse 
3914 unix:0:kmem_slab_cache:buf_max 3915 unix:0:kmem_slab_cache:buf_size 56 unix:0:kmem_slab_cache:buf_total 3915 unix:0:kmem_slab_cache:chunk_size 56 unix:0:kmem_slab_cache:class kmem_cache unix:0:kmem_slab_cache:crtime 65.949234 unix:0:kmem_slab_cache:depot_alloc 0 unix:0:kmem_slab_cache:depot_contention 0 unix:0:kmem_slab_cache:depot_free 0 unix:0:kmem_slab_cache:empty_magazines 0 unix:0:kmem_slab_cache:free 0 unix:0:kmem_slab_cache:full_magazines 0 unix:0:kmem_slab_cache:hash_lookup_depth 0 unix:0:kmem_slab_cache:hash_rescale 0 unix:0:kmem_slab_cache:hash_size 0 unix:0:kmem_slab_cache:magazine_size 15 unix:0:kmem_slab_cache:slab_alloc 3914 unix:0:kmem_slab_cache:slab_create 27 unix:0:kmem_slab_cache:slab_destroy 0 unix:0:kmem_slab_cache:slab_free 0 unix:0:kmem_slab_cache:slab_size 8192 unix:0:kmem_slab_cache:snaptime 8464512.6442664 unix:0:kmem_slab_cache:vmem_source 13 unix:0:kmem_taskq:class taskq unix:0:kmem_taskq:crtime 69.2639482 unix:0:kmem_taskq:executed 2319361 unix:0:kmem_taskq:maxtasks 6 unix:0:kmem_taskq:nactive 300 unix:0:kmem_taskq:nalloc 0 unix:0:kmem_taskq:priority 60 unix:0:kmem_taskq:snaptime 8464512.6456444 unix:0:kmem_taskq:tasks 2319361 unix:0:kmem_taskq:threads 1 unix:0:kmem_taskq:totaltime 2626011000 unix:0:kmem_va_16384:align 8192 unix:0:kmem_va_16384:alloc 78 unix:0:kmem_va_16384:alloc_fail 0 unix:0:kmem_va_16384:buf_avail 2 unix:0:kmem_va_16384:buf_constructed 0 unix:0:kmem_va_16384:buf_inuse 78 unix:0:kmem_va_16384:buf_max 80 unix:0:kmem_va_16384:buf_size 16384 unix:0:kmem_va_16384:buf_total 80 unix:0:kmem_va_16384:chunk_size 16384 unix:0:kmem_va_16384:class kmem_cache unix:0:kmem_va_16384:crtime 65.9497842 unix:0:kmem_va_16384:depot_alloc 0 unix:0:kmem_va_16384:depot_contention 0 unix:0:kmem_va_16384:depot_free 0 unix:0:kmem_va_16384:empty_magazines 0 unix:0:kmem_va_16384:free 0 unix:0:kmem_va_16384:full_magazines 0 unix:0:kmem_va_16384:hash_lookup_depth 0 unix:0:kmem_va_16384:hash_rescale 0 unix:0:kmem_va_16384:hash_size 64 unix:0:kmem_va_16384:magazine_size 1 unix:0:kmem_va_16384:slab_alloc 78 unix:0:kmem_va_16384:slab_create 5 unix:0:kmem_va_16384:slab_destroy 0 unix:0:kmem_va_16384:slab_free 0 unix:0:kmem_va_16384:slab_size 262144 unix:0:kmem_va_16384:snaptime 8464512.6463958 unix:0:kmem_va_16384:vmem_source 22 unix:0:kmem_va_24576:align 8192 unix:0:kmem_va_24576:alloc 22 unix:0:kmem_va_24576:alloc_fail 0 unix:0:kmem_va_24576:buf_avail 8 unix:0:kmem_va_24576:buf_constructed 0 unix:0:kmem_va_24576:buf_inuse 22 unix:0:kmem_va_24576:buf_max 30 unix:0:kmem_va_24576:buf_size 24576 unix:0:kmem_va_24576:buf_total 30 unix:0:kmem_va_24576:chunk_size 24576 unix:0:kmem_va_24576:class kmem_cache unix:0:kmem_va_24576:crtime 65.9498 unix:0:kmem_va_24576:depot_alloc 0 unix:0:kmem_va_24576:depot_contention 0 unix:0:kmem_va_24576:depot_free 0 unix:0:kmem_va_24576:empty_magazines 0 unix:0:kmem_va_24576:free 0 unix:0:kmem_va_24576:full_magazines 0 unix:0:kmem_va_24576:hash_lookup_depth 0 unix:0:kmem_va_24576:hash_rescale 0 unix:0:kmem_va_24576:hash_size 64 unix:0:kmem_va_24576:magazine_size 1 unix:0:kmem_va_24576:slab_alloc 22 unix:0:kmem_va_24576:slab_create 3 unix:0:kmem_va_24576:slab_destroy 0 unix:0:kmem_va_24576:slab_free 0 unix:0:kmem_va_24576:slab_size 262144 unix:0:kmem_va_24576:snaptime 8464512.6478598 unix:0:kmem_va_24576:vmem_source 22 unix:0:kmem_va_32768:align 8192 unix:0:kmem_va_32768:alloc 66 unix:0:kmem_va_32768:alloc_fail 0 unix:0:kmem_va_32768:buf_avail 6 unix:0:kmem_va_32768:buf_constructed 0 unix:0:kmem_va_32768:buf_inuse 66 unix:0:kmem_va_32768:buf_max 
72 unix:0:kmem_va_32768:buf_size 32768 unix:0:kmem_va_32768:buf_total 72 unix:0:kmem_va_32768:chunk_size 32768 unix:0:kmem_va_32768:class kmem_cache unix:0:kmem_va_32768:crtime 65.9498148 unix:0:kmem_va_32768:depot_alloc 0 unix:0:kmem_va_32768:depot_contention 0 unix:0:kmem_va_32768:depot_free 0 unix:0:kmem_va_32768:empty_magazines 0 unix:0:kmem_va_32768:free 0 unix:0:kmem_va_32768:full_magazines 0 unix:0:kmem_va_32768:hash_lookup_depth 0 unix:0:kmem_va_32768:hash_rescale 0 unix:0:kmem_va_32768:hash_size 64 unix:0:kmem_va_32768:magazine_size 1 unix:0:kmem_va_32768:slab_alloc 66 unix:0:kmem_va_32768:slab_create 9 unix:0:kmem_va_32768:slab_destroy 0 unix:0:kmem_va_32768:slab_free 0 unix:0:kmem_va_32768:slab_size 262144 unix:0:kmem_va_32768:snaptime 8464512.6493132 unix:0:kmem_va_32768:vmem_source 22 unix:0:kmem_va_40960:align 8192 unix:0:kmem_va_40960:alloc 2 unix:0:kmem_va_40960:alloc_fail 0 unix:0:kmem_va_40960:buf_avail 4 unix:0:kmem_va_40960:buf_constructed 0 unix:0:kmem_va_40960:buf_inuse 2 unix:0:kmem_va_40960:buf_max 6 unix:0:kmem_va_40960:buf_size 40960 unix:0:kmem_va_40960:buf_total 6 unix:0:kmem_va_40960:chunk_size 40960 unix:0:kmem_va_40960:class kmem_cache unix:0:kmem_va_40960:crtime 65.9498304 unix:0:kmem_va_40960:depot_alloc 0 unix:0:kmem_va_40960:depot_contention 0 unix:0:kmem_va_40960:depot_free 0 unix:0:kmem_va_40960:empty_magazines 0 unix:0:kmem_va_40960:free 0 unix:0:kmem_va_40960:full_magazines 0 unix:0:kmem_va_40960:hash_lookup_depth 0 unix:0:kmem_va_40960:hash_rescale 0 unix:0:kmem_va_40960:hash_size 64 unix:0:kmem_va_40960:magazine_size 1 unix:0:kmem_va_40960:slab_alloc 2 unix:0:kmem_va_40960:slab_create 1 unix:0:kmem_va_40960:slab_destroy 0 unix:0:kmem_va_40960:slab_free 0 unix:0:kmem_va_40960:slab_size 262144 unix:0:kmem_va_40960:snaptime 8464512.6508684 unix:0:kmem_va_40960:vmem_source 22 unix:0:kmem_va_49152:align 8192 unix:0:kmem_va_49152:alloc 0 unix:0:kmem_va_49152:alloc_fail 0 unix:0:kmem_va_49152:buf_avail 0 unix:0:kmem_va_49152:buf_constructed 0 unix:0:kmem_va_49152:buf_inuse 0 unix:0:kmem_va_49152:buf_max 0 unix:0:kmem_va_49152:buf_size 49152 unix:0:kmem_va_49152:buf_total 0 unix:0:kmem_va_49152:chunk_size 49152 unix:0:kmem_va_49152:class kmem_cache unix:0:kmem_va_49152:crtime 65.9498458 unix:0:kmem_va_49152:depot_alloc 0 unix:0:kmem_va_49152:depot_contention 0 unix:0:kmem_va_49152:depot_free 0 unix:0:kmem_va_49152:empty_magazines 0 unix:0:kmem_va_49152:free 0 unix:0:kmem_va_49152:full_magazines 0 unix:0:kmem_va_49152:hash_lookup_depth 0 unix:0:kmem_va_49152:hash_rescale 0 unix:0:kmem_va_49152:hash_size 64 unix:0:kmem_va_49152:magazine_size 1 unix:0:kmem_va_49152:slab_alloc 0 unix:0:kmem_va_49152:slab_create 0 unix:0:kmem_va_49152:slab_destroy 0 unix:0:kmem_va_49152:slab_free 0 unix:0:kmem_va_49152:slab_size 262144 unix:0:kmem_va_49152:snaptime 8464512.652273 unix:0:kmem_va_49152:vmem_source 22 unix:0:kmem_va_57344:align 8192 unix:0:kmem_va_57344:alloc 2 unix:0:kmem_va_57344:alloc_fail 0 unix:0:kmem_va_57344:buf_avail 2 unix:0:kmem_va_57344:buf_constructed 0 unix:0:kmem_va_57344:buf_inuse 2 unix:0:kmem_va_57344:buf_max 4 unix:0:kmem_va_57344:buf_size 57344 unix:0:kmem_va_57344:buf_total 4 unix:0:kmem_va_57344:chunk_size 57344 unix:0:kmem_va_57344:class kmem_cache unix:0:kmem_va_57344:crtime 65.949861 unix:0:kmem_va_57344:depot_alloc 0 unix:0:kmem_va_57344:depot_contention 0 unix:0:kmem_va_57344:depot_free 0 unix:0:kmem_va_57344:empty_magazines 0 unix:0:kmem_va_57344:free 0 unix:0:kmem_va_57344:full_magazines 0 unix:0:kmem_va_57344:hash_lookup_depth 0 
unix:0:kmem_va_57344:hash_rescale 0 unix:0:kmem_va_57344:hash_size 64 unix:0:kmem_va_57344:magazine_size 1 unix:0:kmem_va_57344:slab_alloc 2 unix:0:kmem_va_57344:slab_create 1 unix:0:kmem_va_57344:slab_destroy 0 unix:0:kmem_va_57344:slab_free 0 unix:0:kmem_va_57344:slab_size 262144 unix:0:kmem_va_57344:snaptime 8464512.6536386 unix:0:kmem_va_57344:vmem_source 22 unix:0:kmem_va_65536:align 8192 unix:0:kmem_va_65536:alloc 0 unix:0:kmem_va_65536:alloc_fail 0 unix:0:kmem_va_65536:buf_avail 0 unix:0:kmem_va_65536:buf_constructed 0 unix:0:kmem_va_65536:buf_inuse 0 unix:0:kmem_va_65536:buf_max 0 unix:0:kmem_va_65536:buf_size 65536 unix:0:kmem_va_65536:buf_total 0 unix:0:kmem_va_65536:chunk_size 65536 unix:0:kmem_va_65536:class kmem_cache unix:0:kmem_va_65536:crtime 65.9498756 unix:0:kmem_va_65536:depot_alloc 0 unix:0:kmem_va_65536:depot_contention 0 unix:0:kmem_va_65536:depot_free 0 unix:0:kmem_va_65536:empty_magazines 0 unix:0:kmem_va_65536:free 0 unix:0:kmem_va_65536:full_magazines 0 unix:0:kmem_va_65536:hash_lookup_depth 0 unix:0:kmem_va_65536:hash_rescale 0 unix:0:kmem_va_65536:hash_size 64 unix:0:kmem_va_65536:magazine_size 1 unix:0:kmem_va_65536:slab_alloc 0 unix:0:kmem_va_65536:slab_create 0 unix:0:kmem_va_65536:slab_destroy 0 unix:0:kmem_va_65536:slab_free 0 unix:0:kmem_va_65536:slab_size 262144 unix:0:kmem_va_65536:snaptime 8464512.6550018 unix:0:kmem_va_65536:vmem_source 22 unix:0:kmem_va_8192:align 8192 unix:0:kmem_va_8192:alloc 7042 unix:0:kmem_va_8192:alloc_fail 0 unix:0:kmem_va_8192:buf_avail 22 unix:0:kmem_va_8192:buf_constructed 0 unix:0:kmem_va_8192:buf_inuse 7018 unix:0:kmem_va_8192:buf_max 7040 unix:0:kmem_va_8192:buf_size 8192 unix:0:kmem_va_8192:buf_total 7040 unix:0:kmem_va_8192:chunk_size 8192 unix:0:kmem_va_8192:class kmem_cache unix:0:kmem_va_8192:crtime 65.9497674 unix:0:kmem_va_8192:depot_alloc 11 unix:0:kmem_va_8192:depot_contention 0 unix:0:kmem_va_8192:depot_free 13 unix:0:kmem_va_8192:empty_magazines 9 unix:0:kmem_va_8192:free 26 unix:0:kmem_va_8192:full_magazines 0 unix:0:kmem_va_8192:hash_lookup_depth 0 unix:0:kmem_va_8192:hash_rescale 2 unix:0:kmem_va_8192:hash_size 4096 unix:0:kmem_va_8192:magazine_size 1 unix:0:kmem_va_8192:slab_alloc 7018 unix:0:kmem_va_8192:slab_create 220 unix:0:kmem_va_8192:slab_destroy 0 unix:0:kmem_va_8192:slab_free 0 unix:0:kmem_va_8192:slab_size 262144 unix:0:kmem_va_8192:snaptime 8464512.6564668 unix:0:kmem_va_8192:vmem_source 22 unix:0:kssl_cache:align 8 unix:0:kssl_cache:alloc 0 unix:0:kssl_cache:alloc_fail 0 unix:0:kssl_cache:buf_avail 0 unix:0:kssl_cache:buf_constructed 0 unix:0:kssl_cache:buf_inuse 0 unix:0:kssl_cache:buf_max 0 unix:0:kssl_cache:buf_size 1560 unix:0:kssl_cache:buf_total 0 unix:0:kssl_cache:chunk_size 1560 unix:0:kssl_cache:class kmem_cache unix:0:kssl_cache:crtime 86.6380828 unix:0:kssl_cache:depot_alloc 0 unix:0:kssl_cache:depot_contention 0 unix:0:kssl_cache:depot_free 0 unix:0:kssl_cache:empty_magazines 0 unix:0:kssl_cache:free 0 unix:0:kssl_cache:full_magazines 0 unix:0:kssl_cache:hash_lookup_depth 0 unix:0:kssl_cache:hash_rescale 0 unix:0:kssl_cache:hash_size 64 unix:0:kssl_cache:magazine_size 3 unix:0:kssl_cache:slab_alloc 0 unix:0:kssl_cache:slab_create 0 unix:0:kssl_cache:slab_destroy 0 unix:0:kssl_cache:slab_free 0 unix:0:kssl_cache:slab_size 8192 unix:0:kssl_cache:snaptime 8464512.6579292 unix:0:kssl_cache:vmem_source 23 unix:0:linkinfo_cache:align 8 unix:0:linkinfo_cache:alloc 92 unix:0:linkinfo_cache:alloc_fail 0 unix:0:linkinfo_cache:buf_avail 164 unix:0:linkinfo_cache:buf_constructed 18 
unix:0:linkinfo_cache:buf_inuse 5 unix:0:linkinfo_cache:buf_max 169 unix:0:linkinfo_cache:buf_size 48 unix:0:linkinfo_cache:buf_total 169 unix:0:linkinfo_cache:chunk_size 48 unix:0:linkinfo_cache:class kmem_cache unix:0:linkinfo_cache:crtime 69.2822688 unix:0:linkinfo_cache:depot_alloc 0 unix:0:linkinfo_cache:depot_contention 0 unix:0:linkinfo_cache:depot_free 3 unix:0:linkinfo_cache:empty_magazines 0 unix:0:linkinfo_cache:free 90 unix:0:linkinfo_cache:full_magazines 0 unix:0:linkinfo_cache:hash_lookup_depth 0 unix:0:linkinfo_cache:hash_rescale 0 unix:0:linkinfo_cache:hash_size 0 unix:0:linkinfo_cache:magazine_size 15 unix:0:linkinfo_cache:slab_alloc 23 unix:0:linkinfo_cache:slab_create 1 unix:0:linkinfo_cache:slab_destroy 0 unix:0:linkinfo_cache:slab_free 0 unix:0:linkinfo_cache:slab_size 8192 unix:0:linkinfo_cache:snaptime 8464512.6593042 unix:0:linkinfo_cache:vmem_source 23 unix:0:lm_async:align 8 unix:0:lm_async:alloc 0 unix:0:lm_async:alloc_fail 0 unix:0:lm_async:buf_avail 0 unix:0:lm_async:buf_constructed 0 unix:0:lm_async:buf_inuse 0 unix:0:lm_async:buf_max 0 unix:0:lm_async:buf_size 32 unix:0:lm_async:buf_total 0 unix:0:lm_async:chunk_size 32 unix:0:lm_async:class kmem_cache unix:0:lm_async:crtime 114.536914 unix:0:lm_async:depot_alloc 0 unix:0:lm_async:depot_contention 0 unix:0:lm_async:depot_free 0 unix:0:lm_async:empty_magazines 0 unix:0:lm_async:free 0 unix:0:lm_async:full_magazines 0 unix:0:lm_async:hash_lookup_depth 0 unix:0:lm_async:hash_rescale 0 unix:0:lm_async:hash_size 0 unix:0:lm_async:magazine_size 15 unix:0:lm_async:slab_alloc 0 unix:0:lm_async:slab_create 0 unix:0:lm_async:slab_destroy 0 unix:0:lm_async:slab_free 0 unix:0:lm_async:slab_size 8192 unix:0:lm_async:snaptime 8464512.6607156 unix:0:lm_async:vmem_source 23 unix:0:lm_client:align 8 unix:0:lm_client:alloc 2 unix:0:lm_client:alloc_fail 0 unix:0:lm_client:buf_avail 61 unix:0:lm_client:buf_constructed 0 unix:0:lm_client:buf_inuse 2 unix:0:lm_client:buf_max 63 unix:0:lm_client:buf_size 128 unix:0:lm_client:buf_total 63 unix:0:lm_client:chunk_size 128 unix:0:lm_client:class kmem_cache unix:0:lm_client:crtime 114.5368984 unix:0:lm_client:depot_alloc 0 unix:0:lm_client:depot_contention 0 unix:0:lm_client:depot_free 0 unix:0:lm_client:empty_magazines 0 unix:0:lm_client:free 0 unix:0:lm_client:full_magazines 0 unix:0:lm_client:hash_lookup_depth 0 unix:0:lm_client:hash_rescale 0 unix:0:lm_client:hash_size 0 unix:0:lm_client:magazine_size 7 unix:0:lm_client:slab_alloc 2 unix:0:lm_client:slab_create 1 unix:0:lm_client:slab_destroy 0 unix:0:lm_client:slab_free 0 unix:0:lm_client:slab_size 8192 unix:0:lm_client:snaptime 8464512.6621782 unix:0:lm_client:vmem_source 23 unix:0:lm_config:align 8 unix:0:lm_config:alloc 13 unix:0:lm_config:alloc_fail 0 unix:0:lm_config:buf_avail 100 unix:0:lm_config:buf_constructed 2 unix:0:lm_config:buf_inuse 1 unix:0:lm_config:buf_max 101 unix:0:lm_config:buf_size 80 unix:0:lm_config:buf_total 101 unix:0:lm_config:chunk_size 80 unix:0:lm_config:class kmem_cache unix:0:lm_config:crtime 114.5369438 unix:0:lm_config:depot_alloc 0 unix:0:lm_config:depot_contention 0 unix:0:lm_config:depot_free 1 unix:0:lm_config:empty_magazines 0 unix:0:lm_config:free 13 unix:0:lm_config:full_magazines 0 unix:0:lm_config:hash_lookup_depth 0 unix:0:lm_config:hash_rescale 0 unix:0:lm_config:hash_size 0 unix:0:lm_config:magazine_size 7 unix:0:lm_config:slab_alloc 3 unix:0:lm_config:slab_create 1 unix:0:lm_config:slab_destroy 0 unix:0:lm_config:slab_free 0 unix:0:lm_config:slab_size 8192 unix:0:lm_config:snaptime 
8464512.6635466 unix:0:lm_config:vmem_source 23 unix:0:lm_sleep:align 8 unix:0:lm_sleep:alloc 0 unix:0:lm_sleep:alloc_fail 0 unix:0:lm_sleep:buf_avail 0 unix:0:lm_sleep:buf_constructed 0 unix:0:lm_sleep:buf_inuse 0 unix:0:lm_sleep:buf_max 0 unix:0:lm_sleep:buf_size 96 unix:0:lm_sleep:buf_total 0 unix:0:lm_sleep:chunk_size 96 unix:0:lm_sleep:class kmem_cache unix:0:lm_sleep:crtime 114.536929 unix:0:lm_sleep:depot_alloc 0 unix:0:lm_sleep:depot_contention 0 unix:0:lm_sleep:depot_free 0 unix:0:lm_sleep:empty_magazines 0 unix:0:lm_sleep:free 0 unix:0:lm_sleep:full_magazines 0 unix:0:lm_sleep:hash_lookup_depth 0 unix:0:lm_sleep:hash_rescale 0 unix:0:lm_sleep:hash_size 0 unix:0:lm_sleep:magazine_size 7 unix:0:lm_sleep:slab_alloc 0 unix:0:lm_sleep:slab_create 0 unix:0:lm_sleep:slab_destroy 0 unix:0:lm_sleep:slab_free 0 unix:0:lm_sleep:slab_size 8192 unix:0:lm_sleep:snaptime 8464512.6649146 unix:0:lm_sleep:vmem_source 23 unix:0:lm_sysid:align 8 unix:0:lm_sysid:alloc 2 unix:0:lm_sysid:alloc_fail 0 unix:0:lm_sysid:buf_avail 48 unix:0:lm_sysid:buf_constructed 0 unix:0:lm_sysid:buf_inuse 2 unix:0:lm_sysid:buf_max 50 unix:0:lm_sysid:buf_size 160 unix:0:lm_sysid:buf_total 50 unix:0:lm_sysid:chunk_size 160 unix:0:lm_sysid:class kmem_cache unix:0:lm_sysid:crtime 114.5368836 unix:0:lm_sysid:depot_alloc 0 unix:0:lm_sysid:depot_contention 0 unix:0:lm_sysid:depot_free 0 unix:0:lm_sysid:empty_magazines 0 unix:0:lm_sysid:free 0 unix:0:lm_sysid:full_magazines 0 unix:0:lm_sysid:hash_lookup_depth 0 unix:0:lm_sysid:hash_rescale 0 unix:0:lm_sysid:hash_size 0 unix:0:lm_sysid:magazine_size 7 unix:0:lm_sysid:slab_alloc 2 unix:0:lm_sysid:slab_create 1 unix:0:lm_sysid:slab_destroy 0 unix:0:lm_sysid:slab_free 0 unix:0:lm_sysid:slab_size 8192 unix:0:lm_sysid:snaptime 8464512.6663952 unix:0:lm_sysid:vmem_source 23 unix:0:lm_vnode:align 8 unix:0:lm_vnode:alloc 0 unix:0:lm_vnode:alloc_fail 0 unix:0:lm_vnode:buf_avail 0 unix:0:lm_vnode:buf_constructed 0 unix:0:lm_vnode:buf_inuse 0 unix:0:lm_vnode:buf_max 0 unix:0:lm_vnode:buf_size 184 unix:0:lm_vnode:buf_total 0 unix:0:lm_vnode:chunk_size 184 unix:0:lm_vnode:class kmem_cache unix:0:lm_vnode:crtime 114.5368476 unix:0:lm_vnode:depot_alloc 0 unix:0:lm_vnode:depot_contention 0 unix:0:lm_vnode:depot_free 0 unix:0:lm_vnode:empty_magazines 0 unix:0:lm_vnode:free 0 unix:0:lm_vnode:full_magazines 0 unix:0:lm_vnode:hash_lookup_depth 0 unix:0:lm_vnode:hash_rescale 0 unix:0:lm_vnode:hash_size 0 unix:0:lm_vnode:magazine_size 7 unix:0:lm_vnode:slab_alloc 0 unix:0:lm_vnode:slab_create 0 unix:0:lm_vnode:slab_destroy 0 unix:0:lm_vnode:slab_free 0 unix:0:lm_vnode:slab_size 8192 unix:0:lm_vnode:snaptime 8464512.6677608 unix:0:lm_vnode:vmem_source 23 unix:0:lm_xprt:align 8 unix:0:lm_xprt:alloc 0 unix:0:lm_xprt:alloc_fail 0 unix:0:lm_xprt:buf_avail 0 unix:0:lm_xprt:buf_constructed 0 unix:0:lm_xprt:buf_inuse 0 unix:0:lm_xprt:buf_max 0 unix:0:lm_xprt:buf_size 32 unix:0:lm_xprt:buf_total 0 unix:0:lm_xprt:chunk_size 32 unix:0:lm_xprt:class kmem_cache unix:0:lm_xprt:crtime 114.5368686 unix:0:lm_xprt:depot_alloc 0 unix:0:lm_xprt:depot_contention 0 unix:0:lm_xprt:depot_free 0 unix:0:lm_xprt:empty_magazines 0 unix:0:lm_xprt:free 0 unix:0:lm_xprt:full_magazines 0 unix:0:lm_xprt:hash_lookup_depth 0 unix:0:lm_xprt:hash_rescale 0 unix:0:lm_xprt:hash_size 0 unix:0:lm_xprt:magazine_size 15 unix:0:lm_xprt:slab_alloc 0 unix:0:lm_xprt:slab_create 0 unix:0:lm_xprt:slab_destroy 0 unix:0:lm_xprt:slab_free 0 unix:0:lm_xprt:slab_size 8192 unix:0:lm_xprt:snaptime 8464512.669126 unix:0:lm_xprt:vmem_source 23 
unix:0:lnode_cache:align 8 unix:0:lnode_cache:alloc 2 unix:0:lnode_cache:alloc_fail 0 unix:0:lnode_cache:buf_avail 252 unix:0:lnode_cache:buf_constructed 0 unix:0:lnode_cache:buf_inuse 2 unix:0:lnode_cache:buf_max 254 unix:0:lnode_cache:buf_size 32 unix:0:lnode_cache:buf_total 254 unix:0:lnode_cache:chunk_size 32 unix:0:lnode_cache:class kmem_cache unix:0:lnode_cache:crtime 91.6389814 unix:0:lnode_cache:depot_alloc 0 unix:0:lnode_cache:depot_contention 0 unix:0:lnode_cache:depot_free 0 unix:0:lnode_cache:empty_magazines 0 unix:0:lnode_cache:free 0 unix:0:lnode_cache:full_magazines 0 unix:0:lnode_cache:hash_lookup_depth 0 unix:0:lnode_cache:hash_rescale 0 unix:0:lnode_cache:hash_size 0 unix:0:lnode_cache:magazine_size 15 unix:0:lnode_cache:slab_alloc 2 unix:0:lnode_cache:slab_create 1 unix:0:lnode_cache:slab_destroy 0 unix:0:lnode_cache:slab_free 0 unix:0:lnode_cache:slab_size 8192 unix:0:lnode_cache:snaptime 8464512.6706908 unix:0:lnode_cache:vmem_source 23 unix:0:log_cons_cache:align 8 unix:0:log_cons_cache:alloc 109 unix:0:log_cons_cache:alloc_fail 0 unix:0:log_cons_cache:buf_avail 160 unix:0:log_cons_cache:buf_constructed 8 unix:0:log_cons_cache:buf_inuse 9 unix:0:log_cons_cache:buf_max 169 unix:0:log_cons_cache:buf_size 48 unix:0:log_cons_cache:buf_total 169 unix:0:log_cons_cache:chunk_size 48 unix:0:log_cons_cache:class kmem_cache unix:0:log_cons_cache:crtime 65.9572372 unix:0:log_cons_cache:depot_alloc 0 unix:0:log_cons_cache:depot_contention 0 unix:0:log_cons_cache:depot_free 2 unix:0:log_cons_cache:empty_magazines 0 unix:0:log_cons_cache:free 102 unix:0:log_cons_cache:full_magazines 0 unix:0:log_cons_cache:hash_lookup_depth 0 unix:0:log_cons_cache:hash_rescale 0 unix:0:log_cons_cache:hash_size 0 unix:0:log_cons_cache:magazine_size 15 unix:0:log_cons_cache:slab_alloc 17 unix:0:log_cons_cache:slab_create 1 unix:0:log_cons_cache:slab_destroy 0 unix:0:log_cons_cache:slab_free 0 unix:0:log_cons_cache:slab_size 8192 unix:0:log_cons_cache:snaptime 8464512.6720786 unix:0:log_cons_cache:vmem_source 23 unix:0:lufs_bufs:align 8 unix:0:lufs_bufs:alloc 16016 unix:0:lufs_bufs:alloc_fail 0 unix:0:lufs_bufs:buf_avail 31 unix:0:lufs_bufs:buf_constructed 16 unix:0:lufs_bufs:buf_inuse 0 unix:0:lufs_bufs:buf_max 31 unix:0:lufs_bufs:buf_size 256 unix:0:lufs_bufs:buf_total 31 unix:0:lufs_bufs:chunk_size 256 unix:0:lufs_bufs:class kmem_cache unix:0:lufs_bufs:crtime 69.2907152 unix:0:lufs_bufs:depot_alloc 1781 unix:0:lufs_bufs:depot_contention 0 unix:0:lufs_bufs:depot_free 1784 unix:0:lufs_bufs:empty_magazines 1 unix:0:lufs_bufs:free 16019 unix:0:lufs_bufs:full_magazines 0 unix:0:lufs_bufs:hash_lookup_depth 0 unix:0:lufs_bufs:hash_rescale 0 unix:0:lufs_bufs:hash_size 0 unix:0:lufs_bufs:magazine_size 7 unix:0:lufs_bufs:slab_alloc 16 unix:0:lufs_bufs:slab_create 1 unix:0:lufs_bufs:slab_destroy 0 unix:0:lufs_bufs:slab_free 0 unix:0:lufs_bufs:slab_size 8192 unix:0:lufs_bufs:snaptime 8464512.673449 unix:0:lufs_bufs:vmem_source 23 unix:0:lufs_mapentry_cache:align 8 unix:0:lufs_mapentry_cache:alloc 301817 unix:0:lufs_mapentry_cache:alloc_fail 0 unix:0:lufs_mapentry_cache:buf_avail 2808 unix:0:lufs_mapentry_cache:buf_constructed 2745 unix:0:lufs_mapentry_cache:buf_inuse 0 unix:0:lufs_mapentry_cache:buf_max 2808 unix:0:lufs_mapentry_cache:buf_size 112 unix:0:lufs_mapentry_cache:buf_total 2808 unix:0:lufs_mapentry_cache:chunk_size 112 unix:0:lufs_mapentry_cache:class kmem_cache unix:0:lufs_mapentry_cache:crtime 69.2907308 unix:0:lufs_mapentry_cache:depot_alloc 26401 unix:0:lufs_mapentry_cache:depot_contention 0 
unix:0:lufs_mapentry_cache:depot_free 26793 unix:0:lufs_mapentry_cache:empty_magazines 2 unix:0:lufs_mapentry_cache:free 302209 unix:0:lufs_mapentry_cache:full_magazines 389 unix:0:lufs_mapentry_cache:hash_lookup_depth 0 unix:0:lufs_mapentry_cache:hash_rescale 0 unix:0:lufs_mapentry_cache:hash_size 0 unix:0:lufs_mapentry_cache:magazine_size 7 unix:0:lufs_mapentry_cache:slab_alloc 2745 unix:0:lufs_mapentry_cache:slab_create 39 unix:0:lufs_mapentry_cache:slab_destroy 0 unix:0:lufs_mapentry_cache:slab_free 0 unix:0:lufs_mapentry_cache:slab_size 8192 unix:0:lufs_mapentry_cache:snaptime 8464512.6748266 unix:0:lufs_mapentry_cache:vmem_source 23 unix:0:lufs_save:align 8 unix:0:lufs_save:alloc 15065 unix:0:lufs_save:alloc_fail 0 unix:0:lufs_save:buf_avail 339 unix:0:lufs_save:buf_constructed 32 unix:0:lufs_save:buf_inuse 0 unix:0:lufs_save:buf_max 339 unix:0:lufs_save:buf_size 24 unix:0:lufs_save:buf_total 339 unix:0:lufs_save:chunk_size 24 unix:0:lufs_save:class kmem_cache unix:0:lufs_save:crtime 69.2907008 unix:0:lufs_save:depot_alloc 830 unix:0:lufs_save:depot_contention 0 unix:0:lufs_save:depot_free 833 unix:0:lufs_save:empty_magazines 1 unix:0:lufs_save:free 15068 unix:0:lufs_save:full_magazines 0 unix:0:lufs_save:hash_lookup_depth 0 unix:0:lufs_save:hash_rescale 0 unix:0:lufs_save:hash_size 0 unix:0:lufs_save:magazine_size 15 unix:0:lufs_save:slab_alloc 32 unix:0:lufs_save:slab_create 1 unix:0:lufs_save:slab_destroy 0 unix:0:lufs_save:slab_free 0 unix:0:lufs_save:slab_size 8192 unix:0:lufs_save:snaptime 8464512.6762742 unix:0:lufs_save:vmem_source 23 unix:0:lwp_cache:align 8 unix:0:lwp_cache:alloc 16675 unix:0:lwp_cache:alloc_fail 0 unix:0:lwp_cache:buf_avail 104 unix:0:lwp_cache:buf_constructed 104 unix:0:lwp_cache:buf_inuse 176 unix:0:lwp_cache:buf_max 280 unix:0:lwp_cache:buf_size 912 unix:0:lwp_cache:buf_total 280 unix:0:lwp_cache:chunk_size 912 unix:0:lwp_cache:class kmem_cache unix:0:lwp_cache:crtime 69.262033 unix:0:lwp_cache:depot_alloc 2585 unix:0:lwp_cache:depot_contention 0 unix:0:lwp_cache:depot_free 2622 unix:0:lwp_cache:empty_magazines 4 unix:0:lwp_cache:free 16536 unix:0:lwp_cache:full_magazines 34 unix:0:lwp_cache:hash_lookup_depth 0 unix:0:lwp_cache:hash_rescale 0 unix:0:lwp_cache:hash_size 0 unix:0:lwp_cache:magazine_size 3 unix:0:lwp_cache:slab_alloc 280 unix:0:lwp_cache:slab_create 35 unix:0:lwp_cache:slab_destroy 0 unix:0:lwp_cache:slab_free 0 unix:0:lwp_cache:slab_size 8192 unix:0:lwp_cache:snaptime 8464512.6776686 unix:0:lwp_cache:vmem_source 23 unix:0:mac_impl_cache:align 8 unix:0:mac_impl_cache:alloc 0 unix:0:mac_impl_cache:alloc_fail 0 unix:0:mac_impl_cache:buf_avail 0 unix:0:mac_impl_cache:buf_constructed 0 unix:0:mac_impl_cache:buf_inuse 0 unix:0:mac_impl_cache:buf_max 0 unix:0:mac_impl_cache:buf_size 384 unix:0:mac_impl_cache:buf_total 0 unix:0:mac_impl_cache:chunk_size 384 unix:0:mac_impl_cache:class kmem_cache unix:0:mac_impl_cache:crtime 84.0829406 unix:0:mac_impl_cache:depot_alloc 0 unix:0:mac_impl_cache:depot_contention 0 unix:0:mac_impl_cache:depot_free 0 unix:0:mac_impl_cache:empty_magazines 0 unix:0:mac_impl_cache:free 0 unix:0:mac_impl_cache:full_magazines 0 unix:0:mac_impl_cache:hash_lookup_depth 0 unix:0:mac_impl_cache:hash_rescale 0 unix:0:mac_impl_cache:hash_size 0 unix:0:mac_impl_cache:magazine_size 3 unix:0:mac_impl_cache:slab_alloc 0 unix:0:mac_impl_cache:slab_create 0 unix:0:mac_impl_cache:slab_destroy 0 unix:0:mac_impl_cache:slab_free 0 unix:0:mac_impl_cache:slab_size 8192 unix:0:mac_impl_cache:snaptime 8464512.6790476 
unix:0:mac_impl_cache:vmem_source 23 unix:0:marker_cache:align 8 unix:0:marker_cache:alloc 32425 unix:0:marker_cache:alloc_fail 0 unix:0:marker_cache:buf_avail 63 unix:0:marker_cache:buf_constructed 18 unix:0:marker_cache:buf_inuse 0 unix:0:marker_cache:buf_max 63 unix:0:marker_cache:buf_size 128 unix:0:marker_cache:buf_total 63 unix:0:marker_cache:chunk_size 128 unix:0:marker_cache:class kmem_cache unix:0:marker_cache:crtime 69.28272 unix:0:marker_cache:depot_alloc 43 unix:0:marker_cache:depot_contention 0 unix:0:marker_cache:depot_free 46 unix:0:marker_cache:empty_magazines 1 unix:0:marker_cache:free 32428 unix:0:marker_cache:full_magazines 0 unix:0:marker_cache:hash_lookup_depth 0 unix:0:marker_cache:hash_rescale 0 unix:0:marker_cache:hash_size 0 unix:0:marker_cache:magazine_size 7 unix:0:marker_cache:slab_alloc 18 unix:0:marker_cache:slab_create 1 unix:0:marker_cache:slab_destroy 0 unix:0:marker_cache:slab_free 0 unix:0:marker_cache:slab_size 8192 unix:0:marker_cache:snaptime 8464512.6804512 unix:0:marker_cache:vmem_source 23 unix:0:memscrub_kstat:class misc unix:0:memscrub_kstat:crtime 82.7669948 unix:0:memscrub_kstat:done_early 196 unix:0:memscrub_kstat:done_late 0 unix:0:memscrub_kstat:early_sec 6128 unix:0:memscrub_kstat:errors_found 0 unix:0:memscrub_kstat:force_run 0 unix:0:memscrub_kstat:interval_ticks 4218 unix:0:memscrub_kstat:late_sec 0 unix:0:memscrub_kstat:snaptime 8464512.6818866 unix:0:memseg_cache:align 8 unix:0:memseg_cache:alloc 0 unix:0:memseg_cache:alloc_fail 0 unix:0:memseg_cache:buf_avail 0 unix:0:memseg_cache:buf_constructed 0 unix:0:memseg_cache:buf_inuse 0 unix:0:memseg_cache:buf_max 0 unix:0:memseg_cache:buf_size 112 unix:0:memseg_cache:buf_total 0 unix:0:memseg_cache:chunk_size 112 unix:0:memseg_cache:class kmem_cache unix:0:memseg_cache:crtime 66.0107318 unix:0:memseg_cache:depot_alloc 0 unix:0:memseg_cache:depot_contention 0 unix:0:memseg_cache:depot_free 0 unix:0:memseg_cache:empty_magazines 0 unix:0:memseg_cache:free 0 unix:0:memseg_cache:full_magazines 0 unix:0:memseg_cache:hash_lookup_depth 0 unix:0:memseg_cache:hash_rescale 0 unix:0:memseg_cache:hash_size 0 unix:0:memseg_cache:magazine_size 7 unix:0:memseg_cache:slab_alloc 0 unix:0:memseg_cache:slab_create 0 unix:0:memseg_cache:slab_destroy 0 unix:0:memseg_cache:slab_free 0 unix:0:memseg_cache:slab_size 8192 unix:0:memseg_cache:snaptime 8464512.68254 unix:0:memseg_cache:vmem_source 8 unix:0:mmu_ctx:class hat unix:0:mmu_ctx:crtime 66.2378644 unix:0:mmu_ctx:mmu_ctx_tsb_exceptions 230685 unix:0:mmu_ctx:mmu_ctx_tsb_raise_exception 1249 unix:0:mmu_ctx:mmu_ctx_wrap_around 28 unix:0:mmu_ctx:snaptime 8464512.6839186 unix:0:mmuctxdom_cache:align 64 unix:0:mmuctxdom_cache:alloc 2 unix:0:mmuctxdom_cache:alloc_fail 0 unix:0:mmuctxdom_cache:buf_avail 40 unix:0:mmuctxdom_cache:buf_constructed 0 unix:0:mmuctxdom_cache:buf_inuse 2 unix:0:mmuctxdom_cache:buf_max 42 unix:0:mmuctxdom_cache:buf_size 192 unix:0:mmuctxdom_cache:buf_total 42 unix:0:mmuctxdom_cache:chunk_size 192 unix:0:mmuctxdom_cache:class kmem_cache unix:0:mmuctxdom_cache:crtime 66.2372002 unix:0:mmuctxdom_cache:depot_alloc 0 unix:0:mmuctxdom_cache:depot_contention 0 unix:0:mmuctxdom_cache:depot_free 0 unix:0:mmuctxdom_cache:empty_magazines 0 unix:0:mmuctxdom_cache:free 0 unix:0:mmuctxdom_cache:full_magazines 0 unix:0:mmuctxdom_cache:hash_lookup_depth 0 unix:0:mmuctxdom_cache:hash_rescale 0 unix:0:mmuctxdom_cache:hash_size 0 unix:0:mmuctxdom_cache:magazine_size 7 unix:0:mmuctxdom_cache:slab_alloc 2 unix:0:mmuctxdom_cache:slab_create 1 
unix:0:mmuctxdom_cache:slab_destroy 0 unix:0:mmuctxdom_cache:slab_free 0 unix:0:mmuctxdom_cache:slab_size 8192 unix:0:mmuctxdom_cache:snaptime 8464512.6842996 unix:0:mmuctxdom_cache:vmem_source 23 unix:0:mod_hash_entries:align 8 unix:0:mod_hash_entries:alloc 5672 unix:0:mod_hash_entries:alloc_fail 0 unix:0:mod_hash_entries:buf_avail 452 unix:0:mod_hash_entries:buf_constructed 259 unix:0:mod_hash_entries:buf_inuse 226 unix:0:mod_hash_entries:buf_max 678 unix:0:mod_hash_entries:buf_size 24 unix:0:mod_hash_entries:buf_total 678 unix:0:mod_hash_entries:chunk_size 24 unix:0:mod_hash_entries:class kmem_cache unix:0:mod_hash_entries:crtime 66.1332094 unix:0:mod_hash_entries:depot_alloc 126 unix:0:mod_hash_entries:depot_contention 0 unix:0:mod_hash_entries:depot_free 144 unix:0:mod_hash_entries:empty_magazines 0 unix:0:mod_hash_entries:free 5464 unix:0:mod_hash_entries:full_magazines 15 unix:0:mod_hash_entries:hash_lookup_depth 0 unix:0:mod_hash_entries:hash_rescale 0 unix:0:mod_hash_entries:hash_size 0 unix:0:mod_hash_entries:magazine_size 15 unix:0:mod_hash_entries:slab_alloc 485 unix:0:mod_hash_entries:slab_create 2 unix:0:mod_hash_entries:slab_destroy 0 unix:0:mod_hash_entries:slab_free 0 unix:0:mod_hash_entries:slab_size 8192 unix:0:mod_hash_entries:snaptime 8464512.6856684 unix:0:mod_hash_entries:vmem_source 23 unix:0:multidata:align 64 unix:0:multidata:alloc 0 unix:0:multidata:alloc_fail 0 unix:0:multidata:buf_avail 0 unix:0:multidata:buf_constructed 0 unix:0:multidata:buf_inuse 0 unix:0:multidata:buf_max 0 unix:0:multidata:buf_size 248 unix:0:multidata:buf_total 0 unix:0:multidata:chunk_size 256 unix:0:multidata:class kmem_cache unix:0:multidata:crtime 65.9561302 unix:0:multidata:depot_alloc 0 unix:0:multidata:depot_contention 0 unix:0:multidata:depot_free 0 unix:0:multidata:empty_magazines 0 unix:0:multidata:free 0 unix:0:multidata:full_magazines 0 unix:0:multidata:hash_lookup_depth 0 unix:0:multidata:hash_rescale 0 unix:0:multidata:hash_size 0 unix:0:multidata:magazine_size 7 unix:0:multidata:slab_alloc 0 unix:0:multidata:slab_create 0 unix:0:multidata:slab_destroy 0 unix:0:multidata:slab_free 0 unix:0:multidata:slab_size 8192 unix:0:multidata:snaptime 8464512.687124 unix:0:multidata:vmem_source 23 unix:0:multidata_pattbl:align 64 unix:0:multidata_pattbl:alloc 0 unix:0:multidata_pattbl:alloc_fail 0 unix:0:multidata_pattbl:buf_avail 0 unix:0:multidata_pattbl:buf_constructed 0 unix:0:multidata_pattbl:buf_inuse 0 unix:0:multidata_pattbl:buf_max 0 unix:0:multidata_pattbl:buf_size 32 unix:0:multidata_pattbl:buf_total 0 unix:0:multidata_pattbl:chunk_size 64 unix:0:multidata_pattbl:class kmem_cache unix:0:multidata_pattbl:crtime 65.9564052 unix:0:multidata_pattbl:depot_alloc 0 unix:0:multidata_pattbl:depot_contention 0 unix:0:multidata_pattbl:depot_free 0 unix:0:multidata_pattbl:empty_magazines 0 unix:0:multidata_pattbl:free 0 unix:0:multidata_pattbl:full_magazines 0 unix:0:multidata_pattbl:hash_lookup_depth 0 unix:0:multidata_pattbl:hash_rescale 0 unix:0:multidata_pattbl:hash_size 0 unix:0:multidata_pattbl:magazine_size 15 unix:0:multidata_pattbl:slab_alloc 0 unix:0:multidata_pattbl:slab_create 0 unix:0:multidata_pattbl:slab_destroy 0 unix:0:multidata_pattbl:slab_free 0 unix:0:multidata_pattbl:slab_size 8192 unix:0:multidata_pattbl:snaptime 8464512.6884956 unix:0:multidata_pattbl:vmem_source 23 unix:0:multidata_pdslab:align 64 unix:0:multidata_pdslab:alloc 0 unix:0:multidata_pdslab:alloc_fail 0 unix:0:multidata_pdslab:buf_avail 0 unix:0:multidata_pdslab:buf_constructed 0 
unix:0:multidata_pdslab:buf_inuse 0 unix:0:multidata_pdslab:buf_max 0 unix:0:multidata_pdslab:buf_size 7112 unix:0:multidata_pdslab:buf_total 0 unix:0:multidata_pdslab:chunk_size 7168 unix:0:multidata_pdslab:class kmem_cache unix:0:multidata_pdslab:crtime 65.9563922 unix:0:multidata_pdslab:depot_alloc 0 unix:0:multidata_pdslab:depot_contention 0 unix:0:multidata_pdslab:depot_free 0 unix:0:multidata_pdslab:empty_magazines 0 unix:0:multidata_pdslab:free 0 unix:0:multidata_pdslab:full_magazines 0 unix:0:multidata_pdslab:hash_lookup_depth 0 unix:0:multidata_pdslab:hash_rescale 0 unix:0:multidata_pdslab:hash_size 64 unix:0:multidata_pdslab:magazine_size 1 unix:0:multidata_pdslab:slab_alloc 0 unix:0:multidata_pdslab:slab_create 0 unix:0:multidata_pdslab:slab_destroy 0 unix:0:multidata_pdslab:slab_free 0 unix:0:multidata_pdslab:slab_size 57344 unix:0:multidata_pdslab:snaptime 8464512.6898574 unix:0:multidata_pdslab:vmem_source 23 unix:0:namefs_inodes_1:align 1 unix:0:namefs_inodes_1:alloc 19 unix:0:namefs_inodes_1:alloc_fail 0 unix:0:namefs_inodes_1:buf_avail 46 unix:0:namefs_inodes_1:buf_constructed 1 unix:0:namefs_inodes_1:buf_inuse 18 unix:0:namefs_inodes_1:buf_max 64 unix:0:namefs_inodes_1:buf_size 1 unix:0:namefs_inodes_1:buf_total 64 unix:0:namefs_inodes_1:chunk_size 1 unix:0:namefs_inodes_1:class kmem_cache unix:0:namefs_inodes_1:crtime 89.6917846 unix:0:namefs_inodes_1:depot_alloc 0 unix:0:namefs_inodes_1:depot_contention 0 unix:0:namefs_inodes_1:depot_free 1 unix:0:namefs_inodes_1:empty_magazines 0 unix:0:namefs_inodes_1:free 2 unix:0:namefs_inodes_1:full_magazines 0 unix:0:namefs_inodes_1:hash_lookup_depth 0 unix:0:namefs_inodes_1:hash_rescale 0 unix:0:namefs_inodes_1:hash_size 64 unix:0:namefs_inodes_1:magazine_size 15 unix:0:namefs_inodes_1:slab_alloc 19 unix:0:namefs_inodes_1:slab_create 1 unix:0:namefs_inodes_1:slab_destroy 0 unix:0:namefs_inodes_1:slab_free 0 unix:0:namefs_inodes_1:slab_size 64 unix:0:namefs_inodes_1:snaptime 8464512.6913568 unix:0:namefs_inodes_1:vmem_source 56 unix:0:ncstats:class misc unix:0:ncstats:crtime 69.2818562 unix:0:ncstats:dbl_enters 7413 unix:0:ncstats:enters 90769 unix:0:ncstats:hits 177790030 unix:0:ncstats:long_enter 0 unix:0:ncstats:long_look 0 unix:0:ncstats:misses 54802 unix:0:ncstats:move_to_front 6433 unix:0:ncstats:purges 495 unix:0:ncstats:snaptime 8464512.6928892 unix:0:nfs4_access_cache:align 8 unix:0:nfs4_access_cache:alloc 0 unix:0:nfs4_access_cache:alloc_fail 0 unix:0:nfs4_access_cache:buf_avail 0 unix:0:nfs4_access_cache:buf_constructed 0 unix:0:nfs4_access_cache:buf_inuse 0 unix:0:nfs4_access_cache:buf_max 0 unix:0:nfs4_access_cache:buf_size 56 unix:0:nfs4_access_cache:buf_total 0 unix:0:nfs4_access_cache:chunk_size 56 unix:0:nfs4_access_cache:class kmem_cache unix:0:nfs4_access_cache:crtime 114.15315 unix:0:nfs4_access_cache:depot_alloc 0 unix:0:nfs4_access_cache:depot_contention 0 unix:0:nfs4_access_cache:depot_free 0 unix:0:nfs4_access_cache:empty_magazines 0 unix:0:nfs4_access_cache:free 0 unix:0:nfs4_access_cache:full_magazines 0 unix:0:nfs4_access_cache:hash_lookup_depth 0 unix:0:nfs4_access_cache:hash_rescale 0 unix:0:nfs4_access_cache:hash_size 0 unix:0:nfs4_access_cache:magazine_size 15 unix:0:nfs4_access_cache:slab_alloc 0 unix:0:nfs4_access_cache:slab_create 0 unix:0:nfs4_access_cache:slab_destroy 0 unix:0:nfs4_access_cache:slab_free 0 unix:0:nfs4_access_cache:slab_size 8192 unix:0:nfs4_access_cache:snaptime 8464512.693506 unix:0:nfs4_access_cache:vmem_source 23 unix:0:nfs4_ace4_list_cache:align 8 
unix:0:nfs4_ace4_list_cache:alloc 0 unix:0:nfs4_ace4_list_cache:alloc_fail 0 unix:0:nfs4_ace4_list_cache:buf_avail 0 unix:0:nfs4_ace4_list_cache:buf_constructed 0 unix:0:nfs4_ace4_list_cache:buf_inuse 0 unix:0:nfs4_ace4_list_cache:buf_max 0 unix:0:nfs4_ace4_list_cache:buf_size 264 unix:0:nfs4_ace4_list_cache:buf_total 0 unix:0:nfs4_ace4_list_cache:chunk_size 264 unix:0:nfs4_ace4_list_cache:class kmem_cache unix:0:nfs4_ace4_list_cache:crtime 114.153258 unix:0:nfs4_ace4_list_cache:depot_alloc 0 unix:0:nfs4_ace4_list_cache:depot_contention 0 unix:0:nfs4_ace4_list_cache:depot_free 0 unix:0:nfs4_ace4_list_cache:empty_magazines 0 unix:0:nfs4_ace4_list_cache:free 0 unix:0:nfs4_ace4_list_cache:full_magazines 0 unix:0:nfs4_ace4_list_cache:hash_lookup_depth 0 unix:0:nfs4_ace4_list_cache:hash_rescale 0 unix:0:nfs4_ace4_list_cache:hash_size 0 unix:0:nfs4_ace4_list_cache:magazine_size 3 unix:0:nfs4_ace4_list_cache:slab_alloc 0 unix:0:nfs4_ace4_list_cache:slab_create 0 unix:0:nfs4_ace4_list_cache:slab_destroy 0 unix:0:nfs4_ace4_list_cache:slab_free 0 unix:0:nfs4_ace4_list_cache:slab_size 8192 unix:0:nfs4_ace4_list_cache:snaptime 8464512.6950124 unix:0:nfs4_ace4_list_cache:vmem_source 23 unix:0:nfs4_ace4vals_cache:align 8 unix:0:nfs4_ace4vals_cache:alloc 0 unix:0:nfs4_ace4vals_cache:alloc_fail 0 unix:0:nfs4_ace4vals_cache:buf_avail 0 unix:0:nfs4_ace4vals_cache:buf_constructed 0 unix:0:nfs4_ace4vals_cache:buf_inuse 0 unix:0:nfs4_ace4vals_cache:buf_max 0 unix:0:nfs4_ace4vals_cache:buf_size 48 unix:0:nfs4_ace4vals_cache:buf_total 0 unix:0:nfs4_ace4vals_cache:chunk_size 48 unix:0:nfs4_ace4vals_cache:class kmem_cache unix:0:nfs4_ace4vals_cache:crtime 114.1532428 unix:0:nfs4_ace4vals_cache:depot_alloc 0 unix:0:nfs4_ace4vals_cache:depot_contention 0 unix:0:nfs4_ace4vals_cache:depot_free 0 unix:0:nfs4_ace4vals_cache:empty_magazines 0 unix:0:nfs4_ace4vals_cache:free 0 unix:0:nfs4_ace4vals_cache:full_magazines 0 unix:0:nfs4_ace4vals_cache:hash_lookup_depth 0 unix:0:nfs4_ace4vals_cache:hash_rescale 0 unix:0:nfs4_ace4vals_cache:hash_size 0 unix:0:nfs4_ace4vals_cache:magazine_size 15 unix:0:nfs4_ace4vals_cache:slab_alloc 0 unix:0:nfs4_ace4vals_cache:slab_create 0 unix:0:nfs4_ace4vals_cache:slab_destroy 0 unix:0:nfs4_ace4vals_cache:slab_free 0 unix:0:nfs4_ace4vals_cache:slab_size 8192 unix:0:nfs4_ace4vals_cache:snaptime 8464512.6965214 unix:0:nfs4_ace4vals_cache:vmem_source 23 unix:0:nfs_access_cache:align 8 unix:0:nfs_access_cache:alloc 9484 unix:0:nfs_access_cache:alloc_fail 0 unix:0:nfs_access_cache:buf_avail 3897 unix:0:nfs_access_cache:buf_constructed 3820 unix:0:nfs_access_cache:buf_inuse 163 unix:0:nfs_access_cache:buf_max 4060 unix:0:nfs_access_cache:buf_size 56 unix:0:nfs_access_cache:buf_total 4060 unix:0:nfs_access_cache:chunk_size 56 unix:0:nfs_access_cache:class kmem_cache unix:0:nfs_access_cache:crtime 114.1459892 unix:0:nfs_access_cache:depot_alloc 202 unix:0:nfs_access_cache:depot_contention 0 unix:0:nfs_access_cache:depot_free 458 unix:0:nfs_access_cache:empty_magazines 9 unix:0:nfs_access_cache:free 9577 unix:0:nfs_access_cache:full_magazines 253 unix:0:nfs_access_cache:hash_lookup_depth 0 unix:0:nfs_access_cache:hash_rescale 0 unix:0:nfs_access_cache:hash_size 0 unix:0:nfs_access_cache:magazine_size 15 unix:0:nfs_access_cache:slab_alloc 3983 unix:0:nfs_access_cache:slab_create 28 unix:0:nfs_access_cache:slab_destroy 0 unix:0:nfs_access_cache:slab_free 0 unix:0:nfs_access_cache:slab_size 8192 unix:0:nfs_access_cache:snaptime 8464512.698004 unix:0:nfs_access_cache:vmem_source 23 
unix:0:ohci_nexus_enum_tq:class taskq unix:0:ohci_nexus_enum_tq:crtime 84.7437844 unix:0:ohci_nexus_enum_tq:executed 0 unix:0:ohci_nexus_enum_tq:maxtasks 0 unix:0:ohci_nexus_enum_tq:nactive 1 unix:0:ohci_nexus_enum_tq:nalloc 0 unix:0:ohci_nexus_enum_tq:priority 60 unix:0:ohci_nexus_enum_tq:snaptime 8464512.6993826 unix:0:ohci_nexus_enum_tq:tasks 0 unix:0:ohci_nexus_enum_tq:threads 1 unix:0:ohci_nexus_enum_tq:totaltime 0 unix:0:pa_hment_cache:align 8 unix:0:pa_hment_cache:alloc 296474 unix:0:pa_hment_cache:alloc_fail 0 unix:0:pa_hment_cache:buf_avail 208 unix:0:pa_hment_cache:buf_constructed 112 unix:0:pa_hment_cache:buf_inuse 46 unix:0:pa_hment_cache:buf_max 254 unix:0:pa_hment_cache:buf_size 64 unix:0:pa_hment_cache:buf_total 254 unix:0:pa_hment_cache:chunk_size 64 unix:0:pa_hment_cache:class kmem_cache unix:0:pa_hment_cache:crtime 66.2390466 unix:0:pa_hment_cache:depot_alloc 346 unix:0:pa_hment_cache:depot_contention 0 unix:0:pa_hment_cache:depot_free 355 unix:0:pa_hment_cache:empty_magazines 1 unix:0:pa_hment_cache:free 296437 unix:0:pa_hment_cache:full_magazines 5 unix:0:pa_hment_cache:hash_lookup_depth 0 unix:0:pa_hment_cache:hash_rescale 0 unix:0:pa_hment_cache:hash_size 0 unix:0:pa_hment_cache:magazine_size 15 unix:0:pa_hment_cache:slab_alloc 158 unix:0:pa_hment_cache:slab_create 2 unix:0:pa_hment_cache:slab_destroy 0 unix:0:pa_hment_cache:slab_free 0 unix:0:pa_hment_cache:slab_size 8192 unix:0:pa_hment_cache:snaptime 8464512.6999772 unix:0:pa_hment_cache:vmem_source 8 unix:0:page_retire:class misc unix:0:page_retire:crtime 69.2857802 unix:0:page_retire:pages_deferred 0 unix:0:page_retire:pages_deferred_kernel 0 unix:0:page_retire:pages_fma 0 unix:0:page_retire:pages_limit 1040 unix:0:page_retire:pages_limit_exceeded 0 unix:0:page_retire:pages_multiple_ce 0 unix:0:page_retire:pages_notdequeued 0 unix:0:page_retire:pages_notenqueued 0 unix:0:page_retire:pages_pending 0 unix:0:page_retire:pages_retire_request 0 unix:0:page_retire:pages_retire_request_free 0 unix:0:page_retire:pages_retired 0 unix:0:page_retire:pages_ue 0 unix:0:page_retire:pages_ue_cleared_freed 0 unix:0:page_retire:pages_ue_cleared_retired 0 unix:0:page_retire:pages_ue_persistent 0 unix:0:page_retire:pages_unretired 0 unix:0:page_retire:snaptime 8464512.701472 unix:0:pcf8584_nexus_enum_tq:class taskq unix:0:pcf8584_nexus_enum_tq:crtime 83.1109108 unix:0:pcf8584_nexus_enum_tq:executed 0 unix:0:pcf8584_nexus_enum_tq:maxtasks 0 unix:0:pcf8584_nexus_enum_tq:nactive 1 unix:0:pcf8584_nexus_enum_tq:nalloc 0 unix:0:pcf8584_nexus_enum_tq:priority 60 unix:0:pcf8584_nexus_enum_tq:snaptime 8464512.702593 unix:0:pcf8584_nexus_enum_tq:tasks 0 unix:0:pcf8584_nexus_enum_tq:threads 1 unix:0:pcf8584_nexus_enum_tq:totaltime 0 unix:0:pci_ecc_queue:cancelled 0 unix:0:pci_ecc_queue:class errorq unix:0:pci_ecc_queue:commit_fail 0 unix:0:pci_ecc_queue:committed 0 unix:0:pci_ecc_queue:crtime 69.294017 unix:0:pci_ecc_queue:dispatched 0 unix:0:pci_ecc_queue:dropped 0 unix:0:pci_ecc_queue:logged 0 unix:0:pci_ecc_queue:reserve_fail 0 unix:0:pci_ecc_queue:reserved 0 unix:0:pci_ecc_queue:snaptime 8464512.7031792 unix:0:pci_target_queue:cancelled 0 unix:0:pci_target_queue:class errorq unix:0:pci_target_queue:commit_fail 0 unix:0:pci_target_queue:committed 0 unix:0:pci_target_queue:crtime 69.2940328 unix:0:pci_target_queue:dispatched 0 unix:0:pci_target_queue:dropped 0 unix:0:pci_target_queue:logged 0 unix:0:pci_target_queue:reserve_fail 0 unix:0:pci_target_queue:reserved 0 unix:0:pci_target_queue:snaptime 8464512.7038428 
unix:0:pcisch0_dvma_8192:align 8192 unix:0:pcisch0_dvma_8192:alloc 38886 unix:0:pcisch0_dvma_8192:alloc_fail 0 unix:0:pcisch0_dvma_8192:buf_avail 0 unix:0:pcisch0_dvma_8192:buf_constructed 0 unix:0:pcisch0_dvma_8192:buf_inuse 1052 unix:0:pcisch0_dvma_8192:buf_max 1052 unix:0:pcisch0_dvma_8192:buf_size 8192 unix:0:pcisch0_dvma_8192:buf_total 1052 unix:0:pcisch0_dvma_8192:chunk_size 8192 unix:0:pcisch0_dvma_8192:class kmem_cache unix:0:pcisch0_dvma_8192:crtime 69.2933254 unix:0:pcisch0_dvma_8192:depot_alloc 1025 unix:0:pcisch0_dvma_8192:depot_contention 0 unix:0:pcisch0_dvma_8192:depot_free 1029 unix:0:pcisch0_dvma_8192:empty_magazines 1020 unix:0:pcisch0_dvma_8192:free 37838 unix:0:pcisch0_dvma_8192:full_magazines 0 unix:0:pcisch0_dvma_8192:hash_lookup_depth 0 unix:0:pcisch0_dvma_8192:hash_rescale 1 unix:0:pcisch0_dvma_8192:hash_size 1024 unix:0:pcisch0_dvma_8192:magazine_size 1 unix:0:pcisch0_dvma_8192:slab_alloc 1052 unix:0:pcisch0_dvma_8192:slab_create 263 unix:0:pcisch0_dvma_8192:slab_destroy 0 unix:0:pcisch0_dvma_8192:slab_free 0 unix:0:pcisch0_dvma_8192:slab_size 32768 unix:0:pcisch0_dvma_8192:snaptime 8464512.7044426 unix:0:pcisch0_dvma_8192:vmem_source 48 unix:0:pcisch1_dvma_8192:align 8192 unix:0:pcisch1_dvma_8192:alloc 250594 unix:0:pcisch1_dvma_8192:alloc_fail 0 unix:0:pcisch1_dvma_8192:buf_avail 26 unix:0:pcisch1_dvma_8192:buf_constructed 23 unix:0:pcisch1_dvma_8192:buf_inuse 2 unix:0:pcisch1_dvma_8192:buf_max 28 unix:0:pcisch1_dvma_8192:buf_size 8192 unix:0:pcisch1_dvma_8192:buf_total 28 unix:0:pcisch1_dvma_8192:chunk_size 8192 unix:0:pcisch1_dvma_8192:class kmem_cache unix:0:pcisch1_dvma_8192:crtime 69.298205 unix:0:pcisch1_dvma_8192:depot_alloc 106543 unix:0:pcisch1_dvma_8192:depot_contention 1 unix:0:pcisch1_dvma_8192:depot_free 106566 unix:0:pcisch1_dvma_8192:empty_magazines 0 unix:0:pcisch1_dvma_8192:free 250615 unix:0:pcisch1_dvma_8192:full_magazines 21 unix:0:pcisch1_dvma_8192:hash_lookup_depth 0 unix:0:pcisch1_dvma_8192:hash_rescale 0 unix:0:pcisch1_dvma_8192:hash_size 64 unix:0:pcisch1_dvma_8192:magazine_size 1 unix:0:pcisch1_dvma_8192:slab_alloc 25 unix:0:pcisch1_dvma_8192:slab_create 7 unix:0:pcisch1_dvma_8192:slab_destroy 0 unix:0:pcisch1_dvma_8192:slab_free 0 unix:0:pcisch1_dvma_8192:slab_size 32768 unix:0:pcisch1_dvma_8192:snaptime 8464512.7058354 unix:0:pcisch1_dvma_8192:vmem_source 49 unix:0:pcisch_nexus_enum_tq:class taskq unix:0:pcisch_nexus_enum_tq:crtime 69.2921014 unix:0:pcisch_nexus_enum_tq:executed 0 unix:0:pcisch_nexus_enum_tq:maxtasks 0 unix:0:pcisch_nexus_enum_tq:nactive 1 unix:0:pcisch_nexus_enum_tq:nalloc 0 unix:0:pcisch_nexus_enum_tq:priority 60 unix:0:pcisch_nexus_enum_tq:snaptime 8464512.7072878 unix:0:pcisch_nexus_enum_tq:tasks 0 unix:0:pcisch_nexus_enum_tq:threads 1 unix:0:pcisch_nexus_enum_tq:totaltime 0 unix:0:physio_buf_cache:align 8 unix:0:physio_buf_cache:alloc 13 unix:0:physio_buf_cache:alloc_fail 0 unix:0:physio_buf_cache:buf_avail 32 unix:0:physio_buf_cache:buf_constructed 2 unix:0:physio_buf_cache:buf_inuse 0 unix:0:physio_buf_cache:buf_max 32 unix:0:physio_buf_cache:buf_size 248 unix:0:physio_buf_cache:buf_total 32 unix:0:physio_buf_cache:chunk_size 248 unix:0:physio_buf_cache:class kmem_cache unix:0:physio_buf_cache:crtime 69.286061 unix:0:physio_buf_cache:depot_alloc 0 unix:0:physio_buf_cache:depot_contention 0 unix:0:physio_buf_cache:depot_free 2 unix:0:physio_buf_cache:empty_magazines 0 unix:0:physio_buf_cache:free 15 unix:0:physio_buf_cache:full_magazines 0 unix:0:physio_buf_cache:hash_lookup_depth 0 
unix:0:physio_buf_cache:hash_rescale 0 unix:0:physio_buf_cache:hash_size 0 unix:0:physio_buf_cache:magazine_size 7 unix:0:physio_buf_cache:slab_alloc 2 unix:0:physio_buf_cache:slab_create 1 unix:0:physio_buf_cache:slab_destroy 0 unix:0:physio_buf_cache:slab_free 0 unix:0:physio_buf_cache:slab_size 8192 unix:0:physio_buf_cache:snaptime 8464512.7078924 unix:0:physio_buf_cache:vmem_source 23 unix:0:pipe_cache:align 32 unix:0:pipe_cache:alloc 19368 unix:0:pipe_cache:alloc_fail 0 unix:0:pipe_cache:buf_avail 33 unix:0:pipe_cache:buf_constructed 17 unix:0:pipe_cache:buf_inuse 17 unix:0:pipe_cache:buf_max 50 unix:0:pipe_cache:buf_size 320 unix:0:pipe_cache:buf_total 50 unix:0:pipe_cache:chunk_size 320 unix:0:pipe_cache:class kmem_cache unix:0:pipe_cache:crtime 87.8317608 unix:0:pipe_cache:depot_alloc 172 unix:0:pipe_cache:depot_contention 0 unix:0:pipe_cache:depot_free 178 unix:0:pipe_cache:empty_magazines 1 unix:0:pipe_cache:free 19357 unix:0:pipe_cache:full_magazines 3 unix:0:pipe_cache:hash_lookup_depth 0 unix:0:pipe_cache:hash_rescale 0 unix:0:pipe_cache:hash_size 0 unix:0:pipe_cache:magazine_size 3 unix:0:pipe_cache:slab_alloc 34 unix:0:pipe_cache:slab_create 2 unix:0:pipe_cache:slab_destroy 0 unix:0:pipe_cache:slab_free 0 unix:0:pipe_cache:slab_size 8192 unix:0:pipe_cache:snaptime 8464512.7092738 unix:0:pipe_cache:vmem_source 23 unix:0:port_cache:align 8 unix:0:port_cache:alloc 2 unix:0:port_cache:alloc_fail 0 unix:0:port_cache:buf_avail 99 unix:0:port_cache:buf_constructed 0 unix:0:port_cache:buf_inuse 2 unix:0:port_cache:buf_max 101 unix:0:port_cache:buf_size 80 unix:0:port_cache:buf_total 101 unix:0:port_cache:chunk_size 80 unix:0:port_cache:class kmem_cache unix:0:port_cache:crtime 89.7146208 unix:0:port_cache:depot_alloc 0 unix:0:port_cache:depot_contention 0 unix:0:port_cache:depot_free 0 unix:0:port_cache:empty_magazines 0 unix:0:port_cache:free 0 unix:0:port_cache:full_magazines 0 unix:0:port_cache:hash_lookup_depth 0 unix:0:port_cache:hash_rescale 0 unix:0:port_cache:hash_size 0 unix:0:port_cache:magazine_size 7 unix:0:port_cache:slab_alloc 2 unix:0:port_cache:slab_create 1 unix:0:port_cache:slab_destroy 0 unix:0:port_cache:slab_free 0 unix:0:port_cache:slab_size 8192 unix:0:port_cache:snaptime 8464512.7108564 unix:0:port_cache:vmem_source 23 unix:0:process_cache:align 8 unix:0:process_cache:alloc 218833 unix:0:process_cache:alloc_fail 0 unix:0:process_cache:buf_avail 116 unix:0:process_cache:buf_constructed 112 unix:0:process_cache:buf_inuse 49 unix:0:process_cache:buf_max 165 unix:0:process_cache:buf_size 3096 unix:0:process_cache:buf_total 165 unix:0:process_cache:chunk_size 3096 unix:0:process_cache:class kmem_cache unix:0:process_cache:crtime 85.1890452 unix:0:process_cache:depot_alloc 3904 unix:0:process_cache:depot_contention 0 unix:0:process_cache:depot_free 3942 unix:0:process_cache:empty_magazines 4 unix:0:process_cache:free 218822 unix:0:process_cache:full_magazines 34 unix:0:process_cache:hash_lookup_depth 0 unix:0:process_cache:hash_rescale 0 unix:0:process_cache:hash_size 64 unix:0:process_cache:magazine_size 3 unix:0:process_cache:slab_alloc 161 unix:0:process_cache:slab_create 33 unix:0:process_cache:slab_destroy 0 unix:0:process_cache:slab_free 0 unix:0:process_cache:slab_size 16384 unix:0:process_cache:snaptime 8464512.7122616 unix:0:process_cache:vmem_source 23 unix:0:pset:avenrun_15min 4 unix:0:pset:avenrun_1min 9 unix:0:pset:avenrun_5min 7 unix:0:pset:class misc unix:0:pset:crtime 66.5820842 unix:0:pset:ncpus 2 unix:0:pset:runnable 257378 unix:0:pset:snaptime 
8464512.7137516 unix:0:pset:updates 846444344 unix:0:pset:waiting 0 unix:0:pseudo_nexus_enum_tq:class taskq unix:0:pseudo_nexus_enum_tq:crtime 69.2676704 unix:0:pseudo_nexus_enum_tq:executed 0 unix:0:pseudo_nexus_enum_tq:maxtasks 0 unix:0:pseudo_nexus_enum_tq:nactive 1 unix:0:pseudo_nexus_enum_tq:nalloc 0 unix:0:pseudo_nexus_enum_tq:priority 60 unix:0:pseudo_nexus_enum_tq:snaptime 8464512.7143126 unix:0:pseudo_nexus_enum_tq:tasks 0 unix:0:pseudo_nexus_enum_tq:threads 1 unix:0:pseudo_nexus_enum_tq:totaltime 0 unix:0:pty_map:align 8 unix:0:pty_map:alloc 81 unix:0:pty_map:alloc_fail 0 unix:0:pty_map:buf_avail 143 unix:0:pty_map:buf_constructed 10 unix:0:pty_map:buf_inuse 2 unix:0:pty_map:buf_max 145 unix:0:pty_map:buf_size 56 unix:0:pty_map:buf_total 145 unix:0:pty_map:chunk_size 56 unix:0:pty_map:class kmem_cache unix:0:pty_map:crtime 97.588337 unix:0:pty_map:depot_alloc 0 unix:0:pty_map:depot_contention 0 unix:0:pty_map:depot_free 2 unix:0:pty_map:empty_magazines 0 unix:0:pty_map:free 81 unix:0:pty_map:full_magazines 0 unix:0:pty_map:hash_lookup_depth 0 unix:0:pty_map:hash_rescale 0 unix:0:pty_map:hash_size 0 unix:0:pty_map:magazine_size 15 unix:0:pty_map:slab_alloc 12 unix:0:pty_map:slab_create 1 unix:0:pty_map:slab_destroy 0 unix:0:pty_map:slab_free 0 unix:0:pty_map:slab_size 8192 unix:0:pty_map:snaptime 8464512.7149112 unix:0:pty_map:vmem_source 23 unix:0:qband_cache:align 8 unix:0:qband_cache:alloc 2 unix:0:qband_cache:alloc_fail 0 unix:0:qband_cache:buf_avail 125 unix:0:qband_cache:buf_constructed 0 unix:0:qband_cache:buf_inuse 2 unix:0:qband_cache:buf_max 127 unix:0:qband_cache:buf_size 64 unix:0:qband_cache:buf_total 127 unix:0:qband_cache:chunk_size 64 unix:0:qband_cache:class kmem_cache unix:0:qband_cache:crtime 69.2822538 unix:0:qband_cache:depot_alloc 0 unix:0:qband_cache:depot_contention 0 unix:0:qband_cache:depot_free 0 unix:0:qband_cache:empty_magazines 0 unix:0:qband_cache:free 0 unix:0:qband_cache:full_magazines 0 unix:0:qband_cache:hash_lookup_depth 0 unix:0:qband_cache:hash_rescale 0 unix:0:qband_cache:hash_size 0 unix:0:qband_cache:magazine_size 15 unix:0:qband_cache:slab_alloc 2 unix:0:qband_cache:slab_create 1 unix:0:qband_cache:slab_destroy 0 unix:0:qband_cache:slab_free 0 unix:0:qband_cache:slab_size 8192 unix:0:qband_cache:snaptime 8464512.7163992 unix:0:qband_cache:vmem_source 23 unix:0:qlc_nexus_enum_tq:class taskq unix:0:qlc_nexus_enum_tq:crtime 69.2997442 unix:0:qlc_nexus_enum_tq:executed 0 unix:0:qlc_nexus_enum_tq:maxtasks 0 unix:0:qlc_nexus_enum_tq:nactive 1 unix:0:qlc_nexus_enum_tq:nalloc 0 unix:0:qlc_nexus_enum_tq:priority 60 unix:0:qlc_nexus_enum_tq:snaptime 8464512.7177742 unix:0:qlc_nexus_enum_tq:tasks 0 unix:0:qlc_nexus_enum_tq:threads 1 unix:0:qlc_nexus_enum_tq:totaltime 0 unix:0:queue_cache:align 8 unix:0:queue_cache:alloc 1130726 unix:0:queue_cache:alloc_fail 0 unix:0:queue_cache:buf_avail 212 unix:0:queue_cache:buf_constructed 204 unix:0:queue_cache:buf_inuse 388 unix:0:queue_cache:buf_max 600 unix:0:queue_cache:buf_size 656 unix:0:queue_cache:buf_total 600 unix:0:queue_cache:chunk_size 656 unix:0:queue_cache:class kmem_cache unix:0:queue_cache:crtime 69.2822254 unix:0:queue_cache:depot_alloc 39382 unix:0:queue_cache:depot_contention 0 unix:0:queue_cache:depot_free 39451 unix:0:queue_cache:empty_magazines 17 unix:0:queue_cache:free 1130407 unix:0:queue_cache:full_magazines 65 unix:0:queue_cache:hash_lookup_depth 0 unix:0:queue_cache:hash_rescale 0 unix:0:queue_cache:hash_size 0 unix:0:queue_cache:magazine_size 3 unix:0:queue_cache:slab_alloc 592 
unix:0:queue_cache:slab_create 50 unix:0:queue_cache:slab_destroy 0 unix:0:queue_cache:slab_free 0 unix:0:queue_cache:slab_size 8192 unix:0:queue_cache:snaptime 8464512.7184344 unix:0:queue_cache:vmem_source 23 unix:0:radix_mask:align 8 unix:0:radix_mask:alloc 1 unix:0:radix_mask:alloc_fail 0 unix:0:radix_mask:buf_avail 253 unix:0:radix_mask:buf_constructed 0 unix:0:radix_mask:buf_inuse 1 unix:0:radix_mask:buf_max 254 unix:0:radix_mask:buf_size 32 unix:0:radix_mask:buf_total 254 unix:0:radix_mask:chunk_size 32 unix:0:radix_mask:class kmem_cache unix:0:radix_mask:crtime 83.9663284 unix:0:radix_mask:depot_alloc 0 unix:0:radix_mask:depot_contention 0 unix:0:radix_mask:depot_free 0 unix:0:radix_mask:empty_magazines 0 unix:0:radix_mask:free 0 unix:0:radix_mask:full_magazines 0 unix:0:radix_mask:hash_lookup_depth 0 unix:0:radix_mask:hash_rescale 0 unix:0:radix_mask:hash_size 0 unix:0:radix_mask:magazine_size 15 unix:0:radix_mask:slab_alloc 1 unix:0:radix_mask:slab_create 1 unix:0:radix_mask:slab_destroy 0 unix:0:radix_mask:slab_free 0 unix:0:radix_mask:slab_size 8192 unix:0:radix_mask:snaptime 8464512.719846 unix:0:radix_mask:vmem_source 23 unix:0:radix_node:align 8 unix:0:radix_node:alloc 2 unix:0:radix_node:alloc_fail 0 unix:0:radix_node:buf_avail 70 unix:0:radix_node:buf_constructed 0 unix:0:radix_node:buf_inuse 2 unix:0:radix_node:buf_max 72 unix:0:radix_node:buf_size 112 unix:0:radix_node:buf_total 72 unix:0:radix_node:chunk_size 112 unix:0:radix_node:class kmem_cache unix:0:radix_node:crtime 83.9663436 unix:0:radix_node:depot_alloc 0 unix:0:radix_node:depot_contention 0 unix:0:radix_node:depot_free 0 unix:0:radix_node:empty_magazines 0 unix:0:radix_node:free 0 unix:0:radix_node:full_magazines 0 unix:0:radix_node:hash_lookup_depth 0 unix:0:radix_node:hash_rescale 0 unix:0:radix_node:hash_size 0 unix:0:radix_node:magazine_size 7 unix:0:radix_node:slab_alloc 2 unix:0:radix_node:slab_create 1 unix:0:radix_node:slab_destroy 0 unix:0:radix_node:slab_free 0 unix:0:radix_node:slab_size 8192 unix:0:radix_node:snaptime 8464512.7212998 unix:0:radix_node:vmem_source 23 unix:0:rctl_cache:align 8 unix:0:rctl_cache:alloc 2705376 unix:0:rctl_cache:alloc_fail 0 unix:0:rctl_cache:buf_avail 1517 unix:0:rctl_cache:buf_constructed 1328 unix:0:rctl_cache:buf_inuse 716 unix:0:rctl_cache:buf_max 2233 unix:0:rctl_cache:buf_size 40 unix:0:rctl_cache:buf_total 2233 unix:0:rctl_cache:chunk_size 40 unix:0:rctl_cache:class kmem_cache unix:0:rctl_cache:crtime 69.2624352 unix:0:rctl_cache:depot_alloc 115032 unix:0:rctl_cache:depot_contention 0 unix:0:rctl_cache:depot_free 115123 unix:0:rctl_cache:empty_magazines 6 unix:0:rctl_cache:free 2704751 unix:0:rctl_cache:full_magazines 87 unix:0:rctl_cache:hash_lookup_depth 0 unix:0:rctl_cache:hash_rescale 0 unix:0:rctl_cache:hash_size 0 unix:0:rctl_cache:magazine_size 15 unix:0:rctl_cache:slab_alloc 2044 unix:0:rctl_cache:slab_create 11 unix:0:rctl_cache:slab_destroy 0 unix:0:rctl_cache:slab_free 0 unix:0:rctl_cache:slab_size 8192 unix:0:rctl_cache:snaptime 8464512.7227422 unix:0:rctl_cache:vmem_source 23 unix:0:rctl_val_cache:align 8 unix:0:rctl_val_cache:alloc 6017088 unix:0:rctl_val_cache:alloc_fail 0 unix:0:rctl_val_cache:buf_avail 3045 unix:0:rctl_val_cache:buf_constructed 3006 unix:0:rctl_val_cache:buf_inuse 1400 unix:0:rctl_val_cache:buf_max 4445 unix:0:rctl_val_cache:buf_size 64 unix:0:rctl_val_cache:buf_total 4445 unix:0:rctl_val_cache:chunk_size 64 unix:0:rctl_val_cache:class kmem_cache unix:0:rctl_val_cache:crtime 69.2624492 unix:0:rctl_val_cache:depot_alloc 309388 
unix:0:rctl_val_cache:depot_contention 0 unix:0:rctl_val_cache:depot_free 309591 unix:0:rctl_val_cache:empty_magazines 12 unix:0:rctl_val_cache:free 6015891 unix:0:rctl_val_cache:full_magazines 199 unix:0:rctl_val_cache:hash_lookup_depth 0 unix:0:rctl_val_cache:hash_rescale 0 unix:0:rctl_val_cache:hash_size 0 unix:0:rctl_val_cache:magazine_size 15 unix:0:rctl_val_cache:slab_alloc 4406 unix:0:rctl_val_cache:slab_create 35 unix:0:rctl_val_cache:slab_destroy 0 unix:0:rctl_val_cache:slab_free 0 unix:0:rctl_val_cache:slab_size 8192 unix:0:rctl_val_cache:snaptime 8464512.7241372 unix:0:rctl_val_cache:vmem_source 23 unix:0:rds_alloc_cache:align 8 unix:0:rds_alloc_cache:alloc 0 unix:0:rds_alloc_cache:alloc_fail 0 unix:0:rds_alloc_cache:buf_avail 0 unix:0:rds_alloc_cache:buf_constructed 0 unix:0:rds_alloc_cache:buf_inuse 0 unix:0:rds_alloc_cache:buf_max 0 unix:0:rds_alloc_cache:buf_size 88 unix:0:rds_alloc_cache:buf_total 0 unix:0:rds_alloc_cache:chunk_size 88 unix:0:rds_alloc_cache:class kmem_cache unix:0:rds_alloc_cache:crtime 87.7841792 unix:0:rds_alloc_cache:depot_alloc 0 unix:0:rds_alloc_cache:depot_contention 0 unix:0:rds_alloc_cache:depot_free 0 unix:0:rds_alloc_cache:empty_magazines 0 unix:0:rds_alloc_cache:free 0 unix:0:rds_alloc_cache:full_magazines 0 unix:0:rds_alloc_cache:hash_lookup_depth 0 unix:0:rds_alloc_cache:hash_rescale 0 unix:0:rds_alloc_cache:hash_size 0 unix:0:rds_alloc_cache:magazine_size 7 unix:0:rds_alloc_cache:slab_alloc 0 unix:0:rds_alloc_cache:slab_create 0 unix:0:rds_alloc_cache:slab_destroy 0 unix:0:rds_alloc_cache:slab_free 0 unix:0:rds_alloc_cache:slab_size 8192 unix:0:rds_alloc_cache:snaptime 8464512.725526 unix:0:rds_alloc_cache:vmem_source 23 unix:0:rnode4_cache:align 8 unix:0:rnode4_cache:alloc 0 unix:0:rnode4_cache:alloc_fail 0 unix:0:rnode4_cache:buf_avail 0 unix:0:rnode4_cache:buf_constructed 0 unix:0:rnode4_cache:buf_inuse 0 unix:0:rnode4_cache:buf_max 0 unix:0:rnode4_cache:buf_size 960 unix:0:rnode4_cache:buf_total 0 unix:0:rnode4_cache:chunk_size 960 unix:0:rnode4_cache:class kmem_cache unix:0:rnode4_cache:crtime 114.1494934 unix:0:rnode4_cache:depot_alloc 0 unix:0:rnode4_cache:depot_contention 0 unix:0:rnode4_cache:depot_free 0 unix:0:rnode4_cache:empty_magazines 0 unix:0:rnode4_cache:free 0 unix:0:rnode4_cache:full_magazines 0 unix:0:rnode4_cache:hash_lookup_depth 0 unix:0:rnode4_cache:hash_rescale 0 unix:0:rnode4_cache:hash_size 0 unix:0:rnode4_cache:magazine_size 3 unix:0:rnode4_cache:slab_alloc 0 unix:0:rnode4_cache:slab_create 0 unix:0:rnode4_cache:slab_destroy 0 unix:0:rnode4_cache:slab_free 0 unix:0:rnode4_cache:slab_size 8192 unix:0:rnode4_cache:snaptime 8464512.7269612 unix:0:rnode4_cache:vmem_source 23 unix:0:rnode_cache:align 8 unix:0:rnode_cache:alloc 27574 unix:0:rnode_cache:alloc_fail 0 unix:0:rnode_cache:buf_avail 7642 unix:0:rnode_cache:buf_constructed 7634 unix:0:rnode_cache:buf_inuse 722 unix:0:rnode_cache:buf_max 8364 unix:0:rnode_cache:buf_size 648 unix:0:rnode_cache:buf_total 8364 unix:0:rnode_cache:chunk_size 648 unix:0:rnode_cache:class kmem_cache unix:0:rnode_cache:crtime 114.1423948 unix:0:rnode_cache:depot_alloc 4759 unix:0:rnode_cache:depot_contention 0 unix:0:rnode_cache:depot_free 7305 unix:0:rnode_cache:empty_magazines 230 unix:0:rnode_cache:free 29398 unix:0:rnode_cache:full_magazines 2544 unix:0:rnode_cache:hash_lookup_depth 0 unix:0:rnode_cache:hash_rescale 0 unix:0:rnode_cache:hash_size 0 unix:0:rnode_cache:magazine_size 3 unix:0:rnode_cache:slab_alloc 8356 unix:0:rnode_cache:slab_create 697 unix:0:rnode_cache:slab_destroy 
0 unix:0:rnode_cache:slab_free 0 unix:0:rnode_cache:slab_size 8192 unix:0:rnode_cache:snaptime 8464512.728399 unix:0:rnode_cache:vmem_source 23 unix:0:rpc_client:badcalls 1 unix:0:rpc_client:badverfs 0 unix:0:rpc_client:badxids 0 unix:0:rpc_client:calls 26 unix:0:rpc_client:cantsend 0 unix:0:rpc_client:class rpc unix:0:rpc_client:crtime 114.1387432 unix:0:rpc_client:newcreds 0 unix:0:rpc_client:nomem 0 unix:0:rpc_client:retrans 0 unix:0:rpc_client:snaptime 8464512.7297924 unix:0:rpc_client:timeouts 0 unix:0:rpc_client:timers 16 unix:0:rpc_clts_client:badcalls 1 unix:0:rpc_clts_client:badverfs 0 unix:0:rpc_clts_client:badxids 0 unix:0:rpc_clts_client:calls 26 unix:0:rpc_clts_client:cantsend 0 unix:0:rpc_clts_client:class rpc unix:0:rpc_clts_client:crtime 114.1387324 unix:0:rpc_clts_client:newcreds 0 unix:0:rpc_clts_client:nomem 0 unix:0:rpc_clts_client:retrans 0 unix:0:rpc_clts_client:snaptime 8464512.7305294 unix:0:rpc_clts_client:timeouts 0 unix:0:rpc_clts_client:timers 16 unix:0:rpc_clts_server:badcalls 0 unix:0:rpc_clts_server:badlen 0 unix:0:rpc_clts_server:calls 0 unix:0:rpc_clts_server:class rpc unix:0:rpc_clts_server:crtime 114.138755 unix:0:rpc_clts_server:dupchecks 0 unix:0:rpc_clts_server:dupreqs 0 unix:0:rpc_clts_server:nullrecv 0 unix:0:rpc_clts_server:snaptime 8464512.7311986 unix:0:rpc_clts_server:xdrcall 0 unix:0:rpc_cots_client:badcalls 1 unix:0:rpc_cots_client:badverfs 0 unix:0:rpc_cots_client:badxids 1 unix:0:rpc_cots_client:calls 275856 unix:0:rpc_cots_client:cantconn 0 unix:0:rpc_cots_client:class rpc unix:0:rpc_cots_client:crtime 114.1387758 unix:0:rpc_cots_client:interrupts 1 unix:0:rpc_cots_client:newcreds 0 unix:0:rpc_cots_client:nomem 0 unix:0:rpc_cots_client:snaptime 8464512.7317424 unix:0:rpc_cots_client:timeouts 0 unix:0:rpc_cots_client:timers 0 unix:0:rpc_cots_server:badcalls 0 unix:0:rpc_cots_server:badlen 0 unix:0:rpc_cots_server:calls 0 unix:0:rpc_cots_server:class rpc unix:0:rpc_cots_server:crtime 114.1388756 unix:0:rpc_cots_server:dupchecks 0 unix:0:rpc_cots_server:dupreqs 0 unix:0:rpc_cots_server:nullrecv 0 unix:0:rpc_cots_server:snaptime 8464512.73241 unix:0:rpc_cots_server:xdrcall 0 unix:0:rpc_server:badcalls 0 unix:0:rpc_server:badlen 0 unix:0:rpc_server:calls 0 unix:0:rpc_server:class rpc unix:0:rpc_server:crtime 114.138764 unix:0:rpc_server:dupchecks 0 unix:0:rpc_server:dupreqs 0 unix:0:rpc_server:nullrecv 0 unix:0:rpc_server:snaptime 8464512.7329448 unix:0:rpc_server:xdrcall 0 unix:0:rt_entry:align 8 unix:0:rt_entry:alloc 4 unix:0:rt_entry:alloc_fail 0 unix:0:rt_entry:buf_avail 45 unix:0:rt_entry:buf_constructed 0 unix:0:rt_entry:buf_inuse 3 unix:0:rt_entry:buf_max 48 unix:0:rt_entry:buf_size 168 unix:0:rt_entry:buf_total 48 unix:0:rt_entry:chunk_size 168 unix:0:rt_entry:class kmem_cache unix:0:rt_entry:crtime 83.9663124 unix:0:rt_entry:depot_alloc 0 unix:0:rt_entry:depot_contention 0 unix:0:rt_entry:depot_free 1 unix:0:rt_entry:empty_magazines 0 unix:0:rt_entry:free 2 unix:0:rt_entry:full_magazines 0 unix:0:rt_entry:hash_lookup_depth 0 unix:0:rt_entry:hash_rescale 0 unix:0:rt_entry:hash_size 0 unix:0:rt_entry:magazine_size 7 unix:0:rt_entry:slab_alloc 3 unix:0:rt_entry:slab_create 1 unix:0:rt_entry:slab_destroy 0 unix:0:rt_entry:slab_free 0 unix:0:rt_entry:slab_size 8192 unix:0:rt_entry:snaptime 8464512.7334844 unix:0:rt_entry:vmem_source 23 unix:0:scsi_vhci_nexus_enum_tq:class taskq unix:0:scsi_vhci_nexus_enum_tq:crtime 69.2706968 unix:0:scsi_vhci_nexus_enum_tq:executed 0 unix:0:scsi_vhci_nexus_enum_tq:maxtasks 0 
unix:0:scsi_vhci_nexus_enum_tq:nactive 1 unix:0:scsi_vhci_nexus_enum_tq:nalloc 0 unix:0:scsi_vhci_nexus_enum_tq:priority 60 unix:0:scsi_vhci_nexus_enum_tq:snaptime 8464512.734958 unix:0:scsi_vhci_nexus_enum_tq:tasks 0 unix:0:scsi_vhci_nexus_enum_tq:threads 1 unix:0:scsi_vhci_nexus_enum_tq:totaltime 0 unix:0:sctp_assoc:align 8 unix:0:sctp_assoc:alloc 0 unix:0:sctp_assoc:alloc_fail 0 unix:0:sctp_assoc:buf_avail 0 unix:0:sctp_assoc:buf_constructed 0 unix:0:sctp_assoc:buf_inuse 0 unix:0:sctp_assoc:buf_max 0 unix:0:sctp_assoc:buf_size 64 unix:0:sctp_assoc:buf_total 0 unix:0:sctp_assoc:chunk_size 64 unix:0:sctp_assoc:class kmem_cache unix:0:sctp_assoc:crtime 83.9814324 unix:0:sctp_assoc:depot_alloc 0 unix:0:sctp_assoc:depot_contention 0 unix:0:sctp_assoc:depot_free 0 unix:0:sctp_assoc:empty_magazines 0 unix:0:sctp_assoc:free 0 unix:0:sctp_assoc:full_magazines 0 unix:0:sctp_assoc:hash_lookup_depth 0 unix:0:sctp_assoc:hash_rescale 0 unix:0:sctp_assoc:hash_size 0 unix:0:sctp_assoc:magazine_size 15 unix:0:sctp_assoc:slab_alloc 0 unix:0:sctp_assoc:slab_create 0 unix:0:sctp_assoc:slab_destroy 0 unix:0:sctp_assoc:slab_free 0 unix:0:sctp_assoc:slab_size 8192 unix:0:sctp_assoc:snaptime 8464512.7355628 unix:0:sctp_assoc:vmem_source 23 unix:0:sctp_conn_cache:align 8 unix:0:sctp_conn_cache:alloc 1 unix:0:sctp_conn_cache:alloc_fail 0 unix:0:sctp_conn_cache:buf_avail 6 unix:0:sctp_conn_cache:buf_constructed 0 unix:0:sctp_conn_cache:buf_inuse 1 unix:0:sctp_conn_cache:buf_max 7 unix:0:sctp_conn_cache:buf_size 2264 unix:0:sctp_conn_cache:buf_total 7 unix:0:sctp_conn_cache:chunk_size 2264 unix:0:sctp_conn_cache:class kmem_cache unix:0:sctp_conn_cache:crtime 83.9792776 unix:0:sctp_conn_cache:depot_alloc 0 unix:0:sctp_conn_cache:depot_contention 0 unix:0:sctp_conn_cache:depot_free 0 unix:0:sctp_conn_cache:empty_magazines 0 unix:0:sctp_conn_cache:free 0 unix:0:sctp_conn_cache:full_magazines 0 unix:0:sctp_conn_cache:hash_lookup_depth 0 unix:0:sctp_conn_cache:hash_rescale 0 unix:0:sctp_conn_cache:hash_size 64 unix:0:sctp_conn_cache:magazine_size 3 unix:0:sctp_conn_cache:slab_alloc 1 unix:0:sctp_conn_cache:slab_create 1 unix:0:sctp_conn_cache:slab_destroy 0 unix:0:sctp_conn_cache:slab_free 0 unix:0:sctp_conn_cache:slab_size 16384 unix:0:sctp_conn_cache:snaptime 8464512.7369376 unix:0:sctp_conn_cache:vmem_source 23 unix:0:sctp_faddr_cache:align 8 unix:0:sctp_faddr_cache:alloc 0 unix:0:sctp_faddr_cache:alloc_fail 0 unix:0:sctp_faddr_cache:buf_avail 0 unix:0:sctp_faddr_cache:buf_constructed 0 unix:0:sctp_faddr_cache:buf_inuse 0 unix:0:sctp_faddr_cache:buf_max 0 unix:0:sctp_faddr_cache:buf_size 168 unix:0:sctp_faddr_cache:buf_total 0 unix:0:sctp_faddr_cache:chunk_size 168 unix:0:sctp_faddr_cache:class kmem_cache unix:0:sctp_faddr_cache:crtime 83.979294 unix:0:sctp_faddr_cache:depot_alloc 0 unix:0:sctp_faddr_cache:depot_contention 0 unix:0:sctp_faddr_cache:depot_free 0 unix:0:sctp_faddr_cache:empty_magazines 0 unix:0:sctp_faddr_cache:free 0 unix:0:sctp_faddr_cache:full_magazines 0 unix:0:sctp_faddr_cache:hash_lookup_depth 0 unix:0:sctp_faddr_cache:hash_rescale 0 unix:0:sctp_faddr_cache:hash_size 0 unix:0:sctp_faddr_cache:magazine_size 7 unix:0:sctp_faddr_cache:slab_alloc 0 unix:0:sctp_faddr_cache:slab_create 0 unix:0:sctp_faddr_cache:slab_destroy 0 unix:0:sctp_faddr_cache:slab_free 0 unix:0:sctp_faddr_cache:slab_size 8192 unix:0:sctp_faddr_cache:snaptime 8464512.7383796 unix:0:sctp_faddr_cache:vmem_source 23 unix:0:sctp_ftsn_set_cache:align 8 unix:0:sctp_ftsn_set_cache:alloc 0 unix:0:sctp_ftsn_set_cache:alloc_fail 0 
unix:0:sctp_ftsn_set_cache:buf_avail 0 unix:0:sctp_ftsn_set_cache:buf_constructed 0 unix:0:sctp_ftsn_set_cache:buf_inuse 0 unix:0:sctp_ftsn_set_cache:buf_max 0 unix:0:sctp_ftsn_set_cache:buf_size 16 unix:0:sctp_ftsn_set_cache:buf_total 0 unix:0:sctp_ftsn_set_cache:chunk_size 16 unix:0:sctp_ftsn_set_cache:class kmem_cache unix:0:sctp_ftsn_set_cache:crtime 83.979324 unix:0:sctp_ftsn_set_cache:depot_alloc 0 unix:0:sctp_ftsn_set_cache:depot_contention 0 unix:0:sctp_ftsn_set_cache:depot_free 0 unix:0:sctp_ftsn_set_cache:empty_magazines 0 unix:0:sctp_ftsn_set_cache:free 0 unix:0:sctp_ftsn_set_cache:full_magazines 0 unix:0:sctp_ftsn_set_cache:hash_lookup_depth 0 unix:0:sctp_ftsn_set_cache:hash_rescale 0 unix:0:sctp_ftsn_set_cache:hash_size 0 unix:0:sctp_ftsn_set_cache:magazine_size 15 unix:0:sctp_ftsn_set_cache:slab_alloc 0 unix:0:sctp_ftsn_set_cache:slab_create 0 unix:0:sctp_ftsn_set_cache:slab_destroy 0 unix:0:sctp_ftsn_set_cache:slab_free 0 unix:0:sctp_ftsn_set_cache:slab_size 8192 unix:0:sctp_ftsn_set_cache:snaptime 8464512.7398868 unix:0:sctp_ftsn_set_cache:vmem_source 23 unix:0:sctp_set_cache:align 8 unix:0:sctp_set_cache:alloc 0 unix:0:sctp_set_cache:alloc_fail 0 unix:0:sctp_set_cache:buf_avail 0 unix:0:sctp_set_cache:buf_constructed 0 unix:0:sctp_set_cache:buf_inuse 0 unix:0:sctp_set_cache:buf_max 0 unix:0:sctp_set_cache:buf_size 24 unix:0:sctp_set_cache:buf_total 0 unix:0:sctp_set_cache:chunk_size 24 unix:0:sctp_set_cache:class kmem_cache unix:0:sctp_set_cache:crtime 83.979309 unix:0:sctp_set_cache:depot_alloc 0 unix:0:sctp_set_cache:depot_contention 0 unix:0:sctp_set_cache:depot_free 0 unix:0:sctp_set_cache:empty_magazines 0 unix:0:sctp_set_cache:free 0 unix:0:sctp_set_cache:full_magazines 0 unix:0:sctp_set_cache:hash_lookup_depth 0 unix:0:sctp_set_cache:hash_rescale 0 unix:0:sctp_set_cache:hash_size 0 unix:0:sctp_set_cache:magazine_size 15 unix:0:sctp_set_cache:slab_alloc 0 unix:0:sctp_set_cache:slab_create 0 unix:0:sctp_set_cache:slab_destroy 0 unix:0:sctp_set_cache:slab_free 0 unix:0:sctp_set_cache:slab_size 8192 unix:0:sctp_set_cache:snaptime 8464512.7413872 unix:0:sctp_set_cache:vmem_source 23 unix:0:sctpsock:align 8 unix:0:sctpsock:alloc 0 unix:0:sctpsock:alloc_fail 0 unix:0:sctpsock:buf_avail 0 unix:0:sctpsock:buf_constructed 0 unix:0:sctpsock:buf_inuse 0 unix:0:sctpsock:buf_max 0 unix:0:sctpsock:buf_size 616 unix:0:sctpsock:buf_total 0 unix:0:sctpsock:chunk_size 616 unix:0:sctpsock:class kmem_cache unix:0:sctpsock:crtime 83.9814176 unix:0:sctpsock:depot_alloc 0 unix:0:sctpsock:depot_contention 0 unix:0:sctpsock:depot_free 0 unix:0:sctpsock:empty_magazines 0 unix:0:sctpsock:free 0 unix:0:sctpsock:full_magazines 0 unix:0:sctpsock:hash_lookup_depth 0 unix:0:sctpsock:hash_rescale 0 unix:0:sctpsock:hash_size 0 unix:0:sctpsock:magazine_size 3 unix:0:sctpsock:slab_alloc 0 unix:0:sctpsock:slab_create 0 unix:0:sctpsock:slab_destroy 0 unix:0:sctpsock:slab_free 0 unix:0:sctpsock:slab_size 8192 unix:0:sctpsock:snaptime 8464512.742763 unix:0:sctpsock:vmem_source 23 unix:0:seg_cache:align 8 unix:0:seg_cache:alloc 9916242 unix:0:seg_cache:alloc_fail 0 unix:0:seg_cache:buf_avail 4155 unix:0:seg_cache:buf_constructed 4113 unix:0:seg_cache:buf_inuse 2286 unix:0:seg_cache:buf_max 6441 unix:0:seg_cache:buf_size 72 unix:0:seg_cache:buf_total 6441 unix:0:seg_cache:chunk_size 72 unix:0:seg_cache:class kmem_cache unix:0:seg_cache:crtime 66.2398374 unix:0:seg_cache:depot_alloc 972029 unix:0:seg_cache:depot_contention 12 unix:0:seg_cache:depot_free 972618 unix:0:seg_cache:empty_magazines 36 
unix:0:seg_cache:free 9914545 unix:0:seg_cache:full_magazines 586 unix:0:seg_cache:hash_lookup_depth 0 unix:0:seg_cache:hash_rescale 0 unix:0:seg_cache:hash_size 0 unix:0:seg_cache:magazine_size 7 unix:0:seg_cache:slab_alloc 6399 unix:0:seg_cache:slab_create 57 unix:0:seg_cache:slab_destroy 0 unix:0:seg_cache:slab_free 0 unix:0:seg_cache:slab_size 8192 unix:0:seg_cache:snaptime 8464512.7441984 unix:0:seg_cache:vmem_source 23 unix:0:segadvstat:MADV_FREE_hit 0 unix:0:segadvstat:MADV_FREE_miss 0 unix:0:segadvstat:class vm unix:0:segadvstat:crtime 66.2398526 unix:0:segadvstat:snaptime 8464512.7455886 unix:0:segkp_16384:align 8192 unix:0:segkp_16384:alloc 0 unix:0:segkp_16384:alloc_fail 0 unix:0:segkp_16384:buf_avail 0 unix:0:segkp_16384:buf_constructed 0 unix:0:segkp_16384:buf_inuse 0 unix:0:segkp_16384:buf_max 0 unix:0:segkp_16384:buf_size 16384 unix:0:segkp_16384:buf_total 0 unix:0:segkp_16384:chunk_size 16384 unix:0:segkp_16384:class kmem_cache unix:0:segkp_16384:crtime 69.201107 unix:0:segkp_16384:depot_alloc 0 unix:0:segkp_16384:depot_contention 0 unix:0:segkp_16384:depot_free 0 unix:0:segkp_16384:empty_magazines 0 unix:0:segkp_16384:free 0 unix:0:segkp_16384:full_magazines 0 unix:0:segkp_16384:hash_lookup_depth 0 unix:0:segkp_16384:hash_rescale 0 unix:0:segkp_16384:hash_size 64 unix:0:segkp_16384:magazine_size 1 unix:0:segkp_16384:slab_alloc 0 unix:0:segkp_16384:slab_create 0 unix:0:segkp_16384:slab_destroy 0 unix:0:segkp_16384:slab_free 0 unix:0:segkp_16384:slab_size 131072 unix:0:segkp_16384:snaptime 8464512.7459232 unix:0:segkp_16384:vmem_source 38 unix:0:segkp_24576:align 8192 unix:0:segkp_24576:alloc 0 unix:0:segkp_24576:alloc_fail 0 unix:0:segkp_24576:buf_avail 0 unix:0:segkp_24576:buf_constructed 0 unix:0:segkp_24576:buf_inuse 0 unix:0:segkp_24576:buf_max 0 unix:0:segkp_24576:buf_size 24576 unix:0:segkp_24576:buf_total 0 unix:0:segkp_24576:chunk_size 24576 unix:0:segkp_24576:class kmem_cache unix:0:segkp_24576:crtime 69.201128 unix:0:segkp_24576:depot_alloc 0 unix:0:segkp_24576:depot_contention 0 unix:0:segkp_24576:depot_free 0 unix:0:segkp_24576:empty_magazines 0 unix:0:segkp_24576:free 0 unix:0:segkp_24576:full_magazines 0 unix:0:segkp_24576:hash_lookup_depth 0 unix:0:segkp_24576:hash_rescale 0 unix:0:segkp_24576:hash_size 64 unix:0:segkp_24576:magazine_size 1 unix:0:segkp_24576:slab_alloc 0 unix:0:segkp_24576:slab_create 0 unix:0:segkp_24576:slab_destroy 0 unix:0:segkp_24576:slab_free 0 unix:0:segkp_24576:slab_size 131072 unix:0:segkp_24576:snaptime 8464512.7472936 unix:0:segkp_24576:vmem_source 38 unix:0:segkp_32768:align 8192 unix:0:segkp_32768:alloc 63564 unix:0:segkp_32768:alloc_fail 0 unix:0:segkp_32768:buf_avail 89 unix:0:segkp_32768:buf_constructed 87 unix:0:segkp_32768:buf_inuse 379 unix:0:segkp_32768:buf_max 468 unix:0:segkp_32768:buf_size 32768 unix:0:segkp_32768:buf_total 468 unix:0:segkp_32768:chunk_size 32768 unix:0:segkp_32768:class kmem_cache unix:0:segkp_32768:crtime 69.2011472 unix:0:segkp_32768:depot_alloc 27895 unix:0:segkp_32768:depot_contention 0 unix:0:segkp_32768:depot_free 27984 unix:0:segkp_32768:empty_magazines 13 unix:0:segkp_32768:free 63274 unix:0:segkp_32768:full_magazines 85 unix:0:segkp_32768:hash_lookup_depth 0 unix:0:segkp_32768:hash_rescale 1 unix:0:segkp_32768:hash_size 256 unix:0:segkp_32768:magazine_size 1 unix:0:segkp_32768:slab_alloc 466 unix:0:segkp_32768:slab_create 117 unix:0:segkp_32768:slab_destroy 0 unix:0:segkp_32768:slab_free 0 unix:0:segkp_32768:slab_size 131072 unix:0:segkp_32768:snaptime 8464512.7487406 
unix:0:segkp_32768:vmem_source 38 unix:0:segkp_40960:align 8192 unix:0:segkp_40960:alloc 0 unix:0:segkp_40960:alloc_fail 0 unix:0:segkp_40960:buf_avail 0 unix:0:segkp_40960:buf_constructed 0 unix:0:segkp_40960:buf_inuse 0 unix:0:segkp_40960:buf_max 0 unix:0:segkp_40960:buf_size 40960 unix:0:segkp_40960:buf_total 0 unix:0:segkp_40960:chunk_size 40960 unix:0:segkp_40960:class kmem_cache unix:0:segkp_40960:crtime 69.2011664 unix:0:segkp_40960:depot_alloc 0 unix:0:segkp_40960:depot_contention 0 unix:0:segkp_40960:depot_free 0 unix:0:segkp_40960:empty_magazines 0 unix:0:segkp_40960:free 0 unix:0:segkp_40960:full_magazines 0 unix:0:segkp_40960:hash_lookup_depth 0 unix:0:segkp_40960:hash_rescale 0 unix:0:segkp_40960:hash_size 64 unix:0:segkp_40960:magazine_size 1 unix:0:segkp_40960:slab_alloc 0 unix:0:segkp_40960:slab_create 0 unix:0:segkp_40960:slab_destroy 0 unix:0:segkp_40960:slab_free 0 unix:0:segkp_40960:slab_size 131072 unix:0:segkp_40960:snaptime 8464512.7501886 unix:0:segkp_40960:vmem_source 38 unix:0:segkp_8192:align 8192 unix:0:segkp_8192:alloc 49062 unix:0:segkp_8192:alloc_fail 0 unix:0:segkp_8192:buf_avail 24 unix:0:segkp_8192:buf_constructed 14 unix:0:segkp_8192:buf_inuse 40 unix:0:segkp_8192:buf_max 64 unix:0:segkp_8192:buf_size 8192 unix:0:segkp_8192:buf_total 64 unix:0:segkp_8192:chunk_size 8192 unix:0:segkp_8192:class kmem_cache unix:0:segkp_8192:crtime 69.2010588 unix:0:segkp_8192:depot_alloc 963 unix:0:segkp_8192:depot_contention 0 unix:0:segkp_8192:depot_free 978 unix:0:segkp_8192:empty_magazines 6 unix:0:segkp_8192:free 49037 unix:0:segkp_8192:full_magazines 11 unix:0:segkp_8192:hash_lookup_depth 0 unix:0:segkp_8192:hash_rescale 0 unix:0:segkp_8192:hash_size 64 unix:0:segkp_8192:magazine_size 1 unix:0:segkp_8192:slab_alloc 54 unix:0:segkp_8192:slab_create 4 unix:0:segkp_8192:slab_destroy 0 unix:0:segkp_8192:slab_free 0 unix:0:segkp_8192:slab_size 131072 unix:0:segkp_8192:snaptime 8464512.7516422 unix:0:segkp_8192:vmem_source 38 unix:0:segmap:class vm unix:0:segmap:crtime 65.9026336 unix:0:segmap:fault 827360 unix:0:segmap:faulta 0 unix:0:segmap:free 0 unix:0:segmap:free_dirty 0 unix:0:segmap:free_notfree 0 unix:0:segmap:get_nofree 0 unix:0:segmap:get_nomtx 0 unix:0:segmap:get_reclaim 3355188 unix:0:segmap:get_reuse 0 unix:0:segmap:get_unused 0 unix:0:segmap:get_use 16 unix:0:segmap:getmap 3384689 unix:0:segmap:pagecreate 79539 unix:0:segmap:rel_abort 0 unix:0:segmap:rel_async 7864 unix:0:segmap:rel_dontneed 7826 unix:0:segmap:rel_free 38 unix:0:segmap:rel_write 20078 unix:0:segmap:release 3364573 unix:0:segmap:snaptime 8464512.7530172 unix:0:segmap:stolen 0 unix:0:segvn_cache:align 8 unix:0:segvn_cache:alloc 9013109 unix:0:segvn_cache:alloc_fail 0 unix:0:segvn_cache:buf_avail 4112 unix:0:segvn_cache:buf_constructed 4112 unix:0:segvn_cache:buf_inuse 2284 unix:0:segvn_cache:buf_max 6396 unix:0:segvn_cache:buf_size 104 unix:0:segvn_cache:buf_total 6396 unix:0:segvn_cache:chunk_size 104 unix:0:segvn_cache:class kmem_cache unix:0:segvn_cache:crtime 69.285666 unix:0:segvn_cache:depot_alloc 969844 unix:0:segvn_cache:depot_contention 3 unix:0:segvn_cache:depot_free 970433 unix:0:segvn_cache:empty_magazines 36 unix:0:segvn_cache:free 9011414 unix:0:segvn_cache:full_magazines 586 unix:0:segvn_cache:hash_lookup_depth 0 unix:0:segvn_cache:hash_rescale 0 unix:0:segvn_cache:hash_size 0 unix:0:segvn_cache:magazine_size 7 unix:0:segvn_cache:slab_alloc 6396 unix:0:segvn_cache:slab_create 82 unix:0:segvn_cache:slab_destroy 0 unix:0:segvn_cache:slab_free 0 unix:0:segvn_cache:slab_size 8192 
unix:0:segvn_cache:snaptime 8464512.754156 unix:0:segvn_cache:vmem_source 23 unix:0:serializer_cache:align 8 unix:0:serializer_cache:alloc 770 unix:0:serializer_cache:alloc_fail 0 unix:0:serializer_cache:buf_avail 101 unix:0:serializer_cache:buf_constructed 14 unix:0:serializer_cache:buf_inuse 26 unix:0:serializer_cache:buf_max 127 unix:0:serializer_cache:buf_size 64 unix:0:serializer_cache:buf_total 127 unix:0:serializer_cache:chunk_size 64 unix:0:serializer_cache:class kmem_cache unix:0:serializer_cache:crtime 69.2825794 unix:0:serializer_cache:depot_alloc 0 unix:0:serializer_cache:depot_contention 0 unix:0:serializer_cache:depot_free 2 unix:0:serializer_cache:empty_magazines 0 unix:0:serializer_cache:free 746 unix:0:serializer_cache:full_magazines 0 unix:0:serializer_cache:hash_lookup_depth 0 unix:0:serializer_cache:hash_rescale 0 unix:0:serializer_cache:hash_size 0 unix:0:serializer_cache:magazine_size 15 unix:0:serializer_cache:slab_alloc 40 unix:0:serializer_cache:slab_create 1 unix:0:serializer_cache:slab_destroy 0 unix:0:serializer_cache:slab_free 0 unix:0:serializer_cache:slab_size 8192 unix:0:serializer_cache:snaptime 8464512.7556328 unix:0:serializer_cache:vmem_source 23 unix:0:sfmmu1_cache:align 8 unix:0:sfmmu1_cache:alloc 1711675 unix:0:sfmmu1_cache:alloc_fail 0 unix:0:sfmmu1_cache:buf_avail 206 unix:0:sfmmu1_cache:buf_constructed 192 unix:0:sfmmu1_cache:buf_inuse 7614 unix:0:sfmmu1_cache:buf_max 7820 unix:0:sfmmu1_cache:buf_size 88 unix:0:sfmmu1_cache:buf_total 7820 unix:0:sfmmu1_cache:chunk_size 88 unix:0:sfmmu1_cache:class kmem_cache unix:0:sfmmu1_cache:crtime 66.2390332 unix:0:sfmmu1_cache:depot_alloc 31670 unix:0:sfmmu1_cache:depot_contention 0 unix:0:sfmmu1_cache:depot_free 31700 unix:0:sfmmu1_cache:empty_magazines 53 unix:0:sfmmu1_cache:free 1704091 unix:0:sfmmu1_cache:full_magazines 26 unix:0:sfmmu1_cache:hash_lookup_depth 0 unix:0:sfmmu1_cache:hash_rescale 0 unix:0:sfmmu1_cache:hash_size 0 unix:0:sfmmu1_cache:magazine_size 7 unix:0:sfmmu1_cache:slab_alloc 7806 unix:0:sfmmu1_cache:slab_create 85 unix:0:sfmmu1_cache:slab_destroy 0 unix:0:sfmmu1_cache:slab_free 0 unix:0:sfmmu1_cache:slab_size 8192 unix:0:sfmmu1_cache:snaptime 8464512.7570208 unix:0:sfmmu1_cache:vmem_source 34 unix:0:sfmmu8_cache:align 8 unix:0:sfmmu8_cache:alloc 16716102 unix:0:sfmmu8_cache:alloc_fail 0 unix:0:sfmmu8_cache:buf_avail 75707 unix:0:sfmmu8_cache:buf_constructed 75692 unix:0:sfmmu8_cache:buf_inuse 6895 unix:0:sfmmu8_cache:buf_max 82602 unix:0:sfmmu8_cache:buf_size 312 unix:0:sfmmu8_cache:buf_total 82602 unix:0:sfmmu8_cache:chunk_size 312 unix:0:sfmmu8_cache:class kmem_cache unix:0:sfmmu8_cache:crtime 66.2387132 unix:0:sfmmu8_cache:depot_alloc 845675 unix:0:sfmmu8_cache:depot_contention 54 unix:0:sfmmu8_cache:depot_free 850879 unix:0:sfmmu8_cache:empty_magazines 32 unix:0:sfmmu8_cache:free 16739520 unix:0:sfmmu8_cache:full_magazines 5045 unix:0:sfmmu8_cache:hash_lookup_depth 0 unix:0:sfmmu8_cache:hash_rescale 0 unix:0:sfmmu8_cache:hash_size 0 unix:0:sfmmu8_cache:magazine_size 15 unix:0:sfmmu8_cache:slab_alloc 107696 unix:0:sfmmu8_cache:slab_create 3959 unix:0:sfmmu8_cache:slab_destroy 782 unix:0:sfmmu8_cache:slab_free 25109 unix:0:sfmmu8_cache:slab_size 8192 unix:0:sfmmu8_cache:snaptime 8464512.7583996 unix:0:sfmmu8_cache:vmem_source 10 unix:0:sfmmu_global_stat:class hat unix:0:sfmmu_global_stat:crtime 66.1537314 unix:0:sfmmu_global_stat:sf_clrwrt 875 unix:0:sfmmu_global_stat:sf_ctx_inv 55463 unix:0:sfmmu_global_stat:sf_get_free_fail 1 unix:0:sfmmu_global_stat:sf_get_free_success 401 
unix:0:sfmmu_global_stat:sf_get_free_throttle 3 unix:0:sfmmu_global_stat:sf_hblk1_nalloc 6 unix:0:sfmmu_global_stat:sf_hblk1_ncreate 9 unix:0:sfmmu_global_stat:sf_hblk8_nalloc 3155 unix:0:sfmmu_global_stat:sf_hblk8_ncreate 3962 unix:0:sfmmu_global_stat:sf_hblk_hit 139971018 unix:0:sfmmu_global_stat:sf_hblk_recurse_cnt 0 unix:0:sfmmu_global_stat:sf_hblk_reserve_cnt 4 unix:0:sfmmu_global_stat:sf_hblk_reserve_hit 0 unix:0:sfmmu_global_stat:sf_hblk_slab_cnt 405 unix:0:sfmmu_global_stat:sf_ism_recache 0 unix:0:sfmmu_global_stat:sf_ism_uncache 0 unix:0:sfmmu_global_stat:sf_kernel_xcalls 47139527 unix:0:sfmmu_global_stat:sf_khash_links 0 unix:0:sfmmu_global_stat:sf_khash_searches 0 unix:0:sfmmu_global_stat:sf_pagefaults 59554925 unix:0:sfmmu_global_stat:sf_pagesync 4852 unix:0:sfmmu_global_stat:sf_pagesync_invalid 0 unix:0:sfmmu_global_stat:sf_pgcolor_conflict 220 unix:0:sfmmu_global_stat:sf_put_free_fail 31032873 unix:0:sfmmu_global_stat:sf_put_free_success 349373 unix:0:sfmmu_global_stat:sf_recache 90 unix:0:sfmmu_global_stat:sf_steal_count 0 unix:0:sfmmu_global_stat:sf_swapout 0 unix:0:sfmmu_global_stat:sf_tlb_reprog_pgsz 0 unix:0:sfmmu_global_stat:sf_tsb_alloc 536792 unix:0:sfmmu_global_stat:sf_tsb_allocfail 0 unix:0:sfmmu_global_stat:sf_tsb_exceptions 389796 unix:0:sfmmu_global_stat:sf_tsb_grow 55463 unix:0:sfmmu_global_stat:sf_tsb_load4m 1 unix:0:sfmmu_global_stat:sf_tsb_load8k 97492 unix:0:sfmmu_global_stat:sf_tsb_raise_exception 2089 unix:0:sfmmu_global_stat:sf_tsb_reloc 0 unix:0:sfmmu_global_stat:sf_tsb_resize_failures 0 unix:0:sfmmu_global_stat:sf_tsb_sectsb_create 1 unix:0:sfmmu_global_stat:sf_tsb_shrink 0 unix:0:sfmmu_global_stat:sf_tteload256m 0 unix:0:sfmmu_global_stat:sf_tteload32m 0 unix:0:sfmmu_global_stat:sf_tteload4m 401435 unix:0:sfmmu_global_stat:sf_tteload512k 0 unix:0:sfmmu_global_stat:sf_tteload64k 0 unix:0:sfmmu_global_stat:sf_tteload8k 155922591 unix:0:sfmmu_global_stat:sf_uhash_links 0 unix:0:sfmmu_global_stat:sf_uhash_searches 0 unix:0:sfmmu_global_stat:sf_uncache_conflict 320 unix:0:sfmmu_global_stat:sf_unload_conflict 89 unix:0:sfmmu_global_stat:sf_user_vtop 11140397 unix:0:sfmmu_global_stat:sf_user_xcalls 21638632 unix:0:sfmmu_global_stat:snaptime 8464512.759782 unix:0:sfmmu_tsb8k_cache:align 8192 unix:0:sfmmu_tsb8k_cache:alloc 0 unix:0:sfmmu_tsb8k_cache:alloc_fail 0 unix:0:sfmmu_tsb8k_cache:buf_avail 0 unix:0:sfmmu_tsb8k_cache:buf_constructed 0 unix:0:sfmmu_tsb8k_cache:buf_inuse 0 unix:0:sfmmu_tsb8k_cache:buf_max 0 unix:0:sfmmu_tsb8k_cache:buf_size 8192 unix:0:sfmmu_tsb8k_cache:buf_total 0 unix:0:sfmmu_tsb8k_cache:chunk_size 8192 unix:0:sfmmu_tsb8k_cache:class kmem_cache unix:0:sfmmu_tsb8k_cache:crtime 66.238248 unix:0:sfmmu_tsb8k_cache:depot_alloc 0 unix:0:sfmmu_tsb8k_cache:depot_contention 0 unix:0:sfmmu_tsb8k_cache:depot_free 0 unix:0:sfmmu_tsb8k_cache:empty_magazines 0 unix:0:sfmmu_tsb8k_cache:free 0 unix:0:sfmmu_tsb8k_cache:full_magazines 0 unix:0:sfmmu_tsb8k_cache:hash_lookup_depth 0 unix:0:sfmmu_tsb8k_cache:hash_rescale 0 unix:0:sfmmu_tsb8k_cache:hash_size 64 unix:0:sfmmu_tsb8k_cache:magazine_size 0 unix:0:sfmmu_tsb8k_cache:slab_alloc 0 unix:0:sfmmu_tsb8k_cache:slab_create 0 unix:0:sfmmu_tsb8k_cache:slab_destroy 0 unix:0:sfmmu_tsb8k_cache:slab_free 0 unix:0:sfmmu_tsb8k_cache:slab_size 8192 unix:0:sfmmu_tsb8k_cache:snaptime 8464512.762718 unix:0:sfmmu_tsb8k_cache:vmem_source 8 unix:0:sfmmu_tsb_cache:align 8192 unix:0:sfmmu_tsb_cache:alloc 262178 unix:0:sfmmu_tsb_cache:alloc_fail 0 unix:0:sfmmu_tsb_cache:buf_avail 114 unix:0:sfmmu_tsb_cache:buf_constructed 114 
unix:0:sfmmu_tsb_cache:buf_inuse 34 unix:0:sfmmu_tsb_cache:buf_max 148 unix:0:sfmmu_tsb_cache:buf_size 8192 unix:0:sfmmu_tsb_cache:buf_total 148 unix:0:sfmmu_tsb_cache:chunk_size 8192 unix:0:sfmmu_tsb_cache:class kmem_cache unix:0:sfmmu_tsb_cache:crtime 66.2386936 unix:0:sfmmu_tsb_cache:depot_alloc 21513 unix:0:sfmmu_tsb_cache:depot_contention 0 unix:0:sfmmu_tsb_cache:depot_free 21628 unix:0:sfmmu_tsb_cache:empty_magazines 5 unix:0:sfmmu_tsb_cache:free 262259 unix:0:sfmmu_tsb_cache:full_magazines 111 unix:0:sfmmu_tsb_cache:hash_lookup_depth 0 unix:0:sfmmu_tsb_cache:hash_rescale 0 unix:0:sfmmu_tsb_cache:hash_size 64 unix:0:sfmmu_tsb_cache:magazine_size 1 unix:0:sfmmu_tsb_cache:slab_alloc 148 unix:0:sfmmu_tsb_cache:slab_create 148 unix:0:sfmmu_tsb_cache:slab_destroy 0 unix:0:sfmmu_tsb_cache:slab_free 0 unix:0:sfmmu_tsb_cache:slab_size 8192 unix:0:sfmmu_tsb_cache:snaptime 8464512.7641102 unix:0:sfmmu_tsb_cache:vmem_source 33 unix:0:sfmmu_tsbinfo_cache:align 8 unix:0:sfmmu_tsbinfo_cache:alloc 542871 unix:0:sfmmu_tsbinfo_cache:alloc_fail 0 unix:0:sfmmu_tsbinfo_cache:buf_avail 208 unix:0:sfmmu_tsbinfo_cache:buf_constructed 114 unix:0:sfmmu_tsbinfo_cache:buf_inuse 46 unix:0:sfmmu_tsbinfo_cache:buf_max 254 unix:0:sfmmu_tsbinfo_cache:buf_size 64 unix:0:sfmmu_tsbinfo_cache:buf_total 254 unix:0:sfmmu_tsbinfo_cache:chunk_size 64 unix:0:sfmmu_tsbinfo_cache:class kmem_cache unix:0:sfmmu_tsbinfo_cache:crtime 66.2382126 unix:0:sfmmu_tsbinfo_cache:depot_alloc 6079 unix:0:sfmmu_tsbinfo_cache:depot_contention 0 unix:0:sfmmu_tsbinfo_cache:depot_free 6089 unix:0:sfmmu_tsbinfo_cache:empty_magazines 0 unix:0:sfmmu_tsbinfo_cache:free 542835 unix:0:sfmmu_tsbinfo_cache:full_magazines 6 unix:0:sfmmu_tsbinfo_cache:hash_lookup_depth 0 unix:0:sfmmu_tsbinfo_cache:hash_rescale 0 unix:0:sfmmu_tsbinfo_cache:hash_size 0 unix:0:sfmmu_tsbinfo_cache:magazine_size 15 unix:0:sfmmu_tsbinfo_cache:slab_alloc 160 unix:0:sfmmu_tsbinfo_cache:slab_create 2 unix:0:sfmmu_tsbinfo_cache:slab_destroy 0 unix:0:sfmmu_tsbinfo_cache:slab_free 0 unix:0:sfmmu_tsbinfo_cache:slab_size 8192 unix:0:sfmmu_tsbinfo_cache:snaptime 8464512.7654862 unix:0:sfmmu_tsbinfo_cache:vmem_source 23 unix:0:sfmmu_tsbsize_stat:class hat unix:0:sfmmu_tsbsize_stat:crtime 66.1537512 unix:0:sfmmu_tsbsize_stat:sf_tsbsz_128k 49 unix:0:sfmmu_tsbsize_stat:sf_tsbsz_16k 34432 unix:0:sfmmu_tsbsize_stat:sf_tsbsz_1m 3 unix:0:sfmmu_tsbsize_stat:sf_tsbsz_256k 18 unix:0:sfmmu_tsbsize_stat:sf_tsbsz_2m 0 unix:0:sfmmu_tsbsize_stat:sf_tsbsz_32k 20849 unix:0:sfmmu_tsbsize_stat:sf_tsbsz_4m 0 unix:0:sfmmu_tsbsize_stat:sf_tsbsz_512k 5 unix:0:sfmmu_tsbsize_stat:sf_tsbsz_64k 107 unix:0:sfmmu_tsbsize_stat:sf_tsbsz_8k 481329 unix:0:sfmmu_tsbsize_stat:snaptime 8464512.7668636 unix:0:sfmmuid_cache:align 64 unix:0:sfmmuid_cache:alloc 255171 unix:0:sfmmuid_cache:alloc_fail 0 unix:0:sfmmuid_cache:buf_avail 121 unix:0:sfmmuid_cache:buf_constructed 112 unix:0:sfmmuid_cache:buf_inuse 47 unix:0:sfmmuid_cache:buf_max 168 unix:0:sfmmuid_cache:buf_size 184 unix:0:sfmmuid_cache:buf_total 168 unix:0:sfmmuid_cache:chunk_size 192 unix:0:sfmmuid_cache:class kmem_cache unix:0:sfmmuid_cache:crtime 66.2381974 unix:0:sfmmuid_cache:depot_alloc 14506 unix:0:sfmmuid_cache:depot_contention 0 unix:0:sfmmuid_cache:depot_free 14524 unix:0:sfmmuid_cache:empty_magazines 1 unix:0:sfmmuid_cache:free 255142 unix:0:sfmmuid_cache:full_magazines 14 unix:0:sfmmuid_cache:hash_lookup_depth 0 unix:0:sfmmuid_cache:hash_rescale 0 unix:0:sfmmuid_cache:hash_size 0 unix:0:sfmmuid_cache:magazine_size 7 unix:0:sfmmuid_cache:slab_alloc 159 
unix:0:sfmmuid_cache:slab_create 4 unix:0:sfmmuid_cache:slab_destroy 0 unix:0:sfmmuid_cache:slab_free 0 unix:0:sfmmuid_cache:slab_size 8192 unix:0:sfmmuid_cache:snaptime 8464512.7675504 unix:0:sfmmuid_cache:vmem_source 8 unix:0:snode_cache:align 8 unix:0:snode_cache:alloc 2623669 unix:0:snode_cache:alloc_fail 0 unix:0:snode_cache:buf_avail 100 unix:0:snode_cache:buf_constructed 68 unix:0:snode_cache:buf_inuse 271 unix:0:snode_cache:buf_max 371 unix:0:snode_cache:buf_size 152 unix:0:snode_cache:buf_total 371 unix:0:snode_cache:chunk_size 152 unix:0:snode_cache:class kmem_cache unix:0:snode_cache:crtime 69.2862062 unix:0:snode_cache:depot_alloc 131 unix:0:snode_cache:depot_contention 0 unix:0:snode_cache:depot_free 143 unix:0:snode_cache:empty_magazines 1 unix:0:snode_cache:free 2623410 unix:0:snode_cache:full_magazines 8 unix:0:snode_cache:hash_lookup_depth 0 unix:0:snode_cache:hash_rescale 0 unix:0:snode_cache:hash_size 0 unix:0:snode_cache:magazine_size 7 unix:0:snode_cache:slab_alloc 339 unix:0:snode_cache:slab_create 7 unix:0:snode_cache:slab_destroy 0 unix:0:snode_cache:slab_free 0 unix:0:snode_cache:slab_size 8192 unix:0:snode_cache:snaptime 8464512.7689314 unix:0:snode_cache:vmem_source 23 unix:0:socktpi_cache:align 8 unix:0:socktpi_cache:alloc 127370 unix:0:socktpi_cache:alloc_fail 0 unix:0:socktpi_cache:buf_avail 117 unix:0:socktpi_cache:buf_constructed 110 unix:0:socktpi_cache:buf_inuse 36 unix:0:socktpi_cache:buf_max 153 unix:0:socktpi_cache:buf_size 456 unix:0:socktpi_cache:buf_total 153 unix:0:socktpi_cache:chunk_size 456 unix:0:socktpi_cache:class kmem_cache unix:0:socktpi_cache:crtime 83.9814476 unix:0:socktpi_cache:depot_alloc 7071 unix:0:socktpi_cache:depot_contention 0 unix:0:socktpi_cache:depot_free 7110 unix:0:socktpi_cache:empty_magazines 3 unix:0:socktpi_cache:free 127373 unix:0:socktpi_cache:full_magazines 35 unix:0:socktpi_cache:hash_lookup_depth 0 unix:0:socktpi_cache:hash_rescale 0 unix:0:socktpi_cache:hash_size 0 unix:0:socktpi_cache:magazine_size 3 unix:0:socktpi_cache:slab_alloc 146 unix:0:socktpi_cache:slab_create 9 unix:0:socktpi_cache:slab_destroy 0 unix:0:socktpi_cache:slab_free 0 unix:0:socktpi_cache:slab_size 8192 unix:0:socktpi_cache:snaptime 8464512.7704732 unix:0:socktpi_cache:vmem_source 23 unix:0:socktpi_unix_cache:align 8 unix:0:socktpi_unix_cache:alloc 82 unix:0:socktpi_unix_cache:alloc_fail 0 unix:0:socktpi_unix_cache:buf_avail 42 unix:0:socktpi_unix_cache:buf_constructed 26 unix:0:socktpi_unix_cache:buf_inuse 9 unix:0:socktpi_unix_cache:buf_max 51 unix:0:socktpi_unix_cache:buf_size 456 unix:0:socktpi_unix_cache:buf_total 51 unix:0:socktpi_unix_cache:chunk_size 456 unix:0:socktpi_unix_cache:class kmem_cache unix:0:socktpi_unix_cache:crtime 83.9814614 unix:0:socktpi_unix_cache:depot_alloc 1 unix:0:socktpi_unix_cache:depot_contention 0 unix:0:socktpi_unix_cache:depot_free 12 unix:0:socktpi_unix_cache:empty_magazines 1 unix:0:socktpi_unix_cache:free 84 unix:0:socktpi_unix_cache:full_magazines 7 unix:0:socktpi_unix_cache:hash_lookup_depth 0 unix:0:socktpi_unix_cache:hash_rescale 0 unix:0:socktpi_unix_cache:hash_size 0 unix:0:socktpi_unix_cache:magazine_size 3 unix:0:socktpi_unix_cache:slab_alloc 35 unix:0:socktpi_unix_cache:slab_create 3 unix:0:socktpi_unix_cache:slab_destroy 0 unix:0:socktpi_unix_cache:slab_free 0 unix:0:socktpi_unix_cache:slab_size 8192 unix:0:socktpi_unix_cache:snaptime 8464512.7718508 unix:0:socktpi_unix_cache:vmem_source 23 unix:0:soft_ring_cache:align 64 unix:0:soft_ring_cache:alloc 0 unix:0:soft_ring_cache:alloc_fail 0 
unix:0:soft_ring_cache:buf_avail 0 unix:0:soft_ring_cache:buf_constructed 0 unix:0:soft_ring_cache:buf_inuse 0 unix:0:soft_ring_cache:buf_max 0 unix:0:soft_ring_cache:buf_size 176 unix:0:soft_ring_cache:buf_total 0 unix:0:soft_ring_cache:chunk_size 192 unix:0:soft_ring_cache:class kmem_cache unix:0:soft_ring_cache:crtime 84.0829992 unix:0:soft_ring_cache:depot_alloc 0 unix:0:soft_ring_cache:depot_contention 0 unix:0:soft_ring_cache:depot_free 0 unix:0:soft_ring_cache:empty_magazines 0 unix:0:soft_ring_cache:free 0 unix:0:soft_ring_cache:full_magazines 0 unix:0:soft_ring_cache:hash_lookup_depth 0 unix:0:soft_ring_cache:hash_rescale 0 unix:0:soft_ring_cache:hash_size 0 unix:0:soft_ring_cache:magazine_size 7 unix:0:soft_ring_cache:slab_alloc 0 unix:0:soft_ring_cache:slab_create 0 unix:0:soft_ring_cache:slab_destroy 0 unix:0:soft_ring_cache:slab_free 0 unix:0:soft_ring_cache:slab_size 8192 unix:0:soft_ring_cache:snaptime 8464512.7733012 unix:0:soft_ring_cache:vmem_source 23 unix:0:spdsock_1:align 1 unix:0:spdsock_1:alloc 1 unix:0:spdsock_1:alloc_fail 0 unix:0:spdsock_1:buf_avail 64 unix:0:spdsock_1:buf_constructed 1 unix:0:spdsock_1:buf_inuse 0 unix:0:spdsock_1:buf_max 64 unix:0:spdsock_1:buf_size 1 unix:0:spdsock_1:buf_total 64 unix:0:spdsock_1:chunk_size 1 unix:0:spdsock_1:class kmem_cache unix:0:spdsock_1:crtime 87.7639516 unix:0:spdsock_1:depot_alloc 0 unix:0:spdsock_1:depot_contention 0 unix:0:spdsock_1:depot_free 1 unix:0:spdsock_1:empty_magazines 0 unix:0:spdsock_1:free 2 unix:0:spdsock_1:full_magazines 0 unix:0:spdsock_1:hash_lookup_depth 0 unix:0:spdsock_1:hash_rescale 0 unix:0:spdsock_1:hash_size 64 unix:0:spdsock_1:magazine_size 15 unix:0:spdsock_1:slab_alloc 1 unix:0:spdsock_1:slab_create 1 unix:0:spdsock_1:slab_destroy 0 unix:0:spdsock_1:slab_free 0 unix:0:spdsock_1:slab_size 64 unix:0:spdsock_1:snaptime 8464512.7746848 unix:0:spdsock_1:vmem_source 55 unix:0:sppptun_map:align 8 unix:0:sppptun_map:alloc 0 unix:0:sppptun_map:alloc_fail 0 unix:0:sppptun_map:buf_avail 0 unix:0:sppptun_map:buf_constructed 0 unix:0:sppptun_map:buf_inuse 0 unix:0:sppptun_map:buf_max 0 unix:0:sppptun_map:buf_size 432 unix:0:sppptun_map:buf_total 0 unix:0:sppptun_map:chunk_size 432 unix:0:sppptun_map:class kmem_cache unix:0:sppptun_map:crtime 7784070.001292 unix:0:sppptun_map:depot_alloc 0 unix:0:sppptun_map:depot_contention 0 unix:0:sppptun_map:depot_free 0 unix:0:sppptun_map:empty_magazines 0 unix:0:sppptun_map:free 0 unix:0:sppptun_map:full_magazines 0 unix:0:sppptun_map:hash_lookup_depth 0 unix:0:sppptun_map:hash_rescale 0 unix:0:sppptun_map:hash_size 0 unix:0:sppptun_map:magazine_size 3 unix:0:sppptun_map:slab_alloc 0 unix:0:sppptun_map:slab_create 0 unix:0:sppptun_map:slab_destroy 0 unix:0:sppptun_map:slab_free 0 unix:0:sppptun_map:slab_size 8192 unix:0:sppptun_map:snaptime 8464512.776156 unix:0:sppptun_map:vmem_source 23 unix:0:squeue_cache:align 64 unix:0:squeue_cache:alloc 2 unix:0:squeue_cache:alloc_fail 0 unix:0:squeue_cache:buf_avail 40 unix:0:squeue_cache:buf_constructed 0 unix:0:squeue_cache:buf_inuse 2 unix:0:squeue_cache:buf_max 42 unix:0:squeue_cache:buf_size 136 unix:0:squeue_cache:buf_total 42 unix:0:squeue_cache:chunk_size 192 unix:0:squeue_cache:class kmem_cache unix:0:squeue_cache:crtime 83.9784496 unix:0:squeue_cache:depot_alloc 0 unix:0:squeue_cache:depot_contention 0 unix:0:squeue_cache:depot_free 0 unix:0:squeue_cache:empty_magazines 0 unix:0:squeue_cache:free 0 unix:0:squeue_cache:full_magazines 0 unix:0:squeue_cache:hash_lookup_depth 0 unix:0:squeue_cache:hash_rescale 0 
unix:0:squeue_cache:hash_size 0 unix:0:squeue_cache:magazine_size 7 unix:0:squeue_cache:slab_alloc 2 unix:0:squeue_cache:slab_create 1 unix:0:squeue_cache:slab_destroy 0 unix:0:squeue_cache:slab_free 0 unix:0:squeue_cache:slab_size 8192 unix:0:squeue_cache:snaptime 8464512.7775182 unix:0:squeue_cache:vmem_source 23 unix:0:stream_head_cache:align 8 unix:0:stream_head_cache:alloc 429495 unix:0:stream_head_cache:alloc_fail 0 unix:0:stream_head_cache:buf_avail 116 unix:0:stream_head_cache:buf_constructed 106 unix:0:stream_head_cache:buf_inuse 164 unix:0:stream_head_cache:buf_max 280 unix:0:stream_head_cache:buf_size 400 unix:0:stream_head_cache:buf_total 280 unix:0:stream_head_cache:chunk_size 400 unix:0:stream_head_cache:class kmem_cache unix:0:stream_head_cache:crtime 69.2822098 unix:0:stream_head_cache:depot_alloc 8706 unix:0:stream_head_cache:depot_contention 0 unix:0:stream_head_cache:depot_free 8741 unix:0:stream_head_cache:empty_magazines 7 unix:0:stream_head_cache:free 429366 unix:0:stream_head_cache:full_magazines 32 unix:0:stream_head_cache:hash_lookup_depth 0 unix:0:stream_head_cache:hash_rescale 0 unix:0:stream_head_cache:hash_size 0 unix:0:stream_head_cache:magazine_size 3 unix:0:stream_head_cache:slab_alloc 270 unix:0:stream_head_cache:slab_create 14 unix:0:stream_head_cache:slab_destroy 0 unix:0:stream_head_cache:slab_free 0 unix:0:stream_head_cache:slab_size 8192 unix:0:stream_head_cache:snaptime 8464512.7788874 unix:0:stream_head_cache:vmem_source 23 unix:0:streams_dblk_1040:align 64 unix:0:streams_dblk_1040:alloc 68748622 unix:0:streams_dblk_1040:alloc_fail 0 unix:0:streams_dblk_1040:buf_avail 1286 unix:0:streams_dblk_1040:buf_constructed 1280 unix:0:streams_dblk_1040:buf_inuse 16 unix:0:streams_dblk_1040:buf_max 1302 unix:0:streams_dblk_1040:buf_size 1152 unix:0:streams_dblk_1040:buf_total 1302 unix:0:streams_dblk_1040:chunk_size 1152 unix:0:streams_dblk_1040:class kmem_cache unix:0:streams_dblk_1040:crtime 65.9538394 unix:0:streams_dblk_1040:depot_alloc 288516 unix:0:streams_dblk_1040:depot_contention 1 unix:0:streams_dblk_1040:depot_free 288945 unix:0:streams_dblk_1040:empty_magazines 0 unix:0:streams_dblk_1040:free 68749035 unix:0:streams_dblk_1040:full_magazines 425 unix:0:streams_dblk_1040:hash_lookup_depth 0 unix:0:streams_dblk_1040:hash_rescale 2 unix:0:streams_dblk_1040:hash_size 1024 unix:0:streams_dblk_1040:magazine_size 3 unix:0:streams_dblk_1040:slab_alloc 1296 unix:0:streams_dblk_1040:slab_create 186 unix:0:streams_dblk_1040:slab_destroy 0 unix:0:streams_dblk_1040:slab_free 0 unix:0:streams_dblk_1040:slab_size 8192 unix:0:streams_dblk_1040:snaptime 8464512.7804456 unix:0:streams_dblk_1040:vmem_source 23 unix:0:streams_dblk_12112:align 64 unix:0:streams_dblk_12112:alloc 1378 unix:0:streams_dblk_12112:alloc_fail 0 unix:0:streams_dblk_12112:buf_avail 4 unix:0:streams_dblk_12112:buf_constructed 4 unix:0:streams_dblk_12112:buf_inuse 0 unix:0:streams_dblk_12112:buf_max 4 unix:0:streams_dblk_12112:buf_size 12224 unix:0:streams_dblk_12112:buf_total 4 unix:0:streams_dblk_12112:chunk_size 12224 unix:0:streams_dblk_12112:class kmem_cache unix:0:streams_dblk_12112:crtime 65.954064 unix:0:streams_dblk_12112:depot_alloc 412 unix:0:streams_dblk_12112:depot_contention 0 unix:0:streams_dblk_12112:depot_free 417 unix:0:streams_dblk_12112:empty_magazines 1 unix:0:streams_dblk_12112:free 1383 unix:0:streams_dblk_12112:full_magazines 1 unix:0:streams_dblk_12112:hash_lookup_depth 0 unix:0:streams_dblk_12112:hash_rescale 0 unix:0:streams_dblk_12112:hash_size 64 
unix:0:streams_dblk_12112:magazine_size 1 unix:0:streams_dblk_12112:slab_alloc 4 unix:0:streams_dblk_12112:slab_create 2 unix:0:streams_dblk_12112:slab_destroy 0 unix:0:streams_dblk_12112:slab_free 0 unix:0:streams_dblk_12112:slab_size 24576 unix:0:streams_dblk_12112:snaptime 8464512.7818284 unix:0:streams_dblk_12112:vmem_source 23 unix:0:streams_dblk_144:align 64 unix:0:streams_dblk_144:alloc 1429551 unix:0:streams_dblk_144:alloc_fail 0 unix:0:streams_dblk_144:buf_avail 122 unix:0:streams_dblk_144:buf_constructed 110 unix:0:streams_dblk_144:buf_inuse 2 unix:0:streams_dblk_144:buf_max 124 unix:0:streams_dblk_144:buf_size 256 unix:0:streams_dblk_144:buf_total 124 unix:0:streams_dblk_144:chunk_size 256 unix:0:streams_dblk_144:class kmem_cache unix:0:streams_dblk_144:crtime 65.9536922 unix:0:streams_dblk_144:depot_alloc 11767 unix:0:streams_dblk_144:depot_contention 0 unix:0:streams_dblk_144:depot_free 11785 unix:0:streams_dblk_144:empty_magazines 0 unix:0:streams_dblk_144:free 1429567 unix:0:streams_dblk_144:full_magazines 14 unix:0:streams_dblk_144:hash_lookup_depth 0 unix:0:streams_dblk_144:hash_rescale 0 unix:0:streams_dblk_144:hash_size 0 unix:0:streams_dblk_144:magazine_size 7 unix:0:streams_dblk_144:slab_alloc 112 unix:0:streams_dblk_144:slab_create 4 unix:0:streams_dblk_144:slab_destroy 0 unix:0:streams_dblk_144:slab_free 0 unix:0:streams_dblk_144:slab_size 8192 unix:0:streams_dblk_144:snaptime 8464512.7833294 unix:0:streams_dblk_144:vmem_source 23 unix:0:streams_dblk_1488:align 64 unix:0:streams_dblk_1488:alloc 129981 unix:0:streams_dblk_1488:alloc_fail 0 unix:0:streams_dblk_1488:buf_avail 20 unix:0:streams_dblk_1488:buf_constructed 18 unix:0:streams_dblk_1488:buf_inuse 0 unix:0:streams_dblk_1488:buf_max 20 unix:0:streams_dblk_1488:buf_size 1600 unix:0:streams_dblk_1488:buf_total 20 unix:0:streams_dblk_1488:chunk_size 1600 unix:0:streams_dblk_1488:class kmem_cache unix:0:streams_dblk_1488:crtime 65.9538756 unix:0:streams_dblk_1488:depot_alloc 11019 unix:0:streams_dblk_1488:depot_contention 0 unix:0:streams_dblk_1488:depot_free 11026 unix:0:streams_dblk_1488:empty_magazines 0 unix:0:streams_dblk_1488:free 129988 unix:0:streams_dblk_1488:full_magazines 4 unix:0:streams_dblk_1488:hash_lookup_depth 0 unix:0:streams_dblk_1488:hash_rescale 0 unix:0:streams_dblk_1488:hash_size 64 unix:0:streams_dblk_1488:magazine_size 3 unix:0:streams_dblk_1488:slab_alloc 18 unix:0:streams_dblk_1488:slab_create 4 unix:0:streams_dblk_1488:slab_destroy 0 unix:0:streams_dblk_1488:slab_free 0 unix:0:streams_dblk_1488:slab_size 8192 unix:0:streams_dblk_1488:snaptime 8464512.7847878 unix:0:streams_dblk_1488:vmem_source 23 unix:0:streams_dblk_16:align 64 unix:0:streams_dblk_16:alloc 2716029 unix:0:streams_dblk_16:alloc_fail 0 unix:0:streams_dblk_16:buf_avail 137 unix:0:streams_dblk_16:buf_constructed 80 unix:0:streams_dblk_16:buf_inuse 52 unix:0:streams_dblk_16:buf_max 189 unix:0:streams_dblk_16:buf_size 128 unix:0:streams_dblk_16:buf_total 189 unix:0:streams_dblk_16:chunk_size 128 unix:0:streams_dblk_16:class kmem_cache unix:0:streams_dblk_16:crtime 65.9536612 unix:0:streams_dblk_16:depot_alloc 18540 unix:0:streams_dblk_16:depot_contention 1 unix:0:streams_dblk_16:depot_free 18554 unix:0:streams_dblk_16:empty_magazines 2 unix:0:streams_dblk_16:free 2715991 unix:0:streams_dblk_16:full_magazines 10 unix:0:streams_dblk_16:hash_lookup_depth 0 unix:0:streams_dblk_16:hash_rescale 0 unix:0:streams_dblk_16:hash_size 0 unix:0:streams_dblk_16:magazine_size 7 unix:0:streams_dblk_16:slab_alloc 132 
unix:0:streams_dblk_16:slab_create 3 unix:0:streams_dblk_16:slab_destroy 0 unix:0:streams_dblk_16:slab_free 0 unix:0:streams_dblk_16:slab_size 8192 unix:0:streams_dblk_16:snaptime 8464512.786184 unix:0:streams_dblk_16:vmem_source 23 unix:0:streams_dblk_16384:align 64 unix:0:streams_dblk_16384:alloc 551 unix:0:streams_dblk_16384:alloc_fail 0 unix:0:streams_dblk_16384:buf_avail 63 unix:0:streams_dblk_16384:buf_constructed 18 unix:0:streams_dblk_16384:buf_inuse 0 unix:0:streams_dblk_16384:buf_max 63 unix:0:streams_dblk_16384:buf_size 112 unix:0:streams_dblk_16384:buf_total 63 unix:0:streams_dblk_16384:chunk_size 128 unix:0:streams_dblk_16384:class kmem_cache unix:0:streams_dblk_16384:crtime 65.954101 unix:0:streams_dblk_16384:depot_alloc 26 unix:0:streams_dblk_16384:depot_contention 0 unix:0:streams_dblk_16384:depot_free 30 unix:0:streams_dblk_16384:empty_magazines 0 unix:0:streams_dblk_16384:free 555 unix:0:streams_dblk_16384:full_magazines 1 unix:0:streams_dblk_16384:hash_lookup_depth 0 unix:0:streams_dblk_16384:hash_rescale 0 unix:0:streams_dblk_16384:hash_size 0 unix:0:streams_dblk_16384:magazine_size 7 unix:0:streams_dblk_16384:slab_alloc 18 unix:0:streams_dblk_16384:slab_create 1 unix:0:streams_dblk_16384:slab_destroy 0 unix:0:streams_dblk_16384:slab_free 0 unix:0:streams_dblk_16384:slab_size 8192 unix:0:streams_dblk_16384:snaptime 8464512.7875726 unix:0:streams_dblk_16384:vmem_source 23 unix:0:streams_dblk_1936:align 64 unix:0:streams_dblk_1936:alloc 284287 unix:0:streams_dblk_1936:alloc_fail 0 unix:0:streams_dblk_1936:buf_avail 22 unix:0:streams_dblk_1936:buf_constructed 22 unix:0:streams_dblk_1936:buf_inuse 2 unix:0:streams_dblk_1936:buf_max 24 unix:0:streams_dblk_1936:buf_size 2048 unix:0:streams_dblk_1936:buf_total 24 unix:0:streams_dblk_1936:chunk_size 2048 unix:0:streams_dblk_1936:class kmem_cache unix:0:streams_dblk_1936:crtime 65.9539164 unix:0:streams_dblk_1936:depot_alloc 44982 unix:0:streams_dblk_1936:depot_contention 0 unix:0:streams_dblk_1936:depot_free 44989 unix:0:streams_dblk_1936:empty_magazines 1 unix:0:streams_dblk_1936:free 284292 unix:0:streams_dblk_1936:full_magazines 5 unix:0:streams_dblk_1936:hash_lookup_depth 0 unix:0:streams_dblk_1936:hash_rescale 0 unix:0:streams_dblk_1936:hash_size 64 unix:0:streams_dblk_1936:magazine_size 3 unix:0:streams_dblk_1936:slab_alloc 24 unix:0:streams_dblk_1936:slab_create 6 unix:0:streams_dblk_1936:slab_destroy 0 unix:0:streams_dblk_1936:slab_free 0 unix:0:streams_dblk_1936:slab_size 8192 unix:0:streams_dblk_1936:snaptime 8464512.788953 unix:0:streams_dblk_1936:vmem_source 23 unix:0:streams_dblk_20304:align 64 unix:0:streams_dblk_20304:alloc 203 unix:0:streams_dblk_20304:alloc_fail 0 unix:0:streams_dblk_20304:buf_avail 4 unix:0:streams_dblk_20304:buf_constructed 4 unix:0:streams_dblk_20304:buf_inuse 0 unix:0:streams_dblk_20304:buf_max 4 unix:0:streams_dblk_20304:buf_size 20416 unix:0:streams_dblk_20304:buf_total 4 unix:0:streams_dblk_20304:chunk_size 20416 unix:0:streams_dblk_20304:class kmem_cache unix:0:streams_dblk_20304:crtime 65.9541714 unix:0:streams_dblk_20304:depot_alloc 58 unix:0:streams_dblk_20304:depot_contention 0 unix:0:streams_dblk_20304:depot_free 63 unix:0:streams_dblk_20304:empty_magazines 1 unix:0:streams_dblk_20304:free 208 unix:0:streams_dblk_20304:full_magazines 1 unix:0:streams_dblk_20304:hash_lookup_depth 0 unix:0:streams_dblk_20304:hash_rescale 0 unix:0:streams_dblk_20304:hash_size 64 unix:0:streams_dblk_20304:magazine_size 1 unix:0:streams_dblk_20304:slab_alloc 4 unix:0:streams_dblk_20304:slab_create 2 
unix:0:streams_dblk_20304:slab_destroy 0 unix:0:streams_dblk_20304:slab_free 0 unix:0:streams_dblk_20304:slab_size 40960 unix:0:streams_dblk_20304:snaptime 8464512.7904812 unix:0:streams_dblk_20304:vmem_source 23 unix:0:streams_dblk_208:align 64 unix:0:streams_dblk_208:alloc 997487 unix:0:streams_dblk_208:alloc_fail 0 unix:0:streams_dblk_208:buf_avail 63 unix:0:streams_dblk_208:buf_constructed 44 unix:0:streams_dblk_208:buf_inuse 212 unix:0:streams_dblk_208:buf_max 275 unix:0:streams_dblk_208:buf_size 320 unix:0:streams_dblk_208:buf_total 275 unix:0:streams_dblk_208:chunk_size 320 unix:0:streams_dblk_208:class kmem_cache unix:0:streams_dblk_208:crtime 65.953705 unix:0:streams_dblk_208:depot_alloc 77110 unix:0:streams_dblk_208:depot_contention 19 unix:0:streams_dblk_208:depot_free 77138 unix:0:streams_dblk_208:empty_magazines 1 unix:0:streams_dblk_208:free 997347 unix:0:streams_dblk_208:full_magazines 4 unix:0:streams_dblk_208:hash_lookup_depth 0 unix:0:streams_dblk_208:hash_rescale 0 unix:0:streams_dblk_208:hash_size 0 unix:0:streams_dblk_208:magazine_size 7 unix:0:streams_dblk_208:slab_alloc 300 unix:0:streams_dblk_208:slab_create 12 unix:0:streams_dblk_208:slab_destroy 1 unix:0:streams_dblk_208:slab_free 44 unix:0:streams_dblk_208:slab_size 8192 unix:0:streams_dblk_208:snaptime 8464512.7918768 unix:0:streams_dblk_208:vmem_source 23 unix:0:streams_dblk_24576:align 64 unix:0:streams_dblk_24576:alloc 26 unix:0:streams_dblk_24576:alloc_fail 0 unix:0:streams_dblk_24576:buf_avail 63 unix:0:streams_dblk_24576:buf_constructed 7 unix:0:streams_dblk_24576:buf_inuse 0 unix:0:streams_dblk_24576:buf_max 63 unix:0:streams_dblk_24576:buf_size 112 unix:0:streams_dblk_24576:buf_total 63 unix:0:streams_dblk_24576:chunk_size 128 unix:0:streams_dblk_24576:class kmem_cache unix:0:streams_dblk_24576:crtime 65.9542078 unix:0:streams_dblk_24576:depot_alloc 0 unix:0:streams_dblk_24576:depot_contention 0 unix:0:streams_dblk_24576:depot_free 2 unix:0:streams_dblk_24576:empty_magazines 0 unix:0:streams_dblk_24576:free 28 unix:0:streams_dblk_24576:full_magazines 0 unix:0:streams_dblk_24576:hash_lookup_depth 0 unix:0:streams_dblk_24576:hash_rescale 0 unix:0:streams_dblk_24576:hash_size 0 unix:0:streams_dblk_24576:magazine_size 7 unix:0:streams_dblk_24576:slab_alloc 7 unix:0:streams_dblk_24576:slab_create 1 unix:0:streams_dblk_24576:slab_destroy 0 unix:0:streams_dblk_24576:slab_free 0 unix:0:streams_dblk_24576:slab_size 8192 unix:0:streams_dblk_24576:snaptime 8464512.7933298 unix:0:streams_dblk_24576:vmem_source 23 unix:0:streams_dblk_2576:align 64 unix:0:streams_dblk_2576:alloc 134815 unix:0:streams_dblk_2576:alloc_fail 0 unix:0:streams_dblk_2576:buf_avail 62 unix:0:streams_dblk_2576:buf_constructed 62 unix:0:streams_dblk_2576:buf_inuse 1024 unix:0:streams_dblk_2576:buf_max 1086 unix:0:streams_dblk_2576:buf_size 2688 unix:0:streams_dblk_2576:buf_total 1086 unix:0:streams_dblk_2576:chunk_size 2688 unix:0:streams_dblk_2576:class kmem_cache unix:0:streams_dblk_2576:crtime 65.9539434 unix:0:streams_dblk_2576:depot_alloc 23361 unix:0:streams_dblk_2576:depot_contention 1 unix:0:streams_dblk_2576:depot_free 23382 unix:0:streams_dblk_2576:empty_magazines 320 unix:0:streams_dblk_2576:free 133812 unix:0:streams_dblk_2576:full_magazines 18 unix:0:streams_dblk_2576:hash_lookup_depth 0 unix:0:streams_dblk_2576:hash_rescale 1 unix:0:streams_dblk_2576:hash_size 1024 unix:0:streams_dblk_2576:magazine_size 3 unix:0:streams_dblk_2576:slab_alloc 1086 unix:0:streams_dblk_2576:slab_create 362 unix:0:streams_dblk_2576:slab_destroy 0 
unix:0:streams_dblk_2576:slab_free 0 unix:0:streams_dblk_2576:slab_size 8192 unix:0:streams_dblk_2576:snaptime 8464512.7947066 unix:0:streams_dblk_2576:vmem_source 23 unix:0:streams_dblk_272:align 64 unix:0:streams_dblk_272:alloc 195158 unix:0:streams_dblk_272:alloc_fail 0 unix:0:streams_dblk_272:buf_avail 42 unix:0:streams_dblk_272:buf_constructed 22 unix:0:streams_dblk_272:buf_inuse 0 unix:0:streams_dblk_272:buf_max 42 unix:0:streams_dblk_272:buf_size 384 unix:0:streams_dblk_272:buf_total 42 unix:0:streams_dblk_272:chunk_size 384 unix:0:streams_dblk_272:class kmem_cache unix:0:streams_dblk_272:crtime 65.9537174 unix:0:streams_dblk_272:depot_alloc 5550 unix:0:streams_dblk_272:depot_contention 2 unix:0:streams_dblk_272:depot_free 5559 unix:0:streams_dblk_272:empty_magazines 1 unix:0:streams_dblk_272:free 195167 unix:0:streams_dblk_272:full_magazines 5 unix:0:streams_dblk_272:hash_lookup_depth 0 unix:0:streams_dblk_272:hash_rescale 0 unix:0:streams_dblk_272:hash_size 0 unix:0:streams_dblk_272:magazine_size 3 unix:0:streams_dblk_272:slab_alloc 22 unix:0:streams_dblk_272:slab_create 2 unix:0:streams_dblk_272:slab_destroy 0 unix:0:streams_dblk_272:slab_free 0 unix:0:streams_dblk_272:slab_size 8192 unix:0:streams_dblk_272:snaptime 8464512.7961774 unix:0:streams_dblk_272:vmem_source 23 unix:0:streams_dblk_28496:align 64 unix:0:streams_dblk_28496:alloc 11 unix:0:streams_dblk_28496:alloc_fail 0 unix:0:streams_dblk_28496:buf_avail 4 unix:0:streams_dblk_28496:buf_constructed 3 unix:0:streams_dblk_28496:buf_inuse 0 unix:0:streams_dblk_28496:buf_max 4 unix:0:streams_dblk_28496:buf_size 28608 unix:0:streams_dblk_28496:buf_total 4 unix:0:streams_dblk_28496:chunk_size 28608 unix:0:streams_dblk_28496:class kmem_cache unix:0:streams_dblk_28496:crtime 65.9542504 unix:0:streams_dblk_28496:depot_alloc 0 unix:0:streams_dblk_28496:depot_contention 0 unix:0:streams_dblk_28496:depot_free 5 unix:0:streams_dblk_28496:empty_magazines 0 unix:0:streams_dblk_28496:free 16 unix:0:streams_dblk_28496:full_magazines 1 unix:0:streams_dblk_28496:hash_lookup_depth 0 unix:0:streams_dblk_28496:hash_rescale 0 unix:0:streams_dblk_28496:hash_size 64 unix:0:streams_dblk_28496:magazine_size 1 unix:0:streams_dblk_28496:slab_alloc 3 unix:0:streams_dblk_28496:slab_create 2 unix:0:streams_dblk_28496:slab_destroy 0 unix:0:streams_dblk_28496:slab_free 0 unix:0:streams_dblk_28496:slab_size 57344 unix:0:streams_dblk_28496:snaptime 8464512.7975598 unix:0:streams_dblk_28496:vmem_source 23 unix:0:streams_dblk_32768:align 64 unix:0:streams_dblk_32768:alloc 587 unix:0:streams_dblk_32768:alloc_fail 0 unix:0:streams_dblk_32768:buf_avail 62 unix:0:streams_dblk_32768:buf_constructed 21 unix:0:streams_dblk_32768:buf_inuse 1 unix:0:streams_dblk_32768:buf_max 63 unix:0:streams_dblk_32768:buf_size 112 unix:0:streams_dblk_32768:buf_total 63 unix:0:streams_dblk_32768:chunk_size 128 unix:0:streams_dblk_32768:class kmem_cache unix:0:streams_dblk_32768:crtime 65.9542888 unix:0:streams_dblk_32768:depot_alloc 38 unix:0:streams_dblk_32768:depot_contention 0 unix:0:streams_dblk_32768:depot_free 41 unix:0:streams_dblk_32768:empty_magazines 1 unix:0:streams_dblk_32768:free 589 unix:0:streams_dblk_32768:full_magazines 1 unix:0:streams_dblk_32768:hash_lookup_depth 0 unix:0:streams_dblk_32768:hash_rescale 0 unix:0:streams_dblk_32768:hash_size 0 unix:0:streams_dblk_32768:magazine_size 7 unix:0:streams_dblk_32768:slab_alloc 22 unix:0:streams_dblk_32768:slab_create 1 unix:0:streams_dblk_32768:slab_destroy 0 unix:0:streams_dblk_32768:slab_free 0 
unix:0:streams_dblk_32768:slab_size 8192 unix:0:streams_dblk_32768:snaptime 8464512.798931 unix:0:streams_dblk_32768:vmem_source 23 unix:0:streams_dblk_336:align 64 unix:0:streams_dblk_336:alloc 7043 unix:0:streams_dblk_336:alloc_fail 0 unix:0:streams_dblk_336:buf_avail 18 unix:0:streams_dblk_336:buf_constructed 14 unix:0:streams_dblk_336:buf_inuse 0 unix:0:streams_dblk_336:buf_max 18 unix:0:streams_dblk_336:buf_size 448 unix:0:streams_dblk_336:buf_total 18 unix:0:streams_dblk_336:chunk_size 448 unix:0:streams_dblk_336:class kmem_cache unix:0:streams_dblk_336:crtime 65.9537834 unix:0:streams_dblk_336:depot_alloc 790 unix:0:streams_dblk_336:depot_contention 0 unix:0:streams_dblk_336:depot_free 796 unix:0:streams_dblk_336:empty_magazines 0 unix:0:streams_dblk_336:free 7049 unix:0:streams_dblk_336:full_magazines 3 unix:0:streams_dblk_336:hash_lookup_depth 0 unix:0:streams_dblk_336:hash_rescale 0 unix:0:streams_dblk_336:hash_size 0 unix:0:streams_dblk_336:magazine_size 3 unix:0:streams_dblk_336:slab_alloc 14 unix:0:streams_dblk_336:slab_create 1 unix:0:streams_dblk_336:slab_destroy 0 unix:0:streams_dblk_336:slab_free 0 unix:0:streams_dblk_336:slab_size 8192 unix:0:streams_dblk_336:snaptime 8464512.800652 unix:0:streams_dblk_336:vmem_source 23 unix:0:streams_dblk_36688:align 64 unix:0:streams_dblk_36688:alloc 1 unix:0:streams_dblk_36688:alloc_fail 0 unix:0:streams_dblk_36688:buf_avail 2 unix:0:streams_dblk_36688:buf_constructed 1 unix:0:streams_dblk_36688:buf_inuse 0 unix:0:streams_dblk_36688:buf_max 2 unix:0:streams_dblk_36688:buf_size 36800 unix:0:streams_dblk_36688:buf_total 2 unix:0:streams_dblk_36688:chunk_size 36800 unix:0:streams_dblk_36688:class kmem_cache unix:0:streams_dblk_36688:crtime 65.954588 unix:0:streams_dblk_36688:depot_alloc 0 unix:0:streams_dblk_36688:depot_contention 0 unix:0:streams_dblk_36688:depot_free 1 unix:0:streams_dblk_36688:empty_magazines 0 unix:0:streams_dblk_36688:free 2 unix:0:streams_dblk_36688:full_magazines 0 unix:0:streams_dblk_36688:hash_lookup_depth 0 unix:0:streams_dblk_36688:hash_rescale 0 unix:0:streams_dblk_36688:hash_size 64 unix:0:streams_dblk_36688:magazine_size 1 unix:0:streams_dblk_36688:slab_alloc 1 unix:0:streams_dblk_36688:slab_create 1 unix:0:streams_dblk_36688:slab_destroy 0 unix:0:streams_dblk_36688:slab_free 0 unix:0:streams_dblk_36688:slab_size 73728 unix:0:streams_dblk_36688:snaptime 8464512.802093 unix:0:streams_dblk_36688:vmem_source 23 unix:0:streams_dblk_3920:align 64 unix:0:streams_dblk_3920:alloc 2828 unix:0:streams_dblk_3920:alloc_fail 0 unix:0:streams_dblk_3920:buf_avail 6 unix:0:streams_dblk_3920:buf_constructed 5 unix:0:streams_dblk_3920:buf_inuse 0 unix:0:streams_dblk_3920:buf_max 6 unix:0:streams_dblk_3920:buf_size 4032 unix:0:streams_dblk_3920:buf_total 6 unix:0:streams_dblk_3920:chunk_size 4032 unix:0:streams_dblk_3920:class kmem_cache unix:0:streams_dblk_3920:crtime 65.9539806 unix:0:streams_dblk_3920:depot_alloc 1033 unix:0:streams_dblk_3920:depot_contention 0 unix:0:streams_dblk_3920:depot_free 1039 unix:0:streams_dblk_3920:empty_magazines 0 unix:0:streams_dblk_3920:free 2834 unix:0:streams_dblk_3920:full_magazines 3 unix:0:streams_dblk_3920:hash_lookup_depth 0 unix:0:streams_dblk_3920:hash_rescale 0 unix:0:streams_dblk_3920:hash_size 64 unix:0:streams_dblk_3920:magazine_size 1 unix:0:streams_dblk_3920:slab_alloc 5 unix:0:streams_dblk_3920:slab_create 3 unix:0:streams_dblk_3920:slab_destroy 0 unix:0:streams_dblk_3920:slab_free 0 unix:0:streams_dblk_3920:slab_size 8192 unix:0:streams_dblk_3920:snaptime 8464512.8035688 
unix:0:streams_dblk_3920:vmem_source 23 unix:0:streams_dblk_40960:align 64 unix:0:streams_dblk_40960:alloc 1 unix:0:streams_dblk_40960:alloc_fail 0 unix:0:streams_dblk_40960:buf_avail 63 unix:0:streams_dblk_40960:buf_constructed 1 unix:0:streams_dblk_40960:buf_inuse 0 unix:0:streams_dblk_40960:buf_max 63 unix:0:streams_dblk_40960:buf_size 112 unix:0:streams_dblk_40960:buf_total 63 unix:0:streams_dblk_40960:chunk_size 128 unix:0:streams_dblk_40960:class kmem_cache unix:0:streams_dblk_40960:crtime 65.9546168 unix:0:streams_dblk_40960:depot_alloc 0 unix:0:streams_dblk_40960:depot_contention 0 unix:0:streams_dblk_40960:depot_free 1 unix:0:streams_dblk_40960:empty_magazines 0 unix:0:streams_dblk_40960:free 2 unix:0:streams_dblk_40960:full_magazines 0 unix:0:streams_dblk_40960:hash_lookup_depth 0 unix:0:streams_dblk_40960:hash_rescale 0 unix:0:streams_dblk_40960:hash_size 0 unix:0:streams_dblk_40960:magazine_size 7 unix:0:streams_dblk_40960:slab_alloc 1 unix:0:streams_dblk_40960:slab_create 1 unix:0:streams_dblk_40960:slab_destroy 0 unix:0:streams_dblk_40960:slab_free 0 unix:0:streams_dblk_40960:slab_size 8192 unix:0:streams_dblk_40960:snaptime 8464512.8049554 unix:0:streams_dblk_40960:vmem_source 23 unix:0:streams_dblk_44880:align 64 unix:0:streams_dblk_44880:alloc 0 unix:0:streams_dblk_44880:alloc_fail 0 unix:0:streams_dblk_44880:buf_avail 0 unix:0:streams_dblk_44880:buf_constructed 0 unix:0:streams_dblk_44880:buf_inuse 0 unix:0:streams_dblk_44880:buf_max 0 unix:0:streams_dblk_44880:buf_size 44992 unix:0:streams_dblk_44880:buf_total 0 unix:0:streams_dblk_44880:chunk_size 44992 unix:0:streams_dblk_44880:class kmem_cache unix:0:streams_dblk_44880:crtime 65.954655 unix:0:streams_dblk_44880:depot_alloc 0 unix:0:streams_dblk_44880:depot_contention 0 unix:0:streams_dblk_44880:depot_free 0 unix:0:streams_dblk_44880:empty_magazines 0 unix:0:streams_dblk_44880:free 0 unix:0:streams_dblk_44880:full_magazines 0 unix:0:streams_dblk_44880:hash_lookup_depth 0 unix:0:streams_dblk_44880:hash_rescale 0 unix:0:streams_dblk_44880:hash_size 64 unix:0:streams_dblk_44880:magazine_size 1 unix:0:streams_dblk_44880:slab_alloc 0 unix:0:streams_dblk_44880:slab_create 0 unix:0:streams_dblk_44880:slab_destroy 0 unix:0:streams_dblk_44880:slab_free 0 unix:0:streams_dblk_44880:slab_size 90112 unix:0:streams_dblk_44880:snaptime 8464512.8064256 unix:0:streams_dblk_44880:vmem_source 23 unix:0:streams_dblk_49152:align 64 unix:0:streams_dblk_49152:alloc 1 unix:0:streams_dblk_49152:alloc_fail 0 unix:0:streams_dblk_49152:buf_avail 63 unix:0:streams_dblk_49152:buf_constructed 1 unix:0:streams_dblk_49152:buf_inuse 0 unix:0:streams_dblk_49152:buf_max 63 unix:0:streams_dblk_49152:buf_size 112 unix:0:streams_dblk_49152:buf_total 63 unix:0:streams_dblk_49152:chunk_size 128 unix:0:streams_dblk_49152:class kmem_cache unix:0:streams_dblk_49152:crtime 65.9546824 unix:0:streams_dblk_49152:depot_alloc 0 unix:0:streams_dblk_49152:depot_contention 0 unix:0:streams_dblk_49152:depot_free 1 unix:0:streams_dblk_49152:empty_magazines 0 unix:0:streams_dblk_49152:free 2 unix:0:streams_dblk_49152:full_magazines 0 unix:0:streams_dblk_49152:hash_lookup_depth 0 unix:0:streams_dblk_49152:hash_rescale 0 unix:0:streams_dblk_49152:hash_size 0 unix:0:streams_dblk_49152:magazine_size 7 unix:0:streams_dblk_49152:slab_alloc 1 unix:0:streams_dblk_49152:slab_create 1 unix:0:streams_dblk_49152:slab_destroy 0 unix:0:streams_dblk_49152:slab_free 0 unix:0:streams_dblk_49152:slab_size 8192 unix:0:streams_dblk_49152:snaptime 8464512.8077992 
unix:0:streams_dblk_49152:vmem_source 23 unix:0:streams_dblk_528:align 64 unix:0:streams_dblk_528:alloc 66304253 unix:0:streams_dblk_528:alloc_fail 0 unix:0:streams_dblk_528:buf_avail 24 unix:0:streams_dblk_528:buf_constructed 13 unix:0:streams_dblk_528:buf_inuse 0 unix:0:streams_dblk_528:buf_max 24 unix:0:streams_dblk_528:buf_size 640 unix:0:streams_dblk_528:buf_total 24 unix:0:streams_dblk_528:chunk_size 640 unix:0:streams_dblk_528:class kmem_cache unix:0:streams_dblk_528:crtime 65.9538136 unix:0:streams_dblk_528:depot_alloc 2826 unix:0:streams_dblk_528:depot_contention 0 unix:0:streams_dblk_528:depot_free 2832 unix:0:streams_dblk_528:empty_magazines 1 unix:0:streams_dblk_528:free 66304259 unix:0:streams_dblk_528:full_magazines 2 unix:0:streams_dblk_528:hash_lookup_depth 0 unix:0:streams_dblk_528:hash_rescale 0 unix:0:streams_dblk_528:hash_size 0 unix:0:streams_dblk_528:magazine_size 3 unix:0:streams_dblk_528:slab_alloc 13 unix:0:streams_dblk_528:slab_create 2 unix:0:streams_dblk_528:slab_destroy 0 unix:0:streams_dblk_528:slab_free 0 unix:0:streams_dblk_528:slab_size 8192 unix:0:streams_dblk_528:snaptime 8464512.8091728 unix:0:streams_dblk_528:vmem_source 23 unix:0:streams_dblk_53072:align 64 unix:0:streams_dblk_53072:alloc 3 unix:0:streams_dblk_53072:alloc_fail 0 unix:0:streams_dblk_53072:buf_avail 2 unix:0:streams_dblk_53072:buf_constructed 2 unix:0:streams_dblk_53072:buf_inuse 0 unix:0:streams_dblk_53072:buf_max 2 unix:0:streams_dblk_53072:buf_size 53184 unix:0:streams_dblk_53072:buf_total 2 unix:0:streams_dblk_53072:chunk_size 53184 unix:0:streams_dblk_53072:class kmem_cache unix:0:streams_dblk_53072:crtime 65.9547194 unix:0:streams_dblk_53072:depot_alloc 0 unix:0:streams_dblk_53072:depot_contention 0 unix:0:streams_dblk_53072:depot_free 3 unix:0:streams_dblk_53072:empty_magazines 0 unix:0:streams_dblk_53072:free 6 unix:0:streams_dblk_53072:full_magazines 0 unix:0:streams_dblk_53072:hash_lookup_depth 0 unix:0:streams_dblk_53072:hash_rescale 0 unix:0:streams_dblk_53072:hash_size 64 unix:0:streams_dblk_53072:magazine_size 1 unix:0:streams_dblk_53072:slab_alloc 2 unix:0:streams_dblk_53072:slab_create 1 unix:0:streams_dblk_53072:slab_destroy 0 unix:0:streams_dblk_53072:slab_free 0 unix:0:streams_dblk_53072:slab_size 106496 unix:0:streams_dblk_53072:snaptime 8464512.810798 unix:0:streams_dblk_53072:vmem_source 23 unix:0:streams_dblk_57344:align 64 unix:0:streams_dblk_57344:alloc 0 unix:0:streams_dblk_57344:alloc_fail 0 unix:0:streams_dblk_57344:buf_avail 0 unix:0:streams_dblk_57344:buf_constructed 0 unix:0:streams_dblk_57344:buf_inuse 0 unix:0:streams_dblk_57344:buf_max 0 unix:0:streams_dblk_57344:buf_size 112 unix:0:streams_dblk_57344:buf_total 0 unix:0:streams_dblk_57344:chunk_size 128 unix:0:streams_dblk_57344:class kmem_cache unix:0:streams_dblk_57344:crtime 65.9547464 unix:0:streams_dblk_57344:depot_alloc 0 unix:0:streams_dblk_57344:depot_contention 0 unix:0:streams_dblk_57344:depot_free 0 unix:0:streams_dblk_57344:empty_magazines 0 unix:0:streams_dblk_57344:free 0 unix:0:streams_dblk_57344:full_magazines 0 unix:0:streams_dblk_57344:hash_lookup_depth 0 unix:0:streams_dblk_57344:hash_rescale 0 unix:0:streams_dblk_57344:hash_size 0 unix:0:streams_dblk_57344:magazine_size 7 unix:0:streams_dblk_57344:slab_alloc 0 unix:0:streams_dblk_57344:slab_create 0 unix:0:streams_dblk_57344:slab_destroy 0 unix:0:streams_dblk_57344:slab_free 0 unix:0:streams_dblk_57344:slab_size 8192 unix:0:streams_dblk_57344:snaptime 8464512.8122092 unix:0:streams_dblk_57344:vmem_source 23 
unix:0:streams_dblk_61264:align 64 unix:0:streams_dblk_61264:alloc 0 unix:0:streams_dblk_61264:alloc_fail 0 unix:0:streams_dblk_61264:buf_avail 0 unix:0:streams_dblk_61264:buf_constructed 0 unix:0:streams_dblk_61264:buf_inuse 0 unix:0:streams_dblk_61264:buf_max 0 unix:0:streams_dblk_61264:buf_size 61376 unix:0:streams_dblk_61264:buf_total 0 unix:0:streams_dblk_61264:chunk_size 61376 unix:0:streams_dblk_61264:class kmem_cache unix:0:streams_dblk_61264:crtime 65.9547834 unix:0:streams_dblk_61264:depot_alloc 0 unix:0:streams_dblk_61264:depot_contention 0 unix:0:streams_dblk_61264:depot_free 0 unix:0:streams_dblk_61264:empty_magazines 0 unix:0:streams_dblk_61264:free 0 unix:0:streams_dblk_61264:full_magazines 0 unix:0:streams_dblk_61264:hash_lookup_depth 0 unix:0:streams_dblk_61264:hash_rescale 0 unix:0:streams_dblk_61264:hash_size 64 unix:0:streams_dblk_61264:magazine_size 1 unix:0:streams_dblk_61264:slab_alloc 0 unix:0:streams_dblk_61264:slab_create 0 unix:0:streams_dblk_61264:slab_destroy 0 unix:0:streams_dblk_61264:slab_free 0 unix:0:streams_dblk_61264:slab_size 122880 unix:0:streams_dblk_61264:snaptime 8464512.8136752 unix:0:streams_dblk_61264:vmem_source 23 unix:0:streams_dblk_65536:align 64 unix:0:streams_dblk_65536:alloc 162 unix:0:streams_dblk_65536:alloc_fail 0 unix:0:streams_dblk_65536:buf_avail 63 unix:0:streams_dblk_65536:buf_constructed 17 unix:0:streams_dblk_65536:buf_inuse 0 unix:0:streams_dblk_65536:buf_max 63 unix:0:streams_dblk_65536:buf_size 112 unix:0:streams_dblk_65536:buf_total 63 unix:0:streams_dblk_65536:chunk_size 128 unix:0:streams_dblk_65536:class kmem_cache unix:0:streams_dblk_65536:crtime 65.9548108 unix:0:streams_dblk_65536:depot_alloc 16 unix:0:streams_dblk_65536:depot_contention 0 unix:0:streams_dblk_65536:depot_free 18 unix:0:streams_dblk_65536:empty_magazines 1 unix:0:streams_dblk_65536:free 164 unix:0:streams_dblk_65536:full_magazines 0 unix:0:streams_dblk_65536:hash_lookup_depth 0 unix:0:streams_dblk_65536:hash_rescale 0 unix:0:streams_dblk_65536:hash_size 0 unix:0:streams_dblk_65536:magazine_size 7 unix:0:streams_dblk_65536:slab_alloc 17 unix:0:streams_dblk_65536:slab_create 1 unix:0:streams_dblk_65536:slab_destroy 0 unix:0:streams_dblk_65536:slab_free 0 unix:0:streams_dblk_65536:slab_size 8192 unix:0:streams_dblk_65536:snaptime 8464512.8150624 unix:0:streams_dblk_65536:vmem_source 23 unix:0:streams_dblk_69456:align 64 unix:0:streams_dblk_69456:alloc 0 unix:0:streams_dblk_69456:alloc_fail 0 unix:0:streams_dblk_69456:buf_avail 0 unix:0:streams_dblk_69456:buf_constructed 0 unix:0:streams_dblk_69456:buf_inuse 0 unix:0:streams_dblk_69456:buf_max 0 unix:0:streams_dblk_69456:buf_size 69568 unix:0:streams_dblk_69456:buf_total 0 unix:0:streams_dblk_69456:chunk_size 69568 unix:0:streams_dblk_69456:class kmem_cache unix:0:streams_dblk_69456:crtime 65.954848 unix:0:streams_dblk_69456:depot_alloc 0 unix:0:streams_dblk_69456:depot_contention 0 unix:0:streams_dblk_69456:depot_free 0 unix:0:streams_dblk_69456:empty_magazines 0 unix:0:streams_dblk_69456:free 0 unix:0:streams_dblk_69456:full_magazines 0 unix:0:streams_dblk_69456:hash_lookup_depth 0 unix:0:streams_dblk_69456:hash_rescale 0 unix:0:streams_dblk_69456:hash_size 64 unix:0:streams_dblk_69456:magazine_size 1 unix:0:streams_dblk_69456:slab_alloc 0 unix:0:streams_dblk_69456:slab_create 0 unix:0:streams_dblk_69456:slab_destroy 0 unix:0:streams_dblk_69456:slab_free 0 unix:0:streams_dblk_69456:slab_size 139264 unix:0:streams_dblk_69456:snaptime 8464512.8165032 unix:0:streams_dblk_69456:vmem_source 23 
unix:0:streams_dblk_73728:align 64 unix:0:streams_dblk_73728:alloc 0 unix:0:streams_dblk_73728:alloc_fail 0 unix:0:streams_dblk_73728:buf_avail 0 unix:0:streams_dblk_73728:buf_constructed 0 unix:0:streams_dblk_73728:buf_inuse 0 unix:0:streams_dblk_73728:buf_max 0 unix:0:streams_dblk_73728:buf_size 112 unix:0:streams_dblk_73728:buf_total 0 unix:0:streams_dblk_73728:chunk_size 128 unix:0:streams_dblk_73728:class kmem_cache unix:0:streams_dblk_73728:crtime 65.9548752 unix:0:streams_dblk_73728:depot_alloc 0 unix:0:streams_dblk_73728:depot_contention 0 unix:0:streams_dblk_73728:depot_free 0 unix:0:streams_dblk_73728:empty_magazines 0 unix:0:streams_dblk_73728:free 0 unix:0:streams_dblk_73728:full_magazines 0 unix:0:streams_dblk_73728:hash_lookup_depth 0 unix:0:streams_dblk_73728:hash_rescale 0 unix:0:streams_dblk_73728:hash_size 0 unix:0:streams_dblk_73728:magazine_size 7 unix:0:streams_dblk_73728:slab_alloc 0 unix:0:streams_dblk_73728:slab_create 0 unix:0:streams_dblk_73728:slab_destroy 0 unix:0:streams_dblk_73728:slab_free 0 unix:0:streams_dblk_73728:slab_size 8192 unix:0:streams_dblk_73728:snaptime 8464512.817911 unix:0:streams_dblk_73728:vmem_source 23 unix:0:streams_dblk_80:align 64 unix:0:streams_dblk_80:alloc 22750730 unix:0:streams_dblk_80:alloc_fail 0 unix:0:streams_dblk_80:buf_avail 1338 unix:0:streams_dblk_80:buf_constructed 1330 unix:0:streams_dblk_80:buf_inuse 384 unix:0:streams_dblk_80:buf_max 1722 unix:0:streams_dblk_80:buf_size 192 unix:0:streams_dblk_80:buf_total 1722 unix:0:streams_dblk_80:chunk_size 192 unix:0:streams_dblk_80:class kmem_cache unix:0:streams_dblk_80:crtime 65.9536738 unix:0:streams_dblk_80:depot_alloc 1170416 unix:0:streams_dblk_80:depot_contention 65 unix:0:streams_dblk_80:depot_free 1170472 unix:0:streams_dblk_80:empty_magazines 1 unix:0:streams_dblk_80:free 22751452 unix:0:streams_dblk_80:full_magazines 41 unix:0:streams_dblk_80:hash_lookup_depth 0 unix:0:streams_dblk_80:hash_rescale 0 unix:0:streams_dblk_80:hash_size 0 unix:0:streams_dblk_80:magazine_size 31 unix:0:streams_dblk_80:slab_alloc 2764 unix:0:streams_dblk_80:slab_create 53 unix:0:streams_dblk_80:slab_destroy 12 unix:0:streams_dblk_80:slab_free 1050 unix:0:streams_dblk_80:slab_size 8192 unix:0:streams_dblk_80:snaptime 8464512.8192802 unix:0:streams_dblk_80:vmem_source 23 unix:0:streams_dblk_8192:align 64 unix:0:streams_dblk_8192:alloc 18254 unix:0:streams_dblk_8192:alloc_fail 0 unix:0:streams_dblk_8192:buf_avail 63 unix:0:streams_dblk_8192:buf_constructed 27 unix:0:streams_dblk_8192:buf_inuse 0 unix:0:streams_dblk_8192:buf_max 63 unix:0:streams_dblk_8192:buf_size 112 unix:0:streams_dblk_8192:buf_total 63 unix:0:streams_dblk_8192:chunk_size 128 unix:0:streams_dblk_8192:class kmem_cache unix:0:streams_dblk_8192:crtime 65.9540128 unix:0:streams_dblk_8192:depot_alloc 1050 unix:0:streams_dblk_8192:depot_contention 0 unix:0:streams_dblk_8192:depot_free 1055 unix:0:streams_dblk_8192:empty_magazines 0 unix:0:streams_dblk_8192:free 18259 unix:0:streams_dblk_8192:full_magazines 2 unix:0:streams_dblk_8192:hash_lookup_depth 0 unix:0:streams_dblk_8192:hash_rescale 0 unix:0:streams_dblk_8192:hash_size 0 unix:0:streams_dblk_8192:magazine_size 7 unix:0:streams_dblk_8192:slab_alloc 27 unix:0:streams_dblk_8192:slab_create 1 unix:0:streams_dblk_8192:slab_destroy 0 unix:0:streams_dblk_8192:slab_free 0 unix:0:streams_dblk_8192:slab_size 8192 unix:0:streams_dblk_8192:snaptime 8464512.82076 unix:0:streams_dblk_8192:vmem_source 23 unix:0:streams_dblk_esb:align 64 unix:0:streams_dblk_esb:alloc 13606 
unix:0:streams_dblk_esb:alloc_fail 0 unix:0:streams_dblk_esb:buf_avail 63 unix:0:streams_dblk_esb:buf_constructed 20 unix:0:streams_dblk_esb:buf_inuse 0 unix:0:streams_dblk_esb:buf_max 63 unix:0:streams_dblk_esb:buf_size 112 unix:0:streams_dblk_esb:buf_total 63 unix:0:streams_dblk_esb:chunk_size 128 unix:0:streams_dblk_esb:class kmem_cache unix:0:streams_dblk_esb:crtime 65.9549026 unix:0:streams_dblk_esb:depot_alloc 165 unix:0:streams_dblk_esb:depot_contention 0 unix:0:streams_dblk_esb:depot_free 168 unix:0:streams_dblk_esb:empty_magazines 1 unix:0:streams_dblk_esb:free 13609 unix:0:streams_dblk_esb:full_magazines 0 unix:0:streams_dblk_esb:hash_lookup_depth 0 unix:0:streams_dblk_esb:hash_rescale 0 unix:0:streams_dblk_esb:hash_size 0 unix:0:streams_dblk_esb:magazine_size 7 unix:0:streams_dblk_esb:slab_alloc 20 unix:0:streams_dblk_esb:slab_create 1 unix:0:streams_dblk_esb:slab_destroy 0 unix:0:streams_dblk_esb:slab_free 0 unix:0:streams_dblk_esb:slab_size 8192 unix:0:streams_dblk_esb:snaptime 8464512.8222492 unix:0:streams_dblk_esb:vmem_source 23 unix:0:streams_ftblk:align 32 unix:0:streams_ftblk:alloc 0 unix:0:streams_ftblk:alloc_fail 0 unix:0:streams_ftblk:buf_avail 0 unix:0:streams_ftblk:buf_constructed 0 unix:0:streams_ftblk:buf_inuse 0 unix:0:streams_ftblk:buf_max 0 unix:0:streams_ftblk:buf_size 232 unix:0:streams_ftblk:buf_total 0 unix:0:streams_ftblk:chunk_size 256 unix:0:streams_ftblk:class kmem_cache unix:0:streams_ftblk:crtime 65.9561118 unix:0:streams_ftblk:depot_alloc 0 unix:0:streams_ftblk:depot_contention 0 unix:0:streams_ftblk:depot_free 0 unix:0:streams_ftblk:empty_magazines 0 unix:0:streams_ftblk:free 0 unix:0:streams_ftblk:full_magazines 0 unix:0:streams_ftblk:hash_lookup_depth 0 unix:0:streams_ftblk:hash_rescale 0 unix:0:streams_ftblk:hash_size 0 unix:0:streams_ftblk:magazine_size 7 unix:0:streams_ftblk:slab_alloc 0 unix:0:streams_ftblk:slab_create 0 unix:0:streams_ftblk:slab_destroy 0 unix:0:streams_ftblk:slab_free 0 unix:0:streams_ftblk:slab_size 8192 unix:0:streams_ftblk:snaptime 8464512.8236326 unix:0:streams_ftblk:vmem_source 23 unix:0:streams_fthdr:align 32 unix:0:streams_fthdr:alloc 0 unix:0:streams_fthdr:alloc_fail 0 unix:0:streams_fthdr:buf_avail 0 unix:0:streams_fthdr:buf_constructed 0 unix:0:streams_fthdr:buf_inuse 0 unix:0:streams_fthdr:buf_max 0 unix:0:streams_fthdr:buf_size 264 unix:0:streams_fthdr:buf_total 0 unix:0:streams_fthdr:chunk_size 288 unix:0:streams_fthdr:class kmem_cache unix:0:streams_fthdr:crtime 65.9560984 unix:0:streams_fthdr:depot_alloc 0 unix:0:streams_fthdr:depot_contention 0 unix:0:streams_fthdr:depot_free 0 unix:0:streams_fthdr:empty_magazines 0 unix:0:streams_fthdr:free 0 unix:0:streams_fthdr:full_magazines 0 unix:0:streams_fthdr:hash_lookup_depth 0 unix:0:streams_fthdr:hash_rescale 0 unix:0:streams_fthdr:hash_size 0 unix:0:streams_fthdr:magazine_size 3 unix:0:streams_fthdr:slab_alloc 0 unix:0:streams_fthdr:slab_create 0 unix:0:streams_fthdr:slab_destroy 0 unix:0:streams_fthdr:slab_free 0 unix:0:streams_fthdr:slab_size 8192 unix:0:streams_fthdr:snaptime 8464512.8251058 unix:0:streams_fthdr:vmem_source 23 unix:0:streams_mblk:align 32 unix:0:streams_mblk:alloc 1067092 unix:0:streams_mblk:alloc_fail 0 unix:0:streams_mblk:buf_avail 127 unix:0:streams_mblk:buf_constructed 51 unix:0:streams_mblk:buf_inuse 4826 unix:0:streams_mblk:buf_max 4953 unix:0:streams_mblk:buf_size 64 unix:0:streams_mblk:buf_total 4953 unix:0:streams_mblk:chunk_size 64 unix:0:streams_mblk:class kmem_cache unix:0:streams_mblk:crtime 65.953626 
unix:0:streams_mblk:depot_alloc 9114 unix:0:streams_mblk:depot_contention 0 unix:0:streams_mblk:depot_free 9119 unix:0:streams_mblk:empty_magazines 56 unix:0:streams_mblk:free 1062271 unix:0:streams_mblk:full_magazines 2 unix:0:streams_mblk:hash_lookup_depth 0 unix:0:streams_mblk:hash_rescale 0 unix:0:streams_mblk:hash_size 0 unix:0:streams_mblk:magazine_size 15 unix:0:streams_mblk:slab_alloc 4877 unix:0:streams_mblk:slab_create 39 unix:0:streams_mblk:slab_destroy 0 unix:0:streams_mblk:slab_free 0 unix:0:streams_mblk:slab_size 8192 unix:0:streams_mblk:snaptime 8464512.8264698 unix:0:streams_mblk:vmem_source 23 unix:0:svnode_cache:align 8 unix:0:svnode_cache:alloc 0 unix:0:svnode_cache:alloc_fail 0 unix:0:svnode_cache:buf_avail 0 unix:0:svnode_cache:buf_constructed 0 unix:0:svnode_cache:buf_inuse 0 unix:0:svnode_cache:buf_max 0 unix:0:svnode_cache:buf_size 40 unix:0:svnode_cache:buf_total 0 unix:0:svnode_cache:chunk_size 40 unix:0:svnode_cache:class kmem_cache unix:0:svnode_cache:crtime 114.1495182 unix:0:svnode_cache:depot_alloc 0 unix:0:svnode_cache:depot_contention 0 unix:0:svnode_cache:depot_free 0 unix:0:svnode_cache:empty_magazines 0 unix:0:svnode_cache:free 0 unix:0:svnode_cache:full_magazines 0 unix:0:svnode_cache:hash_lookup_depth 0 unix:0:svnode_cache:hash_rescale 0 unix:0:svnode_cache:hash_size 0 unix:0:svnode_cache:magazine_size 15 unix:0:svnode_cache:slab_alloc 0 unix:0:svnode_cache:slab_create 0 unix:0:svnode_cache:slab_destroy 0 unix:0:svnode_cache:slab_free 0 unix:0:svnode_cache:slab_size 8192 unix:0:svnode_cache:snaptime 8464512.8279408 unix:0:svnode_cache:vmem_source 23 unix:0:syncq_cache:align 8 unix:0:syncq_cache:alloc 379 unix:0:syncq_cache:alloc_fail 0 unix:0:syncq_cache:buf_avail 33 unix:0:syncq_cache:buf_constructed 30 unix:0:syncq_cache:buf_inuse 17 unix:0:syncq_cache:buf_max 50 unix:0:syncq_cache:buf_size 160 unix:0:syncq_cache:buf_total 50 unix:0:syncq_cache:chunk_size 160 unix:0:syncq_cache:class kmem_cache unix:0:syncq_cache:crtime 69.2822404 unix:0:syncq_cache:depot_alloc 6 unix:0:syncq_cache:depot_contention 0 unix:0:syncq_cache:depot_free 11 unix:0:syncq_cache:empty_magazines 1 unix:0:syncq_cache:free 367 unix:0:syncq_cache:full_magazines 2 unix:0:syncq_cache:hash_lookup_depth 0 unix:0:syncq_cache:hash_rescale 0 unix:0:syncq_cache:hash_size 0 unix:0:syncq_cache:magazine_size 7 unix:0:syncq_cache:slab_alloc 47 unix:0:syncq_cache:slab_create 1 unix:0:syncq_cache:slab_destroy 0 unix:0:syncq_cache:slab_free 0 unix:0:syncq_cache:slab_size 8192 unix:0:syncq_cache:snaptime 8464512.8293138 unix:0:syncq_cache:vmem_source 23 unix:0:sysinfo:class misc unix:0:sysinfo:crtime 65.9026164 unix:0:sysinfo:runocc 1519 unix:0:sysinfo:runque 2491 unix:0:sysinfo:snaptime 8464512.8307638 unix:0:sysinfo:swpocc 0 unix:0:sysinfo:swpque 0 unix:0:sysinfo:updates 8464319 unix:0:sysinfo:waiting 245 unix:0:system_misc:avenrun_15min 4 unix:0:system_misc:avenrun_1min 9 unix:0:system_misc:avenrun_5min 7 unix:0:system_misc:boot_time 1211478887 unix:0:system_misc:class misc unix:0:system_misc:clk_intr 846444356 unix:0:system_misc:crtime 65.9026734 unix:0:system_misc:deficit 0 unix:0:system_misc:lbolt 846444356 unix:0:system_misc:ncpus 2 unix:0:system_misc:nproc 50 unix:0:system_misc:snaptime 8464512.8312748 unix:0:system_misc:vac 1 unix:0:system_pages:availrmem 1001204 unix:0:system_pages:class pages unix:0:system_pages:crtime 65.9026824 unix:0:system_pages:desfree 7993 unix:0:system_pages:desscan 25 unix:0:system_pages:econtig 54525952 unix:0:system_pages:fastscan 135367 
unix:0:system_pages:freemem 980770 unix:0:system_pages:kernelbase 16777216 unix:0:system_pages:lotsfree 15987 unix:0:system_pages:minfree 3996 unix:0:system_pages:nalloc 22138687 unix:0:system_pages:nalloc_calls 10991 unix:0:system_pages:nfree 21788447 unix:0:system_pages:nfree_calls 6213 unix:0:system_pages:nscan 0 unix:0:system_pages:pagesfree 980770 unix:0:system_pages:pageslocked 22014 unix:0:system_pages:pagestotal 1023218 unix:0:system_pages:physmem 1040081 unix:0:system_pages:pp_kernel 45168 unix:0:system_pages:slowscan 100 unix:0:system_pages:snaptime 8464512.831966 unix:0:system_taskq:bexecuted 2934 unix:0:system_taskq:bmaxtasks 2 unix:0:system_taskq:bnactive 0 unix:0:system_taskq:bnalloc 4 unix:0:system_taskq:btasks 2934 unix:0:system_taskq:btotaltime 162737000 unix:0:system_taskq:class taskq_d unix:0:system_taskq:crtime 69.2739324 unix:0:system_taskq:disptcreates 0 unix:0:system_taskq:hits 870487 unix:0:system_taskq:maxthreads 6 unix:0:system_taskq:misses 3132 unix:0:system_taskq:nalloc 0 unix:0:system_taskq:nfree 3 unix:0:system_taskq:nomem 0 unix:0:system_taskq:overflows 0 unix:0:system_taskq:priority 60 unix:0:system_taskq:snaptime 8464512.83321 unix:0:system_taskq:tcreates 2936 unix:0:system_taskq:tdeaths 2933 unix:0:system_taskq:totaltime 92654176400 unix:0:task_cache:align 8 unix:0:task_cache:alloc 431 unix:0:task_cache:alloc_fail 0 unix:0:task_cache:buf_avail 41 unix:0:task_cache:buf_constructed 15 unix:0:task_cache:buf_inuse 31 unix:0:task_cache:buf_max 72 unix:0:task_cache:buf_size 112 unix:0:task_cache:buf_total 72 unix:0:task_cache:chunk_size 112 unix:0:task_cache:class kmem_cache unix:0:task_cache:crtime 69.2631106 unix:0:task_cache:depot_alloc 2 unix:0:task_cache:depot_contention 0 unix:0:task_cache:depot_free 5 unix:0:task_cache:empty_magazines 1 unix:0:task_cache:free 403 unix:0:task_cache:full_magazines 0 unix:0:task_cache:hash_lookup_depth 0 unix:0:task_cache:hash_rescale 0 unix:0:task_cache:hash_size 0 unix:0:task_cache:magazine_size 7 unix:0:task_cache:slab_alloc 46 unix:0:task_cache:slab_create 1 unix:0:task_cache:slab_destroy 0 unix:0:task_cache:slab_free 0 unix:0:task_cache:slab_size 8192 unix:0:task_cache:snaptime 8464512.8342518 unix:0:task_cache:vmem_source 23 unix:0:taskq_cache:align 8 unix:0:taskq_cache:alloc 64 unix:0:taskq_cache:alloc_fail 0 unix:0:taskq_cache:buf_avail 16 unix:0:taskq_cache:buf_constructed 1 unix:0:taskq_cache:buf_inuse 58 unix:0:taskq_cache:buf_max 74 unix:0:taskq_cache:buf_size 216 unix:0:taskq_cache:buf_total 74 unix:0:taskq_cache:chunk_size 216 unix:0:taskq_cache:class kmem_cache unix:0:taskq_cache:crtime 66.0090962 unix:0:taskq_cache:depot_alloc 0 unix:0:taskq_cache:depot_contention 0 unix:0:taskq_cache:depot_free 2 unix:0:taskq_cache:empty_magazines 0 unix:0:taskq_cache:free 8 unix:0:taskq_cache:full_magazines 0 unix:0:taskq_cache:hash_lookup_depth 0 unix:0:taskq_cache:hash_rescale 0 unix:0:taskq_cache:hash_size 0 unix:0:taskq_cache:magazine_size 7 unix:0:taskq_cache:slab_alloc 59 unix:0:taskq_cache:slab_create 2 unix:0:taskq_cache:slab_destroy 0 unix:0:taskq_cache:slab_free 0 unix:0:taskq_cache:slab_size 8192 unix:0:taskq_cache:snaptime 8464512.835701 unix:0:taskq_cache:vmem_source 23 unix:0:taskq_ent_cache:align 8 unix:0:taskq_ent_cache:alloc 4628 unix:0:taskq_ent_cache:alloc_fail 0 unix:0:taskq_ent_cache:buf_avail 90 unix:0:taskq_ent_cache:buf_constructed 29 unix:0:taskq_ent_cache:buf_inuse 1650 unix:0:taskq_ent_cache:buf_max 1740 unix:0:taskq_ent_cache:buf_size 56 unix:0:taskq_ent_cache:buf_total 1740 
unix:0:taskq_ent_cache:chunk_size 56 unix:0:taskq_ent_cache:class kmem_cache unix:0:taskq_ent_cache:crtime 66.0090826 unix:0:taskq_ent_cache:depot_alloc 10 unix:0:taskq_ent_cache:depot_contention 0 unix:0:taskq_ent_cache:depot_free 14 unix:0:taskq_ent_cache:empty_magazines 1 unix:0:taskq_ent_cache:free 2982 unix:0:taskq_ent_cache:full_magazines 0 unix:0:taskq_ent_cache:hash_lookup_depth 0 unix:0:taskq_ent_cache:hash_rescale 0 unix:0:taskq_ent_cache:hash_size 0 unix:0:taskq_ent_cache:magazine_size 15 unix:0:taskq_ent_cache:slab_alloc 1679 unix:0:taskq_ent_cache:slab_create 12 unix:0:taskq_ent_cache:slab_destroy 0 unix:0:taskq_ent_cache:slab_free 0 unix:0:taskq_ent_cache:slab_size 8192 unix:0:taskq_ent_cache:snaptime 8464512.8371488 unix:0:taskq_ent_cache:vmem_source 23 unix:0:tcp_iphc_cache:align 8 unix:0:tcp_iphc_cache:alloc 100008 unix:0:tcp_iphc_cache:alloc_fail 0 unix:0:tcp_iphc_cache:buf_avail 162 unix:0:tcp_iphc_cache:buf_constructed 119 unix:0:tcp_iphc_cache:buf_inuse 39 unix:0:tcp_iphc_cache:buf_max 201 unix:0:tcp_iphc_cache:buf_size 120 unix:0:tcp_iphc_cache:buf_total 201 unix:0:tcp_iphc_cache:chunk_size 120 unix:0:tcp_iphc_cache:class kmem_cache unix:0:tcp_iphc_cache:crtime 83.9783932 unix:0:tcp_iphc_cache:depot_alloc 310 unix:0:tcp_iphc_cache:depot_contention 0 unix:0:tcp_iphc_cache:depot_free 329 unix:0:tcp_iphc_cache:empty_magazines 2 unix:0:tcp_iphc_cache:free 99988 unix:0:tcp_iphc_cache:full_magazines 15 unix:0:tcp_iphc_cache:hash_lookup_depth 0 unix:0:tcp_iphc_cache:hash_rescale 0 unix:0:tcp_iphc_cache:hash_size 0 unix:0:tcp_iphc_cache:magazine_size 7 unix:0:tcp_iphc_cache:slab_alloc 158 unix:0:tcp_iphc_cache:slab_create 3 unix:0:tcp_iphc_cache:slab_destroy 0 unix:0:tcp_iphc_cache:slab_free 0 unix:0:tcp_iphc_cache:slab_size 8192 unix:0:tcp_iphc_cache:snaptime 8464512.8385472 unix:0:tcp_iphc_cache:vmem_source 23 unix:0:tcp_sack_info_cache:align 8 unix:0:tcp_sack_info_cache:alloc 99076 unix:0:tcp_sack_info_cache:alloc_fail 0 unix:0:tcp_sack_info_cache:buf_avail 198 unix:0:tcp_sack_info_cache:buf_constructed 128 unix:0:tcp_sack_info_cache:buf_inuse 4 unix:0:tcp_sack_info_cache:buf_max 202 unix:0:tcp_sack_info_cache:buf_size 80 unix:0:tcp_sack_info_cache:buf_total 202 unix:0:tcp_sack_info_cache:chunk_size 80 unix:0:tcp_sack_info_cache:class kmem_cache unix:0:tcp_sack_info_cache:crtime 83.9783778 unix:0:tcp_sack_info_cache:depot_alloc 306 unix:0:tcp_sack_info_cache:depot_contention 0 unix:0:tcp_sack_info_cache:depot_free 326 unix:0:tcp_sack_info_cache:empty_magazines 1 unix:0:tcp_sack_info_cache:free 99092 unix:0:tcp_sack_info_cache:full_magazines 16 unix:0:tcp_sack_info_cache:hash_lookup_depth 0 unix:0:tcp_sack_info_cache:hash_rescale 0 unix:0:tcp_sack_info_cache:hash_size 0 unix:0:tcp_sack_info_cache:magazine_size 7 unix:0:tcp_sack_info_cache:slab_alloc 132 unix:0:tcp_sack_info_cache:slab_create 2 unix:0:tcp_sack_info_cache:slab_destroy 0 unix:0:tcp_sack_info_cache:slab_free 0 unix:0:tcp_sack_info_cache:slab_size 8192 unix:0:tcp_sack_info_cache:snaptime 8464512.839933 unix:0:tcp_sack_info_cache:vmem_source 23 unix:0:tcp_timercache:align 8 unix:0:tcp_timercache:alloc 100103 unix:0:tcp_timercache:alloc_fail 0 unix:0:tcp_timercache:buf_avail 180 unix:0:tcp_timercache:buf_constructed 109 unix:0:tcp_timercache:buf_inuse 280 unix:0:tcp_timercache:buf_max 460 unix:0:tcp_timercache:buf_size 88 unix:0:tcp_timercache:buf_total 460 unix:0:tcp_timercache:chunk_size 88 unix:0:tcp_timercache:class kmem_cache unix:0:tcp_timercache:crtime 83.9783444 unix:0:tcp_timercache:depot_alloc 2523 
unix:0:tcp_timercache:depot_contention 0 unix:0:tcp_timercache:depot_free 2539 unix:0:tcp_timercache:empty_magazines 1 unix:0:tcp_timercache:free 99839 unix:0:tcp_timercache:full_magazines 13 unix:0:tcp_timercache:hash_lookup_depth 0 unix:0:tcp_timercache:hash_rescale 0 unix:0:tcp_timercache:hash_size 0 unix:0:tcp_timercache:magazine_size 7 unix:0:tcp_timercache:slab_alloc 389 unix:0:tcp_timercache:slab_create 5 unix:0:tcp_timercache:slab_destroy 0 unix:0:tcp_timercache:slab_free 0 unix:0:tcp_timercache:slab_size 8192 unix:0:tcp_timercache:snaptime 8464512.8413854 unix:0:tcp_timercache:vmem_source 23 unix:0:thread_cache:align 32 unix:0:thread_cache:alloc 499827 unix:0:thread_cache:alloc_fail 0 unix:0:thread_cache:buf_avail 104 unix:0:thread_cache:buf_constructed 104 unix:0:thread_cache:buf_inuse 176 unix:0:thread_cache:buf_max 280 unix:0:thread_cache:buf_size 800 unix:0:thread_cache:buf_total 280 unix:0:thread_cache:chunk_size 800 unix:0:thread_cache:class kmem_cache unix:0:thread_cache:crtime 69.261989 unix:0:thread_cache:depot_alloc 2617 unix:0:thread_cache:depot_contention 0 unix:0:thread_cache:depot_free 2654 unix:0:thread_cache:empty_magazines 5 unix:0:thread_cache:free 499688 unix:0:thread_cache:full_magazines 34 unix:0:thread_cache:hash_lookup_depth 0 unix:0:thread_cache:hash_rescale 0 unix:0:thread_cache:hash_size 0 unix:0:thread_cache:magazine_size 3 unix:0:thread_cache:slab_alloc 280 unix:0:thread_cache:slab_create 28 unix:0:thread_cache:slab_destroy 0 unix:0:thread_cache:slab_free 0 unix:0:thread_cache:slab_size 8192 unix:0:thread_cache:snaptime 8464512.8428274 unix:0:thread_cache:vmem_source 8 unix:0:timer_cache:align 8 unix:0:timer_cache:alloc 1 unix:0:timer_cache:alloc_fail 0 unix:0:timer_cache:buf_avail 58 unix:0:timer_cache:buf_constructed 0 unix:0:timer_cache:buf_inuse 1 unix:0:timer_cache:buf_max 59 unix:0:timer_cache:buf_size 136 unix:0:timer_cache:buf_total 59 unix:0:timer_cache:chunk_size 136 unix:0:timer_cache:class kmem_cache unix:0:timer_cache:crtime 69.285759 unix:0:timer_cache:depot_alloc 0 unix:0:timer_cache:depot_contention 0 unix:0:timer_cache:depot_free 0 unix:0:timer_cache:empty_magazines 0 unix:0:timer_cache:free 0 unix:0:timer_cache:full_magazines 0 unix:0:timer_cache:hash_lookup_depth 0 unix:0:timer_cache:hash_rescale 0 unix:0:timer_cache:hash_size 0 unix:0:timer_cache:magazine_size 7 unix:0:timer_cache:slab_alloc 1 unix:0:timer_cache:slab_create 1 unix:0:timer_cache:slab_destroy 0 unix:0:timer_cache:slab_free 0 unix:0:timer_cache:slab_size 8192 unix:0:timer_cache:snaptime 8464512.844228 unix:0:timer_cache:vmem_source 23 unix:0:tl_cache:align 8 unix:0:tl_cache:alloc 931 unix:0:tl_cache:alloc_fail 0 unix:0:tl_cache:buf_avail 33 unix:0:tl_cache:buf_constructed 26 unix:0:tl_cache:buf_inuse 39 unix:0:tl_cache:buf_max 72 unix:0:tl_cache:buf_size 432 unix:0:tl_cache:buf_total 72 unix:0:tl_cache:chunk_size 432 unix:0:tl_cache:class kmem_cache unix:0:tl_cache:crtime 87.6511908 unix:0:tl_cache:depot_alloc 17 unix:0:tl_cache:depot_contention 0 unix:0:tl_cache:depot_free 26 unix:0:tl_cache:empty_magazines 2 unix:0:tl_cache:free 901 unix:0:tl_cache:full_magazines 6 unix:0:tl_cache:hash_lookup_depth 0 unix:0:tl_cache:hash_rescale 0 unix:0:tl_cache:hash_size 0 unix:0:tl_cache:magazine_size 3 unix:0:tl_cache:slab_alloc 65 unix:0:tl_cache:slab_create 4 unix:0:tl_cache:slab_destroy 0 unix:0:tl_cache:slab_free 0 unix:0:tl_cache:slab_size 8192 unix:0:tl_cache:snaptime 8464512.84561 unix:0:tl_cache:vmem_source 23 unix:0:tslabel_cache:align 8 unix:0:tslabel_cache:alloc 2 
unix:0:tslabel_cache:alloc_fail 0 unix:0:tslabel_cache:buf_avail 167 unix:0:tslabel_cache:buf_constructed 0 unix:0:tslabel_cache:buf_inuse 2 unix:0:tslabel_cache:buf_max 169 unix:0:tslabel_cache:buf_size 48 unix:0:tslabel_cache:buf_total 169 unix:0:tslabel_cache:chunk_size 48 unix:0:tslabel_cache:class kmem_cache unix:0:tslabel_cache:crtime 69.2622572 unix:0:tslabel_cache:depot_alloc 0 unix:0:tslabel_cache:depot_contention 0 unix:0:tslabel_cache:depot_free 0 unix:0:tslabel_cache:empty_magazines 0 unix:0:tslabel_cache:free 0 unix:0:tslabel_cache:full_magazines 0 unix:0:tslabel_cache:hash_lookup_depth 0 unix:0:tslabel_cache:hash_rescale 0 unix:0:tslabel_cache:hash_size 0 unix:0:tslabel_cache:magazine_size 15 unix:0:tslabel_cache:slab_alloc 2 unix:0:tslabel_cache:slab_create 1 unix:0:tslabel_cache:slab_destroy 0 unix:0:tslabel_cache:slab_free 0 unix:0:tslabel_cache:slab_size 8192 unix:0:tslabel_cache:snaptime 8464512.8470574 unix:0:tslabel_cache:vmem_source 23 unix:0:turnstile_cache:align 8 unix:0:turnstile_cache:alloc 576587 unix:0:turnstile_cache:alloc_fail 0 unix:0:turnstile_cache:buf_avail 148 unix:0:turnstile_cache:buf_constructed 105 unix:0:turnstile_cache:buf_inuse 360 unix:0:turnstile_cache:buf_max 508 unix:0:turnstile_cache:buf_size 64 unix:0:turnstile_cache:buf_total 508 unix:0:turnstile_cache:chunk_size 64 unix:0:turnstile_cache:class kmem_cache unix:0:turnstile_cache:crtime 69.2620462 unix:0:turnstile_cache:depot_alloc 2901 unix:0:turnstile_cache:depot_contention 0 unix:0:turnstile_cache:depot_free 2910 unix:0:turnstile_cache:empty_magazines 2 unix:0:turnstile_cache:free 576236 unix:0:turnstile_cache:full_magazines 5 unix:0:turnstile_cache:hash_lookup_depth 0 unix:0:turnstile_cache:hash_rescale 0 unix:0:turnstile_cache:hash_size 0 unix:0:turnstile_cache:magazine_size 15 unix:0:turnstile_cache:slab_alloc 465 unix:0:turnstile_cache:slab_create 4 unix:0:turnstile_cache:slab_destroy 0 unix:0:turnstile_cache:slab_free 0 unix:0:turnstile_cache:slab_size 8192 unix:0:turnstile_cache:snaptime 8464512.848517 unix:0:turnstile_cache:vmem_source 23 unix:0:udp_cache:align 64 unix:0:udp_cache:alloc 281468 unix:0:udp_cache:alloc_fail 0 unix:0:udp_cache:buf_avail 13 unix:0:udp_cache:buf_constructed 8 unix:0:udp_cache:buf_inuse 23 unix:0:udp_cache:buf_max 36 unix:0:udp_cache:buf_size 440 unix:0:udp_cache:buf_total 36 unix:0:udp_cache:chunk_size 448 unix:0:udp_cache:class kmem_cache unix:0:udp_cache:crtime 84.1476786 unix:0:udp_cache:depot_alloc 23 unix:0:udp_cache:depot_contention 0 unix:0:udp_cache:depot_free 28 unix:0:udp_cache:empty_magazines 1 unix:0:udp_cache:free 281450 unix:0:udp_cache:full_magazines 1 unix:0:udp_cache:hash_lookup_depth 0 unix:0:udp_cache:hash_rescale 0 unix:0:udp_cache:hash_size 0 unix:0:udp_cache:magazine_size 3 unix:0:udp_cache:slab_alloc 31 unix:0:udp_cache:slab_create 2 unix:0:udp_cache:slab_destroy 0 unix:0:udp_cache:slab_free 0 unix:0:udp_cache:slab_size 8192 unix:0:udp_cache:snaptime 8464512.8499004 unix:0:udp_cache:vmem_source 23 unix:0:ue_queue:cancelled 0 unix:0:ue_queue:class errorq unix:0:ue_queue:commit_fail 0 unix:0:ue_queue:committed 0 unix:0:ue_queue:crtime 69.2660348 unix:0:ue_queue:dispatched 0 unix:0:ue_queue:dropped 0 unix:0:ue_queue:logged 0 unix:0:ue_queue:reserve_fail 0 unix:0:ue_queue:reserved 0 unix:0:ue_queue:snaptime 8464512.8513214 unix:0:ufs_inode_cache:align 8 unix:0:ufs_inode_cache:alloc 19575 unix:0:ufs_inode_cache:alloc_fail 0 unix:0:ufs_inode_cache:buf_avail 5 unix:0:ufs_inode_cache:buf_constructed 0 unix:0:ufs_inode_cache:buf_inuse 19575 
unix:0:ufs_inode_cache:buf_max 19580 unix:0:ufs_inode_cache:buf_size 368 unix:0:ufs_inode_cache:buf_total 19580 unix:0:ufs_inode_cache:chunk_size 368 unix:0:ufs_inode_cache:class kmem_cache unix:0:ufs_inode_cache:crtime 69.290543 unix:0:ufs_inode_cache:depot_alloc 0 unix:0:ufs_inode_cache:depot_contention 0 unix:0:ufs_inode_cache:depot_free 0 unix:0:ufs_inode_cache:empty_magazines 0 unix:0:ufs_inode_cache:free 0 unix:0:ufs_inode_cache:full_magazines 0 unix:0:ufs_inode_cache:hash_lookup_depth 0 unix:0:ufs_inode_cache:hash_rescale 0 unix:0:ufs_inode_cache:hash_size 0 unix:0:ufs_inode_cache:magazine_size 3 unix:0:ufs_inode_cache:slab_alloc 19575 unix:0:ufs_inode_cache:slab_create 890 unix:0:ufs_inode_cache:slab_destroy 0 unix:0:ufs_inode_cache:slab_free 0 unix:0:ufs_inode_cache:slab_size 8192 unix:0:ufs_inode_cache:snaptime 8464512.851913 unix:0:ufs_inode_cache:vmem_source 23 unix:0:umem_np_16384:align 8192 unix:0:umem_np_16384:alloc 37 unix:0:umem_np_16384:alloc_fail 0 unix:0:umem_np_16384:buf_avail 16 unix:0:umem_np_16384:buf_constructed 2 unix:0:umem_np_16384:buf_inuse 0 unix:0:umem_np_16384:buf_max 16 unix:0:umem_np_16384:buf_size 16384 unix:0:umem_np_16384:buf_total 16 unix:0:umem_np_16384:chunk_size 16384 unix:0:umem_np_16384:class kmem_cache unix:0:umem_np_16384:crtime 69.2616856 unix:0:umem_np_16384:depot_alloc 0 unix:0:umem_np_16384:depot_contention 0 unix:0:umem_np_16384:depot_free 2 unix:0:umem_np_16384:empty_magazines 0 unix:0:umem_np_16384:free 39 unix:0:umem_np_16384:full_magazines 0 unix:0:umem_np_16384:hash_lookup_depth 0 unix:0:umem_np_16384:hash_rescale 0 unix:0:umem_np_16384:hash_size 64 unix:0:umem_np_16384:magazine_size 1 unix:0:umem_np_16384:slab_alloc 2 unix:0:umem_np_16384:slab_create 1 unix:0:umem_np_16384:slab_destroy 0 unix:0:umem_np_16384:slab_free 0 unix:0:umem_np_16384:slab_size 262144 unix:0:umem_np_16384:snaptime 8464512.8533808 unix:0:umem_np_16384:vmem_source 41 unix:0:umem_np_24576:align 8192 unix:0:umem_np_24576:alloc 1 unix:0:umem_np_24576:alloc_fail 0 unix:0:umem_np_24576:buf_avail 10 unix:0:umem_np_24576:buf_constructed 1 unix:0:umem_np_24576:buf_inuse 0 unix:0:umem_np_24576:buf_max 10 unix:0:umem_np_24576:buf_size 24576 unix:0:umem_np_24576:buf_total 10 unix:0:umem_np_24576:chunk_size 24576 unix:0:umem_np_24576:class kmem_cache unix:0:umem_np_24576:crtime 69.2617052 unix:0:umem_np_24576:depot_alloc 0 unix:0:umem_np_24576:depot_contention 0 unix:0:umem_np_24576:depot_free 1 unix:0:umem_np_24576:empty_magazines 0 unix:0:umem_np_24576:free 2 unix:0:umem_np_24576:full_magazines 0 unix:0:umem_np_24576:hash_lookup_depth 0 unix:0:umem_np_24576:hash_rescale 0 unix:0:umem_np_24576:hash_size 64 unix:0:umem_np_24576:magazine_size 1 unix:0:umem_np_24576:slab_alloc 1 unix:0:umem_np_24576:slab_create 1 unix:0:umem_np_24576:slab_destroy 0 unix:0:umem_np_24576:slab_free 0 unix:0:umem_np_24576:slab_size 262144 unix:0:umem_np_24576:snaptime 8464512.8547572 unix:0:umem_np_24576:vmem_source 41 unix:0:umem_np_32768:align 8192 unix:0:umem_np_32768:alloc 11 unix:0:umem_np_32768:alloc_fail 0 unix:0:umem_np_32768:buf_avail 8 unix:0:umem_np_32768:buf_constructed 2 unix:0:umem_np_32768:buf_inuse 0 unix:0:umem_np_32768:buf_max 8 unix:0:umem_np_32768:buf_size 32768 unix:0:umem_np_32768:buf_total 8 unix:0:umem_np_32768:chunk_size 32768 unix:0:umem_np_32768:class kmem_cache unix:0:umem_np_32768:crtime 69.2617246 unix:0:umem_np_32768:depot_alloc 0 unix:0:umem_np_32768:depot_contention 0 unix:0:umem_np_32768:depot_free 2 unix:0:umem_np_32768:empty_magazines 0 unix:0:umem_np_32768:free 
13 unix:0:umem_np_32768:full_magazines 0 unix:0:umem_np_32768:hash_lookup_depth 0 unix:0:umem_np_32768:hash_rescale 0 unix:0:umem_np_32768:hash_size 64 unix:0:umem_np_32768:magazine_size 1 unix:0:umem_np_32768:slab_alloc 2 unix:0:umem_np_32768:slab_create 1 unix:0:umem_np_32768:slab_destroy 0 unix:0:umem_np_32768:slab_free 0 unix:0:umem_np_32768:slab_size 262144 unix:0:umem_np_32768:snaptime 8464512.8561216 unix:0:umem_np_32768:vmem_source 41 unix:0:umem_np_40960:align 8192 unix:0:umem_np_40960:alloc 0 unix:0:umem_np_40960:alloc_fail 0 unix:0:umem_np_40960:buf_avail 0 unix:0:umem_np_40960:buf_constructed 0 unix:0:umem_np_40960:buf_inuse 0 unix:0:umem_np_40960:buf_max 0 unix:0:umem_np_40960:buf_size 40960 unix:0:umem_np_40960:buf_total 0 unix:0:umem_np_40960:chunk_size 40960 unix:0:umem_np_40960:class kmem_cache unix:0:umem_np_40960:crtime 69.2617438 unix:0:umem_np_40960:depot_alloc 0 unix:0:umem_np_40960:depot_contention 0 unix:0:umem_np_40960:depot_free 0 unix:0:umem_np_40960:empty_magazines 0 unix:0:umem_np_40960:free 0 unix:0:umem_np_40960:full_magazines 0 unix:0:umem_np_40960:hash_lookup_depth 0 unix:0:umem_np_40960:hash_rescale 0 unix:0:umem_np_40960:hash_size 64 unix:0:umem_np_40960:magazine_size 1 unix:0:umem_np_40960:slab_alloc 0 unix:0:umem_np_40960:slab_create 0 unix:0:umem_np_40960:slab_destroy 0 unix:0:umem_np_40960:slab_free 0 unix:0:umem_np_40960:slab_size 262144 unix:0:umem_np_40960:snaptime 8464512.8574972 unix:0:umem_np_40960:vmem_source 41 unix:0:umem_np_49152:align 8192 unix:0:umem_np_49152:alloc 0 unix:0:umem_np_49152:alloc_fail 0 unix:0:umem_np_49152:buf_avail 0 unix:0:umem_np_49152:buf_constructed 0 unix:0:umem_np_49152:buf_inuse 0 unix:0:umem_np_49152:buf_max 0 unix:0:umem_np_49152:buf_size 49152 unix:0:umem_np_49152:buf_total 0 unix:0:umem_np_49152:chunk_size 49152 unix:0:umem_np_49152:class kmem_cache unix:0:umem_np_49152:crtime 69.2618356 unix:0:umem_np_49152:depot_alloc 0 unix:0:umem_np_49152:depot_contention 0 unix:0:umem_np_49152:depot_free 0 unix:0:umem_np_49152:empty_magazines 0 unix:0:umem_np_49152:free 0 unix:0:umem_np_49152:full_magazines 0 unix:0:umem_np_49152:hash_lookup_depth 0 unix:0:umem_np_49152:hash_rescale 0 unix:0:umem_np_49152:hash_size 64 unix:0:umem_np_49152:magazine_size 1 unix:0:umem_np_49152:slab_alloc 0 unix:0:umem_np_49152:slab_create 0 unix:0:umem_np_49152:slab_destroy 0 unix:0:umem_np_49152:slab_free 0 unix:0:umem_np_49152:slab_size 262144 unix:0:umem_np_49152:snaptime 8464512.8590336 unix:0:umem_np_49152:vmem_source 41 unix:0:umem_np_57344:align 8192 unix:0:umem_np_57344:alloc 0 unix:0:umem_np_57344:alloc_fail 0 unix:0:umem_np_57344:buf_avail 0 unix:0:umem_np_57344:buf_constructed 0 unix:0:umem_np_57344:buf_inuse 0 unix:0:umem_np_57344:buf_max 0 unix:0:umem_np_57344:buf_size 57344 unix:0:umem_np_57344:buf_total 0 unix:0:umem_np_57344:chunk_size 57344 unix:0:umem_np_57344:class kmem_cache unix:0:umem_np_57344:crtime 69.2618558 unix:0:umem_np_57344:depot_alloc 0 unix:0:umem_np_57344:depot_contention 0 unix:0:umem_np_57344:depot_free 0 unix:0:umem_np_57344:empty_magazines 0 unix:0:umem_np_57344:free 0 unix:0:umem_np_57344:full_magazines 0 unix:0:umem_np_57344:hash_lookup_depth 0 unix:0:umem_np_57344:hash_rescale 0 unix:0:umem_np_57344:hash_size 64 unix:0:umem_np_57344:magazine_size 1 unix:0:umem_np_57344:slab_alloc 0 unix:0:umem_np_57344:slab_create 0 unix:0:umem_np_57344:slab_destroy 0 unix:0:umem_np_57344:slab_free 0 unix:0:umem_np_57344:slab_size 262144 unix:0:umem_np_57344:snaptime 8464512.860474 unix:0:umem_np_57344:vmem_source 41 
unix:0:umem_np_65536:align 8192 unix:0:umem_np_65536:alloc 5 unix:0:umem_np_65536:alloc_fail 0 unix:0:umem_np_65536:buf_avail 4 unix:0:umem_np_65536:buf_constructed 2 unix:0:umem_np_65536:buf_inuse 0 unix:0:umem_np_65536:buf_max 4 unix:0:umem_np_65536:buf_size 65536 unix:0:umem_np_65536:buf_total 4 unix:0:umem_np_65536:chunk_size 65536 unix:0:umem_np_65536:class kmem_cache unix:0:umem_np_65536:crtime 69.2618742 unix:0:umem_np_65536:depot_alloc 0 unix:0:umem_np_65536:depot_contention 0 unix:0:umem_np_65536:depot_free 2 unix:0:umem_np_65536:empty_magazines 0 unix:0:umem_np_65536:free 7 unix:0:umem_np_65536:full_magazines 0 unix:0:umem_np_65536:hash_lookup_depth 0 unix:0:umem_np_65536:hash_rescale 0 unix:0:umem_np_65536:hash_size 64 unix:0:umem_np_65536:magazine_size 1 unix:0:umem_np_65536:slab_alloc 2 unix:0:umem_np_65536:slab_create 1 unix:0:umem_np_65536:slab_destroy 0 unix:0:umem_np_65536:slab_free 0 unix:0:umem_np_65536:slab_size 262144 unix:0:umem_np_65536:snaptime 8464512.8618504 unix:0:umem_np_65536:vmem_source 41 unix:0:umem_np_8192:align 8192 unix:0:umem_np_8192:alloc 48 unix:0:umem_np_8192:alloc_fail 0 unix:0:umem_np_8192:buf_avail 32 unix:0:umem_np_8192:buf_constructed 4 unix:0:umem_np_8192:buf_inuse 0 unix:0:umem_np_8192:buf_max 32 unix:0:umem_np_8192:buf_size 8192 unix:0:umem_np_8192:buf_total 32 unix:0:umem_np_8192:chunk_size 8192 unix:0:umem_np_8192:class kmem_cache unix:0:umem_np_8192:crtime 69.261652 unix:0:umem_np_8192:depot_alloc 0 unix:0:umem_np_8192:depot_contention 0 unix:0:umem_np_8192:depot_free 4 unix:0:umem_np_8192:empty_magazines 0 unix:0:umem_np_8192:free 52 unix:0:umem_np_8192:full_magazines 0 unix:0:umem_np_8192:hash_lookup_depth 0 unix:0:umem_np_8192:hash_rescale 0 unix:0:umem_np_8192:hash_size 64 unix:0:umem_np_8192:magazine_size 1 unix:0:umem_np_8192:slab_alloc 4 unix:0:umem_np_8192:slab_create 1 unix:0:umem_np_8192:slab_destroy 0 unix:0:umem_np_8192:slab_free 0 unix:0:umem_np_8192:slab_size 262144 unix:0:umem_np_8192:snaptime 8464512.8632982 unix:0:umem_np_8192:vmem_source 41 unix:0:upa64s_nexus_enum_tq:class taskq unix:0:upa64s_nexus_enum_tq:crtime 84.5339056 unix:0:upa64s_nexus_enum_tq:executed 0 unix:0:upa64s_nexus_enum_tq:maxtasks 0 unix:0:upa64s_nexus_enum_tq:nactive 1 unix:0:upa64s_nexus_enum_tq:nalloc 0 unix:0:upa64s_nexus_enum_tq:priority 60 unix:0:upa64s_nexus_enum_tq:snaptime 8464512.8646816 unix:0:upa64s_nexus_enum_tq:tasks 0 unix:0:upa64s_nexus_enum_tq:threads 1 unix:0:upa64s_nexus_enum_tq:totaltime 0 unix:0:us_pm_monitor:class taskq unix:0:us_pm_monitor:crtime 82.8366242 unix:0:us_pm_monitor:executed 1692886 unix:0:us_pm_monitor:maxtasks 1 unix:0:us_pm_monitor:nactive 2 unix:0:us_pm_monitor:nalloc 0 unix:0:us_pm_monitor:priority 98 unix:0:us_pm_monitor:snaptime 8464512.8652686 unix:0:us_pm_monitor:tasks 1692886 unix:0:us_pm_monitor:threads 1 unix:0:us_pm_monitor:totaltime 8458323600 unix:0:var:class misc unix:0:var:crtime 65.9026654 unix:0:var:snaptime 8464512.8658596 unix:0:var:v_autoup 30 unix:0:var:v_buf 100 unix:0:var:v_bufhwm 166408 unix:0:var:v_call 0 unix:0:var:v_clist 0 unix:0:var:v_hbuf 8192 unix:0:var:v_hmask 8191 unix:0:var:v_maxpmem 0 unix:0:var:v_maxsyspri 99 unix:0:var:v_maxup 29995 unix:0:var:v_maxupttl 29995 unix:0:var:v_nglobpris 170 unix:0:var:v_pbuf 0 unix:0:var:v_proc 30000 unix:0:var:v_sptmap 0 unix:0:vis_info:class misc unix:0:vis_info:crtime 69.266295 unix:0:vis_info:snaptime 8464512.8667822 unix:0:vis_info:vis_array16 0 unix:0:vis_info:vis_array32 0 unix:0:vis_info:vis_array8 0 unix:0:vis_info:vis_bmask 0 
unix:0:vis_info:vis_bshuffle 0 unix:0:vis_info:vis_edge16 0 unix:0:vis_info:vis_edge16l 0 unix:0:vis_info:vis_edge16ln 0 unix:0:vis_info:vis_edge16n 0 unix:0:vis_info:vis_edge32 0 unix:0:vis_info:vis_edge32l 0 unix:0:vis_info:vis_edge32ln 0 unix:0:vis_info:vis_edge32n 0 unix:0:vis_info:vis_edge8 0 unix:0:vis_info:vis_edge8l 0 unix:0:vis_info:vis_edge8ln 0 unix:0:vis_info:vis_edge8n 0 unix:0:vis_info:vis_fcmpeq16 0 unix:0:vis_info:vis_fcmpeq32 0 unix:0:vis_info:vis_fcmpgt16 0 unix:0:vis_info:vis_fcmpgt32 0 unix:0:vis_info:vis_fcmple16 0 unix:0:vis_info:vis_fcmple32 0 unix:0:vis_info:vis_fcmpne16 0 unix:0:vis_info:vis_fcmpne32 0 unix:0:vis_info:vis_fexpand 0 unix:0:vis_info:vis_fmul8sux16 0 unix:0:vis_info:vis_fmul8ulx16 0 unix:0:vis_info:vis_fmul8x16 0 unix:0:vis_info:vis_fmul8x16al 0 unix:0:vis_info:vis_fmul8x16au 0 unix:0:vis_info:vis_fmuld8sux16 0 unix:0:vis_info:vis_fmuld8ulx16 0 unix:0:vis_info:vis_fpack16 0 unix:0:vis_info:vis_fpack32 0 unix:0:vis_info:vis_fpackfix 0 unix:0:vis_info:vis_fpmerge 0 unix:0:vis_info:vis_pdist 0 unix:0:vminfo:class vm unix:0:vminfo:crtime 65.902626 unix:0:vminfo:freemem 8323021564395 unix:0:vminfo:snaptime 8464512.8688176 unix:0:vminfo:swap_alloc 107308827867 unix:0:vminfo:swap_avail 25036418449522 unix:0:vminfo:swap_free 25050029372175 unix:0:vminfo:swap_resv 120919750520 unix:0:vmu_bound_cache:align 8 unix:0:vmu_bound_cache:alloc 0 unix:0:vmu_bound_cache:alloc_fail 0 unix:0:vmu_bound_cache:buf_avail 0 unix:0:vmu_bound_cache:buf_constructed 0 unix:0:vmu_bound_cache:buf_inuse 0 unix:0:vmu_bound_cache:buf_max 0 unix:0:vmu_bound_cache:buf_size 32 unix:0:vmu_bound_cache:buf_total 0 unix:0:vmu_bound_cache:chunk_size 32 unix:0:vmu_bound_cache:class kmem_cache unix:0:vmu_bound_cache:crtime 69.2860314 unix:0:vmu_bound_cache:depot_alloc 0 unix:0:vmu_bound_cache:depot_contention 0 unix:0:vmu_bound_cache:depot_free 0 unix:0:vmu_bound_cache:empty_magazines 0 unix:0:vmu_bound_cache:free 0 unix:0:vmu_bound_cache:full_magazines 0 unix:0:vmu_bound_cache:hash_lookup_depth 0 unix:0:vmu_bound_cache:hash_rescale 0 unix:0:vmu_bound_cache:hash_size 0 unix:0:vmu_bound_cache:magazine_size 15 unix:0:vmu_bound_cache:slab_alloc 0 unix:0:vmu_bound_cache:slab_create 0 unix:0:vmu_bound_cache:slab_destroy 0 unix:0:vmu_bound_cache:slab_free 0 unix:0:vmu_bound_cache:slab_size 8192 unix:0:vmu_bound_cache:snaptime 8464512.8693034 unix:0:vmu_bound_cache:vmem_source 23 unix:0:vmu_object_cache:align 8 unix:0:vmu_object_cache:alloc 0 unix:0:vmu_object_cache:alloc_fail 0 unix:0:vmu_object_cache:buf_avail 0 unix:0:vmu_object_cache:buf_constructed 0 unix:0:vmu_object_cache:buf_inuse 0 unix:0:vmu_object_cache:buf_max 0 unix:0:vmu_object_cache:buf_size 32 unix:0:vmu_object_cache:buf_total 0 unix:0:vmu_object_cache:chunk_size 32 unix:0:vmu_object_cache:class kmem_cache unix:0:vmu_object_cache:crtime 69.2860468 unix:0:vmu_object_cache:depot_alloc 0 unix:0:vmu_object_cache:depot_contention 0 unix:0:vmu_object_cache:depot_free 0 unix:0:vmu_object_cache:empty_magazines 0 unix:0:vmu_object_cache:free 0 unix:0:vmu_object_cache:full_magazines 0 unix:0:vmu_object_cache:hash_lookup_depth 0 unix:0:vmu_object_cache:hash_rescale 0 unix:0:vmu_object_cache:hash_size 0 unix:0:vmu_object_cache:magazine_size 15 unix:0:vmu_object_cache:slab_alloc 0 unix:0:vmu_object_cache:slab_create 0 unix:0:vmu_object_cache:slab_destroy 0 unix:0:vmu_object_cache:slab_free 0 unix:0:vmu_object_cache:slab_size 8192 unix:0:vmu_object_cache:snaptime 8464512.8708668 unix:0:vmu_object_cache:vmem_source 23 unix:0:vn_cache:align 64 
unix:0:vn_cache:alloc 3424387 unix:0:vn_cache:alloc_fail 0 unix:0:vn_cache:buf_avail 201 unix:0:vn_cache:buf_constructed 185 unix:0:vn_cache:buf_inuse 21530 unix:0:vn_cache:buf_max 21731 unix:0:vn_cache:buf_size 240 unix:0:vn_cache:buf_total 21731 unix:0:vn_cache:chunk_size 256 unix:0:vn_cache:class kmem_cache unix:0:vn_cache:crtime 69.2819036 unix:0:vn_cache:depot_alloc 18917 unix:0:vn_cache:depot_contention 0 unix:0:vn_cache:depot_free 18945 unix:0:vn_cache:empty_magazines 1165 unix:0:vn_cache:free 3402885 unix:0:vn_cache:full_magazines 25 unix:0:vn_cache:hash_lookup_depth 0 unix:0:vn_cache:hash_rescale 0 unix:0:vn_cache:hash_size 0 unix:0:vn_cache:magazine_size 7 unix:0:vn_cache:slab_alloc 21715 unix:0:vn_cache:slab_create 701 unix:0:vn_cache:slab_destroy 0 unix:0:vn_cache:slab_free 0 unix:0:vn_cache:slab_size 8192 unix:0:vn_cache:snaptime 8464512.872262 unix:0:vn_cache:vmem_source 23 unix:0:vopstats_1d80002:class misc unix:0:vopstats_1d80002:crtime 102.8095292 unix:0:vopstats_1d80002:naccess 13030 unix:0:vopstats_1d80002:naddmap 211285 unix:0:vopstats_1d80002:nclose 11511 unix:0:vopstats_1d80002:ncmp 109127 unix:0:vopstats_1d80002:ncreate 54 unix:0:vopstats_1d80002:ndelmap 211275 unix:0:vopstats_1d80002:ndispose 265279 unix:0:vopstats_1d80002:ndump 0 unix:0:vopstats_1d80002:ndumpctl 0 unix:0:vopstats_1d80002:nfid 0 unix:0:vopstats_1d80002:nfrlock 0 unix:0:vopstats_1d80002:nfsync 0 unix:0:vopstats_1d80002:ngetattr 63568 unix:0:vopstats_1d80002:ngetpage 2804057 unix:0:vopstats_1d80002:ngetsecattr 225 unix:0:vopstats_1d80002:ninactive 111 unix:0:vopstats_1d80002:nioctl 567 unix:0:vopstats_1d80002:nlink 0 unix:0:vopstats_1d80002:nlookup 1539217 unix:0:vopstats_1d80002:nmap 15275 unix:0:vopstats_1d80002:nmkdir 0 unix:0:vopstats_1d80002:nopen 12621 unix:0:vopstats_1d80002:npageio 0 unix:0:vopstats_1d80002:npathconf 74 unix:0:vopstats_1d80002:npoll 0 unix:0:vopstats_1d80002:nputpage 903 unix:0:vopstats_1d80002:nread 8929 unix:0:vopstats_1d80002:nreaddir 5287 unix:0:vopstats_1d80002:nreadlink 101937 unix:0:vopstats_1d80002:nrealvp 2916 unix:0:vopstats_1d80002:nremove 54 unix:0:vopstats_1d80002:nrename 0 unix:0:vopstats_1d80002:nrmdir 0 unix:0:vopstats_1d80002:nrwlock 14270 unix:0:vopstats_1d80002:nrwunlock 14270 unix:0:vopstats_1d80002:nseek 1712 unix:0:vopstats_1d80002:nsetattr 54 unix:0:vopstats_1d80002:nsetfl 0 unix:0:vopstats_1d80002:nsetsecattr 0 unix:0:vopstats_1d80002:nshrlock 0 unix:0:vopstats_1d80002:nspace 0 unix:0:vopstats_1d80002:nsymlink 0 unix:0:vopstats_1d80002:nvnevent 0 unix:0:vopstats_1d80002:nwrite 54 unix:0:vopstats_1d80002:read_bytes 8586171 unix:0:vopstats_1d80002:readdir_bytes 6684152 unix:0:vopstats_1d80002:snaptime 8464512.8737328 unix:0:vopstats_1d80002:write_bytes 2554 unix:0:vopstats_1d80008:class misc unix:0:vopstats_1d80008:crtime 82.5147896 unix:0:vopstats_1d80008:naccess 5303022 unix:0:vopstats_1d80008:naddmap 5033411 unix:0:vopstats_1d80008:nclose 1901085 unix:0:vopstats_1d80008:ncmp 17043428 unix:0:vopstats_1d80008:ncreate 131311 unix:0:vopstats_1d80008:ndelmap 5060820 unix:0:vopstats_1d80008:ndispose 813331 unix:0:vopstats_1d80008:ndump 0 unix:0:vopstats_1d80008:ndumpctl 0 unix:0:vopstats_1d80008:nfid 0 unix:0:vopstats_1d80008:nfrlock 7714 unix:0:vopstats_1d80008:nfsync 459 unix:0:vopstats_1d80008:ngetattr 19083252 unix:0:vopstats_1d80008:ngetpage 11053267 unix:0:vopstats_1d80008:ngetsecattr 135459 unix:0:vopstats_1d80008:ninactive 132266 unix:0:vopstats_1d80008:nioctl 33915454 unix:0:vopstats_1d80008:nlink 0 unix:0:vopstats_1d80008:nlookup 173384227 
unix:0:vopstats_1d80008:nmap 472227 unix:0:vopstats_1d80008:nmkdir 106 unix:0:vopstats_1d80008:nopen 1590080 unix:0:vopstats_1d80008:npageio 0 unix:0:vopstats_1d80008:npathconf 2148 unix:0:vopstats_1d80008:npoll 102372552 unix:0:vopstats_1d80008:nputpage 36522 unix:0:vopstats_1d80008:nread 22621686 unix:0:vopstats_1d80008:nreaddir 1558774 unix:0:vopstats_1d80008:nreadlink 1844252 unix:0:vopstats_1d80008:nrealvp 451635 unix:0:vopstats_1d80008:nremove 2177 unix:0:vopstats_1d80008:nrename 2491 unix:0:vopstats_1d80008:nrmdir 89 unix:0:vopstats_1d80008:nrwlock 42117290 unix:0:vopstats_1d80008:nrwunlock 42117527 unix:0:vopstats_1d80008:nseek 304101 unix:0:vopstats_1d80008:nsetattr 1131 unix:0:vopstats_1d80008:nsetfl 195603 unix:0:vopstats_1d80008:nsetsecattr 110 unix:0:vopstats_1d80008:nshrlock 0 unix:0:vopstats_1d80008:nspace 156 unix:0:vopstats_1d80008:nsymlink 16 unix:0:vopstats_1d80008:nvnevent 0 unix:0:vopstats_1d80008:nwrite 17936969 unix:0:vopstats_1d80008:read_bytes 2153489967 unix:0:vopstats_1d80008:readdir_bytes 322365880 unix:0:vopstats_1d80008:snaptime 8464512.876165 unix:0:vopstats_1d80008:write_bytes 1463464457 unix:0:vopstats_5680000:class misc unix:0:vopstats_5680000:crtime 82.64648 unix:0:vopstats_5680000:naccess 303092 unix:0:vopstats_5680000:naddmap 0 unix:0:vopstats_5680000:nclose 303767 unix:0:vopstats_5680000:ncmp 100 unix:0:vopstats_5680000:ncreate 0 unix:0:vopstats_5680000:ndelmap 0 unix:0:vopstats_5680000:ndispose 0 unix:0:vopstats_5680000:ndump 0 unix:0:vopstats_5680000:ndumpctl 0 unix:0:vopstats_5680000:nfid 0 unix:0:vopstats_5680000:nfrlock 0 unix:0:vopstats_5680000:nfsync 0 unix:0:vopstats_5680000:ngetattr 12207 unix:0:vopstats_5680000:ngetpage 0 unix:0:vopstats_5680000:ngetsecattr 52 unix:0:vopstats_5680000:ninactive 614378 unix:0:vopstats_5680000:nioctl 3277 unix:0:vopstats_5680000:nlink 0 unix:0:vopstats_5680000:nlookup 615375 unix:0:vopstats_5680000:nmap 0 unix:0:vopstats_5680000:nmkdir 0 unix:0:vopstats_5680000:nopen 303088 unix:0:vopstats_5680000:npageio 0 unix:0:vopstats_5680000:npathconf 26 unix:0:vopstats_5680000:npoll 21376 unix:0:vopstats_5680000:nputpage 0 unix:0:vopstats_5680000:nread 19467 unix:0:vopstats_5680000:nreaddir 1202 unix:0:vopstats_5680000:nreadlink 8897 unix:0:vopstats_5680000:nrealvp 0 unix:0:vopstats_5680000:nremove 0 unix:0:vopstats_5680000:nrename 0 unix:0:vopstats_5680000:nrmdir 0 unix:0:vopstats_5680000:nrwlock 20949 unix:0:vopstats_5680000:nrwunlock 20949 unix:0:vopstats_5680000:nseek 2069 unix:0:vopstats_5680000:nsetattr 0 unix:0:vopstats_5680000:nsetfl 0 unix:0:vopstats_5680000:nsetsecattr 0 unix:0:vopstats_5680000:nshrlock 0 unix:0:vopstats_5680000:nspace 0 unix:0:vopstats_5680000:nsymlink 0 unix:0:vopstats_5680000:nvnevent 0 unix:0:vopstats_5680000:nwrite 280 unix:0:vopstats_5680000:read_bytes 8809096 unix:0:vopstats_5680000:readdir_bytes 398816 unix:0:vopstats_5680000:snaptime 8464512.87851 unix:0:vopstats_5680000:write_bytes 5816 unix:0:vopstats_56c0001:class misc unix:0:vopstats_56c0001:crtime 82.6868534 unix:0:vopstats_56c0001:naccess 332 unix:0:vopstats_56c0001:naddmap 0 unix:0:vopstats_56c0001:nclose 356 unix:0:vopstats_56c0001:ncmp 0 unix:0:vopstats_56c0001:ncreate 0 unix:0:vopstats_56c0001:ndelmap 0 unix:0:vopstats_56c0001:ndispose 0 unix:0:vopstats_56c0001:ndump 0 unix:0:vopstats_56c0001:ndumpctl 0 unix:0:vopstats_56c0001:nfid 0 unix:0:vopstats_56c0001:nfrlock 11 unix:0:vopstats_56c0001:nfsync 0 unix:0:vopstats_56c0001:ngetattr 133 unix:0:vopstats_56c0001:ngetpage 0 unix:0:vopstats_56c0001:ngetsecattr 0 
unix:0:vopstats_56c0001:ninactive 331 unix:0:vopstats_56c0001:nioctl 6134 unix:0:vopstats_56c0001:nlink 0 unix:0:vopstats_56c0001:nlookup 0 unix:0:vopstats_56c0001:nmap 0 unix:0:vopstats_56c0001:nmkdir 0 unix:0:vopstats_56c0001:nopen 332 unix:0:vopstats_56c0001:npageio 0 unix:0:vopstats_56c0001:npathconf 0 unix:0:vopstats_56c0001:npoll 0 unix:0:vopstats_56c0001:nputpage 0 unix:0:vopstats_56c0001:nread 31 unix:0:vopstats_56c0001:nreaddir 0 unix:0:vopstats_56c0001:nreadlink 0 unix:0:vopstats_56c0001:nrealvp 0 unix:0:vopstats_56c0001:nremove 0 unix:0:vopstats_56c0001:nrename 0 unix:0:vopstats_56c0001:nrmdir 0 unix:0:vopstats_56c0001:nrwlock 31 unix:0:vopstats_56c0001:nrwunlock 31 unix:0:vopstats_56c0001:nseek 841 unix:0:vopstats_56c0001:nsetattr 0 unix:0:vopstats_56c0001:nsetfl 0 unix:0:vopstats_56c0001:nsetsecattr 0 unix:0:vopstats_56c0001:nshrlock 0 unix:0:vopstats_56c0001:nspace 0 unix:0:vopstats_56c0001:nsymlink 0 unix:0:vopstats_56c0001:nvnevent 0 unix:0:vopstats_56c0001:nwrite 0 unix:0:vopstats_56c0001:read_bytes 5043 unix:0:vopstats_56c0001:readdir_bytes 0 unix:0:vopstats_56c0001:snaptime 8464512.8809164 unix:0:vopstats_56c0001:write_bytes 0 unix:0:vopstats_5700001:class misc unix:0:vopstats_5700001:crtime 82.7283284 unix:0:vopstats_5700001:naccess 3487 unix:0:vopstats_5700001:naddmap 0 unix:0:vopstats_5700001:nclose 63415 unix:0:vopstats_5700001:ncmp 119753 unix:0:vopstats_5700001:ncreate 61546 unix:0:vopstats_5700001:ndelmap 0 unix:0:vopstats_5700001:ndispose 62250 unix:0:vopstats_5700001:ndump 0 unix:0:vopstats_5700001:ndumpctl 0 unix:0:vopstats_5700001:nfid 0 unix:0:vopstats_5700001:nfrlock 17949 unix:0:vopstats_5700001:nfsync 63 unix:0:vopstats_5700001:ngetattr 17679 unix:0:vopstats_5700001:ngetpage 13 unix:0:vopstats_5700001:ngetsecattr 62229 unix:0:vopstats_5700001:ninactive 33588950 unix:0:vopstats_5700001:nioctl 353 unix:0:vopstats_5700001:nlink 0 unix:0:vopstats_5700001:nlookup 340292 unix:0:vopstats_5700001:nmap 0 unix:0:vopstats_5700001:nmkdir 58 unix:0:vopstats_5700001:nopen 63237 unix:0:vopstats_5700001:npageio 0 unix:0:vopstats_5700001:npathconf 0 unix:0:vopstats_5700001:npoll 0 unix:0:vopstats_5700001:nputpage 47804734 unix:0:vopstats_5700001:nread 52932 unix:0:vopstats_5700001:nreaddir 0 unix:0:vopstats_5700001:nreadlink 0 unix:0:vopstats_5700001:nrealvp 297436 unix:0:vopstats_5700001:nremove 3485 unix:0:vopstats_5700001:nrename 57760 unix:0:vopstats_5700001:nrmdir 0 unix:0:vopstats_5700001:nrwlock 161405 unix:0:vopstats_5700001:nrwunlock 161405 unix:0:vopstats_5700001:nseek 80107 unix:0:vopstats_5700001:nsetattr 457 unix:0:vopstats_5700001:nsetfl 0 unix:0:vopstats_5700001:nsetsecattr 0 unix:0:vopstats_5700001:nshrlock 0 unix:0:vopstats_5700001:nspace 1 unix:0:vopstats_5700001:nsymlink 0 unix:0:vopstats_5700001:nvnevent 0 unix:0:vopstats_5700001:nwrite 108473 unix:0:vopstats_5700001:read_bytes 54197268 unix:0:vopstats_5700001:readdir_bytes 0 unix:0:vopstats_5700001:snaptime 8464512.8832686 unix:0:vopstats_5700001:write_bytes 68792751 unix:0:vopstats_5700002:class misc unix:0:vopstats_5700002:crtime 98.2823748 unix:0:vopstats_5700002:naccess 2854 unix:0:vopstats_5700002:naddmap 29 unix:0:vopstats_5700002:nclose 5348 unix:0:vopstats_5700002:ncmp 1056 unix:0:vopstats_5700002:ncreate 985 unix:0:vopstats_5700002:ndelmap 29 unix:0:vopstats_5700002:ndispose 1983 unix:0:vopstats_5700002:ndump 0 unix:0:vopstats_5700002:ndumpctl 0 unix:0:vopstats_5700002:nfid 0 unix:0:vopstats_5700002:nfrlock 0 unix:0:vopstats_5700002:nfsync 19 unix:0:vopstats_5700002:ngetattr 10529 
unix:0:vopstats_5700002:ngetpage 189 unix:0:vopstats_5700002:ngetsecattr 1247 unix:0:vopstats_5700002:ninactive 178059 unix:0:vopstats_5700002:nioctl 3043 unix:0:vopstats_5700002:nlink 3 unix:0:vopstats_5700002:nlookup 6416 unix:0:vopstats_5700002:nmap 13 unix:0:vopstats_5700002:nmkdir 10 unix:0:vopstats_5700002:nopen 3599 unix:0:vopstats_5700002:npageio 0 unix:0:vopstats_5700002:npathconf 130 unix:0:vopstats_5700002:npoll 0 unix:0:vopstats_5700002:nputpage 645263 unix:0:vopstats_5700002:nread 7433 unix:0:vopstats_5700002:nreaddir 102 unix:0:vopstats_5700002:nreadlink 42 unix:0:vopstats_5700002:nrealvp 2456 unix:0:vopstats_5700002:nremove 678 unix:0:vopstats_5700002:nrename 1 unix:0:vopstats_5700002:nrmdir 0 unix:0:vopstats_5700002:nrwlock 11093 unix:0:vopstats_5700002:nrwunlock 11093 unix:0:vopstats_5700002:nseek 8194 unix:0:vopstats_5700002:nsetattr 10 unix:0:vopstats_5700002:nsetfl 0 unix:0:vopstats_5700002:nsetsecattr 0 unix:0:vopstats_5700002:nshrlock 0 unix:0:vopstats_5700002:nspace 35 unix:0:vopstats_5700002:nsymlink 20 unix:0:vopstats_5700002:nvnevent 0 unix:0:vopstats_5700002:nwrite 3558 unix:0:vopstats_5700002:read_bytes 53992243 unix:0:vopstats_5700002:readdir_bytes 15544 unix:0:vopstats_5700002:snaptime 8464512.8856464 unix:0:vopstats_5700002:write_bytes 13707395 unix:0:vopstats_5700003:class misc unix:0:vopstats_5700003:crtime 98.2955456 unix:0:vopstats_5700003:naccess 16 unix:0:vopstats_5700003:naddmap 0 unix:0:vopstats_5700003:nclose 29 unix:0:vopstats_5700003:ncmp 2 unix:0:vopstats_5700003:ncreate 17 unix:0:vopstats_5700003:ndelmap 0 unix:0:vopstats_5700003:ndispose 0 unix:0:vopstats_5700003:ndump 0 unix:0:vopstats_5700003:ndumpctl 0 unix:0:vopstats_5700003:nfid 0 unix:0:vopstats_5700003:nfrlock 0 unix:0:vopstats_5700003:nfsync 3 unix:0:vopstats_5700003:ngetattr 81 unix:0:vopstats_5700003:ngetpage 0 unix:0:vopstats_5700003:ngetsecattr 14 unix:0:vopstats_5700003:ninactive 850864 unix:0:vopstats_5700003:nioctl 2 unix:0:vopstats_5700003:nlink 0 unix:0:vopstats_5700003:nlookup 280036 unix:0:vopstats_5700003:nmap 0 unix:0:vopstats_5700003:nmkdir 6 unix:0:vopstats_5700003:nopen 29 unix:0:vopstats_5700003:npageio 0 unix:0:vopstats_5700003:npathconf 0 unix:0:vopstats_5700003:npoll 0 unix:0:vopstats_5700003:nputpage 1134300 unix:0:vopstats_5700003:nread 28 unix:0:vopstats_5700003:nreaddir 0 unix:0:vopstats_5700003:nreadlink 0 unix:0:vopstats_5700003:nrealvp 7 unix:0:vopstats_5700003:nremove 0 unix:0:vopstats_5700003:nrename 0 unix:0:vopstats_5700003:nrmdir 0 unix:0:vopstats_5700003:nrwlock 32 unix:0:vopstats_5700003:nrwunlock 32 unix:0:vopstats_5700003:nseek 29 unix:0:vopstats_5700003:nsetattr 6 unix:0:vopstats_5700003:nsetfl 0 unix:0:vopstats_5700003:nsetsecattr 0 unix:0:vopstats_5700003:nshrlock 0 unix:0:vopstats_5700003:nspace 1 unix:0:vopstats_5700003:nsymlink 0 unix:0:vopstats_5700003:nvnevent 0 unix:0:vopstats_5700003:nwrite 4 unix:0:vopstats_5700003:read_bytes 56 unix:0:vopstats_5700003:readdir_bytes 0 unix:0:vopstats_5700003:snaptime 8464512.887932 unix:0:vopstats_5700003:write_bytes 20 unix:0:vopstats_5980001:class misc unix:0:vopstats_5980001:crtime 115.403782 unix:0:vopstats_5980001:naccess 3190 unix:0:vopstats_5980001:naddmap 0 unix:0:vopstats_5980001:nclose 3248 unix:0:vopstats_5980001:ncmp 3 unix:0:vopstats_5980001:ncreate 0 unix:0:vopstats_5980001:ndelmap 0 unix:0:vopstats_5980001:ndispose 13 unix:0:vopstats_5980001:ndump 0 unix:0:vopstats_5980001:ndumpctl 0 unix:0:vopstats_5980001:nfid 0 unix:0:vopstats_5980001:nfrlock 0 unix:0:vopstats_5980001:nfsync 0 
unix:0:vopstats_5980001:ngetattr 19497 unix:0:vopstats_5980001:ngetpage 28 unix:0:vopstats_5980001:ngetsecattr 0 unix:0:vopstats_5980001:ninactive 25 unix:0:vopstats_5980001:nioctl 527 unix:0:vopstats_5980001:nlink 0 unix:0:vopstats_5980001:nlookup 21905 unix:0:vopstats_5980001:nmap 0 unix:0:vopstats_5980001:nmkdir 0 unix:0:vopstats_5980001:nopen 3189 unix:0:vopstats_5980001:npageio 0 unix:0:vopstats_5980001:npathconf 0 unix:0:vopstats_5980001:npoll 0 unix:0:vopstats_5980001:nputpage 1 unix:0:vopstats_5980001:nread 6559 unix:0:vopstats_5980001:nreaddir 0 unix:0:vopstats_5980001:nreadlink 113 unix:0:vopstats_5980001:nrealvp 6 unix:0:vopstats_5980001:nremove 0 unix:0:vopstats_5980001:nrename 0 unix:0:vopstats_5980001:nrmdir 0 unix:0:vopstats_5980001:nrwlock 6559 unix:0:vopstats_5980001:nrwunlock 6559 unix:0:vopstats_5980001:nseek 505 unix:0:vopstats_5980001:nsetattr 0 unix:0:vopstats_5980001:nsetfl 0 unix:0:vopstats_5980001:nsetsecattr 0 unix:0:vopstats_5980001:nshrlock 0 unix:0:vopstats_5980001:nspace 0 unix:0:vopstats_5980001:nsymlink 0 unix:0:vopstats_5980001:nvnevent 0 unix:0:vopstats_5980001:nwrite 0 unix:0:vopstats_5980001:read_bytes 6163234 unix:0:vopstats_5980001:readdir_bytes 0 unix:0:vopstats_5980001:snaptime 8464512.8903578 unix:0:vopstats_5980001:write_bytes 0 unix:0:vopstats_5980002:class misc unix:0:vopstats_5980002:crtime 123.591584 unix:0:vopstats_5980002:naccess 0 unix:0:vopstats_5980002:naddmap 0 unix:0:vopstats_5980002:nclose 0 unix:0:vopstats_5980002:ncmp 0 unix:0:vopstats_5980002:ncreate 0 unix:0:vopstats_5980002:ndelmap 0 unix:0:vopstats_5980002:ndispose 0 unix:0:vopstats_5980002:ndump 0 unix:0:vopstats_5980002:ndumpctl 0 unix:0:vopstats_5980002:nfid 0 unix:0:vopstats_5980002:nfrlock 0 unix:0:vopstats_5980002:nfsync 0 unix:0:vopstats_5980002:ngetattr 22 unix:0:vopstats_5980002:ngetpage 0 unix:0:vopstats_5980002:ngetsecattr 2 unix:0:vopstats_5980002:ninactive 25 unix:0:vopstats_5980002:nioctl 0 unix:0:vopstats_5980002:nlink 0 unix:0:vopstats_5980002:nlookup 0 unix:0:vopstats_5980002:nmap 0 unix:0:vopstats_5980002:nmkdir 0 unix:0:vopstats_5980002:nopen 0 unix:0:vopstats_5980002:npageio 0 unix:0:vopstats_5980002:npathconf 1 unix:0:vopstats_5980002:npoll 0 unix:0:vopstats_5980002:nputpage 0 unix:0:vopstats_5980002:nread 0 unix:0:vopstats_5980002:nreaddir 0 unix:0:vopstats_5980002:nreadlink 0 unix:0:vopstats_5980002:nrealvp 0 unix:0:vopstats_5980002:nremove 0 unix:0:vopstats_5980002:nrename 0 unix:0:vopstats_5980002:nrmdir 0 unix:0:vopstats_5980002:nrwlock 0 unix:0:vopstats_5980002:nrwunlock 0 unix:0:vopstats_5980002:nseek 0 unix:0:vopstats_5980002:nsetattr 0 unix:0:vopstats_5980002:nsetfl 0 unix:0:vopstats_5980002:nsetsecattr 0 unix:0:vopstats_5980002:nshrlock 0 unix:0:vopstats_5980002:nspace 0 unix:0:vopstats_5980002:nsymlink 0 unix:0:vopstats_5980002:nvnevent 0 unix:0:vopstats_5980002:nwrite 0 unix:0:vopstats_5980002:read_bytes 0 unix:0:vopstats_5980002:readdir_bytes 0 unix:0:vopstats_5980002:snaptime 8464512.8927058 unix:0:vopstats_5980002:write_bytes 0 unix:0:vopstats_5980056:class misc unix:0:vopstats_5980056:crtime 8463822.2536916 unix:0:vopstats_5980056:naccess 646 unix:0:vopstats_5980056:naddmap 80 unix:0:vopstats_5980056:nclose 402 unix:0:vopstats_5980056:ncmp 114 unix:0:vopstats_5980056:ncreate 56 unix:0:vopstats_5980056:ndelmap 160 unix:0:vopstats_5980056:ndispose 162 unix:0:vopstats_5980056:ndump 0 unix:0:vopstats_5980056:ndumpctl 0 unix:0:vopstats_5980056:nfid 0 unix:0:vopstats_5980056:nfrlock 5 unix:0:vopstats_5980056:nfsync 0 unix:0:vopstats_5980056:ngetattr 
1140 unix:0:vopstats_5980056:ngetpage 183 unix:0:vopstats_5980056:ngetsecattr 56 unix:0:vopstats_5980056:ninactive 610 unix:0:vopstats_5980056:nioctl 13 unix:0:vopstats_5980056:nlink 2 unix:0:vopstats_5980056:nlookup 5351 unix:0:vopstats_5980056:nmap 78 unix:0:vopstats_5980056:nmkdir 0 unix:0:vopstats_5980056:nopen 325 unix:0:vopstats_5980056:npageio 0 unix:0:vopstats_5980056:npathconf 0 unix:0:vopstats_5980056:npoll 0 unix:0:vopstats_5980056:nputpage 40 unix:0:vopstats_5980056:nread 241 unix:0:vopstats_5980056:nreaddir 164 unix:0:vopstats_5980056:nreadlink 22 unix:0:vopstats_5980056:nrealvp 197 unix:0:vopstats_5980056:nremove 21 unix:0:vopstats_5980056:nrename 0 unix:0:vopstats_5980056:nrmdir 0 unix:0:vopstats_5980056:nrwlock 494 unix:0:vopstats_5980056:nrwunlock 494 unix:0:vopstats_5980056:nseek 163 unix:0:vopstats_5980056:nsetattr 4 unix:0:vopstats_5980056:nsetfl 0 unix:0:vopstats_5980056:nsetsecattr 0 unix:0:vopstats_5980056:nshrlock 0 unix:0:vopstats_5980056:nspace 27 unix:0:vopstats_5980056:nsymlink 22 unix:0:vopstats_5980056:nvnevent 0 unix:0:vopstats_5980056:nwrite 89 unix:0:vopstats_5980056:read_bytes 521212 unix:0:vopstats_5980056:readdir_bytes 77944 unix:0:vopstats_5980056:snaptime 8464512.8950574 unix:0:vopstats_5980056:write_bytes 445936 unix:0:vopstats_59c0001:class misc unix:0:vopstats_59c0001:crtime 115.9651768 unix:0:vopstats_59c0001:naccess 0 unix:0:vopstats_59c0001:naddmap 0 unix:0:vopstats_59c0001:nclose 0 unix:0:vopstats_59c0001:ncmp 0 unix:0:vopstats_59c0001:ncreate 0 unix:0:vopstats_59c0001:ndelmap 0 unix:0:vopstats_59c0001:ndispose 0 unix:0:vopstats_59c0001:ndump 0 unix:0:vopstats_59c0001:ndumpctl 0 unix:0:vopstats_59c0001:nfid 0 unix:0:vopstats_59c0001:nfrlock 0 unix:0:vopstats_59c0001:nfsync 0 unix:0:vopstats_59c0001:ngetattr 22 unix:0:vopstats_59c0001:ngetpage 0 unix:0:vopstats_59c0001:ngetsecattr 2 unix:0:vopstats_59c0001:ninactive 0 unix:0:vopstats_59c0001:nioctl 0 unix:0:vopstats_59c0001:nlink 0 unix:0:vopstats_59c0001:nlookup 0 unix:0:vopstats_59c0001:nmap 0 unix:0:vopstats_59c0001:nmkdir 0 unix:0:vopstats_59c0001:nopen 0 unix:0:vopstats_59c0001:npageio 0 unix:0:vopstats_59c0001:npathconf 1 unix:0:vopstats_59c0001:npoll 0 unix:0:vopstats_59c0001:nputpage 0 unix:0:vopstats_59c0001:nread 0 unix:0:vopstats_59c0001:nreaddir 0 unix:0:vopstats_59c0001:nreadlink 0 unix:0:vopstats_59c0001:nrealvp 0 unix:0:vopstats_59c0001:nremove 0 unix:0:vopstats_59c0001:nrename 0 unix:0:vopstats_59c0001:nrmdir 0 unix:0:vopstats_59c0001:nrwlock 0 unix:0:vopstats_59c0001:nrwunlock 0 unix:0:vopstats_59c0001:nseek 0 unix:0:vopstats_59c0001:nsetattr 0 unix:0:vopstats_59c0001:nsetfl 0 unix:0:vopstats_59c0001:nsetsecattr 0 unix:0:vopstats_59c0001:nshrlock 0 unix:0:vopstats_59c0001:nspace 0 unix:0:vopstats_59c0001:nsymlink 0 unix:0:vopstats_59c0001:nvnevent 0 unix:0:vopstats_59c0001:nwrite 0 unix:0:vopstats_59c0001:read_bytes 0 unix:0:vopstats_59c0001:readdir_bytes 0 unix:0:vopstats_59c0001:snaptime 8464512.897331 unix:0:vopstats_59c0001:write_bytes 0 unix:0:vopstats_59c0002:class misc unix:0:vopstats_59c0002:crtime 115.9730072 unix:0:vopstats_59c0002:naccess 194049 unix:0:vopstats_59c0002:naddmap 0 unix:0:vopstats_59c0002:nclose 7 unix:0:vopstats_59c0002:ncmp 0 unix:0:vopstats_59c0002:ncreate 0 unix:0:vopstats_59c0002:ndelmap 0 unix:0:vopstats_59c0002:ndispose 0 unix:0:vopstats_59c0002:ndump 0 unix:0:vopstats_59c0002:ndumpctl 0 unix:0:vopstats_59c0002:nfid 0 unix:0:vopstats_59c0002:nfrlock 0 unix:0:vopstats_59c0002:nfsync 0 unix:0:vopstats_59c0002:ngetattr 327 
unix:0:vopstats_59c0002:ngetpage 0 unix:0:vopstats_59c0002:ngetsecattr 2 unix:0:vopstats_59c0002:ninactive 306 unix:0:vopstats_59c0002:nioctl 0 unix:0:vopstats_59c0002:nlink 0 unix:0:vopstats_59c0002:nlookup 193530 unix:0:vopstats_59c0002:nmap 0 unix:0:vopstats_59c0002:nmkdir 0 unix:0:vopstats_59c0002:nopen 7 unix:0:vopstats_59c0002:npageio 0 unix:0:vopstats_59c0002:npathconf 1 unix:0:vopstats_59c0002:npoll 0 unix:0:vopstats_59c0002:nputpage 0 unix:0:vopstats_59c0002:nread 0 unix:0:vopstats_59c0002:nreaddir 4 unix:0:vopstats_59c0002:nreadlink 0 unix:0:vopstats_59c0002:nrealvp 0 unix:0:vopstats_59c0002:nremove 0 unix:0:vopstats_59c0002:nrename 0 unix:0:vopstats_59c0002:nrmdir 0 unix:0:vopstats_59c0002:nrwlock 4 unix:0:vopstats_59c0002:nrwunlock 4 unix:0:vopstats_59c0002:nseek 0 unix:0:vopstats_59c0002:nsetattr 0 unix:0:vopstats_59c0002:nsetfl 0 unix:0:vopstats_59c0002:nsetsecattr 0 unix:0:vopstats_59c0002:nshrlock 0 unix:0:vopstats_59c0002:nspace 0 unix:0:vopstats_59c0002:nsymlink 0 unix:0:vopstats_59c0002:nvnevent 0 unix:0:vopstats_59c0002:nwrite 0 unix:0:vopstats_59c0002:read_bytes 0 unix:0:vopstats_59c0002:readdir_bytes 544 unix:0:vopstats_59c0002:snaptime 8464512.8996674 unix:0:vopstats_59c0002:write_bytes 0 unix:0:vopstats_autofs:class misc unix:0:vopstats_autofs:crtime 115.9424562 unix:0:vopstats_autofs:naccess 194049 unix:0:vopstats_autofs:naddmap 0 unix:0:vopstats_autofs:nclose 7 unix:0:vopstats_autofs:ncmp 0 unix:0:vopstats_autofs:ncreate 0 unix:0:vopstats_autofs:ndelmap 0 unix:0:vopstats_autofs:ndispose 0 unix:0:vopstats_autofs:ndump 0 unix:0:vopstats_autofs:ndumpctl 0 unix:0:vopstats_autofs:nfid 0 unix:0:vopstats_autofs:nfrlock 0 unix:0:vopstats_autofs:nfsync 0 unix:0:vopstats_autofs:ngetattr 349 unix:0:vopstats_autofs:ngetpage 0 unix:0:vopstats_autofs:ngetsecattr 4 unix:0:vopstats_autofs:ninactive 306 unix:0:vopstats_autofs:nioctl 0 unix:0:vopstats_autofs:nlink 0 unix:0:vopstats_autofs:nlookup 193530 unix:0:vopstats_autofs:nmap 0 unix:0:vopstats_autofs:nmkdir 0 unix:0:vopstats_autofs:nopen 7 unix:0:vopstats_autofs:npageio 0 unix:0:vopstats_autofs:npathconf 2 unix:0:vopstats_autofs:npoll 0 unix:0:vopstats_autofs:nputpage 0 unix:0:vopstats_autofs:nread 0 unix:0:vopstats_autofs:nreaddir 4 unix:0:vopstats_autofs:nreadlink 0 unix:0:vopstats_autofs:nrealvp 0 unix:0:vopstats_autofs:nremove 0 unix:0:vopstats_autofs:nrename 0 unix:0:vopstats_autofs:nrmdir 0 unix:0:vopstats_autofs:nrwlock 4 unix:0:vopstats_autofs:nrwunlock 4 unix:0:vopstats_autofs:nseek 0 unix:0:vopstats_autofs:nsetattr 0 unix:0:vopstats_autofs:nsetfl 0 unix:0:vopstats_autofs:nsetsecattr 0 unix:0:vopstats_autofs:nshrlock 0 unix:0:vopstats_autofs:nspace 0 unix:0:vopstats_autofs:nsymlink 0 unix:0:vopstats_autofs:nvnevent 0 unix:0:vopstats_autofs:nwrite 0 unix:0:vopstats_autofs:read_bytes 0 unix:0:vopstats_autofs:readdir_bytes 544 unix:0:vopstats_autofs:snaptime 8464512.901986 unix:0:vopstats_autofs:write_bytes 0 unix:0:vopstats_lofs:class misc unix:0:vopstats_lofs:crtime 91.6391056 unix:0:vopstats_lofs:naccess 27273 unix:0:vopstats_lofs:naddmap 0 unix:0:vopstats_lofs:nclose 27273 unix:0:vopstats_lofs:ncmp 0 unix:0:vopstats_lofs:ncreate 0 unix:0:vopstats_lofs:ndelmap 0 unix:0:vopstats_lofs:ndispose 0 unix:0:vopstats_lofs:ndump 0 unix:0:vopstats_lofs:ndumpctl 0 unix:0:vopstats_lofs:nfid 0 unix:0:vopstats_lofs:nfrlock 0 unix:0:vopstats_lofs:nfsync 0 unix:0:vopstats_lofs:ngetattr 46702 unix:0:vopstats_lofs:ngetpage 0 unix:0:vopstats_lofs:ngetsecattr 0 unix:0:vopstats_lofs:ninactive 0 unix:0:vopstats_lofs:nioctl 6 
unix:0:vopstats_lofs:nlink 0 unix:0:vopstats_lofs:nlookup 0 unix:0:vopstats_lofs:nmap 27267 unix:0:vopstats_lofs:nmkdir 0 unix:0:vopstats_lofs:nopen 27273 unix:0:vopstats_lofs:npageio 0 unix:0:vopstats_lofs:npathconf 0 unix:0:vopstats_lofs:npoll 0 unix:0:vopstats_lofs:nputpage 0 unix:0:vopstats_lofs:nread 39 unix:0:vopstats_lofs:nreaddir 0 unix:0:vopstats_lofs:nreadlink 0 unix:0:vopstats_lofs:nrealvp 0 unix:0:vopstats_lofs:nremove 0 unix:0:vopstats_lofs:nrename 0 unix:0:vopstats_lofs:nrmdir 0 unix:0:vopstats_lofs:nrwlock 39 unix:0:vopstats_lofs:nrwunlock 39 unix:0:vopstats_lofs:nseek 39 unix:0:vopstats_lofs:nsetattr 0 unix:0:vopstats_lofs:nsetfl 0 unix:0:vopstats_lofs:nsetsecattr 0 unix:0:vopstats_lofs:nshrlock 0 unix:0:vopstats_lofs:nspace 0 unix:0:vopstats_lofs:nsymlink 0 unix:0:vopstats_lofs:nvnevent 0 unix:0:vopstats_lofs:nwrite 0 unix:0:vopstats_lofs:read_bytes 164550 unix:0:vopstats_lofs:readdir_bytes 0 unix:0:vopstats_lofs:snaptime 8464512.9044146 unix:0:vopstats_lofs:write_bytes 0 unix:0:vopstats_mntfs:class misc unix:0:vopstats_mntfs:crtime 82.6854302 unix:0:vopstats_mntfs:naccess 332 unix:0:vopstats_mntfs:naddmap 0 unix:0:vopstats_mntfs:nclose 356 unix:0:vopstats_mntfs:ncmp 0 unix:0:vopstats_mntfs:ncreate 0 unix:0:vopstats_mntfs:ndelmap 0 unix:0:vopstats_mntfs:ndispose 0 unix:0:vopstats_mntfs:ndump 0 unix:0:vopstats_mntfs:ndumpctl 0 unix:0:vopstats_mntfs:nfid 0 unix:0:vopstats_mntfs:nfrlock 11 unix:0:vopstats_mntfs:nfsync 0 unix:0:vopstats_mntfs:ngetattr 133 unix:0:vopstats_mntfs:ngetpage 0 unix:0:vopstats_mntfs:ngetsecattr 0 unix:0:vopstats_mntfs:ninactive 331 unix:0:vopstats_mntfs:nioctl 6134 unix:0:vopstats_mntfs:nlink 0 unix:0:vopstats_mntfs:nlookup 0 unix:0:vopstats_mntfs:nmap 0 unix:0:vopstats_mntfs:nmkdir 0 unix:0:vopstats_mntfs:nopen 332 unix:0:vopstats_mntfs:npageio 0 unix:0:vopstats_mntfs:npathconf 0 unix:0:vopstats_mntfs:npoll 0 unix:0:vopstats_mntfs:nputpage 0 unix:0:vopstats_mntfs:nread 31 unix:0:vopstats_mntfs:nreaddir 0 unix:0:vopstats_mntfs:nreadlink 0 unix:0:vopstats_mntfs:nrealvp 0 unix:0:vopstats_mntfs:nremove 0 unix:0:vopstats_mntfs:nrename 0 unix:0:vopstats_mntfs:nrmdir 0 unix:0:vopstats_mntfs:nrwlock 31 unix:0:vopstats_mntfs:nrwunlock 31 unix:0:vopstats_mntfs:nseek 841 unix:0:vopstats_mntfs:nsetattr 0 unix:0:vopstats_mntfs:nsetfl 0 unix:0:vopstats_mntfs:nsetsecattr 0 unix:0:vopstats_mntfs:nshrlock 0 unix:0:vopstats_mntfs:nspace 0 unix:0:vopstats_mntfs:nsymlink 0 unix:0:vopstats_mntfs:nvnevent 0 unix:0:vopstats_mntfs:nwrite 0 unix:0:vopstats_mntfs:read_bytes 5043 unix:0:vopstats_mntfs:readdir_bytes 0 unix:0:vopstats_mntfs:snaptime 8464512.9066844 unix:0:vopstats_mntfs:write_bytes 0 unix:0:vopstats_nfs:class misc unix:0:vopstats_nfs:crtime 114.153935 unix:0:vopstats_nfs:naccess 0 unix:0:vopstats_nfs:naddmap 0 unix:0:vopstats_nfs:nclose 0 unix:0:vopstats_nfs:ncmp 0 unix:0:vopstats_nfs:ncreate 0 unix:0:vopstats_nfs:ndelmap 0 unix:0:vopstats_nfs:ndispose 0 unix:0:vopstats_nfs:ndump 0 unix:0:vopstats_nfs:ndumpctl 0 unix:0:vopstats_nfs:nfid 0 unix:0:vopstats_nfs:nfrlock 0 unix:0:vopstats_nfs:nfsync 0 unix:0:vopstats_nfs:ngetattr 22 unix:0:vopstats_nfs:ngetpage 0 unix:0:vopstats_nfs:ngetsecattr 2 unix:0:vopstats_nfs:ninactive 25 unix:0:vopstats_nfs:nioctl 0 unix:0:vopstats_nfs:nlink 0 unix:0:vopstats_nfs:nlookup 0 unix:0:vopstats_nfs:nmap 0 unix:0:vopstats_nfs:nmkdir 0 unix:0:vopstats_nfs:nopen 0 unix:0:vopstats_nfs:npageio 0 unix:0:vopstats_nfs:npathconf 1 unix:0:vopstats_nfs:npoll 0 unix:0:vopstats_nfs:nputpage 0 unix:0:vopstats_nfs:nread 0 
unix:0:vopstats_nfs:nreaddir 0 unix:0:vopstats_nfs:nreadlink 0 unix:0:vopstats_nfs:nrealvp 0 unix:0:vopstats_nfs:nremove 0 unix:0:vopstats_nfs:nrename 0 unix:0:vopstats_nfs:nrmdir 0 unix:0:vopstats_nfs:nrwlock 0 unix:0:vopstats_nfs:nrwunlock 0 unix:0:vopstats_nfs:nseek 0 unix:0:vopstats_nfs:nsetattr 0 unix:0:vopstats_nfs:nsetfl 0 unix:0:vopstats_nfs:nsetsecattr 0 unix:0:vopstats_nfs:nshrlock 0 unix:0:vopstats_nfs:nspace 0 unix:0:vopstats_nfs:nsymlink 0 unix:0:vopstats_nfs:nvnevent 0 unix:0:vopstats_nfs:nwrite 0 unix:0:vopstats_nfs:read_bytes 0 unix:0:vopstats_nfs:readdir_bytes 0 unix:0:vopstats_nfs:snaptime 8464512.909032 unix:0:vopstats_nfs:write_bytes 0 unix:0:vopstats_nfs3:class misc unix:0:vopstats_nfs3:crtime 114.1540814 unix:0:vopstats_nfs3:naccess 65863 unix:0:vopstats_nfs3:naddmap 16556 unix:0:vopstats_nfs3:nclose 129574 unix:0:vopstats_nfs3:ncmp 2892 unix:0:vopstats_nfs3:ncreate 1374 unix:0:vopstats_nfs3:ndelmap 33112 unix:0:vopstats_nfs3:ndispose 13447 unix:0:vopstats_nfs3:ndump 0 unix:0:vopstats_nfs3:ndumpctl 0 unix:0:vopstats_nfs3:nfid 0 unix:0:vopstats_nfs3:nfrlock 25 unix:0:vopstats_nfs3:nfsync 1 unix:0:vopstats_nfs3:ngetattr 270864 unix:0:vopstats_nfs3:ngetpage 29401 unix:0:vopstats_nfs3:ngetsecattr 2408 unix:0:vopstats_nfs3:ninactive 44136 unix:0:vopstats_nfs3:nioctl 27188 unix:0:vopstats_nfs3:nlink 22 unix:0:vopstats_nfs3:nlookup 1079421 unix:0:vopstats_nfs3:nmap 16539 unix:0:vopstats_nfs3:nmkdir 13 unix:0:vopstats_nfs3:nopen 62905 unix:0:vopstats_nfs3:npageio 0 unix:0:vopstats_nfs3:npathconf 434 unix:0:vopstats_nfs3:npoll 0 unix:0:vopstats_nfs3:nputpage 826 unix:0:vopstats_nfs3:nread 73309 unix:0:vopstats_nfs3:nreaddir 28308 unix:0:vopstats_nfs3:nreadlink 2732 unix:0:vopstats_nfs3:nrealvp 5350 unix:0:vopstats_nfs3:nremove 433 unix:0:vopstats_nfs3:nrename 58 unix:0:vopstats_nfs3:nrmdir 15 unix:0:vopstats_nfs3:nrwlock 858658 unix:0:vopstats_nfs3:nrwunlock 858658 unix:0:vopstats_nfs3:nseek 34443 unix:0:vopstats_nfs3:nsetattr 40 unix:0:vopstats_nfs3:nsetfl 0 unix:0:vopstats_nfs3:nsetsecattr 0 unix:0:vopstats_nfs3:nshrlock 0 unix:0:vopstats_nfs3:nspace 65 unix:0:vopstats_nfs3:nsymlink 335 unix:0:vopstats_nfs3:nvnevent 0 unix:0:vopstats_nfs3:nwrite 757041 unix:0:vopstats_nfs3:read_bytes 165662294 unix:0:vopstats_nfs3:readdir_bytes 8980152 unix:0:vopstats_nfs3:snaptime 8464512.9114312 unix:0:vopstats_nfs3:write_bytes 25556289 unix:0:vopstats_nfs4:class misc unix:0:vopstats_nfs4:crtime 114.1542176 unix:0:vopstats_nfs4:naccess 0 unix:0:vopstats_nfs4:naddmap 0 unix:0:vopstats_nfs4:nclose 0 unix:0:vopstats_nfs4:ncmp 0 unix:0:vopstats_nfs4:ncreate 0 unix:0:vopstats_nfs4:ndelmap 0 unix:0:vopstats_nfs4:ndispose 0 unix:0:vopstats_nfs4:ndump 0 unix:0:vopstats_nfs4:ndumpctl 0 unix:0:vopstats_nfs4:nfid 0 unix:0:vopstats_nfs4:nfrlock 0 unix:0:vopstats_nfs4:nfsync 0 unix:0:vopstats_nfs4:ngetattr 0 unix:0:vopstats_nfs4:ngetpage 0 unix:0:vopstats_nfs4:ngetsecattr 0 unix:0:vopstats_nfs4:ninactive 0 unix:0:vopstats_nfs4:nioctl 0 unix:0:vopstats_nfs4:nlink 0 unix:0:vopstats_nfs4:nlookup 0 unix:0:vopstats_nfs4:nmap 0 unix:0:vopstats_nfs4:nmkdir 0 unix:0:vopstats_nfs4:nopen 0 unix:0:vopstats_nfs4:npageio 0 unix:0:vopstats_nfs4:npathconf 0 unix:0:vopstats_nfs4:npoll 0 unix:0:vopstats_nfs4:nputpage 0 unix:0:vopstats_nfs4:nread 0 unix:0:vopstats_nfs4:nreaddir 0 unix:0:vopstats_nfs4:nreadlink 0 unix:0:vopstats_nfs4:nrealvp 0 unix:0:vopstats_nfs4:nremove 0 unix:0:vopstats_nfs4:nrename 0 unix:0:vopstats_nfs4:nrmdir 0 unix:0:vopstats_nfs4:nrwlock 0 unix:0:vopstats_nfs4:nrwunlock 0 
unix:0:vopstats_nfs4:nseek 0 unix:0:vopstats_nfs4:nsetattr 0 unix:0:vopstats_nfs4:nsetfl 0 unix:0:vopstats_nfs4:nsetsecattr 0 unix:0:vopstats_nfs4:nshrlock 0 unix:0:vopstats_nfs4:nspace 0 unix:0:vopstats_nfs4:nsymlink 0 unix:0:vopstats_nfs4:nvnevent 0 unix:0:vopstats_nfs4:nwrite 0 unix:0:vopstats_nfs4:read_bytes 0 unix:0:vopstats_nfs4:readdir_bytes 0 unix:0:vopstats_nfs4:snaptime 8464512.9137366 unix:0:vopstats_nfs4:write_bytes 0 unix:0:vopstats_proc:class misc unix:0:vopstats_proc:crtime 82.644069 unix:0:vopstats_proc:naccess 303092 unix:0:vopstats_proc:naddmap 0 unix:0:vopstats_proc:nclose 303767 unix:0:vopstats_proc:ncmp 100 unix:0:vopstats_proc:ncreate 0 unix:0:vopstats_proc:ndelmap 0 unix:0:vopstats_proc:ndispose 0 unix:0:vopstats_proc:ndump 0 unix:0:vopstats_proc:ndumpctl 0 unix:0:vopstats_proc:nfid 0 unix:0:vopstats_proc:nfrlock 0 unix:0:vopstats_proc:nfsync 0 unix:0:vopstats_proc:ngetattr 12207 unix:0:vopstats_proc:ngetpage 0 unix:0:vopstats_proc:ngetsecattr 52 unix:0:vopstats_proc:ninactive 614378 unix:0:vopstats_proc:nioctl 3277 unix:0:vopstats_proc:nlink 0 unix:0:vopstats_proc:nlookup 615375 unix:0:vopstats_proc:nmap 0 unix:0:vopstats_proc:nmkdir 0 unix:0:vopstats_proc:nopen 303088 unix:0:vopstats_proc:npageio 0 unix:0:vopstats_proc:npathconf 26 unix:0:vopstats_proc:npoll 21376 unix:0:vopstats_proc:nputpage 0 unix:0:vopstats_proc:nread 19467 unix:0:vopstats_proc:nreaddir 1202 unix:0:vopstats_proc:nreadlink 8897 unix:0:vopstats_proc:nrealvp 0 unix:0:vopstats_proc:nremove 0 unix:0:vopstats_proc:nrename 0 unix:0:vopstats_proc:nrmdir 0 unix:0:vopstats_proc:nrwlock 20948 unix:0:vopstats_proc:nrwunlock 20949 unix:0:vopstats_proc:nseek 2069 unix:0:vopstats_proc:nsetattr 0 unix:0:vopstats_proc:nsetfl 0 unix:0:vopstats_proc:nsetsecattr 0 unix:0:vopstats_proc:nshrlock 0 unix:0:vopstats_proc:nspace 0 unix:0:vopstats_proc:nsymlink 0 unix:0:vopstats_proc:nvnevent 0 unix:0:vopstats_proc:nwrite 280 unix:0:vopstats_proc:read_bytes 8809096 unix:0:vopstats_proc:readdir_bytes 398816 unix:0:vopstats_proc:snaptime 8464512.916199 unix:0:vopstats_proc:write_bytes 5816 unix:0:vopstats_tmpfs:class misc unix:0:vopstats_tmpfs:crtime 82.7259592 unix:0:vopstats_tmpfs:naccess 6357 unix:0:vopstats_tmpfs:naddmap 29 unix:0:vopstats_tmpfs:nclose 68792 unix:0:vopstats_tmpfs:ncmp 120811 unix:0:vopstats_tmpfs:ncreate 62548 unix:0:vopstats_tmpfs:ndelmap 29 unix:0:vopstats_tmpfs:ndispose 64233 unix:0:vopstats_tmpfs:ndump 0 unix:0:vopstats_tmpfs:ndumpctl 0 unix:0:vopstats_tmpfs:nfid 0 unix:0:vopstats_tmpfs:nfrlock 17949 unix:0:vopstats_tmpfs:nfsync 85 unix:0:vopstats_tmpfs:ngetattr 28289 unix:0:vopstats_tmpfs:ngetpage 202 unix:0:vopstats_tmpfs:ngetsecattr 63490 unix:0:vopstats_tmpfs:ninactive 34617873 unix:0:vopstats_tmpfs:nioctl 3398 unix:0:vopstats_tmpfs:nlink 3 unix:0:vopstats_tmpfs:nlookup 626744 unix:0:vopstats_tmpfs:nmap 13 unix:0:vopstats_tmpfs:nmkdir 74 unix:0:vopstats_tmpfs:nopen 66865 unix:0:vopstats_tmpfs:npageio 0 unix:0:vopstats_tmpfs:npathconf 130 unix:0:vopstats_tmpfs:npoll 0 unix:0:vopstats_tmpfs:nputpage 49584297 unix:0:vopstats_tmpfs:nread 60393 unix:0:vopstats_tmpfs:nreaddir 102 unix:0:vopstats_tmpfs:nreadlink 42 unix:0:vopstats_tmpfs:nrealvp 299899 unix:0:vopstats_tmpfs:nremove 4163 unix:0:vopstats_tmpfs:nrename 57761 unix:0:vopstats_tmpfs:nrmdir 0 unix:0:vopstats_tmpfs:nrwlock 172530 unix:0:vopstats_tmpfs:nrwunlock 172530 unix:0:vopstats_tmpfs:nseek 88331 unix:0:vopstats_tmpfs:nsetattr 473 unix:0:vopstats_tmpfs:nsetfl 0 unix:0:vopstats_tmpfs:nsetsecattr 0 unix:0:vopstats_tmpfs:nshrlock 0 
unix:0:vopstats_tmpfs:nspace 37 unix:0:vopstats_tmpfs:nsymlink 20 unix:0:vopstats_tmpfs:nvnevent 0 unix:0:vopstats_tmpfs:nwrite 112035 unix:0:vopstats_tmpfs:read_bytes 108189567 unix:0:vopstats_tmpfs:readdir_bytes 15544 unix:0:vopstats_tmpfs:snaptime 8464512.9184972 unix:0:vopstats_tmpfs:write_bytes 82500166 unix:0:vopstats_ufs:class misc unix:0:vopstats_ufs:crtime 69.2908306 unix:0:vopstats_ufs:naccess 5316052 unix:0:vopstats_ufs:naddmap 5244695 unix:0:vopstats_ufs:nclose 1912597 unix:0:vopstats_ufs:ncmp 17152564 unix:0:vopstats_ufs:ncreate 131365 unix:0:vopstats_ufs:ndelmap 5272092 unix:0:vopstats_ufs:ndispose 1078610 unix:0:vopstats_ufs:ndump 0 unix:0:vopstats_ufs:ndumpctl 0 unix:0:vopstats_ufs:nfid 0 unix:0:vopstats_ufs:nfrlock 7714 unix:0:vopstats_ufs:nfsync 459 unix:0:vopstats_ufs:ngetattr 19146817 unix:0:vopstats_ufs:ngetpage 13857306 unix:0:vopstats_ufs:ngetsecattr 135684 unix:0:vopstats_ufs:ninactive 132376 unix:0:vopstats_ufs:nioctl 33916029 unix:0:vopstats_ufs:nlink 0 unix:0:vopstats_ufs:nlookup 174923382 unix:0:vopstats_ufs:nmap 487501 unix:0:vopstats_ufs:nmkdir 106 unix:0:vopstats_ufs:nopen 1602701 unix:0:vopstats_ufs:npageio 0 unix:0:vopstats_ufs:npathconf 2222 unix:0:vopstats_ufs:npoll 102366160 unix:0:vopstats_ufs:nputpage 37425 unix:0:vopstats_ufs:nread 22630635 unix:0:vopstats_ufs:nreaddir 1564061 unix:0:vopstats_ufs:nreadlink 1946189 unix:0:vopstats_ufs:nrealvp 454551 unix:0:vopstats_ufs:nremove 2231 unix:0:vopstats_ufs:nrename 2491 unix:0:vopstats_ufs:nrmdir 89 unix:0:vopstats_ufs:nrwlock 42131578 unix:0:vopstats_ufs:nrwunlock 42131776 unix:0:vopstats_ufs:nseek 305813 unix:0:vopstats_ufs:nsetattr 1185 unix:0:vopstats_ufs:nsetfl 195603 unix:0:vopstats_ufs:nsetsecattr 110 unix:0:vopstats_ufs:nshrlock 0 unix:0:vopstats_ufs:nspace 156 unix:0:vopstats_ufs:nsymlink 16 unix:0:vopstats_ufs:nvnevent 0 unix:0:vopstats_ufs:nwrite 17937017 unix:0:vopstats_ufs:read_bytes 2162078075 unix:0:vopstats_ufs:readdir_bytes 329050032 unix:0:vopstats_ufs:snaptime 8464512.920942 unix:0:vopstats_ufs:write_bytes 1463466205 unix:0:vopstats_zfs:class misc unix:0:vopstats_zfs:crtime 97.1645318 unix:0:vopstats_zfs:naccess 0 unix:0:vopstats_zfs:naddmap 0 unix:0:vopstats_zfs:nclose 0 unix:0:vopstats_zfs:ncmp 0 unix:0:vopstats_zfs:ncreate 0 unix:0:vopstats_zfs:ndelmap 0 unix:0:vopstats_zfs:ndispose 0 unix:0:vopstats_zfs:ndump 0 unix:0:vopstats_zfs:ndumpctl 0 unix:0:vopstats_zfs:nfid 0 unix:0:vopstats_zfs:nfrlock 0 unix:0:vopstats_zfs:nfsync 0 unix:0:vopstats_zfs:ngetattr 0 unix:0:vopstats_zfs:ngetpage 0 unix:0:vopstats_zfs:ngetsecattr 0 unix:0:vopstats_zfs:ninactive 0 unix:0:vopstats_zfs:nioctl 0 unix:0:vopstats_zfs:nlink 0 unix:0:vopstats_zfs:nlookup 0 unix:0:vopstats_zfs:nmap 0 unix:0:vopstats_zfs:nmkdir 0 unix:0:vopstats_zfs:nopen 0 unix:0:vopstats_zfs:npageio 0 unix:0:vopstats_zfs:npathconf 0 unix:0:vopstats_zfs:npoll 0 unix:0:vopstats_zfs:nputpage 0 unix:0:vopstats_zfs:nread 0 unix:0:vopstats_zfs:nreaddir 0 unix:0:vopstats_zfs:nreadlink 0 unix:0:vopstats_zfs:nrealvp 0 unix:0:vopstats_zfs:nremove 0 unix:0:vopstats_zfs:nrename 0 unix:0:vopstats_zfs:nrmdir 0 unix:0:vopstats_zfs:nrwlock 0 unix:0:vopstats_zfs:nrwunlock 0 unix:0:vopstats_zfs:nseek 0 unix:0:vopstats_zfs:nsetattr 0 unix:0:vopstats_zfs:nsetfl 0 unix:0:vopstats_zfs:nsetsecattr 0 unix:0:vopstats_zfs:nshrlock 0 unix:0:vopstats_zfs:nspace 0 unix:0:vopstats_zfs:nsymlink 0 unix:0:vopstats_zfs:nvnevent 0 unix:0:vopstats_zfs:nwrite 0 unix:0:vopstats_zfs:read_bytes 0 unix:0:vopstats_zfs:readdir_bytes 0 unix:0:vopstats_zfs:snaptime 8464512.923295 
unix:0:vopstats_zfs:write_bytes 0 unix:0:vsk_anchor_cache:align 8 unix:0:vsk_anchor_cache:alloc 67 unix:0:vsk_anchor_cache:alloc_fail 0 unix:0:vsk_anchor_cache:buf_avail 191 unix:0:vsk_anchor_cache:buf_constructed 6 unix:0:vsk_anchor_cache:buf_inuse 12 unix:0:vsk_anchor_cache:buf_max 203 unix:0:vsk_anchor_cache:buf_size 40 unix:0:vsk_anchor_cache:buf_total 203 unix:0:vsk_anchor_cache:chunk_size 40 unix:0:vsk_anchor_cache:class kmem_cache unix:0:vsk_anchor_cache:crtime 69.2821278 unix:0:vsk_anchor_cache:depot_alloc 0 unix:0:vsk_anchor_cache:depot_contention 0 unix:0:vsk_anchor_cache:depot_free 2 unix:0:vsk_anchor_cache:empty_magazines 0 unix:0:vsk_anchor_cache:free 57 unix:0:vsk_anchor_cache:full_magazines 0 unix:0:vsk_anchor_cache:hash_lookup_depth 0 unix:0:vsk_anchor_cache:hash_rescale 0 unix:0:vsk_anchor_cache:hash_size 0 unix:0:vsk_anchor_cache:magazine_size 15 unix:0:vsk_anchor_cache:slab_alloc 18 unix:0:vsk_anchor_cache:slab_create 1 unix:0:vsk_anchor_cache:slab_destroy 0 unix:0:vsk_anchor_cache:slab_free 0 unix:0:vsk_anchor_cache:slab_size 8192 unix:0:vsk_anchor_cache:snaptime 8464512.9256316 unix:0:vsk_anchor_cache:vmem_source 23 unix:0:wbuf32_cache:align 8 unix:0:wbuf32_cache:alloc 501639 unix:0:wbuf32_cache:alloc_fail 0 unix:0:wbuf32_cache:buf_avail 112 unix:0:wbuf32_cache:buf_constructed 104 unix:0:wbuf32_cache:buf_inuse 173 unix:0:wbuf32_cache:buf_max 285 unix:0:wbuf32_cache:buf_size 512 unix:0:wbuf32_cache:buf_total 285 unix:0:wbuf32_cache:chunk_size 512 unix:0:wbuf32_cache:class kmem_cache unix:0:wbuf32_cache:crtime 69.2620048 unix:0:wbuf32_cache:depot_alloc 4436 unix:0:wbuf32_cache:depot_contention 0 unix:0:wbuf32_cache:depot_free 4473 unix:0:wbuf32_cache:empty_magazines 5 unix:0:wbuf32_cache:free 501503 unix:0:wbuf32_cache:full_magazines 34 unix:0:wbuf32_cache:hash_lookup_depth 0 unix:0:wbuf32_cache:hash_rescale 0 unix:0:wbuf32_cache:hash_size 0 unix:0:wbuf32_cache:magazine_size 3 unix:0:wbuf32_cache:slab_alloc 277 unix:0:wbuf32_cache:slab_create 19 unix:0:wbuf32_cache:slab_destroy 0 unix:0:wbuf32_cache:slab_free 0 unix:0:wbuf32_cache:slab_size 8192 unix:0:wbuf32_cache:snaptime 8464512.9270918 unix:0:wbuf32_cache:vmem_source 8 unix:0:wbuf64_cache:align 8 unix:0:wbuf64_cache:alloc 9735 unix:0:wbuf64_cache:alloc_fail 0 unix:0:wbuf64_cache:buf_avail 11 unix:0:wbuf64_cache:buf_constructed 9 unix:0:wbuf64_cache:buf_inuse 3 unix:0:wbuf64_cache:buf_max 14 unix:0:wbuf64_cache:buf_size 1024 unix:0:wbuf64_cache:buf_total 14 unix:0:wbuf64_cache:chunk_size 1024 unix:0:wbuf64_cache:class kmem_cache unix:0:wbuf64_cache:crtime 69.262018 unix:0:wbuf64_cache:depot_alloc 1824 unix:0:wbuf64_cache:depot_contention 0 unix:0:wbuf64_cache:depot_free 1829 unix:0:wbuf64_cache:empty_magazines 0 unix:0:wbuf64_cache:free 9737 unix:0:wbuf64_cache:full_magazines 1 unix:0:wbuf64_cache:hash_lookup_depth 0 unix:0:wbuf64_cache:hash_rescale 0 unix:0:wbuf64_cache:hash_size 0 unix:0:wbuf64_cache:magazine_size 3 unix:0:wbuf64_cache:slab_alloc 12 unix:0:wbuf64_cache:slab_create 2 unix:0:wbuf64_cache:slab_destroy 0 unix:0:wbuf64_cache:slab_free 0 unix:0:wbuf64_cache:slab_size 8192 unix:0:wbuf64_cache:snaptime 8464512.9284734 unix:0:wbuf64_cache:vmem_source 8 unix:0:xcalppm_nexus_enum_tq:class taskq unix:0:xcalppm_nexus_enum_tq:crtime 69.2958416 unix:0:xcalppm_nexus_enum_tq:executed 0 unix:0:xcalppm_nexus_enum_tq:maxtasks 0 unix:0:xcalppm_nexus_enum_tq:nactive 1 unix:0:xcalppm_nexus_enum_tq:nalloc 0 unix:0:xcalppm_nexus_enum_tq:priority 60 unix:0:xcalppm_nexus_enum_tq:snaptime 8464512.9298474 
unix:0:xcalppm_nexus_enum_tq:tasks 0 unix:0:xcalppm_nexus_enum_tq:threads 1 unix:0:xcalppm_nexus_enum_tq:totaltime 0 unix:0:zfs_znode_cache:align 8 unix:0:zfs_znode_cache:alloc 0 unix:0:zfs_znode_cache:alloc_fail 0 unix:0:zfs_znode_cache:buf_avail 0 unix:0:zfs_znode_cache:buf_constructed 0 unix:0:zfs_znode_cache:buf_inuse 0 unix:0:zfs_znode_cache:buf_max 0 unix:0:zfs_znode_cache:buf_size 184 unix:0:zfs_znode_cache:buf_total 0 unix:0:zfs_znode_cache:chunk_size 184 unix:0:zfs_znode_cache:class kmem_cache unix:0:zfs_znode_cache:crtime 97.1643418 unix:0:zfs_znode_cache:depot_alloc 0 unix:0:zfs_znode_cache:depot_contention 0 unix:0:zfs_znode_cache:depot_free 0 unix:0:zfs_znode_cache:empty_magazines 0 unix:0:zfs_znode_cache:free 0 unix:0:zfs_znode_cache:full_magazines 0 unix:0:zfs_znode_cache:hash_lookup_depth 0 unix:0:zfs_znode_cache:hash_rescale 0 unix:0:zfs_znode_cache:hash_size 0 unix:0:zfs_znode_cache:magazine_size 7 unix:0:zfs_znode_cache:slab_alloc 0 unix:0:zfs_znode_cache:slab_create 0 unix:0:zfs_znode_cache:slab_destroy 0 unix:0:zfs_znode_cache:slab_free 0 unix:0:zfs_znode_cache:slab_size 8192 unix:0:zfs_znode_cache:snaptime 8464512.9306568 unix:0:zfs_znode_cache:vmem_source 23 unix:0:zil_lwb_cache:align 8 unix:0:zil_lwb_cache:alloc 0 unix:0:zil_lwb_cache:alloc_fail 0 unix:0:zil_lwb_cache:buf_avail 0 unix:0:zil_lwb_cache:buf_constructed 0 unix:0:zil_lwb_cache:buf_inuse 0 unix:0:zil_lwb_cache:buf_max 0 unix:0:zil_lwb_cache:buf_size 200 unix:0:zil_lwb_cache:buf_total 0 unix:0:zil_lwb_cache:chunk_size 200 unix:0:zil_lwb_cache:class kmem_cache unix:0:zil_lwb_cache:crtime 97.1395602 unix:0:zil_lwb_cache:depot_alloc 0 unix:0:zil_lwb_cache:depot_contention 0 unix:0:zil_lwb_cache:depot_free 0 unix:0:zil_lwb_cache:empty_magazines 0 unix:0:zil_lwb_cache:free 0 unix:0:zil_lwb_cache:full_magazines 0 unix:0:zil_lwb_cache:hash_lookup_depth 0 unix:0:zil_lwb_cache:hash_rescale 0 unix:0:zil_lwb_cache:hash_size 0 unix:0:zil_lwb_cache:magazine_size 7 unix:0:zil_lwb_cache:slab_alloc 0 unix:0:zil_lwb_cache:slab_create 0 unix:0:zil_lwb_cache:slab_destroy 0 unix:0:zil_lwb_cache:slab_free 0 unix:0:zil_lwb_cache:slab_size 8192 unix:0:zil_lwb_cache:snaptime 8464512.9320526 unix:0:zil_lwb_cache:vmem_source 23 unix:0:zio_buf_1024:align 512 unix:0:zio_buf_1024:alloc 0 unix:0:zio_buf_1024:alloc_fail 0 unix:0:zio_buf_1024:buf_avail 0 unix:0:zio_buf_1024:buf_constructed 0 unix:0:zio_buf_1024:buf_inuse 0 unix:0:zio_buf_1024:buf_max 0 unix:0:zio_buf_1024:buf_size 1024 unix:0:zio_buf_1024:buf_total 0 unix:0:zio_buf_1024:chunk_size 1024 unix:0:zio_buf_1024:class kmem_cache unix:0:zio_buf_1024:crtime 97.1043952 unix:0:zio_buf_1024:depot_alloc 0 unix:0:zio_buf_1024:depot_contention 0 unix:0:zio_buf_1024:depot_free 0 unix:0:zio_buf_1024:empty_magazines 0 unix:0:zio_buf_1024:free 0 unix:0:zio_buf_1024:full_magazines 0 unix:0:zio_buf_1024:hash_lookup_depth 0 unix:0:zio_buf_1024:hash_rescale 0 unix:0:zio_buf_1024:hash_size 64 unix:0:zio_buf_1024:magazine_size 3 unix:0:zio_buf_1024:slab_alloc 0 unix:0:zio_buf_1024:slab_create 0 unix:0:zio_buf_1024:slab_destroy 0 unix:0:zio_buf_1024:slab_free 0 unix:0:zio_buf_1024:slab_size 8192 unix:0:zio_buf_1024:snaptime 8464512.9334252 unix:0:zio_buf_1024:vmem_source 23 unix:0:zio_buf_10240:align 2048 unix:0:zio_buf_10240:alloc 0 unix:0:zio_buf_10240:alloc_fail 0 unix:0:zio_buf_10240:buf_avail 0 unix:0:zio_buf_10240:buf_constructed 0 unix:0:zio_buf_10240:buf_inuse 0 unix:0:zio_buf_10240:buf_max 0 unix:0:zio_buf_10240:buf_size 10240 unix:0:zio_buf_10240:buf_total 0 
unix:0:zio_buf_10240:chunk_size 10240 unix:0:zio_buf_10240:class kmem_cache unix:0:zio_buf_10240:crtime 97.1050238 unix:0:zio_buf_10240:depot_alloc 0 unix:0:zio_buf_10240:depot_contention 0 unix:0:zio_buf_10240:depot_free 0 unix:0:zio_buf_10240:empty_magazines 0 unix:0:zio_buf_10240:free 0 unix:0:zio_buf_10240:full_magazines 0 unix:0:zio_buf_10240:hash_lookup_depth 0 unix:0:zio_buf_10240:hash_rescale 0 unix:0:zio_buf_10240:hash_size 64 unix:0:zio_buf_10240:magazine_size 1 unix:0:zio_buf_10240:slab_alloc 0 unix:0:zio_buf_10240:slab_create 0 unix:0:zio_buf_10240:slab_destroy 0 unix:0:zio_buf_10240:slab_free 0 unix:0:zio_buf_10240:slab_size 40960 unix:0:zio_buf_10240:snaptime 8464512.9348042 unix:0:zio_buf_10240:vmem_source 23 unix:0:zio_buf_106496:align 8192 unix:0:zio_buf_106496:alloc 0 unix:0:zio_buf_106496:alloc_fail 0 unix:0:zio_buf_106496:buf_avail 0 unix:0:zio_buf_106496:buf_constructed 0 unix:0:zio_buf_106496:buf_inuse 0 unix:0:zio_buf_106496:buf_max 0 unix:0:zio_buf_106496:buf_size 106496 unix:0:zio_buf_106496:buf_total 0 unix:0:zio_buf_106496:chunk_size 106496 unix:0:zio_buf_106496:class kmem_cache unix:0:zio_buf_106496:crtime 97.1057982 unix:0:zio_buf_106496:depot_alloc 0 unix:0:zio_buf_106496:depot_contention 0 unix:0:zio_buf_106496:depot_free 0 unix:0:zio_buf_106496:empty_magazines 0 unix:0:zio_buf_106496:free 0 unix:0:zio_buf_106496:full_magazines 0 unix:0:zio_buf_106496:hash_lookup_depth 0 unix:0:zio_buf_106496:hash_rescale 0 unix:0:zio_buf_106496:hash_size 64 unix:0:zio_buf_106496:magazine_size 1 unix:0:zio_buf_106496:slab_alloc 0 unix:0:zio_buf_106496:slab_create 0 unix:0:zio_buf_106496:slab_destroy 0 unix:0:zio_buf_106496:slab_free 0 unix:0:zio_buf_106496:slab_size 106496 unix:0:zio_buf_106496:snaptime 8464512.9362572 unix:0:zio_buf_106496:vmem_source 23 unix:0:zio_buf_114688:align 8192 unix:0:zio_buf_114688:alloc 0 unix:0:zio_buf_114688:alloc_fail 0 unix:0:zio_buf_114688:buf_avail 0 unix:0:zio_buf_114688:buf_constructed 0 unix:0:zio_buf_114688:buf_inuse 0 unix:0:zio_buf_114688:buf_max 0 unix:0:zio_buf_114688:buf_size 114688 unix:0:zio_buf_114688:buf_total 0 unix:0:zio_buf_114688:chunk_size 114688 unix:0:zio_buf_114688:class kmem_cache unix:0:zio_buf_114688:crtime 97.1058754 unix:0:zio_buf_114688:depot_alloc 0 unix:0:zio_buf_114688:depot_contention 0 unix:0:zio_buf_114688:depot_free 0 unix:0:zio_buf_114688:empty_magazines 0 unix:0:zio_buf_114688:free 0 unix:0:zio_buf_114688:full_magazines 0 unix:0:zio_buf_114688:hash_lookup_depth 0 unix:0:zio_buf_114688:hash_rescale 0 unix:0:zio_buf_114688:hash_size 64 unix:0:zio_buf_114688:magazine_size 1 unix:0:zio_buf_114688:slab_alloc 0 unix:0:zio_buf_114688:slab_create 0 unix:0:zio_buf_114688:slab_destroy 0 unix:0:zio_buf_114688:slab_free 0 unix:0:zio_buf_114688:slab_size 114688 unix:0:zio_buf_114688:snaptime 8464512.9377232 unix:0:zio_buf_114688:vmem_source 23 unix:0:zio_buf_12288:align 2048 unix:0:zio_buf_12288:alloc 0 unix:0:zio_buf_12288:alloc_fail 0 unix:0:zio_buf_12288:buf_avail 0 unix:0:zio_buf_12288:buf_constructed 0 unix:0:zio_buf_12288:buf_inuse 0 unix:0:zio_buf_12288:buf_max 0 unix:0:zio_buf_12288:buf_size 12288 unix:0:zio_buf_12288:buf_total 0 unix:0:zio_buf_12288:chunk_size 12288 unix:0:zio_buf_12288:class kmem_cache unix:0:zio_buf_12288:crtime 97.1050762 unix:0:zio_buf_12288:depot_alloc 0 unix:0:zio_buf_12288:depot_contention 0 unix:0:zio_buf_12288:depot_free 0 unix:0:zio_buf_12288:empty_magazines 0 unix:0:zio_buf_12288:free 0 unix:0:zio_buf_12288:full_magazines 0 unix:0:zio_buf_12288:hash_lookup_depth 0 
unix:0:zio_buf_12288:hash_rescale 0 unix:0:zio_buf_12288:hash_size 64 unix:0:zio_buf_12288:magazine_size 1 unix:0:zio_buf_12288:slab_alloc 0 unix:0:zio_buf_12288:slab_create 0 unix:0:zio_buf_12288:slab_destroy 0 unix:0:zio_buf_12288:slab_free 0 unix:0:zio_buf_12288:slab_size 24576 unix:0:zio_buf_12288:snaptime 8464512.9391068 unix:0:zio_buf_12288:vmem_source 23 unix:0:zio_buf_122880:align 8192 unix:0:zio_buf_122880:alloc 0 unix:0:zio_buf_122880:alloc_fail 0 unix:0:zio_buf_122880:buf_avail 0 unix:0:zio_buf_122880:buf_constructed 0 unix:0:zio_buf_122880:buf_inuse 0 unix:0:zio_buf_122880:buf_max 0 unix:0:zio_buf_122880:buf_size 122880 unix:0:zio_buf_122880:buf_total 0 unix:0:zio_buf_122880:chunk_size 122880 unix:0:zio_buf_122880:class kmem_cache unix:0:zio_buf_122880:crtime 97.1059178 unix:0:zio_buf_122880:depot_alloc 0 unix:0:zio_buf_122880:depot_contention 0 unix:0:zio_buf_122880:depot_free 0 unix:0:zio_buf_122880:empty_magazines 0 unix:0:zio_buf_122880:free 0 unix:0:zio_buf_122880:full_magazines 0 unix:0:zio_buf_122880:hash_lookup_depth 0 unix:0:zio_buf_122880:hash_rescale 0 unix:0:zio_buf_122880:hash_size 64 unix:0:zio_buf_122880:magazine_size 1 unix:0:zio_buf_122880:slab_alloc 0 unix:0:zio_buf_122880:slab_create 0 unix:0:zio_buf_122880:slab_destroy 0 unix:0:zio_buf_122880:slab_free 0 unix:0:zio_buf_122880:slab_size 122880 unix:0:zio_buf_122880:snaptime 8464512.9405468 unix:0:zio_buf_122880:vmem_source 23 unix:0:zio_buf_131072:align 8192 unix:0:zio_buf_131072:alloc 0 unix:0:zio_buf_131072:alloc_fail 0 unix:0:zio_buf_131072:buf_avail 0 unix:0:zio_buf_131072:buf_constructed 0 unix:0:zio_buf_131072:buf_inuse 0 unix:0:zio_buf_131072:buf_max 0 unix:0:zio_buf_131072:buf_size 131072 unix:0:zio_buf_131072:buf_total 0 unix:0:zio_buf_131072:chunk_size 131072 unix:0:zio_buf_131072:class kmem_cache unix:0:zio_buf_131072:crtime 97.105958 unix:0:zio_buf_131072:depot_alloc 0 unix:0:zio_buf_131072:depot_contention 0 unix:0:zio_buf_131072:depot_free 0 unix:0:zio_buf_131072:empty_magazines 0 unix:0:zio_buf_131072:free 0 unix:0:zio_buf_131072:full_magazines 0 unix:0:zio_buf_131072:hash_lookup_depth 0 unix:0:zio_buf_131072:hash_rescale 0 unix:0:zio_buf_131072:hash_size 64 unix:0:zio_buf_131072:magazine_size 1 unix:0:zio_buf_131072:slab_alloc 0 unix:0:zio_buf_131072:slab_create 0 unix:0:zio_buf_131072:slab_destroy 0 unix:0:zio_buf_131072:slab_free 0 unix:0:zio_buf_131072:slab_size 131072 unix:0:zio_buf_131072:snaptime 8464512.9420062 unix:0:zio_buf_131072:vmem_source 23 unix:0:zio_buf_14336:align 2048 unix:0:zio_buf_14336:alloc 0 unix:0:zio_buf_14336:alloc_fail 0 unix:0:zio_buf_14336:buf_avail 0 unix:0:zio_buf_14336:buf_constructed 0 unix:0:zio_buf_14336:buf_inuse 0 unix:0:zio_buf_14336:buf_max 0 unix:0:zio_buf_14336:buf_size 14336 unix:0:zio_buf_14336:buf_total 0 unix:0:zio_buf_14336:chunk_size 14336 unix:0:zio_buf_14336:class kmem_cache unix:0:zio_buf_14336:crtime 97.1051358 unix:0:zio_buf_14336:depot_alloc 0 unix:0:zio_buf_14336:depot_contention 0 unix:0:zio_buf_14336:depot_free 0 unix:0:zio_buf_14336:empty_magazines 0 unix:0:zio_buf_14336:free 0 unix:0:zio_buf_14336:full_magazines 0 unix:0:zio_buf_14336:hash_lookup_depth 0 unix:0:zio_buf_14336:hash_rescale 0 unix:0:zio_buf_14336:hash_size 64 unix:0:zio_buf_14336:magazine_size 1 unix:0:zio_buf_14336:slab_alloc 0 unix:0:zio_buf_14336:slab_create 0 unix:0:zio_buf_14336:slab_destroy 0 unix:0:zio_buf_14336:slab_free 0 unix:0:zio_buf_14336:slab_size 57344 unix:0:zio_buf_14336:snaptime 8464512.9433912 unix:0:zio_buf_14336:vmem_source 23 unix:0:zio_buf_1536:align 
512 unix:0:zio_buf_1536:alloc 0 unix:0:zio_buf_1536:alloc_fail 0 unix:0:zio_buf_1536:buf_avail 0 unix:0:zio_buf_1536:buf_constructed 0 unix:0:zio_buf_1536:buf_inuse 0 unix:0:zio_buf_1536:buf_max 0 unix:0:zio_buf_1536:buf_size 1536 unix:0:zio_buf_1536:buf_total 0 unix:0:zio_buf_1536:chunk_size 1536 unix:0:zio_buf_1536:class kmem_cache unix:0:zio_buf_1536:crtime 97.1044328 unix:0:zio_buf_1536:depot_alloc 0 unix:0:zio_buf_1536:depot_contention 0 unix:0:zio_buf_1536:depot_free 0 unix:0:zio_buf_1536:empty_magazines 0 unix:0:zio_buf_1536:free 0 unix:0:zio_buf_1536:full_magazines 0 unix:0:zio_buf_1536:hash_lookup_depth 0 unix:0:zio_buf_1536:hash_rescale 0 unix:0:zio_buf_1536:hash_size 64 unix:0:zio_buf_1536:magazine_size 3 unix:0:zio_buf_1536:slab_alloc 0 unix:0:zio_buf_1536:slab_create 0 unix:0:zio_buf_1536:slab_destroy 0 unix:0:zio_buf_1536:slab_free 0 unix:0:zio_buf_1536:slab_size 8192 unix:0:zio_buf_1536:snaptime 8464512.9447568 unix:0:zio_buf_1536:vmem_source 23 unix:0:zio_buf_16384:align 8192 unix:0:zio_buf_16384:alloc 0 unix:0:zio_buf_16384:alloc_fail 0 unix:0:zio_buf_16384:buf_avail 0 unix:0:zio_buf_16384:buf_constructed 0 unix:0:zio_buf_16384:buf_inuse 0 unix:0:zio_buf_16384:buf_max 0 unix:0:zio_buf_16384:buf_size 16384 unix:0:zio_buf_16384:buf_total 0 unix:0:zio_buf_16384:chunk_size 16384 unix:0:zio_buf_16384:class kmem_cache unix:0:zio_buf_16384:crtime 97.1051728 unix:0:zio_buf_16384:depot_alloc 0 unix:0:zio_buf_16384:depot_contention 0 unix:0:zio_buf_16384:depot_free 0 unix:0:zio_buf_16384:empty_magazines 0 unix:0:zio_buf_16384:free 0 unix:0:zio_buf_16384:full_magazines 0 unix:0:zio_buf_16384:hash_lookup_depth 0 unix:0:zio_buf_16384:hash_rescale 0 unix:0:zio_buf_16384:hash_size 64 unix:0:zio_buf_16384:magazine_size 1 unix:0:zio_buf_16384:slab_alloc 0 unix:0:zio_buf_16384:slab_create 0 unix:0:zio_buf_16384:slab_destroy 0 unix:0:zio_buf_16384:slab_free 0 unix:0:zio_buf_16384:slab_size 16384 unix:0:zio_buf_16384:snaptime 8464512.9461932 unix:0:zio_buf_16384:vmem_source 23 unix:0:zio_buf_2048:align 512 unix:0:zio_buf_2048:alloc 0 unix:0:zio_buf_2048:alloc_fail 0 unix:0:zio_buf_2048:buf_avail 0 unix:0:zio_buf_2048:buf_constructed 0 unix:0:zio_buf_2048:buf_inuse 0 unix:0:zio_buf_2048:buf_max 0 unix:0:zio_buf_2048:buf_size 2048 unix:0:zio_buf_2048:buf_total 0 unix:0:zio_buf_2048:chunk_size 2048 unix:0:zio_buf_2048:class kmem_cache unix:0:zio_buf_2048:crtime 97.10447 unix:0:zio_buf_2048:depot_alloc 0 unix:0:zio_buf_2048:depot_contention 0 unix:0:zio_buf_2048:depot_free 0 unix:0:zio_buf_2048:empty_magazines 0 unix:0:zio_buf_2048:free 0 unix:0:zio_buf_2048:full_magazines 0 unix:0:zio_buf_2048:hash_lookup_depth 0 unix:0:zio_buf_2048:hash_rescale 0 unix:0:zio_buf_2048:hash_size 64 unix:0:zio_buf_2048:magazine_size 3 unix:0:zio_buf_2048:slab_alloc 0 unix:0:zio_buf_2048:slab_create 0 unix:0:zio_buf_2048:slab_destroy 0 unix:0:zio_buf_2048:slab_free 0 unix:0:zio_buf_2048:slab_size 8192 unix:0:zio_buf_2048:snaptime 8464512.9475838 unix:0:zio_buf_2048:vmem_source 23 unix:0:zio_buf_20480:align 4096 unix:0:zio_buf_20480:alloc 0 unix:0:zio_buf_20480:alloc_fail 0 unix:0:zio_buf_20480:buf_avail 0 unix:0:zio_buf_20480:buf_constructed 0 unix:0:zio_buf_20480:buf_inuse 0 unix:0:zio_buf_20480:buf_max 0 unix:0:zio_buf_20480:buf_size 20480 unix:0:zio_buf_20480:buf_total 0 unix:0:zio_buf_20480:chunk_size 20480 unix:0:zio_buf_20480:class kmem_cache unix:0:zio_buf_20480:crtime 97.105209 unix:0:zio_buf_20480:depot_alloc 0 unix:0:zio_buf_20480:depot_contention 0 unix:0:zio_buf_20480:depot_free 0 
unix:0:zio_buf_20480:empty_magazines 0 unix:0:zio_buf_20480:free 0 unix:0:zio_buf_20480:full_magazines 0 unix:0:zio_buf_20480:hash_lookup_depth 0 unix:0:zio_buf_20480:hash_rescale 0 unix:0:zio_buf_20480:hash_size 64 unix:0:zio_buf_20480:magazine_size 1 unix:0:zio_buf_20480:slab_alloc 0 unix:0:zio_buf_20480:slab_create 0 unix:0:zio_buf_20480:slab_destroy 0 unix:0:zio_buf_20480:slab_free 0 unix:0:zio_buf_20480:slab_size 40960 unix:0:zio_buf_20480:snaptime 8464512.9490292 unix:0:zio_buf_20480:vmem_source 23 unix:0:zio_buf_24576:align 8192 unix:0:zio_buf_24576:alloc 0 unix:0:zio_buf_24576:alloc_fail 0 unix:0:zio_buf_24576:buf_avail 0 unix:0:zio_buf_24576:buf_constructed 0 unix:0:zio_buf_24576:buf_inuse 0 unix:0:zio_buf_24576:buf_max 0 unix:0:zio_buf_24576:buf_size 24576 unix:0:zio_buf_24576:buf_total 0 unix:0:zio_buf_24576:chunk_size 24576 unix:0:zio_buf_24576:class kmem_cache unix:0:zio_buf_24576:crtime 97.105267 unix:0:zio_buf_24576:depot_alloc 0 unix:0:zio_buf_24576:depot_contention 0 unix:0:zio_buf_24576:depot_free 0 unix:0:zio_buf_24576:empty_magazines 0 unix:0:zio_buf_24576:free 0 unix:0:zio_buf_24576:full_magazines 0 unix:0:zio_buf_24576:hash_lookup_depth 0 unix:0:zio_buf_24576:hash_rescale 0 unix:0:zio_buf_24576:hash_size 64 unix:0:zio_buf_24576:magazine_size 1 unix:0:zio_buf_24576:slab_alloc 0 unix:0:zio_buf_24576:slab_create 0 unix:0:zio_buf_24576:slab_destroy 0 unix:0:zio_buf_24576:slab_free 0 unix:0:zio_buf_24576:slab_size 24576 unix:0:zio_buf_24576:snaptime 8464512.9505156 unix:0:zio_buf_24576:vmem_source 23 unix:0:zio_buf_2560:align 512 unix:0:zio_buf_2560:alloc 0 unix:0:zio_buf_2560:alloc_fail 0 unix:0:zio_buf_2560:buf_avail 0 unix:0:zio_buf_2560:buf_constructed 0 unix:0:zio_buf_2560:buf_inuse 0 unix:0:zio_buf_2560:buf_max 0 unix:0:zio_buf_2560:buf_size 2560 unix:0:zio_buf_2560:buf_total 0 unix:0:zio_buf_2560:chunk_size 2560 unix:0:zio_buf_2560:class kmem_cache unix:0:zio_buf_2560:crtime 97.1045068 unix:0:zio_buf_2560:depot_alloc 0 unix:0:zio_buf_2560:depot_contention 0 unix:0:zio_buf_2560:depot_free 0 unix:0:zio_buf_2560:empty_magazines 0 unix:0:zio_buf_2560:free 0 unix:0:zio_buf_2560:full_magazines 0 unix:0:zio_buf_2560:hash_lookup_depth 0 unix:0:zio_buf_2560:hash_rescale 0 unix:0:zio_buf_2560:hash_size 64 unix:0:zio_buf_2560:magazine_size 3 unix:0:zio_buf_2560:slab_alloc 0 unix:0:zio_buf_2560:slab_create 0 unix:0:zio_buf_2560:slab_destroy 0 unix:0:zio_buf_2560:slab_free 0 unix:0:zio_buf_2560:slab_size 8192 unix:0:zio_buf_2560:snaptime 8464512.9519822 unix:0:zio_buf_2560:vmem_source 23 unix:0:zio_buf_28672:align 4096 unix:0:zio_buf_28672:alloc 0 unix:0:zio_buf_28672:alloc_fail 0 unix:0:zio_buf_28672:buf_avail 0 unix:0:zio_buf_28672:buf_constructed 0 unix:0:zio_buf_28672:buf_inuse 0 unix:0:zio_buf_28672:buf_max 0 unix:0:zio_buf_28672:buf_size 28672 unix:0:zio_buf_28672:buf_total 0 unix:0:zio_buf_28672:chunk_size 28672 unix:0:zio_buf_28672:class kmem_cache unix:0:zio_buf_28672:crtime 97.1053092 unix:0:zio_buf_28672:depot_alloc 0 unix:0:zio_buf_28672:depot_contention 0 unix:0:zio_buf_28672:depot_free 0 unix:0:zio_buf_28672:empty_magazines 0 unix:0:zio_buf_28672:free 0 unix:0:zio_buf_28672:full_magazines 0 unix:0:zio_buf_28672:hash_lookup_depth 0 unix:0:zio_buf_28672:hash_rescale 0 unix:0:zio_buf_28672:hash_size 64 unix:0:zio_buf_28672:magazine_size 1 unix:0:zio_buf_28672:slab_alloc 0 unix:0:zio_buf_28672:slab_create 0 unix:0:zio_buf_28672:slab_destroy 0 unix:0:zio_buf_28672:slab_free 0 unix:0:zio_buf_28672:slab_size 57344 unix:0:zio_buf_28672:snaptime 8464512.9533592 
unix:0:zio_buf_28672:vmem_source 23 unix:0:zio_buf_3072:align 512 unix:0:zio_buf_3072:alloc 0 unix:0:zio_buf_3072:alloc_fail 0 unix:0:zio_buf_3072:buf_avail 0 unix:0:zio_buf_3072:buf_constructed 0 unix:0:zio_buf_3072:buf_inuse 0 unix:0:zio_buf_3072:buf_max 0 unix:0:zio_buf_3072:buf_size 3072 unix:0:zio_buf_3072:buf_total 0 unix:0:zio_buf_3072:chunk_size 3072 unix:0:zio_buf_3072:class kmem_cache unix:0:zio_buf_3072:crtime 97.1046506 unix:0:zio_buf_3072:depot_alloc 0 unix:0:zio_buf_3072:depot_contention 0 unix:0:zio_buf_3072:depot_free 0 unix:0:zio_buf_3072:empty_magazines 0 unix:0:zio_buf_3072:free 0 unix:0:zio_buf_3072:full_magazines 0 unix:0:zio_buf_3072:hash_lookup_depth 0 unix:0:zio_buf_3072:hash_rescale 0 unix:0:zio_buf_3072:hash_size 64 unix:0:zio_buf_3072:magazine_size 3 unix:0:zio_buf_3072:slab_alloc 0 unix:0:zio_buf_3072:slab_create 0 unix:0:zio_buf_3072:slab_destroy 0 unix:0:zio_buf_3072:slab_free 0 unix:0:zio_buf_3072:slab_size 24576 unix:0:zio_buf_3072:snaptime 8464512.9547248 unix:0:zio_buf_3072:vmem_source 23 unix:0:zio_buf_32768:align 8192 unix:0:zio_buf_32768:alloc 0 unix:0:zio_buf_32768:alloc_fail 0 unix:0:zio_buf_32768:buf_avail 0 unix:0:zio_buf_32768:buf_constructed 0 unix:0:zio_buf_32768:buf_inuse 0 unix:0:zio_buf_32768:buf_max 0 unix:0:zio_buf_32768:buf_size 32768 unix:0:zio_buf_32768:buf_total 0 unix:0:zio_buf_32768:chunk_size 32768 unix:0:zio_buf_32768:class kmem_cache unix:0:zio_buf_32768:crtime 97.1053496 unix:0:zio_buf_32768:depot_alloc 0 unix:0:zio_buf_32768:depot_contention 0 unix:0:zio_buf_32768:depot_free 0 unix:0:zio_buf_32768:empty_magazines 0 unix:0:zio_buf_32768:free 0 unix:0:zio_buf_32768:full_magazines 0 unix:0:zio_buf_32768:hash_lookup_depth 0 unix:0:zio_buf_32768:hash_rescale 0 unix:0:zio_buf_32768:hash_size 64 unix:0:zio_buf_32768:magazine_size 1 unix:0:zio_buf_32768:slab_alloc 0 unix:0:zio_buf_32768:slab_create 0 unix:0:zio_buf_32768:slab_destroy 0 unix:0:zio_buf_32768:slab_free 0 unix:0:zio_buf_32768:slab_size 32768 unix:0:zio_buf_32768:snaptime 8464512.9561018 unix:0:zio_buf_32768:vmem_source 23 unix:0:zio_buf_3584:align 512 unix:0:zio_buf_3584:alloc 0 unix:0:zio_buf_3584:alloc_fail 0 unix:0:zio_buf_3584:buf_avail 0 unix:0:zio_buf_3584:buf_constructed 0 unix:0:zio_buf_3584:buf_inuse 0 unix:0:zio_buf_3584:buf_max 0 unix:0:zio_buf_3584:buf_size 3584 unix:0:zio_buf_3584:buf_total 0 unix:0:zio_buf_3584:chunk_size 3584 unix:0:zio_buf_3584:class kmem_cache unix:0:zio_buf_3584:crtime 97.1046942 unix:0:zio_buf_3584:depot_alloc 0 unix:0:zio_buf_3584:depot_contention 0 unix:0:zio_buf_3584:depot_free 0 unix:0:zio_buf_3584:empty_magazines 0 unix:0:zio_buf_3584:free 0 unix:0:zio_buf_3584:full_magazines 0 unix:0:zio_buf_3584:hash_lookup_depth 0 unix:0:zio_buf_3584:hash_rescale 0 unix:0:zio_buf_3584:hash_size 64 unix:0:zio_buf_3584:magazine_size 1 unix:0:zio_buf_3584:slab_alloc 0 unix:0:zio_buf_3584:slab_create 0 unix:0:zio_buf_3584:slab_destroy 0 unix:0:zio_buf_3584:slab_free 0 unix:0:zio_buf_3584:slab_size 32768 unix:0:zio_buf_3584:snaptime 8464512.9575512 unix:0:zio_buf_3584:vmem_source 23 unix:0:zio_buf_4096:align 1024 unix:0:zio_buf_4096:alloc 0 unix:0:zio_buf_4096:alloc_fail 0 unix:0:zio_buf_4096:buf_avail 0 unix:0:zio_buf_4096:buf_constructed 0 unix:0:zio_buf_4096:buf_inuse 0 unix:0:zio_buf_4096:buf_max 0 unix:0:zio_buf_4096:buf_size 4096 unix:0:zio_buf_4096:buf_total 0 unix:0:zio_buf_4096:chunk_size 4096 unix:0:zio_buf_4096:class kmem_cache unix:0:zio_buf_4096:crtime 97.104732 unix:0:zio_buf_4096:depot_alloc 0 unix:0:zio_buf_4096:depot_contention 0 
unix:0:zio_buf_4096:depot_free 0 unix:0:zio_buf_4096:empty_magazines 0 unix:0:zio_buf_4096:free 0 unix:0:zio_buf_4096:full_magazines 0 unix:0:zio_buf_4096:hash_lookup_depth 0 unix:0:zio_buf_4096:hash_rescale 0 unix:0:zio_buf_4096:hash_size 64 unix:0:zio_buf_4096:magazine_size 1 unix:0:zio_buf_4096:slab_alloc 0 unix:0:zio_buf_4096:slab_create 0 unix:0:zio_buf_4096:slab_destroy 0 unix:0:zio_buf_4096:slab_free 0 unix:0:zio_buf_4096:slab_size 8192 unix:0:zio_buf_4096:snaptime 8464512.9589208 unix:0:zio_buf_4096:vmem_source 23 unix:0:zio_buf_40960:align 8192 unix:0:zio_buf_40960:alloc 0 unix:0:zio_buf_40960:alloc_fail 0 unix:0:zio_buf_40960:buf_avail 0 unix:0:zio_buf_40960:buf_constructed 0 unix:0:zio_buf_40960:buf_inuse 0 unix:0:zio_buf_40960:buf_max 0 unix:0:zio_buf_40960:buf_size 40960 unix:0:zio_buf_40960:buf_total 0 unix:0:zio_buf_40960:chunk_size 40960 unix:0:zio_buf_40960:class kmem_cache unix:0:zio_buf_40960:crtime 97.1053892 unix:0:zio_buf_40960:depot_alloc 0 unix:0:zio_buf_40960:depot_contention 0 unix:0:zio_buf_40960:depot_free 0 unix:0:zio_buf_40960:empty_magazines 0 unix:0:zio_buf_40960:free 0 unix:0:zio_buf_40960:full_magazines 0 unix:0:zio_buf_40960:hash_lookup_depth 0 unix:0:zio_buf_40960:hash_rescale 0 unix:0:zio_buf_40960:hash_size 64 unix:0:zio_buf_40960:magazine_size 1 unix:0:zio_buf_40960:slab_alloc 0 unix:0:zio_buf_40960:slab_create 0 unix:0:zio_buf_40960:slab_destroy 0 unix:0:zio_buf_40960:slab_free 0 unix:0:zio_buf_40960:slab_size 40960 unix:0:zio_buf_40960:snaptime 8464512.9604638 unix:0:zio_buf_40960:vmem_source 23 unix:0:zio_buf_49152:align 8192 unix:0:zio_buf_49152:alloc 0 unix:0:zio_buf_49152:alloc_fail 0 unix:0:zio_buf_49152:buf_avail 0 unix:0:zio_buf_49152:buf_constructed 0 unix:0:zio_buf_49152:buf_inuse 0 unix:0:zio_buf_49152:buf_max 0 unix:0:zio_buf_49152:buf_size 49152 unix:0:zio_buf_49152:buf_total 0 unix:0:zio_buf_49152:chunk_size 49152 unix:0:zio_buf_49152:class kmem_cache unix:0:zio_buf_49152:crtime 97.105444 unix:0:zio_buf_49152:depot_alloc 0 unix:0:zio_buf_49152:depot_contention 0 unix:0:zio_buf_49152:depot_free 0 unix:0:zio_buf_49152:empty_magazines 0 unix:0:zio_buf_49152:free 0 unix:0:zio_buf_49152:full_magazines 0 unix:0:zio_buf_49152:hash_lookup_depth 0 unix:0:zio_buf_49152:hash_rescale 0 unix:0:zio_buf_49152:hash_size 64 unix:0:zio_buf_49152:magazine_size 1 unix:0:zio_buf_49152:slab_alloc 0 unix:0:zio_buf_49152:slab_create 0 unix:0:zio_buf_49152:slab_destroy 0 unix:0:zio_buf_49152:slab_free 0 unix:0:zio_buf_49152:slab_size 49152 unix:0:zio_buf_49152:snaptime 8464512.961907 unix:0:zio_buf_49152:vmem_source 23 unix:0:zio_buf_512:align 512 unix:0:zio_buf_512:alloc 0 unix:0:zio_buf_512:alloc_fail 0 unix:0:zio_buf_512:buf_avail 0 unix:0:zio_buf_512:buf_constructed 0 unix:0:zio_buf_512:buf_inuse 0 unix:0:zio_buf_512:buf_max 0 unix:0:zio_buf_512:buf_size 512 unix:0:zio_buf_512:buf_total 0 unix:0:zio_buf_512:chunk_size 512 unix:0:zio_buf_512:class kmem_cache unix:0:zio_buf_512:crtime 97.10434 unix:0:zio_buf_512:depot_alloc 0 unix:0:zio_buf_512:depot_contention 0 unix:0:zio_buf_512:depot_free 0 unix:0:zio_buf_512:empty_magazines 0 unix:0:zio_buf_512:free 0 unix:0:zio_buf_512:full_magazines 0 unix:0:zio_buf_512:hash_lookup_depth 0 unix:0:zio_buf_512:hash_rescale 0 unix:0:zio_buf_512:hash_size 0 unix:0:zio_buf_512:magazine_size 3 unix:0:zio_buf_512:slab_alloc 0 unix:0:zio_buf_512:slab_create 0 unix:0:zio_buf_512:slab_destroy 0 unix:0:zio_buf_512:slab_free 0 unix:0:zio_buf_512:slab_size 8192 unix:0:zio_buf_512:snaptime 8464512.963302 
unix:0:zio_buf_512:vmem_source 23 unix:0:zio_buf_5120:align 1024 unix:0:zio_buf_5120:alloc 0 unix:0:zio_buf_5120:alloc_fail 0 unix:0:zio_buf_5120:buf_avail 0 unix:0:zio_buf_5120:buf_constructed 0 unix:0:zio_buf_5120:buf_inuse 0 unix:0:zio_buf_5120:buf_max 0 unix:0:zio_buf_5120:buf_size 5120 unix:0:zio_buf_5120:buf_total 0 unix:0:zio_buf_5120:chunk_size 5120 unix:0:zio_buf_5120:class kmem_cache unix:0:zio_buf_5120:crtime 97.1047696 unix:0:zio_buf_5120:depot_alloc 0 unix:0:zio_buf_5120:depot_contention 0 unix:0:zio_buf_5120:depot_free 0 unix:0:zio_buf_5120:empty_magazines 0 unix:0:zio_buf_5120:free 0 unix:0:zio_buf_5120:full_magazines 0 unix:0:zio_buf_5120:hash_lookup_depth 0 unix:0:zio_buf_5120:hash_rescale 0 unix:0:zio_buf_5120:hash_size 64 unix:0:zio_buf_5120:magazine_size 1 unix:0:zio_buf_5120:slab_alloc 0 unix:0:zio_buf_5120:slab_create 0 unix:0:zio_buf_5120:slab_destroy 0 unix:0:zio_buf_5120:slab_free 0 unix:0:zio_buf_5120:slab_size 40960 unix:0:zio_buf_5120:snaptime 8464512.9646744 unix:0:zio_buf_5120:vmem_source 23 unix:0:zio_buf_57344:align 8192 unix:0:zio_buf_57344:alloc 0 unix:0:zio_buf_57344:alloc_fail 0 unix:0:zio_buf_57344:buf_avail 0 unix:0:zio_buf_57344:buf_constructed 0 unix:0:zio_buf_57344:buf_inuse 0 unix:0:zio_buf_57344:buf_max 0 unix:0:zio_buf_57344:buf_size 57344 unix:0:zio_buf_57344:buf_total 0 unix:0:zio_buf_57344:chunk_size 57344 unix:0:zio_buf_57344:class kmem_cache unix:0:zio_buf_57344:crtime 97.1054856 unix:0:zio_buf_57344:depot_alloc 0 unix:0:zio_buf_57344:depot_contention 0 unix:0:zio_buf_57344:depot_free 0 unix:0:zio_buf_57344:empty_magazines 0 unix:0:zio_buf_57344:free 0 unix:0:zio_buf_57344:full_magazines 0 unix:0:zio_buf_57344:hash_lookup_depth 0 unix:0:zio_buf_57344:hash_rescale 0 unix:0:zio_buf_57344:hash_size 64 unix:0:zio_buf_57344:magazine_size 1 unix:0:zio_buf_57344:slab_alloc 0 unix:0:zio_buf_57344:slab_create 0 unix:0:zio_buf_57344:slab_destroy 0 unix:0:zio_buf_57344:slab_free 0 unix:0:zio_buf_57344:slab_size 57344 unix:0:zio_buf_57344:snaptime 8464512.9660408 unix:0:zio_buf_57344:vmem_source 23 unix:0:zio_buf_6144:align 1024 unix:0:zio_buf_6144:alloc 0 unix:0:zio_buf_6144:alloc_fail 0 unix:0:zio_buf_6144:buf_avail 0 unix:0:zio_buf_6144:buf_constructed 0 unix:0:zio_buf_6144:buf_inuse 0 unix:0:zio_buf_6144:buf_max 0 unix:0:zio_buf_6144:buf_size 6144 unix:0:zio_buf_6144:buf_total 0 unix:0:zio_buf_6144:chunk_size 6144 unix:0:zio_buf_6144:class kmem_cache unix:0:zio_buf_6144:crtime 97.1048088 unix:0:zio_buf_6144:depot_alloc 0 unix:0:zio_buf_6144:depot_contention 0 unix:0:zio_buf_6144:depot_free 0 unix:0:zio_buf_6144:empty_magazines 0 unix:0:zio_buf_6144:free 0 unix:0:zio_buf_6144:full_magazines 0 unix:0:zio_buf_6144:hash_lookup_depth 0 unix:0:zio_buf_6144:hash_rescale 0 unix:0:zio_buf_6144:hash_size 64 unix:0:zio_buf_6144:magazine_size 1 unix:0:zio_buf_6144:slab_alloc 0 unix:0:zio_buf_6144:slab_create 0 unix:0:zio_buf_6144:slab_destroy 0 unix:0:zio_buf_6144:slab_free 0 unix:0:zio_buf_6144:slab_size 24576 unix:0:zio_buf_6144:snaptime 8464512.9674936 unix:0:zio_buf_6144:vmem_source 23 unix:0:zio_buf_65536:align 8192 unix:0:zio_buf_65536:alloc 0 unix:0:zio_buf_65536:alloc_fail 0 unix:0:zio_buf_65536:buf_avail 0 unix:0:zio_buf_65536:buf_constructed 0 unix:0:zio_buf_65536:buf_inuse 0 unix:0:zio_buf_65536:buf_max 0 unix:0:zio_buf_65536:buf_size 65536 unix:0:zio_buf_65536:buf_total 0 unix:0:zio_buf_65536:chunk_size 65536 unix:0:zio_buf_65536:class kmem_cache unix:0:zio_buf_65536:crtime 97.1055364 unix:0:zio_buf_65536:depot_alloc 0 
unix:0:zio_buf_65536:depot_contention 0 unix:0:zio_buf_65536:depot_free 0 unix:0:zio_buf_65536:empty_magazines 0 unix:0:zio_buf_65536:free 0 unix:0:zio_buf_65536:full_magazines 0 unix:0:zio_buf_65536:hash_lookup_depth 0 unix:0:zio_buf_65536:hash_rescale 0 unix:0:zio_buf_65536:hash_size 64 unix:0:zio_buf_65536:magazine_size 1 unix:0:zio_buf_65536:slab_alloc 0 unix:0:zio_buf_65536:slab_create 0 unix:0:zio_buf_65536:slab_destroy 0 unix:0:zio_buf_65536:slab_free 0 unix:0:zio_buf_65536:slab_size 65536 unix:0:zio_buf_65536:snaptime 8464512.9688722 unix:0:zio_buf_65536:vmem_source 23 unix:0:zio_buf_7168:align 1024 unix:0:zio_buf_7168:alloc 0 unix:0:zio_buf_7168:alloc_fail 0 unix:0:zio_buf_7168:buf_avail 0 unix:0:zio_buf_7168:buf_constructed 0 unix:0:zio_buf_7168:buf_inuse 0 unix:0:zio_buf_7168:buf_max 0 unix:0:zio_buf_7168:buf_size 7168 unix:0:zio_buf_7168:buf_total 0 unix:0:zio_buf_7168:chunk_size 7168 unix:0:zio_buf_7168:class kmem_cache unix:0:zio_buf_7168:crtime 97.104848 unix:0:zio_buf_7168:depot_alloc 0 unix:0:zio_buf_7168:depot_contention 0 unix:0:zio_buf_7168:depot_free 0 unix:0:zio_buf_7168:empty_magazines 0 unix:0:zio_buf_7168:free 0 unix:0:zio_buf_7168:full_magazines 0 unix:0:zio_buf_7168:hash_lookup_depth 0 unix:0:zio_buf_7168:hash_rescale 0 unix:0:zio_buf_7168:hash_size 64 unix:0:zio_buf_7168:magazine_size 1 unix:0:zio_buf_7168:slab_alloc 0 unix:0:zio_buf_7168:slab_create 0 unix:0:zio_buf_7168:slab_destroy 0 unix:0:zio_buf_7168:slab_free 0 unix:0:zio_buf_7168:slab_size 57344 unix:0:zio_buf_7168:snaptime 8464512.9703154 unix:0:zio_buf_7168:vmem_source 23 unix:0:zio_buf_73728:align 8192 unix:0:zio_buf_73728:alloc 0 unix:0:zio_buf_73728:alloc_fail 0 unix:0:zio_buf_73728:buf_avail 0 unix:0:zio_buf_73728:buf_constructed 0 unix:0:zio_buf_73728:buf_inuse 0 unix:0:zio_buf_73728:buf_max 0 unix:0:zio_buf_73728:buf_size 73728 unix:0:zio_buf_73728:buf_total 0 unix:0:zio_buf_73728:chunk_size 73728 unix:0:zio_buf_73728:class kmem_cache unix:0:zio_buf_73728:crtime 97.105577 unix:0:zio_buf_73728:depot_alloc 0 unix:0:zio_buf_73728:depot_contention 0 unix:0:zio_buf_73728:depot_free 0 unix:0:zio_buf_73728:empty_magazines 0 unix:0:zio_buf_73728:free 0 unix:0:zio_buf_73728:full_magazines 0 unix:0:zio_buf_73728:hash_lookup_depth 0 unix:0:zio_buf_73728:hash_rescale 0 unix:0:zio_buf_73728:hash_size 64 unix:0:zio_buf_73728:magazine_size 1 unix:0:zio_buf_73728:slab_alloc 0 unix:0:zio_buf_73728:slab_create 0 unix:0:zio_buf_73728:slab_destroy 0 unix:0:zio_buf_73728:slab_free 0 unix:0:zio_buf_73728:slab_size 73728 unix:0:zio_buf_73728:snaptime 8464512.9718398 unix:0:zio_buf_73728:vmem_source 23 unix:0:zio_buf_8192:align 8192 unix:0:zio_buf_8192:alloc 0 unix:0:zio_buf_8192:alloc_fail 0 unix:0:zio_buf_8192:buf_avail 0 unix:0:zio_buf_8192:buf_constructed 0 unix:0:zio_buf_8192:buf_inuse 0 unix:0:zio_buf_8192:buf_max 0 unix:0:zio_buf_8192:buf_size 8192 unix:0:zio_buf_8192:buf_total 0 unix:0:zio_buf_8192:chunk_size 8192 unix:0:zio_buf_8192:class kmem_cache unix:0:zio_buf_8192:crtime 97.104907 unix:0:zio_buf_8192:depot_alloc 0 unix:0:zio_buf_8192:depot_contention 0 unix:0:zio_buf_8192:depot_free 0 unix:0:zio_buf_8192:empty_magazines 0 unix:0:zio_buf_8192:free 0 unix:0:zio_buf_8192:full_magazines 0 unix:0:zio_buf_8192:hash_lookup_depth 0 unix:0:zio_buf_8192:hash_rescale 0 unix:0:zio_buf_8192:hash_size 64 unix:0:zio_buf_8192:magazine_size 1 unix:0:zio_buf_8192:slab_alloc 0 unix:0:zio_buf_8192:slab_create 0 unix:0:zio_buf_8192:slab_destroy 0 unix:0:zio_buf_8192:slab_free 0 unix:0:zio_buf_8192:slab_size 8192 
unix:0:zio_buf_8192:snaptime 8464512.973236 unix:0:zio_buf_8192:vmem_source 23 unix:0:zio_buf_81920:align 8192 unix:0:zio_buf_81920:alloc 0 unix:0:zio_buf_81920:alloc_fail 0 unix:0:zio_buf_81920:buf_avail 0 unix:0:zio_buf_81920:buf_constructed 0 unix:0:zio_buf_81920:buf_inuse 0 unix:0:zio_buf_81920:buf_max 0 unix:0:zio_buf_81920:buf_size 81920 unix:0:zio_buf_81920:buf_total 0 unix:0:zio_buf_81920:chunk_size 81920 unix:0:zio_buf_81920:class kmem_cache unix:0:zio_buf_81920:crtime 97.105618 unix:0:zio_buf_81920:depot_alloc 0 unix:0:zio_buf_81920:depot_contention 0 unix:0:zio_buf_81920:depot_free 0 unix:0:zio_buf_81920:empty_magazines 0 unix:0:zio_buf_81920:free 0 unix:0:zio_buf_81920:full_magazines 0 unix:0:zio_buf_81920:hash_lookup_depth 0 unix:0:zio_buf_81920:hash_rescale 0 unix:0:zio_buf_81920:hash_size 64 unix:0:zio_buf_81920:magazine_size 1 unix:0:zio_buf_81920:slab_alloc 0 unix:0:zio_buf_81920:slab_create 0 unix:0:zio_buf_81920:slab_destroy 0 unix:0:zio_buf_81920:slab_free 0 unix:0:zio_buf_81920:slab_size 81920 unix:0:zio_buf_81920:snaptime 8464512.974602 unix:0:zio_buf_81920:vmem_source 23 unix:0:zio_buf_90112:align 8192 unix:0:zio_buf_90112:alloc 0 unix:0:zio_buf_90112:alloc_fail 0 unix:0:zio_buf_90112:buf_avail 0 unix:0:zio_buf_90112:buf_constructed 0 unix:0:zio_buf_90112:buf_inuse 0 unix:0:zio_buf_90112:buf_max 0 unix:0:zio_buf_90112:buf_size 90112 unix:0:zio_buf_90112:buf_total 0 unix:0:zio_buf_90112:chunk_size 90112 unix:0:zio_buf_90112:class kmem_cache unix:0:zio_buf_90112:crtime 97.1056586 unix:0:zio_buf_90112:depot_alloc 0 unix:0:zio_buf_90112:depot_contention 0 unix:0:zio_buf_90112:depot_free 0 unix:0:zio_buf_90112:empty_magazines 0 unix:0:zio_buf_90112:free 0 unix:0:zio_buf_90112:full_magazines 0 unix:0:zio_buf_90112:hash_lookup_depth 0 unix:0:zio_buf_90112:hash_rescale 0 unix:0:zio_buf_90112:hash_size 64 unix:0:zio_buf_90112:magazine_size 1 unix:0:zio_buf_90112:slab_alloc 0 unix:0:zio_buf_90112:slab_create 0 unix:0:zio_buf_90112:slab_destroy 0 unix:0:zio_buf_90112:slab_free 0 unix:0:zio_buf_90112:slab_size 90112 unix:0:zio_buf_90112:snaptime 8464512.975978 unix:0:zio_buf_90112:vmem_source 23 unix:0:zio_buf_98304:align 8192 unix:0:zio_buf_98304:alloc 0 unix:0:zio_buf_98304:alloc_fail 0 unix:0:zio_buf_98304:buf_avail 0 unix:0:zio_buf_98304:buf_constructed 0 unix:0:zio_buf_98304:buf_inuse 0 unix:0:zio_buf_98304:buf_max 0 unix:0:zio_buf_98304:buf_size 98304 unix:0:zio_buf_98304:buf_total 0 unix:0:zio_buf_98304:chunk_size 98304 unix:0:zio_buf_98304:class kmem_cache unix:0:zio_buf_98304:crtime 97.1057 unix:0:zio_buf_98304:depot_alloc 0 unix:0:zio_buf_98304:depot_contention 0 unix:0:zio_buf_98304:depot_free 0 unix:0:zio_buf_98304:empty_magazines 0 unix:0:zio_buf_98304:free 0 unix:0:zio_buf_98304:full_magazines 0 unix:0:zio_buf_98304:hash_lookup_depth 0 unix:0:zio_buf_98304:hash_rescale 0 unix:0:zio_buf_98304:hash_size 64 unix:0:zio_buf_98304:magazine_size 1 unix:0:zio_buf_98304:slab_alloc 0 unix:0:zio_buf_98304:slab_create 0 unix:0:zio_buf_98304:slab_destroy 0 unix:0:zio_buf_98304:slab_free 0 unix:0:zio_buf_98304:slab_size 98304 unix:0:zio_buf_98304:snaptime 8464512.9774508 unix:0:zio_buf_98304:vmem_source 23 unix:0:zio_data_buf_1024:align 512 unix:0:zio_data_buf_1024:alloc 0 unix:0:zio_data_buf_1024:alloc_fail 0 unix:0:zio_data_buf_1024:buf_avail 0 unix:0:zio_data_buf_1024:buf_constructed 0 unix:0:zio_data_buf_1024:buf_inuse 0 unix:0:zio_data_buf_1024:buf_max 0 unix:0:zio_data_buf_1024:buf_size 1024 unix:0:zio_data_buf_1024:buf_total 0 unix:0:zio_data_buf_1024:chunk_size 1024 
unix:0:zio_data_buf_1024:class kmem_cache unix:0:zio_data_buf_1024:crtime 97.1044142 unix:0:zio_data_buf_1024:depot_alloc 0 unix:0:zio_data_buf_1024:depot_contention 0 unix:0:zio_data_buf_1024:depot_free 0 unix:0:zio_data_buf_1024:empty_magazines 0 unix:0:zio_data_buf_1024:free 0 unix:0:zio_data_buf_1024:full_magazines 0 unix:0:zio_data_buf_1024:hash_lookup_depth 0 unix:0:zio_data_buf_1024:hash_rescale 0 unix:0:zio_data_buf_1024:hash_size 64 unix:0:zio_data_buf_1024:magazine_size 3 unix:0:zio_data_buf_1024:slab_alloc 0 unix:0:zio_data_buf_1024:slab_create 0 unix:0:zio_data_buf_1024:slab_destroy 0 unix:0:zio_data_buf_1024:slab_free 0 unix:0:zio_data_buf_1024:slab_size 8192 unix:0:zio_data_buf_1024:snaptime 8464512.9788114 unix:0:zio_data_buf_1024:vmem_source 40 unix:0:zio_data_buf_10240:align 2048 unix:0:zio_data_buf_10240:alloc 0 unix:0:zio_data_buf_10240:alloc_fail 0 unix:0:zio_data_buf_10240:buf_avail 0 unix:0:zio_data_buf_10240:buf_constructed 0 unix:0:zio_data_buf_10240:buf_inuse 0 unix:0:zio_data_buf_10240:buf_max 0 unix:0:zio_data_buf_10240:buf_size 10240 unix:0:zio_data_buf_10240:buf_total 0 unix:0:zio_data_buf_10240:chunk_size 10240 unix:0:zio_data_buf_10240:class kmem_cache unix:0:zio_data_buf_10240:crtime 97.1050436 unix:0:zio_data_buf_10240:depot_alloc 0 unix:0:zio_data_buf_10240:depot_contention 0 unix:0:zio_data_buf_10240:depot_free 0 unix:0:zio_data_buf_10240:empty_magazines 0 unix:0:zio_data_buf_10240:free 0 unix:0:zio_data_buf_10240:full_magazines 0 unix:0:zio_data_buf_10240:hash_lookup_depth 0 unix:0:zio_data_buf_10240:hash_rescale 0 unix:0:zio_data_buf_10240:hash_size 64 unix:0:zio_data_buf_10240:magazine_size 1 unix:0:zio_data_buf_10240:slab_alloc 0 unix:0:zio_data_buf_10240:slab_create 0 unix:0:zio_data_buf_10240:slab_destroy 0 unix:0:zio_data_buf_10240:slab_free 0 unix:0:zio_data_buf_10240:slab_size 40960 unix:0:zio_data_buf_10240:snaptime 8464512.98026 unix:0:zio_data_buf_10240:vmem_source 40 unix:0:zio_data_buf_106496:align 8192 unix:0:zio_data_buf_106496:alloc 0 unix:0:zio_data_buf_106496:alloc_fail 0 unix:0:zio_data_buf_106496:buf_avail 0 unix:0:zio_data_buf_106496:buf_constructed 0 unix:0:zio_data_buf_106496:buf_inuse 0 unix:0:zio_data_buf_106496:buf_max 0 unix:0:zio_data_buf_106496:buf_size 106496 unix:0:zio_data_buf_106496:buf_total 0 unix:0:zio_data_buf_106496:chunk_size 106496 unix:0:zio_data_buf_106496:class kmem_cache unix:0:zio_data_buf_106496:crtime 97.1058334 unix:0:zio_data_buf_106496:depot_alloc 0 unix:0:zio_data_buf_106496:depot_contention 0 unix:0:zio_data_buf_106496:depot_free 0 unix:0:zio_data_buf_106496:empty_magazines 0 unix:0:zio_data_buf_106496:free 0 unix:0:zio_data_buf_106496:full_magazines 0 unix:0:zio_data_buf_106496:hash_lookup_depth 0 unix:0:zio_data_buf_106496:hash_rescale 0 unix:0:zio_data_buf_106496:hash_size 64 unix:0:zio_data_buf_106496:magazine_size 1 unix:0:zio_data_buf_106496:slab_alloc 0 unix:0:zio_data_buf_106496:slab_create 0 unix:0:zio_data_buf_106496:slab_destroy 0 unix:0:zio_data_buf_106496:slab_free 0 unix:0:zio_data_buf_106496:slab_size 106496 unix:0:zio_data_buf_106496:snaptime 8464512.9817174 unix:0:zio_data_buf_106496:vmem_source 40 unix:0:zio_data_buf_114688:align 8192 unix:0:zio_data_buf_114688:alloc 0 unix:0:zio_data_buf_114688:alloc_fail 0 unix:0:zio_data_buf_114688:buf_avail 0 unix:0:zio_data_buf_114688:buf_constructed 0 unix:0:zio_data_buf_114688:buf_inuse 0 unix:0:zio_data_buf_114688:buf_max 0 unix:0:zio_data_buf_114688:buf_size 114688 unix:0:zio_data_buf_114688:buf_total 0 unix:0:zio_data_buf_114688:chunk_size 
114688 unix:0:zio_data_buf_114688:class kmem_cache unix:0:zio_data_buf_114688:crtime 97.105897 unix:0:zio_data_buf_114688:depot_alloc 0 unix:0:zio_data_buf_114688:depot_contention 0 unix:0:zio_data_buf_114688:depot_free 0 unix:0:zio_data_buf_114688:empty_magazines 0 unix:0:zio_data_buf_114688:free 0 unix:0:zio_data_buf_114688:full_magazines 0 unix:0:zio_data_buf_114688:hash_lookup_depth 0 unix:0:zio_data_buf_114688:hash_rescale 0 unix:0:zio_data_buf_114688:hash_size 64 unix:0:zio_data_buf_114688:magazine_size 1 unix:0:zio_data_buf_114688:slab_alloc 0 unix:0:zio_data_buf_114688:slab_create 0 unix:0:zio_data_buf_114688:slab_destroy 0 unix:0:zio_data_buf_114688:slab_free 0 unix:0:zio_data_buf_114688:slab_size 114688 unix:0:zio_data_buf_114688:snaptime 8464512.9831872 unix:0:zio_data_buf_114688:vmem_source 40 unix:0:zio_data_buf_12288:align 2048 unix:0:zio_data_buf_12288:alloc 0 unix:0:zio_data_buf_12288:alloc_fail 0 unix:0:zio_data_buf_12288:buf_avail 0 unix:0:zio_data_buf_12288:buf_constructed 0 unix:0:zio_data_buf_12288:buf_inuse 0 unix:0:zio_data_buf_12288:buf_max 0 unix:0:zio_data_buf_12288:buf_size 12288 unix:0:zio_data_buf_12288:buf_total 0 unix:0:zio_data_buf_12288:chunk_size 12288 unix:0:zio_data_buf_12288:class kmem_cache unix:0:zio_data_buf_12288:crtime 97.1051154 unix:0:zio_data_buf_12288:depot_alloc 0 unix:0:zio_data_buf_12288:depot_contention 0 unix:0:zio_data_buf_12288:depot_free 0 unix:0:zio_data_buf_12288:empty_magazines 0 unix:0:zio_data_buf_12288:free 0 unix:0:zio_data_buf_12288:full_magazines 0 unix:0:zio_data_buf_12288:hash_lookup_depth 0 unix:0:zio_data_buf_12288:hash_rescale 0 unix:0:zio_data_buf_12288:hash_size 64 unix:0:zio_data_buf_12288:magazine_size 1 unix:0:zio_data_buf_12288:slab_alloc 0 unix:0:zio_data_buf_12288:slab_create 0 unix:0:zio_data_buf_12288:slab_destroy 0 unix:0:zio_data_buf_12288:slab_free 0 unix:0:zio_data_buf_12288:slab_size 24576 unix:0:zio_data_buf_12288:snaptime 8464512.984576 unix:0:zio_data_buf_12288:vmem_source 40 unix:0:zio_data_buf_122880:align 8192 unix:0:zio_data_buf_122880:alloc 0 unix:0:zio_data_buf_122880:alloc_fail 0 unix:0:zio_data_buf_122880:buf_avail 0 unix:0:zio_data_buf_122880:buf_constructed 0 unix:0:zio_data_buf_122880:buf_inuse 0 unix:0:zio_data_buf_122880:buf_max 0 unix:0:zio_data_buf_122880:buf_size 122880 unix:0:zio_data_buf_122880:buf_total 0 unix:0:zio_data_buf_122880:chunk_size 122880 unix:0:zio_data_buf_122880:class kmem_cache unix:0:zio_data_buf_122880:crtime 97.105938 unix:0:zio_data_buf_122880:depot_alloc 0 unix:0:zio_data_buf_122880:depot_contention 0 unix:0:zio_data_buf_122880:depot_free 0 unix:0:zio_data_buf_122880:empty_magazines 0 unix:0:zio_data_buf_122880:free 0 unix:0:zio_data_buf_122880:full_magazines 0 unix:0:zio_data_buf_122880:hash_lookup_depth 0 unix:0:zio_data_buf_122880:hash_rescale 0 unix:0:zio_data_buf_122880:hash_size 64 unix:0:zio_data_buf_122880:magazine_size 1 unix:0:zio_data_buf_122880:slab_alloc 0 unix:0:zio_data_buf_122880:slab_create 0 unix:0:zio_data_buf_122880:slab_destroy 0 unix:0:zio_data_buf_122880:slab_free 0 unix:0:zio_data_buf_122880:slab_size 122880 unix:0:zio_data_buf_122880:snaptime 8464512.9859568 unix:0:zio_data_buf_122880:vmem_source 40 unix:0:zio_data_buf_131072:align 8192 unix:0:zio_data_buf_131072:alloc 0 unix:0:zio_data_buf_131072:alloc_fail 0 unix:0:zio_data_buf_131072:buf_avail 0 unix:0:zio_data_buf_131072:buf_constructed 0 unix:0:zio_data_buf_131072:buf_inuse 0 unix:0:zio_data_buf_131072:buf_max 0 unix:0:zio_data_buf_131072:buf_size 131072 
unix:0:zio_data_buf_131072:buf_total 0 unix:0:zio_data_buf_131072:chunk_size 131072 unix:0:zio_data_buf_131072:class kmem_cache unix:0:zio_data_buf_131072:crtime 97.1059774 unix:0:zio_data_buf_131072:depot_alloc 0 unix:0:zio_data_buf_131072:depot_contention 0 unix:0:zio_data_buf_131072:depot_free 0 unix:0:zio_data_buf_131072:empty_magazines 0 unix:0:zio_data_buf_131072:free 0 unix:0:zio_data_buf_131072:full_magazines 0 unix:0:zio_data_buf_131072:hash_lookup_depth 0 unix:0:zio_data_buf_131072:hash_rescale 0 unix:0:zio_data_buf_131072:hash_size 64 unix:0:zio_data_buf_131072:magazine_size 1 unix:0:zio_data_buf_131072:slab_alloc 0 unix:0:zio_data_buf_131072:slab_create 0 unix:0:zio_data_buf_131072:slab_destroy 0 unix:0:zio_data_buf_131072:slab_free 0 unix:0:zio_data_buf_131072:slab_size 131072 unix:0:zio_data_buf_131072:snaptime 8464512.9874008 unix:0:zio_data_buf_131072:vmem_source 40 unix:0:zio_data_buf_14336:align 2048 unix:0:zio_data_buf_14336:alloc 0 unix:0:zio_data_buf_14336:alloc_fail 0 unix:0:zio_data_buf_14336:buf_avail 0 unix:0:zio_data_buf_14336:buf_constructed 0 unix:0:zio_data_buf_14336:buf_inuse 0 unix:0:zio_data_buf_14336:buf_max 0 unix:0:zio_data_buf_14336:buf_size 14336 unix:0:zio_data_buf_14336:buf_total 0 unix:0:zio_data_buf_14336:chunk_size 14336 unix:0:zio_data_buf_14336:class kmem_cache unix:0:zio_data_buf_14336:crtime 97.105155 unix:0:zio_data_buf_14336:depot_alloc 0 unix:0:zio_data_buf_14336:depot_contention 0 unix:0:zio_data_buf_14336:depot_free 0 unix:0:zio_data_buf_14336:empty_magazines 0 unix:0:zio_data_buf_14336:free 0 unix:0:zio_data_buf_14336:full_magazines 0 unix:0:zio_data_buf_14336:hash_lookup_depth 0 unix:0:zio_data_buf_14336:hash_rescale 0 unix:0:zio_data_buf_14336:hash_size 64 unix:0:zio_data_buf_14336:magazine_size 1 unix:0:zio_data_buf_14336:slab_alloc 0 unix:0:zio_data_buf_14336:slab_create 0 unix:0:zio_data_buf_14336:slab_destroy 0 unix:0:zio_data_buf_14336:slab_free 0 unix:0:zio_data_buf_14336:slab_size 57344 unix:0:zio_data_buf_14336:snaptime 8464512.9887962 unix:0:zio_data_buf_14336:vmem_source 40 unix:0:zio_data_buf_1536:align 512 unix:0:zio_data_buf_1536:alloc 0 unix:0:zio_data_buf_1536:alloc_fail 0 unix:0:zio_data_buf_1536:buf_avail 0 unix:0:zio_data_buf_1536:buf_constructed 0 unix:0:zio_data_buf_1536:buf_inuse 0 unix:0:zio_data_buf_1536:buf_max 0 unix:0:zio_data_buf_1536:buf_size 1536 unix:0:zio_data_buf_1536:buf_total 0 unix:0:zio_data_buf_1536:chunk_size 1536 unix:0:zio_data_buf_1536:class kmem_cache unix:0:zio_data_buf_1536:crtime 97.1044514 unix:0:zio_data_buf_1536:depot_alloc 0 unix:0:zio_data_buf_1536:depot_contention 0 unix:0:zio_data_buf_1536:depot_free 0 unix:0:zio_data_buf_1536:empty_magazines 0 unix:0:zio_data_buf_1536:free 0 unix:0:zio_data_buf_1536:full_magazines 0 unix:0:zio_data_buf_1536:hash_lookup_depth 0 unix:0:zio_data_buf_1536:hash_rescale 0 unix:0:zio_data_buf_1536:hash_size 64 unix:0:zio_data_buf_1536:magazine_size 3 unix:0:zio_data_buf_1536:slab_alloc 0 unix:0:zio_data_buf_1536:slab_create 0 unix:0:zio_data_buf_1536:slab_destroy 0 unix:0:zio_data_buf_1536:slab_free 0 unix:0:zio_data_buf_1536:slab_size 8192 unix:0:zio_data_buf_1536:snaptime 8464512.9902352 unix:0:zio_data_buf_1536:vmem_source 40 unix:0:zio_data_buf_16384:align 8192 unix:0:zio_data_buf_16384:alloc 0 unix:0:zio_data_buf_16384:alloc_fail 0 unix:0:zio_data_buf_16384:buf_avail 0 unix:0:zio_data_buf_16384:buf_constructed 0 unix:0:zio_data_buf_16384:buf_inuse 0 unix:0:zio_data_buf_16384:buf_max 0 unix:0:zio_data_buf_16384:buf_size 16384 
unix:0:zio_data_buf_16384:buf_total 0 unix:0:zio_data_buf_16384:chunk_size 16384 unix:0:zio_data_buf_16384:class kmem_cache unix:0:zio_data_buf_16384:crtime 97.1051904 unix:0:zio_data_buf_16384:depot_alloc 0 unix:0:zio_data_buf_16384:depot_contention 0 unix:0:zio_data_buf_16384:depot_free 0 unix:0:zio_data_buf_16384:empty_magazines 0 unix:0:zio_data_buf_16384:free 0 unix:0:zio_data_buf_16384:full_magazines 0 unix:0:zio_data_buf_16384:hash_lookup_depth 0 unix:0:zio_data_buf_16384:hash_rescale 0 unix:0:zio_data_buf_16384:hash_size 64 unix:0:zio_data_buf_16384:magazine_size 1 unix:0:zio_data_buf_16384:slab_alloc 0 unix:0:zio_data_buf_16384:slab_create 0 unix:0:zio_data_buf_16384:slab_destroy 0 unix:0:zio_data_buf_16384:slab_free 0 unix:0:zio_data_buf_16384:slab_size 16384 unix:0:zio_data_buf_16384:snaptime 8464512.9916152 unix:0:zio_data_buf_16384:vmem_source 40 unix:0:zio_data_buf_2048:align 512 unix:0:zio_data_buf_2048:alloc 0 unix:0:zio_data_buf_2048:alloc_fail 0 unix:0:zio_data_buf_2048:buf_avail 0 unix:0:zio_data_buf_2048:buf_constructed 0 unix:0:zio_data_buf_2048:buf_inuse 0 unix:0:zio_data_buf_2048:buf_max 0 unix:0:zio_data_buf_2048:buf_size 2048 unix:0:zio_data_buf_2048:buf_total 0 unix:0:zio_data_buf_2048:chunk_size 2048 unix:0:zio_data_buf_2048:class kmem_cache unix:0:zio_data_buf_2048:crtime 97.104488 unix:0:zio_data_buf_2048:depot_alloc 0 unix:0:zio_data_buf_2048:depot_contention 0 unix:0:zio_data_buf_2048:depot_free 0 unix:0:zio_data_buf_2048:empty_magazines 0 unix:0:zio_data_buf_2048:free 0 unix:0:zio_data_buf_2048:full_magazines 0 unix:0:zio_data_buf_2048:hash_lookup_depth 0 unix:0:zio_data_buf_2048:hash_rescale 0 unix:0:zio_data_buf_2048:hash_size 64 unix:0:zio_data_buf_2048:magazine_size 3 unix:0:zio_data_buf_2048:slab_alloc 0 unix:0:zio_data_buf_2048:slab_create 0 unix:0:zio_data_buf_2048:slab_destroy 0 unix:0:zio_data_buf_2048:slab_free 0 unix:0:zio_data_buf_2048:slab_size 8192 unix:0:zio_data_buf_2048:snaptime 8464512.9931212 unix:0:zio_data_buf_2048:vmem_source 40 unix:0:zio_data_buf_20480:align 4096 unix:0:zio_data_buf_20480:alloc 0 unix:0:zio_data_buf_20480:alloc_fail 0 unix:0:zio_data_buf_20480:buf_avail 0 unix:0:zio_data_buf_20480:buf_constructed 0 unix:0:zio_data_buf_20480:buf_inuse 0 unix:0:zio_data_buf_20480:buf_max 0 unix:0:zio_data_buf_20480:buf_size 20480 unix:0:zio_data_buf_20480:buf_total 0 unix:0:zio_data_buf_20480:chunk_size 20480 unix:0:zio_data_buf_20480:class kmem_cache unix:0:zio_data_buf_20480:crtime 97.105228 unix:0:zio_data_buf_20480:depot_alloc 0 unix:0:zio_data_buf_20480:depot_contention 0 unix:0:zio_data_buf_20480:depot_free 0 unix:0:zio_data_buf_20480:empty_magazines 0 unix:0:zio_data_buf_20480:free 0 unix:0:zio_data_buf_20480:full_magazines 0 unix:0:zio_data_buf_20480:hash_lookup_depth 0 unix:0:zio_data_buf_20480:hash_rescale 0 unix:0:zio_data_buf_20480:hash_size 64 unix:0:zio_data_buf_20480:magazine_size 1 unix:0:zio_data_buf_20480:slab_alloc 0 unix:0:zio_data_buf_20480:slab_create 0 unix:0:zio_data_buf_20480:slab_destroy 0 unix:0:zio_data_buf_20480:slab_free 0 unix:0:zio_data_buf_20480:slab_size 40960 unix:0:zio_data_buf_20480:snaptime 8464512.9945132 unix:0:zio_data_buf_20480:vmem_source 40 unix:0:zio_data_buf_24576:align 8192 unix:0:zio_data_buf_24576:alloc 0 unix:0:zio_data_buf_24576:alloc_fail 0 unix:0:zio_data_buf_24576:buf_avail 0 unix:0:zio_data_buf_24576:buf_constructed 0 unix:0:zio_data_buf_24576:buf_inuse 0 unix:0:zio_data_buf_24576:buf_max 0 unix:0:zio_data_buf_24576:buf_size 24576 unix:0:zio_data_buf_24576:buf_total 0 
unix:0:zio_data_buf_24576:chunk_size 24576 unix:0:zio_data_buf_24576:class kmem_cache unix:0:zio_data_buf_24576:crtime 97.1052882 unix:0:zio_data_buf_24576:depot_alloc 0 unix:0:zio_data_buf_24576:depot_contention 0 unix:0:zio_data_buf_24576:depot_free 0 unix:0:zio_data_buf_24576:empty_magazines 0 unix:0:zio_data_buf_24576:free 0 unix:0:zio_data_buf_24576:full_magazines 0 unix:0:zio_data_buf_24576:hash_lookup_depth 0 unix:0:zio_data_buf_24576:hash_rescale 0 unix:0:zio_data_buf_24576:hash_size 64 unix:0:zio_data_buf_24576:magazine_size 1 unix:0:zio_data_buf_24576:slab_alloc 0 unix:0:zio_data_buf_24576:slab_create 0 unix:0:zio_data_buf_24576:slab_destroy 0 unix:0:zio_data_buf_24576:slab_free 0 unix:0:zio_data_buf_24576:slab_size 24576 unix:0:zio_data_buf_24576:snaptime 8464512.9958936 unix:0:zio_data_buf_24576:vmem_source 40 unix:0:zio_data_buf_2560:align 512 unix:0:zio_data_buf_2560:alloc 0 unix:0:zio_data_buf_2560:alloc_fail 0 unix:0:zio_data_buf_2560:buf_avail 0 unix:0:zio_data_buf_2560:buf_constructed 0 unix:0:zio_data_buf_2560:buf_inuse 0 unix:0:zio_data_buf_2560:buf_max 0 unix:0:zio_data_buf_2560:buf_size 2560 unix:0:zio_data_buf_2560:buf_total 0 unix:0:zio_data_buf_2560:chunk_size 2560 unix:0:zio_data_buf_2560:class kmem_cache unix:0:zio_data_buf_2560:crtime 97.104526 unix:0:zio_data_buf_2560:depot_alloc 0 unix:0:zio_data_buf_2560:depot_contention 0 unix:0:zio_data_buf_2560:depot_free 0 unix:0:zio_data_buf_2560:empty_magazines 0 unix:0:zio_data_buf_2560:free 0 unix:0:zio_data_buf_2560:full_magazines 0 unix:0:zio_data_buf_2560:hash_lookup_depth 0 unix:0:zio_data_buf_2560:hash_rescale 0 unix:0:zio_data_buf_2560:hash_size 64 unix:0:zio_data_buf_2560:magazine_size 3 unix:0:zio_data_buf_2560:slab_alloc 0 unix:0:zio_data_buf_2560:slab_create 0 unix:0:zio_data_buf_2560:slab_destroy 0 unix:0:zio_data_buf_2560:slab_free 0 unix:0:zio_data_buf_2560:slab_size 8192 unix:0:zio_data_buf_2560:snaptime 8464512.997342 unix:0:zio_data_buf_2560:vmem_source 40 unix:0:zio_data_buf_28672:align 4096 unix:0:zio_data_buf_28672:alloc 0 unix:0:zio_data_buf_28672:alloc_fail 0 unix:0:zio_data_buf_28672:buf_avail 0 unix:0:zio_data_buf_28672:buf_constructed 0 unix:0:zio_data_buf_28672:buf_inuse 0 unix:0:zio_data_buf_28672:buf_max 0 unix:0:zio_data_buf_28672:buf_size 28672 unix:0:zio_data_buf_28672:buf_total 0 unix:0:zio_data_buf_28672:chunk_size 28672 unix:0:zio_data_buf_28672:class kmem_cache unix:0:zio_data_buf_28672:crtime 97.1053302 unix:0:zio_data_buf_28672:depot_alloc 0 unix:0:zio_data_buf_28672:depot_contention 0 unix:0:zio_data_buf_28672:depot_free 0 unix:0:zio_data_buf_28672:empty_magazines 0 unix:0:zio_data_buf_28672:free 0 unix:0:zio_data_buf_28672:full_magazines 0 unix:0:zio_data_buf_28672:hash_lookup_depth 0 unix:0:zio_data_buf_28672:hash_rescale 0 unix:0:zio_data_buf_28672:hash_size 64 unix:0:zio_data_buf_28672:magazine_size 1 unix:0:zio_data_buf_28672:slab_alloc 0 unix:0:zio_data_buf_28672:slab_create 0 unix:0:zio_data_buf_28672:slab_destroy 0 unix:0:zio_data_buf_28672:slab_free 0 unix:0:zio_data_buf_28672:slab_size 57344 unix:0:zio_data_buf_28672:snaptime 8464512.9987326 unix:0:zio_data_buf_28672:vmem_source 40 unix:0:zio_data_buf_3072:align 512 unix:0:zio_data_buf_3072:alloc 0 unix:0:zio_data_buf_3072:alloc_fail 0 unix:0:zio_data_buf_3072:buf_avail 0 unix:0:zio_data_buf_3072:buf_constructed 0 unix:0:zio_data_buf_3072:buf_inuse 0 unix:0:zio_data_buf_3072:buf_max 0 unix:0:zio_data_buf_3072:buf_size 3072 unix:0:zio_data_buf_3072:buf_total 0 unix:0:zio_data_buf_3072:chunk_size 3072 
unix:0:zio_data_buf_3072:class kmem_cache unix:0:zio_data_buf_3072:crtime 97.1046736 unix:0:zio_data_buf_3072:depot_alloc 0 unix:0:zio_data_buf_3072:depot_contention 0 unix:0:zio_data_buf_3072:depot_free 0 unix:0:zio_data_buf_3072:empty_magazines 0 unix:0:zio_data_buf_3072:free 0 unix:0:zio_data_buf_3072:full_magazines 0 unix:0:zio_data_buf_3072:hash_lookup_depth 0 unix:0:zio_data_buf_3072:hash_rescale 0 unix:0:zio_data_buf_3072:hash_size 64 unix:0:zio_data_buf_3072:magazine_size 3 unix:0:zio_data_buf_3072:slab_alloc 0 unix:0:zio_data_buf_3072:slab_create 0 unix:0:zio_data_buf_3072:slab_destroy 0 unix:0:zio_data_buf_3072:slab_free 0 unix:0:zio_data_buf_3072:slab_size 24576 unix:0:zio_data_buf_3072:snaptime 8464513.0001792 unix:0:zio_data_buf_3072:vmem_source 40 unix:0:zio_data_buf_32768:align 8192 unix:0:zio_data_buf_32768:alloc 0 unix:0:zio_data_buf_32768:alloc_fail 0 unix:0:zio_data_buf_32768:buf_avail 0 unix:0:zio_data_buf_32768:buf_constructed 0 unix:0:zio_data_buf_32768:buf_inuse 0 unix:0:zio_data_buf_32768:buf_max 0 unix:0:zio_data_buf_32768:buf_size 32768 unix:0:zio_data_buf_32768:buf_total 0 unix:0:zio_data_buf_32768:chunk_size 32768 unix:0:zio_data_buf_32768:class kmem_cache unix:0:zio_data_buf_32768:crtime 97.1053684 unix:0:zio_data_buf_32768:depot_alloc 0 unix:0:zio_data_buf_32768:depot_contention 0 unix:0:zio_data_buf_32768:depot_free 0 unix:0:zio_data_buf_32768:empty_magazines 0 unix:0:zio_data_buf_32768:free 0 unix:0:zio_data_buf_32768:full_magazines 0 unix:0:zio_data_buf_32768:hash_lookup_depth 0 unix:0:zio_data_buf_32768:hash_rescale 0 unix:0:zio_data_buf_32768:hash_size 64 unix:0:zio_data_buf_32768:magazine_size 1 unix:0:zio_data_buf_32768:slab_alloc 0 unix:0:zio_data_buf_32768:slab_create 0 unix:0:zio_data_buf_32768:slab_destroy 0 unix:0:zio_data_buf_32768:slab_free 0 unix:0:zio_data_buf_32768:slab_size 32768 unix:0:zio_data_buf_32768:snaptime 8464513.001553 unix:0:zio_data_buf_32768:vmem_source 40 unix:0:zio_data_buf_3584:align 512 unix:0:zio_data_buf_3584:alloc 0 unix:0:zio_data_buf_3584:alloc_fail 0 unix:0:zio_data_buf_3584:buf_avail 0 unix:0:zio_data_buf_3584:buf_constructed 0 unix:0:zio_data_buf_3584:buf_inuse 0 unix:0:zio_data_buf_3584:buf_max 0 unix:0:zio_data_buf_3584:buf_size 3584 unix:0:zio_data_buf_3584:buf_total 0 unix:0:zio_data_buf_3584:chunk_size 3584 unix:0:zio_data_buf_3584:class kmem_cache unix:0:zio_data_buf_3584:crtime 97.1047136 unix:0:zio_data_buf_3584:depot_alloc 0 unix:0:zio_data_buf_3584:depot_contention 0 unix:0:zio_data_buf_3584:depot_free 0 unix:0:zio_data_buf_3584:empty_magazines 0 unix:0:zio_data_buf_3584:free 0 unix:0:zio_data_buf_3584:full_magazines 0 unix:0:zio_data_buf_3584:hash_lookup_depth 0 unix:0:zio_data_buf_3584:hash_rescale 0 unix:0:zio_data_buf_3584:hash_size 64 unix:0:zio_data_buf_3584:magazine_size 1 unix:0:zio_data_buf_3584:slab_alloc 0 unix:0:zio_data_buf_3584:slab_create 0 unix:0:zio_data_buf_3584:slab_destroy 0 unix:0:zio_data_buf_3584:slab_free 0 unix:0:zio_data_buf_3584:slab_size 32768 unix:0:zio_data_buf_3584:snaptime 8464513.0031224 unix:0:zio_data_buf_3584:vmem_source 40 unix:0:zio_data_buf_4096:align 1024 unix:0:zio_data_buf_4096:alloc 0 unix:0:zio_data_buf_4096:alloc_fail 0 unix:0:zio_data_buf_4096:buf_avail 0 unix:0:zio_data_buf_4096:buf_constructed 0 unix:0:zio_data_buf_4096:buf_inuse 0 unix:0:zio_data_buf_4096:buf_max 0 unix:0:zio_data_buf_4096:buf_size 4096 unix:0:zio_data_buf_4096:buf_total 0 unix:0:zio_data_buf_4096:chunk_size 4096 unix:0:zio_data_buf_4096:class kmem_cache unix:0:zio_data_buf_4096:crtime 
97.1047504 unix:0:zio_data_buf_4096:depot_alloc 0 unix:0:zio_data_buf_4096:depot_contention 0 unix:0:zio_data_buf_4096:depot_free 0 unix:0:zio_data_buf_4096:empty_magazines 0 unix:0:zio_data_buf_4096:free 0 unix:0:zio_data_buf_4096:full_magazines 0 unix:0:zio_data_buf_4096:hash_lookup_depth 0 unix:0:zio_data_buf_4096:hash_rescale 0 unix:0:zio_data_buf_4096:hash_size 64 unix:0:zio_data_buf_4096:magazine_size 1 unix:0:zio_data_buf_4096:slab_alloc 0 unix:0:zio_data_buf_4096:slab_create 0 unix:0:zio_data_buf_4096:slab_destroy 0 unix:0:zio_data_buf_4096:slab_free 0 unix:0:zio_data_buf_4096:slab_size 8192 unix:0:zio_data_buf_4096:snaptime 8464513.0045396 unix:0:zio_data_buf_4096:vmem_source 40 unix:0:zio_data_buf_40960:align 8192 unix:0:zio_data_buf_40960:alloc 0 unix:0:zio_data_buf_40960:alloc_fail 0 unix:0:zio_data_buf_40960:buf_avail 0 unix:0:zio_data_buf_40960:buf_constructed 0 unix:0:zio_data_buf_40960:buf_inuse 0 unix:0:zio_data_buf_40960:buf_max 0 unix:0:zio_data_buf_40960:buf_size 40960 unix:0:zio_data_buf_40960:buf_total 0 unix:0:zio_data_buf_40960:chunk_size 40960 unix:0:zio_data_buf_40960:class kmem_cache unix:0:zio_data_buf_40960:crtime 97.1054092 unix:0:zio_data_buf_40960:depot_alloc 0 unix:0:zio_data_buf_40960:depot_contention 0 unix:0:zio_data_buf_40960:depot_free 0 unix:0:zio_data_buf_40960:empty_magazines 0 unix:0:zio_data_buf_40960:free 0 unix:0:zio_data_buf_40960:full_magazines 0 unix:0:zio_data_buf_40960:hash_lookup_depth 0 unix:0:zio_data_buf_40960:hash_rescale 0 unix:0:zio_data_buf_40960:hash_size 64 unix:0:zio_data_buf_40960:magazine_size 1 unix:0:zio_data_buf_40960:slab_alloc 0 unix:0:zio_data_buf_40960:slab_create 0 unix:0:zio_data_buf_40960:slab_destroy 0 unix:0:zio_data_buf_40960:slab_free 0 unix:0:zio_data_buf_40960:slab_size 40960 unix:0:zio_data_buf_40960:snaptime 8464513.0059238 unix:0:zio_data_buf_40960:vmem_source 40 unix:0:zio_data_buf_49152:align 8192 unix:0:zio_data_buf_49152:alloc 0 unix:0:zio_data_buf_49152:alloc_fail 0 unix:0:zio_data_buf_49152:buf_avail 0 unix:0:zio_data_buf_49152:buf_constructed 0 unix:0:zio_data_buf_49152:buf_inuse 0 unix:0:zio_data_buf_49152:buf_max 0 unix:0:zio_data_buf_49152:buf_size 49152 unix:0:zio_data_buf_49152:buf_total 0 unix:0:zio_data_buf_49152:chunk_size 49152 unix:0:zio_data_buf_49152:class kmem_cache unix:0:zio_data_buf_49152:crtime 97.105465 unix:0:zio_data_buf_49152:depot_alloc 0 unix:0:zio_data_buf_49152:depot_contention 0 unix:0:zio_data_buf_49152:depot_free 0 unix:0:zio_data_buf_49152:empty_magazines 0 unix:0:zio_data_buf_49152:free 0 unix:0:zio_data_buf_49152:full_magazines 0 unix:0:zio_data_buf_49152:hash_lookup_depth 0 unix:0:zio_data_buf_49152:hash_rescale 0 unix:0:zio_data_buf_49152:hash_size 64 unix:0:zio_data_buf_49152:magazine_size 1 unix:0:zio_data_buf_49152:slab_alloc 0 unix:0:zio_data_buf_49152:slab_create 0 unix:0:zio_data_buf_49152:slab_destroy 0 unix:0:zio_data_buf_49152:slab_free 0 unix:0:zio_data_buf_49152:slab_size 49152 unix:0:zio_data_buf_49152:snaptime 8464513.0073076 unix:0:zio_data_buf_49152:vmem_source 40 unix:0:zio_data_buf_512:align 512 unix:0:zio_data_buf_512:alloc 0 unix:0:zio_data_buf_512:alloc_fail 0 unix:0:zio_data_buf_512:buf_avail 0 unix:0:zio_data_buf_512:buf_constructed 0 unix:0:zio_data_buf_512:buf_inuse 0 unix:0:zio_data_buf_512:buf_max 0 unix:0:zio_data_buf_512:buf_size 512 unix:0:zio_data_buf_512:buf_total 0 unix:0:zio_data_buf_512:chunk_size 512 unix:0:zio_data_buf_512:class kmem_cache unix:0:zio_data_buf_512:crtime 97.1043724 unix:0:zio_data_buf_512:depot_alloc 0 
unix:0:zio_data_buf_512:depot_contention 0 unix:0:zio_data_buf_512:depot_free 0 unix:0:zio_data_buf_512:empty_magazines 0 unix:0:zio_data_buf_512:free 0 unix:0:zio_data_buf_512:full_magazines 0 unix:0:zio_data_buf_512:hash_lookup_depth 0 unix:0:zio_data_buf_512:hash_rescale 0 unix:0:zio_data_buf_512:hash_size 0 unix:0:zio_data_buf_512:magazine_size 3 unix:0:zio_data_buf_512:slab_alloc 0 unix:0:zio_data_buf_512:slab_create 0 unix:0:zio_data_buf_512:slab_destroy 0 unix:0:zio_data_buf_512:slab_free 0 unix:0:zio_data_buf_512:slab_size 8192 unix:0:zio_data_buf_512:snaptime 8464513.008775 unix:0:zio_data_buf_512:vmem_source 40 unix:0:zio_data_buf_5120:align 1024 unix:0:zio_data_buf_5120:alloc 0 unix:0:zio_data_buf_5120:alloc_fail 0 unix:0:zio_data_buf_5120:buf_avail 0 unix:0:zio_data_buf_5120:buf_constructed 0 unix:0:zio_data_buf_5120:buf_inuse 0 unix:0:zio_data_buf_5120:buf_max 0 unix:0:zio_data_buf_5120:buf_size 5120 unix:0:zio_data_buf_5120:buf_total 0 unix:0:zio_data_buf_5120:chunk_size 5120 unix:0:zio_data_buf_5120:class kmem_cache unix:0:zio_data_buf_5120:crtime 97.104789 unix:0:zio_data_buf_5120:depot_alloc 0 unix:0:zio_data_buf_5120:depot_contention 0 unix:0:zio_data_buf_5120:depot_free 0 unix:0:zio_data_buf_5120:empty_magazines 0 unix:0:zio_data_buf_5120:free 0 unix:0:zio_data_buf_5120:full_magazines 0 unix:0:zio_data_buf_5120:hash_lookup_depth 0 unix:0:zio_data_buf_5120:hash_rescale 0 unix:0:zio_data_buf_5120:hash_size 64 unix:0:zio_data_buf_5120:magazine_size 1 unix:0:zio_data_buf_5120:slab_alloc 0 unix:0:zio_data_buf_5120:slab_create 0 unix:0:zio_data_buf_5120:slab_destroy 0 unix:0:zio_data_buf_5120:slab_free 0 unix:0:zio_data_buf_5120:slab_size 40960 unix:0:zio_data_buf_5120:snaptime 8464513.0102196 unix:0:zio_data_buf_5120:vmem_source 40 unix:0:zio_data_buf_57344:align 8192 unix:0:zio_data_buf_57344:alloc 0 unix:0:zio_data_buf_57344:alloc_fail 0 unix:0:zio_data_buf_57344:buf_avail 0 unix:0:zio_data_buf_57344:buf_constructed 0 unix:0:zio_data_buf_57344:buf_inuse 0 unix:0:zio_data_buf_57344:buf_max 0 unix:0:zio_data_buf_57344:buf_size 57344 unix:0:zio_data_buf_57344:buf_total 0 unix:0:zio_data_buf_57344:chunk_size 57344 unix:0:zio_data_buf_57344:class kmem_cache unix:0:zio_data_buf_57344:crtime 97.105506 unix:0:zio_data_buf_57344:depot_alloc 0 unix:0:zio_data_buf_57344:depot_contention 0 unix:0:zio_data_buf_57344:depot_free 0 unix:0:zio_data_buf_57344:empty_magazines 0 unix:0:zio_data_buf_57344:free 0 unix:0:zio_data_buf_57344:full_magazines 0 unix:0:zio_data_buf_57344:hash_lookup_depth 0 unix:0:zio_data_buf_57344:hash_rescale 0 unix:0:zio_data_buf_57344:hash_size 64 unix:0:zio_data_buf_57344:magazine_size 1 unix:0:zio_data_buf_57344:slab_alloc 0 unix:0:zio_data_buf_57344:slab_create 0 unix:0:zio_data_buf_57344:slab_destroy 0 unix:0:zio_data_buf_57344:slab_free 0 unix:0:zio_data_buf_57344:slab_size 57344 unix:0:zio_data_buf_57344:snaptime 8464513.0116222 unix:0:zio_data_buf_57344:vmem_source 40 unix:0:zio_data_buf_6144:align 1024 unix:0:zio_data_buf_6144:alloc 0 unix:0:zio_data_buf_6144:alloc_fail 0 unix:0:zio_data_buf_6144:buf_avail 0 unix:0:zio_data_buf_6144:buf_constructed 0 unix:0:zio_data_buf_6144:buf_inuse 0 unix:0:zio_data_buf_6144:buf_max 0 unix:0:zio_data_buf_6144:buf_size 6144 unix:0:zio_data_buf_6144:buf_total 0 unix:0:zio_data_buf_6144:chunk_size 6144 unix:0:zio_data_buf_6144:class kmem_cache unix:0:zio_data_buf_6144:crtime 97.1048286 unix:0:zio_data_buf_6144:depot_alloc 0 unix:0:zio_data_buf_6144:depot_contention 0 unix:0:zio_data_buf_6144:depot_free 0 
unix:0:zio_data_buf_6144:empty_magazines 0 unix:0:zio_data_buf_6144:free 0 unix:0:zio_data_buf_6144:full_magazines 0 unix:0:zio_data_buf_6144:hash_lookup_depth 0 unix:0:zio_data_buf_6144:hash_rescale 0 unix:0:zio_data_buf_6144:hash_size 64 unix:0:zio_data_buf_6144:magazine_size 1 unix:0:zio_data_buf_6144:slab_alloc 0 unix:0:zio_data_buf_6144:slab_create 0 unix:0:zio_data_buf_6144:slab_destroy 0 unix:0:zio_data_buf_6144:slab_free 0 unix:0:zio_data_buf_6144:slab_size 24576 unix:0:zio_data_buf_6144:snaptime 8464513.0131494 unix:0:zio_data_buf_6144:vmem_source 40 unix:0:zio_data_buf_65536:align 8192 unix:0:zio_data_buf_65536:alloc 0 unix:0:zio_data_buf_65536:alloc_fail 0 unix:0:zio_data_buf_65536:buf_avail 0 unix:0:zio_data_buf_65536:buf_constructed 0 unix:0:zio_data_buf_65536:buf_inuse 0 unix:0:zio_data_buf_65536:buf_max 0 unix:0:zio_data_buf_65536:buf_size 65536 unix:0:zio_data_buf_65536:buf_total 0 unix:0:zio_data_buf_65536:chunk_size 65536 unix:0:zio_data_buf_65536:class kmem_cache unix:0:zio_data_buf_65536:crtime 97.1055558 unix:0:zio_data_buf_65536:depot_alloc 0 unix:0:zio_data_buf_65536:depot_contention 0 unix:0:zio_data_buf_65536:depot_free 0 unix:0:zio_data_buf_65536:empty_magazines 0 unix:0:zio_data_buf_65536:free 0 unix:0:zio_data_buf_65536:full_magazines 0 unix:0:zio_data_buf_65536:hash_lookup_depth 0 unix:0:zio_data_buf_65536:hash_rescale 0 unix:0:zio_data_buf_65536:hash_size 64 unix:0:zio_data_buf_65536:magazine_size 1 unix:0:zio_data_buf_65536:slab_alloc 0 unix:0:zio_data_buf_65536:slab_create 0 unix:0:zio_data_buf_65536:slab_destroy 0 unix:0:zio_data_buf_65536:slab_free 0 unix:0:zio_data_buf_65536:slab_size 65536 unix:0:zio_data_buf_65536:snaptime 8464513.0145484 unix:0:zio_data_buf_65536:vmem_source 40 unix:0:zio_data_buf_7168:align 1024 unix:0:zio_data_buf_7168:alloc 0 unix:0:zio_data_buf_7168:alloc_fail 0 unix:0:zio_data_buf_7168:buf_avail 0 unix:0:zio_data_buf_7168:buf_constructed 0 unix:0:zio_data_buf_7168:buf_inuse 0 unix:0:zio_data_buf_7168:buf_max 0 unix:0:zio_data_buf_7168:buf_size 7168 unix:0:zio_data_buf_7168:buf_total 0 unix:0:zio_data_buf_7168:chunk_size 7168 unix:0:zio_data_buf_7168:class kmem_cache unix:0:zio_data_buf_7168:crtime 97.104868 unix:0:zio_data_buf_7168:depot_alloc 0 unix:0:zio_data_buf_7168:depot_contention 0 unix:0:zio_data_buf_7168:depot_free 0 unix:0:zio_data_buf_7168:empty_magazines 0 unix:0:zio_data_buf_7168:free 0 unix:0:zio_data_buf_7168:full_magazines 0 unix:0:zio_data_buf_7168:hash_lookup_depth 0 unix:0:zio_data_buf_7168:hash_rescale 0 unix:0:zio_data_buf_7168:hash_size 64 unix:0:zio_data_buf_7168:magazine_size 1 unix:0:zio_data_buf_7168:slab_alloc 0 unix:0:zio_data_buf_7168:slab_create 0 unix:0:zio_data_buf_7168:slab_destroy 0 unix:0:zio_data_buf_7168:slab_free 0 unix:0:zio_data_buf_7168:slab_size 57344 unix:0:zio_data_buf_7168:snaptime 8464513.0159274 unix:0:zio_data_buf_7168:vmem_source 40 unix:0:zio_data_buf_73728:align 8192 unix:0:zio_data_buf_73728:alloc 0 unix:0:zio_data_buf_73728:alloc_fail 0 unix:0:zio_data_buf_73728:buf_avail 0 unix:0:zio_data_buf_73728:buf_constructed 0 unix:0:zio_data_buf_73728:buf_inuse 0 unix:0:zio_data_buf_73728:buf_max 0 unix:0:zio_data_buf_73728:buf_size 73728 unix:0:zio_data_buf_73728:buf_total 0 unix:0:zio_data_buf_73728:chunk_size 73728 unix:0:zio_data_buf_73728:class kmem_cache unix:0:zio_data_buf_73728:crtime 97.1055976 unix:0:zio_data_buf_73728:depot_alloc 0 unix:0:zio_data_buf_73728:depot_contention 0 unix:0:zio_data_buf_73728:depot_free 0 unix:0:zio_data_buf_73728:empty_magazines 0 
unix:0:zio_data_buf_73728:free 0 unix:0:zio_data_buf_73728:full_magazines 0 unix:0:zio_data_buf_73728:hash_lookup_depth 0 unix:0:zio_data_buf_73728:hash_rescale 0 unix:0:zio_data_buf_73728:hash_size 64 unix:0:zio_data_buf_73728:magazine_size 1 unix:0:zio_data_buf_73728:slab_alloc 0 unix:0:zio_data_buf_73728:slab_create 0 unix:0:zio_data_buf_73728:slab_destroy 0 unix:0:zio_data_buf_73728:slab_free 0 unix:0:zio_data_buf_73728:slab_size 73728 unix:0:zio_data_buf_73728:snaptime 8464513.017311 unix:0:zio_data_buf_73728:vmem_source 40 unix:0:zio_data_buf_8192:align 8192 unix:0:zio_data_buf_8192:alloc 0 unix:0:zio_data_buf_8192:alloc_fail 0 unix:0:zio_data_buf_8192:buf_avail 0 unix:0:zio_data_buf_8192:buf_constructed 0 unix:0:zio_data_buf_8192:buf_inuse 0 unix:0:zio_data_buf_8192:buf_max 0 unix:0:zio_data_buf_8192:buf_size 8192 unix:0:zio_data_buf_8192:buf_total 0 unix:0:zio_data_buf_8192:chunk_size 8192 unix:0:zio_data_buf_8192:class kmem_cache unix:0:zio_data_buf_8192:crtime 97.1050022 unix:0:zio_data_buf_8192:depot_alloc 0 unix:0:zio_data_buf_8192:depot_contention 0 unix:0:zio_data_buf_8192:depot_free 0 unix:0:zio_data_buf_8192:empty_magazines 0 unix:0:zio_data_buf_8192:free 0 unix:0:zio_data_buf_8192:full_magazines 0 unix:0:zio_data_buf_8192:hash_lookup_depth 0 unix:0:zio_data_buf_8192:hash_rescale 0 unix:0:zio_data_buf_8192:hash_size 64 unix:0:zio_data_buf_8192:magazine_size 1 unix:0:zio_data_buf_8192:slab_alloc 0 unix:0:zio_data_buf_8192:slab_create 0 unix:0:zio_data_buf_8192:slab_destroy 0 unix:0:zio_data_buf_8192:slab_free 0 unix:0:zio_data_buf_8192:slab_size 8192 unix:0:zio_data_buf_8192:snaptime 8464513.0187766 unix:0:zio_data_buf_8192:vmem_source 40 unix:0:zio_data_buf_81920:align 8192 unix:0:zio_data_buf_81920:alloc 0 unix:0:zio_data_buf_81920:alloc_fail 0 unix:0:zio_data_buf_81920:buf_avail 0 unix:0:zio_data_buf_81920:buf_constructed 0 unix:0:zio_data_buf_81920:buf_inuse 0 unix:0:zio_data_buf_81920:buf_max 0 unix:0:zio_data_buf_81920:buf_size 81920 unix:0:zio_data_buf_81920:buf_total 0 unix:0:zio_data_buf_81920:chunk_size 81920 unix:0:zio_data_buf_81920:class kmem_cache unix:0:zio_data_buf_81920:crtime 97.1056382 unix:0:zio_data_buf_81920:depot_alloc 0 unix:0:zio_data_buf_81920:depot_contention 0 unix:0:zio_data_buf_81920:depot_free 0 unix:0:zio_data_buf_81920:empty_magazines 0 unix:0:zio_data_buf_81920:free 0 unix:0:zio_data_buf_81920:full_magazines 0 unix:0:zio_data_buf_81920:hash_lookup_depth 0 unix:0:zio_data_buf_81920:hash_rescale 0 unix:0:zio_data_buf_81920:hash_size 64 unix:0:zio_data_buf_81920:magazine_size 1 unix:0:zio_data_buf_81920:slab_alloc 0 unix:0:zio_data_buf_81920:slab_create 0 unix:0:zio_data_buf_81920:slab_destroy 0 unix:0:zio_data_buf_81920:slab_free 0 unix:0:zio_data_buf_81920:slab_size 81920 unix:0:zio_data_buf_81920:snaptime 8464513.0204188 unix:0:zio_data_buf_81920:vmem_source 40 unix:0:zio_data_buf_90112:align 8192 unix:0:zio_data_buf_90112:alloc 0 unix:0:zio_data_buf_90112:alloc_fail 0 unix:0:zio_data_buf_90112:buf_avail 0 unix:0:zio_data_buf_90112:buf_constructed 0 unix:0:zio_data_buf_90112:buf_inuse 0 unix:0:zio_data_buf_90112:buf_max 0 unix:0:zio_data_buf_90112:buf_size 90112 unix:0:zio_data_buf_90112:buf_total 0 unix:0:zio_data_buf_90112:chunk_size 90112 unix:0:zio_data_buf_90112:class kmem_cache unix:0:zio_data_buf_90112:crtime 97.105679 unix:0:zio_data_buf_90112:depot_alloc 0 unix:0:zio_data_buf_90112:depot_contention 0 unix:0:zio_data_buf_90112:depot_free 0 unix:0:zio_data_buf_90112:empty_magazines 0 unix:0:zio_data_buf_90112:free 0 
unix:0:zio_data_buf_90112:full_magazines 0 unix:0:zio_data_buf_90112:hash_lookup_depth 0 unix:0:zio_data_buf_90112:hash_rescale 0 unix:0:zio_data_buf_90112:hash_size 64 unix:0:zio_data_buf_90112:magazine_size 1 unix:0:zio_data_buf_90112:slab_alloc 0 unix:0:zio_data_buf_90112:slab_create 0 unix:0:zio_data_buf_90112:slab_destroy 0 unix:0:zio_data_buf_90112:slab_free 0 unix:0:zio_data_buf_90112:slab_size 90112 unix:0:zio_data_buf_90112:snaptime 8464513.0218668 unix:0:zio_data_buf_90112:vmem_source 40 unix:0:zio_data_buf_98304:align 8192 unix:0:zio_data_buf_98304:alloc 0 unix:0:zio_data_buf_98304:alloc_fail 0 unix:0:zio_data_buf_98304:buf_avail 0 unix:0:zio_data_buf_98304:buf_constructed 0 unix:0:zio_data_buf_98304:buf_inuse 0 unix:0:zio_data_buf_98304:buf_max 0 unix:0:zio_data_buf_98304:buf_size 98304 unix:0:zio_data_buf_98304:buf_total 0 unix:0:zio_data_buf_98304:chunk_size 98304 unix:0:zio_data_buf_98304:class kmem_cache unix:0:zio_data_buf_98304:crtime 97.1057758 unix:0:zio_data_buf_98304:depot_alloc 0 unix:0:zio_data_buf_98304:depot_contention 0 unix:0:zio_data_buf_98304:depot_free 0 unix:0:zio_data_buf_98304:empty_magazines 0 unix:0:zio_data_buf_98304:free 0 unix:0:zio_data_buf_98304:full_magazines 0 unix:0:zio_data_buf_98304:hash_lookup_depth 0 unix:0:zio_data_buf_98304:hash_rescale 0 unix:0:zio_data_buf_98304:hash_size 64 unix:0:zio_data_buf_98304:magazine_size 1 unix:0:zio_data_buf_98304:slab_alloc 0 unix:0:zio_data_buf_98304:slab_create 0 unix:0:zio_data_buf_98304:slab_destroy 0 unix:0:zio_data_buf_98304:slab_free 0 unix:0:zio_data_buf_98304:slab_size 98304 unix:0:zio_data_buf_98304:snaptime 8464513.0233708 unix:0:zio_data_buf_98304:vmem_source 40 unix:1:callout_taskq:class taskq unix:1:callout_taskq:crtime 69.2736584 unix:1:callout_taskq:executed 152570540 unix:1:callout_taskq:maxtasks 4 unix:1:callout_taskq:nactive 4 unix:1:callout_taskq:nalloc 0 unix:1:callout_taskq:priority 99 unix:1:callout_taskq:snaptime 8464513.0254766 unix:1:callout_taskq:tasks 152570540 unix:1:callout_taskq:threads 2 unix:1:callout_taskq:totaltime 3117979265600 unix:1:cheetah_check_ce:class taskq unix:1:cheetah_check_ce:crtime 69.2662554 unix:1:cheetah_check_ce:executed 0 unix:1:cheetah_check_ce:maxtasks 0 unix:1:cheetah_check_ce:nactive 48 unix:1:cheetah_check_ce:nalloc 0 unix:1:cheetah_check_ce:priority 60 unix:1:cheetah_check_ce:snaptime 8464513.0260872 unix:1:cheetah_check_ce:tasks 0 unix:1:cheetah_check_ce:threads 1 unix:1:cheetah_check_ce:totaltime 0 unix:1:fp_nexus_enum_tq:class taskq unix:1:fp_nexus_enum_tq:crtime 77.2666628 unix:1:fp_nexus_enum_tq:executed 2 unix:1:fp_nexus_enum_tq:maxtasks 2 unix:1:fp_nexus_enum_tq:nactive 1 unix:1:fp_nexus_enum_tq:nalloc 0 unix:1:fp_nexus_enum_tq:priority 60 unix:1:fp_nexus_enum_tq:snaptime 8464513.0266742 unix:1:fp_nexus_enum_tq:tasks 2 unix:1:fp_nexus_enum_tq:threads 1 unix:1:fp_nexus_enum_tq:totaltime 5159095600 unix:1:mmu_ctx:class hat unix:1:mmu_ctx:crtime 85.1902768 unix:1:mmu_ctx:mmu_ctx_tsb_exceptions 159112 unix:1:mmu_ctx:mmu_ctx_tsb_raise_exception 840 unix:1:mmu_ctx:mmu_ctx_wrap_around 19 unix:1:mmu_ctx:snaptime 8464513.0272592 unix:1:pcf8584_nexus_enum_tq:class taskq unix:1:pcf8584_nexus_enum_tq:crtime 82.8913934 unix:1:pcf8584_nexus_enum_tq:executed 0 unix:1:pcf8584_nexus_enum_tq:maxtasks 0 unix:1:pcf8584_nexus_enum_tq:nactive 1 unix:1:pcf8584_nexus_enum_tq:nalloc 0 unix:1:pcf8584_nexus_enum_tq:priority 60 unix:1:pcf8584_nexus_enum_tq:snaptime 8464513.0276306 unix:1:pcf8584_nexus_enum_tq:tasks 0 unix:1:pcf8584_nexus_enum_tq:threads 1 
unix:1:pcf8584_nexus_enum_tq:totaltime 0 unix:1:pcisch_nexus_enum_tq:class taskq unix:1:pcisch_nexus_enum_tq:crtime 69.2970254 unix:1:pcisch_nexus_enum_tq:executed 0 unix:1:pcisch_nexus_enum_tq:maxtasks 0 unix:1:pcisch_nexus_enum_tq:nactive 1 unix:1:pcisch_nexus_enum_tq:nalloc 0 unix:1:pcisch_nexus_enum_tq:priority 60 unix:1:pcisch_nexus_enum_tq:snaptime 8464513.0282174 unix:1:pcisch_nexus_enum_tq:tasks 0 unix:1:pcisch_nexus_enum_tq:threads 1 unix:1:pcisch_nexus_enum_tq:totaltime 0 unix:1:us_pm_monitor:class taskq unix:1:us_pm_monitor:crtime 82.837324 unix:1:us_pm_monitor:executed 1692888 unix:1:us_pm_monitor:maxtasks 1 unix:1:us_pm_monitor:nactive 2 unix:1:us_pm_monitor:nalloc 0 unix:1:us_pm_monitor:priority 98 unix:1:us_pm_monitor:snaptime 8464513.0287948 unix:1:us_pm_monitor:tasks 1692888 unix:1:us_pm_monitor:threads 1 unix:1:us_pm_monitor:totaltime 8175915600 unix:2:mdi_taskq:class taskq unix:2:mdi_taskq:crtime 69.2709812 unix:2:mdi_taskq:executed 0 unix:2:mdi_taskq:maxtasks 0 unix:2:mdi_taskq:nactive 32 unix:2:mdi_taskq:nalloc 0 unix:2:mdi_taskq:priority 60 unix:2:mdi_taskq:snaptime 8464513.0294192 unix:2:mdi_taskq:tasks 0 unix:2:mdi_taskq:threads 8 unix:2:mdi_taskq:totaltime 0 unix:2:rpc_cots_connections:addr_family 0 unix:2:rpc_cots_connections:class rpc unix:2:rpc_cots_connections:crtime 235.9617848 unix:2:rpc_cots_connections:device 1 unix:2:rpc_cots_connections:port 0 unix:2:rpc_cots_connections:ref_count 0 unix:2:rpc_cots_connections:server 099.117.046.105 unix:2:rpc_cots_connections:snaptime 8464513.0300482 unix:2:rpc_cots_connections:status 402653184 unix:2:rpc_cots_connections:time_stamp 846436673 unix:2:rpc_cots_connections:write_queue 101788064 unix:3:vhci_taskq:class taskq unix:3:vhci_taskq:crtime 69.2713988 unix:3:vhci_taskq:executed 0 unix:3:vhci_taskq:maxtasks 0 unix:3:vhci_taskq:nactive 0 unix:3:vhci_taskq:nalloc 0 unix:3:vhci_taskq:priority 60 unix:3:vhci_taskq:snaptime 8464513.0308318 unix:3:vhci_taskq:tasks 0 unix:3:vhci_taskq:threads 1 unix:3:vhci_taskq:totaltime 0 unix:4:vhci_update_pathstates:class taskq unix:4:vhci_update_pathstates:crtime 69.2716566 unix:4:vhci_update_pathstates:executed 0 unix:4:vhci_update_pathstates:maxtasks 0 unix:4:vhci_update_pathstates:nactive 0 unix:4:vhci_update_pathstates:nalloc 0 unix:4:vhci_update_pathstates:priority 60 unix:4:vhci_update_pathstates:snaptime 8464513.0314446 unix:4:vhci_update_pathstates:tasks 0 unix:4:vhci_update_pathstates:threads 8 unix:4:vhci_update_pathstates:totaltime 0 unix:5:fp_ulp_callback:class taskq unix:5:fp_ulp_callback:crtime 77.266778 unix:5:fp_ulp_callback:executed 2 unix:5:fp_ulp_callback:maxtasks 1 unix:5:fp_ulp_callback:nactive 1 unix:5:fp_ulp_callback:nalloc 0 unix:5:fp_ulp_callback:priority 60 unix:5:fp_ulp_callback:snaptime 8464513.0320426 unix:5:fp_ulp_callback:tasks 2 unix:5:fp_ulp_callback:threads 1 unix:5:fp_ulp_callback:totaltime 10000689800 unix:6:ssd_drv_taskq:class taskq unix:6:ssd_drv_taskq:crtime 77.300058 unix:6:ssd_drv_taskq:executed 0 unix:6:ssd_drv_taskq:maxtasks 0 unix:6:ssd_drv_taskq:nactive 256 unix:6:ssd_drv_taskq:nalloc 0 unix:6:ssd_drv_taskq:priority 97 unix:6:ssd_drv_taskq:snaptime 8464513.0327294 unix:6:ssd_drv_taskq:tasks 0 unix:6:ssd_drv_taskq:threads 8 unix:6:ssd_drv_taskq:totaltime 0 unix:7:ssd_rmw_taskq:class taskq unix:7:ssd_rmw_taskq:crtime 77.300344 unix:7:ssd_rmw_taskq:executed 0 unix:7:ssd_rmw_taskq:maxtasks 0 unix:7:ssd_rmw_taskq:nactive 256 unix:7:ssd_rmw_taskq:nalloc 0 unix:7:ssd_rmw_taskq:priority 97 unix:7:ssd_rmw_taskq:snaptime 8464513.0333444 
unix:7:ssd_rmw_taskq:tasks 0 unix:7:ssd_rmw_taskq:threads 1 unix:7:ssd_rmw_taskq:totaltime 0 unix:8:xbuf_taskq:class taskq unix:8:xbuf_taskq:crtime 82.2914868 unix:8:xbuf_taskq:executed 0 unix:8:xbuf_taskq:maxtasks 0 unix:8:xbuf_taskq:nactive 64 unix:8:xbuf_taskq:nalloc 0 unix:8:xbuf_taskq:priority 97 unix:8:xbuf_taskq:snaptime 8464513.033945 unix:8:xbuf_taskq:tasks 0 unix:8:xbuf_taskq:threads 1 unix:8:xbuf_taskq:totaltime 0 unix:9:tcp_taskq:class taskq unix:9:tcp_taskq:crtime 83.9786204 unix:9:tcp_taskq:executed 0 unix:9:tcp_taskq:maxtasks 0 unix:9:tcp_taskq:nactive 1 unix:9:tcp_taskq:nalloc 0 unix:9:tcp_taskq:priority 60 unix:9:tcp_taskq:snaptime 8464513.034549 unix:9:tcp_taskq:tasks 0 unix:9:tcp_taskq:threads 1 unix:9:tcp_taskq:totaltime 0 unix:10:sctp_taskq:class taskq unix:10:sctp_taskq:crtime 83.9799938 unix:10:sctp_taskq:executed 0 unix:10:sctp_taskq:maxtasks 0 unix:10:sctp_taskq:nactive 1 unix:10:sctp_taskq:nalloc 0 unix:10:sctp_taskq:priority 60 unix:10:sctp_taskq:snaptime 8464513.03515 unix:10:sctp_taskq:tasks 0 unix:10:sctp_taskq:threads 1 unix:10:sctp_taskq:totaltime 0 unix:11:sctp_def_recvq_taskq:class taskq unix:11:sctp_def_recvq_taskq:crtime 83.9811804 unix:11:sctp_def_recvq_taskq:executed 0 unix:11:sctp_def_recvq_taskq:maxtasks 0 unix:11:sctp_def_recvq_taskq:nactive 5 unix:11:sctp_def_recvq_taskq:nalloc 0 unix:11:sctp_def_recvq_taskq:priority 60 unix:11:sctp_def_recvq_taskq:snaptime 8464513.0358454 unix:11:sctp_def_recvq_taskq:tasks 0 unix:11:sctp_def_recvq_taskq:threads 4 unix:11:sctp_def_recvq_taskq:totaltime 0 unix:12:USB_ohci_0_pipehndl_tq_0:class taskq unix:12:USB_ohci_0_pipehndl_tq_0:crtime 84.759408 unix:12:USB_ohci_0_pipehndl_tq_0:executed 33 unix:12:USB_ohci_0_pipehndl_tq_0:maxtasks 1 unix:12:USB_ohci_0_pipehndl_tq_0:nactive 8 unix:12:USB_ohci_0_pipehndl_tq_0:nalloc 0 unix:12:USB_ohci_0_pipehndl_tq_0:priority 60 unix:12:USB_ohci_0_pipehndl_tq_0:snaptime 8464513.0364546 unix:12:USB_ohci_0_pipehndl_tq_0:tasks 33 unix:12:USB_ohci_0_pipehndl_tq_0:threads 4 unix:12:USB_ohci_0_pipehndl_tq_0:totaltime 156000 unix:13:USB_ohci_81_pipehndl_tq_0:class taskq unix:13:USB_ohci_81_pipehndl_tq_0:crtime 84.8701098 unix:13:USB_ohci_81_pipehndl_tq_0:executed 6 unix:13:USB_ohci_81_pipehndl_tq_0:maxtasks 1 unix:13:USB_ohci_81_pipehndl_tq_0:nactive 4 unix:13:USB_ohci_81_pipehndl_tq_0:nalloc 0 unix:13:USB_ohci_81_pipehndl_tq_0:priority 60 unix:13:USB_ohci_81_pipehndl_tq_0:snaptime 8464513.0370622 unix:13:USB_ohci_81_pipehndl_tq_0:tasks 6 unix:13:USB_ohci_81_pipehndl_tq_0:threads 2 unix:13:USB_ohci_81_pipehndl_tq_0:totaltime 152800 unix:14:USB_device_0_pipehndl_tq_2_:class taskq unix:14:USB_device_0_pipehndl_tq_2_:crtime 85.1589754 unix:14:USB_device_0_pipehndl_tq_2_:executed 11 unix:14:USB_device_0_pipehndl_tq_2_:maxtasks 1 unix:14:USB_device_0_pipehndl_tq_2_:nactive 8 unix:14:USB_device_0_pipehndl_tq_2_:nalloc 0 unix:14:USB_device_0_pipehndl_tq_2_:priority 60 unix:14:USB_device_0_pipehndl_tq_2_:snaptime 8464513.0376662 unix:14:USB_device_0_pipehndl_tq_2_:tasks 11 unix:14:USB_device_0_pipehndl_tq_2_:threads 4 unix:14:USB_device_0_pipehndl_tq_2_:totaltime 243800 unix:15:ibmf_saa_event_taskq:bexecuted 0 unix:15:ibmf_saa_event_taskq:bmaxtasks 0 unix:15:ibmf_saa_event_taskq:bnactive 0 unix:15:ibmf_saa_event_taskq:bnalloc 1 unix:15:ibmf_saa_event_taskq:btasks 0 unix:15:ibmf_saa_event_taskq:btotaltime 0 unix:15:ibmf_saa_event_taskq:class taskq_d unix:15:ibmf_saa_event_taskq:crtime 85.1885076 unix:15:ibmf_saa_event_taskq:disptcreates 0 unix:15:ibmf_saa_event_taskq:hits 0 
unix:15:ibmf_saa_event_taskq:maxthreads 2 unix:15:ibmf_saa_event_taskq:misses 0 unix:15:ibmf_saa_event_taskq:nalloc 0 unix:15:ibmf_saa_event_taskq:nfree 0 unix:15:ibmf_saa_event_taskq:nomem 0 unix:15:ibmf_saa_event_taskq:overflows 0 unix:15:ibmf_saa_event_taskq:priority 60 unix:15:ibmf_saa_event_taskq:snaptime 8464513.0382778 unix:15:ibmf_saa_event_taskq:tcreates 2 unix:15:ibmf_saa_event_taskq:tdeaths 2 unix:15:ibmf_saa_event_taskq:totaltime 0 unix:16:ibmf_taskq:class taskq unix:16:ibmf_taskq:crtime 85.1885662 unix:16:ibmf_taskq:executed 0 unix:16:ibmf_taskq:maxtasks 0 unix:16:ibmf_taskq:nactive 1 unix:16:ibmf_taskq:nalloc 0 unix:16:ibmf_taskq:priority 60 unix:16:ibmf_taskq:snaptime 8464513.039319 unix:16:ibmf_taskq:tasks 0 unix:16:ibmf_taskq:threads 1 unix:16:ibmf_taskq:totaltime 0 unix:17:USB_device_0_pipehndl_tq_4_:class taskq unix:17:USB_device_0_pipehndl_tq_4_:crtime 85.4058468 unix:17:USB_device_0_pipehndl_tq_4_:executed 10 unix:17:USB_device_0_pipehndl_tq_4_:maxtasks 1 unix:17:USB_device_0_pipehndl_tq_4_:nactive 8 unix:17:USB_device_0_pipehndl_tq_4_:nalloc 0 unix:17:USB_device_0_pipehndl_tq_4_:priority 60 unix:17:USB_device_0_pipehndl_tq_4_:snaptime 8464513.0399154 unix:17:USB_device_0_pipehndl_tq_4_:tasks 10 unix:17:USB_device_0_pipehndl_tq_4_:threads 4 unix:17:USB_device_0_pipehndl_tq_4_:totaltime 109800 unix:18:USB_hid_81_pipehndl_tq_1:class taskq unix:18:USB_hid_81_pipehndl_tq_1:crtime 85.5540642 unix:18:USB_hid_81_pipehndl_tq_1:executed 0 unix:18:USB_hid_81_pipehndl_tq_1:maxtasks 0 unix:18:USB_hid_81_pipehndl_tq_1:nactive 4 unix:18:USB_hid_81_pipehndl_tq_1:nalloc 0 unix:18:USB_hid_81_pipehndl_tq_1:priority 60 unix:18:USB_hid_81_pipehndl_tq_1:snaptime 8464513.0406012 unix:18:USB_hid_81_pipehndl_tq_1:tasks 0 unix:18:USB_hid_81_pipehndl_tq_1:threads 2 unix:18:USB_hid_81_pipehndl_tq_1:totaltime 0 unix:19:USB_hid_81_pipehndl_tq_3:class taskq unix:19:USB_hid_81_pipehndl_tq_3:crtime 85.6488902 unix:19:USB_hid_81_pipehndl_tq_3:executed 2837 unix:19:USB_hid_81_pipehndl_tq_3:maxtasks 1 unix:19:USB_hid_81_pipehndl_tq_3:nactive 4 unix:19:USB_hid_81_pipehndl_tq_3:nalloc 0 unix:19:USB_hid_81_pipehndl_tq_3:priority 60 unix:19:USB_hid_81_pipehndl_tq_3:snaptime 8464513.0412962 unix:19:USB_hid_81_pipehndl_tq_3:tasks 2837 unix:19:USB_hid_81_pipehndl_tq_3:threads 2 unix:19:USB_hid_81_pipehndl_tq_3:totaltime 92548400 unix:20:ah_taskq:class taskq unix:20:ah_taskq:crtime 7784068.488013 unix:20:ah_taskq:executed 0 unix:20:ah_taskq:maxtasks 0 unix:20:ah_taskq:nactive 0 unix:20:ah_taskq:nalloc 0 unix:20:ah_taskq:priority 60 unix:20:ah_taskq:snaptime 8464513.0419048 unix:20:ah_taskq:tasks 0 unix:20:ah_taskq:threads 1 unix:20:ah_taskq:totaltime 0 unix:21:esp_taskq:class taskq unix:21:esp_taskq:crtime 7784068.5130772 unix:21:esp_taskq:executed 0 unix:21:esp_taskq:maxtasks 0 unix:21:esp_taskq:nactive 0 unix:21:esp_taskq:nalloc 0 unix:21:esp_taskq:priority 60 unix:21:esp_taskq:snaptime 8464513.0424978 unix:21:esp_taskq:tasks 0 unix:21:esp_taskq:threads 1 unix:21:esp_taskq:totaltime 0 unix:22:console_taskq:class taskq unix:22:console_taskq:crtime 87.8485928 unix:22:console_taskq:executed 1 unix:22:console_taskq:maxtasks 1 unix:22:console_taskq:nactive 10 unix:22:console_taskq:nalloc 0 unix:22:console_taskq:priority 98 unix:22:console_taskq:snaptime 8464513.043101 unix:22:console_taskq:tasks 1 unix:22:console_taskq:threads 1 unix:22:console_taskq:totaltime 81000 unix:23:dtrace_taskq:class taskq unix:23:dtrace_taskq:crtime 113.9731976 unix:23:dtrace_taskq:executed 0 unix:23:dtrace_taskq:maxtasks 0 
unix:23:dtrace_taskq:nactive 0 unix:23:dtrace_taskq:nalloc 0 unix:23:dtrace_taskq:priority 99 unix:23:dtrace_taskq:snaptime 8464513.0437014 unix:23:dtrace_taskq:tasks 0 unix:23:dtrace_taskq:threads 1 unix:23:dtrace_taskq:totaltime 0 unix:24:audio_mixer_taskq:class taskq unix:24:audio_mixer_taskq:crtime 3367063.3443572 unix:24:audio_mixer_taskq:executed 0 unix:24:audio_mixer_taskq:maxtasks 0 unix:24:audio_mixer_taskq:nactive 4 unix:24:audio_mixer_taskq:nalloc 0 unix:24:audio_mixer_taskq:priority 60 unix:24:audio_mixer_taskq:snaptime 8464513.0443042 unix:24:audio_mixer_taskq:tasks 0 unix:24:audio_mixer_taskq:threads 1 unix:24:audio_mixer_taskq:totaltime 0 unix:25:sd_drv_taskq:class taskq unix:25:sd_drv_taskq:crtime 123.3566742 unix:25:sd_drv_taskq:executed 0 unix:25:sd_drv_taskq:maxtasks 0 unix:25:sd_drv_taskq:nactive 256 unix:25:sd_drv_taskq:nalloc 0 unix:25:sd_drv_taskq:priority 97 unix:25:sd_drv_taskq:snaptime 8464513.0449024 unix:25:sd_drv_taskq:tasks 0 unix:25:sd_drv_taskq:threads 8 unix:25:sd_drv_taskq:totaltime 0 unix:26:sd_rmw_taskq:class taskq unix:26:sd_rmw_taskq:crtime 123.3569174 unix:26:sd_rmw_taskq:executed 0 unix:26:sd_rmw_taskq:maxtasks 0 unix:26:sd_rmw_taskq:nactive 256 unix:26:sd_rmw_taskq:nalloc 0 unix:26:sd_rmw_taskq:priority 97 unix:26:sd_rmw_taskq:snaptime 8464513.045502 unix:26:sd_rmw_taskq:tasks 0 unix:26:sd_rmw_taskq:threads 1 unix:26:sd_rmw_taskq:totaltime 0 unix:27:xbuf_taskq:class taskq unix:27:xbuf_taskq:crtime 123.3636994 unix:27:xbuf_taskq:executed 0 unix:27:xbuf_taskq:maxtasks 0 unix:27:xbuf_taskq:nactive 64 unix:27:xbuf_taskq:nalloc 0 unix:27:xbuf_taskq:priority 97 unix:27:xbuf_taskq:snaptime 8464513.0461008 unix:27:xbuf_taskq:tasks 0 unix:27:xbuf_taskq:threads 2 unix:27:xbuf_taskq:totaltime 0 unix:28:clts_endpnt_taskq:class taskq unix:28:clts_endpnt_taskq:crtime 123.5834752 unix:28:clts_endpnt_taskq:executed 15 unix:28:clts_endpnt_taskq:maxtasks 1 unix:28:clts_endpnt_taskq:nactive 15 unix:28:clts_endpnt_taskq:nalloc 0 unix:28:clts_endpnt_taskq:priority 60 unix:28:clts_endpnt_taskq:snaptime 8464513.0466998 unix:28:clts_endpnt_taskq:tasks 15 unix:28:clts_endpnt_taskq:threads 1 unix:28:clts_endpnt_taskq:totaltime 1590400 unix:1223:rpc_cots_connections:addr_family 2 unix:1223:rpc_cots_connections:class rpc unix:1223:rpc_cots_connections:crtime 8403028.1901656 unix:1223:rpc_cots_connections:device 2 unix:1223:rpc_cots_connections:port 111 unix:1223:rpc_cots_connections:ref_count 0 unix:1223:rpc_cots_connections:server 128.009.208.207 unix:1223:rpc_cots_connections:snaptime 8464513.0474402 unix:1223:rpc_cots_connections:status 134217728 unix:1223:rpc_cots_connections:time_stamp 840295892 unix:1223:rpc_cots_connections:write_queue 102896616 unix:1230:rpc_cots_connections:addr_family 2 unix:1230:rpc_cots_connections:class rpc unix:1230:rpc_cots_connections:crtime 8463822.2201692 unix:1230:rpc_cots_connections:device 2 unix:1230:rpc_cots_connections:port 2049 unix:1230:rpc_cots_connections:ref_count 1 unix:1230:rpc_cots_connections:server 128.009.208.207 unix:1230:rpc_cots_connections:snaptime 8464513.0480668 unix:1230:rpc_cots_connections:status 402653184 unix:1230:rpc_cots_connections:time_stamp 846444377 unix:1230:rpc_cots_connections:write_queue 138710552 usba:0:ohci0,bulk:class usb_byte_count usba:0:ohci0,bulk:crtime 84.7438512 usba:0:ohci0,bulk:nread 0 usba:0:ohci0,bulk:nwritten 0 usba:0:ohci0,bulk:rcnt 0 usba:0:ohci0,bulk:reads 0 usba:0:ohci0,bulk:rlastupdate 0 usba:0:ohci0,bulk:rlentime 0 usba:0:ohci0,bulk:rtime 0 usba:0:ohci0,bulk:snaptime 8464513.0487762 
usba:0:ohci0,bulk:wcnt 0 usba:0:ohci0,bulk:wlastupdate 0 usba:0:ohci0,bulk:wlentime 0 usba:0:ohci0,bulk:writes 0 usba:0:ohci0,bulk:wtime 0 usba:0:ohci0,ctrl:class usb_byte_count usba:0:ohci0,ctrl:crtime 84.743834 usba:0:ohci0,ctrl:nread 376 usba:0:ohci0,ctrl:nwritten 18446744073709514827 usba:0:ohci0,ctrl:rcnt 0 usba:0:ohci0,ctrl:reads 12 usba:0:ohci0,ctrl:rlastupdate 0 usba:0:ohci0,ctrl:rlentime 0 usba:0:ohci0,ctrl:rtime 0 usba:0:ohci0,ctrl:snaptime 8464513.0495434 usba:0:ohci0,ctrl:wcnt 0 usba:0:ohci0,ctrl:wlastupdate 0 usba:0:ohci0,ctrl:wlentime 0 usba:0:ohci0,ctrl:writes 12 usba:0:ohci0,ctrl:wtime 0 usba:0:ohci0,error: usba:0:ohci0,error:Bit Stuffing Violations 0 usba:0:ohci0,error:Buffer Overruns 0 usba:0:ohci0,error:Buffer Underruns 0 usba:0:ohci0,error:CRC Errors 0 usba:0:ohci0,error:Command Timed Out 0 usba:0:ohci0,error:Data Overruns 0 usba:0:ohci0,error:Data Toggle PID Errors 0 usba:0:ohci0,error:Data Underruns 0 usba:0:ohci0,error:Device Not Responding 0 usba:0:ohci0,error:Endpoint Stalls 1 usba:0:ohci0,error:Invalid PID Errors 0 usba:0:ohci0,error:Not Accessed By Hardware 0 usba:0:ohci0,error:PID Check Bit Errors 0 usba:0:ohci0,error:Unspecified Error 0 usba:0:ohci0,error:class usb_errors usba:0:ohci0,error:crtime 84.7591056 usba:0:ohci0,error:snaptime 8464513.0503984 usba:0:ohci0,hotplug:Device Count 2 usba:0:ohci0,hotplug:Hotplug Failures 0 usba:0:ohci0,hotplug:Hotplug Successes 2 usba:0:ohci0,hotplug:Hotplug Total Failures 0 usba:0:ohci0,hotplug:Total Hotplug Successes 2 usba:0:ohci0,hotplug:class usb_hotplug usba:0:ohci0,hotplug:crtime 84.7590898 usba:0:ohci0,hotplug:snaptime 8464513.0514174 usba:0:ohci0,intr:class usb_byte_count usba:0:ohci0,intr:crtime 84.7438598 usba:0:ohci0,intr:nread 12348 usba:0:ohci0,intr:nwritten 0 usba:0:ohci0,intr:rcnt 0 usba:0:ohci0,intr:reads 2962 usba:0:ohci0,intr:rlastupdate 0 usba:0:ohci0,intr:rlentime 0 usba:0:ohci0,intr:rtime 0 usba:0:ohci0,intr:snaptime 8464513.0518826 usba:0:ohci0,intr:wcnt 0 usba:0:ohci0,intr:wlastupdate 0 usba:0:ohci0,intr:wlentime 0 usba:0:ohci0,intr:writes 0 usba:0:ohci0,intr:wtime 0 usba:0:ohci0,intrs:Change In Ownership 0 usba:0:ohci0,intrs:Frame No. 
Overflow 258475 usba:0:ohci0,intrs:Interrupts Total 261469 usba:0:ohci0,intrs:Not Claimed 0 usba:0:ohci0,intrs:Resume Detected 0 usba:0:ohci0,intrs:Root Hub Status Change 0 usba:0:ohci0,intrs:Schedule Overruns 0 usba:0:ohci0,intrs:Start Of Frame 7 usba:0:ohci0,intrs:Unrecoverable Error 0 usba:0:ohci0,intrs:Writeback Done Head 2987 usba:0:ohci0,intrs:class usb_interrupts usba:0:ohci0,intrs:crtime 84.7438088 usba:0:ohci0,intrs:snaptime 8464513.0526498 usba:0:ohci0,isoch:class usb_byte_count usba:0:ohci0,isoch:crtime 84.7438428 usba:0:ohci0,isoch:nread 0 usba:0:ohci0,isoch:nwritten 0 usba:0:ohci0,isoch:rcnt 0 usba:0:ohci0,isoch:reads 0 usba:0:ohci0,isoch:rlastupdate 0 usba:0:ohci0,isoch:rlentime 0 usba:0:ohci0,isoch:rtime 0 usba:0:ohci0,isoch:snaptime 8464513.053332 usba:0:ohci0,isoch:wcnt 0 usba:0:ohci0,isoch:wlastupdate 0 usba:0:ohci0,isoch:wlentime 0 usba:0:ohci0,isoch:writes 0 usba:0:ohci0,isoch:wtime 0 usba:0:ohci0,total:class usb_byte_count usba:0:ohci0,total:crtime 84.743825 usba:0:ohci0,total:nread 12724 usba:0:ohci0,total:nwritten 18446744073709514827 usba:0:ohci0,total:rcnt 0 usba:0:ohci0,total:reads 2974 usba:0:ohci0,total:rlastupdate 0 usba:0:ohci0,total:rlentime 0 usba:0:ohci0,total:rtime 0 usba:0:ohci0,total:snaptime 8464513.054088 usba:0:ohci0,total:wcnt 0 usba:0:ohci0,total:wlastupdate 0 usba:0:ohci0,total:wlentime 0 usba:0:ohci0,total:writes 12 usba:0:ohci0,total:wtime 0 vmem:1:heap:alloc 7923 vmem:1:heap:class vmem vmem:1:heap:contains 0 vmem:1:heap:contains_search 0 vmem:1:heap:crtime 65.192841 vmem:1:heap:fail 0 vmem:1:heap:free 4008 vmem:1:heap:lookup 222 vmem:1:heap:mem_import 0 vmem:1:heap:mem_inuse 168116224 vmem:1:heap:mem_total 4398046511104 vmem:1:heap:populate_fail 0 vmem:1:heap:populate_wait 0 vmem:1:heap:search 674 vmem:1:heap:snaptime 8464513.0554486 vmem:1:heap:vmem_source 0 vmem:1:heap:wait 0 vmem:2:vmem_metadata:alloc 271 vmem:2:vmem_metadata:class vmem vmem:2:vmem_metadata:contains 0 vmem:2:vmem_metadata:contains_search 0 vmem:2:vmem_metadata:crtime 65.1928898 vmem:2:vmem_metadata:fail 0 vmem:2:vmem_metadata:free 4 vmem:2:vmem_metadata:lookup 0 vmem:2:vmem_metadata:mem_import 2359296 vmem:2:vmem_metadata:mem_inuse 2244608 vmem:2:vmem_metadata:mem_total 2359296 vmem:2:vmem_metadata:populate_fail 0 vmem:2:vmem_metadata:populate_wait 0 vmem:2:vmem_metadata:search 0 vmem:2:vmem_metadata:snaptime 8464513.056352 vmem:2:vmem_metadata:vmem_source 1 vmem:2:vmem_metadata:wait 0 vmem:3:vmem_seg:alloc 228 vmem:3:vmem_seg:class vmem vmem:3:vmem_seg:contains 0 vmem:3:vmem_seg:contains_search 0 vmem:3:vmem_seg:crtime 65.1929278 vmem:3:vmem_seg:fail 0 vmem:3:vmem_seg:free 0 vmem:3:vmem_seg:lookup 0 vmem:3:vmem_seg:mem_import 1867776 vmem:3:vmem_seg:mem_inuse 1867776 vmem:3:vmem_seg:mem_total 1867776 vmem:3:vmem_seg:populate_fail 0 vmem:3:vmem_seg:populate_wait 0 vmem:3:vmem_seg:search 0 vmem:3:vmem_seg:snaptime 8464513.0572844 vmem:3:vmem_seg:vmem_source 2 vmem:3:vmem_seg:wait 0 vmem:4:vmem_hash:alloc 42 vmem:4:vmem_hash:class vmem vmem:4:vmem_hash:contains 0 vmem:4:vmem_hash:contains_search 0 vmem:4:vmem_hash:crtime 65.1929512 vmem:4:vmem_hash:fail 0 vmem:4:vmem_hash:free 15 vmem:4:vmem_hash:lookup 12 vmem:4:vmem_hash:mem_import 122880 vmem:4:vmem_hash:mem_inuse 116224 vmem:4:vmem_hash:mem_total 122880 vmem:4:vmem_hash:populate_fail 0 vmem:4:vmem_hash:populate_wait 0 vmem:4:vmem_hash:search 0 vmem:4:vmem_hash:snaptime 8464513.0581666 vmem:4:vmem_hash:vmem_source 2 vmem:4:vmem_hash:wait 0 vmem:5:vmem_vmem:alloc 77 vmem:5:vmem_vmem:class vmem vmem:5:vmem_vmem:contains 0 
vmem:5:vmem_vmem:contains_search 0 vmem:5:vmem_vmem:crtime 65.192977 vmem:5:vmem_vmem:fail 0 vmem:5:vmem_vmem:free 5 vmem:5:vmem_vmem:lookup 0 vmem:5:vmem_vmem:mem_import 253952 vmem:5:vmem_vmem:mem_inuse 250560 vmem:5:vmem_vmem:mem_total 288752 vmem:5:vmem_vmem:populate_fail 0 vmem:5:vmem_vmem:populate_wait 0 vmem:5:vmem_vmem:search 12 vmem:5:vmem_vmem:snaptime 8464513.0590304 vmem:5:vmem_vmem:vmem_source 2 vmem:5:vmem_vmem:wait 0 vmem:6:heap32:alloc 153 vmem:6:heap32:class vmem vmem:6:heap32:contains 0 vmem:6:heap32:contains_search 0 vmem:6:heap32:crtime 65.1930318 vmem:6:heap32:fail 0 vmem:6:heap32:free 110 vmem:6:heap32:lookup 0 vmem:6:heap32:mem_import 0 vmem:6:heap32:mem_inuse 5089344 vmem:6:heap32:mem_total 134217728 vmem:6:heap32:populate_fail 0 vmem:6:heap32:populate_wait 0 vmem:6:heap32:search 4 vmem:6:heap32:snaptime 8464513.0598942 vmem:6:heap32:vmem_source 0 vmem:6:heap32:wait 0 vmem:7:heaptext:alloc 111 vmem:7:heaptext:class vmem vmem:7:heaptext:contains 216 vmem:7:heaptext:contains_search 216 vmem:7:heaptext:crtime 65.1930642 vmem:7:heaptext:fail 0 vmem:7:heaptext:free 22 vmem:7:heaptext:lookup 10 vmem:7:heaptext:mem_import 0 vmem:7:heaptext:mem_inuse 39976960 vmem:7:heaptext:mem_total 134217728 vmem:7:heaptext:populate_fail 0 vmem:7:heaptext:populate_wait 0 vmem:7:heaptext:search 169 vmem:7:heaptext:snaptime 8464513.0608556 vmem:7:heaptext:vmem_source 0 vmem:7:heaptext:wait 0 vmem:8:static:alloc 59 vmem:8:static:class vmem vmem:8:static:contains 0 vmem:8:static:contains_search 0 vmem:8:static:crtime 65.1930888 vmem:8:static:fail 0 vmem:8:static:free 0 vmem:8:static:lookup 0 vmem:8:static:mem_import 548864 vmem:8:static:mem_inuse 548864 vmem:8:static:mem_total 548864 vmem:8:static:populate_fail 0 vmem:8:static:populate_wait 0 vmem:8:static:search 0 vmem:8:static:snaptime 8464513.0617988 vmem:8:static:vmem_source 1 vmem:8:static:wait 0 vmem:9:static_alloc:alloc 3 vmem:9:static_alloc:class vmem vmem:9:static_alloc:contains 0 vmem:9:static_alloc:contains_search 0 vmem:9:static_alloc:crtime 65.193111 vmem:9:static_alloc:fail 0 vmem:9:static_alloc:free 0 vmem:9:static_alloc:lookup 0 vmem:9:static_alloc:mem_import 16384 vmem:9:static_alloc:mem_inuse 8320 vmem:9:static_alloc:mem_total 16384 vmem:9:static_alloc:populate_fail 0 vmem:9:static_alloc:populate_wait 0 vmem:9:static_alloc:search 1 vmem:9:static_alloc:snaptime 8464513.062669 vmem:9:static_alloc:vmem_source 8 vmem:9:static_alloc:wait 0 vmem:10:hat_memload:alloc 3959 vmem:10:hat_memload:class vmem vmem:10:hat_memload:contains 0 vmem:10:hat_memload:contains_search 0 vmem:10:hat_memload:crtime 65.193139 vmem:10:hat_memload:fail 0 vmem:10:hat_memload:free 782 vmem:10:hat_memload:lookup 519 vmem:10:hat_memload:mem_import 26025984 vmem:10:hat_memload:mem_inuse 26025984 vmem:10:hat_memload:mem_total 26025984 vmem:10:hat_memload:populate_fail 0 vmem:10:hat_memload:populate_wait 0 vmem:10:hat_memload:search 0 vmem:10:hat_memload:snaptime 8464513.0635348 vmem:10:hat_memload:vmem_source 1 vmem:10:hat_memload:wait 0 vmem:11:kstat:alloc 2394 vmem:11:kstat:class vmem vmem:11:kstat:contains 0 vmem:11:kstat:contains_search 0 vmem:11:kstat:crtime 65.9025478 vmem:11:kstat:fail 0 vmem:11:kstat:free 1767 vmem:11:kstat:lookup 5 vmem:11:kstat:mem_import 155648 vmem:11:kstat:mem_inuse 215408 vmem:11:kstat:mem_total 221184 vmem:11:kstat:populate_fail 0 vmem:11:kstat:populate_wait 0 vmem:11:kstat:search 108 vmem:11:kstat:snaptime 8464513.0644986 vmem:11:kstat:vmem_source 1 vmem:11:kstat:wait 0 vmem:12:kmem_metadata:alloc 371 
vmem:12:kmem_metadata:class vmem vmem:12:kmem_metadata:contains 0 vmem:12:kmem_metadata:contains_search 0 vmem:12:kmem_metadata:crtime 65.9027322 vmem:12:kmem_metadata:fail 0 vmem:12:kmem_metadata:free 1 vmem:12:kmem_metadata:lookup 0 vmem:12:kmem_metadata:mem_import 3145728 vmem:12:kmem_metadata:mem_inuse 3055616 vmem:12:kmem_metadata:mem_total 3145728 vmem:12:kmem_metadata:populate_fail 0 vmem:12:kmem_metadata:populate_wait 0 vmem:12:kmem_metadata:search 0 vmem:12:kmem_metadata:snaptime 8464513.0653696 vmem:12:kmem_metadata:vmem_source 1 vmem:12:kmem_metadata:wait 0 vmem:13:kmem_msb:alloc 332 vmem:13:kmem_msb:class vmem vmem:13:kmem_msb:contains 0 vmem:13:kmem_msb:contains_search 0 vmem:13:kmem_msb:crtime 65.9029698 vmem:13:kmem_msb:fail 0 vmem:13:kmem_msb:free 5 vmem:13:kmem_msb:lookup 0 vmem:13:kmem_msb:mem_import 2678784 vmem:13:kmem_msb:mem_inuse 2678784 vmem:13:kmem_msb:mem_total 2678784 vmem:13:kmem_msb:populate_fail 0 vmem:13:kmem_msb:populate_wait 0 vmem:13:kmem_msb:search 0 vmem:13:kmem_msb:snaptime 8464513.066249 vmem:13:kmem_msb:vmem_source 12 vmem:13:kmem_msb:wait 0 vmem:14:kmem_cache:alloc 376 vmem:14:kmem_cache:class vmem vmem:14:kmem_cache:contains 0 vmem:14:kmem_cache:contains_search 0 vmem:14:kmem_cache:crtime 65.902987 vmem:14:kmem_cache:fail 0 vmem:14:kmem_cache:free 52 vmem:14:kmem_cache:lookup 1 vmem:14:kmem_cache:mem_import 229376 vmem:14:kmem_cache:mem_inuse 204768 vmem:14:kmem_cache:mem_total 229376 vmem:14:kmem_cache:populate_fail 0 vmem:14:kmem_cache:populate_wait 0 vmem:14:kmem_cache:search 758 vmem:14:kmem_cache:snaptime 8464513.067115 vmem:14:kmem_cache:vmem_source 12 vmem:14:kmem_cache:wait 0 vmem:15:kmem_hash:alloc 161 vmem:15:kmem_hash:class vmem vmem:15:kmem_hash:contains 0 vmem:15:kmem_hash:contains_search 0 vmem:15:kmem_hash:crtime 65.9032246 vmem:15:kmem_hash:fail 0 vmem:15:kmem_hash:free 27 vmem:15:kmem_hash:lookup 14 vmem:15:kmem_hash:mem_import 147456 vmem:15:kmem_hash:mem_inuse 139776 vmem:15:kmem_hash:mem_total 147456 vmem:15:kmem_hash:populate_fail 0 vmem:15:kmem_hash:populate_wait 0 vmem:15:kmem_hash:search 0 vmem:15:kmem_hash:snaptime 8464513.067979 vmem:15:kmem_hash:vmem_source 12 vmem:15:kmem_hash:wait 0 vmem:16:kmem_log:alloc 6 vmem:16:kmem_log:class vmem vmem:16:kmem_log:contains 0 vmem:16:kmem_log:contains_search 0 vmem:16:kmem_log:crtime 65.9032414 vmem:16:kmem_log:fail 0 vmem:16:kmem_log:free 0 vmem:16:kmem_log:lookup 0 vmem:16:kmem_log:mem_import 139264 vmem:16:kmem_log:mem_inuse 131488 vmem:16:kmem_log:mem_total 139264 vmem:16:kmem_log:populate_fail 0 vmem:16:kmem_log:populate_wait 0 vmem:16:kmem_log:search 1 vmem:16:kmem_log:snaptime 8464513.068843 vmem:16:kmem_log:vmem_source 1 vmem:16:kmem_log:wait 0 vmem:17:kmem_firewall_va:alloc 3170 vmem:17:kmem_firewall_va:class vmem vmem:17:kmem_firewall_va:contains 0 vmem:17:kmem_firewall_va:contains_search 0 vmem:17:kmem_firewall_va:crtime 65.9034564 vmem:17:kmem_firewall_va:fail 0 vmem:17:kmem_firewall_va:free 3099 vmem:17:kmem_firewall_va:lookup 12 vmem:17:kmem_firewall_va:mem_import 49528832 vmem:17:kmem_firewall_va:mem_inuse 49528832 vmem:17:kmem_firewall_va:mem_total 49528832 vmem:17:kmem_firewall_va:populate_fail 0 vmem:17:kmem_firewall_va:populate_wait 0 vmem:17:kmem_firewall_va:search 0 vmem:17:kmem_firewall_va:snaptime 8464513.0697114 vmem:17:kmem_firewall_va:vmem_source 1 vmem:17:kmem_firewall_va:wait 0 vmem:18:kmem_firewall:alloc 0 vmem:18:kmem_firewall:class vmem vmem:18:kmem_firewall:contains 0 vmem:18:kmem_firewall:contains_search 0 vmem:18:kmem_firewall:crtime 65.903473 
vmem:18:kmem_firewall:fail 0 vmem:18:kmem_firewall:free 0 vmem:18:kmem_firewall:lookup 0 vmem:18:kmem_firewall:mem_import 0 vmem:18:kmem_firewall:mem_inuse 0 vmem:18:kmem_firewall:mem_total 0 vmem:18:kmem_firewall:populate_fail 0 vmem:18:kmem_firewall:populate_wait 0 vmem:18:kmem_firewall:search 0 vmem:18:kmem_firewall:snaptime 8464513.0707388 vmem:18:kmem_firewall:vmem_source 17 vmem:18:kmem_firewall:wait 0 vmem:20:mod_sysfile:alloc 0 vmem:20:mod_sysfile:class vmem vmem:20:mod_sysfile:contains 0 vmem:20:mod_sysfile:contains_search 0 vmem:20:mod_sysfile:crtime 65.9065598 vmem:20:mod_sysfile:fail 0 vmem:20:mod_sysfile:free 0 vmem:20:mod_sysfile:lookup 0 vmem:20:mod_sysfile:mem_import 0 vmem:20:mod_sysfile:mem_inuse 0 vmem:20:mod_sysfile:mem_total 0 vmem:20:mod_sysfile:populate_fail 0 vmem:20:mod_sysfile:populate_wait 0 vmem:20:mod_sysfile:search 0 vmem:20:mod_sysfile:snaptime 8464513.0716204 vmem:20:mod_sysfile:vmem_source 1 vmem:20:mod_sysfile:wait 0 vmem:21:kmem_oversize:alloc 3177 vmem:21:kmem_oversize:class vmem vmem:21:kmem_oversize:contains 0 vmem:21:kmem_oversize:contains_search 0 vmem:21:kmem_oversize:crtime 65.949124 vmem:21:kmem_oversize:fail 0 vmem:21:kmem_oversize:free 3106 vmem:21:kmem_oversize:lookup 3 vmem:21:kmem_oversize:mem_import 49528832 vmem:21:kmem_oversize:mem_inuse 49451563 vmem:21:kmem_oversize:mem_total 49528832 vmem:21:kmem_oversize:populate_fail 0 vmem:21:kmem_oversize:populate_wait 0 vmem:21:kmem_oversize:search 0 vmem:21:kmem_oversize:snaptime 8464513.0724916 vmem:21:kmem_oversize:vmem_source 17 vmem:21:kmem_oversize:wait 0 vmem:22:kmem_va:alloc 241 vmem:22:kmem_va:class vmem vmem:22:kmem_va:contains 0 vmem:22:kmem_va:contains_search 0 vmem:22:kmem_va:crtime 65.9498826 vmem:22:kmem_va:fail 0 vmem:22:kmem_va:free 0 vmem:22:kmem_va:lookup 0 vmem:22:kmem_va:mem_import 62832640 vmem:22:kmem_va:mem_inuse 62832640 vmem:22:kmem_va:mem_total 62832640 vmem:22:kmem_va:populate_fail 0 vmem:22:kmem_va:populate_wait 0 vmem:22:kmem_va:search 0 vmem:22:kmem_va:snaptime 8464513.0733676 vmem:22:kmem_va:vmem_source 1 vmem:22:kmem_va:wait 0 vmem:23:kmem_default:alloc 7203 vmem:23:kmem_default:class vmem vmem:23:kmem_default:contains 0 vmem:23:kmem_default:contains_search 0 vmem:23:kmem_default:crtime 65.9499026 vmem:23:kmem_default:fail 0 vmem:23:kmem_default:free 13 vmem:23:kmem_default:lookup 0 vmem:23:kmem_default:mem_import 61849600 vmem:23:kmem_default:mem_inuse 61849600 vmem:23:kmem_default:mem_total 61849600 vmem:23:kmem_default:populate_fail 0 vmem:23:kmem_default:populate_wait 0 vmem:23:kmem_default:search 0 vmem:23:kmem_default:snaptime 8464513.0742412 vmem:23:kmem_default:vmem_source 22 vmem:23:kmem_default:wait 0 vmem:24:logminor_space:alloc 125 vmem:24:logminor_space:class vmem vmem:24:logminor_space:contains 0 vmem:24:logminor_space:contains_search 0 vmem:24:logminor_space:crtime 65.9569554 vmem:24:logminor_space:fail 0 vmem:24:logminor_space:free 100 vmem:24:logminor_space:lookup 0 vmem:24:logminor_space:mem_import 0 vmem:24:logminor_space:mem_inuse 25 vmem:24:logminor_space:mem_total 262137 vmem:24:logminor_space:populate_fail 0 vmem:24:logminor_space:populate_wait 0 vmem:24:logminor_space:search 2 vmem:24:logminor_space:snaptime 8464513.0751804 vmem:24:logminor_space:vmem_source 0 vmem:24:logminor_space:wait 0 vmem:25:taskq_id_arena:alloc 34 vmem:25:taskq_id_arena:class vmem vmem:25:taskq_id_arena:contains 0 vmem:25:taskq_id_arena:contains_search 0 vmem:25:taskq_id_arena:crtime 66.0094018 vmem:25:taskq_id_arena:fail 0 vmem:25:taskq_id_arena:free 6 
vmem:25:taskq_id_arena:lookup 0 vmem:25:taskq_id_arena:mem_import 0 vmem:25:taskq_id_arena:mem_inuse 28 vmem:25:taskq_id_arena:mem_total 2147483647 vmem:25:taskq_id_arena:populate_fail 0 vmem:25:taskq_id_arena:populate_wait 0 vmem:25:taskq_id_arena:search 0 vmem:25:taskq_id_arena:snaptime 8464513.0761342 vmem:25:taskq_id_arena:vmem_source 0 vmem:25:taskq_id_arena:wait 0 vmem:26:little_endian:alloc 58 vmem:26:little_endian:class vmem vmem:26:little_endian:contains 0 vmem:26:little_endian:contains_search 0 vmem:26:little_endian:crtime 66.009731 vmem:26:little_endian:fail 0 vmem:26:little_endian:free 34 vmem:26:little_endian:lookup 1 vmem:26:little_endian:mem_import 147456 vmem:26:little_endian:mem_inuse 138496 vmem:26:little_endian:mem_total 147456 vmem:26:little_endian:populate_fail 0 vmem:26:little_endian:populate_wait 0 vmem:26:little_endian:search 8 vmem:26:little_endian:snaptime 8464513.0770124 vmem:26:little_endian:vmem_source 1 vmem:26:little_endian:wait 0 vmem:27:big_endian:alloc 55 vmem:27:big_endian:class vmem vmem:27:big_endian:contains 0 vmem:27:big_endian:contains_search 0 vmem:27:big_endian:crtime 66.0097568 vmem:27:big_endian:fail 0 vmem:27:big_endian:free 33 vmem:27:big_endian:lookup 2 vmem:27:big_endian:mem_import 204800 vmem:27:big_endian:mem_inuse 139972 vmem:27:big_endian:mem_total 204800 vmem:27:big_endian:populate_fail 0 vmem:27:big_endian:populate_wait 0 vmem:27:big_endian:search 1 vmem:27:big_endian:snaptime 8464513.0778764 vmem:27:big_endian:vmem_source 1 vmem:27:big_endian:wait 0 vmem:28:id32:alloc 1 vmem:28:id32:class vmem vmem:28:id32:contains 0 vmem:28:id32:contains_search 0 vmem:28:id32:crtime 66.0100102 vmem:28:id32:fail 0 vmem:28:id32:free 0 vmem:28:id32:lookup 0 vmem:28:id32:mem_import 8192 vmem:28:id32:mem_inuse 8192 vmem:28:id32:mem_total 8192 vmem:28:id32:populate_fail 0 vmem:28:id32:populate_wait 0 vmem:28:id32:search 0 vmem:28:id32:snaptime 8464513.078739 vmem:28:id32:vmem_source 6 vmem:28:id32:wait 0 vmem:29:bp_map:alloc 2 vmem:29:bp_map:class vmem vmem:29:bp_map:contains 0 vmem:29:bp_map:contains_search 0 vmem:29:bp_map:crtime 66.0107118 vmem:29:bp_map:fail 0 vmem:29:bp_map:free 0 vmem:29:bp_map:lookup 0 vmem:29:bp_map:mem_import 1048576 vmem:29:bp_map:mem_inuse 1048576 vmem:29:bp_map:mem_total 1048576 vmem:29:bp_map:populate_fail 0 vmem:29:bp_map:populate_wait 0 vmem:29:bp_map:search 0 vmem:29:bp_map:snaptime 8464513.0795964 vmem:29:bp_map:vmem_source 1 vmem:29:bp_map:wait 0 vmem:30:ksyms:alloc 244 vmem:30:ksyms:class vmem vmem:30:ksyms:contains 40 vmem:30:ksyms:contains_search 1547 vmem:30:ksyms:crtime 66.0296784 vmem:30:ksyms:fail 0 vmem:30:ksyms:free 40 vmem:30:ksyms:lookup 12 vmem:30:ksyms:mem_import 1843200 vmem:30:ksyms:mem_inuse 1821152 vmem:30:ksyms:mem_total 1843200 vmem:30:ksyms:populate_fail 0 vmem:30:ksyms:populate_wait 0 vmem:30:ksyms:search 948 vmem:30:ksyms:snaptime 8464513.0805278 vmem:30:ksyms:vmem_source 1 vmem:30:ksyms:wait 0 vmem:31:ctf:alloc 240 vmem:31:ctf:class vmem vmem:31:ctf:contains 39 vmem:31:ctf:contains_search 2795 vmem:31:ctf:crtime 66.0296958 vmem:31:ctf:fail 0 vmem:31:ctf:free 39 vmem:31:ctf:lookup 11 vmem:31:ctf:mem_import 4792320 vmem:31:ctf:mem_inuse 4238519 vmem:31:ctf:mem_total 4792320 vmem:31:ctf:populate_fail 0 vmem:31:ctf:populate_wait 0 vmem:31:ctf:search 852 vmem:31:ctf:snaptime 8464513.081467 vmem:31:ctf:vmem_source 1 vmem:31:ctf:wait 0 vmem:32:kmem_tsb:alloc 1 vmem:32:kmem_tsb:class vmem vmem:32:kmem_tsb:contains 0 vmem:32:kmem_tsb:contains_search 0 vmem:32:kmem_tsb:crtime 66.238611 vmem:32:kmem_tsb:fail 0 
vmem:32:kmem_tsb:free 0 vmem:32:kmem_tsb:lookup 0 vmem:32:kmem_tsb:mem_import 4194304 vmem:32:kmem_tsb:mem_inuse 4194304 vmem:32:kmem_tsb:mem_total 4194304 vmem:32:kmem_tsb:populate_fail 0 vmem:32:kmem_tsb:populate_wait 0 vmem:32:kmem_tsb:search 0 vmem:32:kmem_tsb:snaptime 8464513.08235 vmem:32:kmem_tsb:vmem_source 1 vmem:32:kmem_tsb:wait 0 vmem:33:kmem_tsb_default:alloc 55611 vmem:33:kmem_tsb_default:class vmem vmem:33:kmem_tsb_default:contains 0 vmem:33:kmem_tsb_default:contains_search 0 vmem:33:kmem_tsb_default:crtime 66.2386746 vmem:33:kmem_tsb_default:fail 0 vmem:33:kmem_tsb_default:free 55451 vmem:33:kmem_tsb_default:lookup 3 vmem:33:kmem_tsb_default:mem_import 4194304 vmem:33:kmem_tsb_default:mem_inuse 1507328 vmem:33:kmem_tsb_default:mem_total 4194304 vmem:33:kmem_tsb_default:populate_fail 0 vmem:33:kmem_tsb_default:populate_wait 0 vmem:33:kmem_tsb_default:search 55463 vmem:33:kmem_tsb_default:snaptime 8464513.0832194 vmem:33:kmem_tsb_default:vmem_source 32 vmem:33:kmem_tsb_default:wait 0 vmem:34:hat_memload1:alloc 85 vmem:34:hat_memload1:class vmem vmem:34:hat_memload1:contains 0 vmem:34:hat_memload1:contains_search 0 vmem:34:hat_memload1:crtime 66.2390136 vmem:34:hat_memload1:fail 0 vmem:34:hat_memload1:free 0 vmem:34:hat_memload1:lookup 0 vmem:34:hat_memload1:mem_import 696320 vmem:34:hat_memload1:mem_inuse 696320 vmem:34:hat_memload1:mem_total 696320 vmem:34:hat_memload1:populate_fail 0 vmem:34:hat_memload1:populate_wait 0 vmem:34:hat_memload1:search 0 vmem:34:hat_memload1:snaptime 8464513.0840932 vmem:34:hat_memload1:vmem_source 1 vmem:34:hat_memload1:wait 0 vmem:35:module_text:alloc 240 vmem:35:module_text:class vmem vmem:35:module_text:contains 0 vmem:35:module_text:contains_search 0 vmem:35:module_text:crtime 66.4499828 vmem:35:module_text:fail 0 vmem:35:module_text:free 40 vmem:35:module_text:lookup 9 vmem:35:module_text:mem_import 6414336 vmem:35:module_text:mem_inuse 8262492 vmem:35:module_text:mem_total 8413184 vmem:35:module_text:populate_fail 0 vmem:35:module_text:populate_wait 0 vmem:35:module_text:search 1043 vmem:35:module_text:snaptime 8464513.0849594 vmem:35:module_text:vmem_source 7 vmem:35:module_text:wait 0 vmem:36:module_data:alloc 332 vmem:36:module_data:class vmem vmem:36:module_data:contains 0 vmem:36:module_data:contains_search 0 vmem:36:module_data:crtime 66.4503488 vmem:36:module_data:fail 0 vmem:36:module_data:free 54 vmem:36:module_data:lookup 34 vmem:36:module_data:mem_import 4923392 vmem:36:module_data:mem_inuse 2151184 vmem:36:module_data:mem_total 5185536 vmem:36:module_data:populate_fail 0 vmem:36:module_data:populate_wait 0 vmem:36:module_data:search 943 vmem:36:module_data:snaptime 8464513.085836 vmem:36:module_data:vmem_source 6 vmem:36:module_data:wait 0 vmem:37:promplat:alloc 109 vmem:37:promplat:class vmem vmem:37:promplat:contains 0 vmem:37:promplat:contains_search 0 vmem:37:promplat:crtime 66.7337196 vmem:37:promplat:fail 0 vmem:37:promplat:free 109 vmem:37:promplat:lookup 0 vmem:37:promplat:mem_import 0 vmem:37:promplat:mem_inuse 0 vmem:37:promplat:mem_total 0 vmem:37:promplat:populate_fail 0 vmem:37:promplat:populate_wait 0 vmem:37:promplat:search 0 vmem:37:promplat:snaptime 8464513.0867004 vmem:37:promplat:vmem_source 6 vmem:37:promplat:wait 0 vmem:38:segkp:alloc 2075 vmem:38:segkp:class vmem vmem:38:segkp:contains 0 vmem:38:segkp:contains_search 0 vmem:38:segkp:crtime 69.2011778 vmem:38:segkp:fail 0 vmem:38:segkp:free 1953 vmem:38:segkp:lookup 280 vmem:38:segkp:mem_import 0 vmem:38:segkp:mem_inuse 20054016 vmem:38:segkp:mem_total 
2147483648 vmem:38:segkp:populate_fail 0 vmem:38:segkp:populate_wait 0 vmem:38:segkp:search 6923 vmem:38:segkp:snaptime 8464513.0875614 vmem:38:segkp:vmem_source 0 vmem:38:segkp:wait 0 vmem:39:zio:alloc 0 vmem:39:zio:class vmem vmem:39:zio:contains 0 vmem:39:zio:contains_search 0 vmem:39:zio:crtime 69.2039308 vmem:39:zio:fail 0 vmem:39:zio:free 0 vmem:39:zio:lookup 0 vmem:39:zio:mem_import 0 vmem:39:zio:mem_inuse 0 vmem:39:zio:mem_total 8520343552 vmem:39:zio:populate_fail 0 vmem:39:zio:populate_wait 0 vmem:39:zio:search 0 vmem:39:zio:snaptime 8464513.0885698 vmem:39:zio:vmem_source 0 vmem:39:zio:wait 0 vmem:40:zio_buf:alloc 0 vmem:40:zio_buf:class vmem vmem:40:zio_buf:contains 0 vmem:40:zio_buf:contains_search 0 vmem:40:zio_buf:crtime 69.203975 vmem:40:zio_buf:fail 0 vmem:40:zio_buf:free 0 vmem:40:zio_buf:lookup 0 vmem:40:zio_buf:mem_import 0 vmem:40:zio_buf:mem_inuse 0 vmem:40:zio_buf:mem_total 0 vmem:40:zio_buf:populate_fail 0 vmem:40:zio_buf:populate_wait 0 vmem:40:zio_buf:search 0 vmem:40:zio_buf:snaptime 8464513.0894362 vmem:40:zio_buf:vmem_source 39 vmem:40:zio_buf:wait 0 vmem:41:umem_np:alloc 6 vmem:41:umem_np:class vmem vmem:41:umem_np:contains 0 vmem:41:umem_np:contains_search 0 vmem:41:umem_np:crtime 69.2618856 vmem:41:umem_np:fail 0 vmem:41:umem_np:free 0 vmem:41:umem_np:lookup 0 vmem:41:umem_np:mem_import 1441792 vmem:41:umem_np:mem_inuse 1441792 vmem:41:umem_np:mem_total 1441792 vmem:41:umem_np:populate_fail 0 vmem:41:umem_np:populate_wait 0 vmem:41:umem_np:search 0 vmem:41:umem_np:snaptime 8464513.0903638 vmem:41:umem_np:vmem_source 1 vmem:41:umem_np:wait 0 vmem:42:rctl_ids:alloc 34 vmem:42:rctl_ids:class vmem vmem:42:rctl_ids:contains 0 vmem:42:rctl_ids:contains_search 0 vmem:42:rctl_ids:crtime 69.2625038 vmem:42:rctl_ids:fail 0 vmem:42:rctl_ids:free 0 vmem:42:rctl_ids:lookup 0 vmem:42:rctl_ids:mem_import 0 vmem:42:rctl_ids:mem_inuse 34 vmem:42:rctl_ids:mem_total 32767 vmem:42:rctl_ids:populate_fail 0 vmem:42:rctl_ids:populate_wait 0 vmem:42:rctl_ids:search 2 vmem:42:rctl_ids:snaptime 8464513.0912436 vmem:42:rctl_ids:vmem_source 0 vmem:42:rctl_ids:wait 0 vmem:43:zoneid_space:alloc 0 vmem:43:zoneid_space:class vmem vmem:43:zoneid_space:contains 0 vmem:43:zoneid_space:contains_search 0 vmem:43:zoneid_space:crtime 69.2629606 vmem:43:zoneid_space:fail 0 vmem:43:zoneid_space:free 0 vmem:43:zoneid_space:lookup 0 vmem:43:zoneid_space:mem_import 0 vmem:43:zoneid_space:mem_inuse 0 vmem:43:zoneid_space:mem_total 9998 vmem:43:zoneid_space:populate_fail 0 vmem:43:zoneid_space:populate_wait 0 vmem:43:zoneid_space:search 0 vmem:43:zoneid_space:snaptime 8464513.0921096 vmem:43:zoneid_space:vmem_source 0 vmem:43:zoneid_space:wait 0 vmem:44:taskid_space:alloc 429 vmem:44:taskid_space:class vmem vmem:44:taskid_space:contains 0 vmem:44:taskid_space:contains_search 0 vmem:44:taskid_space:crtime 69.2631548 vmem:44:taskid_space:fail 0 vmem:44:taskid_space:free 398 vmem:44:taskid_space:lookup 4 vmem:44:taskid_space:mem_import 0 vmem:44:taskid_space:mem_inuse 31 vmem:44:taskid_space:mem_total 999999 vmem:44:taskid_space:populate_fail 0 vmem:44:taskid_space:populate_wait 0 vmem:44:taskid_space:search 2 vmem:44:taskid_space:snaptime 8464513.092972 vmem:44:taskid_space:vmem_source 0 vmem:44:taskid_space:wait 0 vmem:45:pool_ids:alloc 0 vmem:45:pool_ids:class vmem vmem:45:pool_ids:contains 0 vmem:45:pool_ids:contains_search 0 vmem:45:pool_ids:crtime 69.263411 vmem:45:pool_ids:fail 0 vmem:45:pool_ids:free 0 vmem:45:pool_ids:lookup 0 vmem:45:pool_ids:mem_import 0 vmem:45:pool_ids:mem_inuse 0 
vmem:45:pool_ids:mem_total 999998 vmem:45:pool_ids:populate_fail 0 vmem:45:pool_ids:populate_wait 0 vmem:45:pool_ids:search 0 vmem:45:pool_ids:snaptime 8464513.0938998 vmem:45:pool_ids:vmem_source 0 vmem:45:pool_ids:wait 0 vmem:46:contracts:alloc 459 vmem:46:contracts:class vmem vmem:46:contracts:contains 0 vmem:46:contracts:contains_search 0 vmem:46:contracts:crtime 69.2656296 vmem:46:contracts:fail 0 vmem:46:contracts:free 421 vmem:46:contracts:lookup 5 vmem:46:contracts:mem_import 0 vmem:46:contracts:mem_inuse 38 vmem:46:contracts:mem_total 2147483646 vmem:46:contracts:populate_fail 0 vmem:46:contracts:populate_wait 0 vmem:46:contracts:search 2 vmem:46:contracts:snaptime 8464513.094777 vmem:46:contracts:vmem_source 0 vmem:46:contracts:wait 0 vmem:47:regspec:alloc 79 vmem:47:regspec:class vmem vmem:47:regspec:contains 0 vmem:47:regspec:contains_search 0 vmem:47:regspec:crtime 69.2664946 vmem:47:regspec:fail 0 vmem:47:regspec:free 31 vmem:47:regspec:lookup 0 vmem:47:regspec:mem_import 0 vmem:47:regspec:mem_inuse 598016 vmem:47:regspec:mem_total 5368709120 vmem:47:regspec:populate_fail 0 vmem:47:regspec:populate_wait 0 vmem:47:regspec:search 0 vmem:47:regspec:snaptime 8464513.095654 vmem:47:regspec:vmem_source 0 vmem:47:regspec:wait 0 vmem:48:pcisch0_dvma:alloc 393 vmem:48:pcisch0_dvma:class vmem vmem:48:pcisch0_dvma:contains 0 vmem:48:pcisch0_dvma:contains_search 0 vmem:48:pcisch0_dvma:crtime 69.2933372 vmem:48:pcisch0_dvma:fail 0 vmem:48:pcisch0_dvma:free 0 vmem:48:pcisch0_dvma:lookup 0 vmem:48:pcisch0_dvma:mem_import 0 vmem:48:pcisch0_dvma:mem_inuse 44662784 vmem:48:pcisch0_dvma:mem_total 1040187392 vmem:48:pcisch0_dvma:populate_fail 0 vmem:48:pcisch0_dvma:populate_wait 0 vmem:48:pcisch0_dvma:search 258 vmem:48:pcisch0_dvma:snaptime 8464513.0965264 vmem:48:pcisch0_dvma:vmem_source 0 vmem:48:pcisch0_dvma:wait 0 vmem:49:pcisch1_dvma:alloc 531 vmem:49:pcisch1_dvma:class vmem vmem:49:pcisch1_dvma:contains 0 vmem:49:pcisch1_dvma:contains_search 0 vmem:49:pcisch1_dvma:crtime 69.298216 vmem:49:pcisch1_dvma:fail 0 vmem:49:pcisch1_dvma:free 524 vmem:49:pcisch1_dvma:lookup 7 vmem:49:pcisch1_dvma:mem_import 0 vmem:49:pcisch1_dvma:mem_inuse 229376 vmem:49:pcisch1_dvma:mem_total 1040187392 vmem:49:pcisch1_dvma:populate_fail 0 vmem:49:pcisch1_dvma:populate_wait 0 vmem:49:pcisch1_dvma:search 524 vmem:49:pcisch1_dvma:snaptime 8464513.0973966 vmem:49:pcisch1_dvma:vmem_source 0 vmem:49:pcisch1_dvma:wait 0 vmem:50:ip_minor_arena:alloc 3 vmem:50:ip_minor_arena:class vmem vmem:50:ip_minor_arena:contains 0 vmem:50:ip_minor_arena:contains_search 0 vmem:50:ip_minor_arena:crtime 83.9662352 vmem:50:ip_minor_arena:fail 0 vmem:50:ip_minor_arena:free 0 vmem:50:ip_minor_arena:lookup 0 vmem:50:ip_minor_arena:mem_import 0 vmem:50:ip_minor_arena:mem_inuse 192 vmem:50:ip_minor_arena:mem_total 262140 vmem:50:ip_minor_arena:populate_fail 0 vmem:50:ip_minor_arena:populate_wait 0 vmem:50:ip_minor_arena:search 0 vmem:50:ip_minor_arena:snaptime 8464513.0982622 vmem:50:ip_minor_arena:vmem_source 0 vmem:50:ip_minor_arena:wait 0 vmem:51:dls_minor_arena:alloc 1 vmem:51:dls_minor_arena:class vmem vmem:51:dls_minor_arena:contains 0 vmem:51:dls_minor_arena:contains_search 0 vmem:51:dls_minor_arena:crtime 84.0830648 vmem:51:dls_minor_arena:fail 0 vmem:51:dls_minor_arena:free 0 vmem:51:dls_minor_arena:lookup 0 vmem:51:dls_minor_arena:mem_import 0 vmem:51:dls_minor_arena:mem_inuse 1 vmem:51:dls_minor_arena:mem_total 4294967295 vmem:51:dls_minor_arena:populate_fail 0 vmem:51:dls_minor_arena:populate_wait 0 vmem:51:dls_minor_arena:search 
0 vmem:51:dls_minor_arena:snaptime 8464513.0991268 vmem:51:dls_minor_arena:vmem_source 0 vmem:51:dls_minor_arena:wait 0 vmem:52:dld_ctl:alloc 0 vmem:52:dld_ctl:class vmem vmem:52:dld_ctl:contains 0 vmem:52:dld_ctl:contains_search 0 vmem:52:dld_ctl:crtime 84.08316 vmem:52:dld_ctl:fail 0 vmem:52:dld_ctl:free 0 vmem:52:dld_ctl:lookup 0 vmem:52:dld_ctl:mem_import 0 vmem:52:dld_ctl:mem_inuse 0 vmem:52:dld_ctl:mem_total 4294967295 vmem:52:dld_ctl:populate_fail 0 vmem:52:dld_ctl:populate_wait 0 vmem:52:dld_ctl:search 0 vmem:52:dld_ctl:snaptime 8464513.1001408 vmem:52:dld_ctl:vmem_source 0 vmem:52:dld_ctl:wait 0 vmem:53:tl_minor_space:alloc 914 vmem:53:tl_minor_space:class vmem vmem:53:tl_minor_space:contains 0 vmem:53:tl_minor_space:contains_search 0 vmem:53:tl_minor_space:crtime 87.651236 vmem:53:tl_minor_space:fail 0 vmem:53:tl_minor_space:free 875 vmem:53:tl_minor_space:lookup 19 vmem:53:tl_minor_space:mem_import 0 vmem:53:tl_minor_space:mem_inuse 39 vmem:53:tl_minor_space:mem_total 262138 vmem:53:tl_minor_space:populate_fail 0 vmem:53:tl_minor_space:populate_wait 0 vmem:53:tl_minor_space:search 2 vmem:53:tl_minor_space:snaptime 8464513.101026 vmem:53:tl_minor_space:vmem_source 0 vmem:53:tl_minor_space:wait 0 vmem:54:keysock:alloc 0 vmem:54:keysock:class vmem vmem:54:keysock:contains 0 vmem:54:keysock:contains_search 0 vmem:54:keysock:crtime 87.719229 vmem:54:keysock:fail 0 vmem:54:keysock:free 0 vmem:54:keysock:lookup 0 vmem:54:keysock:mem_import 0 vmem:54:keysock:mem_inuse 0 vmem:54:keysock:mem_total 4294967295 vmem:54:keysock:populate_fail 0 vmem:54:keysock:populate_wait 0 vmem:54:keysock:search 0 vmem:54:keysock:snaptime 8464513.1020122 vmem:54:keysock:vmem_source 0 vmem:54:keysock:wait 0 vmem:55:spdsock:alloc 1 vmem:55:spdsock:class vmem vmem:55:spdsock:contains 0 vmem:55:spdsock:contains_search 0 vmem:55:spdsock:crtime 87.7639926 vmem:55:spdsock:fail 0 vmem:55:spdsock:free 0 vmem:55:spdsock:lookup 0 vmem:55:spdsock:mem_import 0 vmem:55:spdsock:mem_inuse 64 vmem:55:spdsock:mem_total 4294967295 vmem:55:spdsock:populate_fail 0 vmem:55:spdsock:populate_wait 0 vmem:55:spdsock:search 0 vmem:55:spdsock:snaptime 8464513.1028806 vmem:55:spdsock:vmem_source 0 vmem:55:spdsock:wait 0 vmem:56:namefs_inodes:alloc 1 vmem:56:namefs_inodes:class vmem vmem:56:namefs_inodes:contains 0 vmem:56:namefs_inodes:contains_search 0 vmem:56:namefs_inodes:crtime 89.6918066 vmem:56:namefs_inodes:fail 0 vmem:56:namefs_inodes:free 0 vmem:56:namefs_inodes:lookup 0 vmem:56:namefs_inodes:mem_import 0 vmem:56:namefs_inodes:mem_inuse 64 vmem:56:namefs_inodes:mem_total 65536 vmem:56:namefs_inodes:populate_fail 0 vmem:56:namefs_inodes:populate_wait 0 vmem:56:namefs_inodes:search 0 vmem:56:namefs_inodes:snaptime 8464513.103751 vmem:56:namefs_inodes:vmem_source 0 vmem:56:namefs_inodes:wait 0 vmem:57:Hex0x300022cd648_minor:alloc 0 vmem:57:Hex0x300022cd648_minor:class vmem vmem:57:Hex0x300022cd648_minor:contains 0 vmem:57:Hex0x300022cd648_minor:contains_search 0 vmem:57:Hex0x300022cd648_minor:crtime 93.5591084 vmem:57:Hex0x300022cd648_minor:fail 0 vmem:57:Hex0x300022cd648_minor:free 0 vmem:57:Hex0x300022cd648_minor:lookup 0 vmem:57:Hex0x300022cd648_minor:mem_import 0 vmem:57:Hex0x300022cd648_minor:mem_inuse 0 vmem:57:Hex0x300022cd648_minor:mem_total 262142 vmem:57:Hex0x300022cd648_minor:populate_fail 0 vmem:57:Hex0x300022cd648_minor:populate_wait 0 vmem:57:Hex0x300022cd648_minor:search 0 vmem:57:Hex0x300022cd648_minor:snaptime 8464513.1046168 vmem:57:Hex0x300022cd648_minor:vmem_source 0 vmem:57:Hex0x300022cd648_minor:wait 0 
vmem:58:Hex0x30002167588_minor:alloc 0 vmem:58:Hex0x30002167588_minor:class vmem vmem:58:Hex0x30002167588_minor:contains 0 vmem:58:Hex0x30002167588_minor:contains_search 0 vmem:58:Hex0x30002167588_minor:crtime 93.5592918 vmem:58:Hex0x30002167588_minor:fail 0 vmem:58:Hex0x30002167588_minor:free 0 vmem:58:Hex0x30002167588_minor:lookup 0 vmem:58:Hex0x30002167588_minor:mem_import 0 vmem:58:Hex0x30002167588_minor:mem_inuse 0 vmem:58:Hex0x30002167588_minor:mem_total 262142 vmem:58:Hex0x30002167588_minor:populate_fail 0 vmem:58:Hex0x30002167588_minor:populate_wait 0 vmem:58:Hex0x30002167588_minor:search 0 vmem:58:Hex0x30002167588_minor:snaptime 8464513.1055784 vmem:58:Hex0x30002167588_minor:vmem_source 0 vmem:58:Hex0x30002167588_minor:wait 0 vmem:59:ptms_minor:alloc 81 vmem:59:ptms_minor:class vmem vmem:59:ptms_minor:contains 0 vmem:59:ptms_minor:contains_search 0 vmem:59:ptms_minor:crtime 97.588379 vmem:59:ptms_minor:fail 0 vmem:59:ptms_minor:free 79 vmem:59:ptms_minor:lookup 0 vmem:59:ptms_minor:mem_import 0 vmem:59:ptms_minor:mem_inuse 2 vmem:59:ptms_minor:mem_total 16 vmem:59:ptms_minor:populate_fail 0 vmem:59:ptms_minor:populate_wait 0 vmem:59:ptms_minor:search 0 vmem:59:ptms_minor:snaptime 8464513.1064486 vmem:59:ptms_minor:vmem_source 0 vmem:59:ptms_minor:wait 0 vmem:60:devfsadm_event_channel:alloc 0 vmem:60:devfsadm_event_channel:class vmem vmem:60:devfsadm_event_channel:contains 0 vmem:60:devfsadm_event_channel:contains_search 0 vmem:60:devfsadm_event_channel:crtime 99.5602538 vmem:60:devfsadm_event_channel:fail 0 vmem:60:devfsadm_event_channel:free 0 vmem:60:devfsadm_event_channel:lookup 0 vmem:60:devfsadm_event_channel:mem_import 0 vmem:60:devfsadm_event_channel:mem_inuse 0 vmem:60:devfsadm_event_channel:mem_total 101 vmem:60:devfsadm_event_channel:populate_fail 0 vmem:60:devfsadm_event_channel:populate_wait 0 vmem:60:devfsadm_event_channel:search 0 vmem:60:devfsadm_event_channel:snaptime 8464513.107311 vmem:60:devfsadm_event_channel:vmem_source 0 vmem:60:devfsadm_event_channel:wait 0 vmem:61:devfsadm_event_channel:alloc 1 vmem:61:devfsadm_event_channel:class vmem vmem:61:devfsadm_event_channel:contains 0 vmem:61:devfsadm_event_channel:contains_search 0 vmem:61:devfsadm_event_channel:crtime 99.5603088 vmem:61:devfsadm_event_channel:fail 0 vmem:61:devfsadm_event_channel:free 0 vmem:61:devfsadm_event_channel:lookup 0 vmem:61:devfsadm_event_channel:mem_import 0 vmem:61:devfsadm_event_channel:mem_inuse 1 vmem:61:devfsadm_event_channel:mem_total 2 vmem:61:devfsadm_event_channel:populate_fail 0 vmem:61:devfsadm_event_channel:populate_wait 0 vmem:61:devfsadm_event_channel:search 2 vmem:61:devfsadm_event_channel:snaptime 8464513.1081776 vmem:61:devfsadm_event_channel:vmem_source 0 vmem:61:devfsadm_event_channel:wait 0 vmem:62:syseventconfd_event_channel:alloc 0 vmem:62:syseventconfd_event_channel:class vmem vmem:62:syseventconfd_event_channel:contains 0 vmem:62:syseventconfd_event_channel:contains_search 0 vmem:62:syseventconfd_event_channel:crtime 99.699005 vmem:62:syseventconfd_event_channel:fail 0 vmem:62:syseventconfd_event_channel:free 0 vmem:62:syseventconfd_event_channel:lookup 0 vmem:62:syseventconfd_event_channel:mem_import 0 vmem:62:syseventconfd_event_channel:mem_inuse 0 vmem:62:syseventconfd_event_channel:mem_total 101 vmem:62:syseventconfd_event_channel:populate_fail 0 vmem:62:syseventconfd_event_channel:populate_wait 0 vmem:62:syseventconfd_event_channel:search 0 vmem:62:syseventconfd_event_channel:snaptime 8464513.1090408 vmem:62:syseventconfd_event_channel:vmem_source 0 
vmem:62:syseventconfd_event_channel:wait 0 vmem:63:syseventconfd_event_channel:alloc 1 vmem:63:syseventconfd_event_channel:class vmem vmem:63:syseventconfd_event_channel:contains 0 vmem:63:syseventconfd_event_channel:contains_search 0 vmem:63:syseventconfd_event_channel:crtime 99.6990576 vmem:63:syseventconfd_event_channel:fail 0 vmem:63:syseventconfd_event_channel:free 0 vmem:63:syseventconfd_event_channel:lookup 0 vmem:63:syseventconfd_event_channel:mem_import 0 vmem:63:syseventconfd_event_channel:mem_inuse 1 vmem:63:syseventconfd_event_channel:mem_total 2 vmem:63:syseventconfd_event_channel:populate_fail 0 vmem:63:syseventconfd_event_channel:populate_wait 0 vmem:63:syseventconfd_event_channel:search 2 vmem:63:syseventconfd_event_channel:snaptime 8464513.1099164 vmem:63:syseventconfd_event_channel:vmem_source 0 vmem:63:syseventconfd_event_channel:wait 0 vmem:64:syseventd_channel:alloc 2 vmem:64:syseventd_channel:class vmem vmem:64:syseventd_channel:contains 0 vmem:64:syseventd_channel:contains_search 0 vmem:64:syseventd_channel:crtime 99.8627902 vmem:64:syseventd_channel:fail 0 vmem:64:syseventd_channel:free 0 vmem:64:syseventd_channel:lookup 0 vmem:64:syseventd_channel:mem_import 0 vmem:64:syseventd_channel:mem_inuse 2 vmem:64:syseventd_channel:mem_total 101 vmem:64:syseventd_channel:populate_fail 0 vmem:64:syseventd_channel:populate_wait 0 vmem:64:syseventd_channel:search 2 vmem:64:syseventd_channel:snaptime 8464513.1109954 vmem:64:syseventd_channel:vmem_source 0 vmem:64:syseventd_channel:wait 0 vmem:65:syseventd_channel:alloc 1 vmem:65:syseventd_channel:class vmem vmem:65:syseventd_channel:contains 0 vmem:65:syseventd_channel:contains_search 0 vmem:65:syseventd_channel:crtime 99.8628486 vmem:65:syseventd_channel:fail 0 vmem:65:syseventd_channel:free 0 vmem:65:syseventd_channel:lookup 0 vmem:65:syseventd_channel:mem_import 0 vmem:65:syseventd_channel:mem_inuse 1 vmem:65:syseventd_channel:mem_total 2 vmem:65:syseventd_channel:populate_fail 0 vmem:65:syseventd_channel:populate_wait 0 vmem:65:syseventd_channel:search 2 vmem:65:syseventd_channel:snaptime 8464513.1119024 vmem:65:syseventd_channel:vmem_source 0 vmem:65:syseventd_channel:wait 0 vmem:67:dtrace:alloc 3 vmem:67:dtrace:class vmem vmem:67:dtrace:contains 0 vmem:67:dtrace:contains_search 0 vmem:67:dtrace:crtime 113.9730624 vmem:67:dtrace:fail 0 vmem:67:dtrace:free 0 vmem:67:dtrace:lookup 0 vmem:67:dtrace:mem_import 0 vmem:67:dtrace:mem_inuse 3 vmem:67:dtrace:mem_total 4294967295 vmem:67:dtrace:populate_fail 0 vmem:67:dtrace:populate_wait 0 vmem:67:dtrace:search 6 vmem:67:dtrace:snaptime 8464513.1128688 vmem:67:dtrace:vmem_source 0 vmem:67:dtrace:wait 0 vmem:68:dtrace_minor:alloc 0 vmem:68:dtrace_minor:class vmem vmem:68:dtrace_minor:contains 0 vmem:68:dtrace_minor:contains_search 0 vmem:68:dtrace_minor:crtime 113.9731176 vmem:68:dtrace_minor:fail 0 vmem:68:dtrace_minor:free 0 vmem:68:dtrace_minor:lookup 0 vmem:68:dtrace_minor:mem_import 0 vmem:68:dtrace_minor:mem_inuse 0 vmem:68:dtrace_minor:mem_total 4294967293 vmem:68:dtrace_minor:populate_fail 0 vmem:68:dtrace_minor:populate_wait 0 vmem:68:dtrace_minor:search 0 vmem:68:dtrace_minor:snaptime 8464513.1137696 vmem:68:dtrace_minor:vmem_source 0 vmem:68:dtrace_minor:wait 0 vmem:69:lmsysid_space:alloc 2 vmem:69:lmsysid_space:class vmem vmem:69:lmsysid_space:contains 0 vmem:69:lmsysid_space:contains_search 0 vmem:69:lmsysid_space:crtime 114.5367116 vmem:69:lmsysid_space:fail 0 vmem:69:lmsysid_space:free 0 vmem:69:lmsysid_space:lookup 0 vmem:69:lmsysid_space:mem_import 0 
vmem:69:lmsysid_space:mem_inuse 2 vmem:69:lmsysid_space:mem_total 16383 vmem:69:lmsysid_space:populate_fail 0 vmem:69:lmsysid_space:populate_wait 0 vmem:69:lmsysid_space:search 2 vmem:69:lmsysid_space:snaptime 8464513.1146686 vmem:69:lmsysid_space:vmem_source 0 vmem:69:lmsysid_space:wait 0 vmem:72:crypto:alloc 54 vmem:72:crypto:class vmem vmem:72:crypto:contains 0 vmem:72:crypto:contains_search 0 vmem:72:crypto:crtime 161.755376 vmem:72:crypto:fail 0 vmem:72:crypto:free 54 vmem:72:crypto:lookup 0 vmem:72:crypto:mem_import 0 vmem:72:crypto:mem_inuse 0 vmem:72:crypto:mem_total 16 vmem:72:crypto:populate_fail 0 vmem:72:crypto:populate_wait 0 vmem:72:crypto:search 0 vmem:72:crypto:snaptime 8464513.11562 vmem:72:crypto:vmem_source 0 vmem:72:crypto:wait 0 vmem:73:logdmux_minor:alloc 86 vmem:73:logdmux_minor:class vmem vmem:73:logdmux_minor:contains 0 vmem:73:logdmux_minor:contains_search 0 vmem:73:logdmux_minor:crtime 861.3195852 vmem:73:logdmux_minor:fail 0 vmem:73:logdmux_minor:free 86 vmem:73:logdmux_minor:lookup 0 vmem:73:logdmux_minor:mem_import 0 vmem:73:logdmux_minor:mem_inuse 0 vmem:73:logdmux_minor:mem_total 256 vmem:73:logdmux_minor:populate_fail 0 vmem:73:logdmux_minor:populate_wait 0 vmem:73:logdmux_minor:search 0 vmem:73:logdmux_minor:snaptime 8464513.116531 vmem:73:logdmux_minor:vmem_source 0 vmem:73:logdmux_minor:wait 0 vmem:75:ibcm_local_sid:alloc 0 vmem:75:ibcm_local_sid:class vmem vmem:75:ibcm_local_sid:contains 0 vmem:75:ibcm_local_sid:contains_search 0 vmem:75:ibcm_local_sid:crtime 7784068.4241152 vmem:75:ibcm_local_sid:fail 0 vmem:75:ibcm_local_sid:free 0 vmem:75:ibcm_local_sid:lookup 0 vmem:75:ibcm_local_sid:mem_import 0 vmem:75:ibcm_local_sid:mem_inuse 0 vmem:75:ibcm_local_sid:mem_total 4294967295 vmem:75:ibcm_local_sid:populate_fail 0 vmem:75:ibcm_local_sid:populate_wait 0 vmem:75:ibcm_local_sid:search 0 vmem:75:ibcm_local_sid:snaptime 8464513.1174264 vmem:75:ibcm_local_sid:vmem_source 0 vmem:75:ibcm_local_sid:wait 0 vmem:76:trapstat:alloc 0 vmem:76:trapstat:class vmem vmem:76:trapstat:contains 0 vmem:76:trapstat:contains_search 0 vmem:76:trapstat:crtime 7784069.0469844 vmem:76:trapstat:fail 0 vmem:76:trapstat:free 0 vmem:76:trapstat:lookup 0 vmem:76:trapstat:mem_import 0 vmem:76:trapstat:mem_inuse 0 vmem:76:trapstat:mem_total 0 vmem:76:trapstat:populate_fail 0 vmem:76:trapstat:populate_wait 0 vmem:76:trapstat:search 0 vmem:76:trapstat:snaptime 8464513.1183218 vmem:76:trapstat:vmem_source 6 vmem:76:trapstat:wait 0 vmem:77:sppptun_minor:alloc 0 vmem:77:sppptun_minor:class vmem vmem:77:sppptun_minor:contains 0 vmem:77:sppptun_minor:contains_search 0 vmem:77:sppptun_minor:crtime 7784070.0013418 vmem:77:sppptun_minor:fail 0 vmem:77:sppptun_minor:free 0 vmem:77:sppptun_minor:lookup 0 vmem:77:sppptun_minor:mem_import 0 vmem:77:sppptun_minor:mem_inuse 0 vmem:77:sppptun_minor:mem_total 16 vmem:77:sppptun_minor:populate_fail 0 vmem:77:sppptun_minor:populate_wait 0 vmem:77:sppptun_minor:search 0 vmem:77:sppptun_minor:snaptime 8464513.1192142 vmem:77:sppptun_minor:vmem_source 0 vmem:77:sppptun_minor:wait 0 zfs:0:arcstats:c 7446601728 zfs:0:arcstats:c_max 7446601728 zfs:0:arcstats:c_min 266260736 zfs:0:arcstats:class misc zfs:0:arcstats:crtime 97.1395116 zfs:0:arcstats:deleted 0 zfs:0:arcstats:demand_data_hits 0 zfs:0:arcstats:demand_data_misses 0 zfs:0:arcstats:demand_metadata_hits 0 zfs:0:arcstats:demand_metadata_misses 0 zfs:0:arcstats:evict_skip 0 zfs:0:arcstats:hash_chain_max 0 zfs:0:arcstats:hash_chains 0 zfs:0:arcstats:hash_collisions 0 zfs:0:arcstats:hash_elements 0 
zfs:0:arcstats:hash_elements_max 0 zfs:0:arcstats:hits 0 zfs:0:arcstats:mfu_ghost_hits 0 zfs:0:arcstats:mfu_hits 0 zfs:0:arcstats:misses 0 zfs:0:arcstats:mru_ghost_hits 0 zfs:0:arcstats:mru_hits 0 zfs:0:arcstats:mutex_miss 0 zfs:0:arcstats:p 3723300864 zfs:0:arcstats:prefetch_data_hits 0 zfs:0:arcstats:prefetch_data_misses 0 zfs:0:arcstats:prefetch_metadata_hits 0 zfs:0:arcstats:prefetch_metadata_misses 0 zfs:0:arcstats:recycle_miss 0 zfs:0:arcstats:size 0 zfs:0:arcstats:snaptime 8464513.1202336
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/darwin-show.c0000644000175000017500000000077011757531137025445 0ustar ryngerynge#include <sys/sysctl.h>
#include <sys/vmmeter.h>
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include "../debug.h"

int main( int argc, char* argv[] )
{
  int i;
  char s[4096];
  size_t len;

  for ( i=1; i<argc; ++i ) {
    /* bug fix: reset the capacity for every query -- sysctlbyname()
     * overwrites len with the number of bytes actually returned */
    len = sizeof(s);
    if ( sysctlbyname( argv[i], &s, &len, NULL, 0 ) == -1 ) {
      fprintf( stderr, "sysctl %s: %d: %s\n", argv[i], errno, strerror(errno) );
    } else {
      debugmsg( "len=%zu\n", len );        /* len is a size_t, hence %zu */
      hexdump( (char*) &s, len );
    }
  }
  return 0;
}
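A note on darwin-show.c above: it sizes its sysctl buffer statically at 4 KB, so larger nodes fail with ENOMEM. sysctlbyname(3) also supports a documented two-call pattern: pass a NULL buffer first to learn the required size, then fetch into an allocation of exactly that size. A minimal sketch of that pattern, assuming nothing beyond the standard libc interface; the helper name show_sysctl is illustrative and not part of this package:

#include <sys/sysctl.h>
#include <stdio.h>
#include <stdlib.h>

int show_sysctl( const char* name )
{
  size_t len = 0;
  void* buf;

  /* first call: NULL buffer, len receives the required size */
  if ( sysctlbyname( name, NULL, &len, NULL, 0 ) == -1 ) return -1;
  if ( (buf = malloc(len)) == NULL ) return -1;

  /* second call: fetch the value into an exactly-sized buffer */
  if ( sysctlbyname( name, buf, &len, NULL, 0 ) == -1 ) {
    free(buf);
    return -1;
  }

  printf( "%s: %zu bytes\n", name, len );
  free(buf);
  return 0;
}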
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/darwin-kvm0000755000175000017500000004124011757531137025041 0ustar ryngerynge[prebuilt Mach-O executable; binary payload omitted. Embedded strings show a per-process report (pid, starttime, wchan, wmesg, rss, ...) obtained via kvm_open()/kvm_getprocs() against /dev/vm-main.]
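The darwin-kvm entry ships only as a compiled binary; the strings embedded in it (kvm_getprocs, /dev/vm-main, per-process fields such as wchan and rss) indicate it walks the kernel process table through the old BSD kvm(3) interface. For orientation only, a hedged sketch of that access pattern using the classic libkvm signatures as found on FreeBSD; this is an assumption, since the tool's own source is not present in the archive, and the kinfo_proc field names (ki_pid, ki_comm) differ on older Darwin:

#include <sys/param.h>
#include <sys/sysctl.h>
#include <sys/user.h>          /* struct kinfo_proc (FreeBSD layout) */
#include <fcntl.h>
#include <kvm.h>
#include <limits.h>
#include <stdio.h>

int main( void )
{
  char errbuf[_POSIX2_LINE_MAX];
  struct kinfo_proc* kp;
  int cnt = 0, i;

  /* NULL file arguments mean "the running kernel" */
  kvm_t* kd = kvm_openfiles( NULL, NULL, NULL, O_RDONLY, errbuf );
  if ( kd == NULL ) {
    fprintf( stderr, "kvm_openfiles: %s\n", errbuf );
    return 1;
  }

  /* one kinfo_proc per process; cnt receives the count */
  if ( (kp = kvm_getprocs( kd, KERN_PROC_ALL, 0, &cnt )) != NULL ) {
    for ( i=0; i<cnt; ++i )
      printf( "pid %5d %s\n", kp[i].ki_pid, kp[i].ki_comm );
  }

  kvm_close( kd );
  return 0;
}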
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/sunos.h0000644000175000017500000000522411757531137024356 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2008 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#ifndef _MACHINE_SUNOS_H
#define _MACHINE_SUNOS_H

#include <sys/types.h>
#include <sys/time.h>
#include <sys/utsname.h>
#include <sys/sysinfo.h>
#include "basic.h"

typedef struct {
  /* common (shared) portion */
  MachineBasicInfo* basic;

  /*
   * provider-specific portion
   */

  /* from swapctl(2) system call */
  uint64_t swap_total;
  uint64_t swap_free;

  /* from proc(4) access */
  unsigned ps_total;
  unsigned ps_good;
  unsigned ps_thr_active;
  unsigned ps_thr_zombie;
  uint64_t ps_size;
  uint64_t ps_rss;

  /* from kstat API cpuinfo:<instance> */
  unsigned short cpu_count;
  unsigned short cpu_online;
  unsigned long megahertz;
  char brand_id[20];
  char cpu_type[20];
  char model_name[80];

  /* from kstat API cpu_stat:*:* */
  unsigned long cpu_state[CPU_STATES];

  /* from kstat API unix:system_pages */
  uint64_t ram_avail;
  uint64_t ram_free;

  /* from kstat API unix:system_misc */
  double load[3];
  time_t boottime;
  unsigned pid_total;
} MachineSunosInfo;

extern void* initMachine( void );
/* purpose: initialize the data structure.
 * returns: initialized MachineSunosInfo structure. */

extern int printMachine( char* buffer, size_t size, size_t* len,
                         size_t indent, const char* tag, const void* data );
/* purpose: format the information into the given buffer as XML.
 * paramtr: buffer (IO): area to store the output in
 *          size (IN): capacity of character area
 *          len (IO): current position within area, will be adjusted
 *          indent (IN): indentation level
 *          tag (IN): name to use for element tags.
 *          data (IN): MachineSunosInfo info to print.
 * returns: number of characters put into buffer (buffer length) */

extern void deleteMachine( void* data );
/* purpose: destructor
 * paramtr: data (IO): valid MachineSunosInfo structure to destroy. */

#endif /* _MACHINE_SUNOS_H */
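The three entry points declared in sunos.h form kickstart's per-platform machine-info contract: initMachine() takes the snapshot, printMachine() renders it as XML into a caller-supplied buffer, and deleteMachine() releases it. A minimal, hypothetical caller; the buffer size, indent level, and the "machine" tag are illustrative choices, not values taken from the package:

#include <stdio.h>
#include "sunos.h"

int main( void )
{
  char buffer[16384];                  /* illustrative capacity */
  size_t len = 0;
  void* data = initMachine();          /* gather the snapshot */

  printMachine( buffer, sizeof(buffer), &len, 2, "machine", data );
  fwrite( buffer, 1, len, stdout );    /* emit the XML rendition */
  deleteMachine( data );               /* release resources */
  return 0;
}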
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/sunos-swap.c0000644000175000017500000000724611757531137025327 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include "sunos-swap.h"

#include <sys/types.h>
#include <sys/stat.h>
#include <sys/swap.h>
#include <memory.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <unistd.h>
#include <fcntl.h>
#include <dirent.h>
#include <procfs.h>

static const char* RCS_ID = "$Id$";

int gather_sunos_swap( uint64_t* stotal, uint64_t* sfree )
/* purpose: collect swap information from solaris
 * warning: This compilation unit MUST be compiled WITHOUT LFS support!
 * paramtr: total (OUT): total size of all swapping
 *          free (OUT): free size of all swapping
 * returns: number of swap partitions
 */
{
  uint64_t pagesize = getpagesize();
  int num = swapctl( SC_GETNSWP, 0 );

  *stotal = *sfree = 0;
  if ( num > 0 ) {
    size_t size = (num+1) * sizeof(swapent_t) + sizeof(swaptbl_t);
    swaptbl_t* t = malloc(size);
    char dummy[80];
    int i;

    /* we don't care for the path, so init all to the same */
    memset( t, 0, size );
    for ( i=0; i<num+1; ++i ) t->swt_ent[i].ste_path = dummy;
    t->swt_n = num+1;

    if ( swapctl( SC_LIST, t ) > 0 ) {
      for ( i=0; i<num; ++i ) {
        /* only pages that are not in the process of being deleted */
        if ( (t->swt_ent[i].ste_flags & (ST_INDEL | ST_DOINGDEL)) == 0 ) {
          *stotal += t->swt_ent[i].ste_pages * pagesize;
          *sfree  += t->swt_ent[i].ste_free * pagesize;
        }
      }
    }

    free((void*) t);
    return num;
  } else {
    return 0;
  }
}

void gather_sunos_proc( unsigned* total, unsigned* good,
                        unsigned* active, unsigned* zombie,
                        uint64_t* size, uint64_t* rss )
/* purpose: collect proc information from solaris
 * warning: This compilation unit MUST be compiled WITHOUT LFS support!
 * paramtr: total (OUT): all eligible entries found in /proc
 *          good (OUT): portion of total we were able to read from
 *          active (OUT): number of active THREADS (LWP)
 *          zombie (OUT): number of zombie THREADS (LWP)
 *          size (OUT): sum of every process's SIZE
 *          rss (OUT): sum of every process's RSS
 */
{
  DIR* proc = opendir("/proc");
  if ( proc ) {
    char psinfo[128];
    struct dirent* dp;
    while ( (dp = readdir(proc)) != NULL ) {
      /* assume proc files start with digit */
      if ( dp->d_name[0] >= '0' && dp->d_name[0] <= '9' ) {
        int fd;
        snprintf( psinfo, sizeof(psinfo), "/proc/%s/psinfo", dp->d_name );
        (*total)++;
        if ( (fd = open( psinfo, O_RDONLY )) != -1 ) {
          psinfo_t ps;
          if ( read( fd, &ps, sizeof(ps) ) >= sizeof(ps) ) {
            (*good)++;
            *active += ps.pr_nlwp;
#if OSMINOR > 9
            /* 20100728 (jsv): bug fix: Only Solaris 10 offers pr_nzomb */
            *zombie += ps.pr_nzomb;
#else
            /* "man -s 4 proc": If the process is a zombie, pr_nlwp and
             * pr_lwp.pr_lwpid are zero and the other fields of pr_lwp
             * are undefined */
            if ( ps.pr_nlwp == 0 && ps.pr_lwp.pr_lwpid == 0 )
              (*zombie)++;   /* bug fix: was *zombie++, which moved the pointer */
#endif
            *size += ps.pr_size;
            *rss += ps.pr_rssize;
          }
          close(fd);
        }
      }
    }
    closedir(proc);

    /* turn kbyte to byte */
    (*size) <<= 10;
    (*rss ) <<= 10;
  }
}
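The collectors above only ever increment their /proc counters, so the caller is expected to zero them first (gather_sunos_swap() resets *stotal and *sfree itself). A hypothetical harness, assuming sunos-swap.h declares both functions as defined above; the variable names and output formatting are illustrative:

#include <stdio.h>
#include <stdint.h>
#include "sunos-swap.h"

int main( void )
{
  uint64_t stotal, sfree;
  uint64_t size = 0, rss = 0;      /* summed by gather_sunos_proc */
  unsigned total = 0, good = 0, active = 0, zombie = 0;

  int nswap = gather_sunos_swap( &stotal, &sfree );
  gather_sunos_proc( &total, &good, &active, &zombie, &size, &rss );

  printf( "%d swap device(s), %llu of %llu bytes free\n", nswap,
          (unsigned long long) sfree, (unsigned long long) stotal );
  printf( "%u/%u processes readable, %u LWPs active, %u zombie\n",
          good, total, active, zombie );
  return 0;
}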
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/sol-some-kstat.c0000644000175000017500000003155711757531137026074 0ustar ryngerynge#include <sys/types.h>
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <kstat.h>
#include <sys/sysinfo.h>

char* sizer( char* buffer, size_t capacity, size_t vsize, const void* value )
/* purpose: format an unsigned integer of less-known size. Note that
 *          64bit ints on 32bit systems need %llu, but 64/64 uses %lu
 * paramtr: buffer (IO): area to output into
 *          capacity (IN): extent of the buffer to store things into
 *          vsize (IN): size of the value
 *          value (IN): value to format
 * warning: only for 32bit and 64bit platforms
 * returns: buffer
 */
{
  switch ( vsize ) {
  case 2:
    snprintf( buffer, capacity, "%hu", *((const short unsigned*) value) );
    break;
  case 4:
    if ( sizeof(long) == 4 )
      snprintf( buffer, capacity, "%lu", *((const long unsigned*) value) );
    else
      snprintf( buffer, capacity, "%u", *((const unsigned*) value) );
    break;
  case 8:
    if ( sizeof(long) == 4 ) {
      snprintf( buffer, capacity, "%llu", *((const long long unsigned*) value) );
    } else {
      snprintf( buffer, capacity, "%lu", *((const long unsigned*) value) );
    }
    break;
  default:
    snprintf( buffer, capacity, "unknown" );
    break;
  }
  return buffer;
}

char* kstat_type( char* buffer, size_t capacity, uchar_t type )
/* purpose: show kstat type
 * paramtr: buffer (IO): area to output into
 *          capacity (IN): extent of the buffer to store things into
 *          type (IN): kstat type
 * returns: buffer
 */
{
  static const char* c_type[] = { "RAW", "NAMED", "INTR", "I/O", "TIMER" };
  if ( type >= KSTAT_TYPE_RAW && type <= KSTAT_TYPE_TIMER )
    strncpy( buffer, c_type[type], capacity );
  else
    snprintf( buffer, capacity, "TYPE:%X", type );
  return buffer;
}

char* kstat_name_type( char* buffer, size_t capacity, uchar_t type )
/* purpose: show kstat named-data type
 * paramtr: buffer (IO): area to output into
 *          capacity (IN): extent of the buffer to store things into
 *          type (IN): kstat named-data type
 * returns: buffer
 */
{
  static const char* c_named[] = { "CHAR", "INT32", "UINT32", "INT64",
    "UINT64", "FLOAT", "DOUBLE", "KLONG", "KULONG", "STRING" };
  if ( type >= KSTAT_DATA_CHAR && type <= KSTAT_DATA_STRING )
    strncpy( buffer, c_named[type], capacity );
  else
    snprintf( buffer, capacity, "TYPE:%X", type );
  return buffer;
}

char* hrtime( char* buffer, size_t capacity, hrtime_t t )
/* purpose: 64-bit nano-second counter conversion */
{
  char b[32];
  snprintf( buffer, capacity, "%s ns (%.1f s)",
            sizer( b, 32, sizeof(t), &t ), t / 1E9 );
  return buffer;
}

size_t min( size_t a, size_t b )
{
  return a < b ? a : b;
}

int main( int argc, char *argv[] )
{
  kstat_ctl_t* kc;
  kstat_t* ksp;
  void* tmp;
  kstat_timer_t ktd;
  kstat_intr_t kid;
  kstat_io_t kio;
  unsigned int count = 0;
  unsigned long sum = 0;
  unsigned int rec = 0;

  if ( (kc = kstat_open()) == NULL ) {
    perror( "kstat_open" );
    return 1;
  }

  for ( ksp = kc->kc_chain; ksp != NULL; ksp = ksp->ks_next ) {
    char b[32], c[128];
    char name[KSTAT_STRLEN+1];
    char class[KSTAT_STRLEN+1];
    char module[KSTAT_STRLEN+1];
    char ident[KSTAT_STRLEN+1];

    /* don't trust developers to terminate strings properly */
    strncpy( name, ksp->ks_name, KSTAT_STRLEN );
    strncpy( class, ksp->ks_class, KSTAT_STRLEN );
    strncpy( module, ksp->ks_module, KSTAT_STRLEN );
    class[KSTAT_STRLEN] = '\0';
    name[KSTAT_STRLEN] = '\0';
    module[KSTAT_STRLEN] = '\0';

    snprintf( c, sizeof(c), "%s:%u:%s", module, ksp->ks_instance, name );
    printf( "FOUND %-32s %-12s %-8s ", c, class,
            kstat_type( b, 32, ksp->ks_type ) );
    putchar( (ksp->ks_flags & KSTAT_FLAG_VIRTUAL) == 0 ? '-' : 'v' );
    putchar( (ksp->ks_flags & KSTAT_FLAG_VAR_SIZE) == 0 ? '-' : 's' );
    putchar( (ksp->ks_flags & KSTAT_FLAG_WRITABLE) == 0 ? '-' : 'w' );
    putchar( (ksp->ks_flags & KSTAT_FLAG_PERSISTENT) == 0 ? '-' : 'p' );
    putchar( (ksp->ks_flags & KSTAT_FLAG_DORMANT) == 0 ? '-' : 'd' );
    putchar( (ksp->ks_flags & KSTAT_FLAG_INVALID) == 0 ? '-' : 'i' );
    /* bug fix: two conversions, two arguments (a stray third was passed) */
    printf( " (records=%u size=%lu)\n",
            ksp->ks_ndata, (unsigned long) ksp->ks_data_size );
    count++;
    sum += ksp->ks_data_size;
    rec += ksp->ks_ndata;

    switch ( ksp->ks_type ) {
      /* -------------------------------------------------------- */
    case KSTAT_TYPE_NAMED:
      tmp = malloc( ksp->ks_data_size + 16 );
      if ( kstat_read( kc, ksp, tmp ) == -1 ) {
        fprintf( stderr, "error reading kstat: %d: %s, skipping\n",
                 errno, strerror(errno) );
      } else {
        int i;
        double dbl;
        float flt;
        kstat_named_t* knd = (kstat_named_t*) tmp;
        for ( i=0; i < ksp->ks_ndata; ++i ) {
          strncpy( ident, knd[i].name, KSTAT_STRLEN );
          ident[KSTAT_STRLEN] = '\0';
          printf( "\t%*s %-8s ", -KSTAT_STRLEN, ident,
                  kstat_name_type( c, 80, knd[i].data_type ) );
          switch ( knd[i].data_type ) {
          case KSTAT_DATA_CHAR:
            strncpy( c, knd[i].value.c, 16 );
            c[16] = '\0';
            puts(c);
            break;
          case KSTAT_DATA_UINT32:
          case KSTAT_DATA_INT32:
            puts( sizer( c, sizeof(c), 4, &knd[i].value ) );
            break;
          case KSTAT_DATA_INT64:
          case KSTAT_DATA_UINT64:
            puts( sizer( c, sizeof(c), 8, &knd[i].value ) );
            break;
          case KSTAT_DATA_FLOAT:
            memcpy( &flt, &knd[i].value, sizeof(flt) );
            printf( "%f\n", flt );   /* newline added to match puts() output */
            break;
          case KSTAT_DATA_DOUBLE:
            memcpy( &dbl, &knd[i].value, sizeof(dbl) );
            printf( "%f\n", dbl );
            break;
          case KSTAT_DATA_STRING:
            if ( KSTAT_NAMED_STR_PTR(knd+i) == NULL ) {
              puts("(null)");
            } else {
              puts( KSTAT_NAMED_STR_PTR(knd+i) );
            }
            break;
          default:
            puts( "(unsup. data type)" );
            break;
          }
        }
      }
      free((void*) tmp);
      break;

      /* -------------------------------------------------------- */
    case KSTAT_TYPE_TIMER:
      if ( kstat_read( kc, ksp, &ktd ) == -1 ) {
        fprintf( stderr, "error reading kstat: %d: %s, skipping\n",
                 errno, strerror(errno) );
        continue;
      } else {
        strncpy( ident, ktd.name, KSTAT_STRLEN );
        ident[KSTAT_STRLEN] = '\0';
        printf( "\tevent name              : %s\n", ident );
        printf( "\tnumber of events        : %s\n",
                sizer( b, 32, sizeof(ktd.num_events), &ktd.num_events ) );
        printf( "\tcumulative elapsed time : %s\n", hrtime( c, 80, ktd.elapsed_time ) );
        printf( "\tshortest event duration : %s\n", hrtime( c, 80, ktd.min_time ) );
        printf( "\tlongest event duration  : %s\n", hrtime( c, 80, ktd.max_time ) );
        printf( "\tprevious event start time: %s\n", hrtime( c, 80, ktd.start_time ) );
        printf( "\tprevious event stop time: %s\n", hrtime( c, 80, ktd.stop_time ) );
      }
      break;

      /* -------------------------------------------------------- */
    case KSTAT_TYPE_INTR:
      if ( kstat_read( kc, ksp, &kid ) == -1 ) {
        fprintf( stderr, "error reading kstat: %d: %s, skipping\n",
                 errno, strerror(errno) );
        continue;
      } else {
        int i;
        static char* c_short[KSTAT_NUM_INTRS] =
          { "HARD", "SOFT", "WATCHDOG", "SPURIOUS", "MULTSVC" };
        for ( i=0; i < KSTAT_NUM_INTRS; ++i ) {
          printf( "\t%-8s %s\n", c_short[i],
                  sizer( b, 32, sizeof(kid.intrs[i]), &kid.intrs[i] ) );
        }
      }
      break;

      /* -------------------------------------------------------- */
    case KSTAT_TYPE_IO:
      if ( kstat_read( kc, ksp, &kio ) == -1 ) {
        fprintf( stderr, "error reading kstat: %d: %s, skipping\n",
                 errno, strerror(errno) );
        continue;
      } else {
        printf( "\tnumber of bytes read                : %s\n", sizer( b, 32, sizeof(kio.nread), &kio.nread ) );
        printf( "\tnumber of bytes written             : %s\n", sizer( b, 32, sizeof(kio.nwritten), &kio.nwritten ) );
        printf( "\tnumber of read operations           : %s\n", sizer( b, 32, sizeof(kio.reads), &kio.reads ) );
        printf( "\tnumber of write operations          : %s\n", sizer( b, 32, sizeof(kio.writes), &kio.writes ) );
        printf( "\tcumulative wait (pre-service) time  : %s\n", hrtime( c, 80, kio.wtime ) );
        printf( "\tcumulative wait length*time product : %s\n", hrtime( c, 80, kio.wlentime ) );
        printf( "\tlast time wait queue changed        : %s\n", hrtime( c, 80, kio.wlastupdate ) );
        printf( "\tcumulative run (service) time       : %s\n", hrtime( c, 80, kio.rtime ) );
        printf( "\tcumulative run length*time product  : %s\n", hrtime( c, 80, kio.rlentime ) );
        /* bug fix: the run queue timestamp is rlastupdate, not wtime */
        printf( "\tlast time run queue changed         : %s\n", hrtime( c, 80, kio.rlastupdate ) );
        printf( "\telements in wait state              : %s\n", sizer( b, 32, sizeof(kio.wcnt), &kio.wcnt ) );
        printf( "\telements in run state               : %s\n", sizer( b, 32, sizeof(kio.rcnt), &kio.rcnt ) );
      }
      break;

      /* -------------------------------------------------------- */
    case KSTAT_TYPE_RAW:
      /* I can only deal with very few RAW entries that
       * I think I know something about */
      if ( strcmp( class, "vm" ) == 0 &&
           strcmp( module, "unix" ) == 0 &&
           strcmp( name, "vminfo" ) == 0 ) {
        vminfo_t vm;
        if ( kstat_read( kc, ksp, &vm ) == -1 ) {
          fprintf( stderr, "error reading kstat: %d: %s, skipping\n",
                   errno, strerror(errno) );
        } else {
          printf( "\t/* I think these values are scaled by 2**10 */\n" );
          printf( "\tfree memory   : %s (%.1f MB)\n",
                  sizer( b, 32, sizeof(vm.freemem), &vm.freemem ),
                  vm.freemem / 1073741824.0 );
          printf( "\tswap reserved : %s (%.1f MB)\n",
                  sizer( b, 32, sizeof(vm.swap_resv), &vm.swap_resv ),
                  vm.swap_resv / 1073741824.0 );
          printf( "\tswap allocated: %s (%.1f MB)\n",
                  sizer( b, 32, sizeof(vm.swap_alloc), &vm.swap_alloc ),
                  vm.swap_alloc / 1073741824.0 );
printf( "\tswap available: %s (%.1f MB)\n", sizer( b, 32, sizeof(vm.swap_avail), &vm.swap_avail ), vm.swap_avail / 1073741824.0 ); printf( "\tswap free : %s (%.1f MB)\n", sizer( b, 32, sizeof(vm.swap_free), &vm.swap_free ), vm.swap_free / 1073741824.0 ); } } else if ( strcmp( module, "cpu_stat" ) == 0 ) { cpu_stat_t cpu; if ( kstat_read( kc, ksp, &cpu ) == -1 ) { fprintf( stderr, "error reading kstat: %d: %s, skipping\n", errno, strerror(errno) ); } else { const char* c_state[] = { "IDLE", "USER", "SYS", "WAIT" }; cpu_sysinfo_t* si = &cpu.cpu_sysinfo; cpu_syswait_t* sw = &cpu.cpu_syswait; cpu_vminfo_t* vm = &cpu.cpu_vminfo; unsigned long i, total; for ( total=i=0; i < CPU_STATES; ++i ) total += si->cpu[i]; for ( i=0; i < CPU_STATES; ++i ) { printf( "\tcpu state %-5s: %16u %5.1f %%\n", ( i < 4 ? c_state[i] : "???") , si->cpu[i], (100.0 * si->cpu[i]) / ((double) total) ); } printf( "\tphysical block reads %16u\n", si->bread ); printf( "\tphysical block writes (sync+async) %16u\n", si->bwrite ); printf( "\tlogical block reads %16u\n", si->lread ); printf( "\tlogical block writes %16u\n", si->lwrite ); printf( "\traw I/O reads %16u\n", si->phread ); printf( "\traw I/O writes %16u\n", si->phwrite ); printf( "\tcontext switches %16u\n", si->pswitch ); printf( "\tinvol. context switches %16u\n", si->inv_swtch ); printf( "\ttraps %16u\n", si->trap ); printf( "\tdevice interrupts %16u\n", si->intr ); printf( "\tsystem calls %16u\n", si->syscall ); printf( "\tread()+readv() calls %16u\n", si->sysread ); printf( "\twrite()+writev() calls %16u\n", si->syswrite ); printf( "\tfork() calls %16u\n", si->sysfork ); printf( "\tvfork() calls %16u\n", si->sysvfork ); printf( "\texec family calls %16u\n", si->sysexec ); printf( "\tthread creation calls %16u\n", si->nthreads ); printf( "\tprocesses waiting for block I/O %16d\n", sw->iowait ); printf( "\tpage reclaims (includes pageout) %16u\n", vm->pgrec ); printf( "\tpage reclaims from free list %16u\n", vm->pgfrec ); printf( "\tpageins %16u\n", vm->pgin ); printf( "\tpages paged in %16u\n", vm->pgpgin ); printf( "\tpageouts %16u\n", vm->pgout ); printf( "\tpages paged out %16u\n", vm->pgpgout ); printf( "\tswapins %16u\n", vm->swapin ); printf( "\tpages swapped in %16u\n", vm->pgswapin ); printf( "\tswapouts %16u\n", vm->swapout ); printf( "\tpages swapped out %16u\n", vm->pgswapout ); printf( "\tpages zero-filled on demand %16u\n", vm->zfod ); } } break; } /* * -------------------------------------------------------- */ putchar('\n'); } printf( "\nTOTAL: %u instances, %u records, %lu total size\n", count, rec, sum ); return 0; } �������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/vmmap.txt��������������������������������0000644�0001750�0001750�00000060766�11757531137�024733� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������729 issetugid = 0 (14 µs) 1502 mmap( 0x0, 16777216, 0x3, 0x1002, 0x2000000, 4294967296 ) = 0x200000 (32 µs) 1544 munmap( 0x200000, 6291456 ) = 0 (15 µs) 1565 munmap( 0x1000000, 2097152 ) = 0 (7 µs) 3056 mmap( 0x0, 16384, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0x17000 (32 µs) 4391 __sysctl( 
0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (42 µs) 4448 __sysctl( 0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (14 µs) 4472 __sysctl( 0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (14 µs) 4610 __sysctl( 0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (14 µs) 4631 __sysctl( 0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (14 µs) 4703 __sysctl( 0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (15 µs) 5064 __sysctl( 0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (15 µs) 5086 __sysctl( 0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (13 µs) 5105 __sysctl( 0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (13 µs) 5126 __sysctl( 0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (13 µs) 5385 __sysctl( 0xbfffd9c4 [CTL_KERN,45], 2, 0xbfffd8c5, 0xbfffd9cc [255], 0x0, 4294967296 ) = 0 (14 µs) 6608 mmap( 0x0, 20480, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0x1b000 (17 µs) 7922 munmap( 0x17000, 16384 ) = 0 (17 µs) 8208 mmap( 0x0, 24576, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0x20000 (13 µs) 17756 sigaction = 0 (11 µs) 17775 sigaction = 0 (5 µs) 17860 stat( "/usr/bin", 0xbffff350 ) = 0 (32 µs) 17899 open_nocancel( "/usr/bin", 0x100004, 01166430 ) = 3 (26 µs) 17932 fcntl_nocancel( 3, 0x2, 0x1 ) = 0 (10 µs) 17949 __sysctl( 0xbfffecd4 [CTL_HW,7], 2, 0xa0440cc0, 0xbfffecdc [4], 0x0, 4294967296 ) = 0 (50 µs) 18005 fstatfs = 0 (11 µs) 18024 getdirentries = 4092 (145 µs) 18366 getdirentries = 4096 (123 µs) 18731 getdirentries = 4092 (123 µs) 19045 getdirentries = 3748 (114 µs) 19294 getdirentries = 0 (26 µs) 19328 close_nocancel( 3 ) = 0 (16 µs) 19547 stat64( "/usr/bin/Contents", 0xbffff304 ) = -1 (22 µs) 19586 stat64( "/usr/bin/Resources", 0xbffff304 ) = -1 (15 µs) 19625 stat64( "/usr/bin/Support Files", 0xbffff304 ) = -1 (16 µs) 19651 stat64( "/usr/bin", 0xbffff394 ) = 0 (15 µs) 19993 stat( "/usr/bin/vmmap", 0xbffff828 ) = 0 (19 µs) 20019 open( "/usr/bin/vmmap", 0x0, 0777 ) = 3 (23 µs) 20048 read( 3, 0xbffff628, 512 ) = 512 (20 µs) 20096 lseek( 3, 49152, SEEK_SET ) = 49152 (12 µs) 20113 read( 3, 0xbffff0b0, 32 ) = 32 (10 µs) 20129 close( 3 ) = 0 (12 µs) 20231 __sysctl( 0xbffff9c8 [CTL_KERN,14,1,14707], 4, 0xbffff7dc, 0xbffff9d8 [492], 0x0, 4294967296 ) = 0 (54 µs) 20291 __sysctl( 0xbffff9c8 [CTL_KERN,14,1,9145], 4, 0xbffff7dc, 0xbffff9d8 [492], 0x0, 4294967296 ) = 0 (25 µs) 20329 ioctl( 1, 0x4004667a, 0xbffff9bc ) = 0 (11 µs) 20347 ioctl( 1, 0x40087468, 0xbffff9e8 ) = 0 (10 µs) 20374 getgid = 0 (5 µs) 20386 setgid = 0 (11 µs) 20469 __sysctl( 0xbffff95c [CTL_KERN,8], 2, 0xbffff968, 0xbffff96c [4], 0x0, 4294967296 ) = 0 (20 µs) 20500 mmap( 0x0, 262144, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0x200000 (18 µs) 20526 __sysctl( 0xbffff95c [CTL_KERN,38,9145], 3, 0x200000, 0xbffff968 [262144], 0x0, 4294967296 ) = 0 (71 µs) 20631 munmap( 0x200000, 262144 ) = 0 (13 µs) 20753 fstat64( 1, 0xbffff164 ) = 0 (12 µs) 20771 ioctl( 1, 0x4004667a, 0xbffff1cc ) = 0 (6 µs) 20790 write_nocancel( 1, "Virtual Memory Map of process 9145 (bash)\n\b\tsmbpasswd\0", 42 ) = 42 (32 µs) 20831 write_nocancel( 1, "Output report format: 2.2 -- 32-bit process\nbpasswd\0", 46 ) = 46 (19 µs) 20857 write_nocancel( 1, "\nutput report format: 2.2 -- 32-bit process\nbpasswd\0", 1 ) = 1 (18 µs) 97951 getegid = 0 (7 µs) 
101193 stat( "/usr/share/icu/icudt36l/en.res", 0xbfffdc90 ) = -1 (36 µs) 101281 stat( "/usr/share/icu/icudt36l.dat", 0xbfffda20 ) = 0 (33 µs) 101320 open( "/usr/share/icu/icudt36l.dat", 0x0, 027777755274 ) = 3 (37 µs) 101364 mmap( 0x0, 10235520, 0x1, 0x1, 3, -4294967296 ) = 0x1000000 (34 µs) 101406 close( 3 ) = 0 (15 µs) 101503 stat( "/usr/share/icu/icudt36l/root.res", 0xbfffdcf0 ) = -1 (19 µs) 101604 geteuid = 0 (6 µs) 101660 geteuid = 0 (5 µs) 104567 lstat( "/usr/lib/dyld.dSYM/Contents/Resources/DWARF/dyld", 0xbffff0d0 ) = -1 (40 µs) = 1272 (2186 µs) 113043 proc_info = 1272 (11 µs) 113066 proc_info = 1272 (7 µs) 113086 proc_info = 1272 (8 µs) 113106 proc_info = 1272 (27 µs) 113144 proc_info = 1272 (163 µs) 113319 proc_info = 1272 (1322 µs) 114672 proc_info = 1272 (9 µs) 114697 proc_info = 1272 (7 µs) 114717 proc_info = 1272 (7 µs) 114736 proc_info = 1272 (2184 µs) 116948 proc_info = 1272 (10 µs) 116970 proc_info = 1272 (7 µs) 116989 proc_info120606 lstat( "/usr", 0xbfffe798 ) = 0 (18 µs) 120630 lstat( "/usr/lib", 0xbfffe798 ) = 0 (13 µs) 120648 lstat( "/usr/lib/dyld", 0xbfffe798 ) = 0 (13 µs) 120676 lstat( "/usr", 0xbfffe7a8 ) = 0 (11 µs) 120693 lstat( "/usr/lib", 0xbfffe7a8 ) = 0 (12 µs) 120710 lstat( "/usr/lib/dyld", 0xbfffe7a8 ) = 0 (13 µs) 124464 lstat( "/usr/lib/libSystem.B.dylib", 0xbffff2b0 ) = 0 (34 µs) 124508 open( "/usr/lib/libSystem.B.dylib", 0x0, 00 ) = 3 (34 µs) 124548 fstat( 3, 0xbffff36c ) = 0 (10 µs) 124578 close( 3 ) = 0 (15 µs) 124834 mmap( 0x0, 90112, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0xc2000 (17 µs) 159124 mmap( 0x0, 16384, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0xdb000 (20 µs) 170725 mmap( 0x0, 65536, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0xc2000 (12 µs) 170796 munmap( 0x35000, 32768 ) = 0 (19 µs) 171708 mmap( 0x0, 262144, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0x403000 (11 µs) 171743 munmap( 0xc2000, 131072 ) = 0 (17 µs) 378259 write_nocancel( 1, "__DATA a005e000-a005f000 [ 4K] rw-/rwx SM=COW /usr/lib/libiconv.2.dylib\nmon.A.dylib\nsmbtar\0", 92 ) = 92 (31 µs) 378321 write_nocancel( 1, "shared pmap a005f000-a0162000 [ 1036K] rw-/rwx SM=COW \nusr/lib/libiconv.2.dylib\nmon.A.dylib\nsmbtar\0", 67 ) = 67 (27 µs) 378498 write_nocancel( 1, "__DATA a0162000-a016b000 [ 36K] rw-/rwx SM=COW /usr/lib/libncurses.5.4.dylib\nA.dylib\nsmbtar\0", 96 ) = 96 (29 µs) 378557 write_nocancel( 1, "shared pmap a016b000-a0200000 [ 596K] rw-/rwx SM=COW \nusr/lib/libncurses.5.4.dylib\nA.dylib\nsmbtar\0", 67 ) = 67 (27 µs) 378613 write_nocancel( 1, "shared pmap a0400000-a0406000 [ 24K] rw-/rwx SM=COW \nusr/lib/libncurses.5.4.dylib\nA.dylib\nsmbtar\0", 67 ) = 67 (27 µs) 378671 write_nocancel( 1, "__DATA a0406000-a0480000 [ 488K] rw-/rwx SM=COW /usr/lib/libSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 93 ) = 93 (28 µs) 378728 write_nocancel( 1, "shared pmap a0480000-a0600000 [ 1536K] rw-/rwx SM=COW \nusr/lib/libSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 67 ) = 67 (27 µs) 378786 write_nocancel( 1, "Stack bf800000-c0000000 [ 8192K] rw-/rwx SM=COW thread 0\nlibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 75 ) = 75 (27 µs) 378819 write_nocancel( 1, "\ntack bf800000-c0000000 [ 8192K] rw-/rwx SM=COW thread 0\nlibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 1 ) = 1 (25 µs) 378849 write_nocancel( 1, "==== Legend\n bf800000-c0000000 [ 8192K] rw-/rwx SM=COW thread 0\nlibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 12 ) = 12 (25 µs) 378881 write_nocancel( 1, "SM=sharing mode: \n bf800000-c0000000 [ 8192K] rw-/rwx SM=COW thread 0\nlibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 19 ) = 19 (25 µs) 378913 write_nocancel( 1, 
"\tCOW=copy_on_write PRV=private NUL=empty ALI=aliased \nrwx SM=COW thread 0\nlibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 54 ) = 54 (25 µs) 378944 write_nocancel( 1, "\tSHM=shared ZER=zero_filled S/A=shared_alias\naliased \nrwx SM=COW thread 0\nlibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 45 ) = 45 (25 µs) 378975 write_nocancel( 1, "\nSHM=shared ZER=zero_filled S/A=shared_alias\naliased \nrwx SM=COW thread 0\nlibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 1 ) = 1 (25 µs) 379006 write_nocancel( 1, "==== Summary for process 9145\nA=shared_alias\naliased \nrwx SM=COW thread 0\nlibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 30 ) = 30 (25 µs) 379074 write_nocancel( 1, "ReadOnly portion of Libraries: Total=7384K resident=7384K(100%) swapped_out_or_unallocated=0K(0%)\ndylib\nsmbtar\0", 98 ) = 98 (27 µs) 379137 write_nocancel( 1, "Writable regions: Total=17.3M written=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 106 ) = 106 (28 µs) 379305 write_nocancel( 1, "\nritable regions: Total=17.3M written=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 1 ) = 1 (29 µs) 379402 write_nocancel( 1, "REGION TYPE [ VIRTUAL]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (27 µs) 379439 write_nocancel( 1, "=========== [ =======]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (26 µs) 379495 write_nocancel( 1, "MALLOC [ 9256K]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (26 µs) 379542 write_nocancel( 1, "STACK GUARD [ 56.0M]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (26 µs) 379589 write_nocancel( 1, "Stack [ 8192K]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (26 µs) 379635 write_nocancel( 1, "__DATA [ 796K]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (27 µs) 379681 write_nocancel( 1, "__IMPORT [ 24K]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (28 µs) 15951 geteuid = 0 (15 µs) 15976 getuid = 0 (5 µs) 16024 issetugid = 0 (5 µs) 16036 open( "/dev/autofs_nowait", 0x0, 00 ) = 3 (173 µs) 16233 close( 3 ) = 0 (17 µs) 16256 getuid = 0 (5 µs) 96097 lstat( "/usr/lib/dyld", 0xbffff260 ) = 0 (54 µs) 96365 open( "/usr/lib/dyld", 0x0, 00 ) = 3 (38 µs) 96461 fstat( 3, 0xbffff31c ) = 0 (11 µs) 96502 close( 3 ) = 0 (17 µs) 96712 geteuid = 0 (6 µs) 96779 geteuid = 0 (5 µs) 96816 geteuid = 0 (5 µs) 96826 geteuid = 0 (5 µs) 96836 getuid = 0 (4 µs) 97172 issetugid = 1 (6 µs) 97211 geteuid = 0 (5 µs) 97231 stat( "/Library/Managed Preferences", 0xbffff0c0 ) = -1 (25 µs) 97299 getuid = 0 (6 µs) 97311 gethostuuid = 0 (37 µs) 97368 geteuid = 0 (5 µs) 97378 getuid = 0 (4 µs) 97394 geteuid = 0 (5 µs) 97403 getuid = 0 (5 µs) 97414 stat( "/var/root", 0xbffff070 ) = 0 (30 µs) 97468 geteuid = 0 (6 µs) 97479 getegid = 0 (5 µs) 97497 geteuid = 0 (5 µs) 97506 getuid = 0 (5 µs) 97522 geteuid = 0 (5 µs) 97531 getuid = 0 (5 µs) 97542 stat( "/var/root", 0xbffff070 ) = 0 (16 µs) 97578 geteuid = 0 (5 µs) 97588 getegid = 0 (5 µs) 97621 geteuid = 0 (5 µs) 97631 getuid = 0 (4 µs) 97646 geteuid = 0 (5 µs) 97655 getuid = 0 (5 µs) 97666 stat( "/var/root", 0xbffff070 ) = 0 (16 µs) 97703 geteuid = 0 (5 µs) 97713 getegid = 0 (4 µs) 97730 geteuid = 0 (5 µs) 97740 getuid = 0 (4 µs) 97755 geteuid = 0 (5 µs) 97764 getuid = 0 (5 µs) 97775 stat( "/var/root", 0xbffff070 ) = 0 (16 µs) 97809 
geteuid = 0 (5 µs) 97819 getegid = 0 (5 µs) 97861 geteuid = 0 (5 µs) 97870 getegid = 0 (5 µs) 97913 geteuid = 0 (5 µs) 98018 stat64( "/var/root/Library/Preferences/ByHost/vmmap.0016cb8a6bc6.plist", 0xbffff04c ) = -1 (84 µs) 98114 stat64( "/var/root/Library/Preferences/vmmap.plist", 0xbffff04c ) = -1 (49 µs) 98174 stat64( "/var/root/Library/Preferences/ByHost/.GlobalPreferences.0016cb8a6bc6.plist", 0xbffff04c ) = 0 (38 µs) 98226 open( "/var/root/Library/Preferences/ByHost/.GlobalPreferences.0016cb8a6bc6.plist", 0x0, 00 ) = 3 (65 µs) 98304 read( 3, 0xbfffcbd0, 8192 ) = 360 (24 µs) 98345 read( 3, 0xbfffcbd0, 8192 ) = 0 (7 µs) 98366 mmap( 0x0, 32768, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0x26000 (23 µs) 98396 mmap( 0x0, 32768, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0x2e000 (9 µs) 98588 munmap( 0x26000, 32768 ) = 0 (17 µs) 98611 munmap( 0x2e000, 32768 ) = 0 (9 µs) 98628 fstat64( 3, 0xbffff034 ) = 0 (12 µs) 98646 close( 3 ) = 0 (42 µs) 98705 stat64( "/var/root/Library/Preferences/.GlobalPreferences.plist", 0xbffff04c ) = 0 (36 µs) 98751 open( "/var/root/Library/Preferences/.GlobalPreferences.plist", 0x0, 00 ) = 3 (35 µs) 98794 read( 3, 0xbfffcbd0, 8192 ) = 266 (14 µs) 98814 read( 3, 0xbfffcbd0, 8192 ) = 0 (7 µs) 98862 fstat64( 3, 0xbffff034 ) = 0 (9 µs) 98877 close( 3 ) = 0 (21 µs) 98911 stat64( "/Library/Preferences/vmmap.plist", 0xbffff04c ) = -1 (21 µs) 98943 stat64( "/Library/Preferences/.GlobalPreferences.plist", 0xbffff04c ) = 0 (27 µs) 98979 open( "/Library/Preferences/.GlobalPreferences.plist", 0x0, 00 ) = 3 (28 µs) 99014 read( 3, 0xbfffcbd0, 8192 ) = 3260 (15 µs) 99036 read( 3, 0xbfffcbd0, 8192 ) = 0 (7 µs) 99057 mmap( 0x0, 32768, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0x26000 (24 µs) 99087 mmap( 0x0, 32768, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0x2e000 (15 µs) 99554 munmap( 0x26000, 32768 ) = 0 (19 µs) 99580 munmap( 0x2e000, 32768 ) = 0 (10 µs) 99597 fstat64( 3, 0xbffff034 ) = 0 (11 µs) 99615 close( 3 ) = 0 (17 µs) 99699 geteuid = 0 (6 µs) 99748 geteuid = 0 (5 µs) 99758 getuid = 0 (5 µs) 99777 geteuid = 0 (4 µs) 99786 getuid = 0 (4 µs) 99802 geteuid = 0 (5 µs) 99812 getuid = 0 (4 µs) 99828 geteuid = 0 (5 µs) 99838 getuid = 0 (4 µs) 99885 stat64( "/var/root/Library/Preferences/ByHost/vmmap.0016cb8a6bc6.plist", 0xbfffefcc ) = -1 (46 µs) 99942 stat64( "/var/root/Library/Preferences/vmmap.plist", 0xbfffefcc ) = -1 (24 µs) 99976 stat64( "/var/root/Library/Preferences/ByHost/.GlobalPreferences.0016cb8a6bc6.plist", 0xbfffefcc ) = 0 (38 µs) 100024 stat64( "/var/root/Library/Preferences/.GlobalPreferences.plist", 0xbfffefcc ) = 0 (30 µs) 100063 stat64( "/Library/Preferences/vmmap.plist", 0xbfffefcc ) = -1 (19 µs) 100091 stat64( "/Library/Preferences/.GlobalPreferences.plist", 0xbfffefcc ) = 0 (23 µs) 100137 stat64( "/var/root/Library/Preferences/ByHost/vmmap.0016cb8a6bc6.plist", 0xbfffefac ) = -1 (30 µs) 100178 stat64( "/var/root/Library/Preferences/vmmap.plist", 0xbfffefac ) = -1 (25 µs) 100219 stat64( "/Library/Preferences/vmmap.plist", 0xbfffefac ) = -1 (17 µs) 106117 __sysctl( 0xbffff434 [CTL_KERN,44,9145], 3, 0xbffff44c, 0xbffff448 [4], 0x0, 0 ) = 0 (41 µs) 106568 lstat( "/usr/lib/libSystem.B.dylib", 0xbffff110 ) = 0 (33 µs) 106616 open( "/usr/lib/libSystem.B.dylib", 0x0, 00 ) = 3 (34 µs) 106656 fstat( 3, 0xbffff1cc ) = 0 (10 µs) 106686 close( 3 ) = 0 (14 µs) 106791 open_nocancel( ".", 0x0, 01431700 ) = 3 (26 µs) 106824 fstat64( 3, 0xbfffeb14 ) = 0 (23 µs) 106854 fcntl_nocancel( 3, 0x32, 0xffffffffbfffecee ) = 0 (11 µs) 106871 close_nocancel( 3 ) = 0 (12 µs) 106889 stat64( 
"/Users/voeckler/src/kickstart/machine", 0xbfffeaa8 ) = 0 (27 µs) 106937 lstat( "/Users/voeckler/src/kickstart/machine/commpage [libSystem.B.dylib]", 0xbffff080 ) = -1 (64 µs) 107012 lstat( "commpage [libSystem.B.dylib]", 0xbffff080 ) = -1 (17 µs) 107183 open_nocancel( ".", 0x0, 07740200 ) = 3 (16 µs) 107204 fstat64( 3, 0xbfffe9e4 ) = 0 (7 µs) 107217 fcntl_nocancel( 3, 0x32, 0xffffffffbfffebbe ) = 0 (8 µs) 107230 close_nocancel( 3 ) = 0 (10 µs) 107246 stat64( "/Users/voeckler/src/kickstart/machine", 0xbfffe978 ) = 0 (21 µs) 107285 lstat( "/Users/voeckler/src/kickstart/machine/commpage [libSystem.B.dylib].dSYM/Contents/Resources/DWARF/commpage [libSystem.B.dylib]", 0xbfffef50 ) = -1 (59 µs) 107355 lstat( "commpage [libSystem.B.dylib].dSYM/Contents/Resources/DWARF/commpage [libSystem.B.dylib]", 0xbfffef50 ) = -1 (32 µs) 107686 proc_info = 1272 (32 µs) 107739 lstat( "/bin", 0xbfffe798 ) = 0 (15 µs) 107759 lstat( "/bin/bash", 0xbfffe798 ) = 0 (16 µs) 107791 lstat( "/bin", 0xbfffe7a8 ) = 0 (12 µs) 107809 lstat( "/bin/bash", 0xbfffe7a8 ) = 0 (12 µs) 107843 proc_info = 1272 (211 µs) 108066 lstat( "/bin", 0xbfffe798 ) = 0 (12 µs) 108084 lstat( "/bin/bash", 0xbfffe798 ) = 0 (12 µs) 108107 lstat( "/bin", 0xbfffe7a8 ) = 0 (11 µs) 108124 lstat( "/bin/bash", 0xbfffe7a8 ) = 0 (12 µs) 108204 proc_info = 1272 (8 µs) 108226 proc_info = 1272 (9 µs) 108246 proc_info = 1272 (2478 µs) 110757 proc_info = 1272 (10 µs) 110779 proc_info = 1272 (7 µs) 110803 proc_info = 1272 (7 µs) 110822 proc_info = 1272 (1931 µs) 118947 proc_info = 1272 (10 µs) 118966 proc_info = 1272 (38 µs) 119029 lstat( "/usr", 0xbfffe798 ) = 0 (25 µs) 119060 lstat( "/usr/lib", 0xbfffe798 ) = 0 (16 µs) 119081 lstat( "/usr/lib/dyld", 0xbfffe798 ) = 0 (15 µs) 119120 lstat( "/usr", 0xbfffe7a8 ) = 0 (12 µs) 119137 lstat( "/usr/lib", 0xbfffe7a8 ) = 0 (12 µs) 119154 lstat( "/usr/lib/dyld", 0xbfffe7a8 ) = 0 (13 µs) 119182 proc_info = 1272 (1377 µs) 123777 __sysctl( 0xbffff374 [CTL_KERN,44,9145], 3, 0xbffff38c, 0xbffff388 [4], 0x0, 4294967296 ) = 0 (40 µs) 133966 mmap( 0x0, 16384, 0x3, 0x1002, 0x3000000, -8654658507905171456 ) = 0xd8000 (19 µs) 135066 mmap( 0x0, 20480, 0x3, 0x1002, 0x3000000, -8654658507905171456 ) = 0xe0000 (11 µs) 137120 mmap( 0x0, 65536, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0xe5000 (14 µs) 137194 munmap( 0xd8000, 32768 ) = 0 (26 µs) 137238 mmap( 0x0, 36864, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0xf5000 (10 µs) 137292 munmap( 0xe0000, 20480 ) = 0 (10 µs) 139165 munmap( 0xc2000, 90112 ) = 0 (57 µs) 141481 lstat( "/usr/lib/libSystem.B.dylib.dSYM/Contents/Resources/DWARF/libSystem.B.dylib", 0xbffff180 ) = -1 (56 µs) 144606 munmap( 0xe5000, 65536 ) = 0 (22 µs) 145867 munmap( 0xf5000, 36864 ) = 0 (28 µs) 149271 __sysctl( 0xbffff384 [CTL_KERN,44,9145], 3, 0xbffff39c, 0xbffff398 [4], 0x0, 4294967296 ) = 0 (44 µs) 149855 lstat( "/usr/lib/libSystem.B.dylib", 0xbffff2c0 ) = 0 (39 µs) 149904 open( "/usr/lib/libSystem.B.dylib", 0x0, 00 ) = 3 (34 µs) 149945 fstat( 3, 0xbffff37c ) = 0 (9 µs) 149974 close( 3 ) = 0 (15 µs) 150103 mmap( 0x0, 90112, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0xc5000 (16 µs) 160174 mmap( 0x0, 20480, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0xe3000 (11 µs) 162112 mmap( 0x0, 65536, 0x3, 0x1002, 0x3000000, -4294967296 ) = 0xe8000 (10 µs) 162178 munmap( 0xdb000, 32768 ) = 0 (23 µs) 162218 mmap( 0x0, 36864, 0x3, 0x1002, 0x3000000, -4294967296 ) = 0x303000 (9 µs) 162269 munmap( 0xe3000, 20480 ) = 0 (11 µs) 163889 munmap( 0xc5000, 90112 ) = 0 (24 µs) 165864 lstat( 
"/usr/lib/libSystem.B.dylib.dSYM/Contents/Resources/DWARF/libSystem.B.dylib", 0xbffff190 ) = -1 (43 µs) 168930 munmap( 0xe8000, 65536 ) = 0 (19 µs) 170160 munmap( 0x303000, 36864 ) = 0 (15 µs) 170424 mmap( 0x0, 16384, 0x3, 0x1002, 0x3000000, 4294967296 ) = 0x35000 (16 µs) 375561 write_nocancel( 1, "==== Non-writable regions for process 9145\nss\nbpasswd\0", 43 ) = 43 (54 µs) 375738 write_nocancel( 1, "__PAGEZERO 00000000-00001000 [ 4K] ---/--- SM=NUL /bin/bash\n\230\236\271\001\024\0", 76 ) = 76 (24 µs) 375794 write_nocancel( 1, "__TEXT 00001000-00083000 [ 520K] r-x/rwx SM=COW /bin/bash\n\230\236\271\001\024\0", 76 ) = 76 (23 µs) 375848 write_nocancel( 1, "__LINKEDIT 0008c000-00090000 [ 16K] r--/rwx SM=COW /bin/bash\n\230\236\271\001\024\0", 76 ) = 76 (23 µs) 375899 write_nocancel( 1, "STACK GUARD 00090000-00091000 [ 4K] ---/rwx SM=NUL \nbin/bash\n\230\236\271\001\024\0", 67 ) = 67 (23 µs) 375949 write_nocancel( 1, "STACK GUARD 00092000-00093000 [ 4K] ---/rwx SM=NUL \nbin/bash\n\230\236\271\001\024\0", 67 ) = 67 (23 µs) 376003 write_nocancel( 1, "__TEXT 8fe00000-8fe2e000 [ 184K] r-x/rwx SM=COW /usr/lib/dyld\n\024\0", 80 ) = 80 (23 µs) 376057 write_nocancel( 1, "__LINKEDIT 8fe67000-8fe75000 [ 56K] r--/rwx SM=COW /usr/lib/dyld\n\024\0", 80 ) = 80 (22 µs) 376110 write_nocancel( 1, "__TEXT 9003f000-90047000 [ 32K] r-x/r-x SM=COW /usr/lib/libgcc_s.1.dylib\ns\0", 92 ) = 92 (25 µs) 376166 write_nocancel( 1, "__TEXT 900c4000-900c9000 [ 20K] r-x/r-x SM=COW /usr/lib/system/libmathCommon.A.dylib\nsmbtar\0", 104 ) = 104 (27 µs) 376232 write_nocancel( 1, "__TEXT 904a4000-90599000 [ 980K] r-x/r-x SM=COW /usr/lib/libiconv.2.dylib\nmon.A.dylib\nsmbtar\0", 92 ) = 92 (27 µs) 376290 write_nocancel( 1, "__TEXT 9133a000-9136a000 [ 192K] r-x/r-x SM=COW /usr/lib/libncurses.5.4.dylib\nA.dylib\nsmbtar\0", 96 ) = 96 (27 µs) 376348 write_nocancel( 1, "__TEXT 935e4000-93745000 [ 1412K] r-x/r-x SM=COW /usr/lib/libSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 93 ) = 93 (26 µs) 376405 write_nocancel( 1, "__LINKEDIT 9743a000-9781b000 [ 3972K] r--/r-- SM=COW /usr/lib/system/libmathCommon.A.dylib\nsmbtar\0", 104 ) = 104 (27 µs) 376464 write_nocancel( 1, "__IMPORT a0a03000-a0a04000 [ 4K] r-x/rwx SM=COW /usr/lib/libgcc_s.1.dylib\nmon.A.dylib\nsmbtar\0", 92 ) = 92 (28 µs) 376522 write_nocancel( 1, "__IMPORT a0a0b000-a0a0c000 [ 4K] r-x/rwx SM=COW /usr/lib/libiconv.2.dylib\nmon.A.dylib\nsmbtar\0", 92 ) = 92 (27 µs) 376588 write_nocancel( 1, "__IMPORT a0a27000-a0a28000 [ 4K] r-x/rwx SM=COW /usr/lib/libncurses.5.4.dylib\nA.dylib\nsmbtar\0", 96 ) = 96 (27 µs) 376646 write_nocancel( 1, "__IMPORT a0a61000-a0a63000 [ 8K] r-x/rwx SM=COW /usr/lib/libSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 93 ) = 93 (33 µs) 376786 write_nocancel( 1, "STACK GUARD bc000000-bf800000 [ 56.0M] ---/rwx SM=NUL \nusr/lib/libSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 67 ) = 67 (28 µs) 376821 write_nocancel( 1, "\nTACK GUARD bc000000-bf800000 [ 56.0M] ---/rwx SM=NUL \nusr/lib/libSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 1 ) = 1 (28 µs) 376873 write_nocancel( 1, "==== Writable regions for process 9145\n0 [ 56.0M] ---/rwx SM=NUL \nusr/lib/libSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 39 ) = 39 (27 µs) 376933 write_nocancel( 1, "__DATA 00083000-00089000 [ 24K] rw-/rwx SM=COW /bin/bash\nibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 76 ) = 76 (32 µs) 377017 write_nocancel( 1, "__DATA 00089000-0008b000 [ 8K] rw-/rwx SM=PRV /bin/bash\nibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 76 ) = 76 (27 µs) 377104 write_nocancel( 1, "__IMPORT 0008b000-0008c000 [ 4K] rwx/rwx SM=COW 
/bin/bash\nibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 76 ) = 76 (30 µs) 377193 write_nocancel( 1, "MALLOC (freed?) 00091000-00092000 [ 4K] rw-/rwx SM=PRV \nbin/bash\nibSystem.B.dylib\nib\nA.dylib\nsmbtar\0", 67 ) = 67 (29 µs) 377281 write_nocancel( 1, "MALLOC_LARGE 000b7000-000bb000 [ 16K] rw-/rwx SM=COW DefaultMallocZone_0x100000\nib\nA.dylib\nsmbtar\0", 93 ) = 93 (29 µs) 377390 write_nocancel( 1, "MALLOC_REALLOC 000bb000-000c0000 [ 20K] rw-/rwx SM=COW DefaultMallocZone_0x100000\nib\nA.dylib\nsmbtar\0", 93 ) = 93 (29 µs) 377450 write_nocancel( 1, "MALLOC_TINY 00100000-00200000 [ 1024K] rw-/rwx SM=COW DefaultMallocZone_0x100000\nib\nA.dylib\nsmbtar\0", 93 ) = 93 (29 µs) 377538 write_nocancel( 1, "MALLOC_SMALL 00800000-01000000 [ 8192K] rw-/rwx SM=COW DefaultMallocZone_0x100000\nib\nA.dylib\nsmbtar\0", 93 ) = 93 (29 µs) 377627 write_nocancel( 1, "__DATA 8fe2e000-8fe67000 [ 228K] rw-/rwx SM=COW /usr/lib/dyld\none_0x100000\nib\nA.dylib\nsmbtar\0", 80 ) = 80 (27 µs) 377707 write_nocancel( 1, "shared pmap a0000000-a0007000 [ 28K] rw-/rwx SM=COW \nusr/lib/dyld\none_0x100000\nib\nA.dylib\nsmbtar\0", 67 ) = 67 (27 µs) 377764 write_nocancel( 1, "__DATA a0007000-a0008000 [ 4K] rw-/rwx SM=COW /usr/lib/libgcc_s.1.dylib\n\nib\nA.dylib\nsmbtar\0", 92 ) = 92 (30 µs) 377851 write_nocancel( 1, "shared pmap a0008000-a0012000 [ 40K] rw-/rwx SM=COW \nusr/lib/libgcc_s.1.dylib\n\nib\nA.dylib\nsmbtar\0", 67 ) = 67 (28 µs) 377909 write_nocancel( 1, "__DATA a0012000-a0013000 [ 4K] rw-/rwx SM=COW /usr/lib/system/libmathCommon.A.dylib\nsmbtar\0", 104 ) = 104 (28 µs) 377966 write_nocancel( 1, "shared pmap a0013000-a005e000 [ 300K] rw-/rwx SM=COW \nusr/lib/system/libmathCommon.A.dylib\nsmbtar\0", 67 ) = 67 (27 µs) 379802 write_nocancel( 1, "__LINKEDIT [ 4044K]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (29 µs) 379851 write_nocancel( 1, "__PAGEZERO [ 4K]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (27 µs) 379904 write_nocancel( 1, "__TEXT [ 3340K]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (29 µs) 379960 write_nocancel( 1, "shared pmap [ 3560K]\nen=92K(1%) resident=716K(4%) swapped_out=0K(0%) unallocated=16.6M(96%)\nbtar\0", 35 ) = 35 (29 µs) 380224 munmap( 0x403000, 262144 ) = 0 (63 µs) 380687 exit( 0x0 [0:0] ) ����������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/darwin-proc.c����������������������������0000644�0001750�0001750�00000015160�11757531137�025427� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <sys/sysctl.h> #include <sys/vmmeter.h> #include <errno.h> #include <stdio.h> #include <string.h> #include <stdlib.h> #include <unistd.h> #include <sys/time.h> #include <sys/user.h> #include <sys/kernel_types.h> #include <sys/vnode_if.h> #include <sys/vnode.h> #include <sys/proc_info.h> #include <sys/proc.h> #include <sys/vm.h> #include <sys/mman.h> #include <mach/vm_statistics.h> static unsigned status[SZOMB+1]; static const char* c_state[] = { "total", "idle", "runable", "sleeping", "stopped", "zombie", "other" }; static char mystate[1 << 20]; int hexdump( void* area, size_t size ) /* purpose: dump a memory area in old-DOS style hex chars and 
printable ASCII * paramtr: area (IN): pointer to area start * size (IN): extent of area to print * returns: number of byte written */ { static const char digit[16] = "0123456789ABCDEF"; char a[82]; unsigned char b[18]; size_t i, j; unsigned char c; ssize_t result = 0; unsigned char* buffer = (unsigned char*) area; for ( i=0; i<size; i+=16 ) { memset( a, 0, sizeof(a) ); memset( b, 0, sizeof(b) ); sprintf( a, "%04X: ", i ); for ( j=0; j<16 && j+i<size; ++j ) { c = (unsigned char) buffer[i+j]; a[6+j*3] = digit[ c >> 4 ]; a[7+j*3] = digit[ c & 15 ]; a[8+j*3] = ( j == 7 ? '-' : ' ' ); b[j] = (char) (c < 32 || c >= 127 ? '.' : c); } for ( ; j<16; ++j ) { a[6+j*3] = a[7+j*3] = a[8+j*3] = b[j] = ' '; } strncat( a, (char*) b, sizeof(a) ); strncat( a, "\n", sizeof(a) ); result += fputs( a, stdout ); } return result; } void explore( int* mib, size_t miblen ) { size_t len = 0; if ( sysctl( mib, miblen, NULL, &len, NULL, 0 ) == -1 ) { fprintf( stderr, "sysctl %d:%d: %s\n", mib[0], mib[1], strerror(errno) ); } else { void* buffer = malloc(len); if ( sysctl( mib, miblen, buffer, &len, NULL, 0 ) == -1 ) fprintf( stderr, "sysctl %d:%d: %s\n", mib[0], mib[1], strerror(errno) ); else hexdump( buffer, len ); free(buffer); } } void read_ps( void ) { FILE* ps; memset( mystate, '?', sizeof(mystate) ); if ( (ps = popen( "ps ax -o pid,state", "r" )) ) { char line[128], state[8]; int pid; fgets( line, sizeof(line), ps); /* skip */ while ( fgets( line, sizeof(line), ps ) ) { sscanf( line, "%d %8s", &pid, state ); printf( "# %5d %s\n", pid, state ); mystate[pid] = state[0]; } pclose(ps); } } void printkproc( struct kinfo_proc* kp ) { struct extern_proc* p = &kp->kp_proc; struct eproc* e = &kp->kp_eproc; uint32_t u[1024]; size_t len = sizeof(u[0]); int mib[4]; status[0]++; if ( p->p_stat <= SZOMB ) { status[ p->p_stat ]++; } else { status[SZOMB+1]++; } printf( "PROCESS %d %c\n", p->p_pid, mystate[p->p_pid] ); if ( p->p_starttime.tv_sec > 1200000000 && p->p_starttime.tv_sec < 2000000000 ) { printf( "starttime %10u.%06u", p->p_starttime.tv_sec, p->p_starttime.tv_usec ); } else { printf( "forw %010p back %010p", p->p_forw, p->p_back ); } printf( " vmspace %010p sigacts %010p\n", p->p_vmspace, p->p_sigacts ); printf( "flag %08x stat %08x pid %d oppid %d dupfd %d\n", p->p_flag, p->p_stat, p->p_pid, p->p_oppid, p->p_dupfd ); printf( "user stack %010p exit thread %010p debugger %d sigwait %d\n", (void*) p->user_stack, p->exit_thread, p->p_debugger, p->sigwait ); printf( "estcpu %u cpticks %d pctcpu %.2f sleep address (wchan) %010p\n", p->p_estcpu, p->p_cpticks, p->p_pctcpu / ((double) LSCALE), p->p_wchan ); printf( "wmesg \"%s\"", p->p_wmesg ? 
p->p_wmesg : "(null)" ); printf( " swtime %u slptime %u\n", p->p_swtime, p->p_slptime ); printf( "realtimer interval %.6f value %.6f", p->p_realtimer.it_interval.tv_sec + p->p_realtimer.it_interval.tv_usec / 1E6, p->p_realtimer.it_value.tv_sec + p->p_realtimer.it_value.tv_usec / 1E6 ); printf( " rtime %.6f\n", p->p_rtime.tv_sec + ( p->p_rtime.tv_usec / 1E6 ) ); printf( "uticks %llu sticks %llu iticks %llu traceflags %x %010p\n", p->p_uticks, p->p_sticks, p->p_iticks, p->p_traceflag, p->p_tracep ); printf( "siglist %x textvp %010p noswap %d\n", p->p_siglist, p->p_textvp, p->p_holdcnt ); printf( "sigmask %08x sigignore %08x sigcatch %08x\n", p->p_sigmask, p->p_sigignore, p->p_sigcatch ); printf( "priority %u usrpri %u nice %d command \"%s\"\n", p->p_priority, p->p_usrpri, p->p_nice, p->p_comm ); printf( "progress group %010p u-area %010p x-stat %04x acflag %04x\n", p->p_pgrp, p->p_addr, p->p_xstat, p->p_acflag ); printf( "proc address %010p session %010p\n", e->e_paddr, e->e_sess ); printf( "real uid %d svuid %d rgid %d svgid %d\n", e->e_pcred.p_ruid, e->e_pcred.p_svuid, e->e_pcred.p_rgid, e->e_pcred.p_svgid ); hexdump( &e->e_vm, sizeof(struct vmspace) ); printf( "ppid %d pgid %d tpgid %d job cc %hd tty %8x\n", e->e_ppid, e->e_pgid, e->e_tpgid, e->e_jobc, e->e_tdev ); printf( "tty session %010p wchan msg \"%s\" login \"%s\"\n", e->e_tsess, e->e_wmesg, e->e_login ); printf( "size %d rss %hd refs %hd swrss %hd flag %08x\n", e->e_xsize, e->e_xrssize, e->e_xccount, e->e_xswrss, e->e_flag ); putchar( '\n' ); } int main( int argc, char* argv[] ) { int i, mib[4]; size_t len; struct kinfo_proc* kp; if ( argc > 1 ) { /* do specific processes */ for ( i=1; i<argc; ++i ) { mib[0] = CTL_KERN; mib[1] = KERN_PROC; mib[2] = KERN_PROC_PID; mib[3] = strcmp(argv[i],"self") ? 
atoi(argv[i]) : getpid(); if ( sysctl( mib, 4, NULL, &len, NULL, 0 ) == -1 ) { perror("sysctl"); return 1; } else { void* buffer = malloc(len); if ( sysctl( mib, 4, buffer, &len, NULL, 0 ) == -1 ) { fprintf( stderr, "sysctl %d: %s\n\n", mib[3], strerror(errno) ); } else { printkproc( (struct kinfo_proc*) buffer ); } free( buffer ); } } /* for */ } else { /* no args, do all */ read_ps(); /* Fill out the first three components of the mib */ mib[0] = CTL_KERN; mib[1] = KERN_PROC; mib[2] = KERN_PROC_ALL; if ( sysctl( mib, 3, NULL, &len, NULL, 0 ) == -1 ) { perror( "sysctl" ); return 1; } else if ( len > 0 ) { void* buffer = malloc( len + sizeof(struct kinfo_proc) ); if ( sysctl( mib, 3, buffer, &len, NULL, 0 ) == -1 ) { perror( "sysctl" ); return 1; } /* len counts bytes, so the end-of-buffer bound must be taken in char units before casting back */ for ( kp = (struct kinfo_proc*) buffer; kp->kp_proc.p_pid && kp < (struct kinfo_proc*) ((char*) buffer + len); ++kp ) { printkproc(kp); } free(buffer); for ( i=0; i<SZOMB+1; ++i ) printf( "%-8s %u\n", c_state[i], status[i] ); } } return 0; } ����������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/basic.h����������0000644�0001750�0001750�00000007337�11757531137�024277� 0����������ustar �rynge����������rynge����������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2008 University of Chicago and The University of * Southern California. All rights reserved. 
*/ #ifndef _MACHINE_BASIC_H #define _MACHINE_BASIC_H #include <sys/types.h> #include <sys/time.h> #include <sys/utsname.h> #include <unistd.h> #ifndef SYS_NMLN #ifdef _SYS_NAMELEN /* DARWIN */ #define SYS_NMLN 65 #else #error "No SYS_NMLN nor _SYS_NAMELEN: check <sys/utsname.h>" #endif /* _SYS_NAMELEN */ #endif /* SYS_NMLN */ typedef struct { /* common (shared) portion */ const char* provider; /* name of this provider */ struct timeval stamp; /* when was this snapshot taken */ struct utsname uname; /* general system information */ unsigned long pagesize; /* size of a page in bytes */ /* fall-back provider-specific portion */ #ifdef _SC_PHYS_PAGES unsigned long long ram_total; #endif /* _SC_PHYS_PAGES */ #ifdef _SC_AVPHYS_PAGES unsigned long long ram_avail; #endif /* _SC_AVPHYS_PAGES */ #ifdef _SC_NPROCESSORS_CONF unsigned short cpu_total; #endif /* _SC_NPROCESSORS_CONF */ #ifdef _SC_NPROCESSORS_ONLN unsigned short cpu_online; #endif /* _SC_NPROCESSORS_ONLN */ } MachineBasicInfo; extern void* initBasicMachine( void ); /* purpose: initialize the data structure. * returns: initialized MachineBasicInfo structure. */ extern int startBasicMachine( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const MachineBasicInfo* machine ); /* purpose: start format the information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * machine (IN): basic machine structure info to print. * returns: number of characters put into buffer (buffer length) */ extern int finalBasicMachine( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const MachineBasicInfo* machine ); /* purpose: finish format the information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * machine (IN): basic machine structure info to print. * returns: number of characters put into buffer (buffer length) */ extern int printBasicMachine( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const void* data ); /* purpose: format the machine information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * data (IN): MachineBasicInfo info to print. * returns: number of characters put into buffer (buffer length) */ extern void deleteBasicMachine( void* data ); /* purpose: destructor * paramtr: data (IO): valid MachineBasicInfo structure to destroy. 
*/ #endif /* _MACHINE_BASIC_H */ �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/darwin-kvm.c�����������������������������0000644�0001750�0001750�00000010455�11757531137�025263� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <sys/types.h> #include <fcntl.h> #include <sys/sysctl.h> #include <kvm.h> #include <errno.h> #include <stdio.h> #include <string.h> #include <stdlib.h> #include <unistd.h> int hexdump( void* area, size_t size ) /* purpose: dump a memory area in old-DOS style hex chars and printable ASCII * paramtr: area (IN): pointer to area start * size (IN): extent of area to print * returns: number of byte written */ { static const char digit[16] = "0123456789ABCDEF"; char a[82]; unsigned char b[18]; size_t i, j; unsigned char c; ssize_t result = 0; unsigned char* buffer = (unsigned char*) area; for ( i=0; i<size; i+=16 ) { memset( a, 0, sizeof(a) ); memset( b, 0, sizeof(b) ); sprintf( a, "%04X: ", i ); for ( j=0; j<16 && j+i<size; ++j ) { c = (unsigned char) buffer[i+j]; a[6+j*3] = digit[ c >> 4 ]; a[7+j*3] = digit[ c & 15 ]; a[8+j*3] = ( j == 7 ? '-' : ' ' ); b[j] = (char) (c < 32 || c >= 127 ? '.' : c); } for ( ; j<16; ++j ) { a[6+j*3] = a[7+j*3] = a[8+j*3] = b[j] = ' '; } strncat( a, (char*) b, sizeof(a) ); strncat( a, "\n", sizeof(a) ); #if 0 result += write( STDOUT_FILENO, a, strlen(a) ); #else result += fputs( a, stdout ); #endif } return result; } void printkproc( struct kinfo_proc* kp ) { struct extern_proc* p = &kp->kp_proc; struct eproc* e = &kp->kp_eproc; printf( "PROCESS %d\n", p->p_pid ); if ( p->p_starttime.tv_sec > 1200000000 && p->p_starttime.tv_sec < 2000000000 ) { printf( "starttime %10u.%06u", p->p_starttime.tv_sec, p->p_starttime.tv_usec ); } else { printf( "forw %010p back %010p", p->p_forw, p->p_back ); } printf( " vmspace %010p sigacts %010p\n", p->p_vmspace, p->p_sigacts ); printf( "flag %08x stat %08x pid %d oppid %d dupfd %d\n", p->p_flag, p->p_stat, p->p_pid, p->p_oppid, p->p_dupfd ); printf( "user stack %010p exit thread %010p debugger %d sigwait %d\n", (void*) p->user_stack, p->exit_thread, p->p_debugger, p->sigwait ); printf( "estcpu %u cpticks %d pctcpu %.2f sleep address (wchan) %010p\n", p->p_estcpu, p->p_cpticks, p->p_pctcpu / ((double) LSCALE), p->p_wchan ); printf( "wmesg \"%s\"", p->p_wmesg ? 
p->p_wmesg : "(null)" ); printf( " swtime %u slptime %u\n", p->p_swtime, p->p_slptime ); printf( "realtimer interval %.6f value %.6f", p->p_realtimer.it_interval.tv_sec + p->p_realtimer.it_interval.tv_usec / 1E6, p->p_realtimer.it_value.tv_sec + p->p_realtimer.it_value.tv_usec / 1E6 ); printf( " rtime %.6f\n", p->p_rtime.tv_sec + ( p->p_rtime.tv_usec / 1E6 ) ); printf( "uticks %llu sticks %llu iticks %llu traceflags %x %010p\n", p->p_uticks, p->p_sticks, p->p_iticks, p->p_traceflag, p->p_tracep ); printf( "siglist %x textvp %010p noswap %d\n", p->p_siglist, p->p_textvp, p->p_holdcnt ); printf( "sigmask %08x sigignore %08x sigcatch %08x\n", p->p_sigmask, p->p_sigignore, p->p_sigcatch ); printf( "priority %u usrpri %u nice %d command \"%s\"\n", p->p_priority, p->p_usrpri, p->p_nice, p->p_comm ); printf( "progress group %010p u-area %010p x-stat %04x acflag %04x\n", p->p_pgrp, p->p_addr, p->p_xstat, p->p_acflag ); printf( "proc address %010p session %010p\n", e->e_paddr, e->e_sess ); printf( "real uid %d svuid %d rgid %d svgid %d\n", e->e_pcred.p_ruid, e->e_pcred.p_svuid, e->e_pcred.p_rgid, e->e_pcred.p_svgid ); hexdump( &e->e_vm, sizeof(struct vmspace) ); printf( "ppid %d pgid %d tpgid %d job cc %hd tty %8x\n", e->e_ppid, e->e_pgid, e->e_tpgid, e->e_jobc, e->e_tdev ); printf( "tty session %010p wchan msg \"%s\" login \"%s\"\n", e->e_tsess, e->e_wmesg, e->e_login ); printf( "size %d rss %hd refs %hd swrss %hd flag %08x\n", e->e_xsize, e->e_xrssize, e->e_xccount, e->e_xswrss, e->e_flag ); putchar( '\n' ); } int main( int argc, char* argv[] ) { pid_t pid = argc > 1 ? ( strcmp(argv[1],"self") ? atoi(argv[1]) : getpid() ) : getpid(); kvm_t* kd = kvm_open( NULL, "/dev/vm-main", NULL, O_RDONLY, argv[0] ); if ( kd ) { int i, count = 0; struct kinfo_proc* kp = kvm_getprocs( kd, KERN_PROC_PID, pid, &count ); if ( kp == NULL ) { perror( "kvm_getprocs" ); return 1; } else { for ( i=0; i<count; ++i ) { printkproc( kp ); } } kvm_close(kd); } return 0; } �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/linux.h����������������������������������0000644�0001750�0001750�00000006670�11757531137�024354� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2008 University of Chicago and The University of * Southern California. All rights reserved. 
*/ #ifndef _MACHINE_LINUX_H #define _MACHINE_LINUX_H #include <sys/types.h> #include <sys/time.h> #include "basic.h" #include <stdint.h> typedef enum { S_RUNNING, S_SLEEPING, S_WAITING, S_STOPPED, S_ZOMBIE, S_OTHER, MAX_STATE } LinuxState; typedef struct { /* summaries from procfs status file */ uint64_t size; uint64_t rss; unsigned total; unsigned state[MAX_STATE]; } LinuxStatus; typedef struct { /* common (shared) portion */ MachineBasicInfo* basic; /* * provider-specific portion */ /* from sysinfo(2) call */ uint64_t ram_total; uint64_t ram_free; uint64_t ram_shared; uint64_t ram_buffer; uint64_t swap_total; uint64_t swap_free; /* from /proc/loadavg */ float load[3]; /* from /proc/cpuinfo */ unsigned short cpu_count; unsigned short cpu_online; unsigned long megahertz; char vendor_id[16]; char model_name[80]; /* from /proc/uptime */ double idletime; struct timeval boottime; /* from /proc/ ** /status */ LinuxStatus procs; LinuxStatus tasks; } MachineLinuxInfo; extern void gather_loadavg( float load[3] ); /* purpose: collect load averages * primary: provide functionality for monitoring * paramtr: load (OUT): array of 3 floats */ extern void gather_meminfo( uint64_t* ram_total, uint64_t* ram_free, uint64_t* ram_shared, uint64_t* ram_buffer, uint64_t* swap_total, uint64_t* swap_free ); /* purpose: collect system-wide memory usage * primary: provide functionality for monitoring * paramtr: ram_total (OUT): all RAM * ram_free (OUT): free RAM * ram_shared (OUT): unused? * ram_buffer (OUT): RAM used for buffers by kernel * swap_total (OUT): all swap space * swap_free (OUT): free swap space */ /* * the following 3 functions are required by the "machine" API */ extern void* initMachine( void ); /* purpose: initialize the data structure. * returns: initialized MachineLinuxInfo structure. */ extern int printMachine( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const void* data ); /* purpose: format the information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * data (IN): MachineLinuxInfo info to print. * returns: number of characters put into buffer (buffer length) */ extern void deleteMachine( void* data ); /* purpose: destructor * paramtr: data (IO): valid MachineLinuxInfo structure to destroy. */ #endif /* _MACHINE_LINUX_H */ ������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/basic.c����������������������������������0000644�0001750�0001750�00000017311�11757531137�024263� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "basic.h" #include "../tools.h" #include <ctype.h> #include <string.h> #include <stdio.h> #include <stdlib.h> #include <unistd.h> #include <time.h> static const char* RCS_ID = "$Id$"; extern int isExtended; /* timestamp format concise or extended */ extern int isLocal; /* timestamp time zone, UTC or local */ static size_t mystrlen( const char* s, size_t max ) { /* array version */ size_t i = 0; while ( i < max && s[i] ) ++i; return i; } static char* mytolower( char* s, size_t max ) { /* array version */ size_t i; for ( i=0; i < max && s[i]; ++i ) s[i] = tolower(s[i]); return s; } void* initBasicMachine(void) /* purpose: initialize the data structure. * returns: initialized MachineBasicInfo structure. */ { long result; MachineBasicInfo* p = (MachineBasicInfo*) malloc(sizeof(MachineBasicInfo)); /* extra sanity check */ if ( p == NULL ) { fputs( "initBasicMachine c'tor failed\n", stderr ); return NULL; } else memset( p, 0, sizeof(MachineBasicInfo) ); /* name of this provider -- overwritten by importers */ p->provider = "basic"; /* start of data gathering */ now( &p->stamp ); if ( uname( &p->uname ) == -1 ) { memset( &p->uname, 0, sizeof(p->uname) ); } else { /* remove mixed case */ mytolower( p->uname.sysname, SYS_NMLN ); mytolower( p->uname.nodename, SYS_NMLN ); mytolower( p->uname.machine, SYS_NMLN ); } p->pagesize = getpagesize(); #ifdef _SC_PHYS_PAGES if ( (result=sysconf(_SC_PHYS_PAGES)) != -1 ) { p->ram_total = result; p->ram_total *= p->pagesize; } #endif /* _SC_PHYS_PAGES */ #ifdef _SC_AVPHYS_PAGES if ( (result=sysconf(_SC_AVPHYS_PAGES)) != -1 ) { p->ram_avail = result; p->ram_avail *= p->pagesize; } #endif /* _SC_AVPHYS_PAGES */ #ifdef _SC_NPROCESSORS_CONF if ( (result=sysconf(_SC_NPROCESSORS_CONF)) != -1 ) p->cpu_total = result; #endif /* _SCN_PROCESSORS_CONF */ #ifdef _SC_NPROCESSORS_ONLN if ( (result=sysconf(_SC_NPROCESSORS_ONLN)) != -1 ) p->cpu_online = result; #endif /* _SC_NPROCESSORS_ONLN */ return p; } int startBasicMachine( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const MachineBasicInfo* machine ) /* purpose: format the uname information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * machine (IN): basic machine structure info to print. 
* returns: number of characters put into buffer (buffer length) */ { /* sanity check */ if ( machine == NULL ) return *len; /* <machine> open tag */ myprint( buffer, size, len, "%*s<%s page-size=\"%u\">\n", indent-2, "", tag, machine->pagesize ); /* <stamp> */ myprint( buffer, size, len, "%*s<stamp>", indent, "" ); mydatetime( buffer, size, len, isLocal, isExtended, machine->stamp.tv_sec, machine->stamp.tv_usec ); append( buffer, size, len, "</stamp>\n" ); /* <uname> */ myprint( buffer, size, len, "%*s<uname system=\"", indent, "" ); full_append( buffer, size, len, machine->uname.sysname, mystrlen(machine->uname.sysname,SYS_NMLN) ); append( buffer, size, len, "\" nodename=\"" ); full_append( buffer, size, len, machine->uname.nodename, mystrlen(machine->uname.nodename,SYS_NMLN) ); append( buffer, size, len, "\" release=\"" ); full_append( buffer, size, len, machine->uname.release, mystrlen(machine->uname.release,SYS_NMLN) ); append( buffer, size, len, "\" machine=\"" ); full_append( buffer, size, len, machine->uname.machine, mystrlen(machine->uname.machine,SYS_NMLN) ); append( buffer, size, len, "\">" ); full_append( buffer, size, len, machine->uname.version, mystrlen(machine->uname.version,SYS_NMLN) ); append( buffer, size, len, "</uname>\n" ); /* <"provider"> grouping tag */ myprint( buffer, size, len, "%*s<%s>\n", indent-1, "", machine->provider ); return *len; } int finalBasicMachine( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const MachineBasicInfo* machine ) /* purpose: finish format the information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * machine (IN): basic machine structure info to print. * returns: number of characters put into buffer (buffer length) */ { /* sanity check */ if ( machine == NULL ) return *len; /* </"provider"> close tag */ myprint( buffer, size, len, "%*s</%s>\n", indent-1, "", machine->provider ); /* </machine> close tag */ myprint( buffer, size, len, "%*s</%s>\n", indent-2, "", tag ); return *len; } int printBasicMachine( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const void* data ) /* purpose: format the job information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * machine (IN): machine info to print. 
* returns: number of characters put into buffer (buffer length) */ { const MachineBasicInfo* ptr = (const MachineBasicInfo*) data; if ( ptr ) { char b[32]; startBasicMachine( buffer, size, len, indent+2, tag, ptr ); #if defined(_SC_PHYS_PAGES) || defined(_SC_AVPHYS_PAGES) myprint( buffer, size, len, "%*s<ram", indent, "" ); #ifdef _SC_PHYS_PAGES myprint( buffer, size, len, " total=\"%s\"", sizer( b, 32, sizeof(ptr->ram_total), &(ptr->ram_total) ) ); #endif /* _SC_PHYS_PAGES */ #ifdef _SC_AVPHYS_PAGES myprint( buffer, size, len, " avail=\"%s\"", sizer( b, 32, sizeof(ptr->ram_avail), &(ptr->ram_avail) ) ); #endif /* _SC_AVPHYS_PAGES */ append( buffer, size, len, "/>\n" ); #endif /* _SC_PHYS_PAGES || _SC_AVPHYS_PAGES */ #if defined(_SC_NPROCESSORS_CONF) || defined(_SC_NPROCESSORS_ONLN) myprint( buffer, size, len, "%*s<cpu", indent, "" ); #ifdef _SC_NPROCESSORS_CONF myprint( buffer, size, len, " total=\"%s\"", sizer( b, 32, sizeof(ptr->cpu_total), &(ptr->cpu_total) ) ); #endif /* _SCN_PROCESSORS_CONF */ #ifdef _SC_NPROCESSORS_ONLN myprint( buffer, size, len, " online=\"%s\"", sizer( b, 32, sizeof(ptr->cpu_online), &(ptr->cpu_online) ) ); #endif /* _SC_NPROCESSORS_ONLN */ append( buffer, size, len, "/>\n" ); #endif /* _SC_NPROCESSORS_CONF || _SC_NPROCESSORS_ONLN */ finalBasicMachine( buffer, size, len, indent+2, tag, ptr ); } return *len; } void deleteBasicMachine( void* data ) /* purpose: destructor * paramtr: data (IO): valid MachineInfo structure to destroy. */ { #ifdef EXTRA_DEBUG fprintf( stderr, "# deleteBasicMachineInfo(%p)\n", data ); #endif if ( data ) free(data); } �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/darwin-sysctl.txt������������������������0000644�0001750�0001750�00000040325�11757531137�026403� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������kern.exec: unknown type returned kern.ostype = Darwin kern.osrelease = 9.4.0 kern.osrevision = 199506 kern.version = Darwin Kernel Version 9.4.0: Mon Jun 9 19:30:53 PDT 2008; root:xnu-1228.5.20~1/RELEASE_I386 kern.maxvnodes = 33792 kern.maxproc = 532 kern.maxfiles = 12288 kern.argmax = 262144 kern.securelevel = 0 kern.hostname = chaapaai.local kern.hostid = 0 kern.clockrate: hz = 100, tick = 10000, profhz = 100, stathz = 100 kern.posix1version = 200112 kern.ngroups = 16 kern.job_control = 1 kern.saved_ids = 1 kern.boottime = Thu Aug 21 17:28:49 2008 kern.nisdomainname = kern.maxfilesperproc = 10240 kern.maxprocperuid = 266 kern.dummy = 0 kern.dummy = 0 kern.usrstack = -1073741824 kern.dummy = 0 kern.netboot = 0 kern.dummy = 0 kern.dummy = 0 kern.aiomax = 90 kern.aioprocmax = 16 kern.aiothreads = 4 kern.corefile = /cores/core.%P kern.coredump = 1 kern.sugid_coredump = 0 kern.delayterm = 0 kern.shreg_private = 0 kern.proc_low_pri_io = 0 kern.usrstack64 = 3221225472 kern.nx = 1 kern.procname = kern.speculative_reads_disabled = 0 kern.osversion = 9E17 kern.safeboot = 0 kern.rage_vnode = 0 vfs.nfs has 1 mounted instance 
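Most entries in the dump that follows can be fetched programmatically with sysctlbyname(3); variable-length values use the same two-call pattern (NULL buffer first to learn the size) that explore() in darwin-proc.c uses for binary nodes. A minimal sketch, assuming a Darwin host:

#include <sys/types.h>
#include <sys/sysctl.h>
#include <stdio.h>
#include <stdlib.h>

int main( void )
{
  size_t len = 0;
  /* first call with a NULL buffer only reports the required length */
  if ( sysctlbyname( "kern.version", NULL, &len, NULL, 0 ) == -1 ) {
    perror( "sysctlbyname" );
    return 1;
  }
  char* version = malloc( len );
  if ( version == NULL ) return 1;
  /* second call fills the buffer; len is updated to the actual size */
  if ( sysctlbyname( "kern.version", version, &len, NULL, 0 ) == 0 )
    printf( "kern.version = %s\n", version );
  free( version );
  return 0;
}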
vfs.fdesc has 1 mounted instance vfs.hfs has 1 mounted instance vfs.devfs has 1 mounted instance vfs.autofs has 2 mounted instances hw.machine = i386 hw.model = MacBookPro1,1 hw.ncpu = 2 hw.byteorder = 1234 hw.physmem = 2147483648 hw.usermem = 1921191936 hw.pagesize = 4096 hw.epoch = 0 hw.vectorunit = 1 hw.busfrequency = 664000000 hw.cpufrequency = 2000000000 hw.cachelinesize = 64 hw.l1icachesize = 32768 hw.l1dcachesize = 32768 hw.l2settings = 1 hw.l2cachesize = 2097152 hw.tbfrequency = 1000000000 hw.memsize = 2147483648 hw.availcpu = 2 user.cs_path = /usr/bin:/bin:/usr/sbin:/sbin user.bc_base_max = 99 user.bc_dim_max = 2048 user.bc_scale_max = 99 user.bc_string_max = 1000 user.coll_weights_max = 2 user.expr_nest_max = 32 user.line_max = 2048 user.re_dup_max = 255 user.posix2_version = 200112 user.posix2_c_bind = 0 user.posix2_c_dev = 0 user.posix2_char_term = 0 user.posix2_fort_dev = 0 user.posix2_fort_run = 0 user.posix2_localedef = 0 user.posix2_sw_dev = 0 user.posix2_upe = 0 user.stream_max = 20 user.tzname_max = 255 kern.ostype: Darwin kern.osrelease: 9.4.0 kern.osrevision: 199506 kern.version: Darwin Kernel Version 9.4.0: Mon Jun 9 19:30:53 PDT 2008; root:xnu-1228.5.20~1/RELEASE_I386 kern.maxvnodes: 33792 kern.maxproc: 532 kern.maxfiles: 12288 kern.argmax: 262144 kern.securelevel: 0 kern.hostname: chaapaai.local kern.hostid: 0 kern.clockrate: { hz = 100, tick = 10000, tickadj = 1, profhz = 100, stathz = 100 } kern.posix1version: 200112 kern.ngroups: 16 kern.job_control: 1 kern.saved_ids: 1 kern.boottime: { sec = 1219364929, usec = 0 } Thu Aug 21 17:28:49 2008 kern.nisdomainname: kern.maxfilesperproc: 10240 kern.maxprocperuid: 266 kern.ipc.maxsockbuf: 8388608 kern.ipc.sockbuf_waste_factor: 8 kern.ipc.somaxconn: 128 kern.ipc.nmbclusters: 32768 kern.ipc.soqlimitcompat: 1 kern.ipc.mb_normalized: 0 kern.ipc.sosendjcl_ignore_capab: 0 kern.ipc.sosendjcl: 1 kern.ipc.sorecvmincopy: 16384 kern.ipc.sosendminchain: 16384 kern.ipc.soqlencomp: 0 kern.ipc.njclbytes: 16384 kern.ipc.njcl: 10920 kern.ipc.sbspace_factor: 8 kern.ipc.maxsockets: 512 kern.ipc.sendfileuiobufs: 64 kern.dummy: 0 kern.usrstack: -1073741824 kern.netboot: 0 kern.sysv.shmall: 1024 kern.sysv.shmseg: 8 kern.sysv.shmmni: 32 kern.sysv.shmmin: 1 kern.sysv.shmmax: 4194304 kern.sysv.semume: 10 kern.sysv.semmsl: 87381 kern.sysv.semmnu: 87381 kern.sysv.semmns: 87381 kern.sysv.semmni: 87381 kern.exec.archhandler.powerpc: /usr/libexec/oah/translate kern.aiomax: 90 kern.aioprocmax: 16 kern.aiothreads: 4 kern.corefile: /cores/core.%P kern.coredump: 1 kern.sugid_coredump: 0 kern.delayterm: 0 kern.shreg_private: 0 kern.proc_low_pri_io: 0 kern.posix.sem.max: 10000 kern.usrstack64: 3221225472 kern.nx: 1 kern.tfp.policy: 2 kern.procname: kern.speculative_reads_disabled: 0 kern.osversion: 9E17 kern.safeboot: 0 kern.lctx.max: 8192 kern.lctx.count: 0 kern.lctx.last: 1 kern.rage_vnode: 0 kern.tty.ptmx_max: 127 kern.sleeptime: { sec = 1219886549, usec = 393448 } Wed Aug 27 18:22:29 2008 kern.waketime: { sec = 1219889829, usec = 24 } Wed Aug 27 19:17:09 2008 kern.hibernatefile: /var/vm/sleepimage kern.bootsignature: 4893b37e17683f858fa4f4d356c8c970e37d1745 kern.hibernatemode: 31 kern.maxnbuf: 10485 kern.nbuf: 10485 kern.flush_cache_on_write: 0 kern.always_do_fullfsync: 0 kern.sugid_scripts: 0 kern.affinity_sets_mapping: 1 kern.affinity_sets_enabled: 1 kern.singleuser: 0 kern.bootargs: kern.msgbuf: 4096 kern.wq_timer_interval_msecs: 40 kern.wq_max_run_latency_usecs: 500 kern.wq_reduce_pool_window_usecs: 3000000 kern.wq_stalled_window_usecs: 20000 
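Structured values such as the kern.boottime entry above come back as binary records rather than text; boottime is a struct timeval, which explains its { sec = ..., usec = ... } rendering. A minimal sketch, assuming Darwin/BSD:

#include <sys/types.h>
#include <sys/sysctl.h>
#include <sys/time.h>
#include <stdio.h>
#include <time.h>

int main( void )
{
  int mib[2] = { CTL_KERN, KERN_BOOTTIME };
  struct timeval boot;
  size_t len = sizeof(boot);
  if ( sysctl( mib, 2, &boot, &len, NULL, 0 ) == -1 ) {
    perror( "sysctl" );
    return 1;
  }
  time_t sec = boot.tv_sec;
  /* ctime() yields the human-readable form shown in the dump */
  printf( "kern.boottime: { sec = %ld, usec = %ld } %s",
          (long) boot.tv_sec, (long) boot.tv_usec, ctime( &sec ) );
  return 0;
}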
kern.secure_kernel: 0 vm.loadavg: { 1.06 1.11 1.00 } vm.swapusage: total = 64.00M used = 19.09M free = 44.91M (encrypted) vm.cs_debug: 0 vm.cs_force_hard: 0 vm.cs_force_kill: 0 vm.user_wire_limit: 2147482648 vm.global_user_wire_limit: 2147482648 vm.cs_blob_size_max: 83520 vm.cs_blob_size_peak: 1213184 vm.cs_blob_count_peak: 186 vm.cs_blob_size: 808752 vm.cs_blob_count: 117 vm.cs_validation: 1 vm.vm_page_free_target: 2000 vm.shared_region_persistence: 1 vm.shared_region_version: 3 vm.shared_region_trace_level: 1 vm.allow_data_exec: 1 vm.allow_stack_exec: 0 vfs.generic.nfs.server.nfsd_thread_count: 0 vfs.generic.nfs.server.nfsd_thread_max: 0 vfs.generic.nfs.server.fsevents: 1 vfs.generic.nfs.server.user_stats: 1 vfs.generic.nfs.server.request_queue_length: 128 vfs.generic.nfs.server.reqcache_size: 64 vfs.generic.nfs.server.async: 0 vfs.generic.nfs.server.require_resv_port: 0 vfs.generic.nfs.server.wg_delay_v3: 0 vfs.generic.nfs.server.wg_delay: 1000 vfs.generic.nfs.client.max_async_writes: 128 vfs.generic.nfs.client.lockd_mounts: 0 vfs.generic.nfs.client.nfsiod_thread_count: 0 vfs.generic.nfs.client.nfsiod_thread_max: 16 vfs.generic.nfs.client.statfs_rate_limit: 10 vfs.generic.nfs.client.allow_async: 0 vfs.generic.nfs.client.access_cache_timeout: 60 vfs.generic.nfs.client.iosize: 1048576 vfs.generic.nfs.client.nextdowndelay: 30 vfs.generic.nfs.client.initialdowndelay: 12 net.local.stream.recvspace: 8192 net.local.stream.sendspace: 8192 net.local.dgram.recvspace: 4096 net.local.dgram.maxdgram: 2048 net.local.inflight: 0 net.inet.ip.portrange.hilast: 65535 net.inet.ip.portrange.hifirst: 49152 net.inet.ip.portrange.last: 65535 net.inet.ip.portrange.first: 49152 net.inet.ip.portrange.lowlast: 600 net.inet.ip.portrange.lowfirst: 1023 net.inet.ip.forwarding: 0 net.inet.ip.redirect: 1 net.inet.ip.ttl: 64 net.inet.ip.rtexpire: 1600 net.inet.ip.rtminexpire: 10 net.inet.ip.rtmaxcache: 128 net.inet.ip.sourceroute: 0 net.inet.ip.intr_queue_maxlen: 50 net.inet.ip.intr_queue_drops: 0 net.inet.ip.accept_sourceroute: 0 net.inet.ip.fastforwarding: 0 net.inet.ip.keepfaith: 0 net.inet.ip.gifttl: 30 net.inet.ip.subnets_are_local: 0 net.inet.ip.use_route_genid: 1 net.inet.ip.check_route_selfref: 1 net.inet.ip.dummynet.debug: 0 net.inet.ip.dummynet.red_max_pkt_size: 1500 net.inet.ip.dummynet.red_avg_pkt_size: 512 net.inet.ip.dummynet.red_lookup_depth: 256 net.inet.ip.dummynet.max_chain_len: 16 net.inet.ip.dummynet.expire: 1 net.inet.ip.dummynet.search_steps: 0 net.inet.ip.dummynet.searches: 0 net.inet.ip.dummynet.extract_heap: 0 net.inet.ip.dummynet.ready_heap: 0 net.inet.ip.dummynet.curr_time: 0 net.inet.ip.dummynet.hash_size: 64 net.inet.ip.fw.dyn_keepalive: 1 net.inet.ip.fw.dyn_short_lifetime: 5 net.inet.ip.fw.dyn_udp_lifetime: 10 net.inet.ip.fw.dyn_rst_lifetime: 1 net.inet.ip.fw.dyn_fin_lifetime: 1 net.inet.ip.fw.dyn_syn_lifetime: 20 net.inet.ip.fw.dyn_ack_lifetime: 300 net.inet.ip.fw.static_count: 1 net.inet.ip.fw.dyn_max: 4096 net.inet.ip.fw.dyn_count: 0 net.inet.ip.fw.curr_dyn_buckets: 256 net.inet.ip.fw.dyn_buckets: 256 net.inet.ip.fw.verbose_limit: 0 net.inet.ip.fw.verbose: 2 net.inet.ip.fw.debug: 0 net.inet.ip.fw.one_pass: 0 net.inet.ip.fw.autoinc_step: 100 net.inet.ip.fw.enable: 1 net.inet.ip.random_id: 1 net.inet.ip.linklocal.in.allowbadttl: 1 net.inet.ip.check_interface: 0 net.inet.ip.maxfrags: 2048 net.inet.ip.maxfragsperpacket: 128 net.inet.ip.maxfragpackets: 1024 net.inet.ip.maxchainsent: 35 net.inet.icmp.maskrepl: 0 net.inet.icmp.icmplim: 250 net.inet.icmp.timestamp: 0 net.inet.icmp.bmcastecho: 1 
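The vm.loadavg triple above is the same quantity that gather_loadavg() in linux.h obtains from /proc/loadavg on Linux; on BSD-family systems the portable wrapper is getloadavg(3). A short sketch:

#include <stdio.h>
#include <stdlib.h>

int main( void )
{
  double load[3];
  /* fills the 1-, 5- and 15-minute averages; returns the count filled */
  if ( getloadavg( load, 3 ) == -1 ) {
    fputs( "getloadavg failed\n", stderr );
    return 1;
  }
  printf( "vm.loadavg: { %.2f %.2f %.2f }\n", load[0], load[1], load[2] );
  return 0;
}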
net.inet.icmp.log_redirect: 0 net.inet.icmp.drop_redirect: 0 net.inet.tcp.rfc1323: 1 net.inet.tcp.rfc1644: 0 net.inet.tcp.mssdflt: 512 net.inet.tcp.keepidle: 7200000 net.inet.tcp.keepintvl: 75000 net.inet.tcp.sendspace: 65536 net.inet.tcp.recvspace: 65536 net.inet.tcp.keepinit: 75000 net.inet.tcp.v6mssdflt: 1024 net.inet.tcp.rexmt_thresh: 2 net.inet.tcp.rfc3465: 1 net.inet.tcp.maxseg_unacked: 8 net.inet.tcp.slowlink_wsize: 8192 net.inet.tcp.reass.overflows: 0 net.inet.tcp.reass.cursegments: 0 net.inet.tcp.reass.maxsegments: 2048 net.inet.tcp.drop_synfin: 1 net.inet.tcp.tcp_lq_overflow: 1 net.inet.tcp.delayed_ack: 3 net.inet.tcp.blackhole: 0 net.inet.tcp.log_in_vain: 0 net.inet.tcp.socket_unlocked_on_output: 1 net.inet.tcp.packetchain: 50 net.inet.tcp.ecn_negotiate_in: 0 net.inet.tcp.ecn_initiate_out: 0 net.inet.tcp.newreno: 0 net.inet.tcp.local_slowstart_flightsize: 8 net.inet.tcp.slowstart_flightsize: 1 net.inet.tcp.path_mtu_discovery: 1 net.inet.tcp.sack_globalholes: 0 net.inet.tcp.sack_globalmaxholes: 65536 net.inet.tcp.sack_maxholes: 128 net.inet.tcp.sack: 1 net.inet.tcp.rtt_min: 1 net.inet.tcp.background_io_enabled: 1 net.inet.tcp.isn_reseed_interval: 0 net.inet.tcp.strict_rfc1948: 0 net.inet.tcp.icmp_may_rst: 1 net.inet.tcp.pcbcount: 16 net.inet.tcp.do_tcpdrain: 0 net.inet.tcp.tcbhashsize: 4096 net.inet.tcp.minmssoverload: 0 net.inet.tcp.minmss: 216 net.inet.tcp.always_keepalive: 0 net.inet.tcp.msl: 15000 net.inet.tcp.background_io_trigger: 5 net.inet.tcp.sockthreshold: 64 net.inet.tcp.out_sw_cksum_bytes: 21779880 net.inet.tcp.out_sw_cksum: 218486 net.inet.tcp.in_sw_cksum_bytes: 411580222 net.inet.tcp.in_sw_cksum: 333375 net.inet.tcp.win_scale_factor: 3 net.inet.udp.checksum: 1 net.inet.udp.maxdgram: 9216 net.inet.udp.recvspace: 42080 net.inet.udp.pcbcount: 17 net.inet.udp.blackhole: 0 net.inet.udp.log_in_vain: 0 net.inet.udp.out_sw_cksum_bytes: 1299662 net.inet.udp.out_sw_cksum: 16807 net.inet.udp.in_sw_cksum_bytes: 11488410 net.inet.udp.in_sw_cksum: 95630 net.inet.ipsec.def_policy: 1 net.inet.ipsec.esp_trans_deflev: 1 net.inet.ipsec.esp_net_deflev: 1 net.inet.ipsec.ah_trans_deflev: 1 net.inet.ipsec.ah_net_deflev: 1 net.inet.ipsec.ah_cleartos: 1 net.inet.ipsec.ah_offsetmask: 0 net.inet.ipsec.dfbit: 0 net.inet.ipsec.ecn: 0 net.inet.ipsec.debug: 0 net.inet.ipsec.esp_randpad: -1 net.inet.ipsec.esp_port: 0 net.inet.ipsec.bypass: 1 net.inet.raw.recvspace: 8192 net.inet.raw.maxdgram: 8192 net.appletalk.routermix: 2000 net.link.generic.system.ifcount: 8 net.link.generic.system.dlil_input_sanity_check: 0 net.link.generic.system.multi_threaded_input: 1 net.link.ether.inet.send_conflicting_probes: 1 net.link.ether.inet.keep_announcements: 1 net.link.ether.inet.log_arp_warnings: 0 net.link.ether.inet.sendllconflict: 0 net.link.ether.inet.proxyall: 0 net.link.ether.inet.useloopback: 1 net.link.ether.inet.maxtries: 5 net.link.ether.inet.apple_hwcksum_rx: 1 net.link.ether.inet.apple_hwcksum_tx: 1 net.link.ether.inet.host_down_time: 20 net.link.ether.inet.max_age: 1200 net.link.ether.inet.prune_intvl: 300 net.key.debug: 0 net.key.spi_trycnt: 1000 net.key.spi_minval: 256 net.key.spi_maxval: 268435455 net.key.int_random: 60 net.key.larval_lifetime: 30 net.key.blockacq_count: 10 net.key.blockacq_lifetime: 20 net.key.esp_keymin: 256 net.key.esp_auth: 0 net.key.ah_keymin: 128 net.key.prefered_oldsa: 0 net.key.natt_keepalive_interval: 20 net.inet6.ip6.forwarding: 0 net.inet6.ip6.redirect: 1 net.inet6.ip6.hlim: 64 net.inet6.ip6.maxfragpackets: 1024 net.inet6.ip6.accept_rtadv: 0 net.inet6.ip6.keepfaith: 0 
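Scalar tunables like net.inet.tcp.sendspace above are plain ints, so a single call by name suffices; a sketch assuming Darwin/BSD (writing a value would pass the new value through the fourth and fifth arguments and generally requires root):

#include <sys/types.h>
#include <sys/sysctl.h>
#include <stdio.h>

int main( void )
{
  int sendspace;
  size_t len = sizeof(sendspace);
  if ( sysctlbyname( "net.inet.tcp.sendspace", &sendspace, &len,
                     NULL, 0 ) == -1 ) {
    perror( "sysctlbyname" );
    return 1;
  }
  printf( "net.inet.tcp.sendspace: %d\n", sendspace );
  return 0;
}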
net.inet6.ip6.log_interval: 5 net.inet6.ip6.hdrnestlimit: 50 net.inet6.ip6.dad_count: 1 net.inet6.ip6.auto_flowlabel: 1 net.inet6.ip6.defmcasthlim: 1 net.inet6.ip6.gifhlim: 0 net.inet6.ip6.kame_version: 20010528/apple-darwin net.inet6.ip6.use_deprecated: 1 net.inet6.ip6.rr_prune: 5 net.inet6.ip6.v6only: 0 net.inet6.ip6.rtexpire: 3600 net.inet6.ip6.rtminexpire: 10 net.inet6.ip6.rtmaxcache: 128 net.inet6.ip6.use_tempaddr: 0 net.inet6.ip6.temppltime: 86400 net.inet6.ip6.tempvltime: 604800 net.inet6.ip6.auto_linklocal: 1 net.inet6.ip6.maxfrags: 8192 net.inet6.ip6.fw.verbose_limit: 0 net.inet6.ip6.fw.verbose: 0 net.inet6.ip6.fw.debug: 0 net.inet6.ip6.fw.enable: 1 net.inet6.ipsec6.def_policy: 1 net.inet6.ipsec6.esp_trans_deflev: 1 net.inet6.ipsec6.esp_net_deflev: 1 net.inet6.ipsec6.ah_trans_deflev: 1 net.inet6.ipsec6.ah_net_deflev: 1 net.inet6.ipsec6.ecn: 0 net.inet6.ipsec6.debug: 0 net.inet6.ipsec6.esp_randpad: -1 net.inet6.icmp6.rediraccept: 1 net.inet6.icmp6.redirtimeout: 600 net.inet6.icmp6.nd6_prune: 1 net.inet6.icmp6.nd6_delay: 5 net.inet6.icmp6.nd6_umaxtries: 3 net.inet6.icmp6.nd6_mmaxtries: 3 net.inet6.icmp6.nd6_useloopback: 1 net.inet6.icmp6.nodeinfo: 3 net.inet6.icmp6.errppslimit: 100 net.inet6.icmp6.nd6_maxnudhint: 0 net.inet6.icmp6.nd6_debug: 0 net.pstimeout: 20 20 net.athaggrqmin: 1 1 net.athaggrfmax: 28 28 net.athbgscan: 1 1 net.athCCAThreshold: 28 28 net.athpowermode: 0 0 net.athvendorie: 1 1 net.athdupie: 1 1 net.athaddbaignore: 0 0 net.athppmenable: 1 net.athforceBias: 2 2 net.athfixedDropThresh: 150 150 net.athqdepth: 0 0 debug.lowpri_max_waiting_msecs: 200 debug.lowpri_max_window_msecs: 200 debug.lowpri_IO_window_inc: 50 debug.lowpri_IO_initial_window_msecs: 100 debug.bpf_maxdevices: 256 debug.bpf_maxbufsize: 524288 debug.bpf_bufsize: 4096 debug.iokit: 0 debug.odls: 0 debug.net80211: 0 0 debug.athdriver: 0 0 hw.ncpu: 2 hw.byteorder: 1234 hw.memsize: 2147483648 hw.activecpu: 2 hw.optional.x86_64: 0 hw.optional.sse4_2: 0 hw.optional.sse4_1: 0 hw.optional.supplementalsse3: 0 hw.optional.sse3: 1 hw.optional.sse2: 1 hw.optional.sse: 1 hw.optional.mmx: 1 hw.optional.floatingpoint: 1 hw.packages: 1 hw.tbfrequency: 1000000000 hw.l2cachesize: 2097152 hw.l1dcachesize: 32768 hw.l1icachesize: 32768 hw.cachelinesize: 64 hw.cpufrequency_max: 2000000000 hw.cpufrequency_min: 2000000000 hw.cpufrequency: 2000000000 hw.busfrequency_max: 664000000 hw.busfrequency_min: 664000000 hw.busfrequency: 664000000 hw.pagesize: 4096 hw.cachesize: 2147483648 32768 2097152 0 0 0 0 0 0 0 hw.cacheconfig: 2 1 2 0 0 0 0 0 0 0 hw.cpufamily: 1943433984 hw.cpu64bit_capable: 0 hw.cpusubtype: 4 hw.cputype: 7 hw.logicalcpu_max: 2 hw.logicalcpu: 2 hw.physicalcpu_max: 2 hw.physicalcpu: 2 machdep.pmap.hashmax: 16 machdep.pmap.hashcnts: 27431795 machdep.pmap.hashwalks: 26948459 machdep.cpu.address_bits.virtual: 32 machdep.cpu.address_bits.physical: 32 machdep.cpu.cache.size: 2048 machdep.cpu.cache.L2_associativity: 6 machdep.cpu.cache.linesize: 64 machdep.cpu.arch_perf.fixed_width: 0 machdep.cpu.arch_perf.fixed_number: 0 machdep.cpu.arch_perf.events: 0 machdep.cpu.arch_perf.events_number: 7 machdep.cpu.arch_perf.width: 40 machdep.cpu.arch_perf.number: 2 machdep.cpu.arch_perf.version: 1 machdep.cpu.thermal.ACNT_MCNT: 1 machdep.cpu.thermal.thresholds: 2 machdep.cpu.thermal.dynamic_acceleration: 0 machdep.cpu.thermal.sensor: 1 machdep.cpu.mwait.sub_Cstates: 139808 machdep.cpu.mwait.extensions: 3 machdep.cpu.mwait.linesize_max: 64 machdep.cpu.mwait.linesize_min: 64 machdep.cpu.cores_per_package: 2 
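The CPU identity entries (hw.ncpu earlier, machdep.cpu.brand_string just below) combine both access styles: a fixed numeric MIB for the hw node and a name lookup for the machdep leaf. A minimal sketch, assuming Darwin; the 128-byte brand buffer is a guess that comfortably fits the strings seen here:

#include <sys/types.h>
#include <sys/sysctl.h>
#include <stdio.h>

int main( void )
{
  int mib[2] = { CTL_HW, HW_NCPU };
  int ncpu;
  size_t len = sizeof(ncpu);
  char brand[128];
  size_t blen = sizeof(brand);

  if ( sysctl( mib, 2, &ncpu, &len, NULL, 0 ) == 0 )
    printf( "hw.ncpu: %d\n", ncpu );
  /* machdep.* has no stable numeric MIB, so go through the name */
  if ( sysctlbyname( "machdep.cpu.brand_string", brand, &blen, NULL, 0 ) == 0 )
    printf( "machdep.cpu.brand_string: %s\n", brand );
  return 0;
}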
machdep.cpu.logical_per_package: 2 machdep.cpu.extfeatures: XD machdep.cpu.features: FPU VME DE PSE TSC MSR PAE MCE CX8 APIC SEP MTRR PGE MCA CMOV PAT CLFSH DS ACPI MMX FXSR SSE SSE2 SS HTT TM SSE3 MON VMX EST TM2 TPR PDCM machdep.cpu.brand: 0 machdep.cpu.signature: 1768 machdep.cpu.extfeature_bits: 1048576 0 machdep.cpu.feature_bits: -1075184641 49577 machdep.cpu.stepping: 8 machdep.cpu.extfamily: 0 machdep.cpu.extmodel: 0 machdep.cpu.model: 14 machdep.cpu.family: 6 machdep.cpu.brand_string: Genuine Intel(R) CPU T2500 @ 2.00GHz machdep.cpu.vendor: GenuineIntel security.mac.seatbelt.debug: 0 security.mac.seatbelt.profile_refcount: 34 security.mac.seatbelt.qtnstate_refcount: 3 security.mac.seatbelt.cred_label_refcount: 28 security.mac.vnode_enforce: 1 security.mac.vm_enforce: 1 security.mac.sysvshm_enforce: 1 security.mac.sysvsem_enforce: 1 security.mac.sysvmsg_enforce: 1 security.mac.system_enforce: 1 security.mac.socket_enforce: 1 security.mac.proc_enforce: 1 security.mac.posixshm_enforce: 1 security.mac.posixsem_enforce: 1 security.mac.pipe_enforce: 1 security.mac.iokit_enforce: 0 security.mac.file_enforce: 0 security.mac.device_enforce: 1 security.mac.mmap_revocation_via_cow: 0 security.mac.mmap_revocation: 0 security.mac.max_slots: 8 �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/more.d�����������������������������������0000644�0001750�0001750�00000010006�11757531137�024137� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#pragma D option bufsize=16M int start; int result_type; string mib0[int]; string whence[int]; dtrace:::BEGIN { start = walltimestamp; result_type = 0; /* there's gotta be a better way than this */ mib0[1] = "CTL_KERN"; mib0[2] = "CTL_VM"; mib0[3] = "CTL_VFS"; mib0[4] = "CTL_NET"; mib0[5] = "CTL_DEBUG"; mib0[6] = "CTL_HW"; mib0[7] = "CTL_MACHDEP"; mib0[8] = "CTL_USER"; whence[0] = "SEEK_SET"; whence[1] = "SEEK_CUR"; whence[2] = "SEEK_END"; } fbt:::entry /pid == $target/ { printf( "+\t%s\n", probefunc ); } syscall:::entry /pid == $target/ { self->ts = walltimestamp; printf( "%d\t%s", (self->ts - start) / 1000, probefunc ); } syscall::lseek:entry /pid == $target/ { printf( "( %d, %d, %s )", arg0, arg1, whence[arg2] ); } syscall::__sysctl:entry /pid == $target && arg1 == 2/ { mib = (int*) copyin( arg0, arg1 * 4 ); len = (int*) copyin( arg3, 4 ); printf( "( 0x%p [%s,%d], %d, 0x%p, 0x%p [%d], 0x%p, %d )", arg0, mib0[mib[0]], mib[1], arg1, arg2, arg3, *len, arg4, arg5 ); } syscall::__sysctl:entry /pid == $target && arg1 == 3/ { mib = (int*) copyin( arg0, arg1 * 4 ); len = (int*) copyin( arg3, 4 ); printf( "( 0x%p [%s,%d,%d], %d, 0x%p, 0x%p [%d], 0x%p, %d )", arg0, mib0[mib[0]], mib[1], mib[2], arg1, arg2, arg3, *len, arg4, arg5 ); } syscall::__sysctl:entry /pid == $target && arg1 == 4/ { mib = (int*) copyin( arg0, arg1 * 4 ); len = (int*) copyin( arg3, 4 ); printf( "( 0x%p [%s,%d,%d,%d], %d, 0x%p, 0x%p [%d], 0x%p, %d )", arg0, mib0[mib[0]], mib[1], mib[2], mib[3], 
arg1, arg2, arg3, *len, arg4, arg5 ); }

syscall::write:entry, syscall::write_nocancel:entry /pid == $target/ {
  printf( "( %d, \"%S\", %d )", arg0, stringof(copyinstr(arg1)), arg2 );
}

syscall::read:entry, syscall::read_nocancel:entry /pid == $target/ {
  printf( "( %d, 0x%x, %d )", arg0, arg1, arg2 );
  /* stash the buffer address thread-locally, so the matching return
     clause can copyinstr() and show the data that was actually read */
  self->result_type = arg1;
}

syscall::open:entry, syscall::open_nocancel:entry, syscall::shm_open:entry /pid == $target/ {
  fn = stringof(copyinstr(arg0));
  printf( "( \"%s\", 0x%x, 0%o )", fn, arg1, arg2 );
}

syscall::access:entry /pid == $target/ {
  fn = stringof(copyinstr(arg0));
  printf( "( \"%s\", %d )", fn, arg1 );
}

syscall::exit:entry /pid == $target/ {
  printf( "( 0x%x [%d:%d] )\n", arg0, (arg0 >> 8), (arg0 & 127) );
}

syscall::mmap:entry /pid == $target && arg4 >= 0 && arg4 < 65536/ {
  printf( "( 0x%p, %d, 0x%x, 0x%x, %d, %d )", arg0, arg1, arg2, arg3, arg4, arg5 );
  self->result_type = 1;
}

syscall::mmap:entry /pid == $target && (arg4 < 0 || arg4 >= 65536)/ {
  printf( "( 0x%p, %d, 0x%x, 0x%x, 0x%x, %d )", arg0, arg1, arg2, arg3, arg4, arg5 );
  self->result_type = 1;
}

syscall::munmap:entry /pid == $target/ { printf( "( 0x%p, %d )", arg0, arg1 ); }

syscall::close:entry, syscall::close_nocancel:entry /pid == $target/ { printf( "( %d )", arg0 ); }

syscall::stat:entry, syscall::stat64:entry, syscall::lstat:entry, syscall::lstat64:entry /pid == $target/ {
  fn = stringof(copyinstr(arg0));
  printf( "( \"%s\", 0x%p )", fn, arg1 );
}

syscall::fstat:entry, syscall::fstat64:entry /pid == $target/ { printf( "( %d, 0x%p )", arg0, arg1 ); }

syscall::ioctl*:entry, syscall::fcntl*:entry /pid == $target/ { printf( "( %d, 0x%x, 0x%x )", arg0, arg1, arg2 ); }

syscall:::entry /pid == $target && self->ts/ { printf("\n"); }

syscall:::return /pid == $target && self->ts && self->result_type == 0/ {
  diff = ( walltimestamp - self->ts ) / 1000;
  printf( " = %d (%d µs)\n", arg1, diff );
  self->ts = 0;
}

syscall:::return /pid == $target && self->ts != 0 && self->result_type == 1/ {
  diff = ( walltimestamp - self->ts ) / 1000;
  printf( " = 0x%p (%d µs)\n", arg1, diff );
  self->result_type = 0;
  self->ts = 0;
}

syscall:::return /pid == $target && self->ts != 0 && self->result_type > 10/ {
  diff = ( walltimestamp - self->ts ) / 1000;
  printf( " = %d (%d µs) [\"%S\"]\n", arg1, diff, stringof(copyinstr(self->result_type)) );
  self->result_type = 0;
  self->ts = 0;
}

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/darwin.c

/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 *    http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include "basic.h"
#include "darwin.h"
#include "../tools.h"

#include <ctype.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <sys/sysctl.h>
#include <mach/mach.h>
#include <mach/host_info.h>
#include <mach/vm_statistics.h>

static const char* RCS_ID = "$Id$";

extern int isExtended; /* timestamp format concise or extended */
extern int isLocal;    /* timestamp time zone, UTC or local */

static void gather_darwin_cpuinfo( MachineDarwinInfo* machine ) {
  int i;
  size_t len;
  unsigned long freq;
  char model[128];

  len = sizeof(i);
  if ( sysctlbyname( "hw.ncpu", &i, &len, NULL, 0 ) == 0 ) machine->cpu_count = i;
  len = sizeof(i);
  if ( sysctlbyname( "hw.activecpu", &i, &len, NULL, 0 ) == 0 ) machine->cpu_online = i;
  len = sizeof(freq);
  if ( sysctlbyname( "hw.cpufrequency", &freq, &len, NULL, 0 ) == 0 ) machine->megahertz = freq / 1000000;

  len = sizeof(machine->vendor_id);
  if ( sysctlbyname( "machdep.cpu.vendor", machine->vendor_id, &len, NULL, 0 ) != 0 )
    memset( machine->vendor_id, 0, sizeof(machine->vendor_id) );

  len = sizeof(model);
  if ( sysctlbyname( "machdep.cpu.brand_string", model, &len, NULL, 0 ) == 0 ) {
    /* collapse whitespace runs in the brand string, bounds-checked in
     * the inner loops so a long token cannot overrun model_name, and
     * with one byte reserved for the terminating NUL */
    char* s = model;
    char* d = machine->model_name;
    char* end = machine->model_name + sizeof(machine->model_name) - 1;
    while ( *s && d < end ) {
      while ( *s && ! isspace(*s) && d < end ) *d++ = *s++;
      if ( *s == ' ' && d < end ) *d++ = *s++;
      while ( *s && isspace(*s) ) ++s;
    }
    *d = 0;
  } else {
    memset( machine->model_name, 0, sizeof(machine->model_name) );
  }
}

static void gather_darwin_uptime( MachineDarwinInfo* machine ) {
  size_t len = sizeof(machine->boottime);
  if ( sysctlbyname( "kern.boottime", &machine->boottime, &len, NULL, 0 ) == -1 )
    memset( &machine->boottime, 0, sizeof(machine->boottime) );
}

void gather_loadavg( float load[3] )
/* purpose: collect load averages
 * primary: provide functionality for monitoring
 * paramtr: load (OUT): array of 3 floats
 */
{
  struct loadavg l;
  size_t len = sizeof(l);
  if ( sysctlbyname( "vm.loadavg", &l, &len, NULL, 0 ) == 0 ) {
    int i;
    for ( i=0; i<3; ++i ) load[i] = l.ldavg[i] / ((float) l.fscale);
  } else {
    load[0] = load[1] = load[2] = 0.0;
  }
}

static void gather_darwin_meminfo( MachineDarwinInfo* machine ) {
  vm_statistics_data_t vm;
  mach_msg_type_number_t ic = HOST_VM_INFO_COUNT;
  uint64_t pagesize = getpagesize();
  struct xsw_usage s;
  size_t len = sizeof(s);

  if ( sysctlbyname( "vm.swapusage", &s, &len, NULL, 0 ) == 0 ) {
#if 0
    fprintf( stderr, "# xsu_total %lu\n", s.xsu_total );
    fprintf( stderr, "# xsu_avail %lu\n", s.xsu_avail );
    fprintf( stderr, "# xsu_used %lu\n", s.xsu_used );
    fprintf( stderr, "# xsu_pagesize %u\n", s.xsu_pagesize );
    fprintf( stderr, "# xsu_encrypted %d\n", s.xsu_encrypted );
#endif
    machine->swap_total = s.xsu_total;
    machine->swap_avail = s.xsu_avail;
    machine->swap_used = s.xsu_used;
  }

  len = sizeof(machine->ram_total);
  if ( sysctlbyname( "hw.memsize", &machine->ram_total, &len, NULL, 0 ) == -1 )
    machine->ram_total = 0;

  host_statistics( mach_host_self(), HOST_VM_INFO, (host_info_t) &vm, &ic );
  machine->ram_avail = pagesize * vm.free_count;
  machine->ram_active = pagesize * vm.active_count;
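  /* note: host_statistics() reports page counts, so each counter is
   * scaled by the VM page size to yield bytes: free pages approximate
   * available RAM, active/inactive pages are in use or reclaimable,
   * and wired pages are locked into physical memory. */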
  machine->ram_inactive = pagesize * vm.inactive_count;
  machine->ram_wired = pagesize * vm.wire_count;
}

static void gather_darwin_procstate( unsigned state[MAX_STATE] ) {
  int mib[4];
  size_t len;

  mib[0] = CTL_KERN;
  mib[1] = KERN_PROC;
  mib[2] = KERN_PROC_ALL;
  if ( sysctl( mib, 3, NULL, &len, NULL, 0 ) != -1 && len > 0 ) {
    void* buffer = malloc( len + sizeof(struct kinfo_proc) );
    if ( buffer != NULL &&
         sysctl( mib, 3, buffer, &len, NULL, 0 ) != -1 && len > 0 ) {
      struct extern_proc* p;
      struct kinfo_proc* kp;
      struct kinfo_proc* end = ((struct kinfo_proc*) (((char*) buffer) + len));
      for ( kp = (struct kinfo_proc*) buffer; kp < end && kp->kp_proc.p_pid; kp++ ) {
        p = &kp->kp_proc;
        state[STATE_TOTAL]++;
        switch ( p->p_stat ) {
        case SIDL: state[STATE_IDLE]++; break;
        case SRUN:
          /* SRUN is (runnable), not running. Need to dig deeper to find those procs
           * which are actually running, and those that are just runnable. */
          state[ (p->p_realtimer.it_interval.tv_sec |
                  p->p_realtimer.it_interval.tv_usec |
                  p->p_realtimer.it_value.tv_sec |
                  p->p_realtimer.it_value.tv_usec |
                  p->p_rtime.tv_sec | p->p_rtime.tv_usec) != 0 ?
                 STATE_RUNNING : STATE_SLEEPING]++;
          break;
        case SSLEEP: state[STATE_WAITING]++; break;
        case SSTOP: state[STATE_STOPPED]++; break;
        case SZOMB: state[STATE_ZOMBIE]++; break;
        default: state[STATE_OTHER]++; break;
        }
      }
    }
    free(buffer);
  }
}

void* initMachine( void )
/* purpose: initialize the data structure.
 * returns: initialized MachineDarwinInfo structure.
 */
{
  MachineDarwinInfo* p = (MachineDarwinInfo*) malloc(sizeof(MachineDarwinInfo));

  /* extra sanity check */
  if ( p == NULL ) {
    fputs( "initMachine c'tor failed\n", stderr );
    return NULL;
  } else memset( p, 0, sizeof(MachineDarwinInfo) );

  /* name of this provider -- overwritten by importers */
  p->basic = initBasicMachine();
  p->basic->provider = "darwin";

  /* gather machine information */
  gather_darwin_meminfo( p );
  gather_loadavg( p->load );
  gather_darwin_uptime( p );
  gather_darwin_cpuinfo( p );
  gather_darwin_procstate( p->pid_state );

  return p;
}

int printMachine( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const void* data )
/* purpose: format the information into the given buffer as XML.
 * paramtr: buffer (IO): area to store the output in
 *          size (IN): capacity of character area
 *          len (IO): current position within area, will be adjusted
 *          indent (IN): indentation level
 *          tag (IN): name to use for element tags.
 *          data (IN): MachineDarwinInfo info to print.
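 *
 *          The body written here corresponds to the myprint() calls
 *          below; roughly, as a sketch with illustrative values only:
 *
 *            <ram total="2147483648" avail="..." active="..." inactive="..." wired="..."/>
 *            <swap total="..." avail="..." used="..."/>
 *            <boot>2008-05-24T10:11:12.130-07:00</boot>
 *            <cpu count="2" speed="2000" vendor="GenuineIntel">Genuine Intel(R) CPU T2500 @ 2.00GHz</cpu>
 *            <load min1="0.15" min5="0.10" min15="0.05"/>
 *            <proc total="97" running="2" sleeping="95"/>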
* returns: number of characters put into buffer (buffer length) */ { static const char* c_state[MAX_STATE] = { "total", "idle", "running", "sleeping", "waiting", "stopped", "zombie", "other" }; char b[3][32]; const MachineDarwinInfo* ptr = (const MachineDarwinInfo*) data; DarwinState s; /* sanity check */ if ( ptr == NULL ) return *len; /* start basic info */ startBasicMachine( buffer, size, len, indent+2, tag, ptr->basic ); /* <ram .../> tag */ myprint( buffer, size, len, "%*s<ram total=\"%s\" avail=\"%s\"", indent+2, "", sizer( b[0], 32, sizeof(ptr->ram_total), &(ptr->ram_total) ), sizer( b[1], 32, sizeof(ptr->ram_avail), &(ptr->ram_avail) ) ); myprint( buffer, size, len, " active=\"%s\" inactive=\"%s\" wired=\"%s\"/>\n", sizer( b[0], 32, sizeof(ptr->ram_active), &(ptr->ram_active) ), sizer( b[1], 32, sizeof(ptr->ram_inactive), &(ptr->ram_inactive) ), sizer( b[2], 32, sizeof(ptr->ram_wired), &(ptr->ram_wired) ) ); /* <swap .../> tag */ myprint( buffer, size, len, "%*s<swap total=\"%s\" avail=\"%s\" used=\"%s\"/>\n", indent+2, "", sizer( b[0], 32, sizeof(ptr->swap_total), &(ptr->swap_total) ), sizer( b[1], 32, sizeof(ptr->swap_avail), &(ptr->swap_avail) ), sizer( b[2], 32, sizeof(ptr->swap_used), &(ptr->swap_used) ) ); /* <boot> element */ myprint( buffer, size, len, "%*s<boot>", indent+2, "" ); mydatetime( buffer, size, len, isLocal, isExtended, ptr->boottime.tv_sec, ptr->boottime.tv_usec ); append( buffer, size, len, "</boot>\n" ); /* <cpu> element */ myprint( buffer, size, len, "%*s<cpu count=\"%s\" speed=\"%s\" vendor=\"%s\">%s</cpu>\n", indent+2, "", sizer( b[0], 32, sizeof(ptr->cpu_count), &(ptr->cpu_count) ), sizer( b[1], 32, sizeof(ptr->megahertz), &(ptr->megahertz) ), ptr->vendor_id, ptr->model_name ); /* loadavg data */ myprint( buffer, size, len, "%*s<load min1=\"%.2f\" min5=\"%.2f\" min15=\"%.2f\"/>\n", indent+2, "", ptr->load[0], ptr->load[1], ptr->load[2] ); /* <proc> element */ myprint( buffer, size, len, "%*s<proc", indent+2, "" ); for ( s=STATE_TOTAL; s < MAX_STATE; ++s ) { if ( ptr->pid_state[s] ) myprint( buffer, size, len, " %s=\"%u\"", c_state[s], ptr->pid_state[s] ); } append( buffer, size, len, "/>\n" ); /* finish tag */ finalBasicMachine( buffer, size, len, indent+2, tag, ptr->basic ); return *len; } void deleteMachine( void* data ) /* purpose: destructor * paramtr: data (IO): valid MachineDarwinInfo structure to destroy. */ { MachineDarwinInfo* ptr = (MachineDarwinInfo*) data; #ifdef EXTRA_DEBUG fprintf( stderr, "# deleteDarwinMachineInfo(%p)\n", data ); #endif if ( ptr ) { deleteBasicMachine( ptr->basic ); free((void*) ptr); } } �����������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/darwin.h���������������������������������0000644�0001750�0001750�00000007742�11757531137�024502� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2008 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _MACHINE_DARWIN_H #define _MACHINE_DARWIN_H #include <sys/types.h> #include <sys/time.h> #include <sys/utsname.h> #include <stdint.h> #include "basic.h" typedef enum { STATE_TOTAL = 0, STATE_IDLE, /* Darwin SIDL */ STATE_RUNNING, /* Darwin SRUN if realtime != 0 */ STATE_SLEEPING, /* Darwin SRUN if realtime == 0 */ STATE_WAITING, /* Darwin SSLEEP */ STATE_STOPPED, /* Darwin SSTOP */ STATE_ZOMBIE, /* Darwin SZOMB */ STATE_OTHER, /* future versions */ MAX_STATE } DarwinState; typedef struct { /* common (shared) portion */ MachineBasicInfo* basic; /* * provider-specific portion */ /* memory statistics */ uint64_t ram_total; /* sysctl hw.memsize */ uint64_t ram_avail; /* mach vm_statistics:free_count */ uint64_t ram_active; /* mach vm_statistics:active_count */ uint64_t ram_inactive; /* mach vm_statistics:inactive_count */ uint64_t ram_wired; /* mach vm_statistics:wire_count */ uint64_t swap_total; /* sysctl vm.swapusage:xsu_total */ uint64_t swap_avail; /* sysctl vm.swapusage:xsu_avail */ uint64_t swap_used; /* sysctl vm.swapusage:xsu_used */ #if 0 /* * future lab memory stats -- these are very interesting for monitoring */ natural_t reactivate; /* The number of reactivated pages. */ natural_t pageins; /* The number of requests for pages from a pager */ natural_t pageouts; /* The number of pages that have been paged out. */ natural_t faults; /* The number of times the vm_fault routine has been called. */ natural_t cow_fault; /* The number of copy-on-write faults */ float hit_rate; /* object cache lookup hit rate */ #endif /* boot time stats */ struct timeval boottime; /* kern.boottime */ /* cpu information */ unsigned short cpu_count; /* hw.ncpu */ unsigned short cpu_online; /* hw.activecpu */ unsigned long megahertz; /* hw.cpufrequency */ char vendor_id[16]; /* machdep.cpu.vendor */ char model_name[80]; /* machdep.cpu.brand_string */ /* system load */ float load[3]; /* vm.loadavg */ /* process count */ unsigned pid_state[MAX_STATE]; } MachineDarwinInfo; extern void gather_loadavg( float load[3] ); /* purpose: collect load averages * primary: provide functionality for monitoring * paramtr: load (OUT): array of 3 floats */ extern void* initMachine( void ); /* purpose: initialize the data structure. * returns: initialized MachineDarwinInfo structure. */ extern int printMachine( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const void* data ); /* purpose: format the information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * data (IN): MachineDarwinInfo info to print. * returns: number of characters put into buffer (buffer length) */ extern void deleteMachine( void* data ); /* purpose: destructor * paramtr: data (IO): valid MachineDarwinInfo structure to destroy. 
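 *
 * usage:   typical lifecycle, as a sketch (not code from this package):
 *
 *            void* m = initMachine();
 *            if ( m != NULL ) {
 *              char buf[16384];
 *              size_t len = 0;
 *              printMachine( buf, sizeof(buf), &len, 2, "machine", m );
 *              fwrite( buf, 1, len, stdout );
 *              deleteMachine( m );
 *            }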
*/ #endif /* _MACHINE_DARWIN_H */ ������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/24���������������������������������������0000755�0001750�0001750�00000000402�11757531137�023202� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/usr/bin/env perl # use 5.006; foreach my $pid ( @ARGV ) { if ( open( S, "</proc/$pid/stat" ) ) { @x = split ' ', <S>; close S; $x[1] = substr($x[1],1,-1); printf "%5d %5d %s 0x%010x %-16s %5u 0x%010x 0x%010x\n", @x[0,3,2,8,1,23,25,27]; } } ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/machine/sunos-swap.h�����������������������������0000644�0001750�0001750�00000003153�11757531137�025325� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2008 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _MACHINE_SUNOS_SWAP_H #define _MACHINE_SUNOS_SWAP_H #include <sys/types.h> int gather_sunos_swap( uint64_t* total, uint64_t* free ); /* purpose: collect swap information from solaris * warning: This compilation unit MUST be compiled WITHOUT LFS support! * paramtr: total (OUT): total size of all swapping * free (OUT): free size of all swapping */ void gather_sunos_proc( unsigned* total, unsigned* good, unsigned* active, unsigned* zombie, uint64_t* size, uint64_t* rss ); /* purpose: collect proc information from solaris * warning: This compilation unit MUST be compiled WITHOUT LFS support! 
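 *          (Solaris <procfs.h> refuses to compile in the large-file
 *          environment, i.e. with _FILE_OFFSET_BITS=64 in 32-bit code,
 *          hence the no-LFS requirement for this unit.)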
* paramtr: total (OUT): all eligible entries found in /proc * good (OUT): portion of total we were able to read from * active (OUT): number of active THREADS (LWP) * zombie (OUT): number of zombie THREADS (LWP) * size (OUT): sum of every process's SIZE * rss (OUT): sum of every process's RSS */ #endif /* _MACHINE_SUNOS_SWAP_H */ ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/meminfo.c����������������������������������������0000644�0001750�0001750�00000003727�11757531137�023236� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "meminfo.h" #include <ctype.h> #include <errno.h> #include <stdlib.h> #include <stdio.h> #include <string.h> #include <stdarg.h> #include <unistd.h> #include <fcntl.h> #include "debug.h" static const char* RCS_ID = "$Id: meminfo.c 4535 2011-09-26 22:14:19Z voeckler $"; #if 0 --- 2.4.29 Linux kernel, fs/proc/array.c --- #endif void initMemInfo( MemInfo* meminfo, pid_t pid ) /* purpose: initialize the data structure from process status * paramtr: meminfo (OUT): initialized memory block * pid (IN): process id to use for initialization. */ { } int printXMLMemInfo( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const MemInfo* mem ) /* purpose: format the status information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * mem (IN): job status info to xml format. * returns: number of characters put into buffer (buffer length) */ { return *len; } void deleteMemInfo( MemInfo* meminfo ) /* purpose: destructor * paramtr: meminfo (IO): valid MemInfo structure to destroy. 
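 * note:    in this version the MemInfo routines are empty placeholders;
 *          the fs/proc/array.c reference above hints at the intended
 *          /proc-based implementation, but nothing is collected yet.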
 */
{
#ifdef EXTRA_DEBUG
  debugmsg( "# deleteMemInfo(%p)\n", meminfo );
#endif
}

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/appinfo.h

/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 *    http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#ifndef _APPINFO_H
#define _APPINFO_H

#include <time.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/resource.h>
#include "statinfo.h"
#include "jobinfo.h"
#include "limitinfo.h"
#include <unistd.h>
#include "machine.h"

typedef struct {
  struct timeval start;   /* point of time that app was started */
  struct timeval finish;  /* point of time that app was reaped */
  int isPrinted;          /* flag to set after successful print op */
  int noHeader;           /* avoid <?xml ?> preamble and other things */

  char* const* argv;      /* application executable and arguments */
  int argc;               /* application CLI number of arguments */
  char* const* envp;      /* snapshot of environment */
  size_t envc;            /* size of the environment vector envp */

  char ipv4[16];          /* host address of primary interface */
  char prif[16];          /* name of primary interface NIC */
  char* xformation;       /* chosen VDC TR fqdn for this invocation */
  char* derivation;       /* chosen VDC DV fqdn for this invocation */
  char* sitehandle;       /* resource handle for this site */
  char* wf_label;         /* label of workflow this job belongs to */
  char* wf_stamp;         /* time stamp of workflow this job belongs to */
  char* workdir;          /* CWD at point of execution */
  pid_t child;            /* pid of gridstart itself */

  JobInfo setup;          /* optional set-up application to run */
  JobInfo prejob;         /* optional pre-job application to run */
  JobInfo application;    /* the application itself that was run */
  JobInfo postjob;        /* optional post-job application to run */
  JobInfo cleanup;        /* optional clean-up application to run */

  StatInfo input;         /* stat() info for "input", if available */
  StatInfo output;        /* stat() info for "output", if available */
  StatInfo error;         /* stat() info for "error", if available */
  StatInfo logfile;       /* stat() info for "logfile", if available */
  StatInfo gridstart;     /* stat() info for this program, if available */
  StatInfo channel;       /* stat() on app channel FIFO, if avail. */

  StatInfo* initial;      /* stat() info for user-specified files. */
  size_t icount;          /* size of initial array, may be 0 */
  StatInfo* final;        /* stat() info for user-specified files.
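                           * The initial list is taken at startup, the
                           * final list again after the main job ran, so
                           * changes to tracked files show in the record.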
                           */
  size_t fcount;          /* size of final array, may be 0 */

  mode_t umask;           /* currently active umask */
  struct rusage usage;    /* rusage record for myself */
  LimitInfo limits;       /* hard- and soft limits */
  MachineInfo machine;    /* more system information */
} AppInfo;

extern void initAppInfo( AppInfo* appinfo, int argc, char* const* argv );
/* purpose: initialize the data structure with defaults.
 *          This will also parse the CLI arguments and assemble the app call CLI.
 * paramtr: appinfo (OUT): initialized memory block
 *          argc (IN): from main()
 *          argv (IN): from main()
 * except.: Will exit with code 1 on empty commandline
 */

extern int printAppInfo( const AppInfo* runinfo );
/* purpose: output the given app info onto the given fd
 * paramtr: runinfo (IN): is the collective information about the run
 * returns: the number of characters actually written (as of write() call).
 * sidekick: will update the self resource usage record before print.
 */

extern void envIntoAppInfo( AppInfo* runinfo, char* envp[] );
/* purpose: save a deep copy of the current environment
 * paramtr: runinfo (IO): place to store the deep copy
 *          envp (IN): current environment pointer
 */

extern void deleteAppInfo( AppInfo* runinfo );
/* purpose: destructor
 * paramtr: runinfo (IO): valid AppInfo structure to destroy.
 */

#endif /* _APPINFO_H */

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/mylist.c

/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 *    http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include "mylist.h"
#include <ctype.h>
#include <errno.h>
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>

static const char* RCS_ID = "$Id: mylist.c 50 2007-05-19 00:48:32Z gmehta $";

#ifndef ITEM_MAGIC
#define ITEM_MAGIC 0xcd82bd08
#endif

#ifndef LIST_MAGIC
#define LIST_MAGIC 0xbae21bdb
#endif

int mylist_item_init( mylist_item_p item, const char* data )
/* purpose: initialize a data item.
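 *          A string of the form "lfn=pfn" is split at the first equal
 *          sign, e.g. "f.a=/tmp/f.a" yields lfn "f.a" and pfn "/tmp/f.a";
 *          without an equal sign the whole string becomes the pfn.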
* paramtr: item (OUT): item pointer to initialize * data (IN): string to copy into item * returns: 0 on success, * EINVAL if arguments are NULL * ENOMEM if allocation failed */ { char* s; /* sanity check */ if ( item == NULL || data == NULL ) return EINVAL; memset( item, 0, sizeof(mylist_item_t) ); item->magic = ITEM_MAGIC; if ( (item->pfn = strdup(data)) == NULL ) return ENOMEM; if ( (s = strchr(item->pfn, '=')) ) { *s++ = '\0'; item->lfn = item->pfn; item->pfn = s; } else { item->lfn = NULL; } return 0; } int mylist_item_done( mylist_item_p item ) /* purpose: free allocated space of an item * paramtr: item (IO): area to free * returns: 0 on success, * EINVAL if the magic failed, or NULL argument */ { /* sanity check */ if ( item == NULL || item->magic != ITEM_MAGIC ) return EINVAL; /* free item */ if ( item->lfn ) free((void*) item->lfn); else if ( item->pfn ) free((void*) item->pfn); memset( item, 0, sizeof(mylist_item_t) ); return 0; } int mylist_init( mylist_p list ) { /* sanity check */ if ( list == NULL ) return EINVAL; memset( list, 0, sizeof(mylist_t) ); list->magic = LIST_MAGIC; return 0; } int mylist_add( mylist_p list, const char* data ) { int status; mylist_item_p temp; /* sanity check */ if ( list == NULL || list->magic != LIST_MAGIC ) return EINVAL; /* allocate item space */ if ( (temp = malloc( sizeof(mylist_item_t) )) == NULL ) return ENOMEM; if ( (status = mylist_item_init( temp, data )) != 0 ) { free((void*) temp); return status; } /* add item to list */ if ( list->count ) { list->tail->next = temp; list->tail = temp; } else { list->head = list->tail = temp; } list->count++; return 0; } int mylist_done( mylist_p list ) { /* sanity check */ if ( list == NULL || list->magic != LIST_MAGIC ) return EINVAL; if ( list->count ) { /* traverse list */ int status; mylist_item_p temp; while ( (temp = list->head) != NULL ) { list->head = list->head->next; if ( (status=mylist_item_done( temp )) != 0 ) return status; free((void*) temp); } } memset( list, 0, sizeof(mylist_t) ); return 0; } int mylist_fill( mylist_p list, const char* fn ) /* purpose: Add each line in the specified file to the list * paramtr: list (IO): list to modify * fn (IN): name of the file to read * returns: 0 on success, */ { FILE* file; char* line, *s; int result = 0; size_t size = getpagesize(); /* sanity check */ if ( list == NULL || list->magic != LIST_MAGIC ) return EINVAL; /* try to open file */ if ( (file=fopen( fn, "r" )) == NULL ) return errno; /* allocate line buffer */ if ( (line=(char*)malloc(size)) == NULL ) { fclose(file); return ENOMEM; } /* read lines from file */ while ( fgets( line, size, file ) ) { /* FIXME: unhandled overly long lines */ /* comments */ if ( (s = strchr( line, '#' )) ) *s-- = '\0'; else s = line + strlen(line) - 1; /* chomp */ while ( s > line && isspace(*s) ) *s-- = '\0'; /* skip empty lines */ if ( *line == 0 ) continue; if ( (result=mylist_add( list, line )) != 0 ) break; } /* done with file */ fclose(file); free((void*) line); return result; } ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/mynss.c������������������������������������������0000644�0001750�0001750�00000005437�11757531137�022755� 0����������������������������������������������������������������������������������������������������ustar 
�rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "mynss.h" static const char* RCS_ID = "$Id: mynss.c 50 2007-05-19 00:48:32Z gmehta $"; #ifdef LINUX #include <setjmp.h> #include <errno.h> #include <memory.h> #include <string.h> #include "mysignal.h" /* * Linux statically linked binaries do not like libnss function calls. */ volatile sig_atomic_t noSymbolicLookups; static volatile sig_atomic_t canJump; static sigjmp_buf jump; static SIGRETTYPE sigsegv_handler( SIGPARAM signo ) { if ( canJump == 0 ) return; /* unexpected signal */ noSymbolicLookups = signo; canJump = 0; siglongjmp( jump, 1 ); } static int setup_sigsegv( struct sigaction* old, struct sigaction* new ) { memset( old, 0, sizeof(*old) ); memset( new, 0, sizeof(*new) ); new->sa_handler = sigsegv_handler; sigemptyset( &(new->sa_mask) ); #ifdef SA_INTERRUPT new->sa_flags |= SA_INTERRUPT; /* SunOS, obsoleted by POSIX */ #endif return sigaction( SIGSEGV, new, old ); } struct passwd* wrap_getpwuid( uid_t uid ) { struct sigaction old_segv, new_segv; struct passwd* result; if ( noSymbolicLookups ) return NULL; if ( sigsetjmp( jump, 1 ) ) { errno = ELIBACC; return NULL; } canJump = 1; setup_sigsegv( &old_segv, &new_segv ); result = getpwuid(uid); sigaction( SIGSEGV, &old_segv, NULL ); return ( noSymbolicLookups ? NULL : result ); } struct group* wrap_getgrgid( gid_t gid ) { struct sigaction old_segv, new_segv; struct group* result; if ( noSymbolicLookups ) return NULL; if ( sigsetjmp( jump, 1 ) ) { errno = ELIBACC; return NULL; } canJump = 1; setup_sigsegv( &old_segv, &new_segv ); result = getgrgid(gid); sigaction( SIGSEGV, &old_segv, NULL ); return ( noSymbolicLookups ? NULL : result ); } struct hostent* wrap_gethostbyaddr( const char* addr, int len, int type ) { struct sigaction old_segv, new_segv; struct hostent* result; if ( noSymbolicLookups ) return NULL; if ( sigsetjmp( jump, 1 ) ) { h_errno = NETDB_INTERNAL; errno = ELIBACC; return NULL; } canJump = 1; setup_sigsegv( &old_segv, &new_segv ); result = gethostbyaddr(addr,len,type); sigaction( SIGSEGV, &old_segv, NULL ); return ( noSymbolicLookups ? 
NULL : result ); } #endif /* LINUX */ ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/event.h������������������������������������������0000644�0001750�0001750�00000003014�11757531137�022717� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _EVENT_H #define _EVENT_H #include <sys/types.h> #include <signal.h> #include "statinfo.h" extern ssize_t send_message( int outfd, char* msg, ssize_t msize, unsigned channel ); /* purpose: sends a XML-encoded message chunk back to the application * paramtr: outfd (IN): output file descriptor, writable (STDERR_FILENO) * msg (IN): pointer to message * msize (IN): length of message content * channel (IN): which channel to send upon (0 - app) */ extern int eventLoop( int outfd, StatInfo* fifo, volatile sig_atomic_t* terminate ); /* purpose: copy from input file(s) to output fd while not interrupted. * paramtr: outfd (IN): output file descriptor, ready for writing. * fifo (IO): contains input fd, and maintains statistics. * terminate (IN): volatile flag, set in signal handlers. * returns: -1 in case of error, 0 for o.k. */ #endif /* _EVENT_H */ ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/parse.c������������������������������������������0000644�0001750�0001750�00000036157�11757531137�022721� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <ctype.h> #include <stdio.h> #include <string.h> #include <stdlib.h> #include <unistd.h> #include "debug.h" #include "parse.h" static const char* RCS_ID = "$Id: parse.c 4535 2011-09-26 22:14:19Z voeckler $"; size_t countNodes( const Node* head ) /* purpose: count the number of element in list * paramtr: head (IN): start of the list. * returns: number of elements in list. */ { const Node* temp; size_t result = 0; for ( temp = head; temp; temp = temp->next ) result++; return result; } void deleteNodes( Node* head ) /* purpose: clean up the created list and free its memory. * paramtr: head (IO): start of the list. */ { Node* temp; while ( (temp=head) ) { head = head->next; free((void*) temp->data ); free((void*) temp ); } } static int debug = 0; static void add( Node** head, Node** tail, const char* data ) /* purpose: add an data item to the end of the list * paramtr: head (OUT): head of the list, NULL for no list * tail (OUT): tail of the list, will be adjusted * data (IN): string to save the pointer to (shallow copy) */ { Node* temp = (Node*) malloc( sizeof(Node) ); temp->data = data; temp->next = NULL; if ( *head == NULL ) *head = *tail = temp; else { (*tail)->next = temp; *tail = temp; } } static void resolve( char** v, char* varname, char** p, char* buffer, size_t size ) /* purpose: lookup the variable name in the environment, and * copy the environment value into the buffer * paramtr: v (IO): final position of the variable name buffer * varname (IN): start of variable name buffer * p (IO): cursor position of output buffer * buffer: (IO): start of output buffer * size (IN): size of output buffer */ { char* value = 0; **v = 0; if ( (value = getenv(varname)) ) { char* pp = *p; if ( debug ) debugmsg( "# %s=%s\n", varname, value ); while ( pp - buffer < size && *value ) *pp++ = *value++; *p = pp; } else { if ( debug ) debugmsg( "# %s does not exist\n", varname ); } *v = varname; } /* Parsing pre- and postjob argument line splits whitespaces in shell fashion. * state transition table maps from start state and input character to * new state and action. 
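 *
 * As a worked example, the pre/post-job string
 *     echo "x=$X" 'keep $X'
 * splits into three arguments: echo, x=<value of X> (the variable is
 * substituted inside double quotes), and the literal keep $X (single
 * quotes suppress substitution) -- assuming X is set in the environment.
 *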
The actions are abbreviated as follows: * * abb | # | meaning * ----+---+-------------------------------------------------------- * Sb | 0 | store input char into argument buffer * Fb | 1 | flush regular buffer and reset argument buffer pointers * Sv | 2 | store input char into variable name buffer * Fv | 3 | flush varname via lookup into argument buffer and reset vpointers * Fvb | 4 | Do Fv followed by Fb * - | 5 | skip (ignore) input char (do nothing) * * | 6 | translate abfnrtv to controls, other store verbatim * FS | 7 | Do Fv followed by Sb * | 8 | print error and exit * * special final states: * * state | meaning * ------+----------------- * F 32 | final, leave machine * E1 33 | error 1: missing closing apostrophe * E2 34 | error 2: missing closing quote * E3 35 | error 3: illegal variable name * E4 36 | error 4: missing closing brace * E5 37 | error 5: premature end of string * * * STATE | eos | " | ' | { | } | $ | \ | alnum| wspc | else | * ------+------+------+------+------+------+------+------+------+------+------+ * 0 | F,- | 4,- | 2,- | 1,Sb | 1,Sb | 11,- | 14,- | 1,Sb | 0,- | 1,Sb | * 1 | F,Fb | 4,- | 2,- | 1,Sb | 1,Sb | 11,- | 14,- | 1,Sb | 0,Fb | 1,Sb | * 2 | E1 | 2,Sb | 1,- | 2,Sb | 2,Sb | 2,Sb | 3,- | 2,Sb | 2,Sb | 2,Sb | * 3 | E1 | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | * 4 | E2 | 1,- | 4,Sb | 4,Sb | 4,Sb | 8,- | 7,- | 4,Sb | 4,Sb | 4,Sb | * 7 | E2 | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,* | 4,Sb | 4,Sb | * 8 | E2 | E2 | E2 | 9,- | E3 | E3 | E3 |10,Sv | E3 | E3 | * 9 | E4 | E4 | E4 | E4 | 4,Fv | E3 | 9,Sv | 9,Sv | 9,Sv | 9,Sv | * 10 | E2 | 1,Fv | 4,Fv | 4,Fv | 4,Fv | 8,Fv | 4,Fv |10,Sv | 4,Fv |10,Sv | * 11 | E3 | E3 | E3 |12,- | E3 | E3 | E3 |13,Sv | E3 | E3 | * 12 | E4 | E4 | E4 | E4 | 1,Fv | E3 |12,Sv |12,Sv |12,Sv |12,Sv | * 13 | F,Fvb| 4,Fv | 2,Fv | 1,Fv | 1,Fv | E3 |13,Sv |13,Sv | 1,Fv | 1,FS | * 14 | E5 | 1,Sb | 1,Sb | 1,Sb | 1,Sb | 1,Sb | 1,Sb | 1,Sb | 1,Sb | 1,Sb | * * '" REMOVED: * 5 | E1 | 5,Sb | 4,- | 5,Sb | 5,Sb | 5,Sb | 6,- | 5,Sb | 5,Sb | 5,Sb | * 6 | E1 | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | */ typedef const char Row[10]; typedef const Row Map[15]; static Map actionmap1 = { { 5, 5, 5, 0, 0, 5, 5, 0, 5, 0 }, /* 0 */ { 1, 5, 5, 0, 0, 5, 5, 0, 1, 0 }, /* 1 */ { 8, 0, 5, 0, 0, 0, 5, 0, 0, 0 }, /* 2 */ { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, /* 3 */ { 8, 5, 4, 0, 0, 5, 5, 0, 0, 0 }, /* 4 */ { 8, 0, 5, 0, 0, 0, 5, 0, 0, 0 }, /* 5 (unused) */ { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, /* 6 (unused) */ { 8, 0, 0, 0, 0, 0, 0, 6, 0, 0 }, /* 7 */ { 8, 8, 8, 5, 8, 8, 8, 2, 8, 8 }, /* 8 */ { 8, 8, 8, 8, 3, 8, 2, 2, 2, 2 }, /* 9 */ { 8, 3, 3, 3, 3, 3, 3, 2, 3, 2 }, /* 10 */ { 8, 8, 8, 5, 8, 8, 8, 2, 8, 8 }, /* 11 */ { 8, 8, 8, 8, 3, 8, 2, 2, 2, 2 }, /* 12 */ { 4, 3, 3, 3, 3, 8, 2, 2, 3, 7 }, /* 13 */ { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 } /* 14 */ }; static Map statemap1 = { { 32, 4, 2, 1, 1, 11, 14, 1, 0, 1 }, /* 0 */ { 32, 4, 2, 1, 1, 11, 14, 1, 0, 1 }, /* 1 */ { 33, 2, 1, 2, 2, 2, 3, 2, 2, 2 }, /* 2 */ { 33, 2, 2, 2, 2, 2, 2, 2, 2, 2 }, /* 3 */ { 34, 1, 0, 4, 4, 8, 7, 4, 4, 4 }, /* 4 */ { 33, 5, 4, 5, 5, 5, 6, 5, 5, 5 }, /* 5 (unused) */ { 33, 5, 5, 5, 5, 5, 5, 5, 5, 5 }, /* 6 (unused) */ { 34, 4, 4, 4, 4, 4, 4, 4, 4, 5 }, /* 7 */ { 34, 34, 34, 9, 34, 34, 34, 10, 34, 34 }, /* 8 */ { 36, 36, 36, 36, 4, 36, 9, 9, 9, 9 }, /* 9 */ { 34, 1, 4, 4, 4, 8, 4, 10, 4, 10 }, /* 10 */ { 35, 35, 35, 12, 35, 35, 35, 13, 35, 35 }, /* 11 */ { 36, 36, 36, 36, 1, 35, 12, 12, 12, 12 }, /* 12 */ { 32, 4, 2, 1, 1, 35, 13, 13, 1, 1 }, /* 13 */ { 37, 1, 1, 
1, 1, 1, 1, 1, 1, 1 } /* 14 */ }; static const char* errormessage[5] = { "Error 1: missing closing apostrophe\n", "Error 2: missing closing quote\n", "Error 3: illegal variable name\n", "Error 4: missing closing brace\n", "Error 5: premature end of string\n" }; static const char* translation = "abnrtv"; static const char translationmap[] = "\a\b\n\r\t\v"; /* Parsing main job argument vector maintains whitespace. * state transition table maps from start state and input character to * new state and action. The actions are abbreviated as specified above. * * STATE | eos | " | ' | { | } | $ | \ | alnum| wspc | else | * ------+------+------+------+------+------+------+------+------+------+------+ * 0 | F,Fb | 4,Sb | 2,Sb | 0,Sb | 0,Sb | 11,- | 1,- | 0,Sb | 0,Sb | 0,Sb | * 1 | E5 | 0,Sb | 0,Sb | 0,Sb | 0,Sb | 0,Sb | 0,Sb | 0,Sb | 0,Sb | 0,Sb | * 2 | E1 | 2,Sb | 0,Sb | 2,Sb | 2,Sb | 2,Sb | 3,Sb | 2,Sb | 2,Sb | 2,Sb | * 3 | E1 | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | * 4 | E2 | 0,Sb | 4,Sb | 4,Sb | 4,Sb | 8,- | 7,Sb | 4,Sb | 4,Sb | 4,Sb | * 7 | E2 | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,Sb | * 8 | E2 | E2 | E2 | 9,- | E3 | E3 | E3 |10,Sv | E3 | E3 | * 9 | E4 | E4 | E4 | E4 | 4,Fv | E3 | 9,Sv | 9,Sv | 9,Sv | 9,Sv | * 10 | E2 | 0,FS | 4,FS | 4,FS | 4,FS | 8,Fv | 4,FS |10,Sv | 4,FS |10,Sv | * 11 | E3 | E3 | E3 |12,- | E3 | E3 | E3 |13,Sv | E3 | E3 | * 12 | E4 | E4 | E4 | E4 | 0,Fv | E3 |12,Sv |12,Sv |12,Sv |12,Sv | * 13 | F,Fvb| 4,FS | 2,FS | 0,FS | 0,FS | E3 |13,Sv |13,Sv | 0,FS |0,FS | * * '" REMOVED * 5 | E1 | 5,Sb | 4,Sb | 5,Sb | 5,Sb | 5,Sb | 6,Sb | 5,Sb | 5,Sb | 5,Sb | * 6 | E1 | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | */ static Map actionmap2 = { { 1, 0, 0, 0, 0, 5, 5, 0, 0, 0 }, /* 0 FIXED: \\ 0 -> 5 */ { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, /* 1 */ { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, /* 2 */ { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, /* 3 */ { 8, 0, 0, 0, 0, 5, 0, 0, 0, 0 }, /* 4 */ { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, /* 5 (unused) */ { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, /* 6 (unused) */ { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, /* 7 */ { 8, 8, 8, 5, 8, 8, 8, 2, 8, 8 }, /* 8 */ { 8, 8, 8, 8, 3, 8, 2, 2, 2, 2 }, /* 9 */ { 8, 7, 7, 7, 7, 3, 7, 2, 7, 2 }, /* 10 */ { 8, 8, 8, 5, 8, 8, 8, 2, 8, 8 }, /* 11 */ { 8, 8, 8, 8, 3, 8, 2, 2, 2, 2 }, /* 12 */ { 4, 7, 7, 7, 7, 8, 2, 2, 7, 7 }, /* 13 */ { 8, 8, 8, 8, 8, 8, 8, 8, 8, 8 } /* unused */ }; static Map statemap2 = { { 32, 4, 2, 0, 0, 11, 1, 0, 0, 0 }, /* 0 */ { 37, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, /* 1 */ { 33, 2, 0, 2, 2, 2, 3, 2, 2, 2 }, /* 2 */ { 33, 2, 2, 2, 2, 2, 2, 2, 2, 2 }, /* 3 */ { 34, 0, 4, 4, 4, 8, 7, 4, 4, 4 }, /* 4 */ { 33, 5, 4, 5, 5, 5, 6, 5, 5, 5 }, /* 5 (unused) */ { 33, 5, 5, 5, 5, 5, 5, 5, 5, 5 }, /* 6 (unused) */ { 34, 4, 4, 4, 4, 4, 4, 4, 4, 4 }, /* 7 */ { 34, 34, 34, 9, 35, 35, 35, 10, 35, 35 }, /* 8 */ { 36, 36, 36, 36, 4, 35, 9, 9, 9, 9 }, /* 9 */ { 34, 0, 4, 4, 4, 8, 4, 10, 4, 10 }, /* 10 */ { 35, 35, 35, 12, 35, 35, 35, 13, 35, 35 }, /* 11 */ { 36, 36, 36, 36, 0, 35, 12, 12, 12, 12 }, /* 12 */ { 32, 4, 2, 0, 0, 35, 13, 13, 0, 0 }, /* 13 */ { 32, 32, 32, 32, 32, 32, 32, 32, 32, 32 } /* unused */ }; static int xlate( char input ) /* purpose: translate an input character into the character class. * paramtr: input (IN): input character * returns: numerical character class for input character. 
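 *          The classes are: 0 end-of-string, 1 double quote, 2 single
 *          quote, 3 open brace, 4 close brace, 5 dollar, 6 backslash,
 *          7 alphanumeric or underscore, 8 whitespace, 9 anything else,
 *          matching the ten columns of the state tables above.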
*/ { switch ( input ) { case 0: return 0; case '\"': /* " */ return 1; case '\'': /* ' */ return 2; case '{': return 3; case '}': return 4; case '$': return 5; case '\\': return 6; default: return ( (isalnum(input) || input=='_') ? 7 : ( isspace(input) ? 8 : 9 ) ); } } static void internalParse( const char* line, const char** cursor, int* state, Map actionmap, Map statemap, Node** headp, Node** tailp, char** pp, char* buffer, size_t size, char** vp, char* varname, size_t vsize ) { const char* s = *cursor; char* p = *pp; char* v = *vp; Node* head = *headp; Node* tail = *tailp; while ( *state < 32 ) { int charclass = xlate( *s ); int newstate = statemap[*state][charclass]; if ( debug ) debugmsg( "# state=%02d, class=%d, action=%d, newstate=%02d, char=%02X (%c)\n", *state, charclass, actionmap[*state][charclass], newstate, *s, ((*s & 127) >= 32) ? *s : '.' ); switch ( actionmap[*state][charclass] ) { case 0: /* store into buffer */ if ( p-buffer < size ) *p++ = *s; break; case 1: /* conditionally finalize buffer */ *p = '\0'; add( &head, &tail, strdup(buffer) ); p = buffer; break; case 2: /* store variable part */ if ( v-varname < vsize ) *v++ = *s; break; case 3: /* finalize variable name */ resolve( &v, varname, &p, buffer, size ); break; case 4: /* case 3 followed by case 1 */ resolve( &v, varname, &p, buffer, size ); *p = '\0'; add( &head, &tail, strdup(buffer) ); p = buffer; break; case 5: /* skip */ break; case 6: /* translate control escapes */ if ( p-buffer < size ) { char* x = strchr( translation, *s ); *p++ = ( x == NULL ? *s : translationmap[x-translation] ); if ( debug ) debugmsg( "# escape %c -> %d\n", *s, *(p-1) ); } break; case 7: /* case 3 followed by case 0 */ resolve( &v, varname, &p, buffer, size ); if ( p - buffer < size ) *p++ = *s; break; case 8: /* print error message */ if ( newstate > 32 ) fputs( errormessage[newstate-33], stderr ); else debugmsg( "# PARSER ERROR: state=%02d, class=%d, action=%d, newstate=%02d, char=%02X (%c)\n", *state, charclass, 8, newstate, *s, ((*s & 127) >= 32) ? *s : '.' ); break; } ++s; *state = newstate; } /* update various cursors */ *tailp = tail; *headp = head; *pp = p; *vp = v; *cursor = s; } Node* parseCommandLine( const char* line, int* state ) /* purpose: parse a commandline into a list of arguments while * obeying single quotes, double quotes and replacing * environment variable names. * paramtr: line (IN): commandline to parse * state (IO): start state to begin, final state on exit * state==32 is ok, state>32 is an error condition which * lead to a premature exit in parsing. * returns: A (partial on error) list of split arguments. */ { Node* head = NULL; Node* tail = NULL; char buffer[2048]; size_t size = sizeof(buffer); char* p = buffer; char varname[128]; size_t vsize = sizeof(varname); char* v = varname; const char* s = line; /* sanity check */ if ( line == NULL ) return head; /* invoke parsing only once */ internalParse( line, &s, state, actionmap1, statemap1, &head, &tail, &p, buffer, size, &v, varname, vsize ); /* finally */ return head; } Node* parseArgVector( int argc, char* const* argv, int* state ) /* purpose: parse an already split commandline into a list of arguments while * ONLY translating environment variable names that are not prohibited * from translation by some form of quoting (not double quotes, though). 
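 *          Each argument restarts the parser in state 0, so quoting
 *          cannot span argument boundaries.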
 * paramtr: argc (IN): number of arguments in the argument vector
 *          argv (IN): argument vector to parse
 *          state (IO): start state to begin, final state on exit
 *          state==32 is ok, state>32 is an error condition which
 *          leads to a premature exit in parsing.
 * returns: A (partial on error) list of split arguments. The argument number
 *          stays the same, but environment variables were translated.
 */
{
  int i;
  Node* head = NULL;
  Node* tail = NULL;
  char buffer[2048];
  size_t size = sizeof(buffer);
  char* p = buffer;
  char varname[128];
  size_t vsize = sizeof(varname);
  char* v = varname;

  /* sanity check */
  if ( argc == 0 ) return head;

  /* invoke parsing once for each argument */
  for ( i=0; i<argc && *state <= 32; ++i ) {
    const char* s = argv[i];
    *state = 0;
    internalParse( argv[i], &s, state,
                   actionmap2, statemap2,
                   &head, &tail,
                   &p, buffer, size,
                   &v, varname, vsize );
  }

  /* finally */
  return head;
}

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/dfp.png
[binary PNG image data (3412 bytes) omitted -- not representable as text]

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/test-post.c

/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 *    http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include <stdlib.h>

int main( int argc, char* argv[] ) {
  putenv( "A=11" );
  putenv( "B=-1" );
  return 0;
}

pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/test-pre.c

/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 *    http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
*/ #include <stdlib.h> int main( int argc, char* argv[] ) { putenv( "A=15" ); putenv( "B=42" ); return 0; }
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/debug.c0000644000175000017500000000455511757531137022672 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "debug.h" #include "rwio.h" #include <errno.h> #include <string.h> #include <unistd.h> #include <stdio.h> #include <stdlib.h> #include <stdarg.h> static const char* RCS_ID = "$Id$"; ssize_t debugmsg( char* fmt, ... ) /* purpose: create a log line on stderr. * paramtr: fmt (IN): printf-style format string * ... (IN): other arguments according to format * returns: number of bytes written to STDERR via write() */ { ssize_t result; va_list ap; char buffer[4096]; int saverr = errno; va_start( ap, fmt ); vsnprintf( buffer, sizeof(buffer), fmt, ap ); va_end( ap ); result = writen( STDERR_FILENO, buffer, strlen(buffer), 3 ); errno = saverr; return result; } int hexdump( void* area, size_t size ) /* purpose: dump a memory area in old-DOS style hex chars and printable ASCII * paramtr: area (IN): pointer to area start * size (IN): extent of area to print * returns: number of bytes written */ { static const char digit[16] = "0123456789ABCDEF"; char a[82]; unsigned char b[18]; size_t i, j; unsigned char c; ssize_t result = 0; unsigned char* buffer = (unsigned char*) area; for ( i=0; i<size; i+=16 ) { memset( a, 0, sizeof(a) ); memset( b, 0, sizeof(b) ); sprintf( a, "%04zX: ", i ); for ( j=0; j<16 && j+i<size; ++j ) { c = (unsigned char) buffer[i+j]; a[6+j*3] = digit[ c >> 4 ]; a[7+j*3] = digit[ c & 15 ]; a[8+j*3] = ( j == 7 ? '-' : ' ' ); b[j] = (char) (c < 32 || c >= 127 ? '.'
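/* bytes outside the printable range 32..126 render as a dot in the ASCII column */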
: c); } for ( ; j<16; ++j ) { a[6+j*3] = a[7+j*3] = a[8+j*3] = b[j] = ' '; } strncat( a, (char*) b, sizeof(a) ); strncat( a, "\n", sizeof(a) ); result += write( STDERR_FILENO, a, strlen(a) ); } return result; }
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/reassemble-chunks0000755000175000017500000000447011757531137024775 0ustar ryngerynge#!/usr/bin/env perl # # Reassembles kickstart feedback channel messages from chunks # # Requires some Perl XML modules to be post-installed. # # $Id: reassemble-chunks 4535 2011-09-26 22:14:19Z voeckler $ # require 5.005; use strict; use XML::Parser::Expat; %main::data = (); # contents @main::stack = (); # written by s_e, read by c_h $main::tail = "</foo>\n"; $main::head = "<?xml version=\"1.0\"?>\n" . "<!DOCTYPE foo [" . " <!ELEMENT foo (#PCDATA|chunk)*>" . " <!ELEMENT chunk (#PCDATA)>" . " <!ATTLIST chunk channel CDATA \"0\">" . " <!ATTLIST chunk size CDATA #REQUIRED>" . " <!ATTLIST chunk start CDATA #REQUIRED>" . "]>\n" . "<foo>\n"; sub start_element { # purpose: callback for open tag my ($self,$element,%attr) = @_; if ( $element eq 'chunk' ) { push( @main::stack, [ @attr{'channel','start'} ] ); $self->setHandlers( Char => \&content_handler ); } } sub final_element { # purpose: callback for close tag my ($self,$element) = @_; if ( $element eq 'chunk' ) { $self->setHandlers( Char => \&skip_handler ); pop( @main::stack ); } } sub skip_handler { # purpose: generic character handler, ignores text my ($self,$text) = @_; # noop } sub content_handler { # purpose: special character handler, active within chunks my $self = shift; my @tos = @{ $main::stack[ $#main::stack ] }; push( @{$main::data{$tos[0]}{$tos[1]}}, shift() ); } # read contents into $content $/ = undef; # big gulp mode my $fn = shift || die "Usage: $0 filename"; open( XML, '<' . $fn ) || die "open $fn: $!\n"; my $content = <XML>; close XML; # init XML parser my $xml = new XML::Parser::Expat; $xml->setHandlers( Start => \&start_element, End => \&final_element, Char => \&skip_handler ); # artificially introduce a root element to contain all chunks # and any other data the remote scheduler may have messed into the stream. #$content = "<foo>\n" . $content . "</foo>"; $content = $main::head . $content .
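# ($main::head and $main::tail, defined at the top, wrap the stream in a
#  synthetic <foo> root element with a DTD declaring the chunk elements)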
$main::tail; $xml->parsestring($content); undef $content; # now produce content sorted by timestamp foreach my $channel ( sort { $a <=> $b } keys %main::data ) { next if $channel == 0; # ignore system reserved channel #0 foreach my $time ( sort keys %{$main::data{$channel}} ) { print( join('',@{$main::data{$channel}{$time}}) ); } }
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/pegasus-kickstart.c0000644000175000017500000004650711757531137025243 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <sys/types.h> #include <ctype.h> #include <errno.h> #include <stdio.h> #include <time.h> #include <sys/time.h> #include <sys/resource.h> #include <sys/wait.h> #include <signal.h> #include <unistd.h> #include <fcntl.h> #include <string.h> #include <stdlib.h> #include "rwio.h" #include "debug.h" #include "appinfo.h" #include "mysystem.h" #include "mylist.h" #include "invoke.h" #include "tools.h" /* truly shared globals */ int isExtended = 1; /* timestamp format concise or extended */ int isLocal = 1; /* timestamp time zone, UTC or local */ extern int make_application_executable; extern size_t data_section_size; /* module local globals */ static int doFlush = 0; /* apply fsync() on kickstart's stdout if true */ static AppInfo appinfo; /* sigh, needs to be global for atexit handler */ static volatile sig_atomic_t global_no_atexit; static const char* RCS_ID = "$Id: pegasus-kickstart.c 4561 2011-10-04 16:21:37Z voeckler $"; static int obtainStatusCode( int raw ) /* purpose: convert the raw result from wait() into a status code * paramtr: raw (IN): the raw exit code * returns: a cooked exit code */ { int result = 127; if ( raw < 0 ) { /* nothing to do to result */ } else if ( WIFEXITED(raw) ) { result = WEXITSTATUS(raw); } else if ( WIFSIGNALED(raw) ) { result = 128 + WTERMSIG(raw); } else if ( WIFSTOPPED(raw) ) { /* nothing to do to result */ } return result; } static int prepareSideJob( JobInfo* scripting, const char* value ) /* purpose: prepare a side job from environment string * paramtr: scripting (OUT): side job info structure * value (IN): value of the environment setting * returns: 0 if there is no job to execute * 1 if there is a job to run thru mysystem next */ { /* no value, no job */ if ( value == NULL ) return 0; /* set-up scripting structure (which is part of the appinfo) */ initJobInfoFromString( scripting, value ); /* execute process, if
there is any */ if ( scripting->isValid != 1 ) return 0; return 1; } StatInfo* initStatFromList( mylist_p list, size_t* size ) /* purpose: Initialize the statlist and statlist size in appinfo. * paramtr: list (IN): list of filenames * size (OUT): statlist size to be set * returns: a vector of initialized statinfo records, or NULL */ { StatInfo* result = NULL; if ( (*size = list->count) ) { size_t i = 0; mylist_item_p item = list->head; if ( (result = (StatInfo*) calloc( sizeof(StatInfo), *size )) ) { while ( item && i < *size ) { initStatInfoFromName( result+i, item->pfn, O_RDONLY, 0 ); if ( item->lfn != NULL ) addLFNToStatInfo( result+i, item->lfn ); item = item->next; ++i; } } } return result; } #define show( s ) ( s ? s : "(undefined)" ) static const char* xlate( const StatInfo* info ) /* purpose: small helper for helpMe() function. * paramtr: info (IN): is a record about a file * returns: a pointer to the filename, or a local or static buffer w/ info * warning: Returns static buffer pointer */ { static char buffer[16]; switch ( info->source ) { case IS_HANDLE: snprintf( buffer, sizeof(buffer), "&%d", info->file.descriptor ); return buffer; case IS_FIFO: case IS_TEMP: case IS_FILE: return show(info->file.name); default: return "[INVALID]"; } } static void helpMe( const AppInfo* run ) /* purpose: print invocation quick help with currently set parameters and * exit with error condition. * paramtr: run (IN): constitutes the set of currently set parameters. */ { const char* p = strrchr( run->argv[0], '/' ); if ( p ) ++p; else p=run->argv[0]; fprintf( stderr, "%s\n", RCS_ID ); fprintf( stderr, "Usage:\t%s [-i fn] [-o fn] [-e fn] [-l fn] [-n xid] [-N did] \\\n" "\t[-w|-W cwd] [-R res] [-s [l=]p] [-S [l=]p] [-X] [-H] [-L lbl -T iso] \\\n" "\t[-B sz] [-F] (-I fn | app [appflags])\n", p ); fprintf( stderr, " -i fn\tConnects stdin of app to file fn, default is \"%s\".\n", xlate(&run->input) ); fprintf( stderr, " -o fn\tConnects stdout of app to file fn, default is \"%s\".\n", xlate(&run->output) ); fprintf( stderr, " -e fn\tConnects stderr of app to file fn, default is \"%s\".\n", xlate(&run->error) ); fprintf( stderr, " -l fn\tProtocols invocation record into file fn, default is \"%s\".\n", xlate(&run->logfile) ); fprintf( stderr, " -n xid\tProvides the TR name, default is \"%s\".\n" " -N did\tProvides the DV name, default is \"%s\".\n" " -R res\tReflects the resource handle into record, default is \"%s\".\n" " -B sz\tResizes the data section size for stdio capture, default is %zu.\n", show(run->xformation), show(run->derivation), show(run->sitehandle), data_section_size ); fprintf( stderr, " -L lbl\tReflects the workflow label into record, no default.\n" " -T iso\tReflects the workflow time stamp into record, no default.\n" " -H\tOmit <?xml ...?> header and <resource|environment> from record.\n" " -I fn\tReads job and args from the file fn, one arg per line.\n" " -V\tDisplays the version and exit.\n" " -X\tMakes the application executable, no matter what.\n" " -w dir\tSets a different working directory dir for jobs.\n" " -W dir\tLike -w, but also creates the directory dir if necessary.\n" " -S l=p\tProvides filename pairs to stat after start, multi-option.\n" " \tIf the arg is prefixed with '@', it is a list-of-filenames file.\n" " -s l=p\tProvides filename pairs to stat before exit, multi-option.\n" " \tIf the arg is prefixed with '@', it is a list-of-filenames file.\n" " -F\tAttempt to fsync kickstart's stdout at exit (should not be necessary).\n" ); /* avoid printing of results in exit 
handler */ ((AppInfo*) run)->isPrinted = 1; /* exit with error condition */ exit(127); } static void finish( void ) { if ( ! global_no_atexit ) { /* log the output here in case of abnormal termination */ if ( ! appinfo.isPrinted ) { printAppInfo( &appinfo ); } deleteAppInfo( &appinfo ); } /* PM-466 debugging */ if ( doFlush ) { struct timeval start, final; int status; now(&start); status = fsync( STDOUT_FILENO ); now(&final); debugmsg( "# fsync(%d)=%d (errno=%d) in %.3f s\n", STDOUT_FILENO, status, errno, mymaketime(final)-mymaketime(start) ); } nfs_sync( STDERR_FILENO, DEFAULT_SYNC_IDLE ); } #ifdef DEBUG_ARGV static void show_args( const char* prefix, char** argv, int argc ) { int i; debugmsg( "argc=%d\n", argc ); for ( i=0; i<argc; ++i ) debugmsg( "%s%2d: %s\n", (prefix ? prefix : ""), i, (argv[i] ? argv[i] : "(null)" ) ); } #endif static int readFromFile( const char* fn, char*** argv, int* argc, int* i, int j ) { size_t newc = 2; size_t index = 0; char** newv = calloc( sizeof(char*), newc+1 ); if ( expand_arg( fn, &newv, &index, &newc, 0 ) == 0 ) { #if 0 /* insert newv into argv at position i */ char** result = calloc( sizeof(char*), j + index + 1 ); memcpy( result, *argv, sizeof(char*) * j ); memcpy( result+j, newv, sizeof(char*) * index ); *argv = result; *argc = j + index; *i = j-1; #else /* replace argv with newv */ *argv = newv; *argc = index; *i = -1; #endif #ifdef DEBUG_ARGV show_args( "result", *argv, *argc ); #endif return 0; } else { /* error parsing */ return -1; } } static void handleOutputStream( StatInfo* stream, const char* temp, int std_fileno ) /* purpose: Initialize stdout or stderr from commandline arguments * paramtr: stream (IO): pointer to the statinfo record for stdout or stderr * temp (IN): command-line argument * std_fileno (IN): STD(OUT|ERR)_FILENO matching to the stream */ { if ( temp[0] == '-' && temp[1] == '\0' ) { initStatInfoFromHandle( stream, std_fileno ); } else if ( temp[0] == '!' ) { if ( temp[1] == '^' ) { initStatInfoFromName( stream, temp+2, O_WRONLY | O_CREAT | O_APPEND, 6 ); } else { initStatInfoFromName( stream, temp+1, O_WRONLY | O_CREAT | O_APPEND, 2 ); } } else if ( temp[0] == '^' ) { if ( temp[1] == '!' ) { initStatInfoFromName( stream, temp+2, O_WRONLY | O_CREAT | O_APPEND, 6 ); } else { initStatInfoFromName( stream, temp+1, O_WRONLY | O_CREAT, 7 ); } } else { initStatInfoFromName( stream, temp, O_WRONLY | O_CREAT, 3 ); } } extern char** environ; static int areWeSane( const char* what ) /* purpose: count the number of occurances of a specific environment variable * paramtr: what (IN): environment variable name * returns: the count. * warning: Produces a warning on stderr if count==0 */ { size_t len = strlen(what); int count = 0; char** s = environ; while ( s && *s ) { if ( strncmp( *s, what, len ) == 0 && (*s)[len] == '=' ) count++; ++s; } if ( ! count ) debugmsg( "Warning! Did not find %s in environment!\n", what ); return count; } static char* noquote( char* s ) { size_t len; /* sanity check */ if ( ! s ) return NULL; else if ( ! 
*s ) return s; else len = strlen(s); if ( ( s[0] == '\'' && s[len-1] == '\'' ) || ( s[0] == '"' && s[len-1] == '"' ) ) { char* tmp = calloc( sizeof(char), len ); memcpy( tmp, s+1, len-2 ); return tmp; } else { return s; } } int main( int argc, char* argv[] ) { size_t m, cwd_size = getpagesize(); int status, result; int i, j, keeploop; int createDir = 0; #if 0 long fsflags = -1; #endif const char* temp; const char* workdir = NULL; mylist_t initial; mylist_t final; /* premature init with defaults */ if ( mylist_init( &initial ) ) return 43; if ( mylist_init( &final ) ) return 43; initAppInfo( &appinfo, argc, argv ); #if 0 debugmsg( "# appinfo=%d, jobinfo=%d, statinfo=%d, useinfo=%d\n", sizeof(AppInfo), sizeof(JobInfo), sizeof(StatInfo), sizeof(struct rusage) ); #endif #if 0 /* NEW: 2011-08-19: PM-466 -- best effort add O_SYNC flag to stdout */ /* Handled differently in finish() above using fsync() on stdout */ if ( (fsflags=fcntl( STDOUT_FILENO, F_GETFL )) != -1 ) { fcntl( STDOUT_FILENO, F_SETFL, ( fsflags | O_SYNC ) ); } #endif /* register emergency exit handler */ if ( atexit( finish ) == -1 ) { appinfo.application.status = -1; appinfo.application.saverr = errno; fputs( "unable to register an exit handler\n", stderr ); return 127; } else { global_no_atexit = 0; } /* no arguments whatsoever, print help and exit */ if ( argc == 1 ) helpMe( &appinfo ); /* * read commandline arguments * DO NOT use getopt to avoid cluttering flags to the application */ for ( keeploop=i=1; i < argc && argv[i][0] == '-' && keeploop; ++i ) { j = i; switch ( argv[i][1] ) { case 'B': temp = argv[i][2] ? &argv[i][2] : argv[++i]; m = strtoul( temp, 0, 0 ); /* limit max <data> size to 64 MB for each. */ if ( m < 67108863ul ) data_section_size = m; break; #if 0 case 'c': if ( appinfo.channel.source != IS_INVALID ) deleteStatInfo( &appinfo.channel ); temp = argv[i][2] ? &argv[i][2] : argv[++i]; initStatInfoAsFifo( &appinfo.channel, temp, "GRIDSTART_CHANNEL" ); break; #endif case 'e': if ( appinfo.error.source != IS_INVALID ) deleteStatInfo( &appinfo.error ); temp = ( argv[i][2] ? &argv[i][2] : argv[++i] ); handleOutputStream( &appinfo.error, temp, STDERR_FILENO ); break; case 'h': case '?': helpMe( &appinfo ); break; /* unreachable */ case 'V': puts( RCS_ID ); appinfo.isPrinted=1; return 0; case 'i': if ( appinfo.input.source != IS_INVALID ) deleteStatInfo( &appinfo.input ); temp = argv[i][2] ? &argv[i][2] : argv[++i]; if ( temp[0] == '-' && temp[1] == '\0' ) initStatInfoFromHandle( &appinfo.input, STDIN_FILENO ); else initStatInfoFromName( &appinfo.input, temp, O_RDONLY, 2 ); break; case 'H': appinfo.noHeader++; break; case 'F': doFlush++; break; case 'I': /* invoke application and args from given file */ temp = argv[i][2] ? &argv[i][2] : argv[++i]; if ( readFromFile( temp, &argv, &argc, &i, j ) == -1 ) { int saverr = errno; debugmsg( "ERROR: While parsing -I %s: %d: %s\n", temp, errno, strerror(saverr) ); appinfo.application.prefix = strerror(saverr); appinfo.application.status = -1; return 127; } keeploop = 0; break; case 'l': if ( appinfo.logfile.source != IS_INVALID ) deleteStatInfo( &appinfo.logfile ); temp = argv[i][2] ? &argv[i][2] : argv[++i]; if ( temp[0] == '-' && temp[1] == '\0' ) initStatInfoFromHandle( &appinfo.logfile, STDOUT_FILENO ); else initStatInfoFromName( &appinfo.logfile, temp, O_WRONLY | O_CREAT | O_APPEND, 2 ); break; case 'L': appinfo.wf_label = noquote( argv[i][2] ? &argv[i][2] : argv[++i] ); break; case 'n': appinfo.xformation = noquote( argv[i][2] ? 
&argv[i][2] : argv[++i] ); break; case 'N': appinfo.derivation = noquote( argv[i][2] ? &argv[i][2] : argv[++i] ); break; case 'o': if ( appinfo.output.source != IS_INVALID ) deleteStatInfo( &appinfo.output ); temp = ( argv[i][2] ? &argv[i][2] : argv[++i] ); handleOutputStream( &appinfo.output, temp, STDOUT_FILENO ); break; case 'R': appinfo.sitehandle = noquote( argv[i][2] ? &argv[i][2] : argv[++i] ); break; case 'S': temp = argv[i][2] ? &argv[i][2] : argv[++i]; if ( temp[0] == '@' ) { /* list-of-filenames file */ if ( (result=mylist_fill( &initial, temp+1 )) ) debugmsg( "ERROR: initial %s: %d: %s\n", temp+1, result, strerror(result) ); } else { /* direct filename */ if ( (result=mylist_add( &initial, temp )) ) debugmsg( "ERROR: initial %s: %d: %s\n", temp, result, strerror(result) ); } break; case 's': temp = argv[i][2] ? &argv[i][2] : argv[++i]; if ( temp[0] == '@' ) { /* list-of-filenames file */ if ( (result=mylist_fill( &final, temp+1 )) ) debugmsg( "ERROR: final %s: %d: %s\n", temp+1, result, strerror(result) ); } else { /* direct filename */ if ( (result=mylist_add( &final, temp )) ) debugmsg( "ERROR: final %s: %d: %s\n", temp, result, strerror(result) ); } break; case 'T': appinfo.wf_stamp = noquote( argv[i][2] ? &argv[i][2] : argv[++i] ); break; case 'w': workdir = noquote( argv[i][2] ? &argv[i][2] : argv[++i] ); createDir = 0; break; case 'W': workdir = noquote( argv[i][2] ? &argv[i][2] : argv[++i] ); createDir = 1; break; case 'X': make_application_executable++; break; case '-': keeploop = 0; break; default: i -= 1; keeploop = 0; break; } } /* sanity check -- for FNAL/ATLAS */ areWeSane("GRIDSTART_CHANNEL"); /* initialize app info and register CLI parameters with it */ if ( argc-i > 0 ) { /* there is an application to run */ initJobInfo( &appinfo.application, argc-i, argv+i ); /* is there really something to run? */ if ( appinfo.application.isValid != 1 ) { appinfo.application.status = -1; switch ( appinfo.application.isValid ) { case 2: /* permissions? */ appinfo.application.saverr = EACCES; break; default: /* no such file? */ appinfo.application.saverr = ENOENT; break; } fputs( "FATAL: The main job specification is invalid or missing.\n", stderr ); return 127; } } else { /* there is not even an application to run */ helpMe( &appinfo ); } /* make/change into new workdir NOW */ REDIR: if ( workdir != NULL && chdir(workdir) != 0 ) { /* shall we try to make the directory */ if ( createDir ) { createDir = 0; /* once only */ if ( mkdir( workdir, 0777 ) == 0 ) { /* If this causes an infinite loop, your file-system is * seriously whacked out -- run fsck or equivalent. 
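 * (createDir was cleared above, so this mkdir-and-retry detour is taken at most once)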
*/ goto REDIR; } /* else */ appinfo.application.saverr = errno; debugmsg( "Unable to mkdir %s: %d: %s\n", workdir, errno, strerror(errno) ); appinfo.application.prefix = "Unable to mkdir: "; appinfo.application.status = -1; return 127; } /* unable to use alternate workdir */ appinfo.application.saverr = errno; debugmsg( "Unable to chdir %s: %d: %s\n", workdir, errno, strerror(errno) ); appinfo.application.prefix = "Unable to chdir: "; appinfo.application.status = -1; return 127; } /* record the current working directory */ appinfo.workdir = calloc(cwd_size,sizeof(char)); if ( getcwd( appinfo.workdir, cwd_size ) == NULL && errno == ERANGE ) { /* error allocating sufficient space */ free((void*) appinfo.workdir ); appinfo.workdir = NULL; } /* update stdio and logfile *AFTER* we arrived in working directory */ updateStatInfo( &appinfo.input ); updateStatInfo( &appinfo.output ); updateStatInfo( &appinfo.error ); updateStatInfo( &appinfo.logfile ); /* stat pre files */ appinfo.initial = initStatFromList( &initial, &appinfo.icount ); mylist_done( &initial ); /* remember environment that all jobs will see */ if ( ! appinfo.noHeader ) envIntoAppInfo( &appinfo, environ ); /* Our own initially: an independent setup job */ if ( prepareSideJob( &appinfo.setup, getenv("GRIDSTART_SETUP") ) ) mysystem( &appinfo, &appinfo.setup, environ ); /* possible prae job */ result = 0; if ( prepareSideJob( &appinfo.prejob, getenv("GRIDSTART_PREJOB") ) ) { /* there is a prejob to be executed */ status = mysystem( &appinfo, &appinfo.prejob, environ ); result = obtainStatusCode(status); } /* start main application */ if ( result == 0 ) { status = mysystem( &appinfo, &appinfo.application, environ ); result = obtainStatusCode(status); } else { /* actively invalidate main record */ appinfo.application.isValid = 0; } /* possible post job */ if ( result == 0 ) { if ( prepareSideJob( &appinfo.postjob, getenv("GRIDSTART_POSTJOB") ) ) { status = mysystem( &appinfo, &appinfo.postjob, environ ); result = obtainStatusCode(status); } } /* Java's finally: an independent clean-up job */ if ( prepareSideJob( &appinfo.cleanup, getenv("GRIDSTART_CLEANUP") ) ) mysystem( &appinfo, &appinfo.cleanup, environ ); /* stat post files */ appinfo.final = initStatFromList( &final, &appinfo.fcount ); mylist_done( &final ); /* append results to log file */ printAppInfo( &appinfo ); /* clean up and close FDs */ global_no_atexit = 1; /* disable atexit handler */ deleteAppInfo( &appinfo ); /* force NFS sync for gatekeeper */ #if 0 /* FIXME: No locking on stdout, because printAppInfo will have done so */ nfs_sync( STDOUT_FILENO, DEFAULT_SYNC_IDLE ); #endif #if 0 /* FIXME: No locking on stderr, because atexit-handler finish() does it. 
*/ nfs_sync( STDERR_FILENO, DEFAULT_SYNC_IDLE ); #endif /* done */ return result; }
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/make.SunOS0000755000175000017500000000036211757531137023301 0ustar ryngerynge#!/bin/sh gmake distclean gmake EXTRACFLAGS='$(V7FLAGS)' pegasus-kickstart mv pegasus-kickstart pegasus-kickstart.sparcv7 gmake clean gmake EXTRACFLAGS='$(V9FLAGS)' pegasus-kickstart mv pegasus-kickstart pegasus-kickstart.sparcv9 gmake clean
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/mysystem.c0000644000175000017500000001567211757531137023470 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved.
*/ #include <sys/types.h> #include <errno.h> #include <sys/resource.h> #include <sys/wait.h> #include <signal.h> #include <unistd.h> #include <string.h> #include <stdlib.h> #include <fcntl.h> #include <stdio.h> #include "debug.h" #include "tools.h" #include "appinfo.h" #include "statinfo.h" #include "event.h" #include "mysystem.h" static const char* RCS_ID = "$Id: mysystem.c 4535 2011-09-26 22:14:19Z voeckler $"; #include "mysignal.h" typedef struct { volatile sig_atomic_t count; /* OUT: number of signals seen */ volatile sig_atomic_t done; /* OUT: 0: to be done, 1: child reaped */ volatile int error; /* OUT: errno when something went bad */ JobInfo* job; /* IO: data repository */ } SignalHandlerCommunication; static SignalHandlerCommunication child; #ifdef DEBUG_WAIT static pid_t mywait4( pid_t wpid, int* status, int options, struct rusage* rusage ) { pid_t result = wait4( wpid, status, options, rusage ); debugmsg( "# wait4(%d,%p=%d,%d,%p) = %d\n", wpid, status, *status, options, rusage, result ); return result; } #else #define mywait4(a,b,c,d) wait4(a,b,c,d) #endif /* DEBUG_WAIT */ static SIGRETTYPE sig_child( SIGPARAM signo ) { int rc = -1; ++child.count; /* There have been known cases where Linux delivers signals twice * that may have been sent only once, grrrr. */ #ifdef DEBUG_WAIT debugmsg( "# child.count == %d\n", child.count ); #endif /* DEBUG_WAIT */ if ( child.job != NULL ) { int saverr = errno; errno = 0; #ifdef DEBUG_WAIT fputs( "# child.job != NULL\n", stderr ); #endif /* DEBUG_WAIT */ /* WARN: wait4 is not POSIX.1 reentrant safe */ while ( (rc=mywait4( child.job->child, &child.job->status, WNOHANG, &child.job->use )) < 0 ) { if ( errno != EINTR ) { child.error = errno; child.job->status = -42; break; } } errno = saverr; } #ifdef DEBUG_WAIT debugmsg( "# child.done := (%d != 0) => %d\n", rc, (rc != 0) ); #endif /* DEBUG_WAIT */ /* once set, never reset */ if ( ! child.done ) child.done = ( rc != 0 ); } static SIGRETTYPE sig_propagate( SIGPARAM signo ) /* purpose: propagate the signal to active children */ { if ( child.job != NULL ) kill( child.job->child, signo ); } int mysystem( AppInfo* appinfo, JobInfo* jobinfo, char* envp[] ) /* purpose: emulate the system() libc call, but save utilization data. * paramtr: appinfo (IO): shared record of information * isPrinted (IO): reset isPrinted in child process! * input (IN): connect to stdin or share * output (IN): connect to stdout or share * error (IN): connect to stderr or share * jobinfo (IO): updated record of job-specific information * argv (IN): assembled commandline * child (OUT): pid of child process * status (OUT): also returned as function result * saverr (OUT): will be set to value of errno * start (OUT): will be set to startup time * final (OUT): will be set to finish time after reap * use (OUT): rusage record from application call * envp (IN): vector with the parent's environment * returns: -1: failure in mysystem processing, check errno * 126: connecting child to its new stdout failed * 127: execve() call failed * else: status of child */ { struct sigaction ignore, saveintr, savequit; struct sigaction new_child, old_child; /* sanity checks first */ if ( !
jobinfo->isValid ) { errno = ENOEXEC; /* no executable */ return -1; } memset( &ignore, 0, sizeof(ignore) ); ignore.sa_handler = SIG_IGN; sigemptyset( &ignore.sa_mask ); ignore.sa_flags = 0; if ( sigaction( SIGINT, &ignore, &saveintr ) < 0 ) return -1; if ( sigaction( SIGQUIT, &ignore, &savequit ) < 0 ) return -1; /* install SIGCHLD handler */ memset( &child, 0, sizeof(child) ); child.job = jobinfo; memset( &new_child, 0, sizeof(new_child) ); new_child.sa_handler = sig_child; sigemptyset( &new_child.sa_mask ); new_child.sa_flags = SA_NOCLDSTOP; #ifdef SA_INTERRUPT new_child.sa_flags |= SA_INTERRUPT; /* SunOS, obsoleted by POSIX */ #endif if ( sigaction( SIGCHLD, &new_child, &old_child ) < 0 ) return -1; /* start wall-clock */ now( &(jobinfo->start) ); if ( (jobinfo->child=fork()) < 0 ) { /* no more process table space */ jobinfo->status = -1; } else if ( jobinfo->child == 0 ) { /* child */ appinfo->isPrinted=1; /* connect jobs stdio */ if ( forcefd( &appinfo->input, STDIN_FILENO ) ) _exit(126); if ( forcefd( &appinfo->output, STDOUT_FILENO ) ) _exit(126); if ( forcefd( &appinfo->error, STDERR_FILENO ) ) _exit(126); /* undo signal handlers */ sigaction( SIGINT, &saveintr, NULL ); sigaction( SIGQUIT, &savequit, NULL ); sigaction( SIGCHLD, &old_child, NULL ); execve( jobinfo->argv[0], (char* const*) jobinfo->argv, envp ); _exit(127); /* executed in child process */ } else { /* parent */ int saverr; errno = 0; /* insert event loop here */ while ( ! child.done ) eventLoop( STDERR_FILENO, &appinfo->channel, &child.done ); /* sanity check */ saverr = errno; if ( kill( jobinfo->child, 0 ) == 0 ) { debugmsg( "ERROR: job %d is still running!\n", jobinfo->child ); if ( ! child.error ) child.error = EINPROGRESS; } errno = child.error ? child.error : saverr; } /* save any errors before anybody overwrites this */ jobinfo->saverr = errno; /* move closer towards signal occurrence -- ward off further signals */ sigaction( SIGCHLD, &old_child, NULL ); /* stop wall-clock */ now( &(jobinfo->finish) ); /* ignore errors on these, too.
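 * (restoring the pre-fork SIGINT/SIGQUIT dispositions is best effort here)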
*/ sigaction( SIGINT, &saveintr, NULL ); sigaction( SIGQUIT, &savequit, NULL ); /* only after handler was deactivated */ if ( child.count != 1 || child.error ) { char temp[256]; snprintf( temp, sizeof(temp), "%d x SIGCHLD; %d: %s", child.count, child.error, strerror(child.error) ); send_message( STDERR_FILENO, temp, strlen(temp), 0 ); } /* finalize */ return jobinfo->status; }
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/try-parse.c0000644000175000017500000000106411757531137023522 0ustar ryngerynge#include <stdio.h> #include "parse.h" int main( int argc, char* argv[] ) { Node* node, *head = NULL; int i, state = 0; int l = 1; if ( argc >= 100 ) l = 3; else if ( argc >= 10 ) l = 2; for ( i=1; i<argc; ++i ) printf( "<%*d<<%s<<\n", l, i, argv[i] ); putchar('\n'); i = 1; for ( node = head = parseArgVector( argc-1, argv+1, &state ); node != NULL; node = node->next ) { printf( ">%*d>>%s>>\n", l, i, node->data ); i++; } putchar('\n'); printf( "final state (32==success): %d\n", state ); return ( state & 0x1F ); }
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/tools.h0000644000175000017500000001052211757531137022740 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _TOOLS_H #define _TOOLS_H #include <sys/types.h> #include <sys/time.h> #include <time.h> extern void full_append( char* buffer, const size_t size, size_t* len, const char* msg, size_t msglen ); /* purpose: append a binary message to the buffer while maintaining length * information.
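 * example: hypothetical usage, not from the original source:
 *          char buf[256]; size_t len = 0;
 *          full_append( buf, sizeof(buf), &len, data, datalen );
 *          append( buf, sizeof(buf), &len, " tail" );
 *          (append() is the strlen-based convenience macro defined below.)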
* paramtr: buffer (IO): buffer area to put strings into * size (IN): capacity of buffer * len (IO): current end of buffer, updated on return * msg (IN): message to append to buffer * mlen (IN): length of message area to append * returns: nada */ extern void xmlquote( char* buffer, const size_t size, size_t* len, const char* msg, size_t msglen ); /* purpose: append a possibly binary message to the buffer while XML * quoting and maintaining buffer length information. * paramtr: buffer (IO): buffer area to put strings into * size (IN): capacity of buffer * len (IO): current end of buffer, updated on return * msg (IN): message to append to buffer * mlen (IN): length of message area to append * returns: nada */ #if 0 extern void append( char* buffer, const size_t size, size_t* len, const char* msg ); /* purpose: append a string to the buffer while maintaining length information. * paramtr: buffer (IO): buffer area to put strings into * size (IN): capacity of buffer * len (IO): current end of buffer, updated on return * msg (IN): message to append to buffer */ #else #define append( B, S, L, M ) full_append( B, S, L, M, strlen(M) ) #endif extern void myprint( char* buffer, const size_t size, size_t* len, const char* fmt, ... ); /* purpose: format a string at the end of a buffer while maintaining length information. * paramtr: buffer (IO): buffer area to put strings into * size (IN): capacity of buffer * len (IO): current end of buffer, updated on return * fmt (IN): printf compatible format * ... (IN): parameters to format * returns: nada */ extern size_t mydatetime( char* buffer, const size_t size, size_t* offset, int isLocal, int isExtended, time_t seconds, long micros ); /* purpose: append an ISO timestamp to a buffer * paramtr: buffer (IO): buffer area to store things into * size (IN): capacity of buffer * offset (IO): current position of end of meaningful buffer * isLocal (IN): flag, if 0 use UTC, otherwise use local time * isExtd (IN): flag, if 0 use concise format, otherwise extended * seconds (IN): tv_sec part of timeval * micros (IN): if negative, don't show micros. * returns: number of characters added */ extern double mymaketime( const struct timeval t ); /* purpose: convert a structured timeval into seconds with fractions. * paramtr: t (IN): a timeval as returned from gettimeofday(). * returns: the number of seconds with microsecond fraction. */ extern void now( struct timeval* t ); /* purpose: capture a point in time with microsecond extension * paramtr: t (OUT): where to store the captured time */ extern const char* getTempDir( void ); /* purpose: determine a suitable directory for temporary files. * returns: a string with a temporary directory, may still be NULL. */ extern char* sizer( char* buffer, size_t capacity, size_t vsize, const void* value ); /* purpose: format an unsigned integer of less-known size. Note that
Note that * 64bit ints on 32bit systems need %llu, but 64/64 uses %lu * paramtr: buffer (IO): area to output into * capacity (IN): extent of the buffer to store things into * vsize (IN): size of the value * value (IN): value to format * returns: buffer */ #endif /* _TOOLS_H */ ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/jobinfo.c����������������������������������������0000644�0001750�0001750�00000027742�11757531137�023235� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "getif.h" #include "debug.h" #include "tools.h" #include "useinfo.h" #include "jobinfo.h" #include <ctype.h> #include <errno.h> #include <stdlib.h> #include <stdio.h> #include <string.h> #include <stdarg.h> #include <sys/wait.h> #include <unistd.h> #include <signal.h> #include <fcntl.h> #include "parse.h" extern int isExtended; /* timestamp format concise or extended */ extern int isLocal; /* timestamp time zone, UTC or local */ static const char* RCS_ID = "$Id: jobinfo.c 4535 2011-09-26 22:14:19Z voeckler $"; #ifdef sun #define sys_siglist _sys_siglist #endif #if defined(AIX) extern const char* const sys_siglist[64]; #endif #ifndef USE_PARSE static size_t countArguments( const char* cmdline ) /* purpose: count the number of arguments in a commandline * warning: any quoting or variable substitution is ignored * paramtr: cmdline (IN): string containing the concatenated commandline * returns: the number of arguments, 0 for an empty commandline. */ { size_t result = 0; const char* t, *s = cmdline; /* sanity check */ if ( cmdline == NULL || *cmdline == '\0' ) return 0; /* skip possible initial whitespace */ while ( *s && isspace(*s) ) ++s; while ( *s ) { /* save start position */ t = s; /* advance non whitespace characters */ while ( *s && ! isspace(*s) ) ++s; /* count only full arguments */ if ( s != t ) result++; /* move over whitespace */ while ( *s && isspace(*s) ) ++s; } return result; } #endif /* ! 
USE_PARSE */ void initJobInfoFromString( JobInfo* jobinfo, const char* commandline ) /* purpose: initialize the data structure with default * paramtr: jobinfo (OUT): initialized memory block * commandline (IN): commandline concatenated string to separate */ { size_t i; char* t; #ifdef USE_PARSE int state = 0; Node* head = parseCommandLine( commandline, &state ); #else char* s; #endif /* reset everything */ memset( jobinfo, 0, sizeof(JobInfo) ); #ifdef USE_PARSE /* only continue in ok state AND if there is anything to do */ if ( state == 32 && head ) { size_t size, argc = size = 0; Node* temp = head; while ( temp ) { size += (strlen(temp->data) + 1); argc++; temp = temp->next; } /* prepare copy area */ jobinfo->copy = (char*) malloc( size+argc ); /* prepare argument vector */ jobinfo->argc = argc; jobinfo->argv = (char* const*) calloc( argc+1, sizeof(char*) ); /* copy list while updating argument vector and freeing lose arguments */ t = jobinfo->copy; for ( i=0; i < argc && (temp=head); ++i ) { /* append string to copy area */ size_t len = strlen(temp->data)+1; memcpy( t, temp->data, len ); /* I hate nagging compilers which think they know better */ memcpy( (void*) &jobinfo->argv[i], &t, sizeof(char*) ); t += len; /* clear parse list while we are at it */ head = temp->next; free((void*) temp->data ); free((void*) temp ); } } /* free list of (partial) argv */ if ( head ) deleteNodes(head); #else /* activate copy area */ jobinfo->copy = strdup( commandline ? commandline : "" ); /* prepare argv buffer for arguments */ jobinfo->argc = countArguments(commandline); jobinfo->argv = (char* const*) calloc( 1+jobinfo->argc, sizeof(char*) ); /* copy argument positions into pointer vector */ for ( i=0, s=jobinfo->copy; *s && i < jobinfo->argc; i++ ) { while ( *s && isspace(*s) ) *s++ = '\0'; t = s; while ( *s && ! 
isspace(*s) ) ++s; jobinfo->argv[i] = t; } /* remove possible trailing whitespaces */ while ( *s && isspace(*s) ) *s++ = '\0'; /* finalize vector */ jobinfo->argv[i] = NULL; #endif /* this is a valid (and initialized) entry */ if ( jobinfo->argc > 0 ) { /* check out path to job */ char* realpath = findApp( jobinfo->argv[0] ); if ( realpath ) { /* I hate nagging compilers which think they know better */ memcpy( (void*) &jobinfo->argv[0], &realpath, sizeof(char*) ); jobinfo->isValid = 1; } else { jobinfo->status = -127; jobinfo->saverr = errno; jobinfo->isValid = 2; } /* initialize some data for myself */ initStatInfoFromName( &jobinfo->executable, jobinfo->argv[0], O_RDONLY, 0 ); } } void initJobInfo( JobInfo* jobinfo, int argc, char* const* argv ) /* purpose: initialize the data structure with defaults * paramtr: jobinfo (OUT): initialized memory block * argc (IN): adjusted argc string (maybe from main()) * argv (IN): adjusted argv string to point to executable */ { #ifdef USE_PARSE size_t i; char* t; int state = 0; Node* head = parseArgVector( argc, argv, &state ); #endif /* initialize memory */ memset( jobinfo, 0, sizeof(JobInfo) ); #ifdef USE_PARSE /* only continue in ok state AND if there is anything to do */ if ( state == 32 && head ) { size_t size, argc = size = 0; Node* temp = head; while ( temp ) { size += (strlen(temp->data) + 1); argc++; temp = temp->next; } /* prepare copy area */ jobinfo->copy = (char*) malloc( size+argc ); /* prepare argument vector */ jobinfo->argc = argc; jobinfo->argv = (char* const*) calloc( argc+1, sizeof(char*) ); /* copy list while updating argument vector and freeing lose arguments */ t = jobinfo->copy; for ( i=0; i < argc && (temp=head); ++i ) { /* append string to copy area */ size_t len = strlen(temp->data)+1; memcpy( t, temp->data, len ); /* I hate nagging compilers which think they know better */ memcpy( (void*) &jobinfo->argv[i], &t, sizeof(char*) ); t += len; /* clear parse list while we are at it */ head = temp->next; free((void*) temp->data ); free((void*) temp ); } } /* free list of (partial) argv */ if ( head ) deleteNodes(head); #else /* this may require overwriting after CLI parsing */ jobinfo->argc = argc; jobinfo->argv = argv; #endif /* this is a valid (and initialized) entry */ if ( jobinfo->argc > 0 ) { /* check out path to job */ char* realpath = findApp( jobinfo->argv[0] ); if ( realpath ) { /* I hate nagging compilers which think they know better */ memcpy( (void*) &jobinfo->argv[0], &realpath, sizeof(char*) ); jobinfo->isValid = 1; } else { jobinfo->status = -127; jobinfo->saverr = errno; jobinfo->isValid = 2; } /* initialize some data for myself */ initStatInfoFromName( &jobinfo->executable, jobinfo->argv[0], O_RDONLY, 0 ); } } int printXMLJobInfo( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const JobInfo* job ) /* purpose: format the job information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * job (IN): job info to print. * returns: number of characters put into buffer (buffer length) */ { int status; /* $#@! broken Debian headers */ /* sanity check */ if ( ! 
job->isValid ) return *len; /* start tag with indentation */ myprint( buffer, size, len, "%*s<%s start=\"", indent, "", tag ); /* start time and duration */ mydatetime( buffer, size, len, isLocal, isExtended, job->start.tv_sec, job->start.tv_usec ); myprint( buffer, size, len, "\" duration=\"%.3f\"", mymaketime(job->finish) - mymaketime(job->start) ); /* optional attribute: application process id */ if ( job->child != 0 ) myprint( buffer, size, len, " pid=\"%d\"", job->child ); /* finalize open tag of element */ append( buffer, size, len, ">\n" ); /* <usage> */ printXMLUseInfo( buffer, size, len, indent+2, "usage", &job->use ); #ifdef USE_MEMINFO /* <meminfo> */ printXMLMemInfo( buffer, size, len, indent+2, "peak", &job->peakmem ); #endif /* USE_MEMINFO */ /* <status>: open tag */ myprint( buffer, size, len, "%*s<status raw=\"%d\">", indent+2, "", job->status ); /* <status>: cases of completion */ status = (int) job->status; /* $#@! broken Debian headers */ if ( job->status < 0 ) { /* <failure> */ myprint( buffer, size, len, "<failure error=\"%d\">%s%s</failure>", job->saverr, job->prefix && job->prefix[0] ? job->prefix : "", strerror(job->saverr) ); } else if ( WIFEXITED(status) ) { myprint( buffer, size, len, "<regular exitcode=\"%d\"/>", WEXITSTATUS(status) ); } else if ( WIFSIGNALED(status) ) { /* result = 128 + WTERMSIG(status); */ myprint( buffer, size, len, "<signalled signal=\"%u\"", WTERMSIG(status) ); #ifdef WCOREDUMP myprint( buffer, size, len, " corefile=\"%s\"", WCOREDUMP(status) ? "true" : "false" ); #endif myprint( buffer, size, len, ">%s</signalled>", #if defined(CYGWINNT50) || defined(CYGWINNT51) "unknown" #else sys_siglist[WTERMSIG(status)] #endif ); } else if ( WIFSTOPPED(status) ) { myprint( buffer, size, len, "<suspended signal=\"%u\">%s</suspended>", WSTOPSIG(status), #if defined(CYGWINNT50) || defined(CYGWINNT51) "unknown" #else sys_siglist[WSTOPSIG(status)] #endif ); } /* FIXME: else? */ append( buffer, size, len, "</status>\n" ); /* <executable> */ printXMLStatInfo( buffer, size, len, indent+2, "statcall", NULL, &job->executable ); #ifdef WITH_NEW_ARGS /* alternative 1: new-style <argument-vector> */ myprint( buffer, size, len, "%*s<argument-vector", indent+2, "" ); if ( job->argc == 1 ) { /* empty element */ append( buffer, size, len, "/>\n" ); } else { /* content are the CLI args */ int i=1; append( buffer, size, len, ">\n" ); for ( ; i < job->argc; ++i ) { myprint( buffer, size, len, "%*s<arg nr=\"%d\">", indent+4, "", i ); xmlquote( buffer, size, len, job->argv[i], strlen(job->argv[i]) ); append( buffer, size, len, "</arg>\n" ); } /* end tag */ myprint( buffer, size, len, "%*s</argument-vector>\n", indent+2, "" ); } #else /* alternative 2: old-style <arguments> */ myprint( buffer, size, len, "%*s<arguments", indent+2, "" ); if ( job->argc == 1 ) { /* empty element */ append( buffer, size, len, "/>\n" ); } else { /* content are the CLI args */ int i=1; append( buffer, size, len, ">" ); while ( i < job->argc ) { xmlquote( buffer, size, len, job->argv[i], strlen(job->argv[i]) ); if ( ++i < job->argc ) append( buffer, size, len, " " ); } /* end tag */ append( buffer, size, len, "</arguments>\n" ); } #endif /* WITH_NEW_ARGS */ /* finalize close tag of outmost element */ myprint( buffer, size, len, "%*s</%s>\n", indent, "", tag ); return *len; } void deleteJobInfo( JobInfo* jobinfo ) /* purpose: destructor * paramtr: jobinfo (IO): valid JobInfo structure to destroy.
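 * warning: also frees the argv[0] path from findApp() and the copy area,
 *          so the argument vector must not be dereferenced afterwards.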
*/ { /* paranoia */ if ( jobinfo == NULL ) return; #ifdef EXTRA_DEBUG debugmsg( "# deleteJobInfo(%p)\n", jobinfo ); #endif if ( jobinfo->isValid ) { if ( jobinfo->argv[0] != NULL && jobinfo->argv[0] != jobinfo->copy ) free((void*) jobinfo->argv[0]); /* from findApp() allocation */ deleteStatInfo( &jobinfo->executable ); } if ( jobinfo->copy != NULL ) { free( (void*) jobinfo->copy ); free( (void*) jobinfo->argv ); jobinfo->copy = 0; } /* final invalidation */ jobinfo->isValid = 0; }
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/mysystem.h0000644000175000017500000000400111757531137023467 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _MYSYSTEM_H #define _MYSYSTEM_H #include "appinfo.h" #include "statinfo.h" #include "jobinfo.h" extern int mysystem( AppInfo* appinfo, JobInfo* jobinfo, char* envp[] ); /* purpose: emulate the system() libc call, but save utilization data. * paramtr: appinfo (IO): shared record of information * isPrinted (IO): only to reset isPrinted in child process!
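 * example: an illustrative call, mirroring pegasus-kickstart.c:
 *          status = mysystem( &appinfo, &appinfo.application, environ );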
* input (IN): connect to stdin or share * output (IN): connect to stdout or share * error (IN): connect to stderr or share * jobinfo (IO): updated record of job-specific information * argv (IN): assembled commandline * child (OUT): pid of child process * status (OUT): also returned as function result * saverr (OUT): will be set to value of errno * start (OUT): will be set to startup time * final (OUT): will be set to finish time after reap * use (OUT): rusage record from application call * envp (IN): vector with the parent's environment * returns: -1: failure in mysystem processing, check errno * 126: connecting child to its new stdout failed * 127: execve() call failed * else: status of child */ #endif /* _MYSYSTEM_H */
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/msave.h0000644000175000017500000000602611757531137022717 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved.
*/ #ifndef _MACHINE_H #define _MACHINE_H #include <sys/types.h> #include <sys/utsname.h> #include <sys/time.h> #include <unistd.h> #ifndef SYS_NMLN #ifdef _SYS_NAMELEN /* DARWIN */ #define SYS_NMLN 65 #else #error "No SYS_NMLN nor _SYS_NAMELEN: check <sys/utsname.h>" #endif /* _SYS_NAMELEN */ #endif /* SYS_NMLN */ #ifndef SOLARIS #include <stdint.h> /* uint64_t */ #endif typedef struct { /* from utsname(2) */ struct utsname uname; struct timeval now; /* from getpagesize(2) */ unsigned long pagesize; #if defined(_SC_NPROCESSORS_CONF) || defined(DARWIN) || defined(LINUX) unsigned short cpu_count; unsigned short cpu_online; #endif #if defined(LINUX) || defined(DARWIN) || defined(_SC_PHYS_PAGES) /* from sysinfo(2) or sysctl(3) or sysconf(2) */ uint64_t ram_total; #endif #if defined(LINUX) || defined(DARWIN) || defined(_SC_AVPHYS_PAGES) /* from sysinfo(2) or sysctl(3) or sysconf(2) */ uint64_t ram_free; #endif #if defined(LINUX) || defined(DARWIN) /* from sysinfo(2) or sysctl(3) */ uint64_t swap_total; uint64_t swap_free; /* from /proc/cpuinfo or sysctl(3) */ unsigned long megahertz; char vendor_id[16]; char model_name[80]; /* from /proc/uptime or sysctl(3) */ double idletime; struct timeval boottime; #endif #if defined(LINUX) || defined(DARWIN) || defined(SUNOS) float load[3]; #endif #ifdef LINUX /* from /proc/loadavg */ unsigned pid_running; unsigned pid_total; #endif } MachineInfo; extern void initMachineInfo( MachineInfo* machine ); /* purpose: initialize the data structure. * paramtr: machine (OUT): initialized MachineInfo structure. */ extern int printXMLMachineInfo( char* buffer, size_t size, size_t* len, size_t indent, const char* tag, const MachineInfo* machine ); /* purpose: format the job information into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * tag (IN): name to use for element tags. * machine (IN): machine info to print. * returns: number of characters put into buffer (buffer length) */ extern void deleteMachineInfo( MachineInfo* machine ); /* purpose: destructor * paramtr: machine (IO): valid MachineInfo structure to destroy. 
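 *
 * A minimal lifecycle sketch (editorial illustration, not part of the
 * original header; the buffer size and indent level are arbitrary choices):
 *
 *   MachineInfo m;
 *   char buf[4096];
 *   size_t len = 0;
 *   initMachineInfo( &m );
 *   printXMLMachineInfo( buf, sizeof(buf), &len, 2, "machine", &m );
 *   deleteMachineInfo( &m );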
*/ #endif /* _MACHINE_H */ ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/just-machine�������������������������������������0000755�0001750�0001750�00000003255�11757531137�023751� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/usr/bin/env perl # # script to just grab the machine element from kickstart output # use 5.006; use strict; use XML::Parser; my %break = ( machine => 1 ); # elements that enclose other elements my %state = ( machine => 1 ); # elements that trigger printing my $mixed = 0; # binary state on how to close elements my $state = 0; # binary (counted) state on when to print my @stack = (); # path from root element sub start_element { my $self = shift; my $element = shift; $state++ if exists $state{$element}; if ( $state ) { print ' ' x ( @stack*2 ), '<', $element; for ( my $i=0; $i < @_; $i += 2 ) { # keep original ordering of attributes print ' ', $_[$i], '="', $_[$i+1], '"'; } if ( exists $break{$element} ) { print ">\n"; $mixed = 0; } else { $mixed = 1; } } push( @stack, $element ); } sub final_element { my $self = shift; my $element = shift; if ( $state ) { if ( $mixed == 1 ) { print "/>\n"; } else { print ' ' x ($#stack*2) if exists $break{$element}; print "</$element>\n"; } $mixed = 0; } --$state if exists $state{$element}; pop(@stack) eq $element; } sub text_handler { my $self = shift; my $text = shift; $text =~ s/[\012\015]+//; if ( $state ) { print '>' if $mixed == 1; $mixed = 0; print $text unless $text =~ /^[ \t]*$/; } 1; } my $xml = new XML::Parser::Expat; $xml->setHandlers( Start => \&start_element, End => \&final_element, Char => \&text_handler ); if ( @ARGV ) { my $fn = shift; open( F, "<$fn" ) || die "open $fn: $!\n"; $xml->parse( \*F ); close F; } else { $xml->parse( \*STDIN ); } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/limitinfo.h��������������������������������������0000644�0001750�0001750�00000003774�11757531137�023605� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * 
http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _LIMIT_INFO_H #define _LIMIT_INFO_H #include <sys/types.h> #include <sys/resource.h> typedef struct { int resource; /* which resource, e.g. RLIMIT_STACK */ int error; /* errno after call to getrlimit */ struct rlimit limit; /* resource limits acquired */ } SingleLimitInfo; typedef struct { size_t size; SingleLimitInfo* limits; } LimitInfo; extern void initLimitInfo( LimitInfo* limits ); /* purpose: initializes the data structure * paramtr: limits (OUT): sufficiently large memory block */ extern void updateLimitInfo( LimitInfo* limits ); /* purpose: initializes the data with current limits * paramtr: limits (IO): sufficiently large memory block */ extern void deleteLimitInfo( LimitInfo* limits ); /* purpose: destructor * paramtr: limits (IO): valid LimitInfo structure to destroy. */ extern int printXMLLimitInfo( char* buffer, size_t size, size_t* len, size_t indent, const LimitInfo* limits ); /* purpose: format the rusage record into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * limits (IN): observed resource limits * returns: number of characters put into buffer (buffer length) */ #endif /* _LIMIT_INFO_H */ ����pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/Makefile�����������������������������������������0000644�0001750�0001750�00000012270�11757531137�023071� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������# # Makefile # INSTALL = install RM = rm -f CC = gcc CFLAGS = -O LD = $(CC) LOADLIBES = -lm SYSTEM = $(shell uname -s | tr '[a-z]' '[A-Z]' | tr -d '_ -/') VERSION = $(shell uname -r) MARCH = $(shell uname -m | tr '[A-Z]' '[a-z]') MAJOR = $(firstword $(subst ., ,$(VERSION))) MINOR = $(strip $(word 2,$(subst ., ,$(VERSION)))) EXTRA_OBJ = machine/basic.o SOCKIO = $(shell /bin/ls /usr/include/sys/sockio.h 2>/dev/null) LFS_CFLAGS = -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE LFS_LDFLAGS = ifndef ${prefix} prefix = $(PEGASUS_HOME) endif ifndef ${datadir} datadir = $(prefix)/share endif ifeq (SUNOS,${SYSTEM}) ifeq (5,${MAJOR}) # use these for the SUN CC compiler CC = cc LD = $(CC) ## SPARCv7 LFS_CFLAGS = $(shell getconf LFS_CFLAGS 2>>/dev/null) LFS_LDFLAGS = $(shell getconf LFS_LDFLAGS 2>>/dev/null) V7FLAGS = -xtarget=generic V9FLAGS = -xtarget=ultra -xarch=v9 CFLAGS = -dalign -ftrap=%none -fsimple -xlibmil $(EXTRACFLAGS) #EXTRACFLAGS = $(V7FLAGS) CFLAGS := -DSOLARIS $(CFLAGS) -xO4 -D__EXTENSIONS__=1 LOADLIBES += -lkstat -lnsl -lsocket INSTALL = /usr/ucb/install EXTRA_OBJ += machine/sunos-swap.o machine/sunos.o else # old Solaris 1 not supported! endif endif ifeq (IRIX64,${SYSTEM}) # The regular 64bit Irix stuff is just too slow, use n32! 
SYSTEM := IRIX endif ifeq (IRIX,${SYSTEM}) CC = cc -n32 -mips3 -r4000 LD = $(CC) OPT_NORM = -O3 -IPA -LNO:opt=1 endif ifeq (AIX,${SYSTEM}) CC = xlc CXX = xlC endif ifeq (DARWIN,${SYSTEM}) #CFLAGS += -DDEBUG_WAIT=1 -DDEBUG_EVENTLOOP=1 CFLAGS += -DMUST_USE_SELECT_NOT_POLL=1 -DHAS_REALPATH_EXT=1 -ggdb LOADLIBES += -ggdb EXTRA_OBJ += machine/darwin.o endif ifeq (CYGWINNT,$(findstring CYGWINNT,${SYSTEM})) CFLAGS += -DSYS_NMLN=20 -DCYGWIN=1 CFLAGS += -DDEBUG_EVENTLOOP=1 -DDEBUG_WAIT=1 endif ifeq (LINUX,${SYSTEM}) ifeq (ia64,${MARCH}) # old Intel-only IA64 architecture CFLAGS = -Wall -O2 -ggdb else ifeq (x86_64,${MARCH}) # new Intel/AMD 64bit architecture CFLAGS = -Wall -O2 -ggdb -m64 else ifeq (i686,${MARCH}) # regular 32bit x86 architecture CFLAGS = -Wall -O2 -march=i686 -ggdb else # UNKNOWN ARCHITECTURE -- MAKE NO ASSUMPTIONS CFLAGS = -Wall -O2 -ggdb endif endif endif #CFLAGS += -D__USE_POSIX=199309 #LOADLIBES += -Wl,-Bstatic -lefence -Wl,-Bdynamic LFS_CFLAGS = $(shell getconf LFS_CFLAGS 2>>/dev/null) LFS_LDFLAGS = $(shell getconf LFS_LDFLAGS 2>>/dev/null) EXTRA_OBJ += machine/linux.o CFLAGS += -DHAS_REALPATH_EXT=1 endif # # === [3] ======================================================= rules section # There is no need to change things below this line. CFLAGS += -D${SYSTEM} -DMAJOR=${MAJOR} -DWITH_NEW_ARGS=1 # -DSOCKLEN=${SOCKLEN} ifneq (,${SOCKIO}) CFLAGS += -DHAS_SYS_SOCKIO=1 endif CFLAGS += -DUSE_PARSE=1 # -ggdb # add large file support ifneq (,${LFS_CFLAGS}) NOLFS_CFLAGS := $(CFLAGS) CFLAGS += $(LFS_CFLAGS) endif ifneq (,${LFS_LDFLAGS}) LDFLAGS += $(LFS_LDFLAGS) endif %.o : %.c $(CC) $(CFLAGS) $< -c -o $@ all : pegasus-kickstart pegasus-kickstart: debug.o getif.o rwio.o tools.o useinfo.o mynss.o statinfo.o event.o jobinfo.o limitinfo.o $(EXTRA_OBJ) machine.o appinfo.o parse.o mysystem.o mylist.o invoke.o pegasus-kickstart.o $(LD) $(EXTRA_LDFLAGS) $(CFLAGS) $^ -o $@ $(LOADLIBES) show-if: show-if.o getif.o debug.o $(LD) $(EXTRA_LDFLAGS) $(CFLAGS) $^ -o $@ $(LOADLIBES) try-parse: try-parse.o parse.o $(LD) $(EXTRA_LDFLAGS) $(CFLAGS) $^ -o $@ $(LOADLIBES) appinfo.o: appinfo.c getif.h rwio.h debug.h tools.h useinfo.h machine.h \ jobinfo.h statinfo.h appinfo.h limitinfo.h mynss.h debug.o: debug.c debug.h rwio.h event.o: event.c rwio.h debug.h tools.h event.h statinfo.h mysignal.h fail.o: fail.c getif.o: getif.c debug.h getif.h invoke.o: invoke.c invoke.h jobinfo.o: jobinfo.c getif.h debug.h tools.h useinfo.h jobinfo.h \ statinfo.h parse.h pegasus-kickstart.o: pegasus-kickstart.c rwio.h debug.h appinfo.h \ statinfo.h jobinfo.h limitinfo.h machine.h mysystem.h mylist.h \ invoke.h tools.h limitinfo.o: limitinfo.c debug.h tools.h limitinfo.h machine.o: machine.c machine.h machine/basic.h debug.h meminfo.o: meminfo.c meminfo.h debug.h msave.o: msave.c machine.h debug.h tools.h mylist.o: mylist.c mylist.h mynss.o: mynss.c mynss.h mysystem.o: mysystem.c debug.h tools.h appinfo.h statinfo.h jobinfo.h \ limitinfo.h machine.h event.h mysystem.h mysignal.h parse.o: parse.c debug.h parse.h rwio.o: rwio.c rwio.h show-if.o: show-if.c getif.h statinfo.o: statinfo.c mynss.h debug.h statinfo.h tools.h test-ascii.o: test-ascii.c test-post.o: test-post.c test-pre.o: test-pre.c tools.o: tools.c tools.h try-parse.o: try-parse.c parse.h useinfo.o: useinfo.c tools.h useinfo.h zio.o: zio.c zio.h machine/basic.o: machine/basic.c machine/basic.h machine/../tools.h machine/sunos.o: machine/sunos.c machine/basic.c machine/sunos.h \ machine/sunos-swap.h machine/../tools.h machine/sunos-swap.o: machine/sunos-swap.c 
machine/sunos-swap.h $(CC) $(NOLFS_CFLAGS) $< -c -o $@ machine/linux.o: machine/linux.c machine/basic.c machine/linux.h \ machine/../tools.h machine/darwin.o: machine/darwin.c machine/basic.c machine/darwin.h \ machine/../tools.h install: pegasus-kickstart $(INSTALL) -m 0755 pegasus-kickstart $(prefix)/bin clean: $(RM) *.o machine/*.o core core.* distclean: clean $(RM) pegasus-kickstart fail test-post test-pre test-ascii show-if ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/rwio.h�������������������������������������������0000644�0001750�0001750�00000004373�11757531137�022567� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _RWIO_H #define _RWIO_H #include <sys/types.h> #ifndef DEFAULT_SYNC_IDLE #define DEFAULT_SYNC_IDLE 100 #endif extern ssize_t writen( int fd, const char* buffer, ssize_t n, unsigned restart ); /* purpose: write all n bytes in buffer, if possible at all * paramtr: fd (IN): filedescriptor open for writing * buffer (IN): bytes to write (must be at least n byte long) * n (IN): number of bytes to write * restart (IN): if true, try to restart write at max that often * returns: n, if everything was written, or * [0..n-1], if some bytes were written, but then failed, * < 0, if some error occurred. 
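 *
 * A minimal usage sketch (editorial illustration, not from the original
 * header; fd is assumed open for writing, and the retry count of 3 mirrors
 * the writen() call in appinfo.c):
 *
 *   const char msg[] = "hello\n";
 *   if ( writen( fd, msg, sizeof(msg)-1, 3 ) != sizeof(msg)-1 )
 *     perror( "writen" );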
 */

extern int lockit( int fd, int cmd, int type );
/* purpose: fill in POSIX lock structure and attempt lock or unlock
 * paramtr: fd (IN): which file descriptor to lock
 *          cmd (IN): F_SETLK, F_GETLK, F_SETLKW
 *          type (IN): F_WRLCK, F_RDLCK, F_UNLCK
 * warning: always locks full file ( offset=0, whence=SEEK_SET, len=0 )
 * returns: result from fcntl call
 */

extern int mytrylock( int fd );
/* purpose: Try to lock the file
 * paramtr: fd (IN): open file descriptor
 * returns: -1: fatal error while locking the file, file not locked
 *           0: all backoff attempts failed, file is not locked
 *           1: file is locked
 */

extern int nfs_sync( int fd, unsigned idle );
/* purpose: tries to force NFS to update the given file descriptor
 * paramtr: fd (IN): descriptor of an open file
 *          idle (IN): how many milliseconds between lock and unlock
 * seealso: DEFAULT_SYNC_IDLE as suggested argument for idle
 * returns: 0 is ok, -1 for failure
 */

#endif /* _RWIO_H */
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/mylist.h0000644000175000017500000000373511757531137023131 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#ifndef _MYLIST_H
#define _MYLIST_H

#include <sys/types.h>

typedef struct mylist_item_tag {
  unsigned long magic;
  const char* pfn;
  const char* lfn;
  struct mylist_item_tag* next;
} mylist_item_t, *mylist_item_p;

extern int mylist_item_init( mylist_item_p item, const char* data );
/* purpose: initialize a data item.
* paramtr: item (OUT): item pointer to initialize * data (IN): string to copy into item * returns: 0 on success, * EINVAL if arguments are NULL * ENOMEM if allocation failed */ extern int mylist_item_done( mylist_item_p item ); /* purpose: free allocated space of an item * paramtr: item (IO): area to free * returns: 0 on success, * EINVAL if the magic failed, or NULL argument */ typedef struct mylist_tag { unsigned long magic; struct mylist_item_tag* head; struct mylist_item_tag* tail; size_t count; } mylist_t, *mylist_p; extern int mylist_init( mylist_p list ); extern int mylist_add( mylist_p list, const char* data ); extern int mylist_done( mylist_p list ); extern int mylist_fill( mylist_p list, const char* fn ); /* purpose: Add each line in the specified file to the list * paramtr: list (IO): list to modify * fn (IN): name of the file to read * returns: 0 on success, */ #endif /* _MYLIST_H */ �����������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/invoke.c�����������������������������������������0000644�0001750�0001750�00000013576�11757531137�023102� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <errno.h> #include <stdio.h> #include <string.h> #include <stdlib.h> #include "invoke.h" static const char* RCS_ID = "$Id: invoke.c 50 2007-05-19 00:48:32Z gmehta $"; int append_arg( char* data, char*** arg, size_t* index, size_t* capacity ) /* purpose: adds a string to a list of arguments * This is a low-level function, use add_arg instead. * paramtr: data (IN): string to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * returns: 0 means ok, -1 means error, see errno * warning: Always creates a strdup of data */ { #ifdef DEBUG_ARGV fprintf( stderr, "# data=%p arg=%p index=%d cap=%d: \"%s\"\n", data, *arg, *index, *capacity, data ); #endif if ( *index >= *capacity ) { *capacity <<= 1; #ifdef DEBUG_ARGV fputs( "# realloc\n", stderr ); #endif *arg = realloc( *arg, *capacity * sizeof(char*) ); if ( *arg == NULL ) return -1; /* re-calloc: init new space with NULL */ memset( *arg + *index, 0, sizeof(char*) * (*capacity - *index) ); } (*arg)[(*index)++] = data ? strdup(data) : NULL; return 0; } static char* merge( char* s1, char* s2 ) /* purpose: merge two strings and return the result * paramtr: s1 (IN): first string, may be NULL * s2 (IN): second string, must not be NULL * returns: merge of strings into newly allocated area. * NULL, if the allocation failed. 
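 *
 * example (editorial illustration): merge( "ls", "-l" ) returns the newly
 * allocated string "ls -l"; the caller is responsible for free()ing it.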
*/ { if ( s1 == NULL ) { return strdup(s2); } else { size_t len = strlen(s1) + strlen(s2) + 2; char* temp = (char*) malloc(len); if ( temp == NULL ) return NULL; strncpy( temp, s1, len ); strncat( temp, " ", len ); strncat( temp, s2, len ); return temp; } } int expand_arg( const char* fn, char*** arg, size_t* index, size_t* capacity, int level ) /* purpose: adds the contents of a file, line by line, to an argument vector * This is a low-level function, use add_arg instead. * paramtr: fn (IN): name of file with contents to append * arg (OUT): list of arguments as vector * index (IO): index where a new data should be inserted into * capacity (IO): capacity (extend) of vector * level (IN): level of recursion * returns: 0 means ok, -1 means error, see errno */ { FILE* f; char line[4096]; size_t len; char* cmd, *save = NULL; unsigned long lineno = 0ul; if ( level >= 32 ) { fprintf( stderr, "ERROR: Nesting too deep (%d levels), " "circuit breaker triggered!\n", level ); errno = EMLINK; return -1; } if ( (f = fopen( fn, "r" )) == NULL ) { /* error while opening file for reading */ return -1; } while ( fgets( line, sizeof(line), f ) ) { ++lineno; /* check for skippable line */ if ( line[0] == 0 || line[0] == '\r' || line[0] == '\n' ) continue; /* check for unterminated line (larger than buffer) */ len = strlen(line); if ( line[len-1] != '\r' && line[len-1] != '\n' ) { /* read buffer was too small, save and append */ char* temp = merge( save, line ); if ( temp == NULL ) { /* error while merging strings */ int saverr = errno; fclose(f); if ( save != NULL ) free((void*) save); errno = saverr; return -1; } if ( save != NULL ) free((void*) save); save = temp; lineno--; continue; } else { /* remove terminating character(s) */ while ( len > 0 && (line[len-1] == '\r' || line[len-1] == '\n') ) { line[len-1] = 0; len--; } } /* final assembly of argument */ if ( save != NULL ) { /* assemble merged line */ cmd = merge( save, line ); free((void*) save); save = NULL; if ( cmd == NULL ) { /* error while merging strings */ int saverr = errno; fclose(f); errno = saverr; return -1; } } else { /* no overlong lines */ cmd = line; } #ifdef DEBUG_ARGV printf( "# %s:%lu: %s\n", fn, lineno, cmd ); #endif /* DEBUG_ARGV */ if ( (len=strlen(cmd)) > 0 ) { int result = #ifdef PERMIT_RECURSION add_arg( cmd, arg, index, capacity, level+1 ) #else /* ! PERMIT_RECURSION */ append_arg( cmd, arg, index, capacity ) #endif /* PERMIT_RECURSION */ ; if ( result == -1 ) { int saverr = errno; fclose(f); if ( cmd != line ) free((void*) cmd); errno = saverr; return -1; } } /* done with this argument */ if ( cmd != line ) free((void*) cmd); } fclose(f); return 0; } int add_arg( char* s, char*** arg, size_t* index, size_t* capacity, int level ) /* purpose: sorts a given full argument string, whether to add or extend * This is the high-level interface to previous functions. 
 * paramtr: s (IN): string to append
 *          arg (OUT): list of arguments as vector
 *          index (IO): index where a new data should be inserted into
 *          capacity (IO): capacity (extent) of vector
 *          level (IN): level of recursion, use 1
 * returns: 0 means ok, -1 means error, see errno
 */
{
  if ( s[0] == '@' && s[1] != 0 ) {
    if ( s[1] == '@' ) {
      return append_arg( s+1, arg, index, capacity );
    } else {
      return expand_arg( s+1, arg, index, capacity, level+1 );
    }
  } else {
    return append_arg( s, arg, index, capacity );
  }
}
pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/useinfo.c0000644000175000017500000001304011757531137023241 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include "tools.h"
#include "useinfo.h"

#include <string.h>

static const char* RCS_ID = "$Id: useinfo.c 50 2007-05-19 00:48:32Z gmehta $";

int printXMLUseInfo( char* buffer, size_t size, size_t* len, size_t indent,
                     const char* id, const struct rusage* use )
/* purpose: format the rusage record into the given buffer as XML.
 * paramtr: buffer (IO): area to store the output in
 *          size (IN): capacity of character area
 *          len (IO): current position within area, will be adjusted
 *          indent (IN): indentation level
 *          id (IN): object identifier to use as element tag name.
 *          use (IN): usage record to print.
 * returns: number of characters put into buffer (buffer length)
 */
{
  char b[4][32];

  /* <usage> */
  myprint( buffer, size, len,
           "%*s<%s utime=\"%.3f\" stime=\"%.3f\"",
           indent, "", id,
           mymaketime(use->ru_utime), mymaketime(use->ru_stime) );

#ifdef HAS_USAGE_MEM
#ifdef HAS_USAGE_FULLMEM
  myprint( buffer, size, len,
           " maxrss=\"%s\" ixrss=\"%s\" idrss=\"%s\" isrss=\"%s\"",
           sizer( b[0], 32, sizeof(use->ru_maxrss), &(use->ru_maxrss) ),
           sizer( b[1], 32, sizeof(use->ru_ixrss), &(use->ru_ixrss) ),
           sizer( b[2], 32, sizeof(use->ru_idrss), &(use->ru_idrss) ),
           sizer( b[3], 32, sizeof(use->ru_isrss), &(use->ru_isrss) ) );
#else
  myprint( buffer, size, len,
           " maxrss=\"%s\" idrss=\"%s\"",
           sizer( b[0], 32, sizeof(use->ru_maxrss), &(use->ru_maxrss) ),
           sizer( b[2], 32, sizeof(use->ru_idrss), &(use->ru_idrss) ) );
#endif /* HAS_USAGE_FULLMEM */
#endif /* HAS_USAGE_MEM */

  myprint( buffer, size, len,
           " minflt=\"%s\" majflt=\"%s\" nswap=\"%s\"",
           sizer( b[0], 32, sizeof(use->ru_minflt), &(use->ru_minflt) ),
           sizer( b[1], 32, sizeof(use->ru_majflt), &(use->ru_majflt) ),
           sizer( b[2], 32, sizeof(use->ru_nswap), &(use->ru_nswap) ) );

#ifdef HAS_USAGE_IO
  myprint( buffer, size, len,
           " inblock=\"%s\" outblock=\"%s\"",
           sizer( b[0], 32, sizeof(use->ru_inblock), &(use->ru_inblock) ),
           sizer( b[1], 32, sizeof(use->ru_oublock), &(use->ru_oublock) ) );
#endif /* HAS_USAGE_IO */

#ifdef HAS_USAGE_MSG
  myprint( buffer, size, len,
           " msgsnd=\"%s\" msgrcv=\"%s\"",
           sizer( b[2], 32, sizeof(use->ru_msgsnd), &(use->ru_msgsnd) ),
           sizer( b[3], 32, sizeof(use->ru_msgrcv), &(use->ru_msgrcv) ) );
#endif /* HAS_USAGE_MSG */

  myprint( buffer, size, len,
           " nsignals=\"%s\" nvcsw=\"%s\" nivcsw=\"%s\"/>\n",
           sizer( b[0], 32, sizeof(use->ru_nsignals), &(use->ru_nsignals) ),
           sizer( b[1], 32, sizeof(use->ru_nvcsw), &(use->ru_nvcsw) ),
           sizer( b[2], 32, sizeof(use->ru_nivcsw), &(use->ru_nivcsw) ) );

  return *len;
}

static void add( struct timeval* sum, const struct timeval* summand )
{
  sum->tv_usec += summand->tv_usec;
  sum->tv_sec += summand->tv_sec;
  if ( sum->tv_usec >= 1000000 ) {
    sum->tv_sec++;
    sum->tv_usec -= 1000000;
  }
}

void addUseInfo( struct rusage* sum, const struct rusage* summand )
/* purpose: add a given rusage record to an existing one
 * paramtr: sum (IO): initialized rusage record to add to
 *          summand (IN): values to add
 * returns: sum += summand;
 */
{
  /* Total amount of user time used. */
  add( &sum->ru_utime, &summand->ru_utime );
  /* Total amount of system time used. */
  add( &sum->ru_stime, &summand->ru_stime );
  /* Maximum resident set size (in kilobytes). */
  sum->ru_maxrss += summand->ru_maxrss;
  /* Amount of sharing of text segment memory with other processes (kilobyte-seconds). */
  sum->ru_ixrss += summand->ru_ixrss;
  /* Amount of data segment memory used (kilobyte-seconds). */
  sum->ru_idrss += summand->ru_idrss;
  /* Amount of stack memory used (kilobyte-seconds). */
  sum->ru_isrss += summand->ru_isrss;
  /* Number of soft page faults (i.e. those serviced by reclaiming
     a page from the list of pages awaiting reallocation). */
  sum->ru_minflt += summand->ru_minflt;
  /* Number of hard page faults (i.e. those that required I/O). */
  sum->ru_majflt += summand->ru_majflt;
  /* Number of times a process was swapped out of physical memory. */
  sum->ru_nswap += summand->ru_nswap;
  /* Number of input operations via the file system.  Note: This
     and `ru_oublock' do not include operations with the cache. */
  sum->ru_inblock += summand->ru_inblock;
  /* Number of output operations via the file system.
*/ sum->ru_oublock += summand->ru_oublock; /* Number of IPC messages sent. */ sum->ru_msgsnd += summand->ru_msgsnd; /* Number of IPC messages received. */ sum->ru_msgrcv += summand->ru_msgrcv; /* Number of signals delivered. */ sum->ru_nsignals += summand->ru_nsignals; /* Number of voluntary context switches, i.e. because the process gave up the process before it had to (usually to wait for some resource to be available). */ sum->ru_nvcsw += summand->ru_nvcsw; /* Number of involuntary context switches, i.e. a higher priority process became runnable or the current process used up its time slice. */ sum->ru_nivcsw += summand->ru_nivcsw; } ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/appinfo.c����������������������������������������0000644�0001750�0001750�00000035330�11757531137�023233� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "getif.h" #include "rwio.h" #include "debug.h" #include "tools.h" #include "useinfo.h" #include "machine.h" #include "jobinfo.h" #include "statinfo.h" #include "appinfo.h" #include "mynss.h" #include <ctype.h> #include <errno.h> #include <stdlib.h> #include <stdio.h> #include <string.h> #include <stdarg.h> #include <sys/types.h> #include <sys/wait.h> #include <unistd.h> #include <fcntl.h> #include <grp.h> #include <pwd.h> #include <sys/socket.h> #include <netinet/in.h> #include <arpa/inet.h> #include <netdb.h> extern int isExtended; /* timestamp format concise or extended */ extern int isLocal; /* timestamp time zone, UTC or local */ static const char* RCS_ID = "$Id: appinfo.c 4535 2011-09-26 22:14:19Z voeckler $"; static int mycompare( const void* a, const void* b ) { return strcmp( ( a ? *((const char**) a) : "" ), ( b ? *((const char**) b) : "" ) ); } static size_t convert2XML( char* buffer, size_t size, const AppInfo* run ) { size_t i; struct passwd* user = wrap_getpwuid( getuid() ); struct group* group = wrap_getgrgid( getgid() ); size_t len = 0; #define XML_SCHEMA_URI "http://pegasus.isi.edu/schema/invocation" #define XML_SCHEMA_VERSION "2.1" /* default is to produce XML preamble */ if ( ! 
run->noHeader ) append( buffer, size, &len, "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n" ); /* generate the XML header and start of root element */ append( buffer, size, &len, "<invocation xmlns=\"" XML_SCHEMA_URI "\"" " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" " xsi:schemaLocation=\"" XML_SCHEMA_URI " http://pegasus.isi.edu/schema/iv-" XML_SCHEMA_VERSION ".xsd\"" " version=\"" XML_SCHEMA_VERSION "\"" " start=\"" ); /* mandatory attributes for root element */ mydatetime( buffer, size, &len, isLocal, isExtended, run->start.tv_sec, run->start.tv_usec ); myprint( buffer, size, &len, "\" duration=\"%.3f\"", mymaketime(run->finish) - mymaketime(run->start) ); /* optional attributes for root element: transformation fqdn */ if ( run->xformation && strlen(run->xformation) ) { append( buffer, size, &len, " transformation=\"" ); xmlquote( buffer, size, &len, run->xformation, strlen(run->xformation) ); append( buffer, size, &len, "\"" ); } /* optional attributes for root element: derivation fqdn */ if ( run->derivation && strlen(run->derivation) ) { append( buffer, size, &len, " derivation=\"" ); xmlquote( buffer, size, &len, run->derivation, strlen(run->derivation) ); append( buffer, size, &len, "\"" ); } /* optional attributes for root element: name of remote site */ if ( run->sitehandle && strlen(run->sitehandle) ) { append( buffer, size, &len, " resource=\"" ); xmlquote( buffer, size, &len, run->sitehandle, strlen(run->sitehandle) ); append( buffer, size, &len, "\"" ); } /* optional attribute for workflow label: name of workflow */ if ( run->wf_label && strlen(run->wf_label) ) { append( buffer, size, &len, " wf-label=\"" ); xmlquote( buffer, size, &len, run->wf_label, strlen(run->wf_label) ); append( buffer, size, &len, "\"" ); } if ( run->wf_stamp && strlen(run->wf_stamp) ) { append( buffer, size, &len, " wf-stamp=\"" ); xmlquote( buffer, size, &len, run->wf_stamp, strlen(run->wf_stamp) ); append( buffer, size, &len, "\"" ); } /* optional attributes for root element: host address dotted quad */ if ( isdigit( run->ipv4[0] ) ) { struct hostent* h; in_addr_t address = inet_addr( run->ipv4 ); myprint( buffer, size, &len, " interface=\"%s\"", run->prif ); myprint( buffer, size, &len, " hostaddr=\"%s\"", run->ipv4 ); if ( (h = wrap_gethostbyaddr( (const char*) &address, sizeof(in_addr_t), AF_INET )) ) myprint( buffer, size, &len, " hostname=\"%s\"", h->h_name ); } /* optional attributes for root element: application process id */ if ( run->child != 0 ) myprint( buffer, size, &len, " pid=\"%d\"", run->child ); /* user info about who ran this thing */ myprint( buffer, size, &len, " uid=\"%d\"", getuid() ); if ( user ) myprint( buffer, size, &len, " user=\"%s\"", user->pw_name ); /* group info about who ran this thing */ myprint( buffer, size, &len, " gid=\"%d\"", getgid() ); if ( group ) myprint( buffer, size, &len, " group=\"%s\"", group->gr_name ); /* currently active umask settings */ myprint( buffer, size, &len, " umask=\"0%03o\"", run->umask ); /* finalize open tag of root element */ append( buffer, size, &len, ">\n" ); /* <setup>, <prejob>, <application>, <postjob>, <cleanup> */ printXMLJobInfo( buffer, size, &len, 2, "setup", &run->setup ); printXMLJobInfo( buffer, size, &len, 2, "prejob", &run->prejob ); printXMLJobInfo( buffer, size, &len, 2, "mainjob", &run->application ); printXMLJobInfo( buffer, size, &len, 2, "postjob", &run->postjob ); printXMLJobInfo( buffer, size, &len, 2, "cleanup", &run->cleanup ); /* <cwd> */ if ( run->workdir != NULL ) { append( buffer, size, 
&len, " <cwd>" ); append( buffer, size, &len, run->workdir ); append( buffer, size, &len, "</cwd>\n" ); } else { #if 0 append( buffer, size, &len, " <cwd xmlns:xsi=\"http://www.w3.org/2001/" "XMLSchema-instance\" xsi:nil=\"true\"/>\n" ); #else append( buffer, size, &len, " <cwd/>\n" ); #endif } /* <usage> own resources */ printXMLUseInfo( buffer, size, &len, 2, "usage", &run->usage ); if ( ! run->noHeader ) printXMLMachineInfo( buffer, size, &len, 2, "machine", &run->machine ); /* <statcall> records */ printXMLStatInfo( buffer, size, &len, 2, "statcall", "stdin", &run->input ); updateStatInfo( &(((AppInfo*) run)->output) ); printXMLStatInfo( buffer, size, &len, 2, "statcall", "stdout", &run->output ); updateStatInfo( &(((AppInfo*) run)->error) ); printXMLStatInfo( buffer, size, &len, 2, "statcall", "stderr", &run->error ); updateStatInfo( &(((AppInfo*) run)->logfile) ); printXMLStatInfo( buffer, size, &len, 2, "statcall", "gridstart", &run->gridstart ); printXMLStatInfo( buffer, size, &len, 2, "statcall", "logfile", &run->logfile ); printXMLStatInfo( buffer, size, &len, 2, "statcall", "channel", &run->channel ); /* initial and final arbitrary <statcall> records */ if ( run->icount && run->initial ) for ( i=0; i<run->icount; ++i ) printXMLStatInfo( buffer, size, &len, 2, "statcall", "initial", &run->initial[i] ); if ( run->fcount && run->final ) for ( i=0; i<run->fcount; ++i ) printXMLStatInfo( buffer, size, &len, 2, "statcall", "final", &run->final[i] ); if ( ! run->noHeader ) { /* <environment> */ if ( run->envp && run->envc ) { char* s; /* attempt a sorted version */ char** keys = malloc( sizeof(char*) * run->envc ); for ( i=0; i < run->envc; ++i ) { keys[i] = run->envp[i] ? strdup(run->envp[i]) : ""; } qsort( (void*) keys, run->envc, sizeof(char*), mycompare ); append( buffer, size, &len, " <environment>\n" ); for ( i=0; i < run->envc; ++i ) { if ( keys[i] && (s = strchr( keys[i], '=' )) ) { *s = '\0'; /* temporarily cut string here */ append( buffer, size, &len, " <env key=\"" ); append( buffer, size, &len, keys[i] ); append( buffer, size, &len, "\">" ); xmlquote( buffer, size, &len, s+1, strlen(s+1) ); append( buffer, size, &len, "</env>\n" ); *s = '='; /* reset string to original */ } } free((void*) keys); append( buffer, size, &len, " </environment>\n" ); } /* <resource> limits */ printXMLLimitInfo( buffer, size, &len, 2, &run->limits ); } /* ! 
run->noHeader */ /* finish root element */ append( buffer, size, &len, "</invocation>\n" ); return len; } static char* pattern( char* buffer, size_t size, const char* dir, const char* sep, const char* file ) { --size; buffer[size] = '\0'; /* reliably terminate string */ strncpy( buffer, dir, size ); strncat( buffer, sep, size ); strncat( buffer, file, size ); return buffer; } void initAppInfo( AppInfo* appinfo, int argc, char* const* argv ) /* purpose: initialize the data structure with defaults * paramtr: appinfo (OUT): initialized memory block * argc (IN): from main() * argv (IN): from main() */ { size_t tempsize = getpagesize(); char* tempname = (char*) malloc(tempsize); /* find a suitable directory for temporary files */ const char* tempdir = getTempDir(); /* reset everything */ memset( appinfo, 0, sizeof(AppInfo) ); /* init timestamps with defaults */ now( &appinfo->start ); appinfo->finish = appinfo->start; /* obtain umask */ appinfo->umask = umask(0); umask(appinfo->umask); /* obtain system information */ initMachineInfo( &appinfo->machine ); /* initialize some data for myself */ initStatInfoFromName( &appinfo->gridstart, argv[0], O_RDONLY, 0 ); /* default for stdin */ initStatInfoFromName( &appinfo->input, "/dev/null", O_RDONLY, 0 ); /* default for stdout */ #if 1 pattern( tempname, tempsize, tempdir, "/", "gs.out.XXXXXX" ); initStatInfoAsTemp( &appinfo->output, tempname ); #else initStatInfoFromName( &appinfo->output, "/dev/null", O_WRONLY | O_CREAT, 1 ); #endif /* default for stderr */ #if 1 pattern( tempname, tempsize, tempdir, "/", "gs.err.XXXXXX" ); initStatInfoAsTemp( &appinfo->error, tempname ); #else initStatInfoFromHandle( &appinfo->error, STDERR_FILENO ); #endif /* default for stdlog */ initStatInfoFromHandle( &appinfo->logfile, STDOUT_FILENO ); /* default for application-level feedback-channel */ pattern( tempname, tempsize, tempdir, "/", "gs.app.XXXXXX" ); initStatInfoAsFifo( &appinfo->channel, tempname, "GRIDSTART_CHANNEL" ); /* free pattern space */ free((void*) tempname ); /* original argument vector */ appinfo->argc = argc; appinfo->argv = argv; /* where do I run -- guess the primary interface IPv4 dotted quad */ /* find out where we run at (might stall LATER for some time on DNS) */ whoami( appinfo->ipv4, sizeof(appinfo->ipv4), appinfo->prif, sizeof(appinfo->prif) ); /* record resource limits */ initLimitInfo( &appinfo->limits ); /* which process is me */ appinfo->child = getpid(); } static size_t safe_strlen( const char* s ) { return ( s == NULL ? 0 : strlen(s) ); } int printAppInfo( const AppInfo* run ) /* purpose: output the given app info onto the given fd * paramtr: run (IN): is the collective information about the run * returns: the number of characters actually written (as of write() call). * if negative, check with errno for the cause of write failure. */ { int i, result = -1; int fd = run->logfile.source == IS_HANDLE ? 
run->logfile.file.descriptor : open( run->logfile.file.name, O_WRONLY | O_APPEND | O_CREAT, 0644 ); if ( fd != -1 ) { int locked; size_t wsize, size = getpagesize() << 5; /* initial assumption */ char* buffer = NULL; /* Adjust for final/initial sections */ if ( run->icount && run->initial ) for ( i=0; i<run->icount; ++i ) size += 256 + safe_strlen( run->initial[i].lfn ) + safe_strlen( run->initial[i].file.name ); if ( run->fcount && run->final ) for ( i=0; i<run->fcount; ++i ) size += 256 + safe_strlen( run->final[i].lfn ) + safe_strlen( run->final[i].file.name ); /* Adjust for <data> sections in stdout and stderr */ size += ( data_section_size << 1 ); /* Allocate buffer -- this may fail? */ buffer = (char*) calloc( size, sizeof(char) ); /* what about myself? Update stat info on log file */ updateStatInfo( &((AppInfo*) run)->logfile ); /* obtain resource usage for xxxx */ #if 0 struct rusage temp; getrusage( RUSAGE_SELF, &temp ); addUseInfo( (struct rusage*) &run->usage, &temp ); getrusage( RUSAGE_CHILDREN, &temp ); addUseInfo( (struct rusage*) &run->usage, &temp ); #else getrusage( RUSAGE_SELF, (struct rusage*) &run->usage ); #endif /* FIXME: is this true and necessary? */ updateLimitInfo( (LimitInfo*) &run->limits ); /* stop the clock */ now( (struct timeval*) &run->finish ); wsize = convert2XML( buffer, size, run ); locked = mytrylock(fd); result = writen( fd, buffer, wsize, 3 ); /* FIXME: what about wsize != result */ if ( locked==1 ) lockit( fd, F_SETLK, F_UNLCK ); free( (void*) buffer ); ((AppInfo*) run)->isPrinted = 1; if ( run->logfile.source == IS_FILE ) close(fd); } return result; } void envIntoAppInfo( AppInfo* runinfo, char* envp[] ) /* purpose: save a deep copy of the current environment * paramtr: appinfo (IO): place to store the deep copy * envp (IN): current environment pointer */ { /* only do something for an existing environment */ if ( envp ) { char** dst; char* const* src = envp; size_t size = 0; while ( *src++ ) ++size; runinfo->envc = size; runinfo->envp = (char**) calloc( size+1, sizeof(char*) ); dst = (char**) runinfo->envp; for ( src = envp; dst - runinfo->envp <= size; ++src ) { *dst++ = *src ? strdup(*src) : NULL; } } } void deleteAppInfo( AppInfo* runinfo ) /* purpose: destructor * paramtr: runinfo (IO): valid AppInfo structure to destroy. 
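 *
 * A minimal lifecycle sketch (editorial illustration, not part of the
 * original source; error handling omitted):
 *
 *   AppInfo run;
 *   initAppInfo( &run, argc, argv );
 *   ... execute jobs and fill in the various records ...
 *   printAppInfo( &run );
 *   deleteAppInfo( &run );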
*/ { size_t i; #ifdef EXTRA_DEBUG debugmsg( "# deleteAppInfo(%p)\n", runinfo ); #endif deleteLimitInfo( &runinfo->limits ); deleteStatInfo( &runinfo->input ); deleteStatInfo( &runinfo->output ); deleteStatInfo( &runinfo->error ); deleteStatInfo( &runinfo->logfile ); deleteStatInfo( &runinfo->gridstart ); deleteStatInfo( &runinfo->channel ); if ( runinfo->icount && runinfo->initial ) for ( i=0; i<runinfo->icount; ++i ) deleteStatInfo( &runinfo->initial[i] ); if ( runinfo->fcount && runinfo->final ) for ( i=0; i<runinfo->fcount; ++i ) deleteStatInfo( &runinfo->final[i] ); deleteJobInfo( &runinfo->setup ); deleteJobInfo( &runinfo->prejob ); deleteJobInfo( &runinfo->application ); deleteJobInfo( &runinfo->postjob ); deleteJobInfo( &runinfo->cleanup ); if ( runinfo->envc && runinfo->envp ) { char** p; for ( p = (char**) runinfo->envp; *p; p++ ) { if ( *p ) free((void*) *p ); } free((void*) runinfo->envp); runinfo->envp = NULL; runinfo->envc = 0; } /* release system information */ deleteMachineInfo( &runinfo->machine ); memset( runinfo, 0, sizeof(AppInfo) ); } ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/zio.c��������������������������������������������0000644�0001750�0001750�00000006363�11757531137�022404� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ #include "zio.h" #include <errno.h> #include <string.h> #include <stdio.h> #include <sys/wait.h> #include <sys/stat.h> #include <fcntl.h> #include <unistd.h> #include <stdlib.h> static pid_t* childpid = NULL; static const char* RCS_ID = "$Id: zio.c 1142 2009-01-13 22:58:39Z gmehta $"; int zopen( const char* pathname, int flags, mode_t mode ) /* purpose: open a file, but put gzip into the io-path * paramtr: pathname (IN): file to read or create * flags (IN): if O_RDONLY, use gunzip on file * if O_WRONLY, use gzip on file * mode (IN): file mode, see open(2) * returns: -1 in case of error, or an open file descriptor */ { int pfd[2]; pid_t pid; long maxfd = sysconf( _SC_OPEN_MAX ); if ( maxfd == -1 ) maxfd = _POSIX_OPEN_MAX; if ( (flags & 3) != O_RDONLY && (flags & 3) != O_WRONLY ) { errno = EINVAL; return -1; } if ( childpid == NULL ) { if ( (childpid = calloc(maxfd,sizeof(pid_t))) == NULL ) return -1; } if ( pipe(pfd) < 0 ) return -1; if ( (pid=fork()) < 0 ) return -1; else if ( pid == 0 ) { /* child code */ char* argv[3]; int fd = open( pathname, flags, mode ); argv[0] = strdup( GZIP_PATH ); argv[2] = NULL; if ( fd == -1 ) _exit(126); if ( (flags & 3) == O_RDONLY ) { close(pfd[0]); if ( pfd[1] != STDOUT_FILENO ) { dup2( pfd[1], STDOUT_FILENO ); close(pfd[1]); } if ( fd != STDIN_FILENO ) { dup2( fd, STDIN_FILENO ); close(fd); } argv[1] = "-cd"; } else { close(pfd[1]); if ( pfd[0] != STDIN_FILENO ) { dup2( pfd[0], STDIN_FILENO ); close(pfd[0]); } if ( fd != STDOUT_FILENO ) { dup2( fd, STDOUT_FILENO ); close(fd); } argv[1] = "-cf"; } /* close descriptors in childpid for gzip */ for ( fd=0; fd<maxfd; ++fd ) if ( childpid[fd] > 0 ) close(childpid[fd]); execv( GZIP_PATH, argv ); _exit(127); } else { /* parent code */ int keep = -1; if ( (flags & 3) == O_RDONLY ) { close(pfd[1]); keep = pfd[0]; } else { close(pfd[0]); keep = pfd[1]; } childpid[keep] = pid; return keep; } } int zclose( int fd ) /* purpose: close a file that has a gzip in its io path * returns: process status from gzip */ { int status; pid_t pid; if ( childpid == NULL ) { errno = EBADF; return -1; } if ( (pid = childpid[fd]) == 0 ) { errno = EBADF; return -1; } childpid[fd] = 0; if ( close(fd) == -1 ) return -1; while ( waitpid( pid, &status, 0 ) < 0 ) { if ( errno != EINTR && errno != EAGAIN ) return -1; } return status; } �����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/rwio.c�������������������������������������������0000644�0001750�0001750�00000010525�11757531137�022556� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include <fcntl.h>
#include <utime.h>
#include <sys/poll.h>
#include <errno.h>
#include <stdio.h>
#include <time.h>
#include <unistd.h>

#ifdef sun
#include <memory.h>
#endif

#include <string.h>
#include "rwio.h"

static const char* RCS_ID = "$Id: rwio.c 50 2007-05-19 00:48:32Z gmehta $";

ssize_t writen( int fd, const char* buffer, ssize_t n, unsigned restart )
/* purpose: write all n bytes in buffer, if possible at all
 * paramtr: fd (IN): filedescriptor open for writing
 *          buffer (IN): bytes to write (must be at least n byte long)
 *          n (IN): number of bytes to write
 *          restart (IN): if true, try to restart write at max that often
 * returns: n, if everything was written, or
 *          [0..n-1], if some bytes were written, but then failed,
 *          < 0, if some error occurred.
 */
{
  int start = 0;
  while ( start < n ) {
    int size = write( fd, buffer+start, n-start );
    if ( size < 0 ) {
      if ( restart && errno == EINTR ) {
        restart--;
        continue;
      }
      return size;
    } else {
      start += size;
    }
  }
  return n;
}

int lockit( int fd, int cmd, int type )
/* purpose: fill in POSIX lock structure and attempt lock or unlock
 * paramtr: fd (IN): which file descriptor to lock
 *          cmd (IN): F_SETLK, F_GETLK, F_SETLKW
 *          type (IN): F_WRLCK, F_RDLCK, F_UNLCK
 * warning: always locks full file ( offset=0, whence=SEEK_SET, len=0 )
 * returns: result from fcntl call
 */
{
  struct flock lock;

  /* empty all -- even non-POSIX data fields */
  memset( &lock, 0, sizeof(lock) );
  lock.l_type = type;

  /* full file */
  lock.l_whence = SEEK_SET;
  lock.l_start = 0;
  lock.l_len = 0;

  return ( fcntl( fd, cmd, &lock ) );
}

int mytrylock( int fd )
/* purpose: Try to lock the file
 * paramtr: fd (IN): open file descriptor
 * returns: -1: fatal error while locking the file, file not locked
 *           0: all backoff attempts failed, file is not locked
 *           1: file is locked
 */
{
  int backoff = 50; /* milliseconds, increasing */
  int retries = 10; /* 2.2 seconds total */
  while ( lockit( fd, F_SETLK, F_WRLCK ) == -1 ) {
    if ( errno != EACCES && errno != EAGAIN ) return -1;
    if ( --retries == 0 ) return 0;
    backoff += 50;
    poll( NULL, 0, backoff );
  }
  return 1;
}

int nfs_sync( int fd, unsigned idle )
/* purpose: tries to force NFS to update the given file descriptor
 * paramtr: fd (IN): descriptor of an open file
 *          idle (IN): how many milliseconds between lock and unlock
 * seealso: DEFAULT_SYNC_IDLE as suggested argument for idle
 * returns: 0 is ok, -1 for failure
 */
{
  /* lock file */
  if ( lockit( fd, F_SETLK, F_WRLCK ) == -1 ) return -1;

  /* wait $idle ms */
  if ( idle > 0 ) poll( NULL, 0, idle );

  /* unlock file */
  return lockit( fd, F_SETLK, F_UNLCK );
}

/*
 * old code
 */
#if 0
int nfs_sync( int fd, unsigned idle )
/* purpose: tries to force NFS to update the given file descriptor
 * paramtr: fd (IN): descriptor of an open file
 *          idle (IN): how many milliseconds between lock and unlock
 * returns: 0 is ok, -1 for failure
 */
{
#ifndef LINUX
  /* lock file */
  if ( lockit( fd, F_SETLK, F_WRLCK ) == -1 ) return -1;

  /* wait 100 ms */
  if ( idle > 0 ) poll( NULL, 0, idle );

  /* unlock file */
  return lockit( fd, F_SETLK, F_UNLCK );
#else /* is LINUX */
  /* how I loathe eternally broken NFS locking on Linux */
  char src[32];
  char dst[4096];
  struct utimbuf utb;

  /* which FD to translate */
snprintf( src, sizeof(src), "/proc/%d/fd/%d", getpid(), fd ); /* read symlink information */ if ( readlink( src, dst, sizeof(dst) ) == -1 ) return -1; /* attempt an utime */ utb.actime = utb.modtime = time(NULL); return utime( dst, &utb ); #endif /* LINUX */ } #endif /* old code #if 0 */ ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/mynss.h������������������������������������������0000644�0001750�0001750�00000002454�11757531137�022756� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _MYNSS_H #define _MYNSS_H #ifdef LINUX /* $@#! Linux */ #include <sys/types.h> #include <signal.h> #include <pwd.h> #include <grp.h> #include <sys/socket.h> #include <netinet/in.h> #include <arpa/inet.h> #include <netdb.h> extern volatile sig_atomic_t noSymbolicLookups; extern struct passwd* wrap_getpwuid( uid_t uid ); extern struct group* wrap_getgrgid( gid_t gid ); extern struct hostent* wrap_gethostbyaddr( const char* addr, int len, int type ); #else /* These are _sane_ systems like Solaris */ #define wrap_getpwuid(uid) getpwuid((uid)) #define wrap_getgrgid(gid) getgrgid((gid)) #define wrap_gethostbyaddr(a,b,c) gethostbyaddr((a),(b),(c)) #endif #endif /* _MYNSS_H */ ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/debug.h������������������������������������������0000644�0001750�0001750�00000002310�11757531137�022662� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). 
* * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _DEBUG_H #define _DEBUG_H #include <sys/types.h> extern ssize_t debugmsg( char* fmt, ... ); /* purpose: create a log line on stderr. * paramtr: fmt (IN): printf-style format string * ... (IN): other arguments according to format * returns: number of bytes written to STDERR via write() */ extern int hexdump( void* area, size_t size ); /* purpose: dump a memory area in old-DOS style hex chars and printable ASCII * paramtr: area (IN): pointer to area start * size (IN): extent of area to print * returns: number of byte written */ #endif /* _DEBUG_H */ ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/statinfo.h���������������������������������������0000644�0001750�0001750�00000013502�11757531137�023430� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _VDS_STATINFO_H #define _VDS_STATINFO_H #include <sys/types.h> #include <sys/stat.h> #include <unistd.h> typedef enum { IS_INVALID = 0, IS_FILE = 1, IS_HANDLE = 2, IS_TEMP = 3, IS_FIFO = 4 } StatSource; typedef struct { StatSource source; struct { int descriptor; /* IS_HANDLE, IS_TEMP|FIFO, openmode IS_FILE */ const char* name; /* IS_FILE, IS_TEMP|FIFO */ } file; int error; int deferred; /* IS_FILE: truncate was deferred */ union { unsigned char header[16]; /* IS_FILE regular init */ struct { size_t count; /* IS_FIFO msg count */ size_t rsize; /* IS_FIFO input byte count */ size_t wsize; /* IS_FIFO output byte count */ } fifo; } client; struct stat info; const char* lfn; /* from -s/-S option */ } StatInfo; extern int make_application_executable; /* if set to 1, make the application executable, no matter what. */ extern size_t data_section_size; /* size of the <data> section returned for stdout and stderr. */ extern int myaccess( const char* path ); /* purpose: check a given file for being accessible and executable * under the currently effective user and group id. 
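The two declarations in debug.h above are the whole of kickstart's logging surface; a short usage sketch, assuming the matching implementations from kickstart's debug.c are linked in:

#include "debug.h"

int main( void )
{
  unsigned char sample[32];
  int i;
  for ( i = 0; i < 32; ++i ) sample[i] = (unsigned char) i;

  debugmsg( "inspecting %d bytes at %p\n", (int) sizeof(sample), (void*) sample );
  hexdump( sample, sizeof(sample) );  /* hex columns plus printable ASCII */
  return 0;
}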
* paramtr: path (IN): current path to check * returns: 0 if the file is accessible, -1 for not */ extern char* findApp( const char* fn ); /* purpose: check the executable filename and correct it if necessary * paramtr: fn (IN): current knowledge of filename * returns: newly allocated fqpn of path to executable, or NULL if not found */ extern int forcefd( const StatInfo* info, int fd ); /* purpose: force open a file on a certain fd * paramtr: info (IN): is the StatInfo of the file to connect to (fn or fd) * the mode for potential open() is determined from this, too. * fd (IN): is the file descriptor to plug onto. If this fd is * the same as the descriptor in info, nothing will be done. * returns: 0 if all is well, or fn was NULL or empty. * 1 if opening a filename failed, * 2 if dup2 call failed */ extern int initStatInfoAsTemp( StatInfo* statinfo, char* pattern ); /* purpose: Initialize a stat info buffer with a temporary file * paramtr: statinfo (OUT): the newly initialized buffer * pattern (IO): is the input pattern to mkstemp(), will be modified! * returns: a value of -1 indicates an error */ extern int initStatInfoAsFifo( StatInfo* statinfo, char* pattern, const char* key ); /* purpose: Initialize a stat info buffer associated with a named pipe * paramtr: statinfo (OUT): the newly initialized buffer * pattern (IO): is the input pattern to mkstemp(), will be modified! * key (IN): is the environment key at which to store the filename * returns: a value of -1 indicates an error */ extern int initStatInfoFromName( StatInfo* statinfo, const char* filename, int openmode, int flag ); /* purpose: Initialize a stat info buffer with a filename to point to * paramtr: statinfo (OUT): the newly initialized buffer * filename (IN): the filename to memorize (deep copy) * openmode (IN): are the fcntl O_* flags to later open calls * flag (IN): bit#0 truncate: whether to reset the file size to zero * bit#1 defer op: whether to defer opening the file for now * bit#2 preserve: whether to backup existing target file * returns: the result of the stat() system call on the provided file */ extern int initStatInfoFromHandle( StatInfo* statinfo, int descriptor ); /* purpose: Initialize a stat info buffer with an open file descriptor * paramtr: statinfo (OUT): the newly initialized buffer * descriptor (IN): the handle to attach to * returns: the result of the fstat() system call on the provided handle */ extern int updateStatInfo( StatInfo* statinfo ); /* purpose: update existing and initialized statinfo with latest info * paramtr: statinfo (IO): stat info pointer to update * returns: the result of the stat() or fstat() system call. */ extern int addLFNToStatInfo( StatInfo* info, const char* lfn ); /* purpose: optionally replaces the LFN field with the specified LFN * paramtr: statinfo (IO): stat info pointer to update * lfn (IN): LFN to store, use NULL to free * returns: -1 in case of error, 0 if OK. */ extern size_t printXMLStatInfo( char* buffer, const size_t size, size_t* len, size_t indent, const char* tag, const char* id, const StatInfo* info ); /* purpose: XML format a stat info record into a given buffer * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level of tag * tag (IN): name of element to generate * id (IN): id attribute, use NULL to not generate * info (IN): stat info to print.
* returns: number of characters put into buffer (buffer length) */ extern void deleteStatInfo( StatInfo* statinfo ); /* purpose: clean up and invalidates structure after being done. * paramtr: statinfo (IO): clean up record. */ #endif /* _VDS_STATINFO_H */ ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/limitinfo.c��������������������������������������0000644�0001750�0001750�00000014263�11757531137�023573� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <errno.h> #ifdef sun #include <memory.h> #endif #include <string.h> #include <stdlib.h> #include <stdio.h> #include "debug.h" #include "tools.h" #include "limitinfo.h" static const char* RCS_ID = "$Id: limitinfo.c 4535 2011-09-26 22:14:19Z voeckler $"; #ifndef RLIMIT_NLIMITS #ifdef RLIM_NLIMITS #define RLIMIT_NLIMITS RLIM_NLIMITS #endif #endif extern void initLimitInfo( LimitInfo* limits ) /* purpose: initializes the data structure with current limits * paramtr: limits (OUT): initialized memory block */ { #ifdef RLIMIT_NLIMITS limits->size = RLIMIT_NLIMITS; #else #error "Need to write a fragment to guesstimate max# of resources" #endif limits->limits = (SingleLimitInfo*) calloc( sizeof(SingleLimitInfo), limits->size ); } extern void updateLimitInfo( LimitInfo* limits ) { int i; if ( limits == NULL || limits->limits == NULL ) return; for ( i=0; i<limits->size; ++i ) { limits->limits[i].resource = i; getrlimit( i, &(limits->limits[i].limit) ); limits->limits[i].error = errno; } } extern void deleteLimitInfo( LimitInfo* limits ) /* purpose: destructor * paramtr: limits (IO): valid LimitInfo structure to destroy. 
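The statinfo.h declarations above form an init/update/print/delete lifecycle around one StatInfo record. A minimal sketch of the usual sequence follows; the SI_* flag-bit names are illustrative inventions (only the bit positions are documented above), and buffer size, tag and id strings are arbitrary.

#include <fcntl.h>
#include <stdio.h>
#include "statinfo.h"

/* Hypothetical names for the documented flag bits of
 * initStatInfoFromName(); not part of statinfo.h. */
#define SI_TRUNCATE (1 << 0)  /* bit#0: reset file size to zero  */
#define SI_DEFER    (1 << 1)  /* bit#1: defer opening for now    */
#define SI_PRESERVE (1 << 2)  /* bit#2: back up an existing file */

static void report_file( const char* fn )
{
  StatInfo si;
  char buffer[4096];
  size_t len = 0;

  /* stat now, but defer the truncating open and keep a backup */
  if ( initStatInfoFromName( &si, fn, O_WRONLY | O_CREAT,
                             SI_TRUNCATE | SI_DEFER | SI_PRESERVE ) == -1 )
    return;

  updateStatInfo( &si );  /* refresh the struct stat snapshot */
  printXMLStatInfo( buffer, sizeof(buffer), &len, 2, "statcall", "output", &si );
  fwrite( buffer, 1, len, stdout );
  deleteStatInfo( &si );  /* invalidate and release resources */
}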
*/ { #ifdef EXTRA_DEBUG debugmsg( "# deleteLimitInfo(%p)\n", limits ); #endif if ( limits != NULL ) { if ( limits->limits != NULL ) free((void*) limits->limits ); memset( limits, 0, sizeof(LimitInfo) ); } } static char* resource2string( char* buffer, size_t capacity, int resource ) /* purpose: converts the resource integer into a string * paramtr: resource (IN): resource integer * returns: string with name of resource, or NULL if unknown */ { switch ( resource ) { #ifdef RLIMIT_CPU case RLIMIT_CPU: return strncpy( buffer, "RLIMIT_CPU", capacity ); #endif #ifdef RLIMIT_FSIZE case RLIMIT_FSIZE: return strncpy( buffer, "RLIMIT_FSIZE", capacity ); #endif #ifdef RLIMIT_DATA case RLIMIT_DATA: return strncpy( buffer, "RLIMIT_DATA", capacity ); #endif #ifdef RLIMIT_STACK case RLIMIT_STACK: return strncpy( buffer, "RLIMIT_STACK", capacity ); #endif #ifdef RLIMIT_NOFILE case RLIMIT_NOFILE: return strncpy( buffer, "RLIMIT_NOFILE", capacity ); #endif #if defined(RLIMIT_OFILE) && ! defined(RLIMIT_NOFILE) case RLIMIT_OFILE: return strncpy( buffer, "RLIMIT_OFILE", capacity ); #endif #ifdef RLIMIT_AS case RLIMIT_AS: return strncpy( buffer, "RLIMIT_AS", capacity ); #endif #ifdef RLIMIT_NPROC case RLIMIT_NPROC: return strncpy( buffer, "RLIMIT_NPROC", capacity ); #endif #ifdef RLIMIT_LOCKS case RLIMIT_LOCKS: return strncpy( buffer, "RLIMIT_LOCKS", capacity ); #endif #ifdef RLIMIT_SIGPENDING case RLIMIT_SIGPENDING: return strncpy( buffer, "RLIMIT_SIGPENDING", capacity ); #endif #ifdef RLIMIT_MSGQUEUE case RLIMIT_MSGQUEUE: return strncpy( buffer, "RLIMIT_MSGQUEUE", capacity ); #endif #ifdef RLIMIT_NICE case RLIMIT_NICE: return strncpy( buffer, "RLIMIT_NICE", capacity ); #endif #ifdef RLIMIT_RTPRIO case RLIMIT_RTPRIO: return strncpy( buffer, "RLIMIT_RTPRIO", capacity ); #endif #ifdef RLIMIT_VMEM #if RLIMIT_AS != RLIMIT_VMEM case RLIMIT_VMEM: return strncpy( buffer, "RLIMIT_VMEM", capacity ); #endif #endif #ifdef RLIMIT_CORE case RLIMIT_CORE: return strncpy( buffer, "RLIMIT_CORE", capacity ); #endif #ifdef RLIMIT_MEMLOCK case RLIMIT_MEMLOCK: return strncpy( buffer, "RLIMIT_MEMLOCK", capacity ); #endif #ifdef RLIMIT_RSS #if RLIMIT_AS != RLIMIT_RSS case RLIMIT_RSS: return strncpy( buffer, "RLIMIT_RSS", capacity ); #endif #endif default: snprintf( buffer, capacity, "RESOURCE_%d", resource ); return buffer; } /* never reached */ return NULL; } static char* value2string( char* buffer, size_t capacity, rlim_t value ) { if ( value == RLIM_INFINITY ) strncpy( buffer, "unlimited", capacity ); else sizer( buffer, capacity, sizeof(rlim_t), &value ); return buffer; } static int formatLimit( char* buffer, size_t size, size_t* len, size_t indent, const SingleLimitInfo* l ) { char id[32],value[32]; if ( l->error != 0 ) return *len; if ( resource2string( id, sizeof(id), l->resource ) == NULL ) return *len; #if 1 /* Gaurang prefers this one */ myprint( buffer, size, len, "%*s<soft id=\"%s\">%s</soft>\n", indent, "", id, value2string(value,sizeof(value),l->limit.rlim_cur) ); myprint( buffer, size, len, "%*s<hard id=\"%s\">%s</hard>\n", indent, "", id, value2string(value,sizeof(value),l->limit.rlim_max) ); #else /* I like concise */ myprint( buffer, size, len, "%*s<limit id=\"%s\" soft=\"%s\"", indent, "", id, value2string(value,sizeof(value),l->limit.rlim_cur) ); myprint( buffer, size, len, " hard=\"%s\"/>\n", value2string(value,sizeof(value),l->limit.rlim_max) ); #endif return *len; } extern int printXMLLimitInfo( char* buffer, size_t size, size_t* len, size_t indent, const LimitInfo* limits ) /* purpose: format the rusage 
record into the given buffer as XML. * paramtr: buffer (IO): area to store the output in * size (IN): capacity of character area * len (IO): current position within area, will be adjusted * indent (IN): indentation level * limits (IN): observed resource limits * returns: number of characters put into buffer (buffer length) */ { int i; /* sanity check */ if ( limits == NULL || limits->limits == NULL ) return *len; myprint( buffer, size, len, "%*s<resource>\n", indent, "" ); for ( i=0; i<limits->size; ++i ) formatLimit( buffer, size, len, indent+2, &limits->limits[i] ); myprint( buffer, size, len, "%*s</resource>\n", indent, "" ); return *len; } ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/mysignal.h���������������������������������������0000644�0001750�0001750�00000001752�11757531137�023430� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
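limitinfo.c above completes with the same init/update/print/delete shape as the statinfo API; a minimal sketch, assuming limitinfo.h declares LimitInfo and these four entry points:

#include <stdio.h>
#include "limitinfo.h"

static void report_limits( void )
{
  LimitInfo li;
  char buffer[8192];
  size_t len = 0;

  initLimitInfo( &li );    /* one slot per known RLIMIT_* resource */
  updateLimitInfo( &li );  /* snapshot current getrlimit() values  */
  printXMLLimitInfo( buffer, sizeof(buffer), &len, 2, &li );
  fwrite( buffer, 1, len, stdout );
  deleteLimitInfo( &li );
}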
*/ #ifndef _MYSIGNAL_H #define _MYSIGNAL_H #include <signal.h> #if 1 /* so far, all systems I know use void */ # define SIGRETTYPE void #else # define SIGRETTYPE int #endif #if defined(SUNOS) && defined(SUN) # define SIGPARAM void #else /* SOLARIS, LINUX, IRIX, AIX, SINIXY */ # define SIGPARAM int #endif typedef SIGRETTYPE SigFunc( SIGPARAM ); #endif /* _MYSIGNAL_H */ ����������������������pegasus-wms_4.0.1+dfsg/src/tools/pegasus-kickstart/dfp.eps������������������������������������������0000644�0001750�0001750�00000042052�11757531137�022714� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������%!PS-Adobe-2.0 EPSF-1.2 %%BoundingBox: 12 593 445 784 %%Title: dfp %%CreationDate: Tue Jul 1 17:27:40 2003 %%Creator: Tgif-4.1.41 written by William Chia-Wei Cheng (bill.cheng@acm.org) %%ProducedBy: (unknown) %%Pages: 1 %%DocumentFonts: (atend) %%EndComments %%BeginProlog /tgifdict 58 dict def tgifdict begin /tgifellipsedict 6 dict def tgifellipsedict /mtrx matrix put /TGEL % tgifellipse { tgifellipsedict begin /yrad exch def /xrad exch def /y exch def /x exch def /savematrix mtrx currentmatrix def x y translate xrad yrad scale 0 0 1 0 360 arc savematrix setmatrix end } def /tgifarrowtipdict 8 dict def tgifarrowtipdict /mtrx matrix put /TGAT % tgifarrowtip { tgifarrowtipdict begin /dy exch def /dx exch def /h exch def /w exch def /y exch def /x exch def /savematrix mtrx currentmatrix def x y translate dy dx atan rotate 0 0 moveto w neg h lineto w neg h neg lineto savematrix setmatrix end } def /tgifarcdict 8 dict def tgifarcdict /mtrx matrix put /TGAN % tgifarcn { tgifarcdict begin /endangle exch def /startangle exch def /yrad exch def /xrad exch def /y exch def /x exch def /savematrix mtrx currentmatrix def x y translate xrad yrad scale 0 0 1 startangle endangle arc savematrix setmatrix end } def /TGAR % tgifarc { tgifarcdict begin /endangle exch def /startangle exch def /yrad exch def /xrad exch def /y exch def /x exch def /savematrix mtrx currentmatrix def x y translate xrad yrad scale 0 0 1 startangle endangle arcn savematrix setmatrix end } def /TGMAX { exch dup 3 1 roll exch dup 3 1 roll gt { pop } { exch pop } ifelse } def /TGMIN { exch dup 3 1 roll exch dup 3 1 roll lt { pop } { exch pop } ifelse } def /TGSW { stringwidth pop } def /bd { bind def } bind def /GS { gsave } bd /GR { grestore } bd /NP { newpath } bd /CP { closepath } bd /CHP { charpath } bd /CT { curveto } bd /L { lineto } bd /RL { rlineto } bd /M { moveto } bd /RM { rmoveto } bd /S { stroke } bd /F { fill } bd /TR { translate } bd /RO { rotate } bd /SC { scale } bd /MU { mul } bd /DI { div } bd /DU { dup } bd /NE { neg } bd /AD { add } bd /SU { sub } bd /PO { pop } bd /EX { exch } bd /CO { concat } bd /CL { clip } bd /EC { eoclip } bd /EF { eofill } bd /IM { image } bd /IMM { imagemask } bd /ARY { array } bd /SG { setgray } bd /RG { setrgbcolor } bd /SD { setdash } bd /W { setlinewidth } bd /SM { setmiterlimit } bd /SLC { setlinecap } bd /SLJ { setlinejoin } bd /SH { show } bd /FF { findfont } bd /MS { makefont setfont } bd /AR { arcto 4 {pop} repeat } bd /CURP { currentpoint } bd /FLAT { flattenpath strokepath clip newpath } bd /TGSM { tgiforigctm setmatrix } def /TGRM { savematrix setmatrix } def end %%EndProlog %%Page: 1 1 
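The SIGRETTYPE/SIGPARAM macros in mysignal.h above exist so that the single SigFunc typedef fits both pre-POSIX and POSIX signal interfaces; a sketch of installing a handler through that typedef with sigaction, flag handling trimmed:

#include <signal.h>
#include <string.h>
#include "mysignal.h"

static volatile sig_atomic_t seen_term = 0;

static SIGRETTYPE on_term( SIGPARAM signo )
{
  (void) signo;
  seen_term = 1;  /* async-signal-safe: just record the event */
}

static int install_handler( int signo, SigFunc* handler )
{
  struct sigaction sa;
  memset( &sa, 0, sizeof(sa) );
  sa.sa_handler = handler;
  sigemptyset( &sa.sa_mask );
  return sigaction( signo, &sa, NULL );
}

/* usage: install_handler( SIGTERM, on_term ); */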
%%PageBoundingBox: 12 593 445 784 tgifdict begin /tgifsavedpage save def 1 SM 1 W 0 SG 72 0 MU 72 11 MU TR 72 128 DI 100.000 MU 100 DI DU NE SC GS /tgiforigctm matrix currentmatrix def % TEXT NP 0 SG GS 1 W 470 130 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (kickstart) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (kickstart) SH GR GR % TEXT NP 0 SG GS 1 W 395 80 M GS 0 SG /Helvetica FF [14 0 0 -14 0 0] MS ([0]) SH GR 0 17 RM GS GR 0 17 RM GS GR 0 17 RM GS 0 SG /Helvetica FF [14 0 0 -14 0 0] MS ([1]) SH GR 0 17 RM GS GR 0 17 RM GS GR 0 17 RM GS 0 SG /Helvetica FF [14 0 0 -14 0 0] MS ([2]) SH GR GR % TEXT NP 0 SG GS 1 W 257 130 M GS GS 0 0 AD GR 2 DI NE 0 RM GR GR % BOX 0 SG GS 10 SM GS NP 25 50 M 275 50 L 275 350 L 25 350 L CP S GR GR % OVAL 0 SG GS GS NP 197 61 57 6 TGEL S GR GR % ARC 0 SG GS GS NP 197 89 57 6 -180 -360 TGAR S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 140 61 M 140 89 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 255 61 M 255 89 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 197 87 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (config/param) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (config/param) SH GR GR % OVAL 0 SG GS GS NP 197 111 57 6 TGEL S GR GR % ARC 0 SG GS GS NP 197 139 57 6 -180 -360 TGAR S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 140 111 M 140 139 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 255 111 M 255 139 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 197 137 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (invoc. rec.) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (invoc. rec.) SH GR GR % OVAL 0 SG GS GS NP 197 161 57 6 TGEL S GR GR % ARC 0 SG GS GS NP 197 189 57 6 -180 -360 TGAR S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 140 161 M 140 189 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 255 161 M 255 189 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 197 187 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (app. channel) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (app. 
channel) SH GR GR % POLY/OPEN-SPLINE 0 SG GS NP 255 75 M 0 135 atan DU cos 10.000 MU 390 exch SU exch sin 10.000 MU 75 exch SU L TGSM 2 W S 1 W GR GS TGSM NP 390 75 10.000 4.000 135 0 TGAT 1 SG CP F 0 SG NP 390 75 10.000 4.000 135 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 390 125 M 0 -135 atan DU cos 10.000 MU 255 exch SU exch sin 10.000 MU 125 exch SU L TGSM 2 W S 1 W GR GS TGSM NP 255 125 10.000 4.000 -135 0 TGAT 1 SG CP F 0 SG NP 255 125 10.000 4.000 -135 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 390 175 M 0 -135 atan DU cos 10.000 MU 255 exch SU exch sin 10.000 MU 175 exch SU L TGSM 2 W S 1 W GR GS TGSM NP 255 175 10.000 4.000 -135 0 TGAT 1 SG CP F 0 SG NP 255 175 10.000 4.000 -135 0 TGAT CP F GR % TEXT NP 0 SG GS 1 W 200 45 M GS GS 0 /Times-Bold FF [14 0 0 -14 0 0] MS (submit host) TGSW AD GR 2 DI NE 0 RM 0 SG /Times-Bold FF [14 0 0 -14 0 0] MS (submit host) SH GR GR % TEXT NP 0 SG GS 1 W 425 30 M GS GS 0 /Times-Bold FF [14 0 0 -14 0 0] MS (worker node) TGSW AD GR 2 DI NE 0 RM 0 SG /Times-Bold FF [14 0 0 -14 0 0] MS (worker node) SH GR GR % TEXT NP 0 SG GS 1 W 390 70 M GS GS 0 /Times-Roman FF [14 0 0 -14 0 0] MS (stdin) TGSW AD GR NE 0 RM 0 SG /Times-Roman FF [14 0 0 -14 0 0] MS (stdin) SH GR GR % TEXT NP 0 SG GS 1 W 390 120 M GS GS 0 /Times-Roman FF [14 0 0 -14 0 0] MS (stdout) TGSW AD GR NE 0 RM 0 SG /Times-Roman FF [14 0 0 -14 0 0] MS (stdout) SH GR GR % TEXT NP 0 SG GS 1 W 390 170 M GS GS 0 /Times-Roman FF [14 0 0 -14 0 0] MS (stderr) TGSW AD GR NE 0 RM 0 SG /Times-Roman FF [14 0 0 -14 0 0] MS (stderr) SH GR GR % RCBOX 0 SG GS GS NP 524 50 M 540 50 540 200 16 AR 540 184 L 540 200 390 200 16 AR 406 200 L 390 200 390 50 16 AR 390 66 L 390 50 540 50 16 AR CP S GR GR % RCBOX 0 SG GS GS NP 674 160 M 690 160 690 200 16 AR 690 184 L 690 200 590 200 16 AR 606 200 L 590 200 590 160 16 AR 590 176 L 590 160 690 160 16 AR CP [2 2] 0 SD S GR GR % TEXT NP 0 SG GS 1 W 640 185 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (postjob) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (postjob) SH GR GR % RCBOX 0 SG GS GS NP 674 105 M 690 105 690 145 16 AR 690 129 L 690 145 590 145 16 AR 606 145 L 590 145 590 105 16 AR 590 121 L 590 105 690 105 16 AR CP S GR GR % TEXT NP 0 SG GS 1 W 640 130 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (main job) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (main job) SH GR GR % RCBOX 0 SG GS GS NP 674 50 M 690 50 690 90 16 AR 690 74 L 690 90 590 90 16 AR 606 90 L 590 90 590 50 16 AR 590 66 L 590 50 690 50 16 AR CP [2 2] 0 SD S GR GR % TEXT NP 0 SG GS 1 W 640 75 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (prejob) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (prejob) SH GR GR % POLY/OPEN-SPLINE 0 SG GS [12 4] 0 SD NP 540 120 M -50 50 atan DU cos 8.000 MU 590 exch SU exch sin 8.000 MU 70 exch SU L TGSM 1 W S [] 0 SD GR GS TGSM NP 590 70 8.000 3.000 50 -50 TGAT 1 SG CP F 0 SG NP 590 70 8.000 3.000 50 -50 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS [12 4] 0 SD NP 540 125 M 0 50 atan DU cos 8.000 MU 590 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S [] 0 SD GR GS TGSM NP 590 125 8.000 3.000 50 0 TGAT 1 SG CP F 0 SG NP 590 125 8.000 3.000 50 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS [12 4] 0 SD NP 540 130 M 50 50 atan DU cos 8.000 MU 590 exch SU exch sin 8.000 MU 180 exch SU L TGSM 1 W S [] 0 SD GR GS TGSM NP 590 180 8.000 3.000 50 50 TGAT 1 SG CP F 0 SG NP 590 180 8.000 3.000 50 50 TGAT CP F GR % BOX 0 SG GS 10 SM GS NP 350 35 M 790 35 L 790 300 L 350 300 L CP S GR GR % OVAL 0 SG GS GS NP 87 
240 47 10 TGEL S GR GR % ARC 0 SG GS GS NP 87 290 47 11 -180 -360 TGAR S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 40 241 M 40 290 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 135 241 M 135 290 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 87 282 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (VDC) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (VDC) SH GR GR % RCBOX 0 SG GS GS NP 84 100 M 100 100 100 155 16 AR 100 139 L 100 155 40 155 16 AR 56 155 L 40 155 40 100 16 AR 40 116 L 40 100 100 100 16 AR CP S GR GR % TEXT NP 0 SG GS 1 W 70 125 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (exit) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (exit) SH GR 0 17 RM GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (code) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (code) SH GR GR % POLY/OPEN-SPLINE 0 SG GS NP 140 125 M 0 -40 atan DU cos 8.000 MU 100 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S GR GS TGSM NP 100 125 8.000 3.000 -40 0 TGAT 1 SG CP F 0 SG NP 100 125 8.000 3.000 -40 0 TGAT CP F GR % ARC 0 SG GS GS NP 535 125 29 29 -59 48 TGAN S GR GR GS TGSM NP 550 150 8.000 3.000 -50 30 TGAT 1 SG CP F 0 SG NP 550 150 8.000 3.000 -50 30 TGAT CP F GR % OVAL 0 SG GS GS NP 547 236 57 6 TGEL S GR GR % ARC 0 SG GS GS NP 547 264 57 6 -180 -360 TGAR S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 490 236 M 490 264 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 605 236 M 605 264 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 547 262 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (FIFO) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (FIFO) SH GR GR % POLY/OPEN-SPLINE 0 SG GS NP 690 125 M 715 125 L 715 250 L 0 -110 atan DU cos 8.000 MU 605 exch SU exch sin 8.000 MU 250 exch SU L TGSM 1 W S GR GS TGSM NP 605 250 8.000 3.000 -110 0 TGAT 1 SG CP F 0 SG NP 605 250 8.000 3.000 -110 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 490 250 M 465 250 L 465 175 L 0 -50 atan DU cos 8.000 MU 415 exch SU exch sin 8.000 MU 175 exch SU L TGSM 1 W S GR GS TGSM NP 415 175 8.000 3.000 -50 0 TGAT 1 SG CP F 0 SG NP 415 175 8.000 3.000 -50 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS [2 2] 0 SD NP 690 180 M 0 25 atan DU cos 8.000 MU 715 exch SU exch sin 8.000 MU 180 exch SU L TGSM 1 W S [] 0 SD GR GS TGSM NP 715 180 8.000 3.000 25 0 TGAT 1 SG CP F 0 SG NP 715 180 8.000 3.000 25 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS [2 2] 0 SD NP 690 65 M 715 65 L 60 0 atan DU cos 8.000 MU 715 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S [] 0 SD GR GS TGSM NP 715 125 8.000 3.000 0 60 TGAT 1 SG CP F 0 SG NP 715 125 8.000 3.000 0 60 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 75 155 M 75 200 L 85 200 L 30 0 atan DU cos 8.000 MU 85 exch SU exch sin 8.000 MU 230 exch SU L TGSM 1 W S GR GS TGSM NP 85 230 8.000 3.000 0 30 TGAT 1 SG CP F 0 SG NP 85 230 8.000 3.000 0 30 TGAT CP F GR % RCBOX 0 SG GS GS NP 234 230 M 250 230 250 300 16 AR 250 284 L 250 300 165 300 16 AR 181 300 L 165 300 165 230 16 AR 165 246 L 165 230 250 230 16 AR CP S GR GR % TEXT NP 0 SG GS 1 W 205 245 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (app) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (app) SH GR 0 17 RM GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (specific) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (specific) SH GR 0 17 RM GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (channel) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (channel) SH GR 0 17 RM GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (consumer) TGSW AD GR 2 DI NE 0 RM 0 SG 
/Helvetica-Bold FF [14 0 0 -14 0 0] MS (consumer) SH GR GR % POLY/OPEN-SPLINE 0 SG GS NP 200 195 M 35 0 atan DU cos 8.000 MU 200 exch SU exch sin 8.000 MU 230 exch SU L TGSM 1 W S GR GS TGSM NP 200 230 8.000 3.000 0 35 TGAT 1 SG CP F 0 SG NP 200 230 8.000 3.000 0 35 TGAT CP F GR GR tgifsavedpage restore end showpage %%Trailer %MatchingCreationDate: Tue Jul 1 17:27:40 2003 %%DocumentFonts: Times-Roman %%+ Times-Bold %%+ Helvetica %%+ Helvetica-Bold %%EOF ��������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/condor-log-parser/�������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�021330� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/condor-log-parser/userlog-reader.C���������������������������������0000755�0001750�0001750�00000006320�11757531137�024350� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <stdio.h> #include <time.h> #include <assert.h> #include <string.h> #include "user_log.c++.h" int foundbad=0; int print_detail(FILE *fp, ULogEvent *e) { if(e==NULL) return 1; fprintf(fp,"%d.%d.%d ", e->cluster, e->proc, e->subproc); return 0; } int print_terminated_detail(FILE *fp, ULogEvent *e) { TerminatedEvent *te=(TerminatedEvent *) e; if(e==NULL) return 1; if(te->returnValue==0) { } else { foundbad++; print_detail(fp,e); fprintf(fp,"0_TERMINATE_BAD\n"); } return te->returnValue; } int print_script_detail(FILE *fp, ULogEvent *e) { PostScriptTerminatedEvent *te=(PostScriptTerminatedEvent *) e; if(e==NULL) return 1; if(te->returnValue==0) { } else { foundbad++; print_detail(fp,e); fprintf(fp,"0_POSTSCRIPT_BAD\n"); } return te->returnValue; } int print_aborted_detail(FILE *fp, ULogEvent *e) { JobAbortedEvent *te=(JobAbortedEvent *) e; if(e==NULL) return 1; foundbad++; print_detail(fp,e); fprintf(fp,"0_ABORT_BAD \n"); return 0; } void print_submit_detail(FILE *fp, ULogEvent *e) { SubmitEvent *se=(SubmitEvent *)e; char ptr[125]; if(e==NULL) return; if(se->submitEventUserNotes != NULL) { sscanf(se->submitEventUserNotes," pool:%s",ptr); print_detail(fp, e); fprintf(fp,"%s\n", ptr); } } int main(int argc, char** argv) { int i; bool done = false; ReadUserLog *ru=NULL; ULogEvent* e = NULL; FILE *tfp=NULL; if(argc != 2) { fprintf(stderr,"Usage: condor-log-parser condor.log\n"); return 1; } tfp=fopen(argv[1],"r"); if(tfp==NULL) { return 0; } else fclose(tfp); ru=new ReadUserLog(argv[1]); while( !done ) { ULogEventOutcome outcome = ru->readEvent( e ); const char *eventName = 
NULL; switch (outcome) { case ULOG_NO_EVENT: case ULOG_RD_ERROR: case ULOG_UNK_ERROR: done = true; break; case ULOG_OK: { switch (e->eventNumber) { case ULOG_JOB_EVICTED: case ULOG_SHADOW_EXCEPTION: case ULOG_GLOBUS_SUBMIT_FAILED: case ULOG_GLOBUS_RESOURCE_DOWN: case ULOG_REMOTE_ERROR: print_detail(stdout, e); fprintf(stdout,"_BAD_%s\n",ULogEventNumberNames[e->eventNumber]); break; case ULOG_JOB_ABORTED: print_aborted_detail(stdout,e); break; case ULOG_POST_SCRIPT_TERMINATED: { int ret=0; if(((PostScriptTerminatedEvent *)e)->normal) ret=print_script_detail(stdout,e); break; } case ULOG_JOB_TERMINATED: { int ret=0; if(((TerminatedEvent *)e)->normal) ret=print_terminated_detail(stdout,e); } break; case ULOG_EXECUTABLE_ERROR: break; case ULOG_SUBMIT: print_submit_detail(stdout, e); break; default: break; } } break; default: assert( false ); break; } } delete ru; if(foundbad) return 1; return 0; } ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/condor-log-parser/README�������������������������������������������0000644�0001750�0001750�00000000205�11757531137�022175� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������This is a condor-log parser written in C. To compile you need a Condor distribution and then edit the Condor paths in the Makefile. �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/condor-log-parser/make-it������������������������������������������0000755�0001750�0001750�00000000747�11757531137�022605� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/bin/sh if [ "X$CONDOR_HOME" = "X" ]; then if [ "X$CONDOR_LOCATION" = "X" ]; then echo "ERROR! 
Please set either CONDOR_HOME or CONDOR_LOCATION" exit 1; else CONDOR_HOME=${CONDOR_LOCATION} export CONDOR_HOME fi fi set -x g++ -O2 -c -I${CONDOR_HOME}/include userlog-reader.C || exit 1 g++ userlog-reader.o -O2 -static -o condor-log-parser -L${CONDOR_HOME}/lib -lcondorapi -lc -lnss_files -lnss_dns -lresolv -lc -lnss_files -lnss_dns -lresolv -lc -ldl || exit 1 set +x �������������������������pegasus-wms_4.0.1+dfsg/src/tools/condor-log-parser/Makefile�����������������������������������������0000644�0001750�0001750�00000005122�11757531137�022760� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������# # Makefile # INSTALL = install STRIP = strip RM = rm -f CXX = g++ -ffor-scope CXXFLAGS = -O LD = $(CXX) LOADLIBES = -lm SYSTEM = $(shell uname -s | tr '[a-z]' '[A-Z]' | tr -d '_ -/') VERSION = $(shell uname -r) MARCH = $(shell uname -m | tr '[A-Z]' '[a-z]') MAJOR = $(firstword $(subst ., ,$(VERSION))) CONDOR = condor_compile CONDOR_LOCATION = $(shell condor_config_val RELEASE_DIR) ifndef ${prefix} prefix = $(PEGASUS_HOME) endif NROFF = groff -mandoc TEXT = -Tlatin1 HTML = -Thtml ifeq (SUNOS,${SYSTEM}) ifeq (5,${MAJOR}) # use these for the SUN CC compiler # on Solaris use this link string with gcc # gcc -Wl,-Bstatic keg.o -lstdc++ -lm -lnsl -lsocket -Wl,-Bdynamic -ldl -o keg CXX = CC ## SPARCv7 V7FLAGS = -xtarget=generic V9FLAGS = -xtarget=ultra -xarch=v9 CXXFLAGS = '-library=%none,Cstd,Crun' -dalign -ftrap=%none -fsimple -xlibmil $(EXTRACXXFLAGS) #EXTRACXXFLAGS = $(V7FLAGS) LD := $(CXX) $(EXTRACXXFLAGS) CXXFLAGS := -DSOLARIS $(CXXFLAGS) -xO4 -D__EXTENSIONS__=1 LOADLIBES = -lnsl -lsocket -lm -lc INSTALL = /usr/ucb/install else # old Solaris 1 not supported! endif endif ifeq (IRIX64,${SYSTEM}) # The regular 64bit Irix stuff is just too slow, use n32! SYSTEM := IRIX endif ifeq (IRIX,${SYSTEM}) CXX = CC -n32 -mips3 -r4000 LD = $(CXX) OPT_NORM = -O3 -IPA -LNO:opt=1 endif ifeq (AIX,${SYSTEM}) CXX = xlC CC = xlc endif ifeq (LINUX,${SYSTEM}) ifeq (ia64,${MARCH}) CXXFLAGS = -Wall -O2 -DMARCH_IA64=1 -ggdb else ifeq (x86_64,${MARCH}) CXXFLAGS = -Wall -O2 -m64 -ggdb else CXXFLAGS = -Wall -O2 -march=i686 -ggdb #LDFLAGS += -static LOADLIBES := -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic $(LOADLIBES) LOADLIBES += -ldl endif endif LD = gcc endif ifeq (,${CONDOR_LOCATION}) all: @echo "ERROR! You must set your CONDOR_LOCATION to point to the" @echo "base directory of your (full) Condor installation. Exiting." @exit 1 else CXXFLAGS += -I${CONDOR_LOCATION}/include LOADLIBES := ${CONDOR_LOCATION}/lib/libcondorapi.a $(LOADLIBES) endif # # === [3] ======================================================= rules section # There is no need to change things below this line. 
CXXFLAGS += -D${SYSTEM} -DMAJOR=${MAJOR} all : condor-log-parser userlog-reader.o : userlog-reader.C $(CXX) $(CXXFLAGS) $< -c -o $@ condor-log-parser: userlog-reader.o $(LD) $(LDFLAGS) $^ -o $@ $(LOADLIBES) install: condor-log-parser $(INSTALL) -s -m 0755 condor-log-parser $(prefix)/bin install.man: @echo "no manual pages available" install.doc: @echo "no documentation available" install.all: install clean: $(RM) userlog-reader.o core distclean: clean $(RM) condor-log-parser ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/make.pl������������������������������������������������������������0000755�0001750�0001750�00000001050�11757531137�017234� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#!/usr/bin/env perl # # prefer gmake over make -- ant can be so difficult! # require 5.005; use strict; use File::Spec; sub find_exec($) { # purpose: determine location of given binary in $PATH # returns: fully qualified path to binary, undef if not found my $program = shift; local($_); foreach ( File::Spec->path ) { my $fs = File::Spec->catfile( $_, $program ); return $fs if -x $fs; } undef; } my $make = find_exec('gmake') || find_exec('make'); exec { $make } $make, @ARGV if defined $make; exit 127; ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/vds-stat/����������������������������������������������������������0000755�0001750�0001750�00000000000�11757531667�017540� 5����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/vds-stat/vds-stat.1������������������������������������������������0000644�0001750�0001750�00000007104�11757531137�021361� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������.\" Copyright 1999-2004 University of Chicago and The University of .\" Southern California. All rights reserved. 
.\" .\" .\" $Id: vds-stat.1 50 2007-05-19 00:48:32Z gmehta $ .\" .\" Authors: Pavel Krajcevski .\" .TH "vds\-stat" "1" "1.4.6" "GriPhyN Virtual Data System" .SH NAME vds\-stat \- tool for interpreting kickstart records .SH SYNOPSIS .B vds\-stat [\-h] [[\-v]|[\-s]|[\-b]|[\-n]|[\-a]] [\-o file] .I dir .br .B vds\-stat [\-h] [[\-k]|[\-d]] [\-o file] .I kickstart record .SH DESCRIPTION The tool used to extract various data from kickstart records in a given job directory. This tool also is able to interpret single files and return the .I stdout and .I stderr elements of a given kickstart record. .PP .SH ARGUMENTS Any option will be displayed with its long options synonym(s). .TP .PD 0 .B \-h .TP .PD 1 .B \-\-help A quick guide to the usability of the program .TP .PD 0 .B \-v .TP .PD 1 .B \-\-verbose This option provides information on every .out file in the given .IR dir . The default is to ignore files that are either empty or contain data unparsable by the kickstart formatter. These will be displayed as such. .TP .PD 0 .B \-s .TP .PD 1 .B \-\-stdouts This option implies the -b option. It iterates through all of the kickstart records which do not have an exitcode of 0, and then prints all of the stdout and stderr elements of these files. Generally this should be used in conjunction with the -o option because there is alot of output. .TP .PD 0 .B \-b .TP .PD 1 .B \-\-badfile This option iterates through a given .I dir and finds all of the kickstart records which exited with an errorcode other than 0. .TP .PD 0 .B \-k .TP .PD 1 .B \-\-vds\-format\-kickstart Run the given .I kickstart record through the kickstart formatter. .TP .PD 0 .B \-d .TP .PD 1 .B \-\-data Return the stdout and stderr elements of a given .I kickstart record. This is usually the preferrable method to the -s option .TP .PD 0 .B \-n .TP .PD 1 .B \-\-numerr Returns the number of .I kickstart records in a given .I dir which produced an error code other than 0. .TP .PD 0 .B \-S .TP .PD 1 .B \-\-sum Returns the total time (in seconds) it took to run all invocations of kickstart in the given .IR dir . .SH "RETURN VALUE" Regular and planned program terminations will result in an exit code of 0. Abnormal termination will result in a non-zero exit code. .SH "OUTPUT" Default output will be of the form: .I username date time duration stdout stderr exitcode filename .br .TP Single file output will also have the executable and arguments .br .TP .I stdout and .I stderr will be .B TEMP if they were redirected to a temporary file and .B NTEMP if they were redirected to a "real" file. .br .TP .I exitcode will contain a letter (r, f, or s) to indicate regular, failed, or signalled exits from the program. Or it might contain "sus" which indicates suspension, however according to kickstart this should not happen. Also, if it does not contain any letters, then the exit was a raw UNIX failure. .br .TP .I duration will be the time it took to complete in seconds. .SH "EXAMPLE" .nf \f(CB .PD 0 ~$ vds-stat -k ~/vds-test/mytest/dags/QuarkCode/wf1/run0001/lsnum_ID000001.out .br pavelk Jul 21 2006 10:07 0.014 NTEMP TEMP r0 lsnum_ID000001.out /home/pavelk/vds-test/mytest/lsnum 5 15 .PP ~$ vds-stat -n ~/vds-test/mytest/dags/QuarkCode/wf1/run0001 .br .TP 0 \fP .fi .SH "RESTRICTIONS" When pointing to a directory it only searches for .out files. Any other files will have to be manually pointed to using the -k and/or -d options. 
.SH "AUTHORS" Pavel Krajcevski <pavelk at uchicago dot edu> .PP VDS - .BR http://vds.isi.edu/ .br������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/vds-stat/vds-format-kickstart.c������������������������������������0000644�0001750�0001750�00000013142�11757531137�023754� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include <stdio.h> #include <string.h> #include <stdlib.h> #include <assert.h> #include <getopt.h> #include <time.h> #include <libxml/xmlmemory.h> #include <libxml/xmlreader.h> #include <libxml/parser.h> #include <libxml/tree.h> #include <sys/types.h> #include <sys/stat.h> #include <fcntl.h> #include <unistd.h> #include "vds-format-kickstart.h" void pgusage(char *progname) { fprintf(stderr, "Usage: %s [--getdata | -v] [--help | -h] kickstart.xml\n", progname); exit(E_BADPARAMS); } int main(int argc, char **argv) { char kickfile[strlen(argv[argc - 1])], *kickfilename, opt; kickfilename=malloc(strlen(argv[argc-1])*sizeof(char)); int i, j; int getdata = 0; int option_index=0; invocation mainInvo; struct option long_options[] = { {"getdata", 0, 0, 'v'}, {"help", 0, 0, 'h'}, {0, 0, 0, 0} }; if(argc == 1) { pgusage(strdup(argv[0])); } while ((opt = getopt_long (argc, argv, "vh:", long_options, &option_index)) != -1) { switch(opt) { case 'v': getdata=1; break; case 'h': pgusage(argv[0]); case '?': pgusage(argv[0]); default: fprintf(stderr, "ERROR: Unrecognized option\n"); pgusage(argv[0]); } } strcpy(kickfile, argv[argc-1]); strcpy(kickfilename, getFileNameOnly(kickfile)); //setbuf(stdout, 0); int temp, toread; char buffer[8096]; if((temp = open(kickfile, O_RDONLY)) == -1) { printf("%s\t", kickfilename); fflush(NULL); fprintf(stderr, "Failed to open file\n"); return E_SYSERR; } else { if((toread = read(temp, buffer, 8000)) < 1) { printf("%s\t", kickfilename); fflush(NULL); fprintf(stderr, "Empty or corrupt file\n"); return E_SYSERR; } else { close(temp); } } xmlDocPtr kickDoc; xmlNodePtr kickRoot; xmlParserCtxtPtr ctxtkick; ctxtkick = xmlNewParserCtxt(); if (ctxtkick == NULL) { printf("%s\t", kickfilename); fflush(NULL); fprintf(stderr, "Failed to allocate parser context\n"); return E_SYSERR; } kickDoc = xmlCtxtReadFile(ctxtkick, kickfile, NULL, XML_PARSE_NOERROR | XML_PARSE_RECOVER); if((kickRoot = xmlDocGetRootElement(kickDoc)) == NULL) { printf("%s\t", kickfilename); fflush(NULL); fprintf(stderr, "Failed to parse xml\n"); return E_BADXML; } initInvo(&mainInvo); getInfo(kickRoot, &mainInvo); if(!getdata) { printf("%s\t%s\t%s\t",mainInvo.attr[10], parseDate(mainInvo.attr[1]), mainInvo.attr[2]); for(i = 0; i<mainInvo.numcalls; i++) { if(strcmp(mainInvo.instatcall[i].ident,"stdout")==0 || strcmp(mainInvo.instatcall[i].ident,"stderr")==0) { if(mainInvo.instatcall[i].type != NULL && strcmp(mainInvo.instatcall[i].type,"temporary")==0) { printf("TEMP\t"); } else { 
printf("NTEMP\t"); } } } if(mainInvo.numjobs == 1) { if(mainInvo.jobs[0].rawstatus >= 0) { if(strcmp(mainInvo.jobs[0].errortype, "regular") == 0) { printf("r%d\t", mainInvo.jobs[0].exitcode); } else if(strcmp(mainInvo.jobs[0].errortype, "failure") == 0) { printf("f%d\t", mainInvo.jobs[0].exitcode); } else if(strcmp(mainInvo.jobs[0].errortype, "signalled") == 0) { printf("s%d\t", mainInvo.jobs[0].exitcode); } else if(strcmp(mainInvo.jobs[0].errortype, "failure") == 0) { printf("sus\t"); } } else { printf("rf%d\t", mainInvo.jobs[0].rawstatus); } int k = strlen(kickfilename); kickfilename = realloc(kickfilename, 40*sizeof(char)); while(k < 40) { kickfilename[k] = ' '; k++; } printf("%s", kickfilename); printf("%s ", mainInvo.jobs[0].jstatcall.filename); for(i=0; i<mainInvo.jobs[0].numargs; i++) { printf("%s ", mainInvo.jobs[0].args[i]); } } else { for(i=0;i<mainInvo.numjobs;i++) { if(mainInvo.jobs[i].rawstatus >= 0) { if(strcmp(mainInvo.jobs[i].errortype, "regular") == 0) { printf("r%d\t", mainInvo.jobs[i].exitcode); } else if(strcmp(mainInvo.jobs[i].errortype, "failure") == 0) { printf("f%d\t", mainInvo.jobs[i].exitcode); } else if(strcmp(mainInvo.jobs[i].errortype, "signalled") == 0) { printf("s%d\t", mainInvo.jobs[i].exitcode); } else if(strcmp(mainInvo.jobs[i].errortype, "suspended") == 0) { printf("sus\t"); } } else { printf("%d\t", mainInvo.jobs[i].rawstatus); } int k = strlen(kickfilename); kickfilename = realloc(kickfilename, 40*sizeof(char)); while(k < 40) { kickfilename[k] = ' '; k++; } printf("%s", kickfilename); printf("%s: %s ", mainInvo.jobs[i].name, mainInvo.jobs[i].jstatcall.filename); for(j=i;j<mainInvo.jobs[i].numargs;j++) { printf("%s ", mainInvo.jobs[i].args[j]); } } } } else { printf("%s\t", kickfilename); printf("\n"); for(i=0;i<mainInvo.numcalls;i++) { if(strcmp(mainInvo.instatcall[i].ident,"stdout")==0) { printf("************************\n"); printf("*\tSTDOUT\n"); printf("************************\n"); if(strcmp(mainInvo.instatcall[i].type,"temporary")==0) { printf("%s\n",mainInvo.instatcall[i].data); } else { printf("Redirected to: %s\n", mainInvo.instatcall[i].filename); } } else if(strcmp(mainInvo.instatcall[i].ident,"stderr")==0) { printf("************************\n"); printf("*\tSTDERR\n"); printf("************************\n");if(strcmp(mainInvo.instatcall[i].type,"temporary")==0) { printf("%s\n",mainInvo.instatcall[i].data); } else { printf("Redirected to: %s\n", mainInvo.instatcall[i].filename); } } } printf("\n\n"); } printf("\n"); destroyInvo(&mainInvo); xmlFreeNode(kickRoot); xmlUnlinkNode(kickRoot); xmlFreeDoc(kickDoc); return SUCCESS; } ������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/vds-stat/vds-format-kickstart-help.c�������������������������������0000644�0001750�0001750�00000041302�11757531137�024701� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������#include 
<stdio.h> #include <string.h> #include <stdlib.h> #include <assert.h> #include <libxml/xmlmemory.h> #include <libxml/xmlreader.h> #include <libxml/parser.h> #include <libxml/tree.h> #include "vds-format-kickstart.h" char *tempString; void initInvo(invocation *newInvo) { newInvo->jobs = malloc(sizeof(jtype)); newInvo->numjobs = 0; newInvo->inuname = malloc(6*sizeof(char *)); newInvo->env = malloc(sizeof(char *)); newInvo->numenv = 0; newInvo->instatcall = malloc(sizeof(statcall)); newInvo->numcalls = 0; newInvo->attr = malloc(15*sizeof(char *)); newInvo->numattr = 15; newInvo->cwd = malloc(sizeof(char)); } void getInvoProp(invocation *main, xmlNodePtr root) { main->attr[0] = (char *)xmlGetProp(root, (xmlChar *)"version"); main->attr[1] = (char *)xmlGetProp(root, (xmlChar *)"start"); // Start of application according to host clock main->attr[2] = (char *)xmlGetProp(root, (xmlChar *)"duration"); //Duration of application run in seconds with microsecond fraction, according to host clock main->attr[3] = (char *)xmlGetProp(root, (xmlChar *)"transformation"); main->attr[4] = (char *)xmlGetProp(root, (xmlChar *)"derivation"); main->attr[5] = (char *)xmlGetProp(root, (xmlChar *)"resource"); main->attr[6] = (char *)xmlGetProp(root, (xmlChar *)"hostaddr"); main->attr[7] = (char *)xmlGetProp(root, (xmlChar *)"hostname"); main->attr[8] = (char *)xmlGetProp(root, (xmlChar *)"pid"); main->attr[9] = (char *)xmlGetProp(root, (xmlChar *)"uid"); main->attr[10] = (char *)xmlGetProp(root, (xmlChar *)"user"); main->attr[11] = (char *)xmlGetProp(root, (xmlChar *)"gid"); main->attr[12] = (char *)xmlGetProp(root, (xmlChar *)"group"); main->attr[13] = (char *)xmlGetProp(root, (xmlChar *)"wfLabel"); main->attr[14] = (char *)xmlGetProp(root, (xmlChar *)"wfStamp"); } void setupJob(xmlNodePtr child, invocation *invo) { invo->numjobs++; invo->jobs = realloc(invo->jobs, invo->numjobs*sizeof(jtype)); // Initialize pointers so that free() will be happy with erasing the memory later invo->jobs[invo->numjobs-1].name = malloc(sizeof(char)); invo->jobs[invo->numjobs-1].start = malloc(sizeof(char)); invo->jobs[invo->numjobs-1].duration = malloc(sizeof(char)); invo->jobs[invo->numjobs-1].pid = malloc(sizeof(char)); invo->jobs[invo->numjobs-1].errortype = malloc(sizeof(char)); invo->jobs[invo->numjobs-1].corefile = malloc(sizeof(char)); invo->jobs[invo->numjobs-1].args = malloc(sizeof(char *)); // job name must be one of the following: setup, prejob, mainjob, postjob, cleanup invo->jobs[invo->numjobs-1].name = (char *)child->name; invo->jobs[invo->numjobs-1].start = (char *)xmlGetProp(child, (xmlChar *)"start"); invo->jobs[invo->numjobs-1].duration = (char *)xmlGetProp(child, (xmlChar *)"duration"); invo->jobs[invo->numjobs-1].pid = (char *)xmlGetProp(child, (xmlChar *)"pid"); for(xmlNodePtr child2 = child->children; child2 != NULL; child2 = child2->next) { if(child2->type == XML_ELEMENT_NODE) { if(xmlStrcmp(child2->name, (xmlChar *)"usage") == 0) { invo->jobs[invo->numjobs-1].jusage.utime = atof((char *)xmlGetProp(child2, (xmlChar *)"utime")); invo->jobs[invo->numjobs-1].jusage.stime = atof((char *)xmlGetProp(child2, (xmlChar *)"stime")); invo->jobs[invo->numjobs-1].jusage.minflt = atoi((char *)xmlGetProp(child2, (xmlChar *)"minflt")); invo->jobs[invo->numjobs-1].jusage.majflt = atoi((char *)xmlGetProp(child2, (xmlChar *)"majflt")); invo->jobs[invo->numjobs-1].jusage.nswap = atoi((char *)xmlGetProp(child2, (xmlChar *)"nswap")); invo->jobs[invo->numjobs-1].jusage.nsignals = atoi((char *)xmlGetProp(child2, (xmlChar *)"nsignals")); 
if((tempString = (char *)xmlGetProp(child2, (xmlChar *)"nvcsw")) != NULL) { invo->jobs[invo->numjobs-1].jusage.nvcsw = atoi(tempString); } if((tempString = (char *)xmlGetProp(child2, (xmlChar *)"nivcsw")) != NULL) { invo->jobs[invo->numjobs-1].jusage.nivcsw = atoi(tempString); } } else if(xmlStrcmp(child2->name, (xmlChar *)"status") == 0) { invo->jobs[invo->numjobs-1].rawstatus = atoi((char *)xmlGetProp(child2, (xmlChar *)"raw")); xmlNodePtr child3 = child2->children; while(child3->type != XML_ELEMENT_NODE) { child3 = child3->next; } if(xmlStrcmp(child3->name, (xmlChar *)"regular") == 0) { invo->jobs[invo->numjobs-1].errortype = (char *)child3->name; invo->jobs[invo->numjobs-1].exitcode = atoi((char *)xmlGetProp(child3, (xmlChar *)"exitcode")); } else if(xmlStrcmp(child3->name, (xmlChar *)"failure") == 0) { invo->jobs[invo->numjobs-1].errortype = (char *)child3->name; invo->jobs[invo->numjobs-1].exitcode = atoi((char *)xmlGetProp(child3, (xmlChar *)"error")); } else if(xmlStrcmp(child3->name, (xmlChar *)"signalled") == 0) { invo->jobs[invo->numjobs-1].errortype = (char *)child3->name; invo->jobs[invo->numjobs-1].exitcode = atoi((char *)xmlGetProp(child3, (xmlChar *)"signal")); invo->jobs[invo->numjobs-1].corefile = (char *)xmlGetProp(child3, (xmlChar *)"corefile"); } else if(xmlStrcmp(child3->name, (xmlChar *)"suspended") == 0) { invo->jobs[invo->numjobs-1].errortype = (char *)child3->name; invo->jobs[invo->numjobs-1].exitcode = atoi((char *)xmlGetProp(child3, (xmlChar *)"suspended")); } } else if(xmlStrcmp(child2->name, (xmlChar *)"statcall") == 0) { setupStatCall(child2, &invo->jobs[invo->numjobs-1].jstatcall); } else if(xmlStrcmp(child2->name, (xmlChar *)"argument-vector") == 0) { for(xmlNodePtr child5 = child2->children; child5 != NULL; child5 = child5->next) { if(child5->type == XML_ELEMENT_NODE) { if(xmlStrcmp(child5->name, (xmlChar *)"arg") == 0) { strcpy(tempString, (char *)xmlGetProp(child5, (xmlChar *)"nr")); invo->jobs[invo->numjobs-1].numargs = atoi(tempString); invo->jobs[invo->numjobs-1].args = realloc(invo->jobs[invo->numjobs-1].args, invo->jobs[invo->numjobs-1].numargs*sizeof(char *)); invo->jobs[invo->numjobs-1].args[invo->jobs[invo->numjobs-1].numargs-1] = malloc(1024*sizeof(char)); strcpy(invo->jobs[invo->numjobs-1].args[invo->jobs[invo->numjobs-1].numargs-1], (char *)xmlNodeGetContent(child5)); } } } } else if(xmlStrcmp(child2->name, (xmlChar *)"arguments")==0) { invo->jobs[invo->numjobs-1].numargs = 1; invo->jobs[invo->numjobs-1].args = realloc(invo->jobs[invo->numjobs-1].args, invo->jobs[invo->numjobs-1].numargs*sizeof(char *)); invo->jobs[invo->numjobs-1].args[invo->jobs[invo->numjobs-1].numargs-1] = malloc(1024*sizeof(char)); strcpy(invo->jobs[invo->numjobs-1].args[invo->jobs[invo->numjobs-1].numargs-1], (char *)xmlNodeGetContent(child2)); } } } } void setupStatCall(xmlNodePtr child, statcall *setup) { // More malloc to appease the free() gods. 
  setup->type = NULL;
  setup->filename = NULL;
  setup->statinfo = calloc(12, sizeof(char *));
  setup->data = NULL;
  setup->error = atoi((char *)xmlGetProp(child, (xmlChar *)"error"));
  setup->ident = (char *)xmlGetProp(child, (xmlChar *)"id");
  /* iterate over all children; the element check below skips text nodes
   * (the original began at child->children->next and could miss a node) */
  for(xmlNodePtr child2 = child->children; child2 != NULL; child2 = child2->next) {
    if(child2->type == XML_ELEMENT_NODE) {
      if(xmlStrcmp(child2->name, (xmlChar *)"file") == 0) {
        setup->type = strdup("file");
        setup->filename = (char *)xmlGetProp(child2, (xmlChar *)"name");
      } else if(xmlStrcmp(child2->name, (xmlChar *)"descriptor") == 0) {
        setup->type = strdup("descriptor");
        setup->descriptor = atoi((char *)xmlGetProp(child2, (xmlChar *)"number"));
      } else if(xmlStrcmp(child2->name, (xmlChar *)"temporary") == 0) {
        setup->type = strdup("temporary");
        setup->filename = (char *)xmlGetProp(child2, (xmlChar *)"name");
        setup->descriptor = atoi((char *)xmlGetProp(child2, (xmlChar *)"descriptor"));
      } else if(xmlStrcmp(child2->name, (xmlChar *)"fifo") == 0) {
        setup->type = strdup("fifo");
        setup->filename = (char *)xmlGetProp(child2, (xmlChar *)"name");
        setup->descriptor = atoi((char *)xmlGetProp(child2, (xmlChar *)"descriptor"));
        if((tempString = (char *)xmlGetProp(child2, (xmlChar *)"count")) != NULL) {
          setup->count = atoi(tempString);
        }
        if((tempString = (char *)xmlGetProp(child2, (xmlChar *)"rsize")) != NULL) {
          setup->rsize = atoi(tempString);
        }
        if((tempString = (char *)xmlGetProp(child2, (xmlChar *)"wsize")) != NULL) {
          setup->wsize = atoi(tempString);
        }
      } else if(xmlStrcmp(child2->name, (xmlChar *)"statinfo") == 0) {
        setup->statinfo[0] = (char *)xmlGetProp(child2, (xmlChar *)"size");
        setup->statinfo[1] = (char *)xmlGetProp(child2, (xmlChar *)"mode");
        setup->statinfo[2] = (char *)xmlGetProp(child2, (xmlChar *)"inode");
        setup->statinfo[3] = (char *)xmlGetProp(child2, (xmlChar *)"nlink");
        setup->statinfo[4] = (char *)xmlGetProp(child2, (xmlChar *)"blksize");
        setup->statinfo[5] = (char *)xmlGetProp(child2, (xmlChar *)"atime");
        setup->statinfo[6] = (char *)xmlGetProp(child2, (xmlChar *)"mtime");
        setup->statinfo[7] = (char *)xmlGetProp(child2, (xmlChar *)"ctime");
        setup->statinfo[8] = (char *)xmlGetProp(child2, (xmlChar *)"uid");
        setup->statinfo[9] = (char *)xmlGetProp(child2, (xmlChar *)"user");
        setup->statinfo[10] = (char *)xmlGetProp(child2, (xmlChar *)"gid");
        setup->statinfo[11] = (char *)xmlGetProp(child2, (xmlChar *)"group");
      } else if(xmlStrcmp(child2->name, (xmlChar *)"data") == 0) {
        setup->data = (char *)xmlNodeGetContent(child2);
      }
    }
  }
}

void Free(void *ptr) {
  if(ptr != NULL) {
    free(ptr);
  }
}

void destroyInvo(invocation *minvo) {
  int i, j;
  for(i=0; i<minvo->numjobs; i++) {
    Free(minvo->jobs[i].start);
    Free(minvo->jobs[i].duration);
    Free(minvo->jobs[i].pid);
    Free(minvo->jobs[i].corefile);
    for(j=0; j<minvo->jobs[i].numargs; j++) {  /* free each copied argument... */
      Free(minvo->jobs[i].args[j]);
    }
    Free(minvo->jobs[i].args);                 /* ...then the vector itself */
    Free(minvo->jobs[i].jstatcall.type);
    Free(minvo->jobs[i].jstatcall.filename);
    if(minvo->jobs[i].jstatcall.statinfo != NULL) {
      for(j=0; j<12; j++) {
        Free(minvo->jobs[i].jstatcall.statinfo[j]);
      }
    }
    Free(minvo->jobs[i].jstatcall.statinfo);
    Free(minvo->jobs[i].jstatcall.data);
    Free(minvo->jobs[i].jstatcall.ident);
  }
  Free(minvo->jobs);
  Free(minvo->cwd);
  for(i=0; i<6; i++) {
    Free(minvo->inuname[i]);
  }
  Free(minvo->inuname);
  for(i=0; i<minvo->numenv; i++) {
    Free(minvo->env[i]);
  }
  Free(minvo->env);
  for(i=0; i<minvo->numattr; i++) {
    Free(minvo->attr[i]);
  }
  Free(minvo->attr);
  for(i=0; i<minvo->numcalls; i++) {
    Free(minvo->instatcall[i].type);
    Free(minvo->instatcall[i].filename);
    if(minvo->instatcall[i].statinfo != NULL) {
      for(j=0; j<12; j++) {
        Free(minvo->instatcall[i].statinfo[j]);
      }
    }
    Free(minvo->instatcall[i].statinfo);
    Free(minvo->instatcall[i].data);
    Free(minvo->instatcall[i].ident);
  }
  Free(minvo->instatcall);
}

void getInfo(xmlNodePtr kickRoot, invocation *mainInvo) {
  if(xmlStrcmp(kickRoot->name, (xmlChar *)"invocation") != 0) {
    fprintf(stderr, "Root element is not <invocation>\n");
    exit(E_BADXML);
  } else {
    getInvoProp(mainInvo, kickRoot);
  }
  for(xmlNodePtr child = kickRoot->children; child != NULL; child = child->next) {
    if(child->type == XML_ELEMENT_NODE) {
      if(xmlStrcmp(child->name, (xmlChar *)"setup") == 0 ||    // the five job phases:
         xmlStrcmp(child->name, (xmlChar *)"prejob") == 0 ||   // setup, prejob, mainjob,
         xmlStrcmp(child->name, (xmlChar *)"mainjob") == 0 ||  // postjob, cleanup
         xmlStrcmp(child->name, (xmlChar *)"postjob") == 0 ||
         xmlStrcmp(child->name, (xmlChar *)"cleanup") == 0) {
        setupJob(child, mainInvo);
      } else if(xmlStrcmp(child->name, (xmlChar *)"cwd") == 0) {
        mainInvo->cwd = (char *)xmlNodeGetContent(child);
      } else if(xmlStrcmp(child->name, (xmlChar *)"usage") == 0) {
        mainInvo->inuse.utime = atof((char *)xmlGetProp(child, (xmlChar *)"utime"));
        mainInvo->inuse.stime = atof((char *)xmlGetProp(child, (xmlChar *)"stime"));
        mainInvo->inuse.minflt = atoi((char *)xmlGetProp(child, (xmlChar *)"minflt"));
        mainInvo->inuse.majflt = atoi((char *)xmlGetProp(child, (xmlChar *)"majflt"));
        mainInvo->inuse.nswap = atoi((char *)xmlGetProp(child, (xmlChar *)"nswap"));
        mainInvo->inuse.nsignals = atoi((char *)xmlGetProp(child, (xmlChar *)"nsignals"));
        if((tempString = (char *)xmlGetProp(child, (xmlChar *)"nvcsw")) != NULL) {
          mainInvo->inuse.nvcsw = atoi(tempString);
        }
        if((tempString = (char *)xmlGetProp(child, (xmlChar *)"nivcsw")) != NULL) {
          mainInvo->inuse.nivcsw = atoi(tempString);
        }
      } else if(xmlStrcmp(child->name, (xmlChar *)"uname") == 0) {
        mainInvo->inuname[0] = (char *)xmlGetProp(child, (xmlChar *)"archmode");
        mainInvo->inuname[1] = (char *)xmlGetProp(child, (xmlChar *)"system");
        mainInvo->inuname[2] = (char *)xmlGetProp(child, (xmlChar *)"nodename");
        mainInvo->inuname[3] = (char *)xmlGetProp(child, (xmlChar *)"release");
        mainInvo->inuname[4] = (char *)xmlGetProp(child, (xmlChar *)"machine");
        mainInvo->inuname[5] = (char *)xmlGetProp(child, (xmlChar *)"domainname");
      } else if(xmlStrcmp(child->name, (xmlChar *)"environment") == 0) {
        for(xmlNodePtr child2 = child->children; child2 != NULL; child2 = child2->next) {
          if(child2->type == XML_ELEMENT_NODE) {
            /* store key/value pairs flat: even index = key, odd index = value */
            mainInvo->numenv += 2;
            mainInvo->env = realloc(mainInvo->env, mainInvo->numenv*sizeof(char *));
            mainInvo->env[mainInvo->numenv-2] = (char *)xmlGetProp(child2, (xmlChar *)"key");
            mainInvo->env[mainInvo->numenv-1] = (char *)xmlNodeGetContent(child2);
          }
        }
      } else if(xmlStrcmp(child->name, (xmlChar *)"statcall") == 0) {
        mainInvo->numcalls++;
        mainInvo->instatcall = realloc(mainInvo->instatcall, mainInvo->numcalls*sizeof(statcall));
        setupStatCall(child, &mainInvo->instatcall[mainInvo->numcalls-1]);
      }
    }
  }
}

// mainInvo->start arrives as an ISO 8601 timestamp with millisecond fraction
// and numeric timezone, e.g. "2006-07-24T16:30:12.696-05:00".
// parseDate() condenses it to something like "Jul 24 2006 11:30".
char *parseDate(char *date) {
  static const char *months[12] = {
    "Jan", "Feb", "Mar", "Apr", "May", "Jun",
    "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"
  };
  int year, month, day, hour, minute;
  int tzhour = 0, tzminute = 0;
  char sign;
  char *finstring = malloc(256*sizeof(char));

  /* e.g. 2006-07-24T16:30:12.696-05:00 -- seconds and fraction are skipped.
   * The original sliced fixed character positions into buffers that were
   * never null-terminated before atoi(); sscanf() does the same job safely. */
  if(sscanf(date, "%4d-%2d-%2dT%2d:%2d:%*2d.%*3d%c%2d:%2d",
            &year, &month, &day, &hour, &minute, &sign, &tzhour, &tzminute) != 8 ||
     month < 1 || month > 12) {
    fprintf(stderr, "Error parsing date\n");
    exit(E_SYSERR);
  }
  /* apply the numeric timezone offset the way the original code did: add the
   * signed hour and minute fields to the clock reading, without carrying
   * minutes into hours */
  if(sign == '-') {
    tzhour = -tzhour;
    tzminute = -tzminute;
  }
  sprintf(finstring, "%s %02d %04d %d:%02d",
          months[month-1], day, year, hour + tzhour, minute + tzminute);
  return finstring;
}

char *getFileNameOnly(char *path) {
  /* return the component after the last slash, or the whole path if none */
  char *slash = strrchr(path, '/');
  return slash ? slash + 1 : path;
}
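/*
 * Usage sketch (illustrative, not part of the original tool): how the entry
 * points above are meant to be combined.  The driver below is hypothetical
 * and compiled out by default; the tool's real main() lives elsewhere.  The
 * libxml2 calls (xmlReadFile, xmlDocGetRootElement, xmlFreeDoc,
 * xmlCleanupParser) are the standard parsing API.
 */
#ifdef VDS_FORMAT_KICKSTART_SKETCH
int sketch_main(int argc, char *argv[]) {
  invocation invo;
  xmlDocPtr doc;
  xmlNodePtr root;

  if(argc != 2) {
    fprintf(stderr, "Usage: %s kickstart-record.out\n", argv[0]);
    return E_BADPARAMS;
  }
  if((doc = xmlReadFile(argv[1], NULL, 0)) == NULL ||
     (root = xmlDocGetRootElement(doc)) == NULL) {
    fprintf(stderr, "Unable to parse %s\n", argv[1]);
    return E_BADXML;
  }
  initInvo(&invo);        // allocate the record holders
  getInfo(root, &invo);   // walk the <invocation> tree
  // attr[3] is the transformation, attr[2] the duration (see the header)
  printf("%s ran for %s seconds\n",
         invo.attr[3] ? invo.attr[3] : "(unknown)",
         invo.attr[2] ? invo.attr[2] : "(unknown)");
  destroyInvo(&invo);     // release everything we parsed
  xmlFreeDoc(doc);
  xmlCleanupParser();
  return SUCCESS;
}
#endif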
pegasus-wms_4.0.1+dfsg/src/tools/vds-stat/vds-format-kickstart.h0000644000175000017500000000571111757531137023764 0ustar ryngerynge
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <assert.h>
#include <libxml/xmlmemory.h>
#include <libxml/xmlreader.h>
#include <libxml/parser.h>
#include <libxml/tree.h>

#define SUCCESS (0)
#define E_BADPARAMS (1)
#define E_BADXML (2)
#define E_SYSERR (3)
#define MALLOC_CHECK_ (1)

struct usage {
  double utime;   // total amount of user time used, in seconds with millisecond fraction
  double stime;   // total amount of system time used, in seconds with millisecond fraction
  int minflt;     // number of soft page faults
  int majflt;     // number of hard page faults
  int nswap;      // number of times the process was swapped out of physical memory
  int nsignals;   // number of signals delivered
  int nvcsw;      // number of voluntary context switches
  int nivcsw;     // number of involuntary context switches
};
typedef struct usage usage;

/* The uname string array conforms to the following layout:
 * 0 -- archmode IA32, IA64, ILP32, LP64
 * 1 -- system
 * 2 -- nodename
 * 3 -- release
 * 4 -- machine
 * 5 -- domainname
 */

/* The STATINFO array in the statcall struct contains the following information
 * 0 -- size
 * 1 -- mode
 * 2 -- inode
 * 3 -- nlink
 * 4 -- blksize
 * 5 -- atime
 * 6 -- mtime
 * 7 -- ctime
 * 8 -- uid
 * 9 -- user
 * 10 - gid
 * 11 - group
 */
struct statcall {
  char *ident;
  int error;      // result of the stat call on the named file or descriptor
  char *filename;
  char *type;
  int descriptor;
  int count;
  int rsize;
  int wsize;
  char **statinfo;
  char *data;
};
typedef struct statcall statcall;

struct jobType {
  char *name;       // one of: setup, prejob, mainjob, postjob, cleanup
  char *start;
  char *duration;
  char *pid;
  struct usage jusage;
  int rawstatus;
  char *errortype;
  int exitcode;
  char *corefile;
  struct statcall jstatcall;
  int numargs;
  char **args;
};
typedef struct jobType jtype;

/* The attr string array contains the following attributes of the invocation type:
 * 0 -- version
 * 1 -- start
 * 2 -- duration   duration of the application run in seconds with microsecond fraction
 * 3 -- transformation
 * 4 -- derivation
 * 5 -- resource
 * 6 -- hostaddr
 * 7 -- hostname
 * 8 -- pid
 * 9 -- uid
 * 10 - user
 * 11 - gid
 * 12 - group
 * 13 - wfLabel
 * 14 - wfStamp
 */
struct invocation {
  int numjobs;
  jtype *jobs;
  char *cwd;
  struct usage inuse;
  char **inuname;     // uname string array
  char **env;         // environment variables
  int numenv;
  statcall *instatcall;
  int numcalls;
  char **attr;
  int numattr;
};
typedef struct invocation invocation;

void initInvo(invocation *newInvo);
void getInvoProp(invocation *main, xmlNodePtr root);
void setupJob(xmlNodePtr child, invocation *invo);
void setupStatCall(xmlNodePtr child, statcall *setup);
void destroyInvo(invocation *minvo);
void getInfo(xmlNodePtr kickRoot, invocation *mainInvo);
void printkrec(invocation *minvo);
char *parseDate(char *date);
char *getFileNameOnly(char *path);
pegasus-wms_4.0.1+dfsg/src/tools/vds-stat/vds-stat0000755000175000017500000001177711757531137021230 0ustar ryngerynge
#!/bin/bash

my_usage () {
    # purpose: show usage string and exit
    echo
    echo 'Tool to provide a human-readable representation of kickstart records.'
    echo
    echo 'Usage: '$self' [-h] [-v] [-s] [-b] [-k] [-d] [-n] [-S] [-a] [-o file] file|directory'
    echo
    echo '-h|--help provide this help.'
    echo '-v|--verbose provide information about every .out file in the directory.'
    echo '-s|--stdouts retrieve the stdout and stderr of each kickstart record with a nonzero exitcode.'
    echo '-b|--badfiles retrieve all kickstart records with a nonzero exitcode.'
    echo '-k|--vds-format-kickstart run the given file through vds-format-kickstart.'
    echo '-d|--data run the given file through vds-format-kickstart using the -v argument.'
    echo '-n|--numerr give the number of kickstart records with nonzero exitcodes.'
    echo '-S|--sum sum the amount of time for all jobs processed through kickstart.'
    echo '-a|--args display the filename and arguments that were invoked by kickstart.'
    echo '-o file|--output file put the output into file (which must not be named "0").'
    echo
    echo
    exit $1
}

#
# parse commandline options, long and short
#
OPTS=`getopt -l numerr,help,verbose,vds-format-kickstart,data,stdouts,badfiles,sum,args,output: -o hvkdnasbSo: -- "$@"`
test $? -ne 0 && my_usage 1
eval set -- "$OPTS"

self=vds-stat
args=0
sum=0
verbose=0
kickstand=0
data=0
stdouts=0
badfiles=0
filename=0
numerr=0

while true; do
    case "$1" in
        --numerr|-n)
            shift
            badfiles=1
            numerr=1
            ;;
        --verbose|-v)
            shift
            verbose=1
            ;;
        --sum|-S)
            shift
            sum=1
            ;;
        --help|-h)
            shift
            my_usage 0
            ;;
        --args|-a)
            shift
            args=1
            ;;
        --vds-format-kickstart|-k)
            # getopt above declares the long option as --vds-format-kickstart;
            # the original case label (--kickstand) could never match it
            shift
            kickstand=1
            ;;
        --data|-d)
            shift
            kickstand=1
            data=1
            ;;
        --stdouts|-s)
            shift
            stdouts=1
            ;;
        --badfiles|-b)
            shift
            badfiles=1
            ;;
        --output|-o)
            shift
            filename=$1
            shift
            ;;
        --)
            shift
            break
            ;;
        *)
            echo "$self: Error: Unknown argument \"$1\", see --help:"
            my_usage 1
            ;;
    esac
done

TMPFILE=`mktemp /tmp/kickfileXXXXXXXXXXXXX`

if [ $# == 0 ]
then
    echo $self': Incorrect number of arguments'
    my_usage 1
fi

if [ -f $1 ]
then
    if [ $kickstand == 1 ]
    then
        if [ $data == 1 ]
        then
            if [ $filename == 0 ]
            then
                vds-format-kickstart -v $1
                exit 0
            else
                # redirect into the requested file, not a file literally named "filename"
                vds-format-kickstart -v $1 > $filename
                exit 0
            fi
        else
            if [ $filename == 0 ]
            then
                vds-format-kickstart $1
                exit 0
            else
                vds-format-kickstart $1 > $filename
                exit 0
            fi
        fi
    else
        echo 'Invalid argument '$1'. Use -k for file, and -h for help options:'
        my_usage 1
    fi
elif [ -d $1 ]
then
    for file in `find $1 | grep .*\.out`
    do
        vds-format-kickstart $file >> $TMPFILE 2>&1
    done
    # make sure that there is a '/' at the end of the directory
    workingdir=`echo $1 | sed -e 's/\([/a-zA-Z0-9_]*[a-zA-Z0-9_]\)\/\?/\1\//g'`
else
    echo '-'$self': '$1': directory or file not found.'
    exit 1
fi

if [ $verbose == 1 ]
then
    if [ $filename == 0 ]
    then
        cat $TMPFILE
    else
        cp $TMPFILE $filename
    fi
elif [ $badfiles == 1 ]
then
    if [ $filename == 0 ]
    then
        if [ $numerr = 1 ]
        then
            grep 'N\?TEMP' $TMPFILE | awk '$10 != "r0" {print $0}' | wc -l
        else
            grep 'N\?TEMP' $TMPFILE | awk '$10 != "r0" {print $0}'
        fi
    else
        if [ $numerr = 1 ]
        then
            grep 'N\?TEMP' $TMPFILE | awk '$10 != "r0" {print $0}' | wc -l > $filename
        else
            grep 'N\?TEMP' $TMPFILE | awk '$10 != "r0" {print $0}' > $filename
        fi
    fi
elif [ $stdouts == 1 ]
then
    if [ $filename == 0 ]
    then
        for file in `grep 'N\?TEMP' $TMPFILE | awk '$10 != "r0" {print $1}'`
        do
            vds-format-kickstart -v $workingdir$file
        done
    else
        for file in `grep 'N\?TEMP' $TMPFILE | awk '$10 != "r0" {print $1}'`
        do
            vds-format-kickstart -v $workingdir$file >> $filename 2>&1
        done
    fi
elif [ $sum == 1 ]
then
    if [ $filename == 0 ]
    then
        grep 'N\?TEMP' $TMPFILE | awk '{sumfile+=$6} END {print sumfile}'
    else
        grep 'N\?TEMP' $TMPFILE | awk '{sumfile+=$6} END {print sumfile}' > $filename
    fi
elif [ $args == 1 ]
then
    if [ $filename == 0 ]
    then
        grep 'N\?TEMP' $TMPFILE | sed -e 's/.*N\?TEMP[ \t]\(.*\)/\1/g'
    else
        grep 'N\?TEMP' $TMPFILE | sed -e 's/.*N\?TEMP[ \t]\(.*\)/\1/g' > $filename
    fi
else
    if [ $filename == 0 ]
    then
        grep 'N\?TEMP' $TMPFILE | awk '{print $1"\t" $2" " $3" " $4"\t" $5"\t" $6"\t" $7"\t" $8"\t" $9"\t" $10"\t"}'
    else
        grep 'N\?TEMP' $TMPFILE | awk '{print $1"\t" $2" " $3" " $4"\t" $5"\t" $6"\t" $7"\t" $8"\t" $9"\t" $10"\t"}' > $filename
    fi
fi

rm $TMPFILE
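# Usage sketch (illustrative, not part of the original script): typical
# invocations, assuming a run directory "run0001" full of kickstart
# .out files.
#
#   vds-stat run0001                 # one summary line per record
#   vds-stat -n run0001              # count the records with nonzero exitcodes
#   vds-stat -b -o bad.txt run0001   # save the failing records to bad.txt
#   vds-stat -k run0001/job1.out     # format a single record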
pegasus-wms_4.0.1+dfsg/src/tools/vds-stat/Makefile0000644000175000017500000000166311757531137021176 0ustar ryngerynge
# $Id: Makefile 50 2007-05-19 00:48:32Z gmehta $
INSTALL = install
RM = rm -f
CC = gcc
CFLAGS = -Wall -std=c99 `xml2-config --cflags` -D_GNU_SOURCE
LD = $(CC)
EXTRA_LDFLAGS = `xml2-config --libs` -ll -lm

# "ifndef ${prefix}" tested the value of prefix, not whether it is set
ifndef prefix
prefix = $(PEGASUS_HOME)
endif

all: vds-format-kickstart

vds-format-kickstart: vds-format-kickstart.o vds-format-kickstart-help.o
	$(LD) $^ -o $@ $(LOADLIBES) $(EXTRA_LDFLAGS)

%.o : %.c
	$(CC) $(CFLAGS) $< -c -o $@

clean:
	$(RM) *.o vds-format-kickstart

install: vds-format-kickstart
	$(INSTALL) -m 0755 vds-stat $(prefix)/bin
	$(INSTALL) -m 0755 vds-format-kickstart $(prefix)/bin

# At some point, we'll have a man page and other documentation... hopefully...
# so here are the build targets for when we do.
#install.doc: $(GENDOC)
#	$(INSTALL) -m 0644 $(GENDOC) $(prefix)/man

install.man: vds-stat.1
	$(INSTALL) -m 0644 vds-stat.1 $(prefix)/man/man1

install.all: install install.man #install.doc
pegasus-wms_4.0.1+dfsg/src/tools/Makefile0000644000175000017500000000072211757531137017424 0ustar ryngerynge
#
# build all C/C++ sources
#
# $Id: Makefile 5027 2012-02-27 23:59:50Z voeckler $
#
TARGET = pegasus-cluster pegasus-invoke pegasus-keg pegasus-kickstart

# awk needs $$2 here, so make does not swallow the field reference
CONDOR_VERSION = $(shell condor_version | awk '{ print $$2; exit }')
MARCH = $(shell uname -m | tr '[A-Z]' '[a-z]')

ifndef prefix
prefix = $(PEGASUS_HOME)
endif

.ONESHELL:
all clean distclean install:
	for i in $(TARGET); do \
	  if [ -d $$i ]; then \
	    $(MAKE) -C $$i $@ || exit 1; \
	  fi; \
	done
pegasus-wms_4.0.1+dfsg/src/tools/T2/0000755000175000017500000000000011757531667016260 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/tools/T2/mypopen.h0000644000175000017500000001727011757531137020117 0ustar ryngerynge
/*
 * Globus Toolkit Public License (GTPL)
 *
 * Copyright (c) 1999 University of Chicago and The University of
 * Southern California. All Rights Reserved.
* * 1) The "Software", below, refers to the Globus Toolkit (in either * source-code, or binary form and accompanying documentation) and a * "work based on the Software" means a work based on either the * Software, on part of the Software, or on any derivative work of * the Software under copyright law: that is, a work containing all * or a portion of the Software either verbatim or with * modifications. Each licensee is addressed as "you" or "Licensee." * * 2) The University of Southern California and the University of * Chicago as Operator of Argonne National Laboratory are copyright * holders in the Software. The copyright holders and their third * party licensors hereby grant Licensee a royalty-free nonexclusive * license, subject to the limitations stated herein and * U.S. Government license rights. * * 3) A copy or copies of the Software may be given to others, if you * meet the following conditions: * * a) Copies in source code must include the copyright notice and * this license. * * b) Copies in binary form must include the copyright notice and * this license in the documentation and/or other materials * provided with the copy. * * 4) All advertising materials, journal articles and documentation * mentioning features derived from or use of the Software must * display the following acknowledgement: * * "This product includes software developed by and/or derived from * the Globus project (http://www.globus.org/)." * * In the event that the product being advertised includes an intact * Globus distribution (with copyright and license included) then * this clause is waived. * * 5) You are encouraged to package modifications to the Software * separately, as patches to the Software. * * 6) You may make modifications to the Software, however, if you * modify a copy or copies of the Software or any portion of it, * thus forming a work based on the Software, and give a copy or * copies of such work to others, either in source code or binary * form, you must meet the following conditions: * * a) The Software must carry prominent notices stating that you * changed specified portions of the Software. * * b) The Software must display the following acknowledgement: * * "This product includes software developed by and/or derived * from the Globus Project (http://www.globus.org/) to which the * U.S. Government retains certain rights." * * 7) You may incorporate the Software or a modified version of the * Software into a commercial product, if you meet the following * conditions: * * a) The commercial product or accompanying documentation must * display the following acknowledgment: * * "This product includes software developed by and/or derived * from the Globus Project (http://www.globus.org/) to which the * U.S. Government retains a paid-up, nonexclusive, irrevocable * worldwide license to reproduce, prepare derivative works, and * perform publicly and display publicly." * * b) The user of the commercial product must be given the following * notice: * * "[Commercial product] was prepared, in part, as an account of * work sponsored by an agency of the United States Government. 
* Neither the United States, nor the University of Chicago, nor * University of Southern California, nor any contributors to * the Globus Project or Globus Toolkit nor any of their employees, * makes any warranty express or implied, or assumes any legal * liability or responsibility for the accuracy, completeness, or * usefulness of any information, apparatus, product, or process * disclosed, or represents that its use would not infringe * privately owned rights. * * IN NO EVENT WILL THE UNITED STATES, THE UNIVERSITY OF CHICAGO * OR THE UNIVERSITY OF SOUTHERN CALIFORNIA OR ANY CONTRIBUTORS * TO THE GLOBUS PROJECT OR GLOBUS TOOLKIT BE LIABLE FOR ANY * DAMAGES, INCLUDING DIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL * DAMAGES RESULTING FROM EXERCISE OF THIS LICENSE AGREEMENT OR * THE USE OF THE [COMMERCIAL PRODUCT]." * * 8) LICENSEE AGREES THAT THE EXPORT OF GOODS AND/OR TECHNICAL DATA * FROM THE UNITED STATES MAY REQUIRE SOME FORM OF EXPORT CONTROL * LICENSE FROM THE U.S. GOVERNMENT AND THAT FAILURE TO OBTAIN SUCH * EXPORT CONTROL LICENSE MAY RESULT IN CRIMINAL LIABILITY UNDER U.S. * LAWS. * * 9) Portions of the Software resulted from work developed under a * U.S. Government contract and are subject to the following license: * the Government is granted for itself and others acting on its * behalf a paid-up, nonexclusive, irrevocable worldwide license in * this computer software to reproduce, prepare derivative works, and * perform publicly and display publicly. * * 10) The Software was prepared, in part, as an account of work * sponsored by an agency of the United States Government. Neither * the United States, nor the University of Chicago, nor The * University of Southern California, nor any contributors to the * Globus Project or Globus Toolkit, nor any of their employees, * makes any warranty express or implied, or assumes any legal * liability or responsibility for the accuracy, completeness, or * usefulness of any information, apparatus, product, or process * disclosed, or represents that its use would not infringe privately * owned rights. * * 11) IN NO EVENT WILL THE UNITED STATES, THE UNIVERSITY OF CHICAGO OR * THE UNIVERSITY OF SOUTHERN CALIFORNIA OR ANY CONTRIBUTORS TO THE * GLOBUS PROJECT OR GLOBUS TOOLKIT BE LIABLE FOR ANY DAMAGES, * INCLUDING DIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES * RESULTING FROM EXERCISE OF THIS LICENSE AGREEMENT OR THE USE OF * THE SOFTWARE. * * END OF LICENSE */ #ifndef _MYPOPEN_H #define _MYPOPEN_H #include <sys/types.h> typedef struct { pid_t child; /* pid of process that runs things */ int readfd; /* fd to read output from process */ } PipeCmd; extern PipeCmd* mypopen( const char* tag, char* argv[], char* envp[] ); /* purpose: fork off a commend and capture its stderr and stdout. * warning: does not use /bin/sh -c internally. * paramtr: name (IN): some short tag to name the app * argv (IN): the true argv[] vector for execve * envp (IN): the true envp[] vector for execve * returns: a structure which contains information about the child process. * it will return NULL on failure. */ extern int mypclose( PipeCmd* po ); /* purpose: free the data structure and all associated resources. * paramtr: po (IO): is a valid pipe open structure. * returns: process exit status, or -1 for invalid po structure. 
 */
extern int pipe_out_cmd( const char* tag, char* argv[], char* envp[],
                         char* buffer, size_t blen );
/* purpose: fork off a command and capture its stderr and stdout
 * paramtr: name (IN): some short tag to name the app
 *          argv (IN): the true argv[] vector for execve
 *          envp (IN): the true envp[] vector for execve
 *          buffer (OUT): area to store output into. Will be cleared
 *          blen (IN): length of the area that is usable to us.
 * returns: -1 for regular failure, exit code from application otherwise
 */

#endif /* _MYPOPEN_H */
pegasus-wms_4.0.1+dfsg/src/tools/T2/util.h0000644000175000017500000000504511757531137017402 0ustar ryngerynge
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#ifndef _UTIL_H
#define _UTIL_H

#include <sys/time.h>   /* struct timeval, used in the prototypes below */

extern void double2timeval( struct timeval* tv, double interval );
/* purpose: Converts a double timestamp into a timeval
 * paramtr: tv (OUT): destination to put the timeval into
 *          interval (IN): time in seconds to convert
 */

#define timeval2double(tv) (tv.tv_sec + tv.tv_usec/1E6)
/* purpose: Converts a timeval into a fractional seconds double rep
 * paramtr: tv (IN): timeval to convert
 * returns: double representing the seconds with fraction.
 */

extern double now( void );
/* purpose: obtains a UTC timestamp with microsecond resolution.
 * returns: the timestamp, or -1.0 if it was completely impossible.
 */

extern char* check_link( void );
/* purpose: Obtains the path to the system's symlink tool ln
 * returns: absolute path to ln, or NULL if not found nor accessible
 */

extern char* default_globus_url_copy( void );
/* purpose: Determines the default path to the default g-u-c. No checks!
 * returns: absolute path to g-u-c, or NULL if environment mismatch
 */

extern char* alter_globus_url_copy( const char* argv0 );
/* purpose: Determines the alternative g-u-c. Simple check only!
* paramtr: argv0 (IN): main's argv[0] * returns: absolute path to g-u-c, or NULL if environment mismatch */ extern long check_globus_url_copy( char* location, char* envp[] ); /* purpose: Obtains the version of a given globus-url-copy * parmatr: location (IN): location of an alternative g-u-c, or * NULL to use $GLOBUS_LOCATION/bin/globus-url-copy * paramtr: env (IN): environment pointer from main() * returns: The version number as major * 1000 + minor, * or -1 if troubles running the g-u-c */ extern long check_grid_proxy_info( char* envp[] ); /* purpose: Obtains the time remaining on the current user certificate proxy. * paramtr: env (IN): environment pointer from main() * returns: the time remaining on the certificate, 0 for expired, -1 error */ #endif /* _UTIL_H */ �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/T2/item.h����������������������������������������������������������0000644�0001750�0001750�00000005444�11757531137�017366� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. * * based on examples in David Butenhof, "Programming with POSIX threads", * Addison-Wesley, 1997 */ #ifndef _ITEM_H #define _ITEM_H #ifndef DEFAULT_STREAMS #define DEFAULT_STREAMS 1 #endif #ifndef DEFAULT_BUFSIZE #define DEFAULT_BUFSIZE 0 #endif #ifndef DEFAULT_RETRIES #define DEFAULT_RETRIES 5 /* JSV: up'ed from 3 */ #endif #ifndef DEFAULT_BACKOFF #define DEFAULT_BACKOFF 5.0 #endif #ifndef DEFAULT_INITIAL #define DEFAULT_INITIAL 0.2 #endif #include "xfer.h" typedef struct item_tag { struct item_tag* m_next; /* next piece of work */ size_t m_magic; /* valid */ xfer_p m_xfer; /* transfer request */ unsigned m_bufsize; /* TCP buffer size, 0 is default */ unsigned m_streams; /* number of parallel data streams */ unsigned m_retries; /* number of retry attempts */ double m_initial; /* initial wait */ double m_backoff; /* exponential backoff time */ /* statistics */ double m_timesum; /* time taken for processing */ double m_queued; /* start time of being queued */ } item_t, *item_p; #define ITEM_MAGIC 0xa7126def extern int item_init( item_p item, xfer_p xfer ); /* purpose: initializes the work item request. 
* paramtr: item (IO): location of an item to initialize * paramtr: xfer (IN): description of what to do * returns: 0 for ok, error code for an error */ extern int item_full_init( item_p item, xfer_p xfer, unsigned bufsize, unsigned streams, unsigned retries, double initial, double backoff ); /* purpose: initializes the work item request. * paramtr: item (IO): location of an item to initialize * paramtr: xfer (IN): description of what to do * paramtr: bufsize (IN): TCP buffer size to use for copy * paramtr: streams (IN): number of concurrent data channels * paramtr: retries (IN): maximum number of retry attempts * paramtr: initial (IN): initial sleep time * paramtr: backoff (IN): first exponential backoff time * returns: 0 for ok, error code for an error */ extern int item_destroy( item_p item ); /* purpose: destroys a work item and frees its resources * paramtr: item (IO): location of an item to initialize * returns: 0 for ok, error code for an error */ #endif /* _ITEM_H */ ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/T2/README����������������������������������������������������������0000644�0001750�0001750�00000005246�11757531137�017137� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������Input file format ----------------- Each input record of variable length is considered a "section". Each section contains one header, and multiple sources and destinations. Each section describes exactly one transfer. Flags are attached to the header. A section is terminated by either the next LFN, or by the EOF. +------- |LFN_1 [flag [..]] |ws TFN_src_1_1 |ws TFN_src_1_.. |ws TFN_src_1_N |ws ws TFN_dst_1_1 |ws ws TFN_dst_1_.. |ws ws TFN_dst_1_M |# comment |LFN_2 [flag [..]] |ws TFN_src_2_1 |ws TFN_src_2_.. |ws TFN_src_2_N |ws ws TFN_dst_2_1 |ws ws TFN_dst_2_.. |ws ws TFN_dst_2_M comments start with a hash (#), and extend to the end of line. A comment is being replaced with nothing when encountered. The header starts with a logical filename (LFN) on the leftmost side. A LFN must not contain any whitespaces. It may optionally be followed by flag, which modify the behavior for this transfer. The LFN is only used for reporting and debugging. A file may exist in multiple locations. Each source is a transfer filename (TFN). A TFN describes how to access a file from the outside world. Each source TFN is equivalent. At least one such must be specified. Each source TFN is indented with one whitespace. There may be multiple destinations for a file. Again, a TFN is used to describe where to place a copy, as viewed from the outside. Each destination is indented by two whitespaces. The transfer tool tries to transfer between the cartesian product of source and destination pairs, e.g. N x M and O x P pairs. The flags determine at what point and how to stop. Flags are separated by whitespace. The following optional flags are possible, being mutually exclusive, with "any" being the default in case no flag is specified: - "any" is the default option. 
A successful transfer from any source to any destination determines the
success of the section. If all pair candidates were exhausted without
success, the transfer will fail. All sources are equivalent. However, a
file missing on one source will result in an immediate try of the next
source. On the last source, it will result in failure.

- "optional" means that failure to transfer this file does not constitute
a failure of the transfer tool. All sources are equivalent. However, a
file missing on one source will result in an immediate try of the next
source. On the last source, it will still result in success.

- "all" is a short-cut to transfer any found source to all destinations.
However, this option is currently not implemented. All sources are
considered equivalent. It is attempted to distribute any reachable source
file onto all reachable destinations. Partial success is still considered
a failure.
pegasus-wms_4.0.1+dfsg/src/tools/T2/T2.10000644000175000017500000002052511757531137016623 0ustar ryngerynge
.\" This file or a portion of this file is licensed under the terms of
.\" the Globus Toolkit Public License, found in file GTPL, or at
.\" http://www.globus.org/toolkit/download/license.html. This notice must
.\" appear in redistributions of this file, with or without modification.
.\"
.\" Redistributions of this Software, with or without modification, must
.\" reproduce the GTPL in: (1) the Software, or (2) the Documentation or
.\" some other similar material which is provided with the Software (if
.\" any).
.\"
.\" Copyright 1999-2004 University of Chicago and The University of
.\" Southern California. All rights reserved.
.\"
.\" $Id: T2.1 50 2007-05-19 00:48:32Z gmehta $
.\"
.\" Authors: Jens-S. Vöckler, Yong Zhao
.\"
.\"
.TH T2 1 "April, 1st 2004"
.SH NAME
T2 \- a globus\-url\-copy wrapper
.SH SYNOPSIS
.B T2 [\-P\~pp] [\-t\~bs] [\-p\~ns] [\-r\~nr] [\-q] baseuri basemnt
.PP
.B T2 [\-P\~pp] [\-t\~bs] [\-p\~ns] [\-r\~nr] [\-q] baseuri basemnt fof
.SH DESCRIPTION
The
.I T2
program is a multi-threaded wrapper around globus-url-copy to handle a
list of source and destination URIs. The list is either supplied inside
a file as third argument, or via
.I stdin
from another process.
.PP
.I T2
will check for the existence and accessibility of the programs
globus-url-copy and grid-proxy-info. The
.I T2
program will exit with an error if the user proxy certificate does not
have at least one hour remaining.
.SH ARGUMENTS
.TP
.B \-P pp
configures the maximum number of parallel running globus-url-copy
processes. Please note that no sanity checks are implemented; if you use
an unreasonably high number, you might run out of file descriptors or
user processes.
.IP
The default is to run no more than 4 parallel active copy processes.
.TP
.B \-t bs
configures the buffer size of the
.I \-tcp\-bs
parameter from globus-url-copy. A well-tuned buffer size may have
beneficial effects, while insane buffer sizes gobble resources.
.IP
The default is 0, which lets the system determine the appropriate size.
.TP
.B \-p ns
configures the number of parallel streams that are used with the
.I \-p
option of globus-url-copy to transfer a single file.
.IP
By default, only 1 stream is being used.
.TP
.B \-r nr
configures the number of retries for soft failures. Certain known hard
failures will not trigger a retry, but most unknown failures are taken
to be soft failures, and will trigger retries. In case of multiple
retries and multiple failures, the last retry will attempt to use more
friendly settings (e.g. only one data channel, no more than 64k buffers).
.IP
By default, 3 retries are attempted.
.TP
.B \-q
triggers a quieter mode.
.IP
By default, the T2 program is rather talkative about what it does.
.TP
.B baseuri
specifies the URI prefix of the host that the instance of the
.I T2
program is running upon. It is the URI prefix one would use from the
outside to access files. You may use an (almost) arbitrary character
string to denote that you don't intend to use this feature. See also
.I basemnt.
.IP
This is a required argument. There are no defaults.
.TP
.B basemnt
denotes the mount point that reflects the inside view where paths with
the
.I baseuri
prefix would be mounted to. You can use an almost arbitrary string to
denote not to use this feature.
.IP
Together, the two required arguments allow optimization by replacing an
external URI with an internal file:/// schema, as the latter is usually
faster.
.IP
This is a required argument. There are no defaults.
.PP
The list of URIs denoting files to transfer is either put onto
.IR stdin ,
or it is the optional third argument. The list of URIs contains pairs.
The logical filename, for debugging purposes, introduces a section, and
may contain certain flags. The LFN must not contain whitespace. The
source URI list follows the section header, each URI indented by one
whitespace. The list of source URIs constitutes alternatives for the
same file, and each alternative is equivalent. The source-URI list is
followed by a destination-URI list. Each destination URI is indented by
two whitespaces. The URI-per-line format was chosen because, in
practice, URIs may contain unquoted whitespace.
.SH "INPUT FILE FORMAT"
Each input record of variable length is considered a
.IR section .
Each section contains one header, and multiple sources and destinations.
Each section describes exactly one transfer. Flags are attached to the
header. A section is terminated by either the next LFN, or by the EOF.
.nf
\f(CB
LFN_1 [flag [..]]
ws TFN_src_1_1
ws TFN_src_1_..
ws TFN_src_1_N
ws ws TFN_dst_1_1
ws ws TFN_dst_1_..
ws ws TFN_dst_1_M
# comment
LFN_2 [flag [..]]
ws TFN_src_2_1
ws TFN_src_2_..
ws TFN_src_2_N
ws ws TFN_dst_2_1
ws ws TFN_dst_2_..
ws ws TFN_dst_2_M
\fP
.fi
Comments start with a hash (#) and extend to the end of the line. A
comment line is ignored when encountered.
.PP
The header starts with a logical filename (LFN) on the leftmost side.
An LFN must not contain any whitespace. It may optionally be followed
by flags, which modify the behavior for this transfer. The LFN is only
used for reporting and debugging.
.PP
A file may exist in multiple locations. Each source is a transfer
filename (TFN). A TFN describes how to access a file from the outside
world. Each source TFN is equivalent.
At least one such source must be specified. Each source TFN is indented
with one whitespace.
.PP
There may be multiple destinations for a file. Again, a TFN is used to
describe where to place a copy, as viewed from the outside. Each
destination is indented by two whitespaces.
.PP
The transfer tool tries to transfer between the cartesian product of
source and destination pairs, e.g. N x M and O x P pairs. The flags
determine at what point and how to stop.
.PP
Flags are separated by whitespace. The following optional flags are
possible, being mutually exclusive, with
.I any
being the default in case no flag is specified:
.TP
.B any
is the default option. A successful transfer from any source to any
destination causes success of the section. If all pair candidates were
exhausted without success, the transfer will fail. All sources are
equivalent. However, a file missing on one source will result in an
immediate try of the next source. On the last source, it will result
in failure.
.TP
.B optional
means that failure to transfer this file does not constitute a failure
of the transfer tool. All sources are equivalent. However, a file
missing on one source will result in an immediate try of the next
source. On the last source, it will still result in success.
.TP
.B all
is a short-cut to transfer any found source to all destinations.
However, this option is currently not implemented. All sources are
considered equivalent. It is attempted to distribute any reachable
source file onto all reachable destinations. Partial success is still
considered a failure.
.SH "RETURN VALUE"
Execution as planned will return 0. Normal failures will return 1.
Check-up failures due to inaccessible globus-url-copy or grid-proxy-info
will return 2. Failure to install the signal handler returns 3. Failure
from the transfer of any file will return 42.
.SH "SEE ALSO"
.BR globus-url-copy,
.BR grid-proxy-info
.SH EXAMPLE
If the input file
.I fof
looks like this
.nf
\f(CB
# comment 1
a any
 gsiftp://some.host/some/where/a
  https://any.host/else/where/a
b optional
 gsiftp://some.host/some/where/b
 gsiftp://some.host2/else/where/b
  https://any.host/else/where/b
c all
 gsiftp://another.host/some/where/c
  https://any.host/else/where/c
  https://any.other/else/where/d
\fP
.fi
and the
.I T2
executable was invoked like this
.nf
\f(CB
T2 gsiftp://another.host/some /mnt/some fof
\fP
.fi
on host
.IR another.host ,
the T2 program would do the following calls to globus-url-copy in
parallel (since by default it starts four instances maximum):
.nf
\f(CB
globus-url-copy gsiftp://some.host/some/where/a https://any.host/else/where/a
globus-url-copy gsiftp://some.host/some/where/b https://any.host/else/where/b
globus-url-copy file:///mnt/some/where/c https://any.host/else/where/c
globus-url-copy file:///mnt/some/where/c https://any.other/else/where/d
\fP
.fi
.SH RESTRICTIONS
The list of filenames must be a textual file.
.PP
Outrageous requests for memory or parallel resources will lead to
failure of the T2.
.SH AUTHORS
Jens-S. Vöckler <voeckler@cs.uchicago.edu>,
.\"Jens-S. V\*:ockler
.br
Mike Wilde <wilde@mcs.anl.gov>,
.br
Yong Zhao <yongzh@cs.uchicago.edu>.
.PP Chimera .B http://www.griphyn.org/chimera/ .br GriPhyN .BR http://www.griphyn.org/ ���������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/T2/item.c����������������������������������������������������������0000644�0001750�0001750�00000005031�11757531137�017351� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. * * based on examples in David Butenhof, "Programming with POSIX threads", * Addison-Wesley, 1997 */ #ifdef sun #include <memory.h> #endif #include <errno.h> #include <stdlib.h> #include <string.h> #include "item.h" #include "util.h" static const char* RCS_ID = "$Id: item.c 50 2007-05-19 00:48:32Z gmehta $"; int item_init( item_p item, xfer_p xfer ) { /* sanity check */ if ( item == NULL ) return EINVAL; else memset( item, 0, sizeof(item_t) ); /* invisible parameters */ item->m_next = NULL; item->m_magic = ITEM_MAGIC; /* settable parameters */ item->m_xfer = xfer; /* defaults */ item->m_bufsize = DEFAULT_BUFSIZE; if ( (item->m_streams = DEFAULT_STREAMS) < 1 ) item->m_streams = 1; if ( (item->m_retries = DEFAULT_RETRIES) < 1 ) item->m_retries = 1; if ( (item->m_initial = DEFAULT_INITIAL) < 0 ) item->m_initial = 0.0; if ( (item->m_backoff = DEFAULT_BACKOFF) < 0 ) item->m_backoff = 0.0; item->m_queued = now(); return 0; } int item_full_init( item_p item, xfer_p xfer, unsigned bufsize, unsigned streams, unsigned retries, double initial, double backoff ) { /* sanity check */ if ( item == NULL ) return EINVAL; else memset( item, 0, sizeof(item_t) ); /* invisible parameters */ item->m_next = NULL; item->m_magic = ITEM_MAGIC; /* settable parameters */ item->m_xfer = xfer; item->m_bufsize = bufsize; if ( (item->m_streams = streams) < 1 ) item->m_streams = 1; if ( (item->m_retries = retries) < 1 ) item->m_retries = 1; if ( (item->m_initial = initial) < 0 ) item->m_initial = 0.0; if ( (item->m_backoff = backoff) < 0 ) item->m_backoff = 0.0; item->m_queued = now(); return 0; } int item_destroy( item_p item ) { /* sanity checks */ if ( item == NULL || item->m_magic != ITEM_MAGIC ) return EINVAL; if ( item->m_xfer ) { xfer_done( item->m_xfer ); free((void*) item->m_xfer); item->m_xfer = NULL; } item->m_magic = -1ul; return 0; } 
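/*
 * Usage sketch (illustrative, not part of the original source): the intended
 * lifecycle of a work item.  The xfer_p argument comes from xfer.h, which is
 * not shown here, so assume the caller prepared a heap-allocated transfer
 * request; item_destroy() hands it to xfer_done() and frees it.
 */
#ifdef ITEM_USAGE_SKETCH
#include <stdio.h>

static int queue_transfer( xfer_p xfer )
{
  item_t item;
  int rc;

  /* take all defaults: 1 stream, 5 retries, 0.2s initial wait, 5s backoff */
  if ( (rc = item_init( &item, xfer )) != 0 ) {
    fprintf( stderr, "item_init: %s\n", strerror(rc) );
    return rc;
  }

  /* ...or spell everything out, e.g. 64k TCP buffers and 4 streams */
  if ( (rc = item_full_init( &item, xfer, 65536, 4,
                             DEFAULT_RETRIES, DEFAULT_INITIAL,
                             DEFAULT_BACKOFF )) != 0 )
    return rc;

  /* ... hand &item to the worker queue here ... */

  return item_destroy( &item );  /* also releases the xfer via xfer_done() */
}
#endif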
�������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/T2/test������������������������������������������������������������0000755�0001750�0001750�00000001042�11757531137�017152� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������# comment 1 T2.c any gsiftp://griodine.uchicago.edu/home/voeckler/vds/src/tools/T2/T2.c gsiftp://e.cs.uchicago.edu/home/voeckler/src/T2/T2.c READXX optional gsiftp://griodine.uchicago.edu/home/voeckler/vds/src/tools/T2/READXX gsiftp://griodine.uchicago.edu/home/voeckler/vds/src/tools/T2/README gsiftp://e.cs.uchicago.edu/home/voeckler/src/T2/README T2.1 all gsiftp://griodine.uchicago.edu/home/voeckler/vds/src/tools/T2/T2.1 gsiftp://e.cs.uchicago.edu/home/voeckler/src/T2/T2.1 gsiftp://e.cs.uchicago.edu/home/voeckler/src/T2/T2.1.1 ����������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/T2/util.c����������������������������������������������������������0000644�0001750�0001750�00000022130�11757531137�017367� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ #include <errno.h> #include <math.h> #include <string.h> #include <stdio.h> #include <stdlib.h> #include <sys/types.h> #include <sys/time.h> #include <sys/stat.h> #include <sys/wait.h> #include <unistd.h> #include <fcntl.h> #include "util.h" #include "mypopen.h" static const char* RCS_ID = "$Id: util.c 50 2007-05-19 00:48:32Z gmehta $"; void double2timeval( struct timeval* tv, double interval ) /* purpose: Converts a double timestamp into a timeval * paramtr: tv (OUT): destination to put the timeval into * interval (IN): time in seconds to convert */ { double integral, fraction = modf(interval,&integral); tv->tv_sec = (long) integral; tv->tv_usec = (long) (1E6*fraction); } double now( void ) /* purpose: obtains an UTC timestamp with microsecond resolution. * returns: the timestamp, or -1.0 if it was completely impossible. */ { int timeout = 0; struct timeval t = { -1, 0 }; while ( gettimeofday( &t, NULL ) == -1 && timeout < 10 ) timeout++; return timeval2double(t); /* t.tv_sec + t.tv_usec / 1E6; */ } char* check_link( void ) /* purpose: Obtains the path to system's symlink tool ln * returns: absolute path to ln, or NULL if not found nor accessible */ { struct stat st; #if 1 char* link = strdup("/bin/ln"); #else /* FIXME: to be implemented post-SC */ char* link, *s; char* path = NULL; char* temp = getenv("PATH"); if ( temp == NULL || *temp == '\0' ) temp = "/bin:/usr/bin"; if ( (path = strdup(temp)) == NULL ) return NULL; for ( s=strtok(path,":"); s; s=strtok(NULL,":") ) { } #endif if ( stat( link, &st ) == 0 ) { if ( (geteuid() != st.st_uid || (st.st_mode & S_IXUSR) == 0) && (getegid() != st.st_gid || (st.st_mode & S_IXGRP) == 0) && ((st.st_mode & S_IXOTH) == 0) && ! S_ISREG(st.st_mode) ) { fprintf( stderr, "ERROR: Check execute permissions on %s\n", link ); return NULL; } } else { fprintf( stderr, "ERROR: Unable to access %s: %d: %s\n", link, errno, strerror(errno) ); return NULL; } return link; } char* default_globus_url_copy( void ) /* purpose: Determines the default path to default g-u-c. No checks! * returns: absolute path to g-u-c, or NULL if environment mismatch */ { char* globus = getenv("GLOBUS_LOCATION"); char* guc; /* assemble default location */ if ( globus == NULL ) { fputs( "ERROR: You need to set your GLOBUS_LOCATION\n", stderr ); return NULL; } guc = (char*) malloc( strlen(globus) + 24 * sizeof(char) ); strcpy( guc, globus ); strcat( guc, "/bin/globus-url-copy" ); return guc; } static char* jerry_globus_url_copy( const char* argv0 ) /* purpose: Determines the alternative g-u-c. Simple check only! * paramtr: argv0 (IN): main's argv[0] * returns: absolute path to g-u-c, or NULL if mismatch */ { size_t size = strlen(argv0) + 16; int fd; char* s; char* guc; /* This is not a fixed alternative, no messages */ if ( argv0 == NULL ) return NULL; /* assemble default location */ guc = (char*) calloc( sizeof(char), size ); if ( guc == NULL ) return NULL; strncpy( guc, argv0, size-1 ); if ( (s = strrchr( guc, '/' )) == NULL ) { s = guc; *s = '\0'; } else { s++; } strncat( s, "guc", size - (s-guc) ); /* open() is faster than stat() for simple accessibility checks */ if ( (fd = open( guc, O_RDONLY )) >= 0 ) { /* exists, and is good */ close(fd); return guc; } else { /* does not exist, bad path */ free((void*) guc); return NULL; } } char* alter_globus_url_copy( const char* argv0 ) /* purpose: Determines the alternative g-u-c. Simple check only! 
char* alter_globus_url_copy( const char* argv0 )
/* purpose: Determines the alternative g-u-c. Simple check only!
 * paramtr: argv0 (IN): main's argv[0]
 * returns: absolute path to g-u-c, or NULL if environment mismatch
 */
{
  size_t size;
  int fd;
  char* vds_home = getenv("PEGASUS_HOME");
  char* guc = jerry_globus_url_copy( argv0 );

  /* Jerry's request */
  if ( guc != NULL ) return guc;

  /* This is not a fixed alternative, no messages */
  if ( vds_home == NULL ) return NULL;

  /* assemble default location */
  size = strlen(vds_home) + 48;
  guc = (char*) calloc( sizeof(char), size );
  if ( guc == NULL ) return NULL;
  strncpy( guc, vds_home, size-1 );
  strncat( guc, "/bin/guc", size );

  /* open() is faster than stat() for simple accessibility checks */
  if ( (fd = open( guc, O_RDONLY )) >= 0 ) {
    /* exists, and is good */
    close(fd);
    return guc;
  } else {
    /* does not exist, bad path */
    free((void*) guc);
    return NULL;
  }
}

long check_globus_url_copy( char* location, char* envp[] )
/* purpose: Obtains the version of a given globus-url-copy
 * paramtr: location (IN): location of an alternative g-u-c, or
 *          NULL to use $GLOBUS_LOCATION/bin/globus-url-copy
 * paramtr: env (IN): environment pointer from main()
 * returns: The version number, as major*1000 + minor,
 *          or -1 if troubles running the g-u-c
 */
{
  int status;
  char* argv[3];
  char line[1024];
  long result = -1;
  unsigned major, minor;
  struct stat st;
  char* guc = ( location == NULL ) ? default_globus_url_copy() : location;

  /* sanity check, if default fails */
  if ( guc == NULL ) return result;

  /* check accessibility */
  if ( stat( guc, &st ) == 0 ) {
    if ( (geteuid() != st.st_uid || (st.st_mode & S_IXUSR) == 0) &&
         (getegid() != st.st_gid || (st.st_mode & S_IXGRP) == 0) &&
         ((st.st_mode & S_IXOTH) == 0) &&
         ! S_ISREG(st.st_mode) ) {
      fprintf( stderr, "ERROR: Check execute permissions on %s\n", guc );
      return result;
    }
  } else {
    fprintf( stderr, "ERROR: Unable to access %s: %d: %s\n",
             guc, errno, strerror(errno) );
    return result;
  }

  /* postcondition: We can access the g-u-c. Now let's dry-run it. */
  /* This should also catch errors due to missing shared libraries. */
  argv[0] = guc;
  argv[1] = "-version";
  argv[2] = NULL;

  /* g-u-c -version exits with exit-code of 1 -- for how much longer? */
  *line = '\0';
  status = pipe_out_cmd( "g-u-c", argv, envp, line, sizeof(line) );
  if ( *line ) {
    sscanf( line, "%*s %u.%u\n", &major, &minor );
    result = major * 1000 + minor;
  }

  if ( status == -1 ) {
    result = -1;
    fprintf( stderr, "ERROR: While waiting for globus-url-copy: %s\n",
             strerror(errno) );
  } else if ( status != 256 && status != 0 ) {
    result = -1;
    if ( WIFEXITED(status) ) {
      fprintf( stderr, "ERROR: globus-url-copy terminated with exit code %d\n",
               WEXITSTATUS(status) );
    } else if ( WIFSIGNALED(status) ) {
      fprintf( stderr, "ERROR: globus-url-copy terminated on signal %d\n",
               WTERMSIG(status) );
    } else {
      fprintf( stderr, "ERROR: globus-url-copy died abnormally on an unspecified cause.\n" );
    }
  }

  return result;
}
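/*
 * Aside (hypothetical usage, an editor's sketch rather than part of the
 * original tool): the long result above packs major.minor as
 * major*1000 + minor, so globus-url-copy 3.20 is reported as 3020.
 * T2.c relies on the same encoding, e.g. when it tests
 * global.version >= 3020 before passing -cd.
 */
#if 0
printf( "g-u-c %ld.%ld\n", version / 1000, version % 1000 ); /* 3020 -> 3.20 */
#endif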
long check_grid_proxy_info( char* envp[] )
/* purpose: Obtains the time remaining on the current user certificate proxy.
 * paramtr: env (IN): environment pointer from main()
 * returns: the time remaining on the certificate, 0 for expired, -1 error
 */
{
  int status;
  char* gpi;
  char* globus;
  long result = -1;
  struct stat st;
  char* argv[3];
  char line[256];

  if ( (globus=getenv("GLOBUS_LOCATION")) == NULL ) {
    fputs( "ERROR: You need to set your GLOBUS_LOCATION\n", stderr );
    return result;
  }

  gpi = (char*) malloc( strlen(globus) + 36*sizeof(char) );
  strcpy( gpi, globus );
  strcat( gpi, "/bin/grid-proxy-info" );

  if ( stat( gpi, &st ) == 0 ) {
    if ( (geteuid() != st.st_uid || (st.st_mode & S_IXUSR) == 0) &&
         (getegid() != st.st_gid || (st.st_mode & S_IXGRP) == 0) &&
         ((st.st_mode & S_IXOTH) == 0) &&
         ! S_ISREG(st.st_mode) ) {
      fprintf( stderr, "ERROR: Check execute permissions on %s\n", gpi );
      return result;
    }
  } else {
    fprintf( stderr, "ERROR: Unable to access %s: %d: %s\n",
             gpi, errno, strerror(errno) );
    return result;
  }

  argv[0] = gpi;
  argv[1] = "-timeleft";
  argv[2] = NULL;
  if ( (status=pipe_out_cmd( "g-p-i", argv, envp, line, sizeof(line) )) == 0 )
    sscanf( line, "%ld\n", &result );

  if ( status == -1 ) {
    fprintf( stderr, "ERROR: While waiting for grid-proxy-info: %s\n",
             strerror(errno) );
    result = -1;
  } else if ( status != 0 ) {
    result = -1;
    if ( WIFEXITED(status) ) {
      fprintf( stderr, "ERROR: grid-proxy-info terminated with exit code %d\n",
               WEXITSTATUS(status) );
    } else if ( WIFSIGNALED(status) ) {
      fprintf( stderr, "ERROR: grid-proxy-info terminated on signal %d\n",
               WTERMSIG(status) );
    } else {
      fprintf( stderr, "ERROR: grid-proxy-info died abnormally on an unspecified cause.\n" );
    }
  }

  free((void*) gpi);
  return result;
}

pegasus-wms_4.0.1+dfsg/src/tools/T2/error.h

/*
 * based on examples in David Butenhof, "Programming with POSIX threads",
 * Addison-Wesley, 1997
 */
#ifndef _ERROR_H
#define _ERROR_H

#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#define err_abort(code,text) do { \
  fprintf( stderr, "%s at \"%s\":%d: %d: %s\n", \
           text, __FILE__, __LINE__, code, strerror(code) ); \
  abort(); \
} while (0)

#define errno_abort(text) do { \
  fprintf( stderr, "%s at \"%s\":%d: %d: %s\n", \
           text, __FILE__, __LINE__, errno, strerror(errno) ); \
  abort(); \
} while (0)

#endif /* _ERROR_H */
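The two macros wrap Butenhof's report-and-abort pattern for pthreads code, whose calls return an error number instead of setting errno. A minimal sketch of their use (the function and mutex names here are illustrative, not from this package):

#include <pthread.h>
#include "error.h"

static pthread_mutex_t demo_lock = PTHREAD_MUTEX_INITIALIZER;

static void demo_critical( void )
{
  int status = pthread_mutex_lock( &demo_lock );
  if ( status != 0 ) err_abort( status, "lock demo_lock" ); /* pthreads error via code */
  /* ... critical section ... */
  if ( (status = pthread_mutex_unlock( &demo_lock )) != 0 )
    err_abort( status, "unlock demo_lock" );
}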
pegasus-wms_4.0.1+dfsg/src/tools/T2/workq.h

/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 *
 * based on examples in David Butenhof, "Programming with POSIX threads",
 * Addison-Wesley, 1997
 */
#ifndef _WORKQ_H
#define _WORKQ_H

#include <sys/types.h>
#include <pthread.h>
#include <signal.h>

#include "xfer.h"
#include "item.h"

#ifndef DEFAULT_PARALLEL
#define DEFAULT_PARALLEL 4
#endif

typedef struct workq_tag {
  size_t          m_magic;    /* valid */
  pthread_mutex_t m_mutex;    /* our mutex */
  pthread_cond_t  m_cv;       /* the condition variable */
  pthread_attr_t  m_attr;     /* detachment */
  pthread_cond_t  m_go;       /* prohibit Q overflow */
  item_p          m_head;     /* head of queue */
  item_p          m_tail;     /* tail of queue */
  size_t          m_limit;    /* maximum Q length */
  size_t          m_qsize;    /* current Q length */
  size_t          m_parallel; /* maximum number of threads */
  size_t          m_count;    /* current number of busy threads */
  size_t          m_idle;     /* current number of idle threads */
  sig_atomic_t    m_quit;     /* termination flag */
  int           (*m_engine)(item_p); /* user engine */

  /* statistics department */
  size_t          m_threads;  /* total number of threads used */
  size_t          m_request;  /* total number of requests */
  size_t          m_success;  /* number of successful copies */
  size_t          m_failure;  /* number of failed copies */
  double          m_timesum;  /* time of processing */
  double          m_waitsum;  /* time including queue wait */
} workq_t, *workq_p;

#define WORKQ_MAGIC 0xcafebabe

extern int workq_init( workq_p wq, size_t size, int (*engine)(item_p) );
/* purpose: initialize the workq data structure without starting children.
 * paramtr: wq (IO): pointer to data structure to be initialized
 *          size (IN): maximum number of threads to start
 *          engine (IN): user thread handler to call
 * returns: 0 for success, the error in case of failure.
 */

extern int workq_destroy( workq_p wq );
/* purpose: wait for all to finish and be done with it.
 * paramtr: wq (IO): valid workq data structure.
 * returns: 0 for success, or the error.
 */

extern int workq_add( workq_p wq, xfer_p section, unsigned bufsize,
                      unsigned streams, unsigned retries,
                      double initial, double backoff );
/* purpose: add a request to the queue for tasking, may block
 * paramtr: wq (IN): is a valid workq data structure
 *          section (IN): description of the transfer request section
 *          bufsize (IN): TCP buffer size
 *          streams (IN): parallel data channels
 *          retries (IN): retry attempts
 *          initial (IN): initial sleep time
 *          backoff (IN): first exponential backoff time
 * returns: 0 for success, or the error.
 */

#endif /* _WORKQ_H */
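All traffic through the queue goes through these three calls: workq_add() both enqueues and lazily spawns detached workers up to the configured parallelism, and workq_destroy() drains the queue and waits the workers out. A minimal consumer with a trivial engine and an already-built section (the names and parameter values below are illustrative, not from this package):

#include "workq.h"

static int demo_engine( item_p item )
{
  /* runs in a worker thread; the queue destroys the item afterwards.
   * Return 0 to be counted in m_success, non-zero for m_failure. */
  return 0;
}

int demo_run( xfer_p section )
{
  workq_t crew;
  int status = workq_init( &crew, DEFAULT_PARALLEL, demo_engine );
  if ( status ) return status;
  /* blocks on m_go once the queue already holds 2*DEFAULT_PARALLEL requests */
  if ( (status = workq_add( &crew, section, 0, 1, 3, 0.1, 5.0 )) )
    return status;
  return workq_destroy( &crew ); /* waits for the queue to drain */
}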
pegasus-wms_4.0.1+dfsg/src/tools/T2/xfer.h

#ifndef _T2_XFER_H
#define _T2_XFER_H

#include "dll.h"

typedef enum {
  XFER_ANY = 0,
  XFER_ALL = 1,
  XFER_OPTIONAL = 2
} section_flags;

typedef struct section_tag {
  unsigned long m_magic;  /* valid flag */
  const char*   m_lfn;    /* section header */
  section_flags m_flags;  /* any, all, optional */
  dll_t         m_src;    /* list of source candidates */
  dll_t         m_dst;    /* list of destination candidates */
} xfer_t, *xfer_p;

#define T2_SECTION_MAGIC 0xa3a7c135

extern int xfer_init( xfer_p xfer, const char* lfn, unsigned flags );
extern int xfer_add_src( xfer_p xfer, const char* src );
extern int xfer_add_dst( xfer_p xfer, const char* dst );
extern int xfer_done( xfer_p xfer );

#endif

pegasus-wms_4.0.1+dfsg/src/tools/T2/make.SunOS

#!/bin/sh
gmake distclean
gmake EXTRACFLAGS='$(V7FLAGS)' T2
mv T2 T2.sparcv7
gmake clean
gmake EXTRACFLAGS='$(V9FLAGS)' T2
mv T2 T2.sparcv9
gmake clean

pegasus-wms_4.0.1+dfsg/src/tools/T2/dll.c

#include "dll.h"
#include <errno.h>
#include <stdlib.h>
#include <string.h>

int dll_item_init( dll_item_p item, const char* data )
{
  if ( item == NULL ) return EINVAL;
  memset( item, 0, sizeof(dll_item_t) );
  item->m_magic = T2_ITEM_MAGIC;
  if ( (item->m_data = strdup(data)) == NULL ) return ENOMEM;
  return 0;
}

int dll_item_done( dll_item_p item )
{
  if ( item == NULL || item->m_magic != T2_ITEM_MAGIC ) return EINVAL;
  if ( item->m_data ) free((void*) item->m_data);
  memset( item, 0, sizeof(dll_item_t) );
  return 0;
}

int dll_init( dll_p dll )
{
  if ( dll == NULL ) return EINVAL;
  memset( dll, 0, sizeof(dll_t) );
  dll->m_magic = T2_DLL_MAGIC;
  return 0;
}

int dll_add( dll_p dll, const char* data )
{
  int status;
  dll_item_p temp;

  if ( dll == NULL || dll->m_magic != T2_DLL_MAGIC ) return EINVAL;
  if ( (temp = malloc( sizeof(dll_item_t) )) == NULL ) return ENOMEM;
  if ( (status = dll_item_init( temp, data )) != 0 ) {
    free((void*) temp);
    return status;
  }

  if ( dll->m_count ) {
    dll->m_tail->m_next = temp;
    dll->m_tail = temp;
  } else {
    dll->m_head = dll->m_tail = temp;
  }
  dll->m_count++;
  return 0;
}

int dll_done( dll_p dll )
{
  if ( dll == NULL || dll->m_magic != T2_DLL_MAGIC ) return EINVAL;

  if ( dll->m_count ) {
    int status;
    dll_item_p temp;
    while ( (temp = dll->m_head) != NULL ) {
      dll->m_head = dll->m_head->m_next;
      if ( (status=dll_item_done( temp )) != 0 ) return status;
      free((void*) temp);
    }
  }

  memset( dll, 0, sizeof(dll_t) );
  return 0;
}
pegasus-wms_4.0.1+dfsg/src/tools/T2/workq.c

/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 *
 * based on examples in David Butenhof, "Programming with POSIX threads",
 * Addison-Wesley, 1997
 */
#ifdef sun
#include <thread.h>
#include <memory.h>
#endif

#include <string.h>
#include <stdlib.h>
#include <time.h>

#include "error.h"
#include "item.h"
#include "util.h"
#include "workq.h"

static const char* RCS_ID = "$Id: workq.c 50 2007-05-19 00:48:32Z gmehta $";

int workq_init( workq_p wq, size_t size, int (*engine)(item_p) )
/* purpose: initialize the workq data structure without starting children.
 * paramtr: wq (IO): pointer to data structure to be initialized
 *          size (IN): maximum number of threads to start
 *          engine (IN): pointer to user thread function
 * returns: 0 for success, the error in case of failure.
 */
{
  int status;

  /* sanity checks first */
  if ( wq == NULL ) return EINVAL;
  else memset( wq, 0, sizeof(workq_t) );

  /* create detached worker threads */
  if ( (status = pthread_attr_init( &wq->m_attr )) ) return status;
  status = pthread_attr_setdetachstate( &wq->m_attr, PTHREAD_CREATE_DETACHED );
  if ( status ) {
    pthread_attr_destroy( &wq->m_attr );
    return status;
  }

  if ( size > 1024 ) {
    pthread_attr_destroy( &wq->m_attr );
    return EINVAL;
  } else {
    wq->m_parallel = size;
  }

#ifdef sun
  if ( (status = thr_setconcurrency( size )) ) {
    pthread_attr_destroy( &wq->m_attr );
    return status;
  }
#endif

  /* pthreads init */
  if ( (status = pthread_mutex_init( &wq->m_mutex, NULL )) ) {
    pthread_attr_destroy( &wq->m_attr );
    return status;
  }
  if ( (status = pthread_cond_init( &wq->m_cv, NULL )) ) {
    pthread_mutex_destroy( &wq->m_mutex );
    pthread_attr_destroy( &wq->m_attr );
    return status;
  }
  if ( (status = pthread_cond_init( &wq->m_go, NULL )) ) {
    pthread_cond_destroy( &wq->m_cv );
    pthread_mutex_destroy( &wq->m_mutex );
    pthread_attr_destroy( &wq->m_attr );
    return status;
  }

  /* the rest is auto-memset to zero */
  wq->m_engine = engine;
  wq->m_magic = WORKQ_MAGIC;
  wq->m_limit = 2*size;
  return 0;
}

int workq_destroy( workq_p wq )
/* purpose: wait for all to finish and be done with it.
 * paramtr: wq (IO): valid workq data structure.
 * returns: 0 for success, or the error.
 */
{
  int status, s2, s3, s4;

  /* sanity checks first */
  if ( wq == NULL || wq->m_magic != WORKQ_MAGIC ) return EINVAL;

  /* get lock to prevent other accesses */
  if ( (status = pthread_mutex_lock( &wq->m_mutex )) ) return status;

  /* this is it */
  wq->m_magic = -1ul;
  wq->m_quit = 1;

  /* run down any active threads: broadcast wake-up call and wait for all */
  while ( wq->m_count > 0 ) {
    /* broadcast, if idle threads are lying around */
    if ( wq->m_idle > 0 ) {
      if ( (status = pthread_cond_broadcast( &wq->m_cv )) ) {
        pthread_mutex_unlock( &wq->m_mutex );
        return status;
      }
    }

    /* wait for all others to complete */
    if ( wq->m_count > 0 ) {
      if ( (status = pthread_cond_wait( &wq->m_cv, &wq->m_mutex )) ) {
        pthread_mutex_unlock( &wq->m_mutex );
        return status;
      }
    }
  }

  if ( (status = pthread_mutex_unlock( &wq->m_mutex )) ) return status;

  status = pthread_mutex_destroy( &wq->m_mutex );
  s2 = pthread_cond_destroy( &wq->m_cv );
  s3 = pthread_attr_destroy( &wq->m_attr );
  s4 = pthread_cond_destroy( &wq->m_go );
  return ( status ? status : ( s2 ?
s3 : s4) ) ); } static void* workq_server( void* arg ) { struct timespec timeout; workq_p wq = (workq_p) arg; item_p item; int status, timedout; if ( (status = pthread_mutex_lock( &wq->m_mutex )) ) return NULL; /* forever */ for (;;) { /* if there are many threads, allow for more time (host sweating) */ if ( (timedout = (wq->m_parallel >> 4)) < 5 ) timedout = 5; timeout.tv_sec = time(NULL) + timedout; timeout.tv_nsec = 0; timedout = 0; while ( wq->m_head == NULL && ! wq->m_quit ) { /* server thread times out after a short while w/o work */ wq->m_idle++; status = pthread_cond_timedwait( &wq->m_cv, &wq->m_mutex, &timeout ); wq->m_idle--; if ( status == ETIMEDOUT ) { /* timeout, this is it */ flockfile( stderr ); fprintf( stderr, "# worker timed out\n" ); funlockfile( stderr ); timedout = 1; break; } else if ( status != 0 ) { /* failure that should not happen */ flockfile( stderr ); fprintf( stderr, "worker wait failed: %d: %s\n", status, strerror(status) ); funlockfile( stderr ); wq->m_count--; pthread_mutex_unlock( &wq->m_mutex ); return NULL; } } /* retrieve request from queue */ item = wq->m_head; if ( item != NULL ) { double temp0, temp1; wq->m_qsize--; if ( (wq->m_head = item->m_next) == NULL ) wq->m_tail = NULL; if ( (wq->m_head == NULL && wq->m_qsize != 0) || (wq->m_head != NULL && wq->m_qsize == 0) ) { /* small sanity check */ fprintf( stderr, "mismatch between Q and Q-length\n" ); } if ( (status = pthread_mutex_unlock( &wq->m_mutex )) ) return NULL; if ( (status = wq->m_engine(item)) == 0 ) wq->m_success++; else wq->m_failure++; temp0 = item->m_timesum; temp1 = now() - item->m_queued; item_destroy(item); free((void*) item); if ( (status = pthread_mutex_lock( &wq->m_mutex )) ) return NULL; wq->m_timesum += temp0; wq->m_waitsum += temp1; /* we may be able to add another item to the queue */ if ( wq->m_qsize < wq->m_limit ) pthread_cond_signal( &wq->m_go ); } /* are we done yet */ if ( wq->m_head == NULL && wq->m_quit ) { wq->m_count--; if ( wq->m_count == 0 ) pthread_cond_broadcast( &wq->m_cv ); pthread_mutex_unlock( &wq->m_mutex ); return NULL; } /* are we really done */ if ( wq->m_head == NULL && timedout ) { wq->m_count--; break; } } /* forever */ pthread_mutex_unlock( &wq->m_mutex ); return NULL; } int workq_add( workq_p wq, xfer_p xfer, unsigned bufsize, unsigned streams, unsigned retries, double initial, double backoff ) { int status; item_p item; /* sanity checks */ if ( wq == NULL || wq->m_magic != WORKQ_MAGIC ) return EINVAL; /* create item */ if ( (item = (item_p) malloc(sizeof(item_t))) == NULL ) return ENOMEM; if ( (status = item_full_init( item, xfer, bufsize, streams, retries, initial, backoff )) ) { free((void*) item); return status; } /* get lock */ if ( (status = pthread_mutex_lock( &wq->m_mutex )) ) { item_destroy(item); free((void*) item); return status; } /* check the Q length */ while ( wq->m_qsize >= wq->m_limit ) { if ( (status = pthread_cond_wait( &wq->m_go, &wq->m_mutex )) ) { item_destroy(item); free((void*) item); return status; } } /* add the request to the queue of work */ if ( wq->m_head == NULL ) wq->m_head = item; else wq->m_tail->m_next = item; wq->m_tail = item; wq->m_request++; wq->m_qsize++; #if 0 fprintf( stderr, "# [master] adding item %u to Q\n", wq->m_qsize ); #endif if ( wq->m_idle > 0 ) { /* wake-up any idle thread */ if ( (status = pthread_cond_signal( &wq->m_cv )) ) { pthread_mutex_unlock( &wq->m_mutex ); return status; } } else if ( wq->m_count < wq->m_parallel ) { /* add some more threads to work on the task */ pthread_t id; status = 
pthread_create( &id, &wq->m_attr, workq_server, (void*) wq );
    if ( status != 0 ) {
      pthread_mutex_unlock( &wq->m_mutex );
      return status;
    } else {
      wq->m_threads++;
      wq->m_count++;
    }
  }

  /* done */
  return pthread_mutex_unlock( &wq->m_mutex );
}

pegasus-wms_4.0.1+dfsg/src/tools/T2/T2.c

/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 *
 * based on examples in David Butenhof, "Programming with POSIX threads",
 * Addison-Wesley, 1997
 */
#include <ctype.h>
#include <errno.h>
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <math.h>
#include <sys/wait.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <fcntl.h>
#include <unistd.h>
#include <pthread.h>
#include <regex.h>

#include "error.h"
#include "item.h"
#include "workq.h"
#include "util.h"
#include "mypopen.h"

static const char* RCS_ID = "$Id: T2.c 50 2007-05-19 00:48:32Z gmehta $";
static const char* REGEX_RFC2396 = "^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)";

struct {
  workq_t       crew;     /* worker node management system */
  char*         guc;      /* absolute path to globus-url-copy */
  unsigned long version;  /* version of globus-url-copy */
  char*         ln;       /* absolute path to Unix ln tool for symlinks */
  int           force;    /* set to non-zero to use -f for ln -s */
  void*         envp;     /* environment provided to main() */
  int           quiet;    /* quietness level for informational logging */
  int           retry;    /* if set, retry as often as necessary */
  size_t        argsize;  /* length of extra guc args */
  char**        args;     /* extra guc args */
  regex_t       rfc2396;  /* URI regular expression matcher from RFC 2396 */
} global;

static void atfork_child( void )
{
  if ( global.crew.m_magic == WORKQ_MAGIC )
    pthread_mutex_init( &global.crew.m_mutex, NULL );
}
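/*
 * Why re-initialize the mutex after fork (a hedged reading, not from the
 * original source): pipe_out_cmd() presumably forks to run g-u-c, and
 * after fork(2) only the calling thread exists in the child. A crew
 * mutex held by another worker at fork time would remain locked forever
 * in the child, so the pthread_atfork() child handler above, registered
 * in main() below, gives the child a fresh, unlocked mutex.
 */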
static char* parse_file_url( char* url )
/* purpose: parse a file url to determine the start of the path component.
 * paramtr: url (IN): the file:// URL
 * returns: pointer to the start of the path, or NULL if invalid
 */
{
  char* s = url + strlen("file:");
  char* e = url + strlen(url) - 1;

  /* sanity check */
  if ( s > e ) return NULL;

  /* point to the correct position inside the URL. Note the URL
   * may look like file:/path/to/file and file:///path/to/file.
   * While g-u-c does not parse file://bogus.host/path/to/file,
   * it is still a potential candidate. bogus.host is usually
   * "xx" or "localhost". */
  if ( *s != '/' ) return s; /* be nice: file:path/to/file */
  else s++;
  if ( s > e ) return NULL;

  /* postcondition: we matched "file:/" up to here */
  if ( *s != '/' ) return s-1; /* be nice: file:/path/to/file */
  else s++;
  if ( s > e ) return NULL;

  /* postcondition: we matched "file://" up to here */
  if ( *s != '/' ) {
    /* uncommon case: file://bogus.host/path/to/file */
    char* save = s;
    while ( s <= e && *s != '/' ) s++; /* forward to next slash */
    if ( s > e ) s = save;
  }

  /* postcondition: s points to
   * [a] the 3rd slash in file:///path/to/url and file:///////hi/hi
   * [b] the absolute path's start slash in file://bogus.host/path
   * [c] or the 'n' in the erroneous case file://nopathhere
   */
  return s;
}
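/*
 * Behavior of parse_file_url() above, sketched on sample inputs:
 *   file:path/to/f      -> path/to/f   (relative, tolerated)
 *   file:/path/to/f     -> /path/to/f
 *   file:///path/to/f   -> /path/to/f  (third slash starts the path)
 *   file://localhost/f  -> /f          (host part skipped)
 *   file://nopathhere   -> nopathhere  (erroneous, degrades gracefully)
 */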
#if 0
static char* parse_host_url( char* url )
/* purpose: Extract the hostname[:port] from the URL, if there is any
 * paramtr: url (IN): gsiftp:// or file:// URL -- must not be NULL
 * returns: freshly allocated memory area containing the hostname
 */
{
  char* result = NULL;
#if 0
  char* s = url;
  char* e = url + strlen(url) - 1;
  char* host;

  /* sanity check */
  if ( s > e ) return result;

  /* find first colon */
  while ( s <= e && *s != ':' ) ++s;
  if ( s-url < 2 ) return result;

  /* early bail-out for file:// URIs */
  if ( strncmp( url, "file", s-url ) == 0 ) return strdup("localhost");

  /* skip "//" after ":" */
  if ( s>e || *s++ != '/' ) return result;
  if ( s>e || *s++ != '/' ) return result;
  host = s;

  /* find next "/" after "://" */
  while ( s <= e && *s != '/' ) ++s;
  if ( s-host < 2 ) return result;

  result = (char*) calloc( s-host+2, 1 );
  strncpy( result, host, s-host );
#else
  regmatch_t pmatch[8];
  if ( regexec( &global.rfc2396, url, sizeof(pmatch), pmatch, 0 ) == 0 ) {
    size_t n = pmatch[4].rm_eo - pmatch[4].rm_so;
    result = calloc( n+2, 1 );
    strncpy( result, url+pmatch[4].rm_so, n );
  }
#endif
  return result;
}
#endif

static char* err_msg[] = {
  /* 2: missing source errors, local and remote, both in 2 parts */
  "error: a system call failed",
  "o such file or directory",
  "error: the server sent an error response: 550 550",
  "not a plain file",
  /* 1: URI with whitespace */
  "ERROR: too many url strings specified",
  /* 1: something apart from a missing source file */
  "error: a system call failed",
  /* 3: no gridftp server running */
  "error: a system call failed (Connection refused)",
  /* 1: credential problems with server */
  "Error with GSI credential",
  /* 0: timed out */
  "timed out"
};

static int arbitrate( int status, const char* line )
/* purpose: Arbitrate between hard errors, soft errors and missing src
 * paramtr: status (IN): result code from child process
 *          line (IN): stdout of the child process
 * returns: -1: done -- don't retry
 *           0: soft error -- retry
 *           1: hard error -- don't retry
 *           2: missing source file -- don't retry
 *           3: hard server error -- don't retry
 */
{
  if ( WIFEXITED(status) ) {
    /* case: it worked! Off we leave */
    if ( WEXITSTATUS(status) == 0 ) return -1;

    /* case: g-u-c terminated with 126 or 127
     * --> pipe_out_cmd error, wrong path to app, hard break */
    if ( WEXITSTATUS(status) == 126 || WEXITSTATUS(status) == 127 ) return 1;

    /* case: always retry in face of other errors */
    if ( global.retry ) return 0;

    /* FIXME: not zero, but regular exit - what now? must parse
     * <line> content. Assume for now that these errors are mostly
     * retryable, which is not true. */
    if ( WEXITSTATUS(status) == 1 ) {
      /* source file not found has two possible error messages, errcode==1:
       *
       * [1] "error: a system call failed (No such file or directory)"
       * [2] "error: the server sent an error response: 550 550 <SRC>:
       *     not a plain file." */
      if ( ( strncmp( line, err_msg[0], strlen(err_msg[0]) ) == 0 &&
             strstr(line,err_msg[1]) != NULL ) ||
           ( strncmp( line, err_msg[2], strlen(err_msg[2]) ) == 0 &&
             strstr(line,err_msg[3]) != NULL ) )
        return 2;
    }

    /* any kind of time-out should be considered soft */
    if ( strstr( line, err_msg[8] ) != NULL ) return 0;

    /* no up and running gridftp server yields:
     * "error: a system call failed (Connection refused)"
     * May also be returned by an overburdened server, sigh! */
    if ( strncmp( line, err_msg[6], strlen(err_msg[6]) ) == 0 ) return 3;

    /* no account on remote gridftp server yields (auth probs):
     * "Error with GSI credential" somewhere within. This error
     * can be issued for src as well as for dst, sigh! */
    if ( strstr( line, err_msg[7] ) != NULL ) return 1;

    /* ERROR: too many url strings specified */
    if ( strncmp( line, err_msg[4], strlen(err_msg[4]) ) == 0 ) return 1;

    /* error: a system call failed -- apart from missing src */
    if ( strncmp( line, err_msg[5], strlen(err_msg[5]) ) == 0 ) return 1;

    /* server error or misc local error: hard */
    if ( strncmp( line, err_msg[2], strlen(err_msg[2]) ) == 0 ||
         strncmp( line, err_msg[0], strlen(err_msg[0]) ) == 0 ) return 1;

    /* assume soft error on rest for now */
    return 0;
  } else if ( WIFSIGNALED(status) ) {
    /* case: g-u-c terminated on a signal --> hard error */
    return 1;
  }

  /* should not be reached */
  return 1;
}

static int sub_engine( item_p item, char* line, size_t size, int* status,
                       char* src, char* dst )
/* purpose: the true engine to start whatever needs to be done.
 * paramtr: item (IO): pointer to item structure
 *          line (IN): area to capture things
 *          size (IN): capacity of line area
 *          status (OUT): result code from the child process
 *          src (IO): source URI
 *          dst (IO): destination URI
 * returns: arbitration
 */
{
  double start, diff;
  size_t retry;
  char* s;
  char* tag;
  char** arg = (char**) calloc( 16+global.argsize, sizeof(char*) );
  char s_bufsize[16];
  char s_streams[16];
  int i, j, arbit = 0;
  unsigned long backoff = item->m_backoff;
  pthread_t id = pthread_self();
  struct timeval tv;

  for ( retry = 1; retry <= item->m_retries; ++retry ) {
    if ( retry == 1 ) {
      /* initial sleep -- way shorter than retry sleeps */
      double2timeval( &tv, item->m_initial );
    } else {
      /* implement exponential back-off */
      double2timeval( &tv, backoff );
      backoff *= 2.0;
    }

    /* report */
    if ( global.quiet < 2 ) {
      flockfile( stdout );
      printf( "# [%#010lx] %u sleeping for %.3f s\n",
              id, retry, timeval2double(tv) );
      funlockfile( stdout );
    }
    select( 0, NULL, NULL, NULL, &tv );

    /* assemble command line for g-u-c */
    i = 0;
    if ( strncmp( src, "file:", 5 ) == 0 &&
         strncmp( dst, "file:", 5 ) == 0 ) {
      /* symlink target instead of calling g-u-c */
      tag = "ln";
      arg[i++] = global.ln;
      if ( global.force ) arg[i++] = "-f";
      arg[i++] = "-s";
      /* point to the correct position inside the URL. Note the URL
       * may look like file:/path/to/file and file:///path/to/file. */
      arg[i++] = parse_file_url( src );
      arg[i++] = parse_file_url( dst );
      arg[i] = NULL;
    } else {
      /* regular calls to g-u-c */
      tag = "g-u-c";
      arg[i++] = global.guc;

      /* 20050419: Add extra arguments */
      for ( j=0; j < global.argsize; ++j ) arg[i++] = global.args[j];

      if ( item->m_retries == 1 ||
           (item->m_retries > 1 && retry < item->m_retries) ) {
        /* only assemble these, if more than one retry permitted, and this
         * is not the last retry.
For single retries, use what the user * said. */ if ( item->m_bufsize > 1024 ) { snprintf( s_bufsize, sizeof(s_bufsize), "%u", item->m_bufsize ); arg[i++] = "-tcp-bs"; arg[i++] = s_bufsize; } if ( item->m_streams > 1 ) { snprintf( s_streams, sizeof(s_streams), "%u", item->m_streams ); arg[i++] = "-p"; arg[i++] = s_streams; } } if ( global.quiet < 0 ) arg[i++] = "-vb"; if ( global.quiet < -1 ) arg[i++] = "-dbg"; /* NEW: create directories with latest guc */ if ( global.version >= 3020 ) arg[i++] = "-cd"; /* finish commandline argument vector */ arg[i++] = src; arg[i++] = dst; arg[i] = NULL; } if ( global.quiet < 0 ) { int k; /* report */ flockfile( stdout ); printf( "# [%#010lx] %u", id, retry ); for ( k=0; k<i; ++k ) { fputc( ' ', stdout ); fputs( arg[k], stdout ); } fputc( '\n', stdout ); funlockfile( stdout ); } /* run g-u-c while capturing its output for later examination */ memset( line, 0, size ); start = now(); *status = pipe_out_cmd( tag, arg, global.envp, line, size ); diff = item->m_timesum = now() - start; /* poor man's basename */ if ( (s=strrchr( dst, '/' )) == NULL ) s=dst; else s++; /* report */ if ( global.quiet < 2 ) { flockfile( stdout ); printf( "# [%#010lx] %u %d/%d %.3fs 0x%02x \"%s\" %s\n", id, retry, *status >> 8, (*status & 127), diff, item->m_xfer->m_flags, s, line ); /* queue time is (start - item->m_queued) + diff */ funlockfile( stdout ); } /* Error arbitration: Exit loop on anything but soft errors */ if ( (arbit = arbitrate( *status, line )) ) break; } free((void*) arg); return arbit; } static void noop( void ) { /* noop */ } static int engine( item_p item ) { size_t size = getpagesize(); char* line = (char*) malloc(size); int transfer = (item->m_xfer->m_flags & 0x0003); int arbit = 0; int status = -1; dll_item_p src; dll_item_p dst; for ( src = item->m_xfer->m_src.m_head; src; src = src->m_next ) { for ( dst = item->m_xfer->m_dst.m_head; dst; dst = dst->m_next ) { /* skip trouble-burdened bad server from previous loop */ if ( (dst->m_flag & 0x0001) == 1 ) continue; /* skip destinations we already successfully transferred to */ if ( transfer == XFER_ALL && (dst->m_flag & 0x0002) == 2 ) continue; /* invoke sub engine */ arbit = sub_engine(item,line,size,&status,src->m_data,dst->m_data); if ( arbit == -1 ) { /* mark this dst as done */ dst->m_flag |= 0x0002; /* done -- ok */ if ( transfer != XFER_ALL ) goto done; } else if ( arbit == 2 ) { if ( transfer == XFER_OPTIONAL ) status = 0; /* mark this source as invalid */ src->m_flag |= 0x0001; /* source not found -- try next source */ goto next_src; } else if ( arbit == 3 ) { /* unrecoverable server error, never try this dst again */ dst->m_flag |= 0x0001; } } next_src: /* try next src */ noop(); } /* all tried, all failed -- don't fail for optional transfers */ if ( arbit == 2 && transfer == XFER_OPTIONAL ) status = 0; /* and don't overwrite last status for "all" transfers */ else if ( transfer != XFER_ALL ) status = -1; done: /* done, return last status */ free((void*) line ); return status; } void add_to_args( char* s, int hyphen ) /* purpose: adds the string to the global guc option list * paramtr: s (IO): string containing the option. * hyphen (IN): add this many hyphens before option * returns: ? 
 */
{
  int i = 0;
  size_t where = global.argsize;

  global.argsize++;
  global.args = realloc( global.args, global.argsize * sizeof(char*) );
  global.args[where] = (char*) malloc( strlen(s) + 1 + hyphen );
  while ( i < hyphen ) global.args[where][i++] = '-';
  strcpy( global.args[where]+i, s );
}

void helpMe( const char* programname )
/* purpose: write help message and exit */
{
  printf( "%s\nUsage:\t%s [options] baseuri basemnt [fof]\n",
          RCS_ID, programname );
  printf( " -g guc\tUse guc as the path to your version of globus-url-copy\n"
          " -G o,v\tPasses option o, prefixed with hyphen, to g-u-c with value v\n"
          "\tNote: Use just -G o for an option without value. Use multiple times\n"
          " -P n\tUse n as maximum number of parallel processes for g-u-c, default %d\n"
          " -f\tUse the -f option with ln -s for local files, default is not\n"
          " -t n\tUse n as TCP buffer size for g-u-c\'s -tcp-bs option, default %u\n"
          " -p n\tUse n as number of streams for g-u-c\'s -p option, default %u\n"
          " -r n\tUse n as the maximum number of g-u-c retry attempts, default %u\n"
          " -q\tUse multiple times to be less noisy, default is somewhat noisy\n"
          " -v\tUse a more verbose mode, opposite effect of using -q\n"
          " -R\tForce retries for almost all server-side errors\n"
          " -T iv\tUse interval iv for the initial exponential back-off, default %.1f s\n"
          " -i iv\tUse initial iv to sleep between transfers, default %.1f s\n"
          " -s\tIs a debug option to show the values of all options after parsing\n"
          " baseuri For optimizations, this is the base URI of the gatekeeper\n"
          " basemnt For optimizations, this is the corresponding storage mount point\n"
          " fof\tList of filename pairs, one filename per line, default stdin\n",
          DEFAULT_PARALLEL, DEFAULT_BUFSIZE, DEFAULT_STREAMS, DEFAULT_RETRIES,
          DEFAULT_BACKOFF, DEFAULT_INITIAL );
  exit(1);
}

void parseCommandline( int argc, char* argv[], char* envp[],
                       unsigned* parallel, char** baseuri, char** basemnt,
                       FILE** input, unsigned* bufsize, unsigned* streams,
                       unsigned* retries, double* initial, double* backoff )
{
  unsigned long guc_version;
  int status, option, showme = 0;
  char* e, *ptr = strrchr(argv[0],'/');
  double temp;

  /* basename */
  if ( ptr == NULL ) ptr = argv[0];
  else ptr++;

  *parallel = DEFAULT_PARALLEL;
  *streams = DEFAULT_STREAMS;
  *bufsize = DEFAULT_BUFSIZE;
  *retries = DEFAULT_RETRIES;
  *backoff = DEFAULT_BACKOFF;
  *initial = DEFAULT_INITIAL;
  global.envp = envp;
  global.force = global.quiet = global.retry = 0;
  global.guc = NULL;

  if ( (status=regcomp( &global.rfc2396, REGEX_RFC2396, REG_EXTENDED )) != 0 ) {
    char buffer[512];
    regerror( status, &global.rfc2396, buffer, sizeof(buffer) );
    fprintf( stderr, "ERROR: Compiling RFC 2396 regular expression: %s\n",
             buffer );
    exit(1);
  }

  opterr = 0;
  while ( (option = getopt( argc, argv, "?G:P:RT:fg:hi:p:qr:st:v" )) != -1 ) {
    switch ( option ) {
    case 'G':
      if ( optarg && *optarg ) {
        char* arg = strdup(optarg);
        char* s = strchr( arg, ',' );
        if ( s == NULL ) {
          /* option without value */
          add_to_args( arg, 1 );
        } else {
          /* option with value */
          *s++ = '\0';
          add_to_args( arg, 1 );
          add_to_args( s, 0 );
        }
      }
      break;
    case 'P':
      *parallel = strtoul( optarg, 0, 0 );
      break;
    case 'R':
      global.retry++;
      break;
    case 'T':
      temp = strtod( optarg, &e );
      if ( e != optarg && temp >= 0.0 ) *backoff = temp;
      break;
    case 'i':
      temp = strtod( optarg, &e );
      if ( e != optarg && temp >= 0.0 ) *initial = temp;
      break;
    case 'f':
      global.force++;
      break;
    case 'g':
      if ( (guc_version = check_globus_url_copy( optarg, envp )) > 2000 ) {
        if ( global.guc != NULL ) free((void*) global.guc);
        global.guc = strdup(optarg);
global.version = guc_version; } else { fprintf( stderr, "ERROR! Unable to use %s\n", optarg ); exit(1); } break; case 'p': *streams = strtoul( optarg, 0, 0 ); break; case 'q': global.quiet++; break; case 'v': global.quiet--; break; case 'r': *retries = strtoul( optarg, 0, 0 ); break; case 's': showme = 1; break; case 't': *bufsize = strtoul( optarg, 0, 0 ); break; case 'h': case '?': helpMe(ptr); break; default: helpMe(ptr); break; } } /* extract mandatory parameters */ if ( optind > argc-2 || optind < argc-3 ) helpMe(ptr); *baseuri = argv[optind+0]; *basemnt = (char*) malloc( strlen(argv[optind+1]) + 8 ); if ( argv[optind+1][0] == '/' ) strcpy( *basemnt, "file://" ); else strcpy( *basemnt, "file:///" ); strcat( *basemnt, argv[optind+1] ); if ( optind+2 >= argc ) *input = stdin; else { if ( (*input = fopen(argv[optind+2],"r")) == NULL ) { fprintf( stderr, "open %s: %s\n", argv[optind+2], strerror(errno) ); exit(1); } } /* compare with shipped version, unless -g was present */ if ( global.guc == NULL ) { char* our; global.guc = default_globus_url_copy(); global.version = check_globus_url_copy( global.guc, envp ); if ( (our=alter_globus_url_copy(argv[0])) != NULL ) { unsigned long our_version = check_globus_url_copy( our, envp ); if ( our_version > global.version ) { /* prefer our version -- it's newer */ free((void*) global.guc); global.guc = our; global.version = our_version; } else { /* other version is newer -- prefer the other version */ free((void*) our); } } } /* show results */ if ( showme ) { printf( "#\n# Currently active values for %s:\n# %s\n", ptr, RCS_ID ); printf( "# max. g-u-c streams: %u\n", *streams ); printf( "# max. g-u-c bufsize: %u\n", *bufsize ); printf( "# max. g-u-c retries: %u\n", *retries ); printf( "# location of g-u-c: %s\n", global.guc ); printf( "# version# of g-u-c: %lu.%lu\n", global.version / 1000, global.version % 1000 ); printf( "# max. forked g-u-c: %u\n", *parallel ); printf( "# chosen quietness : %d\n", global.quiet ); printf( "# use -f w/ symlink: %s\n", global.force ? "true" : "false" ); printf( "# forceful retries: %s\n", global.retry ? "true" : "false" ); printf( "# initial interval: %.3f s\n", *initial ); printf( "# backoff interval: %.3f s\n", *backoff ); printf( "# external TFN base: %s\n", *baseuri ); printf( "# internal SFN base: %s\n", *basemnt ); printf( "# list of file base: %s\n", ( optind+2>=argc ? 
"stdin" : argv[optind+2] ) ); fflush( stdout ); } } int manage( xfer_p section, unsigned bufsize, unsigned streams, unsigned retries, double initial, double backoff ) { int status; if ( (status = workq_add( &global.crew, section, bufsize, streams, retries, initial, backoff )) ) { fprintf( stderr, "Error queuing LFN %s, continuing: %d: %s\n", section->m_lfn, status, strerror(status) ); /* obtain mutex to increment failures */ if ( (status = pthread_mutex_lock( &global.crew.m_mutex )) ) { /* ok, this becomes serious now */ fprintf( stderr, "lock mutex: %d: %s\n", status, strerror(status) ); return 6; } global.crew.m_failure++; if ( (status = pthread_mutex_unlock( &global.crew.m_mutex )) ) { /* ok, this becomes serious now */ fprintf( stderr, "unlock mutex: %d: %s\n", status, strerror(status) ); return 6; } } return 0; } int main( int argc, char* argv[], char* envp[] ) { int status; char* baseuri; char* basemnt; char* source = NULL; FILE* input; long timeleft; size_t sizeuri; size_t sizemnt; size_t lineno = 0; size_t linesize = getpagesize() << 1; unsigned parallel, streams, bufsize, retries; char* s, *line = malloc(linesize); double start, diff, initial, backoff; xfer_p section = NULL; /* check that guc is set up and runnable */ global.envp = envp; parseCommandline( argc, argv, envp, ¶llel, &baseuri, &basemnt, &input, &bufsize, &streams, &retries, &initial, &backoff ); sizeuri = strlen(baseuri); sizemnt = strlen(basemnt); if ( global.version < 1000 ) { fprintf( stderr, "Error while checking usability of globus-url-copy\n" ); return 2; } /* check for symlink capabilities */ if ( (global.ln = check_link()) == 0 ) { fprintf( stderr, "Error while checking accessibility of link tool ln\n" ); return 2; } /* check our grid certificate */ if ( (timeleft = check_grid_proxy_info(envp)) <= 3600 ) { if ( timeleft == -1 ) fprintf( stderr, "Error while executing grid-proxy-info\n" ); else fprintf( stderr, "Error: Too little time left %ld s\n", timeleft ); return 3; } /* create the crew of worker threads */ if ( (status = workq_init( &global.crew, parallel, engine )) ) { fprintf( stderr, "Error while creating worker threads: %d: %s\n", status, strerror(status) ); return 4; } /* deal safely with mutexes during forks */ if ( (status = pthread_atfork( NULL, NULL, atfork_child )) ) { fprintf( stderr, "Error while registering fork handler: %d: %s\n", status, strerror(status) ); return 5; } /* * the big loop */ start = now(); while ( fgets( line, linesize, input ) ) { /* FIXME: unhandled overly long lines */ /* comment */ if ( (s = strchr( line, '#' )) ) *s-- = '\0'; else s = line + strlen(line) - 1; /* chomp */ while ( s > line && isspace(*s) ) *s-- = '\0'; /* skip empty (or meaningless) lines */ if ( strlen(line) == 0 ) continue; else lineno++; /* count initial whitespaces */ if ( ! isspace(line[0]) ) { unsigned flags = 0; char* lfn; char* s; /* new LFN, new section */ if ( section && section->m_magic == T2_SECTION_MAGIC ) { /* add section to work queue */ if ( (status = manage( section, bufsize, streams, retries, initial, backoff )) ) return status; } /* create new section -- deleted inside "engine" */ section = (xfer_p) malloc( sizeof(xfer_t) ); lfn = strtok( line, " \t\r\n\v" ); while ( (s = strtok( NULL, " \t\r\n\v" )) != NULL ) { if ( strcasecmp( s, "optional" ) == 0 ) flags = (flags & 0xFFFC) | XFER_OPTIONAL; if ( strcasecmp( s, "all" ) == 0 ) flags = (flags & 0xFFFC) | XFER_ALL; if ( strcasecmp( s, "any" ) == 0 ) flags = (flags & 0xFFFC) | XFER_ANY; } xfer_init( section, lfn, flags ); } else if ( ! 
isspace(line[1]) ) { /* isa src TFN */ s = line+1; /* match prefix (and replace) */ if ( sizeuri && strncasecmp( s, baseuri, sizeuri ) == 0 ) { memmove( s+sizemnt, s+sizeuri, strlen(s+sizeuri)+1 ); memcpy( s, basemnt, sizemnt ); } xfer_add_src( section, s ); } else { /* isa dst TFN */ s = line+2; /* match prefix (and replace) */ if ( sizeuri && strncasecmp( s, baseuri, sizeuri ) == 0 ) { memmove( s+sizemnt, s+sizeuri, strlen(s+sizeuri)+1 ); memcpy( s, basemnt, sizemnt ); } xfer_add_dst( section, s ); } } /* while */ if ( section && section->m_magic == T2_SECTION_MAGIC ) { /* add section to work queue */ if ( (status=manage( section, bufsize, streams, retries, initial, backoff )) ) return status; } if ( source ) free((void*) source); if ( input != stdin ) fclose(input); /* wait for children to join */ if ( (status = workq_destroy( &global.crew )) ) { fprintf( stderr, "while waiting for threads to exit: %d: %s\n", errno, strerror(errno) ); return 7; } /* this is actually the right place -- wait for threads to conclude */ diff = now() - start; /* post-condition: no more threads, safe to access crew and timers * directly */ if ( global.quiet < 2 ) { printf( "# %lu threads, %lu messages, %lu successes, %lu failures\n", global.crew.m_threads, global.crew.m_request, global.crew.m_success, global.crew.m_failure ); if ( global.quiet < 1 ) { double temp; if ( ! global.crew.m_request ) temp = 0.0; else temp = global.crew.m_timesum / global.crew.m_request; printf( "# %.3f s for %lu requests = %.3f s per request, ", global.crew.m_timesum, global.crew.m_request, temp ); if ( global.crew.m_timesum <= 1E-6 ) temp = 0.0; else temp = global.crew.m_request / global.crew.m_timesum; printf( "%.1f/s spawn rate\n", temp ); if ( ! global.crew.m_request ) temp = 0.0; else temp = global.crew.m_waitsum / global.crew.m_request; printf( "# %.3f s in Q of which %.3f s pure wait; %.3f s Qtime per request\n", global.crew.m_waitsum, global.crew.m_waitsum - global.crew.m_timesum, temp ); printf( "# %.3f s wall time, speed-up ", diff ); if ( diff <= 1E-3 ) puts( "unknown" ); else printf( "of %.1f\n", global.crew.m_timesum /* - diff */ / diff ); } } /* done */ fflush( stdout ); regfree( &global.rfc2396 ); return ( global.crew.m_failure > 0 ? 
42 : 0 );
}

pegasus-wms_4.0.1+dfsg/src/tools/T2/Makefile

#
# Makefile
#
INSTALL = install # use /usr/ucb/install on Solaris
STRIP   = strip
CC      = gcc
LD      = gcc
CXX     = g++
RM      = rm -f
SYSTEM  = $(shell uname -s | tr '[a-z]' '[A-Z]' | tr -d '_ -/')
VERSION = $(shell uname -r)
MARCH   = $(shell uname -m | tr '[A-Z]' '[a-z]')
TESTLIB = /bin/true # overwritten for Linux
MAJOR   = $(firstword $(subst ., ,$(VERSION)))
MINOR   = $(strip $(word 2,$(subst ., ,$(VERSION))))

CFLAGS    += -O
LOADLIBES += -lm -lpthread

ifndef ${prefix}
prefix = $(PEGASUS_HOME)
endif

NROFF = groff -mandoc
TEXT  = -Tlatin1
HTML  = -Thtml

ifeq (SUNOS,${SYSTEM})
ifeq (5,${MAJOR})
# use these for the SUN CC compiler
CC = cc -mt
LD = $(CC)
## SPARCv7
V7FLAGS = -xtarget=generic
V9FLAGS = -xtarget=ultra -xarch=v9
CFLAGS  = -dalign -ftrap=%none -fsimple -xlibmil $(EXTRACFLAGS)
#EXTRACFLAGS = $(V7FLAGS)
CFLAGS := -DSOLARIS $(CFLAGS) -xO4 -D__EXTENSIONS__=1
LOADLIBES += -lnsl -lsocket -lthread
INSTALL = /usr/ucb/install
else
# old Solaris 1 not supported!
endif
# on Solaris use this link string for gcc:
# gcc -Wl,-Bstatic xx.o -lstdc++ -lm -lnsl -lsocket -Wl,-Bdynamic -ldl -o xx
endif

ifeq (IRIX64,${SYSTEM})
# The regular 64bit Irix stuff is just too slow, use n32!
SYSTEM := IRIX
endif

ifeq (AIX,${SYSTEM})
CXX = xlC_r
CC  = xlc_r
endif

ifeq (IRIX,${SYSTEM})
CC = cc -n32 -mips3 -r4000
LD = $(CC)
OPT_NORM = -O3 -IPA -LNO:opt=1
endif

ifeq (LINUX,${SYSTEM})
TESTLIB = ./testlibc
ifeq (ia64,${MARCH})
CFLAGS = -Wall -O2 -ggdb
else
ifeq (x86_64,${MARCH})
CFLAGS = -Wall -O2 -m64 -ggdb
else
CFLAGS = -Wall -O2 -march=i686 -ggdb
endif
endif
LOADLIBES := -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic $(LOADLIBES)
#LDFLAGS += -static
endif

ifeq (DARWIN,${SYSTEM})
TESTLIB = /usr/bin/true
endif

#
# === [3] ======================================================= rules section
# There is no need to change things below this line.
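# Typical use (a sketch; assumes GNU make, and PEGASUS_HOME set so that
# $(prefix) resolves for installation):
#   gmake T2          # on Linux, the testlibc probe runs before linking
#   gmake install     # copies T2 into $(prefix)/bin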
CFLAGS += -D${SYSTEM} -DMAJOR=${MAJOR} -DMINOR=${MINOR}
CFLAGS += -D_REENTRANT # -D_POSIX_C_SOURCE=199506 -D__USE_XOPEN_EXTENDED
GENDOC = T2.html T2.txt T2.ps

%.html : %.1 ; $(NROFF) $(HTML) $< > $@
%.ps   : %.1 ; $(NROFF) $< > $@
%.txt  : %.1 ; $(NROFF) $(TEXT) $< > $@
%.o    : %.c
	$(CC) $(CFLAGS) $< -c -o $@

all : $(TESTLIB) T2 $(GENDOC)

dll.o: dll.c dll.h
item.o: item.c item.h xfer.h dll.h util.h
mypopen.o: mypopen.c mypopen.h
T2.o: T2.c error.h item.h xfer.h dll.h workq.h util.h mypopen.h
testlibc.o: testlibc.c
util.o: util.c util.h mypopen.h
workq.o: workq.c error.h item.h xfer.h dll.h util.h workq.h
xfer.o: xfer.c xfer.h dll.h

OBJS = dll.o xfer.o item.o workq.o mypopen.o util.o T2.o

testlibc: testlibc.o
	$(LD) $(LDFLAGS) $(CFLAGS) $^ -o $@ $(LOADLIBES)

T2: $(TESTLIB) $(OBJS)
	$(TESTLIB)
	$(LD) $(LDFLAGS) $(CFLAGS) $(OBJS) -o $@ $(LOADLIBES)

$(GENDOC): T2.1

install: T2
	$(INSTALL) -m 0755 T2 $(prefix)/bin

install.doc: $(GENDOC)
	$(INSTALL) -m 0644 $(GENDOC) $(prefix)/man

install.man: T2.1
	$(INSTALL) -m 0644 T2.1 $(prefix)/man/man1

install.all: install install.man install.doc

clean:
	$(RM) *.o $(GENDOC) core

distclean: clean
	$(RM) T2 testlibc

pegasus-wms_4.0.1+dfsg/src/tools/T2/dll.h

#ifndef _T2_DLL_H
#define _T2_DLL_H

#include <sys/types.h>

typedef struct dll_item_tag {
  unsigned long        m_magic; /* signal valid element */
  struct dll_item_tag* m_next;  /* next item in list */
  char*                m_data;  /* data item */
  int                  m_flag;  /* some flag data */
} dll_item_t, *dll_item_p;

#define T2_ITEM_MAGIC 0xcd82bd08

extern int dll_item_init( dll_item_p item, const char* data );
extern int dll_item_done( dll_item_p item );

typedef struct dll_tag {
  unsigned long        m_magic; /* magic for valid members */
  struct dll_item_tag* m_head;
  struct dll_item_tag* m_tail;
  size_t               m_count;
} dll_t, *dll_p;

#define T2_DLL_MAGIC 0xbae21bdb

extern int dll_init( dll_p dll );
extern int dll_add( dll_p dll, const char* data );
extern int dll_done( dll_p dll );

#endif
pegasus-wms_4.0.1+dfsg/src/tools/T2/testlibc.c

#ifdef linux
#include <gnu/libc-version.h>
#else
#error "This will only work with Linux"
#endif

#include <stdio.h>

int main( void )
{
  int major, minor, level;
  sscanf( gnu_get_libc_version(), "%d.%d.%d", &major, &minor, &level );

  /* enforce at least 2.2.5 */
  if ( ((major == 2 && minor > 2) ||
        (major == 2 && minor == 2 && level >= 5)) ) {
    puts("Your glibc is fresh enough");
    return 0;
  } else {
    puts("Your glibc is too old, required is a minimum of 2.2.5");
    return 1;
  }
}

pegasus-wms_4.0.1+dfsg/src/tools/T2/xfer.c

#include "xfer.h"
#include <errno.h>
#include <stdlib.h>
#include <string.h>

int xfer_init( xfer_p xfer, const char* lfn, unsigned flags )
{
  int status;
  if ( xfer == NULL ) return EINVAL;
  memset( xfer, 0, sizeof(xfer_t) );
  if ( (xfer->m_lfn = strdup(lfn)) == NULL ) return ENOMEM;
  xfer->m_flags = flags;
  if ( (status=dll_init( &(xfer->m_src) )) ) return status;
  if ( (status=dll_init( &(xfer->m_dst) )) ) return status;
  xfer->m_magic = T2_SECTION_MAGIC;
  return 0;
}

int xfer_done( xfer_p xfer )
{
  if ( xfer == NULL || xfer->m_magic != T2_SECTION_MAGIC ) return EINVAL;
  if ( xfer->m_lfn ) free((void*) xfer->m_lfn);
  dll_done( &(xfer->m_src) );
  dll_done( &(xfer->m_dst) );
  memset( xfer, 0, sizeof(xfer_t) );
  return 0;
}

int xfer_add_src( xfer_p xfer, const char* src )
{
  if ( xfer == NULL || xfer->m_magic != T2_SECTION_MAGIC ) return EINVAL;
  return dll_add( &(xfer->m_src), src );
}

int xfer_add_dst( xfer_p xfer, const char* dst )
{
  if ( xfer == NULL || xfer->m_magic != T2_SECTION_MAGIC ) return EINVAL;
  return dll_add( &(xfer->m_dst), dst );
}
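This is the same API that main() in T2.c uses to build one section per LFN from the fof input. A minimal sketch of direct use (the URLs and function name are hypothetical):

#include <stdio.h>
#include "xfer.h"

int demo_section( void )
{
  xfer_t section;
  dll_item_p i;

  if ( xfer_init( &section, "f.dat", XFER_ANY ) ) return 1;
  xfer_add_src( &section, "gsiftp://src.example.org/data/f.dat" );
  xfer_add_dst( &section, "file:///tmp/f.dat" );

  /* walk the source candidates, as engine() in T2.c does */
  for ( i = section.m_src.m_head; i; i = i->m_next )
    printf( "src candidate: %s\n", i->m_data );

  return xfer_done( &section ); /* releases the LFN and both lists */
}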
pegasus-wms_4.0.1+dfsg/src/tools/T2/doc/
pegasus-wms_4.0.1+dfsg/src/tools/T2/doc/T2.dvi
[Binary DVI build of the T2 manual (TeX output 2004.06.11). Recoverable text: title "Transfer V2 (T2)" set over the T2.eps figure; author Jens-S. Vöckler, dated 06/10/2004; a revision table reading "Jens Vöckler | 20040610 | initial document"; a contents page listing the single section "1 Overview"; and the note that difficult sections, which are not important for the casual user, are marked with a dangerous-bend sign in the margin.]
pegasus-wms_4.0.1+dfsg/src/tools/T2/doc/T2.pdf
[Binary PDF build of the same manual (PDF 1.2, Producer: ESP Ghostscript 7.05): two 612x792 pages, an embedded 300x220 DCT-encoded image, Type1 width tables for Helvetica, Helvetica-Bold, Times-Roman, and Times-Bold, one Type3 font, and the xref/trailer. No further text is recoverable from the compressed page streams.]
pegasus-wms_4.0.1+dfsg/src/tools/T2/doc/T2.eps
%!PS-Adobe-3.0 EPSF-3.0
%%Creator: (ImageMagick)
%%Title: (T2.eps)
%%CreationDate: (Fri Jun 11 14:37:37 2004)
%%BoundingBox: 0 0 300 220
%%DocumentData: Clean7Bit
%%LanguageLevel: 1
%%Pages: 1
%%EndComments
[The remainder of the file is the standard ImageMagick EPS prolog (the DisplayImage procedure with its DirectClass/PseudoClass packet decoders and grayscale fallbacks) followed by the 300x220 hex-encoded pixel data of the T2 figure.]
9747488a45487b44497144494234343d2f2f2f23232117161f171525201d26221f221e1b 2020201e1e1e1b1b1b1818181616161616161616161717171315141414141515151a1617 1f191b251c1f2b20242e23273a2c2c3a2c2c392b2b382a2a382a2a372929362828362828 372d2c352b2a3026252c2221281e1d271d1c261c1b261c1b2c1e1e382a2a433535403232 3426262f2121372929423434473231473231473231473231473231473231473231473231 5f3c36613e386f4c46825f5984615b75524c6a47416946406c4e4e6a4e4d594141412d2e 3a2a2b3c30342d2427140e12030303010101000000000000000000010101030303050505 000200100c093022215036376240416945456a474565443f52353a4e33384a2f34432d30 7751486c453e68433b77524a8a655f87645e6e4b4753302c5c3b4255363c482d323a2628 2f212128201e24201d23221e24201d24201d24201d24201d24201d24201d24201d24201d 2519192d2121362a2a392f2e362e2c302b282d28252c27243a2a2a3323232414142a1f1d 645f5ba8aba4b6c2b89eaea3aaaeadabafaeabafaeacb0afadb1b0aeb2b1aeb2b1afb3b2 adadadadadadadadadadadadadadadadadadadadadadadadabafb8b3b0a1c0af81ccab68 d5a35ada9c51d99741d89636d48c40d8913fde9840e09e3edd9d3bd49536cc8c32c6862f ebbb3bebc15be9ca8ae4d0b5ddd5d2d7dbdcd3e2dbd2e7d8d8cdd3e8d0a4e4bd5eebc15b e9c883c8b594a69f8c88866d2b2620918c86b1aea7c7c7bf6d7067585e54b6beb34d554a a5b5aa9c8c8c905e69652b396a454c7267655d5e58898c85b7b0a8d2cec38583746c6f5c 383c2d919890e0e9e8b1babf67695b909183a7a898c0bfad6d6a59817e6b9a9581c8c3af 312f22a49f8cdedfe1ccd4e7c0b8a3d8b156f1bf4eeec76cd9ca89e6d3abe6cd95cda455 b26d32b2612ccf8838ebb24bd7a63de9ba50d7ab48cda351f2c989ffdd9fefc97ee1be62 e5ba54d69b3dcf852ed79131dfa93dddb44adcb45cdfb36ce9be6fd59059ca7240d98a47 eeba6ff2cb8adeb26bc99438ca761ecb801ad69926e7b74beac471e0c180dac17edcc97c e3e1cae0d291eac761ebae44d88b33d78b37e2a040e0a83be1b85cd6a74fe8b353f0b653 c28044914b41854043844735844041854142874344884445874344854142823e3f803c3d 7942477541456f3e41653b3c5b38365334314d312d48312b632b3873343f893e45934147 8f3f42833b3e783d41723f444532344434353729291e1413130b0916120f1a1915181914 1919191a1a1a1b1b1b1b1b1b1a1a1a1818181515151313130c0e0d0e0e0e101010171314 1e181a271e212f242833282c3c2e2e3b2d2d3a2c2c392b2b382a2a372929362828362828 2d23222c22212b21202a201f2b21202e24233127263329283b2d2d4032324335353e3030 362828332525392b2b3f31314a35344a35344a35344a35344a35344a35344a35344a3534 65423c63403a6c49437b58527c5953714e486b48426e4b457757586c4e4e593f40493536 423233382d31231d1f100b0f020202010101000000000000000000000000030303040404 0106021c18153d2f2e553b3c624041633f3f64413f6746415c3d43583b4051363b4b3236 66433f734e487b56507550486c453e6f473f815951946c6470485167424a59383f482d32 3b27293224242c24222a252226221f26221f26221f26221f26221f26221f26221f26221f 3522242e1e1f2a1c1c2a201f302826312d2a2d2c282728233227253727273f2b2c3e2a2b 34262541403b7d8a80bcd2c5aaaeadaaaeadabafaeabafaeacb0afadb1b0aeb2b1aeb2b1 adadadadadadadadadadadadadadadadadadadadadadadadb0b4bfb8b5a4c6b482d4af6b dca85fdf9f55de9c46dc9a3ad78845d3873cce8632cb892bd1912fda9e3ce5ab4aebb356 f0c459ecc870e4d197dcdbbfd5e1ddcce3e9c6e1e8c3dee5e7ddf5e7d0aedab55af0c660 c8a9668172538e88729f9c7db0aba59f9a94bcb9b29f9f97676a61787e74868e836f776c ddf8e999838588415370213692636da799984f4e4933302b94989baeb3af4d5349666a5b 585c4bbabcafc5c6c08e8d89c0c6bca0a69a898b7e737464a9a693b2ac96b3ab949e947b 1a1b0d8e8976dbdce0c2cde1c0b99fedc160f6c24ceac368decd88f0dbbee4cc8eb78b34 a1562cbd683fdf9d3cebba43fac873f0ca77f0d88ee2d19bd4c197eccc9bf0c274c69129 d18b45d98640e58a41e69345d68f3fc78632d38c34ec9b42d89233cd6e38c2542fc46b31 d3a158e4cb89e8d18be4c56bd47b27d17e32db9146e9ac51e7b43fd6a628ca9626c99333 dad8a5dec99cf0c37ff3b548dd9527d99041e1a358deae4ae2bc67e6bd65f6c55fdfa844 
a66231893c46843a47783522853f418741438943458a44468a4446874143833d3f813b3d 7b42487541456f3e41653b3c5b383651343049322c443029693649783b4b873f4b8c3b42 833336772c306b2d2e6730334531334936383a2c2c1e12120a05020d090613140f141611 1414141717171a1a1a1d1d1d1c1c1c191919141414111111080a090b0b0b0e0e0e171314 201a1c2b222533282c382d313d2f2f3d2f2f3c2e2e3a2c2c392b2b382a2a372929362828 241a19241a19261c1b291f1e2e2423352b2a3a302f3e3433493b3b4436363e30303a2c2c 392b2b3a2c2c3a2c2c3a2c2c4b36354b36354b36354b36354b36354b36354b36354b3635 6b463e67423a6b463e77524a78534b6f4a426c473f724d458361627050515e4143523c3e 483a3a362d301e1a1b0d0b0e030303020202000000000000000000000000020202040404 0c110d2c28254a3c3b5b4142603e3f5a3636593634603f3a5e3f455b3c4254373c4c3136 573c35593c365d3e3963423d6845416e4845724a48754c4a845c5c7b51526e4445643a3b 563432462f2933261d242014191f1f0e150d0f13141e1c31261f2f211a102318062a1b14 1e1a172b27242e2a2725211e231f1c2c28252f2b282a26233228273127262f25242f2524 312726362c2b3a302f3d3332b2b6b5aaaeada9a9a9acaaabb1abadb2a7abafa3a7afa0a5 a3a3a39b9b9b9d9d9dabababb1b1b1adadadafafafb7b7b7b9b4b0beb29ac7ae76cfa954 d8a33ddc9d33de9934e09739be711fc97f2ada933beaa84af6b655f7bc56f5bc53f1bb4f e6bd54e9b94decbb54e7c675e0d5a7d9ddccd7d9d6d8d2d4c9e0dad3dedacfd2b5d6dc9c c5d096c4c9b399958ad3c8accec9c3a19c96918e8753534b4f52497c8278747c718c9489 ddf9e2a99890894a52772e3997696b71675d1d2615535e4d8d8b7ea3a194a9a79a8d8b7e 9a988bb1afa2989689bbb9accbb4a68d87779a978e6754567865678a877eb9b3a3ad9688 535240978883bea4a5c0ad9ec4c397c0cb8fc8cc99e4debadce5b8e7d1a0e0a271c66f44 b85e39cd7a48f1a456ffbc57f3ad31eda737e1b366dacba0e2c991e8a847d8901ac28c20 d37746d17c29de8e39d27e52a7503d994626a4512fa04a39cc7933cb7846ce814bd79848 e1b366e3c8aaddd7d9d7e0e7d6d583dbac52e68729ec8328db8937c28b39c18e33d09730 e4893ae5a75cebc981efce7fe4ae54d7922ddc9c32ebb64cffcb8cecb865dfae51be8c47 874f3673383e7a424577413583443d82433c80433e7d423e7b413f774141764040744042 723d476f3d46683d445d3d404e39383f3430332e282b2b235b2f306a383b7a414782434b 7b3c446e353b6432355f333445353839292c2d1e21291d1f2b22232721211b17160f0b0a 0c10110c10110a0e0f090d0e080c0d080c0d080c0d080c0d0f0b08090502070300120e0b 231f1c302c29302c292b2724302424352b2a3d33324038363a35322f2a27221e1b191512 412426432628472a2c4a2d2f4c2f314d30324c2f314c2f315e36365e36365d37365a3735 5836345437335337335236325936345a37355c39375e3b395e3b395c39375a3735593634 5d3a3e5e3b3f5f3c40613e426340446542466744486845495a4648523f41493638423434 3f33333d333238302e332b29261d2011080b0400000b07080d0b0c0606060507060b0f0e 422829482e2f5135345638365738355a37315d3a34623d3558383b563639533336513134 4f3631533733573a365c3d3a62403e6845436d47466f47477a5051784c4d7446486f4344 66403f5337333c2b232b211714111814130e1d1c1a27233427212d1d1b0f1e1d08272318 1f1b1826221f28242125211e2723202b2724221e1b130f0c312726302625302625302625 322827342a29362c2b382e2d424242908e8fc7c5c6bdb9baaaa4a6b2acaebbb2b5b1a8ab b6b6b6a8a8a8a1a1a1a8a8a8adadadaeaeaeb4b4b4bebebebfb9adc4b699ccb37ad5ad58 dba541dc9c38db9637d99238db9d3ae0a03fe6a645e9a948e9a749e2a042da983ad79237 ebac43ecb042ecb94ce8c76ae0d595d9dcc1d4dcdfd3daedcbe3e3d1dce2cbcec7d8ddbf d0dac1c4c9c3949087c3b696c1bcb6827d77a29f9863635b020500393f35838b809fa79c 7c8776b9a69fa47678653437937c76827e72332f2382756c79776a79776ad1cfc2d9d7ca 6e6c5f8785789e9c8f615f525e57444c584453615225241f13120d010f00283420756e5b a0a297d1cac0e0d3cbbcb5ada8ada7c4cbc4dfdcd3e1d5c9e2d38edcb66fcb8841bb6a28 c17331da9748efb14cf4b840dda536dba744dbb165e4c187f1c584f3b75fe1aa45cfa543 ea9a43dc8e2ad18225c16d2fa85133a14935a2493b9d433bb25b30ba6a39c7893cd5af4c 
deca81d8d6c1c9d7d7bcd7cecfdbc3d2c9a2dab473dfa74adb992bd28b21cf812cd4803a e5883be5a55beccb7ef3d482eab95dd99834d9962ee2a942f1b96ee5af57d19d47ab7539 7f463572363e773f427842367e3f387e3f387b3e39793e3a763c3a723c3c713b3b6f3b3d 66373f63383f5f383d56383a4c34343f302d362b27302923592d2e643336723b407a3f45 793e44713a3f68373a6236374a37394835374131323d2f2f372d2c322a282c2724292421 2929292222221a1a1a1313131212121717171f1f1f2424241d19161c18151b17141d1916 201c19221e1b221e1b221e1b312726352b2a3b31303c343239312f2e2926241f1c1d1815 482a2c4b2d2f4e30325133355335375436385335375234366036375f37375e38375b3836 5937355637345538345337335936345b38365d3a385e3b395e3b395d3a385b3836593634 5c3c3f5c3c3f5e3e41604043624245634346654548654548614d4f544143443133382a2a 352929372d2c372f2d362e2c4035392a2124150c0f080405030102010101030504080a09 472d2e4f35365a3e3d5b3d3b58393658352f5c3933643f375d3d405b3b3e59393c563639 462e2a472f2b4c332f5236335739375f3d3c62403f6440406c42437143457746497c4b4e 754b4c6442404b342e38271f44303936251d26180f21141b221b222224171d2611192519 1e1a171f1b181e1a17201c19292522312d2a2a26231d19162d25232e2624302826312927 3129273028262f27252e2624251c1f4c43467f797baba5a7c0bcbdc0bebfb8b6b7b0b0b0 b6b6b6a9a9a9a2a2a2a6a6a6acacacafafafb3b3b3b9b9b9c0baa2c7b992d1b878dbb35e dfaa4cde9f40d8953ed58e3ce7b345e6b044e4ab42e1a340dd983dd88e39d28533ce7f30 ef9d31eda93ae8ba4be5c860e1d07edad7aad3dddccde1fccee7eccedce9c7cbd7d8dde1 d6dfe4b9bdc886817bad9e7d79746e6d6862dddad3cbcbc345483f3e443a60685d575f54 413f3268574f6c504c2b1811333124595a4c5d4c449c7675a6a497706e6189877a89877a 8785785a584b6b695c29271a080500464f3c50544643343140312e383c2e0811003b3827 716f70818177b1b3a5cdd4cdcbd4d9dfe3e6e6ddccc4ad8be1b35dd59b47c47b2cbd7123 c98834dfaa4ae9bb4ce6ba3feab54dddb158d3a454da9947e7a14ceeb460f0c167eec35d e9b444e7ac48d59237c27628b66331a94c3da2403fa6453c9c3538a75034bc803ad1b262 ddd4addee1ead5e1efcedfd5ced4eecfdcefcfdad4d0ca96d7b553dfa02cdc8d26d17f2b d6782ed9974be5c271f3d881f1c668e2a342db9734de9e3cdca646dea74cbf873c91562c 793c3774364174393d7a41387b3c357b3c35773a35753a367238366d37376c36366a3638 5632365532365333364f3234492f30422d2c3d29283a2826572d2e5d2f316533366d393d 733f437240436a3c3e623839473133422e2f3d2b2b3a2a2a3b2d2c382d2b3329272f2523 2f2629281f221f1619180f12170e111d1417251c1f2b22252925222d2926312d2a302c29 2c28252b27242e2a27322e2b332928352b2a382e2d382e2d362c2b3026252a201f251b1a 5331325533345836375b393a5d3b3c5d3b3c5c3a3b5b393a6438396238396038385f3938 5c39375a38365738355639355a37355b38365d3a385f3c3a5f3c3a5d3a385b38365a3735 5b3d3f5c3e405d3f415e40426042446143456345476345475e4a4c513e40422f31362828 3226263228273129273028262e22262e2327281f221f191b1b17181715160d0d0d010101 472d2e553b3c6549486749475e3f3c5a37315e3b3568433b6343466141445e3e415c3c3f 3b29253e2b27412c29472f2d4c3231513534553737583839633b3b693d3e7342457c4b4e 7c4e506f4948593c3848312b795a586f533b563d1e341e111c0e0d151509172413192a24 29252226221f211d1a1f1b1824201d2b27242f2b282e2a272823202a25222d28252e2926 2e29262b262327221f25201d433439261a1e2a1f23645e60a2a0a1bababab5b9b8b1b5b4 a8a8a8a4a4a4a3a3a3a8a8a8acacacacacacaaaaaaa9a9a9bcb490c3b584d2b675ddb462 e4ae54e1a44bdd9a49d89547d79e35d69b35d59734d59534d79537dc973ce19a42e69c47 f09f2cebad40e6bf5ae4c867e4ca73dfcd8fd7d6bad0deded3e6ead3dee2cbd0d4d5dce2 ccd9e1969fa4736f649f90737e7973928d87e0ddd6e2e2daa9aca3868c826a7267697166 84857760584d61544b544e42595d4e848577a6938cd5afae8f8d80a19f92656356706e61 a2a093615f522b291c222013170d017c72688c6a698f4d5773313b83616042382e746a5e 62575d272125565855bbc3b4d4dbbad7d197e2cc77d4b14de09f45dc9545d38942cc863e 
d19242dda84ce8b856ecbe5cffbd52eabc64d8a854d88d32de903be3ad65ecbe68f7ba45 cebd63eacc8ceabe81d9a04fcb8540b35c40a74b36bc6135b85341bc653ac58643cfab6f d9caade3dad5ebddd0f1ddb8d1cebdd4dee0cee3f6c7d8e0d0cfb1e1c480dfac53cf9031 c0651cc48333d5b058eccf71f3c869e7ac4edd9b3de09b3ed49f2fdca447b4783a83432a 7a383a7839447337397b403a7e3f387d3e377a3d38773c38743a386f39396d37376a3638 4c32334c32334c32334b31324c2f314b2e304a2d2f492c2e5e34355a30315c3031613536 693d3e6b3f40653b3c5f35364b3132402627371f1f39242342302e4435323a2c2930221f 3628283426263224243022222f2121302222312323322424221e1b25211e272320282421 27232026221f26221f272320332b29332b29352b2a362a2a372929352727332324322223 5a33345c35365f3839613a3b623b3c623b3c613a3b60393a67393b663a3b643a3b623a3a 603a395d3a385c3a385b39375b38365c39375e3b39603d3b603d3b5e3b395c39375b3836 5a3d3f5a3d3f5b3e405c3f415d40425e41435f42445f42445541434c393b412e30382a2a 3428283026252a2220251d1b28191e37282d3c3034342b2e2d27292a2627171516010000 3c2223543a3b6e52517557556b4c4964413b66433d6f4a426444476242456040435d3d40 3628253729263a2b283e2c2a422d2c462e2e492f304a30315b39375f3938663c3d704244 75494a6e48475f423e533a357d56479d7b4da7844c7c5a3742261821130619190d17201f 29252229252228242124201d1d19161814111c1815221e1b24201d26221f2824212a2623 292522272320231f1c211d1a34252a3a2e3233282c2923254240417b7b7ba5a9a8b3b7b6 b0b0b0b1b1b1b0b0b0abababa7a7a7a6a6a6a6a6a6a5a5a5b5a97fbead79cdb06edcb163 e4b05ae6aa54e2a353dfa051d48a35d08931cd882dce8c2ed59534e0a23febb04af2b950 ecaf3ae9b953e7c56ee6c775e8c372e6c274dec988d8d19bd9e0d0dadcc4d2d5b8d2dbc8 becec3707c70676455a3907fb2ada7afaaa4bab7b0bfbfb7c8cbc2bac0b6a1a99eb9c1b6 bfccbab2b4a6a29a8fa29a8fbcb8ac9b978b63594f7a67608e8c7fbbb9ac6e6c5fa4a295 9391849b998c403e311a180b8f8d81afa29a966266a24454842636bd898d8d8078aaa89c 6c595526182750485dbcb8addacf8dd7bd4ff2c548ffcd52e9a842df9b48d79050d7904e de9a47e6a746eab356eebc65f4b554e8c476e1c681e5ba76e5ba87dfc499debc72e3a52e c6d1a7e7e2ceedd6b7e3bc77dda654c98347c07438d78c39dea755dda955daae63d6bc83 d3c998d8cc92e2c577ecbb5fd0d593d6dcb8d2dedec9dbefcfe0eae0dbc7e2bb84d79547 ba6a21be802dd0a346e5bf5ee9bb5ddfa34bd7953ed9983edba430dca04ab07044854136 833e43813f437939397f423f86474085463f81443f7d423e7a403e743e3e723c3c6f3b3d 4a35344a35344c34344d33344f3234513335533336543437643c3c5d3736573130553230 5b3836603a395d37365a32325333345133334f33324d33324d35334b3633483531463430 4b39394b39394a38384937374836364735354735354735352e2a272c28252a26232c2825 2e2a272c282525211e1f1b182e2926312927332928382a2a3a2a2b3e2a2c402a2d412b2e 603435613536643839663a3b663a3b663a3b6438396337386c3b3e6c3b3e693b3d653b3c 633b3b623c3b5f3c3a5f3c3a5b38365d3a385f3c3a603d3b603d3b5f3c3a5d3a385b3836 563c3d563c3d563c3d573d3e583e3f583e3f583e3f593f405743454b383a3c292b302222 2c20202c22212b23212a222049383e5140464b3c4135292d291e222620221d191a0f0b0c 3016174d33347155547f615f7a5b58724f49704d477651496343466141445e3e415c3c3f 372d2b372d2b382d2b3a2c2b3b2b2b3d2b2b3e2a2b3f292b503933523632573533603a39 66403f6442405b3f3b523b35714737a27b44c69c50bb8c549d6f4e79573c49351c1c0f00 130f0c14100d1b1714221e1b201c1917131015110e1a16131e1f1a1f201b20211c21221d 22231e21221d20211c1f201b2b2225362d303832342923251c1819302e2f6a68699e9e9e b3b3b3bcbcbcbcbcbcaeaeaea1a1a1a2a2a2aaaaaaafafafb2a378bba573c8a969d7ab60 e1ad5ae5ac55e4ab54e2a854e29344dd903ed78d38d38e33d39532dba239e5af43ebb749 eac157e8c464eac773ebc678ebc270ebbf66e6c262e4c362e1d4a0e0d593d7d193d3d7b2 bfccba606d596d6a59b3a099c3beb8aea9a3c0bdb6d1d1c9bbbeb5a0a69c959d929ba398 627360757e6da5a195c3b6adbfb5aba9a599787a6c4c50418f8d807c7a6d787669908e81 
bcbaad7f7d705250432d2b1e4755486c72646b4e4880393f863f45dbbeb87c8274273528 6355486f63719e91adcfbeb7debf7ce0ae3fedab3dffb457eeb03fd4943dc27c3ece803e e4973bf0a93dedb44de6b860eac47de3cf90dcd9a4d9dcbdd9ded8d9dcd1e1ce96e7bc53 dbe5cde2dfcee0cbacdebb7be3b55de0ad52ddaa4fe4b354d2cd8bd9cc88e0cc9addceb7 d6d2b7d4cb90d9b75ee0a442ccd6cbd1d8d1d2d8d8cfdcd3d2dfc3dfd69de7b964e79c37 cb8c3fce953ed9a542e4ae4cdda44ace913eca8e38d0993fe5a83ed19149ac674a924b47 8e484689463e82413b8646468c4d468b4c458649448247437e4442784242764040723e40 4d35354c34344c32334c32334f323452353755373957393b653f3e5d3a385233304c302c 4d312d5334315835335b3534512d2d5c3a396543425a3c3a492d2a402723472f2b533b37 3e2f2c3e2f2c3d2e2b3d2e2b3c2d2a3b2c293a2b283a2b282925222c2825302c29322e2b 322e2b2f2b282b27242925222823202b26233228273b2d2d432f314a3135503338523339 6737356838366b3b396c3c3a6c3c3a6b3b396939376737356f3d406f3d406d3c3f6b3d3f 673d3e653d3d633d3c633d3c5c39375e3b39603d3b613e3c613e3c603d3b5e3b395c3937 523a3a523a3a523a3a523a3a523a3a523a3a523a3a523a3a5d494b4b383a352224241616 221616291f1e312927362e2c4633394e3b414d3c42433439372b2f32272b2c2326241e20 3117184b31326d51507f615f7e5f5c7b58527956507e59516444476242456040435d3d40 39343139343139312f382e2d382a2a37272838252737242642352c44332b49322c533632 593c38583c384c383144332b5e3634956b39c29240ce924cd29360ce9665a479466f4d1f 302c291c1815110d0a191512221e1b201c191b17141b17141c1d181b1c171b1c171b1c17 1d1e191f201b21221d23241f3131311c1a1b1d1b1c332f302f292b1c1618332a2d61585b 7f7f7f9d9d9db4b4b4b0b0b0a5a5a5a6a6a6aeaeaeb4b4b4b6a47cbda575c8a56bd2a65f dca757e0a851e1aa4fe1aa4deba64be4a244dc9a3cd79537d79736dd9d3ce6a645ecae4b e7c971eac76befc466f0c567eec768ecc663ecc256ecbd4be7c86be1c65fd6c374dad4b4 d3d9d56d776e838172c8b5b1d1ccc6bbb6b0d2cfc8d4d4cc888b824d53494e564b555d52 6d7464747869c5c6b8e7e3d7b5b3a6c7c9bbd4dfce8a99863432253f3d30848275646255 a9a79a5a584b5a584b5d5b4e000300394738707362937c6e937c6eadb09f4b594a030a02 94958fcacdd4c5c4cca69480ba914fd29332d7912feaa34bd79630be7833af6039bf6437 d77c2de2982fe5b350e6c775ecd59fe7d08cdcd398cfdecbcae0ebd7d8d2e7d198f1d06f e5cf9ee2c58be9c080ecba73e6b45be5be55e8cb6de2ca84c2caccd0cbabdfcda5e3d0c2 dcd1cbdacba4e1b869e9a445d0cacacecfd3d0d6d4d8d9b9dbcd84e0ba55e7b145f1b24b deaf61dfac51e7a944e7a340d7913bc58637c79340d7aa4fe6a549bd7841a35a4ba05655 9650448e4b388e4c408e4c4e8d4e478b4c45874a458247437e4442774141753f3f713d3f 4d30324c2f314a2d2f492c2e482e2f4b31324f35365137385f39385836344d342f443029 422e274a312c593735643e3d663e3e653f3e623f3d5c3a385334314d302c4a2e2a49302b 3429233f342e4e433d584d47574c464b403a3b302a30251f322e2b403c394a4643403c39 282421181411181411201c19211d1a27221f3329284030314c363956393e5b3a415e3b42 713e3d723f3e74414075424175424173403f713e3d6f3c3b723e42723e42703e416e3d40 6a3e3f683e3f663e3e663e3e5d3a385e3b39603d3b623f3d623f3d603d3b5e3b395d3a38 4c37364c37364c37364c37364b36354b36354b36354b3635533f414532343623252a1c1c 2a1e1e2f2524322a28342c2a3a252c3f2a3149363c4f3e44493a3f3b3034382d313c3336 412728513738674b4a755755795a577c59537e5b55845f576a4a4d68484b654548634346 3d39383b37363a3434382f30362a2c3526293323263222253531253a30264231294d3630 533a3550393343322a372a21421929926840d09b4bce8b3dc3793ad6934fedb56bf0be77 7b77744945421c1815130f0c1d19161e1a17171310120e0b191b16181a15161813161813 191b161d1f1a22241f2527221c201f1d21202121212523242c2628302529291d21221318 3838386d6d6da2a2a2b3b3b3aeaeaeacacacaeaeaeafafafbca883c2a77cc9a46dd0a35f d7a252daa44adca747dda946e2ac40dfa63dd99e38d99938dc9a3ce69f47f1a752f9ac5a e7c981edc46cf4c156f4c656efcd61edcd66efc45bf2ba4bebc246dfb93ed5b664e1d1c2 
e9e8fa7f878a979589d5c3bfc5c0bac7c2bcd4d1cac7c7bf73766d1c2218131b102e362b 7b7368939184bdc1b2cdd6c5bfc8b7c4cfbed4e1cfc3d2bf29271a4f4d40424033262417 151306bbb9acaaa89b7775685a4c4c18201317260f656a537f846d4c5b44081003352727 9ba9b4d9e9e9b1bcab86794dbf954be8a948e4a534fbc34ab96d22ad5b36ab4e49b75041 c25d25c97b26dcaf5eefdd9de5cb98ecc270ebca7bdfe0c1d8e2e4ddc8b3e1b973dfbc56 d8a44ee2ac42ffc45fffc674e6b056daba4de3d37cddd1abd7d3eae2d3a8ecd07ce9cb8f e0c8a6dcc289e5b44bf0a621dbc464cecd8ccfd9b4ddd6aae3b971df9e4ae4a967f1c59e e5c06fe8b75aecab43e99b39d68835c78539d0a04ee6c166e3a04fac63369c4f47a75d5c 9a5643904f339654469550558b4c458a4b448548438045417c4240753f3f723c3c6f3b3d 4c2c2f4a2a2d47292b44272943292a452d2d4732314934335731305233304a332d402f27 3d2c2447302a5b3c396d4746916866764e4c5f3936613e3a73524d7556515e413b432821 514a426a635b8e877fa7a098a69f978c857d665f574c453d84807d9995929f9b987e7a77 46423f1e1a171a16132824211d191625201d332928433334533a3e5d3e44634047643f47 7943417a44427c46447d47457d47457b454378424077413f733f43733f43713f426f3e41 6c3e406b3f40683e3f673f3f5d3a385f3c3a613e3c623f3d623f3d613e3c5f3c3a5d3a38 4935344935344834334834334733324733324733324632313f2b2d3c292b3c292b3c2e2e 3d3131382e2d2d2523251d1b5841494c373e46333945343a3b2c312f2327382d314c4145 513738583e3f6347466b4d4b71524f795650805d5787625a6f4f526d4d506b4b4e68484b 472d2e452b2c452d2d4833324a36354432303627242a1c1925201d231e1b241f1c2c2422 362c2b3c30303a2e2e372b2b4f3536895d42a96842a04f3e994b3fab663dc7883bde9d43 ffb24df8b461c795626f503b1e100d0300020d0e102123202e21102e2118251b1a181118 120f16181a171c24151a26101f1f1f1f1f1f1e1e1e1d1d1d1e1e1e212121242424272727 22171b1f14184e45489e989abdb9baa4a4a49f9f9fb8bab9b8bfa0bbbc9abfb88ac5b277 ccac63d2a64fd6a141d9a039daab41dfaa42e6a53fe59e32e09d2ce1a73be9ba60f1ca7f f2c477f2c572f2c769f2c864f2c862f2c866f2c66df2c572e9c299d0b566d9ca79ebddc0 d2c6d0b4aeaebab8a3cac9adc8d2d4bdc7c9cfd9dbe1ebed8f999b6b75778a94967a8486 8586818c8d88747570aeafaaa4a5a0babbb6f6f7f25455502e2c1fd5d3c6242215565447 cfcdc0bcbaaddcdacda9a79a8c7c6f978e7f3c3928070a007b7e6b444130655c4d5a4a3d 6c6552aba5a9c0b5bda28a66d4a943f8bd3dffc564b56e368c3c45ad4e38cd7a34e2ad47 e1af4ece8842e29b67ffe4b1f7cbc2ffc67ffdc153edc774e4dbb0e2ddb5deb16ed37e24 d19831f2bd57f2c25ce0b54fe3b651e2b14be1aa45efb44edbd48edbd368dbd682d8dbca dad2bbdfb85be09836db835bdf7f29dbae5bd4d8b3dadbdfe4c79fecbd51e3ca78d4d7dc fcda9bdebc75e8c373f1c870ddaf51eab557fabf65dfa14aa8644f9651428c463c924b47 964f4b904a408e493a94503b95493c93463c8e4439894039813c357b383276353174332f 62353a552a31542b335d3840583740462b343d2630422e37422c2e503b3a493431301c15 311d12604d3e9a8776baa894b49175ae8c7164462e462d198673649e908590867daba49e c8b6a2b09e8aa4927ea4927e96847085735f968470baa894c8b4b59f8e878b7f6faea691 b0ae975c5b4716180b20231c2a1f1b2e1f1c3724204c302d63403e7249457a4a48784743 7b4a457a49447a4944794843784742774641774641764540724542724542734643734643 7447447548457548457548456335386c3e4176484b784a4d7345486c3e4167393c66383b 3f2a25402b26412c27422d28432e29422d28412c27402b26231f132b2219392c264e3938 6344496e49517044516e404d483235513b3e4e383b3d272a341e213d272a4933364d373a 4e3741523b4356404356404258433e614d46715d527e6b5d7e5d547c5b5279584f76554c 56393b5238394f35364b363548343343312f3b2c293627242a25222a222029211f2d2523 342a29372d2c3329282f23234c32358e6249a76542994739984942a55f3db87735d59244 dc953bedaa59eeb36fcd9f6d9978575f4e3c2624180004001a181d0d0c0a111105272818 2f312320211c13121a1310211c1c1c1e1e1e2020202020201e1e1e1f1f1f222222262626 3b303431262a2920233e383a787475aeaeaebbbbbba9abaab6bba7b9b99fbeb691c6b27d 
cead68d5a853daa545dda43df1b94ae8a94adb9945d1933ed1a043d5b75ddbcf87dedda7 f0c662f0c662f0c660f0c660f0c664f0c569f0c36ef0c372fad28cf8da82ddcd809d967a 736e74918f90c6c4b7d5d1c5b6c0c2cbd5d7bdc7c9bdc7c9afb9bbacb6b8d3dddfeaf4f6 c8c9c4cacbc687888370716c9c9d98dedfdabbbcb70b0c074d4b3eb0aea1242215757366 bcbaad9c9a8ddfddd0cbc9bc837568958c7db2af9e70715f4c4d3b2623125f56476f6154 746d5aaba5a7cabfc3c6ad85eac05cedb135e8a4439b5519904130c66a45df8a49d2943f c2873bb76a34ba6d41cc9264be8661ce8b44dc923bdda25cdfbc82dfc27ed89f52cb7027 cd7f3de79e4fe8a844e3a735efb140f2ac4aeb994dee9657e1c572e2bf65e1c37ae1cf9f e3c98ce4ad49dd9028d58234d38930d1b466d3dab9dbdbdbe7c69bebbb4ce1c15cd1c9a2 efcd85dfbb6fe9c16cedc168dcab4fdfa750e5a855d29344a35f4a934e3f8b453b904945 934c488f493f8f4a3b94503b94483b93463c8f453a8c433c87423b84413b803f3b7f3e3a 6d3b3a6c3e3e653d3d563433482c2b452b2c492f324c32355d484745302d301b16402c23 776358aa9788b29f8ea2907c9f8672a38c7a78614f5f4a39867364a18e7f9f8d7fac9a8c ac9d8aafa08daa9b889c8d7a91826f91826f9687749889766c5a5a6b5c557266588b8471 a09b8782816f4545391d1e182d221e31221f3a27234d312e623f3d714844784846774642 7b4a457a49447a4944794843784742774641774641764540734643734643734643724542 7245427245427245427245426436396c3e4175474a77494c7345486d3f426b3d406c3e41 593d3a593d3a583c39583c39593d3a5a3e3b5b3f3c5c403d3d30273c2e25402d274a3230 583b3f64414867414c68404b5d474a5b4548523c3f4933364f393c5a44475741444a3437 4b343c50393f553f425843425b4643645049715d547b675c7d5f577b5d55785a5275574f 6244465f4244593f40513939483332432f2e43312f453331352b2a3228272f2524302625 322a2830282628201e221a18492e33956851a56341903e339747469a523ca15d2cc68045 e5a254d89441d18e37e1a34ef7c57ce0bf8c8b7c5d39332300020f181b2222241f181b10 1f211433322e2f2a3116101e1818181e1e1e2424242323231e1e1e1c1c1c202020242424 22171b372c302e2528181214383435878787bfbfbfc6c8c7b4b6b1b7b5a8beb499c7b285 d1af6fdaad5ae1ad4ae4ab42f2b445e29f48d28b49cd8d46d3a44cd9c167d8d38fd4d9b0 eec84beec750eec657eec460eec365eec269eec26beec26bf6c964f7d677d7c8878f8d78 666c6c929894cbccc4d5cfcfe8f0f2dae2e4dce4e6edf5f7e9f1f3e9f1f3e1e9eb8d9597 2b2c274f504b9fa09ba2a39ea5a6a1b2b3ae60615c1b1c1789877a767467110f02949285 b0aea1838174cac8bbb0aea1998d7fa19889cecbbab7b6a4a8a7959e9b8a6f66577f7365 908d7cb4afaccabcb9d3bb8decc260dda52cdc9835ac6221b1612ccf743bcf7738b96f32 ae6435a54f2e9a45269b562fb16e27c2762ad17d37d58548db984adfa440d79235ca7534 d08240df9649e19f3fe5a53bf5b048f6a94dea8f48e48146ebba51ebae5beab46eeac577 edc266eaa548df9034d28d32c99f55cbc287d1ddc5dfd8d0eac493eaba4ce0b942d3ba60 dfb966e1b962e7bb62e5b65ad9a34bce9343c78740bd7c3a9b5742904b3c8943398c4541 8f48448e483e904b3c94503b93473a93463c91473c9047408c47408b4842894844894844 6f3430724039653d354d2f25482f28563d385c3d3b53313055413a311d164c382fa59186 cab7a9a491828f7c6ba49180918378a7978a928275756355867364a38e7da99280a7907e 9f9886827b69655e4c625b497a73618c85737e7765665f4d8b7d7a90837d80766c686151 756f5f949084726f6625221b3126223627243f2c28503431623f3d6e4541754543754440 7b4a457a49447a4944794843784742774641774641764540744744734643724542714441 7043406e413e6d403d6d403d66383a6d3f4174464876484a734547704244724446744648 6f4c4a6d4a486b48466946446a47456d4a48714e4c74514f6a514a604740543b36523633 56393b5e3f4463444a65464c634d50665053624c4f5d474a624c4f6953565f494c4e383b 4832354e383b5640425c4746614c4967524d705c5576625b7c5f577a5d55775a5274574f 5f3f42624245604244573d3e4c34344631304834334d39384032323a2e2e362a2a342a29 3129272924211e191614100d482e319c7057a563418b392e96454b90463d8c4527b56b48 db8e48dc9140d69031d3922edca445e6b86bdfba83cfaf888d857a48403d160d1219111c 
27212b201f1d1f1f132a2d181717171e1e1e2525252424241d1d1d1a1a1a1d1d1d222222 302529382d31473e41615b5d8a8687abababb2b2b2a8aaa9b2b3b7b7b3b0beb39fc9b38a d5b374e0b360e8b451ecb348d99f34d89441d98f48df9743e6ac40e9be55e4c680dec6a4 edc846edc64dedc458edc361edc266edc264edc35fedc45bf1c258d4b160cebd92cfd0c2 b5c1b7a2aea0a8aba0ada9a89197979aa0a0a5abab969c9c848a8a7f85858e9494484e4e 494a451d1e19a6a7a2b3b4af70716c5455500809041d1e19aaa89b4543360c0a00a5a396 9c9a8d6b695c9b998c6e6c5f5851418881719e9888908a7a918b7bcec8b8999282918a7a b9b7abc6bfb9bdada0c3a775d3ad4cdaa531f2b14de49a4fde9139c97628ad571aac582c b66144a84d389c4228aa5737dc9641e69c45e59647de8742dc8633e09226df9830d69143 d79e33e0a944e5b052e7b159eab15ae8a74de19738dd8d2af1c155efb65feab873eac57e ecc26eeead59e5a55ddaac70cbbb99cdd2bcd4dfd1e2d3b6ebc385ebbe61e5ba51e0b44b d6a84ae2b456e6b556e0ab4fd49a48be7f38ad6b2fac6633934f3a8e493a89433989423e 8b44408d473d914c3d94503b94483b94473d91473c9047408c47408a4741884743874642 7a383973393760352f50322a5439325d403c5834344c22264f3c2e7d6a5ca18e809f8c7e 9380719784759d8a7b9c89788e7c6eb5a395a290827c6a5c8a786a968476806e60715f51 6d6b5c605e4f656354777566787667716f60898778b1afa0f0e7e2c6bdb6847b74423930 3a3329726b637871693a332d3429253a2b2844312d533734623f3d6c433f744442754440 7b4a457a49447a4944794843784742774641774641764540754845744744724542704340 6e413e6c3f3c6a3d3a693c396a3c3c6f41417547477648487446467446467749497c4e4e 774e4c744b497047456d44426f4644734a487a514f7e5553926d6783605a704f4a624340 5f434060464762494c634a4d513b3e644e51705a5d6953565f494c5d474a594346523c3f 4b3635503b3a594443614c4b66514e6b56536e5956705b587a5e5a785c58745854715551 59363a5f3c406141445d3f4155383a4d35354c37364f3a394835374232333d2f2f382e2d 332b292723201817130c0d084d3430a17656aa69418e3c2e95444b8a3e3e813829a75a4a cd7337db8842dc9543cb9032c48d30d09843e3a75eeeae6ed6b683cfaf86977b63452f24 1606061a12101d1d151113061a1a1a1f1f1f2222222121211c1c1c1a1a1a1c1c1c202020 1b101452474ba0979ad0caccccc8c9b0b0b0abababb8bab9b3b4b8b8b4b1c0b5a1ccb68d d9b778e5b865edb956f2b94ed89e3ae1a245eca74cf0ad45ecb541e9be5fe7c89ae8cfcb edc651edc556edc35dedc263edc263edc35dedc556edc651f1c974be9e6db9a898d0d0c8 b2bdad7e8a76747b6986867a545955818682585d592c312d7479757378748388848d928e 76777240413ca7a8a392938e3f403b21221d0001004f504b918f82201e11131104adab9e b5b3a6adab9ecfcdc0acaa9d8a8474363020302919b2ab9b8e8777787161979181d5cfbf c9cac2d3ccc2baa890c2a56bcda949e8b647ffc561ffb561f0a63ddd933ac27233bc643c c4694cb45d41aa5335bb6041e2a054edad4ceda946e1903fd98235de8e2fe1a038deaa47 d9a12adea939eab651ebb85ce1aa4fdca03ee5a237eda634f1ca87eec881e9cb95e2cea9 e2c58be7b85ce8be6ce5cca3d9d4d8d7dfeadbddd0e5cb92eac378eac889ebc584efb961 d6a03ce2ac48e4ac4bdca145ce8f3fb67533a45e2ba15a308f4b368f4a3b8c463c88413d 87403c8c463c914c3d94503b964a3d95483e91473c8e453e88433c85423c81403c803f3b 8c474a7c3e3f6d3a39633a385531314b272952292f61343b83715da5937faf9c8b9a8776 9a8776af9c8da49182816e5f957a65baa18d937e6d685648887b729289826e6965605c59 87897eb5b7ace0e2d7c6c8bd6c6e63393b30787a6fdcded3cdc6be8a837d4c453f28201d 241a18473e395a514c443b34352a263e2f2c493632573b38623f3d6b423e744442774642 7b4a457a49447a4944794843784742774641774641764540754845744744724542704340 6e413e6c3f3c6a3d3a693c396d403b734641784b46794c47774a45774a457b4e497f524d 7b504a784d47754a44734842754a447a4f49815650855a549d726c8e655f7c55506b4a45 6447436148445e46445b4645432d30584245654f52604a4d584245564043513b3e4a3437 57423f5a45425f4a4766514e6b56556c57566a5554685352735a567158546d54506a514d 5f3b3f623e426441456141445b3d3f55383a5137385038384c383a4834364232333d3131 
35302d2827231719140c0e09563f31a3794fb273409948339545488a3e408237319e4f4b b75b32bc6937bc763abe8239cc9141db9a48d78b3dc9762adca153daa255ddaf62cfa968 8f7344402a131d0a0c2315262323232020201c1c1c1a1a1a1a1a1a1c1c1c1d1d1d1d1d1d 32272b3d3236564d507e787aaca8a9c7c7c7c5c5c5b5b7b6b6b8b3bab8abc3b99ecfba8d dcba7ae8bb68f0bc59f5bc53f0b952f6bd54f9bf54f1bc52e3bc5ddcc788e0dbc8e6e9f8 eec460eec460eec364eec364eec462eec45eeec559eec657fee7b3e0cab5c5b5b6a49f99 7f8271828678a7aa9fc0c3b29fa09b8a8b866f706b696a65b9bab5babbb6d4d5d0b7b8b3 6465607f807bcacbc6d4d5d097989331322d1b1c17a1a29dcdcbbe514f422220138b897c b9b7aac2c0b39b998c777568a4a3918582710e05003c3022574b3d736a5baeab9acbcab8 c5cac4d0c8bbbba786dcbe7ee5c264f5c65cfcbd56f4a549e39c34fdbd63f4b067d6814a c9723dc27541b66538b15331b57538cd8e31db9830d68937d07f3dd78c3bdfa03fe0ab45 d4872fd88f31eaa943f1b74be3a93de1a23bf3ae4fffb65deac9a0edd1a2e9dcbcded8c8 d7c18fd9ae48e1b554e6cc93e7dfeae3e1f6e4d5c0eac26deac574e4d6bbe9d5bcf1c57e e2a441e1a340e2a342db9b41c88537b87234ac6434a157308e4a35934e3f924c4288413d 853e3a8b453b914c3d924e39974b3e96493f91473c8e453e88433c84413b803f3b7f3e3a 82433c7f423d8247437a434061302c562926754c4aa27c7baf9e8495836b9b8971c5b39d d9c6b5bdaa9b978475877466ac907aa88d786a5544433326736a63aca8a5c0c0c0cccdcf f1f0ebf2f1ecd3d2cd8988833f3e39262520403f3a61605b5350472b2821201b18382f32 43383c3e32323d2f2c3d302a3429253f302d4c3935593d3a623f3d6b423e7646447b4a46 7b4a457a49447a4944794843784742774641774641764540744744734643724542714441 7043406e413e6d403d6d403d70433d774a447e514b7f524c7c4f497a4d477b4e487e514b 7f574f7e564e7c544c7c544c7e564e835b538860588b635b956a61895e577750496a4741 65443f6245415f43405a403f4b3538513b3e533d40543e415c4649634d50584245452f32 69554e68544d68534e6b56536d58576b5557654f52604a4d6a525068504e654d4b624a48 795257734c516a464a6441455f3f425a3d3f56393b5137384f363a4a3437463335413535 3a35322c2d281a1f190d140d614b33a47d46bc7e41a657399545448c40428a3e3e99494c 813225a0543cbb714abf763fbf7632c97c34d5853cdc8840ca7f2ecd8731d09038d79f48 e3b466d3a96d906b414b26092c2c2c2121211515151212121919191e1e1e1e1e1e1b1b1b 170c1023181c2920232a24263c38396868689f9f9fc3c5c4b9beaabebea4c6be99d1bd88 debd78eabd68f2bd5df7be57f6ca61f6c658f6c254f0be5deac076e3ca94dfdab2dfe5c3 f0c566f0c664f0c662f0c662f0c664f0c567f0c46bf0c46dc4bfa1cbc0beac9da463564d 493f33878185bebdc3b7bab1a8a9a35f605a999a94aeafa98e8f897d7e78e4e5dfb4b5af 78797498999490918cd3d4cff1f2eda8a9a4aeafaae7e8e3bdbbae6765582a281b5f5d50 a4a2959c9a8d3836293b392c525341c7c4b37e75668f8174dbcdc0bab1a2aba897c5c6b4 c9cfcdbfb8a8a99269e8ca84f1d073f2c75feeaf48f3a340da973ef3c067ecb563d6893b d58432d08f3dbb7537af512ba9622cc2792cd58432cc7b39c57739ce8739df9943eb9f51 da863dd38638e6a44ef8bf65ecb95deab75cf8c26cfac26fe3bf69edce98ede2c6dfddb7 ceb16fcb8530d28432da9e58e9ddc7e6dadaeacaa3f0bb53e9c879dde2e5dde1e0e7cd83 f2ad4ede993ae19c3fde9840c37e30be7737bc7442a65d34904c379954459751478a433f 833c388a443a914c3d914d38964a3d95483e93493e9148418d48418b4842894844884743 7d4a2d8c523a9b5a449a57468f503f8e5b48a37f69bca088a69577ae9d7fad9c82a6947c ad9b87bba899b5a294a18e80ac998a7967594d3d303a2d24554b42aba49ef4efebfdf9f6 cfc9c9928c8c464040201a1a2923233832322d27271610101d1f121e1d182a2627473d45 564b5344353a3525253f2e273227233f302d4d3a365b3f3c63403e6c433f784846804f4b 7b4a457a49447a4944794843784742774641774641764540734643734643734643724542 72454272454272454272454272463d7a4e4583574e85595081554c7c50477a4e457a4e45 7c584c7d594d7e5a4e805c50835f538662568864588a665a956d618862577852496e4941 6b48426e4d486e4f4c6f504e5842455c46495b45485a4447644e51705a5d6b55585c4649 
7d695e77635a715d566e59566e59586a5457614a5059424a614c4b5f4a495b4645584342 90676d825b606f4b4f6441455f3f425d3f41583b3d5437394f34394a3437483537433737 3d383530312c1e231d0f1911685334a37d3fc28542b0623e9647428e424291454598474e 7f4049934e49a95940b86233c97132d88340d78846cd8143d6873acd7f35c77c38c8823d ca8b3ecf973adca93ce9bb433232322121211010100d0d0d1717172020201f1f1f1a1a1a 22171b1e131720171a251f212521222e2e2e585858858786bbc2a3c0c19fc8c193d3c085 dfbf76eabe67f2bd5df7be57e5c45de6be4fecb94cf6b95efebf7af8c584eac672dbc45a f2c768f2c864f2c960f2c960f2c866f2c570f2c37bf2c282bec7b4d9d7dac7bbbf847264 7a6760c0b1c6d4cfe69da1a4b0afaa6b6a65b9b8b3deddd8b5b4af787772c3c2bd92918c 62635e90918c4c4d48797a75b4b5b0d0d1ccfffffbb1b2ada19f929d9b8e838174929083 b7b5a88785781c1a0d6c6a5d3e412eb9b6a5a69d8ea39386e9d9cce7decfe4e1d0d1d4c1 d6dfdeb1aa988c7448dcbe76eac86ee7bd57ecad46ffb953e3a153cfa14cb78830c47e1f e39a31dda43dc38339bf633ac67943da8545e08245cc7240ba6e32c6822fe49548fa9b65 e3a047d29545e4ae6efacf98f3d096eed088f0d57ae6cb64deb618eec875f5e2b8e4dd97 cba14dc05d24c55225ce6f2fe7d79ce5d3bbecc38ff4b647eaca7fd7eafbd3e7eedad07b ffb65ade9539df983cdf9742c17a2cc27b3bca814eab6237924e399d58499b554b8b4440 833c388a443a914c3d904c3795493c95483e944a3f954c45934e4793504a92514d93524e 966c3ca87347b5744eb96e4ebe795ac18c6aaf8f689784599b8b6a9a8a69a49377b19f87 af9d89a49180a18e80a8948989827c483f384a413a5349403f352c81746bc4b7aea69990 50454932272b261b1f362b2f3a2f332a1f23281d21382d31272b1d3c3d373a3539352e36 4437414031383e2c2c503d373025213e2f2c4e3b375b3f3c64413f6c433f7a4a4883524e 7b4a457a49447a4944794843784742774641774641764540724542724542734643734643 74474475484575484575484573473e7c5047865a51895d5484584f7d5148784c43774b42 73514574524678564a7b594d7e5c50805e52815f538260549f7b6d916d5f7f5b4f765149 78534d805a578562608966645c46496b5558725c5f6b5558675154715b5e7a64677c6669 897668806c6176625b705b566e585a6852555e474f563f495b4746594544564241533f3e afa299a6999094867d7b6a6263504958413b583f3a5d443f593e3759403b5139354e3b37 4d3e3b352a282319182c2324864c40b16e44d38441c77634a85b3195493996463b9c4637 983e40993f41a34a42b96541d28341db8e40d2843cc5753ad3853ad5873cd5873cd08237 ca7c31cb7d32d3853adb8d42ffb957e099498a5f2c3a28121612131b1b272723312a202b 2c1f171e15101713121b1c1e2425292d2b2c3d35324b403aad9b6bc5b278d8c384ddc386 e4c38df0cd8bf3d076edcb5dffb755fcbc50f5c24defc654edc35fefc163f5c15ef9c357 dec078e7dc9ae6e0b0d8c8a4d4c695e0e09aece3a0efcfa0b9babecccdcfa7a9a8d1d3d0 dfe1dcc0c3bc96999090938ab9a99a87796c8a7e72b9b0a7c2bbb5f7f4efc8c7c55d5d5b a5b9a042543a344229828a75a19f928a8178998c84c5b7aecdbbadcab4a7ddc2b7c9ada2 d8c2b54a3e302d2e1e727b6849513a7678608c8572ccbab6efdae3cdb9c2cabfb9d7d5be cac3e2cbb782e0b940f1bd47f2ba67f6c276f8c665eab545be852ce29e3df1a640e49c38 e1a64cf2ba63f2b356df9630d68a3ccf7527cb6719d27321d88c38d49643c7873dbb7530 e8944cdcaa2fe0cd55f0dfa9ecc8a4dea54eed9d2effad53e2a44dfec872f6c276cb8c49 ba682ed2753adb843fc87c30db8842eca759f5b754e7b546d7c875d7e7c0e6e6ccf1cba4 f0b154d08729da8b2ff5a953e8a459ce8c50bd7044ab52329b544095513e8e4c3c85473a 7c433876413b71403c70403c9b4b44a15146974a3a934833a35841ad614aba6c56d1836d b38352a6733ea46b34b0753db67f49b28453b28d63ba99769f8a6f947e66ac98809d8b75 a1907cb5a6939588778d81714239343a312c1e1510544b465b524d2a211c3a312c322924 393b362c2e292628232c2e292a2c2720221d21231e2d2f2a39292a39292a39292a3a2a2b 3a2a2b3a2a2b3a2a2b3a2a2b302f2d342a293e282a4f2f30663e3e764a477c514a7b534b 663e49794a50824d497e44398247398c57498b5c527d554d634c3e7951497b413f793f3e 7b4b4767403b683c398f5b5da0666482493e95624f905f5183524e8856577b4b41865743 
7a554c7b564d7e5950815c53845f5688635a8a655c8c675e7f616372545676585a7c5e60 7a5c5e8365678a6c6e7f61637966687f6c6e806d6f7461636451535f4c4e695658756264 64555a6152575f50556051566253585e4f5456474c4f4045534f504945463c3839353132 b0a69cafa299a6988f97867e826f686d595260494359403b523932553c374c37324d3a36 4e3f3c382a29241a192e2425844a3eb06d43d28340ca7a35ad60349c513a9b4c3d9e4837 99403a983f3b9e463cb05b3cc6753dd0823acc7e36c47533d4863bd6883dd4863bce8035 c87a2fc87a2fce8035d5873cd38a2cedab55f0b770bd925d7355313a28141e140b170f0d 1d1c1a1f1f2122252a1d22280d111411100e3f362f74665ba89972c1b180d9c68bdfc98d e2c88deccd8cf0d07deccf6bfbc177fac46ef8c76af3c975f1cb8deecea5edd4b5edd9b8 d9cca2dbe1bfdbe4d3d6d2cfd3d0c9d4dccdd7d6d1d8c2cec4c4c6cbcbcdb5b5b5cececc b6b7b2abaca4a8a9a1a3a59aaaa796888272696353a7a28fc4bdababa491867f6cb1a996 dacce3aba0b18a8089a8a2a2b2b0a46466514f5235a1a584dec9b49e8976ab9587beaa9f c4b6ab50493961634e9499833d3f3e36362c534d3da9a196ede4e5dcdadfc3c9c9d5e1d7 ddc7cad4b676e1b741f0bb4bf3bb64f7c36df4c05be0a73cdc9c44e29c3de99b37e8a03e e4a94de3ad53e5a949e6a23debb04ce79d3ce38b2ae08826df942fdd9938cd872fbd7020 dc8842d99f31e2bf49edca79e4b371d2923ad08824de923ccc862ee0a24ddda256c27d3a b9642bce703ada8145d28340d08329e09739eda43beba840d9bb73cfd8b9d5dcccdecdb3 eaab4cd48b2fd68631e99c4ce39f54cd8c4cc37743c0693e9b544095513e8f4c3c85473a 7d433877403b723f3c71403c833c289c55419c5543914d389a563fa66046af6a4bbe7958 a26f3abb8650c38c54b47d47b38150c0966cb29470947b5cad9b859a8872a08f7baa9a8a 91847482796a786e624840331f1a1445403a302b2539342e3f3a343d38324f4a44231e18 2728232425202627222d2e2930312c2d2e292e2f2a32332e372929372929382a2a392b2b 3a2c2c3b2d2d3c2e2e3c2e2e3c312d4937355b3d3d6941416f423f72423e74453d754940 7f4f4f7a484776454174474170484065443d573e39503b38402f25563530713f3e794343 6f423d71484280534e8855518a56499b68539162488b5a498554507a474c7d4b4c83524b 74524876544a7a584e7d5b51805e548260568361578462586b51525b41425a40415e4445 5a4041654b4c705657674d4e715e607764667b686a7663656a5759635052645153685557 5d4e53594a4f56474c55464b56474c524348493a3f4233383f3b3c3632332b2728252122 aca499afa59bb4a79eb3a59ca8978f917e7777635c67504a553e365440394c37324c3935 4c3d3a372928281a1a322628844b3aae6b41d28341cd7d38b66a38a65d3da3553fa34e39 9d46359a42369b4337a65037b66337c37334c57631c47530d08335d18436cf8234cb7e30 c87b2dca7d2fd18436d78a3cd79e47de9e44e5a043eca54be3a558b98c5172562f35250c 191d2011161a141d221e272e1a1f231817153c332a6c5e51a59981c1b491dbce9ae2d296 dfcb8eddc484dcbf79d8bd6ee4c777e5c86ce6c868e6ca77e3cf9cdcd7c3d4deddcde5e9 d4dec5cde5d5cce7e0d3e2dfd4e0dcc8e1dbc1d3d5c3c1ccbebebec0c0c0b8b8b6bebfba 888983979890afb1a6a1a396acb09fa7aa97b2b49fd9d7c0ada68a998e709f9272b6a786 c9b8b0b2a39cd0c1bce5d8d0bfb5a99f98888079694b4539b09778ac987db1a28fa59d90 a19d915b594a91907bb1b1956a686974726576765e74735ea2a39ed6dde7d4e0eec4d6e2 f0c49fe3b464e9b547f2bb54f1bc60f2c15befb94fdc9e3be59e46d88d30de9230f4ae4e f5b85be1aa4fdba342e8ad47f8cc63fdc25cf7ae48ec9a36e89835ea9b3edb8935c77023 c76f27d58e34e4b042ecba4be6ab47d9983ed1913acc943dc6751cce822ecd8437c17331 bc632bc16631cb743fd07f47d28923df912cf39d36fdae4ff1c385dbd6b9d9ddcee2dcc6 fdbf5ce2983fc57327c3752bcd8940c98941d08647e7925b9c534097503e904b3c86463a 7e423877403b743e3c723f3c844824a16145a05e488e4b3b93503da16044ad6f46bb814f c48f59b6814bba8752c69666b38c619072508d795ea69780988b7bac9f8f83776983796d 7e756c504c43433e38211e1719191155554d42423a25251d1e1e1640403876766e5e5e56 1f201b2728232d2e292e2f2a30312c33342f31322d2b2c27322626332727342828362a2a 392d2d3b2f2f3c30303d31315037325637325f37356a3837733e3a7a453f7f4d46825349 
7f4a447a4a465f3c3a584040857777a59f9f686765070705201913331e1d643d3e774949 673c36784d469263598553487f5441a67d618f64448b5d46895854703f457e4d537f5151 72504675534979574d7e5c52805e54805e547f5d537e5c525e4a494a363544302f422e2d 3e2a294c38375c48475743426754566c595b725f617461636f5c5e6754565e4b4d584547 55464b4f40454a3b4048393e47383d423338392a2f3122272f2b2c2824251f1b1c1c1819 a59f93a9a196b1a79dbaada4baaca3ae9d9597847d85726b67554b63504953403a4c3a36 4b39373626262a1a1b39292c874e3aae6b41d18142d08039bf743ab36c42ac5f41a95539 a65035a34d36a14a37a44e37ad5935b96735c27331c77931cc7e34cc7e34ca7c32ca7c32 cb7d33d18339d88a40dd8f45d29447d5913ce19233ee9c36f4a848ebaa5ace9c5fb58d5c 857b713e3a31090a05101415272d2d2a2c2b2926212d29206b615590866dbdb187dacd96 e4d496e7d293e9d193ebd194dad08bd9d083d8d183dbd597dadab8d4ded5cadfe0c2e0e0 d8e0e2c7dee4c4dedfd4e2d5d6e5d2c7e1d4bdd7ccc3cbbea7a3a2b5b1b0b3afacaca9a4 838079a3a195a6a497828073aeaf9fb2b19fc7c4b1d0cbb7817b65b2aa93fff6ddf2e6cc dce3ade4e5bbcfcaaabbb092c3b392c3ac8db1927ea8867da4875dd5be9cb9ad93a19e8b b4b6a86b6c5a828067ada88b898b75a4a58686876753523d504c4994919cdddbe6dddee2 f5b56bedb151f4bb52f7bf60ecb654ecb848efb74ae4a44ad3882dcc8124df9536ffbb5c ffc768ecb455dfa746e5ae4af3cc71fecd71fbbb61eca048e99443ef944be68848d47438 c06111d6863de8af55edbf47f2bd4bf8ba65f4b96be9b754e1892fda842dd48131d17d35 c66d31b95f2abb6331c67341cf832fd7862def9a3dffbb64ffd08ce9d3a2e3d4abeedbb0 ffc762e99f48bb6822b56621d39042d59646d1893fe38f4f9e5340984f3e924a3c88463a 8041387a3f3b753d3c743e3ca66b3fad6d4a9c55438d4038984c3fa45e44b37448c99156 c9945ec79361b48859a17b54977d5c97856d8a826f7673649f958bada39a898077615a54 3b363229282431302e14141280817967686026271f23241c4a4b436d6e667c7d7552534b 302c293b37343b37342f2b282c2825312d2a2d29261f1b182e24232f2524312726342a29 372d2c3a302f3c32313d3332613c345e352f62312d6b363277403d7d484278494172463d 663d3760413f756568a9a6adc8cfd5b0babb717a7540483d181818261c1d51383b6e4c4b 6c433f7449408c6053956a5a95756a8d6a548c674a996d548f5f557c4c4c7d5150815951 74564b77594e7b5d527f61568163587f61567d5f547a5c516455524d3e3b4233303e2f2c 3a2b284a3b385d4e4b5a4b48624f516350526552546a57596d5a5c6754565b484a513e40 5142474b3c414334393f30353d2e3338292e2f202528191e2d292a272324211d1e201c1d aaa69aa7a397a9a397afa59bb4a79eb1a39aa7968e9d8c8483756a796b6261524b55433f 503e3c3c2829311d1f412c318f573cb26f44d08041d28239c77e3abe7942b66b43af5c3c b05c3aaf5a3bac573aab553aad583bb56238c17038ca7b38d38441cf803dca7b38c87936 ca7b38ce7f3cd28340d48542d2853fe09247eea24cf3a84beba244e29c46e5a254ecac65 ddb792b798798a765e5b524323231b0001000b0d0830322d4a40346f6452a49777cfc192 e3d397e8d394e8cf96ebd09be8dabfe3dabbdfdec0e0e3d2e4e6e3e3e3e5dcd9d0d6d1bd e1d2d9cecedacacbcdd8d0bddbd5c5ced7dec7d3e1cecfd1908b88b6b1aeb3aeaaa39f96 9c988dcac6baa09d8e656253b3a6969c90827a7064877f74514a405b574e9a9790aeada8 dbd9c0d1cbbbd8cec4f6e7d0ecd9aeb39c66b29664f0d0a9caa97cb69d748c7d5cb4ae94 e6e4cf8d8b76817962bbaf979698826b6b5347443180776e75656559453cb49d7de2cb95 eda747eeaf45fec35dfbc366e7ae47e4ad36f2ba4bf1b763d18629d89135e7a449f8b658 fcbc5bf3b351e8ac4ae1a948ebc469f5c86bf4bc5fe7a046e18c3be3873edf8342d57b3d d37722e39b60ecbf7ce9ce67f1ce66ffcf83ffcd7efbc759fdaa4aed983be79238e89342 d9853cc06d2bbb662dc8713cc26f39c97431dd9040f3bb66f6cd7beac471eabd6cf3c273 f3b64ce0963fc26f29d08238fab862efb259c37b2faf5a21a051409b4e3e93493c8a443a 8141387b3e3b773d3c753d3cb97b52ab6649924336903b38a24d48a65842ab683ec28650 bc8c5bb18659a07d57957c5d9a8a73968f7d7472664c4e43aaa097e1d6d0aea5a0362e2b 27221f2c2b291311123c3c3eaeaba489867f55524b56534c67645d615e574c4942232019 
3f3a3745403d3f3a372d282526211e2d28252b26231f1a172a22202c24222e2624312927 342c2a372f2d39312f3a3230613b326b403975454178454273423e693d3a623d375c3e36 6653559d9296c7c6cecbd2daccd7dbc2cbc884877c3938261b1f22211f20362a2a5f4745 7855516e4740845e51be9d8cb7a2a7583d348a684fac826993655591655a7e574888654f 7d61557f635782665a84685c84685c82665a8064587e62566b5c595445424a3b38463734 423330534441665754645552624f515d4a4c5b484a5e4b4d6451536350525b484a523f41 5344494b3c414233383d2e333a2b3035262b2d1e2326171c292526252122211d1e231f20 b3b1a4aeac9fa8a498a6a094a69c92a69990a4968da1938a978d8190837a776a62685954 624e4d4a34363b25284a31379b6445b8754ad08043d18237cb8337c68241be7443b5633d b25d3eb6623eb8653db5613cb05c3ab25e3abd6b3cc7763ed7864fd07f48c7763fc2713a c17039c2713ac17039bf6e37e99454da8b46d38e40e0a24deeb359eeae56e39d47da8f3c d89659eab17af7cb9edabd9d92826b4a41302e281a312d215d503d7566539d8b73c6b28d dac58edac083d7b97dd7b882ebd3b1e3d1abdad1aadcd3b4e3d5bae7cfa9e3c182deb563 e5c280dcc591d9c588dec278ddc692d4cccacfcee0d1cbcf8d8582c1bab4c1bab29f988e a09a8ee3ddcfb2ac9c6c6656c7b5a794847766584d756b622b262231302e6d6d6d5a5b5d 8b76657c6462b69fa7f4e1dde3d3b9d0c49ee0d5b9e6d8cbe9c498bb9c70b1986fc8b690 c6b79893846d877767a797889894933e3a373f3a37c8bcbcdfcbc4a68c69c7a75cd7b64d e8a846ebb346fcc75dfdc362e7a841e5a931f4be51f5c571e5a13ef1b253f2b75de8a94c e29e3de9a13deda849e9a94fedbc55efbe55eeba4de6a63cdc8c29d27f23d0822dd48d3b e8a95ceec08febd4a8e1d790e4cf7cf0c880f2bf6ceab849ffbc55f2a741f09e3bf6a645 eba043d38835ce7c33da8041ca7648d37b3bdf9647ebbb66f2ca6af5c050f7b846fabe50 f2b54bdc9239c8762adc903cffc15ff7bb59c37a35994217a251409c4d3e95493c8c433a 8340387d3d3b783c3c773d3ca56e4fa2614b93463c923f3ba3504aa156439a5b3aa36e42 a681579a7a549b83679c907a817b6d58584c5a5c51787a6f9e9087c8b9b2c1b4ae514642 2f25234a4542403a3a605c5d776e699289849087826d645f433a35312823382f2a372e29 40383640383639312f2f27252b23213028263129272f272527221f2823202924212b2623 2e2926302b28312c29322d2a56352e66413b7149476c4444613d3d644a497e6c6a968b87 cfd0d4bebfc4c8cbd0e5e9ecd2d4d387847d413b2f281f101e201f191b18211d1a50413e 7759576a4944866b62dccabcc3b7c532201c80644eb68f72986b549f746383604a8f7254 856b5e856b5e846a5d83695c82685b82685b82685b82685b68545354403f4f3b3a4f3b3a 4d39385c48476c5857685453604d4f5a47495542445744465c494b5f4c4e5c494b574446 54454a4c3d424233383c2d32392a2f36272c2f2025291a1f211d1e1d191a1b17181e1a1b aaab9dabac9eaba99ca8a498a49c91a1978da2958ca2958c9d95889a92878d807883746f 7e6a695f494b4b303551363da7714fbe7b50d08045d08136cc8533ca883ec37a43b9683d ae583fb8643fbf6d3bbb6939b15d38ad5839b5613cbe6b3fc77448bf6c40b56236b36034 b56236b86539b76438b56236d28245d08840d79642e2aa4be3ae48dca541dfa343eaaa50 e79549d78e49d2965ae9bb8affe0b8eed1af9f8667543b1c6f5f3e77634b927867b79c81 d2b687dcbc7fe0bd7de5c185e7c480deba6ed5b05fd7ae5fe1b064e7af5ae2a83ddba022 e4ae32e9c054ebc854e5bd3adcbb5cd2c7abc9c8c6c3c0ad908782c0b7b0cdc4bd8f877c 6b6356cec7b7cac3b18d8674bcb3a4a69f8f736c5c6b6356363022888276f4eee2ccc8bc 968861b8a7959381816a5b56978c76cec4abded3cdefe1f0e1bb94e3be94f7d2a5cba97b 9f7f588d73586e584b887571868080403d3677746bc1bdb4e1d7cbfff2d4fee6aae9cf7a edbb62e8bc51f4c656f9be58eca740eeac3bf4c156eac471ecb149f7c360f4c367e4a94d d8902ee18f2cefa044f5ad58eeac4be8ad47eab146edad43e19631cf8325d29238e4b259 e0cf99e1d9b4e0dfc1dedaade3ce8fe8c279e6bc66e0b858f4c052ebad3eeda536f7af40 f2b042e3a038df8f36e68b3cdd8e49ea903af09d41ecb157f2bc58fdbd42ffc03ff4c750 f9bc51db9236ce7e29db9130ecae3decb14bcf8548a74c37a350409e4c3e96483c8d433a 843f387e3d3b793b3c783c3c8c6852aa7b6ba5695e8d4940904c3f9b614d986b4e93714e 
5950495e554e4d443f6c6260685e5d42373b4e4347463b414440415b5758787475898586 e8ce93edcd84f0c96aeabe53d7ab42bd903b9f773a8d683e8061428d795e7566537d5f45 b7894bd4af57b1a76a768278adaca7918e879e9a91b7b1a3a39c8a8f87709d937aaca086 bfb99f97917b7e786a8c887fa09d98a0a199a1a396a8ac9ba59e8ea69f8fa59e8ebeb7a7 b6af9faca595bdb6a6a09989989085a49c919e968b837b706e665b625a4f50483d3e362b 493d2f6c6052c0b4a67b6f61a195875d514372665875695b59564f322f28302d261b1811 7d7a73ccc9c286837c605d56736b5e958d80aaa295c2baadbab2a5b5ada0b3ab9e776f62 4645331b18136d686cbcb8b7c9c7b8c4c3af7a786c201c1d181712201f1a302d262f2b22 a19a90d2cabde1d8c9c9bdafded09fc7b8918070574c3b31483936403232544945948983 714f46a2826bdbbb94d4ab7fe7b590b48b6d503c218d9073877c7622171161584fcec5bc aba5997d7769565344918e7fd2be8ce7d3a1ecd8a6e2ce9ce7d3a1f9e5b3f4e0aeddc997 bdb08d928769403521282111352f214f4e3a84876a8c92705d533a838168858a73aeaea2 e3d7d9ead8e4ebdfebdbdae2a7a79d8e8b828c857bb6ab99e5d5b4f4e0abebd88ce2ce77 c4bdaac6b887cfbb64decb64f0e07efee99affdfa0ffd199d1ba74c7a98fcaab8fa48b63 5a413d504148919189c8cdc9d9c7bdc8b7afd0c3bdded6d3d7d6d2d5dad6dfe8e5e1ece8 dddae5c9c0c1bfae9ecbb196bc9f818f755e7f695e8a7675855e4f88685d9c8983bdb5b3 cecccdd0ccc9d4cdc3dfd5c97f614568482f6949344f3220917e6ddbd4c2bdc1b0ddead8 e2dce6d9d6dfd5d4dad6dbdfd8e2e3d5e4e1d2e5dfd1e6dfcce4eec8dce5def0f4d4dedd c0c2bda2a094676050564d3cfdf6f09d948f372c2856474468545368505083696a442729 2f25244036353c3231372d2c4238373c3231362c2b463c3b332f2c332f2c322e2b312d2a 312d2a302c292f2b282f2b2821202e504f5d9897a5bbbac8bbbac8cbcad8d8d7e5c8c7d5 a5bfce9ab4c37e93a48c9fb0b1c0d395a0b4757d929298ae555c621f262c181d21121315 2626264e4d4b373330120e0b0a090f1f1e2419161d060409080307070206040002040000 121013100e11222023242225080609575558b3b1b4706e7145453d98958e6c6761655e58 5449455d4e4b5b49477e6a69a8a2a4aea8aa8f898b524c4e322c2e453f41605a5c686264 6f665f7b726b504742665c5a5c525131262a4d4246382d334541426460618c8889a6a2a3 d5e2d0d5e2d0d6deb7dbd287e2bc5be7a944eaa33be8a534e0c64bf9ba53f6b05bdcb663 ddc56ff4ca76eebf71d0b0657f877c9ea39dabaca69a9985938f6a9e96719b8c77887571 c4bcb1b1a99e5b534891897ea8a095aba398b1a99eb1a99eb1a99ea8a095a29a8fa79f94 b3aba0bbb3a8b9b1a6b4aca1a79787ac9c8cb2a292b2a292a494848676666454444c3c2c 4239324f463f5c534c675e577269626b625b483f3821181137332a0d0c07161a19252d2f 525a5cc6cac984837e4f4b42c0b79a6b604a62564883766da3968dbeb2a4c1b6a0cbc2a5 92836c756a565a5446b2afa6b3b0abb8b1abb2a59d4c3b31322f2819181310120d22231e 7d7a73c7bbadb9a791c3ab91c6ae8ac6ad8f69533e69554a81706875675c685f4e9b957f b5a895978770c8b398dcc3a5d5bc9eab967b675740837663766f5c514a379d96869c9487 8279704b423d201614312726534544675a54ada193cfc4aedbd2b1e1daaed7d19df1ecb4 fee39cf3ecbeadb59d5255388f7d4dba9e6e75634d8c8690928179605249695c534c4439 b3ada1e9e7dac7c8baacaea071534862463a9e8779a28f7eaa9b88b3ab9488826aa39f84 503e4c402f3960505166584f5d513b7a6f4fa89f76dfd7a9a9b1c6bac1d1ced5dbcdd6d1 bcc4b9aeb6abb0b7b0b6bcbaced9d3d0dad2dfe4ddf0f0e6ede6dcdacec2d1c1b4d6c4b6 baa286c0a18fb08d79957d5b8173567062557b654ea08152b3a483bfcbcbd0eaffdaeaf7 d6dfdccfdad6cfd3d4d4cacb766168645d649ea1a69c9d97b1aa98f0e8d5dad9d4d2dbe2 c5d1ddd7e0e7e1e5e6ebeae5fefef4f5f8eddde7dee5f2e9dedfdae6e5e1dcd8d5cfc7c5 d1c5c5756767402d2fc3b0b2fffef8544f494c4741645f5939342e65605a7e79733e3933 2c20222b1f21322628392d2f322628261a1c291d1f372b2d31262425191923191a2e2528 302a2e25232826232a2f2e360300007a7571bdb3b4d9ced2d9cfd7bbbac2b5bcc4a6b3bb 7b898c8c9a9d95a3a6a4b2b5bac8cb919fa28e9c9f6c7a7d272322171310191611403d36 423f3649473b585649201e0f181818161616050505080808181818040404000000020202 
1c1815221e1b292522292522231f1c1d19161c18151d1916817c83887d81594743513d34 6a57497c6b61afa69fd2cfcaf0e9e3e8e1dbbfb8b2736c662c251f140d0729221c463f39 8d7d6e7b6b5e9b8b7e7b6a624c3b344d3b3745333152403e594f507c72739a91929d9797 d5deddd4dfd7d5ddc5dcd09ce3bf67e8b33feaae36eaad42ddc968edc46cefc068e0c06b dfc882eccd97e9c686d9bb6193957f9a98899c9a8e9c9986a19c7fa29c7c988d7b897c76 b4aca1c9c1b68f877ca79f949c9489978f84afa79cbdb5aabfb7acb8b0a5ada59aa49c91 a1998ea8a095b3aba0bcb4a9b7a797ad9d8da69686a99989ae9e8ea595858c7c6c766656 5b5148645a515f554c4b4138433930554b42736960857b72a4a0977d7c772f3332141c1e 1b2325898d8c8a89841b170ecac0a593887283776770635a5a4d445f53437e735dbab095 b5a9918e8673524e42aeaea6b1b0aba6a19bbeb4ab7c6e6337342d32312c1b1d181d1e19 5c5952aa9e90cab8a2ccb49ad7bf9dbda58956402b533d3066544a6a5c4f6f64529c917b bdb09d75654ec9b499f9e0c2d8bfa1d5c0a5a4947d685b48746d5d6d6558484033585045 8f867d4e453c332a233027203a2b322f2025483a396a5d55a09484c4b9a3bfb699d3caab ecd69afef6c7edf0c5b1af74d7c273ddc1787c6c4a7573768c827881776d655b51574f44 7e786cdad6ca8f8b7f6c685ca78a7a7a5f4e8d76668976655f52413c3421504b37b8b5a2 a89a978477717b6e65988c80b7ab9b8378644a40277c7259dfe2f3e2e8f4dce5eadbe6e2 dde7dfcfd7ccd0d3cae9e9e1dcded1ddddd1dad8ccd0cabec2b8acb8aa9daf9f92a8968a ad957b9e81738669597f6e528d836a938578ac9176d9b27be1cea6dce2ded4eaffcddbe6 c4cdcac3cfcdccd4d7d8d2d6ddced1f5f0f4a3a7aacacbc5ccc5b595897be1dcd8f2f5fc dbdfe2ebebebcfc8c2b2a69aab9e8ea397899c968899958a9d9088998c849b8e86b6a7a0 c6b5ae84736c76635de4d1cbd7cec93229242f2621584f4a2f26213b322d645b56665d58 4c454c4e474e4e474e4b444b433c433b343b3730373831384c3e35483932463931463b37 4137353833303731313b37364c4b468e8986988c8c8e7f827c70746660627474767b7f80 78797d7f808467686c5253578182869e9fa3babbbf8b8c90221e1d110d0a2825204d4a43 39362d4b493d7573664442334545452424240a0a0a0303030303030808080a0a0a060606 1b1714221e1b2925222a262325211e211d1a201c19211d1a918a918b80846e5f5c836f66 9e8a7f9c8e83a89f96a49f9998918b5d56503b342e4f48425f58525049434c453f5b544e 72625381716488786b94837b5b4a43311f1b5644424a38366157568379789f9795a29d9a d5d8e9d4deddd5dcd4dbd0bce3c57de9c23febbe3deabb63d9cf94ddd398e3d28ce6ce84 e1cfa1ddd2c0dfd0a5e4ca6dbeb78da39a7b8f8774958e7ea19a87a09b87989584969287 a0988dccc4b9b6aea3cec6bbc3bbb0afa79caea69ba69e93b7afa4b8b0a5b2aa9fa49c91 978f84989085a79f94b7afa4c3b3a3b7a797ad9d8dac9c8cb3a393b5a595ae9e8ea69686 7264596a5c515c4e4353453a56483d5e50455c4e4353453a68615756534c292b280b1111 070d0d7b7d7aaaa7a05d564cc0b69d958a768a7e6e8d8173998d7f887c6c5e533f665c43 c8bca6b9b09f5b574baaaba5aaaba6918c88c8bdb7a2948b413e3741403b282a251f201b 45423b988c7ed4c2acccb49adbc2a4b29a80553f285c47366a57487766567a6b56897a63 9d907d61513ac4af94f6ddbfd4bb9df1dcc1d4c4ad756855534a432a211a584f466f665d 9a9287968e81756d60615a4a5f51504335322518121f120a4d413575695b857a68aca18f baa87adad1a6f9f4bce5d683f9de75efd274908251767771807c705c584c3f3b2f575145 7b7368b6aca2776d637c7268b0967f765d4778634ea594827f7363221a0d201c10706e62 756d565d554058503d756c5da0968a847a716e635dc0b5b1e0dfe7f4f7fee0eaecc3d2cf cfdad4dee1d8dbd5c9dcd0c4d3c8b4d5c8b5c8bba8b0a392a89889ab9b8ea795899b897d 8f775f866b62725e576962506765566f6259ae8b6dffd293e8cb9fe0e0d6d8e9f9d4e1e9 d2dbd6d3e1e1d8e3e9dddce4e7e2def7f7f57c807fbcbdb7ada79b594f46d9d1cfd8d5dc e3e0dbf0e7e0b1a1947b64546348375c403270584c664f479680758d776c958176beaa9f ae9c907c6c5f99897cd7c9bc6859562e1f1c51423f78696671625f8879769c8d8a90817e a59693a1928f9b8c8998898698898698898694858290817ea3917dab9887af9e8ead9d8e ab9d90aba195a99f95a29a8fc7c6c2b1aca96a5f5d3c2a2a2b17162a1814453a345b544a 
a4989aada1a3b6aaaca4989a94888a65595b6e62646155571612112925226b68637c7972 2a271e28261a7d7b6e8b897a0909091515153939393939390e0e0e060606131313060606 191512211d1a2925222c282529252226221f26221f2824215c53586b5f616f605d7b6a63 8271678779707e746b504941706760554c454239324f463f696059756c656d645d615851 5d4d3e7f6f6275655884736b73625b42302c4b39374f3d3b6f65638e8482a8a09daba6a2 d4d5e7d3dedad4dcdedbd1d9e2cc9ae8cf51e8cd58e8c792d5d5b9cfdcc8d9ddc2e9d7af e4d5bed1d9dbd7d9c4efd392e5d59ac1b0829a8b6c8a7f6d8d85789593869fa092a7ab9c 989085b9b1a6afa79cc2baafbab2a79b9388a0988da9a196a39b90aba398b0a89daca499 a29a8f9d958aa0988da79f94bdad9dc1b1a1c3b3a3bbab9baf9f8fab9b8bb1a191baaa9a aa988a8a786a645244574537614f416553455442343d2b1d140c0104000020211c1c201f 464a49a4a5a08e8a81837b70b8ad99a09581968b79998d7db4a898a79c8a716652746955 aea691cdc7b75f5f55a2a49f9fa19e706f6bb8b1abafa59c5d5a53373631292b26242520 4b4841a69a8ccebca6c6ae94d0b89ea9917969533c87715c907a6596816c88745c715d45 6053406d5d46b5a085c0a789b2997bcab59aae9e877b6e5b372d2c1a100e857c777f766d 7971649c9585a69f8cb0a9969d937aa2987f857b625d523c524731483d274e432d796e58 897c5c9d9070c1b386cab066e6c365f0d27ab9ac78a3a695989a8c656658656356534d41 958d829c8f8692847b9a898171593f806a52998771c8b8a891877b28211924211c464541 726c52605a425d5643433c2c433a316d645f9c9291d7cdced6d1d5d1d2d4cbd4d3bfcbc7 b3bdb5b5b5a9b6a999ae99889f8a6f9b856d937d668a7562867160826f617e6a5f79655c 705a45634e494e40403b3b332a2d242c1f197c5335ecaf6eddb987d5cfc1d0dee9d4dde4 d8dedad8e6e6d7e4edd9d9e5c6c6beb2b3adb4b5afc4c3be8e8985978d8bede2e6dcd2da ddd9d6d7d0c88f7e74674f4355392d573a32876b68876d6ea18c89a5928eb9a7a3d8cbc5 aca39c959188cecbc2d0d0c6594544503c3b503c3b523e3d685453958180ac9897a99594 c09f8ca685729574619978659e7d6a9776639574619b7a67917c619c876ca18d749f8d75 a79682ae9f8c9e9180857a68babab88984815244434a35325c433e614a42604e405f4f40 776c686f6460998e8ab6aba7b1a6a2675c58463b372a1f1b393534322e2b55524d6f6c65 48453c343226605e51817f700a0a0a3c3c3c707070616161232323050505060606000000 181411201c192a26232d29262c28252a26232b27242d292633272b65595b87797872635e 594a4374675e7d73694d4339948a8191877e80766d6b6158675d5470665d72685f6b6158 5c4c3d7464576c5c4f4d3c347968617866623b29276553517f76719b928db2aba5b3aea8 d5d6dad3dfd1d4dfe1d8d5eadfd1b7e3d678e3d785e3d2c2d4dccfcadde4d4dce9e8dada e3dcd4ccdfdbd2dcd3edd5bdeed996dfcb90bcab7f988a6f897f73918e85a0a297a7ab9c 9b9388a69e93a29a8fa1998e888075574f447b7368b8b0a5a8a095ada59ab2aa9fb5ada2 b3aba0aea69ba8a095a59d92b0a090baaa9ac2b2a2c0b0a0b4a494ab9b8bab9b8baf9f8f b8a191ae97879c8575856e5e735c4c745d4d8871619d86767b7263433d311f1f17090b06 777974c2c2ba8d877b999081b5a999a49987958a76857a66988d79998e7a7a6f5d8a7e6e 8e8774adaa9b3e3f37969b979da1a04646448d8884c8bfba928f882e2d281c1e191c1d18 5a5750beb2a4c6b49eccb49aceb8a3aa947d765e46977f659c846aa88e75997f66765c45 46392674644da79277a08769977e609d886d7b6b545b4e3b160c0b5b514f7c736e6a6158 6d6558564f3f77705d7c75628c8267b4aa91bfb59cb5aa949d927e6b604e473b2b3f3323 716a507467568e7968a98867d1ac75e8cb91c9bd93b6baa3bbbdafc5c6b8bdbbae676155 827a6f978a818e807767564e5b472e9c8a72af9e8ab9ad9d958d825d5852494844191917 9b9585958f818f897d7a74689d968cd9d2cae3dcd6efe8e2f0f0f0b8bab7bec3bdd3d9cf aaaa9e8c83749885769e837290765d7f674f7f66508c7261816a5a634d40574136604941 877465634f50392d312b2d282c322e3329276f4a2fc88e4fe9c490dbd4c4cfd9e3c9d0d6 cacfc9cedad8d2dde3d7d5e0c3c3b7a09e92ede9e0c6c1bd878384bdbbc0d7d2d6e8dfe2 e2e6e7b6b5b36e65605e4f48624f4b746262b3a4a9cbbfc9bfb9bdc7c2c6d4d2d5e2e3e5 c0c4c5cad3d2edf7f6a8b4b254403f533f3e3a26253a2625554140655150897574bda9a8 
c9aa96977864795a4682634f81624e6a4b376748347a5b477d684d877257907c639c8a72 b5a490c5b6a3b2a594938876bfbfbd7b76734638373c2724523934725b538c7a6ca39384 c5c5bdadada5aeaea67e7e765c5c5437372f2c2c240a0a0234302f24201d23201b49463f 66635a4745393e3c2f7876676f6f6f8686867575754646462525250c0c0c000000000000 181411201c192a26232e2a272d29262b27242d29262f2b28302021574949837777766b69 5a4f4b72685f897d717c6e61a89b927a6d6464574e7669607f72696e61586a5d54796c63 6555466f5f5269594c3d2c245847407d6b6762504e7664628e857ea69d96b9b2aab9b5ac d6d8cdd3dfd1d2dfe5d4d9edd9d5ccdbd8a5dcdab1dadadcd5dfd6d0d9d8d6d4e1e4d9e9 dfe1ded0e1ced3dcc9e2d5cde1cc8be8d495dccb93bcae87a297839b948a9c99909b9c8e 9d958a999186aba398c0b8adbcb4a969615658504571695eaea69bb0a89db4aca1b8b0a5 bbb3a8bbb3a8b8b0a5b6aea3af9f8fac9c8cad9d8db7a797c0b0a0bfaf9fb1a191a39383 a88d78b29782b99e89b09580987d688f745fa48974c2a792c2b5a5bcb2a657534a30312b a3a49ebbb7aeaaa094afa2928f83758e8272988d798c826992886f988d79817565897d6f 827c6c7b796c0b0e07878d8baab0b02b2c2e575350d5cdcabfbcb548474211130e13140f 615e57c0b4a6cab8a2d9c1a7d7c2b1b6a08b775f458970518e7453a6896bab8c709a7a61 5a4d3a65553ea48f74c1a88ab2997bbaa58aa4947d6356433027204e453e80776e5d544b 50483d564e41595144322b1b544741675a547567649183809b8d8c8577766052522d1f1f 565040584d496a555a8c706db39885b6a385958b72838270979387c4c0b4a7a397817b6f 5c544981776d63594f5a50469d8a79c1b0a0a59586897f738982788b8b8372736d1c1e19 57524c706b656e6a61827e75ddd9d0e8e4dbb4b0a5d9d5caedf5f7ced3cfc6c6bab2ad99 8a7e68917f6b9c85757a5f54816e5d715e4d7c6756947e708b7367644c425c443a745950 8472666b575947383d36332e32343132292c5f402ea67645e2be8cded8caddeaf3e1e6ea e2e3dbe2e9e2e2e6e9e6dde2efe9dbc5bcadc7b9aeb5aba9bfbec4dadee7d2d6d9f2f1ef eff0f298979559514e504742675d5b928e8fc9c9d1edf1fce9f0f6e2e9efdfe8eddbe4e9 d2dde1e6f4f7c7d5d85160634c3d3a9d8e8ba59693968784a0918ea0918eb3a4a1d4c5c2 cac3bd9c958f827b758e87818f88827b746e7a736d8f88829c8a76a18e7daa9989b8a899 ccbeb1d5cbbfcdc3b9bdb5aa9a99956b6663574c4a493737483433675551887d77aca59b 6c726e636965a1a7a39ea4a07278743339351f25211117131d191828242123201b3b3831 7572695f5d514644379795865858586d6d6d5757573535352c2c2c1515150000000e0e0e 181411201c192925222d29262c28252a26232b27242e2a273624222e1e1e5246466c6364 706865746963786a5f8d7d6e8f7f7279695c6f5f527b6b5e8171647565586b5b4e6b5b4e 7060517565586c5c4f61504835241d4d3b379987857d6b69988f86ada49bbdb6acbcb8ad d6d8cbd4dddad2e0ebd3dcedd5d6dad4d6c9d3dbced0e0dfd9e1d6d9d3afdccdacdcd8d5 dae3e0d7e1bed4daacd4d7bcdec992e1cf8fe4d592dbce99c2b799a69c909c938a9f998d a39b90938b80aca499cdc5baece4d9978f8450483d30281d7d756a90887da79f94b5ada2 b9b1a6bab2a7bfb7acc4bcb1bbab9bb0a090a99989b1a191c1b1a1c7b7a7bcac9cae9e8e c2a48cb2947ca4866e9d7f6797796192745c9b7d65aa8c74928574b3aa9b979388898981 cdcdc595918672695a84776672655c7b6f5f9e937d91876c80765b8c816b8b7f6f92857c 7d7a696b6b5f00020077807fbac2c4393d3e2827259f9a97c2bfb8807f7a191b1620211c 69665fa89c8ed7c5afd6bea4d2bcafc5ae9c897155957d5b9c805bb29170b89579b59177 796c596050399e896edbc2a4d0b799d9c4a9d1c1aa897c69665f4f3e362990887b473f34 20170e756c63928982726962655757564848483a3a554747554746645655796b6a4f4140 35312842393a4639425e52547e746b6f6858524b41534b49584e44796f656c6258726a5f 524c406460546e6a5ea29e92bcaaa0b3a29a85776e554d4245413658584c73766b71776b 2a271e48453c58554c838079dfdcd5d9d6d1aca9a4efece7d3e4ebd6dfdab4af9c857456 896f4e9f836b866c5f5139354c433a493f36574a426b5a52655148553d3361463d7f625a 58473f5741444733353b31283635303730375a463f947350c3a476cac8bcd6e4efe0e5e9 e0ded2dadcd1d9d5d2ddcbc9c2b5a5aa9788826c5fa29493d0d2decbd9e4ccdadbd5dcd4 
d8cfca746a615145394b3f33756e64bdbeb8ced8d9e1f0f3e1edebd7e3e1dee8e7d7e0df e0e9e8e8eeee979b9c2f33346e6560ece3def0e7e2bbb2adb7aea9c1b8b3cbc2bdc5bcb7 c3c8cbb3b8bba9aeb1afb4b7b7bcbfb7bcbfb7bcbfbabfc2b6a89fb6a7a0b7aaa2b3a8a4 a99f9da19c99aaa4a4b7b3b277767147423f3c30302c1d20382c307d7779bdbdbfeef2f3 ccd0d374787b8b8f929b9fa285898c45494c2b2f322a2e312d29283935322a272225221b 68655c79776b575548807e6f4343434343433e3e3e3939392f2f2f1c1c1c101010141414 181411201c192925222c28252a26232723202824212a2623412c29302020413738565052 787272877e77796b5e907d6e80706393837698887b86766979695c79695c7565586b5b4e 8373647f6f6276665975645c3e2d263a28248775738775739e968bb0a89dbeb8acbcb8ac d7d7cfd4dae6d0dff2d0dee9d0d7ddd0d5dbcddbdbcbe6d7dae0d6e2cf8be0cb7cd7dabf d8e4e0dddeb4d7d896cad9a2e9d4a5d8c88adace86e5da9cd4c8a2ada090a0938baca298 aea69b978f849b9388938b80b1a99e7b73685951464a423739312661594e948c81b1a99e b6aea3b5ada2bdb5aac7bfb4c7b7a7c0b0a0b8a898b5a595b7a797bcac9cc0b0a0c1b1a1 bb9b84ac8c75a3836ca3836c9c7c658e6e578a6a538f6f588e7e6e6155477b746a716e67 a9a69f766f654135279181718679705e52446b604a80775aa0977ab9ae987d716333261d 7774657a7a6e0f140e6f7779c6ced1595d600f0d0e534e4badaaa3adaca72a2c27383934 74716a908476e1cfb9c8b096c1aba0ceb9a6a58d71b99f7cc1a37dc8a882be997cbc947b 8a7d6a6c5c45917c61cab193c6ad8fc0ab90bbab949083703d3623746d5a8f887850483b 433a316f6661b9afadb3a9a8a79a919689807e7266675b4d332717413624857a66776c58 2a25223a3436312b2d3b3a3556584a4647393b3633564a544d3c3471635a80736a484035 585246626053adaea0d4d6c8937f7e5846423528223c332a48463a46483a474e3e5c6554 6e6f5d6566567677699a9a90d0cfcad5d4d2b5b3b6cfcdd2d8eef9bac5c1857d66917752 bf9d779472565036295541403e3d3b46423f5149465447414e3b355138316c4f478a6c62 6455505d474a5238395f4f42847f799c979eab9d9cc4ae96b69e72b5b6aeb2c2cfacb1b5 a29e929e9c8fa8a199b69f998f7d6fa28879b99d91e2d2d3dadeeacddfedecffffe5f2e8 b09d8f544132543f2c51402c8b8370e6e7d9d5e1d7d0e1dbd4e0d6d3ddd4ecf3ebe2e5de e4e3ded1cec96b636029211e3d38329d98929d98928d888298938d8a857f8a857f8d8882 76767481817f81817f7777757c7c7a8a8a8884848270706e817674887c7c91878890878a 7f797d7b797ea19ea5cfced65855502c2723352b2c3e3337665c64b5b4bcdbe2eaeefbff b7b6be84838ba6a5ad8d8c94605f6734333b25242c17161e14100f4a464356534e1f1c15 232017403e32403e31696758bdbdbd6b6b6b3434342727271e1e1e222222202020090909 191512211d1a2925222c282529252225211e26221f2824212f1a153e2c2a4036372b262a 555150887f7a79695c7c69588e7c6e7b695b7b695b8f7d6f917f717f6d5f806e60938173 97877884746782726567564e604f48513f3b473533958381a0988bb2aa9dbdb7a9bcb9aa cddbdedbd7d6d8d9d1c5e0dbc6dde5dcd5e7e0dbe2cfe9e0dcd5e5dcd7dddcd8cdddd9be dfd9b5e0d9afe1d8b1e2d5b2d8dab3dcd3b2d4c3a5d7ccb0dadabeb4baa08d8c788b8376 b6af9f9d96869d9686938c7c867f6f6861515c5545a09989231b102c2419564e43948c81 b3aba0afa79cb1a99ec1b9aebdbab1c7b4adccaea3c9ae9dbcb299b2b29aaead99b0a69a a4988aa397898f82727465527b6a56baa590b7a18a68523b69594aa48873a07c66a68677 897b783435372328223a3c2e3d3d47403f451d1d1b6b68618a867b3831295d53512f2526 5f6156babcb107080051524cc5c6c17070700e0e0e121214a6aba7c5c7c47f7e79625e53 877d71b3a696cdbcaacdbba7d2c0aacdb59dc6a78ac5a17fc7a17dc6a27ec09f80ba9b7e 8e765c8c745c9b856ec3ae99cbb9a5d1c2afc3b3a36b5e4e301f156f5f50887861a08c71 846f509a8266ae947da88d7ac5ab92c2a88fd4bca4bda790917c67766551a899869a8a7a 1d21241a1e21202427282c2f23272a212528363a3d53575a8680689b957f918974766956 857565bba99bd3c0b2c3b0a2665344725f50867364998677a49182a69384a28f809e8b7c 856e4f796149978072b9a49fdbcbccdad1d2b9bab5cfd4cde3e0d19b8f759e855db99565 b79266967b5e594a433c353c43322b3e2b256450475941377a5e529f827281604fad8c79 
8c8276a69c92c3bcb2d1ccc6cdccc8c5c7c4c3c7c8c6ccccdfe4f8e2dff0b9a9b494757a a8817ac89f8dcfaa8dd3af8bb08370ab8572b99e89ddceb9f3ecd9e8e2d2dcd6c8e9e1d6 78716b3e352e4a413a746a61a2988fa99c937a6d647b6e65a391838b796ba59385af9d8f a59385b7a5974d3b2d3f2d1f4c32316f564f967f6d8d785d9a8568af99819e8775ae9789 a69a8a9d91819c9080a296869e92828f83738a7e6e8f83738e887a847e70837d6f8d8779 928c7e979183afa99bccc6b86463612e2d2b2b2a283f3e3c7f7e7cd2d1cfe1e0dee1e0de 87919aa3adb6a7b0b7747b81373c40242527282828232323191919141414333333585858 4040401010102121215f5f5f7272722d2d2d3737372020201a1a1a2f2f2f080808080808 030000211d1e393536322e2f2824252b27282e2a2b2b2728302f2b2e2a272b26232a2220 2e24233529293e3030453536564a4c8478787f74726459557b706a92887f7f756b685e52 9582738976677e6b5c7663546956476552437c695a9d8a7ba7a090b0a999bab3a3beb7a7 d5dad6e1d99ae2d38ed6d1cbcdd8ecd1e2dad2e2d7d1dbf6d3dbe6d4dbe1d6dbd7d7d9cc d9d8c4dbd7bedcd5b9dbd4b8d7d9b4dad4b4d6c6acdbcfb5dfdfc5bec4aa9a978495897b a49d8d8f8878a8a191b0a999a39c8c8a8373615a4a5c5545534b40322a1f251d124c4439 8b8378b4aca1bab2a7b1a99eb9b5a9c4b4a7ccb0a2cab09fbeb39db7b6a2bab8abc0b9b1 bcb3a49a8e808b7e6e4f3f2f907f6dae9c887a65506c563f69584884685394705ab49487 9e8f8a3939391419133e4033363640403f4432332e5d5a51827e73433c344c4240382e2f 66685db0b2a73031294b4c46d0d1cc6f6f6f2121211e1e205a5f5bb1b3b0908f8a524e43 61574b9e9181cbbaa8c6b4a0c4b29cc3ab93c2a386c39f7dc7a17dc8a480c8a788c6a78a 927a608c745c9a846dc5b09bcbb9a5cbbca9c8b8a8887b6b23120a7e6e5fad9d86b1a084 9a8769baa58abba28cb99e8da48a71a1896fa79179a18d75ae9c88b09f8da595853b2b1b 28292b1e1f211c1d1f27282a3334363637393233353132344b44325a51406e6351867968 a79885bcab99b19e8d9a8572a28d7ca38e7da5907fa5907fa38e7d9d8877988372947f6e 97755c886d586b5b4c878078e4e3dfdbdad8a49f9ce0d6d4b3a7996e5e476c543282623b 8b6a47856b5467544d6253565649435a4d47786d677b706a9e938fbcb3aeaea4a2d0c6c4 b5aab8c6bbc9d9d1e0e4deece4ddede0dcede0dcede2dff2d5d6d0e1dad2cbb9afb7978a c89f8bd7ab90d2a584cfa57fbb98848667538d7762c2b39eaa9d8c968679b8a49bac938e 8a77684c3a2c43332675685fb5aba2cac3bdb5b0acb4b0adb4a69b8b7d726153486a5c51 70625787796e4d3f34312318614c4b715c57907d6f89776188765e8c7a64806d5c968375 968a7c9084769387799d91839d918394887a928678978b7d91837a9b8d84a1938a9f9188 9a8c839a8c839c8e859c8e85625e5b1915121c18154c484594908ddedad7eae6e3e2dedb 6d7780848d947f868c5e6367464a4d3a3b3d2828281616161c1c1c171717222222363636 2f2f2f1818181717172a2a2a6666662727274141414444442626262222221010100f0f0f 231f202d292a2b2728211d1e2925263e3a3b413d3e3430312c2b272f2b28312c29342c2a 362c2b382c2c3b2d2d3c2c2d372b2d6e62628a7f7d6f646060554f7b7168887e7472685c 7a67587b68597f6c5d826f607d6a5b766354796657806d5eb5ae9ebdb6a6c7c0b0cbc4b4 ddde92e3d28cecc87eedcb84dcd9b0c7e3e4c8e0ecd5d7d4cee1e7d0dfe4d1dce0d3d8db d5d6d1d7d3c7d9d2bfd9d2b8d4d8b5dad3b6d7cbb3dbd3bcdfe1cbcaceb7a8a38f9c8b7b 9c9585938c7caca5959f98887c7565827b6b898272878070625a4f4d453a3d352a3c3429 433b305b53488f877cc0b8adb7b4a1c2b5a4cab3a1c6b19ebcb19fbab6aac6c3bed2cecf aea6999b918584786a685b4bb4a49483725e4b392569543f99827082634f7e5b47947467 9988815a55511c1f163337292e2e364d4d4f75767168665a6f6b5f534c425d544f837978 5a5c519193885051493a3b35bbbcb75d5d5d1f1f1f1d1d1f0e130f9193909c9b964b473c 4e4438968979d0bfadc6b4a0cab8a2ccb49ccbac8fc6a280c19b77bc9874bc9b7cbd9e81 9c846a8b735b927c65c5b09bcdbba7c3b4a1cabaaaa4978743342d7b6d608d7e69a9997f bbaa8ed9c3abc5af9ac5ab9ab8a088b49e86a6907992806aa1907ea595859c8f7f2c1f0f 32312f3433312d2c2a1d1c1a1615131f1e1c2f2e2c3938363e342b2f22193e3025766657 ac9b89bdab95baa68eb6a088b89d8ab09582a48976977c698d725f866b58846956836855 
875e4c8c71609f9589afb5abcad5cfcccecbb9afadd0bcbb7260563f2d1f48352463513b 7f6d5999857a9c8985b09c9ea8a199b2ada7b6b5b0bcbebbcbd0d3ced7dcc9d6dcd2e1e8 ece4e1ebe2dde5dcd7dcd1cbd1c4bcc4b7afbbada4b5a79ea0926fb19e7db69979b59171 c19676bd906fac7e5da174536247346147366f5c4bb3a292bba89a92756d916e6a8c6361 a78b757358434f3a29817164bdb4add1cdcadddddddddee0e1d8d1ada49d4b423b453c35 5047406a615a7067605a514a68585957484555473e5b4d407164537366566c5e518e8075 74685a75695b7d7163877b6d867a6c786c5e6d61536b5f5188736e9c8782a48f8a99847f 937e7997827d907b76806b664d4844130e0a322d29746f6b9e9995c8c3bfd3cecac8c3bf c2cbd2c3cad0858c923c4043262729222222201f1d2928261212121111110d0d0d0b0b0b 1515152424242b2b2b2929293636361d1d1d2828283636362a2a2a2323233b3b3b676767 817d7e4541422723243c3839423e3f2521221a16172d292a32312d34302d342f2c362e2c 372d2c382c2c3a2c2c3b2b2c4031343b2d2d695b5a8c7e7b6c5f59594c447669608d8175 6b58497663548471628c796a8e7b6c8572636e5b4c584536a69f8faea797b8b1a1bcb5a5 e1d663ddcf92e9c990fac962edd170ced7bacbdad5dedab4d2e1ded3dee0d3dbded7d6db d8d3cfdad1c0ddd0b0ddd0a6d5d9b8dad4bad9cebcd8d2c2dbdecbced1bcb0a59196806b 726b5b898272a59e8e9f98888a83738c85759e9787afa898a1998e6961563e362b3b3328 3e362b3d352a554d42797166a39e8ab0a893c0b19cc3b2a0bdb1a3beb7afc7c5c8d3d3db bab4a8b7afa46a605494887a8a7d6d443424705f4da2917dac927b977863906d5a7b5b4e 8872677e746b39392f2f322745464b3838387d7e766a685b7f7c6d8b85795d544d534947 52544980827764655d42433d91928d5757571111111c1c1e000300646663878681565247 71675baa9d8dd7c6b4cab8a4cdbba5d1b9a1d1b295c8a482bd9773b5916db69576b99a7d aa9278876f57816b54bfaa95d3c1adc1b29fc2b2a2a79a8a685b557064584c3f2c9d8f75 bbab91b19f87c7b29fdac3b5c7b19acbb79fbcaa94b2a18fb0a090918474978b7d493d2f 615c586c6763625d59423d392e29253c373355504c635e5a5f504d4738334f3e36806e60 ac9a86b8a48bb6a184bba4858a6f5a856a557d624d775c47765b46795e497f644f836853 715444aa9487d9d2c8c7c9c4aeb4b0c6c6c4d0c6c4b5a6a1ae9b9d9084849f9999adadad c0c0c0d5cfd1d5c9cdeddae0cdcdc3deded6d0d1cbdddfdadee4e0cfd8d5d9e3e2ced8d7 cdbca8c3b19bb6a28aae967ca98e71a78a6aa382619e7d5aaa835aae8760ab835fa98060 b0896cb59076b49078b3907a6d4d426e52478b7367998678a08a7faf928aae8983a37a76 af947f987f6b725d4c948274ab9e95a69d96c5c0bcccc8c5c7c2be9a9591494440302b27 322d2949444065605c66615d4a4041564c4b4d443f564d4471695e5a5245554d409b9386 8b7e75887b7285786f84776e84776e83766d84776e877a718c7d789485809586818f807b 91827d9c8d88a2938e9f908b6d66603e3731544d477d767087807a9e9791a69f9989827c d4dbe1d6dbdf8e92954546483f3f3f3635331f1b18211d1a1d1d1d242424212121141414 1212121c1c1c2020201c1c1c1616163131311e1e1e181818282828242424494949909090 5854552521221915163e3a3b4b4748322e2f3531325854553a3935383431332e2b312927 322827362a2a3e30304232335242452b1b1c46363684757281726d5e4f486759508a7c71 776455816e5f8673648673648b78698875666a5748473425aba494b3ac9cbdb6a6c0b9a9 e0c57ed6d68ee1d998f8c886f8c063e1ca5adbd682e8dab3dbdccedcdaceddd6ceded4ca e1d2bfe1d1b0e2d0a0e4d197d7dabddbd6c0d9d1c4d3d1c5d0d6c8cdd0bdb1a28d8b6c57 7a7363979080918a7a958e7eada6969d9686918a7aa9a292c7bfb48e867b50483d3a3227 473f3450483d3f372c28201565604d817a67a59d88bfb4a2c7bbafc8bebcc7c6cbc7ced6 c2beb5a5a1966f695dcec4b8665a4c584b3ba29282c1b1a1916f5495755ea483728a6a5b 93796c9e8e81635f53484e427c7d814e4e4c989a8f7a7869817e6dada799948b82797069 63655a8d8f84797a726d6e6871726d6060600b0b0b262628151a162f312e5a5954646055 aaa094c2b5a5d1c0aec6b4a0bfad97c6ae96c9aa8dc6a280c19b77be9a76c3a283c8a98c b9a187846c546e5841b39e89d9c7b3c3b4a1b6a6969a8d7d5f56516f655c3d3121a79b83 998b7172624bcebbaad9c6b8988670b8a692b5a492c0b0a0d0c3b3aa9e90978d812a2215 
72685f736960685e555a50475c5249675d54645a51544a417462607765618a7770a59284 b09b88a28a72876d52745b3d725945755c487b624e846b579077639c836fa68d79ac937f 99907fd4cac0bab1ac958c8dbeb8baddd7d7cecbc4c3c0b7daced8cec8d2d7dbe6cfdce5 d2dfe8dadee7cfcad1e1d6dce3e1d5eeeaded4cec2dad3c3c9c0afada290b9ac99a1927f c2a097b6948aab857aa78071b08575ba8b79bd8d79be8b76c89077bd8971ad7f68a07a67 a48475baa395cbb7accab8aeb48c8c84615dd4b9b2ac988f645246bba59acaafa67c5e56 8a786a9987798f7d6fa18f818f7d6f705e50948274a79587918c886e69655d5854443f3b 4b464255504c4c47434d48446a605e746a68473d3b584f4a9e958e7b73684941346c6555 6e615964574f5548404b3e3651443c6659517e71698d807867675f5a5a5254544c595951 5e5e566060586a6a6277776f847b746c635c706760857c75968d86c2b9b2d8cfc8b1a8a1 eaeef1e3e7ea9c9d9f6060606b6a68676360403b37342d272424242424242727272b2b2b 2828281d1d1d1313130f0f0f2929294646463a3a3a2424241e1e1e2424243b3b3b4b4b4b 262223403c3d403c3d201c1d141011272324322e2f28242531302c332f2c342f2c362e2c 382e2d3b2f2f3f3131413132493439533f41523e3f5d49487a67637e6b65715e57715f55 8875668875667f6c5d7764557e6b5c8774657a6758635041b2ab9bbab3a3c3bcacc6bfaf e1c794d8d4aedadab4ecce8ef6bf5af2be46eccb62ebd987e3d6b3e2d5b3e3d4b3e2d3b2 e2d2b0e4d2aae4d2a4e4d2a0d7dabfdad7c4dad6cdd0d1cbcbd3c8d1d2c0b7a58f846048 a29b8bcac3b3a29b8b8a83739992827d76667c7565aba494a0988dafa79c90887d4e463b 362e234b4338484035261e13281f16464334777463a39c8abfb3a7c9bebac7c8cac2d0d3 c6c3bc7d7a71938f86d5cfc36f6559ab9f91b4a797ab9e8ea37f5f906e557f5f507c5c4d a78876c5b09d96908261695e7476756a6b66aeb0a3838270615e4b75705d898176867d74 797b7097998e8889818b8c865e5f5a5252520505052121232328240a0c09403f3a6f6b60 cdc3b7c8bbabc5b4a2beac98c5b39dc9b199c9aa8dc7a381c49e7ac29e7ac4a384c6a78a c1a98f866e56634d36a7927dd9c7b3c7b8a5b4a494978a7a5e5653605750403929b1a692 867a62766752d7c6b692807483725ec1b09ea595857e71618b7f71877f72968e83251e14 5c4e43615348605247594b4056483d5a4c415d4f445b4d4283706a8b7871917d74907a6d 8f75668b705d8367527a5e489683749d8a7baa9788b9a697c7b4a5d1beafd7c4b5dac7b8 d5d7c9d2cfc8b3aaabbbaeb5d9ced4e4dbdce6e6dedee0d3eadde6e7e0e7e9e8edd1d6d9 cfd5d5d5d5d3c6c1bdd9cecac0b0a1c4b4a4b7a493c6b09bbea48bb4967acba98db89577 b8947ead8772a07a639d745ea1775fa77d64ab7f66aa7e63af7e7ab38784b79691b19a94 b4a29ec6b8b5bcb1af978c8a906d71957777e6d1ccbcaea58c8074c4b6a9a391835a4437 66584d8373669686799f8d7f7f6c5d5e4938725b498d76648b827b716861817871736a63 8980798279725e554e4a413a84766d80716a3f302b43343185766f827469675a496b5f49 6457516b5e586d605a6a5d576c5f597366607467616f625c3439321f241d151a131c211a 1b20190f140d0e130c191e17392f266a60578e847b9e948b9d938ab5aba2d6ccc3c8beb5 eeeff1ecedefa9a9a95655533c3835423b354e4741675e57424242272727171717202020 2727272121211d1d1d2121213737372626263535352727271616164646465959592b2b2b 0a06072c2829393536231f20110d0e191516231f20201c1d1d1c1826221f322d2a3d3533 4339384236364032323d2d2e493238614b4e5a4446574241715c59725d5867534c766259 897667837061796657725f507b68598c796a958273958273b4ad9dbcb5a5c4bdadc7c0b0 e5d8ace2cfc0deccb4ddcf84ecc96bfabf71f5c16de6cd56e0d595e0d597e0d699e0d6a1 ded7abddd7b5dcd6c0ddd6c6d7d9c1d9d7c8dcd9d4d0d4d3ccd6cedcddcdc7b19a8b6148 918a7adad3c3cec7b7ada696989181726b5b736c5c867f6f8b8378a79f949d958a696156 4d453a554d424d453a30281d241a192d2a234143366360518d8077b0a59fc8cac5cfe2de dddcd78c8c84a09d947d796e746c61c1b7abcdc1b3a69a8cc89d7bb5937a7c5f4f69493c 987764ccb19caca5934e564b2426254c4d477e80728d8c78afad98b0ab98bbb3a6aba398 999b909d9f94a2a39b989993696a653737370c0c0c18181a1b201c0a0c095a59547f7b70 d0c6bac4b7a7c6b5a3c1af9bd1bfa9cfb79fcbac8fc9a583c8a27ec6a27ec3a283c0a184 
c3ab918e765e644e379e8974d3c1adc8b9a6beae9eab9e8e726d6a2c251f3f372ab7af9a 7e735d8b7e6bbfafa042312790816ed4c4b4988b7b493d2f40362a50483d968f85504c43 6553456f5d4f7b695b7a685a6d5b4d6957497b695b9381738c796b857264806a5c80695b 8c7062a08375b8988bc7a798c2b5acc8bbb2d2c5bcdccfc6e1d4cbe3d6cde2d5cce0d3ca dcd7d1c6c2bfd3d3d3f8f9fddbdadfcac6c5e6dfd9ccc4b9d0b7b3d7c0b8d5c4b4b6a790 b4a68cb6a68ca08c74ad9780c2a38eb69782b1917cb0907b9e7e679d7a64a9876e9b7960 7f65447d62447d624482694b8d7559988366a28d72a79277b5a4acc6babed7d1d1d3d2ce d4d1cce2dbd5c2b3b07a65628e7e7fcabfbdbdb6b0a8a49bbeb8aca195877663557f6558 79604c7e675598816f9883728370616f5c4d6351437664569d8f8490827784766b73655a 84766b6d5f547b6d62685a4f6552437e6a5f76625b604b4648342d635041a7957db09f83 6658557e706d9688859c8e8b95878481737060524f4133303e3d3931302c24231f1e1d19 1c1b1717161213120e11100c1e11086a5d548b7e7591847b7c6f6660534a6b5e556d6057 868686979797807f7d45413e211c182c251f4e453e685e556a6a6a5252523a3a3a2d2d2d 2323231a1a1a1b1b1b2323232a2a2a0909091717171717174242428080806262623b3b3b 1a1617130f10302c2d5f5b5c5d595a2c28291511122420211918141f1b18282320322a28 3b3130423636483a3a4b3b3c573e4450373b533a3d634b4b674f4d5c4440634c467e675f 816e5f7e6b5c7f6c5d837061887566907d6ea39081b5a293c0b9a9c7c0b0cfc8b8d2cbbb eae0e8efd19be5c57cd5c69fe1cdaafccd7bf8c359e1ba5fdcd682dcd785dad88bd9da98 d8daabd6d9c4d4d8d9d4d7e6d5d7c1d9d7cadddcd8d2d8d8d0dbd5e6e7d7d5bda595674d 8d8676cec7b7cac3b3b2ab9ba09989908979898272625b4b7c746990887da1998e958d82 6b6358433b303b332848403545383f2f2e2c1e211828261954483c92857dc4c9c2e2f7ee cac9c4b0afaaa7a49d4b473e979086aea699d8cfc08c8072b38764eecab0c3a6987f5f52 7a5743a98d779d9483272e26292b2873746e6e70626a695494927b7f7a66aaa393c6beb3 bec0b5adafa4c6c7bfa7a8a28b8c872e2e2e2626262020220d120e1d1f1c84837e8f8b80 cac0b4c1b4a4d0bfadcdbba7c6b49ec4ac94c1a285c6a280cfa985d3af8bd2b192ceaf92 c1a98f947c6469533c9a8570ccbaa6c8b9a6cabaaac1b4a4797373040000484234c6bfac 68604b776a599686792f1e148a7a6abfaf9f867969564d3e595144564f459a968d68645b 685546675445766354917e6f9b8879907d6e8774658976677f6d597e69588f7868b19788 c9ada1cfb1a7d5b4abddbcb3d0c7c0d3cac3d7cec7dad1cad8cfc8d4cbc4cec5becac1ba bfafafddd7d7cdd3d3d3e3e3d3e1e1b3b5b2ac9f99a48d87a88474be9a82cdaa8abc996f c8a678d1b083b8976ec2a27bae846b996f5797715a8d6855765544785b4d7e6256755a4f 9984819d8988a69494b2a3a6c3b7bbd4cad2e1dae2e9e3eddae7eddfeef1d9eceac1d0cb c0c5bee3dad1d0b9b184665ed2d7d3f7fcf6b5bab3cbcec3cbc7bb69594c61463b714f45 a683678f6d529e8068947b678c796a83756a5a50475f5852c0aea0c0aea0877567675547 6452444533259f8d7fa18f81826c577b64548a72688e75706a5248a28b7be8d3b89f8b6a 5e504d7769668c7e7b8f817e887a77776966574946392b284d43444a4041382e2f241a1b 261c1d382e2f3c32333228296e6055887a6f6f61567b6d628d7f7470625763554a5c4e43 7d7d7d7171715b5a5847423e4e47416d645d766c6362584f4949495c5c5c6969695d5d5d 4141412d2d2d2929292d2d2d1f1f1f1c1c1c1616161919198a8a8aa4a4a42b2b2b252525 1d191a1b17184743448f8b8ca09c9d6864652a2627120e0f24231f211d1a1c17141e1614 291f1e3d313153454561515252393f4d3438563d40533b3b4b3331674f4b7b645e654e46 7d6a5b7f6c5d8b7869988576958273907d6ea18e7fbaa798aba494b3ac9cbbb4a4bdb6a6 d2d6c7d9d4b4d3d7b2ccdcc2d9d9b3f5cb7ff8bf4ee9b83feace84e5d08dded39bd8d6ad d3d8bad1dbc3d2dbc8d4dbcbdadfe2d6dcdcd5d9d8d8dad7dad9d5d6d3cecbc6c0bfbab4 a78377ccbea3bcc5aadce3dcc7c4bf8b856f95987b77867369715c6e72618b8c7caaa498 998c8466534d4c3432543a394f4842544d472b241e130c0628211b38312b746d67e0d9d3 d1c5b5796d5d9b8f7f988c7c6c6050a89c8cc1b5a58b7f6f96705d835e4bd2b3a1efd8c8 bbab9c8f85797470647f7f73737a735b625b1f261e2328212b2e252b2e235d5f546d6f64 
484a475355529395925a5c592426232b2d2a050704181a171716144f4e4c8d8c8a939290 7473716d6c6a706f6d605f5d5e5b544d4a4344413a4d4a4356534c605d5676736c908d86 a89d89584d396457448f806dcebdabb7a291b7a090988171a18c6f846f54948068a08f7d 6b5a5032231e2c1e1d352727918b7dc3bcac887f6e4134216958448d7b63826c54887358 796a558879648c7d6881725d81725d91826d9d8e799d8e79b0a29fbeb0adcfc1bed9cbc8 d9cbc8d6c8c5d5c7c4d6c8c5dad6cbd4cec2cbc2b1c5b9a3bfaf95b2a281a38f6c98825b ada799e6e4d8e4e6e1dce0e1bdbdbf9189879f8e87a78e87c7c0bacfc2bac2aea5a28a80 937a75a99395c8b8c2d9cedcc8d1d0ced7d6939c9bb4bdbcd2dbdac3cccbe8f1f0e0e9e8 e3edf6e0e7edf6f7f9f9f4f0ded0c7dcc6b9e7cbbddbbeaec8b8a9b2a18db9a487bea684 ae8d6ea680699f706681504c9c8a76a79581bca792b09a82846c526f543977593d785a3e d5a06eedcb69d7bd60cead5ebc9337ad7f43b08f70847939967c63a9937c877662726350 75604b8d6f53ae86629a6b41b67e5bb17b59a475579870568a6a558066557b685a7b6b5e 725d5c71625f89827c76766e6868606d68626557547b67664138332b262328282a25292c 191d1e2727253d39303b332860424aa16f568a4410a66837ae8b757469634e4838867c59 a6897b8e74597a63416b56396b55488370699080717e7055302a1e61594e857d727d7369 63594f5951465e564b625c5088806b68604d211c09454334838578787b72676e67505a52 242d34050e151b242b3d464d626b72656e752d363d1922290e13171b1f222324261c1b19 17120e2c231c60534a8c7f763d38352c2724241f1c2b2623302b282e2926322d2a3d3835 352b2a352b2a362c2b382e2d382e2d352b2a3026252c22213b282e3b282e3b282e3b282e d7dfbae1d9aae0d7aed6dacbdcd8ccefcd9ff5c16becbc50ebd08de6d295dfd5a4d8d8b4 d4d9c2d3dccbd4dccfd6dcd0dae0e0d7ddddd5d9d8d8dad7dad9d5d7d4cdccc7c1c2beb5 a77e60c8b28bcacaaec6cfccc3c9c9b9bba6a5a28379715a9093828283737b796c888276 93897f86777062514a422f29352e2858514b5b544e3c352f2a231d352e28544d47756e68 a5998b776b5d95897b9185776a5e509a8e80b8ac9e837769936e5b886552907361b9a292 ecdaccd9d0c1979183858175605d4e4643343a382c2a2c21181d16151c15111a15121d19 34353013140f1f201b1819140e0f0a191a15171813292a258f8a869d98949e99957f7a76 625d5966615d544f4b241f1b302d2629261f232019201d161b181119160f1f1c152a2720 726a55695e4a897c699889769685737e6b5a85705f7b64547e694e77634a77654f806f5f 7463594536311c0e0d1004048c7a6ea9978b9684787562547e685a846f5e7a63537b6454 85746a8f7e7499887e9f8e84aa998fbbaaa0c9b8aecdbcb2d0c2bfd7c9c6dfd1cee1d3d0 ddcfccd5c7c4cdbfbcc9bbb8c8beb5c1b5a9b5a898aa9b88a08e76998467927b5b8c7553 a99f93e2dbd1e0dfdac7c7c7c4c2c3cbc3c0c8bbb3b2a096ecf9ffccd3d9b1b3b2b4b3af cccbc6dee0ddd9e1e3cedcdfe0d9cfe4ddd3beb7add5cec4ebe4dad4cdc3d8d1c7ccc5bb bdbab3b7b3a8b1aa9aaca48fa899869583758e776f967d799a7c71a184769b7c6a8f705c 92715e96726288645879534a756653938270c7b5a1e1ccb7d0baa3c5ad95c6ac93bfa489 f5da8bf9da70e1b04aeda954eb9f54ab662fb1844dffed9dad8d66a07f5c886a489d7e5f a48163946d4cab7f5cb58561a67a53a87c57a57c5c9e795e93705c84645577594f6f524a 684a4c6d55558c7a78857c777a736d716662524340533f3e2c29241b1a181a1a1c1c1c1e 1b1716332824533f34583e31683b36a96848ca7840e599679d6954634545654a4380614c 8e786ab39f86d7c4a4bfab927d675c5c4742624e4363513d70665c73695f796f657a7066 746a606b6157685e546a60566b635087806e837d6d4c493a40403471746b797e775a5f59 27221e625d59beb9b5c9c4c096918d56514d1f1a16221d191216191e1f212121211c1b19 27221e463d36645a5175685f2c2221231918241a19322827392f2e342a292f2524312726 2e2423302625322827352b2a372d2c362c2b342a29322827362a2e362a2e362a2e362a2e dfdeaeecd69df0d1a2e4d4c5ddd7d9e5d3bdedcc87edc661ebd29ae5d3a1e0d7b0d9d9bf d7dbccd5dcd4d7dcd6dadcd7d9dfddd7dddbd6dbd7d9dbd6dad9d4d7d4cdcfcbc2c8c4b9 d9b384a88a56b0a07ed2d3cedbe9eabbc7b39a9174ad8e79918d82888478757165736d61 908a7eaaa2978e867b5c5449534a43302720453c356f665f5a514a2a211a231a13332a23 
64584a665a4c786c5e786c5e5a4e407367599b8f8175695b937462684b396e5443927d6c 928171b0a393d4cbbcafa5998a7a635d503d5f56452c281c030600101916020e0e051417 0708022526205758528889835e5f59383933878882cbccc6cbc1b8998f867b71686b6158 463c332a20172f251c3a3027201d1628251e2c292227241d28251e34312a423f384a4740 29220f4f47347065537e71606454447564547e6b5c816b5d6b573f7967516f5e4c736354 8d7f7680736d5c514f594f4d876c63846960846c62866f67866f698f7a75a7948ebdaaa4 c3b1afbfadabc3b1afcebcbad5c3c1d5c3c1d5c3c1d8c6c4cabdb7c0b3adaea19b998c86 82756f6a5d57544741473a34705f5776645a7d6a5c8671608e7863998169a48a6fab9274 8e7d73d7c8c1eee5e0b4afaca6a29fc3beb8bcb5abafa599effeffd6e4e5c9d2cdcdd5ca c9d1c2b8c3b2b1c2b0b9ccb9b5a38faa988492806c94826ea89682a4927e95836f978571 97877093836a7a6b4e7c6d4e9c8b6f97856f8d7a6ba49087ad8d5cc1a073b7986cb1916a c0a37bbba073b69e6ed0b985d3c7b9dbcfc1f5e8d8ffefdfebdac6e2cdb8dfcbb3d4bea7 d4c276e6c673ecbb54e8ac4ceca975c88a61ac793addac69a57b4bb48859b68655bd8959 b47e50a77248b17d56af7d5a9b784e9e7b539f7b59987559916c598862557b544d6f4843 6d494d785a5c988080998785887a776a5c5936282521120f19191711110f1311121a1414 2917154f3129754b3d7f4f3b884937c5764deb8d50f298668f48366935378756529b6656 655243a18f79e0ceb6d1bfab8571685e46446a534d7862558c7b7384766d85786f84776e 6d605753463d5749406e5d555249385e57479d97897f7b6f535145616157494a4235362e 1c150f4b443e867f79837c765d565039322c160f09130c06191a1c2727272a292724201d 302b27524b456c635c736a633020212f1f2032222338282938282934242539292a423233 302625322827342a29342a293228273026252f25242f2524241e20241e20241e20241e20 e5cf9df1c984f7c57eeccb9edcd7c4d8dbc6dfd8a1e7d37ae9d4a7e4d6afded9bcd9daca d7dcd5d8dedcdadedfdcdddfd8dedad7ddd9d8ddd7d9dcd5dadad2d7d5c9d3cfc3cfcbbf c9b485b5965fad8b5ebca796c5cbc1bfcdb6b4ae96b39187a29b93a19a928a837969655a 6c685c8987789695838a8b7951473e443a314c4239584e454f453c433930382e25281e15 3629204c3f364a3d3452453c3f32293f322972655c6e61586e57478b74647b6655826c5e 7a6758746353b7a597e9d9cac8b9a6a292828e82724c463a1411081d1e181f2420181e1c 15120b1e1b14322f28a3a099b1aea7838079aaa7a0928f889082777f7166685a4f483a2f 3b2d22594b4073655a6a5c5177746d7c7972716e6756534c4a474057544d65625b69665f 66614e7972606e65548579697467579c8c7d8e7c6e8876688a79698978687666576a5c51 74675e70655f6d645f7b737091796d927c719d8980b2a19abdb0aabcb2b0c3bbb9cfc9c9 cebdb6bfaea7b9a8a1c1b0a9bfaea7ab9a939d8c859b8a839f928a7d70684d4038291c14 1d100825181033261e3e3129523b336e574f937b6fa99284a7907e957c687d644e6f573f 68514b9f8c86cebfb8a99e98a19a92b8b4a9aaa69bb7b5a8d8d2c2b8b19f9f9983a39c82 b0a98daca7899e997b939071998471846f5c7c67546d58457f6a57897461695441735e4b 7163486f5f465e4c3868534099836bc0a989cfb88cddc694e4cc8ce9d097e7cd9ce4c99e d3b88db59b6ac2a970f8e0a0efebe0e4ddd3e4dcd1e2d8ccd7cbbddfd2c2eedeceebdcc9 93705099734fd4b75bded067e2d5a9efdabba97f4f621b056b3e179a6539c18553cd8c54 c58349c1844dcc9765d2a272a47d54a37c559a73528b664b86614e8463547d5b52715049 7a5d5f8870709d8988978885776e694c453f1e19130803001c18151f1b18271d1c311f1d 4a2924734337975644a35841944b38ce7952c96a32c368398f432b7f40378449398f5235 6b4b407457498d71638d766879625a7966608c7b748f817891787384706974635b5b4e45 3e3128322119443029624944857d706e66598f897d948e82686256353126100c01151106 1c20235a5e618d9194777b7e4044471e222514181b1b1f221c1b19282725312d2a2d2926 2d2926433e3a65605c7e7975412d2f4632344b3739493537402c2e3c282a422e304c383a 392f2e3b31303a302f342a292b2120251b1a2319182319180a0c0b0a0c0b0a0c0b0a0c0b e8c375f0be5df6bd4cefc466dcd29dcfddc4d3debededba6e4d6b1e1d6b6dcdac5d8dbd0 d7ddd9d8dde0dddee2dedee0d6ddd6d8dfd8d9ded7daddd2d9d9cdd8d6c9d7d4c5d6d3c4 
d8dbbee0c997be8f59ca9f7dafa38d757f66787b6a6350525d58547a7571928f88918f83 808171797a6880836e8a8e776a5d546f625960534a4f42394b3e3546393045382f50433a 34271f3f322a2e21193b2e2632251d251810594c4473665e6252437d6b5d766555998677 9f897b7a65549d8676cab3a3d0c6bcd2c8beb9afa5988e845e544a372d232f251b080000 27231a403c334a463d87837a6f6b62504c43908c837672698a7c714b3d3246382d7b6d62 9082778274697f716687796e6c6962716e6765625b4d4a4345423b5350495f5c55605d56 838071858273716b5d8d83777c70648a7c716f5e5476655b7d6c6262514767595081746b 867c738e877fa29d97aca7a1ccc0b0d5c9bbbeb6a9b6afa5ccc9c0c5c6c0acaea9abb0ac afa08da1927f9f907dab9c89ab9c899a8b788c7d6a8b7c6965584f56494046393045382f 56494070635a887b7295887f846c62856d63836b617b6558766052776453826f5e8c7a66 9b8681948079ad9b91b2a197bbafa3aaa295747064807e72938060a79476a7967c87755f 65533f63513d7b66558f7a6986786d786a5f8173686f61567d6f64807267483a2f483a2f 484532443b324536394a33396b5047b59870e5c77eddc160dfd6b9e4d8c2ebdbcecab2ae 80646162413a7b5a4b96735fdcddd8cdcec9cecdc8d1cec7cac6bdd0cabed6cec1cbc1b5 92686a714640af9349e1d974ebedbbe6e3c4ad95738553547e55417e5036a7714bd79b69 dc9f66c28a4dc49255e7ba7fb8825eb58360a677598f674d82624d7f64536e584b584439 6a5f5b7b706c827773766d68534c462c2922191a1211140b36251e4734304f3733573533 72433b955647ab5d47b25b40a15548c77658be6a3cce7b4fb76d50b8785fae7250985b2c a36f627f4f456c413a69473d694f427563557e726671685f8f6d6b725652422e271d0f06 1e10073b27205337335b39378e877d9992886e655c554d424b43383e342a53493d5f5549 7e7f81b6b7b9d1d2d4acadaf6364662d2e301f20221c1d1f1914101a1511241f1b2e2a27 302c29322e2b4443415d5c5a5440424f3b3d4a36384834364b37394935373d292b311d1f 382e2d3a302f3a302f322827261c1b1f15141f1514231918000201000201000201000201 e9c660e7c24febbf3cefc149e3cc80d3d7bcd2dacdddd7c1dfd7b3ddd8bad8dac5d5dbcf d4ddd8d8dededcdedddedcddd5dcd4d8e0d5dbe1d5dbdfd1d9daccd8d6c7dbd8c7dddac9 ebf8f1efe0b9cb9e65e9ba90b19c813c412a3843332e2f332e302d4f514c81827ca3a598 9e9f8f8486717c7c64838369ae9e918d7d706454475f4f426c5c4f59493c49392c59493c 4a3d37443731342721392c26392c26372a245649436f625c5d5347332a1b382b1b665545 644f3e634938937664ad8e7cbab0a7dbd1c8ccbfb6d0c2b7b3a3967666575f4e3e2e1d0d d6cfc5a59e946d665c5a5349484137696258948d8360594f3f352c4c4239675d54736960 6f655c6d635a594f4631271e302d2635322b39362f3d3a3347443d56534c5f5c555e5b54 4b493c6462557571658d877b72685e54473e483a316c5e556f605b4f403b6c5f59a69b95 bcb5adc8c4bbd0cdc4c0bdb4bab3a3c1baaa9892827d7a6b9e9a8ea9a79b929286909084 998a759687729a8b76a2937ea596819e8f7a97887396877280746681756784786a877b6d 897d6f85796b7e7264786c5e7e685d8470658f7d7198887b9d9080a09484a29988a59c8b 91827b817068907d6fb29f90c0ad9e95857661574d70695f8e7e677c6f5c5c5246433c34 4a423f5d51515545453d292a574c48504541544945473c385b504c675c583d322e3b302c 373a31343235413646412f3f4d3332977a58dbbf75dec25fc7cbbce1e3d6efeae4b3a8a6 6250506048487b5d5b6c4d4af2fafce4eaeae1e7e5e1e3e0d2d3cecdccc7c9c6bfb6b3aa bda4a7b18f85bc9262caa25dcfad7dbca583b5aa8ee2d9d2b29b959c8176a2826dac8564 ba8f65c09562b48952bd9359c38866c6906eb9886aa0765d8e6e598167565e4c3e392b1e 42423a56514b564d485547444233302a1f1b3229242e29235b3329734b43774c4675443f 8b5146a65e4fad5b45b1553c9a4d45a3553fb36541d78b67b16a4cd08f71f3b48bd2955f c08370a1655a915a5588594f75503e6a4e39654e3e57433a5b31325b393748312b2f1e16 301f174c352f62403e653b3c504b45554e481b120b362c23766960807267968679928273 9d8f849d8f8486786d7d6f647163585a4c4146382d26180d241b141d140d1e17112b2622 36322f31302e2424241b1c1e6858595242433525262818193323243f2f30382829271718 2c22213127263329282d23222319181f1514251b1a2d2322030504030504030504030504 
ebd488e0cf7de0c665f0c159f2c476e2d0a2ddd5b0e5d3a1dbd7b2d8d7b8d5dac3d1dcce d3ddd5d6dcd8dadcd7dddcd7d4dcd1d8e0d3dce2d6dce0d1d8d9c9d9d8c6dedbc8e3e0cd bcc1c7eadfbfe2c490cdac89a79787878b7a83907e5f6960919b9a899490868d8585887d 7e7f6f787762858168979177a29285ad9d9097877a6151445646398070637e6e6148382b 5446434b3d3a4b3d3a3d2f2c4234315b4d4a5749464f413e2f2b1f373123211809241204 594232664937775441d2ad9abba597d0baacc6b3a4ccbaacd7c7bab7a69c97897e988a81 aca4995c5449443c312c2419443c319f978ca0988d5c54491d181427221e504b4767625e 45403c221d1928231f38332f5b5851504d464b484154514a5e5b545f5c555d5a535c5952 6d6d618a8a7eaba99da4a095928980544a4151443c71645c867877786a69776c6a867d78 8f8a848f8c837f7f756d6d636e605374665978685b7d6d5e8e7d6d9582718471606f5c4b 79695a8171628575668171627d6d5e7d6d5e7a6a5b7565566d61536e6254706456726658 786c5e8377699084769a8e8088786986766782766882796a847e70848273838474838476 9f988ea59b8f9f8e7eb19a88b096857b6557544339473a3251433651473d4c4743454543 4444444944414c413b4b3d344638354638353d2f2c382a275648456c5e5b5e504d5c4e4b 3c3c3a403e41474047463940493735705b48b39d76e5cf9dd1d7c9e7eedeedf2deced0bb aeab98aea898b7afa4b4aaa1d1dfe2c9d4d8d2dcdedde5e7dde3e3e8ede9f1f3f0e8e9e4 d8d7d2e6dfcdd7bfb5caa499c09779b7997fcac3b9dae8d9b5b6b0a9a9a197938a645a4e 6a5844a3856bac8564aa7e59c18f6ac59773ba8f6fa27a6095725e8b6e5e6a4f4441291f 2e2b243f3631402c2b57393b5e3b3f53303459393c4a2c2e7c3a2c9555498c50467c4036 904d44a45b4ca5543fa54f36873026913f29964728b263458a3c26a55743d58a6dd58b64 ba83649c6551955c519964568e5c4583543a7c4d3d6e3d3846151858303061484358473f 4938304b322d623a3a764548443f3c0901000700005e544bad9f949e8c807f6c5e7a6758 6b5b4e7a6a5d7b6b5e97877a9b8b7e8373667d6d6069594c4d40374e443b4138312c2723 2d2c2a3737372324260002054c42415248474a403f322827221817261c1b302625342a29 281e1d2e24233127262b21202016151d1312251b1a2f2524040203040203040203040203 ebe0c2dad9bbdbcd9ef1c179fcc175f0cb87e8d384eed36ad9d7b0d6d7b7d1d9c1d0dbcb d1ddd1d5dcd4dadbd3dedbd2d4dccfd8e0d3dde3d5dce0d1d8d9c9d9d8c6dfdcc9e5e3ce e4dce7ded3b7e5d4a6d5c5abbfb8b2adb5aaa4b09ab3bfa9b6c9c5b2c1bca6b1a9969c90 8b8c7c8b8974968f759f957a8674669a887abdab9d9785774937295745378e7c6e8c7a6c 4d3f3c493b385a4c493c2e2b4436337466635446432a1c1929291d2f2b1f5f5549736354 8f7868ac8d7ba98471c8a28fc8a68dc2a38ec1a693b59f91d5c4bacdc2bca09895cac5c2 70685b261e11494134443c2f635b4eb4ac9f6c64570b030021201e2d2c2a41403e403f3d 3433314746446665636c6b69605d564a474043403959564f74716a8380798f8c859b9891 a1a195a9a99dbbb9ad9f9b90ada69c7269606b615872655d6153537468685e5452443c39 534e4864615869695f79796d8b746c765e5483685d8d70627b5b4c805d4a87624f724c37 6b59557b69657f6d6972605c6b5955705e5a715f5b6a58547c70608b7f6f9c9080a09484 958979827666716555685c4c817565887c6c948d7da8a594bcbdadc7cbbcc7cdbfc3cbbc b7b7adc5bfb19d8e7ba78e7abfa491a88e7f7d665e38252168524558473d564c43646057 69655a5b53404d3f244b381847342d56433c4e3b3454413a725f587c69626d5a535c4942 453c3d4c4441443b343d342b392f253429236b5d5ac9bbbbe2dff0dcdae5d2d3d5e2e4df eef3edcfd5d5c3c7d0e8ebfab3c2c9b1c0c5c5d3d6d8e2e4d6dee0dae0e0dbdfdeccd0cf e3ece9ccdececfdff6f9fcffe8e0d5d7c9c0e6dfefadb8b0cdddd0a7b8ae8798924b5554 31302c4f3c35724e40a87b68b9946abd9770ad8967967156926d5a977365835e56623d37 312823432f2e4323266d404787525c7f475279444e58252e8c3b28a35646944c407a362b 89453a9c53449b4d379d4b33a34432ce745ab35e41b9654da54d41953b33943c2eb15c47 ba8f6587583c723d2f864d3c9c6046a9694ea25d4e8c42437b474b63393a5034304c3b33 4a39314d312d6238397d494d4e4946040000281f1a776a629b8a809b897b8771639d8676 aba7a4b5b1aeb1adaaccc8c5c5c1bea09c999e9a97928e8b75685f8c7f76726962322d29 
bea788e1caabd7c0a1d3bc9de0c9aacbb495b8a182bea788ab9f91988c7ca19484b5a693 bdab97cbb6a1cdb7a0b59f88c9b9a98f7f6f7d6d5dbfaf9fc4b4a4ae9e8ec1b1a1a69686 c0b4a6b5a99b897d6f6f63555e52443024164e4234c1b5a7d0baaccbb6a37767506f6349 79715c645e52746e6e5f5a610a241b46575f5052672b1f29523d22c2a363fad27de2b45c ffc867db9a36eaa13beaa041d39548cea975695d45565b54e2dfd885827bd3d0c9bcb9b2 b0ada6bebbb444413a49463f150d00928b6eded1c0f0dacfb5956cca9f41e0a638ffd577 f4be68f5bf69f6c06af7c16bf9c36dfbc56ffcc670fcc670f2c270f9c16cffc068ffc16a ffc575f6ca8be7cf9fe0d2adefd7abe4d6bbe5e3d7e3dcd4dcc9b8f0d2b6f2d5b5cfba9b d56734c15a31d36b4eeb7d5ce06c3dd5672ad27935c7813cd77c47d87740d9743ed77747 d57b56d77b56dc7145e06833c3574abe5538ce673cd3683ac15227c65529d76a31d16922 b0501eba6b40d1916dd48a65c6653bcf6332d3723bbc6b33bf5d3abf5d3ac05e3bc2603d c3613ec4623fc56340c66441c97549ce7a4cd78354df8c58e29058e18f53dc8b4cd98847 c97048cb7248cc7647ca7745c67645bf7045b86a46b46547b77249b57047b06c45aa6943 a76743a36745a26745a3684894624b93614a915f48905e47915f4893614a95634c97654e 8f5e4d8b5c4c8557488054477d53477c544a7d574e7d58507a57516b4a436a4d456d5249 634d4264524664564957493c3f473a88847bb0a2999e8c80918172a19184ab928ea68389 ac9e93ac9e93b0a297ab9d928b7d725a4c41382a1f2f2116232019392b286f51517b5153 8a5e5f603b358b7369cabdadbda694bba492b9a08cb89d88b79b85ba9a81bb9b82bd9b80 c4a588c4a588c4a588c6a78ac9aa8dceaf92d3b497d5b699c0b192c2b394c5b697c8b99a dfc384e9c662ddad63e3baa6ddd3cad4d9d3e8e7d2e4dd99dd9948e1a245daa43ae9b550 fcc372e3a262d0854ce89960ecb565dd9f54d48d3de19b3bf5b84ef1c668eacf98f4e4cd e1d7b3e2dbade3ddbbe4ddd7e2dedfdfe1d6dbe1d7dadfe3e5e9eac9d2d1c8d7d4d6e9e7 daeaeadee9ede6e9f0e0dee9a18c79a98d77e2c2abcbafa1ecded5e5dfd16f654c958561 dfc451c9b68b3e3c3d2d372c80867894909ea29eada3a69bb19a92907d6ca2937ed5ccbd cdc7c9717078585b601f25211b1c172227234d53531f2523262722999387c5b5a5c1ac97 d4c5b0ab9c879e8f7a9e8f7a9c8d78a59681afa08bb9aa95a29282a0917eb2a18fb9a791 ab957eaa927aaf957ca58b72b8a8987d6d5d837363c6b6a6c2b2a2a59585b6a696bcac9c baaea0b9ad9fbbafa1beb2a4a69a8c7f73658a7e70bfb3a59f8d819d8d7e6d604d706853 78715f625e53635f5e4644491d2d23212c28302f2d5a4c41a48c68e8c687f8ce7ce5b65c eebd60efb758eead4fe19f49d29b58b5946b615849979c9ff4f1ea827f78b2afa894918a 928f88bbb8b159564f4a47405d5643d1cab0bcaf9cbfa89acfae81d6ab4feab044f1ab4c f4be68f4be68f6c06af7c16bf9c36dfac46efbc56ffcc670f4c26df8c26afbc269ffc36b fbc676f5ca85eccd94e6d09eefd49de6d4ace8e0c9ebdfcfe5ccb6e2c1a2e5c4a3e1c4a4 de7d39d8814ce59675eb9f85e09777e0a07adcb187c8ac84e5b68ce6b283e8b081e6b28a e4b796e4b694eaad81eda46fe7b293e5b18aedba8deeba93e4ac95e3aa99e6b098e2ad8d c15936c87d5ddeb08fe1b38fcc875dc76b3cc56334b25529c4623dc4623dc5633ec5633e c6643fc6643fc76540c76540bb633bbf683dc56e41cc7647d27c49d6814ad78249d78347 c97048cb7246cd7746cb7844c77744bf7045b96b47b56647ba724ab57047b16d46ab6a44 a76743a56645a46846a3684894624b93614a915f48905e47905e4792604994624b96644d 8b5a498859498557488054477e54487b53497a544b7a554d77564f6b4a436e504873584f 6b554a6d594e6e5e516050430c0d054c3f39886b65987a70988577a69c90bbb0aac9bbba d1c4bbc9bcb3b6a9a08f8279574a412e21182d201741342b2a251f3e2f2c6a4c4c744c4d 8458595e3b358a7268cabaabbda694bba492baa18db89d88b79b85ba9a81bb9b82be9c81 c3a487c2a386c3a487c4a588c7a88bcbac8fd0b194d2b396cab99bcdbc9ed0bfa1d4c3a5 caa360e4b559e8b461f7d08df4e5aae1ddc4e0d7c6cec07fca8335c7862ad59a32ecb550 f3bb64e7ab63e6a760f4b46ce79c72c87d46ca8035eba647f4b953ebb85cebc272f2cf8b eacf8cead38de8d7a3e5d8c8e1dbdbdee0dbdbe1dfd9dfefe4e5dfc9cec8c7d2ced5e5e4 d7e7e7dbe6eae1e6ecdcdce491806cb0967fe4c6aed1b7a8faece3e1d9cc675b43a3916d 
f0cc5ed9c1954b45451d271e727a6d9c99a2a29faaa6a79fb19f958a786a8f8271c3b9af dbd6d36a686d1d1e20030502282924424743575d5d040a083b3c37beb8acbaaa9adac5b0 999385484234696355807a6c6c66587b75677a7466756f61a18c77a8947cbca68fc1a98f b2977cae9074b49578b19275ac9c8c746454968676cfbfafc2b2a2a39383ab9b8bd2c2b2 d6cabcbdb1a3b3a799a69a8c8a7e708a7e70a5998bb3a79992887f80766c5f58485a5444 57544557554962615c55545228281c3d3b24756d49beab80ebcf9df2cc8eeabe73e9bb65 cda95dd3a85ad5a155d8a35dc398639d84665b5851929da3514e4735322bb0ada6c3c0b9 a4a19a95928b423f38514e47cec8bacbc5af968772887058cdac79e5b761f7be57eea94a f3bd67f4be68f5bf69f6c06af8c26cfac46efbc56ffbc56ff5c467f5c468f6c569f8c66f f6c775f4ca7ef2cc85f0cd89eed08aecd69deadbb4f1dec0edcfb3d7b391d6b08befc9a4 dd9c58e3ae7aebc2a4e3c2b3dbbeb0e6cfbde4d8c2ccc8afdfcebae2cab2e1c7ace0ccb3 ddd1bbe0d0b7e4c8a3e8bf91d99c70e2a674e5ac77ecb98cf9ccadf9cfb7f2c9adf4caa4 d4664fd68e76e4c5a8e2cea9cca073c07244c36037c05833ca693fc9683ec9683ec8673d c8673dc7663cc7663cc7663cb45a35b55c34b85f35bd6438c46c3ecd7545d57d4bd9814f ca7147cc7446ce7845cc7943c87845c17345ba6c46b66746bb714ab87048b26d46ad6944 a96744a76646a66746a5684995634c93614a915f48905e478f5d46915f4893614a94624b 8655448556468456478256497f55497a5248775148744f4776554e6a4c4471534b795c54 725a50755f547660556652471717153d28256e413c80514973574b736b60919489b1b6af a399907f756c574d444339303d332a392f26362c23352b22322b2541322f6246456f4849 7a5252613e38876c63c2b0a2bea795bca593baa18db89d88b89c86bb9b82bc9c83be9c81 c1a285c1a285c0a184c1a285c4a588c7a88bcbac8fcdae91ccb99bcebb9dd2bfa1d7c4a6 ecc09bfbcb8debbc6ee7ca62e1da70dbd7b4eadde7dfcfb8d78f47cd8a33e1a33ef2b854 e1aa50d69f4ddca855dca852ce7759bd6c37d68e3cf9b759f3b359e8ac54edb258e8b14d f4c668f1cd6fedd28de6d4bce0dadadce0dfd9e2e9d8dff9e5e1d5c9cbc0c8cecad5e1e1 d5e3e6d8e3e9dee3e9d9dadf90846eac967ee0c4acd1b7a8ebddd4d1c7bb71614ab19a78 ffdb8cf5d3b86359621b2423737b6eaeafa7aaaaa0b5b79fa5948a9b8d8282766ab6ada4 c5bdba3834331e1d1b292927353631808581484e4e171d1b696a65aaa498bfaf9fddc8b3 6a62553e36299d9588c2baadaea699bfb7aaaaa295a59d90b79f87b69e84bba186c0a588 c2a386c4a384c29f7fbc9979a797877a6a5aa79787c7b7a7b8a898a595859d8d7dc5b5a5 c3b7a9c4b8aad2c6b8d9cdbfd1c5b7c9bdafb2a6988e82747c756f726e656e6a5f726e62 6a685969675a6d6b5e605e51746550a7986ddecf8ef5de98efcf92e8c188e8bc7be9be70 dec69abc9d6eb89460d0aa79ad90669a8c715f62573d4b4b16130c2f2c25a3a099bebbb4 94918a53504929261f89867fe5e2d96962528f8167907653ceaa70f4c573f3bb5cffc160 f2bc66f3bd67f4be68f6c06af7c16bf9c36dfac46efbc56ff5c563f3c565f3c66bf1c771 f1c876f3c879f4c97af7ca79eccd7df6d995edd3a0e8cea9f0cfaedeb796d4a883edbd95 d6b17adcbf93e0ccb3dbcdc2dacbc4e1cfc3e1ccb7d5c0a5d8bebfd9bab5dab8aed8bcb1 d6c2b9d8c2b4ddb99fe1b18bb84830c75a3bc6633cd2805af2b390f2c29ae4b581e9b778 dc725ada947adcbda0d7bf99c99667c3713fd16d3ce1774dcc6b3ecb6a3dca693cc9683b c76639c66538c56437c46336c0643dbd613abb5f36bc6037c2663dcc7145d77c50dd8256 cc7347ce7646cf7a44ce7c44c97a42c27444bb6d46b86a46bd724bba7049b56d47af6945 ac6845a96747a86747a9684a95634c93614a915f488f5d468e5c458f5d46915f48926049 82514083544484564782564980564a7a5248754f46714c4475574f6b4d4572544c785b53 72554d74574f72574e62473e0f1b1b3f2c28854e47985c526e483f42382f383d363b423a 362d26352c25352c25352c253229222d241d2c231c2d241d3328244533315d41406d4b4a 78525167463f7f645bb39d90bea795bca593bba28eb99e89b99d87bb9b82bc9c83bf9d82 c1a285c0a184bfa083bfa083c0a184c3a487c6a78ac8a98cc0ab8ec3ae91c7b295ccb79a e5c183f8d095eac098e5c999dfd799dbd7cbe8e0edd6d3b2e9a566dc9a4ceeb154fac05f dfa84ed6a24eddae56d4a849c56f40c97a37e8a149f5b257e19e4fe29d50f2ab4feea73b 
fcc254f8c95defcf82e5d4b8dedad9dbe0e3d8e2ebd7e1fce8e0cbccc9b8c9ccc5d5dddf d5e0e6d8e1e8dbe2e8d5d9dc847c658f7e64c4aa91b49a8bb5a49ca99d916d5b45a08665 ffe098ffe7c98a7d841d25285c665d999d8f9e9c8fcdccb8afa39771675e8e837dc0b7b2 9c958d3e39334d48444a46456c6d68969b97292f2f313735a5a6a1b7b1a5c2b2a2c8b39e 6757486a5a4bc0b0a1c1b1a2ae9e8fc2b2a3b2a293c9b9aac9b199c8b096c1a78cbda285 c1a285c09f80bd9a7abd9a7ab0a090978777b9a999bfaf9fb4a494b2a2929f8f7fb0a090 b7ab9daea294988c7e978b7db5a99bc1b5a7ada1939c9082504c435a564d565247635d51 645e4e625d49767056746f52e7caa8f4dba2f2db8de0c777dbbc7ce7c38febc692e2be84 e1d0c6b7a4938e765c9f8667978464afa88c8b907c3e4c3d39362f4b48415a57506b6861 97948d7b78715b5851b5b2ab7a7975393228a39379c0a675efc887f7c67aeab15af6bb57 f2bc66f2bc66f3bd67f5bf69f7c16bf8c26cf9c36dfac46ef6c664f4c666f3c66bf1c771 f0c775f2c778f2c778f4c776eaca7df8d996eacb95ddbd96edcbafecc7aadeb08cdfaa80 e0b17bdcb282dcb898e2c5b3e4ccc2dcc1b8d9b8a7e2bca5e2bebee3bab4e4b8afe2bcb3 e0c2bae1c1b4e7b99febb18bba4136c65140c25743ca7c66e7b7a1e5c6a9d2b387d5ae75 d57b58db9b78dbb08ed5a57dd18554d06d34da7338e9894dcc6c39cc6c39cb6b38c96936 c86835c66633c56532c56532cb6f40c86b3fc3663ac06337c2653ac86b40d07348d5784d ce7648d07846d17c45cf7d43cb7c43c47743bd7044b96b45c0724cbd6f49b86c48b26946 ad6745ac6748aa6848aa674a96644d94624b915f488e5c458d5b448e5c458f5d46905e47 82514083544484564782564980564a7a5248754f46714c44785b536c4e466f514972544c 6b4a436b4a436948415837300014183a2b268d5548a05c4f693d343c2d282d29261d1814 231c162c251f39322c423b3548413b47403a3f383237302a332421463231563c3b70514f 7755536d4f477558509e8378bfa896bda694bca38fba9f8ab99d87bc9c83bd9d84c09e83 c2a386c1a285bfa083be9f82be9f82c0a184c2a386c3a487bca488bfa78bc3ab8fc7af93 b8934fdfb97ae6c1a4f2d5c3f2e4bfe5deccddd9dab9b99fe5a975dca15deeb460fcc068 eeb462efb868f7c56eecc061d89143e19b45e9a64be19b46d1873edc9045f6a94dffb547 ffc452f9cb5ceed185e2d6bcdbdcded7e3e3d6e3ebd6e0f9eddfc2d0c8b3cbcbc3d7dce0 d6dfe8d7e0e9dbe3e6d4dada746e56807056ae967c947d6d8d7c748d7f7466503b896a4b efac55fed2a19b8e851a22253741406567626b6766c6bfc6c6beb3362d28b0a6a5c4bab8 797268787266766f675c5452b0b1ac696e6a000606191f1da3a49fdad4c8c9b9a9d3bea9 776557897769b2a0929c8a7c907e70a08e809b897bbdab9dad9883bca890bfa992bba389 bca186ba9c80bfa083d0b194c4b4a4c0b0a0cebeaeccbcacc8b8a8c8b8a8bcac9cbcac9c b5a99ba296888c8072a09486cbbfb1c5b9ab9c90828a7e704f493d756d625951466b6253 7f725f7b6d50ab9f77baad80fad9a3edce8ee2c67de2c780e3c68eddc096dbbe9cdec1a1 cfc6c9c6bbb969584e5948366d5e49948e788a8d7a5e675655524b6f6c654a474059564f adaaa3939089423f38413e371e1f2177726ead9c7ecdb172fbd189e9b470eeb564ebb44f f1bb65f1bb65f3bd67f4be68f6c06af7c16bf8c26cf9c36df6c568f6c569f6c569f6c46d f3c472efc579ecc67fe9c682e7ca8ef1d09ae6c497dab99ae6c7b2f0d0b9e7ba99d9a279 eea86aeaa469e6a474efba9af1cdbddfc3bfd7bbb7eaccc4e2c8bbe3c4b0e4c1abe2c5b3 e0cabce1cab8e6c1a4eab990c15f30c86134ca6a40d48d6de6bfa2e7ccaedeb790dda574 cf885ee3ab86e5b493db9b75e08453e2753adb7632d7833bd07139d07139cf7038cf7038 ce6f37cd6e36cd6e36cd6e36d1743bcf713bcb6d39c86a37c76838c8693bc96a3ecb6c40 cf7745d17a45d27d44d17f43cd7e43c57842bf7244bb6d46c2724dbf6f4aba6c48b56947 b06746ad6648ad6849ad684b96644d94624b915f488e5c458c5a438c5a438d5b448e5c45 8655448556468456478256497f55497a5248775148744f477a5d556b4e466b4d456c4b44 623f39643f39653e39552e290a202b3c2a28894b3c95523f5d32294133324b4243473938 2924201b16120c07030d0804211c182e2925231e1a0d08043422204834334f37356f5350 73565274564e6e50488d6f65c0a997bea795bda490bba08bba9e88bd9d84be9e85c19f84 c4a588c3a487c0a184be9f82bd9e81be9f82c0a184c1a285c4aa8fc7ad92cab095ccb297 
ca9762e7bc6de2bb80e1c095e0cd89e1d49de7dbcdc9bfbdd1a077d19f64e0a85defb262 f6b66cfcc077ffcb78f8c96df5bf5befb554df9d47d68c41da8c44dd8f3deea33effc04d ffc75bf9ce66ecd68de0dac4d7dfe2d4e4e3d5e4e7d6e2f2f0e0bcd3c7adcfcbc0d9dce1 d7dfead8e1eadce4e7d5dbd97d7b62a99b80c8b398a58e7eae9b94ad9c9279604c987758 ab5e0ed2a06da092853a414959646670756e55514eb0a9b1bfbbaf5c5451ccc3c8a09896 5b534877705e81786fa19595b2b3ae424743040a0a121816646560a39d91a29282b7a28d 3c332c332a233930293e352e433a33463d36433a334d443d6e5e4e8475628c7b6993816b a69079a78f77a68c73b59b82b1a191baaa9abaaa9ac8b8a8c6b6a6b8a898c2b2a2c9b9a9 bbafa1877b6d85796bbaaea0d3c7b9b9ad9f8b7f71695d4f645848baae9e9a8d7dc6b5a3 e5d1b6bca981e8d39eecd89bddbb73dcba7ae7c690f2d6a4e8d0a2d1bd9acdbba5dbc9bf d8d5d0d8d1c973695d5e50434f4134574f4458554e2f302b46433c56534c3c39326d6a63 a6a39c716e672f2c251a17102b2e35b0aaaab3a382ccb067e8bd6eeab573f4ba70f1bc56 f0ba64f1bb65f2bc66f4be68f5bf69f7c16bf8c26cf9c36df6c46ff9c36bfbc269fdc169 f6c171eec37ee4c58cddc795e3caa2e1c19ae5c1a1e5c4b1e0c5bce4c8bce8c1a4e1ac84 e6a97af0ad78eba66fe8ad81efc7ade3cbbfd8c4bde5ccc5d9c9b0dac5a8dbc3a7d8c6ae d7cbbbd9c9b9dcc1a6e1b995cf8f52cb8047d48857dea37be1ba99e7c19ceaab7fe38954 d59975f0c3a2e9c2a5d49c7be38c61f38b54e58845d78843d87a3dd87a3dd87a3dd97b3e d97b3eda7c3fda7c3fdb7d40d87b38d97b3bdb7d3fdb7c42d97a42d57542d1713fcf6f3f d07846d27b45d48044d28142ce8042c77a42c07443bc6f45c4724dc2704bbc6c49b76847 b26747b06649ae6749af684c96644d94624b915f488e5c458c5a438b59428c5a438d5b44 8b5a498859498557488054477e54487b53497a544b7a554d7b60576a4d4567494168453f 603b35653c386b3f3c5f322f171c323f1c2095513eb371596d4d3e34302d323031322421 3f3b3849454246423f2f2b281d19161d191625211e2925223c27264c373649312f69504c 6b4f4b7659516a4c4487665dc1aa98bfa896bda490bba08bbb9f89be9e85be9e85c19f84 c7a88bc5a689c2a386bfa083be9f82be9f82bfa083c0a184cbb095ccb196ceb398d0b59a d79a57f7c65df4c678f1c699ecce85e9d481e8d7acc5b5a6c59a77c89a66d09b57dea057 eeae67f6b772f3ba6beebb62ffdb6befbd5cd59345dc8e4cef9f56e5953ce29a2cffc146 ffca63f9d16febd894dddcc8d6e1e5d2e5e3d4e4e3d5e1edf3e0b8d6c8add1cac0dcdde2 d8deead9e1ecdbe5e7d4ddda75755bbeb397d5c0a5a99282c6b3acbfada3775c49977456 893400ba8462b19f9d747a88b0bcbcb9c1aa6d6d55b0a99fa4a094b9b4b1d3ccd3716869 443c2f4139267e746af8ecec848580454a46343a3a3f454331322d3731255747375f4a35 b9b9bb7f7f815e5e607070726e6e7059595b4d4d4f3535373f33254a3e2e493c2c594a37 7d6b577f6a5568523b644e377b6b5b8c7c6c867666a69686a39383857565a49484b9a999 a79b8d584c3e6d6153c1b5a7ccc0b2aea2948a7e705c50426a5d4ad3c4b1988574c0ab96 e3ccadad9567e3c98eeed38eedca76ebc888e5c499dec29ddac49fd9caabdaccc1d8cbd2 c8c9b7b5b29f817a68978b7d60534b564b49635961160e1964615a45423b3b383199968f aaa7a047443d1d1a1319160f5b5f68a29e9fc4b190e5c87cdfb261ffc98ceab16aefbb55 f0ba64f1bb65f2bc66f3bd67f5bf69f6c06af8c26cf8c26cf5c573fac26dffc068ffbf68 fabf6feec283ddc595d5c7a2e0cab2d2b49ae5c1abf1d3c8dac5c2d3bcb4e5bfa8edb991 cdb39ce9c19ee6b081d8a475e3bd99e5cbb4dac1addabea8dbcbb2dcc7aaddc3a8dbc6b3 d9cbc0dbcac2e0c1afe4b89fe8d7c3dbbca8e6bcaee9c9bed8c5b7dabea8e1a37ed37444 e0a88dfad4bde3c9b2c2997dda926afb9f6cf89f63e89c5ede8042df8143e08244e18345 e38547e48648e58749e6884ae2843ce68840ec8d49ef9050ee8e52e88750e1804bdc7b48 d17a45d37d44d48042d38241ce8042c77a42c07443bd7044c5734ec2704bbc6c49b86948 b36747b16649b1674aaf684c97654e94624b915f488d5b448b59428b59428b59428c5a43 8f5e4d8b5c4c8557488054477d53477c544a7d574e7d58507b6057694c4465473f66433d 6039346a3e3b7346436b3b393d2e4d4a1821934634b270565f4534121912111a17241d17 1b1a182423212827252a292736353342413f3d3c3a2d2c2a422d2c503b3a422d2a624a46 
634a457659516b4a4386645bc1aa98bfa896bea591bca18cbb9f89be9e85bf9f86c2a085 c9aa8dc7a88bc3a487c0a184be9f82be9f82bfa083c0a184cbad93ccae94cdaf95ceb096 a56e13dec491eefdfac5e2e6e3dfbaeec877f2d5879fa371b49198a98471ae8557c89867 e1ac80e6b481e2b967e0bf4ef0d993ebbd72e49d4de39237e9a03af0b044f6b746f8b544 ffd259f3ca58eed076f0deace7ded5dedde5dee4e4e0eddcd4c9d7dad8e3d4dde6dbeaef d1e3e5d1dfe2e1e5eec3c1cf5d6e5bb9a999b0897a987c6ebfc1b3a0ac9879654cab7455 b45d0e9d5a25c5a187cac7b4abbba1abbba1727668a1989be5e8fbf0f3fa6e6f6a2a2925 4641483b353f797574f0eee11c22201b211f4e54525f65633a403e121816242a28777d7b d2d4c7727467646659787a6d86887ba6a89baaac9fa2a4979694859593848f8d7e848273 76746567655655534447453633282c2b20241b1014170c10271c2033282c2d222622171b 120f0824211aa7a49dd0cdc6bfbcb575726b4d4a4375726b6f6f67d8c4a1a28468cfbcbe e0d6ccc9b173f6cd7debba80ecdc91e7d4a9e1d1addad294d2ce8ed0c8a4d4c8b8dccfbc d5cebcaea795b2ad9accc9b6b6b5a18487726b6e59656a5479837b6c6e6368615171624f 8a796787796c51484127221f7b7774b8a48bddb47ee5b468f0c173eabe73e2b36dedbb74 edd37ff4c985f9bb7ef9b061f1b147e9bd54e8cd8aead9bdfcbd60f4be5ceabc6ee1bb8c dcbf93dec88ee2c9a0e7c6c1dbc2acdcc6a1dcc5a5dec4b7dfc4bbe0c7b3e1c7b6e2c6c5 cca063ebc0a0e0ccc3d7b184efa25ce8bdaddacfddefc3a0e0c5badbc0b5d7bcb1dbc0b5 e2c7bce5cabfe1c6bbdcc1b6ddc8b3dcbab1e4c7bfdcd2b7d1c095e7b286f1a37fd78668 d7c4a3f2cca7dea175e59e68f0ab68dc964ee3904ae48543ee8c4be78544e98748f29051 f08e51e27f44dd7a3fe38047e07d3ae88b46eb9751ea9b56f0a35ff6a667ed995bde854b cd7939da8648d88446d68148dc8750d17c46c26c39c7713ec27146bf7047bc6e47b86d46 b56b46b16845ae6846ac6746a1654b9e6248995f47945c459059448d5a478d5a478b5a49 91604f8a594a865849825b4c7955476d4d406e52467a5f547864595f443b5e3b356a413d 6337345e3531643f3963403a2024272c222b945039cc6f436c463315161812110f161417 0a0a0a0d0d0d1111111212121414141919192222222a2a2a492d2964413f352f234c4d3f 7b55546f474777605a6f4747c2a68ed8bca4c6aa92b3977fc3a78fc4a890bca088ccb098 bb9d7bbea07ebea07eb99b79b39573b49674bd9f7dc5a785b99d88bda18cc4a893caae99 eeca8efbd6a9eed1b3d2b490e6b674dba24be1c488aab9a6ad8f8da98970b18d5dc79b6a d7a67edbaa7fdfb771e8c664efd485edbe6eeba550ea9b3ceca33af0b140f6b745f9b746 ffcb40f2c351e7c77ee4d4badddce1d7e1ebdcece9e5fae7e2e8f4dce0e9c9ccd1d4d5d7 d9d9d9dfdfdfe5e6e8bbbcc08a937ec6b7a4d6b5a6cdb7acd1d5c7b5bda8997e61a76847 e49036d49047d5aa7dc1afa197949b999ca570707089877bebeaf09e9f99393c2b2a2e1f 4a4c49494d504d53515a6159182120182120505958656e6d3b4443131c1b37403fa3acab 63645c25261e21221a22231b21221a393a324041394748404b493d5c5a4e76746889877b 8c8a7e8583778381758785798d847d867d76847b74766d667168618e857e8e857e605750 39362f6d6a63afaca594918a4e4b4434312a817e778c89826a685ccfb8989b7d61d6c4c2 dbcfbfceb575f6cb7cf5c48ad3c79fdac9b9e1cebfe3d2a7dccc99cfbea2c2b0a4b8a89b f3ebdeb4ac9fa6a092bbb8a9a2a0917d806f9b9e8dd1d5c48c8f8488887c4640324a4132 65594b84786c8c8279150c058b8175bb9e7eddaf73e7af64e9b667e8ba6fe7bb74eebe76 e8cc79efc87df8bf78f8b763f2b751edbd59ebca7dedd3a0f7bc60f3bd63eabd7ae3bd99 dfc0a1dfc799e1c7a4e3c5bddcc3afdcc5a3ddc6a6dec4b7dfc4bbe0c7b1e1c8b4e1c5c4 d7b88ce0ae89e3bba1dfc2a0e1ae7fe9ae90e7c1b6dfc8b6e4cabddec4b7dac0b3dbc1b4 e0c6b9e3c9bce1c7badec4b7e0cab5dfbdb3e2c4badcd0b6d9c499e3ad81e79b79db9073 dbc3a1f0c4a1db9a70e29662eda261de914de28b48e37f41eb894ae38142e17f40e78548 e78548df7d40e07d42e78449e07f3be98c47eb9751eb9c57efa460f5a767ec9b5cdd874c d17d3dda8648d37f41d07b42d9844dd17c46c26c39c6703dc27146bf7047bc6e47b86d46 b56b46b16845ae6846ac67469f63499d6147975d45925a438f58438b58458b58458a5948 7c4b3a7b4a3b815344855e4f7c584a6a4a3d63473b694e436a564b5c4138623f396c433f 
6438355f36325d3832502d272024272d232c95513acd70446c4633141517100f0d131114 0a0a0a0505050000000202020909091010101414141414144a2e2a654240332d2148493b 7b55546f4747765f59704848a68a74ccb09acaae98bfa38dcdb19bc6aa94b69a84c0a48e cfaf96cfaf96ccac93c4a48bbb9b82b8987fbd9d84c4a48bc8ad90c9ae91cbb093cdb295 dab58be1a978d2915bc68a4cd99e52cf9649cfb082a1a29ca59188a68e72b19264c49b6d cb9e77d1a179dfb678f0cd79edce73f0c167f3b256f3aa44f0ad3cf1b33cf7ba47fbbd4e f5c53febc564e4cca0e4d9d5e2e1e7d9dad4d9d9bfe6e2bce2efe8d6dbd5bdb5b2bdadad b7a4a6b1a7a6b0b0a881897a7a7a607e6d598b7065877a727e817695937ebb9475c77b57 cc7a2ed58e3cc99448cba679b9a09ccdbccfc5bdccc9c8c6a7a3a24d4f42282d17343e26 475345535f5b5965656e7a7a5660625c666889939597a1a38892947d8789869092bbc5c7 3b3b392d2d2b3f3f3d3e3e3c3e3e3c4343412a2a282424221a171216130e1916111d1a15 1b18132825205956518e8b86a39c8c9a9383a39c8c9a9383918a7ab4ad9da19a8a453e2e 64615a9a9790827f7848453e2320195c5952dbd8d176736c726c5ec8b192977b63e2d1ca d0c1aad3b474ecbe71f5c78ccac2abdbcac0e9cfc0eccfa7e5ca95d4bc98b6a5939b8f81 e1d8d1908780534c44524e456e6b628d8f849a9c918d9085e3ddcd8c897a1d1b0f211e15 423e3581786fbfb1a6766458a38d75bc956adfa565e8ab5be0a959e4b66befc37aefbf77 e5c271eac471f2c46cf4c165f4bf61f1c063f2c46ef3c975f1bc60eebd6ee8be8ee4bfad dfc2b2dfc6a8dfc5aadfc1b7dcc2b1ddc6a6ddc6a7dec3b8dfc4bbdfc7afe0c7b1e0c4c0 e3d2b6d6a27be5a87be8ceb3d6c1ace7a072f1ac82d0ccc9e8cebfe3c9baddc3b4dcc2b3 dec4b5e1c7b8e2c8b9e0c6b7e2ccb7e5c3b9dfc1b6dccdb0e2c79cdda37bd99070e09f83 e3bf9feab895d89369dd8b59e79357e18748e38144df763ce28043db793cd87639db793c dd7b3edc7a3de28043ea8849df803ce98e48ec9953eb9e58f0a561f7a969ed9c5ddd894d d98545da8648cc783ac9743bd6814ad17c46c36d3ac46e3bc27146bf7047bc6e47b86d46 b56b46b16845ae6846ac67469d61479b5f45955b439058418d5641895643895643885746 85544384534487594a8760517a5648624235573b2f5b403557433853382f5b3832603733 5d312e653c3867423c54312b1e22252d232c96523bce71456d47341314160c0b090e0c0f 0606060303030000000000000000000404040a0a0a0e0e0e4c302c6744422f291d424335 7953526f4747745d57724a4a967a65c1a590c0a48fad917cb79b86b79b86b39782c5a994 c7a88bc7a88bc6a78ac2a386be9f82c0a184c6a78accad90dcc29fdac09dd6bc99d2b895 b98754dc9a5ed5975cc8a774d4c493e2cd98e8cba3b99e8ba99793a38e7ba98d68ba956b c99c73d2a473e0b873f1cd75ecc965f2c667f9c162f9ba53f4b744f1b942f6be4ffbc45f e2c663e9d191eddbc3f4e7dff4e5d0e0c49cd3a66fdca469dbcfa5dfd1acd5c1a9d1bcb7 bfabb4b3a9b1babcb197a186574d323d2e1b3b2d2438332d2f30225a4a31986240a55029 c06e3cd58b42c58426e9b257e1b576e3c3aae4d1d3dacfdf7c7d7f595e586672646e7f6f 7181766a7674727a7dc7cad3c3ccd1cbd4d9d7e0e5c5ced3d2dbe0e2ebf09ea7ac5b6469 1515171d1d1f3535373b3b3d5555576767694343453333352925241e1a191a1615171312 0a0605110d0c4e4a49969291918a7a8b8474908979979080a29b8bafa8987d7666221b0b 39362f8e8b846d6a6325221b2a27208e8b84f7f4ed5e5b54867b69c7ad949f846febdbcc c5b593d4ae6dddad63e8bb80e1d4a8ecd4b0eac7a1deb682d9b77ad6bf8dbeb5989fa092 ddd3d1574d4b0700001d181435322d34352f4c4d476d7069c6bea95b55451e1e14090b06 3536318d867ea69689ab9484b29472ba8c5adc9d58eca95adfa456e4b46af2c67deabc6f e2b96ae6bf64edc661f2c866f4c56df6c26cf7c15ff7c154ebbd65eabe7de7c0a1e3c2bd e0c4c0dfc5b6dec3b0ddc0b2dec4b7dec6aadec6aadec3b8dfc4b9dfc7addfc7afdfc4bb ead5c0d5b093e0a273e9c2a1dbcdc0e3a375ea9b62d0c9c3e9cfbee5cbbae0c6b5ddc3b2 ddc3b2dfc5b4e1c7b6e3c9b8e3cdb8e9c7bbddc0b0ddc8a9e8c59bd99973ce8b6ee4b098 e7b998e3ab86d89165da8451e2834be17d42df763ddb6d38d37037d16e35d26f34d57336 d97738dd7b3ce2803fe78544e0833ee89049eb9b54eba15aefa963f5ab6aed9f5fdc8a4e df8b4bdb8749c87436c36e35d37e47d17c46c36d3ac36d3ac27146bf7047bc6e47b86d46 
b56b46b16845ae6846ac67469c6046995d43945a428f57408b543f885542885542865544 9564538e5d4e8a5c4d845d4e7753456545386145396a4f445f4b405b40375b3832532a26 502421633a366d48425a37311b1f222b212a96523bcf72466e483512131508070509070a 0101010808080c0c0c0606060000000000000c0c0c1919194e322e6a474529231738392b 7751506e4646725b55744c4c8d705ec0a391c4a795af9280b99c8ac1a492c7aa98dfc2b0 caac88c9ab87c6a884c3a581c2a480c5a783caac88ceb08cd4b99cd0b598caaf92c4a98c bf9a65e3ad6fd9ac73d4cfa9cbdfc3d4debce2d3b2cdb39caf9ea4a18a849f8169b48c68 cea06fddad6de7ba65f1c763ebc864f2cb6ef8cc73f8c866f3c053f1bf50f2c462f8cb78 e6cb86f4daa9f5debcf6debaf4d59fd7a566c27c40d07a47c89043d5a660d6b484e3cebd d9cfd8d4d3e1dce3dcb8c5a9a191789588779a9389a19d929d9280a48165af6b48ab4f2a c97e47d7894bc07126f3a94ae3a540c79846c3a275bda3967c80815f676a919fa2becdd0 c5d3d4b6bbbfa6a1a8d4c8d6dee1e6e5e8ede6e9eec4c7ccced1d6d8dbe0777a7f13161b 6060625656584c4c4e2b2b2d3333354d4d4f3f3f413e3e403a36353531303c3837423e3d 363231332f2e625e5d9d9998847b748e857e8a817a948b84a69d968f867f625952524942 64615adad7d0d4d1ca4c494228251e76736cc2bfb86a676095856ebea28dab9180e8d9c2 c5b083dcb16de1ac66dfb279ecce98e8c38fcd9e70ae7e50b28a59cbb68bcbc6b2b5b9bc 766c6a453b39261e1b2b262247443f6a6b6573746e5c5f585d5540655f4f65655b3b3d38 696a65b3aca49c8c7f988171b49670b68856d59653eba75ce3a75ee6b66eefc378e5b769 e3b464e5ba5bebc258f1c763f6c672f9c270f9bf5cf8bd47e7c16ce6c18ce2c1aee0c4c1 dec6c4dcc5bfdcc1b6dbbfb1dfc4bbdfc7afdfc7addfc4b9dec3b8dec6aadec6aadec4b7 e7c7b2deccbed9ab87e3a87ee9cab5dfb38ed69963dabaa3e3cab6e3cab6e1c8b4dec5b1 ddc4b0dec5b1e1c8b4e4cbb7e1cbb6e8c8b9e0c1addfc5a4e8bd93d8946fcf8f74e4bea9 e7b290dca27add9668dc8653dc7942df7440db6d3ad56938c5612dcb6831d16e35d67338 dc7a3be1803ce2813ce1803be0853fe9924bec9e56e9a35befab64f5ae6ceca160dd8d50 e38f4fde8a4cc97537c36e35d27d46d17c46c36d3ac46e3bc27146bf7047bc6e47b86d46 b56b46b16845ae6846ac67469c6046995d43945a428f57408b543f885542885542865544 895847845344845647825b4c78544669493c664a3e70554a6a564b664b4265423c5b322e 53272461383466413b4f2c2615191c271d2695513ad073476f4936121315070604050306 0202020303030505050505050505050808080e0e0e1313135135316c4947231d112f3022 754f4e6d45456f5852774f4f7c5f4fbb9e8ecaad9db59888b99c8cb79a8ab09383bda090 9e7e699878638f6f5a86665181614c7f5f4a7e5e497d5d48573a2a5437275033234c2f1f beb98fcbab72c79d63efdeb3d8e1c2bec5a4bebaa1c9c1b4b29fa5a087839f7a67b68962 d5a168e4af61edb858f3c053efcc6eefd07df0d387f0d07deeca69efc968f0cc7ef0d296 f5d09bffdda7f2cf97e3bf81ddb474bd8852aa6845c27760d48028d39035cc9e50d9c394 d5cec6c8cbd4b9c1c3818c7c6e5e4e686053716f638a8371a08769ae7957b66746b65b3e bc762ec2733bb2592deb8d57de8c3acc8825d19c3edfb2618c867059564f9a9ea7e6effe dfe9f5e1e1e9e5dbe3d3c1cdd7d3d4d4d0d1d9d5d6b9b5b6a29e9f918d8e585455363233 6262605a5a585e5e5c3f3f3d3232304d4d4b57575564646253504b514e495855505e5b56 524f4a423f3a4d4a4567645f847b769b928d99908b9a918c99908b6d645f6d645fb4aba6 d6d3ccfffff8e3e0d9413e372d2a23838079aca9a26d6a639d886bad9080b99e93d8caad cbb57ce9b873f5bc79d8ae74c19676ba8c6a97634b744032885d4ac0a587d4c7b6c1bac2 5b524b2c231c3f383088847b9e9b927a7c71606257595c515f59499592837b796d6e6b62 949087a49b92afa196b7a599b09c79b18b5cc88d51dfa05be5ac67e6b771e8bd6fe7b964 e9b464e8b75ae9b953efbf5df5c16df9c270f7c063f4be52e5c878e4c599dfc3b5dcc6bb d9c7bbd9c4bfdbc2bdddc1b5e0c4c0e0c7b1dfc7afdfc4bbdec3b8ddc6a7ddc6a6dcc2b1 e0bbabe2d9d0d9bba1df9c6feeb894dac09fcca77de3a97bd9c0aaddc4aee0c7b1e0c7b1 ddc4aeddc4aee0c7b1e3cab4dec5afe5c6b4e3c4afe0c29ee2ad83db9070d79d87e1c8b4 e1ad86d59f73e4a572e08f58d7733fdd6f3ed46839d16b3bc05c2bcb6735d37039d77439 
dd7b3ce5843fe5843de08036e08841e8964deaa057eaa65df0ad66f5b06deda362de9052 e38f4fe18d4fcf7b3dc8733ad37e47d07b45c46e3bc6703dc27146bf7047bc6e47b86d46 b56b46b16845ae6846ac67469d61479b5f45955b439058418d5641895643895643885746 855443855445895b4c8861527753455a3a2d492d21492e234d392e4f342b5f3c366a413d 693d3a6e45416a453f53302a0e1215211720924e37d17448714b38141517070604040205 0606060000000000000101010d0d0d1010100808080000005337336f4c4a1d170b262719 734d4c6c44446d56507a5252745749ab8e80a5887a785b4d6a4d3f5f423450332556392b 512d314a262a421e223e1a1e3f1b1f411d21411d21401c2050322a4f31294e30284e3028 d1cd9ecda871b7824ceac49dd5c1a0bca885b39e81c2b2a3ae988da48777a97f66c18c62 d99d5fe6a757f2b352fcbf55f2d27debd58ce5d597e6d38ee9d07cecd07decd498e9d7b3 e6d8b3f4e2b2e1c78aceaa6ec99b6aa57050905744ad7365e6934de9a248e1ae45edca76 e2cfa5ccc5bfb3b6bd747f814339302e2b223d3f3165593f8258329f5c32b76243b45c46 ac6531b7683fb95f3ddf7d58d6753edc8333e0922df4ae3ec8ab69a28c65baafabe7e9f6 ccd4e1c1c8cedfdfe1cdc7cbc5b7b4b5a7a4b1a3a08e807d7769667163605c4e4b625451 56574f5d5e569d9e96b8b9b1bdbeb6d9dad2dcddd5d7d8d0716f637371657c7a6e868478 85837775736762605457554991897ca29a8daca497b0a89b9b9386776f629b9386f7efe2 e2dfd8d7d4cd9b989125221b3a3730a5a29bc7c4bd605d56ab9273a8887bcbb3a9c7ba97 d4bb79f1bb73ffc788c99f65724645784d4465383354252b7e5446c7a776dec38ec7aa8a 8c8477898174938d7f8c897a6a6859575a495b5e4d595d4c73766b7a7a6e605a4c635a4b 8175678b7f73a2988fbeb5aeaba488b0946db68751cd9256e3ad6fe5b672e3b869f0c26a f0b966ecb55be9b152ecb457f4bb64f8c16ef5c26ff0c26ce5cd83e1c7a4dbc5b7d7c7ae d5c7acd7c5bbdcc2c1dfc4b9e1c5c4e1c8b4e0c7b1dfc4bbdec4b7ddc6a6dcc5a3dcc3af d9bdbcdfccbbdfc5ace1a684e4a176d8be99d5bf9ae29a60cdb59dd5bda5dec6aee2cab2 dfc7afddc5addec6aee1c9b1dac1abe0c1ade8cab0e2c09ad89d73dd9171e5ae99dbcebd dba87dce9e6eecb57de79c63d3723dda6c3bd06537cf6f3fc66133d06c3bd5713dd47136 da7837e78641ea8a3ee48437e08942e7974eeba158eaa85eefaf67f6b36feca563dd9153 e18d4de59153d88446cf7a41d58049cf7a44c46e3bc97340c27146bf7047bc6e47b86d46 b56b46b16845ae6846ac67469f63499d6147975d45925a438f58438b58458b58458a5948 8f5e4d8d5c4d8f61528a63547551435333263e22163b20153d291e3a1f16522f296d4440 7044416e454169443e593630070b0e1c121b904c35d17448734d3a161719080705050306 0707070d0d0d1414141515150f0f0f070707010101000000553935714e4c1913071f2012 714b4a6c44446b544e7b535376584da3857a8e706556382d482a1f46281d3e201546281d 6440405e3a3a5b3737603c3c6b47477551517a56567b5757977a6c997c6e9b7e709e8173 e7c98bd6a06295562a9d7059886851966e4a9d6d45a17455a69176a8896db58966ca9263 d99858e29e4df3ae53ffc160f4d789e9d797ded7a3ded59ae6d48aebd58ce9d7a7e4d9c3 c8dfcfdfeac8d3cd9bc7a879c493759b60508144329d6147b66f41ce8d3bdfa833f5ca59 eecd86e0cebadad8e3adb8caa59d9a898a84aeb1a0deceadd8a578d3865ac77052994332 8d3f3ba8583fbc6436d3723fc96030de773dd97827e48c22eebf55efc883d1bba6e0dbe2 dae2edb9c4c8c5d0ccc8cecc6b58515a4740513e373b282156433c8c79728a7770826f68 52544746483b87897cabada09ea09397998c6c6e613b3d30666455514f403b392a3b392a 5553447775668b897a8f8d7e837d63827c62989278aaa48a918b717e785ea8a288e8e2c8 e5e2dbefece5d4d1ca726f6853504999968fd1cec75a5750bba180ac8c81dcc3bcbdb18b d9be79f1b96fffc98bb88d5635111d4b29274e2a2a512a2d8a6346d9b463ebc45fcb9d48 cdc6b4958e7c7e79668f8c7993927e8a8d78878a7580856f57615955574c857e6e6a5b48 837260c7b9aca1989195908dacab96af9c7bab8151bc8751deac71e1b471e2b466f9ca70 f4bc67efb35debab53edac52f3b65df7c06df3c578edc57fe7d08ce1caaad8c5b4d2c8a5 d1c7a3d6c4b8dcc4c4e1c6bbe2c6c5e1c7b6e0c7b3dfc4bbdec4b7dcc5a5dcc6a1dbc2ac d4c6d3dab89ce8c9ace6b8a1d69265d9b88be2d0aadd9258c5ad93d0b89eddc5abe3cbb1 
e0c8aeddc5abddc5abe0c8aed6bda7dcbda9ecccb3e3c098d1936ae19273eebaa5d7d0c0 d5a479ca9f6cf1c086eba56ad2733dd66a3bcc6538cd7142cd683cd57042d6723ed06d32 d67433e7863fee8e42ea8a3de18a43e7974eeba359eba95ff0b068f6b36feda664dc9253 df8b4be79355dd894bd47f46d7824bce7943c46e3bcc7643c27146bf7047bc6e47b86d46 b56b46b16845ae6846ac6746a1654b9e6248995f47945c459059448d5a478d5a478b5a49 8d5c4b88574887594a835c4d765244614134593d315f4439564237462b2253302a683f3b 643835582f2b532e284b2822020609180e178e4a33d17448744e3b17181a090806060407 0404042525253f3f3f3434340f0f0f000000000000121212563a36724f4d1610041b1c0e 714b4a6c44446a534d7c5454705248aa8c82ab8d838a6c6292746a9d7f75987a709e8076 a3836c9d7d669878619d7d66a88871b3937cb79780b6967fa68a74a98d77ad917bb0947e a97a66d0a18da57662794a369a6b579f705c895a469c6d59bb846fb17364c07c69d18760 d18c41e7a544ffc359fbc05af5cf6af3cc71f2c87cf5ca85fbd189ffd78bffdb8cffdc8c e7cd82dcdaa1e5d597eaab5cca794a9f636b94575f9e4324be6233b66a36cb793dd97539 ce8b61d9dacadaf7f3b5c1bfb3a48fe8e3d0e2d0bcc68c67d68848da9547b5743a975030 8d55269d5235c35a47b74636b46256c8a096c4a287d69e6bbca36bc7b489c2b89fd1d0cb ced6d9d3dee2c3cfcf5a6561483f38584f485f564f574e475950496c635c7f766f877e77 5858582727270c0c0c1818181414140000000000001c1c1c73594083695092785f987e65 9a80679b8168937960886e558674668f81748a80748d877b9492868e8f81a9aa9ce6e8da eadbc8dfd3b9e3dbc46a635b2d2530aca6b4b7b3b071705c9d886dae9674c7ad7ce2c285 f1cd83f0c577e6b767dfac5d7e4b308d5620a56921b87830eaab5cf7bf60d7a045f0b86d 7f7d6e6d6b5c868475bebcadd2d0c18381728c8a7b7c7a6b78787054514a625e5570695f 8c8479b4ab9cbfb3a5dbcebeb0a8b5aaa393b3a171c79f62d3955cd79259e4ae68f7d17c e4b364f2bc7edf9a4fefa134f0b74cceb97ad6cea9e1d3a6decbbcddcabbddcabbdcc9ba dbc8b9dac7b8dac7b8d9c6b7e0c3bfe1c4bee2c6bbe3c7b9e3c8b5e3c7b1e2c7ace1c6a9 e9b2afdebcb2dbc5aee0be99dea476d4956cd1ac92d8d0c3e0a374dda778e4c49bdfd2b2 d4c09be5bf92ecc9a9cfc2badfbb99c1beb7d0c7c2d99d83bb7053c89d96e1cec0d0b387 d8aa86e6ba97eec29feabc98e8b389e7a875db9359cd7d3ed26f2cd57538da7f4ae08c5d e79a6ee9a777ecb07cedb47de0893ce08849eca064efb673dcb278deae88e9aa7fe99c58 e68d4be48b49e28849df8547dc8148d97e47d67b46d57a45c6733fc47240c27041bc6d42 b76942b26644af6445ae6344aa5e44a05b3ea063479a624b87503c884e4093544b8f4e48 904f3da56849945e427348416a4e4a5f5343504c405250554e3a393f2b2a432e2d573f3f 5d4344573d3e56393b56393b0c0f08281c1c6f433aa36147754b35141412000005230f08 2d10087f6763968b896967684e525344494534372e282a1c0e0c0f3e3e3e2f2f2f161511 4339376b56556848496b42466e4b4994716db29087b29084a88774a78770a7886ca38467 a58463bb997dae8b759e7b68b08d7abe9b85bf9d81c9a887c2a087c3a188c5a38ac8a68d 996a56c0917daa7b678758449768549a6b578f604ca1725ebd8973b07565b16e5bc07852 cf8846e3a049edae51e5a84de6b245f0bb51f6c05ef0b865e3a960dea35fe6ac63f1b869 fad57de7d895e7cb8af3ae60da8a4fa26759965954b86145c86f37b26b37b46334b6532c b4725cd2d1cdd7f1eea9b5a7b4a497e5ded4d9c5bab77b61c4743fcb8442b16f3d9c543c b26940ac5f41c16951bc654ab67f6abda9a0bca698d7a988c0a87ca18d6cc8bcacdbdad8 cdd2d8dde6eba0aaa9131e185950495b524b5c534c655c55786f68867d76797069625952 5c5c5c4141412828282323232b2b2b3131312f2f2f2c2c2c8c8276b2a89cc5bbafbcb2a6 beb4a8c3b9ada0968a6a6054988f867b72696b645a504c4049473a9d9b8ce8e9d9e0e1cf e5d9c1d8ceb5f1e9d6aea59e81787bc1bac1b9b5b2908e82b7a287c0a886ccb281daba7d e5c177edc274efc070f0bd6ec49166cc9755f8bf70f0b066d7984be6ad54e4ad5dd7a165 6d6b5e969487bcbaadb2b0a3b9b7aa9c9a8d9290835b594c68686058554e3430276f685e 8b83789d9485ddd1c3b8ab9bb2aab5b8b1a1bead81c19a61c88a51d48f56dea560e2b967 d5a747e6b267df9b4afaad41ffcd6ce5cea2dcd2c6dac9b9decbbcddcabbddcabbdcc9ba 
dbc8b9dac7b8dac7b8d9c6b7e1c4bee1c4bce0c5badfc5b8dfc5b4dec5b1ddc4aeddc5ad e6c2b6dec7b5ddd0b0e2cda2dfb98ad5aa7fd4b690dccfafe4b28fd7a278dcb28ce1c9af dac2a8deba98e4c2a9d3c5bcecc397dcccb3e4cab1e59e72d3815be0ad9cf0ccb2e0ae73 d0ab8edbb89ce1bfa4debca1e1ba9be8ba96e7b185dfa273dd812ae0893ce49658e6a376 e4af8ddfb79dd7bba3d2bca4df904ddb8a52e89d63edb26ce0b277e2b28cedaf86e7a060 e68d4be48b49e28849df8547dc8148d97e47d67b46d57a45c6733fc47240c27041bc6d42 b76942b26644af6445ae6344ac6046a15c3fa2624799614a854e3a854b3d9152498f4c46 9651419c5f408b5539774c457a5b586d5d4d554d404d484c4733323e29283f2a294b3635 523a3a4e36364b31324a3031190f0d331d1f7d4b44ae6c54784f391a1b16000504150900 6d524b7d66607566636a625f74736f797b76757a737d837939373a3b3b3b1919190b0a06 3c32306954537353547d54586e4b498e6b67a9877eb08e82b49380bc9c85b8997dad8e71 ba9c7abe9f82c4a48dcead9ad0af9cbe9e87b49578c0a280c8aa90c8aa90c6a88ec4a68c 8d5e4aac7d69ab7c68966753936450936450966753a37460bc8e76b47d69a4664fad6745 cb844eda944ed49045ce8e44e19c37f4b043fdb94ee9a14dc47741af5f3ab7683dc97d43 f0bb51f2cd7de5b672e29346de8c42b97f4fa76e51c57459ce7e43b47242a35838983927 9f5d5ed1d0d8dbf3f3a6ad9bb4a29ee1d9d6d1bcb7aa6d5bb36139be7540b16e44ab634d d77a4fbe6b49bd734ebe7f53c09f7cbdbbaeb8b3afceb2a7c2a891988574c7bcb6d4d3d8 d9dde6e7eef487908f1c231c453e383c352f362f29423b35625b5579726c746d67625b55 3c3c3c4444444141412f2f2f2222223434345e5e5e818181a39d91c8c2b6e4ded2eae4d8 f0eadee7e1d5b1ab9f706a5e66686573746f5c5c544a483b7f7c6bd2cfbcf0eed9e2e0cb e5dcbdcfc4aef2e5dce7dcd6d0c7bec6c2b7b4b3aeb8b8b8c8b398d1b997d8be8ddebe81 e5c177edc274f2c373f3c071e9b57be9b667ffcb75eaaf63d6974af0b664e5b06ab58459 a5a397bdbbafc9c7bb9492869694889b998d8a887c6c6a5e8b8b836d6a63635f56686157 5f574c908778dfd3c5e7dacabeb3bbcac0b4cbb993bc9763bc8149ca884cd09752c69b4c dcaa51e4ad6add974ff8aa48ffd47df5debcdfd7d5d3c6c0decbbcddcabbddcabbdcc9ba dbc8b9dac7b8dac7b8d9c6b7e1c6bde0c5bcdfc4b9dcc4b8dbc4b6dac5b4dac5b4dac5b4 e3bbbbe2bdb4e3c3aee6c4abe0baa3d9ac95dbaa8ce4b08be8c6abd5a078d69c76e2b9a3 e0c0b1ddb9a3debea5dbc6b5d9bd95e1d1afe1be96d6905cd68c67e2b4a4e5b99cd89956 d4b49bdabea8d8c1afd3beadd5c0afe4c9b4e9c9b0e6c3a7dda04fe1a861e6b37ce6be9a e2c6b0d9c7b9cec6bbc7c3badea16bd79364e39c64eaae66e3b172e7b68eedb38de3a268 e68d4be48b49e28849df8547dc8148d97e47d67b46d57a45c6733fc47240c27041bc6d42 b76942b26644af6445ae6344b06349a35c40a26247985e48834a3783483a8e4e458c4742 9e5447945438824c308051498b65627e64535f4f404e4445432e2d412c2b3f2a29422d2c 473231442f2e3e29283e29284431334b2b2c7b443d90523d56301d1e1a1122251e372e1d 553c375f46416f5b547f716870696144453d3d473f616d69737174454545101010060501 3026245b46456f4f50774e526c4947886561a7857cb9978bc09f8cc2a28bbfa084ba9b7e c9aa8bbc9d81c3a38ed6b5a4dbbaa9d8b8a3cfb094c1a283d0b89ccbb397c4ac90bda589 9465519c6d59a2735f996a568d5e4a8f604c996a569e6f5bba9278bd8d76aa7156a96745 c67d52ca814cbf793ec88447ec9f4ffbb150ffb650e5974fb86346993f3e973f35a24c31 cf8919f3ba61de9b55be681fd88630e1a755c28b55b56b50c67c4db37954a46248943e31 9d6065d8d7e5e7faffaeaea6b4a39bdfd8cecfbcaeaa6d5ab4623cc17845bd7b4bc27c5a e68149c97546b8703ebb7d42cda778cfcab6b9bebaaba49ea18881b29f9bccc2c3d6d5db e5eaf0b8bdc1585c5b42453e68635f635e5a58534f4b464246413d494440504b47544f4b 3a3a3a3d3d3d4545454141412727271919193e3e3e747474a99886b1a08ecbbaa8e8d7c5 e7d6c4c0af9d938270796856192324494f4d686a5f9b9585e3dbc6f1e9d2d7d1b9d7d3ba e6e2bfcbc3aee1d4cceee0d5eadfc9b5b09aafafa7d4d4dec1ac91d5bd9be7cd9cedcd90 efcb81f1c678eabb6be1ae5fcc9253deab5ad8a853cd954aeaad5dffc871e6b570c89e76 a8a59c89867d7f7c73615e5568655c5b584f413e3568655c77776f78756e9a968d8e877d 
797166978e7fb8ac9ed1c4b4cec2c6d5c8bfd0bfa1c19f72bc834ebf7d41bf8440b9893d f3bd7febb086d68d58e0923dfac371f2e0bcdadad0d2ccbcdecbbcddcabbddcabbdcc9ba dbc8b9dac7b8dac7b8d9c6b7ddc5b9ddc5b9ddc6b8ddc6b8ddc6b8dec7b9dec7b9dfc8b8 d9c0c3dcbeb6e0bfb0e0c4b8dcc3bed7b9afdcaa91e09f77e4cbacdca677db9562dda486 e1b8a4e6bea4e5bf9bdbbe9cc5ba9ee0d3b3d8b78ac58b5bd39d81debdb4d3ae94cd8f52 e2bfa3e6c6ade1c8b4d3c1b3d2c2b5dccabce0cbbadac3b1d5bc93d9bf9cdec4a9e2c8b7 e1cbbdddcabbd8c7b5d5c4b0e3b790d9a37fe2a370eaaf69e3b172e7b68ee9b18cdc9f69 e68d4be48b49e28849df8547dc8148d97e47d67b46d57a45c6733fc47240c27041bc6d42 b76942b26644af6445ae6344b3634aa65a40a25f45975a458045338143368c49418a433f a65449924f34814b2f85534a91615d865f4e6c52415c4a484830304f3a394833323e2a29 43312f40312e3728253728253425284528247b4a3c8d5544603628442d254f3d31513c29 22050031150a553a2f6e5b4d51473d1e1e16212c26526261706e714242421717170e0d09 2a201e5641406f4f506e45496946447b5854a38178c8a69acaa996b6967fb49579c3a487 bca183c0a58ac6aa95b093839d8070bca08bd9bea3caaf91d0bd9fc9b698beab8db3a082 a3746094655192634f9364508b5c488e5f4b976854986955b69479c89f83c08b6bb67653 be7650b86f44b8713dd08a4ff3aa5df7b151f4af48e2994cc57554ab56519e4a409a492e ca7814eea44bd4843fb85c1be39239fabf57ce994da86646ae6b4ea8765ba870559b503b a06d69d7d9e5e7f7ffb4aab2b5a693ded9c3d0bfa5b27759bf6e43cc844acb8a50d4915d ea8841d8884db97039b96a2fd39562dcc4a8b1b8a8727f6d766160baaaaad7ced1edecf1 c8cccf565a5b252724393833312d2a3d39364a46434c484546423f46423f575350696562 5b5b5b4747474646466363637c7c7c7373735454543c3c3c8276687f73658b7f71988c7e 867a6c5f53454b3f3150443607120e2528218a8474e1d5bbe8d9bae1d4b4dfd8bbc9c5a8 e6e5c6d4cdbadbcdc0e3d2beeadfc1a7a285b2b3add1d5e1b8a388d6be9cedd3a2f1d194 efcb81efc476e5b666d7a455bf8144d7a358deb062e4b065f3b665f7bf62edc377dabd91 37342d56534c7b7871807d767f7c7558554e17140d3e3b342a2a2249463f3f3b32686157 8c8479a09788dbcfc1e1d4c4dfcfcfd5c8bfd0c0a9d0ae89c6915fb9793cb77a37bf8b42 f3bb7ee7a982d18756ce812fe8b468eddfc2d7dbdad5d5cddecbbcddcabbddcabbdcc9ba dbc8b9dac7b8dac7b8d9c6b7dac4b6dbc5b7e0c6b7e3c8b7e4c7b5e5c4b1e5c2aee4c1ad d4d1b2dacca9dec8a3dbcaacd8d2b8d8cdafdbbb8cdda769d8b98de9ab70e39555d09062 daa885f1be93edb987d6ad79cfb59aedcfa9e3b480cf935fe3ac8deac1afddac8bdd995c e6ba97ebc4a5e6c8b0dac3b1d6c3b4dbc8b9dac3b1d2b9a5dec8bbddc7bcddc4bfdbc2be dbc2bbddc3b6ddc5addec6aae9cbafddb39ae4af85e8b677e0b378e1b38fe3ad89d69963 e68d4be48b49e28849df8547dc8148d97e47d67b46d57a45c6733fc47240c27041bc6d42 b76942b26644af6445ae6344b36149a5583ea15b429655417e412f7e3e328c47408b413e a9504a964e358850358751478b504c8954447b56436a514c4d3535624a4a543f3e3c2a28 40312e3e332f322924312a241b191a3c281d6f46347b4b3d6839316f4038835548805544 744f3f5531234226184b372c51493e53544c5f68636d7c773230332d2d2d2424241c1b17 2f25235e49487a5a5b774e526c494766433f88665dc4a296d5b4a1b89881ae8f73c1a285 b09779bba188c0a5929377695a3e306d523fac9279cbb294cbb89ac4b193b9a688ad9a7c a475618e5f4b8758448f604c8f604c8c5d4991624e986955b3977fcaa787d2a27cc48961 b57049ae653abd743fd99252eaa74ce6a63ce1a231de9c3cda9351cd8258b96e46aa602f d8843be1903fcd7835cd733ef2a356f9bb4cd19a40b57954a66a509c745aa174579d5b41 a1756ad1d5dee1edfbb3a4abbbac95ddd9bcccbc9bb37858c37149cf864fcc8c4ed69555 e48c44e09d66bf8152b86a39cd8356d2ab8ca4a48a52634376655eab9e98d2cdc9e0e2dd 7d827c1c1e1937363245423d3737373333333232323232322f2f2f2e2e2e393939474747 5151514b4b4b3f3f3f4a4a4a7b7b7b9b9b9b7070702929293137373b4141404646353b3b 242a2a1e2424242a2a2c32322f2f236f6855c0b194ebd3afe9d2a9e5d3abe3d9b6dad5b7 e2e4cedfd9c3e3d5bbe5d5b4f3e5c2a9a389c1c3bebfc8d1bba68bd9c19febd1a0e7c78a 
e4c076ebc072eabb6be0ad5ef1ad6cd4a054dfb667f9c97fe6ac58e8b252e1bb708f7950 04010066635e6e6b663d3a352b282336332e24211c524f4a87877f59564f49453c2f281e 372f246c63549f9385e1d4c4e4d1cdd6c7c0d3c1b3d9ba9dd3a071c18346c08340ce9754 dca650d89e5fd48f4ecf852ce6b46bf0e4d4d8ddf0dbddecdecbbcddcabbddcabbdcc9ba dbc8b9dac7b8dac7b8d9c6b7d5c4b4dac5b4e1c8b4e6c6b1e8c0a7e5b69ae1ac8ddca586 d9af73deaa71dfa06ddb9e71dba87be1ad7de09f65dc8b4ac99364e89e5fe08f4ec6804d d39365f0a770e8a065cf925cd5946ee9a574e1975ad6874ce5986ae9a179e09762e59651 dbad89e3ba9ae5c3a8dcc1acdbc4b2e1cab8e0c5b0d6baa4ebc6b6e8c2b7e0bdb7dabbb8 d7bbb7d4c0b5d6c5b3d7c8b1e9d3bcdfbeabe4bb9be5c08bdbb785ddb290e3ab86da985e e68d4be48b49e28849df8547dc8148d97e47d67b46d57a45c6733fc47240c27041bc6d42 b76942b26644af6445ae6344b25e46a4543b9e563e93503d7e3c2c803c318f46408d423f ab4c4a9a4f388c5439854a4286403e925040895945694b414b31326b5353584342322320 342925362f2925221b23231b13171847342566422a5f372d6837338c4c43a56452a76e5b b683688d634d6b4e405d4e475e5d586c716b646b64474e4619171a3535353b3b3b2d2c28 372d2b5641406d4d4e774e5278555364413d745249ab897dcead9ac5a58eb6977bb8997c b79f838d745e775d4e74594e5d4237543a2b7f6650b49c80c7af93c2aa8ebaa286b1997d 90614d88594586574392634f9667538859458a5b47a2735fb69d87c3a583d6ab7ecd9566 af6c41ac6336c57e40d99447e19a42db9536d89230de9839e59d49df9450ca7e42b66a30 c4724acd7a36cc7737ce7950e49658edae44dfa748cb9569c38f67a284629974599a5f4d a67d7bd4d8e3e2edf1b9a79dc4b4a5dbd7bec0af93a56951b76347c2774dbe7d45c88649 d28348d8a77fba977bb88363c18261ba9279a19a7e68704b8477679e9787bbb9aa989c8d 3b41351f201844413c4b4341515151424242373737363636343434313131373737434343 3737374343433434341616161b1b1b3a3a3a3636361515150a14150e1819141e1f182223 172122192324242e2f323c3d887c64d2bf9fe6c9a1dcba8af3d29fefd7a9dacea8e5e2c1 e1e2daddd9c0e2d6ace8d6a8f6e6c4aba291d1d3d0b3bec2bba68bdbc3a1edd3a2e5c588 e2be74efc476f3c474ebb869ffc473e1af56d6b25cecbe73e0a652e5ae53c09d5b38280e 3a3633a39f9c8b87846a666354504d46423f332f2c625e5bbabab247443d928e855c554b 292116655c4d54483aa19484dcc8c1dacbc6dbcac2dcbea6d9a97bd6985bd99a57dea263 d39d43d9a05de29d58d68d31e3b568eee7d4d7e3f3e2e8f6decbbcddcabbddcabbdcc9ba dbc8b9dac7b8dac7b8d9c6b7d2c5b5d9c6b5e1c7b0e6c1a6e5b392dc9e77d18a60c77d50 cc7b3ad07641cc6940c7613bcb6a3fd37748d27248c6643fbf6a41da804ed17e4cbd764c cd8052e08147d77842cb7953cc6a43ce7243ca753eca7741d07a47d17743d17f43db9150 d4af92ddbba0ddc2add7c2b1d7c4b5e1ccbbe4c9b4dcc0aadfc4a9dec2acdcc0b2dabfb6 d8c0b6d8c2b5d7c5b1d8c6b0e2cdb8dbbfb1e2c4ace2c9a0d4bd93dab598eaaf87e99e5d e68d4be48b49e28849df8547dc8148d97e47d67b46d57a45c6733fc47240c27041bc6d42 b76942b26644af6445ae6344af5942a14f379c513a904c397c3a2a7f3b30904741914341 ae47489b4f388c543981453b8b3d3ba45848935b44593527402325674d4e4f3a3920110e 22191423201911120a0f1209040002553a297951386f4841885657a35d519a543b8c5840 a4623f94614683675b686360515659515a594a4d442826173432354f4f4f494949373632 3d3331442f2e5636377e555984615f8966628d6b62957367a88774ba9a83bc9d81b29376 baa58a725c45412a1a553d316c54485f48386c563f9d886dc6a88ec4a68cc1a389bd9f85 7b4c3882533f895a469869559b6c58845541865743ad7e6ab8a28dbca07bd2aa79cd9866 ad6a3dae6634cb8540d4903be28f4bdd8a46db8842e29046e8964ae08d47cb763db76132 984a36bb692fcb7739bc6947c37743ebaa44f5be5ad8a474e9bc81b29a6e9976609c625e ae868fdce2eeebf6f0c2b195cbb9b5d9d3c5b6a49097594ca85241b36648ae6b3eb87540 c07349c7a38bb1a599b89d8ab68c76a78673a99d858e8e6a7d725c9d977fa6a890535842 2229172c2e2315100c180e0f1a1b1d1e1f212e2f313e3f413c3d3f2e2f312d2e30363739 3a3a3a4242423e3e3e2929291919191e1e1e2e2e2e39393928251e0d0a03120f0836332c 
3e3b3425221b26231c423f38ebd7b6e0c49de5bf8edcb17acaa36cd7bd8ce6dab2d7d4b3 e2e4e3d6d2b9d8cc9adfce98f1debda59b8fd7dbdaafbebbb5a085dbc3a1f2d8a7edcd90 eac67cf6cb7df8c979edba6be9a746ffcf69e8c76ae0b567e5ab59e0a853ab885235271c c8c4c3d5d1d06f6b6a6d6968605c5b2d29282f2b2a938f8ebcbcb43b38318a867d979086 554d42655c4d483c2e352818d3bcb4e0cecae3d2cbd8bca7daab7fe7ab6decad6ae5a96a e1a75ee6aa76efa96dd58e34dbae5be6e3c2d5e5e2eaf6ecdecbbcddcabbddcabbdcc9ba dbc8b9dac7b8dac7b8d9c6b7d1c5b5d8c7b5e2c6aee7bb9ee1a781d38a5dc26f3db75d29 ad6f32af6b3ea85b3da34f33a95731b46941af6c4fa06253b84f2fcc6640c26e4cb87153 ca754cd1622bc6572ac96b52c75e41bd5f3dbc6e47c47c54c47348c16937c77e49d49f6b d7bba5dbc4b2d9c7b9d0c2b7d1c3b8ddcbbde1cab8dcc1acc6c6aac9c7aed0c8b5d8c8b9 ddc6b6e0c4aee3c1a5e3c19edac5b0d4bcb0dec7b5dfcfabd1c19ddbb99ef1b38af5a560 e68d4be48b49e28849df8547dc8148d97e47d67b46d57a45c6733fc47240c27041bc6d42 b76942b26644af6445ae6344ad553f9e4c349a4f388e4a377c3929803a30924742944443 ae45499b4d378950357f403791403db46352985c444621113316185f454645302f110200 120b0513130b01040000040018040f724b3c7f513a603b358251549f5a4b924f32895d40 cf845aa770517b61544a4a4c27343c354141444639352d184a484b585858434343363531 443a383f2a295434359970748b6866b6938fbb99908f6d617e5d4aa08069b99a7eb29376 b29c848c76615b44364f372d563e344b34265f49349a846cc7a58cc7a58cc8a68dc7a58c 8657478f60508a5b4b8859499465559263538c5d4d966757bf9e7fc6a08bc1977eac7b5a 975d4799574bb06b4ac4843da95b37b1633cbe7145c97c48cf8349d18643d2853fd2863b 9848238d3831943c4aa34f45ba6e32e8a145ffb763e99d5ffdce7ef0c58ebb8968a37055 b89b8bdee4e2e1efef9fa098d3c2bbdcc6bbb59684865e458d5e40a57051a86f54a96f57 94615d7e5d54af9d93cab9afcaada5cda99bb4967ead997893907f625f504e4c3d9c9c90 919489646a609198908289817c7678544e503e383a3c36382721232923253d3739292325 3634373836393a383b3c3a3d3b393c383639333134302e31040410302f37342f35332e2b 080000372b1d7c6d5ab1a08ca98865c5a86cdec571e1c77adec08ce2c19ee6c69de5c890 ebead5e6ddcedbd6d0e4cea7e7b762bca869a3aea6bfb9c7d6b69de3c29feac694e8c17e eec072f6c46ff5bd66ebb259ebc16fedbc70e5ab62f2b564e7ac50dba449d6a5569e7130 bfb6adc5bdb09a938179725f948d7b5a5148251b198e8387cbcbbf4a4738756c5bc1b4a3 786b5b453b3235302c262425454344cfc9cbeddedbdfc6b0daba87b58f46edbe70d9a55a fabe7fe6ba7ddda85cdd912fe19f4bdfca9fdde0cde3e1cad0c8b3d5cdb8dcd1bbddceb9 d9c8b4d8c6b0dfcab5e5cfbae8cfa7e2c7aadbbca7e3c0a4e5b68cc48458b25f3fc46553 b1633fb36848a461448f5236905337a46144ae6343a85a36b26843b16742aa603ba65c37 b16742bd734eb369449f5530b96b44b96b44ba6c45bc6e47bd6f48bf714ac0724bc0724b d0b7a3d6bda9dcc3afdec5b1dcc3afdcc3afe0c7b3e5ccb8e0d1bad6c7b0d1bfa9dac4af e9cebbeccfbde1c0afd3b2a1c7c1b5cfc3b5dbc6b3e7c3abeab999e6a682da9166d28456 ee8e54ea8b51d3743ace7136e48a4ee48b4fd17b3ed27c3fb4522bc1633dcc724dc6724d b3653fa45c36a1603aa86842b27147a86842a46544a3674b965d497e4939703e33713f38 7f584792524986433d73473c6a42387f49478256533c2c1f4b232c73565b362627030000 040601070904100f0b0a0603000b091a29261b201c18110b432f287351487f534a805046 955f5da06f6b6239374e2c2b4e39383a2f2d3b35352523242b372d2f2e2a1a1c1b000d03 1417066a3d28c17254ee956dd87e59db835dc57955905d485b48445b5a5f929292ccc5bd be9790aa857d89685f7f5e557d5c536442396442398d6b62cba689d7bda4a492846f5a59 8556468f60508a5b4b8859499465559263538d5e4e966757c9a583c59d83be9172b27e56 a267479c5748a96246bb753aaa5d3faf6341b96b44c27644ca7e42d38640d78b3fda8d3d ab5c35974339974049a65248b96c34d8903be49b4ccf8349eabb6de4b682bf8d6cae785e b19482d6dad9e5f3f4a6a69ed0bcb3d9bdb1c09584a16b53a8684db17054ac6f53af7359 8c6c5d81705ea19b8baba597bba99fd6baaeb69d878d7c60736f634743385452469e9e96 
7e737b7d727a7a6f77796e767e727c7e727c7d717b7d717b7c707a7b6f797b6f797a6e78 7767686f61616a5e5e746a69847c7a8d8885898582807f7b877d7c8b81808d83828a807f 857b7a827877847a79877d7c8581828783848a86878e8a8b928e8f9692939995969a9697 9c9a9ba39fa0a9a3a5aaa1a4a69b9fa4989ca7989dab9aa0b39ea3a79595a190899d9185 8c8276696057473f3c382f305e4d3b6c5d4a7161517b6f5f62584c595146736c626e675d 4a453146483b53544f4a3b385d3832774a445d3c374737374c47433a35312d29262c2b29 2b2c2e24282b23282c2a2f331f232624282b292d30161a1d23272a05090c060a0d0e1215 1f1e2420192034252c462c354628304d303552383b4a32322e2321392f2d332b293e3838 332e321b191e2e2d3336353b18171c27262b2e2d322423281b1a1f1d1c211f1e231c1b20 1e2225262a2d24282b23272a24282b161a1d171b1e3135382c313735424a404f5463665f 9b8671b38a6aaf7c5db17b63987a7276625b4b4642343d3a3f4b4b596261747472847c79 5858564e4f514a4f5550576150545f48474f483f444f4043695b5b695658675154654a4f 90614d92634f9465519465519667539f705cad7e6ab889753d3e3910110c1c1d182b2c27 32332e3f403b3f403b4e4f4a1d1e191f201b1e1f1a0405000c0d0821221d2d2e295e5f5a 9b93889c94899d958a9e968b9d958a9b9388989085978f8490908e908b857b6f61a3937c a28c74927a62846e57806a55957a67897871c0b9b3c4c1b083796fb9adb7fdfbffd0ddd4 e4e6dba7a397aea594c5b4a29b8877a59385d1c3bab8ada7c1afa1b6a699a9998ca69990 bab0a7bdb4ada69f99a09993958b8a887e7d3f3534514746605655655b5a564c4b453b3a 1c211d1e1317b9a185d4bc9071574a6e584b8971578c69677a6c616c5f56665c53564d46 463f39423d398a8683807f7d1115141014133f4342afb3b24b4f4e202423272b2a090d0c 0b0f0e585c5b7e8281696d6c343837181c1b2529282125241713123531306a6665595554 5d5958656160575352302c2b4c4a58504e5c5755635f5d6b6664726b69776e6c7a706e7c 83818e807e8b7f7d8a817f8c85839084828f7e7c897876838c86928b85918a849088828e 86808c847e8a827c88817b877c76827c76827c76827c76827d77837d77837d77837d7783 817c827d787e7a757b7b767c7d787e7b767c746f756e696f796e767a6f777c71797e737b 7e737b7d727a7b7078796e767b70787a6f777a6f77796e76786d75776c74776c74766b73 7363646658585f53536f656489817f97928f928e8b84837f8d83819086849288868f8583 897f7d857b79877d7b8b817f8581828783848a86878e8a8b928e8f9692939995969a9697 9a9899a19d9ea8a2a4aaa1a4a79ca0a4989ca7989dab9aa0baa5aab5a3a3ae9d969b8f83 7a70645a51484a423f4b42436655437465527e6e5e817565665c504d453a60594f524b41 3e3f2d39372b564f495b504a564d3e4b46333032243a423737322e2b2622221e1b21201e 1f2022181c1f13181c13181c14181b2d31343b3f42111518161a1d090d100c10130f1316 3f4a4e3035393530363629302d1a203421253f2f303429273329272d2522221a182e2a29 2f2d3022212628292e26272c2120252524292221261a191e17161b1c1b201d1c211a191e 161a1d1d21241a1e211b1f222125281b1f2224282b44484b3630343c3f48383f473c3c3c 513d325c372565392c7b5049846962705d574f4a47343a3a2a36363942415557546b6764 6262604c4d4f3b40463d444e43475244434b483f444f4043775b577859567b55547b5152 90614d92634f9465519465519667539f705cad7e6ab8897543443f1b1c172a2b26383934 3435302f302b1b1c171e1f1a42433e48494440413c1d1e1923241f393a354e4f4a8e8f8a 9d958a9d958a9d958a9d958a9c94899b93889a92879991868a868598938d857c6d9b8c75 a7967c9d876f957c66876e5a967b687c6b64b6afa9cac7b6786e64b9adb7fffdffcbd8cf f9faf5cdc8c2a69c90bead9db9a697907e70a19388cac0b7d9c7b9c8b8abc3b3a6ada097 9e948b837a734d464037302a605655857b7a3f3534483e3d5f55546157566056556c6261 1d221e1e1317b8a084d5bd91745a4d725c4f8a72588a67657b6d626b5e5562584f635a53 362f293b3632aeaaa7686765030706111514515554b4b8b74549481e2221222625080c0b 101211464847787a795f61601c1e1d1c1e1d4143423a3c3b3f3a3e3530343e393d3d383c 413c404843475a55596a656984859a86879c8a8ba08d8ea38e8fa48e8fa48d8ea38d8ea3 878499827f947e7b907e7b90827f948380957f7c917b788d7a74807a74807b75817c7682 
end
%%PageTrailer
%%Trailer
%%EOF
pegasus-wms_4.0.1+dfsg/src/tools/T2/doc/T2.tex
\documentclass[11pt,letterpaper,headsepline,pagesize,DIV12,liststotoc]{scrartcl}
\usepackage[T1]{fontenc}
\usepackage{graphicx,verbatim,rotate,times,subfigure}
\usepackage{longtable}
\usepackage[latin1]{inputenc}

% paragraph setup
\setlength{\parindent}{0pt}
\setlength{\parskip}{0.4\baselineskip plus1pt minus1pt}
% not in this country: \frenchspacing

%
% page setup
%
\usepackage[automark]{scrpage2}
\pagestyle{scrheadings}
\renewcommand{\headfont}{\normalfont\sffamily}
\renewcommand{\pnumfont}{\normalfont\sffamily}
\clearscrheadfoot
\ohead{\pagemark}\chead{}\ihead{\headmark}
\ofoot{}\cfoot{}\ifoot{}
\pagestyle{scrheadings}
\thispagestyle{empty}
%\ohead{\pagemark}\chead{}\ihead{\headmark}\cfoot{}

%
% I hate koma's page layout, so I fix it my way
%
%\setlength{\topmargin}{0pt} % headexclude document style is broken
\setlength{\textheight}{1.08\textheight} % add 8% more length
\setlength{\textwidth}{1.05\textwidth}   % add 5% more width

%
% my verbatim stuff
%
\makeatletter
\newlength{\myverbatimindent}
\setlength{\myverbatimindent}{10mm}
\renewcommand{\verbatim@processline}{%
  \leavevmode\hspace*{\myverbatimindent}\the\verbatim@line\par}
\renewcommand{\verbatim@font}{\bf\ttfamily\small\baselineskip10pt}
\makeatother

%
% personal shortcuts
%
\font\manual =manfnt scaled \magstep0
\def\dbend{{\manual\symbol{127}}}
%
\newenvironment{difficult}%
{{\makebox[-50pt][r]{\raisebox{8pt}{\dbend}}\makebox[50pt][l]{}}%
\small\leftskip5mm}%
{\par\normalsize\leftskip0pt}
%
\def\bs{$\mathbf{\backslash}$}
\def\dd{\\\hline}
\def\at{\symbol{64}}
\def\ul#1{\underline{#1}}
\def\rref#1{\ref{#1} (page~\pageref{#1})}

%
% should be last
%
\usepackage{hyperref}
%
\title{Transfer V2 (T2)\\\vspace*{15mm}\includegraphics[scale=0.5]{T2}}
\date{06/10/2004}
\author{Jens-S. V\"ockler}
%
\begin{document}
\thispagestyle{empty}
\maketitle

\begin{center}
  \begin{tabular}{|l|l|p{80mm}|}\hline
    \textbf{Author} & \textbf{Date} & \textbf{Modification}\dd\hline
    Jens V\"ockler & 20040610 & initial document\dd
  \end{tabular}
\end{center}

\setlongtables
\pagebreak
\tableofcontents
\vfill

\begin{difficult}
  Difficult sections, which are not important for the casual user, are
  marked with a dangerous bend sign in the margin.
\end{difficult}

%
\pagebreak
\section{Overview}
\label{sec:overview}

\end{document}

%%% Local Variables:
%%% mode: latex
%%% TeX-master: t
%%% End:
pegasus-wms_4.0.1+dfsg/src/tools/T2/doc/Makefile
#
# make for LaTeX
#
LATEX  = latex
#PS2PDF = ps2pdf14
PS2PDF = ps2pdf -sPAPERSIZE=letter
DVIPS  = dvips
PRTGIF = tgif -print
RM     = rm -f

%.pdf : %.ps
	$(PS2PDF) $< $@
%.ps : %.dvi
	$(DVIPS) $< -o $@
%.eps : %.obj
	$(PRTGIF) -adobe=3.0/1.2 -eps $<

all : T2.pdf

#SRC = euryale.obj partition.obj pegasus.obj sphinx.obj
#EPS = $(SRC:.obj=.eps)

T2.pdf : T2.ps
T2.ps  : T2.dvi
T2.dvi : T2.tex $(EPS)
	$(LATEX) $<
	$(LATEX) $<

clean:
	$(RM) T2.aux T2.log T2.out T2.toc T2.ps

distclean: clean
	$(RM) $(EPS) T2.pdf T2.dvi
pegasus-wms_4.0.1+dfsg/src/tools/T2/mypopen.c
#include <stdlib.h>
#include <unistd.h>
#include <stdio.h>
#include <string.h>
#include <memory.h>
#include <errno.h>
#include <sys/wait.h>
#include "mypopen.h"

static const char* RCS_Id = "$Id: mypopen.c 50 2007-05-19 00:48:32Z gmehta $";

PipeCmd* mypopen( const char* tag, char* argv[], char* envp[] )
/* purpose: fork off a command and capture its stderr and stdout.
 * warning: does not use /bin/sh -c internally.
 * paramtr: tag (IN): some short tag to name the app
 *          argv (IN): the true argv[] vector for execve
 *          envp (IN): the true envp[] vector for execve
 * returns: a structure which contains information about the child process.
 *          it will return NULL on failure.
 */
{
  pid_t child;
  int pfds[2];
  PipeCmd* result = NULL;

  /* create communication channel with subprocess */
  if ( pipe(pfds) == -1 ) {
    fprintf( stderr, "Error: %s create pipe: %s\n", tag, strerror(errno) );
    return result;
  }

  /* prepare for fork */
  fflush( stdout );
  fflush( stderr );

  /* popen(): spawn child process to execute the given command */
  if ( (child=fork()) == (pid_t) -1 ) {
    /* unable to fork */
    fprintf( stderr, "Error: %s fork: %s\n", tag, strerror(errno) );
    return result;
  } else if ( child == 0 ) {
    /* child - redirect stdout and stderr onto communication channel */
    close(pfds[0]);
    if ( dup2( pfds[1], STDOUT_FILENO ) == -1 ) _exit(126);
    if ( dup2( pfds[1], STDERR_FILENO ) == -1 ) _exit(126);
    close(pfds[1]);
    execve( argv[0], argv, envp );
    _exit(127); /* if you reach this, exec failed */
  }

  /* parent */
  close(pfds[1]);

  /* prepare result */
  if ( (result = (PipeCmd*) malloc( sizeof(PipeCmd) )) != NULL ) {
    result->child = child;
    result->readfd = pfds[0];
  }
  return result;
}

int mypclose( PipeCmd* po )
/* purpose: free the data structure and all associated resources.
 * paramtr: po (IO): is a valid pipe open structure.
 * returns: process exit status, or -1 for invalid po structure.
 */
{
  int status = -1;

  /* sanity check */
  if ( po != NULL ) {
    /* close fd early to send SIGPIPE */
    close(po->readfd);

    /* wait for child; bail out on persistent errors instead of spinning */
    while ( waitpid( po->child, &status, 0 ) == -1 ) {
      if ( errno == EINTR || errno == EAGAIN ) continue;
      fprintf( stderr, "Error: waiting for child %d: %s\n",
               (int) po->child, strerror(errno) );
      status = -1;
      break;
    }

    /* done with memory piece */
    free( (void*) po );
  }
  return status;
}

int pipe_out_cmd( const char* tag, char* argv[], char* envp[],
                  char* buffer, size_t blen )
/* purpose: fork off a command and capture its stderr and stdout
 * paramtr: tag (IN): some short tag to name the app
 *          argv (IN): the true argv[] vector for execve
 *          envp (IN): the true envp[] vector for execve
 *          buffer (OUT): area to store output into. Will be cleared
 *          blen (IN): length of the area that is usable to us.
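 * example: a hypothetical caller sketch, not part of the original file;
 *          the argv values and buffer size are made up for illustration:
 *            extern char** environ;
 *            char* argv[] = { "/bin/date", NULL };
 *            char buf[4096];
 *            int status = pipe_out_cmd( "date", argv, environ, buf, sizeof(buf)-1 );
 *            if ( status != -1 && WIFEXITED(status) ) fputs( buf, stdout );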
 * returns: -1 for regular failure, exit code from application otherwise
 */
{
  ssize_t rsize, wsize = 0;
  PipeCmd* cmd = mypopen( tag, argv, envp );

  /* prepare */
  if ( cmd == NULL ) return -1;
  else memset( buffer, 0, blen );

  /* read result(s) until the buffer is full or the pipe is drained */
  while ( wsize < blen &&
          (rsize=read( cmd->readfd, buffer+wsize, blen-wsize )) > 0 ) {
    wsize += rsize;
  }

  /* done with it */
  return mypclose(cmd);
}
pegasus-wms_4.0.1+dfsg/src/tools/replanner/
pegasus-wms_4.0.1+dfsg/src/tools/replanner/Makefile
#
# Makefile
#
INSTALL = install
NROFF   = groff -mandoc
TEXT    = -Tlatin1
HTML    = -Thtml
GENDOC  = replanner.html replanner.txt replanner.ps

%.html : %.1 ; $(NROFF) $(HTML) $< > $@
%.ps   : %.1 ; $(NROFF) $< > $@
%.txt  : %.1 ; $(NROFF) $(TEXT) $< > $@

install: $(GENDOC)
	$(INSTALL) -m 0755 replanner $(PEGASUS_HOME)/bin
	$(INSTALL) -m 0644 $(GENDOC) $(PEGASUS_HOME)/man
	$(INSTALL) -m 0644 replanner.1 $(PEGASUS_HOME)/man/man1

doc: $(GENDOC)

clean:
	$(RM) $(GENDOC) core

distclean: clean
pegasus-wms_4.0.1+dfsg/src/tools/replanner/replanner
#!/usr/bin/env perl
#
# replanner - use cplan output, and change around the .sub files
#
require 5.005;
use strict;
use Getopt::Std;
use File::Spec;
use File::Basename;

if ( @ARGV == 0 || $ARGV[0] eq '-?'
     || $ARGV[0] eq '--help' ) {
    print "Usage: $0 [-p pool.config] -d plandir\n";
    exit 0;
}

my %opts;
getopt( 'd:p:', \%opts );
unless ( exists $opts{d} ) {
    print "Error!\n";
    print "I must know the directory where the .sub files reside.\n";
    exit(1);
}
unless ( exists $opts{p} ) {
    if ( length($ENV{'PEGASUS_HOME'}) > 0 ) {
        $opts{p} = File::Spec->catfile($ENV{'PEGASUS_HOME'},'etc','pool.config');
    } else {
        print "Error!\n";
        print "I must know the location of a valid pool.config file.\n";
        exit(1);
    }
}

my @dagfn = glob("$opts{d}/*.dag");
if ( @dagfn > 1 ) {
    print "Error!\n";
    print "There are multiple .dag files in the $opts{d} directory.\n";
    exit 1;
} elsif ( @dagfn < 1 ) {
    print "Error!\n";
    print "There is no .dag file in the $opts{d} directory.\n";
    exit 1;
}
my $dagfn = basename($dagfn[0],'.dag');
print STDERR "# basename of DAG file is \"$dagfn\"\n";

# slurp pool config
my $comment = '#.*$'; # '
my (%pool,%jm);
if ( open( IN, "<$opts{p}" ) ) {
    print STDERR "# reading pool.config file $opts{p}\n";
    while (<IN>) {
        s/[\r\n]*$//;
        s/$comment//;
        next unless length($_) > 1;
        my @column = split(/\s+/,$_,6);
        $pool{$column[0]}{lc($column[1])} = [ @column[2..5] ];
        $jm{lc($column[2])}{lc($column[1])} = $column[0];
    }
    close(IN);
} else {
    die "ropen $opts{p}: $!\n";
}

foreach my $fn ( glob("$opts{d}/*.sub") ) {
    if ( open( IN, "<$fn" ) ) {
        print STDERR "<<< processing $fn\n";

        # guess at grid id of job
        my $outfn = substr($fn,0,rindex($fn,'.'));
        $outfn = basename($outfn) unless ( substr($outfn,0,1) eq '/' );
        my $gridid = "null";
        $gridid = $1 if ( $outfn =~ /_(ID\d+)/ );
        my $errfn = $outfn . '.err';
        $outfn .= '.out';

        # input
        my (@lines,%lines,$save,$flag,$universe,$handle);
        while ( <IN> ) {
            s/[\r\n]*$//;
            # weed out special errors
            next if ( /^\s*Error\s*=\s*(.*)/i && index($1,$dagfn) >= 0 );
            push( @lines, $_ );
            s/$comment//;
            next unless length($_) > 1;
            $lines{lc($1)}=$2 if /\s*(\S+)\s*=\s*(.*)/;
        }
        close(IN);

        # obtain universe
        $universe = 'vanilla';
        $universe = lc($lines{universe}) if exists $lines{universe};
        print STDERR "## initial universe = $universe\n" if $main::DEBUG;

        # guess at pool handle
        if ( $universe eq 'globus' ) {
            $_ = $lines{globusrsl};
            $universe = ( /jobType=condor/i ) ? 'standard' : 'vanilla';
            $_ = $lines{executable};
            $universe = 'transfer' if ( /gsincftp/i && $universe eq 'vanilla' );
            my $jm = lc($lines{globusscheduler});
            $handle = $jm{$jm}{$universe};
            if ( length($handle) == 0 ) {
                if ( exists $jm{$jm}{globus} ) {
                    $universe = 'globus';
                    $handle = $jm{$jm}{$universe};
                } else {
                    warn "unable to determine pool handle\n";
                    next;
                }
            }
        } else {
            $handle = 'local';
        }

        # obtain kickstart location (and optional args) from the pool file
        my ($kickstart,$remainder) = split(/\s+/,$pool{$handle}{$universe}[3],2);
        if ( length($kickstart) == 0 || lc($kickstart) eq 'null' ) {
            print STDERR "gridstart empty or not found for ($handle,$universe), skipping $fn\n";
            next;
        }
        print STDERR "# $handle:$universe => $kickstart $remainder\n";

        # change stdio files
        $remainder .= " -i $lines{input}" if ( exists $lines{input} );
        $remainder .= " -o $lines{output}"
            if ( exists $lines{output} && index($lines{output},$dagfn) == -1 );
        $remainder .= " -e $lines{error}"
            if ( exists $lines{error} && index($lines{error},$dagfn) == -1 );
        $remainder .= " -n $gridid" if ( length($gridid) && $gridid ne 'null' );

        # real processing
        for ( my $i=0; $i < @lines; ++$i ) {
            if ( $lines[$i] =~ /^Executable/i ) {
                $save = $lines{executable};
                $lines[$i] = "Executable\t= $kickstart";
                $flag |= 1;
            } elsif ( $lines[$i] =~ /^Arguments/i ) {
                $lines[$i] = "Arguments\t=$remainder $save $lines{arguments}";
                $flag |= 2;
            } elsif ( $lines[$i] =~ /^Input/i ) {
                $lines[$i] = "Input \t= /dev/null";
            } elsif ( $lines[$i] =~ /^Output/i ) {
                $lines[$i] = "Output\t= $outfn";
                $flag |= 4;
            } elsif ( $lines[$i] =~ /^Error/i ) {
                $lines[$i] = "Error \t= $errfn";
                $flag |= 8;
            }
        }

        # post-processing (cmd w/o arguments)
        if ( ($flag & 2) == 0 ) {
            splice(@lines,1,0,"Arguments\t=$remainder $save");
            $flag |= 2;
        }
        # we need to capture the output - always
        if ( ($flag & 4) == 0 ) {
            splice(@lines,1,0,"Output\t= $outfn");
            $flag |= 4;
        }
        # we should add the error capture just in case
        if ( ($flag & 8) == 0 ) {
            splice(@lines,1,0,"Error \t= $errfn");
            $flag |= 8;
        }

        # output
        if ( ($flag & 7) == 7 ) {
            rename($fn,"$fn.bak");
            if ( open( OUT, ">$fn" ) ) {
                print OUT join("\n",@lines), "\n";
                close(OUT);
            } else {
                warn "wopen $fn: $!\n";
            }
        } else {
            warn "incomplete submit file $fn!\n";
        }
        print STDERR ">>> done with $fn\n";
    } else {
        warn "ropen $fn: $!\n";
    }
}
pegasus-wms_4.0.1+dfsg/src/tools/replanner/replanner.1
.\"Globus Toolkit Public License (GTPL)
.\"
.\"Copyright (c) 1999 University of Chicago and The University of
.\"Southern California. All Rights Reserved.
.\"
.\" 1) The "Software", below, refers to the Globus Toolkit (in either
.\"    source-code, or binary form and accompanying documentation) and a
.\"    "work based on the Software" means a work based on either the
.\"    Software, on part of the Software, or on any derivative work of
.\"    the Software under copyright law: that is, a work containing all
.\"    or a portion of the Software either verbatim or with
.\"    modifications.  Each licensee is addressed as "you" or "Licensee."
.\"
.\" 2) The University of Southern California and the University of
.\"    Chicago as Operator of Argonne National Laboratory are copyright
.\"    holders in the Software.  The copyright holders and their third
.\"    party licensors hereby grant Licensee a royalty-free nonexclusive
.\"    license, subject to the limitations stated herein and
.\"    U.S. Government license rights.
.\" .\" 3) A copy or copies of the Software may be given to others, if you .\" meet the following conditions: .\" .\" a) Copies in source code must include the copyright notice and .\" this license. .\" .\" b) Copies in binary form must include the copyright notice and .\" this license in the documentation and/or other materials .\" provided with the copy. .\" .\" 4) All advertising materials, journal articles and documentation .\" mentioning features derived from or use of the Software must .\" display the following acknowledgement: .\" .\" "This product includes software developed by and/or derived from .\" the Globus project (http://www.globus.org/)." .\" .\" In the event that the product being advertised includes an intact .\" Globus distribution (with copyright and license included) then .\" this clause is waived. .\" .\" 5) You are encouraged to package modifications to the Software .\" separately, as patches to the Software. .\" .\" 6) You may make modifications to the Software, however, if you .\" modify a copy or copies of the Software or any portion of it, .\" thus forming a work based on the Software, and give a copy or .\" copies of such work to others, either in source code or binary .\" form, you must meet the following conditions: .\" .\" a) The Software must carry prominent notices stating that you .\" changed specified portions of the Software. .\" .\" b) The Software must display the following acknowledgement: .\" .\" "This product includes software developed by and/or derived .\" from the Globus Project (http://www.globus.org/) to which the .\" U.S. Government retains certain rights." .\" .\" 7) You may incorporate the Software or a modified version of the .\" Software into a commercial product, if you meet the following .\" conditions: .\" .\" a) The commercial product or accompanying documentation must .\" display the following acknowledgment: .\" .\" "This product includes software developed by and/or derived .\" from the Globus Project (http://www.globus.org/) to which the .\" U.S. Government retains a paid-up, nonexclusive, irrevocable .\" worldwide license to reproduce, prepare derivative works, and .\" perform publicly and display publicly." .\" .\" b) The user of the commercial product must be given the following .\" notice: .\" .\" "[Commercial product] was prepared, in part, as an account of .\" work sponsored by an agency of the United States Government. .\" Neither the United States, nor the University of Chicago, nor .\" University of Southern California, nor any contributors to .\" the Globus Project or Globus Toolkit nor any of their employees, .\" makes any warranty express or implied, or assumes any legal .\" liability or responsibility for the accuracy, completeness, or .\" usefulness of any information, apparatus, product, or process .\" disclosed, or represents that its use would not infringe .\" privately owned rights. .\" .\" IN NO EVENT WILL THE UNITED STATES, THE UNIVERSITY OF CHICAGO .\" OR THE UNIVERSITY OF SOUTHERN CALIFORNIA OR ANY CONTRIBUTORS .\" TO THE GLOBUS PROJECT OR GLOBUS TOOLKIT BE LIABLE FOR ANY .\" DAMAGES, INCLUDING DIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL .\" DAMAGES RESULTING FROM EXERCISE OF THIS LICENSE AGREEMENT OR .\" THE USE OF THE [COMMERCIAL PRODUCT]." .\" .\" 8) LICENSEE AGREES THAT THE EXPORT OF GOODS AND/OR TECHNICAL DATA .\" FROM THE UNITED STATES MAY REQUIRE SOME FORM OF EXPORT CONTROL .\" LICENSE FROM THE U.S. 
GOVERNMENT AND THAT FAILURE TO OBTAIN SUCH
.\" EXPORT CONTROL LICENSE MAY RESULT IN CRIMINAL LIABILITY UNDER U.S.
.\" LAWS.
.\"
.\" 9) Portions of the Software resulted from work developed under a
.\"    U.S. Government contract and are subject to the following license:
.\"    the Government is granted for itself and others acting on its
.\"    behalf a paid-up, nonexclusive, irrevocable worldwide license in
.\"    this computer software to reproduce, prepare derivative works, and
.\"    perform publicly and display publicly.
.\"
.\"10) The Software was prepared, in part, as an account of work
.\"    sponsored by an agency of the United States Government.  Neither
.\"    the United States, nor the University of Chicago, nor The
.\"    University of Southern California, nor any contributors to the
.\"    Globus Project or Globus Toolkit, nor any of their employees,
.\"    makes any warranty express or implied, or assumes any legal
.\"    liability or responsibility for the accuracy, completeness, or
.\"    usefulness of any information, apparatus, product, or process
.\"    disclosed, or represents that its use would not infringe privately
.\"    owned rights.
.\"
.\"11) IN NO EVENT WILL THE UNITED STATES, THE UNIVERSITY OF CHICAGO OR
.\"    THE UNIVERSITY OF SOUTHERN CALIFORNIA OR ANY CONTRIBUTORS TO THE
.\"    GLOBUS PROJECT OR GLOBUS TOOLKIT BE LIABLE FOR ANY DAMAGES,
.\"    INCLUDING DIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES
.\"    RESULTING FROM EXERCISE OF THIS LICENSE AGREEMENT OR THE USE OF
.\"    THE SOFTWARE.
.\"
.\" END OF LICENSE
.\"
.\" $Id: replanner.1 50 2007-05-19 00:48:32Z gmehta $
.\"
.\" Authors: Jens-S. Vöckler, Yong Zhao
.\"
.\"
.TH replanner 1 "September, 12th 2002"
.SH NAME
replanner \- change cplan output for stdio handling and insert kickstart
.SH SYNOPSIS
.B replanner
[\-p\~poolfn] \-d\~dir
.SH DESCRIPTION
The
.B replanner
executable is a simple perl script that takes a pool.config file into
account, and replans the Condor submit files as generated by the concrete
planner. In the replan process, it corrects the capture of any
.IR stdin ,
.IR stdout
and
.I stderr
that might occur in the submit file to connect to the correct remote pool
file (local to where the job runs).
.PP
An additional benefit of kickstart is that it transfers job performance
data back to the submit site. From this performance data, the invocation
records in the VDDB are generated.
.SH ARGUMENTS
.TP
.B \-p poolfn
allows one to override the standard location of the
.I pool.config
file.
.IP
The default is to use the
.I $PEGASUS_HOME/etc/pool.config
file.
.TP
.B \-d dir
refers to the directory in which the concrete planner generated its
products. This parameter is mandatory; there is no default.
.SH "RETURN VALUE"
.B replanner
will exit with an error if it cannot find the pool file, if it finds more
than one DAGMan control file in the specified directory, or if the
mandatory parameter \-d is missing.
.PP
Errors opening or writing files are warned about, but do not abort
execution.
.SH "SEE ALSO"
.BR gencdag (1),
.BR kickstart (1).
.SH EXAMPLE
The enhancements of the
.B replanner
are most prominently seen in the changes to an application submit file.
Originally, this application submit file called
.I /bin/echo
and wanted to capture the
.I stdout
into a file for later perusal.
Since the job runs in the
.I globus
universe, the actual output is transferred via GASS to the submit site,
and not kept on the remote host:
.nf
\f(CB
universe = globus
globusscheduler = chalant.mcs.anl.gov/jobmanager-condor
globusrsl = (directory=/homes/voeckler/vdldemo)
executable = /bin/echo
arguments = Hello World
output = data.out
error = world_ID000001.err
log = hello-world-0.log
notification = never
transfer_executable = false
copy_to_spool = false
queue
\fP
.fi
Running the replanner replaces the application
.I /bin/echo
with the
.B kickstart
application. The original application and its arguments become arguments
to the
.B kickstart
gridstart program. Additional parameters capture the
.I stdout
and
.I stderr
streams of the application on the remote pool in the appropriate files.
Additionally,
.B kickstart
reports the performance data from running the application on the
GASS-transferred
.IR stdout ;
see its manual page.
.nf
\f(CB
universe = globus
globusscheduler = chalant.mcs.anl.gov/jobmanager-condor
globusrsl = (directory=/homes/voeckler/vdldemo)
Executable = /homes/voeckler/bin/kickstart
Arguments = -o data.out -e world_ID000001.err -n ID000001 /bin/echo Hello World
Output = world_ID000001.out
Error = world_ID000001.err
log = hello-world-0.log
notification = never
transfer_executable = false
copy_to_spool = false
queue
\fP
.fi
.SH RESTRICTIONS
Only those submit files will be replanned for which the gridstart column
in the
.I pool.config
file is not null and not empty.
.SH AUTHORS
Jens-S. Vöckler <voeckler@cs.uchicago.edu>,
.br
Mike Wilde <wilde@mcs.anl.gov>,
.br
Yong Zhao <yongzh@cs.uchicago.edu>.
.PP
Chimera
.B http://www.griphyn.org/chimera/
.br
GriPhyN
.BR http://www.griphyn.org/
pegasus-wms_4.0.1+dfsg/src/tools/k.2/
pegasus-wms_4.0.1+dfsg/src/tools/k.2/shared.hh
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _SHARED_HH #define _SHARED_HH #include <string> #include <set> #include <list> typedef std::set<std::string> StringSet; typedef std::list<std::string> StringList; #endif // _SHARED_HH �������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������pegasus-wms_4.0.1+dfsg/src/tools/k.2/xml.hh���������������������������������������������������������0000644�0001750�0001750�00000012102�11757531137�017472� 0����������������������������������������������������������������������������������������������������ustar �rynge���������������������������rynge������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������������/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _CHIMERA_XML_HH #define _CHIMERA_XML_HH #include <string> #include <iostream> #include <iomanip> // for convenience class XML { // interface to enable recursive printing of XML data public: static std::string quote( const std::string& original, bool isAttribute = false ); // purpose: Escapes certain characters inappropriate for XML output. // paramtr: original (IN): is a string that needs to be quoted // isAttribute (IN): denotes an attributes value, if set to true. // If false, it denotes regular XML content outside of attributes. // returns: a string that is "safe" to print as XML. static std::string indent( int width ); // purpose: create a string with indentation // paramtr: width (IN): if >0, generate that many spaces // returns: a string either empty, or with the wanted number of spaces. static std::string printf( const char* fmt, ... ); // purpose: format a string into a buffer. // paramtr: fmt (IN): printf compatible format // ... (IN): parameters to format // returns: a string with the formatted information // // --- string based -------------------------------------------------- // static inline std::string startElement( const std::string& tag, int indent = 0, const char* nspace = 0 ) // purpose: starts an XML element without closing the angular bracket // paramtr: tag (IN): name of the tag to use // indent (IN): indentation depth, negative for none // nspace (IN): tag namespace, if not null // returns: a string containing the opening tag { std::string result( XML::indent(indent) ); // result.reserve( tag.size() + 3 + ( nspace ? 
strlen(nspace) : 0) );
    result += '<';
    if ( nspace ) result.append(nspace).append(":");
    return result.append(tag);
  }

  static inline std::string
  finalElement( const std::string& tag, int indent = 0,
                const char* nspace = 0, bool crlf = true )
  // purpose: creates an XML closing element with the angular bracket CRLF
  // paramtr: tag (IN): name of the tag to use
  //          indent (IN): indentation depth, negative for none
  //          nspace (IN): tag namespace, if not null
  // returns: a string containing the closing tag
  {
    std::string result( XML::indent(indent) );
    result += "</";
    if ( nspace ) result.append(nspace).append(":");
    result += tag + ">";
    if ( crlf ) result += "\r\n";
    return result;
  }

  virtual std::string toXML( int indent = 0, const char* nspace = 0 ) const = 0;
  // purpose: XML format a record.
  // paramtr: indent (IN): indentation level of tag
  //          nspace (IN): If defined, namespace prefix before element
  // returns: string containing the element data

  //
  // --- stream based --------------------------------------------------
  //

  inline static std::ostream&
  startElement( std::ostream& s, const std::string& tag,
                int indent = 0, const char* nspace = 0 )
  // purpose: starts an XML element without closing the angular bracket
  // paramtr: s (IO): stream to put the element on
  //          tag (IN): name of the tag to use
  //          indent (IN): indentation depth, negative for none
  //          nspace (IN): tag namespace, if not null
  // returns: s
  {
    s << XML::indent(indent) << '<';
    if ( nspace ) s << nspace << ':';
    return s << tag;
  }

  static inline std::ostream&
  finalElement( std::ostream& s, const std::string& tag,
                int indent = 0, const char* nspace = 0, bool crlf = true )
  // purpose: creates an XML closing element with the angular bracket CRLF
  // paramtr: s (IO): stream to put the element on
  //          tag (IN): name of the tag to use
  //          indent (IN): indentation depth, negative for none
  //          nspace (IN): tag namespace, if not null
  // returns: s
  {
    s << XML::indent(indent) << "</";
    if ( nspace ) s << nspace << ':';
    s << tag << '>';
    if ( crlf ) s << "\r\n";
    return s;
  }

  virtual std::ostream&
  toXML( std::ostream& s, int indent = 0, const char* nspace = 0 ) const = 0;
  // purpose: XML format a record onto a given stream
  // paramtr: s (IO): stream to put information into
  //          indent (IN): indentation level of tag
  //          nspace (IN): If defined, namespace prefix before element
  // returns: s
};

#endif // _CHIMERA_XML_HH
pegasus-wms_4.0.1+dfsg/src/tools/k.2/quote.hh
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#ifndef _CHIMERA_QUOTE_HH
#define _CHIMERA_QUOTE_HH

#include "shared.hh"

namespace Quote {
  // deal with unquoting one level of quotes, variable interpolation
  // and backslash escapes.

  enum Action {
    Noop,   // nothing to do
    Sb,     // store char into buffer
    Xb,     // like Sb, but interpret \n and friends
    Fb,     // finalize buffer (call context sensitive)
    Sv,     // store char into varname buffer
    Fvpb,   // put back char, and resolve varname buffer
    Fv,     // resolve content of varname buffer
    SDpb,   // store dollar and push back input
    SD,     // store dollar
    A_MAX
  };

  enum State {
    NQ_LWS,   // start state for CLI-argv splitting parser, skips lws
    NQ_MAIN,  // main state for filling one argv
    NQ_BS,    // backslash processing state for "unquoted" strings
    NQ_DLLR,  // dollar recognized in "unquoted" strings
    NQ_VAR1,  // collecting $var varname
    NQ_VAR2,  // collecting ${var} varname
    DQ_MAIN,  // main state for double quoted strings, fills buffer
    DQ_BS,    // backslash processing state
    DQ_DLLR,  // dollar recognized state
    DQ_VAR1,  // collecting $var varname
    DQ_VAR2,  // collecting ${var} varname
    SQ_MAIN,  // main state for single quoted strings, fills buffer
    SQ_BS,    // backslash processing state
    FINAL,    // good final state: done parsing
    ERR1,     // bad final state: premature end of string
    ERR2,     // bad final state: missing apostrophe
    ERR3,     // bad final state: missing quote
    ERR4,     // bad final state: illegal character in varname
    S_MAX
  };

  enum CharClass {
    EOS,      // end of string, NUL character
    QUOTE,    // double quote character
    APOS,     // single quote character
    DOLLAR,   // dollar sign
    LBRACE,   // left brace, opening
    RBRACE,   // right brace, closing
    BSLASH,   // backslash character
    ALNUM,    // legal identifier character
    LWS,      // any whitespace
    ELSE,     // any other character
    C_MAX
  };

  extern CharClass xlate( int ch );
  // purpose: translates an input character into its character class
  // paramtr: ch (IN): input character
  // returns: the character class

  extern State parse( const std::string& input, std::string& output,
                      State state );
  // purpose: parse a single or doubly-quoted string into a single string
  // paramtr: input (IN): The raw string without outer quotes
  //          output (OUT): The cooked string, one level removed
  //          state (IN): start state: 0 -> squotes, 2 -> dquotes
  // returns: the final state after being done

  extern State parse( const std::string& input, StringList& output,
                      State state );
  // purpose: parse a single or doubly-quoted string into an argv[] list
  // paramtr: input (IN): The raw string without outer quotes
  //          output (OUT): The cooked ws-split argv, one level removed
  //          state (IN): start state: 0 -> squotes, 2 -> dquotes
  // returns: the final state after being done
}

#endif // _CHIMERA_QUOTE_HH
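A usage sketch for the Quote::parse declarations above follows; it is
illustrative only and not part of the package. It assumes, per the header
comments, that the NQ_LWS start state drives the whitespace-splitting argv
parser and that the (unseen) implementation behaves as documented.

    #include <iostream>
    #include "quote.hh"

    int main()
    {
      // split a command line into argv words, removing one level of quotes
      StringList argv;
      Quote::State rc =
        Quote::parse( "/bin/echo \"Hello World\"", argv, Quote::NQ_LWS );
      if ( rc == Quote::FINAL )
        for ( StringList::const_iterator i = argv.begin(); i != argv.end(); ++i )
          std::cout << *i << std::endl;   // expect: /bin/echo, Hello World
      return rc == Quote::FINAL ? 0 : 1;
    }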
pegasus-wms_4.0.1+dfsg/src/tools/k.2/license.html
<html> <head> <title>Globus Toolkit Public License (GTPL)
Commitment to Open Source

The Globus Alliance is committed to maintaining a liberal, open source license. The Globus Toolkit Public License (GTPL) allows software to be used by anyone and for any purpose, without restriction. We believe that this is the best way to ensure that Grid technologies gain wide spread acceptance and benefit from a large developer community.

Globus Toolkit Public License (GTPL) Version 2

Globus Toolkit Public License
Version 2, July 31, 2003

Copyright 1999-2003 University of Chicago and The University of Southern
California.  All rights reserved.

This software referred to as the Globus Toolkit software
("Software") includes voluntary contributions made to the Globus
Project collaboration.  Persons and entities that have made voluntary
contributions are hereinafter referred to as "Contributors." This Globus
Toolkit Public License is referred to herein as "the GTPL."  For more
information on the Globus Project, please see http://www.globus.org/.

Permission is granted for the installation, use, reproduction,
modification, display, performance and redistribution of this Software,
with or without modification, in source and binary forms.  Permission is
granted for the installation, use, reproduction, modification, display,
performance and redistribution of user files, manuals, and training and
demonstration slides ("Documentation") distributed with or specifically
designated as distributed under the GTPL.  Any exercise of rights under
the GTPL is subject to the following conditions:

1.  Redistributions of this Software, with or without modification,
    must reproduce the GTPL in: (1) the Software, or (2) the Documentation
    or some other similar material which is provided with the Software
    (if any).

2.  The Documentation, alone or if included with a redistribution of
    the Software, must include the following notice: "This
    product includes material developed by the Globus Project
    (http://www.globus.org/)."

    Alternatively, if that is where third-party acknowledgments normally
    appear, this acknowledgment must be reproduced in the Software itself.

3.  Globus Toolkit and Globus Project are trademarks of the
    University of Chicago.  Any trademarks of the University of
    Chicago or the University of Southern California may not be used
    to endorse or promote software, or products derived therefrom, and
    except as expressly provided herein may not be affixed to modified
    redistributions of this Software or Documentation except with prior
    written approval, obtainable at the discretion of the trademark
    owner from info@globus.org.

4.  To the extent that patent claims licensable by the University of
    Southern California and/or by the University of Chicago (as Operator
    of Argonne National Laboratory) are necessarily infringed by the
    use or sale of the Software, you and your transferees are granted
    a non-exclusive, worldwide, royalty-free license under such patent
    claims, with the rights to make, use, sell, offer to sell, import and
    otherwise transfer the Software in source code and object code form.
    This patent license shall not apply to Documentation or to any other
    software combinations which include the Software.  No hardware per
    se is licensed hereunder.

    If you or any subsequent transferee (a "Recipient") institutes patent
    litigation against any entity (including a cross-claim or counterclaim
    in a lawsuit) alleging that the Software infringes such Recipient's
    patent(s), then such Recipient's rights granted under the patent
    license above shall terminate as of the date such litigation is filed.

5.  DISCLAIMER 

    SOFTWARE AND DOCUMENTATION ARE PROVIDED BY THE COPYRIGHT HOLDERS AND
    CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
    BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY,
    OF SATISFACTORY QUALITY, AND FITNESS FOR A PARTICULAR PURPOSE OR
    USE ARE DISCLAIMED.  THE COPYRIGHT HOLDERS AND CONTRIBUTORS MAKE
    NO REPRESENTATION THAT THE SOFTWARE, DOCUMENTATION, MODIFICATIONS,
    ENHANCEMENTS OR DERIVATIVE WORKS THEREOF, WILL NOT INFRINGE ANY
    PATENT, COPYRIGHT, TRADEMARK, TRADE SECRET OR OTHER PROPRIETARY RIGHT.

6.  LIMITATION OF LIABILITY

    THE COPYRIGHT HOLDERS AND CONTRIBUTORS SHALL HAVE NO LIABILITY TO
    LICENSEE OR OTHER PERSONS FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL,
    CONSEQUENTIAL, EXEMPLARY, OR PUNITIVE DAMAGES OF ANY CHARACTER
    INCLUDING, WITHOUT LIMITATION, PROCUREMENT OF SUBSTITUTE GOODS OR
    SERVICES, LOSS OF USE, DATA OR PROFITS, OR BUSINESS INTERRUPTION,
    HOWEVER CAUSED AND ON ANY THEORY OF CONTRACT, WARRANTY, TORT
    (INCLUDING NEGLIGENCE), PRODUCT LIABILITY OR OTHERWISE, ARISING IN
    ANY WAY OUT OF THE USE OF THIS SOFTWARE OR DOCUMENTATION, EVEN IF
    ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

7.  The Globus Project may publish revised and/or new versions of
    the GTPL from time to time.  Each version will be given a
    distinguishing version number.  Once Software or Documentation
    has been published under a particular version of the GTPL, you may
    always continue to use it under the terms of that version. You may
    also choose to use such Software or Documentation under the terms of
    any subsequent version of the GTPL published by the Globus Project.
    No one other than the Globus Project has the right to modify the
    terms of the GTPL.

Globus Toolkit Public License 7-31-03
pegasus-wms_4.0.1+dfsg/src/tools/k.2/uname.cc
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include "uname.hh"

static const char* RCS_ID = "$Id: uname.cc 50 2007-05-19 00:48:32Z gmehta $";

#if 0
static size_t mystrlen( const char* s )
{
  size_t i=0;
  while ( i < SYS_NMLN && s[i] ) ++i;
  return i;
}
#endif

static char* mytolower( char* s )
{
  // array version
  for ( size_t i=0; i < SYS_NMLN && s[i]; ++i ) s[i] = tolower(s[i]);
  return s;
}

static std::string convert( const char* s )
{
  size_t i=0;
  while ( i < SYS_NMLN && s[i] ) ++i;
  return std::string( s, i );
}

Uname::Uname()
{
  // find out where we run at (might stall for some time on DNS probs?)
  if ( uname( &m_uname ) == -1 ) {
    memset( &m_uname, 0, sizeof(m_uname) );
  } else {
    // downcase most things
    mytolower( m_uname.sysname );
    mytolower( m_uname.nodename );
    mytolower( m_uname.machine );
#ifdef _GNU_SOURCE
    mytolower( m_uname.domainname );
#endif
  }

  // now for the messy part, which needs adjustments for each and every
  // operating system architecture we run this on
#if defined(AIX)
  strncpy( m_archmode, "IBM", SYS_NMLN );
#elif defined(SUNOS)
#if defined(_LP64)
  strncpy( m_archmode, "LP64", SYS_NMLN );
#elif defined(_ILP32)
  strncpy( m_archmode, "ILP32", SYS_NMLN );
#else
  strncpy( m_archmode, "unknown SUN", SYS_NMLN );
#endif // SunOS architecture
#elif defined(LINUX)
  // the pointer size distinguishes the data model; sizeof(int) is 4 in
  // both the IA32 and IA64 environments
  switch ( sizeof(void*) ) {
  case 4:
    strncpy( m_archmode, "IA32", SYS_NMLN );
    break;
  case 8:
    strncpy( m_archmode, "IA64", SYS_NMLN );
    break;
  default:
    strncpy( m_archmode, "unknown LINUX", SYS_NMLN );
  }
#else // unknown architecture
  strncpy( m_archmode, "unknown", SYS_NMLN );
#endif // SUNOS
}

Uname::~Uname()
{
  // empty
}

std::ostream&
Uname::toXML( std::ostream& s, int indent, const char* nspace ) const
// purpose: XML format a uname info record onto a given stream
// paramtr: s (IO): stream to put information into
//          indent (IN): indentation level of tag
//          nspace (IN): If defined, namespace prefix before element
// returns: s
{
  // start: startElement writes the indented opening tag directly into s
  XML::startElement( s, "uname", indent, nspace );

  // attributes
  s << " system=\"" << convert( m_uname.sysname ) << '"';
  if ( *m_archmode ) s << " archmode=\"" << convert( m_archmode ) << '"';
  s << " nodename=\"" << convert( m_uname.nodename ) << '"';
  s << " release=\"" << convert( m_uname.release ) << '"';
  s << " machine=\"" << convert( m_uname.machine ) << '"';
#ifdef _GNU_SOURCE
  if ( *m_uname.domainname )
    s << " domainname=\"" << convert( m_uname.domainname ) << '"';
#endif

  s << '>' << convert( m_uname.version );
  s << XML::finalElement( "uname", 0, nspace );
  return s;
}

std::string
Uname::toXML( int indent, const char* nspace ) const
// purpose: XML format a uname record.
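// example: hypothetical usage, not part of the original source; both
//          overloads emit the same <uname> element, to a stream above
//          and as a string below (the "iv" namespace prefix is made up):
//            Uname u;
//            u.toXML( std::cout, 2, "iv" );
//            std::string x = u.toXML( 2, "iv" );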
// paramtr: indent (IN): indentation level of tag // nspace (IN): If defined, namespace prefix before element // returns: string containing the element data { // start std::string result( XML::startElement( "uname", indent, nspace ) ); // attributes result += " system=\"" + convert( m_uname.sysname ); if ( *m_archmode ) result += "\" archmode=\"" + convert( m_archmode ); result += "\" nodename=\"" + convert( m_uname.nodename ); result += "\" release=\"" + convert( m_uname.release ); result += "\" machine=\"" + convert( m_uname.machine ); #ifdef _GNU_SOURCE if ( *m_uname.domainname ) result += "\" domainname=\"" + convert( m_uname.domainname ); #endif result += "\">" + convert( m_uname.version ); result += XML::finalElement( "uname", 0, nspace ); return result; } pegasus-wms_4.0.1+dfsg/src/tools/k.2/k.2.10000644000175000017500000002544211757531137017040 0ustar ryngerynge.\" .\" This file or a portion of this file is licensed under the terms of .\" the Globus Toolkit Public License, found in file GTPL, or at .\" http://www.globus.org/toolkit/download/license.html. This notice must .\" appear in redistributions of this file, with or without modification. .\" .\" Redistributions of this Software, with or without modification, must .\" reproduce the GTPL in: (1) the Software, or (2) the Documentation or .\" some other similar material which is provided with the Software (if .\" any). .\" .\" Copyright 1999-2004 University of Chicago and The University of .\" Southern California. All rights reserved. .\" .\" $Id: k.2.1 50 2007-05-19 00:48:32Z gmehta $ .\" .\" Authors: Jens-S. Vöckler, Yong Zhao .\" .TH k.2 1 " March 17th, 2004 " " .SH NAME k.2 \- run an executable while recording invocation data .SH SYNOPSIS .B k.2 [cfgfile] .SH DESCRIPTION The .B k.2 executable is a light-weight program which connects the .I stdio filehandles for Chimera jobs on the remote site. In addition, it records the invocation data from the runtime environment. It can be extensively configured using a configuration file, which is usually passed via .I stdin . .PP Sitting in between the remote scheduler and the executable, it is possible for .B k.2 to gather additional information about the executable run-time behavior, including the exit status of jobs. This information is important for Chimera invocation tracking as well as for Condor DAGMan's awareness of Globus job failures. .PP .B K.2 permits the optional execution of jobs before and after the main application job, which run in chained execution with it. In addition, it permits any number of clean-up jobs, which run independently of any exit code. .PP All jobs that specify their application with a relative path are searched for relative to the current working directory (yes, this is unsafe), and then by prepending each component from the .I PATH environment variable. The first match is used. Jobs that use absolute pathnames, starting in a slash, are exempt. .PP .B K.2 rewrites the commandline of any job (pre, post and main) with variable substitutions from Unix environment variables. Please refer to the external documentation for details. .PP .B K.2 provides a temporary named pipe (fifo) for applications that are gridstart aware. Any data an application writes to the FIFO will be propagated back to the submit host, thus enabling progress meters and other application-dependent monitoring. Please refer to the external documentation for details. 
.PP Last but not least, .B k.2 provides an exponential back-off heart-beat to verify that an application is indeed running. However, due to the best-effort transportation through GRAM protocols, the heart beats may not be streamed back. Please refer to the external documentation for details. .SH ARGUMENTS .TP .B cfgfile The explicit configuration file .I cfgfile is optional and overrides the default behavior. The configuration file configures the run-time behavior of k.2 in depth. Furthermore, it may attempt to include further configuration files, which are site-specific. .IP By default, k.2 reads the configuration file from .IR stdin . It is expected that Globus, GRAM and the scheduling system work together to connect a staged configuration file to the .I stdin filedescriptor of .B k.2 . .SH "RETURN VALUE" .B k.2 will return the return value of the last job it ran. Since prejobs, main job and post jobs are chained, it can be any of these. In addition, the error code 127 signals that a call to exec failed, and 126 that reconnecting the stdio failed. A job failing with exit code 126 or 127 is indistinguishable from a .B k.2 failure. .SH "SEE ALSO" .BR condor_submit_dag (1), .BR condor_submit (1), .BR getrusage (3c), .BR gencdag (1). .PP .BR http://www.griphyn.org/workspace/VDS/iv-1.3/iv-1.3.html , .br .BR http://www.griphyn.org/workspace/VDS/ , .br .BR http://www.griphyn.org/chimera/ .br .BR http://www.cs.wisc.edu/condor/manual/ .SH EXAMPLE You can run the .B k.2 executable locally to verify that it is functioning well. In the initial phase, the format of the performance data may be slightly adjusted. However, you must provide a minimal configuration file. .nf \f(CB $ echo 'main "/bin/date"' | $(PEGASUS_HOME)/bin/linux/k.2 # line 1: 2004-03-15T13:37:39 k.2 is running # line 1: added valid main application /bin/date # 2004-03-15T13:37:39.378: about to invoke /bin/date #2 SMP Thu Feb 19 16:09:15 CST 2004 /home/voeckler/vds/src/tools/k.2

7f454c46010101000000000000000000
7f454c46010101000000000000000000 Mon Mar 15 13:37:39 CST 2004 \fP .fi Please take note of a few things about the output: .PP The above example still contains debug information - the lines starting with an octothorpe. .PP The output from the postjob can be appended to the output of the main job on .IR stdout . The output could potentially be separated into different data sections through different temporary files. If you truly need the separation, request that feature. .PP The log file may be reported with a size of zero, because the log file barely existed at the time the data structure was (re-) initialized. With regular GASS output, it will report the status of the socket file descriptor, though. .PP The file descriptors reported for the temporary files are from the perspective of .BR k.2 . Since the temporary files have the close-on-exec flag set, .BR k.2 's filedescriptors are invisible to the job processes. Still, the .I stdio of the job processes are connected to the temporary files. .PP Even this output already appears large. The output may already be too large to guarantee that append operations on networked pipes (GASS, NFS) are written atomically. If logical file and secondary staging is defined, the output may become even larger. .SH "OUTPUT FORMAT" Refer to .B http://www.griphyn.org/workspace/VDS/iv-1.3/iv-1.3.html for an up-to-date description of elements and their attributes. Check with .B http://www.griphyn.org/workspace/VDS/ for IV schemas with a higher version number. .SH RESTRICTIONS There is no version for the Condor .I standard universe. It is simply not possible within the constraints of Condor. .PP Due to its very nature, .B k.2 will also prove difficult to port outside the Unix environment. .PP The pre-, main-, cleanup and post jobs are unable to influence one another's visible environment. However, you can set up environment variables using the configuration file. These variables will be visible to all jobs. .PP Do not use a Chimera definition with just the name .I null and no namespace or version. .PP A job failing with exit code 126 or 127 is indistinguishable from .B k.2 failing with the same exit codes. Sometimes, careful examination of the returned data can help. .PP If the logfile is collected into a shared file, due to the size of the data, simultaneous appends on a shared filesystem from different machines may still mangle data. Currently, file locking is not even attempted, although all data is written atomically from the perspective of .BR k.2 . .PP The upper limit on the number of commandline characters is currently not checked by .BR k.2 . Thus, some variable substitutions could potentially result in a commandline that is larger than permissible. .PP If the output or error file is opened in append mode, but the application decides to truncate its output file, the resulting file will still be truncated. This is correct behavior, but sometimes not obvious. .SH FILES .TP .B $PEGASUS_HOME/etc/iv-1.3.xsd is the suggested location of the latest XML schema describing the data on the submit host. .SH ENVIRONMENT VARIABLES .TP .B TMP has the highest priority when looking for a temporary directory, if specified. Fixme: Still true? .TP .B TEMP is the next highest priority for an environment variable denoting a temporary files directory. .TP .B TMPDIR is next in the checklist. If none of these are found, either the .I stdio.h definition .I P_tmpdir is taken, or the fixed string .I /tmp . .SH AUTHORS Jens-S. Vöckler , .br Mike Wilde , .br Yong Zhao . 
.PP Chimera .B http://www.griphyn.org/chimera/ .br GriPhyN .BR http://www.griphyn.org/ pegasus-wms_4.0.1+dfsg/src/tools/k.2/mysignal.cc0000644000175000017500000000405111757531137020507 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <sys/types.h> #include <signal.h> #include <string.h> #include "mysignal.hh" static const char* RCS_ID = "$Id: mysignal.cc 50 2007-05-19 00:48:32Z gmehta $"; SigFunc* mysignal( int signo, SigFunc* newhandler, bool doInterrupt ) // purpose: install reliable signals // paramtr: signo (IN): signal for which a handler is to be installed // newhandler (IN): function pointer to the signal handler // doInterrupt (IN): interrupted system calls wanted! // returns: the old signal handler, or SIG_ERR in case of error. { struct sigaction action, old; memset( &old, 0, sizeof(old) ); memset( &action, 0, sizeof(action) ); #if 1 action.sa_handler = newhandler; // I HATE TYPE-OVERCORRECT NAGGING #else memmove( &action.sa_handler, &newhandler, sizeof(SigFunc*) ); #endif sigemptyset( &action.sa_mask ); if ( signo == SIGCHLD ) { action.sa_flags |= SA_NOCLDSTOP; #ifdef SA_NODEFER action.sa_flags |= SA_NODEFER; // SYSV: don't block current signal #endif } if ( signo == SIGALRM || doInterrupt ) { #ifdef SA_INTERRUPT action.sa_flags |= SA_INTERRUPT; // SunOS, obsoleted by POSIX #endif } else { #ifdef SA_RESTART action.sa_flags |= SA_RESTART; // BSD, SVR4 #endif } return ( sigaction( signo, &action, &old ) < 0 ) ? (SigFunc*) SIG_ERR : (SigFunc*) old.sa_handler; } pegasus-wms_4.0.1+dfsg/src/tools/k.2/GTPL0000755000175000017500000001175011757531137017055 0ustar ryngeryngeGlobus Toolkit Public License Version 2, July 31, 2003 Copyright 1999-2003 University of Chicago and The University of Southern California. All rights reserved. This software referred to as the Globus Toolkit software ("Software") includes voluntary contributions made to the Globus Project collaboration. Persons and entities that have made voluntary contributions are hereinafter referred to as "Contributors." This Globus Toolkit Public License is referred to herein as "the GTPL." For more information on the Globus Project, please see http://www.globus.org/. Permission is granted for the installation, use, reproduction, modification, display, performance and redistribution of this Software, with or without modification, in source and binary forms. Permission is granted for the installation, use, reproduction, modification, display, performance and redistribution of user files, manuals, and training and demonstration slides ("Documentation") distributed with or specifically designated as distributed under the GTPL. Any exercise of rights under the GTPL is subject to the following conditions: 1. Redistributions of this Software, with or without modification, must reproduce the GTPL in: (1) the Software, or (2) the Documentation or some other similar material which is provided with the Software (if any). 2. 
The Documentation, alone or if included with a redistribution of the Software, must include the following notice: "This product includes material developed by the Globus Project (http://www.globus.org/)." Alternatively, if that is where third-party acknowledgments normally appear, this acknowledgment must be reproduced in the Software itself. 3. Globus Toolkit and Globus Project are trademarks of the University of Chicago. Any trademarks of the University of Chicago or the University of Southern California may not be used to endorse or promote software, or products derived therefrom, and except as expressly provided herein may not be affixed to modified redistributions of this Software or Documentation except with prior written approval, obtainable at the discretion of the trademark owner from info@globus.org. 4. To the extent that patent claims licensable by the University of Southern California and/or by the University of Chicago (as Operator of Argonne National Laboratory) are necessarily infringed by the use or sale of the Software, you and your transferees are granted a non-exclusive, worldwide, royalty-free license under such patent claims, with the rights to make, use, sell, offer to sell, import and otherwise transfer the Software in source code and object code form. This patent license shall not apply to Documentation or to any other software combinations which include the Software. No hardware per se is licensed hereunder. If you or any subsequent transferee (a "Recipient") institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Software infringes such Recipient's patent(s), then such Recipient's rights granted under the patent license above shall terminate as of the date such litigation is filed. 5. DISCLAIMER SOFTWARE AND DOCUMENTATION ARE PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, OF SATISFACTORY QUALITY, AND FITNESS FOR A PARTICULAR PURPOSE OR USE ARE DISCLAIMED. THE COPYRIGHT HOLDERS AND CONTRIBUTORS MAKE NO REPRESENTATION THAT THE SOFTWARE, DOCUMENTATION, MODIFICATIONS, ENHANCEMENTS OR DERIVATIVE WORKS THEREOF, WILL NOT INFRINGE ANY PATENT, COPYRIGHT, TRADEMARK, TRADE SECRET OR OTHER PROPRIETARY RIGHT. 6. LIMITATION OF LIABILITY THE COPYRIGHT HOLDERS AND CONTRIBUTORS SHALL HAVE NO LIABILITY TO LICENSEE OR OTHER PERSONS FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, CONSEQUENTIAL, EXEMPLARY, OR PUNITIVE DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES, LOSS OF USE, DATA OR PROFITS, OR BUSINESS INTERRUPTION, HOWEVER CAUSED AND ON ANY THEORY OF CONTRACT, WARRANTY, TORT (INCLUDING NEGLIGENCE), PRODUCT LIABILITY OR OTHERWISE, ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE OR DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 7. The Globus Project may publish revised and/or new versions of the GTPL from time to time. Each version will be given a distinguishing version number. Once Software or Documentation has been published under a particular version of the GTPL, you may always continue to use it under the terms of that version. You may also choose to use such Software or Documentation under the terms of any subsequent version of the GTPL published by the Globus Project. No one other than the Globus Project has the right to modify the terms of the GTPL. 
Globus Toolkit Public License 7-31-03 pegasus-wms_4.0.1+dfsg/src/tools/k.2/mysignal.hh0000644000175000017500000000254411757531137020526 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _MYSIGNAL_HH #define _MYSIGNAL_HH #include <signal.h> #if 1 // so far, all systems I know use void # define SIGRETTYPE void #else # define SIGRETTYPE int #endif #if defined(SUNOS) && defined(SUN) # define SIGPARAM void #else // SOLARIS, LINUX, IRIX, AIX, SINIXY # define SIGPARAM int #endif typedef SIGRETTYPE SigFunc( SIGPARAM ); SigFunc* mysignal( int signo, SigFunc* newhandler, bool doInterrupt ); // purpose: install reliable signals // paramtr: signo (IN): signal for which a handler is to be installed // newhandler (IN): function pointer to the signal handler // doInterrupt (IN): interrupted system calls wanted! // returns: the old signal handler, or SIG_ERR in case of error. #endif // _MYSIGNAL_HH pegasus-wms_4.0.1+dfsg/src/tools/k.2/time.hh0000644000175000017500000001216311757531137017637 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _CHIMERA_TIME_HH #define _CHIMERA_TIME_HH #include <sys/time.h> #include <time.h> #include <math.h> #include <string> #include <iostream> class Time { // encapsulate time-specific functionality. While instances of this // class allow capturing moments in time, the static members provide // various time-related conversion functions. private: struct timeval m_tv; bool m_local; bool m_extended; public: // static members static bool c_extended; // timestamp format concise or extended static bool c_local; // timestamp time zone, UTC or local static const Time c_epoch; // Unix epoch as Time constant public: inline Time() :m_tv( Time::now() ), m_local(Time::c_local), m_extended(Time::c_extended) { } inline Time( const struct timeval* tv, bool isLocal = Time::c_local, bool isExtended = Time::c_extended ) :m_tv( tv ? 
*tv : Time::now() ), m_local(isLocal), m_extended(isExtended) { } inline Time( const struct timeval& tv, bool isLocal = Time::c_local, bool isExtended = Time::c_extended ) :m_tv(tv), m_local(isLocal), m_extended(isExtended) { } inline Time( time_t tv, long micros, bool isLocal = Time::c_local, bool isExtended = Time::c_extended ) :m_local(isLocal), m_extended(isExtended) { m_tv.tv_sec = tv; m_tv.tv_usec = micros; } // // Accessors // inline bool getLocal() const { return m_local; } inline void setLocal( bool isLocal ) { m_local = isLocal; } inline bool getExtended() const { return m_extended; } inline void setExtended( bool isExt ) { m_extended = isExt; } // naughty inline operator std::string() const { return this->date(); } // // Member functions // inline double seconds() const // purpose: convert a timeval into a duration as seconds since epoch. // returns: a double containing information since epoch. { return Time::seconds(m_tv); } inline double elapsed( const struct timeval* tv = 0 ) const // purpose: Determines the elapsed time since the time was taken // paramtr: tv (IN): timestamp structure, may be NULL for current moment // returns: the difference := tv - this { return fabs(Time::seconds(tv?*tv:Time::now()) - Time::seconds(m_tv)); } inline double elapsed( const struct timeval& tv ) const // purpose: Determines the elapsed time since the time was taken // paramtr: tv (IN): timestamp structure // returns: the difference := tv - this { return fabs(Time::seconds(tv) - Time::seconds(m_tv)); } inline double elapsed( const Time& t ) const // purpose: Determines the elapsed time since the time was taken // paramtr: tv (IN): other time instance // returns: the difference := tv - this { return fabs(Time::seconds(t.m_tv) - Time::seconds(m_tv)); } inline std::string date( bool micros = true ) const // purpose: create an ISO timestamp // returns: a string with the formatted ISO 8601 timestamp { return Time::date( m_tv.tv_sec, micros ? m_tv.tv_usec : -1, m_local, m_extended ); } public: // static functions inline static double seconds( const struct timeval* tv ) // purpose: convert a timeval into a duration as seconds since epoch. // paramtr: tv (IN): timeval structure with a duration or timestamp // returns: a double containing information since epoch. { return ( tv->tv_sec + tv->tv_usec / 1E6 ); } inline static double seconds( const struct timeval& tv ) // purpose: convert a timeval into a duration as seconds since epoch. // paramtr: tv (IN): timeval structure with a duration or timestamp // returns: a double containing information since epoch. { return ( tv.tv_sec + tv.tv_usec / 1E6 ); } static struct timeval now(); // purpose: capture a point in time with microsecond extension // returns: a time record static std::string date( time_t seconds, long micros = -1, bool isLocal = Time::c_local, bool isExtended = Time::c_extended ); // purpose: create an ISO timestamp // paramtr: seconds (IN): tv_sec part of timeval // micros (IN): if negative, don't show micros. 
// isLocal (IN): flag, if 0 use UTC, otherwise use local time // isExtd (IN): flag, if 0 use concise format, otherwise extended // returns: a string with the formatted ISO 8601 timestamp friend std::ostream& operator<<( std::ostream& s, const Time& t ); }; inline std::ostream& operator<<( std::ostream& s, const Time& t ) { return s << t.date(); } #endif // _CHIMERA_TIME_HH pegasus-wms_4.0.1+dfsg/src/tools/k.2/null.cc0000644000175000017500000000141411757531137017636 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "null.hh" static const char* RCS_ID = "$Id: null.cc 50 2007-05-19 00:48:32Z gmehta $"; null_pointer::~null_pointer() throw() { // empty } pegasus-wms_4.0.1+dfsg/src/tools/k.2/jobinfo.cc0000644000175000017500000012443011757531137020316 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include <sys/types.h> #include <sys/stat.h> #include <sys/wait.h> #include <sys/resource.h> #include <sys/poll.h> #include <signal.h> #include <fcntl.h> #include <unistd.h> #include <errno.h> #include <stdio.h> #include <stdarg.h> #include <stdlib.h> #include <string.h> #include <ctype.h> extern "C" { extern char** environ; } #include "getif.hh" #include "jobinfo.hh" #include "appinfo.hh" #include "mysignal.hh" static const char* RCS_ID = "$Id: jobinfo.cc 50 2007-05-19 00:48:32Z gmehta $"; #ifdef sun #define sys_siglist _sys_siglist #endif #if defined(AIX) extern const char* const sys_siglist[64]; #endif static int debug = 0; // debug parser state machinery bool JobInfo::hasAccess( const char* fn ) // purpose: check a given file for being accessible and executable // under the currently effective user and group id. 
// paramtr: fn (IN): fully qualified path to check // mode (IN): permissions to check against, default executable // returns: true if the file is accessible, false for not { // sanity check if ( fn && *fn ) { struct stat st; if ( stat(fn,&st) == 0 && S_ISREG(st.st_mode) && #if 1 ( ( st.st_uid == geteuid() && (S_IXUSR & st.st_mode) == S_IXUSR ) || ( st.st_gid == getegid() && (S_IXGRP & st.st_mode) == S_IXGRP ) || ( (S_IXOTH & st.st_mode) == S_IXOTH ) ) #else ( ( st.st_uid == geteuid() && (st.st_mode & mode & S_IRWXU) == (mode & S_IRWXU) ) || ( st.st_gid == getegid() && (st.st_mode & mode & S_IRWXG) == (mode & S_IRWXG) ) || ( (st.st_mode & mode & S_IRWXO) == (mode & S_IRWXO) ) ) #endif ) return true; else return false; } else { return false; } } char* JobInfo::findApplication( const char* fn ) // purpose: check the executable filename and correct it if necessary // absolute names will not be matched against a PATH // paramtr: fn (IN): current knowledge of filename // returns: newly allocated fqpn of path to executable, or NULL if not found { // sanity check if ( ! (fn && *fn) ) return 0; // only check, but don't touch absolute paths if ( *fn == '/' ) { if ( JobInfo::hasAccess(fn) ) return strdup(fn); else return 0; } // try from CWD first (surprise!) if ( JobInfo::hasAccess(fn) ) return strdup(fn); // continue only if there is a PATH to check char* path = 0; if ( char* s = getenv("PATH") ) path = strdup(s); else return 0; // tokenize to compare char* t = 0; for ( char* s = strtok(path,":"); s; s = strtok(0,":") ) { size_t len = strlen(fn) + strlen(s) + 2; t = (char*) malloc(len); strncpy( t, s, len ); strncat( t, "/", len ); strncat( t, fn, len ); if ( JobInfo::hasAccess(t) ) break; else { free((void*) t); t = 0; } } /* some or no matches found */ free((void*) path); return t; } #if 0 Parsing a pre- or postjob argument line splits on whitespace in shell fashion. The state transition table maps from start state and input character to new state and action. 
The actions are abbreviated as follows: abb | # | meaning ----+---+-------------------------------------------------------- Sb | 0 | store input char into argument buffer Fb | 1 | flush regular buffer and reset argument buffer pointers Sv | 2 | store input char into variable name buffer Fv | 3 | flush varname via lookup into argument buffer and reset vpointers Fvb | 4 | Do Fv followed by Fb - | 5 | skip (ignore) input char (do nothing) * | 6 | translate abfnrtv to controls, other store verbatim FS | 7 | Do Fv followed by Sb | 8 | print error and exit special final states: state | meaning ------+----------------- F 32 | final, leave machine E1 33 | error 1: missing closing apostrophe E2 34 | error 2: missing closing quote E3 35 | error 3: illegal variable name E4 36 | error 4: missing closing brace E5 37 | error 5: premature end of string STATE | eos | "" | '' | { | } | $ | \ | alnum| wspc | else | ------+------+------+------+------+------+------+------+------+------+------+ 0 | F,- | 4,- | 2,- | 1,Sb | 1,Sb | 11,- | 14,- | 1,Sb | 0,- | 1,Sb | 1 | F,Fb | 4,- | 2,- | 1,Sb | 1,Sb | 11,- | 14,- | 1,Sb | 0,Fb | 1,Sb | 2 | E1 | 2,Sb | 1,- | 2,Sb | 2,Sb | 2,Sb | 3,- | 2,Sb | 2,Sb | 2,Sb | 3 | E1 | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 2,Sb | 4 | E2 | 1,- | 4,Sb | 4,Sb | 4,Sb | 8,- | 7,- | 4,Sb | 4,Sb | 4,Sb | 7 | E2 | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,Sb | 4,* | 4,Sb | 4,Sb | 8 | E2 | E2 | E2 | 9,- | E3 | E3 | E3 |10,Sv | E3 | E3 | 9 | E4 | E4 | E4 | E4 | 4,Fv | E3 | 9,Sv | 9,Sv | 9,Sv | 9,Sv | 10 | E2 | 1,Fv | 4,Fv | 4,Fv | 4,Fv | 8,Fv | 7,Fv |10,Sv | 4,Fv |10,Sv | 11 | E3 | E3 | E3 |12,- | E3 | E3 | E3 |13,Sv | E3 | E3 | 12 | E4 | E4 | E4 | E4 | 1,Fv | E3 |12,Sv |12,Sv |12,Sv |12,Sv | 13 | F,Fvb| 4,Fv | 2,Fv | 1,Fv | 1,Fv | E3 |14,Fv |13,Sv | 1,Fv | 1,FS | 14 | E5 | 1,Sb | 1,Sb | 1,Sb | 1,Sb | 1,Sb | 1,Sb | 1,Sb | 1,Sb | 1,Sb | REMOVED: 5 | E1 | 5,Sb | 4,- | 5,Sb | 5,Sb | 5,Sb | 6,- | 5,Sb | 5,Sb | 5,Sb | 6 | E1 | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | 5,Sb | #endif typedef const char Row[10]; typedef const Row Map[15]; static const Map actionmap = { { 5, 5, 5, 0, 0, 5, 5, 0, 5, 0 }, // 0 { 1, 5, 5, 0, 0, 5, 5, 0, 1, 0 }, // 1 { 8, 0, 5, 0, 0, 0, 5, 0, 0, 0 }, // 2 { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 3 { 8, 5, 4, 0, 0, 5, 5, 0, 0, 0 }, // 4 { 8, 0, 5, 0, 0, 0, 5, 0, 0, 0 }, // 5 (unused) { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 6 (unused) { 8, 0, 0, 0, 0, 0, 0, 6, 0, 0 }, // 7 { 8, 8, 8, 5, 8, 8, 8, 2, 8, 8 }, // 8 { 8, 8, 8, 8, 3, 8, 2, 2, 2, 2 }, // 9 { 8, 3, 3, 3, 3, 3, 3, 2, 3, 2 }, // 10 { 8, 8, 8, 5, 8, 8, 8, 2, 8, 8 }, // 11 { 8, 8, 8, 8, 3, 8, 2, 2, 2, 2 }, // 12 { 4, 3, 3, 3, 3, 8, 3, 2, 3, 7 }, // 13 { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0 } // 14 }; static const Map statemap = { { 32, 4, 2, 1, 1, 11, 14, 1, 0, 1 }, // 0 { 32, 4, 2, 1, 1, 11, 14, 1, 0, 1 }, // 1 { 33, 2, 1, 2, 2, 2, 3, 2, 2, 2 }, // 2 { 33, 2, 2, 2, 2, 2, 2, 2, 2, 2 }, // 3 { 34, 1, 0, 4, 4, 8, 7, 4, 4, 4 }, // 4 { 33, 5, 4, 5, 5, 5, 6, 5, 5, 5 }, // 5 (unused) { 33, 5, 5, 5, 5, 5, 5, 5, 5, 5 }, // 6 (unused) { 34, 4, 4, 4, 4, 4, 4, 4, 4, 5 }, // 7 { 34, 34, 34, 9, 34, 34, 34, 10, 34, 34 }, // 8 { 36, 36, 36, 36, 4, 36, 9, 9, 9, 9 }, // 9 { 34, 1, 4, 4, 4, 8, 7, 10, 4, 10 }, // 10 { 35, 35, 35, 12, 35, 35, 35, 13, 35, 35 }, // 11 { 36, 36, 36, 36, 1, 35, 12, 12, 12, 12 }, // 12 { 32, 4, 2, 1, 1, 35, 14, 13, 1, 1 }, // 13 { 37, 1, 1, 1, 1, 1, 1, 1, 1, 1 } // 14 }; static const char* errormessage[5] = { "Error 1: missing closing apostrophe\n", "Error 2: missing closing quote\n", "Error 3: 
illegal variable name\n", "Error 4: missing closing brace\n", "Error 5: premature end of string\n" }; static const char* translation = "abnrtv"; static const char translationmap[] = "\a\b\n\r\t\v"; static int xlate( const char input ) // purpose: translate an input character into the character class. // paramtr: input (IN): input character // returns: numerical character class for input character. { switch ( input ) { case 0: return 0; case '"': return 1; case '\'': return 2; case '{': return 3; case '}': return 4; case '$': return 5; case '\\': return 6; default: return ( (isalnum(input) || input=='_') ? 7 : ( isspace(input) ? 8 : 9 ) ); } } static void resolve( const std::string& varname, std::string& buffer ) // purpose: resolves a given environment variable into its value // paramtr: varname (IN): buffer with the variable name // buffer (OUT): where to append the translation to // returns: - { char* x = getenv( varname.c_str() ); if ( x == 0 ) { // variable not found if ( debug ) fprintf( stderr, "# %s does not exist\n", varname.c_str() ); } else { // replace with value if ( debug ) fprintf( stderr, "# %s=%s\n", varname.c_str(), x ); buffer.append( x ); } } static int internalParse( const char*& s, StringList& result, int state = 0 ) // purpose: parse a commandline string into arguments // paramtr: s (IO): pointer into the commandline string buffer // result (OUT): list of strings, partially filled on errors // state (IN): start state of parse, use state 0 // returns: final state; a stop state of 32 is fine, >32 implies an error { const char* x = 0; std::string buffer; std::string varname; // house keeping if ( result.size() ) result.clear(); // parse while ( state < 32 ) { int charclass = xlate( *s ); int newstate = statemap[state][charclass]; int action = actionmap[state][charclass]; if ( debug ) fprintf( stderr, "# state=%02d, class=%d, action=%d, newstate=%02d, " "char=%02X (%c)\n", state, charclass, action, newstate, *s, ((*s & 127) >= 32) ? *s : '.' ); switch ( action ) { case 0: // store into buffer buffer += *s; break; case 1: // conditionally finalize buffer result.push_back(buffer); buffer.clear(); break; case 2: // store variable part varname += *s; break; case 3: // finalize variable name resolve( varname, buffer ); varname.clear(); break; case 4: // case 3 followed by case 1 resolve( varname, buffer ); varname.clear(); result.push_back( buffer ); buffer.clear(); break; case 5: // noop break; case 6: // translate control escapes x = strchr( translation, *s ); buffer += ( x == 0 ? 
*s : translationmap[x-translation] ); if ( debug ) fprintf( stderr, "# escape %c -> %d\n", *s, buffer[buffer.size()-1] ); break; case 7: // case 3 followed by case 0 resolve( varname, buffer ); varname.clear(); buffer += *s; break; case 8: // print error message fputs( errormessage[newstate-33], stderr ); break; } ++s; state = newstate; } return state; } #if 0 JobInfo::JobInfo() :m_isValid(INVALID), m_copy(0), m_argv(0), m_argc(0), // m_start( c_notime ), m_finish( c_notime ), m_child(0), m_status(0), m_saverr(0), m_use(0), m_executable(0) { // empty } #endif static void init( char*& m_copy, char* const*& m_argv, int& m_argc, const StringList& args ) { // only continue if there is anything to do if ( args.size() ) { size_t total = 0; for ( StringList::const_iterator i=args.begin(); i != args.end(); i++ ) { m_argc++; total += (i->size() + 1); } // prepare copy area and argument vector char* t = m_copy = new char[ total + m_argc ]; m_argv = static_cast<char* const*>( calloc( m_argc+1, sizeof(char*) ) ); // copy list while updating argument vector StringList::const_iterator temp = args.begin(); for ( int i=0; i < m_argc && temp != args.end(); ++i ) { // append string to copy area size_t len = temp->size()+1; memcpy( t, temp->c_str(), len ); // put string into argument vector - assign to readonly! memcpy( (void*) &m_argv[i], &t, sizeof(char*) ); // advance t += len; temp++; } } } JobInfo::JobInfo( const char* tag, const char* commandline ) // purpose: initialize the data structure by parsing a command string. // paramtr: commandline (IN): commandline concatenated string to separate :m_isValid(JobInfo::INVALID), m_copy(0), m_argv(0), m_argc(0), // m_start(), m_finish(), m_child(0), m_status(0), m_saverr(0), m_use(0), m_executable(0) { // recoded for SunCC if ( tag ) m_tag = tag; else throw null_pointer(); StringList args; int state = internalParse( commandline, args ); // only continue in ok state AND if there is anything to do if ( state == 32 ) init( this->m_copy, this->m_argv, this->m_argc, args ); // free list args.clear(); // this is a valid and initialized entry if ( m_argc > 0 ) { // check job path const char* realpath = JobInfo::findApplication( m_argv[0] ); if ( realpath ) { memcpy( (void*) &m_argv[0], &realpath, sizeof(char*) ); m_isValid = VALID; // initialize data for myself m_executable = new StatFile( m_argv[0], O_RDONLY, 0 ); } else { m_status = -127; m_saverr = errno; m_isValid = NOTFOUND; } } } JobInfo::JobInfo( const char* tag, const StringList& args ) // purpose: initialize the data structure from a pre-split argument vector. 
// paramtr: tag (IN): kind of job, used for XML element tag name // argv (IN): commandline already split into arg vector :m_isValid(JobInfo::INVALID), m_copy(0), m_argv(0), m_argc(0), // m_start(), m_finish(), m_child(0), m_status(0), m_saverr(0), m_use(0), m_executable(0) { // recoded for SunCC if ( tag ) m_tag = tag; else throw null_pointer(); init( this->m_copy, this->m_argv, this->m_argc, args ); // this is a valid and initialized entry if ( m_argc > 0 ) { // check job path const char* realpath = JobInfo::findApplication( m_argv[0] ); if ( realpath ) { memcpy( (void*) &m_argv[0], &realpath, sizeof(char*) ); m_isValid = VALID; // initialize data for myself m_executable = new StatFile( m_argv[0], O_RDONLY, 0 ); } else { m_status = -127; m_saverr = errno; m_isValid = NOTFOUND; } } } #if 0 static std::string show_argv( int argc, char* const* argv ) { std::string result( XML::printf( "%p:[", static_cast<const void*>(argv) ) ); for ( int i=0; i<=argc; ++i ) { if ( i > 0 ) result += ','; result += XML::printf( "%p", static_cast<void*>(argv[i]) ); if ( argv[i] ) result.append("=").append(argv[i]); } result += "]"; return result; } #endif void JobInfo::addArgument( const std::string& arg ) // purpose: Adds an additional argument to the end of the CLI // paramtr: arg (IN): Argument string to add. Will _not_ be interpreted! // warning: You cannot set the application to run with this // warning: This (ugly) hack is for internal use for stage-in jobs. { // this is a valid and initialized entry if ( m_isValid == VALID && m_argc > 0 ) { #if 0 fprintf( stderr, "# old_argc=%d, old_copy=%p=%s\n", m_argc, static_cast<void*>(m_copy), m_copy ); fprintf( stderr, "# old_argv=%s\n", show_argv( m_argc, m_argv ).c_str() ); #endif size_t new_argc( m_argc+1 ); char* const* new_argv = static_cast<char* const*>( calloc( new_argc+1, sizeof(char*) ) ); // determine new copy area size size_t old_size = strlen(m_copy)+1; // original argv[0] for ( int i=1; i(new_copy), new_copy ); fprintf( stderr, "# new_argv=%s\n", show_argv( new_argc, new_argv ).c_str() ); #endif // switch new to old m_argc = new_argc; delete[] m_copy; m_copy = new_copy; free((void*) m_argv); m_argv = new_argv; } } JobInfo::~JobInfo() // purpose: dtor { if ( m_isValid == VALID ) { // from findApp() allocation if ( m_argv[0] ) free((void*) m_argv[0]); // done with stat information delete m_executable; m_executable = 0; } if ( m_use ) { delete m_use; m_use = 0; } if ( m_copy ) { free((void*) m_argv ); m_argv = 0; delete[] m_copy; m_copy = 0; } /* final invalidation */ m_isValid = INVALID; } void JobInfo::rewrite() { // empty } std::ostream& JobInfo::toXML( std::ostream& s, int indent, const char* nspace ) const // purpose: XML format a job info record onto a given stream // paramtr: s (IO): stream to put information into // indent (IN): indentation level of tag // nspace (IN): If defined, namespace prefix before element // returns: s { // sanity check if ( m_isValid != VALID ) return s; // start element s << XML::startElement( s, m_tag, indent, nspace ); // attributes s << " start=\"" << m_start << "\" duration=\"" << std::setfill('0') << std::setprecision(3) << m_start.elapsed(m_finish) << '"'; // optional attribute: application process id if ( m_child != 0 ) s << " pid=\"" << m_child << '"'; s << ">\r\n"; // <usage> if ( m_use ) m_use->toXML( s, indent+2, nspace ); // <status>: open tag s << XML::startElement( s, "status", indent+2, nspace ); s << " raw=\"" << m_status << "\">"; // <status>: cases of completion if ( m_status < 0 ) { // <failure> s << XML::startElement( s, "failure", 0, nspace ); s << " error=\"" << 
m_saverr << "\">" << strerror(m_saverr); s << XML::finalElement( s, "failure", 0, nspace, false ); } else if ( WIFEXITED(m_status) ) { // <regular> s << XML::startElement( s, "regular", 0, nspace ); s << " exitcode=\"" << WEXITSTATUS(m_status) << "\"/>"; } else if ( WIFSIGNALED(m_status) ) { // <signalled> // result = 128 + WTERMSIG(m_status); s << XML::startElement( s, "signalled", 0, nspace ); s << " signal=\"" << WTERMSIG(m_status) << '"'; #ifdef WCOREDUMP s << " corefile=\"" << (WCOREDUMP(m_status) ? "true" : "false" ) << '"'; #endif s << '>' << sys_siglist[WTERMSIG(m_status)]; s << XML::finalElement( s, "signalled", 0, nspace, false ); } else if ( WIFSTOPPED(m_status) ) { // <suspended> s << XML::startElement( s, "suspended", 0, nspace ); s << " signal=\"" << WSTOPSIG(m_status) << "\">"; s << sys_siglist[WSTOPSIG(m_status)]; s << XML::finalElement( s, "suspended", 0, nspace, false ); } // FIXME: else? // </status> s << XML::finalElement( s, "status", 0, nspace ); // m_executable->toXML( s, indent+2, nspace ); // <arguments> s << XML::startElement( s, "arguments", indent+2, nspace ); s << " argc=\"" << m_argc << '"'; if ( m_argc == 1 ) { // empty element s << "/>\r\n"; } else { // content are the CLI args s << ">\r\n"; for ( int i=1; i<m_argc; ++i ) { s << XML::startElement( s, "argv", indent+4, nspace ) << '>'; s << XML::quote(m_argv[i]) << XML::finalElement( s, "argv", 0, nspace ); } // </arguments> s << XML::finalElement( s, "arguments", indent+2, nspace ); } // finalize close tag of outermost element s << XML::finalElement( s, m_tag, indent, nspace ); return s; } std::string JobInfo::toXML( int indent, const char* nspace ) const // purpose: format the job information into the given buffer as XML. // paramtr: buffer (IO): area to store the output in (append) // indent (IN): indentation level // returns: the buffer { std::string buffer; // sanity check if ( m_isValid != VALID ) return buffer; // start tag with indentation buffer += XML::startElement( m_tag, indent, nspace ); // start time and duration buffer += " start=\"" + m_start.date(); buffer += XML::printf( "\" duration=\"%.3f\"", m_start.elapsed(m_finish) ); // optional attribute: application process id if ( m_child != 0 ) buffer += XML::printf( " pid=\"%d\"", m_child ); buffer += ">\r\n"; // <usage> if ( m_use ) buffer += m_use->toXML( indent+2, nspace ); // <status>: open tag buffer += XML::startElement( "status", indent+2, nspace ); buffer += XML::printf( " raw=\"%d\">", m_status ); // <status>: cases of completion if ( m_status < 0 ) { // <failure> buffer += XML::startElement( "failure", 0, nspace ); buffer += XML::printf( " error=\"%d\">%s", m_saverr, strerror(m_saverr) ); buffer += XML::finalElement( "failure", 0, nspace, false ); } else if ( WIFEXITED(m_status) ) { buffer += XML::startElement( "regular", 0, nspace ); buffer += XML::printf( " exitcode=\"%d\"/>", WEXITSTATUS(m_status) ); } else if ( WIFSIGNALED(m_status) ) { // result = 128 + WTERMSIG(m_status); buffer += XML::startElement( "signalled", 0, nspace ); buffer += XML::printf( " signal=\"%u\"", WTERMSIG(m_status) ); #ifdef WCOREDUMP buffer += XML::printf( " corefile=\"%s\"", WCOREDUMP(m_status) ? "true" : "false" ); #endif buffer += ">"; buffer += sys_siglist[WTERMSIG(m_status)]; buffer += XML::finalElement( "signalled", 0, nspace, false ); } else if ( WIFSTOPPED(m_status) ) { buffer += XML::startElement( "suspended", 0, nspace ); buffer += XML::printf( " signal=\"%u\">", WSTOPSIG(m_status) ); buffer += sys_siglist[WSTOPSIG(m_status)]; buffer += XML::finalElement( "suspended", 0, nspace, false ); } // FIXME: else? 
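// (editorial note: any other wait() state falls through the if/else chain above and leaves <status> with only its raw attribute -- hence the FIXME)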
buffer += XML::finalElement( "status", 0, nspace ); // buffer += m_executable->toXML( indent+2, nspace ); // buffer += XML::startElement( "arguments", indent+2, nspace ); buffer += XML::printf( " argc=\"%d\"", m_argc ); if ( m_argc == 1 ) { // empty element buffer += "/>\r\n"; } else { // content are the CLI args buffer += ">\r\n"; for ( int i=1; i"; buffer += XML::quote(m_argv[i]); buffer += XML::finalElement( "argv", 0, nspace ); } // end tag buffer += XML::finalElement( "arguments", indent+2, nspace ); } // finalize close tag of outmost element buffer += XML::finalElement( m_tag, indent, nspace ); return buffer; } void JobInfo::setUse( const struct rusage* use ) // purpose: sets the rusage information from an external source // paramtr: use (IN): pointer to a valid rusage record { if ( m_use ) delete m_use; // the element name inside a job element is "usage" m_use = new UseInfo( "usage", use ); } int JobInfo::exitCode( int raw ) // purpose: convert the raw result from wait() into a status code // paramtr: raw (IN): the raw exit code // returns: a cooked exit code // < 0 --> error while invoking job // [0..127] --> regular exitcode // [128..] --> terminal signal + 128 { int result = 127; if ( raw < 0 ) { // nothing to do to result } else if ( WIFEXITED(raw) ) { result = WEXITSTATUS(raw); } else if ( WIFSIGNALED(raw) ) { result = 128 + WTERMSIG(raw); } else if ( WIFSTOPPED(raw) ) { // nothing to do to result } return result; } int JobInfo::wait4( int flags ) { struct rusage ru; int result = ::wait4( m_child, &m_status, flags, &ru ); setUse( &ru ); return result; } static int debug_msg( const char* fmt, ... ) // try to be reentrant... { char buffer[1024]; size_t size = sizeof(buffer); struct timeval me( Time::now() ); struct tm temp; strftime( buffer, size, "# %Y-%m-%dT%H:%M:%S", localtime_r(&me.tv_sec,&temp) ); snprintf( buffer+strlen(buffer), size-strlen(buffer)-1, ".%03ld: ", me.tv_usec / 1000 ); va_list ap; va_start( ap, fmt ); int result = vsnprintf( buffer + strlen(buffer), size-strlen(buffer)-1, fmt, ap ); write( STDERR_FILENO, buffer, strlen(buffer) ); va_end(ap); return result; } // // --- class set_signal ------------------------------------------------ // class set_signal : public null_pointer { public: set_signal() throw() { } virtual ~set_signal() throw(); }; set_signal::~set_signal() throw() { // empty } // // --- class SignalHandlerCommunication -------------------------------- // #ifdef SUNOS #ifdef SIG_ERR #undef SIG_ERR #define SIG_ERR ((SigFunc*)-1) #endif #endif class SignalHandlerCommunication { // Encapsulates communications with the signal handler into a Singleton. // It also installs a SIGCHLD handler with the constructor, and removes // it with the destructor. Since both, ctor and dtor are protected, the // only access is granted through a single instance via the Singleton // pattern. // // This class is not thread-safe, nor truly reentrant. 
protected: SignalHandlerCommunication( JobInfo* job ); // purpose: c'tor installs SIGCHLD handler // paramtr: job (IN): job information to update in SIGCHLD handler ~SignalHandlerCommunication(); // purpose: d'tor static SIGRETTYPE sig_child( SIGPARAM signo ); // purpose: signal handler for SIGCHLD, updating job information // paramtr: signo (IN): signal number from OS, os-dependent // returns: usually a void function public: // Singleton accessors static SignalHandlerCommunication* init( JobInfo* job ); // purpose: first time initialization instead of instance() // paramtr: job (IO): pointer to the job which installs the handler // returns: a pointer to the single instance static SignalHandlerCommunication* instance(); // purpose: next time instance() accessor // returns: a pointer to the single instance // warning: need to initialize with init() first static void done( void ); // purpose: last time tear down of the singleton object // // Accessors // typedef volatile sig_atomic_t AtomicType; inline AtomicType isDone() const { return m_done; } inline AtomicType& isDone() { return m_done; } inline const JobInfo* job() const { return m_job; } inline JobInfo* job() { return m_job; } private: // protected from ever being used SignalHandlerCommunication(); SignalHandlerCommunication( const SignalHandlerCommunication& ); SignalHandlerCommunication& operator=( const SignalHandlerCommunication& ); // singleton instance static SignalHandlerCommunication* m_instance; // regular members AtomicType m_done; JobInfo* m_job; SigFunc* m_old_sigchild; }; SignalHandlerCommunication* SignalHandlerCommunication::m_instance = 0; SignalHandlerCommunication::SignalHandlerCommunication( JobInfo* job ) // purpose: c'tor installs SIGCHLD handler // paramtr: job (IN): job information to update in SIGCHLD handler :m_done(0), m_job(job) { if ( job == 0 ) throw null_pointer(); m_old_sigchild = mysignal( SIGCHLD, sig_child, true ); if ( m_old_sigchild == SIG_ERR ) throw set_signal(); } SignalHandlerCommunication::~SignalHandlerCommunication() // purpose: dtor { // reset signal -- all we can attempt if ( mysignal( SIGCHLD, m_old_sigchild, true ) == SIG_ERR ) throw set_signal(); } SIGRETTYPE SignalHandlerCommunication::sig_child( SIGPARAM signo ) // purpose: signal handler for SIGCHLD, updating job information // paramtr: signo (IN): signal number from OS, os-dependent // returns: usually a void function { if ( debug ) debug_msg( "seen signal %d\n", signo ); #if 0 if ( instance()->job()->wait4(0) == -1 ) instance()->job()->setStatus(-1); instance()->job()->setErrno(); #else if ( ! 
instance()->isDone() ) { while ( instance()->job()->wait4(0) < 0 ) { if ( errno != EINTR ) { instance()->job()->setStatus(-1); break; } } instance()->job()->setErrno(); } #endif instance()->isDone() = 1; } SignalHandlerCommunication* SignalHandlerCommunication::init( JobInfo* job ) // purpose: first time initialization instead of instance() // paramtr: job (IO): pointer to the job which installs the handler // returns: a pointer to the single instance { if ( m_instance ) delete m_instance; m_instance = new SignalHandlerCommunication(job); return m_instance; } SignalHandlerCommunication* SignalHandlerCommunication::instance() // purpose: next time instance() accessor // returns: a pointer to the single instance // warning: need to initialize with init() first { if ( m_instance == 0 ) throw null_pointer(); return m_instance; } void SignalHandlerCommunication::done( void ) // purpose: last time tear down of the singleton object { if ( m_instance ) delete m_instance; m_instance = 0; } // // --- class EventLoop ------------------------------------------------- // class EventLoop { // Encapsulates the event loop for optional reading from a FIFO, and // propagating the information onto the stderr of the gridshell. The // stderr will be forwarded by Globus-IO to the remote submit host. // // The EventLoop handler will _always_ be called, even in the absence // of a feedback channel. The handler does provide an exponentially // backed-off heartbeat of the child. // // This class is not thread-safe, nor truly reentrant. public: EventLoop( pid_t child, StatInfo* fifo, int outfd = STDERR_FILENO ); // purpose: Set up the connection between a FIFO (in) and stderr (out) // paramtr: child (IN): child process to check status of // fifo (IN): Stat handle for the FIFO -- may be 0 // outfd (IN): handle to format messages onto // warning: If fifo cannot be cast to StatFifo*, it will throw // a null_pointer exception. However, NULL is a legal value. ssize_t send( const char* msg, ssize_t msize, int channel = 0 ) const; // purpose: writes the message to the remote submit host // paramtr: msg (IN): message buffer // msize (IN): size of message to actually use // channel (IN): channel number - negative are system channels! // returns: size of actual message written. Since the message will be // XML wrapped, it is larger than the input message. // warning: If the FIFO (src) or channel (dst) is not defined, it will // simulate success without writing anything. bool handle( struct pollfd& pfds, int& result, SignalHandlerCommunication::AtomicType& terminate, char* rbuffer, size_t bufsize ); // purpose: if poll returned 1, handle the waiting data // paramtr: pfds (IO): poll structure // result (IO): return value for outer loop // terminate (IN): volatile flag from SIGCHLD // rbuffer (IO): i/o buffer area // bufsize (IN): size of buffer area // returns: true, if the outer loop needs to be exited, // false to continue with outer loop. int loop( SignalHandlerCommunication::AtomicType& terminate, double heartbeat = 30.0 ); // purpose: Periodically wake up and send back FIFO stuff and heart beats. // paramtr: terminate (IO): reference to an external "done" flag. This // is typically from the SIGCHLD handler. // heartbeat (IN): initial heartbeat interval. // returns: -1 in case of error, 0 for o.k. 
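  //
  // Illustrative note (editorial addition, not part of the original source):
  // loop() doubles the heartbeat interval after each beat it sends, while
  // the start stamp stays fixed. With the default of 30s, beats thus go out
  // roughly 30s, 60s, 120s, ... after entering the loop, i.e. beat k is due
  // about 30 * 2^(k-1) seconds after the loop started.
  //
#if 0
  // Hypothetical driver; child_pid and fifo are assumed names:
  EventLoop e( child_pid, fifo /* StatFifo*, may be NULL */, STDERR_FILENO );
  e.loop( SignalHandlerCommunication::instance()->isDone() );
#endif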
private: // render inaccessible EventLoop(); EventLoop( const EventLoop& ); EventLoop& operator=( const EventLoop& ); static SignalHandlerCommunication::AtomicType m_seen_sigpipe; static SIGRETTYPE sig_pipe( SIGPARAM signo ) { if ( debug ) debug_msg( "seen signal %d\n", signo ); EventLoop::m_seen_sigpipe = 1; } pid_t m_child; int m_outfd; StatFifo* m_fifo; SigFunc* m_old_sigpipe; }; SignalHandlerCommunication::AtomicType EventLoop::m_seen_sigpipe = 0; EventLoop::EventLoop( pid_t child, StatInfo* fifo, int outfd ) // purpose: Set up the connection between a FIFO (in) and stderr (out) // paramtr: child (IN): child process to check status of // fifo (IN): Stat handle for the FIFO // outfd (IN): handle to format messages onto // warning: If fifo cannot be cast to StatFifo*, it will throw // a null_pointer exception. However, NULL is a legal value. :m_child(child), m_outfd(outfd), m_fifo(0) { if ( fifo && (m_fifo = dynamic_cast<StatFifo*>(fifo)) == 0 ) throw null_pointer(); // NULL m_fifo is legal } ssize_t EventLoop::send( const char* msg, ssize_t msize, int channel ) const // purpose: writes the message to the remote submit host // paramtr: msg (IN): message buffer // msize (IN): size of message to actually use // channel (IN): channel number - negative are system channels! // returns: size of actual message written. Since the message will be // XML wrapped, it is larger than the input message. // warning: If the channel (dst) is not defined, it will // simulate success without writing anything. { // sanity -- simulate success if ( m_outfd == -1 ) return msize; // compose output message std::string buffer; buffer.reserve( msize+128 ); buffer += XML::printf( "\r\n"; // almost-atomic write to outfd ssize_t result = AppInfo::writen( m_outfd, buffer.c_str(), buffer.size(), 3 ); // NFS sync for gatekeeper troubles AppInfo::nfs_sync( m_outfd ); // done return result; } bool EventLoop::handle( struct pollfd& pfds, int& result, SignalHandlerCommunication::AtomicType& terminate, char* rbuffer, size_t bufsize ) // purpose: if poll returned 1, handle the waiting data // paramtr: pfds (IO): poll structure // result (IO): return value for outer loop // terminate (IN): volatile flag from SIGCHLD // rbuffer (IO): i/o buffer area // bufsize (IN): size of buffer area // returns: true, if outer loops needs to be exited, // false to continue with outer loop. { static const int mask = POLLIN | POLLERR | POLLHUP | POLLNVAL; // poll OK, data is waiting if ( (pfds.revents & mask) > 0 ) { ssize_t rsize = read( pfds.fd, rbuffer, bufsize-1 ); if ( rsize == -1 ) { // error while reading if ( errno != EINTR ) { result = -1; return true; // do exit } // check our signal interruptions if ( terminate || EventLoop::m_seen_sigpipe ) { result = 0; return true; // do exit } } else if ( rsize == 0 ) { // EOF -- close file and be done? This is a FIFO! if ( debug ) debug_msg( "seen an EOF on FIFO\n" ); // FIXME: Faulty logic! If the child spawned multiple subprocesses // we may have multiple writers. Only exit, if the child is gone. // result = 0; // return true; // FIXED: Open a server FIFO with O_RDWR to *not* have to deal // with EOF conditions whenever the clients drop to zero! 
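// (editorial note: despite the O_RDWR server-FIFO remark above, this code path still treats EOF as a normal reason to leave the event loop)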
result = 0; return true; } else { // data available ssize_t wsize = this->send( rbuffer, rsize, 1 ); if ( wsize == -1 ) { // unable to send anything further due to error condition result = -1; return true; // do exit } else { // update statistics if ( m_fifo ) m_fifo->add( rsize, wsize, 1 ); } } // IF rsize > 0 } // IF revents & mask return false; } int EventLoop::loop( SignalHandlerCommunication::AtomicType& terminate, double heartbeat ) // purpose: Periodically wake up and send back FIFO stuff and heart beats. // paramtr: terminate (IO): reference to an external "done" flag. This // is typically from the SIGCHLD handler. // heartbeat (IN): initial heartbeat interval. // returns: -1 in case of error, 0 for o.k. { int result = 0; // sanity checks if ( m_outfd == -1 ) return 0; // become aware of SIGPIPE for write failures SigFunc* old_pipe = mysignal( SIGPIPE, EventLoop::sig_pipe, true ); if ( old_pipe == SIG_ERR ) { if ( debug ) debug_msg( "unable to set SIGPIPE handler\n" ); return -1; } else { if ( debug ) debug_msg( "in poll loop\n" ); } // prepare poll FDs struct pollfd pfds = { m_fifo ? m_fifo->getDescriptor() : -1, // .fd POLLIN, // .events 0 }; // .revents // allocate buffer size_t bufsize = getpagesize(); char* rbuffer = new char[bufsize]; // heart-beat variables Time hb_start; size_t hb_count = 0; // poll -- may be interrupted by SIGCHLD bool isNext = false; while ( terminate == 0 && EventLoop::m_seen_sigpipe == 0 ) { // race condition possible, thus we MUST time out -- default 30s // due to introduction of heartbeat, we must wake for it, too. if ( debug ) debug_msg( "invoking poll()\n" ); long timeo = ( terminate==0 && EventLoop::m_seen_sigpipe==0 && isNext ) ? 30000 : 0; int status = m_fifo ? poll( &pfds, 1, timeo ) : poll( 0, 0, timeo ); int saverr = errno; if ( debug ) debug_msg( "poll returns %d, errno %d\n", status, saverr ); isNext = true; // heart-beat Time t; if ( hb_start.seconds() + heartbeat <= t.seconds() ) { // test for presence of child process by using the kill(2) checks // FIXME: /proc filesystems allow for more magic :-) errno = 0; int result = kill( m_child, 0 ); // FIXME: if ( result == -1 && errno == ESRCH ) CHILD_IS_GONE; std::string msg( XML::printf( "heartbeat %u: %.3f %d/%d", ++hb_count, hb_start.elapsed(t), result, errno ) ); heartbeat *= 2.0; // exponential backoff ssize_t wsize = this->send( msg.c_str(), msg.size(), 0 ); if ( wsize > 0 ) if ( m_fifo ) m_fifo->add( msg.size(), wsize, 1 ); } // ensure invariance... errno = saverr; // handle the status if ( status == -1 ) { // poll error if ( terminate || EventLoop::m_seen_sigpipe ) { // we were interrupted by our own signal handlers result = 0; break; } if ( errno != EINTR ) { // not a regular interruption result = -1; break; } } else if ( status > 0 ) { // poll OK, data is waiting if ( handle( pfds, result, terminate, rbuffer, bufsize ) ) break; } // IF status > 0 } // WHILE // some final message? 
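// drain any messages still queued in the FIFO now that the poll loop has ended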
  while ( m_fifo && poll( &pfds, 1, 0 ) == 1 ) {
    // handle waiting message(s)
    if ( handle( pfds, result, terminate, rbuffer, bufsize ) ) break;
  }

  // restore defaults
  mysignal( SIGPIPE, old_pipe, 1 );
  if ( debug ) debug_msg( "leaving poll loop\n" );

  // done
  delete[] rbuffer;
  return result;
}

//
// --- class JobInfo cont'd --------------------------------------------
//

#include <unistd.h>   // sysconf()
#include <limits.h>   // OPEN_MAX fallbacks

inline long getMaxFD( void )
{
  long result = sysconf( _SC_OPEN_MAX );
#ifdef OPEN_MAX
  if ( result == -1 ) result = OPEN_MAX;
#endif
#ifdef _POSIX_OPEN_MAX
  if ( result == -1 ) result = _POSIX_OPEN_MAX;
#endif
  return result;
}

int
JobInfo::system( AppInfo* appinfo )
  // purpose: runs the current job with the given stdio connections
  // paramtr: appinfo (IO): shared record of information; also supplies
  //          the stdin, stdout, stderr and channel connections
  // returns: -1: failure in mysystem processing, check errno
  //          126: connecting child to its new stdout failed
  //          127: execve() call failed
  //          else: status of child
{
  static const char* msg = "unable to restore SIGCHLD handler\r\n";
  struct sigaction ignore, saveintr, savequit;

  // sanity checks first
  if ( ! m_isValid ) {
    errno = ENOEXEC; // no executable
    return -1;
  }

  ignore.sa_handler = SIG_IGN;
  sigemptyset( &ignore.sa_mask );
  ignore.sa_flags = 0;
  if ( sigaction( SIGINT, &ignore, &saveintr ) < 0 )
    return -1;
  if ( sigaction( SIGQUIT, &ignore, &savequit ) < 0 )
    return -1;

  // install our own SIGCHLD handler
  try {
    SignalHandlerCommunication::init( this );
  } catch ( set_signal ss ) {
    return -1;
  }

  // start wall-clock
  m_start = Time();

  // do this early, before fork()
  this->rewrite();

  // what are we doing
  long maxfd = getMaxFD();
  debug_msg( "about to invoke %s\n", m_argv[0] );
  if ( (m_child=fork()) < 0 ) {
    // no more process table space
    m_status = -1;
  } else if ( m_child == 0 ) {
    //
    // child
    //
    appinfo->setPrinted( true );

    // connect job's stdio
    if ( ! this->forcefd( appinfo->getStdin(), STDIN_FILENO ) ) _exit(126);
    if ( ! this->forcefd( appinfo->getStdout(), STDOUT_FILENO ) ) _exit(126);
    if ( ! this->forcefd( appinfo->getStderr(), STDERR_FILENO ) ) _exit(126);

    // close all other FDs
    for ( int fd=STDERR_FILENO+1; fd < maxfd; ++fd ) close(fd);

    // undo signal handlers
    sigaction( SIGINT, &saveintr, 0 );
    sigaction( SIGQUIT, &savequit, 0 );

    // restore old SIGCHLD handler in child process
    try {
      SignalHandlerCommunication::done();
    } catch ( set_signal ss ) {
      write( STDERR_FILENO, msg, strlen(msg) );
    }

    execve( m_argv[0], (char* const*) m_argv, environ );
    _exit(127); // executed in child process on error
  } else {
    //
    // parent
    //
    // channel checkups in parallel to child waiting
    EventLoop e( m_child, appinfo->getChannel(), STDERR_FILENO );
    // while ( ! SignalHandlerCommunication::instance()->isDone() )
    e.loop( SignalHandlerCommunication::instance()->isDone() );
  }

#if 0
  // done in signal handler now
  // save any errors before anybody overwrites this
  this->setErrno(); // m_saverr = errno;
#endif

  // stop wall-clock
  m_finish = Time();

  // restore old SIGCHLD handler in parent process
  try {
    SignalHandlerCommunication::done();
  } catch ( set_signal ss ) {
    write( STDERR_FILENO, msg, strlen(msg) );
  }

  // ignore errors on these, too.
  sigaction( SIGINT, &saveintr, 0 );
  sigaction( SIGQUIT, &savequit, 0 );

  // finalize
  return m_status;
}
pegasus-wms_4.0.1+dfsg/src/tools/k.2/xml.cc0000644000175000017500000000454611757531137017475 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include <cstdio>
#include <cstdarg>
#include <string>
#include "xml.hh"

static const char* RCS_ID = "$Id: xml.cc 50 2007-05-19 00:48:32Z gmehta $";

std::string
XML::indent( int width )
  // purpose: create a string with indentation
  // paramtr: width (IN): if >0, generate that many spaces
  // returns: a string either empty, or with the wanted number of spaces.
{
  return ( width > 0 ? std::string( width, ' ' ) : std::string() );
}

std::string
XML::quote( const std::string& original, bool isAttribute )
  // purpose: Escapes certain characters inappropriate for XML content output.
  // paramtr: original (IN): is a string that needs to be quoted
  //          isAttribute (IN): denotes an attribute's value, if set to true.
  //          If false, it denotes regular XML content outside of attributes.
  // returns: a string that is "safe" to print as XML.
{
  std::string buffer;
  for ( size_t i=0; i < original.size(); ++i ) {
    switch ( original[i] ) {
    case '\'': buffer += "&apos;"; break;
    case '"':  buffer += "&quot;"; break;
    case '<':  buffer += "&lt;"; break;
    case '&':  buffer += "&amp;"; break;
    case '>':  buffer += "&gt;"; break;
    default:   buffer += original[i]; break;
    }
  }
  return buffer;
}

std::string
XML::printf( const char* fmt, ... )
  // purpose: format arbitrary information into a C++ string.
  // paramtr: fmt (IN): printf compatible format
  //          ... (IN): parameters to format
  // returns: a string with the formatted information
{
  char temp[4096];
  va_list ap;
  *temp = '\0';
  va_start( ap, fmt );
  vsnprintf( temp, sizeof(temp), fmt, ap );
  va_end(ap);
  return std::string(temp);
}
pegasus-wms_4.0.1+dfsg/src/tools/k.2/uname-s0000755000175000017500000000015311757531137017647 0ustar ryngerynge#!/usr/bin/env perl
require 5.003;
use strict;
chomp($_=uc `uname -s`);
s{[^[:alnum:]]}{}g;
print "$_\n";
pegasus-wms_4.0.1+dfsg/src/tools/k.2/time.cc0000644000175000017500000000543511757531137017631 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
*/ #include "time.hh" static const char* RCS_ID = "$Id: time.cc 50 2007-05-19 00:48:32Z gmehta $"; bool Time::c_local = true; bool Time::c_extended = true; const Time Time::c_epoch( 0, 0, Time::c_local, Time::c_extended ); std::string Time::date( time_t seconds, long micros, bool isLocal, bool isExtended ) // purpose: create an ISO timestamp // paramtr: seconds (IN): tv_sec part of timeval // micros (IN): if negative, don't show micros. // isLocal (IN): flag, if 0 use UTC, otherwise use local time // isExtd (IN): flag, if 0 use concise format, otherwise extended // returns: a string with the formatted ISO 8601 timestamp { std::string result; char line[32]; struct tm zulu = *gmtime(&seconds); result.reserve(32); if ( isLocal ) { // requirement that we attach our time zone offset struct tm local = *localtime(&seconds); zulu.tm_isdst = local.tm_isdst; time_t distance = (seconds - mktime(&zulu)) / 60; int hours = distance / 60; // Solaris does not like std::abs(int) vs std::abs(double) int minutes = distance < 0 ? -distance % 60 : distance % 60; // timestamp strftime( line, sizeof(line), isExtended ? "%Y-%m-%dT%H:%M:%S" : "%Y%m%dT%H%M%S", &local ); result += line; // show microseconds if ( micros >= 0 ) { snprintf( line, sizeof(line), ".%03ld", micros / 1000 ); result += line; } // show timezone offset snprintf( line, sizeof(line), isExtended ? "%+03d:%02d" : "%+03d%02d", hours, minutes ); result += line; } else { // zulu time aka UTC strftime( line, sizeof(line), isExtended ? "%Y-%m-%dT%H:%M:%S" : "%Y%m%dT%H%M%S", &zulu ); result += line; // show microseconds if ( micros >= 0 ) { snprintf( line, sizeof(line), ".%03ld", micros / 1000 ); result += line; } // show timezone zulu result += 'Z'; } return result; } struct timeval Time::now( void ) // purpose: capture a point in time with microsecond extension // returns: a time record { struct timeval t = { -1, 0 }; int timeout = 0; while ( gettimeofday( &t, 0 ) == -1 && timeout < 10 ) timeout++; return t; } pegasus-wms_4.0.1+dfsg/src/tools/k.2/null.hh0000644000175000017500000000147211757531137017654 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
 */
#ifndef _NULL_HH
#define _NULL_HH

#include <exception>

class null_pointer : public std::exception {
public:
  null_pointer() throw() { }
  virtual ~null_pointer() throw();
};

#endif // _NULL_HH
pegasus-wms_4.0.1+dfsg/src/tools/k.2/README0000644000175000017500000000010611757531137017232 0ustar ryngeryngeKICKSTART V2
============

please read the doc/kickstart.{pdf|ps|dvi}
pegasus-wms_4.0.1+dfsg/src/tools/k.2/depend.mak0000644000175000017500000000163411757531137020312 0ustar ryngeryngeappinfo.o: appinfo.cc getif.hh useinfo.hh xml.hh null.hh jobinfo.hh \
 shared.hh time.hh statinfo.hh appinfo.hh maptypes.hh stagejob.hh \
 uname.hh
getif.o: getif.cc getif.hh
jobinfo.o: jobinfo.cc getif.hh jobinfo.hh shared.hh null.hh time.hh \
 xml.hh statinfo.hh useinfo.hh appinfo.hh maptypes.hh stagejob.hh \
 uname.hh mysignal.hh
justparse.o: justparse.cc
k.2.o: k.2.cc null.hh appinfo.hh maptypes.hh time.hh xml.hh statinfo.hh \
 jobinfo.hh shared.hh useinfo.hh stagejob.hh uname.hh
mysignal.o: mysignal.cc mysignal.hh
null.o: null.cc null.hh
quote.o: quote.cc quote.hh shared.hh
stagejob.o: stagejob.cc statinfo.hh null.hh xml.hh stagejob.hh \
 maptypes.hh jobinfo.hh shared.hh time.hh useinfo.hh appinfo.hh uname.hh
statinfo.o: statinfo.cc statinfo.hh null.hh xml.hh time.hh
time.o: time.cc time.hh
uname.o: uname.cc uname.hh xml.hh
useinfo.o: useinfo.cc useinfo.hh xml.hh null.hh time.hh
xml.o: xml.cc xml.hh
pegasus-wms_4.0.1+dfsg/src/tools/k.2/scan.l0000755000175000017500000001435711757531137017463 0ustar ryngerynge%{
/* This may look like -*-C++-*- code, but it is really flex
 *
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 *
 * $Id: scan.l 50 2007-05-19 00:48:32Z gmehta $
 *
 * Author: Jens-S. Vöckler
 * File:   scan.l
 *         2004-01-16
 *
 * $Log: scan.l,v $
 * Revision 1.5  2004/06/07 22:17:19  griphyn
 * Added "setup" feature to maintain symmetry to "cleanup" feature.
 * Added "here" option for stdin configuration, which permits a string
 * to be dumped into temporary file from the configuration, to be used
 * as stdin for jobs.
 *
 * Revision 1.4  2004/02/23 20:21:53  griphyn
 * Added new GTPL license schema -- much shorter :-)
 *
 * Revision 1.3  2004/02/11 22:36:28  griphyn
 * new parser.
 *
 * Revision 1.1  2004/02/03 23:13:17  griphyn
 * Kickstart version 2.
 *
 */
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <string>

#define YY_SKIP_YYWRAP false

extern unsigned long lineno;
extern char parserErrorSeen;

/* forward declaration */
void yyerror( const char* );
void warning( const char*, const char* );
int yywrap();
int yy_push_file( const char* path );

#include <deque>
#include "shared.hh"
#include "scan.tab.h"

// local forward declaration
static std::string find_eos( char quote );

struct FileInfo {
  // Maintain information about the recursion of include files
  // Recursing into includes requires unput buffer switching.
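  // Editorial note (not in the original): every "include" directive in a
  // kickstart config pushes one of these records via yy_push_file(), which
  // saves the current flex buffer, file name and line counter; yywrap()
  // pops the record at EOF and switches scanning back to the includer.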
public:
  FileInfo( const char* fn, unsigned lno, YY_BUFFER_STATE& yy )
    :m_filename(fn), m_yylineno(lno)
  { m_yybuffer = yy; }

  std::string     m_filename;
  unsigned long   m_yylineno;
  YY_BUFFER_STATE m_yybuffer;
};

typedef std::deque<FileInfo*> FileStack;
static FileStack filestack;
%}

id	[A-Za-z_][A-Za-z0-9_]*
lws	[ \t\r\v\f]

%%

#[^\n]*	;	/* do nothing - a comment */

include		return TK_INCLUDE;
setup		return TK_SETUP;
set		return TK_SET;
pre		return TK_PRE;
main		return TK_MAIN;
post		return TK_POST;
cleanup		return TK_CLEANUP;
transformation	return TK_TR1;
tr		return TK_TR2;
derivation	return TK_DV1;
dv		return TK_DV2;
chdir		return TK_CHDIR;
site		return TK_SITE;
stdin		return TK_STDIN;
stdout		return TK_STDOUT;
stderr		return TK_STDERR;
input		return TK_INPUT;
output		return TK_OUTPUT;
feedback	return TK_FEEDBACK;
stagein		return TK_STAGEIN;
stageout	return TK_STAGEOUT;
xmlns		return TK_XMLNS;

{id}	{ /* identifier */
	  yylval.string = strdup(yytext);
	  return TK_IDENT; }

\"	{ /* " found start of double-quoted string */
	  yylval.string = strdup( find_eos(yytext[0]).c_str() );
	  return TK_QSTR1; }

\'	{ /* found start of single-quoted string */
	  yylval.string = strdup( find_eos(yytext[0]).c_str() );
	  return TK_QSTR2; }

\n	{ /* count line number and terminate command */
	  lineno++;
	  return TK_EOC; }

\;	return TK_EOC;

{lws}+	;	/* ignore linear whitespace */

.	return yytext[0];	/* error or one-char-token */

%%

static const char *RCS_ID = "$Id: scan.l 50 2007-05-19 00:48:32Z gmehta $";

static std::string find_eos( char quote )
  // purpose: find closing quote to opening quote, counting backslashing
  // paramtr: quote (IN): The quote we look for
  // returns: a string containing the string with outer quotes removed
  //          Using a string allows for multi-line large tokens w/o overflow
{
  std::string result;
  int state = 0;
  for (;;) {
    int ch = yyinput(); // int, not char: EOF must remain distinct from data
    if ( ch==EOF || ch==0 ) break;
    if ( state == 0 ) {
      // regular state
      if ( ch==quote /* || ch=='\n' */ ) break;
      result.push_back(ch);
      if ( ch=='\\' ) state=1; // switch to backslash state
    } else {
      // seen-backslash state
      state=0;
      if ( ch=='\n' ) result.erase(result.size()-1); // continuation line
      else result.push_back(ch);
    }
  }
  return result;
}

#include <algorithm>
#include <functional>

class FileInfo_eq : public std::unary_function<const FileInfo*, bool> {
  // Predicate to find existing filenames in the stack of include
  // file recursion. This is used to detect circular references.
public:
  FileInfo_eq( const std::string& predicate )
    :m_predicate(predicate) { }
  bool operator()( const FileInfo* fi ) const
  { return fi->m_filename == m_predicate; }
private:
  std::string m_predicate;
};

int yy_push_file( const char* filename )
{
  FILE* temp;
  FileInfo* fi = new FileInfo( filename, lineno, YY_CURRENT_BUFFER );

  // check for out of memory
  if ( fi == NULL ) {
    yyerror( "Out of memory: Includes nested too deeply" );
    return -1;
  }

  // check for circular references
  FileStack::iterator circular =
    find_if( filestack.begin(), filestack.end(), FileInfo_eq(filename) );
  if ( circular != filestack.end() ) {
    char msg[1024];
    snprintf( msg, sizeof(msg),
              "Detected circular reference for file \"%s\"", filename );
    yyerror( msg );
    return -1;
  }

  if ( (temp = fopen( filename, "r" )) == NULL ) {
    char msg[1024];
    snprintf( msg, sizeof(msg),
              "Unable to open include file \"%s\"", filename );
    delete fi;
    yyerror( msg );
    return -1;
  } else {
    filestack.push_back( fi );
  }

  yy_switch_to_buffer( yy_create_buffer( temp, YY_BUF_SIZE ) );
  BEGIN(INITIAL);
  lineno = 1;
  return 0;
}

int yywrap()
{
  if ( filestack.size() == 0 ) {
    return 1;
  } else {
    FileInfo* fi = filestack.back();
    filestack.pop_back();
    yy_delete_buffer( YY_CURRENT_BUFFER );
    yy_switch_to_buffer( fi->m_yybuffer );
    lineno = fi->m_yylineno;
    delete fi;
    return 0;
  }
}

void yyerror( const char* msg )
{
  /* parserErrorSeen = 1; */
  fprintf( stderr, "ERROR: %lu: %s at input \"%s\"\n", lineno, msg, yytext );
}

void warning( const char* msg, const char* addon )
{
  fprintf( stderr, "Warning: %lu: %s at input \"%s\"\n", lineno, msg, addon );
}
pegasus-wms_4.0.1+dfsg/src/tools/k.2/statinfo.hh0000644000175000017500000003770111757531137020535 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#ifndef _CHIMERA_STATINFO_HH
#define _CHIMERA_STATINFO_HH

#include <sys/types.h>
#include <sys/stat.h>
#include <string>
#include <iosfwd>
#include <cstring>
#include "null.hh"
#include "xml.hh"

#if 0
#ifndef HAS_MUTABLE
#define mutable
#endif
#endif

class StatInfo : public XML {
  // Class to encapsulate and maintain stat call information about various
  // files or descriptors or temporary thingies.
public:
  enum StatSource {
    IS_INVALID, // not initialized
    IS_FILE,    // regular file
    IS_HANDLE,  // file descriptor
    IS_TEMP,    // temporary file (name + handle)
    IS_FIFO     // POSIX FIFO (name + handle)
  };

  StatInfo();
  // purpose: ctor

  virtual ~StatInfo();
  // purpose: dtor

  virtual int update() = 0;
  // purpose: update existing and initialized statinfo with latest info
  // returns: the result of the stat() or fstat() system call.
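  // Illustrative sketch (not part of the original header): typical
  // polymorphic use of a concrete StatInfo subclass, following the
  // declarations in this file; the driver function itself is made up.
#if 0
  // this sketch would additionally need <fcntl.h> and <iostream>
  void statinfo_demo()
  {
    StatFile sf( "output.dat", O_WRONLY | O_CREAT, true );
    if ( sf.isValid() ) {
      sf.update();                        // refresh stat(2) information
      std::cout << sf.toXML( 2, "out" );  // indented XML with id="out"
    }
  }
#endif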
  virtual std::string toXML( int indent = 0, const char* id = 0 ) const;
  // purpose: XML format a stat info record
  // paramtr: indent (IN): indentation level of tag
  //          id (IN): id attribute, use NULL to not generate
  // returns: string containing the element data
  // warning: dependent elements are formatted using show() and data()

  virtual std::ostream& toXML( std::ostream& s,
                               int indent = 0,
                               const char* nspace = 0 ) const;
  // purpose: format content as XML onto stream
  // paramtr: s (IO): stream to put things on
  //          indent (IN): indentation depth, negative for none
  //          nspace (IN): tag namespace, if not null
  // returns: s
  // warning: dependent elements are formatted using show() and data()

  virtual StatSource whoami( void ) const;
  // purpose: if you don't like RTTI
  // returns: type

  virtual int forcefd( int fd ) const = 0;
  // purpose: force open a file on a certain fd
  // paramtr: fd (IN): is the file descriptor to plug onto. If this fd is
  //          the same as the descriptor in info, nothing will be done.
  // returns: 0 if all is well, or fn was NULL or empty.
  //          1 if opening a filename failed,
  //          2 if dup2 call failed

  bool isValid( void ) const;
  // purpose: check, if object was correctly constructed
  // returns: true for a valid object, false for an invalid one

  void invalidate( void );
  // purpose: invalidate the current member

protected:
  std::string m_id;  // for certain entities, there is an ID
  std::string m_lfn; // for certain entities, there is a LFN

public:
  inline void setId( const std::string& id )   { m_id = id; }
  inline std::string getId(void) const         { return m_id; }
  inline void setLFN( const std::string& lfn ) { m_lfn = lfn; }
  inline std::string getLFN() const            { return m_lfn; }

private:
  static const unsigned int c_valid;
  unsigned m_valid; // set to a sensible cookie if valid

protected:
  virtual std::string data( int indent = 0, const char* nspace = 0 ) const
  // purpose: Generate special post-element code, e.g. stderr and stdout data
  // paramtr: indent (IN): indentation level for tag
  //          nspace (IN): tag namespace, if not null
  // returns: string with section
  { return std::string(); }

  virtual std::ostream& data( std::ostream& s,
                              int indent = 0, const char* nspace = 0 ) const
  // purpose: format content as XML onto stream
  // paramtr: s (IO): stream to put things on
  //          indent (IN): indentation depth, negative for none
  //          nspace (IN): tag namespace, if not null
  // returns: s
  { return s; }

  virtual std::string show( int indent = 0, const char* nspace = 0 ) const = 0;
  // purpose: Generate the element-specific information. Called from toXML()
  // paramtr: indent (IN): indentation level of tag
  //          nspace (IN): tag namespace, if not null
  // returns: string with the information.

  virtual std::ostream& show( std::ostream& s,
                              int indent = 0, const char* nspace = 0 ) const = 0;
  // purpose: format content as XML onto stream, called from toXML()
  // paramtr: s (IO): stream to put things on
  //          indent (IN): indentation depth, negative for none
  //          nspace (IN): tag namespace, if not null
  // returns: s

protected:
  mutable struct stat m_stat; // result from stat() or fstat()
  mutable int m_error;        // reason for failed stat call
};

class StatFile : public virtual StatInfo {
  // This class handles all regular files.
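  // Illustrative sketch (not part of the original header): computing and
  // retrieving the MD5 digest; sum_error is the nested exception class
  // declared further below in this class.
#if 0
  void checksum_demo( StatFile& sf )
  {
    try {
      sf.md5sum();                         // may throw StatFile::sum_error
      StatFile::Digest d;
      if ( sf.getMD5sum(d) )
        ; // the 16 raw digest bytes are now in d
    } catch ( const StatFile::sum_error& e ) {
      // IO error while checksumming; e.getMessage() holds the details
    }
  }
#endif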
public:
  typedef unsigned char Digest[16];

  StatFile( const std::string& filename, int openmode, bool truncate );
  // purpose: Initialize a stat info buffer with a filename to point to
  // paramtr: filename (IN): the filename to memorize (deep copy)
  //          openmode (IN): are the fcntl O_* flags to later open calls
  //          truncate (IN): flag to truncate stdout or stderr

#if 0
  StatFile( const char* filename, int openmode, int truncate );
  // purpose: Initialize a stat info buffer with a filename to point to
  // paramtr: filename (IN): the filename to memorize (deep copy)
  //          openmode (IN): are the fcntl O_* flags to later open calls
  //          truncate (IN): flag to truncate stdout or stderr
#endif

  virtual ~StatFile(); // dtor

  virtual StatInfo::StatSource whoami( void ) const;
  // purpose: if you don't like RTTI
  // returns: type

  virtual int update();
  // purpose: update existing and initialized statinfo with latest info
  // returns: the result of the stat() or fstat() system call.

  virtual int forcefd( int fd ) const;
  // purpose: force open a file on a certain fd
  // paramtr: fd (IN): is the file descriptor to plug onto. If this fd is
  //          the same as the descriptor in info, nothing will be done.
  // returns: 0 if all is well, or fn was NULL or empty.
  //          1 if opening a filename failed,
  //          2 if dup2 call failed

  //
  // Accessors
  //
  const std::string& getFilename() const { return m_filename; }
  int getOpenmode() const                { return m_openmode; }
  ssize_t getHeaderSize() const          { return m_hsize; }
  const unsigned char* getHeader() const { return m_header; }

  class sum_error {
    // class to encapsulate IO exceptions from the checksum functions.
  public:
    inline sum_error( const std::string& msg, int error = 0 )
      :m_msg(msg)
    {
      if ( error != 0 ) {
        m_msg += ": ";
        m_msg += strerror(error);
      }
    }
    inline std::string getMessage() const { return m_msg; }
  private:
    std::string m_msg;
  };

  virtual void md5sum();
  // purpose: calculate the MD5 checksum over the complete file
  // throws : sum_error on IO error, bad_alloc on out of memory

  bool getMD5sum( Digest digest ) const;
  // purpose: obtains the stored MD5 sum
  // paramtr: digest (OUT): a digest area to store the 128 bits into
  // returns: true, if a string was stored in the digest area,
  //          false, if no sum was obtained, and the digest is untouched

protected:
  virtual std::string show( int indent = 0, const char* nspace = 0 ) const;
  // purpose: Generate the element-specific information. Called from toXML()
  // paramtr: indent (IN): indentation level of tag
  //          nspace (IN): tag namespace, if not null
  // returns: string with the information.

  virtual std::ostream& show( std::ostream& s,
                              int indent = 0, const char* nspace = 0 ) const;
  // purpose: format content as XML onto stream
  // paramtr: s (IO): stream to put things on
  //          indent (IN): indentation depth, negative for none
  //          nspace (IN): tag namespace, if not null
  // returns: s

  StatFile();
  // purpose: ctor

protected:
  std::string m_filename; // name of the file to access
  int         m_openmode; // open mode for open call
  ssize_t     m_hsize;    // valid bytes in header (result of read())
  bool        m_done_md5; // valid info in md5sum member
  Digest      m_header;   // first bytes from file
  std::string m_logical;  // LFN
  Digest      m_digest;   // md5sum, if applicable
};

class StatHandle : public virtual StatInfo {
  // This class handles already open files, of which only the descriptor
  // is known.
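  // Illustrative sketch (not part of the original header): wrapping an
  // already-open descriptor, e.g. the stdout the process inherited.
#if 0
  StatHandle sh( STDOUT_FILENO );
  sh.update(); // fstat(2) the live descriptor
#endif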
public:
  StatHandle( int descriptor );
  // purpose: Initialize a stat info buffer with an open descriptor
  // paramtr: descriptor (IN): the handle to attach to

  virtual ~StatHandle();
  // purpose: dtor

  virtual StatSource whoami( void ) const;
  // purpose: if you don't like RTTI
  // returns: type

  virtual int update();
  // purpose: update existing and initialized statinfo with latest info
  // returns: the result of the stat() or fstat() system call.

  virtual int forcefd( int fd ) const;
  // purpose: force open a file on a certain fd
  // paramtr: fd (IN): is the file descriptor to plug onto. If this fd is
  //          the same as the descriptor in info, nothing will be done.
  // returns: 0 if all is well, or fn was NULL or empty.
  //          1 if opening a filename failed,
  //          2 if dup2 call failed

  //
  // Accessors
  //
  int getDescriptor() const { return m_descriptor; }

protected:
  virtual std::string show( int indent = 0, const char* nspace = 0 ) const;
  // purpose: Generate the element-specific information. Called from toXML()
  // paramtr: indent (IN): indentation level of tag
  //          nspace (IN): tag namespace, if not null
  // returns: string with the information.

  virtual std::ostream& show( std::ostream& s,
                              int indent = 0, const char* nspace = 0 ) const;
  // purpose: format content as XML onto stream
  // paramtr: s (IO): stream to put things on
  //          indent (IN): indentation depth, negative for none
  //          nspace (IN): tag namespace, if not null
  // returns: s

protected:
  int m_descriptor; // file descriptor
};

class StatTemporary : public StatFile, public StatHandle {
  // This class handles temporary files.
public:
  StatTemporary( int fd, const char* fn );
  // purpose: Initialize for an externally generated temporary file
  // paramtr: fd (IN): is the connected file descriptor
  //          fn (IN): is the concretized temporary filename

  StatTemporary( char* pattern );
  // purpose: Initialize a stat info buffer with a temporary file
  // paramtr: pattern (IO): is the input pattern to mkstemp()
  // warning: pattern will be modified!

  StatTemporary( const std::string& pattern, bool c_o_e = true );
  // purpose: Initialize a stat info buffer with a temporary file
  // paramtr: pattern (IN): is the input pattern to mkstemp()
  //          c_o_e (IN): if true, set FD_CLOEXEC fd flag, unset if false
  // warning: pattern will be copied for modification

  virtual ~StatTemporary();
  // purpose: dtor

  virtual StatSource whoami( void ) const;
  // purpose: if you don't like RTTI
  // returns: type

  virtual int update();
  // purpose: update existing and initialized statinfo with latest info
  // returns: the result of the stat() or fstat() system call.

  virtual int forcefd( int fd ) const;
  // purpose: force open a file on a certain fd
  // paramtr: fd (IN): is the file descriptor to plug onto. If this fd is
  //          the same as the descriptor in info, nothing will be done.
  // returns: 0 if all is well, or fn was NULL or empty.
  //          1 if opening a filename failed,
  //          2 if dup2 call failed

protected:
  virtual std::string show( int indent = 0, const char* nspace = 0 ) const;
  // purpose: Generate the element-specific information. Called from toXML()
  // paramtr: indent (IN): indentation level of tag
  //          nspace (IN): tag namespace, if not null
  // returns: string with the information.
virtual std::ostream& show( std::ostream& s, int indent = 0, const char* nspace = 0 ) const; // purpose: format content as XML onto stream // paramtr: s (IO): stream to put things on // indent (IN): indentation depth, negative for none // nspace (IN): tag namespace, if not null // returns: s virtual std::string data( int indent = 0, const char* nspace = 0 ) const; // purpose: Generate special post-element code, e.g. stderr and stdout data // paramtr: indent (IN): indentation level for tag // nspace (IN): tag namespace, if not null // returns: string with the information. virtual std::ostream& data( std::ostream& s, int indent = 0, const char* nspace = 0 ) const; // purpose: format content as XML onto stream // paramtr: s (IO): stream to put things on // indent (IN): indentation depth, negative for none // nspace (IN): tag namespace, if not null // returns: s protected: // no member data }; class StatFifo : public StatFile, public StatHandle { public: StatFifo( const std::string& pattern, const std::string& key ); // purpose: Initialize a stat info buffer associated with a named pipe // paramtr: pattern (IN): input pattern to mkstemp(), iff XXXXXX suffix // otherwise append hyphen-XXXXXX // key (IN): is the environment key at which to store the filename // unused if empty or not starting like an identifier #if 0 StatFifo( char* pattern, const char* key ); // purpose: Initialize a stat info buffer associated with a named pipe // paramtr: pattern (IO): input pattern to mkstemp(), will be modified! // key (IN): is the environment key at which to store the filename #endif virtual ~StatFifo(); // purpose: dtor virtual StatSource whoami( void ) const; // purpose: if you don't like RTTI // returns: type virtual int update(); // purpose: update existing and initialized statinfo with latest info // returns: the result of the stat() or fstat() system call. virtual int forcefd( int fd ) const; // purpose: force open a file on a certain fd // paramtr: fd (IN): is the file descriptor to plug onto. If this fd is // the same as the descriptor in info, nothing will be done. // returns: 0 if all is well, or fn was NULL or empty. // 1 if opening a filename failed, // 2 if dup2 call failed virtual void add( ssize_t read, ssize_t write, ssize_t count ) // purpose: update the size information passed through the FIFO { m_rsize += read; m_wsize += write; m_count += count; } protected: virtual std::string show( int indent = 0, const char* nspace = 0 ) const; // purpose: Generate the element-specific information. Called from toXML() // paramtr: indent (IN): indentation level of tag // nspace (IN): tag namespace, if not null // returns: string with the information. virtual std::ostream& show( std::ostream& s, int indent = 0, const char* nspace = 0 ) const; // purpose: format content as XML onto stream // paramtr: s (IO): stream to put things on // indent (IN): indentation depth, negative for none // nspace (IN): tag namespace, if not null // returns: s protected: size_t m_count; // number of total bytes size_t m_rsize; // number of bytes read from fifo size_t m_wsize; // number of bytes written to socket }; #endif // _CHIMERA_STATINFO_HH pegasus-wms_4.0.1+dfsg/src/tools/k.2/k.2.cc0000644000175000017500000001121211757531137017253 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. 
This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include <iostream>
#include <memory>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <cerrno>
#include <ctime>
#include <unistd.h>

#include "null.hh"
#include "appinfo.hh"

// truly shared globals
bool isExtended = true; // timestamp format concise or extended
bool isLocal = true;    // timestamp time zone, UTC or local
AppInfo* app;           // make globally visible for bison

// from bison/flex
extern FILE* yyin;
extern int yyparse();

static const char* RCS_ID = "$Id: k.2.cc 50 2007-05-19 00:48:32Z gmehta $";

static void helpMe()
  // purpose: print invocation quick help and exit with error condition.
  // returns: does not return - exits with an error to the OS level.
{
  std::cerr << RCS_ID << std::endl
            << "Usage: " << app->getSelf() << " [cfg]" << std::endl
            << " cfg\tOptional configuration file to use instead of stdin"
            << std::endl;

  // avoid printing of results in exit handler
  app->setPrinted( true );

  // exit with error condition
  exit(127);
}

int main( int argc, char* argv[] )
{
  int result = -1;

  // avoid libc tz malloc errors during app->print()
  tzset();

  try {
    // create me
    if ( (app = new AppInfo(argv[0])) == 0 ) throw null_pointer();

    // take ownership for auto-delete on exit from try block
    std::auto_ptr<AppInfo> owner( app );

#if 0
    fprintf( stderr, "# appinfo=%d, jobinfo=%d, statinfo=%d, useinfo=%d\n",
             sizeof(AppInfo), sizeof(JobInfo), sizeof(StatInfo), sizeof(UseInfo) );
#endif

    // if there are no arguments, and stdin is a TTY, print help and exit
    if ( (argc == 1 && isatty(STDIN_FILENO)) || argc > 2 ) helpMe();

    // open parsing file
    if ( argc == 1 )
      yyin = fdopen( STDIN_FILENO, "r" ); /// stdin;
    else
      yyin = fopen( argv[1], "r" );

    // sanity check
    if ( yyin == 0 ) {
      int saverr = errno;
      std::cerr << "open " << argv[1] << ": " << strerror(saverr) << std::endl;
      app->setPrinted( true );
      return 127;
    }

    // read until the bitter end
    /// while ( ! feof(yyin) ) yyparse();
    if ( yyparse() ) {
      // unresolved parse error
      std::cerr << "Unrecoverable error while parsing configuration" << std::endl;
      app->setPrinted( true );
      return 127;
    }

    // done with config file input
    fclose(yyin);

    //
    // >>> start with the main work of running applications and stat calls
    //

    // act on file
    if ( ! app->hasMainJob() ) {
      std::cerr << "There is no main job" << std::endl;
      app->setPrinted( true );
      return 127;
    }

    // Maybe here the 2nd-level stage-in???
    errno = 0;
    if ( app->runStageIn() ) {
      // what now?
      int saverr( errno );
      std::cerr << "Failure ";
      if ( saverr != 0 ) std::cerr << '"' << strerror(saverr) << "\" ";
      std::cerr << "during stage-in, ignoring" << std::endl;
    }

    // Do the stat on the input files
    app->createInputInfo();

    // execute all jobs
    result = 0;
    app->run(result);

    // Do the stat on the output files
    app->createOutputInfo();

    // Maybe here the 2nd-level stage-out???
    errno = 0;
    if ( app->runStageOut() ) {
      // what now?
      int saverr( errno );
      std::cerr << "Failure ";
      if ( saverr != 0 ) std::cerr << '"' << strerror(saverr) << "\" ";
      std::cerr << "during stage-out, ignoring" << std::endl;
    }

    //
    // <<< done with the main work of running applications and stat calls
    //

    // append results atomically to logfile
    app->print();

    // done
  } catch ( const null_pointer& e ) {
    std::cerr << "caught kickstart NULL pointer exception" << std::endl;
  } catch ( const std::exception& e ) {
    std::cerr << "caught STL or derived exception" << std::endl;
  } catch (...) {
    std::cerr << "caught *unknown* exception in main()" << std::endl;
  }

  // done
  return JobInfo::exitCode(result);
}
pegasus-wms_4.0.1+dfsg/src/tools/k.2/justparse.cc0000644000175000017500000000217611757531137020712 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include <cstdio>
#include <cstdlib>

extern FILE* yyin;
extern int yylex();

unsigned long lineno = 1;

union {
  char* string;
} yylval;

int main( int argc, char* argv[] )
{
  int tokenclass;
  if ( argc != 2 ) return 1;
  yyin = fopen( argv[1], "r" );
  if ( yyin == NULL ) return 2;

  do {
    yylval.string = 0;
    tokenclass = yylex();
    printf( "%3d %s\n", tokenclass, yylval.string ? yylval.string : "" );
    if ( yylval.string ) free((void*) yylval.string);
  } while ( tokenclass != 0 );

  return 0;
}
pegasus-wms_4.0.1+dfsg/src/tools/k.2/maptypes.hh0000644000175000017500000000216611757531137020545 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#ifndef _CHIMERA_MAPTYPES_HH
#define _CHIMERA_MAPTYPES_HH

#include <map>
#include <string>

// normal LFN to SFN mapping, includes a flag
typedef std::pair< std::string, bool > FilenameBool;
typedef std::map< std::string, FilenameBool > FilenameMap;

// multiple SFN to TFN mapping
typedef std::multimap< std::string, std::string > FilenameMultiMap;

// range operator
typedef std::pair< FilenameMultiMap::const_iterator,
                   FilenameMultiMap::const_iterator > FilenameMMRange;

#endif // _CHIMERA_MAPTYPES_HH
pegasus-wms_4.0.1+dfsg/src/tools/k.2/stagejob.cc0000644000175000017500000001364311757531137020471 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#include <string>
#include <stdexcept>
#include <cstdio>
#include <unistd.h>

#include "statinfo.hh"
#include "stagejob.hh"
#include "jobinfo.hh"
#include "appinfo.hh"

static const char* RCS_ID = "$Id: stagejob.cc 50 2007-05-19 00:48:32Z gmehta $";

StageJob::StageJob( const char* tag, const std::string& format,
                    const char* commandline )
  // purpose: initialize the data structure by parsing a command string.
  // paramtr: tag (IN): stage-in or stage-out
  //          format (IN): format string for output
  //          commandline (IN): commandline concatenated string to separate
  :JobInfo(tag,commandline),m_tempfile(0),m_format(format)
{
  // FIXME: We could do checking of the format string, but what do we care?
  if ( m_format.find("%l") == std::string::npos &&
       m_format.find("%s") == std::string::npos &&
       m_format.find("%t") == std::string::npos )
    throw std::invalid_argument( "format string lacks %l,%s,%t" );
}

StageJob::StageJob( const char* tag, const std::string& format,
                    const StringList& argv )
  // purpose: initialize the data structure by parsing a command string.
  // paramtr: tag (IN): stage-in or stage-out
  //          format (IN): format string for output
  //          argv (IN): commandline already split into arg vector
  :JobInfo(tag,argv),m_tempfile(0),m_format(format)
{
  // FIXME: We could do checking of the format string, but what do we care?
  if ( m_format.find("%l") == std::string::npos &&
       m_format.find("%s") == std::string::npos &&
       m_format.find("%t") == std::string::npos )
    throw std::invalid_argument( "format string lacks %l,%s,%t" );
}

StageJob::~StageJob()
  // purpose: dtor
{
  if ( m_tempfile ) delete m_tempfile;
  m_tempfile = 0;
}

int
StageJob::createTempfile( const char* id, const FilenameMap& l2s,
                          const FilenameMultiMap& s2t )
  // purpose: create the tempfile from the external filemaps
  // paramtr: id (IN): stage-in or stage-out
  //          l2s (IN): map with LFN to SFN mapping
  //          s2t (IN): multimap with SFN to TFN mapping
  // warning: filehandle for tempfile is open forthwith
  // returns: -1 in case of error
  //          0 for nothing to do
  //          >0 for number of files
{
  int result(-1);

  // check, if there is anything to do
  if ( s2t.size() == 0 || l2s.size() == 0 ) return 0;

  // Create tmpfile with filelist
  std::string tempfn( AppInfo::getTempDir() );
  tempfn += "/gs.";
  tempfn += id;
  tempfn += ".XXXXXX";
  StatTemporary* st = new StatTemporary(tempfn,false);
  if ( st && st->isValid() ) {
    if ( m_tempfile ) delete m_tempfile;
    m_tempfile = st;

    // if we created the temporary file
    result = 0;

    // fill in file information
    m_tempfile->setId(id);

    // enumerate LFN -> SFN => TFN
    for ( FilenameMap::const_iterator i(l2s.begin()); i!=l2s.end(); ++i ) {
      // std::string lfn( i->first );
      // std::string sfn( i->second );
      FilenameMMRange p( s2t.equal_range((*i).second.first) );

      // check, if there are TFNs available
      // only do the next intensive steps, if necessary
      std::string::size_type n;
      if ( p.first != p.second ) {
        // start with a format template
        std::string msg( m_format );

        // replace all %l with LFN
        for ( n=msg.find("%l"); n != std::string::npos; n=msg.find("%l") ) {
          msg.replace( n, 2, i->first );
        }

        // replace all %s with SFN
        for ( n=msg.find("%s"); n != std::string::npos; n=msg.find("%s") ) {
          msg.replace( n, 2, (*i).second.first );
        }

        // dynamically determine the separator string
        // it may have contained %s or %l itself
        for ( n=msg.find("%t"); n != std::string::npos; n=msg.find("%t") ) {
          std::string separator;
          std::string::size_type m(n+2); // first position past the "%t"
          if ( n+2 >= msg.size() || msg.at(n+2) != '{' ) {
            // no "{separator}" string, default to " "
            separator = " ";
          } else {
            try {
              for ( m=n+3; msg.at(m) != '}'; ++m ) ;
              // postcondition: [n+2,m] == "{separator}"
              separator = msg.substr( n+3, m-(n+3) );
              ++m; // first position past the closing brace
            } catch ( std::out_of_range& ) {
              // unterminated separator, take the rest of the string
              separator = msg.substr( n+3 );
              m = msg.size();
            }
          }

          // assemble replacement string
          std::string replacement;
          for ( FilenameMultiMap::const_iterator j = p.first; j != p.second; ++j ) {
            if ( j != p.first ) replacement += separator;
            replacement += j->second;
          }

          // replace the complete %t token including any {separator}
          msg.replace( n, m-n, replacement );
        }

        // terminate message string
        msg += "\r\n";

        // FIXME: very inefficient continuous writes, use buffering
        ssize_t wsize = write( m_tempfile->getDescriptor(),
                               msg.c_str(), msg.size() );
        if ( wsize == static_cast<ssize_t>(msg.size()) ) result++;
        else {
          // throw IOException
          result = -1;
          break;
        }
      }
    }

    // update stat info record
    if ( result >= 0 ) m_tempfile->update();
  } else {
    // discard an unusable temporary object
    delete st;
  }

  return result;
}

void
StageJob::rewrite()
  // purpose: rewrite the argv vector before calling the job
  // warning: called from system()
{
  if ( m_tempfile && m_tempfile->isValid() ) {
    this->addArgument( m_tempfile->getFilename() );
  } else {
    fprintf( stderr, "%s tempfile\n", m_tempfile ? "invalid" : "null" );
  }
  JobInfo::rewrite();
}
pegasus-wms_4.0.1+dfsg/src/tools/k.2/jobinfo.hh0000644000175000017500000001556711757531137020336 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#ifndef _JOBINFO_HH
#define _JOBINFO_HH

#include <sys/types.h>
#include <sys/resource.h>
#include <string>
#include "shared.hh"
#include "null.hh"
#include "time.hh"
#include "xml.hh"
#include "statinfo.hh"
#include "useinfo.hh"
#include <cerrno>

#ifndef HAS_MUTABLE
#define mutable
#endif

class AppInfo; // forward declaration

class JobInfo : public XML {
  // This class assembles information about each job that could be run.
  // A job can be a prejob, the main job, a post job, or a cleanup job.
  // Except for the main job, all other jobs may occur 0..N times.
private:
  std::string m_tag; // element tag, e.g. mainjob etc.
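  // Illustrative sketch (not part of the original header): the expected
  // JobInfo lifecycle as driven by kickstart. Construction parses the
  // commandline; system() forks, execs and reaps; exitCode() cooks the
  // raw wait() status. The driver and the "app" pointer are hypothetical.
#if 0
  JobInfo job( "mainjob", "/bin/date -u" );
  if ( job.getValidity() == JobInfo::VALID ) {
    int raw = job.system( app );          // fork, exec, wait
    int code = JobInfo::exitCode( raw );  // <0 error, 0-127 exit, 128+ signal
  }
#endif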
public:
  enum Validity { INVALID, VALID, NOTFOUND };

private:
  Validity m_isValid;
  char* m_copy;           // buffer for argument separation

protected:
  char* const* m_argv;    // application executable and arguments
  int m_argc;             // application CLI number of arguments

private:
  Time m_start;           // point of time that app was started
  Time m_finish;          // point of time that app was reaped

  pid_t m_child;          // pid of process that ran application
  int m_status;           // raw exit status of application
  int m_saverr;           // errno for status < 0

  UseInfo* m_use;         // rusage record from reaping application status
  StatInfo* m_executable; // stat() info for executable, if available

  // render inaccessible
  JobInfo();
  JobInfo( const JobInfo& );
  JobInfo& operator=( const JobInfo& );

public:
  JobInfo( const char* tag, const char* commandline );
  // purpose: initialize the data structure by parsing a command string.
  // paramtr: tag (IN): kind of job, used for XML element tag name
  //          commandline (IN): commandline concatenated string to separate

  JobInfo( const char* tag, const StringList& args );
  // purpose: initialize the data structure by parsing a command string.
  // paramtr: tag (IN): kind of job, used for XML element tag name
  //          args (IN): commandline already split into arg vector

  virtual ~JobInfo();
  // purpose: dtor

  virtual std::string toXML( int indent = 0, const char* nspace = 0 ) const;
  // purpose: XML format a job info record.
  // paramtr: indent (IN): indentation level of tag
  //          nspace (IN): If defined, namespace prefix before element
  // returns: string containing the element data

  virtual std::ostream& toXML( std::ostream& s,
                               int indent = 0, const char* nspace = 0 ) const;
  // purpose: XML format a rusage info record onto a given stream
  // paramtr: s (IO): stream to put information into
  //          indent (IN): indentation level of tag
  //          nspace (IN): If defined, namespace prefix before element
  // returns: s

  inline Validity getValidity() const   { return m_isValid; }
  inline void setValidity( Validity v ) { m_isValid = v; }
  inline int getStatus() const          { return m_status; }
  inline void setStatus( int status )   { m_status = status; }
  inline int getErrno() const           { return m_saverr; }
  inline void setErrno()                { m_saverr = errno; }

  inline const char* getArg0() const
  { if ( m_argv ) return m_argv[0]; else throw null_pointer(); }

  void setUse( const struct rusage* use );
  // purpose: sets the rusage information from an external source
  // paramtr: use (IN): pointer to a valid rusage record

  int wait4( int flags = 0 );
  // purpose: wrapper around system wait4() call
  // returns: result from the wait4 call.
  // sidekick: m_child (IN): pid to check for, 1st argument of wait4
  //           m_status (OUT): set by the wait4 call
  //           m_use (OUT): rusage record will be updated

protected:
  void addArgument( const std::string& arg );
  // purpose: Adds an additional argument to the end of the CLI
  // paramtr: arg (IN): Argument string to add. Will _not_ be interpreted!
  // warning: You cannot set the application to run with this
  // warning: This (ugly) hack is for internal use for stage-in jobs.

  virtual void rewrite();
  // purpose: rewrite the argv vector before calling the job
  // warning: called from system()

  virtual bool forcefd( const StatInfo* si, int fd ) const
  // purpose: force a stdio filehandle from a statinfo record
  // paramtr: si (IN): StatInfo placeholder for a filehandle
  //          fd (IN): stdio filehandle to connect with new
  // returns: true, if all went well, or false otherwise
  // warning: called from system()
  { return ( si == 0 ? false : si->forcefd(fd) == 0 ); }
public:
  int system( AppInfo* appinfo );
  // purpose: runs the current job with the given stdio connections
  // paramtr: appinfo (IO): shared record of information; also supplies
  //          the stdin, stdout, stderr and channel connections
  // returns: -1: failure in mysystem processing, check errno
  //          126: connecting child to its new stdout failed
  //          127: execve() call failed
  //          else: status of child

public:
  static int exitCode( int raw );
  // purpose: convert the raw result from wait() into a status code
  // paramtr: raw (IN): the raw exit code
  // returns: a cooked exit code
  //          < 0 --> error while invoking job
  //          [0..127] --> regular exitcode
  //          [128..] --> terminal signal + 128

  static char* findApplication( const char* fn );
  // purpose: check the executable filename and correct it if necessary
  //          absolute names will not be matched against a PATH
  // paramtr: fn (IN): current knowledge of filename
  // returns: newly allocated fqpn of path to executable, or NULL if not found
  // warning: use free() to free the allocation

  static bool hasAccess( const char* fn );
  // purpose: check a given file for being accessible and executable
  //          under the currently effective user and group id.
  // paramtr: fn (IN): fully qualified path to check
  // returns: true if the file is accessible, false for not
};

#endif // _JOBINFO_HH
pegasus-wms_4.0.1+dfsg/src/tools/k.2/quote.cc0000644000175000017500000003566411757531137020033 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
*/ #include "quote.hh" #include // // STATE EOS "" '' $ { } BS ALNUM LWS ELSE // ------------+-------+-------+-------+-------+-------+-------+-------+-------+-------+----- // 0 NQ_LWS F 5 3 10 1 1 2 1 0 1 // - - - - Sb Sb - Sb - Sb // 1 NQ_MAIN F 5 3 10 1 1 2 1 0 1 // Fb - - - Sb Sb - Sb Fb Sb // 2 NQ_BS E1 1 1 1 1 1 1 1 1 1 // - Sb Sb Sb Sb Sb Sb Sb Sb // ------------+-------+-------+-------+-------+-------+-------+-------+-------+-------+----- // 3 SQ_MAIN E2 F 3 1 3 3 3 3 4 3 3 3 // - Fb Sb - Sb Sb Sb Sb - Sb Sb Sb // 4 SQ_BS E1 3 3 3 3 3 3 3 3 3 // - Sb Sb Sb Sb Sb Sb Sb Sb Sb // ------------+-------+-------+-------+-------+-------+-------+-------+-------+-------+----- // 5 DQ_MAIN E3 F 1 5 5 7 5 5 6 5 5 5 // - Fb - Sb Sb - Sb Sb - Sb Sb Sb // 6 DQ_BS E1 5 5 5 5 5 5 5 5 5 // - Sb Sb Sb Sb Sb Sb Xb Sb Sb // 7 DQ_DLLR E3 1 E4 E4 E4 9 E4 6 8 5 E4 // - - - - - - - S($) Sv S($) - // 8 DQ_VAR1 E3 1 5 5 7 5 5 6 8 5 5 // - Fv Fvpb Fvpb Fv Fvpb Fvpb Fv Sv Fvpb Fvpb // 9 DQ_VAR2 E1 9 9 E4 E4 5 9 9 9 9 // - Sv Sv - - Fv Sv Sv Sv Sv // ------------+-------+-------+-------+-------+-------+-------+-------+-------+-------+----- // 10 NQ_DLLR 1 5 3 10 12 E4 2 11 1 1 // S($)pb S($) S($) Sb - - S($) Sv S($)pb S($)pb // 11 NQ_VAR1 1 5 3 10 1 1 2 11 1 1 // Fvpb Fv Fv Fv Fvpb Fvpb Fv Sv Fvpb Fvpb // 12 NQ_VAR2 E4 12 12 E4 E4 1 12 12 12 12 // - Sv Sv - - Fv Sv Sv Sv Sv // ------------+-------+-------+-------+-------+-------+-------+-------+-------+-------+----- // 13 FINAL final state: ok // 14 ERR1 error state 1: premature end of string // 15 ERR2 error state 2: missing apostrophe // 16 ERR3 error state 3: missing quote // 17 ERR4 error state 4: illegal character in varname // namespace Quote { // and more struct Mealy { State s; Action a; inline Mealy( State _s, Action _a ):s(_s),a(_a) { } inline Mealy():s(NQ_LWS),a(Noop) { } }; typedef Mealy MealyRow[C_MAX]; typedef MealyRow MealyMap[FINAL]; static MealyMap statemap = // sm[FINAL][C_MAX] = { { // state NQ_LWS Mealy( FINAL, Noop ), // state NQ_LWS, input EOS Mealy( DQ_MAIN, Noop ), // state NQ_LWS, input QUOTE Mealy( SQ_MAIN, Noop ), // state NQ_LWS, input APOS Mealy( NQ_DLLR, Noop ), // state NQ_LWS, input DOLLAR Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input LBRACE Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input RBRACE Mealy( NQ_BS, Noop ), // state NQ_LWS, input BSLASH Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input ALNUM Mealy( NQ_LWS, Noop ), // state NQ_LWS, input LWS Mealy( NQ_MAIN, Sb ) // state NQ_LWS, input * },{ // state NQ_MAIN Mealy( FINAL, Fb ), // state NQ_LWS, input EOS Mealy( DQ_MAIN, Noop ), // state NQ_LWS, input QUOTE Mealy( SQ_MAIN, Noop ), // state NQ_LWS, input APOS Mealy( NQ_DLLR, Noop ), // state NQ_LWS, input DOLLAR Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input LBRACE Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input RBRACE Mealy( NQ_BS, Noop ), // state NQ_LWS, input BSLASH Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input ALNUM Mealy( NQ_LWS, Fb ), // state NQ_LWS, input LWS Mealy( NQ_MAIN, Sb ) // state NQ_LWS, input * },{ // state NQ_BS Mealy( ERR1, Noop ), // state NQ_LWS, input EOS Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input QUOTE Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input APOS Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input DOLLAR Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input LBRACE Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input RBRACE Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input BSLASH Mealy( NQ_MAIN, Sb ), // state NQ_LWS, input ALNUM Mealy( NQ_MAIN, Fb ), // state NQ_LWS, input LWS Mealy( NQ_MAIN, Sb ) // state NQ_LWS, input * },{ // state 
NQ_DLLR Mealy( NQ_MAIN, SDpb ), // state NQ_DLLR, input EOS Mealy( DQ_MAIN, SD ), // state NQ_DLLR, input QUOTE Mealy( SQ_MAIN, SD ), // state NQ_DLLR, input APOS Mealy( NQ_DLLR, Sb ), // state NQ_DLLR, input DOLLAR Mealy( NQ_VAR2, Noop ), // state NQ_DLLR, input LBRACE Mealy( ERR4, Noop ), // state NQ_DLLR, input RBRACE Mealy( NQ_BS, SD ), // state NQ_DLLR, input BSLASH Mealy( NQ_VAR1, Sv ), // state NQ_DLLR, input ALNUM Mealy( NQ_MAIN, SDpb ), // state NQ_DLLR, input LWS Mealy( NQ_MAIN, SDpb ) // state NQ_DLLR, input * },{ // state NQ_VAR1 Mealy( NQ_MAIN, Fvpb ), // state NQ_VAR1, input EOS Mealy( DQ_MAIN, Fv ), // state NQ_VAR1, input QUOTE Mealy( SQ_MAIN, Fv ), // state NQ_VAR1, input APOS Mealy( NQ_DLLR, Fv ), // state NQ_VAR1, input DOLLAR Mealy( NQ_MAIN, Fvpb ), // state NQ_VAR1, input LBRACE Mealy( NQ_MAIN, Fvpb ), // state NQ_VAR1, input RBRACE Mealy( NQ_BS, Fv ), // state NQ_VAR1, input BSLASH Mealy( NQ_VAR1, Sv ), // state NQ_VAR1, input ALNUM Mealy( NQ_MAIN, Fvpb ), // state NQ_VAR1, input LWS Mealy( NQ_MAIN, Fvpb ) // state NQ_VAR1, input * },{ // state NQ_VAR2 Mealy( ERR4, Noop ), // state NQ_VAR2, input EOS Mealy( NQ_VAR2, Sv ), // state NQ_VAR2, input QUOTE Mealy( NQ_VAR2, Sv ), // state NQ_VAR2, input APOS Mealy( ERR4, Noop ), // state NQ_VAR2, input DOLLAR Mealy( ERR4, Noop ), // state NQ_VAR2, input LBRACE Mealy( NQ_MAIN, Fv ), // state NQ_VAR2, input RBRACE Mealy( NQ_VAR2, Sv ), // state NQ_VAR2, input BSLASH Mealy( NQ_VAR2, Sv ), // state NQ_VAR2, input ALNUM Mealy( NQ_VAR2, Sv ), // state NQ_VAR2, input LWS Mealy( NQ_VAR2, Sv ) // state NQ_VAR2, input * },{ // state DQ_MAIN Mealy( FINAL, Fb ), // state DQ_MAIN, input EOS Mealy( DQ_MAIN, Sb ), // state DQ_MAIN, input QUOTE Mealy( DQ_MAIN, Sb ), // state DQ_MAIN, input APOS Mealy( DQ_DLLR, Noop ), // state DQ_MAIN, input DOLLAR Mealy( DQ_MAIN, Sb ), // state DQ_MAIN, input LBRACE Mealy( DQ_MAIN, Sb ), // state DQ_MAIN, input RBRACE Mealy( DQ_BS, Noop ), // state DQ_MAIN, input BSLASH Mealy( DQ_MAIN, Sb ), // state DQ_MAIN, input ALNUM Mealy( DQ_MAIN, Sb ), // state DQ_MAIN, input LWS Mealy( DQ_MAIN, Sb ) // state DQ_MAIN, input * },{ // state DQ_BS Mealy( ERR1, Noop ), // state DQ_BS, input EOS Mealy( DQ_MAIN, Sb ), // state DQ_BS, input QUOTE Mealy( DQ_MAIN, Sb ), // state DQ_BS, input APOS Mealy( DQ_MAIN, Sb ), // state DQ_BS, input DOLLAR Mealy( DQ_MAIN, Sb ), // state DQ_BS, input LBRACE Mealy( DQ_MAIN, Sb ), // state DQ_BS, input RBRACE Mealy( DQ_MAIN, Sb ), // state DQ_BS, input BSLASH Mealy( DQ_MAIN, Xb ), // state DQ_BS, input ALNUM Mealy( DQ_MAIN, Sb ), // state DQ_BS, input LWS Mealy( DQ_MAIN, Sb ) // state DQ_BS, input * },{ // state DQ_DLLR Mealy( ERR3, Noop ), // state DQ_DLLR, input EOS Mealy( ERR4, Noop ), // state DQ_DLLR, input QUOTE Mealy( ERR4, Noop ), // state DQ_DLLR, input APOS Mealy( ERR4, Noop ), // state DQ_DLLR, input DOLLAR Mealy( DQ_VAR2, Noop ), // state DQ_DLLR, input LBRACE Mealy( ERR4, Noop ), // state DQ_DLLR, input RBRACE Mealy( DQ_BS, SD ), // state DQ_DLLR, input BSLASH Mealy( DQ_VAR1, Sv ), // state DQ_DLLR, input ALNUM Mealy( DQ_MAIN, SD ), // state DQ_DLLR, input LWS Mealy( ERR4, Noop ) // state DQ_DLLR, input * },{ // state DQ_VAR1 Mealy( ERR3, Fvpb ), // state DQ_VAR1, input EOS Mealy( DQ_MAIN, Fvpb ), // state DQ_VAR1, input QUOTE Mealy( DQ_MAIN, Fvpb ), // state DQ_VAR1, input APOS Mealy( DQ_DLLR, Fv ), // state DQ_VAR1, input DOLLAR Mealy( DQ_MAIN, Fvpb ), // state DQ_VAR1, input LBRACE Mealy( DQ_MAIN, Fvpb ), // state DQ_VAR1, input RBRACE Mealy( DQ_BS, Fv ), // state 
DQ_VAR1, input BSLASH Mealy( DQ_VAR1, Sv ), // state DQ_VAR1, input ALNUM Mealy( DQ_MAIN, Fvpb ), // state DQ_VAR1, input LWS Mealy( DQ_MAIN, Fvpb ) // state DQ_VAR1, input * },{ // state DQ_VAR2 Mealy( ERR1, Noop ), // state DQ_VAR2, input EOS Mealy( DQ_VAR2, Sv ), // state DQ_VAR2, input QUOTE Mealy( DQ_VAR2, Sv ), // state DQ_VAR2, input APOS Mealy( ERR4, Noop ), // state DQ_VAR2, input DOLLAR Mealy( ERR4, Noop ), // state DQ_VAR2, input LBRACE Mealy( DQ_MAIN, Fv ), // state DQ_VAR2, input RBRACE Mealy( DQ_VAR2, Sv ), // state DQ_VAR2, input BSLASH Mealy( DQ_VAR2, Sv ), // state DQ_VAR2, input ALNUM Mealy( DQ_VAR2, Sv ), // state DQ_VAR2, input LWS Mealy( DQ_VAR2, Sv ) // state DQ_VAR2, input * },{ // state SQ_MAIN Mealy( FINAL, Fb ), // state SQ_MAIN, input EOS Mealy( SQ_MAIN, Sb ), // state SQ_MAIN, input QUOTE Mealy( SQ_MAIN, Sb ), // state SQ_MAIN, input APOS Mealy( SQ_MAIN, Sb ), // state SQ_MAIN, input DOLLAR Mealy( SQ_MAIN, Sb ), // state SQ_MAIN, input LBRACE Mealy( SQ_MAIN, Sb ), // state SQ_MAIN, input RBRACE Mealy( SQ_BS, Noop ), // state SQ_MAIN, input BSLASH Mealy( SQ_MAIN, Sb ), // state SQ_MAIN, input ALNUM Mealy( SQ_MAIN, Sb ), // state SQ_MAIN, input LWS Mealy( SQ_MAIN, Sb ) // state SQ_MAIN, input * },{ // state SQ_BS Mealy( ERR1, Noop ), // state SQ_BS, input EOS Mealy( SQ_MAIN, Sb ), // state SQ_BS, input QUOTE Mealy( SQ_MAIN, Sb ), // state SQ_BS, input APOS Mealy( SQ_MAIN, Sb ), // state SQ_BS, input DOLLAR Mealy( SQ_MAIN, Sb ), // state SQ_BS, input LBRACE Mealy( SQ_MAIN, Sb ), // state SQ_BS, input RBRACE Mealy( SQ_MAIN, Sb ), // state SQ_BS, input BSLASH Mealy( SQ_MAIN, Sb ), // state SQ_BS, input ALNUM Mealy( SQ_MAIN, Sb ), // state SQ_BS, input LWS Mealy( SQ_MAIN, Sb ) // state SQ_BS, input * } }; static const char* translation = "abenrtv"; static const char translationmap[] = "\a\b\033\n\r\t\v"; CharClass xlate( int input ) // purpose: translates an input character into its character class // paramtr: input (IN): input character // returns: the character class { switch ( input ) { case EOF: case 0: return EOS; case '"': return QUOTE; case '\'': return APOS; case '$': return DOLLAR; case '{': return LBRACE; case '}': return RBRACE; case '\\': return BSLASH; case '_': return ALNUM; default: return ( isalnum(input) ? ALNUM : ( isspace(input) ? LWS : ELSE ) ); } } State parse( const std::string& input, std::string& output, State state ) // purpose: parse a single or doubly-quoted string into a single string // paramtr: input (IN): The raw string without outer quotes // output (OUT): The cooked string, one level removed // state (IN): start start: 0 -> squotes, 2 -> dquotes // returns: the final state after being done { std::string buffer; std::string varname; const char* x = 0; const char* s = input.c_str(); while ( state < FINAL ) { Mealy s_a( statemap[state][xlate(*s)] ); // (So,A) := F(Si,I) switch ( s_a.a ) { case Noop: // do nothing break; case Sb: // expand regular buffer buffer += *s; break; case Xb: // expand regular buffer, translate \n and friends x = strchr( translation, *s ); buffer += ( x == 0 ? 
*s : translationmap[x-translation] ); break; case Fb: // finalize buffer output = buffer; buffer.clear(); break; case Sv: // store varname varname += *s; break; case Fvpb: // put back and do Fv --s; // FALL THROUGH case Fv: // resolve varname if ( (x = getenv( varname.c_str() )) == 0 ) { // variable not found: keep original string buffer += '$'; if ( state == DQ_VAR2 ) buffer += '{'; buffer.append( varname ); if ( state == DQ_VAR2 ) buffer += '}'; } else { // resolve variable buffer.append( x ); } varname.clear(); break; case SDpb: // put back and store dollar --s; // FALL THROUGH case SD: // put back a dollar sign buffer += "$"; break; case A_MAX: // illegal action break; } // advance to next state s++; state = s_a.s; // new state } return state; } State parse( const std::string& input, StringList& output, State state ) // purpose: parse a single or doubly-quoted string into an argv[] list // paramtr: input (IN): The raw string without outer quotes // output (OUT): The cooked ws-split argv, one level removed // state (IN): start start: 0 -> squotes, 2 -> dquotes // returns: the final state after being done { std::string buffer; std::string varname; // update local copy of Mealy map for argv parsing MealyMap m; memcpy( m, statemap, sizeof(MealyMap) ); m[SQ_MAIN][EOS] = Mealy( ERR2, Noop ); m[SQ_MAIN][APOS] = Mealy( NQ_MAIN, Noop ); m[DQ_MAIN][EOS] = Mealy( ERR3, Noop ); m[DQ_MAIN][QUOTE] = Mealy( NQ_MAIN, Noop ); m[DQ_DLLR][QUOTE] = Mealy( NQ_MAIN, Noop ); m[DQ_VAR1][QUOTE] = Mealy( NQ_MAIN, Fv ); const char* x = 0; const char* s = input.c_str(); while ( state < FINAL ) { Mealy s_a( m[state][xlate(*s)] ); // (So,A) := F(Si,I) switch ( s_a.a ) { case Noop: // do nothing break; case Sb: // expand regular buffer buffer += *s; break; case Xb: // expand regular buffer, translate \n and friends x = strchr( translation, *s ); buffer += ( x == 0 ? *s : translationmap[x-translation] ); break; case Fb: // finalize buffer output.push_back(buffer); buffer.clear(); break; case Sv: // store varname varname += *s; break; case Fvpb: // put back and do Fv --s; // FALL THROUGH case Fv: // resolve varname if ( (x = getenv( varname.c_str() )) == 0 ) { // variable not found: keep original string buffer += '$'; if ( state == DQ_VAR2 ) buffer += '{'; buffer.append( varname ); if ( state == DQ_VAR2 ) buffer += '}'; } else { // resolve variable buffer.append( x ); } varname.clear(); break; case SDpb: // put back and store dollar --s; // FALL THROUGH case SD: // put back a dollar sign buffer += "$"; break; case A_MAX: // illegal action break; } // advance to next state s++; state = s_a.s; // new state } return state; } // done with namespace Quote } // namespace Quote pegasus-wms_4.0.1+dfsg/src/tools/k.2/make.SunOS0000755000175000017500000000033011757531137020222 0ustar ryngerynge#!/bin/sh gmake distclean rm -rf SunWS_cache gmake EXTRACFLAGS='$(V7FLAGS)' k.2 mv k.2 k.2.sparcv7 gmake clean rm -rf SunWS_cache gmake EXTRACFLAGS='$(V9FLAGS)' k.2 mv k.2 k.2.sparcv9 gmake clean rm -rf SunWS_cache pegasus-wms_4.0.1+dfsg/src/tools/k.2/getif.cc0000644000175000017500000001661011757531137017766 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #include "getif.hh" #include #include #include #include #include #include #include #include #include #include #include #ifdef HAS_SYS_SOCKIO #include #endif static const char* RCS_ID = "$Id: getif.cc 50 2007-05-19 00:48:32Z gmehta $"; const unsigned long PrimaryInterface::vpn_netmask[4] = { inet_addr("255.0.0.0"), /* loopbackmask */ inet_addr("255.0.0.0"), /* class A mask */ inet_addr("255.240.0.0"), /* class B VPN mask */ inet_addr("255.255.0.0") /* class C VPN mask */ }; const unsigned long PrimaryInterface::vpn_network[4] = { inet_addr("127.0.0.0"), /* loopbacknet */ inet_addr("10.0.0.0"), /* class A VPN net */ inet_addr("172.16.0.0"), /* class B VPN nets */ inet_addr("192.168.0.0") /* class C VPN nets */ }; bool PrimaryInterface::isVPN( const unsigned long host ) // purpose: Determines if an IPv4 address is from a VPN // paramtr: host (IN): network byte ordered IPv4 host address // returns: true, if the host is in a VPN address space { return ( (host & vpn_netmask[1]) == vpn_network[1] || (host & vpn_netmask[2]) == vpn_network[2] || (host & vpn_netmask[3]) == vpn_network[3] ); } static int debug( char* fmt, ... ) { int result; va_list ap; char buffer[4096]; int saverr = errno; va_start( ap, fmt ); vsnprintf( buffer, sizeof(buffer), fmt, ap ); va_end( ap ); result = write( STDERR_FILENO, buffer, strlen(buffer) ); errno = saverr; return result; } struct ifreq* PrimaryInterface::primary_interface( ) // purpose: obtain the primary interface information // returns: a structure containing the if info, or NULL for error { #if defined(SIOCGLIFNUM) struct lifnum ifnr; #endif struct sockaddr_in sa; struct ifconf ifc; struct ifreq result, primary; struct ifreq* ifrcopy = 0; char *ptr, *buf = 0; int lastlen, len, sockfd, flag = 0; /* * phase 0: init */ memset( &result, 0, sizeof(result) ); memset( &primary, 0, sizeof(primary) ); /* create a socket */ if ( (sockfd = socket( AF_INET, SOCK_DGRAM, 0 )) == -1 ) { debug( "ERROR: socket DGRAM: %d: %s\n", errno, strerror(errno) ); return ifrcopy; } /* * phase 1: guestimate size of buffer necessary to contain all interface * information records. 
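 *
 * (Technique note, added commentary) Without SIOCGLIFNUM, the loop below
 * guesses a buffer size and doubles it until ioctl(SIOCGIFCONF) reports
 * the same length twice in a row: older kernels silently truncate the
 * result instead of failing, so a stable reported length is the only
 * reliable "buffer was large enough" signal.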
*/ #if defined(SIOCGLIFNUM) /* API exists to determine the correct buffer size */ memset( &ifnr, 0, sizeof(ifnr) ); ifnr.lifn_family = AF_INET; if ( ioctl( sockfd, SIOCGLIFNUM, &ifnr ) < 0 ) { debug( "ERROR: ioctl SIOCGLIFNUM: %d: %s\n", errno, strerror(errno) ); if ( errno != EINVAL ) { close(sockfd); return ifrcopy; } } else { len = lastlen = ifnr.lifn_count * sizeof(struct ifreq); } #else /* does not have SIOCGLIFNUM */ /* determine by repetitive guessing a buffer size */ lastlen = len = 4 * sizeof(struct ifreq); /* 1st guesstimate */ #endif /* POST CONDITION: some buffer size determined */ /* FIXME: Missing upper bound */ for (;;) { /* guestimate correct buffer length */ buf = (char*) malloc(len); memset( buf, 0, len ); ifc.ifc_len = len; ifc.ifc_buf = buf; if ( ioctl( sockfd, SIOCGIFCONF, &ifc ) < 0 ) { debug( "WARN: ioctl SIOCGIFCONF: %d: %s\n", errno, strerror(errno) ); if ( errno != EINVAL || lastlen != 0 ) { close(sockfd); return ifrcopy; } } else { if ( ifc.ifc_len == lastlen ) break; /* success */ lastlen = ifc.ifc_len; } len <<= 1; free((void*) buf); } /* POST CONDITION: Now the buffer contains list of all interfaces */ /* * phase 2: walk interface list until a good interface is reached */ /* Notice: recycle meaning of "len" in here */ for ( ptr = buf; ptr < buf + ifc.ifc_len; ) { struct ifreq* ifr = (struct ifreq*) ptr; len = sizeof(*ifr); ptr += len; /* interested in IPv4 interfaces only */ if ( ifr->ifr_addr.sa_family != AF_INET ) continue; memcpy( &sa, &(ifr->ifr_addr), sizeof(struct sockaddr_in) ); /* Do not use localhost aka loopback interfaces. While loopback * interfaces traditionally start with "lo", this is not mandatory. * It is safer to check that the address is in the 127.0.0.0 class A * network. */ if ( (sa.sin_addr.s_addr & vpn_netmask[0]) == vpn_network[0] ) continue; /* prime candidate - check, if interface is UP */ result = *ifr; ioctl( sockfd, SIOCGIFFLAGS, &result ); /* interface is up - our work is done. Or is it? */ if ( (result.ifr_flags & IFF_UP) ) { if ( ! flag ) { /* remember first found primary interface */ primary = result; flag = 1; } /* check for VPNs */ if ( ! isVPN(sa.sin_addr.s_addr) ) { flag = 2; break; } } } /* check for loop exceeded - if yes, fall back on first primary */ if ( flag == 1 && ptr >= buf + ifc.ifc_len ) result = primary; /* clean up */ free((void*) buf); close(sockfd); /* create a freshly allocated copy */ ifrcopy = (struct ifreq*) malloc( sizeof(struct ifreq) ); memcpy( ifrcopy, &result, sizeof(struct ifreq) ); return ifrcopy; } PrimaryInterface::PrimaryInterface() // purpose: protected singleton ctor :m_interface( primary_interface() ) { // empty } PrimaryInterface::~PrimaryInterface() // purpose: protected singleton destructor { if ( m_interface ) free((void*) m_interface); } PrimaryInterface* PrimaryInterface::m_instance = 0; const PrimaryInterface& PrimaryInterface::instance() // purpose: Obtains access to the singleton // returns: The single instance of the Singleton. 
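//
// Usage sketch (illustrative only, not from the sources): callers grab
// the singleton once and ask for the dotted quad, e.g.
//   std::string ip = PrimaryInterface::instance().whoami();
// The const reference returned by instance() permits only the read-only
// whoami() accessors.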
{ if ( m_instance == 0 ) m_instance = new PrimaryInterface(); return *m_instance; } std::string PrimaryInterface::whoami() const // purpose: Obtains the primary interface's IPv4 as dotted quad // returns: The IPv4 as dotted quad - or an empty string on failure { std::string result; if ( m_interface ) { struct sockaddr_in sa; // type-ignoring copy memcpy( &sa, &(m_interface->ifr_addr), sizeof(struct sockaddr) ); result = inet_ntoa(sa.sin_addr); } else { #if 0 result = "xsi:null"; #else result = "0.0.0.0"; #endif } return result; } const char* PrimaryInterface::whoami( char* area, size_t size ) const // purpose: Obtains the primary interface's IPv4 as dotted quad // paramtr: area (OUT): The IPv4 as dotted quad - sizeof >= 16! // returns: area { memset( area, 0, size ); if ( m_interface ) { struct sockaddr_in sa; // type-ignoring copy memcpy( &sa, &(m_interface->ifr_addr), sizeof(struct sockaddr) ); // add to result return strncpy( area, inet_ntoa(sa.sin_addr), size ); } else { #if 0 /* future lab */ return strncpy( area, "xsi:null", size ); #else /* for now */ return strncpy( area, "0.0.0.0", size ); #endif } } pegasus-wms_4.0.1+dfsg/src/tools/k.2/try/0000755000175000017500000000000011757531667017203 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/tools/k.2/try/reflect.c0000644000175000017500000000051711757531137020766 0ustar ryngerynge#include int main( int argc, char* argv[] ) { int i; if ( argc > 1 ) fputs( "r: ", stdout ); for ( i=1; i1 ) fputc( ' ', stdout ); fputs( argv[i], stdout ); } if ( argc > 1 ) fputs( "\n", stdout ); for ( i=1; i/dev/null) PERL = perl LOADLIBES = -lm ifndef ${prefix} prefix = $(PEGASUS_HOME) endif NROFF = groff -mandoc TEXT = -Tlatin1 HTML = -Thtml OPENSSL_BASEDIR = /usr/share/ssl OPENSSL_INCLUDE = -I$(OPENSSL_BASEDIR)/include OPENSSL_LIBRARY = -L$(OPENSSL_BASEDIR)/lib -lcrypto #ZLIB_BASEDIR = /usr #ZLIB_INCLUDE = -I$(ZLIB_BASDIR)/include #ZLIB_LIBRARY = -L$(ZLIB_BASEDIR)/lib -lz ifeq (SUNOS,${SYSTEM}) ifeq (5,${MAJOR}) # use these for the SUN CC compiler CC = cc CXX = CC LD = $(CXX) ## SPARCv7 V7FLAGS = -xtarget=generic V9FLAGS = -xtarget=ultra -xarch=v9 CFLAGS = -dalign -ftrap=%none -fsimple -xlibmil $(EXTRACFLAGS) CFLAGS := -DSOLARIS $(CFLAGS) -xO2 -D__EXTENSIONS__=1 #CFXXLAGS += -dalign -ftrap=%none -fsimple -xlibmil $(EXTRACFLAGS) CXXFLAGS += -dalign $(EXTRACFLAGS) -xO2 -D__EXTENSIONS__=1 LOADLIBES += -lnsl -lsocket INSTALL = /usr/ucb/install OPENSSL_BASEDIR = /usr/local/ssl OPENSSL_INCLUDE = -I$(OPENSSL_BASEDIR)/include OPENSSL_LIBRARY = -Bstatic -L$(OPENSSL_BASEDIR)/lib -lcrypto -Bdynamic else # old Solaris 1 not supported! 
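# (note) Each platform block above and below only overrides the compiler,
# flags, and OpenSSL/zlib locations; the generic pattern rules in section
# [3] further down consume whatever these blocks leave behind.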
endif endif ifeq (LINUX,${SYSTEM}) ifeq (ia64,${MARCH}) # TeraGrid OPENSSL_BASEDIR = /usr OPENSSL_INCLUDE = -I$(OPENSSL_BASEDIR)/include OPENSSL_LIBRARY = -Wl,-Bstatic -L$(OPENSSL_BASEDIR)/lib -lcrypto -Wl,-Bdynamic CXXFLAGS += -Wall -O2 -DHAS_RUSAGE_WHO -DHAS_SETENV -DHAS_UNSETENV -frtti CXXFLAGS += -fexceptions -ggdb LOADLIBES := -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic $(LOADLIBES) LD = $(CC) else ifeq (x86_64,${MARCH}) OPENSSL_BASEDIR = /usr OPENSSL_INCLUDE = -I$(OPENSSL_BASEDIR)/include OPENSSL_LIBRARY = -Wl,-Bstatic -L$(OPENSSL_BASEDIR)/lib64 -lcrypto -Wl,-Bdynamic CXXFLAGS += -Wall -O2 -m64 -DHAS_RUSAGE_WHO -DHAS_SETENV CXXFLAGS += -DHAS_UNSETENV -frtti -fexceptions -ggdb LOADLIBES := -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic $(LOADLIBES) LD = $(CC) -m64 else OPENSSL_LIBRARY = -Wl,-Bstatic -L$(OPENSSL_BASEDIR)/lib -lcrypto -Wl,-Bdynamic CXXFLAGS += -Wall -O2 -march=$(MARCH) -DHAS_RUSAGE_WHO -DHAS_SETENV CXXFLAGS += -DHAS_UNSETENV -frtti -fexceptions -ggdb LOADLIBES := -Wl,-Bstatic -lstdc++ -Wl,-Bdynamic $(LOADLIBES) LD = gcc #EXTRA_LDFLAGS += -static endif endif endif ifeq (IRIX64,${SYSTEM}) # The regular 64bit Irix stuff is just too slow, use n32! SYSTEM := IRIX endif ifeq (IRIX,${SYSTEM}) CC = cc -n32 -mips3 -r4000 LD = $(CXX) OPT_NORM = -O3 -IPA -LNO:opt=1 endif ifeq (AIX,${SYSTEM}) #CC = xlc #CXX = xlC CXXFLAGS += -DHAS_SETENV OPENSSL_BASEDIR = /usr/share/apps/openssl OPENSSL_INCLUDE = -I$(OPENSSL_BASEDIR)/include OPENSSL_LIBRARY = -Bstatic -L$(OPENSSL_BASEDIR)/lib -lcrypto -Bdynamic ZLIB_BASEDIR = /usr/share/apps/zlib ZLIB_INCLUDE = -I$(ZLIB_BASDIR)/include ZLIB_LIBRARY = -L$(ZLIB_BASEDIR)/lib -lz endif ifeq (DARWIN,${SYSTEM}) CXXFLAGS += -DHAS_UNSETENV -DHAS_SETENV OPENSSL_BASEDIR = /usr OPENSSL_INCLUDE = -I$(OPENSSL_BASEDIR)/include OPENSSL_LIBRARY = -lcrypto -Bdynamic ZLIB_BASEDIR = /usr ZLIB_INCLUDE = -I$(ZLIB_BASDIR)/include ZLIB_LIBRARY = -lz endif # # === [3] ======================================================= rules section # There is no need to change things below this line. CXXFLAGS += -I. 
-D${SYSTEM} -DMAJOR=${MAJOR} # -DSOCKLEN=${SOCKLEN} ifneq (,${SOCKIO}) CXXFLAGS += -DHAS_SYS_SOCKIO=1 endif LDFLAGS += $(EXTRA_LDFLAGS) #CXXFLAGS += $(ZLIB_INCLUDE) #LOADLIBES += $(ZLIB_LIBRARY) ifeq (LINUX,${SYSTEM}) CXXFLAGS += $(OPENSSL_INCLUDE) #LOADLIBES := -Wl,-Bstatic $(OPENSSL_LIBRARY) -Wl,-Bdynamic $(LOADLIBES) LOADLIBES := $(OPENSSL_LIBRARY) $(LOADLIBES) else CXXFLAGS += $(OPENSSL_INCLUDE) LOADLIBES += $(OPENSSL_LIBRARY) endif GENDOC = k.2.html k.2.txt k.2.ps CXXOBJS = quote.o scan.o scan.tab.o appinfo.o jobinfo.o stagejob.o \ mysignal.o getif.o statinfo.o useinfo.o uname.o xml.o time.o \ null.o #CXXFLAGS += -DUSE_PARSE=1 -DUSE_SIGCHILD=1 # -ggdb #CXXFLAGS += -ggdb %.html : %.1 ; $(NROFF) $(HTML) $< > $@ %.ps : %.1 ; $(NROFF) $< > $@ %.txt : %.1 ; $(NROFF) $(TEXT) $< > $@ %.o : %.c $(CC) $(CFLAGS) -c $< -o $@ %.o : %.cc $(CXX) $(CXXFLAGS) -c $< -o $@ % : %.o $(CXX) $(CXXFLAGS) $(LDFLAGS) $^ -o $@ $(LOADLIBES) all: k.2 .PHONY: miniclean clean realclean ifeq (g++,${CXX}) depend.mak: *.cc $(RM) depend.mak ; touch depend.mak $(CXX) $(OPENSSL_INCLUDE) -MM -E *.cc >> depend.mak endif k.2: k.2.o $(CXXOBJS) $(LD) $(CXXFLAGS) $(LDFLAGS) $^ -o $@ $(LOADLIBES) justparse: justparse.o scan.o # scan.tab.o $(CXX) $(CXXFLAGS) $(LDFLAGS) $^ -o $@ $(LOADLIBES) scan.c: scan.l scan.tab.h $(FLEX) -t $< > $@ scan.o: scan.c scan.tab.h scan.y $(CXX) $(CXXFLAGS) $(LEXCFLG) -c $< -o $@ scan.tab.o: scan.tab.c scan.tab.h $(CXX) $(CXXFLAGS) $(YACCCFLG) -c $< -o $@ scan.tab.c scan.tab.h: scan.y $(BISON) -d -o scan.tab.c $< $(PERL) -i fixbison.pl scan.tab.c install: k.2 $(INSTALL) -m 0755 k.2 $(prefix)/bin install.doc: $(GENDOC) $(INSTALL) -m 0644 $(GENDOC) $(prefix)/man install.man: k.2.1 $(INSTALL) -m 0644 k.2.1 $(prefix)/man/man1 install.all: install install.man install.doc gendoc: $(GENDOC) $(GENDOC): k.2.1 miniclean: $(RM) scan.tab.[cho] scan.[co] clean: miniclean $(RM) *.o core $(GENDOC) distclean: clean $(RM) k.2 justparse -include depend.mak pegasus-wms_4.0.1+dfsg/src/tools/k.2/fixbison.pl0000755000175000017500000000053011757531137020534 0ustar ryngerynge#!/usr/bin/env perl # # fix bug in bison 1.875 generated output # require 5.005; use strict; my $state; while ( <> ) { if ( ! $state && m{Suppress GCC warning that yyerrlab1 is unused when no action} ) { $state=1; $_ = "#if 0\n$_"; } elsif ( $state == 1 && m'#endif' ) { undef $state; $_ .= "#endif\n"; } print; } pegasus-wms_4.0.1+dfsg/src/tools/k.2/useinfo.hh0000644000175000017500000000656411757531137020361 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _USEINFO_HH #define _USEINFO_HH #include #include #include #include #include "xml.hh" #ifndef HAS_RUSAGE_WHO #define __rusage_who int #endif #include "null.hh" class UseInfo : public XML { // This class encapsulated a rusage record. The default copy ctor and // assignment operator are safe to employ. 
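//
// Usage sketch (illustrative, not from the sources): snapshot the current
// process and render it as XML --
//   UseInfo self( "usage", RUSAGE_SELF );
//   std::cout << self.toXML( 2 );
// Records also accumulate via operator+=, e.g. to fold the usage of child
// processes into a running total.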
public: UseInfo( const char* tag, __rusage_who who ); // purpose: construction from getrusage call // paramtr: tag (IN): name of the element // who (IN): RUSAGE_SELF or RUSAGE_CHILDREN UseInfo( const char* tag, const struct rusage* use ); // purpose: ctor // paramtr: tag (IN): name of the element // use (IN): Usage record to initialize to virtual std::string toXML( int indent = 0, const char* nspace = 0 ) const; // purpose: XML format a rusage record. // paramtr: indent (IN): indentation level of tag // nspace (IN): If defined, namespace prefix before element // returns: string containing the element data virtual std::ostream& toXML( std::ostream& s, int indent = 0, const char* nspace = 0 ) const; // purpose: XML format a rusage info record onto a given stream // paramtr: s (IO): stream to put information into // indent (IN): indentation level of tag // nspace (IN): If defined, namespace prefix before element // returns: s UseInfo& operator+=( const UseInfo& summand ); // purpose: add another resource usage to the current record // paramtr: summand (IN): usage record to add // returns: current object, modified to be sum += summand UseInfo& operator+=( const struct rusage* summand ); // purpose: add another resource usage to the current record // paramtr: summand (IN): usage record to add // returns: current object, modified to be sum += summand virtual ~UseInfo() { }; // purpose: dtor private: std::string m_tag; struct rusage m_use; // render inaccessible UseInfo(); }; struct timeval& operator+=( struct timeval& sum, const struct timeval& summand ); // purpose: add one timeval structure to another w/ normalization // paramtr: sum (IO): first operand and result // summand (IN): second operand // returns: sum struct timeval operator+( const struct timeval& a, const struct timeval& b ); // purpose: adds two timeval structures // paramtr: a (IN): first operand (summand) // b (IN): second operand (summand) // returns: normalized sum = a + b UseInfo operator+( const UseInfo& a, const UseInfo& b ); // purpose: Add two useinfo records // paramtr: a (IN): first operand (summand) // b (IN): second operand (summand) // returns: sum = a + b #endif // _USEINFO_HH pegasus-wms_4.0.1+dfsg/src/tools/k.2/scan.y0000755000175000017500000006432311757531137017506 0ustar ryngerynge%{ /* This may look like -*- C++ -*- code, but it is really bison * * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. * * $Id: scan.y 50 2007-05-19 00:48:32Z gmehta $ * * Author: Jens-S. Vckler * File: scan.y * 2001-01-15 * * $Log: scan.y,v $ * Revision 1.11 2004/07/22 21:17:21 griphyn * Make the setenv() function available to statinfo.o. * * Revision 1.10 2004/06/15 20:42:36 griphyn * Added support for AIX. * * Revision 1.9 2004/06/08 23:51:06 griphyn * Fixed bug in stdin handling for statinfo StatFile, where the dup2 * command closed the FD even if it was identical to the one we dup2'ed * to. This led to the wrong behavior (bad filehandle) later. 
* * Revision 1.8 2004/06/07 22:17:19 griphyn * Added "setup" feature to maintain symmetrie to "cleanup" feature. * Added "here" option for stdin configuration, which permits a string * to be dumped into temporary file from the configuration, to be used * as stdin for jobs. * * Revision 1.7 2004/02/23 20:21:53 griphyn * Added new GTPL license schema -- much shorter :-) * * Revision 1.6 2004/02/16 23:06:58 griphyn * Updated TR argument from string to list of strings. This will enable the TR * to capture the compound TR hierarchy. * * Revision 1.5 2004/02/11 22:36:28 griphyn * new parser. * * Revision 1.3 2004/02/04 22:16:44 griphyn * Made TFN argument a string list. * * Revision 1.2 2004/02/04 21:29:53 griphyn * Made the mapping from SFN to TFN a multimap. * Introduced a format string for the file format of the staging file. * * Revision 1.1 2004/02/03 23:13:17 griphyn * Kickstart version 2. * */ #include #include #include #include #include #include #include #include #include #include "shared.hh" #include "quote.hh" #include "xml.hh" #include "useinfo.hh" #include "statinfo.hh" #include "jobinfo.hh" #include "appinfo.hh" extern AppInfo* app; // make globally visible for bison unsigned long lineno = 1; // make available for flex extern void yyerror( const char* ); extern void warning( const char*, const char* ); extern int yylex(); extern int yy_push_file( const char* ); static int debug( const char* fmt, ... ); #ifndef HAS_SETENV // Please do export for statinfo.o int setenv(const char *name, const char *value, int overwrite); #endif // HAS_SETENV #ifndef HAS_UNSETENV static int unsetenv(const char *name); #endif // HAS_UNSETENV class setup_exception { // exception throw by invalid jobs in setupJob or setupStage public: inline setup_exception( const std::string& a ) :m_msg(a) { } inline setup_exception( const std::string& a, const std::string& b ) :m_msg(a + " " + b) { } inline ~setup_exception() { } inline std::string getMessage() const { return m_msg; } private: std::string m_msg; }; typedef void (AppInfo::* AIJobMember)( JobInfo* ); static void setupJob( AIJobMember add, const char* jobtype, StringList* args ); // purpose: Add a pre, post, cleanup or main job // paramtr: add (IN): pointer to AppInfo::set(Pre|Post|Main|Clean)Job member // jobtype (IN): symbolic name for the job class // args (IO): commandline argument string // except.: setup_exception for invalid jobs // warning: cleans up args after use typedef void (AppInfo::* AIStageMember)( StageJob* ); static void setupStage( AIStageMember add, const char* jobtype, std::string* fmt, StringList* args ); // purpose: Add a stage-in or stage-out job // paramtr: add (IN): pointer to AppInfo::setStage(In|Out) member // jobtype (IN): symbolic name for the job class // fmt (IO): format for the temporary file // args (IO): commandline argument string // except.: setup_exception for invalid jobs // warning: cleans up fmt and args after use typedef void (AppInfo::* AIStdioMember)( StatInfo* ); static void setupStdin( AIStdioMember add, int fd, const char* name, StringSet* options, std::string* fn ); // purpose: set stdin handle for sub-processes // paramtr: add (IN): pointer to AppInfo::setStdin member // fd (IN): STDIN_FILENO // name (IN): stdin // options (IO): option string list for stdin // fn (IO): file description to connect stdio to // except.: null_pointer exception for certain file operations // warning: cleans up options and fn after use static void setupStdio( AIStdioMember add, int fd, const char* name, StringSet* options, 
std::string* fn ); // purpose: set stdio handle for sub-processes // paramtr: add (IN): pointer to AppInfo::setStd(in|out|err) member // fd (IN): value of STD(IN|OUT|ERR)_FILENO // name (IN): one of stdin, stdout, or stderr // options (IO): value of STD(IN|OUT|ERR)_FILENO // fn (IO): file description to connect stdio to // except.: null_pointer exception for certain file operations // warning: cleans up options and fn after use typedef void (AppInfo::* AIIOMember)( const std::string&, const std::string&, bool ); typedef void (AppInfo::* AIMIOMember)( const std::string&, const std::string& ); static void setupFile( AIIOMember add1, AIMIOMember add2, StringSet* options, std::string* lfn, std::string* sfn, StringList* tfn ); // purpose: register input and output files // paramtr: add1 (IN): pointer to AppInfo::add(In|Out)putSFN member // add2 (IN): pointer to AppInfo::add(In|Out)putTFN member // options (IO): value of STD(IN|OUT|ERR)_FILENO // lfn (IO): logical filename pointer // sfn (IO): storage filename pointer // tfn (IO): transfer filename list pointer // warning: cleans up after use %} /* reentrant parser */ /* %pure-parser */ /* token value union */ %union { char* string; std::string* cxxstr; StringSet* set; StringList* list; } /* type of terminal symbols */ %token TK_IDENT TK_QSTR1 TK_QSTR2 /* typeless terminal symbols: just token class, no token value */ %token TK_EOC %token TK_INCLUDE TK_SET TK_XMLNS %token TK_SETUP TK_PRE TK_MAIN TK_POST TK_CLEANUP %token TK_TR1 TK_TR2 TK_DV1 TK_DV2 %token TK_INPUT TK_OUTPUT %token TK_STAGEIN TK_STAGEOUT %token TK_CHDIR TK_SITE TK_FEEDBACK %token TK_STDIN TK_STDOUT TK_STDERR /* type of non-terminals */ %type identifier reserved_word %type string %type options %type stringlist argvstr /* %type command */ %% configuration : /* empty rule */ { // say hi char t[64]; time_t now = time(0); strftime( t, sizeof(t), "%Y-%m-%dT%H:%M:%S kickstart is running\n", localtime(&now) ); debug( t ); } | configuration error TK_EOC { // resynchronize after error yyerrok; fprintf( stderr, "reset to line %lu after error.\n", lineno ); } | configuration command ; string : TK_QSTR1 { // dquote std::string result; if ( Quote::parse( $1, result, Quote::DQ_MAIN ) != Quote::FINAL ) { // parse error in string yyerror( "string does not parse" ); YYERROR; } else { $$ = new std::string(result); } if ( $1 ) free( static_cast($1) ); } | TK_QSTR2 { // squote std::string result; if ( Quote::parse( $1, result, Quote::SQ_MAIN ) != Quote::FINAL ) { yyerror( "string does not parse" ); YYERROR; } else { $$ = new std::string(result); } if ( $1 ) free( static_cast($1) ); } ; argvstr : TK_QSTR1 { // dquote StringList result; if ( Quote::parse( $1, result, Quote::NQ_LWS ) != Quote::FINAL ) { yyerror( "string does not parse" ); YYERROR; } else { $$ = new StringList(result); } if ( $1 ) free( static_cast($1) ); } | TK_QSTR2 { // squote StringList result; if ( Quote::parse( $1, result, Quote::NQ_LWS ) != Quote::FINAL ) { yyerror( "string does not parse" ); YYERROR; } else { $$ = new StringList(result); } if ( $1 ) free( static_cast($1) ); } ; stringlist : /* empty list */ { // start -- create new, empty list $$ = new StringList(); } | stringlist string { // cont'd -- add element to list if ( $2 ) { $1->push_back( *$2 ); delete $2; } $$ = $1; } ; options : /* empty list */ { // start -- create new, empty list $$ = new StringSet(); } | options TK_IDENT /* identifier */ { if ( $2 && *$2 ) { // downcase for ( const char* s=$2; *s; s++ ) *( const_cast(s) ) = tolower(*s); // insert $1->insert( 
std::string($2) ); free((void*) $2); } $$ = $1; } ; reserved_word: TK_INCLUDE { $$=strdup("include"); } | TK_SET { $$=strdup("set"); } | TK_SETUP { $$=strdup("setup"); } | TK_PRE { $$=strdup("pre"); } | TK_MAIN { $$=strdup("main"); } | TK_POST { $$=strdup("post"); } | TK_CLEANUP { $$=strdup("cleanup"); } | TK_TR1 { $$=strdup("transformation"); } | TK_DV1 { $$=strdup("derivation"); } | TK_TR2 { $$=strdup("tr"); } | TK_DV2 { $$=strdup("dv"); } | TK_CHDIR { $$=strdup("chdir"); } | TK_SITE { $$=strdup("site"); } | TK_STDIN { $$=strdup("stdin"); } | TK_STDOUT { $$=strdup("stdout"); } | TK_STDERR { $$=strdup("stderr"); } | TK_INPUT { $$=strdup("input"); } | TK_OUTPUT { $$=strdup("output"); } | TK_FEEDBACK { $$=strdup("feedback"); } | TK_STAGEIN { $$=strdup("stagein"); } | TK_STAGEOUT { $$=strdup("stageout"); } | TK_XMLNS { $$=strdup("xmlns"); } ; identifier: TK_IDENT | reserved_word ; command: TK_EOC /* empty rule */ /* * 4.1 Other commands section */ | TK_INCLUDE string { if ( $2 != 0 ) { int fd = open( $2->c_str(), O_RDONLY ); if ( fd != -1 ) { close(fd); debug( "including file '%s'\n", $2->c_str() ); if ( yy_push_file($2->c_str()) == 0 ) yyparse(); } else { // file won't be accessible debug( "open '%s': %s\n", $2->c_str(), strerror(errno) ); } delete $2; } else { debug( "illegal filename for include, ignoring\n" ); } } | TK_XMLNS identifier { if ( $2 ) { app->setXMLNamespace($2); debug( "setting XML namespace to %s", $2 ); free((void*) $2); } } /* FIXME: add debug HERE */ /* * 4.2 Descriptions section */ /* if we need to access RLS, set the site attribute */ | TK_SITE string { if ( $2 && $2->size() ) { app->setPoolHandle( *$2 ); debug( "set site handle to '%s'\n", $2->c_str() ); } else { debug( "site handle identifier string is empty, ignoring\n" ); } if ( $2 ) delete $2; } | TK_TR1 stringlist { if ( $2 && $2->size() ) { for ( StringList::const_iterator i = $2->begin(); i != $2->end(); ++i ) { app->addTransformation( (*i) ); debug( "adding transformation name '%s'\n", i->c_str() ); } } else { debug( "transformation name is empty, ignoring\n" ); } if ( $2 ) delete $2; } | TK_TR2 stringlist { if ( $2 && $2->size() ) { for ( StringList::const_iterator i = $2->begin(); i != $2->end(); ++i ) { app->addTransformation( (*i) ); debug( "adding transformation name '%s'\n", i->c_str() ); } } else { debug( "transformation name is empty, ignoring\n" ); } if ( $2 ) delete $2; } | TK_DV1 string { if ( $2 && $2->size() ) { app->setDerivation( *$2 ); debug( "set derivation name to '%s'\n", $2->c_str() ); } else { debug( "derivation name is empty, ignoring\n" ); } if ( $2 ) delete $2; } | TK_DV2 string { if ( $2 && $2->size() ) { app->setDerivation( *$2 ); debug( "set derivation name to '%s'\n", $2->c_str() ); } else { debug( "derivation name is empty, ignoring\n" ); } if ( $2 ) delete $2; } /* describe files so we can do intelligent things with them */ | TK_INPUT options string string stringlist { try { setupFile( &AppInfo::addInputSFN, &AppInfo::addInputTFN, $2, $3, $4, $5 ); } catch (...) { yyerror( "an exception while adding an input" ); YYERROR; } } | TK_OUTPUT options string string stringlist { try { setupFile( &AppInfo::addOutputSFN, &AppInfo::addOutputTFN, $2, $3, $4, $5 ); } catch (...) 
{ yyerror( "an exception while adding an output" ); YYERROR; } } /* * 4.3 Processing environment section */ /* set an environment variable */ | TK_SET identifier string { if ( $2 && *$2 && $3 ) { // *$3 can be empty to unset // set the environment variable as appropriate if ( $3->size() == 0 ) { // remove variable from environment unsetenv( $2 ); debug( "removed variable '%s'\n", $2 ); } else { // add the variable to the environment if ( setenv( $2, $3->c_str(), 1 ) != 0 ) { // unable to modify environment yyerror( "unable to set environment variable" ); YYERROR; } else { // new environment debug( "setenv '%s' '%s'\n", $2, $3->c_str() ); } } } else { debug( "illegal identifier '%s'\n", ($2 ? $2 : "null") ); } // cleanup if ( $2 != 0 ) free((void*) $2); if ( $3 ) delete $3; } /* runtime environment manipulations */ | TK_CHDIR options string { if ( $3 && $3->size() ) { // check the option string if ( $2->find( std::string("create") ) != $2->end() ) { // create the directory if it does not exist // implemented via mkdir, which may fail -> silently if ( mkdir( $3->c_str(), 0777 ) == -1 ) { // mkdir failed, ignore for existing dir if ( errno == EEXIST ) { debug( "directory '%s' already exists\n", $3->c_str() ); } else { debug( "mkdir '%s': %s\n", $3->c_str(), strerror(errno) ); // make it a hard error yyerror( "unable to create directory" ); YYERROR; } } else { debug( "created directory '%s'\n", $3->c_str() ); } } // post-condition: directory may still *not* exist // attempt to change into $3 if ( chdir($3->c_str()) == -1 ) { // error while chdir debug( "chdir '%s': %s\n", $3->c_str(), strerror(errno) ); } else { // change ok, update app app->setWorkdir(*$3); debug( "changed into directory '%s'\n", $3->c_str() ); } // done delete $3; } else { debug( "empty working directory string, ignoring\n" ); } delete $2; } /* Feedback channel for grid-aware applications */ | TK_FEEDBACK identifier string { if ( $2 == 0 || *$2 == 0 ) { debug( "empty identifier for feedback, ignoring\n" ); } else if ( $3 == 0 || $3->size() == 0 ) { debug( "empty file appointment for feedback, ignoring\n" ); } else { // ok, $2 and $3 are existing and valid std::string fn; // do we need to prepend a tempdir? if ( $3->at(0) != '/' ) { // prepend tempdir const char* temp = AppInfo::getTempDir(); if ( temp == 0 ) temp = "/tmp"; // last resort fn += temp; fn += "/"; } fn += *$3; StatFifo* fifo = new StatFifo( fn, $2 ); if ( fifo ) app->setChannel(fifo); else { yyerror( "error while allocating feedback channel" ); YYERROR; } } if ( $2 != 0 ) free((void*) $2 ); if ( $3 ) delete $3; } | TK_FEEDBACK string { if ( $2 == 0 || $2->size() == 0 ) { debug( "empty file appointment for feedback, ignoring\n" ); } else { // ok, $2 is existing and valid std::string fn; // do we need to prepend a tempdir? 
if ( $2->at(0) != '/' ) { // prepend tempdir const char* temp = AppInfo::getTempDir(); if ( temp == 0 ) temp = "/tmp"; // last resort fn += temp; fn += "/"; } fn += *$2; StatFifo* fifo = new StatFifo( fn, "GRIDSTART_CHANNEL" ); if ( fifo ) app->setChannel(fifo); else { yyerror( "error while allocating feedback channel" ); YYERROR; } } if ( $2 ) delete $2; } /* connect stdio filehandle of subprocesses with something */ | TK_STDIN options string { try { setupStdin( &AppInfo::setStdin, STDIN_FILENO, "stdin", $2, $3 ); } catch ( null_pointer np ) { yyerror( "unable to renew stdin" ); YYERROR; } } | TK_STDOUT options string { try { setupStdio( &AppInfo::setStdout, STDOUT_FILENO, "stdout", $2, $3 ); } catch ( null_pointer np ) { yyerror( "unable to renew stdout" ); YYERROR; } } | TK_STDERR options string { try { setupStdio( &AppInfo::setStderr, STDERR_FILENO, "stderr", $2, $3 ); } catch ( null_pointer np ) { yyerror( "unable to renew stderr" ); YYERROR; } } /* * 4.4 Job commands section */ | TK_SETUP argvstr { try { setupJob( &AppInfo::addSetupJob, "setup", $2 ); } catch ( setup_exception je ) { yyerror( je.getMessage().c_str() ); YYERROR; } } | TK_PRE argvstr { try { setupJob( &AppInfo::addPreJob, "prejob", $2 ); } catch ( setup_exception je ) { yyerror( je.getMessage().c_str() ); YYERROR; } } | TK_POST argvstr { try { setupJob( &AppInfo::addPostJob, "postjob", $2 ); } catch ( setup_exception je ) { yyerror( je.getMessage().c_str() ); YYERROR; } } | TK_CLEANUP argvstr { try { setupJob( &AppInfo::addCleanJob, "cleanup", $2 ); } catch ( setup_exception je ) { yyerror( je.getMessage().c_str() ); YYERROR; } } | TK_MAIN argvstr { try { setupJob( &AppInfo::setMainJob, "main", $2 ); } catch ( setup_exception je ) { yyerror( je.getMessage().c_str() ); YYERROR; } } /* * 4.5 Optional 2nd-level staging section */ | TK_STAGEIN string argvstr { try { setupStage( &AppInfo::setStageIn, "stage-in", $2, $3 ); } catch ( setup_exception je ) { yyerror( je.getMessage().c_str() ); YYERROR; } } | TK_STAGEOUT string argvstr { try { setupStage( &AppInfo::setStageOut, "stage-out", $2, $3 ); } catch ( setup_exception je ) { yyerror( je.getMessage().c_str() ); YYERROR; } } ; %% static const char* RCS_ID = "$Id: scan.y 50 2007-05-19 00:48:32Z gmehta $"; #include #include #include static int debug( const char* fmt, ... ) { size_t size = getpagesize(); std::auto_ptr buffer( new char[size] ); snprintf( &(*buffer), size, "# line %lu: ", lineno ); strncat( &(*buffer), fmt, size-strlen(&(*buffer))-1 ); va_list ap; va_start( ap, fmt ); int result = vfprintf( stderr, &(*buffer), ap ); va_end(ap); return result; } #ifndef HAS_SETENV // Sorry, but I have to insist on setenv and unsetenv extern char** environ; int setenv( const char *name, const char *value, int overwrite ) { // cheat, ignore the overwrite flag size_t s1 = strlen(name); size_t s2 = strlen(value); // check for presence if ( ! 
overwrite ) {
    for ( char** s = environ; s && *s; ++s ) {
      // the variable is present when an entry starts with name followed by '='
      if ( strncmp( *s, name, s1 ) == 0 && (*s)[s1] == '=' ) return 0;
    }
  }
  char* keep = static_cast<char*>( malloc(s1+s2+2) );
  if ( keep == 0 ) { errno = ENOMEM; return -1; }
  strcpy( keep, name );
  strcat( keep, "=" );
  strcat( keep, value );
  int result = putenv(keep);
  if ( result == -1 ) free((void*) keep);
  return result;
}
#endif // HAS_SETENV

#ifndef HAS_UNSETENV
#ifdef HAS_SETENV
extern char** environ;
#endif
static int unsetenv( const char *name )
{
  // sanity checks
  if ( name == 0 || *name == 0 || strchr(name,'=') != 0 ) {
    errno = EINVAL;
    return -1;
  }
  // let's start
  size_t size = strlen(name);
  for ( char** s = environ; *s; ) {
    // a matching entry starts with name followed by '='
    if ( strncmp( *s, name, size ) == 0 && (*s)[size] == '=' ) {
      // found it -- move the rest
      for ( char** t = s; *t; ++t ) t[0] = t[1];
    } else {
      // continue loop for more of the same key
      ++s;
    }
  }
  return 0;
}
#endif // HAS_UNSETENV

static void setupJob( AIJobMember add, const char* jobtype, StringList* args )
{
  if ( args && args->size() ) {
    // context exists
    JobInfo* job = new JobInfo( jobtype, *args );
    if ( job != 0 && job->getValidity() == JobInfo::VALID ) {
      (app->*add)( job ); // indirect member invocation
      debug( "added valid %s application %s\n", jobtype, job->getArg0() );
    } else {
      throw setup_exception( jobtype, "contains an invalid spec" );
    }
  } else {
    debug( "%s specification is empty, ignoring\n", jobtype );
  }
  if ( args ) delete args;
}

static void setupStage( AIStageMember add, const char* jobtype,
                        std::string* fmt, StringList* args )
{
  if ( fmt && args && args->size() ) {
    // context exists
    StageJob* job = new StageJob( jobtype, *fmt, *args );
    if ( job != 0 && job->getValidity() == JobInfo::VALID ) {
      (app->*add)( job ); // indirect member invocation
      debug( "added valid %s application %s\n", jobtype, job->getArg0() );
    } else {
      throw setup_exception( jobtype, "job contains an invalid spec" );
    }
  } else {
    debug( "%s specification is empty, ignoring\n", jobtype );
  }
  if ( fmt ) delete fmt;
  if ( args ) delete args;
}

static void setupStdin( AIStdioMember add, int fd, const char* name,
                        StringSet* options, std::string* fn )
// purpose: set stdin handle for sub-processes
// paramtr: add (IN): pointer to AppInfo::setStdin member
//          fd (IN): STDIN_FILENO
//          name (IN): stdin
//          options (IO): option list for stdin
//          fn (IO): file description to connect stdin to
// except.: null_pointer exception for certain file operations
// warning: cleans up options and fn after use
{
  StatInfo* si = 0;
  for ( StringSet::iterator i=options->begin(); i != options->end(); ++i ) {
    if ( *i == "here" ) {
      // create here script, contents in fn
      char realfn[4096];
      const char* tempdir = AppInfo::getTempDir();
      if ( tempdir == 0 ) tempdir = "/tmp";
      AppInfo::pattern( realfn, sizeof(realfn), tempdir, "/", "gs.in.XXXXXX" );
      int tempfd = mkstemp( realfn );
      if ( tempfd == -1 ) {
        debug( "unable to create tmp file %s: %d: %s\n",
               realfn, errno, strerror(errno) );
        throw null_pointer();
      }
      // write contents into tmp file
      AppInfo::writen( tempfd, fn->c_str(), fn->size(), 3 );
      // rewind to start of file
      lseek( tempfd, 0, SEEK_SET );
      // add temporary file from external temp constructor
      // added benefit: The data section will repeat the first page
      delete fn;
      fn = new std::string(realfn);
      si = new StatTemporary( tempfd, realfn );
      // not nice, but there can be only one.
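      // (The goto that follows leaves the option loop early: a here-file
      // consumes the whole fn contents, so no other option can apply.)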
goto DONE; } else { debug( "ignoring invalid option '%s' for %s handle\n", i->c_str(), name ); } } if ( fn->at(0) == '-' && fn->size() == 1 ) { si = new StatHandle(fd); } else { si = new StatFile( *fn, O_RDONLY, false ); } // may throw null_pointer exception! DONE: (app->*add)( si ); debug( "set %s handle to use '%s'\n", name, fn->c_str() ); if ( fn ) delete fn; if ( options ) delete options; } static void setupStdio( AIStdioMember add, int fd, const char* name, StringSet* options, std::string* fn ) // purpose: set stdio handle for sub-processes // paramtr: add (IN): pointer to AppInfo::setStd(in|out|err) member // fd (IN): value of STD(IN|OUT|ERR)_FILENO // name (IN): one of stdin, stdout, or stderr // options (IO): value of STD(IN|OUT|ERR)_FILENO // fn (IO): file description to connect stdio to // except.: null_pointer exception for certain file operations // warning: cleans up options and fn after use { if ( fn && fn->size() ) { // check options int mode = O_WRONLY | O_CREAT; for ( StringSet::iterator i=options->begin(); i != options->end(); ++i ) { if ( *i == "append" ) { mode |= O_APPEND; } else if ( *i == "truncate" ) { mode &= ~O_APPEND; } else { debug( "ignoring invalid option '%s' for %s handle\n", i->c_str(), name ); } } StatInfo* si = 0; if ( fn->at(0) == '-' && fn->size() == 1 ) { si = new StatHandle(fd); } else { si = ( fd == STDIN_FILENO ) ? new StatFile( *fn, O_RDONLY, 0 ) : new StatFile( *fn, mode, (mode & O_APPEND) != 0 ); } // may throw null_pointer exception! (app->*add)( si ); debug( "set %s handle to use '%s'\n", name, fn->c_str() ); } else { debug( "empty filename for %s handle, ignoring\n", name ); } if ( fn ) delete fn; if ( options ) delete options; } static void setupFile( AIIOMember add1, AIMIOMember add2, StringSet* options, std::string* lfn, std::string* sfn, StringList* tfn ) // purpose: register input and output files // paramtr: add1 (IN): pointer to AppInfo::add(In|Out)putSFN member // add2 (IN): pointer to AppInfo::add(In|Out)putTFN member // options (IO): value of STD(IN|OUT|ERR)_FILENO // lfn (IO): logical filename pointer // sfn (IO): storage filename pointer // tfn (IO): transfer filename list pointer // warning: cleans up after use { bool do_md5(false); if ( options ) { // check the option string static std::string c_md5("md5"); if ( options->find(c_md5) != options->end() ) do_md5 = true; delete options; } if ( lfn && lfn->size() && sfn && sfn->size() && tfn ) { // essential information is valid and existing (app->*add1)( *lfn, *sfn, do_md5 ); debug( "added SFN mapping '%s' => '%s'\n", lfn->c_str(), sfn->c_str() ); for ( StringList::iterator i=tfn->begin(); i != tfn->end(); ++i ) { (app->*add2)( *sfn, *i ); debug( "added TFN mapping '%s' => '%s'\n", sfn->c_str(), i->c_str() ); } } else { debug( "invalid output filename specification, ignoring\n" ); } if ( lfn ) delete lfn; if ( sfn ) delete sfn; if ( tfn ) delete tfn; } pegasus-wms_4.0.1+dfsg/src/tools/k.2/useinfo.cc0000644000175000017500000001510011757531137020331 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). 
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */

#include "useinfo.hh"
#include "time.hh"
#include <iomanip> // assumption: angle-bracket header lost in extraction; needed for std::setprecision

static const char* RCS_ID = "$Id: useinfo.cc 50 2007-05-19 00:48:32Z gmehta $";

UseInfo::UseInfo()
  :m_tag()
// purpose: empty ctor
{
  memset( &m_use, 0, sizeof(struct rusage) );
}

UseInfo::UseInfo( const char* tag, __rusage_who who )
// purpose: construction from getrusage call
// paramtr: tag (IN): name of the element
//          who (IN): RUSAGE_SELF or RUSAGE_CHILDREN
{
  // recoded for SunCC
  if ( tag ) m_tag = tag;
  else throw null_pointer();
  memset( &m_use, 0, sizeof(struct rusage) );
  if ( who == RUSAGE_SELF || who == RUSAGE_CHILDREN ) {
    // may still fail
    getrusage( who, &m_use );
  }
}

UseInfo::UseInfo( const char* tag, const struct rusage* use )
// purpose: ctor
// paramtr: tag (IN): name of the element
//          use (IN): Usage record to initialize to
{
  // recoded for SunCC
  if ( tag ) m_tag = tag;
  else throw null_pointer();
  if ( use != &m_use ) memcpy( &m_use, use, sizeof(struct rusage) );
}

std::string UseInfo::toXML( int indent, const char* nspace ) const
// purpose: XML format a rusage record into a freshly created string
// paramtr: indent (IN): indentation level of tag
//          nspace (IN): If defined, namespace prefix before element
// returns: string containing the element data
{
  // one empty element, rusage counters as attributes
  std::string result( XML::startElement( m_tag, indent, nspace ) );
  result += XML::printf( " utime=\"%.3f\" stime=\"%.3f\""
                         " minflt=\"%lu\" majflt=\"%lu\""
                         " nswap=\"%lu\" nsignals=\"%lu\""
                         " nvcsw=\"%lu\" nivcsw=\"%lu\"/>\r\n",
                         Time::seconds(m_use.ru_utime),
                         Time::seconds(m_use.ru_stime),
                         m_use.ru_minflt, m_use.ru_majflt,
                         m_use.ru_nswap, m_use.ru_nsignals,
                         m_use.ru_nvcsw, m_use.ru_nivcsw );
  return result;
}

std::ostream& UseInfo::toXML( std::ostream& s, int indent, const char* nspace ) const
// purpose: format content as XML onto stream
// paramtr: s (IO): stream to put things on
//          indent (IN): indentation depth, negative for none
//          nspace (IN): tag namespace, if not null
// returns: s
{
  // start element
  XML::startElement( s, m_tag, indent, nspace );
  // print attributes
  s << " utime=\"" << std::setprecision(3) << Time::seconds(m_use.ru_utime) << '"';
  s << " stime=\"" << std::setprecision(3) << Time::seconds(m_use.ru_stime) << '"';
  // no need to quote integers; attribute names match the string version above
  s << " minflt=\"" << m_use.ru_minflt << '"';
  s << " majflt=\"" << m_use.ru_majflt << '"';
  s << " nswap=\"" << m_use.ru_nswap << '"';
  s << " nsignals=\"" << m_use.ru_nsignals << '"';
  s << " nvcsw=\"" << m_use.ru_nvcsw << '"';
  s << " nivcsw=\"" << m_use.ru_nivcsw << '"';
  // finalize element
  return s << "/>\r\n";
}

UseInfo& UseInfo::operator+=( const struct rusage* summand )
// purpose: add another resource usage to the current record
// paramtr: summand (IN): usage record to add
// returns: current object, modified to be sum += summand
{
  // Total amount of user time used.
  m_use.ru_utime += summand->ru_utime;
  // Total amount of system time used.
  m_use.ru_stime += summand->ru_stime;
  // Maximum resident set size (in kilobytes).
  m_use.ru_maxrss += summand->ru_maxrss;
  // Amount of sharing of text segment memory
  // with other processes (kilobyte-seconds).
  m_use.ru_ixrss += summand->ru_ixrss;
  // Amount of data segment memory used (kilobyte-seconds).
  m_use.ru_idrss += summand->ru_idrss;
  // Amount of stack memory used (kilobyte-seconds).
  m_use.ru_isrss += summand->ru_isrss;
  // Number of soft page faults (i.e. those serviced by reclaiming
  // a page from the list of pages awaiting reallocation).
m_use.ru_minflt += summand->ru_minflt; // Number of hard page faults (i.e. those that required I/O). m_use.ru_majflt += summand->ru_majflt; // Number of times a process was swapped out of physical memory. m_use.ru_nswap += summand->ru_nswap; // Number of input operations via the file system. Note: This // and `ru_oublock' do not include operations with the cache. m_use.ru_inblock += summand->ru_inblock; // Number of output operations via the file system. m_use.ru_oublock += summand->ru_oublock; // Number of IPC messages sent. m_use.ru_msgsnd += summand->ru_msgsnd; // Number of IPC messages received. m_use.ru_msgrcv += summand->ru_msgrcv; // Number of signals delivered. m_use.ru_nsignals += summand->ru_nsignals; // Number of voluntary context switches, i.e. because the process // gave up the process before it had to (usually to wait for some // resource to be available). m_use.ru_nvcsw += summand->ru_nvcsw; // Number of involuntary context switches, i.e. a higher priority process // became runnable or the current process used up its time slice. m_use.ru_nivcsw += summand->ru_nivcsw; return *this; } UseInfo& UseInfo::operator+=( const UseInfo& summand ) // purpose: add another resource usage to the current record // paramtr: summand (IN): usage record to add // returns: current object, modified to be sum += summand { return operator+=( &summand.m_use ); } struct timeval& operator+=( struct timeval& sum, const struct timeval& summand ) // purpose: add one timeval structure to another w/ normalization // paramtr: sum (IO): first operand and result // summand (IN): second operand // returns: sum { sum.tv_usec += summand.tv_usec; sum.tv_sec += summand.tv_sec; while ( sum.tv_usec >= 1000000 ) { sum.tv_sec++; sum.tv_usec -= 1000000; } return sum; } struct timeval operator+( const struct timeval& a, const struct timeval& b ) // purpose: adds two timeval structures // paramtr: a (IN): first operand (summand) // b (IN): second operand (summand) // returns: normalized sum = a + b { struct timeval result = a; return result += b; } UseInfo operator+( const UseInfo& a, const UseInfo& b ) // purpose: Add two useinfo records // paramtr: a (IN): first operand (summand) // b (IN): second operand (summand) // returns: sum = a + b { UseInfo result = a; return result += b; } pegasus-wms_4.0.1+dfsg/src/tools/k.2/dummy.c0000644000175000017500000000000011757531137017642 0ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/tools/k.2/getif.hh0000644000175000017500000000427411757531137020003 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _GETIF_HH #define _GETIF_HH #include #include #include #include #include #include class PrimaryInterface { // This class obtains the IPv4 address of the primary interface. // It is implemented using the Singleton pattern. 
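//
// The two static tables below pair networks with their masks: loopback
// 127.0.0.0/8 plus the RFC 1918 private ranges 10.0.0.0/8, 172.16.0.0/12,
// and 192.168.0.0/16. isVPN() checks the three private ranges, while
// primary_interface() uses the loopback entry to skip loopback addresses.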
private: static const unsigned long vpn_netmask[4]; static const unsigned long vpn_network[4]; struct ifreq* m_interface; static PrimaryInterface* m_instance; protected: PrimaryInterface(); // purpose: protected singleton ctor ~PrimaryInterface(); // purpose: protected singleton destructor static struct ifreq* primary_interface(); // purpose: obtain the primary interface information // returns: a structure containing the if info, or NULL for error public: static const PrimaryInterface& instance(); // purpose: Obtains access to the singleton // returns: The single instance of the Singleton. std::string whoami() const; // purpose: Obtains the primary interface's IPv4 as dotted quad // returns: The IPv4 as dotted quad - or an empty string on failure const char* whoami( char* area, size_t size ) const; // purpose: Obtains the primary interface's IPv4 as dotted quad // paramtr: area (OUT): The IPv4 as dotted quad - sizeof >= 16! // returns: area static bool isVPN( const unsigned long host ); // purpose: Determines if an IPv4 address is from a VPN // paramtr: host (IN): network byte ordered IPv4 host address // returns: true, if the host is in a VPN address space }; #endif // _GETIF_HH pegasus-wms_4.0.1+dfsg/src/tools/k.2/stagejob.hh0000644000175000017500000000502311757531137020474 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _STAGEJOB_HH #define _STAGEJOB_HH #include "maptypes.hh" #include "jobinfo.hh" class StageJob : public JobInfo { // This class assembles information about each job that could be run. // The difference to a regular job is that certain simplifications are // possible, and that it allows for certain run-time substitutions in // its arguments. private: StatTemporary* m_tempfile; // information about the tempfile std::string m_format; // stores the format string // render inaccessible StageJob(); StageJob( const StageJob& ); StageJob& operator=( const StageJob& ); public: StageJob( const char* tag, const std::string& format, const char* commandline ); // purpose: initialize the data structure by parsing a command string. // paramtr: tag (IN): stage-in or stage-out // format (IN): format string for output // commandline (IN): commandline concatenated string to separate StageJob( const char* tag, const std::string& format, const StringList& argv ); // purpose: initialize the data structure by parsing a command string. 
// paramtr: tag (IN): stage-in or stage-out // format (IN): format string for output // argv (IN): commandline already split into arg vector virtual ~StageJob(); // purpose: dtor virtual int createTempfile( const char* id, const FilenameMap& l2s, const FilenameMultiMap& s2t ); // purpose: create the tempfile from the external filemaps // paratmr: id (IN): stage-in or stage-out // l2s (IN): map with LFN to SFN mapping // s2t (IN): multimap with SFN to TFN mapping // returns: -1 in case of error // 0 for nothing to do // >0 for number of files protected: virtual void rewrite(); // purpose: rewrite the argv vector before calling the job // warning: called from system() }; #endif // _STAGEJOB_HH pegasus-wms_4.0.1+dfsg/src/tools/k.2/appinfo.hh0000644000175000017500000002670011757531137020337 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ #ifndef _APPINFO_HH #define _APPINFO_HH #include #include #include "maptypes.hh" #include "time.hh" #include "xml.hh" #include "statinfo.hh" #include "jobinfo.hh" #include "stagejob.hh" #include "shared.hh" #include "uname.hh" #include #include #ifndef HAS_MUTABLE #define mutable #endif class AppInfo : public XML { // Class to collect everything around grid shell. There is usually // only one instance of this class. Nevertheless, the singleton // pattern is not enforced. private: std::string m_self; // gridstart's argv[0] argument Time m_start; // point of time that app was started pid_t m_child; // pid of gridstart itself mutable bool m_isPrinted; // flag to set after successful print op UseInfo* m_usage; // rusage record for myself StatInfo* m_stdin; // stat() info for "input", if available StatInfo* m_stdout; // stat() info for "output", if available StatInfo* m_stderr; // stat() info for "error", if available StatInfo* m_logfile; // stat() info for "logfile", if available StatInfo* m_gridstart; // stat() info for myself, if available StatInfo* m_channel; // stat() on application channel FIFO char m_ipv4[16]; // host address of primary interface Uname m_uname; // uname -a and architecture StringList m_xformation; // chosen VDC TR fqdn trail (incl. compound) std::string m_derivation; // chosen VDC DV fqdn for this invocation std::string m_workdir; // CWD at point of execution std::string m_pool_handle; // optional pool handle std::string m_xmlns; // XML namespace for output, usually empty typedef std::list JobInfoList; static void free_job( JobInfo* ); JobInfoList m_setup; // optional setup application to run JobInfoList m_prejob; // optional pre-job application to run JobInfo* m_application; // the application itself that was run JobInfoList m_postjob; // optional post-job application to run JobInfoList m_cleanup; // optional clean-up application to run StageJob* m_stagein; // optional script for 2nd-level stage-in StageJob* m_stageout; // optional script for 2nd-level stage-out FilenameMap m_input_lfn_sfn; // iLFN -> iSFN FilenameMultiMap m_input_sfn_tfn; // iSFN -> iTFN, iTFN, ... 
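  // Naming: LFN = logical filename, SFN = site/storage filename, TFN =
  // transfer filename. An LFN resolves to exactly one SFN, but one SFN
  // may be reachable at several TFNs -- hence map vs. multimap.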
  FilenameMap      m_output_lfn_sfn; // oLFN -> oSFN
  FilenameMultiMap m_output_sfn_tfn; // oSFN -> oTFN, oTFN, ...

  typedef std::map< std::string, StatFile* > StatFileMap;
  static void free_statinfo( StatFileMap::value_type& sim );
  StatFileMap m_input_info;
  StatFileMap m_output_info;

  // render inaccessible
  AppInfo();
  AppInfo( const AppInfo& );
  AppInfo& operator=( const AppInfo& );

public:
  AppInfo( const char* self );
  // purpose: initialize the data structure with defaults.
  // paramtr: self (IN): the argv[0] from main()

  virtual ~AppInfo();
  // purpose: dtor

  inline std::string getSelf() const { return m_self; }

  inline const StatInfo* getStdin() const  { return m_stdin; }
  inline const StatInfo* getStdout() const { return m_stdout; }
  inline const StatInfo* getStderr() const { return m_stderr; }
  void setStdin( StatInfo* handle );
  void setStdout( StatInfo* handle );
  void setStderr( StatInfo* handle );

  inline StatInfo* getChannel() const { return m_channel; }
  void setChannel( StatFifo* handle );

  inline void setPrinted( bool isPrinted ) { m_isPrinted = isPrinted; }
  inline bool getPrinted() const { return m_isPrinted; }

  inline void addTransformation( const std::string& tr ) { m_xformation.push_back(tr); }
  inline std::string getDerivation() const { return m_derivation; }
  inline void setDerivation( const std::string& dv ) { m_derivation = dv; }
  inline std::string getPoolHandle() const { return m_pool_handle; }
  inline void setPoolHandle( const std::string& ph ) { m_pool_handle = ph; }
  inline std::string getXMLNamespace() const { return m_xmlns; }
  inline void setXMLNamespace( const std::string& ns ) { m_xmlns = ns; }

  inline void addInputSFN( const std::string& lfn, const std::string& sfn, bool do_md5 = false )
  { m_input_lfn_sfn[lfn] = FilenameBool(sfn,do_md5); }
  inline void addInputTFN( const std::string& sfn, const std::string& tfn )
  { m_input_sfn_tfn.insert( FilenameMultiMap::value_type(sfn,tfn) ); }
  inline void addOutputSFN( const std::string& lfn, const std::string& sfn, bool do_md5 = false )
  { m_output_lfn_sfn[lfn] = FilenameBool(sfn,do_md5); }
  inline void addOutputTFN( const std::string& sfn, const std::string& tfn )
  { m_output_sfn_tfn.insert( FilenameMultiMap::value_type(sfn,tfn) ); }

  virtual size_t createInputInfo();
  // purpose: update the internal input file statinfo map
  // returns: number of updates processed successfully.

  virtual size_t createOutputInfo();
  // purpose: update the internal output file statinfo map
  // returns: number of updates processed successfully.

  virtual ssize_t print();
  // purpose: output the collective information about the run onto the (stdout) fd
  // returns: the number of characters actually written (as of write() call).
  // mutable: will update the self resource usage record before print.
  // mutable: will update the isPrinted predicate after print.

  virtual std::string toXML( int indent = 0, const char* nspace = 0 ) const;
  // purpose: XML format an invocation record.
  // paramtr: indent (IN): indentation level of tag
  //          nspace (IN): If defined, namespace prefix before element
  // returns: string containing the element data

  virtual std::ostream& toXML( std::ostream& s, int indent = 0, const char* nspace = 0 ) const;
  // purpose: XML format an invocation record onto a given stream
  // paramtr: s (IO): stream to put information into
  //          indent (IN): indentation level of tag
  //          nspace (IN): If defined, namespace prefix before element
  // returns: s

  inline void addSetupJob( JobInfo* job )
  // purpose: add a job to the list of setup jobs, first come, first served
  // paramtr: job (IN): newly allocated job information record
  // warning: ownership of job record will pass here
  { if ( job != 0 ) m_setup.push_back(job); }

  inline void addPreJob( JobInfo* job )
  // purpose: add a job to the list of pre jobs, first come, first served
  // paramtr: job (IN): newly allocated job information record
  // warning: ownership of job record will pass here
  { if ( job != 0 ) m_prejob.push_back(job); }

  inline void addPostJob( JobInfo* job )
  // purpose: add a job to the list of post jobs, first come, first served
  // paramtr: job (IN): newly allocated job information record
  // warning: ownership of job record will pass here
  { if ( job != 0 ) m_postjob.push_back(job); }

  inline void addCleanJob( JobInfo* job )
  // purpose: add a job to the list of cleanup jobs, first come, first served
  // paramtr: job (IN): newly allocated job information record
  // warning: ownership of job record will pass here
  { if ( job != 0 ) m_cleanup.push_back(job); }

  inline void setMainJob( JobInfo* job )
  // purpose: set or replace the compute job
  // paramtr: job (IN): newly allocated job information record
  // warning: ownership of job record will pass here
  { if ( job != 0 ) {
      if ( m_application != 0 ) delete m_application;
      m_application = job;
    }
  }

  inline bool hasMainJob() const
  // purpose: Predicate to determine if a main job exists
  // returns: true, if a job exists, false otherwise
  { return m_application != 0; }

  inline void setStageIn( StageJob* job )
  // purpose: sets or replaces a stage-in job
  // paramtr: job (IN): newly allocated job information record
  // warning: ownership of job will pass here
  { if ( job ) {
      if ( m_stagein != 0 ) delete m_stagein;
      m_stagein = job;
    }
  }

  inline void setStageOut( StageJob* job )
  // purpose: sets or replaces a stage-out job
  // paramtr: job (IN): newly allocated job information record
  // warning: ownership of job will pass here
  { if ( job ) {
      if ( m_stageout != 0 ) delete m_stageout;
      m_stageout = job;
    }
  }

  inline std::string getWorkdir( void ) const { return m_workdir; }
  inline void setWorkdir( const std::string& dir ) { m_workdir = dir; }

  size_t run( int& status );
  // purpose: run all runnable jobs. This constitutes a logical chaining of
  //          pre && main && post ; cleanup
  // paramtr: status (IO): first "failed" return code. Must be 0 to come in
  // returns: the number of jobs run

  virtual int runStageIn();
  // purpose: run the stage-in job, if one exists.
  // returns: Return code from running the stage-in job.
  // warning: No existing job will also result in status of 0.
  // require: m_stagein valid, m_input_sfn_tfn defined

  virtual int runStageOut();
  // purpose: run the stage-out job, if one exists.
  // returns: Return code from running the stage-out job.
  // warning: No existing job will also result in status of 0.
  // require: m_stageout valid, m_output_sfn_tfn defined

public: // static section
  static const char* getTempDir( void );
  // purpose: determine a suitable directory for temporary files.
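  // note: the implementation in appinfo.cc probes, in this order:
  //       $GRIDSTART_TMP, $TMP, $TEMP, $TMPDIR, P_tmpdir, /tmp, /var/tmp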
  // warning: remote schedulers may choose to set a different TMP..
  // returns: a string with a temporary directory, may still be NULL.
  // warning: result is _not_ allocated, so don't free the string

  static char* pattern( char* buffer, size_t size, const char* dir, const char* sep, const char* file );
  // purpose: concatenate directory, separator and filename into one string
  // paramtr: buffer (OUT): where to put the string
  //          size (IN): capacity of buffer
  //          dir (IN): directory pointer, see getTempDir()
  //          sep (IN): pathname separator as string
  //          file (IN): file to add to path
  // returns: buffer

  static int nfs_sync( int fd, long micros = 100000 );
  // purpose: tries to force NFS to update the given file descriptor
  // paramtr: fd (IN): descriptor of an open file
  // returns: 0 is ok, -1 for failure

  static ssize_t writen( int fd, const char* buffer, ssize_t n, unsigned restart );
  // purpose: write all n bytes in buffer, if possible at all
  // paramtr: fd (IN): filedescriptor open for writing
  //          buffer (IN): bytes to write (must be at least n byte long)
  //          n (IN): number of bytes to write
  //          restart (IN): if true, try to restart write at max that often
  // returns: n, if everything was written, or
  //          [0..n-1], if some bytes were written, but then failed,
  //          < 0, if some error occurred.
};

#endif // _APPINFO_HH
pegasus-wms_4.0.1+dfsg/src/tools/k.2/appinfo.cc0000644000175000017500000006173611757531137020325 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */

// NOTE: the bare #include lines here lost their <...> names to
// extraction; the system headers below are reconstructed from actual
// usage in this file.
#include <sys/types.h>
#include <sys/stat.h>
#include <sys/time.h>
#include <sys/resource.h>
#include <unistd.h>
#include <fcntl.h>
#include <cstdio>
#include <cstdlib>
#include <cstring>
#include <cctype>
#include <cerrno>
#include <algorithm>

#include "getif.hh"
#include "useinfo.hh"
#include "jobinfo.hh"
#include "statinfo.hh"
#include "appinfo.hh"

static const char* RCS_ID = "$Id: appinfo.cc 50 2007-05-19 00:48:32Z gmehta $";

#define XML_SCHEMA_URI "http://www.griphyn.org/chimera/Invocation"
#define XML_SCHEMA_LOC "http://www.griphyn.org/chimera/iv-1.3.xsd"
#define XML_SCHEMA_VERSION "1.3"

inline int lock_reg( int fd, int cmd, int type, off_t offset, int whence, off_t len )
// purpose: wrap the lock functionality of fcntl
// paramtr: fd (IN): opened file descriptor, appropriate for lock mode
//          cmd (IN): F_SETLK or F_SETLKW
//          type (IN): F_RDLCK (shared), F_WRLCK (exclusive) or F_UNLCK
//          offset (IN): offset into file
//          whence (IN): SEEK_SET, SEEK_CUR or SEEK_END
//          len (IN): region size, or 0 for complete file
// returns: result of fcntl() call
{
  struct flock lock;
  lock.l_type = type;
  lock.l_whence = whence;
  lock.l_start = offset;
  lock.l_len = len;
  return ( fcntl( fd, cmd, &lock ) );
}

int AppInfo::nfs_sync( int fd, long micros )
// purpose: tries to force NFS to update the given file descriptor
// paramtr: fd (IN): descriptor of an open file
// returns: 0 is ok, -1 for failure
{
  // lock file
  if ( lock_reg( fd, F_SETLK, F_WRLCK, 0, SEEK_SET, 0 ) == -1 )
    return -1;

  // wait a while
  struct timeval p = { micros / 1000000, micros % 1000000 };
  select( 0, 0, 0, 0, &p );

  // unlock file
  return lock_reg( fd, F_SETLK, F_UNLCK, 0, SEEK_SET, 0 );
}

static std::string split_fqdi( const std::string& fqdi )
{
  std::string result;
  std::string::size_type first = fqdi.find( "::" );
  std::string::size_type second = fqdi.rfind(":");
  std::string ns, id, vs;

  if ( first != std::string::npos ) {
    // has a namespace
    ns = fqdi.substr( 0, first );
    if ( second != std::string::npos && second > first+1 ) {
      // has valid version
      id = fqdi.substr( first+2, second-(first+2) );
      vs = fqdi.substr( second+1 );
    } else {
      // no version
      id = fqdi.substr( first+2 );
    }
  } else {
    // no namespace
    if ( second != std::string::npos ) {
      // has version
      id = fqdi.substr( 0, second );
      vs = fqdi.substr( second+1 );
    } else {
      // no version
      id = fqdi;
    }
  }

  // combine result
  if ( ns.size() ) result.append(" namespace=\"").append(ns).append("\"");
  result.append(" name=\"").append(id).append("\"");
  if ( vs.size() ) result.append(" version=\"").append(vs).append("\"");
  return result;
}

std::ostream& AppInfo::toXML( std::ostream& s, int indent, const char* nspace ) const
// purpose: XML format an invocation record onto a given stream
// paramtr: s (IO): stream to put information into
//          indent (IN): indentation level of tag
//          nspace (IN): If defined, namespace prefix before element
// warning: does not include the preamble
// returns: s
{
  // start root element
  XML::startElement( s, "invocation", indent, nspace ) << " xmlns";

  // assign our own namespace, if that is wanted
  if ( nspace ) s << ':' << nspace;

  // XML generic attributes
  s << "=\"" XML_SCHEMA_URI "\" xmlns:gvds_xsi=\""
    "http://www.w3.org/2001/XMLSchema-instance\" "
    "gvds_xsi:schemaLocation=\"" XML_SCHEMA_URI " " XML_SCHEMA_LOC
    "\" version=\"" XML_SCHEMA_VERSION "\"";

  // non-generic attributes
  s << " start=\"" << m_start
    << XML::printf( "\" duration=\"%.3f\"", m_start.elapsed() );

  // optional attributes for root element: application process id
  if ( m_child != 0 ) s << " pid=\"" << m_child << '"';

  // user and group info about who ran this thing
  s << " uid=\"" << getuid() << '"';
  s << " gid=\"" << getgid() << '"';

  // finalize open tag of root element
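  // For reference, the open tag assembled above looks roughly like this
  // (attribute values are illustrative; pid only appears when m_child is set):
  //
  //   <invocation xmlns="http://www.griphyn.org/chimera/Invocation"
  //       xmlns:gvds_xsi="http://www.w3.org/2001/XMLSchema-instance"
  //       gvds_xsi:schemaLocation="... iv-1.3.xsd" version="1.3"
  //       start="20040607T120000" duration="12.345"
  //       pid="4711" uid="500" gid="100">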
s << ">\r\n"; // pseudo-element XML::startElement( s, "provenance", indent+2, nspace ); // optional attribute: host address dotted quad if ( isdigit( m_ipv4[0] ) ) s << " host=\"" << m_ipv4 << '"'; // optional site handle if ( m_pool_handle.size() ) s << " pool=\"" << m_pool_handle << '"'; s << ">\r\n"; // m_uname.toXML( s, indent+4, nspace ); // optional element for provenance: derivation fqdn if ( m_derivation.size() ) { s << XML::startElement( s, "dv", indent+4, nspace ); s << split_fqdi(m_derivation) << "/>\r\n"; } // optional element for provenance: transformation fqdn if ( m_xformation.size() ) { for ( StringList::const_iterator i = m_xformation.begin(); i != m_xformation.end(); ++i ) { s << XML::startElement( s, "tr", indent+4, nspace ); s << split_fqdi(*i) << "/>\r\n"; } } // done with provenance XML::finalElement( s, "provenance", indent+2, nspace ); // XML::startElement( s, "cwd", indent+2, nspace ); if ( m_workdir.size() ) { s << '>' << m_workdir; XML::finalElement( s, "cwd", 0, nspace ); } else { #if 0 s << " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" " xsi:nil=\"true\"/>\r\n"; #else s << "/>\r\n"; #endif } // own resources if ( m_usage ) m_usage->toXML( s, indent+2, nspace ); // job record for stage-in job if ( m_stagein ) m_stagein->toXML( s, indent+2, nspace ); // job lists: , , , , for ( JobInfoList::const_iterator i( m_setup.begin() ); i != m_setup.end(); i++ ) (*i)->toXML( s, indent+2, nspace ); for ( JobInfoList::const_iterator i( m_prejob.begin() ); i != m_prejob.end(); i++ ) (*i)->toXML( s, indent+2, nspace ); m_application->toXML( s, indent+2, nspace ); for ( JobInfoList::const_iterator i( m_postjob.begin() ); i != m_postjob.end(); i++ ) (*i)->toXML( s, indent+2, nspace ); for ( JobInfoList::const_iterator i( m_cleanup.begin() ); i != m_cleanup.end(); i++ ) (*i)->toXML( s, indent+2, nspace ); // job record for stage-out job if ( m_stageout ) m_stageout->toXML( s, indent+2, nspace ); // records if ( m_gridstart ) m_gridstart->toXML( s, indent+2, nspace ); if ( m_stdin ) m_stdin->toXML( s, indent+2, nspace ); if ( m_stdout ) m_stdout->toXML( s, indent+2, nspace ); if ( m_stderr ) m_stderr->toXML( s, indent+2, nspace ); if ( m_logfile ) m_logfile->toXML( s, indent+2, nspace ); if ( m_channel ) m_channel->toXML( s, indent+2, nspace ); // records for input and output for ( StatFileMap::const_iterator i( m_input_info.begin() ); i != m_input_info.end(); ++i ) { i->second->toXML( s, indent+2, nspace ); } for ( StatFileMap::const_iterator i( m_output_info.begin() ); i != m_output_info.end(); ++i ) { i->second->toXML( s, indent+2, nspace ); } // finish root element return XML::finalElement( s, "invocation", indent, nspace ); } std::string AppInfo::toXML( int indent, const char* nspace ) const // purpose: generate provenance tracking information // paramtr: indent (IN): indentation level of tag // nspace (IN): If defined, namespace prefix before element // warning: does not include the preamble // returns: string containing the element data { std::string result; result.reserve( getpagesize() << 2 ); // start root element result += XML::startElement( "invocation", indent, nspace ) + " xmlns"; if ( nspace ) result.append(":").append(nspace); // XML generic attributes result += "=\"" XML_SCHEMA_URI "\" xmlns:gvds_xsi=\"" "http://www.w3.org/2001/XMLSchema-instance\" " "gvds_xsi:schemaLocation=\"" XML_SCHEMA_URI " " XML_SCHEMA_LOC "\" version=\"" XML_SCHEMA_VERSION "\""; // non-generic attributes result += " start=\"" + m_start.date(); result += XML::printf( "\" 
duration=\"%.3f\"", m_start.elapsed() ); // optional attributes for root element: application process id if ( m_child != 0 ) result += XML::printf( " pid=\"%d\"", m_child ); // user and group info about who ran this thing result += XML::printf( " uid=\"%d\" gid=\"%d\">\r\n", getuid(), getgid() ); // pseudo-element result += XML::startElement( "provenance", indent+2, nspace ); // optional attribute: host address dotted quad if ( isdigit( m_ipv4[0] ) ) { result += " host=\""; result += m_ipv4; result += "\""; } // optional site handle if ( m_pool_handle.size() ) result += " pool=\"" + m_pool_handle + "\""; result += ">\r\n"; // result += m_uname.toXML( indent+4, nspace ); // optional element for provenance: derivation fqdn if ( m_derivation.size() ) { result += XML::startElement( "dv", indent+4, nspace ); result += split_fqdi(m_derivation) + "/>\r\n"; } // optional element for provenance: transformation fqdn if ( m_xformation.size() ) { for ( StringList::const_iterator i = m_xformation.begin(); i != m_xformation.end(); ++i ) { result += XML::startElement( "tr", indent+4, nspace ); result += split_fqdi(*i) + "/>\r\n"; } } // done with provenance result += XML::finalElement( "provenance", indent+2, nspace ); // result += XML::startElement( "cwd", indent+2, nspace ); if ( m_workdir.size() ) { result += ">" + m_workdir; result += XML::finalElement( "cwd", 0, nspace ); } else { #if 0 result += " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" " xsi:nil=\"true\"/>\r\n"; #else result += "/>\r\n"; #endif } // own resources if ( m_usage ) result += m_usage->toXML( indent+2, nspace ); // job record for stage-in job if ( m_stagein ) result += m_stagein->toXML( indent+2, nspace ); // job lists: , , , , for ( JobInfoList::const_iterator i=m_setup.begin(); i != m_setup.end(); i++ ) result += (*i)->toXML( indent+2, nspace ); for ( JobInfoList::const_iterator i=m_prejob.begin(); i != m_prejob.end(); i++ ) result += (*i)->toXML( indent+2, nspace ); result += m_application->toXML( indent+2, nspace ); for ( JobInfoList::const_iterator i=m_postjob.begin(); i != m_postjob.end(); i++ ) result += (*i)->toXML( indent+2, nspace ); for ( JobInfoList::const_iterator i=m_cleanup.begin(); i != m_cleanup.end(); i++ ) result += (*i)->toXML( indent+2, nspace ); // job record for stage-out job if ( m_stageout ) result += m_stageout->toXML( indent+2, nspace ); // records if ( m_gridstart ) result += m_gridstart->toXML( indent+2, nspace ); if ( m_stdin ) result += m_stdin->toXML( indent+2, nspace ); if ( m_stdout ) result += m_stdout->toXML( indent+2, nspace ); if ( m_stderr ) result += m_stderr->toXML( indent+2, nspace ); if ( m_logfile ) result += m_logfile->toXML( indent+2, nspace ); if ( m_channel ) result += m_channel->toXML( indent+2, nspace ); // records for input and output for ( StatFileMap::const_iterator i( m_input_info.begin() ); i != m_input_info.end(); ++i ) { result += i->second->toXML( indent+2, nspace ); } for ( StatFileMap::const_iterator i( m_output_info.begin() ); i != m_output_info.end(); ++i ) { result += i->second->toXML( indent+2, nspace ); } // finish root element result += XML::finalElement( "invocation", indent, nspace ); return result; } char* AppInfo::pattern( char* buffer, size_t size, const char* dir, const char* sep, const char* file ) // purpose: concatenate directory, separator and filename into one string // paramtr: buffer (OUT): where to put the string // size (IN): capacity of buffer // dir (IN): directory pointer // sep (IN): pathname separator as string // file (IN): file to 
add to path // returns: buffer { --size; buffer[size] = 0; // reliably terminate string strncpy( buffer, dir, size ); strncat( buffer, sep, size ); strncat( buffer, file, size ); return buffer; } AppInfo::AppInfo( const char* self ) // purpose: initialize the data structure with defaults. // paramtr: self (IN): the argv[0] from main() :m_self(self), // m_start( now() ), m_child( getpid() ), m_isPrinted(false), m_usage(0), m_stdin(0), m_stdout(0), m_stderr(0), m_logfile(0), m_gridstart(0), m_channel(0), m_uname(), m_application(0), m_stagein(0), m_stageout(0) { size_t tempsize = getpagesize(); char* tempname = new char[tempsize]; // find a suitable directory for temporary files - not malloc'ed const char* tempdir = getTempDir(); // initialize some data for myself m_gridstart = new StatFile( self, O_RDONLY, 0 ); m_gridstart->setId("gridstart"); // default for stdin #if 1 // original m_stdin = new StatFile( "/dev/null", O_RDONLY, 0 ); #else m_stdin = new StatHandle( STDIN_FILENO ); #endif m_stdin->setId("stdin"); // default for stdout #if 1 pattern( tempname, tempsize, tempdir, "/", "gs.out.XXXXXX" ); m_stdout = new StatTemporary( tempname ); #else m_stdout = new StatFile( "/dev/null", O_WRONLY | O_CREAT, 1 ); #endif m_stdout->setId("stdout"); // default for stderr #if 1 pattern( tempname, tempsize, tempdir, "/", "gs.err.XXXXXX" ); m_stderr = new StatTemporary( tempname ); #else m_stderr = new StatHandle( STDERR_FILENO ); #endif m_stderr->setId("stderr"); // default for stdlog m_logfile = new StatHandle( STDOUT_FILENO ); m_logfile->setId("logfile"); #if 0 // FIXME: Make this config-driven !!! // default for application-level feedback-channel pattern( tempname, tempsize, tempdir, "/", "gs.app.XXXXXX" ); m_channel = new StatFifo( tempname, "GRIDSTART_CHANNEL" ); m_channel->setId("channel"); #endif // which workdir if ( getcwd( tempname, tempsize ) != 0 ) m_workdir = tempname; // clean-up delete[] tempname; tempname = 0; // where do I run -- guess the primary interface IPv4 dotted quad PrimaryInterface::instance().whoami( m_ipv4, sizeof(m_ipv4) ); } void AppInfo::free_job( JobInfo* job ) { if ( job != 0 ) delete job; job = 0; } void AppInfo::free_statinfo( StatFileMap::value_type& sim ) { if ( sim.second != 0 ) delete sim.second; sim.second = 0; } AppInfo::~AppInfo() // purpose: destructor { #if 0 // can we do this?! if ( ! 
m_isPrinted ) print(); #endif if ( m_usage ) delete m_usage; m_usage = 0; // standard stat info records if ( m_stdin ) delete m_stdin; m_stdin = 0; if ( m_stdout ) delete m_stdout; m_stdout = 0; if ( m_stderr ) delete m_stderr; m_stderr = 0; if ( m_logfile ) delete m_logfile; m_logfile = 0; if ( m_gridstart ) delete m_gridstart; m_gridstart = 0; if ( m_channel ) delete m_channel; m_channel = 0; // jobs for_each( m_setup.begin(), m_setup.end(), AppInfo::free_job ); m_setup.clear(); for_each( m_prejob.begin(), m_prejob.end(), AppInfo::free_job ); m_prejob.clear(); for_each( m_postjob.begin(), m_postjob.end(), AppInfo::free_job ); m_postjob.clear(); for_each( m_cleanup.begin(), m_cleanup.end(), AppInfo::free_job ); m_cleanup.clear(); if ( m_application ) delete m_application; m_application = 0; if ( m_stagein ) delete m_stagein; m_stagein = 0; if ( m_stageout ) delete m_stageout; m_stageout = 0; // non-standard statinfo records for_each( m_input_info.begin(), m_input_info.end(), AppInfo::free_statinfo ); m_input_info.clear(); m_input_sfn_tfn.clear(); m_input_lfn_sfn.clear(); for_each( m_output_info.begin(), m_output_info.end(), AppInfo::free_statinfo ); m_output_info.clear(); m_output_sfn_tfn.clear(); m_output_lfn_sfn.clear(); } void AppInfo::setChannel( StatFifo* handle ) { // sanity check if ( handle == 0 ) throw null_pointer(); // renew if ( m_channel ) delete m_channel; m_channel = handle; m_channel->setId("channel"); } void AppInfo::setStdin( StatInfo* handle ) { // sanity check if ( handle == 0 ) throw null_pointer(); // delete old and reset new if ( m_stdin ) delete m_stdin; m_stdin = handle; m_stdin->setId("stdin"); } void AppInfo::setStdout( StatInfo* handle ) { // sanity check if ( handle == 0 ) throw null_pointer(); // delete old and reset new if ( m_stdout ) delete m_stdout; m_stdout = handle; m_stdout->setId("stdout"); } void AppInfo::setStderr( StatInfo* handle ) { // sanity check if ( handle == 0 ) throw null_pointer(); // delete old and reset new if ( m_stderr ) delete m_stderr; m_stderr = handle; m_stderr->setId("stderr"); } size_t AppInfo::createInputInfo() // purpose: update the internal input file statinfo map // returns: number of updates processed successfully. { static std::string c_input("input"); size_t result(0); // delete old info if ( m_input_info.size() > 0 ) { for_each( m_input_info.begin(), m_input_info.end(), AppInfo::free_statinfo ); m_input_info.clear(); } // create new mappings for ( FilenameMap::iterator i(m_input_lfn_sfn.begin()); i != m_input_lfn_sfn.end(); ++i ) { StatFile* file = new StatFile( (*i).second.first, O_RDONLY, false ); if ( file ) { file->setId(c_input); file->setLFN(i->first); if ( (*i).second.second ) file->md5sum(); m_input_info[i->first] = file; ++result; } } return result; } size_t AppInfo::createOutputInfo() // purpose: update the internal output file statinfo map // returns: number of updates processed successfully. 
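// note: mirrors createInputInfo() above. Both expect the LFN/SFN maps to
//       be filled first, e.g. (illustrative values only):
//         app.addOutputSFN( "f.c", "/work/f.c", true );
//         app.addOutputTFN( "/work/f.c", "gsiftp://example.org/store/f.c" );
//         app.createOutputInfo(); // stat()s /work/f.c and md5-sums it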
{
  static std::string c_output("output");
  size_t result(0);

  // delete old info
  if ( m_output_info.size() > 0 ) {
    for_each( m_output_info.begin(), m_output_info.end(), AppInfo::free_statinfo );
    m_output_info.clear();
  }

  // create new mappings
  for ( FilenameMap::iterator i(m_output_lfn_sfn.begin()); i != m_output_lfn_sfn.end(); ++i ) {
    StatFile* file = new StatFile( (*i).second.first, O_RDONLY, false );
    if ( file ) {
      file->setId(c_output);
      file->setLFN(i->first);
      if ( (*i).second.second ) file->md5sum();
      m_output_info[i->first] = file;
      ++result;
    }
  }
  return result;
}

ssize_t AppInfo::print()
// purpose: output the collective information about the run onto the (stdout) fd
// returns: the number of characters actually written (as of write() call).
// mutable: will update the self resource usage record before print.
// mutable: will update the isPrinted predicate after print.
{
  int logfd = -1;
  if ( m_logfile ) {
    if ( StatHandle* sh = dynamic_cast<StatHandle*>(m_logfile) ) {
      // downcast to a handle is ok, we can use the handle
      logfd = dup( sh->getDescriptor() );
    } else if ( StatFile* sf = dynamic_cast<StatFile*>(m_logfile) ) {
      // downcast to a file is ok, we can use the name
      logfd = open( sf->getFilename().c_str(), O_WRONLY | O_APPEND | O_CREAT, 0644 );
    } else {
      // This is a "this should not happen" case
      // no cast possible, run into error
      throw null_pointer();
    }
  }

  ssize_t result = -1;
  if ( logfd != -1 ) {
    // what about myself? Update stat info on log file
    m_logfile->update();

    // obtain resource usage for myself
#if 0
    m_usage = new UseInfo( "usage", RUSAGE_SELF );
    *m_usage += UseInfo( "usage", RUSAGE_CHILDREN );
#else
    // getrusage( RUSAGE_SELF, (struct rusage*) &m_usage );
    m_usage = new UseInfo( "usage", RUSAGE_SELF );
#endif

    // XML preamble (reconstructed; original attribute list was lost)
    std::string record( "<?xml version=\"1.0\"?>\r\n" );
    if ( m_xmlns.size() ) record += this->toXML( 0, m_xmlns.c_str() );
    else record += this->toXML( 0, 0 );

    result = writen( logfd, record.c_str(), record.size(), 3 );
    // FIXME: what about wsize != result

    // synchronize trick for Globus and gatekeepers...
    nfs_sync( logfd );

    // done
    m_isPrinted = 1;
    close(logfd);
  }

  return result;
}

inline bool isDir( const char* tmp )
// purpose: Check that the given dir exists and is writable for us
// paramtr: tmp (IN): designates a directory location
// returns: true, if tmp exists, is a dir, and writable
{
  struct stat st;
  if ( stat( tmp, &st ) == 0 && S_ISDIR(st.st_mode) ) {
    // exists and is a directory
    if ( (geteuid() != st.st_uid || (st.st_mode & S_IWUSR) == 0) &&
         (getegid() != st.st_gid || (st.st_mode & S_IWGRP) == 0) &&
         ((st.st_mode & S_IWOTH) == 0) ) {
      // not writable to us
      return false;
    } else {
      // yes, writable dir for us
      return true;
    }
  } else {
    // location does not exist, or is not a directory
    return false;
  }
}

const char* AppInfo::getTempDir( void )
// purpose: determine a suitable directory for temporary files.
// warning: remote schedulers may choose to set a different TMP..
// returns: a string with a temporary directory, may still be NULL.
// warning: result is _not_ allocated, so don't free the string
{
  const char* tempdir = getenv("GRIDSTART_TMP");
  if ( tempdir != NULL && isDir(tempdir) ) return tempdir;

  tempdir = getenv("TMP");
  if ( tempdir != NULL && isDir(tempdir) ) return tempdir;

  tempdir = getenv("TEMP");
  if ( tempdir != NULL && isDir(tempdir) ) return tempdir;

  tempdir = getenv("TMPDIR");
  if ( tempdir != NULL && isDir(tempdir) ) return tempdir;

#ifdef P_tmpdir /* in stdio.h */
  tempdir = P_tmpdir;
  if ( tempdir != NULL && isDir(tempdir) ) return tempdir;
#endif

  tempdir = "/tmp";
  if ( isDir(tempdir) ) return tempdir;

  tempdir = "/var/tmp";
  if ( isDir(tempdir) ) return tempdir;

  /* whatever we have by now is it - may still be NULL */
  return tempdir;
}

ssize_t AppInfo::writen( int fd, const char* buffer, ssize_t n, unsigned restart )
// purpose: write all n bytes in buffer, if possible at all
// paramtr: fd (IN): filedescriptor open for writing
//          buffer (IN): bytes to write (must be at least n byte long)
//          n (IN): number of bytes to write
//          restart (IN): if true, try to restart write at max that often
// returns: n, if everything was written, or
//          [0..n-1], if some bytes were written, but then failed,
//          < 0, if some error occurred.
{
  ssize_t start = 0;
  while ( start < n ) {
    ssize_t size = write( fd, buffer+start, n-start );
    if ( size < 0 ) {
      if ( restart && errno == EINTR ) { restart--; continue; }
      return size;
    } else {
      start += size;
    }
  }
  return n;
}

size_t AppInfo::run( int& status )
// purpose: run all runnable jobs. This constitutes a logical chaining of
//          pre && main && post ; cleanup
// paramtr: status (IO): first "failed" return code. Must be 0 to come in
// returns: the number of jobs run
{
  size_t result(0);

  // run setup - this loop may be by-passed in the absence of setup jobs.
  // In the presence of setup jobs, the loop will always be executed
  for ( JobInfoList::iterator i=m_setup.begin(); i!=m_setup.end(); ++i ) {
    (*i)->system(this);
    result++;
  }

  // run prejobs - this loop may be by-passed in the absence of prejobs
  if ( status == 0 ) {
    for ( JobInfoList::iterator i=m_prejob.begin(); i!=m_prejob.end(); ++i ) {
      if ( (status = (*i)->system(this)) != 0 ) break;
      result++;
    }
  }

  // run main job - unless there were previous errors
  if ( status == 0 ) {
    status = m_application->system(this);
    result++;
  }

  // run postjobs - this loop may be by-passed in the presence of errors,
  // or absence of postjobs
  if ( status == 0 ) {
    for ( JobInfoList::iterator i=m_postjob.begin(); i!=m_postjob.end(); ++i ) {
      if ( (status = (*i)->system(this)) != 0 ) break;
      result++;
    }
  }

  // run cleanup - this loop may be by-passed in the absence of cleanups
  // In the presence of cleanup jobs, the loop will always be executed
  for ( JobInfoList::iterator i=m_cleanup.begin(); i!=m_cleanup.end(); ++i ) {
    (*i)->system(this);
    result++;
  }

  return result;
}

int AppInfo::runStageIn()
// purpose: run the stage-in job, if one exists.
// returns: Return code from running the stage-in job.
// warning: No existing job will also result in status of 0.
// require: m_stagein valid, m_input_sfn_tfn defined
{
  int result(0);
  if ( m_stagein ) {
    m_stagein->createTempfile( "stage-in", m_input_lfn_sfn, m_input_sfn_tfn );
    result = m_stagein->system(this);
    // note: filehandle will be kept open...
  }
  return result;
}

int AppInfo::runStageOut()
// purpose: run the stage-out job, if one exists.
// returns: Return code from running the stage-out job.
// warning: No existing job will also result in status of 0.
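// usage: a gridstart driver would chain the pieces roughly like this
//        (illustrative sketch, not code from this file):
//          AppInfo app( argv[0] );
//          app.setMainJob( job );   // ownership of job passes to app
//          int status = 0;
//          app.runStageIn();        // optional 2nd-level stage-in
//          app.createInputInfo();   // stat() the declared input files
//          app.run( status );       // setup && pre && main && post ; cleanup
//          app.createOutputInfo();  // stat() the declared output files
//          app.runStageOut();       // optional 2nd-level stage-out
//          app.print();             // emit the invocation record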
{
  int result(0);
  if ( m_stageout ) {
    m_stageout->createTempfile( "stage-out", m_output_lfn_sfn, m_output_sfn_tfn );
    result = m_stageout->system(this);
    // note: filehandle will be kept open...
  }
  return result;
}
pegasus-wms_4.0.1+dfsg/src/tools/k.2/doc/0000755000175000017500000000000011757531667017132 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/tools/k.2/doc/kickstart.ps0000644000175000017500000414011011757531137021466 0ustar ryngerynge%!PS-Adobe-2.0
%%Creator: dvips(k) 5.86 Copyright 1999 Radical Eye Software
%%Title: kickstart.dvi
%%Pages: 18
[kickstart.ps: 18-page dvips-generated PostScript manual "Kickstart V2"
by Jens-S. Vöckler (last revised 2004-06-07; typeset 2005-11-09).
Contents: 1 Overview; 2 Processing in a Grid Environment;
3 Configuration File (identifiers, strings, format); 4 Commands
(include, debug, xmlns; site, transformation, derivation, input and
output descriptions; set, chdir, feedback, stdin, stdout and stderr;
setup, pre, main, post and cleanup jobs; optional 2nd-level stage-in
and stage-out); 5 Results (the provenance tracking record; the feedback
channel with exponentially backed-off heartbeat and gridshell-aware
applications). The overview defines kickstart as a grid shell that runs
at the remote execution host and, like a shell, starts and watches over
an application; figures 1 and 2 compare remote job execution without
and with a grid shell. The remaining PostScript (bitmap fonts,
procsets, page data, embedded tgif EPS figures) is print residue.]
bd /AR { arcto 4 {pop} repeat } bd /CURP { currentpoint } bd /FLAT { flattenpath strokepath clip newpath } bd /TGSM { tgiforigctm setmatrix } def /TGRM { savematrix setmatrix } def end %%EndProlog %%Page: 1 1 %%PageBoundingBox: 26 636 447 776 tgifdict begin /tgifsavedpage save def 1 SM 1 W 0 SG 72 0 MU 72 11 MU TR 72 128 DI 100.000 MU 100 DI DU NE SC GS /tgiforigctm matrix currentmatrix def % BOX 0 SG GS 10 SM GS NP 275 50 M 425 50 L 425 200 L 275 200 L CP S GR GR % TEXT NP 0 SG GS 1 W 300 45 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (rem. scheduler) SH GR GR % OVAL 0 SG NP 495 125 5 5 TGEL F GS GS NP 495 125 5 5 TGEL S GR GR % BOX 0 SG GS 10 SM GS NP 495 50 M 645 50 L 645 200 L 495 200 L CP S GR GR % TEXT NP 0 SG GS 1 W 530 45 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (kickstart) SH GR GR % OVAL 0 SG NP 495 75 5 5 TGEL F GS GS NP 495 75 5 5 TGEL S GR GR % OVAL 0 SG NP 495 175 5 5 TGEL F GS GS NP 495 175 5 5 TGEL S GR GR % OVAL 0 SG NP 425 125 5 5 TGEL F GS GS NP 425 125 5 5 TGEL S GR GR % OVAL 0 SG NP 425 75 5 5 TGEL F GS GS NP 425 75 5 5 TGEL S GR GR % OVAL 0 SG NP 425 175 5 5 TGEL F GS GS NP 425 175 5 5 TGEL S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 425 75 M 0 35 atan DU cos 8.000 MU 460 exch SU exch sin 8.000 MU 75 exch SU L TGSM 1 W S GR GS TGSM NP 460 75 8.000 3.000 35 0 TGAT 1 SG CP F 0 SG NP 460 75 8.000 3.000 35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 495 125 M 0 -35 atan DU cos 8.000 MU 460 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S GR GS TGSM NP 460 125 8.000 3.000 -35 0 TGAT 1 SG CP F 0 SG NP 460 125 8.000 3.000 -35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 495 175 M 0 -35 atan DU cos 8.000 MU 460 exch SU exch sin 8.000 MU 175 exch SU L TGSM 1 W S GR GS TGSM NP 460 175 8.000 3.000 -35 0 TGAT 1 SG CP F 0 SG NP 460 175 8.000 3.000 -35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 455 75 M 495 75 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 425 125 M 465 125 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 425 175 M 465 175 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 505 80 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([0] unused) SH GR GR % TEXT NP 0 SG GS 1 W 505 130 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([1] IV rec.) 
SH GR GR % TEXT NP 0 SG GS 1 W 505 180 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([2] gs err) SH GR GR % OVAL 0 SG GS GS NP 337 91 37 16 TGEL S GR GR % ARC 0 SG GS GS NP 337 158 37 16 -180 -360 TGAR S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 300 91 M 300 158 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 375 91 M 375 158 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 337 138 M GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (GASS) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (GASS) SH GR 0 15 RM GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (cache) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (cache) SH GR GR % POLY/OPEN-SPLINE 0 SG GS NP 425 75 M 365 135 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 425 125 M 365 135 L 425 175 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 222 129 M GS GS 0 0 AD GR 2 DI NE 0 RM GR GR % OVAL 0 SG NP 715 125 5 5 TGEL F GS GS NP 715 125 5 5 TGEL S GR GR % BOX 0 SG GS 10 SM GS NP 715 50 M 790 50 L 790 200 L 715 200 L CP S GR GR % TEXT NP 0 SG GS 1 W 715 45 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (application) SH GR GR % OVAL 0 SG NP 715 75 5 5 TGEL F GS GS NP 715 75 5 5 TGEL S GR GR % OVAL 0 SG NP 715 175 5 5 TGEL F GS GS NP 715 175 5 5 TGEL S GR GR % TEXT NP 0 SG GS 1 W 725 80 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([0] stdin) SH GR GR % TEXT NP 0 SG GS 1 W 725 130 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([1] stdout) SH GR GR % TEXT NP 0 SG GS 1 W 725 180 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([2] stderr) SH GR GR % OVAL 0 SG NP 645 125 5 5 TGEL F GS GS NP 645 125 5 5 TGEL S GR GR % OVAL 0 SG NP 645 75 5 5 TGEL F GS GS NP 645 75 5 5 TGEL S GR GR % OVAL 0 SG NP 645 175 5 5 TGEL F GS GS NP 645 175 5 5 TGEL S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 645 75 M 0 35 atan DU cos 8.000 MU 680 exch SU exch sin 8.000 MU 75 exch SU L TGSM 1 W S GR GS TGSM NP 680 75 8.000 3.000 35 0 TGAT 1 SG CP F 0 SG NP 680 75 8.000 3.000 35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 715 125 M 0 -35 atan DU cos 8.000 MU 680 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S GR GS TGSM NP 680 125 8.000 3.000 -35 0 TGAT 1 SG CP F 0 SG NP 680 125 8.000 3.000 -35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 715 175 M 0 -35 atan DU cos 8.000 MU 680 exch SU exch sin 8.000 MU 175 exch SU L TGSM 1 W S GR GS TGSM NP 680 175 8.000 3.000 -35 0 TGAT 1 SG CP F 0 SG NP 680 175 8.000 3.000 -35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 675 75 M 715 75 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 645 125 M 685 125 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 645 175 M 685 175 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 635 80 M GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (-i fn) TGSW AD GR NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (-i fn) SH GR GR % TEXT NP 0 SG GS 1 W 635 130 M GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (-o fn) TGSW AD GR NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (-o fn) SH GR GR % TEXT NP 0 SG GS 1 W 635 180 M GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (-e fn) TGSW AD GR NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (-e fn) SH GR GR % POLY/OPEN-SPLINE 0 SG GS [4 4] 0 SD NP 595 125 M 0 -30 atan DU cos 8.000 MU 565 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S [] 0 SD GR GS TGSM NP 565 125 8.000 3.000 -30 0 TGAT 1 SG CP F 0 SG NP 565 125 8.000 3.000 -30 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS [4 4] 0 SD NP 600 175 M 580 175 L 580 125 L 0 -15 atan DU cos 8.000 MU 565 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S [] 0 SD GR GS TGSM NP 565 125 8.000 3.000 -15 0 TGAT 1 SG CP F 0 SG NP 565 125 8.000 3.000 -15 0 TGAT CP F GR % TEXT NP 0 SG GS 1 W 
360 240 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([0] /dev/null) SH GR 0 15 RM GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([1] /dev/null) SH GR 0 15 RM GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([2] /dev/null) SH GR GR % TEXT NP 0 SG GS 1 W 360 225 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (defaults) SH GR GR % TEXT NP 0 SG GS 1 W 560 240 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([0] /dev/null) SH GR 0 15 RM GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([1] /tmp/mktmp\(\)) SH GR 0 15 RM GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([2] /tmp/mktmp\(\)) SH GR GR % TEXT NP 0 SG GS 1 W 560 225 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (defaults) SH GR GR % BOX 0 SG GS 10 SM GS NP 50 50 M 160 50 L 160 200 L 50 200 L CP S GR GR % TEXT NP 0 SG GS 1 W 65 45 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (submit host) SH GR GR % OVAL 0 SG GS GS NP 102 95 37 15 TGEL S GR GR % ARC 0 SG GS GS NP 102 156 37 15 -180 -360 TGAR S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 65 93 M 65 156 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 140 93 M 140 156 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 102 137 M GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (local) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (local) SH GR 0 15 RM GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (capture) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (capture) SH GR GR % POLYGON/CLOSED-SPLINE 0 SG NP 220 112 M 220 137 L 257 137 L 257 150 L 300 125 L 257 100 L 257 112 L CP GS 1 SG EF GR GS S GR % TEXT NP 0 SG GS 1 W 242 129 M GS GS 0 0 AD GR 2 DI NE 0 RM GR GR % POLYGON/CLOSED-SPLINE 0 SG NP 220 112 M 220 137 L 183 137 L 183 150 L 140 125 L 183 100 L 183 112 L CP GS 1 SG EF GR GS S GR % TEXT NP 0 SG NP 172 115 M 268 115 L 268 133 L 172 133 L CP 1 SG F 0 SG GS 1 W 220 130 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (Globus GASS) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (Globus GASS) SH GR GR GR tgifsavedpage restore end showpage %%Trailer %MatchingCreationDate: Wed May 21 15:53:39 2003 %%DocumentFonts: Helvetica %%+ Helvetica-Bold %%EOF %%EndDocument @endspecial 1314 3809 a(Figure)g(2:)g(W)l(ith)f(the)h(presence)i(of)d (kickstart.)37 4033 y(In)30 b(the)g(absence)i(of)e(a)f(grid)i(shell,)h (\002gure)e(1,)h(the)f(remote)h(scheduler)h(starts)f(the)f(application) j(on)d(an)o(y)g(w)o(ork)o(er)-5 b(.)49 b(The)29 b(stdio)37 4146 y(streams)d(of)f(the)h(application)i(usually)e(def)o(ault)h(to)e (either)h Fh(/dev/null)d Fo(or)i(some)g(other)h(scheduler)n (-speci\002c)j(\002le.)k(Through)37 4259 y(the)24 b(presence)h(of)e (Glob)n(us,)h(stdio)g(can)f(be)g(both,)h(streamed)g(or)f(staged,)h(via) f(GASS.)e(Furthermore,)j(the)f(application)j(itself,)e(if)37 4372 y(independently)31 b(b)n(undled)e(\(i.e.)38 b(statically)29 b(link)o(ed\),)g(and)e(appropriate)i(for)e(the)g(architecture,)j(can)d (be)g(staged)h(using)f(GASS)37 4485 y(from)d(the)g(submit)g(site.)37 4640 y(In)c(the)g(presence)h(of)f(a)f(grid)h(shell,)h(\002gure)e(2,)h (speci\002cally)i(kickstart,)g(the)e(GASS-handled)g(stdio)g(streams)g (are)g(used)g(for)g(kick-)37 4752 y(start')-5 b(s)31 b(o)n(wn)d(purposes.)46 b(Since)29 b(kickstart)i(is)e(used)g(in)g (conjunction)j(with)c(the)h(GriPhyN)f(V)-5 b(irtual)29 b(Data)f(System)h(\(GVDS\),)37 4865 y(an)o(y)c(application)k(stdio)c (streams)h(are)f(kno)n(wn)h(and)f(re)o(gistered)i(with)e(the)g(GVDS.)d (If)j(a)f(stream)i(is)f(important)h(to)f(an)g(applica-)37 4978 y(tion,)e(e.g.)28 b(the)23 b(application)i(produces)g(results)f (on)e Fg(stdout)p 
the stream can be connected to a file on the remote system. The GVDS will generate the right configuration to have a grid shell locally connect the stream to a file on the remote machine.

Although kickstart provides the capture of stdio of a remote application, the GVDS-tracked stdio will become part of the GVDS staging and replica tracking mechanism. The actual staging and tracking of the capture files is not part of this document.

    stream   transport   use
    ------   ---------   --------------------------------------------
    stdin    stagable    Initial configuration.
    stdout   stagable    Provenance tracking record (PTR).
    stderr   streaming   Heart beat, application feedback channel,
                         and error messages.

Table 1: The way kickstart uses GASS'ed stdio streams.

In the presence of kickstart, it expects that its stdin receives the configuration file. The configuration is explained in detail in sections 3 and 4. The stdout stream receives the provenance tracking record, which records information about the execution and the host machine's state. The stderr stream is used for application-specific feedback of gridshell-aware software.

Table 1 summarizes the usage of the GASS streams. Note that stdin and stdout may be staged, which is more frugal with system resources on the gatekeeper. However, due to the interactive nature of application feedback, stderr should be used in streaming mode. Also note that Globus uses best-effort streaming with GASS: due to restrictions of the remote scheduling systems, a streamed file is not guaranteed to stream during the lifetime of the job.

Kickstart's advantage over running a plain application without the help of a grid shell is the number of value-added services it provides. Details are given in the configuration description in section 4:

  - Exponentially backed-off heart beats (see the sketch after this list).
  - Multiple pre- and post-jobs chained to the main application.
  - Independent cleanup jobs.
  - Site-specific knowledge and configurability through includes.
  - Optional 2nd-level staging capability through user call-outs.
  - Optional information and MD5 about arbitrary remote files.
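The first item is easiest to picture as a schedule of growing gaps between messages. The following is a minimal sketch in Python, assuming a doubling interval with a cap; the function name and all parameter values are illustrative only, not kickstart's actual settings (those belong to section 5.2.1):

    import itertools

    def heartbeat_gaps(initial=1.0, factor=2.0, ceiling=300.0):
        """Yield the successive waits (in seconds) between heart beats.

        All three parameters are hypothetical illustrations of an
        exponential back-off, not values taken from kickstart.
        """
        wait = initial
        while True:
            yield wait
            wait = min(wait * factor, ceiling)  # double, but never exceed the cap

    # The first few gaps: 1, 2, 4, 8, 16 seconds.
    print(list(itertools.islice(heartbeat_gaps(), 5)))

Backing off in this way keeps the streamed stderr channel cheap for long-running jobs while still signalling liveness frequently early on.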
2 Processing in a Grid Environment

TBD

3 Configuration File

The configuration file accepts a variety of commands, which are usually parametrized further. Some commands may appear multiple times, and some may only appear once, with each repetition discarding and overwriting previous settings. The arguments to the various commands are either identifiers or strings.

3.1 Identifiers

Identifiers are words that start with a letter or an underscore. Further characters may include numerical digits. Examples of valid identifiers are:

    qwer    task1    false    __system

and examples of invalid identifiers are:

    12    1asdf    to-me    some:word

In general, identifiers permissible in the C language are also permissible in the configuration file. Furthermore, all reserved words may also double as identifiers. Reserved words are the command-introducing identifiers:

    pre      main            post      cleanup     site
    tr       transformation  dv        derivation
    chdir    stdin           stdout    stderr      feedback
    input    output          stagein   stageout    xmlns
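Since the rule matches C identifiers, it can be checked with a single regular expression. A minimal sketch in Python; the expression is my rendering of the rule above, not code taken from the kickstart sources:

    import re

    # A letter or underscore first, then letters, digits, or underscores --
    # the same shape as a C identifier.
    IDENTIFIER = re.compile(r'^[A-Za-z_][A-Za-z0-9_]*$')

    def is_identifier(word):
        return IDENTIFIER.match(word) is not None

    # The examples from the text:
    assert all(is_identifier(w) for w in ('qwer', 'task1', 'false', '__system'))
    assert not any(is_identifier(w) for w in ('12', '1asdf', 'to-me', 'some:word'))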
3.2 Strings

String handling is, to a limited extent, similar to the Bourne shell and the C programming language. There are two kinds of strings: verbatim strings in single quotes (apostrophes) and interpreted strings in double quotes (regular quotes).

Single-quote strings pass all characters verbatim. The escape character is the backslash, which escapes (read: makes it part of the string) any character after it, including the single quote and itself.

    Note: The Unix shell does not allow the escape character inside single-quoted strings, nor the apostrophe.

Double-quote strings are subject to variable interpolation in addition to an extended backslashing escape mechanism. If variables in the form $variable or ${variable} are encountered within the double-quoted string, the variable is replaced with the current Unix environment value of the same key as the variable name. If there is no Unix environment value, the $variable or ${variable} is not replaced, but rather keeps the unresolved variable in place.

    Note: The Unix shell replaces variables which do not exist with an empty string. Furthermore, any advanced shell substitutions, e.g. ${var:Xword}, are not available with kickstart.

Inside double-quoted strings, the escaping mechanism features additional special-character inclusions for horizontal tab (\t), vertical tab (\v), newline (\n), carriage return (\r), bell (\a), escape (\e), and backspace (\b).
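The lookup rule differs from the shell only in what happens to unknown variables, which a few lines of Python make concrete. This is a sketch of the rule as stated, assuming word characters as the variable-name alphabet and deliberately ignoring the backslash escapes:

    import os
    import re

    VARIABLE = re.compile(r'\$(\w+)|\$\{(\w+)\}')

    def interpolate(s, env=os.environ):
        """Replace $variable and ${variable} with the environment value.

        Unlike the Bourne shell, an unset variable is kept in place
        verbatim instead of being replaced by the empty string.
        """
        def lookup(match):
            name = match.group(1) or match.group(2)
            return env.get(name, match.group(0))   # keep '$name' when unset
        return VARIABLE.sub(lookup, s)

    print(interpolate('hi $LOGNAME'))      # e.g. 'hi voeckler' when LOGNAME is set
    print(interpolate('hi $NO_SUCH_VAR'))  # unchanged: 'hi $NO_SUCH_VAR'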
The variable interpolation for job argument strings differs in two important ways from other, regular, strings:

1. Job strings must split arguments for the invocation on unprotected whitespace. Unprotected whitespace is neither escaped nor quoted.

2. Job strings must remove one level of quote characters in arguments that quote their whitespace characters.

    job string input                    argv result
    ---------------------------------   -------------------------
    '/bin/echo hi $LOGNAME'             »hi« »voeckler«
    '/bin/echo \"hi $LOGNAME\"'         »"hi« »voeckler"«
    '/bin/echo \\"hi $LOGNAME\\"'       »\hi voeckler\«
    '/bin/echo \\\"hi $LOGNAME\\\"'     »\"hi« »voeckler\"«
    "/bin/echo 'hi $LOGNAME'"           »hi $LOGNAME«
    "/bin/echo \'hi $LOGNAME\'"         »'hi« »voeckler'«
    "/bin/echo \\'hi $LOGNAME\\'"       »\hi $LOGNAME\«
    "/bin/echo \\\'hi $LOGNAME\\\'"     »\'hi« »voeckler\'«

Table 2: Conversion of job strings into argument vector elements; each »...« pair delimits one resulting element.

[Figure: state table (statetable.eps); embedded bitmap image, not reproduced here.]
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555525555555555555555500555502552f4855554b2f5555555555 550053144b5500550e00000e55550055550055550255555555555555344b555555555555 55555500555502552f4855554b2f555555555555005555505553024d480e555548535502 5555555555555555555525555555555555555500555502552f4855554b2f555555555555 0053144b5500550e00000e55550055550055550255555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545411545454005454330200 175154545454540004545454040054545411335454545400545411545454005454544900 4b5555555555555555555555555555480034554d00254800555548552a2a554b02344800 000e2f555555555555555555555555554b34555555555555555555555555555555555555 5555555555554d0002344155532f48483453555555555548005534554800532a2a554b02 2f0025480055554855555555555555555555555555555555555555555555555555005555 55555555555555555555555555555555555555555555554102023455532f484834535555 555555480055345548002a2a554b02354d00100055554855555555555555555555555555 555555555555555555555555552555555555555555480055554855532f48483453555555 55554d00023441554800000e2f4800000e2f4d00504d1450555555555555354b55555555 555548005555485555532f484834535555555548005534554800532a2a554b022f002548 0055554855555555555555552555555555555555480055554855532f4848345355555555 55480055345548002a2a554b02344d0010005555485555555555555555344b5555555555 55555555555555480055554855532f4848345355555555554d00022f3455410202345555 55555555555555555555551655555555555555480055554855532f484834535555555555 480055345548002a2a554b02344d0025480055554855555555555555344b555555555555 5555480055554855532f4848345355555555554800000e2f555535555535555541020235 555555555555555555552555555555555555480055554855532f48483453555555555548 0055345548002a2a554b02344d0025480055554855555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b003354545454004654540054332e5154 3328545454545400174c544c170054544c33175454545400545400465454005454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 5555555555555555555555555555450e505555000000004b555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555450e50555500 0000004b5555555555555555555555555555555555555555555555555555555555555555 5555555555555555555555555525555555555555555555555555555555450e5055550000 00004b555555555555555555555555555555555555555555555555555555344b55555555 5555555555555555555555450e5055000000004b55555555555555555555555555555555 55555555555555555555555525555555555555555555555555555555450e505555000000 004b555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555450e505555000000004b5555555555555555555555 555555555555555555555525555555555555555555555555555555450e50555500000000 4b555555555555555555555555555555555555555555555555555555344b555555555555 55555555555555555555450e505555000000004b55555555555555555555555555555555 5555555555555555555525555555555555555555555555555555450e505555000000004b 
555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545400494654005402515454 5400545454545400432e5433430054543351364c54545400545400494654005454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 55555555555555555555555555555550454b555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 5555555555555555555555555555555555555555555555555555555555555550454b5555 555555555555555555555555555555555555555555555555555555555555555555555555 55555555555555555555555555255555555555555555555555555555555550454b555555 555555555555555555555555555555555555555555555555555555555555344b55555555 55555555555555555555555550454b555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555550454b55555555 5555555555555555555555555555555555555555555555555555555555344b5555555555 5555555555555555555555555555555550454b5555555555555555555555555555555555 5555555555555555555555255555555555555555555555555555555550454b5555555555 55555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555550454b55555555555555555555555555555555555555555555 55555555555555555555255555555555555555555555555555555550454b555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545400542851005400515454 540054545454540054115411540054540200002e54545400545400542851005454544900 140000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000003555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555554b0033545454540054491700542e284e4e 330454545454540054333333540054463654541154545400545400544917005454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b35555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555354b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 5555555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b003354545454005454170054512e0202 0428545454545400544e004e540054284e54543346545400545400545417005454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 5555555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 544954545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555552a00000016555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 55555555555555555555555555555555555314252f554155555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555314 252f554155555555555555555555555555555555555555555555555555354b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555531425 2f554155555555555555555555555555555555555555555555555555354b555555555555 5555555555555555555555555555552a0000003441555555555555555555555555555555 55555555555555555555255555555555555555555555555555555555555555555314252f 555541555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545449000000001754545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555500555500555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555354b55555555 555555555555555555555555555555555514505545550055555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555551450 5545550055555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555145055 45550055555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555550055554b00555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555514505545 555500555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b55555555555555555555555555555555555555555555005548002a0045555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 55555555555555555555555555555555552f0e4b5555002a004555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555552f0e 4b5555002a004555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 5555555555555555555555255555555555555555555555555555555555555555552f0e4b 5555002a004555555555555555555555555555555555555555555555344b555555555555 5555555555555555555555555555555500554855002a0044555555555555555555555555 55555555555555555555255555555555555555555555555555555555555555552f0e4b55 5555002a0045555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555500000000554d02555555555555 555555555555555555555555555555554b35555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 5555555555555555555555555555555555554b25345500554d0255555555555555555555 55555555555555555555555525555555555555555555555555555555555555555555554b 25355500554d0255555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555554b25 345500554d0255555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555550000005500554d02555555555555555555555555 5555555555555555555525555555555555555555555555555555555555555555554b2535 555500554d02555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555500555500555502555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555000055555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555550000555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555000055555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555548535502550055550255555555555555555555 555555555555555555555555255555555555555555555555555555555555555555554853 
5502550055550255555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555550000555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555485255 02550055550255555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555550055555500555502555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555548535502 555500555502555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b55555555555555555555555555555555555555555548003455140e0245555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555541020234551303024555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555554102 023455140e024455555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555400202 3455140e024455555555555555555555555555555555555555555555344b555555555555 5555555555555555555555555555554800345555140e0245555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555541020234 5555140e0244555555555555555555555555555555555555555555003555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 5555555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555354b555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00263333333333333333333333333333 333333333333333333333333333333333333333333333333333333333333333333333200 313434343434343434343434343434343434343434343434343434343434343434343434 343434343434343434313434343434342a14343434343434343434343434313434343434 343434343434343434343434343434343434343434343434343434313434343134343434 343434343434343434343434343434343434343134343434343434343434343434003434 343434343434343431343434343434343434343434343434343434343434343434343434 343434343434343434343434343434343434343434343434343434343434343434343434 313434343434343434343434340833343434343434343434343434343434343434343434 343434343434343434343434343434343434343134343434343434343434142c34343434 343434343434313434343434343434343434343434343434343434343434343434343434 343434343434343434343434033434343434343434343434343434343134343434343434 3434343434343434343434343434343434343434343434343434343434142a3434343434 343434343434343434343434343434343434343434343434343434343434343434343434 343434343434343434343403343433343434343434343434343434343434343434343434 34343434343434343434343434343434343434313434343434343434142a343434343434 343434343434343134343434343434343434343434343434343434343434343434343434 343434343434343434340334343434343434343134343434343434343434313434343434 343434343434343434343434343434343434343434343434343434003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b002d4949494949494949494949494949 494949494949494949494949494949494949494949494949494949494949494949494200 414848484748484848484848484848484848484848484848484848484848484848484848 484848484848484848484848484848483527484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484848484848004848 484848484848484848484848484848484848484848484848484848484848484748484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848481048484848484848484847484848484848484748484748 484848484848484848484848484848484848484848484848484848484848273548484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848104848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484827354848484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484810484848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848482a35484848484848 484848484848484848484848484848484848484848484848484848484848484748484848 484848484848484848481048484848484848484848484848484848484848484848484848 484848484848484848484848484848474848484848484848484848003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 5252525252525252525252525252525252525252525252525252525252522f4852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252155252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252521552525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4852525252525252525252525252525227000000313100002c4f523100002c4f52524827 525252525252525252525252525252524831525252525252525252525252525252525252 52525252525200105252101652523102022f525252525200024f52522c004b5252454552 160031001052521015525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252520010525213155252310202315252 52525200024f52522c004b52524544521600310010525210155252525252525252525252 525252525252525252525252521552525252525252001052521016523102023152525252 525200024f52522c004b5245445252160000105252101652525252525252314852525252 525200105252101652522f020231525252525200024f52522c004b525245445216003100 1052521015525252525252521e5452525252525200105252101652310202315252525252 5200024f52522c004b524545525216000010525210155252525252525231485252525252 5200115252131552522f02023152525252525200024f52522c004b524445525215000010 525210165252525252525216525252525252520010525213155231020231525252525252 00024f52522c004b52454552521500310010525210155252525252523148525252525252 00105252101652523102023152525252525200024f52522c004b52444552521600001052 521015525252525252541e52525252525252001052521016522f02023152525252525200 024f52522c004b5245445252160031001052521015525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 
485252525252525252525252525252525200525248520052481052520052481052524d00 525252525252525252525252525252524831525252525252525252525252525252525252 5252525252525200445202525231315252313152525252520045524f1000525252312752 520052520045520252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525200455202525231315252313152 525252520045524f10005252523127525200525200455202525252525252525252525252 525252525252525252525252521552525252525252520045520252313152523131525252 5252520045524f1000525231275252520052004552025252525252525252314852525252 52525200455202525231315252313152525252520045524f140052525231275252005252 004552025252525252525252165252525252525252004552025231315252313152525252 52520045524f100052523127525252005200445202525252525252525231485252525252 52520045520252523131525231315252525252520045524f100052523127525252005200 455202525252525252525216525252525252525200455202523131525231315252525252 520045524f10005252312752525200525200455202525252525252523148525252525252 520044520252523131525231315252525252520045524f10005252312752525200520045 520252525252525252521652525252525252520045520252313152523131525252525252 0045524f1000525231275252520052520045520252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525200524852520052412c52520052412c52525200 525252525252525252525252525252524831525252525252525252525252525252525252 52525252525252004b52005252024f52524f02525252525200355241410052524f410e4d 52005252004b520052525252525252525252525252525252525252525252525252005252 5252525252525252525252525252525252525252525252004b52005252024f52524f0252 5252525200355241410052524f41034d52005252004b5200525252525252525252525252 525252525252525252525252541e5252525252525252004b520052024f52524f02525252 525252003552414100524f41034d52520052004b52005252525252525252314852525252 525252004b52005252024f52524f02525252525200355241410052524f41034d52005252 004b52005252525252525252165252525252525252004b520052024f52524f0252525252 5252003552414100524f41034d52520052004b5200525252525252525231485252525252 5252004b52005252024f52524f02525252525252003552414100524f40034d5252005200 4b52005252525252525252165252525252525252004b520052024f52524f025252525252 52003552414100524f41034d5252005252004b5200525252525252523148525252525252 52004b52005252024f52524f02525252525252003552414100524f41034d52520052004b 52005252525252525252165252525252525252004b520052024f52524f02525252525252 003552414100524f41034d5252005252004b520052525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525200000052520010415252520010415252525200 525252525252525252525252525252524831525252525252525252525252525252525252 5252525252525200524b00525200525252520052525252520041483152005252414f3531 5200525200524b0052525252525252525252525252525252525252525252525252005252 
525252525252525252525252525252525252525252525200524b00525200525252520052 525252520041483152005252414f35315200525200524b00525252525252525252525252 5252525252525252525252525216525252525252525200524b0052005252525200525252 52525200414831520052414f35315252005200524b005252525252525252314852525252 52525200524b00525200525252520052525252520041483152005252414f353152005252 00524b00525252525252525216525252525252525200524b005200525252520052525252 525200414831520052414f35315252005200524b00525252525252525231485252525252 525200524b00525200525252520052525252525200414831520052414f35315252005200 524b00525252525252525216525252525252525200524b00520052525252005252525252 5200414831520052414f3531525200525200524b00525252525252523148525252525252 5200524b00525200525252520052525252525200414831520052414f3531525200520052 4b00525252525252525216525252525252525200524b0052005252525200525252525252 00414831520052414f3531525200525200524b0052525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 48525252525252525252525252525252520052524b52004f034d5252004f034d52525200 525252525252525252525252525252524831525252525252525252525252525252525252 525252525252520052520252522c455252482c5252525252004f10485200525203000003 520052520052520252525252525252525252525252525252525252525252525252005252 52525252525252525252525252525252525252525252520052520252522c455252482c52 52525252004f1048520052520e0000035200525200525202525252525252525252525252 5252525252525252525252525216525252525252525200525202522c445252482c525252 525252004f10485200520e00000352520052005252025252525252525252314852525252 5252520052520252522c455252482c5252525252004f1048520052520300000352005252 00525202525252525252525216525252525252525200525202522c455252482c52525252 5252004f1048520052030000035252005200525202525252525252525231485252525252 52520052520252522c455252482c525252525252004f1048520052030000035252005200 525202525252525252525216525252525252525200525202522c455252482c5252525252 52004f104852005203000003525200525200525202525252525252523148525252525252 520052520252522c455252482c525252525252004f104852005203000003525200520052 5202525252525252525216525252525252525200525202522c455252482c525252525252 004f10485200520e00000352520052520052520252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545411545454 00545433020017515454545454540000022e545446110002435454545454545454544900 4852525252525252525252525252525245020000274b004d4b104d4b004d4b104d523100 485252525252525252525252525252524831525252525252525252525252525252525252 525252525252450052524552524f2c4545314f52525252450052315245004f2727524802 2a0016450052524552525252525252525252525252525252525252525252525252005252 52525252525252525252525252525252525252525252450052524552524f2c4545314f52 525252450052315245004f27275248022a00164500525245525252525252525252525252 5252525252525252525252525216525252525252524500525245524f2c4545314f525252 5252450052315245002527524802314b0010005252455252525252525252314852525252 
5252450052524452524f2c4545314f52525252450052315245004f27275248022a001645 00525245525252525252525216525252525252524400525245524f2c4545314f52525252 52450052315245002727524802314b001000525245525252525252525231485252525252 52450052524552524f2c4545314f5252525252450052315245002727524802314b001000 525245525252525252525216525252525252524500525245524f2c4545314f5252525252 450052315245002727524802314b00164500525245525252525252523148525252525252 450052524552524f2c4545314f5252525252450052315245002727524802314b00100052 5244525252525252525216525252525252524500525245524f2c4545314f525252525245 0052315245002527524802314b0016450052524552525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545400465454 0054332e51543328545454545454005454005454004e5451365454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 52525252525252525252525252525241034d520000000048525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 5252525252525252525252525252525252525252525252525252525252525241034d5200 000000485252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525241034d52520000 000048525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525241034d52000000004852525252525252525252525252525252 5252525252525252525252521652525252525252525252525252525241034d5252000000 004852525252525252525252525252525252525252525252525252525231485252525252 5252525252525252525241034d5200000000485252525252525252525252525252525252 52525252525252525252521552525252525252525252525252525241034d525200000000 485252525252525252525252525252525252525252525252525252523148525252525252 52525252525252525241034d520000000048525252525252525252525252525252525252 525252525252525252521652525252525252525252525252525241034d52520000000048 525252525252525252525252525252525252525252525252525252003155555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545400494654 005402515454540054545454545400001133545436042e46515454545454545454544900 414848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848484848483527484848484848484848484848484848484848 48484848484848484848484848484848442f354848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484848484848004848 4848484848484848484848484848484848484848484848484848484848484848442f3548 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848481048484848484848484848484848484848442f35484848 4848484848484848484848484848484848484848484848484848484848482a3548484848 484848484848484848484848442f35484848484848484848484848484848484848484848 4848484848484848484848481048484848484848484848484848484848442f3548484848 48484848484848484848484848484848484848484848484848484848482a354848484848 4848484848484848484848442f3548484848484848484848484848484848484848484848 
48484848484848484848481048484848484848484848484848484848442f354848484848 484848484848484848484848484848484848484848484848484848482a35484848484848 48484848484848484848442f354848484848484848484848484848484848484848484848 484848484848484848481048484848484848484848484848484848442f35484848484848 484848484848484848484848484848484848484848484848484848003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545400542851 005400515454540054545454545400545128545454544c33175454545454545454544900 2f3131312f31313131313131313131313131313131313131312f31313131313131313131 313131313131313131313131313131312a14313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131313131313131003131 31313131313131313131312f313131313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131313131313131313131 3131312f3131313131313131310331313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131313131142731313131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313131033131313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131313114273131313131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313103313131313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131311427313131313131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131310331313131313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545400544917 00542e284e4e330454545454545400545400545436515454005454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 
525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545400545417 0054512e020204285454545454540000002e54544e0400042e5454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 52525252525252525252525252525252525252524f10162c525235525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 52525252525252525252525252165252525252525252525252525252525252525252524f 10162c523552525252525252525252525252525252525252525252525252314852525252 52525252525252525252525252525252524f10162c523552525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252524f10 162c52355252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252524f10162c52355252525252525252525252525252 5252525252525252525252165252525252525252525252525252525252525252524f1016 2c5235525252525252525252525252525252525252525252525252523148525252525252 5252525252525252525252525252524f10162c5235525252525252525252525252525252 52525252525252525252165252525252525252525252525252525252525252524f10162c 525235525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454544954545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 5252525252525252525252525252525252525252104d5241525200525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525210 4d5241520052525252525252525252525252525252525252525252525252314852525252 5252525252525252525252525252525252104d5241520052525252525252525252525252 52525252525252525252525216525252525252525252525252525252525252525252104d 524152005252525252525252525252525252525252525252525252525231485252525252 52525252525252525252525252525252104d524152005252525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252104d52 415200525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252144d52415200525252525252525252525252525252 5252525252525252525216525252525252525252525252525252525252525252104d5241 525200525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
520052525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525200005252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252520000525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252000052525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525452525216525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252525252525200005252525252525252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 520000525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00151717171717171717171717171717 171717171717171717171717171717171717171717171717171717171717171717172b00 271616161616161616161616161616161616161616161616161616161616161616161616 161616161616161616161616161616161003161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616161616161616001616 161616161616161616161616161616161616161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616161616161616161616 161616161616161616161616160216161616161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616161616031016161616 161616161616161616161616161616161616161616161616161616161616161616161616 161616161616161616161616021616161616161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616161603101616161616 161616161616161616161616161616161616161616161616161616161616161616161616 161616161616161e16161602161616161616161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616160310161616161616 161616161616161616161616161616161616161616161616161616161616161616161616 161616161616161616160216161616161616161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555554b002d4949494949494949494949494949 494949494949494949494949494949494949494949494949494949494949494949494200 444b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b412a4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b004b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2a414b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 5555555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555552a000000343400002f53553400002f5355555534 555555555555555555555555555555554b35555555555555555555555555555555555555 
555555555555001455551425553402023455555555555525025055252f55554848553400 002f53555314024555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555550014555514255534020235555555 55555525025055162f55554848553400002f535553140245555555555555555555555555 55555555555555555555555555255555555555555555555555555555552a000000345534 00002f533400002f53555555345555555555555555555555555555555555344b55555555 55555555555555555555552a000000343400002f53553400002f53555555345555555555 555555555555555555555555255555555555555500145555142555340202345555555555 5500025355552f004d5548485555250000145555142555555555555555344b5555555555 5500145555142555340202345555555555552502505516315555484855553400002f5353 140245555555555555555525555555555555001455551425555534020235555555555555 25025055162f55484855553400002f53555314024555555555555555354b555555555555 001455551425553402023455555555555516025055252f5555484855553400002f535314 024555555555555555552555555555555555001455551425553402023455555555555525 025055252f55484855553400002f53555314024555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b555555555555555555555555555555550055554b5500554b14555500554b1455555300 555555555555555555555555555555554b34555555555555555555555555555555555555 5555555555555500485502553434555534355555555555530e555545535555342a555500 554b14554555480255555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555500485502553434555534345555 555555530e555545535555342a555500554b145545554802555555555555555555555555 5555555555555555555555555525555555555555555555555555555555550055554b5555 00554b145500554b14555553005555555555555555555555555555555555344b55555555 5555555555555555555555550055554b5500554b14555500554b14555553005555555555 555555555555555555555555255555555555555555004855025534345555343555555555 55550048555314005555342a5555550055004855025555555555555555344b5555555555 555500485502553434555534345555555555530e555545535555342a55555500554b1445 554802555555555555555525555555555555550048550255553434555534345555555555 530e5555455355342a55555500554b14554555480255555555555555344b555555555555 5500485502553434555534355555555555530e555545535555342a55555500554b144555 480255555555555555552555555555555555550048550255343455553435555555555553 0e5555455355342a55555500554b14554555480255555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555500554b55550055452f55550055452f55535000 555555555555555555555555555555554b34555555555555555555555555555555555555 55555555555555004d55005502535555530255555555555534455541555553450e505500 55452f555555532555555555555555555555555555555555555555555555555555005555 5555555555555555555555555555555555555555555555004d5500550253555553025555 5555555534455541555553450e50550055452f5555555325555555555555555555555555 
55555555555555555555555555255555555555555555555555555555555500554b555555 0055452f550055452f555350005555555555555555555555555555555555344b55555555 55555555555555555555555500554b55550055452f55550055452f555350005555555555 555555555555555555555555255555555555555555004d55005502535555530255555555 55550041554545005553450e5055550055004d55005555555555555555344b5555555555 5555004d55005502535555530255555555555534455541555553450e5055550055452f55 55532555555555555555552555555555555555004d550055550253555553025555555555 55344555415553450e5055550055452f555555532555555555555555344b555555555555 55004d55005502535555530255555555555534455541555553450e5055550055452f5555 53255555555555555555255555555555555555004d550055025355555302555555555555 344555415553450e5055550055452f555555532555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b55555555555555555555555555555555000000555500144555555500144555554b5500 555555555555555555555555555555554b34555555555555555555555555555555555555 5555555555555500554d0055005555555500555555555555530e4d4b5555455341345500 144555555555485355555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555500554d00550055555555005555 55555555530e4d4b55554553413455001445555555554853555555555555555555555555 555555555555555555555555552555555555555555555555555555555555000000555555 001445555500144555554b55005555555555555555555555555555555555344b55555555 555555555555555555555555000000555500144555555500144555554b55005555555555 55555555555555555555555516555555555555555500554d005500555555550055555555 555500454b34550055445341345555005500554d005555555555555555344b5555555555 555500554d0055005555555500555555555555530e4d4b55554553403455550014455555 5548535555555555555555255555555555555500554d0055550055555555005555555555 55530e4d4b5545534134555500144555555555485355555555555555344b555555555555 5500554d0055005555555500555555555555530e4d4b5555455341345555001445555555 4853555555555555555525555555555555555500554d0055005555555500555555555555 530e4d4b5545534034555500144555555555485355555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454541154545400545454330200 1751545454545433465454334c5454113354545454000000043351110033545454544900 4b555555555555555555555555555555550055554d5500530e50555500530e5055000000 005555555555555555555555555555554b34555555555555555555555555555555555555 5555555555555500555502552f4855554b2f555555555555552f2f5555550e00000e5500 530e50555555535355555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555500555502552f4855554b2f5555 55555555552f2f5555550e0000035500530e505555555353555555555555555555555555 5555555555555555555555555525555555555555555555555555555555550055554d5555 00530e505500530e50550000000055555555555555555555555555555555344b55555555 5555555555555555555555550055554d5500530e50555500530e50550000000055555555 55555555555555555555555525555555555555555500555502552f4855554b2f55555555 
55550053144b5500550e00000e55550055005555025555555555555555344b5555555555 555500555502552f4855554b2f555555555555552f2f5555550e00000e555500530e5055 555353555555555555555525555555555555550055550255552f4855554b2f5555555555 55552f2f55550e00000e555500530e50555555535355555555555555344b555555555555 5500555502552f4855554b2f555555555555552f2f5555550e00000e555500530e505555 5353555555555555555525555555555555555500555502552f4855554b2f555555555555 552f2f55550e00000e555500530e50555555535355555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b003354545400465454005454332e5154 332854545454544c2e54541754544c331754545454005454510033465100545454544900 4b555555555555555555555555555555480200002a4d00504d14504d00504d1450555500 555555555555555555555555555555554b35555555555555555555555555555555555555 555555555555480055554855532f4848345355555555555555504555552a2a554b022c00 504d14504500001655555555555555555555555555555555555555555555555555005555 55555555555555555555555555555555555555555555480055554855532f484834535555 5555555555504555552a2a554b022f00504d145045000025555555555555555555555555 5555555555555555555555555525555555555555555555555555555555480200002a554d 00504d144a00504d14505555005555555555555555555555555555555555344b55555555 5555555555555555555555480200002a4d00504d14504d00504d14505555005555555555 5555555555555555555555552555555555555555480055554855532f4848345355555555 55480055345548002a2a554b02354d0010005555485555555555555555354b5555555555 55480055554855532f4848355355555555555555504555552a2a554b02354d00504d1440 00001655555555555555552555555555555548005555485555532f484834535555555555 55555045552a2a554b02344d00504d14504500002555555555555555344b555555555555 480055554855532f4848355355555555555555504555552a2a554b02344d00504d144000 002555555555555555552555555555555555480055554855532f48483453555555555555 555045552a2a554b02354d00504d14504500001655555555555555003555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540049465400545402515454 54005454545454541154463354543351364c545454005454464354545102545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 5555555555555555555555555555450e505555000000004b555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555450e50555500 0000004b5555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 55555555555555555555555525555555555555555555555555555555450e505555000000 004b555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555450e505555000000004b5555555555555555555555555555555555 555555555555555555555516555555555555555555555555555555450e5055000000004b 55555555555555555555555555555555555555555555555555555555344b555555555555 
5555555555555555450e505555000000004b555555555555555555555555555555555555 5555555555555555555525555555555555555555555555555555450e5055000000004b55 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540054285100545400515454 540054545454545436432e4c54540200002e54545400000000335446174c545454544900 444b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b412c4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4734414b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b004b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4834414b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2a414b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4734404b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2a414b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4831414b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4831414b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2a414b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4734414b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4734414b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b0033545454005449170054542e284e4e 33045454545454544e281754544636545411545454005454331746335454545454544900 343434343434343534343534343434343435343434353434353434343534343434343435 343434353434353434343434343534342c14343434343434343435343434353435343434 343434343534343434343435343434343434353435343435343434343435343434343534 343534343434353435343434343434343534343434343435343434353434353434003434 343534343434343534343434343435343434353435343434343434343434343434343434 343534343434353435343434343434343435343434343434343435343434353434343435 343434343434353434343534340334343435343435343434343434353434343534343534 353434343434343434343435343434343434353434343534343435343435142c34343435 343435343434343434343534343434353434353434353434343534343434343534343534 353434343434343534343534033435343534343434343434353434343534343434343534 3534343435343434343434343434343434343435343534343434353434152a3434353434 353434343435343434343434343435343534343534343434343534343434343534343435 34343534343434343435350e353434343434343534343435343435343534343434343534 35343434343534343434343534343534343534343435343434343435152c343434353434 343534343434353434343434343435343434343534343435343534343435343434343534 343534343434353435340e34343434343534343434343435343434343434353435343435 343434343435343434343534343534343434353434343435343435003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b003354545400545417005454512e0202 04285454545454545428365454284e545433465454005454432828000000545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555354b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555165555555555555555555555555555555555555555555555 5555555555555555555555555555555555555555555555555555555555354b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555354b555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555551655555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 544954545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 55555555555555555555555555555555555555555314162f555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 5314162f5555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 55555555555555555555555525555555555555555555555555555555555555555555552a 0000003555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555314162f55555555555555555555555555555555 555555555555555555555516555555555555555555555555555555555555555555531425 2f555555555555555555555555555555555555555555555555555555344b555555555555 5555555555555555555555555555555314162f5555555555555555555555555555555555 55555555555555555555255555555555555555555555555555555555555555555314162f 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545449000000001754545454545454545454545454545454545454545454545454544900 
4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b35555555555555555555555555555555555555 555555555555555555555555555555555555555514505545555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 145055455555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 0055554b55555555555555555555555555555555555555555555555555354b5555555555 555555555555555555555555555555551450554555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555145055 45555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555145055455555555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555514505545 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 55555555555555555555555555555555555555552f0e4b5555552534532f555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 2f0e4b5555552534532f5555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 0055482534532f55555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555552f0e4b55552534532f5555555555555555555555 5555555555555555555555255555555555555555555555555555555555555555552f0e4b 5555551634532f555555555555555555555555555555555555555555354b555555555555 5555555555555555555555555555552f0e4b55552535532f555555555555555555555555 55555555555555555555255555555555555555555555555555555555555555552f0e4b55 55552534532f555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 5555555555555555555555555555555555555555554b25355555532a534b555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 
555555555555555555555555555555555555555555555555555555555555555555555555 554b25345555532a534b5555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 000000532a534b55555555555555555555555555555555555555555555344b5555555555 55555555555555555555555555555555554b253555532a534b5555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555554b25 345555532a534b555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555554b253455532a534b555555555555555555555555 5555555555555555555525555555555555555555555555555555555555555555554b2535 5555532a534b555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555550000555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 5555555555555555555555555555555555555555485355025555552f4155555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 485355025555552f41555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555000055555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555500005555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 005555552f415555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555554853550255552f40555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555485355 025555552f4155555555555555555555555555555555555555555555344b555555555555 5555555555555555555555555555554853550255552f4055555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555548535502 5555552f4155555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555540020234555555504155555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 410202345555555040555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555548 0034555550415555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555554002023455555041555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555410202 35555555504155555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555410202345555504155555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555540020234 555555504155555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555354b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 5555555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555354b555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b35555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 5555555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00040000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000001455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00263333333333333333333333333333 333333333333333333333333333333333333333333333333333333333333333333332900 273131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313131313131312c1d242424242424242424242424242424242424 242424242424242424242424242424242424242424242424241924242424242424242424 242424242424242424242424242424242424242423242424242424242424242424133131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313131311631313131313131313131313131313131313131313131 3131313131313131313131313131313131313131313131313131313131312a2c31313131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313131163131313131313131313131313131313131313131313131 3131313131313131313131313131313131313131313131313131313131272c3131313131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313116313131313131313131313131313131313131313131313131 31313131313131313131313131313131313131313131313131313131272c313131313131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131311631313131313131313131313131313131313131313131313131 
313131313131313131313131313131313131313131313131313131002a55555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 5252525252525252525252525252525248243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252521652525252525252525252545252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4852525252525252525252525252525227000000313100002c4f523100002c4f524f1002 4152525252525252525252525252525248243f3f00093f3f09183f240101243f3f3f3f3f 3f00013e3f3f21003c3f3f3a3a3f18002400093f3f09183f3f183f3f240000183c3f3f3f 240101243f3f3f3f3f00013e3f3f21003c3f3f3a3a3f18002400093f3f09183f3f005252 52525252525252525252525252525252525252525252310000164b525252310202315252 52525200024f52522c004b52524545521600310010525210165252525252525252525252 525252525252525252525252521652525252525252310000164b52523102023152525252 5252310000164b52270045525227004552523100002c4f52525252525252314852525252 5252310000164b5252523102023152525252525200024f52522c004b5245455252160000 1052521016525252525252521652525252525252310000164b5252310202315252525252 5200024f52522c004b524545525216003100105252101652525252525231485252525252 52525252525252310000154b5252310202315252525252521e00000345524f11162c5252 52525252525252525252521652525252525252310000154b525231020231525252525252 00024f52522c004b52454552521600310010525210165252525252523148525252525252 310000164b5252523102023152525252525200024f52522c004b52454552521600001052 5210165252525252525216525252525252523100001e4b52523102023152525252525200 024f52522c004b5252454552160031001052521016525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4852525252525252525252525252525252005252485200524810525200524810524b524b 0252525252525252525252525252525248243f3f3f003a3f013f24243f3f24243f3f3f3f 3f3f003a3f3e09003f3f3f241d3f3f003f3f003a3f013f3f3f183f3f3f003f3e183b3f24 243f3f24243f3f3f3f3f003a3f3e09003f3f3f241d3f3f003f3f003a3f013f3f3f005252 525252525252525252525252525252525252525252525200524f16485231315252313152 525252520045524f10005252523127525200525200455202525252525252525252525252 5252525252525252525252525216525252525252525200524f1648313152523131525252 52525200524f164852005252525200525252520052481052525252525252314852525252 52525200524f1648523131525231315252525252520045524f1000525231275252520052 00455202525252525252525216525252525252525200524f164831315252313152525252 52520045524f100052523127525252005252004552025252525252525231485252525252 525252525252525200524f16483131525231315252525252540052480252104d52405252 525252525252525252525216525252525252525200524f16483131525231315252525252 520045524f10005252312752525200525200455202525252525252523148525252525252 5200524f1548523131525231315252525252520045524f10005252312752525200520045 5202525252525252525216525252525252525200524f1548313152523131525252525252 0045524f1000525252312752520052520045520252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525200524852520052412c52520052412c52525231 4d52525252525252525252525252525248243f3f3f003c3f003f013f3f3f3e013f3f3f3f 3f3f00373f3838003f3f3e38053d3f003f3f003c3f003f3f3f183f3f3f003f3f3d053f01 3e3f3f3e013f3f3f3f3f00373f3838003f3f3e38053d3f003f3f003c3f003f3f3f005252 52525252525252525252525252525252525252525252520052524d0352024f52524f0252 5252525200355241410052524f41034d52005252004b5200525252525252525252525252 525252525252525252525252521652525252525252520052524d03024f52524f02525252 5252520052524d0352005252525200525252520052412c52525252525252314852525252 5252520052524d0352024f52524f02525252525252003552414100524f410e4d52520052 004b520052525252525252521652525252525252520052524d03024f52524f0252525252 5252003552414100524f41034d5252005252004b52005252525252525231485252525252 52525252525252520052524d03024f52524f0252525252525200524531522c0348525252 52525252525252525252521652525252525252520052524d03024f52524f025252525252 52003552414100524f410e4d5252005252004b5200525252525252523148525252525252 520052524d0852024f52524f02525252525252003552414100524f41034d52520052004b 520052525252525252521652525252525252520052524d03024f52524f02525252525252 00355241410052524f41034d52005252004b520052525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525200000052520010415252520010415252524b31 2752525252525252525252525252525248243f3f3f003f3c003f003f3f3f3f003f3f3f3f 
555555555555555555555555555555555555554b00335454545400545454005400515454 540054545454545454005454540054005454545454005454545400000000335454544900 100000000000000000000000000000000000000000000000000000000000000000000000 00000000000000000000000000000000100b000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b003354545454005454332e542e284e4e 330454545454545454005454332e54005454545454005454545400545433175454544900 485252525252525252525252525252525252525252525252525252525252525252525252 5252525252525252525252525252525248243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b0033545454540000002e5154512e0202 0428545454545454540000002e5154000000335454000000335400545443285454544900 485252525252525252525252525252525252525252525252525252525252525252525252 5252525252525252525252525252525248243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 
3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 544954545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 5252525252525252525252525252525248243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 5252525252525252525252524f10162c5252524f4f4f480048524b525252525252525252 5252525252525252525252165252525252525252525252525252525252525252524f1016 2c5252525252525252525252525252525252525252525252525252523148525252525252 52525252525252525252524f10162c5252524f4f4f480048524b52525252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545449000000001754545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 5252525252525252525252525252525248243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 
525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252104d524152524f3552105200485252314f52525252525252 525252525252525252525216525252525252525252525252525252525252525252104d52 415252525252525252525252525252525252525252525252525252523148525252525252 5252525252525252525252104d524152524f3552105200485252314f5252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 5252525252525252525252525252525248243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 5252525252525252525252522c0348525252314b523516004f5252413152525252525252 5252525252525252525252165252525252525252525252525252525252525252522c0348 52525216314f2c5252525252525252525252525252525252525252523148525252525252 52525252525252525252522c0348525252314b523516004f525241315252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 5252525252525252525252525252525248243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 
525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252524816315252025252524d003152524f0052525252525252 525252525252525252525216525252525252525252525252525252525252525252524816 3152524f274f485252525252525252525252525252525252525252523148525252525252 5252525252525252525252524816315252025252524d003152524f005252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252520000525252525252525252 5252525252525252525252525252525248243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f00003f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f00003f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252520000525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252000052525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525200005252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525200005252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252454f520252520252524d5200005252520352525252525252 525252525252525252525216525252525252525252525252525252525252525252454f52 025252522c35525252525252525252525252525252525252525252523148525252525252 5252525252525252525252454f520252520252524d520000525252035252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 000052525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 5252525252525252525252525252525248243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252350202315252314552354500355252414552525252525252 525252525252525252525216525252525252525252525252525252525252525252350202 315252524d35525252525252525252525252525252525252525252523148525252525252 
525252525252525252525235020231525231445235450035525241455252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 5252525252525252525252525252525248243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 52525252525252525252525252525252525252415252521652524f415252525252525252 525252525252525252525216525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252525252415252521652524f41525252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00263333333333333333333333333333 333333333333333333333333333333333333333333333333333333333333333333333200 2f3131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313131313131312f1d242424242424242424242424242424242424 242424242424242424242424242424242424242424242424230524242424242424242424 242424242424242424242424242424242424242424242424242424242424242424083131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313131310331313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131313131142731313131 313131313131313131313131313131313131313131313131313131313131313131313131 313131313131313131313131033131313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131313114273131313131 31313131313131313131313131313131313131312c31313131312c313131313131313131 313131313131313131313103313131313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131311427313131313131 313131313131313131313131313131313131312c31313131312c31313131313131313131 313131313131313131310331313131313131313131313131313131313131313131313131 313131313131313131313131313131313131313131313131313131003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b002d4949494949494949494949494949 494949494949494949494949494949494949494949494949494949494949494949494200 444b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b44223b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b 3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b093b3b3b3b3b3b3b3b3b3b 3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b004b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2a414b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555551655555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555165555555555555555555555555555555555555555555555 5555555555555555555555555555555555555555555555555555555555354b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555551655555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 
4b5555555555555555555555555555552a000000343400002f53553400002f5355531402 455555555555555555555555555555554b243f3f00093f3f09183f240101243f3f3f3f3f 3f00013e3f3f21003c3f3f393a3f18002400093f3f09183f3f183f3f240000183c3f3f3f 240101243f3f3f3f3f00013e3f3f21003c3f3f393a3f18002400093f3f09183f3f005555 55555555555555555555555555555555555555555555340000254d555555340202345555 55555500025355552f004d55554848552500340014555514255555555555555555555555 555555555555555555555555552555555555555555340000254d55553402023455555555 5555340000254d552a004855552a004855553400002f5355555555555555344b55555555 5555340000254d5555553402023555555555555500025355552f004d5548485555250000 1455551425555555555555552555555555555555340000254d5555340202345555555555 5500025355552f004d5548485555250035001455551425555555555555344b5555555555 55555555555555340000254d5555340202345555555555552500000e48555314252f5555 555555555555555555555525555555555555340000254d55555534020234555555555555 25025055252f55484855553400002f5355554b2a5555555555555555344b555555555555 340000254d5555553402023555555555555500025355552f004d55484855552500001455 551425555555555555552555555555555555340000254d55553402023455555555555500 025355552f004d5555484855250034001455551425555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b555555555555555555555555555555550055554b5500554b14555500554b14554d554d 025555555555555555555555555555554b243f3f3f003a3f013f24243f3f24243f3f3f3f 3f3f003a3f3f09003f3f3f231c3f3f003f3f003a3f013f3f3f183f3f3f003f3e183b3f24 243f3f24243f3f3f3f3f003a3f3e09003f3f3f231d3f3f003f3f003a3f013f3f3f005555 5555555555555555555555555555555555555555555555005553254b5534345555343455 55555555004855531400555555342a555500555500485502555555555555555555555555 55555555555555555555555555255555555555555555005553254b353455553434555555 555555005553254b550055555555005555555500554b1455555555555555344b55555555 555555005553254b553434555534345555555555550048555314005555342a5555550055 004855025555555555555555255555555555555555005553254b35345555343455555555 55550048555314005555342a5555550055550048550255555555555555344b5555555555 5555555555555555005553254b34345555353455555555555500554b0255145055455555 55555555555555555555552555555555555555005553254b553434555535345555555555 530e5555455355352a55555500554b14555550005555555555555555344b555555555555 55005553254b553434555534345555555555550048555314005555342a55555500550048 55025555555555555555255555555555555555005553254b343555553534555555555555 004855531400555555342a55550055550048550255555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555500554b55550055452f55550055452f55555534 505555555555555555555555555555554b243f3f3f003c3f003f013e3f3f3e013f3f3f3f 3f3f00373f3838003f3f3e38053d3f003f3f003c3f003f3f3f183f3f3f003f3f3d053f01 3e3f3f3e023f3f3f3f3f00373f3838003f3f3e38053d3f003f3f003c3f003f3f3f005555 
5555555555555555555555555555555555555555555555005555500e5502535555530255 55555555004155454500555553450e5055005555004d5500555555555555555555555555 55555555555555555555555555255555555555555555005555500e025355555302555555 555555005555500e55005555555500555555550055452f55555555555555354b55555555 555555005555500e550253555553025555555555550041554545005553450e5055550055 004d55005555555555555555255555555555555555005555500e02535555530255555555 55550041554545005553450e505555005555004d550055555555555555354b5555555555 5555555555555555005555500e02535555530255555555555500554834552f0e4b555555 55555555555555555555552555555555555555005555500e550253555553025555555555 55344555415553450e5055550055452f555555005555555555555555344b555555555555 55005555500e550253555553025555555555550041554545005553450e5055550055004d 55005555555555555555255555555555555555005555500e025355555302555555555555 004155454500555553450e5055005555004d550055555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555500000055550014455555550014455555554d34 2a5555555555555555555555555555554b243f3f3f003f3c003f003f3f3f3f003f3f3f3f 3f3f00383b233f003f3f383f37243f003f3f003f3c003f3f3f183f3f3f003f3f3f003f00 3f3f3f3f003f3f3f3f3f00383b243f003f3f383f37233f003f3f003f3c003f3f3f005555 555555555555555555555555555555555555555555555500555555005500555555550055 5555555500454b3455005555455341345500555500554d00555555555555555555555555 555555555555555555555555552555555555555555550055555500005555555500555555 555555005555550055005555555500555555550014455555555555555555344b55555555 55555500555555005500555555550055555555555500454b345500554553403555550055 00554d005555555555555555255555555555555555005555550000555555550055555555 555500454b3455005545534134555500555500554d0055555555555555344b5555555555 55555555555555550055555500005555555500555555555555004b344155554b25345555 555555555555555555555525555555555555550055555500550055555555005555555555 55530e4d4b5544534134555500144555555555005555555555555555354b555555555555 5500555555005500555555550055555555555500454b3455005545534134555500550055 4d0055555555555555552555555555555555550055555500005555555500555555555555 00454b3455005555455341345500555500554d0055555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b555555555555555555555555555555550055554d5500530e50555500530e5055555555 0e5555555555555555555555555555554b243f3f3f003f3f013f213a3f3f3b1d3f3f3f3f 3f3f003f063b3f003f3f010000053f003f3f003f3f013f3f3f183f3f3f003f3f3a243f21 3a3f3f3b1d3f3f3f3f3f003e093b3f003f3f050000053f003f3f003f3f013f3f3f005555 55555555555555555555555555555555555555555555550055554834552f4855554b2f55 555555550053144b550055550e00000e5500555500555502555555555555555555555555 5555555555555555555555555525555555555555555500555548342f4855554b2f555555 5555550055554835550055555055005555505500530e5055555555555555344b55555555 
5555550055554834552f4855554b2f5555555555550053144b5500550e00000e55550055 00555502555555555555555525555555555555555500555548342f4855554b2f55555555 55550053144b5500550e00000e55550055550055550255555555555555344b5555555555 555555555555555500555548352f4855554b2f5555555555550055550055485355025555 555555555555555555555525555555555555550055554834552f4855554b2f5555555555 55552f2f55550e00000e555500530e50555555005555555555555555344b555555555555 550055554835552f4855554b2f5555555555550053144b5500550e00000e555500550055 5502555555555555555525555555555555555500555548342f4855554b2f555555555555 0053144b550055550e00000e550055550055550255555555555555003555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540000002851545433020017 5154545454545433465454334c5454113354545454000000043354541754545454544900 4b555555555555555555555555555555480200002a4d00504d14504d00504d1450451414 4d5555555555555555555555555555554b243f3f3a003f3f3a3f3e213a3a243f3f3f3f3f 3f3a003f233f3a003f1d1c3f3b012000183a003f3f3a3f3f3f183f3f3c000124373f3f3e 213a3a243f3f3f3f3f3a003f243f3a003f191c3f3b012000183a003f3f3a3f3f3f005555 555555555555555555555555555555555555555555554d000234415555532f4848345355 55555548005534554800532a2a554b022f00164800555548555555555555555555555555 5555555555555555555555555525555555555555554d0002344155532f48483553555555 55554d00023440554800000e2f4800000e2f4d00504d1450555555555555344b55555555 55554d000234415555532f484834535555555555480055345548002a2a554b02354d0010 00555548555555555555555525555555555555554d0002344155532f4848355355555555 55480055345548002a2a554b02354d0025480055554855555555555555344b5555555555 555555555555554d0002344155532f4848345355555555554d00022f3455410202345555 5555555555555555555555255555555555554d000234415555532f484834535555555555 55555045552a2a554b02344d00504d14505534004b55555555555555344b555555555555 4d000234405555532f484835535555555555480055345548002a2a554b02354d00100055 5548555555555555555525555555555555554d0002344155532f48483453555555555548 005534554800532a2a554b022f0025480055554855555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540054512e2854332e515433 285454545454544c2e54541754544c33175454545400545451004e2e0054545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b243f3f3f3f3f3f3f3f3f3f38053d3f3f000000 003b3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f38053d3f000000003b3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005555 55555555555555555555555555555555555555555555555555555555555555450e505500 0000004b5555555555555555555555555555555555555555555555555555555555555555 5555555555555555555555555525555555555555555555555555555555450e4f55550000 00004b555555555555555555555555555555555555555555555555555555344b55555555 5555555555555555555555450e5055000000004b55555555555555555555555555555555 55555555555555555555555525555555555555555555555555555555450e505555000000 004b555555555555555555555555555555555555555555555555555555354b5555555555 555555555555555555555555555555450e505555000000004b5555555555555555555555 
555555555555555555555516555555555555555555555555555555450e50555500000000 4b555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555450e4f5555000000004b5555555555555555555555555555555555 5555555555555555555525555555555555555555555555555555440e505555000000004b 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540054545100540251545454 00545454545454541154463354543351364c545454005454464354540054545454544900 444b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b44223b3b3b3b3b3b3b3b3b3b3b3923373b3b3b3b 3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b093b3b3b3b3b3b3b3b3b3b 3b3b3923373b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b074b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4831414b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4734414b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4734414b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4534414b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2a414b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4831414b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4831414b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4734414b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4734414b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540054545400540051545454 005454545454545436432e4c54540200002e545454000000003354540054545454544900 343434343435343434343435343435343434343434353434343434353434343434353434 34353434343434353434353434343434341d242424242424242424242424242424242324 242424242323242424242424242424242424242424242424240524242424242424242424 2424242424242424242424242424242424242424242424242424242424242424240a3434 353434343534343434343434343534343434343534343534343434343434353434343534 343435343434353434343434343435343534343434343434343534343434343534343534 343434343434343435343434340e34343434353434343434343435343434343435343434 343534343534343434343534343434343434343435343434343434353434152c34343434 343435343434343435343434343434343534343434343534343434343434343435343434 3434353434343434343435340e3434343434343435343434343534343434343534343534 3434343435343434353434343434343534343434343435343434343435142c3434343434 343534343434343534343534343434343534343434343434353434343434353434343434 34343534343434343434350e343534343434343534343434343534343534343434343534 34343534343435343434343434343534343434343534343434343434152c343434343534 343434343434343434343534343435343434343435343434343434343534343434343434 343534343434343534340e34353434343434343435343434343435343434353434343434 
343435343534343435343434343434343434343434343534343434003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b0033545454005454332e542e284e4e33 04545454545454544e281754544636545411545454005454331754540054545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b233f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 5555555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555354b555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540000002e5154512e020204 28545454545454545428365454284e545434465454005454432854540054545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f1d 000000243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f1c000000243f3f3f3f3f3f3f3f3f3f3f373f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005555 555555555555555555555555555555555555555555555555555555555555552a00000034 555555555555555555555541555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 2a0000003455555555555555555555555555555555555555555555555555354b55555555 5555555555555555555555552a0000003455555555555555555555415555555555555555 55555555555555555555555525555555555555555555555555555555552a000000345555 5555555555555555405555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555552a00000034555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555531425 2f555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555552a00000034555555555555555555554155555555555555555555 5555555555555555555525555555555555555555555555555555552a0000003455555555 555555555555554055555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555554b00335454545454545454545454545454 495454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 003f3f3b3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f003f3f3b3f3f3f3f3f3f3f3f3f3f3f003f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005555 55555555555555555555555555555555555555555555555555555555555555550055554b 555555555555555555555500555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 550055554b55555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555550055554b55555555555555555555005555555555555555 5555555555555555555555552555555555555555555555555555555555550055554b5555 5555555555555555005555555555555555555555555555555555555555344b5555555555 55555555555555555555555555555555550055554b555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555145055 45555555555555555555555555555555555555555555555555555555344b555555555555 55555555555555555555550055554b555555555555555555550055555555555555555555 555555555555555555552555555555555555555555555555555555550055554b55555555 555555555555550055555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545449000000001754545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 003f3a18233f203f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f003f393f18243e213f242400373f3f001d00383f3f3f3f3f3f3f3f3f3f3f3f005555 555555555555555555555555555555555555555555555555555555555555555500554855 1635532f55343400415555002a0045555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 550055482535532f55555555555555555555555555555555555555555555344b55555555 55555555555555555555555555005548552535532f553434004155002a00455555555555 555555555555555555555555165555555555555555555555555555555555005548552535 532f553434004055002a00455555555555555555555555555555555555344b5555555555 5555555555555555555555555555555555005548552535532f5555555555555555555555 5555555555555555555555255555555555555555555555555555555555555555552f0e4b 5555552534532f555555555555555555555555555555555555555555344b555555555555 5555555555555555555555005548552535532f553434004155002a004555555555555555 55555555555555555555255555555555555555555555555555555555005548552535532f 55343400405555002a004555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 
0000003f1c3e3b3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f0000003f3f1c3e3b3f003f3d013f3f003f3c013f3f3f3f3f3f3f3f3f3f3f3f005555 555555555555555555555555555555555555555555555555555555555555555500000055 532a534b5500555002555500554d02555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 55000000532a534b55555555555555555555555555555555555555555555344b55555555 5555555555555555555555555500000055532a534b55005550025500554d025555555555 55555555555555555555555525555555555555555555555555555555555500000055532a 534b55005550025500554d025555555555555555555555555555555555344b5555555555 555555555555555555555555555555555500000055532a534b5555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555554b25 345555532a534b555555555555555555555555555555555555555555344b555555555555 555555555555555555555500000055532a534b55005550025500554d0255555555555555 5555555555555555555525555555555555555555555555555555555500000055532a534b 5500555002555500554d0255555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555550000555555555555555555 555555555555555555555555555555554b243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 003f3f3f21373f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f003f3f3f3f21373f3f003f3f013f3f003f3f013f3f3f3f3f3f3f3f3f3f3f3f005555 555555555555555555555555555555555555555555555555555555555555555500555555 552f41555500555502555500555502555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 55005555552f405555555555555555555555555555555555555555555555344b55555555 5555555555555555555555555500555555552f4055550055550255005555025555555555 55555555555555555555555525555555555555555555555555555555555500555555552f 4155550055550255005555025555555555555555555555555555555555354b5555555555 555555555555555555555555555555555500555555552f40555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555485355 025555552f4155555555555555555555555555555555555555555555344b555555555555 555555555555555555555500555555552f41555500555502550055550255555555555555 5555555555555555555525555555555555555555555555555555555500555555552f4055 550055550255550055550255555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3a 00233f3f3d373f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3a00233f3f3f3d373f3f000501383f3f090501383f3f3f3f3f3f3f3f3f3f3f3f005555 555555555555555555555555555555555555555555555555555555555555554800345555 5550405555000e02455555140e0245555555555555555555555555555555555555555555 
555555555555555555555555552555555555555555555555555555555555555555555555 480034555550415555555555555555555555555555555555555555555555344b55555555 55555555555555555555555548003455555550415555000e024555140e02455555555555 555555555555555555555555255555555555555555555555555555555548003455555550 415555000e024555140e02455555555555555555555555555555555555344b5555555555 555555555555555555555555555555554800345555555041555555555555555555555555 555555555555555555555516555555555555555555555555555555555555555555410202 34555555504155555555555555555555555555555555555555555555344b555555555555 5555555555555555555548003455555550405555000e024555140e024555555555555555 555555555555555555552555555555555555555555555555555555480034555555504155 55000e02455555140e024555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f003f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555500555555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555354b55555555 555555555555555555555555555555555555555555550055555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 5555550055555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555555555555500555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555555555555 550055555555555555555555555555555555555555555555555555003555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b243f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f00003f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f005555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555500005555555555555555555555555555555555555555555555555555555555 555555555555555555555555552555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555550000555555555555555555555555 555555555555555555555555255555555555555555555555555555555555555555555555 
5555550000555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555354b555555555555 555555555555555555555555555555555555555500005555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555555555555 550000555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00151717171717171717171717171717 171717171717251717171717171717171717171717171725171717171717171717172a00 2a1616161616161616161616161616161616161616161616162516161616161616161616 1616171716161616161616162516162527131b1b1b1b1b161b1b1b1b1b1b1b1b1b1b1616 1b1b1b1b1b1b1b1b1b1b16161b1b1b1b161b171e1b1b1b1b16031b1b1b1b1b1b1b1b1b1b 1b1b1b1b161e161e1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b1b161b1b1b1b1e001616 162516161616161616161616162525161616161616161616161616161616161616161616 161616161616161616161616161616161616161625161616161616161616161616161616 161616161616161625161625160216161616161616161616161616161616161616161616 1625161616161616161616161616161616161616161616161616161616160e1016161616 161617161616161616161616161616161616161616251616161616161616161616161616 161616161616161616161625021616161616161616161616161616161616161616161616 161616161625161616161616161616161616161616161616161616161603101616251616 161616161616161616161616161616161616161616161616161616161616161616161616 161616161616161616161602161616161616161616161616161616161617161616161616 161616161616161616161616161616161616161616161616161616160310161616161616 161616161616161616161616161616161616161616161616161616161616161616161616 161616161616161616160216161616162516161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525254005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252523148525252525252 
525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252522a000000343100002c4f523100002c4f5252482a 525252525252525252525252525252524831525252525252525252525252525252525252 525252525252310000164b52523102023252525252525216024d52162c52524545523100 002c4f524f10024152525252525252525252525252525252525252525252525252005252 52525252525252525252525252525252525252525252310000164b525231020231525252 52525216024d52162c52524545523100002c4f524f100241525252525252525252525252 525252525252525252525252521652525252525252525252525252525227000000345231 00002c4f3100002c4f525252345252525252525252525252525252525252314852525252 52525252525252525252522a000000313400002c4f523100002c4f525252315252525252 5252525252525252525252521652525252525252340000164b5252310202315252525252 5200024f52522c004b524545525216003100105252141652525252525231485252525252 52310000164b52523102023152525252525216024d52162c5252454552523100002c4f52 4f10024152525252525252165252525252523100001e4b52525231020231525252525252 16024d52162c52524545523100002c4f5250140241525252525252523148525252525252 310000164b52523102023152525252525216024d52162c5252454552523100002f4f524f 1002415252525252525216525252525252523100001e4b52523102023152525252525216 024d52162f52524545523100002c4f525010024152525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525200525248520052481052520052481052524d00 525252525252525252525252525252524831525252525252525252525252525252525252 5252525252525200524f164831315252313152525252524f035252404f52523128525200 524810524152450252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525200525016483131525231315252 5252524f035252414f525231275252005248105241524502525252525252525252525252 525252525252525252525252521652525252525252525252525252525252005252485252 00524810520052481052524f005252525252525252525252525252525252314852525252 525252525252525252525252005252485200524810525200524810525250005252525252 52525252525252525252525216525252525252525200524f164831315252313152525252 52520045524f100052523127525252005252004552025252525252525231485252525252 5252005250164831315252313152525252524f0e5252404f525231275252520052481452 4152450252525252525252165252525252525200544f1548523431525231315252525252 4f0e5252414f525231275252005248105241524502525252525252523148525252525252 5200524f164831315252313152525252524f035252414f52523127525252005248105241 5245025252525252525216525252525252525200544f154831315252313152525252524f 035252414f52523127525200524814524152450252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525200524852520052412c52520052412c52525200 525252525252525252525252525252524831525252525252525252525252525252525252 525252525252520052524d03024f52524f025252525252523141523552524f41034d5200 52412c5252524f1652525252525252525252525252525252525252525252525252005252 52525252525252525252525252525252525252525252520052524d03024f52524f025252 525252523141523552524f410e4d520052412c5252524f16525252525252525252525252 525252525252525252525252521652525252525252525252525252525252005248525252 0052412c520052412c524f4d005252525252525252525252525252525252314852525252 52525252525252525252525200524852520052412c52520052412a524f4d005252525252 5252525252525252525252521652525252525252520052524d03024f52524f0252525252 5252003552414100524f41034d5252005252004b52005252525252525231485252525252 52520052524d03024f52524f025252525252523141523552524f410e4d52520052412c52 52524f165252525252525216525252525252520052524d0352024f52524f025252525252 523141523552524f410e4d520052412c5252524f16525252525252523148525252525252 520052524d0e024f52524f025252525252523141523552524f41034d52520052412c5252 524f16525252525252521652525252525252520052524d03024f52524f02525252525252 3141523552524f41034d520052412c5252524f1652525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525200000052520010415252520010415252525200 525252525252525252525252525252524831525252525252525252525252525252525252 5252525252525200525252000052525252005252525252524f034b485252414f35315200 104152525252454f52525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525200525252000052525252005252 525252524f0e4b485252414f35315200144152525252454f525252525252525252525252 525252525252525252525252521652525252525252525252525252525252000000525252 001041525200104052524852005252525252525252525252525252525252314852525252 525252525252525252525252000000525200104152525200104152524852005252525252 525252525252525252525252165252525252525252005252520000525252520052525252 525200414831520052414f3531525200525200524b005252525252525231485252525252 5252005252520000525252520052525252525250034b485252414f353152520010415252 5252454f5252525252525216525252525252520052525200520052525252005252525252 524f034b485252414f35315200104152525252454f525252525252523148525252525252 5200525252000052525252005252525252524f0e4b485252414f35315252001041525252 52454f525252525252521652525252525252520052525200005252525200525252525252 4f034b485252414f35315200104152525252454f52525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 
48525252525252525252525252525252520052524b52004f034d5252004f034d52525200 525252525252525252525252525252524831525252525252525252525252525252525252 5252525252525200525245312c455252482c525252525252522c2c525252030000085200 4f034d5252524f4f52525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525200525245312c455252482c5252 52525252522c2c5252520300000352004f034d5252524f4f525252525252525252525252 5252525252525252525252525216525252525252525252525252525252520052524b5252 004f034d52004f034d520000000052525252525252525252525252525252314852525252 5252525252525252525252520052524b52004f034d5252004f034d520000000052525252 52525252525252525252525216525252525252525200525245312c455252482c52525252 5252004f10485200520300000e5252005252005252025252525252525231485252525252 525200525245312c455252482c525252525252522c2c5252520e00000e5252004f034d52 52524f4f5252525252525216525252525252520052524531522c455252482c5252525252 52522c2c5252520300000352004f034d5252524f4f525252525252523148525252525252 5200525245312c455252482c525252525252522c2c525252030000035252004f034d5252 524f4f5252525252525216525252525252525200525245312c455252482c525252525252 522c2c5252520300000e52004f034d5252524f4f52525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540000002851545433020017 5154545454545433465454334c5454113354545454000000043351110033545454544900 4852525252525252525252525252525245020000274b004d4a104d4b004d4b104d523100 485252525252525252525252525252524831525252525252525252525252525252525252 5252525252524b00023135524f2c4545314f525252525252524d41525227275248022b00 4d4b104d4100001652525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252524b00023135524f2c4545314f5252 52525252524d41525228275248022a004d4b144d41000016525252525252525252525252 52525252525252525252525252165252525252525252525252525252524502000027524b 004d4b1047004d4b104d5252005252525252525252525252525252525252314852525252 525252525252525252525245020000274b004d4b104d4b004d4b104d5252005252525252 52525252525252525252525216525252525252524b00023135524f2c4545314f52525252 52450052315245002727524802314b001645005252455252525252525231485252525252 524b00023135524f2c4545314f525252525252524d4152522727524802314b004d4b104d 4100001652525252525252165252525252524b0002313552524f2c4545314f5252525252 52524d41525227275248022a004d4b104d41000016525252525252523148525252525252 4b00023135524f2c4545314f525252525252524d4152522727524802314b004d4b104d40 0000165252525252525216525252525252524b00023135524f2c4545314f525252525252 524d41525227275248022a004d4b104d4000001652525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540054512e2854332e515433 2b5454545454544c2e54541754544c331754545454005454510033465100545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 525252525252525252525252525241034d52520000000048525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 
52525252525252525252525252525252525252525252525252525252525241034d525200 000000485252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 5252525252525252525252521652525252525252525252525252525241034d5252000000 004852525252525252525252525252525252525252525252525252525231485252525252 52525252525252525241034d525200000000485252525252525252525252525252525252 52525252525252525252521652525252525252525252525252525241034d525200000000 485252525252525252525252525252525252525252525252525252523148525252525252 5252525252525252410e4d52520000000048525252525252525252525252525252525252 5252525252525252525216525252525252525252525252525252410e4d52520000000048 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540054545100540251545454 00545454545454541154463354543351364c545454005454464354545102545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 5252525252525252525252525252524d4148525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252524d41485252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 52525252525252525252525216525252525252525252525252525252524d414852525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252524d41485252525252525252525252525252525252525252525252 525252525252525252525216525252525252525252525252525252524d41485252525252 525252525252525252525252525252525252525252525252525252523148525252525252 5252525252525252524d4148525252525252525252525252525252525252525252525252 5252525252525252525216525252525252525252525252525252524d4148525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540054545400540051545454 005454545454545436432e4c54540200002e54545400000000335446174c545454544900 271e171716161616161616161616161616161616151e1717161616161616161616161616 161616161616161716151e26161616161003161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616161616161616161616 151616161616161616161616161616161616161616161616161616161616161616001616 161616161616161616161616161616161616161616161616161616161616161616161616 16161616161616161616161616161617151e171716161616161616161616161616161616 161616161616161616161616160216161616161616161616161616161616161616161616 151616161616161616161616161616161616161616161616161616161616031016161616 
16161616161616161616161616161617151e171716161616161616161616161616161616 161616161616161616161616021616161616161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616161603101616161616 161616161616161616161616161616161616161616161616161616161616161616161616 161616161616161616161602161616161616161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616160314161616161616 161616161616161616161616161616161616161616161616161616161616161616161616 161616161616161616160216161616161616161616161616161616161616161616161616 161616161616161616161616161616161616161616161616161616003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b0033545454005454332e542e284e4e33 0e545454545454544e281754544636545411545454005454331746335454545454544900 414848484848484848484848484848484848484848484848484848484848484848484848 48484848484848484848484848484848352a484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484848484848004848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848481448484848484848484848484848484848484848484848 4848484848484848484848484848484848484848484848484848484848482a3548484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848104848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484827354848484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484814484848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848482735484848484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848481448484848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454540000002e5154512e02020e 28545454545454545428365454284e545433465454005454432828000000545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252525252525252525252525252525252525252525252 
525252525252525252525216525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 495454545454545454545454545454545454545454545454545454545454545454544c00 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 52525252525252525252525252525252525252524f10162c525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 4f14162c5252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 52525252525252525252525216525252525252525252525252525252525252525252522a 000000315252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252524f10162c52525252525252525252525252525252 5252525252525252525252165252525252525252525252525252525252525252524f1016 2c5252525252525252525252525252525252525252525252525252523148525252525252 5252525252525252525252525252524f10162c5252525252525252525252525252525252 52525252525252525252165252525252525252525252525252525252525252524f10162c 525252525252525252525252525252525252525252525252525252003155555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545449000000001754545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 5252525252525252525252525252525252525252104d5241525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 104d52415252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 005252485252525252525252525252525252525252525252525252525231485252525252 52525252525252525252525252525252104d524152525252525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252104d52 415252525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252104d52415252525252525252525252525252525252 5252525252525252525216525252525252525252525252525252525252525252104d5241 
525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 52525252525252525252525252525252525252522c034852525216314f2c525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 2c034852525216314f2c5252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 00524516314f2c5252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252522c0348525216314f2c5252525252525252525252 5252525252525252525252165252525252525252525252525252525252525252522c0348 52525216314f2c5252525252525252525252525252525252525252523148525252525252 5252525252525252525252525252522c0348525216314f2c525252525252525252525252 52525252525252525252165252525252525252525252525252525252525252522c034852 525216314f2c525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 52525252525252525252525252525252525252525248163152524f274f48525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 5248163152524f274f485252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 0000004f274f485252525252525252525252525252525252525252525231485252525252 5252525252525252525252525252525252481631524f274f485252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252524816 3152524f274f485252525252525252525252525252525252525252523148525252525252 52525252525252525252525252525252481631524f274f48525252525252525252525252 525252525252525252521652525252525252525252525252525252525252525252481631 52524f274f48525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252520000525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 5252525252525252525252525252525252525252454f52025252522c3552525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 454f52025252522c35525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252000052525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525200005252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 005252522c35525252525252525252525252525252525252525252525231485252525252 52525252525252525252525252525252454f520252522c35525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252454f52 025252522c35525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252454f520252522c3552525252525252525252525252 5252525252525252525216525252525252525252525252525252525252525252454f5202 5252522c3552525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 5252525252525252525252525252525252525252350202315252524d3552525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 350202315252524d35525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525245 003152524d35525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252523502023152524d35525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252350202 315252524d35525252525252525252525252525252525252525252523148525252525252 5252525252525252525252525252523502023152524d3552525252525252525252525252 525252525252525252521652525252525252525252525252525252525252525235020231 5252524d3552525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 485252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252524831525252525252525252525252525252525252 
525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252005252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252521652525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525252314852525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525252165252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252525231485252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252525216525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252523148525252525252 525252525252525252525252525252525252525252525252525252525252525252525252 525252525252525252521652525252525252525252525252525252525252525252525252 525252525252525252525252525252525252525252525252525252003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b002d4949494949494949494949494949 494949494949494949494949494949494949494949494949494949494949494949493600 354848484848484848484848484848484848484848484848484848484848484848484848 48484848484848484848484848484848402f484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484848484848104848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848484848482a48484848484848484848484848484848484848484848 4848484848484848484848484848484848484848484848484848484848482f4048484848 484848484848484848484848484848484848484848484848484848484848484848484848 4848484848484848484848482a4848484848484848484848484848484848484848484848 48484848484848484848484848484848484848484848484848484848482f404848484848 484848484848484848484848484848484848484848484848484848484848484848484848 48484848484848484848482a484848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848482f40484848484848 484848484848484848484848484848484848484848484848484848484848484848484848 484848484848484848482a48484848484848484848484848484848484848484848484848 484848484848484848484848484848484848484848484848484848002f55555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00040000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 
000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000001455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b002d4949494949494949494949494949 494949494949494949494949494949494949494949494949494949494949494949493600 404b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4c4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b412c4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b003b3b 3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b 3b3b3b3b3b093b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b 3b3b3b3b3b3b3b3b3b3b3b3b3b0d4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b002f55555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555003f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f1a55555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555165555555555555555555555555555555555555555555555 
5555555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555516555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555552555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555552a000000343400002f53553400002f5355531402 455555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555314162f55553502023555555555555500025355552f004d5548485555 250000145555142555555555555555555555555555555555555555555555555555003f3f 00093f3f09183f3f240101243f3f3f3f3f3f00013e3f3f21003c3f3a3a3f3f180000093f 3f09183f3f183f3f3f3e0918213f3f240101243f3f3f3f3f3f00013e3f3f21003c3f3a3a 3f3f180000093f3f09183f3f3f1a555555555555555314252f5555553402023555555555 555500025355552f004d5548485525003400145555142555555555555555344b55555555 5555555314162f55553502023455555555555500025355552f004d554848555525000014 55551425555555555555555525555555555555555314252f555555350202345555555555 5500025355552f004d5548485555250000145555142555555555555555344b5555555555 555555555555555314162f5555553402023455555555552500000e48555314252f555555 555555555555555555555525555555555555555314252f55555534020235555555555555 00025355552f004d5548485555250000145555142555555555555555344b555555555555 555314252f55553502023455555555555500025355552f004d5548485555250034001455 5514255555555555555525555555555555555314162f5555553502023555555555555500 025355552f004d5548485555250000145555142555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b555555555555555555555555555555550055554b5500554b14555500554b1455455548 025555555555555555555555555555554b34555555555555555555555555555555555555 5555555555555514505545553434555534345555555555550048555314005555352a5555 550055004855025555555555555555555555555555555555555555555555555555003f3f 3f003a3f013f3f24243f3f24243f3f3f3f3f3f003a3f3e09003f3f241d3f3f3f003f003a 3f013f3f3f183f3f3f093d3f383f24243f3f24243f3f3f3f3f3f003a3f3e09003f3f241d 3f3f3f003f003a3f013f3f3f3f1a55555555555555145055455555343455553434555555 5555550048555314005555342a5555005555004855025555555555555555354b55555555 55555514505545553434555535345555555555550048555314005555342a555555005500 485502555555555555555555255555555555555514505545555534345555343455555555 55550048555314005555352a5555550055004855025555555555555555344b5555555555 55555555555555145055455555343555553534555555555500554b025514505545555555 555555555555555555555525555555555555551450554555553534555534345555555555 550048555314005555342a5555550055004855025555555555555555344b555555555555 
5514505545553434555534345555555555550048555314005555342a5555550055550048 550255555555555555552555555555555555145055455555343455553434555555555555 0048555314005555352a5555550055004855025555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555500554b55550055452f55550055452f55555553 255555555555555555555555555555554b34555555555555555555555555555555555555 555555555555552f0e4b55550253555553025555555555550041554545005553450e5055 550055004d55005555555555555555555555555555555555555555555555555555003f3f 3f003c3f003f3f013e3f3f3e013f3f3f3f3f3f00373f3838003f3e38053d3f3f003f003c 3f003f3f3f183f3f3f21053b3f3f013e3f3f3e013f3f3f3f3f3f00373f3838003f3f3805 3d3f3f003f003c3f003f3f3f3f1a555555555555552f0e4b555555025355555302555555 5555550040554545005553450e5055005555004d55005555555555555555344b55555555 5555552f0e4b55550253555553025555555555550041554545005553450e505555005500 4d550055555555555555555525555555555555552f0e4b55555502535555530255555555 55550041554545005553450e5055550055004d55005555555555555555344b5555555555 555555555555552f0e4b555555025355555302555555555500554835552f0e4b55555555 555555555555555555555525555555555555552f0e4b5555550253555553025555555555 550041554545005553450e5055550055004d55005555555555555555344b555555555555 552f0e4b55550253555553025555555555550041554545005553450e505555005555004d 5500555555555555555525555555555555552f0e4b555555025355555302555555555555 0041554545005553450e5055550055004d55005555555555555555003555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555500000055550014455555550014455555555548 535555555555555555555555555555554b35555555555555555555555555555555555555 55555555555555554b25345500555555550055555555555500454b345500554553413455 55005500554d005555555555555555555555555555555555555555555555555555003f3f 3f003f3c003f3f003f3f3f3f003f3f3f3f3f3f00383b243f003f383e37243f3f003f003f 3c003f3f3f183f3f3f3f3b18243f003f3f3f3f003f3f3f3f3f3f00383b243f003f383e24 243f3f003f003f3c003f3f3f3f1a55555555555555554b25345555005555555500555555 55555500454b34550055455341345500555500554d005555555555555555344b55555555 555555554b25355500555555550055555555555500454b34550055455341345555005500 554d005555555555555555552555555555555555554b2534555500555555550055555555 555500454b35550055455341345555005500554d005555555555555555344b5555555555 55555555555555554b253555550055555555005555555555004b344155554b2534555555 55555555555555555555552555555555555555554b253455550055555555005555555555 5500454b34550055455340355555005500554d005555555555555555344b555555555555 55554b25345500555555550055555555555500454b345500554453413455550055550055 4d0055555555555555552555555555555555554b25345555005555555500555555555555 00454b34550055455341345555005500554d005555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454544611000243545433020017 515454545454540004545454040054545411335454540054545411545454005454544900 4b555555555555555555555555555555550055554d5500530e50555500530e5055555553 535555555555555555555555555555554b34555555555555555555555555555555555555 5555555555555548535502552f4855554b2f5555555555550053144b5500550e00000e55 550055005555025555555555555555555555555555555555555555555555555555003f3f 3f003f3f013f3f213a3f3f3b213f3f3f3f3f3f003f063b3f003f050000053f3f003f003f 3f013f3f3f183f3f3f3a3e3f013f213a3f3f3b213f3f3f3f3f3f003e093b3f003f050000 053f3f003f003f3f013f3f3f3f1a555555555555554853550255552f4855554b2f555555 5555550053144b5500550e00000355005555005555025555555555555555344b55555555 55555548535502552f4855554b2f5555555555550053144b5500550e00000e5555005500 55550255555555555555555525555555555555554853550255552f4855554b2f55555555 55550053144b5500550e00000e55550055005555025555555555555555354b5555555555 555555555555554853550255552f4855554b2f5555555555005555005548535502555555 555555555555555555555525555555555555554853550255552f4855554b2f5555555555 550053144b5500550e00000e55550055005555025555555555555555344b555555555555 5548535502552f4855554b2f5555555555550053144b5500550e00000e55550055550055 5502555555555555555525555555555555554853550255552f4855554b2f555555555555 0053144b5500550e00000e55550055005555025555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b0033545454004e54513654332e515433 2854545454545400174c544c170054544c33175454540054545400465454005454544900 4b555555555555555555555555555555480200002a4d00504d14504d00504d1450450000 255555555555555555555555555555554b34555555555555555555555555555555555555 555555555555554102023555532f484834535555555555480055345548002a2a554b0235 4d0010005555485555555555555555555555555555555555555555555555555555003f3f 3a003f3f3a3f3f3e213a3a243e3f3f3f3f3f3a003f233f3a00191d3f3b01243c0006003f 3f3a3f3f3f183f3f3f370101243f3e213a3a243e3f3f3f3f3f3a003f243f3a00191d3f3b 01243c0006003f3f3a3f3f3f3f1a55555555555555410202355555532f48483453555555 5555480055345548002a2a554b022f002548005555485555555555555555344b55555555 5555554102023455532f484834535555555555480055345548002a2a554b02354d001000 5555485555555555555555551655555555555555410202355555532f4848345355555555 5548005534554800272a554b02354d0010005555485555555555555555344b5555555555 55555555555555410202345555532f48483453555555554d00022f345540020234555555 55555555555555555555552555555555555555410202345555532f484834535555555555 480055345548002a2a554b02344d0010005555485555555555555555344b555555555555 554102023455532f484834535555555555480055345548002a2a554b02344d0025480055 554855555555555555552555555555555555410202355555532f48483453555555555548 0055345548002a2a554b02354d0010005555485555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b003354545436042e4651540251545454 0054545454545400432e5433430054543351364c54540054545400494654005454544900 
4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 5555555555555555555555555555440e505555000000004b555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555003f3f 3f3f3f3f3f3f3f3f3f38053d3f000000003b3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f38053d3f3f000000003b3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f1a555555555555555555555555555555450e5055000000 004b55555555555555555555555555555555555555555555555555555555344b55555555 55555555555555555555450e505555000000004b55555555555555555555555555555555 55555555555555555555555516555555555555555555555555555555440e505500000000 4b55555555555555555555555555555555555555555555555555555555354b5555555555 555555555555555555555555555555440e5055000000004b555555555555555555555555 555555555555555555555516555555555555555555555555555555450e5055000000004b 55555555555555555555555555555555555555555555555555555555344b555555555555 5555555555555555450e505555000000004b555555555555555555555555555555555555 5555555555555555555516555555555555555555555555555555450e5055000000004b55 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b003354545454544c3317540051545454 005454545454540054115411540054540200002e54540054545400542851005454544900 343534343434343434353434343434343435343434343434343534343434343434353434 343534343434343435343434343434342c14343434343534343434343534343434353434 34343434343534343434343434353431252c343434343534343434343434343435343434 343435343434343435343434343435343434343435343434343434343435343434002424 2424242424242424242422191d2424242424242424242424242424242424242424242424 242424242305242424242424242424242422191d24242424242424242424242424242424 242424242424242424242424241a3434343534343434343434353434343531252c343434 343534343434343434343435343434343434353434343434343434343434142c35343434 343435343434343434343431252c34353434353434343534343434343534343434343534 34343434343435343534343403353534343434343534343434343534352f252c34343434 3534343434343434353434343435343434343434343534343434343434142c3434343434 3534343434343434353434343434353431252a3535343434343434343435343434343434 3435343434343434353434033435343434343434343434343434343431252c3435343434 34343534343434343534343434343534343434343434353434343434142c353434343534 3434343435343434342f252c353434343434353434343434353434343435343434343435 343434343434353434350e353434343434353434343434343434342f272a353434353434 343434343434343435343434343435343434343435343434343434003555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454543651545400542e284e4e33 045454545454540054333333540054463654541154540054545400544917005454544900 444b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b412c4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b003b3b 
3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b 3b3b3b3b3b093b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b3b 3b3b3b3b3b3b3b3b3b3b3b3b3b1f4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2a414b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b2c414b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b144b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454544e0400042e54512e020204 2854545454545400544e004e540054284e54543346540054545400545417005454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b34555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555003f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f1a55555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555344b55555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555165555555555555555555555555555555555555555555555 5555555555555555555555555555555555555555555555555555555555344b5555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555525555555555555555555555555555555555555555555555555 55555555555555555555555555555555555555555555555555555555344b555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555551655555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555003455555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 495454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555554b35555555555555555555555555555555555555 55555555555555555555555555555555555555555314252f555541555555555555555555 555555555555555555555555555555555555555555555555555555555555555555003f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f 3f3f3f3f3f183f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3f3e0918213f373f3f3f3f3f3f3f 3f3f3f3f3f3f3f3f3f3f3f3f3f1a55555555555555555555555555555555555555555553 14252f554155555555555555555555555555555555555555555555555555344b55555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555034343434343434343434343434353434 343434343434343434353434343434343434343434353434343434343434343434343434 343434353434343434343434343434343434343434343434343434343434343434343434 343434343434343434343434343434343435343434343434343434343435343434343434 343434343435343434343434343434343435343434343434343434343435343434343434 343434353434343434343434343434343434343434343434343434343434343434343434 343434343434343434343434343534343434343434343434343534343434343434343434 343434343434505555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00030000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 000000000000000000000000000000000000000000000000000000000000000000000000 0000000000004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454000000175400545411545454 005454541133545454540054545454545454545454545454545454545454545454544900 4b2a00000034552555555555555555555555554b2a555555555555555555555555555555 555555555555555555555555555353555555555548255555554d55555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454005454545400545400465454 0054544c3317545454540054545454545454545454545454545454545454545454544900 4b550055554b55555555555555555555555555550055555555555555534d555555555555 55534d555555555555555555534155555555555555005555555534535555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454000000335400545400494654 0054543351364c5454540054545454545454545454545454545454545454545454544900 4b55005548554525554b250e2f553502145555550055555534022a550e00555534021455 550e00554d0e145355555555344d554b0e02455555002f2a555545345555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555554b00335454005454545400545400542851 0054540200002e5454540054545454545454545454545454545454545454545454544900 4b55000000555500555500530055454d005555550055555514415055344b5555454d0055 55344b552a000048555555550255550e534d025555005055555553005555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454005454545400545400544917 005446365454115454540054545454545454545454545454545454545454545454544900 4b55005555555500555500550055455300555555005555554d411455344b555545530055 55344b5531535553555555550255550253550255550041555555550e5555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454005454545400545400545417 0054284e5454334654540000003354545454545454545454545454545454545454544900 4b48003455554d00534d004d0055340e2534554d005355550e0e255541025355340e2534 5541025348000e5055555555344855450e0245554b024534535545485555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555554555555555555555555555555345555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555550555555555555555555555055555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00151717171717171717171717171717 171717171717171717171717171717171717171717171717171717171717171717172b00 2c1625252525252525252525252525252525252525252525252525252525252525252525 252525252525252525252525252525252525252525252525252525252525252516252527 252525252525252525252525252525252525252525252525252525252525252525252525 252525252525252525252525252525252525252525252525252525252525252525252525 252525251625252525252525252525252525252525252525252525251625252716252525 252525252c004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b002d4949494949494949494949494949 494949494949494949494949494949494949494949494949494949494949494949494200 444b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b44004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454000000005400000004335454 000000043354541754545454545454545454545454545454545454545454545454544900 4b2a00000034555555555555555555555555555555555555555555555555555555555555 55555555555555555555555555554b2a5555555555555555350000345555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 55555555555555555555554d2a5555555555555555555535025055555555555555555555 555555552555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454005454545400545451005454 00545451004e2e0054545454545454545454545454545454545454545454545454544900 4b550055554b5555555555555555555555555555555555555555555555534d5555555555 5555534d5555555555555555555550005555555555555555550055482555555555555555 555555555555555555555555555555534d55555555555555555555555555555555555555 5555555555555555555555550055555555555555555555005355555555555555534d5555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454000000175400545446435454 005454464354540054545454545454545454545454545454545454545454545454544900 4b5500554b555534342a34342a554b0e02455534342a55555534022a550e005555340214 55550e00554d0e14535555555555550055554b34555555555500554b2534342a4d0e1453 5534340034022555555534021455550e0055025525005534342a554d0e1453555555554d 0e1453554b250e2f554d021400555555554b0e02455516003455555534022a550e005555 34342a45254b250e2f55554d0e1400555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454005454545400000000335454 000000003354540054545454545454545454545454545454545454545454545454544900 4b550000005555005555005555550e534d025500555555555514415055344b5555454d00 5555344b552a00004855555555555500555555555555555555000234550055552a000048 55005553005500555555454d005555354b550055550055005555552a000048555555552a 0000485555005300550e535500555555550e534d025555005555555514415055354b5555 00555555005500530055552f415314555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454005454545400545433175454 005454331754540054545454545454545454545454545454545454545454545454544900 4b550055554d550055550055555502535502550055555555554d411455354b5555455300 5555354b552f53555355555555555500555555555555555555005555550055552f535553 550055550055005555554553005555344b550055550055005555552f535553555555552f 535553555500550055004d55005555555502535502555500555555554d411455344b5555 00555555005500550055554d141450555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454000000004900545443285454 005454432854540054545454545454545454545454545454545454545454545454544900 
4b480200002a5500005500005555450e0245550000555555550e0e255540025355340e16 345541025348000e50555555555534004b554b34555555554d0034555500005548000e50 5500024d004500505555340e1635554102531600340e550000555548000e505555555548 000e50554d004d005534002f0e55555555450e0245554d004b5555550e0e165541025355 0000554d004b004d005555452f3448555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 55555555555555555555552f535002555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b002d4949494949494949494949494949 494949494949494949494949494949494949494949494949494949494949494949494200 444b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 
4b4b4b4b4b4b4b4b4b4b4b2c0003444b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b4b 4b4b4b4b44004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00263333333333333333333333333333 333333333333333333333333333333333333333333333333333333333333333333333200 343435343434353434343534343434343534343434343434353434343434343534343435 343435343434353434343434353434343434353434353434343435343434353435343434 353434343434353434343534343435343434353435343434353434343434353434353434 343435343435343435343434353434343434353434343534343435343434353434343434 353434343434343534343434343434353434343434343534353434343434343434343534 3434353434004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454545454545454545454545454 545454545454545454545454545454545454545454545454545454545454545454544900 4b5555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454000000005400000004335454 000000043351110033545454545454545454545454545454545454545454545454544900 4b2a00000034555555555555555555555555555555555555555555555555555555555555 55555555555555555555555555531402455555555555555500025355552f004d16555555 555555555555552555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555540555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555554b00335454005454545400545451005454 005454510033465100545454545454545454545454545454545454545454545454544900 4b550055554b5555555555555555555555555555555555555555555555534d5555555555 5555534d5555555555555555554555480255555555555555550048555314005555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555534d55555555555555555555555555555500555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555554b004b5555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 555555555555555555555555555555555555555555555555555555555555555555555555 
[pages 7-18 of the compiled PostScript documentation: a rendered duplicate of the string-handling, commands, processing environment, job, 2nd level staging, and results chapters whose LaTeX source follows]
pegasus-wms_4.0.1+dfsg/src/tools/k.2/doc/statetable.sxc0000755000175000017500000002623111757531137022000 0ustar ryngerynge[binary OpenOffice.org 1.1.0 spreadsheet, Jens-S. Vöckler, 2004-02-09/10: the string parser state transition table behind the statetable figure]
Table~\ref{tab:strings} illustrates the various interpretations after the
different stages for different input strings. For illustration purposes, a
resulting naked string or strings are enclosed in >><< pairs to show the
delimitation:

\begin{table}[htbp]
\centering\small
\begin{tabular}{|l|l|}\hline
\textbf{job string input} & \textbf{argv result}\dd\hline
'/bin/echo hi \$LOGNAME' & >>hi<< >>voeckler<<\dd
'/bin/echo \bs"hi \$LOGNAME\bs"' & >>"hi<< >>voeckler"<<\dd
'/bin/echo \bs\bs"hi \$LOGNAME\bs\bs"' & >>\bs{}hi voeckler\bs<<\dd
'/bin/echo \bs\bs\bs"hi \$LOGNAME\bs\bs\bs"' & >>\bs"hi<< >>voeckler\bs"<<\dd\hline
"/bin/echo 'hi \$LOGNAME'" & >>hi \$LOGNAME<<\dd
"/bin/echo \bs'hi \$LOGNAME\bs'" & >>'hi<< >>voeckler'<<\dd
"/bin/echo \bs\bs'hi \$LOGNAME\bs\bs'" & >>\bs{}hi \$LOGNAME\bs<<\dd
"/bin/echo \bs\bs\bs'hi \$LOGNAME\bs\bs\bs'" & >>\bs'hi<< >>voeckler\bs'<<\dd
\end{tabular}
\caption{Conversion of job strings into argument vector elements.}
\label{tab:strings}
\end{table}

\begin{figure}[htbp]
\hspace*{-50bp}
\includegraphics[scale=0.7,viewport= 0 180 792 612]{statetable}
\caption{State transition table for the string parser.}
\label{fig:statetable}
\end{figure}

\begin{difficult}
Figure~\ref{fig:statetable} shows the Mealy state transition automaton for
parsing job strings and regular strings. The table shows, for a given state
and input character class, the resulting state and action.

The start state for job strings is NQ\_LWS, and the left side of yellow
states applies. The start state for regular single-quoted strings is
SQ\_MAIN and for regular double-quoted strings DQ\_MAIN.
\end{difficult}

Multi-line strings are permitted. There are two multi-line modes. In
line-continuation mode, a backslash immediately before a linefeed character
implies that both are ignored, and the next line is viewed as part of the
current line. An unprotected newline character inside a string will be
copied into a regular string, or is a split point for job strings.
Table~\ref{tab:multiline} shows examples for the handling of multi-line
strings.

\begin{table}[htbp]
\centering
\begin{tabular}{|p{25mm}|l|}\hline
\textbf{Input string} & \textbf{Resulting string}\dd\hline
1: \verb|"some \|\newline2: \verb|string"| & >>\verb|some string|<<\dd
1: \verb|"some|\newline2: \verb|string"| & >>\verb|some\nstring|<<\dd
\end{tabular}
\caption{Multi-line string handling.}
\label{tab:multiline}
\end{table}

\subsection{Format}
\label{sec:format}

The configuration file is a format-free language. It does not care about
the amount of (linear) whitespace between its tokens. However, to maintain
Unix-shell-like semantics, commands are auto-delimited by the end of a
line. Continuation lines are not permitted (if you don't know what that
means, you don't need it). Multiple commands in the same line must be
separated with a semicolon.

The file may contain comments in the tradition of scripting languages. A
comment starts with the hash (\#) character, and extends through the end of
the line.

There is no filename globbing. If you need globbing (e.g. working with
wildcarded filenames as arguments), you must run your jobs through
\verb|/bin/sh -c | to let the shell do the globbing for you. However, be
warned that such encapsulation will add another level of job string
de-quoting and interpretation.
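To make these rules concrete, here is a minimal, hypothetical configuration
fragment; the individual commands used are described in
section~\rref{sec:commands}:

\begin{verbatim}
# a comment extends through the end of the line
include "/my/site/local.cfg"
site 'UBuffalo'; dv 'voeckler::right:1.0'    # two commands on one line

pre '/bin/date'
main 'computation.sh'
\end{verbatim}

Each line break delimits a command; only the two commands sharing a line
need the separating semicolon.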
\section{Commands}
\label{sec:commands}

While most commands have rather static semantics, some permit optional
arguments. The following sections use these generic descriptions to denote
a non-terminal token:

>>string<< is the placeholder for a string, single or double quoted.
>>id<< is the placeholder for an identifier. >>options<< is the placeholder
for a list of options. Each option is an identifier itself. Multiple
options are separated by linear whitespace. Reserved words are not
permitted as valid identifiers within an option string.

\subsection{Other commands}
\label{sec:other}

This category describes commands which deal with the configuration of the
kickstart application itself.

\subsubsection{Include}
\label{sec:include}

\begin{verbatim}
include >>string<<

include "/my/site/local.cfg"
\end{verbatim}

The configuration file permits the recursive inclusion of other
configuration files, which are specified by giving their name within the
>>string<<. Since these denote files on the remote system, this mechanism
allows for site-specific tie-ins of configuration values. Furthermore,
since a double-quoted string is subject to interpretation, it permits a
rich flavor of dynamically determined filenames to include.

If the file cannot be opened for reading by the effective user running
gridshell, its contents will be ignored.

Included files may include other files. However, a loop-detection mechanism
is not (yet) in place; circular dependencies will most likely crash
gridshell through remote resource exhaustion.

\subsubsection{Debug}
\label{sec:debug}

Not yet implemented - debugging is currently hard-coded as seen fit.

\subsubsection{Xmlns}
\label{sec:xmlns}

\begin{verbatim}
xmlns >>identifier<<

xmlns ptc
\end{verbatim}

The provenance tracking record (PTR) may occasionally require a namespace
identifier, if it is to be included as part of other XML documents.
Usually, you won't need to set this attribute, though. Please note that the
argument is an identifier, not a string. The rules for valid identifiers
are more restrictive, see section~\rref{sec:ident}.

Each repetition of this command will overwrite any previously configured
value.

\subsection{Descriptions}
\label{sec:descriptions}

The descriptions set up certain values to be reflected into the provenance
tracking record (PTR), the ultimate result of the gridshell.

\subsubsection{Site}
\label{sec:site}

\begin{verbatim}
site >>string<<

site 'UBuffalo'
\end{verbatim}

The string defines the name of a site. This is useful for future cases,
where a 2nd level staging mechanism accesses some form of external replica
mechanism, and needs to store a site handle. Note that the site handle is
an arbitrarily chosen name with no further meaning beyond the GriPhyN
Virtual Data System.

Each repetition of this command will overwrite the previous value. For now,
the site handle is just reflected in the PTR.

\subsubsection{Transformation}
\label{sec:tr}

\begin{verbatim}
tr >>string<< [>>string<< [..]]
transformation >>string<< [>>string<< [..]]

transformation 'voeckler::findrange:1.0'
\end{verbatim}

This command comes in two flavors, with a short and a long reserved word.
Their meaning is equivalent. The string list arguments each describe a
fully-qualified VDC-definition name. Such a name is usually something akin
to "namespace::\-name:\-version".

Each repetition of this command will append to a list of transformation
records. While there can be only one transformation that is being called, a
compound transformation may call other transformations. Thus, the call
stack is part of the record. The values are solely used to be reflected in
the PTR.
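As a hypothetical sketch of these append semantics, a compound
transformation calling another transformation could record its call stack
by repeating the command; both names below are made up:

\begin{verbatim}
tr 'voeckler::diamond:1.0'
tr 'voeckler::findrange:1.0'
\end{verbatim}

The first entry would name the compound transformation being called, the
second a transformation called from within it; both end up as
transformation records in the PTR.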
\subsubsection{Derivation}
\label{sec:dv}

\begin{verbatim}
dv >>string<<
derivation >>string<<
derivation 'voeckler::right:1.0'
\end{verbatim}

This command also comes in two reserved word flavors, which are equivalent.
The string argument describes the fully-qualified VDC-definition name,
which is usually something akin to "namespace::name:version". Each
repetition of this command will overwrite the previous value. The value is
solely used to be reflected in the PTR.

\subsubsection{Input}
\label{sec:input}

\begin{verbatim}
input >>lfn<< >>sfn<<
input md5 >>lfn<< >>sfn<<
input >>lfn<< >>sfn<< >>tfn<< ...
input md5 >>lfn<< >>sfn<< >>tfn<< ...
input 'voeckler.f.a' '/home/voeckler/vdldemo/voeckler.f.a'
\end{verbatim}

Each repetition of this command will register a logical filename (LFN) with
a storage filename (SFN). Additionally, if 2nd level staging is required,
any number of input transfer filenames (iTFN) may be associated. For each
LFN, only one SFN can be associated. For each SFN, multiple iTFNs may be
associated. All of LFN, SFN and TFN are strings, and thus must be enclosed
in quotes.

Effectively, for each registered input filename, a stat record will be
obtained from the SFN after gridshell has parsed all its arguments and run
an optional 2nd level stage-in job (see~\rref{sec:stagein}). The stat
record is obtained \emph{before} any of the regular subjobs pre through
cleanup are run.

The information from the stat call is reflected into the PTR, one record
for each file, featuring the LFN as distinction. Files may appear in both
the input and the output list.

The optional argument \verb|md5| specifies that an MD5 sum should be
obtained of the file, and become part of the resulting stat info record.
Note that \verb|md5| is an option, and thus not quoted.

\subsubsection{Output}
\label{sec:output}

\begin{verbatim}
output >>LFN<< >>SFN<<
output md5 >>LFN<< >>SFN<<
output >>LFN<< >>SFN<< >>TFN<< ...
output md5 >>LFN<< >>SFN<< >>TFN<< ...
output md5 'voeckler.f.d' '/home/voeckler/vdldemo/voeckler.f.d'
\end{verbatim}

Each repetition of this command will register a logical filename LFN with a
storage filename SFN. Additionally, if 2nd level staging is required, any
number of output transfer filenames (oTFN) may be associated. For each LFN,
only one SFN can be associated. For each SFN, multiple oTFNs may be
associated. All of LFN, SFN and TFN are strings, and thus must be enclosed
in quotes.

Effectively, for each registered output filename, a stat record will be
obtained from the SFN after gridshell ran all jobs, but before the optional
2nd level stage-out job is run (see~\rref{sec:stageout}).

The information from the stat call is reflected into the PTR, one record
for each file, featuring the LFN as distinction. Files may appear in both
the input and the output list.

The optional argument \verb|md5| specifies that an MD5 sum should be
obtained of the file, and become part of the resulting stat info record.
Note that \verb|md5| is an option, and thus not quoted.

\subsection{Processing Environment}
\label{sec:processing}

The processing environment deals with setting up the environment in which
jobs (see~\rref{sec:jobs}) are run. The changes pertain only to gridshell,
and stay within gridshell.

\subsubsection{Set}
\label{sec:set}

\begin{verbatim}
set >>id<< >>string<<
set PATH "$PATH:$HOME/bin"
\end{verbatim}

The set command is similar to the C-shell \texttt{setenv} command.
The identifier >>id<< denotes a key in the Unix environment, which is to be
set to the specified value of >>string<<. Please note that the string, if
double-quoted, may be subject to variable interpolation.

Environment settings are \emph{not} reflected in the PTR. The environment
variables are changed during compile-time. Environment values are
overwritten, if they already exist.

\subsubsection{Chdir}
\label{sec:chdir}

\begin{verbatim}
chdir >>string<<
chdir create >>string<<
chdir create "/home/$LOGNAME/vdldemo/tmp"
\end{verbatim} %$

The chdir command determines the current working directory. The command
comes in two flavors, with and without the \emph{create} option. With the
\emph{create} option, gridshell attempts to create the specified working
directory before changing into it. Failure to create the directory because
it already exists is ignored. Other failures of the mkdir command will
cause a semantic error during compile-time.

Regardless of options, in the next step, gridshell will attempt to change
into the specified working directory. If the change fails, the previous
current working directory remains current. The working directory is changed
during compile-time. Effects are immediate on relative filenames.

If no chdir command is specified whatsoever, the gridshell uses the working
directory assigned by the scheduling system, i.e. the directory it was
started in. The current working directory will be reflected in the PTR.
Multiple specifications will change the directory again and again. The
finally chosen working directory is recorded in the PTR.

\subsubsection{Feedback}
\label{sec:feedback}

\begin{verbatim}
feedback >>string<<
feedback >>id<< >>string<<
feedback ATLAS_FEEDBACK "/dev/shm/atlas-XXXXXX"
feedback 'gs-fb-XXXXXX'
\end{verbatim}

The feedback is currently a simple mechanism to allow gridshell-aware
applications to send application-specific data back to the submit host. Any
processing, collection or visualization of this data is up to the user
application framework.

The >>id<< specifies the name of an environment variable which is to be set
to the filename of the named pipe (FIFO). If no identifier is specified,
the default of \texttt{GRIDSTART\_CHANNEL} is used.

The filename is created using the mkstemp(2) system call from the pattern
provided by the string. The filename string \emph{must} have a suffix of
six capital \texttt{X} letters, or these letters will be mandatorily
appended, including a separating hyphen. If the filename is absolute,
starting with a slash, the specified path will be used. If the filename is
relative, not starting with a slash, an appropriate temporary directory
will be determined and prefixed.

The gridshell attempts to create a FIFO (Unix named pipe) from the pattern
with the temporary filename. It changes the environment to record the
filename, so that subjobs (see~\rref{sec:jobs}) may write to this channel.

[future lab: Permit any number of such channels, and multiplex them]

If feedback is specified multiple times, only the last specification will
persist. Previous channels will be closed and removed. The feedback
mechanism is only activated, if the feedback was specified at least once.
The chosen FIFO and its stat information is part of the standard statinfo
PTR record.

\subsubsection{Stdin}
\label{sec:stdin}

\begin{verbatim}
stdin >>string<<
stdin here >>string<<
stdin 'my/file'
stdin here 'copy to stdin'
\end{verbatim}

These directives permit the connection of the stdin filehandle of subjobs
with an existing file.
The primary use of this option is to peruse input, if any stdin pertains to
a GVDS-tracked file during a computation. Ideally, only the main
computational jobs should access these files.

You can (currently) only use one redirected stdio for all subjobs. Thus, if
you redirect stdin to a file, this file will be used for all subjobs.

Will the file be reopened for each job? Depending on what you connect your
stdin to, the answer is yes and no. Please refer to table~\ref{tab:stdin}
for the different open modes. The reason a regular file is reopened lies in
the fact that an open file descriptor is not maintained between jobs.
Furthermore, the last position of the file cannot be remembered, because it
cannot be obtained: The file handle of a regular file, once opened, will be
passed completely into the job's application space.

\begin{table}[htb]
 \centering
 \begin{tabular}{|r|p{80mm}|}\hline
 \textbf{file type} & \textbf{reuse or reopen mode}\dd\hline
 regular file & file will be reopened for each job.\dd
 file descriptor & file descriptor will be dupped.\dd
 temporary file & same as file descriptor.\dd
 here document & same as file descriptor.\dd
 user FIFO & not applicable, but same as descriptor.\dd
 \end{tabular}
 \caption{Open modes for \emph{stdin}.}
 \label{tab:stdin}
\end{table}

The special filename of just a hyphen means to connect the filehandle with
the one passed to the gridshell. However, in order to use a \emph{here
document}, which copies content from the configuration onto the stdin of
the application, you must use the \texttt{here} option.

When using the \texttt{here} option, the string is not a filename, but the
content to be put into a temporary file, which is subsequently connected
with the stdin of the jobs. Multi-line strings are valid, and regular
string interpolation applies, see section~\rref{sec:strings}.

[future lab: Add options to allow disassociation of stdio handles during
stage jobs. Add options to permit disassociation during non-main job.]

A stdin redirection should only be specified once. A re-configuration will
close a previous configuration. Default for stdin is to connect to
\verb|/dev/null| in the absence of any specification.

\subsubsection{Stdout and Stderr}
\label{sec:stdout}\label{sec:stderr}

\begin{verbatim}
stdout >>string<<
stdout append >>string<<
stdout truncate >>string<<
stderr >>string<<
stderr append >>string<<
stderr truncate >>string<<
stdout '-'
stderr append 'local.err'
\end{verbatim}

These directives permit the connection of stdio filehandles of subjobs with
existing files. The primary use for these options is to capture output, if
any stdout or stderr pertains to a GVDS-tracked file during a computation.
Ideally, only the main computational jobs should access these files.
However, you should keep in mind that various error conditions may result
in output to stderr for failures of other subjobs.

You can (currently) only use one redirected stdio for all subjobs. Thus, if
you redirect stdout into a file, this file will be used for all subjobs.

Default mode for these directives is truncate mode. In truncate mode, a
previously (before the start of the gridshell) existing file is truncated
at compile-time. In append mode, a previously existing file is opened for
appending.

The special filename of just a hyphen means to connect the filehandle with
the one passed to the gridshell.

[future lab: Add options to allow disassociation of stdio handles during
stage jobs. Add options to permit disassociation during non-main job.]
Each stdio redirection should only be specified once. A re-configuration
will close and overwrite a previous setup. Default for stdout and stderr is
to connect to separate temporary files in the absence of any specification.
The first page of data from the temporary files is reflected in the stat
info records of the PTR for stdout and stderr.

\subsection{Job commands}
\label{sec:jobs}

The job configurations describe the command and command line to be used for
subjobs. All specifications are optional except for the main job, which
must be specified. The chaining of job results is loosely as follows:

\begin{enumerate}
\item run stage-in job, if applicable
\item obtain stat records on files declared input
\item run setup job chain
\item run pre job chain \texttt{\&\&} run main job \texttt{\&\&} run post job chain
\item run cleanup job chain
\item obtain stat records on files declared output
\item run stage-out job, if applicable
\end{enumerate}

If any prejob exists and fails, no further jobs in the pre, main, post
chain will be run. If the main job fails, no further jobs in the post chain
will be run. If any of the postjobs fails, the chain will not be continued.
The stagein, stageout, setup and cleanup jobs are independent of any
failures in the pre chain, main job, and post chain.

Any number of setup, pre, post and cleanup jobs may be specified. They are
queued into separate chains, and each chain executes its jobs in the order
of their specification.

The main job is mandatory. It must be specified once, and should be
specified once only. Multiple specifications will overwrite previous ones.

Please note that the command line specification string for jobs is subject
to double interpretation, once during compile-time, and once during
run-time, see~\rref{sec:strings} for details.

[future lab: Drop the double interpolation -- too confusing. It is an
artefact of code recycling]\newline{}[Is it already dropped?]

The stdio of any job will be connected according to the specification of
stdin, stdout and stderr, see~\rref{sec:processing}. Stdin defaults to
\verb|/dev/null|, stdout to a temporary file, and stderr to a different
temporary file.

Absolute application names are taken at face value, and may fail if they do
not exist. Relative names are canonified with the help of the current
working directory. [fixme: are they?]

\subsubsection{Setup Jobs}
\label{sec:setup}

\begin{verbatim}
setup >>string<<
setup "/bin/mkdir -p $HOME/tmp"
\end{verbatim}

The setup job configuration may be specified multiple times. Each job will
be queued into a chain \emph{setup}, and at run-time, these will be
executed in the order of their chaining. Any setup job may fail without
affecting any other jobs.

Note: Usually you are better off wrapping even simple commands into a real
shell script, setting the execute bit on it, and running just that
particular shell script.

\subsubsection{Pre Jobs}
\label{sec:pre}

\begin{verbatim}
pre >>string<<
pre '/bin/date'
pre '/usr/bin/env perl -i.bak -pe \\"s{a}{b}g\\" *.jof'
\end{verbatim}

The pre job configuration may be specified multiple times. Each job will be
queued into a chain \emph{prejobs}, and at run-time, these will be executed
in the order of their chaining. The first failed prejob will result in a
failed execution of the computation. No further prejobs, no main job, and
no postjobs will be run in case of failure.
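To pull the chaining rules together, the following is a minimal sketch of a
job section; the script and directory names are hypothetical:

\begin{verbatim}
# minimal sketch: only the main job is mandatory
setup '/bin/mkdir -p tmp'   # runs regardless of pre/main/post failures
pre '/bin/date'             # a failed pre job skips main and post
main 'computation.sh'       # relative name, resolved against the cwd
post '/bin/date'            # runs only if the pre chain and main succeed
cleanup '/bin/sh -c \'rm -f tmp/*.log\''
\end{verbatim}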
\subsubsection{Main Job}
\label{sec:main}

\begin{verbatim}
main >>string<<
main 'computation.sh'
\end{verbatim}

The main job must be specified, and should only be specified once. A
minimum configuration file contains at least a configuration for the main
job. If the main job fails, the whole run will be assessed as a failure. No
post jobs will be run in case of failure.

\subsubsection{Post Jobs}
\label{sec:post}

\begin{verbatim}
post >>string<<
post '/bin/date'
\end{verbatim}

The post job configuration may be specified multiple times. Each job will
be queued into a chain \emph{postjobs}, and at run-time, these will be
executed in the order of their chaining. The first failed postjob will
result in a failed execution of the computation. No further postjobs will
be run in case of failure.

\subsubsection{Cleanup Jobs}
\label{sec:cleanup}

\begin{verbatim}
cleanup >>string<<
cleanup 'rm *.log'                 # this will NOT work: ENOENT
cleanup '/bin/sh -c \'rm *.log\''  # this might work
\end{verbatim}

The cleanup job configuration may be specified multiple times. Each job
will be queued into a chain \emph{cleanup}, and at run-time, these will be
executed in the order of their chaining. Any cleanup job may fail without
affecting any other jobs.

Note: Usually you are better off wrapping even simple commands into a real
shell script, setting the execute bit on it, and running just that
particular shell script.

\subsection{Optional 2nd-level Staging}
\label{sec:staging}

The stageout and stagein jobs are optional, and should each be specified at
most once. The job restrictions and features shown in
section~\rref{sec:jobs} also apply to stage jobs. Multiple specifications
of stage jobs will overwrite previous specifications.

[future lab: Do we need multiple stage call-outs?]

The stage jobs deal with the 2nd level staging of files. The 2nd level
staging comes into effect, if a compute cluster does not share a filesystem
between the externally visible storage element and the internal worker
node. The 2nd level staging may be employed in this case to transfer input
files to the worker node before any computation takes place, and to store
output files to the storage element after any computation took place.

Stage jobs have an invisible final argument. The final argument is
dynamically affixed. It is the name of a temporary file which contains a
list of file mappings from SFN to TFN. The format is determined by the
format string, which is the first argument string to the stage commands.
The format string may contain three kinds of placeholders, each of which
may occur multiple times:

\begin{center}
 \begin{tabular}{|l|p{100mm}|}\hline
 \textbf{fmt} & \textbf{meaning}\dd\hline
 \texttt{\%l} & replaced with the LFN\dd
 \texttt{\%s} & replaced with the SFN\dd
 \texttt{\%t} & shortcut for \texttt{\%t\{ \}}\dd
 \texttt{\%t\{x\}} & replaced with the TFN list. The string x is the
 separator, if more than one TFN is available\dd
 \end{tabular}
\end{center}

The concrete interface is up to the user and the external application being
called out to. Note that curly braces may not appear within the separator
string x. Omitting the separator string defaults to a single space
separator. Use double-quoted strings, if you intend to use
$\backslash{}r\backslash{}n$. Some popular examples include:

\begin{verbatim}
'%s %t'            single-line: SFN -> TFN1 TFN2 ..
"%l %s %t{\r\n+ }" multi-line: use continuation symbol
"%s %t{\r\n%s }"   multi-line: repeat SFN for each TFN
\end{verbatim}

A CRLF will be implicitly appended to the end of the format string.
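For illustration, assuming two hypothetical input files with one TFN each,
the format string '%s %t' would produce a list file along these lines, one
CRLF-terminated line per SFN:

\begin{verbatim}
/se/voeckler.f.a /scratch/tmp/voeckler.f.a
/se/voeckler.f.b /scratch/tmp/voeckler.f.b
\end{verbatim}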
The author of gridshell is aware that filenames with spaces inside them, as
frequently seen on Macs and now on Windows, will break the interface.
[fixme: we may/will need quoting]

In order to create the list of files, the input and output configuration
commands must use the three-argument version, which associates a TFN with
an LFN.

\subsubsection{Stagein}
\label{sec:stagein}

\begin{verbatim}
stagein >>string<< >>string<<
stagein "%s %t" 'transfer.sh -I'   # make sure to have x bit set
\end{verbatim}

The stagein job specification, if present, specifies a script or callout to
run for the 2nd level stage-in of files. The final argument will be
dynamically attached, and is the name of a file containing a list of file
mappings.

No stagein job will be run, if the list of stageable input files is empty.
If the two-argument input configuration was used, i.e. the TFN is missing
for an SFN, this file will not become part of the list.

\subsubsection{Stageout}
\label{sec:stageout}

\begin{verbatim}
stageout >>string<< >>string<<
stageout "%s %t" 'transfer.sh -O'   # ensure x bit
\end{verbatim}

The stageout job specification, if present, specifies a script or callout
to run for the 2nd level stage-out of files. The final argument will be
dynamically attached, and is the name of a file containing a list of file
mappings.

No stage-out job will be run, if the list of stageable output files is
empty. If the two-argument output configuration was used, i.e. the TFN is
missing for an SFN, this file will not become part of the list.

\section{Results}
\label{sec:results}

The gridshell [...]

\subsection{The Provenance Tracking Record}
\label{sec:ptr}

The provenance tracking record written by the gridshell is transported on
\emph{stdout} made available by the remote scheduler. Since the provenance
tracking record will be compiled and written after all jobs were run, it is
usually a good idea, when using Globus, to disconnect the stdout, and
transfer results at the end-of-(Globus-)job. This will save filehandle
resources and other kernel resources on the gatekeeper host.

The provenance tracking record contains a variety of information, usually
more than will be stored in the provenance tracking catalog (PTC). Much of
the information is useful for debugging, too.

Some remote scheduling systems log their own information on the stdout.
Furthermore, if subjobs are configured to use gridshell's stdout handle,
their output may also appear on this handle. While the PTR is distinct in
this mix of data, dissemination and multiplexing of stdout is not a task
that can be solved by a gridshell.

The provenance tracking record (also known as invocation records) can be
found documented online. Please refer to
\verb|http://www.griphyn.org/workspace/VDS/|.

\subsection{The Feedback Channel}
\label{sec:channel}

The feedback channel is transported via the stderr filehandle to the submit
host. Since feedback information is immediate, Globus-IO streaming mode is
applicable for this channel. However, Globus streaming is a best-effort
operation: Remote scheduling systems may decide to \emph{not} make the
stderr available until after the job is done. There is no guarantee that
stderr will actually be streamed, or that it will stream continually.

The gridshell tries to use the stderr for application feedback. The
gridshell feedback records are XML-encapsulated chunks to distinguish them
from other noise on the stderr channel.
\subsubsection{Exponentially Backed-off Heartbeat}

The gridshell wakes up at regular intervals to check on various filehandles
and the system state. At exponentially growing intervals, gridshell will
check that it could actually kill its child. The result will be sent as a
feedback chunk with a channel number of zero:

\begin{verbatim}
\end{verbatim}

The first heartbeat is registered after 30 seconds, with the interval
doubling each time. [fixme: grow not quite so fast, but faster than linear]

\subsubsection{Gridshell-aware Applications}

If an application is gridshell-aware, it can use the FIFO created by the
feedback configuration to send back application data. The data will be
collected in the submit host's stderr file for the job. The name of the
FIFO is determined by the identifier of the environment variable. Thus,
this mechanism can even be used in shell scripts, which may e.g. contain a
line like:

\begin{verbatim}
echo "some comment" >> $GRIDSTART_CHANNEL
\end{verbatim}

The application data will be chunked into XML, e.g.:

\begin{verbatim}
\end{verbatim}

The chunking borders are arbitrary, and outside the sphere of influence of
the gridshell. Never assume that data written in one write will result in
one chunk, nor that separately written data will result in multiple chunks.

Worse, there is currently the limitation that only one writer at a time
should write to the feedback channel. Since it is a mostly untested
feature, there may still be bugs involved, which may lead to the premature
cancellation of a compute job. Thus, please test before relying on it in
production.

\end{document}

%%% Local Variables:
%%% mode: latex
%%% TeX-master: t
%%% End:
pegasus-wms_4.0.1+dfsg/src/tools/k.2/doc/with.eps0000644000175000017500000003355411757531137020620 0ustar ryngerynge%!PS-Adobe-2.0 EPSF-1.2
%%BoundingBox: 26 636 447 776
%%Title: with
%%CreationDate: Wed May 21 15:53:39 2003
%%Creator: Tgif-4.1.41 written by William Chia-Wei Cheng (bill.cheng@acm.org)
%%Pages: 1
%%EndComments
% [tgif drawing body: boxes for the submit host, remote scheduler,
%  kickstart, and application, wired together through Globus GASS, with
%  the stdio descriptors of kickstart and the application annotated]
%%EOF
closepath } bd /CHP { charpath } bd /CT { curveto } bd /L { lineto } bd /RL { rlineto } bd /M { moveto } bd /RM { rmoveto } bd /S { stroke } bd /F { fill } bd /TR { translate } bd /RO { rotate } bd /SC { scale } bd /MU { mul } bd /DI { div } bd /DU { dup } bd /NE { neg } bd /AD { add } bd /SU { sub } bd /PO { pop } bd /EX { exch } bd /CO { concat } bd /CL { clip } bd /EC { eoclip } bd /EF { eofill } bd /IM { image } bd /IMM { imagemask } bd /ARY { array } bd /SG { setgray } bd /RG { setrgbcolor } bd /SD { setdash } bd /W { setlinewidth } bd /SM { setmiterlimit } bd /SLC { setlinecap } bd /SLJ { setlinejoin } bd /SH { show } bd /FF { findfont } bd /MS { makefont setfont } bd /AR { arcto 4 {pop} repeat } bd /CURP { currentpoint } bd /FLAT { flattenpath strokepath clip newpath } bd /TGSM { tgiforigctm setmatrix } def /TGRM { savematrix setmatrix } def end %%EndProlog %%Page: 1 1 %%PageBoundingBox: 26 636 447 776 tgifdict begin /tgifsavedpage save def 1 SM 1 W 0 SG 72 0 MU 72 11 MU TR 72 128 DI 100.000 MU 100 DI DU NE SC GS /tgiforigctm matrix currentmatrix def % BOX 0 SG GS 10 SM GS NP 275 50 M 425 50 L 425 200 L 275 200 L CP S GR GR % TEXT NP 0 SG GS 1 W 300 45 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (rem. scheduler) SH GR GR % OVAL 0 SG NP 495 125 5 5 TGEL F GS GS NP 495 125 5 5 TGEL S GR GR % BOX 0 SG GS 10 SM GS NP 495 50 M 645 50 L 645 200 L 495 200 L CP S GR GR % TEXT NP 0 SG GS 1 W 530 45 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (kickstart) SH GR GR % OVAL 0 SG NP 495 75 5 5 TGEL F GS GS NP 495 75 5 5 TGEL S GR GR % OVAL 0 SG NP 495 175 5 5 TGEL F GS GS NP 495 175 5 5 TGEL S GR GR % OVAL 0 SG NP 425 125 5 5 TGEL F GS GS NP 425 125 5 5 TGEL S GR GR % OVAL 0 SG NP 425 75 5 5 TGEL F GS GS NP 425 75 5 5 TGEL S GR GR % OVAL 0 SG NP 425 175 5 5 TGEL F GS GS NP 425 175 5 5 TGEL S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 425 75 M 0 35 atan DU cos 8.000 MU 460 exch SU exch sin 8.000 MU 75 exch SU L TGSM 1 W S GR GS TGSM NP 460 75 8.000 3.000 35 0 TGAT 1 SG CP F 0 SG NP 460 75 8.000 3.000 35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 495 125 M 0 -35 atan DU cos 8.000 MU 460 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S GR GS TGSM NP 460 125 8.000 3.000 -35 0 TGAT 1 SG CP F 0 SG NP 460 125 8.000 3.000 -35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 495 175 M 0 -35 atan DU cos 8.000 MU 460 exch SU exch sin 8.000 MU 175 exch SU L TGSM 1 W S GR GS TGSM NP 460 175 8.000 3.000 -35 0 TGAT 1 SG CP F 0 SG NP 460 175 8.000 3.000 -35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 455 75 M 495 75 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 425 125 M 465 125 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 425 175 M 465 175 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 505 80 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([0] unused) SH GR GR % TEXT NP 0 SG GS 1 W 505 130 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([1] IV rec.) 
SH GR GR % TEXT NP 0 SG GS 1 W 505 180 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([2] gs err) SH GR GR % OVAL 0 SG GS GS NP 337 91 37 16 TGEL S GR GR % ARC 0 SG GS GS NP 337 158 37 16 -180 -360 TGAR S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 300 91 M 300 158 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 375 91 M 375 158 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 337 138 M GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (GASS) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (GASS) SH GR 0 15 RM GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (cache) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (cache) SH GR GR % POLY/OPEN-SPLINE 0 SG GS NP 425 75 M 365 135 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 425 125 M 365 135 L 425 175 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 222 129 M GS GS 0 0 AD GR 2 DI NE 0 RM GR GR % OVAL 0 SG NP 715 125 5 5 TGEL F GS GS NP 715 125 5 5 TGEL S GR GR % BOX 0 SG GS 10 SM GS NP 715 50 M 790 50 L 790 200 L 715 200 L CP S GR GR % TEXT NP 0 SG GS 1 W 715 45 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (application) SH GR GR % OVAL 0 SG NP 715 75 5 5 TGEL F GS GS NP 715 75 5 5 TGEL S GR GR % OVAL 0 SG NP 715 175 5 5 TGEL F GS GS NP 715 175 5 5 TGEL S GR GR % TEXT NP 0 SG GS 1 W 725 80 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([0] stdin) SH GR GR % TEXT NP 0 SG GS 1 W 725 130 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([1] stdout) SH GR GR % TEXT NP 0 SG GS 1 W 725 180 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([2] stderr) SH GR GR % OVAL 0 SG NP 645 125 5 5 TGEL F GS GS NP 645 125 5 5 TGEL S GR GR % OVAL 0 SG NP 645 75 5 5 TGEL F GS GS NP 645 75 5 5 TGEL S GR GR % OVAL 0 SG NP 645 175 5 5 TGEL F GS GS NP 645 175 5 5 TGEL S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 645 75 M 0 35 atan DU cos 8.000 MU 680 exch SU exch sin 8.000 MU 75 exch SU L TGSM 1 W S GR GS TGSM NP 680 75 8.000 3.000 35 0 TGAT 1 SG CP F 0 SG NP 680 75 8.000 3.000 35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 715 125 M 0 -35 atan DU cos 8.000 MU 680 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S GR GS TGSM NP 680 125 8.000 3.000 -35 0 TGAT 1 SG CP F 0 SG NP 680 125 8.000 3.000 -35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 715 175 M 0 -35 atan DU cos 8.000 MU 680 exch SU exch sin 8.000 MU 175 exch SU L TGSM 1 W S GR GS TGSM NP 680 175 8.000 3.000 -35 0 TGAT 1 SG CP F 0 SG NP 680 175 8.000 3.000 -35 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS NP 675 75 M 715 75 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 645 125 M 685 125 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 645 175 M 685 175 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 635 80 M GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (-i fn) TGSW AD GR NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (-i fn) SH GR GR % TEXT NP 0 SG GS 1 W 635 130 M GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (-o fn) TGSW AD GR NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (-o fn) SH GR GR % TEXT NP 0 SG GS 1 W 635 180 M GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (-e fn) TGSW AD GR NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (-e fn) SH GR GR % POLY/OPEN-SPLINE 0 SG GS [4 4] 0 SD NP 595 125 M 0 -30 atan DU cos 8.000 MU 565 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S [] 0 SD GR GS TGSM NP 565 125 8.000 3.000 -30 0 TGAT 1 SG CP F 0 SG NP 565 125 8.000 3.000 -30 0 TGAT CP F GR % POLY/OPEN-SPLINE 0 SG GS [4 4] 0 SD NP 600 175 M 580 175 L 580 125 L 0 -15 atan DU cos 8.000 MU 565 exch SU exch sin 8.000 MU 125 exch SU L TGSM 1 W S [] 0 SD GR GS TGSM NP 565 125 8.000 3.000 -15 0 TGAT 1 SG CP F 0 SG NP 565 125 8.000 3.000 -15 0 TGAT CP F GR % TEXT NP 0 SG GS 1 W 
360 240 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([0] /dev/null) SH GR 0 15 RM GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([1] /dev/null) SH GR 0 15 RM GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([2] /dev/null) SH GR GR % TEXT NP 0 SG GS 1 W 360 225 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (defaults) SH GR GR % TEXT NP 0 SG GS 1 W 560 240 M GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([0] /dev/null) SH GR 0 15 RM GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([1] /tmp/mktmp\(\)) SH GR 0 15 RM GS 0 SG /Helvetica FF [12 0 0 -12 0 0] MS ([2] /tmp/mktmp\(\)) SH GR GR % TEXT NP 0 SG GS 1 W 560 225 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (defaults) SH GR GR % BOX 0 SG GS 10 SM GS NP 50 50 M 160 50 L 160 200 L 50 200 L CP S GR GR % TEXT NP 0 SG GS 1 W 65 45 M GS 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (submit host) SH GR GR % OVAL 0 SG GS GS NP 102 95 37 15 TGEL S GR GR % ARC 0 SG GS GS NP 102 156 37 15 -180 -360 TGAR S GR GR % POLY/OPEN-SPLINE 0 SG GS NP 65 93 M 65 156 L TGSM 1 W S GR % POLY/OPEN-SPLINE 0 SG GS NP 140 93 M 140 156 L TGSM 1 W S GR % TEXT NP 0 SG GS 1 W 102 137 M GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (local) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (local) SH GR 0 15 RM GS GS 0 /Helvetica FF [12 0 0 -12 0 0] MS (capture) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica FF [12 0 0 -12 0 0] MS (capture) SH GR GR % POLYGON/CLOSED-SPLINE 0 SG NP 220 112 M 220 137 L 257 137 L 257 150 L 300 125 L 257 100 L 257 112 L CP GS 1 SG EF GR GS S GR % TEXT NP 0 SG GS 1 W 242 129 M GS GS 0 0 AD GR 2 DI NE 0 RM GR GR % POLYGON/CLOSED-SPLINE 0 SG NP 220 112 M 220 137 L 183 137 L 183 150 L 140 125 L 183 100 L 183 112 L CP GS 1 SG EF GR GS S GR % TEXT NP 0 SG NP 172 115 M 268 115 L 268 133 L 172 133 L CP 1 SG F 0 SG GS 1 W 220 130 M GS GS 0 /Helvetica-Bold FF [14 0 0 -14 0 0] MS (Globus GASS) TGSW AD GR 2 DI NE 0 RM 0 SG /Helvetica-Bold FF [14 0 0 -14 0 0] MS (Globus GASS) SH GR GR GR tgifsavedpage restore end showpage %%Trailer %MatchingCreationDate: Wed May 21 15:53:39 2003 %%DocumentFonts: Helvetica %%+ Helvetica-Bold %%EOF pegasus-wms_4.0.1+dfsg/src/tools/k.2/doc/without.png0000644000175000017500000000573411757531137021344 0ustar ryngeryngePNG  IHDR)T pHYsHHFk> FIDATx=b8US6eZuTs)t` 0Ʊjދ@~F 'xɤB6¨iBۺ%o> յ~ N{;MVOD norTv2RZf!nAKY_>oJ Wj5vT0`GmCRznvَHLtAm?&~ۢxD;))|7}ZsiE6t)/7 ;@tz^P%&nhOR `sR ON۾U* )(7"%l* Qڂ!ڸ?|s5ډP>sZ} FRPKʌkMPIeR A^86wFnRwRE&2¦̤)3)lL 2¦̤)3)lL o$u:OG/$u" eGvd.v)J[FL ބƩ_Ka"l2-%) TyRU8 UT,xkUi'`.)-9j؊J0ެ•xw>t?nm %k[m jXUw]S)|J3]aк R`3;'mᅿT߁TǖKy͖z,]6QM[{rYN_۴E*xRVNѕ%aAm,JJ)Wx8D3::PϰRR;Psޥ".}hbIAAiKސFjˑ0&Rs^)Q\x^ cڒ=R my։IMhMzT8()ѦPc|`m %@)&â"r]p4C_ɗ /UE;IsŸ6ÇUMC?1aT9YFF\gu%spuDꗠ5_,p)%~]ʯ\ Ӧ)H)HRИG[`TEuD /2) U )DTjfl O!P1 T>[cnK*mIp̑QbԶ-TD`#/aLVݝ )۰ z@ YYqi'0 ,ϵ< 4J M!c0}$t3@ cC]ؽ/k,x-=L[f]9bm[ha#Tdoܿv>~?Rx;)gg-ڿ%w[Iъ&i~WH J9q#)oH%zpf{MF%an$,IjeNn!Ot. 
j/i#`5:C»gKĺ>k~K2)dRXy?R8T-VHʿqSA$-;X=&5M(YEaÿv(4o2MOIE7aC%]d4U&5z!)B2ו#EXlOh)5lOjLLg+q^\2))q\8fs]c -UC,l html:a }z}DQ#html: html:9f q phvb8tKickstarj)tV2'jJ?|ff ptmr8tJens-S.Vckler"cэ06/06/2004B8;vifflHޤ ͤ} ffΟ!, 3 ptmb8tAsuthor%" ffJDate} ffModicationl} ffzfflHޟfflHޡͤ} ffΟ?| 3 ptmr8tJensVckler͡ ffJ20040204͟} ffinitialdocumentl} fffflHޡͤ} ffΟJensVckler͡ ffJ20040211͟} ffextensionsl} fffflHޡͤ} ffΟJensVckler͡ ffJ20040607͟} ffadded" 3 pcrr8theretostdin,andsetupjobsl} fffflHގ3fg }*%QԤQhtml: html:a ځ 3 phvr8tContentsݫ2}ffffffffz}D#Q#ff phvb8tContentsQ ]html: html: khtml: 3 phvb8t1lOverview html:A3͍html:2lProcessing CinaGridEnvironment html:W4html:3lConguration CFile html:o4 lhtml:3.1/]Identiers html:S.6^............................................r5lhtml:3.2/]Strings html:E.6^..............................................r5lhtml:3.3/]Format html:֍.6^..............................................r7͍html:4lCommands html:7lhtml:4.1/]Othercommands html:u.6^........................................r8)*$html:4.1.1# :Include html:| .6^.........................................r8)*$html:4.1.2# :Debug html:#$.6^..........................................r8)*$html:4.1.3# :Xmlns html:Z.6^..........................................r8lhtml:4.2/]Descriptions html:I.6^...........................................r8)*$html:4.2.1# :Site html:U.6^...........................................r8)*$html:4.2.2# :Transformation html:oԍ.6^.....................................r9)*$html:4.2.3# :Derivation html:@&.6^........................................r9)*$html:4.2.4# :Input html:.6^..........................................r9)*$html:4.2.5# :Output html:牍.6^......................................... 10lhtml:4.3/]ProcessingEnvironment html:ٍ.6^.................................... 10)*$html:4.3.1# :Set html:瘍.6^........................................... 10)*$html:4.3.2# :Chdir html:*Í.6^.......................................... 11)*$html:4.3.3# :Feedback html:\.6^........................................ 11)*$html:4.3.4# :Stdin html:.6^.......................................... 12)*$html:4.3.5# :StdoutandStderr html:e.6^.................................... 12lhtml:4.4/]Jobcommands html:r.6^......................................... 13)*$html:4.4.1# :SetupJobs html: .6^....................................... 14)*$html:4.4.2# :PreJobs html:Q.6^......................................... 14)*$html:4.4.3# :MainJob html:y<.6^........................................ 14)*$html:4.4.4# :PostJobs html:vw.6^........................................ 15)*$html:4.4.5# :CleanupJobs html:B.6^...................................... 15lhtml:4.5/]Optional2nd-levelStaging html:t.6^................................... 15)*$html:4.5.1# :Stagein html:yE.6^......................................... 16)*$html:4.5.2# :Stageout html:6 .6^........................................ 16html:5lResults html:I17lhtml:5.1/]TheProvenanceTrackingRecord html:E.6^................................ 17lhtml:5.2/]TheFeedbackChannel html:A..6^..................................... 17)*$html:5.2.1# :ExponentiallyBacked-oHeartbeat html:).6^.......................... 17)*$html:5.2.2# :Gridshell-awareApplications html:瀍.6^............................. 
18,Vb*= manfnt9$?| ptmr8tDicultUsectionswhicharenotimportantforthecasualusersaree٠xpressedwithadangerousbendsigninthemargin.3fg }z%QԤQhtml: html:a 1yOverTviewݫ3}ffffffffz}DQ#html: html: 14Over$vieȽw/Kickstarteisagridshell.GAdgridshellisrunattheremoteexecutionehost,vandlikeashell,startsandwatchesover an*application.Figureshtml:1 html:andhtml:2 html:comparetheabsenceandpresenceofagridshellintheremotejobexecutionchain.0\ҏ=PSfile="without.eps" llx=26 lly=636 urx=323 ury=776 rwi=2970 HjFigure1:html: html:Withoutthepresenceofagridshell.8 :PSfile="with.eps" llx=26 lly=636 urx=447 ury=776 rwi=4210 =Figure2:html: html:Withthepresenceofkickstart.Intheabsenceofagridshell,gurehtml:1 html:,theremoteschedulerstartstheapplicationonanyworkere.Thestdiostreamsoftheapplicationusuallydefaulttoeither& pcrb8t/dev/nullorsomeotherscheduler-specicle.\ThroughtheYpresenceofGlobus,stdiocanbeboth,streamedorstaged,viaGASS.Furthermore,theapplicationitself,ifindependentlybundled(i.e.{ustaticallylinked),0andappropriateforthearchitecture,canbestagedusingGASSfromthesubmitsite.In>fthepresenceofagridshell,Wgurehtml:2 html:,specicallykickstart,theGASS-handledstdiostreamsareusedforkick-start'es[ownpurposes.ASincekickstartisusedinconjunctionwiththeGriPhyN[XVWirtualDataSystem(GVDS),anyapplicationstdiostreamsareknownandregisteredwiththeGVDS.Ifastreamisimportanttoanapplica-tion,e.g.W@theapplicationproducesresultson'ϯ8 3 ptmri8tstdout,thestreamcanbeconnectedtoaleontheremotesystem.TheGVDSwillgeneratetherightcongurationtohaveagridshelllocallyconnectthestreamtoaleontheremotemachine.Althoughykickstartprovidesthecaptureofstdioofremoteapplication,theGVDS-trackedstdiowillbecome3fg }F~%QԤQhtml: html:a 2yProcessing CinaGr*idEnvironmentݫ4}ffffffffz}D#QpartoftheGVDSstagingandreplicatrackingmechanism.*Theactualstagingandtrackingofthecaptureles Qisnotpartofthisdocument.]о26ߤviff ͤ} ffΟstr͏eam͡ ff2Jtransport͟} ffjn!use4} ffzff ff ͤ} ffΟ(?| 3 ptmro8tstdin ff2Jstagable_} ffjn!Initialconguration.b} ffff ͤ} ffΟstdout Z ff2Jstagable_} ffjn!Provenancetrackingrecord(PTR).{}} ffff ͤ} ffΟstderr M ff2Jstreaming} ffjn!Heartbeat,applicationfeedbackchannel,anderrormessages.͟} ffff ,{Table1:html: html:ThewaykickstartusesGASS'edstdiostreams.:QIn"thepresenceofkickstart,Amitexpectsthatitsstdinreceivesthecongurationle.1ThecongurationisexplainedQinSdetailinsectionshtml:3 html:(pagehtml:4 html:)andhtml:4 html:(pagehtml:7 html:).BThestdoutSstreamreceivestheprovenancetrackingrecord,hwhichQrecords^informationabouttheexecution^andhostmachine'esstate.ThestderrռstreamisusedforapplicationQspecicfeedbackofgridshell-awaresoftware. 
8QTablehtml:1 html:summarizestheusageoftheGASSstreams.=Notethatstdinandstdout7smaybestaged,whichismoreQfrugalQwithsystemresourcesonthegatekeepere.[(However,6dueQtotheinteractivenatureofapplicationfeedback,Qthe,@stderrlshouldbeusedinstreamingmode.YAlsonotethatGlobususesabest-eortstreamingwithGASS:QDuetorestrictionsoftheremoteschedulingsystems,=astreamedleisnotguaranteedtostreamduringtheQlifetimeofthejob.QKickstart'esadvantageoverrunningaplainapplicationwithoutthehelpofagridshellisanumberofvalue-Qaddedservicesitprovides.dDetailsareprovidedinthecongurationdescriptioninsectionhtml:4 html:: 8!!", 3 cmsy10UExponentiallyback-oheartbeats.!UMultiplepre-andpost-jobschainedtothemainapplication.!UIndependentcleanupjobs.!USite-specicknowledgeandcongurabilitythroughincludes.!UOptional2nd-levelstagingcapabilitythroughusercall-outs.!UOptionalinformationandMD5aboutarbitraryremoteles.Qhtml: html:24PrJocessinginaGridEnlvironment_TBD?html: html:p34CongurationFileThefcongurationleacceptsavarietyofcommands,x whichareusuallyparametrizedfurthere.HWSomecommands mayCkappearmultipletimes,eandsomemayonlyappearonce,witheachrepetitiondiscardingandoverwritingprevioussettings.dThear͏gumentstovariouscommandsareeitheridentiersorstrings.3fg }P5%QԤQhtml: html:a 3yConguration CFileݫ5}ffffffffz}DQ#html: html: ) phvb8t3.1Identiersn`Identiersxarewordsthatstartwithaletterorandunderscore.\Furthercharactersmayincludenumericaldigits. Examplesforvalididentiersare:psqwertask1false__system 8andexamplesforinvalididentiersare:s12$1asdfto-mesome:wordIn0general,OidentierspermissabletotheC'languagearealsopermissabletothecongurationle.YFurthermore allreservedwordsmayalsodoubleasidentiers.dReservedwordsarethecommandintroducingidentiers:spremainpostcleanupsite str$transformationdv0derivationschdirstdinstdoutstderrfeedbacksinputoutput stageinstageoutxmlns^html: html:3.2Stringsn`StringhandlingistoalimitedextendsimilartotheBourneshellandtheCprogramminglanguage.6Therearetwokindsofstrings:Verbatimstringsinsinglequotes(apostrophe)andinterpretedstringsindoublequotes(regularquotes). 
8Single!Mquotestringspassallcharactersverbatim.Theescapecharacteristhebackslash,:mwhichescapes(read:makesitpartofthestring)anycharacterafterit,includingthesinglequoteanditself.O|Vb9TheUnixshelldoesnotallowtheescapecharacterinsidesingle-quotedstrings,northeapostrophe.Doublequotestringsaresubjecttovariableinterpolationinadditiontoanextendedbackslashingescapemech-anism.-IfU variablesintheform$variableor${variable}areencounteredwithinthedouble-quotedstring,{5thevariable_ isreplacedwiththecurrentUnixenvironmentvalueofthesamekeyasthevariablename.EIfthereisnoUnixkenvironmentvalue,the$variableor${variable}isnot1ۼreplaced,butratherkeepstheunresolvedvariableinplace.O|Vb9TheUnixshellreplacesvariables,Cwhichdonote٠xist,withanemptystring.Furthermore,an٠yadvancedshellsubsti- 9tutions,e.g.${var:Xwgord}arenotavailablewithkickstart.InsidePdouble-quotedstrings,*1theescapingmachanismfeaturesadditionalspecialcharacterinclusionforhori-zontaltab(nt),verticaltab(nv),newline(nn),carriagereturn(nr),bell(na),escape(ne),andbackspace(nb).Thevariableinterpolationforjobar͏gumentstringsdiersintwoimportantwaysfromother,regular,strings:O|html: html: 1.`Job^stringsmustsplitar͏gumentsfortheinvocationonunprotectedwhitespaces.EUnprotectedwhitespaces`areneitherescapednorquoted.html: html: 2.`Job5stringsmustremove5onelevelofquotecharactersinar͏gumentsthatquotetheirwhitespacecharacters.3fg }^%QԤQhtml: html:a 3yConguration CFileݫ6}ffffffffz}DE1u4hff`fdͤ ffΟfd*, ptmb8tjobstringinputXx ffuarggvresult, ffff`ff`ͤ ffΟfd'/bin/echohi$LOGNgAME'+á ffuhivoeckler5ǟ ffff`ͤ ffΟfd'/bin/echo !", cmsy10n"hi$LOGNgAMEn"'ѡ ffu"hivoeckler" ן ffff`ͤ ffΟfd'/bin/echonn"hi$LOGNgAMEnn"'ϡ ffunhivoecklern5ş ffff`ͤ ffΟfd'/bin/echonnn"hi$LOGNgAMEnnn"'͡ ffun"hivoecklern" ՟ ffff`ff`ͤ ffΟfd"/bin/echo'hi$LOGNgAME'"#ӡ ffuhi$LOGNgAMEϟ ffff`ͤ ffΟfd"/bin/echon'hi$LOGNgAMEn'"ѡ ffu'hivoeckler'ן ffff`ͤ ffΟfd"/bin/echonn'hi$LOGNgAMEnn'"ϡ ffunhi$LOGNgAMEn͟ ffff`ͤ ffΟfd"/bin/echonnn'hi$LOGNgAMEnnn'"͡ ffun'hivoecklern' ՟ ffff`I쉍eTable2:html: html:Conversionofjobsstringsintoar͏gumentvectorelements.f'h?PSfile="statetable.eps" llx=0 lly=180 urx=792 ury=612 rwi=5544 +Figure3:html: html:Statetransitiontableforthestringparsere.3fg }lV%QԤQhtml: html:a 4yCommandsݫ7}ffffffffz}D#QTablehtml:2 html:illustratesthevariousinterpretationafterthedierentstagesfordierentinputstrings.YForillustration Qpurposes,aresultingnakedstringorstringsareenclosedinpairstoshowthedelimination:O|ڳDFigurehtml:3 html:showstheMealystatetransitionsautomatonforparsingjobstringsandre٠gularstrings.Thetableshowsfor Dagiv٠enstateandinputcharacterclass,theresultingstateandaction.pDThezstartstateforjobstringsisNQ_LBWS,andtheleftsideofyellowstatesapplies.rThestartstateforre٠gularsingle-DquotedstringsisSQ_MAINandforre٠gulardouble-quotedstringsDQ_MAIN. 
8QMulti-lineMstringsarepermitted.Therearetwomulti-linemodes.Inline-continuationmode,2abackslashQimmediatelybeforealinefeedcharacterimpliesthatbothareignored,andthenextlineisviewedaspartoftheQcurrentline.Anunprotectednewlinecharacterinsideastringwillbecopiedintoaregularstring,8orisasplitQpointforjobstrings.dTablehtml:3 html:showsexamplesforthehandlingofmultilinestrings.jލ$ZΉff ͟} ff냍InputstringR] ffY!Resultingstring9} ffzffff͟33ffQ鍑ͼ1:d"some\ͼ2:dstring"R]33ffY!ļsomestring ͟33ff}ff͟33ffQ鍑1:d"someͼ2:dstring"R]33ffY!ļsome\nstring͟33ff}ff3|lTable3:html: html:Multi-linestringhandling.Qhtml: html:3.3Formatn`The congurationleisaformatfreelanguage.Itdoesnotcareaboutanynumberof(linear)whitespacesbetweenitstokens.However,tomaintainaUnixshelllikesemantic,commandsareauto-delimitedbytheendof(aline.YContinuationlinesarenotpermitted(ifyoudon'͏tknowwhatthatmeans,youdon'tneedit).YMultiplecommandsinthesamelinemustbeseparatedwithasemicolon.Thelemaycontaincommentsinthetraditionofscriptinglanguages.dAqcommentstartswiththehash(#)character,andextendsthroughtheendoftheline.There(isnolenameglobbing.Ifyouneedglobbing(e.g.workingwithwildcardedlenamesasar͏guments),youqmustrunyourjobsthrough/bin/sh-c qtoletshelldotheglobbingforyou.tHowever,.beqwarnedthatsuchencapsulationwilladdanotherlevelofjobstringde-quotingandinterpretation.html: html:44Commands_Whilemostcommandshavearatherstaticsemantic,bsomepermitoptionalar͏guments.2Thefollowingsectionsusethefollowinggenericdescriptionstodenoteanon-terminaltoken:stringKistheplaceholderforastring,singleordoublequoted.idistheplaceholderforanidentiere.options4istheplaceholderforalistofoptions.Eachoptionisanidentieritself.Multipleoptionsareseparatedbylinearwhitespace.dReservedwordsarenotpermittedasvalididentierswithinanoptionstring.3fg }sؠ%QԤQhtml: html:a 4yCommandsݫ8}ffffffffz}DQ#html: html: 4.1OtherUcommandsn`Thiscategorydescribescommandswhichdealwiththecongurationofthekickstartapplicationitself.b#html: html:Z4.1.1Includeƍsincludestring sinclude"/my/site/local.cfg" 8TheZcongurationlepermitstherecursiveinclusionofothercongurationles,whicharespeciedbygiving theirnamewithinthestring.+Sincethesedenotelesontheremotesystem,Xthismechanismallowsforsite-specictie-insofcongurationvalues.YFurthermore,sinceadouble-quotedstringissubjecttointerpretation,itpermitsforarichavorofdynamicallydeterminablelesnamestoinclude. 8Ifthelecannotbeopenedforreadingbytheeectiveuserrunninggridshell,itscontentswillbeignored.Includedwlesmayincludeotherles.uHowever,awloop-detectionmechanismisnot(yet)inplace,thuscircular dependenciesaremostlikelycrashgridshellwitharemoteresourceexhaustion.#html: html:Z4.1.2Debugn`Notyetimplemented-debuggingiscurrentlyhard-codedasseent.#html: html:4.1.3Xmlnsƍsxmlnsidentifier sxmlnsptc 8Theprovenancetrackingrecord(PTR)mayoccasionallyrequireanamespaceidentier,*ifitistobeincluded aspartofotherXMLdocuments.UsuallyI,youwon'͏tneedtosetthisattribute,though.Pleasenotethatthear͏gument6Hisanidentier,Q0notastring.8%Therulesforvalididentiersaremorerestrictive,Q0seesectionhtml:3.1 html:(pagehtml:5 html:). 
8Eachrepetitionofthiscommandwilloverwriteanypreviouslyconguredvalue.html: html:4.2Descriptionsn`TheMdescriptionssetupcertainvaluestobereectedintotheprovenanceMtrackingrecord(PTR),theultimateresultofthegridshell.#html: html:Z4.2.1Siteƍssitestring ssite'UBuffalo'3fg } %QԤQhtml: html:a 4yCommandsݫ9}ffffffffz}D#QTheXstringdenesthenameofasite.EThisisusefulforfuturecases,wherea2ndlevelstagingmechanism Qaccesses,someformofexternalreplicamechanism,andneedstostoreasitehandle.YNotethatthesitehandleisQanarbitrarilychosennamewithnofurthermeaningbeyondtheGriPhyNVWirtualDataSystem. 8QEachrepetitionofthiscommandwilloverwritethepreviousvalue.yFornowI,VthesitehandleisjustreectedinQthePTR.Q#html: html:64.2.2Transformationƍstrstring[string[..]] stransformationstring[string[..]]stransformation'voeckler::findrange:1.0' 8ThisJcommandcomesintwoavors,withJashortandalongreservedword.uTheirmeaningisequivalent.The string(Elistar͏gumentsdescribeafully-qualiedVDC-denitionnameeach.gSuchanameisusuallysomethingakinto"namespace::name:version".Each(repetitionofthiscommandwillappendtoalistoftransformationrecords.Whiletherecanbeonlyonetransformationpthatisbeingcalled,$acompoundtransformationmaycallothertransformations.\Thus,thecallstackispartoftherecord.dThevaluesaresolelyusedtobereectedinthePTR.b#html: html:Z4.2.3Derivationƍsdvstring sderivationstringsderivation'voeckler::right:1.0' 8This[commandalsocomesintworeservedwordavors,&which[areequivalent.bThe[stringar͏gumentdescribes thefully-qualiedVDC-denitionname,whichisusuallysomethingakinto"namespace::name:version".Eachrepetitionofthiscommandwilloverwritethepreviousvalue.u3ThevalueissolelyusedtobereectedinthePTR.#html: html:64.2.4Inputƍsinputlfnsfn sinputmd5lfnsfnsinputlfnsfntfn...sinputmd5lfnsfntfn...sinput'voeckler.f.a''/home/voeckler/vdldemo/voeckler.f.a' 8EachFrepetitionofthiscommandwillregisteralogicallename(LFN)Ewithastoragelename(SFN).Addi- tionallyI,Qif2ndlevelstagingisrequired,anynumberofinputtransferlenames(iTFN)maybeassociated.cForeachm2LFN,onlyoneSFNmcanbeassociated.v/ForeachSFN,multipleiTFNsmaybeassociated.AllofLFN,SFNandTFNarestrings,andthusmustbeenclosedinquotes.3fg } I%QԤQhtml: html:a 4yCommandsו10}ffffffffz}D#QEectivelyI,nforZmeachregisteredinputlename,nastatrecordwillbeobtainedfromtheSFNZTaftergridshellparsed QallSitsar͏gumentsandrananoptional2ndlevelstage-injob(see$html:4.5.1 html:(page$html:16 html:)).)?ThestatrecordisobtainedQbeforHe̼anyoftheregularsubjobsprethroughcleanuparebeingrun.pQThe$informationfromthestatcallisbeingreectedintothePTR,onerecordforeachle,>featuringtheLFNQasdistinction.dFilesmayappearinboth,theinputandtheoutputlist.QThe&qoptionalar͏gumentmd5speciesthatanMD5sumshouldbeobtainedofthele,@andbecomepartoftheQresultingstatinforecord.dNotethatmd5isanoption,andthusnotquoted.Q5#html: html:G4.2.5OutputpsoutputLFNSFN soutputmd5LFNSFNsoutputLFNSFNTFN...soutputmd5LFNSFNTFN...soutputmd5'voeckler.f.d''/home/voeckler/vdldemo/voeckler.f.d'OEach1repetitionofthiscommandwillregisteralogicallenameLFN1withastoragelenameSFN.AdditionallyI, ifC2ndlevelstagingisrequired,anynumberofoutputtransferlenamesoTFNmaybeassociated.`ForeachLFN,PonlyoneSFNPcanbeassociated. 
ForeachSFN,multipleoTFNsmaybeassociated.AllofLFN,SFNandTFNarestrings,andthusmustbeenclosedinquotes.EectivelyI,foreachregisteredoutputlename,astatrecordwillbeobtainedfromtheSFNaftergridshellranalljobs,butbeforetheoptional2ndlevelstage-outjobisrun(seesection2.4.y).The$informationfromthestatcallisbeingreectedintothePTR,onerecordforeachle,>featuringtheLFNasdistinction.dFilesmayappearinboth,theinputandtheoutputlist.The&qoptionalar͏gumentmd5speciesthatanMD5sumshouldbeobtainedofthele,@andbecomepartoftheresultingstatinforecord.dNotethatmd5isanoption,andthusnotquoted.5html: html:4.3Pr“ocessingUEn&vironment Theprocessingenvironmentdealswithsettinguptheenvironmentinwhichjobs(seehtml:4.4 html:(pagehtml:13 html:))arebeingrun.dThechangespertainonlytogridshell,andstaywithingridshell.#html: html:G4.3.1Setpssetidstring ssetPATH"$PATH:$HOME/bin"OTheKsetcommandissimilartotheC-shellsetenvcommand.TheidentieriddenotesakeyintheUnix environment,which֏istobesettothespeciedvalueofstring.EPleasenotethatthestring,ifdouble-quoted,maybesubjecttovariableinterpolation.pEnviroment settingarenot<3reectedinthePTR.Theenvironmentvariablesarechangedduringcompile-time.Environmentvaluesareoverwritten,iftheyalreadyexist.3fg } %QԤQhtml: html:a 4yCommandsו11}ffffffffz}DQ##html: html: 4.3.2Chdir2΍schdirstring schdircreatestringschdircreate"/home/$LOGNAME/vdldemo/tmp"mThe;pchdircommanddeterminesthecurrentworkingdirectoryI.Thecommandcomesintwoavors,[with;pand withoutthecrHeateoption.h@Without^thecrHeateoption,[thespeciedworkingdirectoryisattemptedtobecreatedbeforechangingintoit.Failure.tocreatethedirectorythroughreasonofexistencewillbeignored.5iOtherfailuresonthemkdircommandwillcauseasemanticerrorduringcompile-time.Regardlessdofoptions,inthenextstep,gridshellwillattempttochangeintothespeciedworkingdirectoryI.Ifthebchangefails,Nthepreviouscurrentworkdingdirectoryremainscurrent.WTheworkingdirectoryischangedduringcompile-time.dEectsareimmediateonrelativelenames.Ifnochdircommandisspeciedwhatsoever,thegridshellusestheworkingdirectoryassignedbytheschedul-ingsystem,i.e.dwhereitwasstartedin.ThecurrentworkingdirectorywillbereectedinthePTR.Multiplespeciciationswillchangethedirectoryagainandagain.lThenallychosenworkingdirectoryisrecordedinthePTR.#html: html:L>4.3.3Feedback2΍sfeedbackstring sfeedbackidstringsfeedbackATLAS_FEEDBACK"/dev/shm/atlas-XXXXXX"sfeedback'gs-fb-XXXXXX'mThe6Xfeedbackiscurrentlyasimplemechanismtoallowgridshell-awareapplicationstosendapplication-specic dataUbacktothesubmithost.BAnyprocessing,j>collectionorvisualizationofthisdataisuptotheuserapplicationframework.ThedWidspeciesthenameofanenvironmentvariablewhichistobesettothelenameofthenamedpipe(FIFO).Ifnoidentierisspecied,thedefaultofGRIDSTART_CHANNELisused.Thelenameiscreatedusingthemkstemp(2)systemcallfromthepatternprovidedbythestring.PjThelenamestringmust&haveasuxofsixcapitolXjletters,/ortheseletterswillbemandatorilyappended,includingaseparatinghyphen.Ifthelenameisabsolute,startingisaslash,thespeciedpathwillbeused.Ifthelenameisrelative,notstartingwithaslash,anappropriatetemporarydirectorywillbedeterminedandprexed.Thes_gridshellattemptstocreateaFIFOs0(Unixnamedpipe)fromthepatternwiththetemporarylename.Itchangestheenvironmenttorecordthelename,sothatsubjobs(seehtml:4.4 html:(pagehtml:13 
html:))maywritetothischannel.[futurelab:dPermitanynumberofsuchchannels,andmultiplexthem]If`feedbackisspeciedmultipletimes,onlythelastspecicationwillpersist.cPreviouschannelswillbeclosedandremoved.ڇThefeedbackmechanismisonlyactivated,-ifthefeedbackwasspeciedatleastonce.ڇThechosenFIFOanditsstatinformationispartofthestandardstatinfoPTRrecord.3fg } Р%QԤQhtml: html:a 4yCommandsו12}ffffffffz}DQ##html: html: 4.3.4Stdinȍsstdinstring sstdinherestringsstdin'my/file'sstdinhere'copytostdin'CThesedirectivespermittheconnectionofthestdinlehandleofsubjobswithanexistingle.Theprimaryuse ofm#thisoptionistoperuseinput,}ifanystdinpertainstoaGVDS-trackedleduringacomputation.JnIdeallyI,onlythemaincomputationaljobsshouldaccesstheseles.:Y˧ou>kcan(currently)onlyuseoneredirectedstdioforallsubjobs.Thus,^ifyouredirectstdintoale,thislewillbeusedforallsubjobs.Willa`thelebereopenedforeachjob?RDependingonwhatyouconnectyourstdinto,theanswerisyesandno.;dPleaseDrefertotablehtml:4 html:forthedierentopenmodes.Thereasonaregularleisbeingreopenedliesinthefactthatanopenledescriptorisnotmaintainedbetweenjobs.dFurthermore,*thelastpositionofthelecannotbeSpremembered,ybecauseitcannotbeobtained:DThelehandleofaregularle,onceopened,willbepassedcompletelyintothejob'esapplicationspace.yz¾3Svjff html:Openmodesforstdin.$Thespeciallenameofjustahyphenmeanstoconnectthelehandlewiththeonepassedtothegridshell.However,VindnordertouseaherHedocument,whichcopiescontentfromthecongurationontothestdinoftheapplication,youmustusethehereoption.When2usingthehereoption,thestringisnotalename,butthecontenttobeputintoatemporaryle,whichisU'subsequentlyconnectedwiththestdinofthejobs.BpMulti-linestringsarevalid,iandregularstringinterpolationapplies,seesectionhtml:3.2 html:(pagehtml:5 html:).[futurelab:ԗAddoptionstoallowdisassociationofstdiohandlesduringstagejobs. dAddoptionstopermitdisassocationduringnon-mainjob.]Astdinredirectionshouldonlybespeciedonce.dAre-congurationwillcloseapreviousconguration.Defaultforstdinistoconnectto/dev/nullintheabsenceofanyspecication.W#html: html:\4.3.5Stdout CandStderrȍsstdoutstring sstdoutappendstringsstdouttruncatestring3fg } %QԤQhtml: html:a 4yCommandsו13}ffffffffz}D# 8stderrstring 8stderrappendstring 8stderrtruncatestring 8stdout'-' 8stderrappend'local.err' 8QThesedirectivespermittheconnectionofstdiolehandlesofsubjobswithexistingles.{)Theprimaryusefor Qtheseoptionsistocaptureoutput,'ifanystdoutorstderrpertainstoaGVDS-trackedleduringacomputation.QIdeallyI,aonlythemaincomputationaljobsshouldaccesstheseles..However,youshouldkeepinmindthatQvariouserrorconditionsmayresultinoutputtostderrforfailuresofothersubjobs. 8QY˧oucan(currently)onlyuseoneredirectedstdioforallsubjobs.YBThus,ifyouredirectstdoutintoale,thisleQwillbeusedforallsubjobs.QDefaultmodeforthesedirectivesisthetruncatedmode.Intruncatemode,+apreviously(beforethestartoftheQgridshell)qexistingleistruncatedatcompile-time.Inappendmode, apreviouslyexistingleisopenedforQappending.QThespeciallenameofjustahyphenmeanstoconnectthelehandlewiththeonepassedtothegridshell.Q[futurelab:ԗAddoptionstoallowdisassociationofstdiohandlesduringstagejobs. 
dAddoptionstopermitQdisassocationduringnon-mainjob.]QEachvstdioredirectionshouldonlybespeciedonce.3AvYre-congurationwillcloseandoverwritevapreviousQsetup.QDefaulttforstdoutandstderristoconnecttoseparatetemporarylesintheabsenceofanyspecication.kTheQrstpageofdatafromthetemporarylesarereectedinthestatinforecordsofthePTRforstdoutandstderre.Qbhtml: html:4.4JobUcommandsn`The?jobcongurationsdescribethecommandandcommandlinetobeusedforsubjobs.VAllspecicationsareoptionalexceptforthemainjob,whichmustbespecied.Thechainingofjobresultsislooselyasfollows:O|html: html: 1.`runstage-injob,ifapplicablehtml: html: 2.`obtainstatrecordsonlesdeclaredinputhtml: html: 3.`runsetupjobchainhtml: html: 4.`runprejobchain&&runmainjob&&runpostjobchainhtml: html: 5.`runcleanupjobchainhtml: html: 6.`obtainstatrecordsonlesdeclaredoutputhtml: html: 7.`runstage-outjob,ifapplicable3fg }Á%QԤQhtml: html:a 4yCommandsו14}ffffffffz}D#QIfanyprejobexistsandfails,nofurtherjobsinthepre,main,postchainwillberun.+Ifthemainjobfails, QnoNfurtherjobsinthepostchainwillberun.Ifanyofthepostjobsfails,4.thechainwillnotbecontinued.TheQstagein,sAstageout,setupNandcleanupjobsareindependentofanyfailuresintheprechain,sAmainjob,andpostQchain.(QAny,Jnumberofsetup,H)pre,post,Jandcleanupjobsmaybespeced.uTheyarequeuedintoseparatechains,H)andQeachchainexecutesitsjobsintheorderoftheirspecication.QThe>mainjobismandatoryI.:Itmustbespeciedonce,Wandshouldbespeciedonceonly.:MultiplespecicationsQwilloverwritepreviousones.QPlease}notethecommandlinespecicationstringforjobsissubjecttodoubleinterpretation,(onceduringQcompile-time,andonceduringrun-time,seehtml:3.2 html:(pagehtml:5 html:)fordetails.Q[futurelab:dDropthedoubleinterpolationtooconfusing.Itisanartefactofcoderecycling]Q[Isitalreadydropped?]QThestdioofanyjobwillbeconnectedaccordingtothespecicationofstdin,9}stdoutandstderr,seehtml:4.3 html:(pagehtml:10 html:).QStdindefaultsto/dev/null,stdouttoatemporaryle,andstderrtoadierenttemporaryle.QAbsoluteapplicationnamesaretakenatfacevalue,andmayfailintheirnon-existence.RelativenamesareQcanoniedwiththehelpofthecurrentworkingdirectoryI.d[xme:arethey?]Q-C#html: html:uJ4.4.1Setup CJobsssetupstring ssetup"/bin/mkdir-p$HOME/tmp"ȍThe"usetupjobcongurationmaybespeciedmultipletimes.Eachjobwillbequeuedintoachainsetup,;and atݱrun-time,thesewillbeexecutedݱintheorderoftheirchaining.ǫAnysetupjobmayfailwithoutaectinganyotherjobs.Note:UsuallyZyouarebetterofwrappingevensimplecommandsintoarealshellscript,settheexecutebitonit,andrunjustthatparticularshellscript.-C#html: html:uJ4.4.2Pre CJobssprestring spre'/bin/date'spre'/usr/bin/envperl-i.bak-pe\\"s{a}{b}g\\"*.jof'ȍTheprejobcongurationmaybespeciedmultipletimes.{EachjobwillbequeuedintoachainprHejobs,8and at5 run-time,Sthesewillbeexecuted5 intheorderoftheirchaining.͸Therstfailedprejobwillresultinafailedexecutionofthecomputation.dNofurtherprejobs,nomainjobs,andnopostjobswillberunincaseoffailure.#html: html:uJ4.4.3Main CJobsmainstring3fg }q%QԤQhtml: html:a 4yCommandsו15}ffffffffz}D# 8main'computation.sh'aQThe 3mainjobmustbespecied,andshouldonlybespeciedonce.P0A minimumcongurationlecontainsat Qleastacongurationforthemainjob.Ifthemainjobfails,Oallwillbeassessedasfailure.NopostjobswillbeQrunincaseoffailure.Q#html: html:鍍4.4.4Post CJobssyspoststringspost'/bin/date'aThepostjobcongurationmaybespeciedmultipletimes.bEachjobwillbequeuedintoachainpostjobs,andatrun-time,thesewillbeexecutedintheorderoftheirchaining.:RTherstfailedpostjobwillresultinafailedexecutionofthecomputation.dNofurtherpostjobswillberunincaseoffailure.#html: html:J 4.4.5Cleanup 
4.4.5 Cleanup Jobs

    cleanup string

    cleanup 'rm *.log'                 # this will NOT work: ENOENT
    cleanup '/bin/sh -c \'rm *.log\''  # this might work

The cleanup job configuration may be specified multiple times. Each job will be queued into a chain cleanup, and at run-time, these will be executed in the order of their chaining. Any cleanup job may fail without affecting any other jobs.

Note: Usually you are better off wrapping even simple commands into a real shell script, setting the execute bit on it, and running just that particular shell script.

4.5 Optional 2nd-level Staging

The stage-out and stage-in jobs are optional, and should only be specified once. The job restrictions and features, as shown in section 2.4, also apply to stage jobs. Multiple specifications of stage jobs will overwrite previous specifications. [futurelab: Do we need multiple stage call-outs?]

The stage jobs deal with the 2nd-level staging of files. The 2nd-level staging comes into effect if a compute cluster does not share a filesystem between the externally visible storage element and the internal worker node. The 2nd-level staging may be employed in this case to transfer input files to the worker node before any computation takes place, and to store output files to the storage element after any computation took place.

Stage jobs have an invisible final argument. The final argument is dynamically affixed. It is the name of a temporary file which contains a list of file mappings from SFN to TFN. The format is determined by the format string, which is the first argument string to the stage commands. The format string may contain three kinds of placeholders, each of which may occur multiple times:

[Table: the three format string placeholders.]

Some popular examples include:

    '%s %t'              single-line: SFN -> TFN1 TFN2 ..
    "%l %s %t{\r\n+}"    multi-line: use continuation symbol
    "%s %t{\r\n%s}"      multi-line: repeat SFN for each TFN

A CRLF will be implicitly appended to the end of the format string. The author of gridshell is aware that filenames with spaces inside them, as frequently seen on Macs and now on Windows, will break the interface. [fixme: we may/will need quoting.]

In order to create the list of files, the input and output configuration commands must use the three-argument version, which associates a TFN with an LFN.

4.5.1 Stagein

    stagein string string

    stagein "%s %t" 'transfer.sh -I'    # make sure to have x bit set

The stage-in job specification, if present, specifies a script or call-out to run for the 2nd-level stage-in of files. The final argument will be dynamically attached, and is the name of a file containing a list of file mappings.

No stage-in job will be run if the list of stagable input files is empty. If the two-argument input configuration was used, i.e. the TFN is missing for an SFN, this file will not become part of the list.

4.5.2 Stageout

    stageout string string

    stageout "%s %t" 'transfer.sh -O'    # ensure x bit

The stage-out job specification, if present, specifies a script or call-out to run for the 2nd-level stage-out of files. The final argument will be dynamically attached, and is the name of a file containing a list of file mappings.

No stage-out job will be run if the list of stagable output files is empty. If the two-argument output configuration was used, i.e. the TFN is missing for an SFN, this file will not become part of the list.
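As an illustration of the invisible final argument, assume the single-line format "%s %t" from the examples above. The temporary mapping file passed to a stage-in call-out might then contain lines of the following shape, one SFN followed by its TFN(s) per line (host and path names are invented for this sketch):

    gsiftp://se.example.org/store/run42/input.dat /scratch/wn07/run42/input.dat
    gsiftp://se.example.org/store/run42/config.xml /scratch/wn07/run42/config.xml

The call-out script, e.g. transfer.sh above, is expected to parse this list and move each file accordingly.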
5 Results

The gridshell [...]

5.1 The Provenance Tracking Record

The provenance tracking record written by the gridshell is transported on stdout, made available by the remote scheduler. Since the provenance tracking record will be compiled and written after all jobs were run, it is usually a good idea, when using Globus, to disconnect the stdout, and transfer results at the end-of-(Globus-)job. This will save file handle resources and other kernel resources on the gatekeeper host.

The provenance tracking record contains a variety of information, usually more than will be stored in the provenance tracking catalog (PTC). Much of the information is useful for debugging, too. Some remote scheduling systems protocol their own information on the stdout. Furthermore, if sub-jobs are configured to use gridshell's stdout handle, their output may also appear on this handle. While the PTR is distinct in this mix of data, dissemination and multiplexing of stdout is not a task that can be solved by a gridshell.

The provenance tracking record (also known as invocation records) can be found online. Please refer to http://www.griphyn.org/workspace/VDS/.

5.2 The Feedback Channel

The feedback channel is transported via the stderr file handle to the submit host. Since feedback information is immediate, Globus-IO streaming mode is applicable for this channel. However, Globus streaming is a best-effort operation: remote scheduling systems may decide to not make the stderr available until after the job is done. There is no guarantee that stderr will actually be streamed, or stream continually.

The gridshell tries to use the stderr for application feedback. The gridshell feedback records are XML-encapsulated chunks to distinguish them from other noise on the stderr channel.

5.2.1 Exponentially Backed-off Heartbeat

The gridshell wakes up at regular intervals to check on various file handles and the system state. At exponentially growing intervals, gridshell will check that it could actually kill its child. The result will be sent as a feedback chunk with a channel number of zero.

The first heartbeat is registered after 30 seconds, with the interval doubling each time. [fixme: grows not quite so fast, but faster than linear.]

5.2.2 Gridshell-aware Applications

If an application is gridshell-aware, it can use the FIFO created by the feedback configuration to send back application data. The data will be collected in the submit host's stderr file for the job. The name of the FIFO is determined by the identifier of the environment variable. Thus, this mechanism can even be used in shell scripts, which may e.g. contain a line like:

    echo "some comment" >> $GRIDSHELL_CHANNEL

The application data will be chunked into XML. The chunking borders are arbitrary, and outside the sphere of influence of the gridshell. Never assume that data written in one write will result in one chunk, nor assume that data written separately will result in multiple chunks.

Worse, there is currently the limitation that only one writer at each time should write to the feedback channel. Since it is a mostly untested feature, there may still be bugs involved, which may lead to the premature cancellation of a compute job. Thus, please test before relying on it in production.
pegasus-wms_4.0.1+dfsg/src/tools/k.2/doc/with.obj0000755000175000017500000003231411757531137020577 0ustar ryngerynge%TGIF 4.1.41
state(0,37,100.000,0,0,0,16,1,9,1,1,0,0,1,0,1,0,'Helvetica',0,69120,0,5,1,5,0,0,1,0,0,16,0,0,1,1,1,1,1088,1408,1,0,2880,0).
%
% @(#)$Header: /nfs/asd2/gmehta/GRIPHYN/CVS/cvsroot/vds/src/tools/k.2/doc/with.obj,v 1.1 2004/02/11 22:00:07 griphyn Exp $
%
%W%
%
unit("1 pixel/pixel").
color_info(11,65535,0,[ "magenta", 65535, 0, 65535, 65535, 0, 65535, 1, "red", 65535, 0, 0, 65535, 0, 0, 1, "green", 0, 65535, 0, 0, 65535, 0, 1, "blue", 0, 0, 65535, 0, 0, 65535, 1, "yellow", 65535, 65535, 0, 65535, 65535, 0, 1, "pink", 65535, 49931, 50971, 65535, 49344, 52171, 1, "cyan", 0, 65535, 65535, 0, 65535, 65535, 1, "CadetBlue", 22885, 40569, 40569, 24415, 40606, 41120, 1, "white", 65535, 65535, 65535, 65535, 65535, 65535, 1, "black", 0, 0, 0, 0, 0, 0, 1, "DarkSlateGray", 10402, 19764, 18724, 12079, 20303, 20303, 1 ]). script_frac("0.6"). fg_bg_colors('black','white'). dont_reencode("FFDingbests:ZapfDingbats"). page(1,"",1,''). box('black','',275,50,425,200,0,1,1,21,0,0,0,0,0,'1',0,[ ]). text('black',300,31,1,0,1,103,17,31,14,3,0,0,0,0,2,103,17,0,0,"",0,0,0,0,45,'',[ minilines(103,17,0,0,0,0,0,[ mini_line(103,14,3,0,0,0,[ str_block(0,103,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,103,14,3,0,0,0,0,0,0,0, "rem. scheduler")]) ]) ])]). oval('black','',490,120,500,130,1,1,1,79,0,0,0,0,0,'1',0,[ ]). box('black','',495,50,645,200,0,1,1,92,0,0,0,0,0,'1',0,[ ]). text('black',530,31,1,0,1,60,17,94,14,3,0,0,0,0,2,60,17,0,0,"",0,0,0,0,45,'',[ minilines(60,17,0,0,0,0,0,[ mini_line(60,14,3,0,0,0,[ str_block(0,60,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,60,14,3,0,0,0,0,0,0,0, "kickstart")]) ]) ])]). oval('black','',490,70,500,80,1,1,1,96,0,0,0,0,0,'1',0,[ ]). oval('black','',490,170,500,180,1,1,1,97,0,0,0,0,0,'1',0,[ ]). oval('black','',420,120,430,130,1,1,1,99,0,0,0,0,0,'1',0,[ ]). oval('black','',420,70,430,80,1,1,1,100,0,0,0,0,0,'1',0,[ ]). oval('black','',420,170,430,180,1,1,1,101,0,0,0,0,0,'1',0,[ ]). poly('black','',2,[ 425,75,460,75],1,1,1,102,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 495,125,460,125],1,1,1,103,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 495,175,460,175],1,1,1,104,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 455,75,495,75],0,1,1,105,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 425,125,465,125],0,1,1,106,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 425,175,465,175],0,1,1,107,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). text('black',505,68,1,0,1,58,15,115,12,3,0,0,0,0,2,58,15,0,0,"",0,0,0,0,80,'',[ minilines(58,15,0,0,0,0,0,[ mini_line(58,12,3,0,0,0,[ str_block(0,58,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,58,12,3,0,-1,0,0,0,0,0, "[0] unused")]) ]) ])]). text('black',505,118,1,0,1,54,15,117,12,3,0,0,0,0,2,54,15,0,0,"",0,0,0,0,130,'',[ minilines(54,15,0,0,0,0,0,[ mini_line(54,12,3,0,0,0,[ str_block(0,54,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,54,12,3,0,-1,0,0,0,0,0, "[1] IV rec.")]) ]) ])]). text('black',505,168,1,0,1,49,15,119,12,3,0,0,0,0,2,49,15,0,0,"",0,0,0,0,180,'',[ minilines(49,15,0,0,0,0,0,[ mini_line(49,12,3,0,0,0,[ str_block(0,49,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,49,12,3,0,0,0,0,0,0,0, "[2] gs err")]) ]) ])]). 
group([ box('black','',300,91,375,158,0,1,0,129,0,0,0,0,0,'1',0,[ ]), oval('black','',300,75,375,108,0,1,1,130,0,0,0,0,0,'1',0,[ ]), arc('black','',0,1,1,0,300,142,337,158,300,158,375,158,0,74,32,11520,11520,131,0,0,8,3,0,0,0,'1','8','3',0,[ ]), poly('black','',2,[ 300,91,300,158],0,1,1,132,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]), poly('black','',2,[ 375,91,375,158],0,1,1,133,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]), box('black','',302,110,373,173,0,1,0,134,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',337,109,1,1,1,90,15,135,12,3,0,0,0,0,2,90,15,0,0,"",0,0,0,0,121,'',[ minilines(90,15,0,0,1,0,0,[ mini_line(90,12,3,0,0,0,[ str_block(0,90,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,90,12,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "GASS", 1, 0, 0, text('black',337,126,2,1,1,35,30,136,12,3,0,0,0,0,2,35,30,0,0,"",0,0,0,0,138,'',[ minilines(35,30,0,0,1,0,0,[ mini_line(34,12,3,0,0,0,[ str_block(0,34,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,34,12,3,0,-1,0,0,0,0,0, "GASS")]) ]), mini_line(35,12,3,0,0,0,[ str_block(0,35,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,35,12,3,0,-1,0,0,0,0,0, "cache")]) ]) ])])) ]) ], 137,0,0,[ ]). poly('black','',2,[ 425,75,365,135],0,1,1,165,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',3,[ 425,125,365,135,425,175],0,1,1,166,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). box('black','',177,102,267,148,0,1,0,179,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',222,101,1,1,1,90,15,180,12,3,0,0,0,0,2,90,15,0,0,"",0,0,0,0,113,'',[ minilines(90,15,0,0,1,0,0,[ mini_line(90,12,3,0,0,0,[ str_block(0,90,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,90,12,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "", 1, 0, 0, text('black',222,117,1,1,1,0,15,181,12,3,0,0,0,0,2,0,15,0,0,"",0,0,0,0,129,'',[ minilines(0,15,0,0,1,0,0,[ mini_line(0,12,3,0,0,0,[ str_block(0,0,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,0,12,3,0,0,0,0,0,0,0, "")]) ]) ])])) ]). oval('black','',710,120,720,130,1,1,1,251,0,0,0,0,0,'1',0,[ ]). box('black','',715,50,790,200,0,1,1,252,0,0,0,0,0,'1',0,[ ]). text('black',715,31,1,0,1,77,17,253,14,3,0,0,0,0,2,77,17,0,0,"",0,0,0,0,45,'',[ minilines(77,17,0,0,0,0,0,[ mini_line(77,14,3,0,0,0,[ str_block(0,77,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,77,14,3,0,-1,0,0,0,0,0, "application")]) ]) ])]). oval('black','',710,70,720,80,1,1,1,254,0,0,0,0,0,'1',0,[ ]). oval('black','',710,170,720,180,1,1,1,255,0,0,0,0,0,'1',0,[ ]). text('black',725,68,1,0,1,43,15,256,12,3,0,0,0,0,2,43,15,0,0,"",0,0,0,0,80,'',[ minilines(43,15,0,0,0,0,0,[ mini_line(43,12,3,0,0,0,[ str_block(0,43,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,43,12,3,0,-1,0,0,0,0,0, "[0] stdin")]) ]) ])]). text('black',725,118,1,0,1,50,15,257,12,3,0,0,0,0,2,50,15,0,0,"",0,0,0,0,130,'',[ minilines(50,15,0,0,0,0,0,[ mini_line(50,12,3,0,0,0,[ str_block(0,50,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,50,12,3,0,0,0,0,0,0,0, "[1] stdout")]) ]) ])]). text('black',725,168,1,0,1,48,15,258,12,3,0,0,0,0,2,48,15,0,0,"",0,0,0,0,180,'',[ minilines(48,15,0,0,0,0,0,[ mini_line(48,12,3,0,0,0,[ str_block(0,48,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,48,12,3,0,0,0,0,0,0,0, "[2] stderr")]) ]) ])]). oval('black','',640,120,650,130,1,1,1,260,0,0,0,0,0,'1',0,[ ]). 
oval('black','',640,70,650,80,1,1,1,261,0,0,0,0,0,'1',0,[ ]). oval('black','',640,170,650,180,1,1,1,262,0,0,0,0,0,'1',0,[ ]). poly('black','',2,[ 645,75,680,75],1,1,1,263,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 715,125,680,125],1,1,1,264,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 715,175,680,175],1,1,1,265,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 675,75,715,75],0,1,1,266,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 645,125,685,125],0,1,1,267,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 645,175,685,175],0,1,1,268,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). text('black',635,68,1,2,1,22,15,274,12,3,0,0,0,0,2,22,15,0,0,"",0,0,0,0,80,'',[ minilines(22,15,0,0,2,0,0,[ mini_line(22,12,3,0,0,0,[ str_block(0,22,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,22,12,3,0,-1,0,0,0,0,0, "-i fn")]) ]) ])]). text('black',635,118,1,2,1,26,15,276,12,3,0,0,0,0,2,26,15,0,0,"",0,0,0,0,130,'',[ minilines(26,15,0,0,2,0,0,[ mini_line(26,12,3,0,0,0,[ str_block(0,26,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,26,12,3,0,-1,0,0,0,0,0, "-o fn")]) ]) ])]). text('black',635,168,1,2,1,26,15,278,12,3,0,0,0,0,2,26,15,0,0,"",0,0,0,0,180,'',[ minilines(26,15,0,0,2,0,0,[ mini_line(26,12,3,0,0,0,[ str_block(0,26,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,26,12,3,0,-1,0,0,0,0,0, "-e fn")]) ]) ])]). poly('black','',2,[ 595,125,565,125],1,1,1,289,0,0,5,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',4,[ 600,175,580,175,580,125,565,125],1,1,1,290,0,0,5,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). group([ text('black',360,228,3,0,1,66,45,311,12,3,0,0,0,0,2,66,45,0,0,"",0,0,0,0,240,'',[ minilines(66,45,0,0,0,0,0,[ mini_line(66,12,3,0,0,0,[ str_block(0,66,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,66,12,3,0,-1,0,0,0,0,0, "[0] /dev/null")]) ]), mini_line(66,12,3,0,0,0,[ str_block(0,66,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,66,12,3,0,-1,0,0,0,0,0, "[1] /dev/null")]) ]), mini_line(66,12,3,0,0,0,[ str_block(0,66,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,66,12,3,0,-1,0,0,0,0,0, "[2] /dev/null")]) ]) ])]), text('black',360,211,1,0,1,55,17,315,14,3,0,0,0,0,2,55,17,0,0,"",0,0,0,0,225,'',[ minilines(55,17,0,0,0,0,0,[ mini_line(55,14,3,0,0,0,[ str_block(0,55,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,55,14,3,0,-1,0,0,0,0,0, "defaults")]) ]) ])]) ], 326,0,0,[ ]). 
group([ text('black',560,228,3,0,1,86,45,313,12,3,0,0,0,0,2,86,45,0,0,"",0,0,0,0,240,'',[ minilines(86,45,0,0,0,0,0,[ mini_line(66,12,3,0,0,0,[ str_block(0,66,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,66,12,3,0,-1,0,0,0,0,0, "[0] /dev/null")]) ]), mini_line(86,12,3,0,0,0,[ str_block(0,86,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,86,12,3,0,-1,0,0,0,0,0, "[1] /tmp/mktmp()")]) ]), mini_line(86,12,3,0,0,0,[ str_block(0,86,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,86,12,3,0,-1,0,0,0,0,0, "[2] /tmp/mktmp()")]) ]) ])]), text('black',560,211,1,0,1,55,17,317,14,3,0,0,0,0,2,55,17,0,0,"",0,0,0,0,225,'',[ minilines(55,17,0,0,0,0,0,[ mini_line(55,14,3,0,0,0,[ str_block(0,55,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,55,14,3,0,-1,0,0,0,0,0, "defaults")]) ]) ])]) ], 329,0,0,[ ]). box('black','',50,50,160,200,0,1,1,370,0,0,0,0,0,'1',0,[ ]). text('black',65,31,1,0,1,82,17,371,14,3,0,0,0,0,2,82,17,0,0,"",0,0,0,0,45,'',[ minilines(82,17,0,0,0,0,0,[ mini_line(82,14,3,0,0,0,[ str_block(0,82,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,82,14,3,0,0,0,0,0,0,0, "submit host")]) ]) ])]). group([ box('black','',65,93,140,156,0,1,0,388,0,0,0,0,0,'1',0,[ ]), oval('black','',65,80,140,110,0,1,1,387,0,0,0,0,0,'1',0,[ ]), arc('black','',0,1,1,0,65,141,102,156,65,156,140,156,0,74,30,11520,11520,386,0,0,8,3,0,0,0,'1','8','3',0,[ ]), poly('black','',2,[ 65,93,65,156],0,1,1,385,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]), poly('black','',2,[ 140,93,140,156],0,1,1,384,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]), box('black','',67,114,138,166,0,1,0,381,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',102,112,1,1,1,90,15,383,12,3,0,0,0,0,2,90,15,0,0,"",0,0,0,0,124,'',[ minilines(90,15,0,0,1,0,0,[ mini_line(90,12,3,0,0,0,[ str_block(0,90,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,90,12,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "local", 1, 0, 0, text('black',102,125,2,1,1,42,30,382,12,3,0,0,0,0,2,42,30,0,0,"",0,0,0,0,137,'',[ minilines(42,30,0,0,1,0,0,[ mini_line(27,12,3,0,0,0,[ str_block(0,27,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,27,12,3,0,-1,0,0,0,0,0, "local")]) ]), mini_line(42,12,3,0,0,0,[ str_block(0,42,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,42,12,3,0,-1,0,0,0,0,0, "capture")]) ]) ])])) ]) ], 380,0,0,[ ]). polygon('black','',8,[ 220,112,220,137,257,137,257,150,300,125,257,100,257,112,220,112],2,1,1,0,389,0,0,0,0,0,'1',0, "00",[ ]). box('black','',197,102,287,148,0,1,0,390,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',242,101,1,1,1,90,15,392,12,3,0,0,0,0,2,90,15,0,0,"",0,0,0,0,113,'',[ minilines(90,15,0,0,1,0,0,[ mini_line(90,12,3,0,0,0,[ str_block(0,90,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,90,12,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "", 1, 0, 0, text('black',242,117,1,1,1,0,15,391,12,3,0,0,0,0,2,0,15,0,0,"",0,0,0,0,129,'',[ minilines(0,15,0,0,1,0,0,[ mini_line(0,12,3,0,0,0,[ str_block(0,0,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,0,12,3,0,0,0,0,0,0,0, "")]) ]) ])])) ]). polygon('black','',8,[ 220,112,220,137,183,137,183,150,140,125,183,100,183,112,220,112],2,1,1,0,393,0,0,0,0,0,'1',0, "00",[ ]). 
text('black',220,116,1,1,1,95,17,394,14,3,2,0,0,0,2,95,17,0,0,"",0,0,0,0,130,'',[
minilines(95,17,0,0,1,0,0,[
mini_line(95,14,3,0,0,0,[
str_block(0,95,14,3,0,-1,0,0,0,[
str_seg('black','Helvetica-Bold',1,80640,95,14,3,0,-1,0,0,0,0,0, "Globus GASS")])
])
])]).
pegasus-wms_4.0.1+dfsg/src/tools/k.2/doc/with.png0000644000175000017500000001013711757531137020605 0ustar ryngerynge
[binary PNG image data elided]
[binary PDF document data elided]
GbB@)fœ+{hWSӱ)JF= عHsx>S7Ň*.@@䶲2RoJ*ZY]Um9h4:Z;|-~}GkGwI/t j#֦o筫RTz*)4#4ߕn!8#ɜ;HRqw1ђ2zAf6 i͕=:Z;?$_z573Jjdy腕uʆ- [jrIєglyek8234?oziݨ2< '5vU n=J|ʋڒxX{.fwDRY|0fzeaIQ a>81_wr|G]v>9z>lQ,8 SJ:VXP=K ^0հXٶI::, ~k8=O7u"X5,trtaQq$BA6S4f8GOo/酣=Cϑ7CLW5 nX%AϋX,s!!$-NR$** |a𗚵ڊWl )*F"HL]=Hy $koy+<bq>lc6d8sZչ]{MIl~˃'H3Z`j]}T|e۟O߭;zʎS8ɖyQ #$0fG$:8/Y> endobj 56 0 obj <>stream xe{TW'ĄkL%f}X+hb׮@A !@'LInwD%-ۇ>uT[nݞ;uv.EFσm7=[G|Bn2@8v qx͵ϝPl۾~%'eZԮp{S8օ-:]9{,h}N\ ETh0RYNPX>l'~Fj*ҁkzo<Ж:vlarG#m"z1bŌo;6=!īIKZL~eT 鸄NW>.]Ʈ|D?KFhV: 7zVp,e'ޛ?z ;5~k̴,5?2|S|gXlzZn8`Dzwv[$m&C$Pa:Hg?/(,Nb_W҉ҵwݾv' G8t|˩[ŕ2D& ĺ qs1:?Jf)~Jt(#PXE[hTנV:W1 dqv•}ZvM:1cN#;@_"h.Q8S"|1VtxDZWZtFx6ZTT<(o`c [KqLƤQjY62ÌKe'b>ϫWs깕@Ɉ~݉'Ճ/chG{ޱuUz.oQb#nV6,3S9j(%B+uX63lpU cXwMS(گh/ؚ~yΡ24SbiQ=fQj=Mno8!tCXVf'<%)nyH&ExpOǷ M8Qydf~>s~nG`&S7ٰ rńj V@u& !_%ÛoVC o/xHvwh4 hzMp_KBS5NIѩlS`3,5Bb݂o^RLl5$`u8yN' w*^hUU;_tH'ؓ ;I~!NLH4j{]3aDojVF]HKI1y\-Կ~d =6>[<=6F+:$z(NJ?<P"TU`/*rW*_LL3d+ &-6§6 ŹZA3T D0`<5 D*$lIaZI I{ M[(~fݘQ e)wC3#p=|a I>{_I48z9^6@|P4rFΒ#֦o qws9(K?홍\/YXxX~?2 ~w y#n%]9@؝@E}(sVAPp]ravq޾MtLt%Є*[;625 ŤoYz [cjEBB0e"i5e7g꺾c&Ə3 p[1vڄȹF' v:l8fKBXZmdg02ys (bcØwI6P 7q8z#7%p-L+f{g<:&ܽ6;TX@Wޒ%JM 9jf#[&?3 :y9h"Trp[dKx/hX endstream endobj 147 0 obj 2698 endobj 54 0 obj <> endobj 53 0 obj <>stream xZ XS׶>rrjcm:׹Ykc "jEDS!6A S2*jkZk۹Xwߦ{d`{ާg=p8cشӌIX}}a̋^(5?:qk_z49xq}p%+q $6N1}|{V|V%Jc}&R¢q1aIWGGKB}£"}Bc Y%IS|3oΜɉ>륱R?! ~cC_!".lL_$ɖɑ[פDmK X 8w™fM|{L-Ƽ)i#FLb@$^#f`b#&1xD"[\b2XML!kTbxFt"#^%/A?$hb,4J,&'Fi0O?ڋExKijx ċhBD M1FL|Q)g50/}=Orc C5?SO}2O}ĩg>pQiϮxx෎iguc_jpA5}q _yK&YR9'ߝ7'xM4nC&VO<҅nOzvҊNrt*9U2i{<|$ p0|r(40%ŕXYUeTtqBdie\jmupW|-p\/?j=ZSg6ʞTS)E)yiDP|S~ynMhWddFѫ*5 *ƊRΈ7 Q+i0\6^n%,_R0A1˞9 q/CNPj}jWx?&C ONv"Wlи\5S$_=4"Bb.$?v]sJUX!z \ijM~oc| '0BGߑ cߛp,iE\wEg ,*%cƣn$\!' v|6HljV|D3]u^2渼Γ f960?:h9O04~$]+|D[uk׸~"T3S)q=|si 'B$x1ТnS6%>q\$-ZRlg&@NQ4-JhF4R8LYul2\zRĥ"@; XH?}8҈||Sonn\tզyV]M2ʹYbG4\[=eӌ%4oK]oz;qX oŔp|;{Fz1!cjHTlt7hn6$"A8ʔ$$H$Y H* VZMm v{ki@v>2FD.JvT8J졫OC$ʘ؀0>Y])o5ɌYʼnujJ2/Ue^cJ!}|d+\|yvptnυ]a9wp'|ևBvO"8_60Uvc; .!4AJ[jSJ~I>h|[-M5"ptΝo(8GxPCd= șgzRT_VfVʞRgf'&A=*żLLCWs6qƁz7 g!Uq@Հ} {mi~\W&G:,)=YzA\;st5p:Ńs7|Mlqɒ#"\c>;F4Io.؞!ހaE;WL7()fb)( T 9W۩c!f?h,u_9i<'FKxh:LݠQi4"1:6Z[u;q;(p |tMT5VST'ĕ ~!m-޿7XFʵ8b8ՕcbDzP*\,Jy'趜8rœ=3A_k"=f S$lɛ"6o X%F:7Lˍoj[G&AN"ry y8,_*E[_<>1wem?SS2Qe'h * Ny21O.l*'3@Z&ďe"]3HO!Mte s+b Puu*uyEm*;@Xn\_LL[-MH8ʶ.V^"hyܹMBΗ&OWDHDƥ+6.YG]D,X ^8$r02-^8u,EX84`aTzA N8@vf ,gD߰0!S>S (8̋ o&%"0<4Dɕ4h_^Nl~}P#ɳkT J H]-+RIz*#y1̪*PEeδK&ʗ]߮XB˖4ŲӖZvQXtI²bk0CM_f@J>lyBm) s-@B%l_|Zؠ  ܝ7T݉88} 1hXtCh6(Jst "Ck۝Nso"Z2h#I7\y׻/1')M\ٴuXEeS?T)ieiuFw1B @ZSjzca)@=VjF3ҀBRlx M ^P+y͆9u?g\?NQPKps)e5 wׄ!7lB+1 ?sPHRwFtR^ЪQkI9 HK+4ZZp}X¡rQ!Ye%[/h8a>c6VXJQvv|Lp6Vܒޢ~9&6۔ R="kjWTKLC ɖ5CVLw/~siGC[ aWKMTcrxw{NDžDQ:z5v8Ŭe; |A[btg2'':B U (N$Agjnu@D֚-tr,lY+K],? <]KnM i1l@ f|%X3%XIpOQ?oT;qaqpGnrܠIPBx.ۍ{ 68ܿ24"݃'[(&p[Oܕ%{\17/~[s[s ]xr1ß^#Nӎ+7yW; LsBsUY@fXirqӲwp.2ot"Ef^_߁k[h {oV{Rޯl"SO^ݠrYJpw>g6=6Rh2 ijvv +u dcmگwW8 6IʷUd$@:^.Y8 -eL[z 7BacSNW:P?Ka4*hYVF(XLa5vIw"*(n&D[]U Z+WL EIUTѵWQ&hw!:`uYcT8ϛث\lE"nab(}ҽQrI(5ҼĀTe⟩=s1'TJ@$x;5筦ֆ1;'?㚥zp)7,7e(r9"T`!Zo)J]XUi2r9F1}0}p3*Ѐ9dˣa4zDI܅!,lPŹ3wü hqZÛ3ƹ\.e#!w@y܏c!eiE]@dTMbDWR!:^{ܱlqkc 1by(PqIq[GxzH91sDJ49"c-p )Cс+y*%KjKVѹϭ55@XWXg׈o^f6->A_I-r{@vגXSJ @Wّ#69|. j5*+Wf+%Ue",(2VchṠ7XӗqC~7Ond>$] s@^p0₲<ƈCGvޜZk/ [ޕ|TwuT(} ҶG7n?wڴ6,omx7rf!J+a2St6h! 
a}N|Ɔ ]a@> PVi1L"p}fYN LXUȗ2wx{$mY$I 9:1U oYRB[}]1ߨ-boc3G1taE P]];%]lQ.NTU57n=tGSSGGl~q?&&<`LJ9`zɤKB7Syhg}3Wzw1pyn]R|Av={ٽ|6t{Ўol,`tzp:gCz2mH_`PTlͰTưWh5ypU: HZ]]|C<q;HbJVړc{Wsxv5~&mqI/7rл`GhmdZPj'Injm6>MX^\RZWXY qN]ܿ#mb/ pusf)w#nט987pwaG/hՕ2!..)K!G/R+BPd0UG;?3 ge2ĩp^oSXzGjܻ8*@qx,:nGrŜ4A \5Y-̒,<?*`fSKfZ %^荅+p\~$1n^_$nLƵHգiEjy#[J&EЁ}~cZUP6'700X,r U\l.>f,&hs`3NJ9<0Ǿ;-t%p nk>g|GCߔ޿u뭉"Mʕ Sni7>bp 7>i?%zO{->* lkC3kK`,reпdJd A+\4jgO>o^Ͼ.ոǤӠQo65U6!8*>?Uֱ ƒђg/Q)V^Ak=&V'TNr-Mu 8&(+7j!(і,"?[ӳE(LSgs\Vb* JuT+˓ʑ ({h4"*A0֚d3W̜v2sYD"zQTq||hݻ ?|r z ܸ95L[PnʲzV}ոؤb9 TJiA65#"6իDXJkfXie[#4iEv0&8>6I{ 42!aR2 Z)Rj%(eE%XDzj))Ra&* Z5ЀtCv]4t tV61|:&9%6:U+>()SiMǁ Y\~*Ĕ[d<]6_JoΛZOep}# QaGgjڤFSE n9d/cw!%GZ5V8j{F> endobj 40 0 obj <>stream xXiXW~#&mҒFbukVmBֺ (Ⱦ%I  !HIdY][V[i;fFg:s^zNp\3..laQgQ,kЭ![Vf& ge{a1f1z7p}x3z3+B#ՓktoKd9)yÂbKgoO|&&g.% 3ĒĬ[茌C2IJo\BBbgZx\FbԌTD\| kW^'53>?w氾{q94~oɲE'$ݚ-'9h{nʎԐз 3⤙zzisb^yq2+>jEVʟZERۨ j;ZBS;TZK-BT*N~MHרj7P[(?*R\5DPORlES)j6Mfme=K`voqvp.%`+~O{͉ss~?^;sO>⚞~ybޅ-X`݂K-<?bK:;9U"&v]3RpKoSmt mJR2Ach3;3o* Q+7k*;h֦F2ջRurEB[Gn&clXRs%ո @l\ęp=梛ňpQpz*E܉lN˘vx%h+kWM%̎Ȇd@E^EOo~6`$ՊkNa(hkr56.*09^#ߠ񂻫a{ʟ ,yX9UޠyPe%c6﹟;i)lp*~E_58lJ"Hp.T%h@Ooo3ՃHZQ5jOO@x6E|s* y.tMv~g^cn#S(%S{]`Yi7i&fUMxi#.bl%|^jS.VJd6VɛYP$AVDV`k/dbbyxX}]AB"w ,fܱ)!e@$>YtK7NqڗQCN \R7!b>.TVHZ"$HAU$of9Uv̓:Zh )o}"Lx3q{@& 2{a4< .^u#{ح&4L))A{zz7JYuq&;3N̻}r!WaeY+- >h:p>&POsrT*Uu}i-QHf%m_#a)PT@^י[4ڴ@@UuՃy  ά#4)>FП#jTXdaKh NExm8֣uXq6!x^W>$:UoɋDEE!~Qu+HgO @?U sRRa4?~y'547K9wubG2N3,:H#VϡCe󣛑"/Sh*ActTW aq )š eQe7Y7yYgxvҭ֢6R9(hj : -Ǝwj\V('$fJY-lEiCfX7P'^L mPa*%s6Weu@{4T&KӲTԫG $:NﴍnGo0ṉoVX*LUnp'`gi?~׫TLW:Ǜ-"3wM tG:$+%zo&M=KNWxcZ?Ol÷$" y.P\OP7X t 0o NH3NY1.Zr(:)~_^(c{'cߏLO>GyַL.*zb=!Y,g|dFjƓ3pc( -FEt3dIfVUM򿱐ۈr^LQ^s7Naq ;&vm:8|)Cx 7:8D8 ZU 4~;z`\h!: JM꜒"]*M 鱚Xt:%}qP Az|a3k*kd1CQ\ciH#ق-C绎oƏ Ǐ_jҡ.qu%9zLY$tD$qjho@U @ݻv=ꛚ=W#YsP|Bh:ˤewEjVV^V[^ kp}M_SkMϯϬQVշ5*}5X0LZ?&۳G*ĺ{ETͩ$NO4)U$DR4scSg1ohg RfR.U"XW W.ln9n0WXD֊h{g j$61P h4`<}΂G޿gy0@@:E7&":>2掓=CC7󊃅h'6dQ 4 =^]z:?'T nxTmntU) 6|1gkui-ql8ln]4[7ꡞ׺rZAh")I<^.9T㹪hjꊐ zupFkj-~H7b `F y\hhd]c HuJD8 $vlcQC 6Ed egS` +@gjB3m+Poʩ RcBRr@%dSue@zZ'n>Ehze{ -[x13G,k\55-Ȑr)x!\¦ZŬGNg;8Фd=E9nɇGφJĽ̶*pwCm.5 dra^".t%:C9 9Wf7X̶rpSixUk|g2^P-(*2Vg ~~w;.1fi֍ZC{I5wiT!$BTv*/fC7z4xw566f{I%.&pqS>6uv@%%yÎƦȎoeG%25I4R5Mzwџ8jы}"e`Jn^{8Y:Pmrc+}j%⤰$UiPĩVx Z^x{8^br:lY>^&3/W*+rиFx7,i,y9Y9mm"O; /Ĵ!1x[P%bEL̿!n{JHv&>p'}S=F֟1HuO :qqZvR:MhVVOD+΅Q=jo<JϽA pO{45x~g [:>/ HV$23Ŋ +t3nt=~9s;̥_ endstream endobj 149 0 obj 5976 endobj 64 0 obj <> endobj 63 0 obj <>stream xeRmLSWN]k5305":ގ`?(X(@)L  G4SaƲ͏(0t?4rn<vky}K@:@nǻ:1T]ەk\[ĭxJu"B^Eȵ Hc說-ƃC7-33[f]NZ=c2h,UzLWh4ZMB=F:7fm3m{uJ~9,ɤiʪ WF[cO"r8H! 81+#LZ& _ (;.6Ⱥ=mAHFb rzI'g/$`8$)0e/J4aP HdQޞx<6[F>FBex2+piH$OF fr1t>PstR-8kw -3~`kHV^^bfrnݐ<`+)ژ)xl"z%ݼ6A偎-AujX#$wE+hǟnLP8~d!0)6ʍyPG q'eGkbɲӇ`,iulIbŽx6".+'Ex. 2q~ao;)#qIn,BLq~<0pDV2=hf`S$ڈh.~m)VmI`Cq J<ʼF:Gɤh-=^(olt[TfY,n}vn ǘBoLkt`edOG;?4?C&):=­;}&~8"t6zuvL1egfI|tus24D{ZM OcSժK^R4y–6GJ_+=uaxf\ Y+u|Cb]>A˟K柿kܪ9|NXwN"l<_T2)gXfZT"T݁d>rx|<-kJKSv0"74׻MV}c8\o 9a;_Q\":|ӑ!yERGcKu''/J^ND0 endstream endobj 150 0 obj 1241 endobj 80 0 obj <> endobj 79 0 obj <>stream xePSW_ 佩,E2)"+Z/dKUC)*?BbB#0.C8S4ZN!/-}x"w_|K?IHJJIޕQ y(TZVV(=R)ErQ[JRL#VΈWZF/K'ztZRANh2Q&ItLt\&UT?aeL'JJ, ,NbGlC, >ra,`˂`5MNgz`!y!B:Y4yGNG.B !şXNR@cW=hs`8]8h]{qnj]Ԟ$9V)&isn$ek2Kx8c2o{9 p/1jF;ϿuV(~ `əܬL5 ߽L_Os7Y_WބqQqa8Ӂ|)8ԏu;|Pm0uC#n Q ƾBbOHrKeFqAZ6]@4ʝnF5u'IxqgW"D AlG@!E@ ``"YIQi F}b(8,5UnFHG4kvF+mnʶ({L@WpRH]HWIS ~h_˜c~IƊ ?Iݛ{ (޵͜exNufoou؅vG[7N$y8<:G:'o?L:QFg,5.戋oijpPјҶ'F Att4jpauH`>ă3"gMo9ׂ4%?o0be & O4DvYC!wU`Էs A ,pN! 
УzMna%Ґ6]xG[JtEj-alW]7ys'Ve0QP''<#̞͹vAbpʃr(`c=/JT̓hATYi {prr=nB: xň]0=E[}0(r ]Q1 endstream endobj 151 0 obj 1813 endobj 72 0 obj <> endobj 71 0 obj <>stream xuViXa*# *QPܐmFY(ܢ OQ " "ѧF%Dc4m,gtuթsO{oI(SJ"؄FjcS5Qc|uژ,/`'C_bZ:PvhV(D!OW*FΝ]?؆߈r뺛XXM  Pp90{lX9fU 6pE<3_zqgÞ hyq5:%`KfB>f$+S0|`Wx2e܍ӪaʩSw<q7_7be$;+p!n^X@r=6A׆X,z||c 4 E&jf0T/JNs{+ҹB)GNgU?8obG` {rzҘ(ؓq4Kts# Ҹ!Wp"Dҷűxm] O{\mw&&hq%_.Mz|~R/stܳ+p':bsoR'6V'Fpltc T EO)SUeؔY&&-9ЎY{~e<#GzX{ B:q0HpGڛKS2[2ĩ'Xa! $LhXg-˞7 lr6$=mMAֆSnSSsg-YmBQhT/kmxiLLbzotOHËp0 }(l'~¦`sޗ X_-t^TqZw&<y,aqy~o'N^m5 2U 7dO\QVX:L L$%^[n3؁:^%|ɪ=gVäم(]_)_kî Glz;615t4jB:}X880gПj<%"Ov50hNG^byaHBZ3ʵ^Th.f zQu@='yjQpׇWl@9[-P&FӶY'>k/XWuvx#IK8w~S"SB+ @(:}Qk]ۊyo,sl^}'&|]>)X @ɚ,܄]4~JT/l{m#$;X z/09!{ӸG%f`ndGe8`h޻9Ksq\OzF/+zweEI-bZ2_)?AM9< Ҿ?.HhtҾ:0]Tw$^r^MÅ:XXesYU 僦^ ~N˰{g98acoBHCѕZg-W&mRlA6EE `H鬸[_:x3z1?ag]6uyr=@o8ybRZ<K;nz<ҍr @f7@r܃N-Xm, vq@ 7OwP׶`~ 8M-ש" cUm+[T^QRؖ ,e^chAh03a}4GQ,b endstream endobj 152 0 obj 2809 endobj 48 0 obj <> endobj 47 0 obj <>stream xmTiPTW~_<K .#1c)K7 J7iU[%2KiFF F ̀0 8qd":x^MftbxU{W9M9:P4M{DW9ڔF]%Ka-LwfpX"B'c`0=Ћ}ni RpVX`B˵bcq2J/Lp-9#Md`m{HFrHmnŝ2D~s], N4ŦȤ0/q:r@ ?&" |\< 8 gAx \OeJibCdkeb]EΛ*BYf? —^4VbLA ,{ 4AsR^u"AzՇO@v6Sc[5) li%?=_wAf^ }.V"9*>gfis򣺚'jd vU-۽ )vچ'42ΑRE9BtB,"1iJ 0 Q,3V<xwK2`JCDj9WQͶ2'ˉlDnMV g v wm绛d`+%NE|iR p1x `/afI{&/`"k\ϼ fJ.NOY萀.UM[%]gO!Yc>w_|w>Bq9!C[5ݭݬmkGƣ{YXP LPWYkic'G*Nx%n/\XN> endobj 50 0 obj <>stream xUU TW23ZuEȏ I!Pu>ѕ@mkAX K@hv_̙y?P(@zDlȊ!*uJ)iܖ's8VJ:~JI4S)@my`fVAvZJVn'^^^/rR2 KJQehZN$OQdȓUf$*]"M' :[xfcn<*)#GGRrI:(jMY)9 mZTnh^:)FcN͎lUT "B5T(eGESaC-RJj%@-©I2J@R5FM8j"5@,&蛠g7o-&[[\eΊn~mt;|41-ól{|`R8cirF:.>2]8S4B#&Oq٘F "ު4L`,&#:S+n~8`p:?-7)w/gZjFėv4(Eפ4*k:c B !9.xl cIbRbtT\?lrcQY?24 Nmlp2ڀ==R__<[E8/_AktRAbX~WL1OҩMQzݰ{A,2]ʮk05o< Ovw`yCs =Pw%\U!э9m o, L_,zzwB[KI—GX]5 !mAkFB(w.n,m=)a?8la hvE|JAOf~]am?ݸՏؾ[{}{e36 ؍} B;aG}xh fB&l+G |`c = bP׉kX]/e3;.-Bhc:}p5f43}-zxʏ~&魚k<wqeXgZ6lJ2rՉo!J;=|{ΰu+Gߝz>X~|IdR<%xZ˚q"^qlJxKgx 2Bd]QTW}'YNZ֝>EBO;%>GƇvgھ;fHbU(QN:1UXUUBzh6F/kߌG鄶냪htpͫO=7b17yAv>g88YCd !$XH cz+a@mduXZ] ڞ凌 E7Yfh;m!9LZ = r:朦bv轝FlJZ3qIO JB93:,?pOӑ1t~6*%\7?y ag5D-u"9#@yԹ E2}">㿝-@@) )P钢?h}_p3mWluLnynk8( ]nϛ._QIߒFUw؀.f\f #!t4L +DN4rއrT'zV!LD^/Z3o! 
BErglGs ,lҶw.fQ\]oMfXq ɩҜb޵l?}ۺ=b3ce J O^nlui9KXbSh8ҝ^/@CP;$jS8R8]GXG&t!x#qB!b?6&dQFSlƂxv!Wl3w xɐ?0(Hq'Wgrr'ލ椺C+ IO^*ڰ75.9VYyM:/j:ey-[ Ճ/BPD*%Ca[8 )vyq F1 AR[bRVIOl82y ET endstream endobj 154 0 obj 2513 endobj 49 0 obj <> endobj 38 0 obj <>/FontBBox[0 0 138 192]/FontMatrix[1 0 0 1 0 0]/FirstChar 0/LastChar 8/Widths[ 0 0 0 0 51 50 0 56 0] >> endobj 27 0 obj <> endobj 73 0 obj <> endobj 65 0 obj <> endobj 22 0 obj <> endobj 52 0 obj <> endobj 19 0 obj <> endobj 16 0 obj <> endobj 13 0 obj <> endobj 58 0 obj <> endobj 10 0 obj <> endobj 55 0 obj <> endobj 42 0 obj <> endobj 81 0 obj <> endobj 36 0 obj <> endobj 33 0 obj <> endobj 37 0 obj <> endobj 2 0 obj <>endobj xref 0 155 0000000000 65535 f 0000055368 00000 n 0000209775 00000 n 0000055180 00000 n 0000055416 00000 n 0000052377 00000 n 0000000015 00000 n 0000000727 00000 n 0000146318 00000 n 0000146074 00000 n 0000202536 00000 n 0000144047 00000 n 0000143758 00000 n 0000200161 00000 n 0000140859 00000 n 0000140590 00000 n 0000198969 00000 n 0000132131 00000 n 0000131559 00000 n 0000197778 00000 n 0000151120 00000 n 0000150809 00000 n 0000195570 00000 n 0000055485 00000 n 0000055515 00000 n 0000147736 00000 n 0000147419 00000 n 0000192018 00000 n 0000052545 00000 n 0000000746 00000 n 0000002944 00000 n 0000158117 00000 n 0000157817 00000 n 0000208498 00000 n 0000154190 00000 n 0000153836 00000 n 0000207301 00000 n 0000209695 00000 n 0000191743 00000 n 0000055591 00000 n 0000172287 00000 n 0000171852 00000 n 0000204915 00000 n 0000056182 00000 n 0000052697 00000 n 0000002965 00000 n 0000007991 00000 n 0000185679 00000 n 0000185383 00000 n 0000190641 00000 n 0000188019 00000 n 0000187651 00000 n 0000196751 00000 n 0000164281 00000 n 0000163704 00000 n 0000203729 00000 n 0000160897 00000 n 0000160637 00000 n 0000201349 00000 n 0000056267 00000 n 0000052849 00000 n 0000008012 00000 n 0000011274 00000 n 0000178614 00000 n 0000178372 00000 n 0000194379 00000 n 0000056365 00000 n 0000056570 00000 n 0000053001 00000 n 0000011295 00000 n 0000014674 00000 n 0000182465 00000 n 0000182139 00000 n 0000193182 00000 n 0000056666 00000 n 0000056916 00000 n 0000053153 00000 n 0000014695 00000 n 0000016144 00000 n 0000180217 00000 n 0000179964 00000 n 0000206106 00000 n 0000057012 00000 n 0000057257 00000 n 0000057321 00000 n 0000057385 00000 n 0000129857 00000 n 0000129889 00000 n 0000053329 00000 n 0000016165 00000 n 0000019863 00000 n 0000129963 00000 n 0000053481 00000 n 0000019884 00000 n 0000022358 00000 n 0000130070 00000 n 0000053633 00000 n 0000022379 00000 n 0000025124 00000 n 0000130146 00000 n 0000053785 00000 n 0000025145 00000 n 0000028122 00000 n 0000130211 00000 n 0000053940 00000 n 0000028144 00000 n 0000031662 00000 n 0000130310 00000 n 0000054095 00000 n 0000031684 00000 n 0000035050 00000 n 0000130398 00000 n 0000054250 00000 n 0000035072 00000 n 0000037760 00000 n 0000130497 00000 n 0000054405 00000 n 0000037782 00000 n 0000040881 00000 n 0000130574 00000 n 0000054560 00000 n 0000040903 00000 n 0000044011 00000 n 0000130651 00000 n 0000054715 00000 n 0000044033 00000 n 0000047124 00000 n 0000130739 00000 n 0000130972 00000 n 0000131037 00000 n 0000131297 00000 n 0000054870 00000 n 0000047146 00000 n 0000050480 00000 n 0000131394 00000 n 0000055025 00000 n 0000050502 00000 n 0000052355 00000 n 0000131493 00000 n 0000140568 00000 n 0000143736 00000 n 0000146052 00000 n 0000147398 00000 n 0000150787 00000 n 0000153814 00000 n 0000157795 00000 n 0000160615 00000 n 0000163682 00000 n 0000171830 00000 n 0000178350 00000 n 0000179942 00000 n 
[PDF cross-reference table and trailer omitted]
pegasus-wms_4.0.1+dfsg/src/tools/k.2/doc/without.obj0000755000175000017500000002101711757531137021325 0ustar ryngerynge[TGIF 4.1.41 drawing omitted: the figure shows a "submit host" box containing a "local capture" store, a "rem. scheduler" box containing a "GASS cache" store, and an "application" box whose descriptors [0] stdin, [1] stdout, [2] stderr default to /dev/null; the stdio streams are routed between the stores via "Globus GASS" connectors]
pegasus-wms_4.0.1+dfsg/src/tools/k.2/doc/statetable.pdf0000755000175000017500000003147011757531137021755 0ustar ryngerynge[binary PDF omitted: a single-page landscape rendering of statetable.xls ("Sheet1"); the document info gives Title "statetable.xls", Author Jens-S. Vöckler, Creator "Acrobat PDFMaker 5.0 for Excel", Producer "Acrobat Distiller 5.0.5 (Windows)"]
pegasus-wms_4.0.1+dfsg/src/tools/k.2/doc/statetable.xls0000755000175000017500000006300011757531137022004 0ustar ryngerynge[binary Excel workbook omitted: Sheet1 holds the quoting-scanner state table — states FINAL, ERR1-ERR4, NQ_LWS, NQ_MAIN, NQ_BS, SQ_MAIN, SQ_BS, DQ_MAIN, DQ_BS, DQ_DLLR, DQ_VAR1, DQ_VAR2, NQ_DLLR, NQ_VAR1, NQ_VAR2 over input classes including EOS, ', ", $, {, }, BS, ALNUM, LWS, ELSE; legend: FINAL = final state (ok), ERR1 = premature end of string, ERR2 = missing apostrophe, ERR3 = missing quote, ERR4 = illegal variable name]
pegasus-wms_4.0.1+dfsg/src/tools/k.2/uname.hh0000644000175000017500000000364211757531137020010 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
#ifndef _CHIMERA_UNAME_HH
#define _CHIMERA_UNAME_HH

#include <string>
#include <iostream>
#include <sys/utsname.h>

/* MacOS Darwin fix */
#if defined(_SYS_NAMELEN) && ! defined(SYS_NMLN)
#define SYS_NMLN _SYS_NAMELEN
#endif

#include "xml.hh"

class Uname : public XML {
  // class to encapsulate the uname(2) information. This information
  // will only be printed. The code is somewhat system dependent.
public:
  Uname();
  virtual ~Uname();

  virtual std::string toXML( int indent = 0,
                             const char* nspace = 0 ) const;
    // purpose: XML format a uname record.
    // paramtr: indent (IN): indentation level of tag
    //          nspace (IN): If defined, namespace prefix before element
    // returns: string containing the element data

  virtual std::ostream& toXML( std::ostream& s,
                               int indent = 0,
                               const char* nspace = 0 ) const;
    // purpose: XML format a uname record onto a given stream
    // paramtr: s (IO): stream to put information into
    //          indent (IN): indentation level of tag
    //          nspace (IN): If defined, namespace prefix before element
    // returns: s

private:
  struct utsname m_uname;      // system environment (uname -a)
  char m_archmode[SYS_NMLN];   // IA32, IA64, ILP32, LP64, ...
};

#endif // _CHIMERA_UNAME_HH
pegasus-wms_4.0.1+dfsg/src/tools/k.2/statinfo.cc0000644000175000017500000007335711757531137020526 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
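// Class overview (declarations in statinfo.hh): StatInfo is the abstract
// base that renders one <statcall> record. StatFile stats a named file and
// remembers its leading bytes, StatHandle wraps an already-open descriptor,
// StatTemporary combines both for auto-created temporary files, and
// StatFifo sets up a named pipe for application feedback.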
#include <sys/types.h>
#include <sys/stat.h>
#include <fcntl.h>
#include <unistd.h>
#include <errno.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <memory>

#include "statinfo.hh"
#include "time.hh"

#ifndef HAS_SETENV
// implementation found in scan.y
extern int setenv( const char* name, const char* value, int overwrite );
#endif

static const char* RCS_ID =
"$Id: statinfo.cc 50 2007-05-19 00:48:32Z gmehta $";

//
// --- StatInfo -------------------------------------------------
//

const unsigned int StatInfo::c_valid = 0xCAFEBABE;

StatInfo::StatInfo()
  // purpose: ctor
  :m_valid(StatInfo::c_valid)
{
  memset( &m_stat, 0, sizeof(struct stat) );
  m_error = -1;
}

StatInfo::~StatInfo()
  // purpose: dtor
{
  invalidate();
}

bool
StatInfo::isValid( void ) const
  // purpose: check, if object was correctly constructed
  // returns: true for a valid object, false for an invalid one
{
  return ( StatInfo::c_valid == m_valid );
}

void
StatInfo::invalidate( void )
  // purpose: invalidate the current member
{
  m_valid = -1ul;
}

StatInfo::StatSource
StatInfo::whoami( void ) const
  // purpose: if you don't like RTTI
  // returns: type
{
  return StatInfo::IS_INVALID;
}

std::ostream&
StatInfo::toXML( std::ostream& s, int indent, const char* nspace ) const
  // purpose: format content as XML onto stream
  // paramtr: s (IO): stream to put things on
  //          indent (IN): indentation depth, negative for none
  //          nspace (IN): tag namespace, if not null
  // returns: s
{
  // sanity checks
  if ( this->whoami() == StatInfo::IS_INVALID || ! isValid() ) return s;

  // start tag
  XML::startElement( s, "statcall", indent, nspace );

  // print known attributes
  s << " error=\"" << m_error << '"';
  if ( m_id.size() ) s << " id=\"" << m_id << '"';
  if ( m_lfn.size() ) s << " lfn=\"" << m_lfn << '"';
  s << ">\r\n";

  // child printing
  show( s, indent+2, nspace );

  // <statinfo> record
  if ( m_error == 0 ) {
    XML::startElement( s, "statinfo", indent+2, nspace );

    // stat attributes
    s << XML::printf( " mode=\"0%o\" size=\"%lu\" inode=\"%lu\""
                      " nlink=\"%lu\" blksize=\"%lu\"",
                      m_stat.st_mode, m_stat.st_size, m_stat.st_ino,
                      m_stat.st_nlink, m_stat.st_blksize );

    Time mtime( m_stat.st_mtime, -1 );
    s << " mtime=\"" << mtime;
    Time ctime( m_stat.st_ctime, -1 );
    s << "\" ctime=\"" << ctime;
    Time atime( m_stat.st_atime, -1 );
    s << "\" atime=\"" << atime;

    s << "\" uid=\"" << m_stat.st_uid;
    s << "\" gid=\"" << m_stat.st_gid << "\"/>\r\n";
  }

  // data section from stdout and stderr of application
  data( s, indent+2, nspace );

  // finalize
  XML::finalElement( s, "statcall", indent, nspace );
  return s;
}

std::string
StatInfo::toXML( int indent, const char* nspace ) const
  // purpose: XML format a stat info record into a given buffer
  // paramtr: indent (IN): indentation level of tag
  //          nspace (IN): tag namespace, if not null
  // returns: buffer
{
  std::string result;

  // sanity check
  if ( this->whoami() == StatInfo::IS_INVALID ) return result;
  if ( ! isValid() ) return result;

  result += XML::startElement( "statcall", indent, nspace );
  if ( m_id.size() ) result.append(" id=\"").append(m_id).append("\"");
  if ( m_lfn.size() ) result.append(" lfn=\"").append(m_lfn).append("\"");
  result += ">\r\n";

  // child class printing
  result += show( indent+2, nspace );

  // <statinfo> record
  if ( m_error == 0 ) {
    result += XML::startElement( "statinfo", indent+2, nspace );
    result += XML::printf( " mode=\"0%o\" size=\"%lu\" inode=\"%lu\""
                           " nlink=\"%lu\" blksize=\"%lu\"",
                           m_stat.st_mode, m_stat.st_size, m_stat.st_ino,
                           m_stat.st_nlink, m_stat.st_blksize );

    Time mtime( m_stat.st_mtime, -1 );
    result += " mtime=\"" + mtime.date();
    Time atime( m_stat.st_atime, -1 );
    result += "\" atime=\"" + atime.date();
    Time ctime( m_stat.st_ctime, -1 );
    result += "\" ctime=\"" + ctime.date();
    result += XML::printf( "\" uid=\"%lu\" gid=\"%lu\"/>\r\n",
                           m_stat.st_uid, m_stat.st_gid );
  }

  // data section from stdout and stderr of application
  result += data( indent+2, nspace );

  // finalize
  result += XML::finalElement( "statcall", indent, nspace );
  return result;
}
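// For orientation, a successful record produced by the two toXML() variants
// has roughly this shape (attributes abbreviated; the middle element varies
// with the concrete subclass, and <data> appears only where data() emits it):
//
//   <statcall error="0" id="...">
//     <file name="..."/>
//     <statinfo mode="0100644" size="..." ... uid="..." gid="..."/>
//     <data>...</data>
//   </statcall>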
isValid() ) return result; result += XML::startElement( "statcall", indent, nspace ); if ( m_id.size() ) result.append(" id=\"").append(m_id).append("\""); if ( m_lfn.size() ) result.append( " lfn=\"").append(m_lfn).append("\""); result += ">\r\n"; // child class printing result += show( indent+2, nspace ); // record if ( m_error == 0 ) { result += XML::startElement( "statinfo", indent+2, nspace ); result += XML::printf( " mode=\"0%o\" size=\"%lu\" inode=\"%lu\"" " nlink=\"%lu\" blksize=\"%lu\"", m_stat.st_mode, m_stat.st_size, m_stat.st_ino, m_stat.st_nlink, m_stat.st_blksize ); Time mtime( m_stat.st_mtime, -1 ); result += " mtime=\"" + mtime.date(); Time atime( m_stat.st_atime, -1 ); result += " atime=\"" + atime.date(); Time ctime( m_stat.st_ctime, -1 ); result += " ctime=\"" + ctime.date(); result += XML::printf( "\" uid=\"%lu\" gid=\"%lu\"/>\r\n", m_stat.st_uid, m_stat.st_gid ); } // data section from stdout and stderr of application result += data( indent+2, nspace ); // finalize result += XML::finalElement( "statcall", indent, nspace ); return result; } // // --- StatFile ------------------------------------------------- // StatFile::StatFile() :StatInfo(), m_filename(""), m_openmode(O_RDONLY), m_hsize(0), m_done_md5(false) { memset( m_header, 0, sizeof(m_header) ); memset( m_digest, 0, sizeof(m_digest) ); } StatFile::StatFile( const std::string& filename, int openmode, bool truncate ) // purpose: Initialize a stat info buffer with a filename to point to // paramtr: filename (IN): the filename to memorize (deep copy) // openmode (IN): are the fcntl O_* flags to later open calls // truncate (IN): flag to truncate stdout or stderr :StatInfo(), m_filename(filename), m_openmode(openmode), m_hsize(0), m_done_md5(false) { memset( m_header, 0, sizeof(m_header) ); memset( m_digest, 0, sizeof(m_digest) ); if ( truncate ) { // FIXME: As long as we use shared stdio for stdout and stderr, we need // to explicitely truncate (and create) file to zero, if not appending. int fd = open( filename.c_str(), (openmode & O_ACCMODE) | O_CREAT | O_TRUNC, 0666 ); if ( fd != -1 ) close(fd); } errno = 0; if ( StatFile::update() == -1 ) invalidate(); } #if 0 StatFile::StatFile( const char* filename, int openmode, int truncate ) // purpose: Initialize a stat info buffer with a filename to point to // paramtr: filename (IN): the filename to memorize (deep copy) // openmode (IN): are the fcntl O_* flags to later open calls // truncate (IN): flag to truncate stdout or stderr :StatInfo(), m_filename(filename), m_openmode(openmode), m_hsize(0), m_done_md5(false) { memset( m_header, 0, sizeof(m_header) ); memset( m_digest, 0, sizeof(m_digest) ); if ( truncate ) { // FIXME: As long as we use shared stdio for stdout and stderr, we need // to explicitely truncate (and create) file to zero, if not appending. int fd = open( filename, (openmode & O_ACCMODE) | O_CREAT | O_TRUNC, 0666 ); if ( fd != -1 ) close(fd); } errno = 0; if ( StatFile::update() == -1 ) invalidate(); } #endif StatFile::~StatFile() // purpose: dtor { // clean m_filename.clear(); invalidate(); } StatInfo::StatSource StatFile::whoami( void ) const // purpose: if you don't like RTTI // returns: type { return StatInfo::IS_FILE; } int StatFile::update() // purpose: update existing and initialized statinfo with latest info // returns: the result of the stat() or fstat() system call. 
{ int result = stat( m_filename.c_str(), &m_stat ); m_error = errno; if ( isValid() && result != -1 && S_ISREG(m_stat.st_mode) && m_stat.st_size > 0 ) { int fd = open( m_filename.c_str(), O_RDONLY ); if ( fd != -1 ) { m_hsize = read( fd, (char*) m_header, sizeof(m_header) ); close(fd); } } return result; } std::ostream& StatFile::show( std::ostream& s, int indent, const char* nspace ) const // purpose: format content as XML onto stream // paramtr: s (IO): stream to put things on // indent (IN): indentation depth, negative for none // nspace (IN): tag namespace, if not null // returns: s { // sanity check if ( ! isValid() ) return s; // element s << XML::startElement( s, "file", indent, nspace ); s << " name=\"" << m_filename << '"'; // optional MD5 sum Digest d; if ( getMD5sum(d) ) { s << " md5sum=\""; for ( size_t i=0; i 0 ) { // hex information ssize_t end = sizeof(m_header); if ( m_stat.st_size < end ) end = m_stat.st_size; s << '>'; for ( ssize_t i=0; i\r\n"; } else { s << "/>\r\n"; } return s; } std::string StatFile::show( int indent, const char* nspace ) const // purpose: Generate the element-specific information. Called from toXML() // paramtr: buffer (IO): area to store the output in // indent (IN): indentation level of tag // returns: string with the information. { std::string buffer; // sanity check if ( ! isValid() ) return buffer; // element buffer += XML::startElement( "file", indent, nspace ); buffer += " name=\"" + m_filename + "\""; // optional MD5 sum Digest d; if ( getMD5sum(d) ) { buffer += " md5sum=\""; for ( size_t i=0; i 0 ) { // hex information ssize_t i, end = sizeof(m_header); if ( m_stat.st_size < end ) end = m_stat.st_size; buffer += '>'; for ( i=0; i } void StatFile::md5sum() // purpose: calculate the MD5 checksum over the complete file // throws : sum_error on IO error, bad_alloc on out of memory { // FIXME: use mmap instead of read int fd = open( m_filename.c_str(), O_RDONLY ); if ( fd == -1 ) throw sum_error( "open " + m_filename, errno ); MD5_CTX context; MD5_Init( &context ); size_t size( getpagesize() << 4 ); std::auto_ptr buffer( new char[size] ); ssize_t rsize; while ( (rsize = read( fd, &(*buffer), size )) > 0 ) { MD5_Update( &context, &(*buffer), rsize ); } if ( rsize == -1 ) throw sum_error( "read " + m_filename, errno ); if ( close(fd) == -1 ) throw sum_error( "close " + m_filename, errno ); MD5_Final( m_digest, &context ); m_done_md5 = true; } bool StatFile::getMD5sum( StatFile::Digest digest ) const // purpose: obtains the stored MD5 sum // paramtr: digest (OUT): a digest area to store the 128 bits into // returns: true, if a string was stored in the digest area, // false, if no sum was obtained, and the digest is untouched { // FIXME: can store to NULL :-( if ( m_done_md5 ) memcpy( digest, m_digest, sizeof(m_digest) ); return m_done_md5; } // // --- StatHandle ------------------------------------------------- // StatHandle::StatHandle( int descriptor ) // purpose: Initialize a stat info buffer with a filename to point to // paramtr: descriptor (IN): the handle to attach to :StatInfo(), m_descriptor(descriptor) { StatHandle::update(); } StatHandle::~StatHandle() // purpose: dtor { // DO NOT close foreign handle invalidate(); } StatInfo::StatSource StatHandle::whoami( void ) const // purpose: if you don't like RTTI // returns: type { return StatInfo::IS_FILE; } int StatHandle::update() // purpose: update existing and initialized statinfo with latest info // returns: the result of the stat() or fstat() system call. 
//
// --- StatHandle -------------------------------------------------
//

StatHandle::StatHandle( int descriptor )
  // purpose: Initialize a stat info buffer with an existing descriptor
  // paramtr: descriptor (IN): the handle to attach to
  :StatInfo(), m_descriptor(descriptor)
{
  StatHandle::update();
}

StatHandle::~StatHandle()
  // purpose: dtor
{
  // DO NOT close foreign handle
  invalidate();
}

StatInfo::StatSource
StatHandle::whoami( void ) const
  // purpose: if you don't like RTTI
  // returns: type
{
  return StatInfo::IS_FILE;
}

int
StatHandle::update()
  // purpose: update existing and initialized statinfo with latest info
  // returns: the result of the stat() or fstat() system call.
{
  int result = -1;
  if ( m_descriptor != -1 ) {
    errno = 0;
    result = fstat( m_descriptor, &m_stat );
    m_error = errno;
  } else {
    memset( &m_stat, 0, sizeof(struct stat) );
    m_error = errno = EBADF;
  }
  return result;
}

std::ostream&
StatHandle::show( std::ostream& s, int indent, const char* nspace ) const
  // purpose: format content as XML onto stream
  // paramtr: s (IO): stream to put things on
  //          indent (IN): indentation depth, negative for none
  //          nspace (IN): tag namespace, if not null
  // returns: s
{
  // sanity check
  if ( ! isValid() ) return s;

  // <descriptor> element
  XML::startElement( s, "descriptor", indent, nspace );
  s << " number=\"" << m_descriptor << "\"/>\r\n";
  return s;
}

std::string
StatHandle::show( int indent, const char* nspace ) const
  // purpose: Generate the element-specific information. Called from toXML()
  // paramtr: indent (IN): indentation level of tag
  // returns: string with the information.
{
  std::string buffer;

  // sanity check
  if ( ! isValid() ) return buffer;

  // <descriptor> element
  buffer += XML::startElement( "descriptor", indent, nspace );
  buffer += XML::printf( " number=\"%d\"/>\r\n", m_descriptor );
  return buffer;
}

int
StatHandle::forcefd( int fd ) const
  // purpose: force open a file on a certain fd
  // paramtr: fd (IN): is the file descriptor to plug onto. If this fd is
  //          the same as the descriptor in info, nothing will be done.
  // returns: 0 if all is well, or fn was NULL or empty.
  //          1 if opening a filename failed,
  //          2 if dup2 call failed
{
  // check for validity
  if ( m_descriptor == -1 ) return 1;

  // create a duplicate of the new fd onto the given (stdio) fd. This operation
  // is guaranteed to close the given (stdio) fd first, if open.
  if ( m_descriptor != fd ) {
    // FIXME: Does dup2 guarantee noop for newfd==fd on all platforms ?
    if ( dup2( m_descriptor, fd ) == -1 ) return 2;
  }

  return 0;
}
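// Illustrative sketch of the forcefd() pattern: plug an already-open handle
// onto a stdio descriptor before exec'ing the payload.
//
//   StatHandle err( open( "job.err", O_WRONLY | O_CREAT | O_APPEND, 0666 ) );
//   if ( err.isValid() && err.forcefd( STDERR_FILENO ) == 0 ) {
//     // fd 2 now refers to job.err
//   }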
//
// --- StatTemporary -------------------------------------------------
//

StatTemporary::StatTemporary( int fd, const char* fn )
  // purpose: Initialize for an externally generated temporary file
  // paramtr: fd (IN): is the connected file descriptor
  //          fn (IN): is the concretized temporary filename
  :StatFile(),StatHandle(-1)
{
  m_descriptor = fd;
  m_filename = fn;
  this->StatHandle::update();
}

StatTemporary::StatTemporary( char* pattern )
  // purpose: Initialize a stat info buffer with a temporary file
  // paramtr: pattern (IO): is the input pattern to mkstemp(), will be modified!
  :StatFile(),StatHandle(-1)
{
  int result = mkstemp(pattern);
  if ( result == -1 ) {
    // mkstemp has failed, au weia!
    m_error = errno;
    invalidate();
  } else {
    // try to ensure append mode for the file, because it is shared
    // between jobs. If the SETFL operation fails, well there is nothing
    // we can do about that.
    int flags = fcntl( result, F_GETFL );
    if ( flags != -1 )
      fcntl( result, F_SETFL, (m_openmode = (flags | O_APPEND)) );

    // this file descriptor is NOT to be passed to the jobs? So far, the
    // answer is true. We close this fd on exec of sub jobs, so it will
    // be invisible to them.
    flags = fcntl( result, F_GETFD );
    if ( flags != -1 ) fcntl( result, F_SETFD, flags | FD_CLOEXEC );

    // the return is the chosen filename as well as the opened descriptor.
    // we *could* unlink the filename right now, and be truly private, but
    // users may want to look into the log files of long persisting operations.
    m_descriptor = result;
    m_filename = pattern;
    // m_openmode = O_RDWR | O_EXCL;
    this->StatHandle::update();
  }
}

StatTemporary::StatTemporary( const std::string& p, bool c_o_e )
  // purpose: Initialize a stat info buffer with a temporary file
  // paramtr: p (IO): is the input pattern to mkstemp(), will be modified!
  //          c_o_e (IN): if true, set FD_CLOEXEC fd flag, unset if false
  // warning: pattern will be copied for modification
  :StatFile(),StatHandle(-1)
{
  std::auto_ptr<char> pattern( new char[p.size()+1] );
  memcpy( &(*pattern), p.c_str(), p.size()+1 );

  int result = mkstemp( &(*pattern) );
  if ( result == -1 ) {
    // mkstemp has failed, au weia!
    m_error = errno;
    invalidate();
  } else {
    // try to ensure append mode for the file, because it is shared
    // between jobs. If the SETFL operation fails, well there is nothing
    // we can do about that.
    int flags = fcntl( result, F_GETFL );
    if ( flags != -1 )
      fcntl( result, F_SETFL, (m_openmode = (flags | O_APPEND)) );

    // this file descriptor is NOT to be passed to the jobs? So far, the
    // answer is true. We close this fd on exec of sub jobs, so it will
    // be invisible to them.
    flags = fcntl( result, F_GETFD );
    if ( flags != -1 ) {
      if ( c_o_e ) flags |= FD_CLOEXEC;
      else flags &= ~FD_CLOEXEC;
      fcntl( result, F_SETFD, flags );
    }

    // the return is the chosen filename as well as the opened descriptor.
    // we *could* unlink the filename right now, and be truly private, but
    // users may want to look into the log files of long persisting operations.
    m_descriptor = result;
    m_filename = &(*pattern);
    // m_openmode = O_RDWR | O_EXCL;
    this->StatHandle::update();
  }
}

StatTemporary::~StatTemporary()
  // purpose: dtor
{
  // close descriptor
  close( m_descriptor );
  m_descriptor = -1;

  // remove filename
  unlink( m_filename.c_str() );
  m_filename.clear();
  invalidate();
}

StatInfo::StatSource
StatTemporary::whoami( void ) const
  // purpose: if you don't like RTTI
  // returns: type
{
  return StatInfo::IS_TEMP;
}

int
StatTemporary::update()
  // purpose: update existing and initialized statinfo with latest info
  // returns: the result of the stat() or fstat() system call.
{
#if 1
  return this->StatFile::update();
#else
  return this->StatHandle::update();
#endif
}
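// Illustrative sketch: capture a job's stdout in an auto-removed temporary
// file and embed its beginning into the invocation record.
//
//   StatTemporary out( std::string("/tmp/ks-out-XXXXXX"), true );
//   if ( out.isValid() && out.forcefd( STDOUT_FILENO ) == 0 ) {
//     // ... run the payload, then:
//     std::string record = out.toXML( 2, 0 );  // includes a <data> element
//   }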
isValid() ) return buffer; // late update for temp files errno = 0; const_cast(this)->StatFile::update(); // element buffer += XML::startElement( "temporary", indent, nspace ) + " name=\"" + m_filename + "\" descriptor=\""; buffer += XML::printf( "%d\"/>\r\n", m_descriptor ); return buffer; } int StatTemporary::forcefd( int fd ) const // purpose: force open a file on a certain fd // paramtr: fd (IN): is the file descriptor to plug onto. If this fd is // the same as the descriptor in info, nothing will be done. // returns: 0 if all is well, or fn was NULL or empty. // 1 if opening a filename failed, // 2 if dup2 call failed { return StatHandle::forcefd( fd ); } std::ostream& StatTemporary::data( std::ostream& s, int indent, const char* nspace ) const // purpose: format content as XML onto stream // paramtr: s (IO): stream to put things on // indent (IN): indentation depth, negative for none // nspace (IN): tag namespace, if not null // returns: s { if ( ! isValid() ) return s; // data section from stdout and stderr of application if ( m_error == 0 && m_stat.st_size ) { size_t dsize = getpagesize()-1; size_t fsize = m_stat.st_size; // element s << XML::startElement( s, "data", indent, nspace ); if ( fsize > dsize ) s << " truncated=\"true\""; // content for element if ( fsize > 0 ) { char* data = new char[dsize+1]; int fd = dup(m_descriptor); s << '>'; if ( fd != -1 ) { if ( lseek( fd, SEEK_SET, 0 ) != -1 ) { ssize_t rsize = read( fd, data, dsize ); s << XML::quote( std::string( data, rsize) ); } close(fd); } delete[] data; s << "\r\n"; } else { s << "/>\r\n"; } } return s; } std::string StatTemporary::data( int indent, const char* nspace ) const // purpose: Generate special post-element code, e.g. stderr and stdout data // paramtr: buffer (IO): area to store the output in // indent (IN): indentation level for tag // returns: buffer { std::string buffer; if ( ! isValid() ) return buffer; // data section from stdout and stderr of application if ( m_error == 0 && m_stat.st_size ) { size_t dsize = getpagesize()-1; size_t fsize = m_stat.st_size; // element buffer += XML::startElement( "data", indent, nspace ); if ( fsize > dsize ) buffer += " truncated=\"true\""; // content for element if ( fsize > 0 ) { char* data = new char[dsize+1]; int fd = dup(m_descriptor); buffer += '>'; if ( fd != -1 ) { if ( lseek( fd, SEEK_SET, 0 ) != -1 ) { ssize_t rsize = read( fd, data, dsize ); buffer += XML::quote( std::string( data, rsize ) ); } close(fd); } buffer += "\r\n"; delete[] data; } else { buffer += "/>\r\n"; } } return buffer; } // // --- StatFifo ------------------------------------------------- // StatFifo::StatFifo( const std::string& pattern, const std::string& key ) // purpose: Initialize a stat info buffer associated with a named pipe // paramtr: pattern (IN): input pattern to mkstemp(), will be modified! // otherwise append hyphen-XXXXXX // key (IN): is the environment key at which to store the filename :StatFile(),StatHandle(-1) { std::string temp(pattern); if ( temp.rfind("XXXXXX") != temp.size()-6 ) temp += "-XXXXXX"; // make a copy that is modifiable (for mkstemp) std::auto_ptr p( new char[ temp.size()+1 ] ); memcpy( &(*p), temp.c_str(), temp.size()+1 ); // create a temporary filename int result = mkstemp( &(*p) ); if ( result == -1 ) { // mkstemp has failed, au weia! m_error = errno; invalidate(); } else { // create a FIFO instead of a regular tmp file. // we could have used mktemp() right away, mkstemp() is NOT safer here! 
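    // In sketch form, the name-reservation idiom used below is (assumed
    // stand-alone example, error handling elided):
    //
    //   char name[] = "/tmp/fifo-XXXXXX";
    //   int fd = mkstemp( name );   /* reserve a unique filename */
    //   close( fd );
    //   unlink( name );             /* drop the regular file... */
    //   mkfifo( name, 0660 );       /* ...and claim the name as a FIFO */
    //
    // The unlink/mkfifo pair is not atomic, hence the race noted below.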
close( result ); unlink( &(*p) ); // FIXME: race condition possible if ( (result = mkfifo( &(*p), 0660 )) == -1 ) { m_error = errno; invalidate(); } else { // open in non-blocking mode for reading. // WARNING: DO NOT open in O_RDONLY or suffer the consequences. // You must open in O_RDWR to avoid having to deal with EOF // whenever the clients drop to zero. m_openmode = O_RDWR | O_NONBLOCK; if ( (result = open( &(*p), m_openmode )) == -1 ) { m_error = errno; invalidate(); } else { // this file descriptor is NOT to be passed to the jobs? So far, // the answer is true. We close this fd on exec of sub jobs, so // it will be invisible to them. int flags = fcntl( result, F_GETFD ); if ( flags != -1 ) fcntl( result, F_SETFD, flags | FD_CLOEXEC ); // the return is the chosen filename as well as the opened // descriptor. We cannot unlink the filename right now. */ m_descriptor = result; // obtain a copy inside the string m_filename = std::string( &(*p) ); // fix environment -- use setenv from our/system implementation if ( key.size() && (isalnum(key[1]) || key[1]=='_') ) { size_t size = strlen(&(*p))+1; char* value = static_cast( malloc(size) ); memcpy( value, &(*p), size ); if ( setenv( key.c_str(), value, 1 ) == -1 ) fprintf( stderr, "# Warning: Unable to set %s: %d: %s\n", key.c_str(), errno, strerror(errno) ); delete[] value; } StatHandle::update(); } } } } #if 0 StatFifo::StatFifo( char* pattern, const char* key ) // purpose: Initialize a stat info buffer associated with a named pipe // paramtr: pattern (IO): input pattern to mkstemp(), will be modified! // key (IN): is the environment key at which to store the filename :StatFile(),StatHandle(-1) { int result = mkstemp(pattern); if ( result == -1 ) { // mkstemp has failed, au weia! m_error = errno; invalidate(); } else { // create a FIFO instead of a regular tmp file. // we could have used mktemp() right away, mkstemp() is NOT safer here! close( result ); unlink( pattern ); if ( (result = mkfifo( pattern, 0660 )) == -1 ) { m_error = errno; invalidate(); } else { // open in non-blocking mode for reading. // WARNING: DO NOT open in O_RDONLY or suffer the consequences. // You must open in O_RDWR to avoid having to deal with EOF // whenever the clients drop to zero. m_openmode = O_RDWR | O_NONBLOCK; if ( (result = open( pattern, m_openmode )) == -1 ) { m_error = errno; invalidate(); } else { // this file descriptor is NOT to be passed to the jobs? So far, // the answer is true. We close this fd on exec of sub jobs, so // it will be invisible to them. int flags = fcntl( result, F_GETFD ); if ( flags != -1 ) fcntl( result, F_SETFD, flags | FD_CLOEXEC ); // the return is the chosen filename as well as the opened // descriptor. We cannot unlink the filename right now. 
*/ m_descriptor = result; m_filename = pattern; // fix environment if ( key != NULL ) { size_t size = strlen(key) + strlen(pattern) + 2; char* temp = static_cast( malloc(size) ); memset( temp, 0, size-- ); strncpy( temp, key, size ); strncat( temp, "=", size ); strncat( temp, pattern, size ); putenv( temp ); // DO NOT free this string here nor now } StatHandle::update(); } } } } #endif StatFifo::~StatFifo() // purpose: dtor { // close descriptor close( m_descriptor ); m_descriptor = -1; // remove filename unlink( m_filename.c_str() ); m_filename.clear(); } StatInfo::StatSource StatFifo::whoami( void ) const // purpose: if you don't like RTTI // returns: type { return StatInfo::IS_FIFO; } int StatFifo::update() // purpose: update existing and initialized statinfo with latest info // returns: the result of the stat() or fstat() system call. { return StatHandle::update(); } std::ostream& StatFifo::show( std::ostream& s, int indent, const char* nspace ) const // purpose: format content as XML onto stream // paramtr: s (IO): stream to put things on // indent (IN): indentation depth, negative for none // nspace (IN): tag namespace, if not null // returns: s { // sanity check if ( ! isValid() ) return s; // element s << XML::startElement( s, "fifo", indent, nspace ); s << " name=\"" << m_filename << '"'; s << " descriptor=\"" << m_descriptor << '"'; s << " count=\"" << m_count << '"'; s << " rsize=\"" << m_rsize << '"'; s << " wsize=\"" << m_wsize << '"'; return s << "/>\r\n"; } std::string StatFifo::show( int indent, const char* nspace ) const // purpose: Generate the element-specific information. Called from toXML() // paramtr: indent (IN): indentation level of tag // nspace (IN): tag namespace, if not null // returns: string with the information. { std::string buffer; if ( ! isValid() ) return buffer; // element buffer += XML::startElement( "fifo", indent, nspace ) + " name=\"" + m_filename + "\" descriptor=\""; buffer += XML::printf( "%d\" count=\"%u\" rsize=\"%u\" wsize=\"%u\"/>\r\n", m_descriptor, m_count, m_rsize, m_wsize ); return buffer; } int StatFifo::forcefd( int fd ) const // purpose: force open a file on a certain fd // paramtr: fd (IN): is the file descriptor to plug onto. If this fd is // the same as the descriptor in info, nothing will be done. // returns: 0 if all is well, or fn was NULL or empty. 
// 1 if opening a filename failed, // 2 if dup2 call failed { return StatHandle::forcefd( fd ); } pegasus-wms_4.0.1+dfsg/src/edu/0000755000175000017500000000000011757531667015410 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/0000755000175000017500000000000011757531667016174 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/ikcap/0000755000175000017500000000000011757531667017263 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/ikcap/workflows/0000755000175000017500000000000011757531667021320 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/ikcap/workflows/util/0000755000175000017500000000000011757531667022275 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/ikcap/workflows/util/logging/0000755000175000017500000000000011757551566023723 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/ikcap/workflows/util/logging/LoggingKeys.java0000644000175000017500000001653211757531137027007 0ustar ryngeryngepackage edu.isi.ikcap.workflows.util.logging; /** * Defines keys for creating logs within the workflow system * @author pgroth * */ public interface LoggingKeys { public static String MSG_ID = "msgid"; public static String EVENT_ID_KEY = "eventId"; public static String PORTFOLIO_ID = "portfolio.id"; public static String REQUEST_ID = "request.id"; public static String DAX_ID = "dax.id"; public static String DAG_ID = "dag.id"; public static String JOB_NUMBER = "job.id"; public static String JOB_ID = "job.id"; public static String JOB_INPUTS = "job.input.ids"; public static String JOB_OUTPUTS = "job.output.ids"; public static String SEED_ID = "seed.id"; public static String TEMPLATE_ID = "template.id"; public static String SEED_NAME = "seed.name"; public static String ONTOLOGY_LOCATION = "ontology.location"; public static String EVENT_QUERY_PROCESSCATALOG = "event.query.processcatalog"; public static String EVENT_QUERY_DATACATALOG = "event.query.datacatalog"; public static String EVENT_WG = "event.wg"; public static String EVENT_WG_LOAD_SEED = "event.wg.loadseed"; public static String EVENT_WG_INITIALIZE_PC = "event.wg.pcinitialize"; public static String EVENT_WG_INITIALIZE_DC = "event.wg.dcinitialize"; public static String EVENT_WG_GET_CANDIDATE_SEEDS = "event.wg.getcandidateseeds"; public static String EVENT_WG_BACKWARD_SWEEP = "event.wg.backwardsweep"; public static String EVENT_WG_SPECIALIZE = "event.wg.specializetemplate"; public static String EVENT_WG_DATA_SELECTION = "event.wg.dataselection"; public static String EVENT_WG_FETCH_METRICS = "event.wg.fetchmetrics"; public static String EVENT_WG_FORWARD_SWEEP = "event.wg.forwardsweep"; public static String EVENT_WG_CONFIGURE = "event.wg.configuretemplate"; public static String EVENT_WG_GET_DAX = "event.wg.getdax"; public static String EVENT_INFER_TEMPLATE = "event.matching.infertemplate"; public static String DOMAIN = "domain"; public static String SEED = "seed"; public static String TEMPLATE = "template"; public static String NO_MATCH = "No Match"; public static String QUEUED_TEMPLATES = "templates.queue"; public static String SPECIALIZED_TEMPLATES_Q = "templates.specialized.queue"; public static String CONFIGURED_TEMPLATES_Q = "templates.configured.queue"; public static String EVENT_RANKING = "event.ranking"; public static String EVENT_PEGASUS_PLAN = "event.pegasus.plan"; public static String EVENT_PEGASUS_REDUCE = "event.pegasus.reduce"; public static String EVENT_PEGASUS_SITESELECTION = "event.pegasus.siteselection"; public static String EVENT_PEGASUS_ADDDATASTAGING = "event.pegasus.adddatastaging"; public static String 
EVENT_PEGASUS_ADDREGISTRATION = "event.pegasus.addregistration"; public static String EVENT_PEGASUS_ADDCLEANUP = "event.pegasus.addcleanup"; public static String EVENT_PEGASUS_CLUSTER = "event.pegasus.cluster"; public static String EVENT_PEGASUS_GENERATECLEANUP = "event.pegasus.generatecleanup"; public static final String DATA_CHARACTERIZATION_PROGRAM = "DataCharacterization"; public static String EVENT_DC_CHARACTERIZE = "event.dc.characterize"; public static String EVENT_DC_CHARACTERIZE_STATUS = "event.dc.characterize.status"; public static String EVENT_DC_CHARACTERIZE_STATUSES = "event.dc.characterize.statuses"; public static String EVENT_DC_RICH_CHARACTERIZE = "event.dc.richcharacterize"; public static String EVENT_DC_GET_DATASOURCES = "event.dc.get.datasources"; public static String EVENT_DC_GET_ALL_METRICS = "event.dc.get.all.metrics"; public static String EVENT_DC_GET_DMO_METRICS = "event.dc.get.dmo.metrics"; public static String EVENT_DC_DISSEMINATE_DATASOURCE = "event.dc.disseminate.datasource"; public static String EVENT_DC_INGEST_DATASOURCE = "event.dc.ingest.datasource"; public static String EVENT_DC_GET_INGEST_STATUS = "event.dc.get.ingest.status"; public static String EVENT_DC_UPDATE_INGEST_MAP = "event.dc.update.ingest.map"; public static String EVENT_DC_GET_INGEST_TIME = "event.dc.get.ingest.time"; public static String EVENT_DC_REGISTER_DATASOURCE = "event.dc.register.datasource"; public static String EVENT_DC_GET_ALL_DATASOURCES = "event.dc.getall.datasources"; public static String EVENT_DC_GET_UNCHARACTERIZED_DATASOURCES = "event.dc.getuncharacterized.datasources"; public static String EVENT_DC_GET_DATASOURCE = "event.dc.get.datasource"; public static String EVENT_DC_REMOVE_DATASOURCE = "event.dc.remove.datasource"; public static String EVENT_DC_DATASOURCE_EXISTS = "event.dc.datasource.exists"; public static String EVENT_DC_GET_DATASOURCE_UUID_FOR_DESCRIPTION = "event.dc.get.datasource.uuid.for.description"; public static String EVENT_DC_GET_DATASOURCE_DESCRIPTION_FOR_UUID = "event.dc.get.datasource.description.for.uuid"; public static String EVENT_DC_SET_DATASOURCE_DESCRIPTION_FOR_UUID = "event.dc.set.datasource.description.for.uuid"; public static String EVENT_DC_AUGMENT_DATASOURCE = "event.dc.augment.datasource"; public static String EVENT_DC_GET_AUGMENTED_TYPES = "event.dc.get.augmented.types"; public static String EVENT_DC_SATURATE_DATASOURCE = "event.dc.saturate.datasource"; public static String EVENT_DC_GET_SATURATED_TYPES = "event.dc.get.saturated.types"; public static String EVENT_DC_GET_DATASOURCE_LOCATIONS = "event.dc.get.datasourcelocations"; public static String EVENT_DC_ADD_DATASOURCE_LOCATION = "event.dc.add.datasourcelocation"; public static String EVENT_DC_REMOVE_DATASOURCE_LOCATION = "event.dc.remove.datasourcelocation"; public static String MSG = "msg"; public static String PROG = "prog"; public static String PROG_SETUP = "prog.setup"; public static String CATALOG_URL = "catalog.url"; public static String QUERY_NUMBER = "query.number"; public static String QUERY_NAME = "query.name"; public static String QUERY_INPUT = "query.input"; public static String QUERY_OUTPUT = "query.output"; public static String QUERY_ID = "query.id"; public static String QUERY_ARGUMENTS = "query.arguments"; public static String QUERY_RESPONSE = "query.response"; public static String FILE_OUTPUT_NAME = "file.output.name"; public static String FILE_OUTPUT_CONTENTS = "file.output.contents"; public static String FILE_PRIORTY = "file.priorty"; public static String FILE_TYPE = 
"file.type"; public static String TIME_START = "time.start"; public static String TIME_END = "time.end"; public static String SYSTEM_HOSTNAME = "system.hostname"; public static String SYSTEM_HOSTADDR = "system.hostaddr"; public static String SYSTEM_OS = "system.os"; public static String SYSTEM_ARCHITECTURE = "system.architecture"; public static String SYSTEM_NODENAME = "system.nodename"; public static String SYSTEM_NUMBEROFPROCESSORS = "system.numberOfProcessors"; public static String JOB_EXITCODE = "job.exitcode"; public static String JOB_ARGUMENTS = "job.arguments"; public static String JOB_ENVIRONMENTVARIABLE = "job.environmentVariable"; public static String JOB_RESOURCE_INFORMATION = "job.resource.information"; public static String PERFMETRIC_CPU_UTILIZATION = "perfmetric.cpu.utilization"; public static String PERFMETRIC_MEMORY_UTILIZATION = "perfmetric.memory.utilization"; public static String PERFMETRIC_NETWORK_BANDWIDTH = "perfmetric.network.bandwidth"; public static String PERFMETRIC_TIME_DURATION = "perfmetric.time.duration"; } pegasus-wms_4.0.1+dfsg/src/edu/isi/ikcap/workflows/util/logging/LogEvent.java0000644000175000017500000000740111757531137026303 0ustar ryngeryngepackage edu.isi.ikcap.workflows.util.logging; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.UUID; public class LogEvent implements LoggingKeys { private static String START_POSTFIX = ".start"; private static String END_POSTFIX = ".end"; /** * The variable that triggers generation of message id and the event id * in the messages. */ private static boolean _ADD_BLOAT = false; private String _eventName; private String _progName; private Map _entityIdMap; private String _eventId; public LogEvent (String eventName, String programName, String entityType, String entityId) { _eventName = eventName; _progName = programName; _eventId = eventName + "_" + UUID.randomUUID().toString(); _entityIdMap = new HashMap(); _entityIdMap.put(entityType, entityId); } public LogEvent (String eventName, String programName, Map entityTypeToIdMap) { _eventName = eventName; _progName = programName; _eventId = eventName + "_" + UUID.randomUUID().toString(); _entityIdMap = entityTypeToIdMap; } public LogEvent (String eventName, String programName) { _eventName = eventName; _progName = programName; _eventId = eventName + "_" + UUID.randomUUID().toString(); _entityIdMap = new HashMap(); } public EventLogMessage createStartLogMsg() { String msgid = UUID.randomUUID().toString(); EventLogMessage elm = new EventLogMessage(_eventName + START_POSTFIX); if( _ADD_BLOAT ){ elm= elm.add(MSG_ID, msgid).add(EVENT_ID_KEY, _eventId); } elm.add(PROG, _progName); for (Map.Entry entry : _entityIdMap.entrySet()) { elm.add(entry.getKey(), entry.getValue()); } return elm; } public EventLogMessage createLogMsg() { String msgid = UUID.randomUUID().toString(); EventLogMessage elm = new EventLogMessage(_eventName); if( _ADD_BLOAT ){ elm= elm.add(MSG_ID, msgid).add(EVENT_ID_KEY, _eventId); } for (Map.Entry entry : _entityIdMap.entrySet()) { elm.add(entry.getKey(), entry.getValue()); } return elm; } public EventLogMessage createEndLogMsg() { String msgid = UUID.randomUUID().toString(); EventLogMessage elm = new EventLogMessage(_eventName + END_POSTFIX); if( _ADD_BLOAT ){ elm= elm.add(MSG_ID, msgid).add(EVENT_ID_KEY, _eventId); } for (Map.Entry entry : _entityIdMap.entrySet()) { elm.add(entry.getKey(), entry.getValue()); } return elm; } public static EventLogMessage createIdHierarchyLogMsg (String parentIdType, String parentId, 
String childIdType, Iterator childIds) { String msgid = UUID.randomUUID().toString(); String eventId = "idHierarchy_" + UUID.randomUUID().toString(); EventLogMessage lm = new EventLogMessage("event.id.creation"); if( _ADD_BLOAT ){ lm= lm.add(MSG_ID, msgid).add(EVENT_ID_KEY, eventId); } lm.add("parent.id.type", parentIdType).add("parent.id", parentId); lm.add("child.ids.type", childIdType); StringBuffer cids = new StringBuffer("{"); while (childIds.hasNext()) { cids.append(childIds.next()); cids.append(","); } cids.append("}"); lm.add("child.ids", cids.toString()); return lm; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/ikcap/workflows/util/logging/Escape.java0000644000175000017500000001110311757531137025752 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.ikcap.workflows.util.logging; /** * This class tries to define an interface to deal with quoting, escaping, * and the way back. The quoting algorithm is safe to only itself. Thus, * *
 * unescape( escape( s ) ) === s
 *
 * holds true, but
 *
 * escape( unescape( s ) ) =?= s
 *
* * does not necessarily hold. * * @author Gaurang Mehta * @author Karan Vahi * @author Jens-S. Vöckler * @version $Revision: 1.1 $ */ public class Escape { /** * Defines the character used to escape characters. */ private char m_escape; /** * Defines the set of characters that require escaping. */ private String m_escapable; /** * Defines the default quoting and escaping rules, escaping the * apostrophe, double quote and backslash. The escape character * is the backslash. * */ public Escape() { m_escapable = "\"'\\"; m_escape = '\\'; } /** * Constructs arbitrary escaping rules. * * @param escapable is the set of characters that require escaping * @param escape is the escape character itself. */ public Escape( String escapable, char escape ) { this( escapable, escape, true ); } /** * Constructs arbitrary escaping rules. * * @param escapable is the set of characters that require escaping * @param escape is the escape character itself. * @param escapeEscape boolean indicating whether escape character itself * should be escaped if not present in escapable. */ public Escape( String escapable, char escape , boolean escapeEscape ) { m_escape = escape; m_escapable = escapable; if( escapeEscape ){ // ensure that the escape character is part of the escapable char set if ( escapable.indexOf(escape) == -1 ) m_escapable += m_escape; } } /** * Transforms a given string by escaping all characters inside the * quotable characters set with the escape character. The escape * character itself is also escaped. * * @param s is the string to escape. * @return the quoted string * @see #unescape( String ) */ public String escape( String s ) { // sanity check if ( s == null ) return null; StringBuffer result = new StringBuffer( s.length() ); for ( int i=0; i " + args[i] ); System.out.println( "e(s) > " + e ); System.out.println( "u(e(s))> " + me.unescape(e) ); System.out.println( "u(s) > " + u ); System.out.println( "e(u(s))> " + me.escape(u) ); System.out.println(); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/ikcap/workflows/util/logging/EventLogMessage.java0000644000175000017500000003000511757531137027604 0ustar ryngeryngepackage edu.isi.ikcap.workflows.util.logging; /* * The following is the original copy right from gov.lbl.netlogger.LogMessage * Copyright (c) 2004, The Regents of the University of California, through * Lawrence Berkeley National Laboratory (subject to receipt of any required * approvals from the U.S. Dept. of Energy). All rights reserved. */ import java.text.SimpleDateFormat; import java.util.Date; import java.util.Enumeration; import java.util.GregorianCalendar; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TimeZone; import java.util.concurrent.locks.*; /** * This is a modification of gov.lbl.netlogger.LogMessage * * This class lets you easily construct a set of typed (name, value) pairs * that formats itself as a CEDPS Best Practices log message. *

 * Name and value pairs are added into the record with add() methods
 * which, by virtue of returning the newly modified LogMessage instance,
 * can be chained together.
 * <p>
 * The user can set the timestamp to something other than the
 * time of the call by calling setTimeStamp{Millis,Nanos}() as part of the chain.
 * <p>
 * To format the message, call toString(). The output format is
 * CEDPS "Best Practices" format.
 * <p>
 * Since the addition of the nanosecond timestamp (which is rounded
 * down to microseconds, and no I don't want to discuss it), this class
 * requires Java 1.5.
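 * <p>
 * A minimal (hypothetical) use, going through LogEvent, might be:
 * <pre>
 *   LogEvent ev = new LogEvent("event.wg.loadseed", "wg", "seed.id", "42");
 *   String line = ev.createStartLogMsg().addMsg("loading seed").toString();
 *   // e.g. ts=2012-05-01T10:20:30.000123Z event=event.wg.loadseed.start
 *   //      prog=wg seed.id=42 msg="loading seed"
 * </pre>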

* @author Dan Gunter dkgunter@lbl.gov * @author Wolfgang Hoschek whoschek@lbl.gov * @author Paul Groth pgroth@isi.edu */ public class EventLogMessage { // Variables private final StringBuffer buf = new StringBuffer(256); // set initial capacity for efficiency/memory trade-off private static long micro0, nano0, micro1; private long micro2; // Static Variables private static String timeString = null; private static SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss"); private static final GregorianCalendar calendar = new GregorianCalendar(TimeZone.getTimeZone("UTC")); private static Lock timeStringLock = new ReentrantLock( ); // Constants public static final String APPENDER = "KEYVALUE"; public static final String EVENT_KW = "event"; public static final String DATE_KW = "ts"; public static final String FAKE_DATE = "1999-01-01T11:59:59.999999Z"; private final int dateStart = DATE_KW.length() + 1; private final int dateEnd = dateStart + FAKE_DATE.length() - 8; private final int usecStart = dateEnd + 1; @SuppressWarnings("unused") private final int usecEnd = dateEnd + 7; public static final String LEVEL_KW = "level="; private static final char[] DIGIT = {'0','1','2','3','4','5','6','7','8','9'}; private static Escape escape = new Escape( "\"" , '\\' , false ); /** * Create a new LogMessage at the current time with a given event name. * * The timestamp is set at creation time, but can be changed later * with setTimeStampMillis or setTimeStampNanos. * * @param eventName Name of this logging event. * @see #setTimeStampMillis * @see #setTimeStampNanos */ protected EventLogMessage(String eventName) { add(DATE_KW, FAKE_DATE); add(EVENT_KW, eventName); long nano1 = System.nanoTime(); /* calculate timestamp in microseconds */ micro2 = ( nano1 - nano0 ) / 1000 + micro0; } /** * Add a string. * @return Self-reference, so calls can be chained */ public EventLogMessage add(String key, String value) { buf.append(key); buf.append("="); buf.append(value); buf.append(" "); return this; } /** * Add a string. Strings automatically have quotes around them * and are escaped * @return Self-reference, so calls can be chained */ public EventLogMessage addWQ (String key, String value) { buf.append(key); buf.append("="); buf.append("\""); buf.append(escape.escape(value)); buf.append("\""); buf.append(" "); return this; } /** * Add an int. * @return Self-reference, so calls can be chained */ public EventLogMessage add(String key, int value) { buf.append(key); buf.append("="); buf.append(value); buf.append(" "); return this; } /** * Add a long. * @return Self-reference, so calls can be chained */ public EventLogMessage add(String key, long value) { buf.append(key); buf.append("="); buf.append(value); buf.append(" "); return this; } /** * Add a float. * @return Self-reference, so calls can be chained */ public EventLogMessage add(String key, float value) { buf.append(key); buf.append("="); buf.append(value); buf.append(" "); return this; } /** * Add a double. 
* @return Self-reference, so calls can be chained */ public EventLogMessage add(String key, double value) { buf.append(key); buf.append("="); buf.append(value); buf.append(" "); return this; } /** * Add a key,value pair * The result looks like: key = (pairKey, pairValue) * @param key * @param pairKey * @param pairValue */ public EventLogMessage addPair (String key, String pairKey, String pairValue) { buf.append(key); buf.append("="); buf.append("("); buf.append(pairKey); buf.append(","); buf.append(pairValue); buf.append(")"); buf.append(" "); return this; } public EventLogMessage addTime (String key, long timeInMillis) { buf.append(key); buf.append("="); buf.append(format.format(new Date(timeInMillis))); buf.append(" "); return this; } /** * Add a Map to the buffer. Represents maps as a series of (key, value) in quotes. * The method assumes that both keys and values toString method returns a "nice" string representation. * The method escapes all quotes * @param key * @param map * @return */ public EventLogMessage addMap (String key, Map map) { buf.append(key); buf.append("="); buf.append("\""); StringBuffer ps = new StringBuffer(); Set pairs = map.entrySet(); for (Map.Entry x : pairs) { ps.append("("); ps.append(x.getKey().toString()); if(x.getValue() != null) { ps.append(","); ps.append(x.getValue().toString()); } ps.append(")"); } buf.append(escape.escape(ps.toString())); buf.append("\""); buf.append(" "); return this; } public EventLogMessage addList (String key, List list) { buf.append(key); buf.append("="); buf.append("\""); int size = list.size(); for (int i = 0; i < size; i++) { buf.append(escape.escape(list.get(i).toString())); if (i != size-1) { buf.append(","); } } buf.append("\""); buf.append(" "); return this; } /** * Add an natural language message to the log message. * The string is put in quotes and is escaped * @param msg * @return */ public EventLogMessage addMsg (String msg) { buf.append(LoggingKeys.MSG); buf.append("="); buf.append("\""); buf.append(escape.escape(msg)); buf.append("\""); buf.append(" "); return this; } /** * Set the timestamp from milliseconds * returned by System.currentTimeMillis(). * * @return 'this' so we can chain */ public EventLogMessage setTimeStampMillis(long millis) { micro2 = millis * 1000; return this; } /** * Set the timestamp from nanoseconds * returned by System.nanoTime(). * * @return 'this' so we can chain */ public EventLogMessage setTimeStampNanos(long nano1) { micro2 = ( nano1 - nano0 ) / 1000 + micro0; return this; } /** * Format a message in CEDPS Best Practices format. * * @return Formatted message string * @see CEDPS "Best Practices" format */ public String toString() { if (micro2 > 0) { addTimeStamp(); micro2 = 0; } return buf.toString(); } /** * Add a timestamp to the message. */ private void addTimeStamp() { // re-use or re-set whole seconds if (micro2 / 1000000L != micro1 / 1000000L) { timeStringLock.lock(); timeString = format.format(new Date(micro2 / 1000L)); timeStringLock.unlock(); micro1 = micro2; } buf.replace(dateStart, dateEnd, timeString); // add fractional time (microseconds) long div, frac; int i; frac = micro2 % 1000000L; for (i=0, div = 100000L; i < 6; div = div / 10, i++) { long n = frac / div; buf.setCharAt(usecStart + i, DIGIT[(int)n]); frac -= n * div; } } //=============================================================== // Log4J compatibility (contributed by Wolfgang Hoschek) //=============================================================== /** * Static class initializer. 
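 * <p>
 * In sketch form, the probe performed below is the usual
 * optional-dependency idiom:
 * <pre>
 *   try {
 *     Class.forName("org.apache.log4j.spi.Filter"); // is log4j on the classpath?
 *     Log4jFilter.init();                           // yes: hook in the filter
 *   } catch (ClassNotFoundException e) {
 *     // no: continue without log4j
 *   }
 * </pre>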
* * Make it so that log4j.jar is a compile time requirement, * but not a runtime requirement */ static { try { // check if log4j is present Class.forName("org.apache.log4j.spi.Filter"); // executed only if log4j is present Log4jFilter.init(); } catch (ClassNotFoundException e) { // This warning might mess up daemon processes, // so it's commented out by default System.err.println( "Warning: Cannot find log4j " + "(org.apache.log4j.spi.Filter), " + "continuing.."); } // set calendar of formatter: otherwise no UTC!! format.setCalendar(calendar); // init base nanosecond and millisecond time long ms = System.currentTimeMillis(); nano0 = System.nanoTime(); micro0 = micro1 = ms * 1000L; timeString = format.format(new Date(ms)); } /** * In log4j, ignore all messages not specifically directed * at this appender. */ private static final class Log4jFilter extends org.apache.log4j.spi.Filter { public static void init() { Enumeration loggers = org.apache.log4j.Logger .getRootLogger() .getLoggerRepository() .getCurrentLoggers(); while ( loggers.hasMoreElements() ) { org.apache.log4j.Logger logger = (org.apache.log4j.Logger) loggers.nextElement(); if (logger.getAppender(APPENDER) != null) { logger.getAppender(APPENDER).addFilter(new Log4jFilter()); } } } public int decide(org.apache.log4j.spi.LoggingEvent event) { if (event.getMessage() instanceof EventLogMessage) { return NEUTRAL; // let message pass through } else { return DENY; // ignore all non-netlogger messages } } } }pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/0000755000175000017500000000000011757531667017643 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/0000755000175000017500000000000011757531667021302 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/0000755000175000017500000000000011757531667022763 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/Horizontal.java0000644000175000017500000010626511757531137025761 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.cluster; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PCRelation; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.cluster.aggregator.JobAggregatorInstanceFactory; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.partitioner.Partition; import edu.isi.pegasus.planner.provenance.pasoa.XMLProducer; import edu.isi.pegasus.planner.provenance.pasoa.producer.XMLProducerFactory; import edu.isi.pegasus.planner.provenance.pasoa.PPS; import edu.isi.pegasus.planner.provenance.pasoa.pps.PPSFactory; import java.util.Collections; import java.util.List; import java.util.ArrayList; import java.util.LinkedList; import java.util.Map; import java.util.HashMap; import java.util.Iterator; import java.util.Comparator; import java.util.Set; import java.util.StringTokenizer; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The horizontal clusterer, that clusters jobs on the same level. * * @author Karan Vahi * @version $Revision: 4687 $ */ public class Horizontal implements Clusterer, edu.isi.pegasus.planner.refiner.Refiner{//reqd for PASOA integration /** * The default collapse factor for collapsing jobs with same logical name * scheduled onto the same execution pool. */ public static final int DEFAULT_COLLAPSE_FACTOR = 1; /** * A short description about the partitioner. */ public static final String DESCRIPTION = "Horizontal Clustering"; /** * A singleton access to the job comparator. */ private static Comparator mJobComparator = null; /** * The handle to the logger object. */ protected LogManager mLogger; /** * The handle to the properties object holding all the properties. */ protected PegasusProperties mProps; /** * The handle to the job aggregator factory. */ protected JobAggregatorInstanceFactory mJobAggregatorFactory; /** * ADag object containing the jobs that have been scheduled by the site * selector. */ private ADag mScheduledDAG; /** * Map to hold the jobs sorted by the label of jobs in dax. * The key is the logical job name and value is the list of jobs with that * logical name. * * This no longer used, and would be removed later. */ private Map mJobMap; /** * A Map to store all the job(Job) objects indexed by their logical ID found in * the dax. This should actually be in the ADag structure. */ private Map mSubInfoMap; /** * Map to hold the collapse values for the various execution pools. The * values are gotten from the properties file or can be gotten from the * resource information catalog a.k.a MDS. */ private Map mCollapseMap; /** * Replacement table, that identifies the corresponding fat job for a job. */ private Map mReplacementTable; /** * The XML Producer object that records the actions. */ private XMLProducer mXMLStore; /** * The handle to the provenance store implementation. */ private PPS mPPS; /** * Singleton access to the job comparator. * * @return the job comparator. */ private Comparator jobComparator(){ return (mJobComparator == null)? new JobComparator(): mJobComparator; } /** * The default constructor. */ public Horizontal(){ mLogger = LogManagerFactory.loadSingletonInstance(); mJobAggregatorFactory = new JobAggregatorInstanceFactory(); } /** * Returns a reference to the workflow that is being refined by the refiner. 
* * * @return ADAG object. */ public ADag getWorkflow(){ return this.mScheduledDAG; } /** * Returns a reference to the XMLProducer, that generates the XML fragment * capturing the actions of the refiner. This is used for provenace * purposes. * * @return XMLProducer */ public XMLProducer getXMLProducer(){ return this.mXMLStore; } /** *Initializes the Clusterer impelementation * * @param dag the workflow that is being clustered. * @param bag the bag of objects that is useful for initialization. * * @throws ClustererException in case of error. */ public void initialize( ADag dag , PegasusBag bag ) throws ClustererException{ mScheduledDAG = dag; mProps = bag.getPegasusProperties(); mJobAggregatorFactory.initialize( dag, bag ); mJobMap = new HashMap(); mCollapseMap = this.constructMap(mProps.getCollapseFactors()); mReplacementTable = new HashMap(); mSubInfoMap = new HashMap(); for(Iterator it = mScheduledDAG.vJobSubInfos.iterator();it.hasNext();){ //pass the jobs to the callback Job job = (Job)it.next(); mSubInfoMap.put(job.getLogicalID(), job ); } //load the PPS implementation mXMLStore = XMLProducerFactory.loadXMLProducer( mProps ); mPPS = PPSFactory.loadPPS( this.mProps ); mXMLStore.add( "" ); //call the begin workflow method try{ mPPS.beginWorkflowRefinementStep( this, PPS.REFINEMENT_CLUSTER, false ); } catch( Exception e ){ throw new ClustererException( "PASOA Exception", e ); } //clear the XML store mXMLStore.clear(); } /** * Determine the clusters for a partition. The partition is assumed to * contain independant jobs, and multiple clusters maybe created for the * partition. Internally the jobs are grouped according to transformation name * and then according to the execution site. Each group * (having same transformation name and scheduled on same site), is then * clustered. * The number of clustered jobs created for each group is dependant on the * following Pegasus profiles that can be associated with the jobs. *
     *       1) bundle   (dictates the number of clustered jobs that are created)
     *       2) collapse (the number of jobs that make a single clustered job)
     * 
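     * For example (hypothetical numbers): for 10 jobs of one transformation
     * scheduled on the same site, bundle=4 creates four clustered jobs of
     * sizes 3, 3, 2 and 2, whereas collapse=3 creates chunks of sizes
     * 3, 3, 3 and 1.
     *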
* * In case of both parameters being associated with the jobs in a group, the * bundle parameter overrides collapse parameter. * * @param partition the partition for which the clusters need to be * determined. * * @throws ClustererException in case of error. * * @see Pegasus#BUNDLE_KEY * @see Pegasus#COLLAPSE_KEY */ public void determineClusters( Partition partition ) throws ClustererException { Set s = partition.getNodeIDs(); List l = new ArrayList(s.size()); mLogger.log("Collapsing jobs in partition " + partition.getID() + " " + s, LogManager.DEBUG_MESSAGE_LEVEL); for(Iterator it = s.iterator();it.hasNext();){ Job job = (Job)mSubInfoMap.get(it.next()); l.add(job); } //group the jobs by their transformation names Collections.sort( l, jobComparator() ); //traverse through the list and collapse jobs //referring to same logical transformation Job previous = null; List clusterList = new LinkedList(); Job job = null; for(Iterator it = l.iterator();it.hasNext();){ job = (Job)it.next(); if(previous == null || job.getCompleteTCName().equals(previous.getCompleteTCName())){ clusterList.add(job); } else{ //at boundary collapse jobs collapseJobs(previous.getStagedExecutableBaseName(),clusterList,partition.getID()); clusterList = new LinkedList(); clusterList.add(job); } previous = job; } //cluster the last clusterList if(previous != null){ collapseJobs(previous.getStagedExecutableBaseName(), clusterList, partition.getID()); } } /** * Am empty implementation of the callout, as state is maintained * internally to determine the relations between the jobs. * * @param partitionID the id of a partition. * @param parents the list of String objects that contain * the id's of the parents of the partition. * * @throws ClustererException in case of error. */ public void parents( String partitionID, List parents ) throws ClustererException{ } /** * Collapses the jobs having the same logical name according to the sites * where they are scheduled. * * @param name the logical name of the jobs in the list passed to * this function. * @param jobs the list Job objects corresponding * to the jobs that have the same logical name. * @param partitionID the ID of the partition to which the jobs belong. */ private void collapseJobs( String name, List jobs, String partitionID ){ String key = null; Job job = null; List l = null; //internal map that keeps the jobs according to the execution pool Map tempMap = new java.util.HashMap(); int[] cFactor = new int[] {0, 0, 0, 0}; //the collapse factor for collapsing the jobs AggregatedJob fatJob = null; mLogger.log("Collapsing jobs of type " + name, LogManager.DEBUG_MESSAGE_LEVEL); //traverse through all the jobs and order them by the //pool on which they are scheduled for(Iterator it = jobs.iterator();it.hasNext();){ job = (Job)it.next(); key = job.executionPool; //check if the job logical name is already in the map if(tempMap.containsKey(key)){ //add the job to the corresponding list. l = (List)tempMap.get(key); l.add(job); } else{ //first instance of this logical name l = new java.util.LinkedList(); l.add(job); tempMap.put(key,l); } } //iterate through the built up temp map to get jobs per execution pool String factor = null; int size = -1; //the id for the fatjobs. we want ids //unique across the execution pools for a //particular type of job being merged. 
int id = 1; for( Iterator it = tempMap.entrySet().iterator();it.hasNext(); ){ Map.Entry entry = (Map.Entry)it.next(); l = (List)entry.getValue(); size= l.size(); //the pool name on which the job is to run is the key key = (String)entry.getKey(); if( size <= 1 ){ //no need to collapse one job. go to the next iteration mLogger.log("\t No collapsing for execution pool " + key, LogManager.DEBUG_MESSAGE_LEVEL); continue; } JobAggregator aggregator = mJobAggregatorFactory.loadInstance( (Job)l.get(0) ); if(aggregator.entryNotInTC(key)){ //no need to collapse one job. go to the next iteration mLogger.log("\t No collapsing for execution pool because job aggregator entry not in tc " + key, LogManager.DEBUG_MESSAGE_LEVEL); continue; } //checks made ensure that l is not empty at this point cFactor = getCollapseFactor( key, (Job) l.get(0), size ); if( cFactor[0] == 1 && cFactor[1] == 0 ){ mLogger.log("\t Collapse factor of (" + cFactor[0] + "," + cFactor[1] + ") determined for pool. " + key + ". Skipping collapsing", LogManager.DEBUG_MESSAGE_LEVEL); continue; } if (mProps.getHorizontalClusterPreference() != null && mProps.getHorizontalClusterPreference() .equalsIgnoreCase( "runtime" )) { double maxRunTime = -1; try { maxRunTime = Double .parseDouble( (String) ((Job) l.get( 0 )).vdsNS .get( Pegasus.MAX_RUN_TIME ) ); } catch (RuntimeException e) { throw new RuntimeException( "Profile key " + Pegasus.MAX_RUN_TIME + " is either not set, or is not a valid number.", e ); } mLogger.log( "\t Collapsing jobs at execution pool " + key + " having maximum run time " + cFactor[2], LogManager.DEBUG_MESSAGE_LEVEL ); Collections.sort( l, getBinPackingComparator() ); mLogger.log( "Job Type: " + ((Job) l.get( 0 )).getCompleteTCName() + " max runtime " + maxRunTime, LogManager.DEBUG_MESSAGE_LEVEL ); List> bins = bestFitBinPack( l, maxRunTime ); mLogger.log( "Jobs are merged into " + bins.size() + " clustered jobs.", LogManager.DEBUG_MESSAGE_LEVEL ); for (List bin : bins) { fatJob = aggregator.constructAbstractAggregatedJob( bin, name, constructID( partitionID, id ) ); updateReplacementTable( bin, fatJob ); // increment the id id++; // add the fat job to the dag // use the method to add, else add explicitly to DagInfo mScheduledDAG.add( fatJob ); // log the refiner action capturing the creation of the job this.logRefinerAction( fatJob, aggregator ); } tempMap = null; return; } //we do collapsing in chunks of 3 instead of picking up //from the properties file. ceiling is (x + y -1)/y //cFactor = (size + 2)/3; else { mLogger.log( "\t Collapsing jobs at execution pool " + key + " with collapse factor " + cFactor[0] + "," + cFactor[1], LogManager.DEBUG_MESSAGE_LEVEL ); if (cFactor[0] >= size) { // means collapse all the jobs in the list as a fat node // Note: Passing a link to iterator might be more efficient, // as // this would only require a single traversal through the // list fatJob = aggregator.constructAbstractAggregatedJob( l.subList( 0, size ), name, constructID( partitionID, id ) ); updateReplacementTable( l.subList( 0, size ), fatJob ); // increment the id id++; // add the fat job to the dag // use the method to add, else add explicitly to DagInfo mScheduledDAG.add( fatJob ); // log the refiner action capturing the creation of the job this.logRefinerAction( fatJob, aggregator ); } else { // do collapsing in chunks of cFactor int increment = 0; for (int i = 0; i < size; i = i + increment) { // compute the increment and decrement cFactor[1] increment = (cFactor[1] > 0) ? 
cFactor[0] + 1 : cFactor[0]; cFactor[1]--; if (increment == 1) { // we can exit out of the loop as we do not want // any merging for single jobs break; } else if ((i + increment) < size) { fatJob = aggregator.constructAbstractAggregatedJob( l.subList( i, i + increment ), name, constructID( partitionID, id ) ); updateReplacementTable( l.subList( i, i + increment ), fatJob ); } else { fatJob = aggregator.constructAbstractAggregatedJob( l.subList( i, size ), name, constructID( partitionID, id ) ); updateReplacementTable( l.subList( i, size ), fatJob ); } // increment the id id++; // add the fat job to the dag // use the method to add, else add explicitly to DagInfo mScheduledDAG.add( fatJob ); // log the refiner action capturing the creation of the // job this.logRefinerAction( fatJob, aggregator ); } } } } //explicitly free the map tempMap = null; } /** * Perform best fit bin packing. * * @param jobs * List of jobs sorted in decreasing order of the job runtime. * @param maxTime * The maximum time for which the clustered job should run. * @return List of List of Jobs where each List is the set of jobs * which should be clustered together so as to run in under maxTime. */ private List<List<Job>> bestFitBinPack(List<Job> jobs, double maxTime) { List<List<Job>> bins = new LinkedList<List<Job>>(); List<List<Job>> returnBins = new LinkedList<List<Job>>(); List<Double> binTime = new LinkedList<Double>(); double minJobRunTime = Double.MAX_VALUE; if (jobs != null && jobs.size() > 0) { minJobRunTime = Double .parseDouble( (String) jobs.get( jobs.size() - 1 ).vdsNS .get( Pegasus.JOB_RUN_TIME ) ); } for (Job j : jobs) { List<Job> bin; double currentBinTime; boolean isBreak = false; double jobRunTime = Double.parseDouble( (String) j.vdsNS .get( Pegasus.JOB_RUN_TIME ) ); mLogger.log( "Job " + j.getID() + " runtime " + jobRunTime, LogManager.DEBUG_MESSAGE_LEVEL ); // Create first bin. if (bins.size() == 0) { bins.add( new LinkedList<Job>() ); binTime.add( 0, 0d ); } // Loop through each job. for (int i = 0, k = bins.size(); i < k; ++i) { currentBinTime = binTime.get( i ); // Is the job runtime greater than the max allowed runtime? Then // do not cluster this job. if (maxTime < jobRunTime) { mLogger.log( "Job " + j.getID() + " runtime " + jobRunTime + " is greater than clusters max run time " + maxTime + " specified by the Pegasus profile " + Pegasus.MAX_RUN_TIME, LogManager.DEBUG_MESSAGE_LEVEL ); break; } // Can we fit the job in an existing bin? if (maxTime >= currentBinTime + jobRunTime) { bin = bins.get( i ); bin.add( j ); binTime.set( i, currentBinTime + jobRunTime ); isBreak = true; } else if (i == k - 1) { // We cannot fit the job in any of the open bins, so create // a new one. bin = new LinkedList<Job>(); bin.add( j ); bins.add( bin ); binTime.add( binTime.size(), jobRunTime ); } // Either this bin is full, or it does not even have space to // fit the job with the smallest run time. So let's avoid trying // to fit jobs in this bin. if (binTime.get( i ) + minJobRunTime > maxTime) { returnBins.add( bins.remove( i ) ); binTime.remove( i ); } // Job has been assigned a bin, no need to check other bins for // space. if (isBreak) break; } } returnBins.addAll( bins ); return returnBins; } /** * The comparator is used to sort a collection of jobs in decreasing order * of their run times as specified by the Pegasus.JOB_RUN_TIME property.
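     * <p>
     * For example, jobs with run times 10, 30 and 20 seconds sort as
     * 30, 20, 10. Note that the comparator truncates the difference to an
     * int, so run times that differ by less than one second compare as
     * equal.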
* * @return */ private Comparator getBinPackingComparator() { return new Comparator() { @Override public int compare(Job job1, Job job2) { String s1 = (String) job1.vdsNS.get( Pegasus.JOB_RUN_TIME ); String s2 = (String) job2.vdsNS.get( Pegasus.JOB_RUN_TIME ); if (s1 == null || s1.length() == 0) throw new RuntimeException( "Profile Key: " + Pegasus.JOB_RUN_TIME + " is not set for the job " + job1.getID() ); if (s2 == null || s2.length() == 0) throw new RuntimeException( "Profile Key: " + Pegasus.JOB_RUN_TIME + " is not set for the job " + job2.getID() ); double jobTime1 = Double.parseDouble( s1 ); double jobTime2 = Double.parseDouble( s2 ); return (int) (jobTime2 - jobTime1); } }; } /** * Returns the clustered workflow. * * @return the ADag object corresponding to the clustered workflow. * * @throws ClustererException in case of error. */ public ADag getClusteredDAG() throws ClustererException{ //do all the replacement of jobs in the main data structure //that needs to be returned replaceJobs(); //should be in the done method. which is currently not htere in the //Clusterer API try{ mPPS.endWorkflowRefinementStep( this ); } catch( Exception e ){ throw new ClustererException( "PASOA Exception while logging end of clustering refinement", e ); } return mScheduledDAG; } /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String description(){ return this.DESCRIPTION; } /** * Records the refiner action into the Provenace Store as a XML fragment. * * @param clusteredJob the clustered job * @param aggregator the aggregator that was used to create this clustered job */ protected void logRefinerAction( AggregatedJob clusteredJob, JobAggregator aggregator ){ StringBuffer sb = new StringBuffer(); String indent = "\t"; sb.append( indent ); sb.append( "" ).append( "\n" ); //traverse through all the files String newIndent = indent + "\t"; List jobs = new ArrayList(); for( Iterator it = clusteredJob.constituentJobsIterator(); it.hasNext(); ){ Job job = ( Job )it.next(); jobs.add( job.getName() ); sb.append( newIndent ); sb.append( "" ); sb.append( "\n" ); } sb.append( indent ); sb.append( "" ); sb.append( "\n" ); //log the action for creating the relationship assertions try{ mPPS.clusteringOf( clusteredJob.getName(), jobs ); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception while logging relationship assertion for clustering ", e ); } mXMLStore.add( sb.toString() ); } /** * Appends an xml attribute to the xml feed. * * @param xmlFeed the xmlFeed to which xml is being written * @param key the attribute key * @param value the attribute value */ protected void appendAttribute( StringBuffer xmlFeed, String key, String value ){ xmlFeed.append( key ).append( "=" ).append( "\"" ).append( value ). append( "\" " ); } /** * A callback that triggers the collapsing of a partition/level of a graph. * * @param partition the partition that needs to be collapsed. 
* */ /* private void collapseJobs(Partition partition){ Set s = partition.getNodeIDs(); List l = new ArrayList(s.size()); mLogger.log("Collapsing jobs in partition " + partition.getID() + " " + s, LogManager.DEBUG_MESSAGE_LEVEL); for(Iterator it = s.iterator();it.hasNext();){ Job job = (Job)mSubInfoMap.get(it.next()); l.add(job); } //group the jobs by their transformation names Collections.sort(l,jobComparator()); //traverse through the list and collapse jobs //referring to same logical transformation Job previous = null; List clusterList = new LinkedList(); Job job = null; for(Iterator it = l.iterator();it.hasNext();){ job = (Job)it.next(); if(previous == null || job.getCompleteTCName().equals(previous.getCompleteTCName())){ clusterList.add(job); } else{ //at boundary collapse jobs collapseJobs(previous.getStagedExecutableBaseName(),clusterList,partition.getID()); clusterList = new LinkedList(); clusterList.add(job); } previous = job; } //cluster the last clusterList if(previous != null){ collapseJobs(previous.getStagedExecutableBaseName(), clusterList, partition.getID()); } //collapse the jobs in list l // collapseJobs(job.logicalName,l,partition.getID()); } */ /** * Returns the collapse factor, that is used to chunk up the jobs of a * particular type on a pool. The collapse factor is determined by * getting the collapse key in the Pegasus namespace/profile associated with the * job in the transformation catalog. Right now tc overrides the property * from the one in the properties file that specifies per pool. * There are two orthogonal notions of bundling and collapsing. In case the * bundle key is specified, it ends up overriding the collapse key, and * the bundle value is used to generate the collapse values. * * @param pool the pool where the chunking up is occuring * @param job the Job object containing the job that * is to be chunked up together. * @param size the number of jobs that refer to the same logical * transformation and are scheduled on the same execution pool. * * @return int array of size 4 where int[0] is the the collapse factor * int[1] is the number of jobs for whom collapsing is int[0] + 1. * int [2] is maximum time for which the clusterd job should run. * int [3] is time for which the single job would run. */ public int[] getCollapseFactor(String pool, Job job, int size) { String factor = null; int result[] = new int[] { 0, 0, 0, 0 }; // the job should have the collapse key from the TC if // by the user specified factor = (String) job.vdsNS.get( Pegasus.COLLAPSE_KEY ); // ceiling is (x + y -1)/y String bundle = (String) job.vdsNS.get( Pegasus.BUNDLE_KEY ); if (bundle != null) { int b = Integer.parseInt( bundle ); result[0] = size / b; result[1] = size % b; return result; // doing no boundary condition checks // return (size + b -1)/b; } String runTime = (String) job.vdsNS.get( Pegasus.JOB_RUN_TIME ); String clusterTime = (String) job.vdsNS.get( Pegasus.MAX_RUN_TIME ); // return the appropriate value result[0] = (factor == null) ? ((factor = (String) mCollapseMap .get( pool )) == null) ? this.DEFAULT_COLLAPSE_FACTOR : // the // default // value Integer.parseInt( factor )// use the value in the prop file : // return the value found in the TC Integer.parseInt( factor ); result[2] = clusterTime == null || clusterTime.length() == 0 ? 0 : Integer.parseInt( clusterTime ); result[3] = runTime == null || runTime.length() == 0 ? 0 : Integer .parseInt( runTime ); return result; } /** * Given an integer id, returns a string id that is used for the clustered * job. 
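     * For example, constructID("1", 2) yields "P1_ID2".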
* * @param partitionID the id of the partition. * @param id the integer id from which the string id has to be * constructed. The id should be unique for all the * clustered jobs that are formed for a particular * partition. * * @return the id of the clustered job */ public String constructID(String partitionID, int id){ StringBuffer sb = new StringBuffer(8); sb.append("P").append(partitionID).append("_"); sb.append("ID").append(id); return sb.toString(); } /** * Updates the replacement table. * * @param jobs the List of jobs that is being replaced. * @param mergedJob the mergedJob that is replacing the jobs in the list. */ private void updateReplacementTable(List jobs, Job mergedJob){ if(jobs == null || jobs.isEmpty()) return; String mergedJobName = mergedJob.jobName; for(Iterator it = jobs.iterator();it.hasNext();){ Job job = (Job)it.next(); //put the entry in the replacement table mReplacementTable.put(job.jobName,mergedJobName); } } /** * Puts the jobs in the abstract workflow into the job that is index * by the logical name of the jobs. */ private void assimilateJobs(){ Iterator it = mScheduledDAG.vJobSubInfos.iterator(); Job job = null; List l = null; String key = null; while(it.hasNext()){ job = (Job)it.next(); key = job.logicalName; //check if the job logical name is already in the map if(mJobMap.containsKey(key)){ //add the job to the corresponding list. l = (List)mJobMap.get(key); l.add(job); } else{ //first instance of this logical name l = new java.util.LinkedList(); l.add(job); mJobMap.put(key,l); } } } /** * Constructs a map with the numbers/values for the collapsing factors to * collapse the nodes of same type. The user ends up specifying these through * the properties file. The value of the property is of the form * poolname1=value,poolname2=value.... * * @param propValue the value of the property got from the properties file. * * @return the constructed map. */ private Map constructMap(String propValue) { Map map = new java.util.TreeMap(); if (propValue != null) { StringTokenizer st = new StringTokenizer(propValue, ","); while (st.hasMoreTokens()) { String raw = st.nextToken(); int pos = raw.indexOf('='); if (pos > 0) { map.put(raw.substring(0, pos).trim(), raw.substring(pos + 1).trim()); } } } return map; } /** * The relations/edges are changed in local graph structure. 
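     * <p>
     * For example, if jobs j1 and j2 (hypothetical names) were clustered
     * into merged_1, the replacement table maps both j1 and j2 to merged_1;
     * the two original jobs are removed from the workflow, an edge
     * (parent, j1) is rewritten as (parent, merged_1), and duplicate edges
     * produced by the rewrite are dropped.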
     */
    private void replaceJobs(){
        boolean val = false;
        List l  = null;
        List nl = null;
        Job sub = new Job();
        String msg;

        for( Iterator it = mReplacementTable.entrySet().iterator(); it.hasNext(); ){
            Map.Entry entry = (Map.Entry)it.next();
            String key = (String)entry.getKey();
            mLogger.log("Replacing job " + key + " with " + entry.getValue(),
                        LogManager.DEBUG_MESSAGE_LEVEL);

            //remove the old job
            //remove by just creating a subinfo object with the same key
            sub.jobName = key;
            sub.setJobType( Job.COMPUTE_JOB );
            val = mScheduledDAG.remove(sub);
            if(val == false){
                throw new RuntimeException("Removal of job " + key +
                                           " while clustering not successful");
            }
        }
        mLogger.log("All clustered jobs removed from the workflow",
                    LogManager.DEBUG_MESSAGE_LEVEL);

        //Set mergedEdges = new java.util.HashSet();
        //temporary: use a List till the hash-based Set is sorted out correctly
        List mergedEdges = new java.util.ArrayList(mScheduledDAG.vJobSubInfos.size());

        //traverse the edges and do appropriate replacements
        String parent = null;
        String child  = null;
        String value  = null;
        for( Iterator it = mScheduledDAG.dagInfo.relations.iterator(); it.hasNext(); ){
            PCRelation rel = (PCRelation)it.next();
            //replace the parent and child if there is a need
            parent = rel.parent;
            child  = rel.child;
            msg = ("\n Replacing " + rel);

            value = (String)mReplacementTable.get(parent);
            if(value != null){
                rel.parent = value;
            }
            value = (String)mReplacementTable.get(child);
            if(value != null){
                rel.child = value;
            }
            msg += (" with " + rel);

            //put in the merged edges set
            if(!mergedEdges.contains(rel)){
                val = mergedEdges.add(rel);
                msg += "Add to set : " + val;
            }
            else{
                msg += "\t Duplicate Entry for " + rel;
            }
            mLogger.log( msg, LogManager.DEBUG_MESSAGE_LEVEL );
        }

        //the final edges need to be updated
        mScheduledDAG.dagInfo.relations = null;
        mScheduledDAG.dagInfo.relations = new java.util.Vector(mergedEdges);
    }

    /**
     * A utility method to print a short description of the jobs in a list.
     *
     * @param l the list of Job objects
     */
    private void printList(List l){
        for(Iterator it = l.iterator();it.hasNext();){
            Job job = (Job)it.next();
            System.out.print( " " + /*job.getCompleteTCName() +*/
                              "[" + job.logicalId + "]");
        }
    }

    /**
     * A job comparator that allows comparison of jobs according to their
     * transformation names. It is applied to group jobs in a particular
     * partition, according to the underlying transformation that is
     * referred to.
     *

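     * <p>
     * Typical use (illustrative): <code>Collections.sort( jobs, new JobComparator() )</code>
     * places jobs that refer to the same transformation next to one another,
     * so cluster boundaries fall where the transformation name changes.
     *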
     * This comparator is not consistent with the Job.equals(Object) method.
     * Hence, it should not be used in sorted sets or maps.
     */
    private class JobComparator implements Comparator{

        /**
         * Compares this object with the specified object for order. Returns a
         * negative integer, zero, or a positive integer if the first argument
         * is less than, equal to, or greater than the specified object. The
         * jobs are compared by their transformation names.
         *
         * This implementation is not consistent with the
         * Job.equals(Object) method. Hence, it should not be used in sorted
         * sets or maps.
         *
         * @param o1 is the first object to be compared.
         * @param o2 is the second object to be compared.
         *
         * @return a negative number, zero, or a positive number if the first
         *         object is less than, equal to, or greater than the second
         *         object.
         * @exception ClassCastException if the specified object's type
         *            prevents it from being compared to this Object.
         */
        public int compare(Object o1, Object o2) {
            if (o1 instanceof Job && o2 instanceof Job) {
                return ( (Job) o1).getCompleteTCName().compareTo(
                       ( (Job) o2).getCompleteTCName());
            }
            else {
                throw new ClassCastException("Objects being compared are not Job");
            }
        }
    }
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/Abstract.java0000644000175000017500000003057411757531137025372 0ustar ryngerynge/**
 *  Copyright 2007-2008 University Of Southern California
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package edu.isi.pegasus.planner.cluster;

import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.Job;
import edu.isi.pegasus.planner.classes.AggregatedJob;
import edu.isi.pegasus.planner.classes.PCRelation;
import edu.isi.pegasus.planner.common.PegasusProperties;
import edu.isi.pegasus.common.logging.LogManager;
import edu.isi.pegasus.planner.cluster.JobAggregator;
import edu.isi.pegasus.planner.cluster.aggregator.JobAggregatorInstanceFactory;
import edu.isi.pegasus.planner.partitioner.Partition;

import java.util.Collection;
import java.util.Vector;
import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Set;
import edu.isi.pegasus.planner.classes.PegasusBag;

/**
 * An abstract clusterer that the other clusterers can extend. The abstract
 * implementation treats each partition as a single cluster. It has callouts
 * to determine the ordering of the jobs in the cluster, and the input/output
 * files for the clustered jobs.
 *
 * @author Karan Vahi
 * @version $Revision: 3342 $
 */
public abstract class Abstract implements Clusterer {

    /**
     * A Map to store all the job (Job) objects indexed by their logical ID found in
     * the dax. This should actually be in the ADag structure.
     */
    protected Map mSubInfoMap;

    /**
     * A Map that indexes the partition ID to the name of the clustered job.
     */
    protected Map mPartitionClusterMap;

    /**
     * The handle to the logger object.
     */
    protected LogManager mLogger;

    /**
     * The handle to the properties object holding all the properties.
     */
    protected PegasusProperties mProps;

    /**
     * The handle to the job aggregator factory.
     */
    protected JobAggregatorInstanceFactory mJobAggregatorFactory;

    /**
     * The collection of relations that is constructed for the clustered
     * workflow.
     */
    protected Collection mClusteredRelations;

    /**
     * ADag object containing the jobs that have been scheduled by the site
     * selector.
     */
    protected ADag mScheduledDAG;

    /**
     * The Abstract constructor.
     */
    public Abstract(){
        //mLogger = LogManager.getInstance();
        mJobAggregatorFactory = new JobAggregatorInstanceFactory();
    }

    /**
     * Returns the nodes in the partition as a List in a particular order.
     * The iterator of the list returns the nodes in the order determined by
     * the clusterer.
     *
     * @param p the partition whose nodes have to be ordered.
     *
     * @return an ordered List of String objects that are the IDs
     *         of the nodes.
     *
     * @throws ClustererException in case of error.
     */
    public abstract List order( Partition p ) throws ClustererException;

    /**
     * Determines the input and output files of the job on the basis of the
     * order of the constituent jobs in the AggregatedJob.
     *
     * @param job the AggregatedJob
     *
     * @throws ClustererException in case of error.
     */
    public abstract void determineInputOutputFiles( AggregatedJob job );
    /*{
        //by default we do not care about order
        List l = new ArrayList( p.getNodeIDs().size() );
        for( Iterator it = p.getNodeIDs().iterator(); it.hasNext();){
            l.add( it.next() );
        }
        return l;
    }
    */

    /**
     * Initializes the Clusterer implementation.
     *
     * @param dag the workflow that is being clustered.
     * @param bag the bag of objects that is useful for initialization.
     *
     * @throws ClustererException in case of error.
     */
    public void initialize( ADag dag , PegasusBag bag )
        throws ClustererException{

        mLogger = bag.getLogger();
        mScheduledDAG = dag;
        mProps = bag.getPegasusProperties();
        mJobAggregatorFactory.initialize( dag, bag );

        mClusteredRelations = new Vector( dag.dagInfo.relations.size()/2 );
        mSubInfoMap = new HashMap( dag.vJobSubInfos.size() );
        mPartitionClusterMap = new HashMap();

        for(Iterator it = mScheduledDAG.vJobSubInfos.iterator();it.hasNext();){
            Job job = (Job)it.next();
            addJob( job );
        }
    }

    /**
     * It creates a single clustered job for the partition. If there is only
     * one job in the partition, then no clustering happens.
     *
     * @param partition the partition for which the clusters need to be
     *                  determined.
     *
     * @throws ClustererException if the clustering executable is not installed
     *         on the remote site or if all the jobs in the partition are not
     *         scheduled on the same site.
     */
    public void determineClusters( Partition partition ) throws ClustererException {
        String pID = partition.getID();
        //do the ordering on the partition as required.
        List nodes = order( partition );
        List l = new ArrayList( nodes.size() );
        mLogger.log( "Clustering jobs in partition " + pID + " " + nodes,
                     LogManager.DEBUG_MESSAGE_LEVEL);

        String prevSite = null;
        String currSite = null;
        for( Iterator it = nodes.iterator(); it.hasNext(); ){
            Job job = ( Job )mSubInfoMap.get( it.next() );
            currSite = job.getSiteHandle();
            l.add( job );

            //sanity check to ensure jobs are scheduled on the same site.
            if( prevSite == null || currSite.equals( prevSite) ){
                prevSite = currSite;
                continue;
            }
            else{
                throw new ClustererException("Jobs in the partition " +
                                             partition.getID() +
                                             " not scheduled on the same site!");
            }
        }

        int size = l.size();
        Job firstJob = (Job)l.get(0);
        // System.out.println( " Job to be clustered is " + firstJob);

        if(size == 1){
            //no need to collapse one job;
            //go to the next iteration
            mLogger.log("\t No clustering for partition " + pID,
                        LogManager.DEBUG_MESSAGE_LEVEL);
            associate( partition, firstJob );
            return;
        }

        //load the job aggregator appropriate for the jobs in the partition
        JobAggregator aggregator = mJobAggregatorFactory.loadInstance( firstJob );
        if( aggregator.entryNotInTC( currSite ) ){
            throw new ClustererException
                ("No installed aggregator executable found for partition " +
                 pID + " at site " + currSite );
        }

        AggregatedJob clusteredJob = aggregator.constructAbstractAggregatedJob(
                                         l,
                                         getLogicalNameForJobs( l ),//firstJob.getStagedExecutableBaseName(),
                                         this.constructClusteredJobID( partition ) );

        //replace the jobs in the partition with the clustered job
        //in the original workflow
        for( Iterator it = l.iterator(); it.hasNext(); ){
            Job job = (Job)it.next();
            mLogger.log("Replacing job " + job.getName() + " with " +
                        clusteredJob.getName(),
                        LogManager.DEBUG_MESSAGE_LEVEL);

            //remove the old job
            if( !mScheduledDAG.remove( job ) ){
                String msg = "Removal of job " + job.getName() +
                             " while clustering not successful";
                throw new ClustererException( msg );
            }
        }

        //get the correct input and output files for the job
        this.determineInputOutputFiles( clusteredJob );
        //System.out.println(" Clustered Job is " + clusteredJob );

        mScheduledDAG.add( clusteredJob );
        associate( partition, clusteredJob );
    }

    /**
     * Returns the logical name for the jobs. It picks the first job in the
     * list and uses that to construct the name.
     *
     * @param jobs List of jobs
     *
     * @return name
     */
    protected String getLogicalNameForJobs( List jobs ){
        Job firstJob = (Job)jobs.get(0);
        return firstJob.getStagedExecutableBaseName();
    }

    /**
     * Associates the relations between the partitions with the corresponding
     * relations between the clustered jobs that are created for each Partition.
     *
     * @param partitionID the id of a partition.
     * @param parents     the list of String objects that contain
     *                    the ids of the parents of the partition.
     *
     * @throws ClustererException in case of a clustered job not being found
     *         for a partition.
     */
    public void parents( String partitionID, List parents ) throws ClustererException{
        String error = "No cluster job for partition ";
        Job clusteredNode = clusteredJob( partitionID );
        Job parentClusteredNode;

        //throw error if not found
        if( clusteredNode == null){
            throw new ClustererException( error + partitionID);
        }

        for( Iterator it = parents.iterator(); it.hasNext(); ){
            String parent = (String)it.next();
            parentClusteredNode = clusteredJob( parent );

            //throw error if not found
            if( parentClusteredNode == null){
                throw new ClustererException( error + parent);
            }

            //add a relation between these clustered jobs
            mClusteredRelations.add( new PCRelation( parentClusteredNode.getName(),
                                                     clusteredNode.getName() ));
        }
    }

    /**
     * Returns the clustered workflow.
     *
     * @return the ADag object corresponding to the clustered workflow.
     *
     * @throws ClustererException in case of error.
     */
    public ADag getClusteredDAG() throws ClustererException{
        //replace the relations of the original DAG and return
        mScheduledDAG.dagInfo.relations = null;
        mScheduledDAG.dagInfo.relations = (Vector)mClusteredRelations;

        return mScheduledDAG;
    }

    /**
     * Returns the ID for the clustered job corresponding to a partition.
     *
     * @param partition the partition.
     *
     * @return the ID of the clustered job
     */
    protected String constructClusteredJobID( Partition partition ){
        return partition.getID();
    }

    /**
     * Adds jobs to the internal map of jobs that is maintained by the
     * clusterer.
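     * <p>
     * Jobs are keyed by their logical ID (for example, a DAX id such as
     * "ID0000001"; the value is illustrative), which is what lets the node
     * IDs returned by order(Partition) be resolved back to Job objects.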
     *
     * @param job the job being added
     */
    protected void addJob( Job job ){
        mSubInfoMap.put( job.getLogicalID(), job );
    }

    /**
     * Returns the job object corresponding to the id of the job.
     *
     * @param id the id of the job
     *
     * @return the corresponding job.
     */
    protected Job getJob( String id ){
        return (Job) mSubInfoMap.get( id );
    }

    /**
     * Maps the partition to the corresponding clustered job.
     *
     * @param p   the partition being clustered.
     * @param job the corresponding clustered job.
     */
    protected void associate( Partition p, Job job ){
        mPartitionClusterMap.put( p.getID(), job );
    }

    /**
     * Returns the job corresponding to a partition.
     *
     * @param p the partition for which the clustered job is required.
     *
     * @return the corresponding job, else null in case the job is not found.
     */
    protected Job clusteredJob( Partition p ){
        return this.clusteredJob( p.getID() );
    }

    /**
     * Returns the job corresponding to a partition.
     *
     * @param id the partition id.
     *
     * @return the corresponding job, else null in case the job is not found.
     */
    protected Job clusteredJob( String id ){
        Object obj = mPartitionClusterMap.get( id );
        return ( obj == null) ? null: (Job)obj;
    }

}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/Clusterer.java0000644000175000017500000000540611757531137025573 0ustar ryngerynge/**
 *  Copyright 2007-2008 University Of Southern California
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package edu.isi.pegasus.planner.cluster;

import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.planner.partitioner.Partition;

import java.util.List;

/**
 * The clustering API that constructs clusters of jobs out of a single
 * partition.
 *
 * @author Karan Vahi
 * @version $Revision: 2576 $
 */
public interface Clusterer {

    /**
     * The version number associated with this API of the Clusterer.
     */
    public static final String VERSION = "1.1";

    /**
     * Initializes the Clusterer implementation.
     *
     * @param dag the workflow that is being clustered.
     * @param bag the bag of objects that is useful for initialization.
     *
     * @throws ClustererException in case of error.
     */
    public void initialize( ADag dag , PegasusBag bag )
        throws ClustererException;

    /**
     * Determines the clusters for a partition.
     *
     * @param partition the partition for which the clusters need to be
     *                  determined.
     *
     * @throws ClustererException in case of error.
     */
    public void determineClusters( Partition partition ) throws ClustererException;

    /**
     * Associates the relations between the partitions with the corresponding
     * relations between the clustered jobs that are created for each Partition.
     *
     * @param partitionID the id of a partition.
     * @param parents     the list of String objects that contain
     *                    the ids of the parents of the partition.
     *
     * @throws ClustererException in case of error.
     */
    public void parents( String partitionID, List parents ) throws ClustererException;

    /**
     * Returns the clustered workflow.
     *
     * @return the ADag object corresponding to the clustered workflow.
     *
     * @throws ClustererException in case of error.
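     *
     * <p>
     * A minimal usage sketch of the overall lifecycle (illustrative; assumes
     * a concrete Clusterer implementation and already constructed Partition
     * objects):
     * <pre>
     *    clusterer.initialize( dag, bag );
     *    //for each partition, in topological order
     *    clusterer.determineClusters( partition );
     *    clusterer.parents( partition.getID(), parentIDs );
     *    ADag clustered = clusterer.getClusteredDAG();
     * </pre>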
     */
    public ADag getClusteredDAG() throws ClustererException;

    /**
     * Returns a textual description of the clustering implementation.
     *
     * @return a short textual description
     */
    public String description();

}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/ClustererFactoryException.java0000644000175000017500000000647111757531137031005 0ustar ryngerynge/**
 *  Copyright 2007-2008 University Of Southern California
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package edu.isi.pegasus.planner.cluster;

import edu.isi.pegasus.common.util.FactoryException;

/**
 * Class to notify of failures while instantiating Clusterer implementations.
 *
 * @author Karan Vahi
 * @version $Revision: 2553 $
 */
public class ClustererFactoryException extends FactoryException {

    /**
     * The default classname that is associated with the exception.
     */
    public static final String DEFAULT_NAME = "Clusterer";

    /**
     * Constructs a ClustererFactoryException with the specified detail
     * message. The associated classname is set to the value specified by
     * DEFAULT_NAME.
     *
     * @param msg the detailed message.
     *
     * @see #DEFAULT_NAME
     */
    public ClustererFactoryException( String msg ) {
        super( msg );
        mClassname = this.DEFAULT_NAME;
    }

    /**
     * Constructs a ClustererFactoryException with the specified detailed
     * message.
     *
     * @param msg       is the detailed message.
     * @param classname the name of the class that was being instantiated, or
     *                  some other signifier like a module name.
     */
    public ClustererFactoryException(String msg, String classname) {
        super( msg , classname );
    }

    /**
     * Constructs a ClustererFactoryException with the
     * specified detailed message and a cause. The associated classname is set
     * to the value specified by DEFAULT_NAME.
     *
     * @param msg   is the detailed message that is to be logged.
     * @param cause is the cause (which is saved for later retrieval by the
     *              {@link java.lang.Throwable#getCause()} method). A null
     *              value is permitted, and indicates that the cause is
     *              nonexistent or unknown.
     *
     * @see #DEFAULT_NAME
     */
    public ClustererFactoryException(String msg, Throwable cause) {
        super(msg, cause);
        mClassname = this.DEFAULT_NAME;
    }

    /**
     * Constructs a ClustererFactoryException with the
     * specified detailed message and a cause.
     *
     * @param msg       is the detailed message that is to be logged.
     * @param classname the name of the class that was being instantiated.
     * @param cause     is the cause (which is saved for later retrieval by the
     *                  {@link java.lang.Throwable#getCause()} method). A null
     *                  value is permitted, and indicates that the cause is
     *                  nonexistent or unknown.
     */
    public ClustererFactoryException( String msg,
                                      String classname,
                                      Throwable cause ) {
        super( msg, cause );
        mClassname = classname;
    }

}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/ClustererException.java0000644000175000017500000000431111757531137027444 0ustar ryngerynge/**
 *  Copyright 2007-2008 University Of Southern California
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package edu.isi.pegasus.planner.cluster;

/**
 * The base class of the exception that is thrown by all Clusterers.
 * It is a checked exception.
 *
 * @author Karan Vahi
 * @version $Revision: 2553 $
 */
public class ClustererException extends Exception {

    /**
     * Constructs a ClustererException with no detail
     * message.
     */
    public ClustererException() {
        super();
    }

    /**
     * Constructs a ClustererException with the specified detailed
     * message.
     *
     * @param message is the detailed message.
     */
    public ClustererException(String message) {
        super(message);
    }

    /**
     * Constructs a ClustererException with the specified detailed
     * message and a cause.
     *
     * @param message is the detailed message.
     * @param cause   is the cause (which is saved for later retrieval by the
     *                {@link java.lang.Throwable#getCause()} method). A null
     *                value is permitted, and indicates that the cause is
     *                nonexistent or unknown.
     */
    public ClustererException(String message, Throwable cause) {
        super(message, cause);
    }

    /**
     * Constructs a ClustererException with just the specified
     * cause.
     *
     * @param cause is the cause (which is saved for later retrieval by the
     *              {@link java.lang.Throwable#getCause()} method). A null
     *              value is permitted, and indicates that the cause is
     *              nonexistent or unknown.
     */
    public ClustererException(Throwable cause) {
        super(cause);
    }

}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/aggregator/0000755000175000017500000000000011757531667025105 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/aggregator/Abstract.java0000644000175000017500000010401311757531137027502 0ustar ryngerynge/**
 *  Copyright 2007-2008 University Of Southern California
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
*/ package edu.isi.pegasus.planner.cluster.aggregator; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.cluster.JobAggregator; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import java.io.File; import java.io.FileReader; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.IOException; import java.util.List; import java.util.Set; import java.util.Iterator; import edu.isi.pegasus.common.util.Separator; import java.util.HashSet; /** * An abstract implementation of the JobAggregator interface, which the other * implementations can choose to extend. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4737 $ * */ public abstract class Abstract implements JobAggregator { /** * The prefix that is assigned to the jobname of the collapsed jobs to * get the jobname for the fat job. */ public static final String CLUSTERED_JOB_PREFIX = "merge_"; /** * The transformation namespace for the cluster jobs. */ public static final String TRANSFORMATION_NAMESPACE = "pegasus"; /** * The version number for the derivations for cluster jobs */ public static final String TRANSFORMATION_VERSION = null; /** * The derivation namespace for the cluster jobs. */ public static final String DERIVATION_NAMESPACE = "pegasus"; /** * The version number for the derivations for cluster jobs. */ public static final String DERIVATION_VERSION = "1.0"; /** * The marker to designate a line in the input file reserved for * monitord purposes. */ public static final String MONITORD_COMMENT_MARKER = "#@"; /** * The directory, where the stdin file of the fat jobs are created. * It should be the submit file directory that the user mentions at * runtime. */ protected String mDirectory; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The handle to the LogManager that logs all the messages. */ protected LogManager mLogger; /** * The handle to the transformation catalog. */ protected TransformationCatalog mTCHandle; /** * Handle to the site catalog store */ protected SiteStore mSiteStore; //protected PoolInfoProvider mSiteHandle; /** * The handle to the ADag object that contains the workflow being * clustered. */ protected ADag mClusteredADag; /** * The handle to the GridStart Factory. */ protected GridStartFactory mGridStartFactory; /** * Bag of initialization objects. */ protected PegasusBag mBag; /** * A convenience method to return the complete transformation name being * used to construct jobs in this class. * * @param name the name of the transformation * * @return the complete transformation name */ public static String getCompleteTranformationName( String name ){ return Separator.combine( TRANSFORMATION_NAMESPACE, name, TRANSFORMATION_VERSION ); } /** * The default constructor. */ public Abstract(){ } /** *Initializes the JobAggregator impelementation * * @param dag the workflow that is being clustered. 
     * @param bag the bag of objects that is useful for initialization.
     *
     */
    public void initialize( ADag dag , PegasusBag bag ){
        mBag = bag;
        mClusteredADag = dag;

        mLogger = bag.getLogger();
        mProps = bag.getPegasusProperties();
        mTCHandle = bag.getHandleToTransformationCatalog();
        mSiteStore = bag.getHandleToSiteStore();

        setDirectory( bag.getPlannerOptions().getSubmitDirectory() );

        mGridStartFactory = new GridStartFactory();
        mGridStartFactory.initialize( mBag, dag );
    }

    /**
     * Returns the arguments with which the AggregatedJob
     * needs to be invoked.
     *
     * @param job the AggregatedJob for which the arguments have
     *            to be constructed.
     *
     * @return argument string
     */
    public abstract String aggregatedJobArguments( AggregatedJob job );

    /**
     * Enables the constituent jobs that make up an aggregated job.
     *
     * @param mergedJob the clustered job
     * @param jobs      the constituent jobs
     *
     * @return AggregatedJob
     */
//    protected abstract AggregatedJob enable( AggregatedJob mergedJob, List jobs );

    /**
     * Constructs a new aggregated job that contains all the jobs passed to it.
     * The new aggregated job appears as a single job in the workflow and
     * replaces the jobs it contains in the workflow.
     *
     * @param jobs the list of Job objects that need to be
     *             collapsed. All the jobs being collapsed should be scheduled
     *             at the same pool, to maintain correct semantics.
     * @param name the logical name of the jobs in the list passed to this
     *             function.
     * @param id   the id that is given to the new job.
     *
     * @return the Job object corresponding to the aggregated
     *         job containing the jobs passed as List in the input,
     *         null if the list of jobs is empty
     */
    public AggregatedJob constructAbstractAggregatedJob(List jobs,String name,String id){
        return constructAbstractAggregatedJob(jobs,name,id,getClusterExecutableLFN());
    }

    /**
     * Constructs an abstract aggregated job that has a handle to the appropriate
     * JobAggregator that will be used to aggregate the jobs.
     *
     * @param jobs     the list of Job objects that need to be
     *                 collapsed. All the jobs being collapsed should be scheduled
     *                 at the same pool, to maintain correct semantics.
     * @param name     the logical name of the jobs in the list passed to this
     *                 function.
     * @param id       the id that is given to the new job.
     * @param mergeLFN the logical name for the aggregated job that has to be
     *                 constructed.
     *
     * @return the AggregatedJob object corresponding to the aggregated
     *         job containing the jobs passed as List in the input,
     *         null if the list of jobs is empty
     */
    public AggregatedJob constructAbstractAggregatedJob( List jobs,
                                                         String name,
                                                         String id,
                                                         String mergeLFN ){
        //sanity check
        if(jobs == null || jobs.isEmpty()){
            mLogger.log("List of jobs for clustering is empty",
                        LogManager.ERROR_MESSAGE_LEVEL);
            return null;
        }

        //sanity check missing to ensure jobs are of same type
        //Right now done in NodeCollapser. But we do not need this for
        //Vertical Clumping. Karan July 28, 2005

        //To get the gridstart/kickstart path on the remote
        //pool, querying with entry for vanilla universe.
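        //(Illustrative note: the merged job constructed below is named
        //"merge_" + name + "_" + id, e.g. merge_findrange_P1 for a
        //hypothetical base name "findrange" and clustered job id "P1".)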
//In the new format the gridstart is associated with the //pool not pool, condor universe Job firstJob = (Job)jobs.get(0); AggregatedJob mergedJob = new AggregatedJob( /*(Job)jobs.get(0),*/ jobs.size() ); mergedJob.setJobAggregator( this ); mergedJob.setJobType( Job.COMPUTE_JOB ); Job job = null; StringBuffer sb = new StringBuffer(); sb.append( Abstract.CLUSTERED_JOB_PREFIX ); if( name != null && name.length() > 0 ){ sb.append( name ).append( "_" ); } sb.append( id ); String mergedJobName = sb.toString(); mLogger.log("Constructing Abstract clustered job " + mergedJobName, LogManager.DEBUG_MESSAGE_LEVEL); //enable the jobs that need to be merged //before writing out the stdin file // String gridStartPath = site.getKickstartPath(); // GridStart gridStart = mGridStartFactory.loadGridStart( firstJob, gridStartPath ); // mergedJob = gridStart.enable( mergedJob, jobs ); //inconsistency between job name and logical name for now mergedJob.setName( mergedJobName ); //fix for JIRA bug 83 //the site handle needs to be set for the aggregated job //before it is enabled. mergedJob.setSiteHandle( firstJob.getSiteHandle() ); mergedJob.setStagingSiteHandle( firstJob.getStagingSiteHandle() ); Set ipFiles = new HashSet(); Set opFiles = new HashSet(); boolean userExecutablesStaged = false; for( Iterator it = jobs.iterator(); it.hasNext(); ) { job = (Job) it.next(); ipFiles.addAll( job.getInputFiles() ); opFiles.addAll( job.getOutputFiles() ); mergedJob.add(job); //update user executable staging. userExecutablesStaged = userExecutablesStaged || job.userExecutablesStagedForJob(); //we need to merge the profiles from the constituent //jobs now, rather in function makeAbstractAggreagatedJobConcrete //JIRA PM-368 //merge profiles for all jobs mergedJob.mergeProfiles( job ); } mergedJob.setExecutableStagingForJob(userExecutablesStaged); //overriding the input files, output files, id mergedJob.setInputFiles( ipFiles ); mergedJob.setOutputFiles( opFiles ); mergedJob.setTransformation( Abstract.TRANSFORMATION_NAMESPACE, mergeLFN, Abstract.TRANSFORMATION_VERSION ); mergedJob.setDerivation( Abstract.DERIVATION_NAMESPACE, mergeLFN, Abstract.DERIVATION_VERSION); mergedJob.setLogicalID( id ); //the compute job of the VDS supernode is this job itself mergedJob.setVDSSuperNode( mergedJobName ); //explicitly set stdout to null overriding any stdout //that might have been inherited in the clone operation. //FIX for bug 142 http://bugzilla.globus.org/vds/show_bug.cgi?id=142 mergedJob.setStdOut( "" ); mergedJob.setStdErr( "" ); return mergedJob; } /** * Enables the abstract clustered job for execution and converts it to it's * executable form * * @param job the abstract clustered job */ public void makeAbstractAggregatedJobConcrete( AggregatedJob job ){ String stdIn = null; //containers for the input and output //files of fat job. Set insures no duplication //The multiple transfer ensures no duplicate transfer of //input files. So doing the set thing is redundant. 
        //Hashset not used correctly

//        mergedJob = enable( mergedJob, jobs );
        Job firstJob = (Job)job.getConstituentJob( 0 );
        try {
            BufferedWriter writer;
            stdIn = job.getID() + ".in";
            writer = new BufferedWriter(new FileWriter(
                                        new File(mDirectory,stdIn)));

            //traverse through the jobs to determine input/output files
            //and merge the profiles for the jobs
            int taskid = 1;
            for( Iterator it = job.constituentJobsIterator(); it.hasNext(); taskid++ ) {
                Job constituentJob = (Job) it.next();

                //handle stdin
                if( constituentJob instanceof AggregatedJob ){
                    //slurp in the contents of the constituent job's stdin
                    File file = new File ( mDirectory, constituentJob.getStdIn() );
                    BufferedReader reader = new BufferedReader(
                                                new FileReader( file )
                                            );
                    String line;
                    while( (line = reader.readLine()) != null ){
                        //ignore commented-out lines
                        if( line.startsWith( "#" ) ){
                            continue;
                        }
                        writer.write( line );
                        writer.write( "\n" );
                        taskid++;
                    }
                    reader.close();
                    //delete the previous stdin file
                    file.delete();
                }
                else{
                    //write out the argument string to the
                    //stdin file for the fat job

                    //generate the comment string that has the
                    //taskid transformation derivation
                    writer.write( getCommentString( constituentJob, taskid ) + "\n" );

                    // the arguments are no longer set as condor profiles
                    // they are now set to the corresponding profiles in
                    // the Condor Code Generator only.
                    writer.write( constituentJob.getRemoteExecutable() + " " +
                                  constituentJob.getArguments() + "\n");
                }
            }

            //closing the handle to the writer
            writer.close();
        }
        catch(IOException e){
            mLogger.log("While writing the stdIn file " + e.getMessage(),
                        LogManager.ERROR_MESSAGE_LEVEL);
            throw new RuntimeException( "While writing the stdIn file " + stdIn, e );
        }

        /* JIRA PM-277
        job.setUniverse( firstJob.getUniverse() );
        job.setJobManager( firstJob.getJobManager() );
        */

        //the executable that the fat job refers to is the collapser
        TransformationCatalogEntry entry = this.getTCEntry( job );
        job.setRemoteExecutable( entry.getPhysicalTransformation() );

        //stdin file is the file containing the arguments
        //for the jobs being collapsed
        job.setStdIn( stdIn );

        //explicitly set stdout to null overriding any stdout
        //that might have been inherited in the clone operation.
        //FIX for bug 142 http://bugzilla.globus.org/vds/show_bug.cgi?id=142
        job.setStdOut( "" );
        job.setStdErr( "" );

        //set the arguments for the clustered job
        job.setArguments( this.aggregatedJobArguments( job ) );

        //get hold of one of the jobs and suck its globus namespace
        //info into the map.
        /* Not needed, as the clone method would have taken care of it.
           Karan Sept 09, 2004
        entry = getTCEntry(job);
        mergedJob.globusRSL.checkKeyInNS(entry.getProfiles(Profile.GLOBUS));
        */

        //also put in jobType as mpi
        //mergedJob.globusRSL.checkKeyinNS("jobtype","mpi");

        //the profile information from the pool catalog does not need to be
        //assimilated into the job. As the collapsed job is run on the
        //same pool as the job is run
        // mergedJob.updateProfiles(mPoolHandle.getPoolProfile(mergedJob.executionPool));

        //add any notifications specified in the transformation
        //catalog for the job. JIRA PM-391
        job.addNotifications( entry );

        //the profile information from the transformation
        //catalog needs to be assimilated into the job
        //overriding the one from pool catalog.
        job.updateProfiles( entry );

        //the profile information from the properties file
        //is assimilated overriding the one from transformation
        //catalog.
        job.updateProfiles( mProps );

        return ;
    }

    /**
     * Generates the comment string for a constituent job. It generates a
     * comment of the format #@ task_id transformation derivation.
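     * <p>
     * Illustrative example (hypothetical names): for the second constituent
     * job of a clustered job, the generated line could look like
     * <code>#@ 2 pegasus::findrange:1.0 ID0000002</code>, and in the stdin
     * file written above it is immediately followed by that job's
     * executable-plus-arguments line.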
* * @param job the job for which * @param taskid the task id to put in. * * @return the comment invocation */ protected String getCommentString( Job job, int taskid ){ StringBuffer sb = new StringBuffer(); sb.append( MONITORD_COMMENT_MARKER ).append( " " ). append( taskid ).append( " " ). append( job.getCompleteTCName() ).append( " " ). append( job.getDAXID() ).append( " " ); return sb.toString(); } /** * Constructs a new aggregated job that contains all the jobs passed to it. * The new aggregated job, appears as a single job in the workflow and * replaces the jobs it contains in the workflow. * * @param jobs the list of Job objects that need to be * collapsed. All the jobs being collapsed should be scheduled * at the same pool, to maintain correct semantics. * @param name the logical name of the jobs in the list passed to this * function. * @param id the id that is given to the new job. * * @return the Job object corresponding to the aggregated * job containing the jobs passed as List in the input, * null if the list of jobs is empty */ /* public AggregatedJob construct(List jobs,String name,String id){ return construct(jobs,name,id,getClusterExecutableLFN()); } */ /** * Constructs a new aggregated job that contains all the jobs passed to it. * The new aggregated job, appears as a single job in the workflow and * replaces the jobs it contains in the workflow. * * @param jobs the list of Job objects that need to be * collapsed. All the jobs being collapsed should be scheduled * at the same pool, to maintain correct semantics. * @param name the logical name of the jobs in the list passed to this * function. * @param id the id that is given to the new job. * @param mergeLFN the logical name for the aggregated job that has to be * constructed. * * @return the AggregatedJob object corresponding to the aggregated * job containing the jobs passed as List in the input, * null if the list of jobs is empty */ /* protected AggregatedJob construct( List jobs, String name, String id, String mergeLFN){ //sanity check if(jobs == null || jobs.isEmpty()){ mLogger.log("List of jobs for clustering is empty", LogManager.ERROR_MESSAGE_LEVEL); return null; } //sanity check missing to ensure jobs are of same type //Right now done in NodeCollapser. But we do not need this for //Vertical Clumping. Karan July 28, 2005 //To get the gridstart/kickstart path on the remote //pool, querying with entry for vanilla universe. //In the new format the gridstart is associated with the //pool not pool, condor universe Job firstJob = (Job)jobs.get(0); AggregatedJob mergedJob = new AggregatedJob( jobs.size() ); Job job = null; StringBuffer sb = new StringBuffer(); sb.append( Abstract.CLUSTERED_JOB_PREFIX ); if( name != null && name.length() > 0 ){ sb.append( name ).append( "_" ); } sb.append( id ); String mergedJobName = sb.toString(); mLogger.log("Constructing clustered job " + mergedJobName, LogManager.DEBUG_MESSAGE_LEVEL); String stdIn = null; //containers for the input and output //files of fat job. Set insures no duplication //The multiple transfer ensures no duplicate transfer of //input files. So doing the set thing is redundant. 
//Hashset not used correctly Set ipFiles = new java.util.HashSet(); Set opFiles = new java.util.HashSet(); //enable the jobs that need to be merged //before writing out the stdin file // String gridStartPath = site.getKickstartPath(); // GridStart gridStart = mGridStartFactory.loadGridStart( firstJob, gridStartPath ); // mergedJob = gridStart.enable( mergedJob, jobs ); //inconsistency between job name and logical name for now mergedJob.setName( mergedJobName ); //fix for JIRA bug 83 //the site handle needs to be set for the aggregated job //before it is enabled. mergedJob.setSiteHandle( firstJob.getSiteHandle() ); mergedJob = enable( mergedJob, jobs ); try { BufferedWriter writer; stdIn = mergedJobName + ".in"; writer = new BufferedWriter(new FileWriter( new File(mDirectory,stdIn))); //traverse throught the jobs to determine input/output files //and merge the profiles for the jobs boolean merge = false; for( Iterator it = jobs.iterator(); it.hasNext(); ) { job = (Job) it.next(); ipFiles.addAll( job.getInputFiles() ); opFiles.addAll( job.getOutputFiles() ); //merge profiles for all jobs except the first // if( merge ) { mergedJob.mergeProfiles( job ); } //merge profiles for all jobs mergedJob.mergeProfiles( job ); merge = true; //handle stdin if( job instanceof AggregatedJob ){ //slurp in contents of it's stdin File file = new File ( mDirectory, job.getStdIn() ); BufferedReader reader = new BufferedReader( new FileReader( file ) ); String line; while( (line = reader.readLine()) != null ){ writer.write( line ); writer.write( "\n" ); } reader.close(); //delete the previous stdin file file.delete(); } else{ //write out the argument string to the //stdin file for the fat job writer.write( job.condorVariables.get("executable") + " " + job.condorVariables.get("arguments") + "\n"); } } //closing the handle to the writer writer.close(); } catch(IOException e){ mLogger.log("While writing the stdIn file " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( "While writing the stdIn file " + stdIn, e ); } mergedJob.setTransformation( Abstract.TRANSFORMATION_NAMESPACE, mergeLFN, Abstract.TRANSFORMATION_VERSION ); mergedJob.setDerivation( Abstract.DERIVATION_NAMESPACE, mergeLFN, Abstract.DERIVATION_VERSION); mergedJob.setLogicalID( id ); mergedJob.setUniverse( firstJob.getUniverse() ); mergedJob.setJobManager( firstJob.getJobManager() ); mergedJob.setJobType( Job.COMPUTE_JOB ); //the compute job of the VDS supernode is this job itself mergedJob.setVDSSuperNode( mergedJobName ); //the executable that fat job refers to is collapser TransformationCatalogEntry entry = this.getTCEntry(mergedJob); mergedJob.setRemoteExecutable( entry.getPhysicalTransformation() ); //overriding the input files, output files, id mergedJob.setInputFiles( ipFiles ); mergedJob.setOutputFiles( opFiles ); //stdin file is the file containing the arguments //for the jobs being collapsed mergedJob.setStdIn( stdIn ); //explicitly set stdout to null overriding any stdout //that might have been inherited in the clone operation. //FIX for bug 142 http://bugzilla.globus.org/vds/show_bug.cgi?id=142 mergedJob.setStdOut( "" ); mergedJob.setStdErr( "" ); //set the arguments for the clustered job mergedJob.setArguments( this.aggregatedJobArguments( mergedJob ) ); //get hold of one of the jobs and suck init's globus namespace //info into the the map. // Not needed, as the clone method would have taken care of it. 
// Karan Sept 09, 2004 //entry = getTCEntry(job); //mergedJob.globusRSL.checkKeyInNS(entry.getProfiles(Profile.GLOBUS)); // //also put in jobType as mpi //mergedJob.globusRSL.checkKeyinNS("jobtype","mpi"); //the profile information from the pool catalog does not need to be //assimilated into the job. As the collapsed job is run on the //same pool as the job is run // mergedJob.updateProfiles(mPoolHandle.getPoolProfile(mergedJob.executionPool)); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. mergedJob.updateProfiles( entry ); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. mergedJob.updateProfiles( mProps ); return mergedJob; } */ /** * Helper method to get an entry from the transformation catalog for an * installed executable. It does the traversal from the list of entries * to return a single TransformationCatalogEntry object, and dies with * an appropriate error message if the object is not found. * The pool and the name are retrieved from job object. * * @param job the job whose corresponding TransformationCatalogEntry you want. * * @return the TransformationCatalogEntry corresponding to the entry in the * TC. */ protected TransformationCatalogEntry getTCEntry(Job job){ List tcentries = null; TransformationCatalogEntry entry = null; try { tcentries = mTCHandle.lookup(job.namespace, job.logicalName, job.version, job.executionPool, TCType.INSTALLED); } catch (Exception e) { mLogger.log( "Unable to retrieve entry from TC for transformation " + job.getCompleteTCName() + " " + e.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL ); } entry = ( tcentries == null ) ? this.defaultTCEntry( this.getClusterExecutableLFN(), this.getClusterExecutableBasename(), job.getSiteHandle() ): //try using a default one (TransformationCatalogEntry) tcentries.get(0); if( entry == null ){ //NOW THROWN AN EXCEPTION //should throw a TC specific exception StringBuffer error = new StringBuffer(); error.append("Could not find entry in tc for lfn "). append( job.getCompleteTCName() ). append(" at site ").append( job.getSiteHandle() ); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } return entry; } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. * * @param name the logical name for the clustering transformation. * @param executableBasename the basename for the executable in the bin directory * of a Pegasus installation * @param site the site for which the default entry is required. * * * @return the default entry. */ private TransformationCatalogEntry defaultTCEntry( String name, String executableBasename, String site ){ TransformationCatalogEntry defaultTCEntry = null; //check if PEGASUS_HOME is set String home = mSiteStore.getPegasusHome( site ); //if PEGASUS_HOME is not set, use VDS_HOME home = ( home == null )? 
mSiteStore.getVDSHome( site ): home; mLogger.log( "Creating a default TC entry for " + Abstract.getCompleteTranformationName( name ) + " at site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); //if home is still null if ( home == null ){ //cannot create default TC mLogger.log( "Unable to create a default entry for " + Abstract.getCompleteTranformationName( name ), LogManager.DEBUG_MESSAGE_LEVEL ); //set the flag back to true return defaultTCEntry; } //remove trailing / if specified home = ( home.charAt( home.length() - 1 ) == File.separatorChar )? home.substring( 0, home.length() - 1 ): home; //construct the path to it StringBuffer path = new StringBuffer(); path.append( home ).append( File.separator ). append( "bin" ).append( File.separator ). append( executableBasename ); defaultTCEntry = new TransformationCatalogEntry( Abstract.TRANSFORMATION_NAMESPACE, name, Abstract.TRANSFORMATION_VERSION ); defaultTCEntry.setPhysicalTransformation( path.toString() ); defaultTCEntry.setResourceId( site ); defaultTCEntry.setType( TCType.INSTALLED ); defaultTCEntry.setSysInfo( this.mSiteStore.getSysInfo( site ) ); //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug. as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.DEBUG_MESSAGE_LEVEL ); } return defaultTCEntry; } /** * Determines whether there is NOT an entry in the transformation catalog * for a particular transformation on a particular site. * * @param namespace the logical namespace of the transformation. * @param name the logical name of the transformation. * @param version the version of the transformation. * @param executableBasename basename of the executable that does the clustering. * @param site the site at which existence check is required. * * @return boolean true if an entry does not exists, false otherwise. */ protected boolean entryNotInTC(String namespace, String name, String version, String executableBasename, String site){ //check on for pfn for existence. gmehta says lesser queries //underneath List l = null; try{ l = mTCHandle.lookupNoProfiles(namespace, name, version, site, TCType.INSTALLED); } catch (Exception e) { mLogger.log( "Unable to retrieve entry from TC " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); } //a double negative return ( l == null || l.isEmpty() ) ? (( this.defaultTCEntry( name, executableBasename, site ) ) == null ) ://construct a default tc entry false ; } /** * Sets the directory where the stdin files are to be generated. * * @param directory the path to the directory to which it needs to be set. */ protected void setDirectory(String directory){ mDirectory = (directory == null)? //user did not specify a submit file dir //use the default i.e current directory ".": //user specified directory picked up directory; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/aggregator/JobAggregatorFactory.java0000644000175000017500000001233711757531137032013 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package edu.isi.pegasus.planner.cluster.aggregator;

import edu.isi.pegasus.planner.cluster.JobAggregator;
import edu.isi.pegasus.planner.common.PegasusProperties;
import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.common.util.DynamicLoader;

/**
 * A factory class to load the appropriate JobAggregator implementations while
 * clustering jobs.
 *
 * @author Karan Vahi
 * @version $Revision: 2572 $
 */
public class JobAggregatorFactory {

    /**
     * Package to prefix "just" class names with.
     */
    public static final String DEFAULT_PACKAGE_NAME =
        "edu.isi.pegasus.planner.cluster.aggregator";

    /**
     * The name of the class in this package that corresponds to seqexec.
     * This is required to load the correct class, even when the user
     * specifies a class name that matches only if case is ignored.
     */
    public static final String SEQ_EXEC_CLASS = "SeqExec";

    /**
     * The name of the class in this package that corresponds to mpiexec.
     * This is required to load the correct class, even when the user
     * specifies a class name that matches only if case is ignored.
     */
    public static final String MPI_EXEC_CLASS = "MPIExec";

    /**
     * Loads the implementing class corresponding to the mode specified by the user
     * at runtime in the properties file. The properties object passed should not
     * be null.
     *
     * @param dag the workflow that is being clustered.
     * @param bag the bag of objects that is useful for initialization.
     *
     * @return the instance of the class implementing this interface.
     *
     * @throws JobAggregatorFactoryException that nests any error that
     *         might occur during the instantiation
     *
     * @see #DEFAULT_PACKAGE_NAME
     */
    public static JobAggregator loadInstance( ADag dag, PegasusBag bag ) {
        PegasusProperties properties = bag.getPegasusProperties();

        //sanity check
        if( properties == null){
            throw new RuntimeException("Invalid properties passed");
        }

        return loadInstance( properties.getJobAggregator(), dag, bag );
    }

    /**
     * Loads the implementing class corresponding to the class passed.
     *
     * @param className the name of the class that implements the mode. It is the
     *                  name of the class, not the complete name with package. That
     *                  is added by itself.
     * @param dag       the workflow that is being clustered.
     * @param bag       the bag of objects that is useful for initialization.
     *
     * @return the instance of the class implementing this interface.
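     *         For example (illustrative): passing "seqexec" in any case
     *         resolves to the SeqExec class in this package, while a fully
     *         qualified name such as "org.example.MyAggregator" is loaded
     *         as-is.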
* * @throws JobAggregatorFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static JobAggregator loadInstance( String className, ADag dag, PegasusBag bag ) { //sanity check if( bag.getPegasusProperties() == null){ throw new RuntimeException("Invalid properties passed"); } if(className == null){ throw new RuntimeException("Invalid class specified to load"); } JobAggregator ja = null; try{ //ensure that correct class is picked up in case //of mpiexec and seqexec if(className.equalsIgnoreCase(MPI_EXEC_CLASS)){ className = MPI_EXEC_CLASS; } else if(className.equalsIgnoreCase(SEQ_EXEC_CLASS)){ className = SEQ_EXEC_CLASS; } //prepend the package name if required className = (className.indexOf('.') == -1)? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className: //load directly className; //try loading the class dynamically DynamicLoader dl = new DynamicLoader( className); Object argList[] = new Object[0]; ja = (JobAggregator) dl.instantiate(argList); ja.initialize( dag, bag ); } catch ( Exception e ) { throw new JobAggregatorFactoryException("Instantiating JobAggregator ", className, e); } return ja; } } ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/aggregator/JobAggregatorInstanceFactory.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/aggregator/JobAggregatorInstanceFactory.j0000644000175000017500000001566111757531137033013 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.cluster.aggregator; import edu.isi.pegasus.planner.cluster.JobAggregator; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.Pegasus; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import edu.isi.pegasus.planner.classes.PegasusBag; /** * A JobAggergator factory that caches up the loaded implementations. * It loads a new implementation only if it has not loaded it earlier. * However, it is different from a Singleton Factory, as the implementations * are not stored in static instances. Hence, two different instances of this * Factory can load different instances of the same implementation. * * * @author Karan Vahi * @version $Revision: 2590 $ * * @see JobAggregatorFactory */ public class JobAggregatorInstanceFactory { /** * A table that maps, Pegasus style keys to the names of the corresponding classes * implementing the CondorStyle interface. */ private static Map mImplementingClassNameTable; /** * A table that maps, Pegasus style keys to appropriate classes implementing the * JobAggregator interface */ private Map mImplementingClassTable ; /** * The handle to the properties object holding all the properties. 
*/ protected PegasusProperties mProps; /** * ADag object containing the jobs that have been scheduled by the site * selector. */ private ADag mDAG; /** * A boolean indicating that the factory has been initialized. */ private boolean mInitialized; /** * The bag of initialization objects */ private PegasusBag mBag; /** * The default constructor. */ public JobAggregatorInstanceFactory() { mInitialized = false; mImplementingClassTable = new HashMap(3); } /** * Initializes the Factory. Loads all the implementations just once. * * @param dag the workflow that is being clustered. * @param bag the bag of initialization objects. * * @throws JobAggregatorFactoryException that nests any error that * might occur during the instantiation of the implementation. */ public void initialize( ADag dag, PegasusBag bag ) throws JobAggregatorFactoryException{ mBag = bag; mProps = bag.getPegasusProperties(); mDAG = dag; //load all the implementations that correspond to the Pegasus style keys for( Iterator it = this.implementingClassNameTable().entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry) it.next(); String aggregator = (String)entry.getKey(); String className = (String)entry.getValue(); //load via reflection. not required in this case though put( aggregator, JobAggregatorFactory.loadInstance( className, mDAG, mBag )); } //we have successfully loaded all implementations mInitialized = true; } /** * Returns the appropriate handle to the JobAggregator that is to be used * for a particular type of job. Aggregators for mpiexec and seqexec are * already loaded in the constructor, and just the reference is returned. * For any other aggregator it is dynamically loaded. * * @param job the job corresponding to which the aggregator is to be * loaded. * * @return the appropriate JobAggregator * * @throws JobAggregatorFactoryException that nests any error that * might occur during the instantiation * */ public JobAggregator loadInstance( Job job ) throws JobAggregatorFactoryException{ //sanity checks first if( !mInitialized ){ throw new JobAggregatorFactoryException( "JobAggregatorFactory needs to be initialized first before using" ); } Object obj; String shortName = ((obj =job.vdsNS.get(Pegasus.COLLAPSER_KEY))==null)? //pick the one from the properties mProps.getJobAggregator(): (String)obj; //now look up the job aggregator Object aggregator = this.get( shortName.toLowerCase() ); if ( aggregator == null ) { //load via reflection aggregator = JobAggregatorFactory.loadInstance( shortName, mDAG, mBag ); //throw exception if still null if (aggregator == null ){ throw new JobAggregatorFactoryException( "Unsupported aggregator " + shortName); } //register in cache this.put( shortName, aggregator ); } return (JobAggregator)aggregator; } /** * Returns the implementation from the implementing class table. * * @param style the aggregator style * * @return implementation the class implementing that style, else null */ private Object get( String style ){ return mImplementingClassTable.get( style); } /** * Inserts an entry into the implementing class table. * * @param style the aggregator style * @param implementation the class implementing that aggregator. */ private void put( String style, Object implementation){ mImplementingClassTable.put( style.toLowerCase(), implementation ); } /** * Returns a table that maps, the Pegasus style keys to the names of implementing * classes. * * @return a Map indexed by Pegasus styles, and values as names of implementing * classes. 
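     * <p>
     * As registered below, the table holds two entries (rendered
     * illustratively): "seqexec" maps to "SeqExec" and "mpiexec" maps to
     * "MPIExec"; the lower-cased short name is the lookup key that
     * loadInstance(Job) uses.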
*/ private static Map implementingClassNameTable(){ if( mImplementingClassNameTable == null ){ mImplementingClassNameTable = new HashMap(3); mImplementingClassNameTable.put( JobAggregatorFactory.SEQ_EXEC_CLASS.toLowerCase(), JobAggregatorFactory.SEQ_EXEC_CLASS); mImplementingClassNameTable.put( JobAggregatorFactory.MPI_EXEC_CLASS.toLowerCase(), JobAggregatorFactory.MPI_EXEC_CLASS); } return mImplementingClassNameTable; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/aggregator/SeqExec.java0000644000175000017500000002045311757531137027301 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.cluster.aggregator; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.code.gridstart.PegasusExitCode; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.classes.PegasusBag; /** * This class aggregates the smaller jobs in a manner such that * they are launched at remote end, sequentially on a single node using * seqexec. The executable seqexec is a Pegasus tool distributed in the Pegasus worker * package, and can be usually found at $PEGASUS_HOME/bin/seqexec. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4676 $ */ public class SeqExec extends Abstract { /** * The logical name of the transformation that is able to run multiple * jobs sequentially. */ public static final String COLLAPSE_LOGICAL_NAME = "seqexec"; /** * The basename of the pegasus cluster executable. */ public static final String EXECUTABLE_BASENAME = "pegasus-cluster"; /** * The suffix to be applied to seqexec progress report file. */ public static final String SEQEXEC_PROGRESS_REPORT_SUFFIX = ".prg"; /** * Flag indicating whether a global log file or per job file. */ private boolean mGlobalLog; /** * Flag indicating whether to fail on first hard error or not. */ private boolean mFailOnFirstError; /** * Flag to indicate whether to log progress or not. */ private boolean mLogProgress; /** * The default constructor. */ public SeqExec(){ super(); } /** *Initializes the JobAggregator impelementation * * @param dag the workflow that is being clustered. * @param bag the bag of objects that is useful for initialization. * */ public void initialize( ADag dag , PegasusBag bag ){ super.initialize( dag, bag ); mGlobalLog = mProps.logJobAggregatorProgressToGlobal(); mLogProgress = mProps.logJobAggregatorProgress(); //set abort of first job failure this.setAbortOnFirstJobFailure( mProps.abortOnFirstJobFailure() ); } /** * Enables the abstract clustered job for execution and converts it to it's * executable form. 
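* <p>
* (As a sketch with a hypothetical job name: the resulting cluster is launched
* as {@code pegasus-cluster -f -R merged_ID1.prg}, where the optional flags
* are assembled by {@link #aggregatedJobArguments(AggregatedJob)}.)
* <p>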
Also associates the post script that should be invoked * for the AggregatedJob * * @param job the abstract clustered job */ public void makeAbstractAggregatedJobConcrete( AggregatedJob job ){ super.makeAbstractAggregatedJobConcrete(job); Job firstJob = (Job)job.getConstituentJob( 0 ); StringBuffer message = new StringBuffer(); message.append( " POSTScript for merged job " ). append( job.getName() ).append( " " ); //should we tinker with the postscript for this job if( job.dagmanVariables.containsKey( Dagman.POST_SCRIPT_KEY ) ){ //no merged job has been set to have a specific post script //no tinkering } else{ //we need to tinker //gridstart is always populated String gridstart = (String) firstJob.vdsNS.get(Pegasus.GRIDSTART_KEY); if (gridstart.equalsIgnoreCase( GridStartFactory. GRIDSTART_SHORT_NAMES[ GridStartFactory.KICKSTART_INDEX]) || gridstart.equalsIgnoreCase( GridStartFactory. GRIDSTART_SHORT_NAMES[ GridStartFactory.SEQEXEC_INDEX]) ) { //ensure $PEGASUS_HOME/bin/exitpost is invoked //as the baby jobs are being invoked by kickstart job.dagmanVariables.construct( Dagman.POST_SCRIPT_KEY, PegasusExitCode.SHORT_NAME ); } } message.append( job.dagmanVariables.get( Dagman.POST_SCRIPT_KEY ) ); mLogger.log( message.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); return ; } /** * Returns the logical name of the transformation that is used to * collapse the jobs. * * @return the the logical name of the collapser executable. * @see #COLLAPSE_LOGICAL_NAME */ public String getClusterExecutableLFN(){ return COLLAPSE_LOGICAL_NAME; } /** * Returns the executable basename of the clustering executable used. * * @return the executable basename. * @see #EXECUTABLE_BASENAME */ public String getClusterExecutableBasename(){ return SeqExec.EXECUTABLE_BASENAME; } /** * Determines whether there is NOT an entry in the transformation catalog * for the job aggregator executable on a particular site. * * @param site the site at which existence check is required. * * @return boolean true if an entry does not exists, false otherwise. */ public boolean entryNotInTC(String site) { return this.entryNotInTC( SeqExec.TRANSFORMATION_NAMESPACE, SeqExec.COLLAPSE_LOGICAL_NAME, SeqExec.TRANSFORMATION_VERSION, this.getClusterExecutableBasename(), site); } /** * Returns the arguments with which the AggregatedJob * needs to be invoked with. * * @param job the AggregatedJob for which the arguments have * to be constructed. * * @return argument string */ public String aggregatedJobArguments( AggregatedJob job ){ StringBuffer arguments = new StringBuffer(); //do we need to fail hard on first error if( this.abortOnFristJobFailure()){ arguments.append( " -f " ); } //track the progress of the seqexec job //if specified in properties if( mLogProgress ){ arguments.append( " -R ").append( logFile(job) ); } return arguments.toString(); } /** * Setter method to indicate , failure on first consitutent job should * result in the abort of the whole aggregated job. Ignores any value * passed, as MPIExec does not handle it for time being. * * @param fail indicates whether to abort or not . */ public void setAbortOnFirstJobFailure( boolean fail){ mFailOnFirstError = fail; } /** * Returns a boolean indicating whether to fail the aggregated job on * detecting the first failure during execution of constituent jobs. * * @return boolean indicating whether to fail or not. */ public boolean abortOnFristJobFailure(){ return mFailOnFirstError; } /** * Returns the name of the log file to used on the remote site, for the * seqexec job. 
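* <p>
* (For a hypothetical workflow labelled {@code diamond} and a clustered job
* {@code merge_ID1}, this yields {@code diamond.prg} in global-log mode and
* {@code merge_ID1.prg} otherwise.)
* <p>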
Depending upon the property settings, either assigns a * common * * * @param job the AggregatedJob * * @return the path to the log file. */ protected String logFile( AggregatedJob job ){ StringBuffer sb = new StringBuffer( 32 ); if ( mGlobalLog ){ //the basename of the log file is derived from the dag name sb.append( this.mClusteredADag.dagInfo.getLabel() ); } else{ //per seqexec job name sb.append( job.getName() ); } sb.append( this.SEQEXEC_PROGRESS_REPORT_SUFFIX); return sb.toString(); } } ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/aggregator/JobAggregatorFactoryException.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/aggregator/JobAggregatorFactoryException.0000644000175000017500000000656211757531137033033 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.cluster.aggregator; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating JobAggregator implementations. * * @author Karan Vahi * @version $Revision: 2553 $ */ public class JobAggregatorFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Job Aggregator"; /** * Constructs a JobAggregatorFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public JobAggregatorFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a JobAggregatorFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public JobAggregatorFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a JobAggregatorFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public JobAggregatorFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a JobAggregatorFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). 
A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public JobAggregatorFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/aggregator/MPIExec.java0000644000175000017500000001726611757531137027206 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.cluster.aggregator; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.PegasusBag; /** * This class aggregates the smaller jobs in a manner such that * they are launched at remote end, by mpiexec on n nodes where n is the nodecount * associated with the aggregated job that is being lauched by mpiexec. * The executable mpiexec is a Pegasus tool distributed in the Pegasus worker package, and * can be usually found at $PEGASUS_HOME/bin/mpiexec. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4546 $ */ public class MPIExec extends Abstract { /** * The logical name of the transformation that is able to run multiple * jobs via mpi. */ public static final String COLLAPSE_LOGICAL_NAME = "mpiexec"; /** * The basename of the executable that is able to run multiple * jobs via mpi. */ public static String EXECUTABLE_BASENAME = "mpiexec"; /** * The default constructor. */ public MPIExec(){ super(); } /** *Initializes the JobAggregator impelementation * * @param dag the workflow that is being clustered. * @param bag the bag of objects that is useful for initialization. * * */ public void initialize( ADag dag , PegasusBag bag ){ super.initialize(dag, bag); } /** * Enables the abstract clustered job for execution and converts it to it's * executable form. Also associates the post script that should be invoked * for the AggregatedJob * * @param job the abstract clustered job */ public void makeAbstractAggregatedJobConcrete( AggregatedJob job ){ super.makeAbstractAggregatedJobConcrete(job); //also put in jobType as mpi job.globusRSL.checkKeyInNS("jobtype","mpi"); return; } /** * Constructs a new aggregated job that contains all the jobs passed to it. * The new aggregated job, appears as a single job in the workflow and * replaces the jobs it contains in the workflow. *
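* <p>
* (Note: in the active code path above, {@code makeAbstractAggregatedJobConcrete}
* marks the clustered job as an MPI job by setting the Globus RSL key
* {@code jobtype=mpi}.)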
* <p>
* The aggregated job is executed at a site, using mpiexec that * executes each of the smaller jobs in the aggregated job on n number of * nodes where n is the nodecount associated with the job. * All the sub jobs are in turn launched via kickstart if kickstart is * installed at the site where the job resides. * * @param jobs the list of Job objects that need to be * collapsed. All the jobs being collapsed should be scheduled * at the same pool, to maintain correct semantics. * @param name the logical name of the jobs in the list passed to this * function. * @param id the id that is given to the new job. * * * @return the AggregatedJob object corresponding to the aggregated * job containing the jobs passed as List in the input, * null if the list of jobs is empty */ /* public AggregatedJob construct(List jobs,String name, String id) { AggregatedJob mergedJob = super.construct(jobs,name,id); //also put in jobType as mpi mergedJob.globusRSL.checkKeyInNS("jobtype","mpi"); //ensure that AggregatedJob is invoked via NoGridStart mergedJob.vdsNS.construct( Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[ GridStartFactory.NO_GRIDSTART_INDEX] ); return mergedJob; } */ /** * Enables the constitutent jobs that make up a aggregated job. Makes sure * that they all are enabled via no kickstart * * @param mergedJob the clusteredJob * @param jobs the constitutent jobs * * @return AggregatedJob */ /* protected AggregatedJob enable( AggregatedJob mergedJob, List jobs ){ //we cannot invoke any of clustered jobs also via kickstart //as the output will be clobbered Job firstJob = (Job)jobs.get(0); SiteCatalogEntry site = mSiteStore.lookup( firstJob.getSiteHandle() ); firstJob.vdsNS.construct( Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[ GridStartFactory.NO_GRIDSTART_INDEX] ); //NEEDS TO BE FIXED AS CURRENTLY NO PLACEHOLDER FOR Kickstart //PATH IN THE NEW SITE CATALOG Karan July 10, 2008 GridStart gridStart = mGridStartFactory.loadGridStart( firstJob, site.getKickstartPath() ); return gridStart.enable( mergedJob, jobs ); } */ /** * Returns the logical name of the transformation that is used to * collapse the jobs. * * @return the the logical name of the collapser executable. * @see #COLLAPSE_LOGICAL_NAME */ public String getClusterExecutableLFN(){ return COLLAPSE_LOGICAL_NAME; } /** * Returns the executable basename of the clustering executable used. * * @return the executable basename. * @see #EXECUTABLE_BASENAME */ public String getClusterExecutableBasename(){ return MPIExec.EXECUTABLE_BASENAME; } /** * Determines whether there is NOT an entry in the transformation catalog * for the job aggregator executable on a particular site. * * @param site the site at which existence check is required. * * @return boolean true if an entry does not exists, false otherwise. */ public boolean entryNotInTC(String site) { return this.entryNotInTC( MPIExec.TRANSFORMATION_NAMESPACE, MPIExec.COLLAPSE_LOGICAL_NAME, MPIExec.TRANSFORMATION_VERSION, this.getClusterExecutableBasename(), site); } /** * Returns the arguments with which the AggregatedJob * needs to be invoked with. At present any empty argument string is * returned. * * @param job the AggregatedJob for which the arguments have * to be constructed. * * @return argument string */ public String aggregatedJobArguments( AggregatedJob job ){ return ""; } /** * Setter method to indicate , failure on first consitutent job should * result in the abort of the whole aggregated job. 
Ignores any value * passed, as MPIExec does not handle it for time being. * * @param fail indicates whether to abort or not . */ public void setAbortOnFirstJobFailure( boolean fail){ } /** * Returns a boolean indicating whether to fail the aggregated job on * detecting the first failure during execution of constituent jobs. * * @return boolean indicating whether to fail or not. */ public boolean abortOnFristJobFailure(){ return false; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/Vertical.java0000644000175000017500000001357711757531137025404 0ustar ryngerynge/** * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.cluster; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.partitioner.Topological; import edu.isi.pegasus.planner.partitioner.Partition; import edu.isi.pegasus.planner.partitioner.graph.Bag; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.partitioner.graph.LabelBag; import java.util.List; import java.util.Set; import java.util.HashSet; import java.util.Iterator; /** * The vertical cluster, that extends the Default clusterer and topologically * sorts the partition before clustering the jobs into aggregated jobs. * * @author Karan Vahi * @version $Revision: 2755 $ */ public class Vertical extends Abstract { /** * A short description about the partitioner. */ public static final String DESCRIPTION = "Topological based Vertical Clustering"; /** * The default constructor. */ public Vertical(){ super(); } /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String description(){ return Vertical.DESCRIPTION; } /** * Returns the nodes in the partition as a List in the topologically sorted * order. * * @param p the partition whose nodes have to be ordered. * * @return an ordered List of String objects that are the ID's * of the nodes. * * @throws ClustererException in case of error. */ public List order( Partition p ) throws ClustererException{ try{ return new Topological(p).sort(); } catch( Exception e){ throw new ClustererException( "Unable to sort the partition " + p.getID(), e ); } } /** * Determine the input and output files of the job on the basis of the * order of the constituent jobs in the AggregatedJob. * The input and output files are determined on the basis of topologically * sorted order of the constituent jobs. * * @param job the AggregatedJob * * @throws ClustererException in case of error. 
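* <p>
* A worked example with hypothetical files, for two constituent jobs already
* in topologically sorted order:
* <pre>
*    J1 : input {a}    output {b}
*    J2 : input {b}    output {c}
* </pre>
* The clustered job is then assigned input {a} ({@code b} is materialized
* inside the cluster) and outputs {b, c}, i.e. all materialized files.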
*/ public void determineInputOutputFiles( AggregatedJob job ){ //set the input files to null for time being job.inputFiles = null; job.outputFiles = null; Set inputFiles = new HashSet(); Set materializedFiles = new HashSet(); PegasusFile file; //the constituent jobs are topologically sorted //traverse through them and build up the ip and op files for( Iterator it = job.constituentJobsIterator(); it.hasNext();){ Job cjob = (Job)it.next(); //traverse through input files of constituent job for( Iterator fileIt = cjob.getInputFiles().iterator(); fileIt.hasNext();){ file = (PegasusFile)fileIt.next(); //add to input files if it has not been materializd if( !materializedFiles.contains( file ) ){ inputFiles.add( file ); } } //traverse through output files of constituent job for( Iterator fileIt = cjob.getOutputFiles().iterator(); fileIt.hasNext();){ file = (PegasusFile)fileIt.next(); //add to materialized files materializedFiles.add( file ); // //file is output only if it has to be registered or transferred // if ( !file.getTransientRegFlag() || // file.getTransferFlag() != PegasusFile.TRANSFER_NOT ){ // outputFiles.add( file ); // } } } job.setInputFiles( inputFiles ); //all the materialized files are output files for //the aggregated job. job.setOutputFiles( materializedFiles ); } /** * Returns null as for label based clustering we dont want the transformation * name to be considered for constructing the name of the clustered jobs * * @param jobs List of jobs * * @return name */ protected String getLogicalNameForJobs( List jobs ){ return null; } /** * Returns the ID for the clustered job corresponding to a partition. * * @param partition the partition. * * @return the ID of the clustered job */ protected String constructClusteredJobID( Partition partition ){ StringBuffer id = new StringBuffer(); //get the label key from the last added job GraphNode gn = partition.lastAddedNode(); Bag b = gn.getBag(); if( b instanceof LabelBag ){ LabelBag bag = ( LabelBag )b; String label = (String) bag.get( LabelBag.LABEL_KEY ); if( label == null ){ //add the partition id id.append( partition.getID() ); } else{ //add the label id.append( label ); } } else{ throw new RuntimeException( "Wrong type of bag associated with node " + gn ); } return id.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/ClustererFactory.java0000644000175000017500000002065711757531137027130 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.cluster; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.partitioner.Partitioner; import edu.isi.pegasus.planner.partitioner.PartitionerFactory; import edu.isi.pegasus.planner.partitioner.PartitionerFactoryException; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.common.util.DynamicLoader; import java.util.Map; import java.util.HashMap; /** * A factory class to load the appropriate Partitioner, and Clusterer Callback * for clustering. An abstract factory, as it loads the appropriate partitioner * matching a clustering technique. * * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2576 $ */ public class ClustererFactory { /** * The default package where all the implementations reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.cluster"; /** * The name of the class implementing horizontal clustering. */ public static final String HORIZONTAL_CLUSTERING_CLASS = "Horizontal"; /** * The name of the class implementing vertical clustering. */ public static final String VERTICAL_CLUSTERING_CLASS = "Vertical"; /** * The type corresponding to label based clustering. */ private static final String LABEL_CLUSTERING_TYPE = "label"; /** * The table that maps clustering technique to a partitioner. */ private static Map mPartitionerTable; /** * The table that maps a clustering technique to a clustering impelemntation. */ private static Map mClustererTable; /** * Loads the appropriate partitioner on the basis of the clustering type * specified in the options passed to the planner. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param type type of clustering to be used. * @param root the dummy root node of the graph. * @param graph the map containing all the nodes of the graph keyed by * the logical id of the nodes. * * @return the instance of the appropriate partitioner. * * @throws ClustererFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static Partitioner loadPartitioner(PegasusProperties properties, String type, GraphNode root, Map graph ) throws ClustererFactoryException{ String clusterer = type; //sanity check if( clusterer == null ){ throw new ClustererFactoryException( "No Clustering Technique Specified "); } //try to find the appropriate partitioner Object partitionerClass = partitionerTable().get( clusterer ); if ( partitionerClass == null ){ throw new ClustererFactoryException( "No matching partitioner found for clustering technique " + clusterer ); } //now load the partitioner Partitioner partitioner = null; try{ partitioner = PartitionerFactory.loadInstance( properties, root, graph, (String) partitionerClass ); } catch ( PartitionerFactoryException e ){ throw new ClustererFactoryException( " Unable to instantiate partitioner " + partitionerClass, e ); } return partitioner; } /** * Loads the appropriate clusterer on the basis of the clustering type * specified in the options passed to the planner. * * @param dag the workflow being clustered. * @param bag the bag of initialization objects. * @param type type of clustering to be used. * * @return the instance of the appropriate clusterer. 
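* <p>
* A minimal usage sketch (the type strings are the lower-case keys of the
* clusterer table, e.g. {@code "horizontal"}, {@code "vertical"} or
* {@code "label"}):
* <pre>
*    Clusterer clusterer = ClustererFactory.loadClusterer( dag, bag, "horizontal" );
* </pre>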
* * @throws ClustererFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static Clusterer loadClusterer( ADag dag, PegasusBag bag, String type ) throws ClustererFactoryException{ //sanity check if( type == null ){ throw new ClustererFactoryException( "No Clustering Technique Specified "); } //try to find the appropriate clusterer Object clustererClass = clustererTable().get( type ); if ( clustererClass == null ){ throw new ClustererFactoryException( "No matching clusterer found for clustering technique " + type ); } //now load the clusterer Clusterer clusterer = null; String className = ( String )clustererClass; try{ //prepend the package name if required className = ( className.indexOf('.') == -1 )? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className: //load directly className; //try loading the class dynamically DynamicLoader dl = new DynamicLoader( className ); clusterer = (Clusterer) dl.instantiate( new Object[0] ); clusterer.initialize( dag, bag ); } catch ( Exception e ){ throw new ClustererFactoryException( " Unable to instantiate partitioner ", className, e ); } return clusterer; } /** * Returns a table that maps, the clustering technique to an appropriate * class implementing that clustering technique. * * @return a Map indexed by clustering styles, and values as corresponding * implementing Clustering classes. */ private static Map clustererTable(){ if( mClustererTable == null ){ mClustererTable = new HashMap(3); mClustererTable.put( HORIZONTAL_CLUSTERING_CLASS.toLowerCase(), HORIZONTAL_CLUSTERING_CLASS ); mClustererTable.put( VERTICAL_CLUSTERING_CLASS.toLowerCase(), VERTICAL_CLUSTERING_CLASS ); mClustererTable.put( LABEL_CLUSTERING_TYPE.toLowerCase(), VERTICAL_CLUSTERING_CLASS ); } return mClustererTable; } /** * Returns a table that maps, the clustering technique to an appropriate * partitioning technique. * * @return a Map indexed by clustering styles, and values as corresponding * Partitioners. */ private static Map partitionerTable(){ if( mPartitionerTable == null ){ mPartitionerTable = new HashMap(3); mPartitionerTable.put( HORIZONTAL_CLUSTERING_CLASS.toLowerCase(), PartitionerFactory.LEVEL_BASED_PARTITIONING_CLASS ); mPartitionerTable.put( VERTICAL_CLUSTERING_CLASS.toLowerCase(), PartitionerFactory.LABEL_BASED_PARTITIONING_CLASS ); mPartitionerTable.put( LABEL_CLUSTERING_TYPE.toLowerCase(), PartitionerFactory.LABEL_BASED_PARTITIONING_CLASS ); } return mPartitionerTable; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/cluster/JobAggregator.java0000644000175000017500000001157511757531137026344 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.cluster; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.AggregatedJob; import java.util.List; /** * The interface that dictates how the jobs are clumped together into one single * larger job. The interface does not dictate how the graph structure is * to be modified as a result of the clumping. That is handled outside of the * implementing class in NodeCollapser. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4546 $ */ public interface JobAggregator { /** * The version number associated with this API of Job Aggregator. */ public static final String VERSION = "1.5"; /** *Initializes the JobAggregator impelementation * * @param dag the workflow that is being clustered. * @param bag the bag of objects that is useful for initialization. * */ public void initialize( ADag dag , PegasusBag bag ); /** * Constructs a new aggregated job that contains all the jobs passed to it. * The new aggregated job, appears as a single job in the workflow and * replaces the jobs it contains in the workflow. * * @param jobs the list of SubInfo objects that need to be * collapsed. All the jobs being collapsed should be scheduled * at the same pool, to maintain correct semantics. * @param name the logical name of the jobs in the list passed to this * function. * @param id the id that is given to the new job. * * @return the SubInfo object corresponding to the aggregated * job containing the jobs passed as List in the input, * null if the list of jobs is empty */ // public AggregatedJob construct(List jobs,String name,String id); /** * Constructs an abstract aggregated job that has a handle to the appropriate * JobAggregator that will be used to aggregate the jobs. * * @param jobs the list of SubInfo objects that need to be * collapsed. All the jobs being collapsed should be scheduled * at the same pool, to maintain correct semantics. * @param name the logical name of the jobs in the list passed to this * function. * @param id the id that is given to the new job. * * @return the SubInfo object corresponding to the aggregated * job containing the jobs passed as List in the input, * null if the list of jobs is empty */ public AggregatedJob constructAbstractAggregatedJob(List jobs,String name,String id); /** * Enables the abstract clustered job for execution and converts it to it's * executable form * * @param job the abstract clustered job */ public void makeAbstractAggregatedJobConcrete( AggregatedJob job ); /** * Setter method to indicate , failure on first consitutent job should * result in the abort of the whole aggregated job. * * @param fail indicates whether to abort or not . */ public void setAbortOnFirstJobFailure( boolean fail); /** * Returns a boolean indicating whether to fail the aggregated job on * detecting the first failure during execution of constituent jobs. * * @return boolean indicating whether to fail or not. */ public boolean abortOnFristJobFailure(); /** * Determines whether there is NOT an entry in the transformation catalog * for the job aggregator executable on a particular site. * * @param site the site at which existence check is required. * * @return boolean true if an entry does not exists, false otherwise. */ public boolean entryNotInTC(String site); /** * Returns the logical name of the transformation that is used to * collapse the jobs. * * @return the the logical name of the collapser executable. 
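* <p>
* (The bundled implementations return {@code "seqexec"} and {@code "mpiexec"}
* respectively.)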
*/ public String getClusterExecutableLFN(); /** * Returns the executable basename of the clustering executable used. * * @return the executable basename. */ public String getClusterExecutableBasename(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/0000755000175000017500000000000011757531667022737 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/PCRelation.java0000644000175000017500000001535611757531137025604 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import java.io.Writer; import java.io.StringWriter; import java.io.IOException; /** * Captures the parent child relationship between the jobs in the ADAG. * * @author Karan Vahi * @version $Revision: 4003 $ */ public class PCRelation extends Data /*implements Comparable*/{ /** * the parent making up the * parent child relationship pair * in a dag */ public String parent; /** * the child making up the * parent child relationship pair * in a dag */ public String child; /** * this is used for collapsing the dag * during the reduction algorithm * on the basis of the results returned * from the Replica Catalog. */ public boolean isDeleted; /** * The abstract id for the parent node. required for stampede events. */ private String mAbstractParentID; /** * The abstract id for the child node. required for stampede events. */ private String mAbstractChildID; /** * the default constructor */ public PCRelation(){ parent = new String(); child = new String(); mAbstractParentID = new String(); mAbstractChildID = new String(); isDeleted = false; } /** * the overloaded constructor */ public PCRelation(String parentName,String childName,boolean deleted){ this(); parent = new String(parentName); child = new String(childName); isDeleted = deleted; } /** * the overloaded constructor */ public PCRelation(String parentName,String childName){ this(); parent = new String(parentName); child = new String(childName); isDeleted = false; } /** * Returns the parent in the edge. * * @return parent */ public String getParent(){ return parent; } /** * Returns the child in the edge. * * @return child */ public String getChild(){ return child; } /** * Sets the abstract parent id associated with the edge. * * @param id the abstract id */ public void setAbstractParentID( String id){ mAbstractParentID = id; } /** * Sets the abstract child id associated with the edge. * * @param id the abstract id */ public void setAbstractChildID( String id){ mAbstractChildID = id; } /** * Returns the abstract parent id associated with the edge. * * @return parent */ public String getAbstractParentID(){ return mAbstractParentID; } /** * Returns the abstract child id associated with the edge. 
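* (The abstract ids tie an executable-workflow edge back to the originating
* DAX tasks; they are required when generating stampede events.)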
* * @return child */ public String getAbstractChildID(){ return mAbstractChildID; } /** * returns a new copy of the * Object */ public Object clone(){ PCRelation pc = new PCRelation(); pc.parent = this.parent; pc.child = this.child; pc.isDeleted = this.isDeleted; pc.mAbstractChildID = this.mAbstractChildID; pc.mAbstractParentID = this.mAbstractParentID; return pc; } /** * Checks if an object is similar to the one referred to by this class. * We compare the primary key to determine if it is the same or not. * * @return true if the primary key (parent,child) match. * else false. */ public boolean equals(Object o){ PCRelation rel = (PCRelation)o; return (rel.parent.equals(this.parent) && rel.child.equals(this.child))? true: false; } public int compareTo(Object o){ return (this.equals(o))?0: 1; } /** * Returns the textual description. * * @return textual description. */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( "{" ).append( parent ).append( " [" ).append( this.getAbstractParentID()). append( "]" ).append( " -> " ).append( child ).append( " [" ).append( this.getAbstractChildID()). append( "]" ).append( "," ).append( this.isDeleted ).append( "}" ); return sb.toString(); } /** * Returns the DOT description of the object. This is used for visualizing * the workflow. * * @return String containing the Partition object in XML. * * @exception IOException if something fishy happens to the stream. */ public String toDOT() throws IOException{ Writer writer = new StringWriter(32); toDOT( writer, "" ); return writer.toString(); } /** * Returns the DOT description of the object. This is used for visualizing * the workflow. * * @param stream is a stream opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty * string. The parameter is used internally for the recursive * traversal. * * * @exception IOException if something fishy happens to the stream. */ public void toDOT( Writer stream, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); //write out the edge stream.write( indent ); stream.write( "\"" ); stream.write( getParent()); stream.write( "\"" ); stream.write( " -> "); stream.write( "\"" ); stream.write( getChild() ); stream.write( "\"" ); stream.write( newLine ); stream.flush(); } /** * Sets the child of the edge. * * @param childNode the child node */ public void setChild(String childNode) { this.child = childNode; } /** * Sets the parent of the edge. * * @param parentNode the parent node */ public void setParent(String parentNode) { this.parent = parentNode; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/ReplicaLocation.java0000644000175000017500000002111411757531137026641 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import edu.isi.pegasus.planner.dax.PFN; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.ArrayList; /** * A Data Class that associates a LFN with the PFN's. Attributes associated * with the LFN go here. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2630 $ * * @see org.griphyn.common.catalog.ReplicaCatalogEntry */ public class ReplicaLocation extends Data implements Cloneable{ /** * The site name that is associated in the case the resource handle is not * specified with the PFN. */ public static final String UNDEFINED_SITE_NAME = "UNDEFINED_SITE"; /** * The LFN associated with the entry. */ private String mLFN; /** * A list of ReplicaCatalogEntry objects containing the PFN's * and associated attributes. */ private List mPFNList; /** * Default constructor. */ public ReplicaLocation(){ mLFN = new String(); mPFNList = new ArrayList(); } /** * Overloaded constructor. * Intializes the member variables to the values passed. * * @param lfn the logical filename. * @param pfns the list of ReplicaCatalogEntry objects. */ public ReplicaLocation( String lfn , List pfns ){ mLFN = lfn; mPFNList = pfns; //sanitize pfns. add a default resource handle if not specified sanitize( mPFNList ); } /** * Overloaded constructor. * Intializes the member variables to the values passed. * * @param lfn the logical filename. * @param pfns the list of ReplicaCatalogEntry objects. */ public ReplicaLocation( String lfn , Collection pfns ){ mLFN = lfn; //create a separate list only if required mPFNList = ( pfns instanceof List )? (List)pfns: //create a new list from the collection new ArrayList( pfns ) ; //sanitize pfns. add a default resource handle if not specified sanitize( mPFNList ); } /** * Adds a PFN specified in the DAX to the object * * @param pfn the PFN */ public void addPFN(PFN pfn) { ReplicaCatalogEntry rce = new ReplicaCatalogEntry( ); rce.setPFN( pfn.getURL() ); rce.setResourceHandle( pfn.getSite() ); this.mPFNList.add( rce ); } /** * Add a PFN and it's attributes. Any existing * mapping with the same PFN will be replaced, including all its * attributes. * * @param tuple the ReplicaCatalogEntry object containing the * PFN and the attributes. */ public void addPFN( ReplicaCatalogEntry tuple ){ boolean seen = false; String pfn = tuple.getPFN(); sanitize( tuple ); //traverse through the existing PFN's to check for the //same pfn for ( Iterator i= this.pfnIterator(); i.hasNext() && ! seen; ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) i.next(); if ( (seen = pfn.equals(rce.getPFN())) ) { try { i.remove(); } catch ( UnsupportedOperationException uoe ) { //ignore for time being } } } this.mPFNList.add( tuple ); } /** * Add a PFN and it's attributes. * * @param tuples the List object of ReplicaCatalogEntry * objects, each containing the PFN and the attributes. */ protected void addPFNs( List tuples ){ for( Iterator it = tuples.iterator(); it.hasNext(); ){ addPFN( (ReplicaCatalogEntry)it.next() ); } } /** * Sets the LFN. * * @param lfn the lfn. */ public void setLFN( String lfn ){ this.mLFN = lfn; } /** * Returns the associated LFN. * * @return lfn */ public String getLFN( ){ return this.mLFN; } /** * Return a PFN as a ReplicaCatalogEntry * * @param index the pfn location. * * @return the element at the specified position in this list. 
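* (The returned {@code ReplicaCatalogEntry} is the live object held in this
* collection, not a defensive copy.)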
* * @throws IndexOutOfBoundsException - if the index is out of range (index < 0 || index >= size()). */ public ReplicaCatalogEntry getPFN( int index ){ return (ReplicaCatalogEntry)this.mPFNList.get( index ); } /** * Returns the list of pfn's as ReplicaCatalogEntry objects. * * @return List */ public List getPFNList( ){ return this.mPFNList; } /** * Returns an iterator to the list of ReplicaCatalogEntry * objects. * * @return Iterator. */ public Iterator pfnIterator(){ return this.mPFNList.iterator(); } /** * Returns the number of pfn's associated with the lfn. * * @return int */ public int getPFNCount(){ return this.mPFNList.size(); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ ReplicaLocation rc; try{ rc = ( ReplicaLocation ) super.clone(); } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } rc.mPFNList = new ArrayList(); rc.setLFN( this.mLFN ); //add all the RCE's for( Iterator it = this.pfnIterator(); it.hasNext(); ){ //creating a shallow clone here. rc.addPFN( ( ReplicaCatalogEntry )it.next() ); } //clone is not implemented fully. //throw new RuntimeException( "Clone not implemented for " + this.getClass().getName() ); return rc; } /** * Merges the ReplicaLocation object to the existing one, * only if the logical filenames match. * * @param location is another ReplicaLocations to merge with. * * @return true if a merge was successful, false if the LFNs did not * match. */ public boolean merge(ReplicaLocation location){ String lfn1 = this.getLFN(); String lfn2 = (location == null)? null : location.getLFN(); boolean result = (lfn1 == null && lfn2 == null || lfn1 != null && lfn2 != null && lfn1.equals(lfn2)); // only merge if PFN match if (result) { this.addPFNs( location.getPFNList() ); } return result; } /** * Returns the textual description of the data class. * * @return the textual description. */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( mLFN ).append( " -> {"); for( Iterator it = this.pfnIterator(); it.hasNext(); ){ sb.append( it.next() ); sb.append( "," ); } sb.append( "}" ); return sb.toString(); } /** * Sanitizes a tuple list . Sets the resource handle to a default value if not * specified. * * @param tuples the tuple to be sanitized. */ private void sanitize( List tuples ){ for( Iterator it = tuples.iterator(); it.hasNext(); ){ this.sanitize( (ReplicaCatalogEntry)it.next() ); } } /** * Sanitizes a tuple . Sets the resource handle to a default value if not * specified. * * @param tuple the tuple to be sanitized. */ private void sanitize( ReplicaCatalogEntry tuple ){ //sanity check if( tuple.getResourceHandle() == null ){ tuple.setResourceHandle( this.UNDEFINED_SITE_NAME ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/WorkflowMetrics.java0000644000175000017500000002424211757531137026737 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; /** * A Workflow metrics class that stores the metrics about the workflow. * * @author Karan Vahi * @version $Revision: 4720 $ */ public class WorkflowMetrics extends Data { /** * The total number of jobs in the executable workflow. */ private int mNumTotalJobs; /** * The number of compute jobs. */ private int mNumComputeJobs; /** * The number of clustered compute jobs. */ private int mNumClusteredJobs; /** * The number of stage in transfer jobs. */ private int mNumSITxJobs; /** * The number of stage-out transfer jobs. */ private int mNumSOTxJobs; /** * The number of inter-site transfer jobs. */ private int mNumInterTxJobs; /** * The number of registration jobs. */ private int mNumRegJobs; /** * The number of cleanup jobs. */ private int mNumCleanupJobs; /** * The number of create dir jobs. */ private int mNumCreateDirJobs; /** * The number of dax jobs in the workflow */ private int mNumDAXJobs; /** * The number of DAG jobs in the workflow */ private int mNumDAGJobs; /* * The number of chmod jobs in the workflow */ private int mNumChmodJobs; /** * The number of compute tasks in the DAX */ private int mNumComputeTasks; /** * The number of DAX tasks in the DAX */ private int mNumDAXTasks; /** * The number of DAG tasks in the DAX. */ private int mNumDAGTasks; /** * The label of the dax. */ private String mDAXLabel; /** * The default constructor. */ public WorkflowMetrics() { reset(); } /** * Resets the internal counters to zero. */ public void reset(){ mNumTotalJobs = 0; mNumComputeJobs = 0; mNumSITxJobs = 0; mNumSOTxJobs = 0; mNumInterTxJobs = 0; mNumRegJobs = 0; mNumCleanupJobs = 0; mNumCreateDirJobs = 0; mNumClusteredJobs = 0; mNumChmodJobs = 0; mNumDAXJobs = 0; mNumDAGJobs = 0; mNumComputeTasks = 0; mNumDAXTasks = 0; mNumDAGTasks = 0; } /** * Sets the DAXlabel. * * @param label the dax label */ public void setLabel( String label ){ mDAXLabel = label; } /** * Returns the DAXlabel. * * @return the dax label */ public String getLabel( ){ return mDAXLabel; } /** * Increment the metrics when on the basis of type of job. * * @param job the job being added. */ public void increment( Job job ){ //sanity check if( job == null ){ return; } //increment the total mNumTotalJobs++; //increment on basis of type of job int type = job.getJobType(); switch( type ){ //treating compute and staged compute as same case Job.COMPUTE_JOB: if( job instanceof AggregatedJob ){ mNumClusteredJobs++; }else{ mNumComputeJobs++; mNumComputeTasks++; } break; case Job.DAX_JOB: mNumDAXJobs++; mNumDAXTasks++; break; case Job.DAG_JOB: mNumDAGJobs++; mNumDAGTasks++; break; case Job.STAGE_IN_JOB: case Job.STAGE_IN_WORKER_PACKAGE_JOB: mNumSITxJobs++; break; case Job.STAGE_OUT_JOB: mNumSOTxJobs++; break; case Job.INTER_POOL_JOB: mNumInterTxJobs++; break; case Job.REPLICA_REG_JOB: mNumRegJobs++; break; case Job.CLEANUP_JOB: mNumCleanupJobs++; break; case Job.CREATE_DIR_JOB: mNumCreateDirJobs++; break; case Job.CHMOD_JOB: mNumChmodJobs++; break; default: throw new RuntimeException( "Unknown or Unassigned job " + job.getID() + " of type " + type ); } } /** * Decrement the metrics when on the basis of type of job. * Does not decrement the task related metrics. * * @param job the job being removed. 
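* (Task counts describe the abstract DAX rather than the executable workflow,
* which is presumably why they are not decremented here.)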
*/ public void decrement( Job job ){ //sanity check if( job == null ){ return; } //increment the total mNumTotalJobs--; //increment on basis of type of job int type = job.getJobType(); switch( type ){ //treating compute and staged compute as same case Job.COMPUTE_JOB: if( job instanceof AggregatedJob ){ mNumClusteredJobs--; } else{ mNumComputeJobs--; } break; case Job.DAX_JOB: mNumDAXJobs--; break; case Job.DAG_JOB: mNumDAGJobs--; break; case Job.STAGE_IN_JOB: case Job.STAGE_IN_WORKER_PACKAGE_JOB: mNumSITxJobs--; break; case Job.STAGE_OUT_JOB: mNumSOTxJobs--; break; case Job.INTER_POOL_JOB: mNumInterTxJobs--; break; case Job.REPLICA_REG_JOB: mNumRegJobs--; break; case Job.CLEANUP_JOB: mNumCleanupJobs--; break; case Job.CREATE_DIR_JOB: mNumCreateDirJobs--; break; case Job.CHMOD_JOB: mNumChmodJobs--; break; default: throw new RuntimeException( "Unknown or Unassigned job " + job.getID() + " of type " + type ); } } /** * Returns a textual description of the object. * * @return Object */ public String toString(){ StringBuffer sb = new StringBuffer(); append( sb, "dax-label", this.mDAXLabel ); //dax task related metrics append( sb, "compute-tasks.count", this.mNumComputeTasks ); append( sb, "dax-tasks.count", this.mNumDAXTasks ); append( sb, "dag-tasks.count", this.mNumDAGTasks ); append( sb, "total-tasks.count", this.mNumComputeTasks + this.mNumDAGTasks + this.mNumDAXTasks ); //job related metrics append( sb, "createdir-jobs.count", this.mNumCreateDirJobs ); append( sb, "chmod-jobs.count", this.mNumChmodJobs ); append( sb, "unclustered-compute-jobs.count", this.mNumComputeJobs ); append( sb, "clustered-compute-jobs.count", this.mNumClusteredJobs ); append( sb, "dax-jobs.count", this.mNumDAXJobs ); append( sb, "dag-jobs.count", this.mNumDAGJobs ); append( sb, "si-jobs.count", this.mNumSITxJobs ); append( sb, "so-jobs.count", this.mNumSOTxJobs ); append( sb, "inter-jobs.count", this.mNumInterTxJobs ); append( sb, "reg-jobs.count", this.mNumRegJobs ); append( sb, "cleanup-jobs.count", this.mNumCleanupJobs ); append( sb, "total-jobs.count", this.mNumTotalJobs ); return sb.toString(); } /** * Appends a key=value pair to the StringBuffer. * * @param buffer the StringBuffer that is to be appended to. * @param key the key. * @param value the value. */ protected void append( StringBuffer buffer, String key, String value ){ buffer.append( key ).append( " = " ).append( value ).append( "\n" ); } /** * Appends a key=value pair to the StringBuffer. * * @param buffer the StringBuffer that is to be appended to. * @param key the key. * @param value the value. */ protected void append( StringBuffer buffer, String key, int value ){ buffer.append( key ).append( " = " ).append( value ).append( "\n" ); } /** * Returns the clone of the object. 
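* <p>
* (Note: the field-by-field copy below assigns {@code this.mNumDAGJobs} to
* {@code wm.mNumDAXJobs}, which looks like a copy-and-paste slip worth
* double-checking if cloned DAX-job counts matter.)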
* * @return the clone */ public Object clone(){ WorkflowMetrics wm; try { wm = (WorkflowMetrics)super.clone(); } catch (CloneNotSupportedException e) { //somewhere in the hierarch chain clone is not implemented throw new RuntimeException( "Clone not implemented in the base class of " + this.getClass().getName(), e); } wm.mNumCleanupJobs = this.mNumCleanupJobs; wm.mNumComputeJobs = this.mNumComputeJobs; wm.mNumInterTxJobs = this.mNumInterTxJobs; wm.mNumRegJobs = this.mNumRegJobs; wm.mNumSITxJobs = this.mNumSITxJobs; wm.mNumSOTxJobs = this.mNumSOTxJobs; wm.mNumTotalJobs = this.mNumTotalJobs; wm.mDAXLabel = this.mDAXLabel; wm.mNumCreateDirJobs = this.mNumCreateDirJobs; wm.mNumClusteredJobs = this.mNumClusteredJobs; wm.mNumChmodJobs = this.mNumChmodJobs; wm.mNumDAXJobs = this.mNumDAGJobs; wm.mNumDAGJobs = this.mNumDAGJobs; wm.mNumComputeTasks = this.mNumComputeTasks; wm.mNumDAXTasks = this.mNumDAXTasks; wm.mNumDAGTasks = this.mNumDAGTasks; return wm; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/TCMap.java0000644000175000017500000002141111757531137024535 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; /** * This is a data class to store the TCMAP for a particular dag. * This data class is populated and maintained in the TCMapper and is queried * from the Interpool Engine and site selectors. * * The TCMAP is a hashmap which maps an lfn to a Map which contains keys as * siteids and values as List of TransformationCatalogEntry objects * * TCMAP= lfn1 ---> MAP1 * lfn2 ---> MAP2 * * * MAP1 = site1 ---> List1 * site2 ---> List2 * * List1 = TCE1 * TCE2 * TCEn * * * @author Gaurang Mehta * @version $Revision: 2572 $ */ public class TCMap { /** * The TCMap for a dag is stored in this HashMap. */ private Map mTCMap; private LogManager mLogger; /** * Default constructor. Initializes the tcmap to 10 lfns. */ public TCMap() { mLogger = LogManagerFactory.loadSingletonInstance(); mTCMap = new HashMap( 10 ); } /** * Returns a HashMap of sites as keys and a List of TransformationCatalogEntry object as values. * @param fqlfn String The fully qualified logical transformation name for which you want the map. * @return Map Returns NULL if the transformation does not exist in the map. * @see org.griphyn.common.catalog.TransformationCatalogEntry */ public Map getSiteMap( String fqlfn ) { return mTCMap.containsKey( fqlfn ) ? 
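//lfn is known: hand back its site -> List of TransformationCatalogEntry map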
( Map ) mTCMap.get( fqlfn ) : null; } /** * This method allows to associate a site map with a particular logical transformation * @param fqlfn String The transformation for which the sitemap is to be stored * @param sitemap Map The sitemap that is to be stored. It is a hashmap with key * as the siteid and value as a list of TranformationCatalogEntry objects * @return boolean * @see org.griphyn.common.catalog.TransformationCatalogEntry */ public boolean setSiteMap( String fqlfn, Map sitemap ) { mTCMap.put( fqlfn, sitemap ); return true; } /** * Returns a List of siteid's that are valid for a particular lfn. * @param fqlfn String * @return List */ public List getSiteList( String fqlfn ) { List results = null; if ( mTCMap.containsKey( fqlfn ) ) { return new ArrayList( ( ( Map ) mTCMap.get( fqlfn ) ).keySet() ); } return results; } /** * Returns a list of siteid's that are valid for a particular lfn and * among a list of input sites * @param fqlfn The logical name of the transformation * @param sites The list of siteids * @return the list of siteids which are valid. */ public List getSiteList( String fqlfn, List sites ) { List results = new ArrayList() ; if ( mTCMap.containsKey( fqlfn ) ) { for ( Iterator i = (( Map ) mTCMap.get( fqlfn )).keySet().iterator(); i.hasNext(); ) { String site = ( String ) i.next(); if ( sites.contains( site ) ) { results.add( site ); } } } return results.isEmpty() ? null : results; } /** * This method returns a list of TransformationCatalogEntry objects * for a given transformation and siteid * @param fqlfn String The fully qualified logical name of the transformation * @param siteid String The siteid for which the Entries are required * @return List returns NULL if no entries exist. */ public List getSiteTCEntries( String fqlfn, String siteid ) { Map sitemap = null; List tcentries = null; if ( mTCMap.containsKey( fqlfn ) ) { sitemap = ( Map ) mTCMap.get( fqlfn ); if ( sitemap.containsKey( siteid ) ) { tcentries = ( List ) sitemap.get( siteid ); } else { mLogger.log( "The TCMap does not contain the site \"" + siteid + "\" for the transformation \"" + fqlfn + "\"", LogManager.DEBUG_MESSAGE_LEVEL ); } } else { mLogger.log( "The TCMap does not contain the transformation \"" + fqlfn + "\"", LogManager.DEBUG_MESSAGE_LEVEL ); } return tcentries; } /** * Retrieves all the entries matching a particular lfn for the sites * passed. * * @param fqlfn the fully qualified logical name * @param sites the list of siteID's for which the entries are required. * * @return a map indexed by site names. Each value is a collection of * TransformationCatalogEntry objects. Returns null in case * of no entry being found. */ public Map getSitesTCEntries( String fqlfn, List sites ) { Map m = this.getSiteMap( fqlfn ); Set siteIDS = new HashSet( sites ); String site = null; if ( m == null ) { return null; } Map result = new HashMap( siteIDS.size() ); for ( Iterator it = m.keySet().iterator(); it.hasNext(); ) { site = ( String ) it.next(); if ( siteIDS.contains( site ) ) { result.put( site, m.get( site ) ); } } //returning NULL only to maintain semantics //for rest of mapper operations. Gaurang //should change to return empty map return result.isEmpty() ? 
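//an empty map collapses to null, preserving the semantics noted above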
null : result; } /** * Adds a TransformationCatalogEntry object to the map for a particular * transformation and site. * @param fqlfn String The fully qualified logical transformation * @param siteid String The site for which the TransformationCatalogEntry is valid * @param entry TransformationCatalogEntry The TransformationCatalogEntry object to be added. * @return boolean always true */ public boolean setSiteTCEntries( String fqlfn, String siteid, TransformationCatalogEntry entry ) { Map sitemap = null; List tcentries = null; if ( mTCMap.containsKey( fqlfn ) ) { sitemap = ( Map ) mTCMap.get( fqlfn ); } else { sitemap = new HashMap( 10 ); setSiteMap( fqlfn, sitemap ); } if ( sitemap.containsKey( siteid ) ) { tcentries = ( List ) sitemap.get( siteid ); } else { tcentries = new ArrayList( 10 ); sitemap.put( siteid, tcentries ); } tcentries.add( entry ); return true; } /** * Returns the textual description of the contents of the object. * @return String */ public String toString() { StringBuffer sb = new StringBuffer(); for ( Iterator i = mTCMap.keySet().iterator(); i.hasNext(); ) { sb.append( toString( ( String ) i.next() ) ); } return sb.toString(); } /** * Returns a textual description of the object. * @param lfn String the logical transformation name. * @return the textual description. */ public String toString( String lfn ) { StringBuffer sb = new StringBuffer(); sb.append( "LFN = " + lfn + "\n" ); sb.append( "\tSite map\n" ); Map sitemap = ( Map ) mTCMap.get( lfn ); for ( Iterator j = sitemap.keySet().iterator(); j.hasNext(); ) { String site = ( String ) j.next(); sb.append( "\t\tSite=" + site + "\n" ); List tc = ( List ) sitemap.get( site ); for ( Iterator k = tc.iterator(); k.hasNext(); ) { TransformationCatalogEntry tcentry = ( TransformationCatalogEntry ) k.next(); sb.append( "\t\t\tPfn=" + tcentry.getPhysicalTransformation() + "\n" ); sb.append( "\t\t\tPfn site=" + tcentry.getResourceId() + "\n" ); } } return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/PegasusBag.java0000644000175000017500000003155511757531137025624 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.partitioner.graph.Bag; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.site.impl.old.PoolInfoProvider; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.transformation.Mapper; import java.util.Map; /** * A bag of objects that needs to be passed to various refiners. * It contains handles to the various catalogs, the properties and the * planner options.
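 * <p>
 * A minimal usage sketch (illustrative only; props and logger are hypothetical
 * instances constructed elsewhere):
 * <pre>
 *   PegasusBag bag = new PegasusBag();
 *   bag.add( PegasusBag.PEGASUS_PROPERTIES, props );
 *   bag.add( PegasusBag.PEGASUS_LOGMANAGER, logger );
 *   LogManager l = bag.getLogger();
 * </pre>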
* * @author Karan Vahi * @version $Revision: 4628 $ */ public class PegasusBag implements Bag { /** * Array storing the names of the objects that can be stored in the bag. */ public static final String PEGASUS_INFO[] = { "pegasus-properties", "planner-options", "replica-catalog", "site-catalog", "transformation-catalog", "transformation-mapper", "pegasus-logger", "site-store", "transient-rc", "worker-package-map" }; /** * The constant to be passed to the accessor functions to get or set the * PegasusProperties. */ public static final Integer PEGASUS_PROPERTIES = new Integer( 0 ); /** * The constant to be passed to the accessor functions to get or set the * options passed to the planner. */ public static final Integer PLANNER_OPTIONS = new Integer( 1 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the replica catalog. */ public static final Integer REPLICA_CATALOG = new Integer( 2 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the site catalog. */ public static final Integer SITE_CATALOG = new Integer( 3 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the transformation catalog. */ public static final Integer TRANSFORMATION_CATALOG = new Integer( 4 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the Transformation Mapper. */ public static final Integer TRANSFORMATION_MAPPER = new Integer( 5 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the Logging manager. */ public static final Integer PEGASUS_LOGMANAGER = new Integer( 6 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the Site Store. */ public static final Integer SITE_STORE = new Integer( 7 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the transient replica catalog. */ public static final Integer TRANSIENT_REPLICA_CATALOG = new Integer( 8 ); /** * The constant to be passed to the accessor functions to get or set the * handle to the worker package map. */ public static final Integer WORKER_PACKAGE_MAP = new Integer( 9 ); /** * The handle to the PegasusProperties. */ private PegasusProperties mProps; /** * The options passed to the planner. */ private PlannerOptions mPOptions; /** * The handle to the replica catalog. */ private ReplicaCatalog mRCHandle; /** * The handle to the site catalog. */ private PoolInfoProvider mSCHandle; /** * The handle to the transformation catalog. */ private TransformationCatalog mTCHandle; /** * The handle to the Transformation Mapper. */ private Mapper mTCMapper; /** * The handle to the LogManager. */ private LogManager mLogger; /** * The site store containing the sites that need to be used. */ private SiteStore mSiteStore; /** * The transient replica catalog that tracks the files created or transferred * during the workflow. */ private ReplicaCatalog mTransientRC; /** * Worker Package Map, that indexes execution site with the location of the * corresponding worker package in the submit directory. */ private Map mWorkerPackageMap; /** * The default constructor. */ public PegasusBag() { } /** * Adds an object to the underlying bag corresponding to a particular key. * * @param key the key with which the value has to be associated. * @param value the value to be associated with the key. * * @return boolean indicating if insertion was successful.
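 * <p>
 * For example (illustrative): {@code bag.add( PegasusBag.PLANNER_OPTIONS, options )}
 * returns true for a PlannerOptions instance, while a non null value of the
 * wrong type results in a RuntimeException.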
* */ public boolean add( Object key, Object value ) { //to denote if object is of valid type or not. boolean valid = true; int k = getIntValue( key ); switch ( k ) { case 0: //PEGASUS_PROPERTIES if ( value != null && value instanceof PegasusProperties) mProps = (PegasusProperties) value; else valid = false; break; case 1: //PLANNER_OPTIONS if ( value != null && value instanceof PlannerOptions ) mPOptions = ( PlannerOptions ) value; else valid = false; break; case 2: //REPLICA_CATALOG: if ( value != null && value instanceof ReplicaCatalog ) mRCHandle = ( ReplicaCatalog ) value; else valid = false; break; case 3: //SITE_CATALOG: if ( value != null && value instanceof PoolInfoProvider ) mSCHandle = ( PoolInfoProvider ) value; else valid = false; break; case 4: //TRANSFORMATION_CATALOG: if ( value != null && value instanceof TransformationCatalog ) mTCHandle = ( TransformationCatalog ) value; else valid = false; break; case 5: //TRANSFORMATION_MAPPER if ( value != null && value instanceof Mapper ) mTCMapper = ( Mapper ) value; else valid = false; break; case 6: //PEGASUS_LOGGER if ( value != null && value instanceof LogManager ) mLogger = ( LogManager ) value; else valid = false; break; case 7: //SITE_STORE if ( value != null && value instanceof SiteStore ) mSiteStore = ( SiteStore ) value; else valid = false; break; case 8: //TRANSIENT_REPLICA_CATALOG if ( value != null && value instanceof ReplicaCatalog ) mTransientRC = ( ReplicaCatalog ) value; else valid = false; break; case 9: //WORKER_PACKAGE_MAP if ( value != null && value instanceof Map ) mWorkerPackageMap = ( Map ) value; else valid = false; break; default: throw new RuntimeException( " Wrong Pegasus Bag key. Please use one of the predefined Integer key types"); } //if object is not null , and valid == false //throw exception if( !valid && value != null ){ throw new RuntimeException( "Invalid object passed for key " + PEGASUS_INFO[ k ]); } return valid; } /** * Returns true if the bag contains a mapping for the specified key. Note * that only the key range is checked; this returns true for any predefined * key constant, whether or not a value has actually been stored for it. * * @param key The key that you want to search for in the bag. * * @return boolean */ public boolean containsKey(Object key) { int k = -1; try{ k = ( (Integer) key).intValue(); } catch( Exception e ){} return ( k >= PegasusBag.PEGASUS_PROPERTIES.intValue() && k <= PegasusBag.WORKER_PACKAGE_MAP.intValue() ); } /** * Returns the object corresponding to the key passed. * * @param key the key corresponding to which the object needs to be * returned. * * @return the object that is found corresponding to the key or null. */ public Object get( Object key ) { int k = getIntValue( key ); switch( k ){ case 0: return this.mProps; case 1: return this.mPOptions; case 2: return this.mRCHandle; /* case 3: return this.mSCHandle; */ case 4: return this.mTCHandle; case 5: //TRANSFORMATION_MAPPER return this.mTCMapper; case 6: //PEGASUS_LOGMANAGER return this.mLogger; case 7: //SITE_STORE return this.mSiteStore; case 8://TRANSIENT_RC return this.mTransientRC; case 9://WORKER PACKAGE MAP return this.mWorkerPackageMap; default: throw new RuntimeException( " Wrong Pegasus Bag key. Please use one of the predefined Integer key types"); } } /** * A convenience method to get the PlannerOptions. * * @return the handle to options passed to the planner. */ public PlannerOptions getPlannerOptions(){ return ( PlannerOptions )get( PegasusBag.PLANNER_OPTIONS ); } /** * A convenience method to get the PegasusProperties. * * @return the handle to the properties.
*/ public PegasusProperties getPegasusProperties(){ return ( PegasusProperties )get( PegasusBag.PEGASUS_PROPERTIES ); } /** * A convenience method to get the Logger. * * @return the handle to the logger. */ public LogManager getLogger(){ return ( LogManager )get( PegasusBag.PEGASUS_LOGMANAGER ); } /** * A convenience method to get the handle to the site catalog. * * @return the handle to site catalog */ /* public PoolInfoProvider getHandleToSiteCatalog(){ return ( PoolInfoProvider )get( PegasusBag.SITE_CATALOG ); }*/ /** * A convenience method to get the handle to the site store. * * @return the handle to site store */ public SiteStore getHandleToSiteStore(){ return ( SiteStore )get( PegasusBag.SITE_STORE ); } /** * A convenience method to get the handle to the transient replica catalog. * * @return the handle to transient replica catalog */ public ReplicaCatalog getHandleToTransientReplicaCatalog(){ return ( ReplicaCatalog )get( PegasusBag.TRANSIENT_REPLICA_CATALOG ); } /** * A convenience method to get the handle to the transformation catalog. * * @return the handle to transformation catalog */ public TransformationCatalog getHandleToTransformationCatalog(){ return ( TransformationCatalog )get( PegasusBag.TRANSFORMATION_CATALOG ); } /** * A convenience method to get the handle to the transformation mapper. * * @return the handle to the transformation mapper */ public Mapper getHandleToTransformationMapper(){ return ( Mapper )get( PegasusBag.TRANSFORMATION_MAPPER ); } /** * A convenience method to get the worker package map. * * @return the handle to worker package map */ public Map getWorkerPackageMap(){ return ( Map )get( PegasusBag.WORKER_PACKAGE_MAP ); } /** * A convenience method to get the intValue for the object passed. * * @param key the key to be converted * * @return the int value if the object is an Integer, else -1 */ private int getIntValue( Object key ){ int k = -1; try{ k = ( (Integer) key).intValue(); } catch( Exception e ){} return k; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/PlannerMetrics.java0000644000175000017500000001657611757531137026527 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.common.util.Currently; import java.util.Date; /** * A Data class containing the metrics about the planning instance. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class PlannerMetrics extends Data{ /** * The base submit directory where the files are being created. */ private String mBaseSubmitDirectory; /** * The relative submit directory for this run. */ private String mRelativeSubmitDirectory; /** * The path to the DAX file that was planned. */ private String mDAXPath; /** * The pointer to the properties file that was used. */ private String mPropertiesPath; /** * The user who planned the workflow. */ private String mUser; /** * The VO group to which the user belongs. */ private String mVOGroup; /** * The metrics about the workflow.
*/ private WorkflowMetrics mWFMetrics; /** * The start time for the planning. */ private Date mStartTime; /** * The end time for the planning. */ private Date mEndTime; /** * The default constructor. */ public PlannerMetrics() { } /** * Returns the workflow metrics. * * @return the workflow metrics. */ public WorkflowMetrics getWorkflowMetrics(){ return mWFMetrics; } /** * Sets the workflow metrics. * * @param metrics the workflow metrics. */ public void setWorkflowMetrics( WorkflowMetrics metrics ){ mWFMetrics = metrics; } /** * Returns the username. * * @return the user. */ public String getUser( ){ return mUser; } /** * Sets the user. * * @param user the user. */ public void setUser( String user ){ mUser = user; } /** * Sets the vo group * * @param group the vo group. */ public void setVOGroup( String group ){ this.mVOGroup = group; } /** * Returns the VO Group. * * @return the VO Group to which the user belongs */ public String getVOGroup( ){ return this.mVOGroup; } /** * Sets the path to the properties file for the run. * * @param path the path to the properties file. */ public void setProperties( String path ){ mPropertiesPath = path; } /** * Returns the path to the properties file for the run. * * @return the path to the properties file. */ public String getProperties( ){ return mPropertiesPath; } /** * Sets the path to the base submit directory. * * @param base the path to the base submit directory. */ public void setBaseSubmitDirectory( String base ){ mBaseSubmitDirectory = base; } /** * Returns the path to the base submit directory. * * @return the path to the base submit directory. */ public String getBaseSubmitDirectory( ){ return mBaseSubmitDirectory; } /** * Sets the path to the submit directory relative to the base. * * @param relative the relative path from the base submit directory. */ public void setRelativeSubmitDirectory( String relative ){ mRelativeSubmitDirectory = relative; } /** * Returns the path to the relative submit directory. * * @return the path to the relative submit directory. */ public String getRelativeSubmitDirectory( ){ return mRelativeSubmitDirectory; } /** * Sets the path to the DAX. * * @param path the path to the DAX file. */ public void setDAX( String path ){ mDAXPath = path; } /** * Returns the path to the DAX. * * @return the path to the DAX file. */ public String getDAX( ){ return mDAXPath; } /** * Set the start time for the planning operation. * * @param start the start time. */ public void setStartTime( Date start ){ mStartTime = start; } /** * Returns the start time for the planning operation. * * @return the start time. */ public Date getStartTime( ){ return mStartTime; } /** * Set the end time for the planning operation. * * @param end the end time. */ public void setEndTime( Date end ){ mEndTime = end; } /** * Returns the end time for the planning operation. * * @return the end time. */ public Date getEndTime( ){ return mEndTime; } /** * Returns a textual description of the object.
* * @return String the textual description. */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( "{" ).append( "\n" ); append( sb, "user", this.mUser ); append( sb, "vogroup", this.mVOGroup ); append( sb, "submitdir.base", this.mBaseSubmitDirectory ); append( sb, "submitdir.relative", this.mRelativeSubmitDirectory ); append( sb, "planning.start", Currently.iso8601( false, true, false, mStartTime ) ); append( sb, "planning.end", Currently.iso8601( false, true, false, mEndTime ) ); append( sb, "properties", this.mPropertiesPath ); append( sb, "dax", this.mDAXPath ); sb.append( this.getWorkflowMetrics() ); sb.append( "}" ).append( "\n" ); return sb.toString(); } /** * Appends a key=value pair to the StringBuffer. * * @param buffer the StringBuffer that is to be appended to. * @param key the key. * @param value the value. */ protected void append( StringBuffer buffer, String key, String value ){ buffer.append( key ).append( " = " ).append( value ).append( "\n" ); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ PlannerMetrics pm; try { pm = ( PlannerMetrics )super.clone(); } catch (CloneNotSupportedException e) { //somewhere in the hierarchy chain clone is not implemented throw new RuntimeException( "Clone not implemented in the base class of " + this.getClass().getName(), e); } pm.setUser( this.mUser ); pm.setVOGroup( this.mVOGroup ); pm.setBaseSubmitDirectory( this.mBaseSubmitDirectory ); pm.setRelativeSubmitDirectory( this.mRelativeSubmitDirectory ); pm.setProperties( this.mPropertiesPath ); pm.setDAX( this.mDAXPath ); pm.setStartTime( this.mStartTime ); pm.setEndTime( this.mEndTime ); return pm; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/AggregatedJob.java0000644000175000017500000001367211757531137026264 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.planner.cluster.JobAggregator; import java.util.Collection; import java.util.List; import java.util.ArrayList; import java.util.Iterator; /** * This class holds all the specifics of an aggregated job. An aggregated job * or a clustered job is a job, that contains a collection of smaller jobs. * An aggregated job during execution may explode into n smaller job executions. * At present it does not store information about the dependencies between the * jobs. * * @author Karan Vahi * @version $Revision: 3684 $ */ public class AggregatedJob extends Job { /** * The collection of jobs that are contained in the aggregated job. */ private List mConstituentJobs; /** * Boolean indicating whether a job has been fully rendered to an executable * job or not i.e the aggregated job has been mapped to the aggregator and * the constituent jobs have been gridstarted or not. */ private boolean mHasBeenRenderedToExecutableForm; /** * Handle to the JobAggregator that created this job. */ private JobAggregator mJobAggregator; /** * The default constructor.
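 * <p>
 * An illustrative sketch (job1 and job2 are hypothetical Job instances
 * created elsewhere):
 * <pre>
 *   AggregatedJob clustered = new AggregatedJob();
 *   clustered.add( job1 );
 *   clustered.add( job2 );
 *   for( Iterator it = clustered.constituentJobsIterator(); it.hasNext(); ){
 *       Job j = (Job) it.next();
 *   }
 * </pre>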
*/ public AggregatedJob() { super(); mConstituentJobs = new ArrayList(3); mHasBeenRenderedToExecutableForm = false; this.mJobAggregator = null; } /** * The overloaded constructor. * * @param num the number of constituent jobs */ public AggregatedJob(int num) { this(); mConstituentJobs = new ArrayList(num); } /** * The overloaded constructor. * * @param job the job whose shallow copy is created, and is the main job. * @param num the number of constituent jobs. */ public AggregatedJob(Job job,int num) { super((Job)job.clone()); mConstituentJobs = new ArrayList(num); mHasBeenRenderedToExecutableForm = false; this.mJobAggregator = null; } /** * Returns a boolean indicating whether a job has been rendered to an executable * form or not * * @return boolean */ public boolean renderedToExecutableForm( ){ return this.mHasBeenRenderedToExecutableForm; } /** * Sets a boolean indicating whether a job has been rendered to an executable * form or not * * @param value boolean to set to. */ public void setRenderedToExecutableForm( boolean value ){ this.mHasBeenRenderedToExecutableForm = value; } /** * Sets the JobAggregator that created this aggregated job. * Useful for rendering the job to an executable form later on. * * @param aggregator handle to the JobAggregator used for aggregating the job */ public void setJobAggregator( JobAggregator aggregator ){ this.mJobAggregator = aggregator; } /** * Returns the JobAggregator that created this aggregated job. * Useful for rendering the job to an executable form later on. * * @return JobAggregator */ public JobAggregator getJobAggregator( ){ return this.mJobAggregator; } /** * Adds a job to the aggregated job. * * @param job the job to be added. */ public void add(Job job){ mConstituentJobs.add(job); } /** * Clustered jobs never originate in the DAX. Always return null. * * @return null */ public String getDAXID(){ return null; } /** * Returns a new copy of the Object. The constituent jobs are also cloned. * * @return Object */ public Object clone(){ AggregatedJob newJob = new AggregatedJob((Job)super.clone(), mConstituentJobs.size()); for(Iterator it = this.mConstituentJobs.iterator();it.hasNext();){ newJob.add( (Job)(((Job)it.next()).clone())); } newJob.mHasBeenRenderedToExecutableForm = this.mHasBeenRenderedToExecutableForm; newJob.mJobAggregator = this.mJobAggregator; return newJob; } /** * Returns an iterator to the constituent jobs of the AggregatedJob. * * @return Iterator */ public Iterator constituentJobsIterator(){ return mConstituentJobs.iterator(); } /** * Returns a job from a particular position in the list of constituent jobs * * @param index the index to retrieve from * * @return a constituent job. */ public Job getConstituentJob( int index ){ return (Job) this.mConstituentJobs.get( index ); } /** * Returns the number of constituent jobs. * * @return the number of constituent jobs. */ public int numberOfConsitutentJobs(){ return mConstituentJobs.size(); } /** * Returns a textual description of the object. * * @return textual description of the job.
*/ public String toString(){ StringBuffer sb = new StringBuffer(32); sb.append("\n").append("[MAIN JOB]").append(super.toString()); sb.append("\n").append("[CONSTITUENT JOBS]"); int num = 0; for(Iterator it = mConstituentJobs.iterator();it.hasNext();++num){ sb.append("\n").append("[CONSTITUENT JOB] :").append(num); sb.append(it.next()); } return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/PlannerOptions.java0000644000175000017500000013331311757531137026551 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.common.logging.LogManager; import java.io.File; import java.util.Collection; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.HashSet; import java.util.StringTokenizer; import java.util.Properties; import edu.isi.pegasus.common.util.Currently; import java.util.HashMap; import java.util.Map; /** * Holds the information about the various options which the user specifies to * the Concrete Planner at runtime. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4797 $ */ public class PlannerOptions extends Data implements Cloneable{ /** * The default logging level. */ public static final int DEFAULT_LOGGING_LEVEL = LogManager.WARNING_MESSAGE_LEVEL; /** * The default number of rescue retries if no value is specified. */ public static final int DEFAULT_NUMBER_OF_RESCUE_TRIES = 999; /** * The base submit directory. */ private String mBaseDir; /** * The relative directory for the submit directory and remote execution directory */ private String mRelativeDir; /** * The relative directory for the submit directory. Overrides relative dir if * both are specified. */ private String mRelativeSubmitDir; /** * This is the directory where the submit files are generated on the submit * host (the site running the concrete planner). */ //private String mSubmitFileDir ; /** * The dax file which contains the abstract dag. This dax is created by the * Abstract Planner using the gendax command. */ private String mDAXFile; /** * The path to the pdax file that contains the partition graph. */ private String mPDAXFile; /** * List of execution pools on which the user wants the Dag to be executed. */ private Set mvExecPools; /** * Set of cache files that need to be used, to determine the location of the * transiency files. */ private Set mCacheFiles; /** * Set of replica catalog files that are inherited by a planning instance. * Locations in this file have a lower priority than the file locations * mentioned in the DAX Replica Store */ private Set<String> mInheritedRCFiles; /** * The output pool to which the data products need to be transferred. */ private String mOutputPool; /** * If specified, then it submits to the underlying CondorG using the * kickstart-Condorscript provided. */ private boolean mSubmit; /** * The force option to make a build dag from scratch.
Leads to no reduction * of the dag occurring. */ private boolean mForce; /** * A boolean indicating whether to do cleanup or not. */ private boolean mCleanup; /** * To Display help or not. */ private boolean mDisplayHelp; /** * Denotes the logging level that is to be used for logging the messages. */ private int mLoggingLevel; /** * Whether to create a random directory in the execution directory that is * determined from the exec mount point in the pool configuration file. * This forces Pegasus to do runs in a unique random directory just below * the execution mount point at a remote pool. */ private boolean mGenRandomDir; /** * Whether to attempt authentication against the jobmanagers for the pools * specified at runtime. */ private boolean mAuthenticate; /** * The megadag generation mode. */ private String mMegadag; /** * The list of VDS properties set at runtime by the user on commandline. * It is a list of NameValue pairs, with name as vds property * name and value as the corresponding value. */ private List mVDSProps; /** * Denotes what type of clustering needs to be done */ private String mClusterer; /** * If the genRandomDir flag is set, then this contains the name of * the random directory. Else it can be null or an empty string. */ private String mRandomDirName; /** * Designates if the optional argument to the random directory was given. */ private boolean mOptArg; /** * The basename prefix that is to be given to the per workflow file, like * the log file, the .dag file, the .cache files etc. */ private String mBasenamePrefix; /** * The prefix that is to be applied while constructing job names. */ private String mJobPrefix; /** * A boolean indicating whether the planner invocation is part of a larger * deferred planning run. */ private boolean mDeferredRun; /** * Boolean indicating whether to spawn off a monitoring process or not * for the workflow. */ private boolean mMonitor; /** * The VO group to which the user belongs. */ private String mVOGroup; /** * Stores the time at which the planning process started. */ private Date mDate; /** * Stores the type of partitioning to be done. */ private String mPartitioningType; /** * A boolean storing whether to sanitize paths or not */ private boolean mSanitizePath; /** * The number of rescue retries before replanning. */ private int mNumOfRescueTries; /** * The properties container for properties specified on the commandline * in the DAX dax elements. */ private Properties mProperties; /** * The options that need to be passed forward to pegasus-run. */ private List mForwardOptions; /** * The set of non standard java options that need to be passed to the JVM */ private Set mNonStandardJavaOptions; /** * The force replan option. */ private boolean mForceReplan; /** * The argument string with which the planner was invoked. */ private String mOriginalArgumentString; /** * A map that maps an execution site to a staging site. */ private Map<String,String> mStagingSitesMap; /** * Default Constructor.
*/ public PlannerOptions(){ // mSubmitFileDir = "."; mBaseDir = "."; mRelativeDir = null; mRelativeSubmitDir= null; mDAXFile = null; mPDAXFile = null; mvExecPools = new java.util.HashSet(); mCacheFiles = new java.util.HashSet(); mInheritedRCFiles = new java.util.HashSet(); mNonStandardJavaOptions = new java.util.HashSet(); mForwardOptions = new java.util.LinkedList(); mOutputPool = null; mDisplayHelp = false; mLoggingLevel = DEFAULT_LOGGING_LEVEL; mForce = false; mSubmit = false; mGenRandomDir = false; mRandomDirName = null; mOptArg = false; mAuthenticate = false; mMegadag = null; mVDSProps = null; mClusterer = null; mBasenamePrefix = null; mMonitor = false; mCleanup = true; mVOGroup = "pegasus"; mDeferredRun = false; mDate = new Date(); mPartitioningType = null; mSanitizePath = true; mJobPrefix = null; mNumOfRescueTries = DEFAULT_NUMBER_OF_RESCUE_TRIES; mProperties = new Properties(); mForceReplan = false; mOriginalArgumentString = null; mStagingSitesMap = new HashMap<String,String>(); } /** * Returns the authenticate option for the planner. * * @return boolean indicating if it was set or not. */ public boolean authenticationSet(){ return mAuthenticate; } /** * Returns the cache files. * * @return Set of fully qualified paths to the cache files. * */ public Set getCacheFiles(){ return mCacheFiles; } /** * Returns the inherited rc files. * * @return Set of fully qualified paths to the replica catalog files. * */ public Set getInheritedRCFiles(){ return mInheritedRCFiles; } /** * Returns whether to do clustering or not. * * @return boolean */ // public boolean clusteringSet(){ // return mCluster; // } /** * Returns the clustering technique to be used for clustering. * * @return the value of clustering technique if set, else null */ public String getClusteringTechnique(){ return mClusterer; } /** * Returns the basename prefix for the per workflow files that are to be * generated by the planner. * * @return the basename if set, else null. */ public String getBasenamePrefix(){ return mBasenamePrefix; } /** * Returns the job prefix to be used while constructing the job names. * * @return the job prefix if set, else null. */ public String getJobnamePrefix(){ return mJobPrefix; } /** * Returns the path to the dax file being used by the planner. * * @return path to DAX file. */ public String getDAX(){ return mDAXFile; } /** * Returns the names of the execution sites where the concrete workflow can * be run. * * @return Set of execution site names. */ public Collection getExecutionSites(){ return mvExecPools; } /** * Returns the force option set for the planner. * * @return the boolean value indicating the force option. */ public boolean getForce(){ return mForce; } /** * Returns the force replan option * * @return boolean */ public boolean getForceReplan( ){ return mForceReplan; } /** * Returns the option indicating whether to do cleanup or not. * * @return the boolean value indicating the cleanup option. */ public boolean getCleanup(){ return mCleanup; } /** * Returns the time at which planning started in a ISO 8601 format. * * @param extendedFormat will use the extended ISO 8601 format which * separates the different timestamp items. If false, the basic * format will be used. In UTC and basic format, the 'T' separator * will be omitted. * * @return String */ public String getDateTime( boolean extendedFormat ){ StringBuffer sb = new StringBuffer(); sb.append( Currently.iso8601( false, extendedFormat, false, mDate ) ); return sb.toString(); } /** * Returns whether to display help or not. * * @return boolean indicating whether to display the help.
*/ public boolean getHelp(){ return mDisplayHelp; } /** * Increments the logging level by 1. */ public void incrementLogging(){ mLoggingLevel++; } /** * Decrements the logging level by 1. */ public void decrementLogging(){ mLoggingLevel--; } /** * Returns the logging level. * * @return the logging level. */ public int getLoggingLevel(){ return mLoggingLevel; } /** * Returns the megadag generation option. * * @return the mode if set, else null */ public String getMegaDAGMode(){ return this.mMegadag; } /** * Returns the output site where the output is to be staged. * * @return the output site. */ public String getOutputSite(){ return mOutputPool; } /** * Returns the path to the PDAX file being used by the planner. * * @return path to PDAX file. */ public String getPDAX(){ return mPDAXFile; } /** * Returns whether to generate a random directory or not. * * @return boolean */ public boolean generateRandomDirectory(){ return mGenRandomDir; } /** * Returns the random directory option. * * @return the directory name, null if not set. */ public String getRandomDir(){ return mRandomDirName; } /** * Returns the name of the random directory, only if the generate * Random Dir flag is set. Else it returns null. * * @return the random directory name, else null. */ public String getRandomDirName(){ if ( this.generateRandomDirectory()){ return this.getRandomDir(); } return null; } /** * Returns the argument string of how the planner was invoked. * * @return the arguments with which the planner was invoked. */ public String getOriginalArgString( ) { return this.mOriginalArgumentString; } /** * Sets the argument string of how the planner was invoked. This function * just stores the arguments as a String internally. * * @param args the arguments with which the planner was invoked. */ public void setOriginalArgString( String[] args ) { StringBuffer originalArgs = new StringBuffer(); for( int i = 0; i < args.length ; i++ ){ originalArgs.append( args[i] ).append( " " ); } this.mOriginalArgumentString = originalArgs.toString(); } /** * Sets a property passed on the command line. * * @param optarg key=value property specification */ public void setProperty( String optarg ) { String[] args = optarg.split( "=" ); if( args.length != 2 ){ throw new RuntimeException( "Wrong format for property specification on command line " + optarg ); } mProperties.setProperty( args[0], args[1] ); } /** * Returns whether to submit the workflow or not. * * @return boolean indicating whether to submit or not. */ public boolean submitToScheduler(){ return mSubmit; } /** * Returns the VDS properties that were set by the user. * * @return List of NameValue objects each corresponding to a * property key and value. */ public List getVDSProperties(){ return mVDSProps; } /** * Returns the VO Group to which the user belongs * * @return VOGroup */ public String getVOGroup( ){ return mVOGroup; } /** * Returns the base submit directory * * @return the path to the directory. */ public String getBaseSubmitDirectory(){ return mBaseDir; } /** * Returns the relative directory. * * @return the relative directory */ public String getRelativeDirectory(){ return mRelativeDir; // return ( mBaseDir == null ) ? // mSubmitFileDir: // mSubmitFileDir.substring( mBaseDir.length() ); } /** * Returns the relative submit directory option. * * @return the relative submit directory option if specified else null */ public String getRelativeSubmitDirectoryOption(){ return mRelativeSubmitDir; } /** * Returns the relative submit directory. * * @return the relative submit directory if specified else the relative dir.
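 * <p>
 * For example (illustrative values): with a base directory of /scratch/submit
 * and a relative directory of run0001, getSubmitDirectory() below resolves to
 * /scratch/submit/run0001.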
*/ public String getRelativeSubmitDirectory(){ return ( mRelativeSubmitDir == null )? mRelativeDir: //pick the relative dir mRelativeSubmitDir;//pick the relative submit directory } /** * Returns the path to the directory where the submit files are to be * generated. The relative submit directory if specified overrides the * relative directory. * * @return the path to the directory. */ public String getSubmitDirectory(){ String relative = ( mRelativeSubmitDir == null )? mRelativeDir: //pick the relative dir mRelativeSubmitDir;//pick the relative submit directory if( mSanitizePath ){ return ( relative == null )? new File( mBaseDir ).getAbsolutePath(): new File( mBaseDir, relative ).getAbsolutePath(); } else{ return (relative == null )? mBaseDir: new File( mBaseDir, relative ).getPath(); } } /** * Sets the authenticate flag to the value passed. * * @param value boolean value passed. */ public void setAuthentication(boolean value){ mAuthenticate = value; } /** * Sets the basename prefix for the per workflow files. * * @param prefix the prefix to be set. */ public void setBasenamePrefix(String prefix){ mBasenamePrefix = prefix; } /** * Sets the job prefix to be used while constructing the job names. * * @param prefix the job prefix. */ public void setJobnamePrefix( String prefix ){ mJobPrefix = prefix; } /** * Sets the flag to denote that the optional argument for the random directory was * specified. * * @param value boolean indicating whether the optional argument was given * or not. */ public void setOptionalArg(boolean value){ this.mOptArg = value; } /** * Returns the flag to denote whether the optional argument for the random directory was * specified or not. * * @return boolean indicating whether the optional argument was supplied or not. */ public boolean optionalArgSet(){ return this.mOptArg; } /** * Sets the flag to denote whether we want to monitor the workflow or not. * * @param value boolean. */ public void setMonitoring(boolean value){ this.mMonitor = value; } /** * Returns boolean indicating whether we want to monitor or not. * * @return boolean indicating whether monitoring was set or not. */ public boolean monitorWorkflow(){ return this.mMonitor; } /** * Sets the partitioning type in case of partition and plan. * * @param type the type of partitioning technique */ public void setPartitioningType( String type ){ mPartitioningType = type; } /** * Returns the partitioning type in case of partition and plan. * * @return the type of partitioning technique */ public String getPartitioningType( ){ return mPartitioningType; } /** * Sets the flag to denote that the run is part of a larger deferred run. * * @param value the value */ public void setPartOfDeferredRun( boolean value ){ mDeferredRun = value; } /** * Returns a boolean indicating whether this invocation is part of a * deferred execution or not. * * @return boolean */ public boolean partOfDeferredRun( ){ return mDeferredRun; } /** * Sets the flag denoting whether to sanitize path or not. * * @param value the value to set */ public void setSanitizePath( boolean value ){ mSanitizePath = value; } /** * Returns whether to sanitize paths or not. Internal method only. * * @return boolean */ /* protected boolean sanitizePath(){ return mSanitizePath; } */ /** * Sets the cache files. If cache files have already been specified, it * adds to the existing set of files. It also sanitizes the paths. Tries * to resolve the path, if the path given is relative instead of absolute. * * @param cacheList comma separated list of cache files.
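 * <p>
 * For example (illustrative paths): setCacheFiles( "/tmp/a.cache,/tmp/b.cache" )
 * adds both files, sanitized to absolute paths, to the set.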
*/ public void setCacheFiles( String cacheList ){ this.setCacheFiles( this.generateSet(cacheList) ); } /** * Sets the cache files. If cache files have already been specified, it * adds to the existing set of files. It also sanitizes the paths. Tries * to resolve the path, if the path given is relative instead of absolute. * * @param files the set of fully qualified paths to the cache files. * */ public void setCacheFiles(Set files){ //use the existing set if present if (mCacheFiles == null ) { mCacheFiles = new HashSet(); } //traverse through each file in the set, and //sanitize path along the way. for ( Iterator it = files.iterator(); it.hasNext(); ){ mCacheFiles.add( this.sanitizePath( (String)it.next() ) ); } } /** * Sets the inherited RC files. If RC files have already been specified, it * adds to the existing set of files. It also sanitizes the paths. Tries * to resolve the path, if the path given is relative instead of absolute. * * @param list comma separated list of replica catalog files. */ public void setInheritedRCFiles( String list ){ this.setInheritedRCFiles( this.generateSet( list ) ); } /** * Sets the inherited RC files. If RC files have already been specified, it * adds to the existing set of files. It also sanitizes the paths. Tries * to resolve the path, if the path given is relative instead of absolute. * * @param files the set of fully qualified paths to the replica catalog files. * */ public void setInheritedRCFiles(Set files){ //use the existing set if present if (this.mInheritedRCFiles == null ) { mInheritedRCFiles = new HashSet(); } //traverse through each file in the set, and //sanitize path along the way. for ( Iterator it = files.iterator(); it.hasNext(); ){ mInheritedRCFiles.add( this.sanitizePath( (String)it.next() ) ); } } /** * Sets the clustering option. * * @param value the value to set. */ public void setClusteringTechnique( String value ){ mClusterer = value; } /** * Sets the DAX that has to be worked on by the planner. * * @param dax the path to the DAX file. */ public void setDAX(String dax){ dax = sanitizePath(dax); mDAXFile = dax; } /** * Sets the names of the execution sites where the concrete workflow can * be run. * * @param siteList comma separated list of sites. */ public void setExecutionSites(String siteList){ mvExecPools = this.generateSet( siteList ); } /** * Sets the names of the execution sites where the concrete workflow can * be run. * * @param sites Collection of execution site names. */ public void setExecutionSites(Collection sites){ mvExecPools = new HashSet( sites ); } /** * Sets the force option for the planner. * * @param force boolean value. */ public void setForce(boolean force){ mForce = force; } /** * Sets the force replan option * * @param force the boolean value */ public void setForceReplan(boolean force ) { mForceReplan = force; } /** * Parses the argument in form of option=[value] and adds to the * options that are to be passed ahead to pegasus-run. * * @param argument the argument to be passed. */ public void addToForwardOptions( String argument ) { //split on = String[] arr = argument.split( "=" ); NameValue nv = new NameValue(); nv.setKey( arr[0] ); if( arr.length == 2 ){ //set the value nv.setValue( arr[1] ); } this.mForwardOptions.add( nv ); } /** * Returns the forward options set * * @return List containing the option and the value. */ public List getForwardOptions( ) { return this.mForwardOptions; } /** * Sets the cleanup option for the planner. * * @param cleanup boolean value.
*/ public void setCleanup( boolean cleanup ){ mCleanup = cleanup; } /** * Sets the help option for the planner. * * @param help boolean value. */ public void setHelp(boolean help){ mDisplayHelp = help; } /** * Sets the logging level for logging of messages. * * @param level the logging level. */ public void setLoggingLevel(String level){ mLoggingLevel = (level != null && level.length() > 0)? //the value that was passed by the user new Integer(level).intValue(): //by default not setting it to 0, //but to 1, as --verbose is an optional //argument 1; } /** * Sets the megadag generation option * * @param mode the mode. */ public void setMegaDAGMode(String mode){ this.mMegadag = mode; } /** * Sets the PDAX that has to be worked on by the planner. * * @param pdax the path to the PDAX file. */ public void setPDAX(String pdax){ pdax = sanitizePath(pdax); mPDAXFile = pdax; } /** * Sets the output site specified by the user. * * @param site the output site. */ public void setOutputSite(String site){ mOutputPool = site; } /** * Sets the random directory in which the jobs are run. * * @param dir the basename of the random directory. */ public void setRandomDir(String dir){ //setting the genRandomDir option to true also mGenRandomDir = true; mRandomDirName = dir; if(dir != null && dir.length() > 0) //set the flag to denote that optional arg was given setOptionalArg(true); } /** * Sets whether to submit the workflow or not. * * @param submit boolean indicating whether to submit or not. */ public void setSubmitToScheduler(boolean submit){ mSubmit = submit; } /** * Sets the path to the directory where the submit files are to be * generated. * * @param dir the path to the directory. */ public void setSubmitDirectory( String dir ){ this.setSubmitDirectory( dir, null ); } /** * Sets the path to the directory where the submit files are to be * generated. * * @param dir the path to the directory. */ public void setSubmitDirectory( File dir ){ this.setSubmitDirectory( dir.getAbsolutePath() , null ); } /** * Sets the path to the directory where the submit files are to be * generated. * * @param base the path to the base directory. * @param relative the directory relative to the base where submit files are generated. */ public void setSubmitDirectory( String base, String relative ){ base = sanitizePath( base ); /* mSubmitFileDir = ( relative == null )? new File( base ).getAbsolutePath(): new File( base, relative ).getAbsolutePath(); */ //not clear what it should be . // mRelativeDir = relative; mRelativeSubmitDir = relative; mBaseDir = base; } /** * Sets the path to the base submit directory where the submit files are to be * generated. * * @param base the base directory where submit files are generated. */ public void setBaseSubmitDirectory( String base ){ mBaseDir = base; } /** * Sets the path to the relative directory where the submit files are to be * generated. The submit directory can be overridden by * setRelativeSubmitDirectory( String) * * @param relative the directory relative to the base where submit files are generated. */ public void setRelativeDirectory( String relative ){ mRelativeDir = relative; } /** * Sets the path to the directory where the submit files are to be * generated. * * @param relative the directory relative to the base where submit files are generated. */ public void setRelativeSubmitDirectory( String relative ){ mRelativeSubmitDir = relative; } /** * Sets the VDS properties specified by the user at the command line. * * @param properties List of NameValue objects.
*/ public void setVDSProperties(List properties){ mVDSProps = properties; } /** * Set the VO Group to which the user belongs * * @param group the VOGroup */ public void setVOGroup( String group ){ mVOGroup = group; } /** * Sets the number of times to try for rescue dag submission. * * @param num number. */ public void setNumberOfRescueTries( String num ){ this.mNumOfRescueTries = Integer.parseInt( num ); } /** * Sets the number of times to try for rescue dag submission. * * @param num number. */ public void setNumberOfRescueTries( int num ){ this.mNumOfRescueTries = num; } /** * Returns the number of times to try for rescue dag submission. * * @return number. */ public int getNumberOfRescueTries( ){ return this.mNumOfRescueTries; } /** * Adds to the Set of non standard JAVA options that need to be passed * to the JVM. The list of non standard java options can be retrieved * by doing java -X . * * The option is always prefixed by -X internally. If mx1024m is passed, * internally option will be set to -Xmx1024m * * @param option the non standard option. */ public void addToNonStandardJavaOptions( String option ){ this.mNonStandardJavaOptions.add( "-X" + option ); } /** * Returns the Set of non standard java options. * * @return Set */ public Set getNonStandardJavaOptions( ){ return this.mNonStandardJavaOptions; } /** * Adds to the staging sites. * * @param value comma separated key=value pairs where key is the execution site * and value is the staging site to use for that execution site, e.g. * (illustrative) isi=local,*=condorpool maps site isi to staging site * local and all other sites to condorpool. */ public void addToStagingSitesMappings( String value ) { if( value == null ){ //do nothing return; } for( String kvstr : value.split( "," ) ){ //kvstr is of form key=value String[] kv = kvstr.split( "=" ); if ( kv.length == 1 ){ //add a * notation addToStagingSitesMappings( "*", kv[0] ); } else{ addToStagingSitesMappings( kv[0], kv[1] ); } } } /** * Adds to the staging sites * * @param executionSite the execution site * @param stagingSite the staging site. */ public void addToStagingSitesMappings( String executionSite, String stagingSite ) { this.mStagingSitesMap.put( executionSite, stagingSite ); } /** * Returns the staging site for an execution site. * * @param executionSite the execution site * * @return the staging site corresponding to an execution site, else null */ public String getStagingSite( String executionSite ){ return ( this.mStagingSitesMap.containsKey( executionSite ) ? this.mStagingSitesMap.get( executionSite )://the mapping for the execution site this.mStagingSitesMap.get( "*" ) //the value for the star site if specified ); } /** * Converts the staging site mappings to a comma separated list of * executionsite=stagingsite mappings * * @return mappings as string */ protected String stagingSiteMappingToString(){ StringBuffer sb = new StringBuffer(); for( Map.Entry<String,String> entry : this.mStagingSitesMap.entrySet() ){ String eSite = entry.getKey(); String sSite = entry.getValue(); if( eSite.equals( "*" ) ){ //for a star notation just add the corresponding staging site sb.append( sSite ); } else{ sb.append( eSite ). append( "=" ). append( sSite ); } sb.append( "," ); } return sb.toString(); } /** * Returns the textual description of all the options that were set for * the planner. * * @return the textual description.
*/ public String toString(){ String st = "\n" + "\n Planner Options" + "\n Argument String As Seen By Planner " + this.getOriginalArgString() + "\n Base Submit Directory " + mBaseDir + "\n SubmitFile Directory " + this.getSubmitDirectory() + "\n Basename Prefix " + mBasenamePrefix + "\n Jobname Prefix " + mJobPrefix + "\n Abstract Dag File " + mDAXFile + "\n Partition File " + mPDAXFile + "\n Execution Sites " + this.setToString(mvExecPools,",")+ "\n Staging Sites " + this.stagingSiteMappingToString() + "\n Cache Files " + this.setToString(mCacheFiles,",") + "\n Inherited RC Files " + this.setToString(mInheritedRCFiles,",") + "\n Output Pool " + mOutputPool + "\n Submit to CondorG " + mSubmit + "\n Display Help " + mDisplayHelp + "\n Logging Level " + mLoggingLevel + "\n Force Option " + mForce + "\n Force Replan " + mForceReplan + "\n Cleanup within wf " + mCleanup + "\n Create Random Direct " + mGenRandomDir + "\n Random Direct Name " + mRandomDirName + "\n Authenticate " + mAuthenticate + "\n Clustering Technique " + mClusterer + "\n Monitor Workflow " + mMonitor + "\n VO Group " + mVOGroup + "\n Rescue Tries " + mNumOfRescueTries + "\n VDS Properties " + mVDSProps + "\n Non Standard JVM Options " + this.mNonStandardJavaOptions; return st; } /** * Generates the argument string corresponding to these options that can * be used to invoke Pegasus. During its generation it ignores the * dax and pdax options as they are specified elsewhere. * * @return all the options in a String separated by whitespace. */ public String toOptions(){ StringBuffer sb = new StringBuffer(); //the submit file dir // if( mSubmitFileDir != null){ sb.append(" --dir ").append(mSubmitFileDir);} //confirm how this plays in deferred planning. not clear. Karan Oct 31 2007 sb.append(" --dir ").append( this.getBaseSubmitDirectory() ); if( mRelativeDir != null){ sb.append(" --relative-dir ").append( this.getRelativeDirectory() ); } if( mRelativeSubmitDir != null ){ sb.append( " --relative-submit-dir " ).append( mRelativeSubmitDir ); } //the basename prefix if(mBasenamePrefix != null){ sb.append(" --basename ").append(mBasenamePrefix);} //the jobname prefix if( mJobPrefix != null ){ sb.append( " --job-prefix " ).append( mJobPrefix ); } if(!mvExecPools.isEmpty()){ sb.append(" --sites "); //generate the comma separated string //for the execution pools sb.append(setToString(mvExecPools,",")); } if( !this.mStagingSitesMap.isEmpty() ){ sb.append( " --staging-site " ). 
append( this.stagingSiteMappingToString() ); } //cache files if(!mCacheFiles.isEmpty()){ sb.append(" --cache ").append(setToString(mCacheFiles,",")); } //inherited rc files if( !mInheritedRCFiles.isEmpty() ){ sb.append( " --inherited-rc-files " ).append(setToString(mInheritedRCFiles,",")); } //collapse option if( mClusterer != null ){ sb.append(" --cluster ").append(mClusterer);} //specify the output pool if(mOutputPool != null){ sb.append(" --output ").append(mOutputPool);} //the condor submit option if(mSubmit){ sb.append(" --run "); } //the force option if(mForce){ sb.append(" --force "); } //the force replan option if( mForceReplan ){ sb.append( " --force-replan " ); } //the cleanup option if( !mCleanup ){ sb.append(" --nocleanup "); } //the verbose option for(int i = PlannerOptions.DEFAULT_LOGGING_LEVEL; i < getLoggingLevel();i++){ sb.append(" --verbose " ); } //add any quiet logging options if required for( int i = getLoggingLevel() ; i < PlannerOptions.DEFAULT_LOGGING_LEVEL; i++ ){ sb.append(" --quiet " ); } //the monitor option if( mMonitor ) { sb.append(" --monitor "); } //the deferred run option if( mDeferredRun ) { sb.append( " --deferred "); } //the random directory if(mGenRandomDir){ //an optional argument sb.append(" --randomdir"); if(this.getRandomDir() == null){ //no argument to be given sb.append(" "); } else{ //add the optional argument sb.append("=").append(getRandomDir()); } } //the authenticate option if(mAuthenticate){ sb.append(" --authenticate"); } //specify the megadag option if set if(mMegadag != null){ sb.append(" --megadag ").append(mMegadag);} //specify the vogroup sb.append(" --group ").append( mVOGroup ); //specify the number of times to try rescue //only if it does not match the default value! if( this.getNumberOfRescueTries() != PlannerOptions.DEFAULT_NUMBER_OF_RESCUE_TRIES ){ sb.append( " --rescue " ).append( this.getNumberOfRescueTries() ); } //help option if(mDisplayHelp){ sb.append(" --help ");} return sb.toString(); } /** * Converts the vds properties that need to be passed to the jvm as an * option. * * @return the jvm options as String. */ public String toJVMOptions(){ StringBuffer sb = new StringBuffer(); if(mVDSProps != null){ for( Iterator it = mVDSProps.iterator(); it.hasNext(); ){ NameValue nv = (NameValue)it.next(); sb.append(" -D").append(nv.getKey()).append("=").append(nv.getValue()); } } //add all the properties specified in the dax elements for(Iterator it = mProperties.keySet().iterator(); it.hasNext() ; ){ String key = (String) it.next(); sb.append(" -D").append( key ).append("=").append( mProperties.getProperty(key)); } //pass on all the -X options to jvm for( Iterator it = this.mNonStandardJavaOptions.iterator(); it.hasNext() ;){ sb.append( " " ).append( it.next() ); } return sb.toString(); } /** * Returns the complete options string that is used to invoke pegasus * * @return the options as string. */ public String getCompleteOptions(){ StringBuffer sb = new StringBuffer(); sb./*append( this.toJVMOptions() ).append( " " ).*/ append( "--dax" ).append( " " ).append( this.getDAX() ). append( this.toOptions() ); return sb.toString(); } /** * Clones a Set. * * @param s Set * * @return the cloned set as a HashSet */ private Set cloneSet(Set s){ java.util.Iterator it = s.iterator(); Set newSet = new java.util.HashSet(); while(it.hasNext()){ newSet.add(it.next()); } return newSet; } /** * Returns a new copy of the Object. The clone does not clone the internal * VDS properties at the moment. * * @return the cloned copy. 
*/ public Object clone(){ PlannerOptions pOpt = null; try{ pOpt = (PlannerOptions)super.clone(); } catch( CloneNotSupportedException e ){ //somewhere in the hierarchy chain clone is not implemented mLogger.log("Clone not implemented in the base class of " + this.getClass().getName(), LogManager.WARNING_MESSAGE_LEVEL); //try calling the constructor directly pOpt = new PlannerOptions(); } // pOpt.mSubmitFileDir = this.mSubmitFileDir; pOpt.mBaseDir = this.mBaseDir; pOpt.mRelativeDir = this.mRelativeDir; pOpt.mDAXFile = this.mDAXFile; pOpt.mPDAXFile = this.mPDAXFile; pOpt.mvExecPools = cloneSet(this.mvExecPools); pOpt.mStagingSitesMap = new HashMap<String,String>( this.mStagingSitesMap ); pOpt.mCacheFiles = cloneSet(this.mCacheFiles); pOpt.mInheritedRCFiles = cloneSet(this.mInheritedRCFiles); pOpt.mNonStandardJavaOptions = cloneSet( this.mNonStandardJavaOptions ); pOpt.mOutputPool = this.mOutputPool; pOpt.mDisplayHelp = this.mDisplayHelp; pOpt.mLoggingLevel = this.mLoggingLevel; pOpt.mForce = this.mForce; pOpt.mForceReplan = this.mForceReplan; pOpt.mCleanup = this.mCleanup; pOpt.mSubmit = this.mSubmit; pOpt.mGenRandomDir = this.mGenRandomDir; pOpt.mOptArg = this.mOptArg; pOpt.mMonitor = this.mMonitor; pOpt.mRandomDirName = this.mRandomDirName; pOpt.mAuthenticate = this.mAuthenticate; pOpt.mClusterer = this.mClusterer; pOpt.mBasenamePrefix = this.mBasenamePrefix; pOpt.mJobPrefix = this.mJobPrefix; pOpt.mVOGroup = this.mVOGroup; pOpt.mDeferredRun = this.mDeferredRun; pOpt.mDate = (Date)this.mDate.clone(); pOpt.mPartitioningType = this.mPartitioningType; pOpt.mNumOfRescueTries = this.mNumOfRescueTries; pOpt.mOriginalArgumentString = this.mOriginalArgumentString; //a shallow clone for forward options pOpt.mForwardOptions = this.mForwardOptions; pOpt.mProperties = (Properties)this.mProperties.clone(); //Note not cloning the vdsProps or mProperties pOpt.mVDSProps = null; return pOpt; } /** * Generates a Set by parsing a comma separated string. * * @param str the comma separated String. * * @return Set containing the parsed values, in case of a null string * an empty set is returned. */ private Set generateSet(String str){ Set s = new HashSet(); //check for null if( str == null ) { return s; } for ( StringTokenizer st = new StringTokenizer(str,","); st.hasMoreElements(); ){ s.add(st.nextToken().trim()); } return s; } /** * A small utility method that sanitizes the path, converting it from * relative to absolute. In case the path is relative, it uses the * System property user.dir to get the current working directory, from * where the planner is being run. * * @param path the absolute or the relative path. * * @return the absolute path. */ private String sanitizePath( String path ){ if( path == null ){ return null; } if( !mSanitizePath ){ return path; } String absPath; char separator = File.separatorChar; absPath = (path.indexOf(separator) == 0)? //absolute path given already path: //get the current working dir System.getProperty( "user.dir" ) + separator + ( ( path.indexOf( '.' ) == 0 )? //path starts with a . ? ( (path.indexOf( separator ) == 1 ) ? //path starts with a ./ ? path.substring( 2 ): ( path.length() > 1 && path.charAt( 1 ) == '.' )? //path starts with .. ? path: //keep path as it is path.substring( path.indexOf( '.' ) + 1 ) ) : path ); //remove trailing separator if any absPath = (absPath.lastIndexOf(separator) == absPath.length() - 1)?
absPath.substring(0, absPath.length() - 1): absPath; return absPath; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/DAGJob.java0000644000175000017500000001114011757531137024615 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; /** * This is a data class that stores the contents of the DAG job in a DAX conforming * to schema 3.0 or higher. * * @author Karan Vahi * * @version $Revision: 3415 $ */ public class DAGJob extends Job { /** * The prefix to be attached for the DAX jobs */ public static final String JOB_PREFIX = "subdag_"; /** * The DAG LFN. */ private String mDAGLFN; /** * The DAG File that the job refers to. */ private String mDAGFile; /** * The directory in which the DAG needs to execute. */ private String mDirectory; /** * The default constructor. */ public DAGJob() { super(); mDAGFile = null; mDirectory = null; this.setJobType( Job.DAG_JOB ); } /** * The overloaded construct that constructs a DAG job by wrapping around * the Job job. * * @param job the original job description. */ public DAGJob(Job job){ super(job); mDAGFile = null; this.setJobType( Job.DAG_JOB ); } /** * Sets the DAG file LFN * * @param lfn the LFN of the DAG file. */ public void setDAGLFN(String lfn ){ mDAGLFN = lfn ; } /** * Returns the lfn for the DAGFile the job refers to. * * @return the lfn */ public String getDAGLFN(){ return mDAGLFN; } /** * Sets the DAG file * * @param file the path to the DAG file. */ public void setDAGFile(String file ){ mDAGFile = file ; } /** * Returns the DAGFile the job refers to. * * @return dag file */ public String getDAGFile(){ return mDAGFile; } /** * Sets the directory in which the dag needs to execute. * * @param directory the directory where dag needs to execute */ public void setDirectory( String directory ){ mDirectory = directory ; } /** * Returns the directory the job refers to. * * @return the directory. */ public String getDirectory(){ return mDirectory; } /** * Generates a name for the job that serves as the primary id for the job * * @param prefix any prefix that needs to be applied while constructing the * job name * * @return the id for the job */ public String generateName( String prefix ){ StringBuffer sb = new StringBuffer(); //prepend a job prefix to job if required if (prefix != null) { sb.append( prefix ); } String lfn = this.getDAGLFN(); String lid = this.getLogicalID(); if( lfn == null || this.getLogicalID() == null ){ //sanity check throw new RuntimeException( "Generate name called for job before setting the DAGLFN/Logicalid" + lfn + "," + lid ); } if( lfn.contains( "." ) ){ lfn = lfn.substring( 0, lfn.lastIndexOf( "." ) ); } sb.append( DAGJob.JOB_PREFIX ).append( lfn ).append( "_" ). append( lid ); return sb.toString(); } /** * Returns a textual description of the DAG Job. * * @return the textual description. 
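 *
 * <p>Illustrative note (not part of the original source): for the naming
 * scheme implemented in generateName() above, a DAGJob with DAG LFN
 * "black.dag" and logical id "ID000001" (made-up values) would be named
 * "subdag_black_ID000001".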
*/ public String toString(){ StringBuffer sb = new StringBuffer(super.toString()); return sb.toString(); } /** * Returns a new copy of the Object. The implementation is faulty: the * profiles are only copied shallowly, so the clone retains references to * the profile objects of the original. * * @return Object */ public Object clone(){ DAGJob newJob = new DAGJob((Job)super.clone()); newJob.setDAGLFN( this.getDAGLFN() ); newJob.setDAGFile( this.getDAGFile() ); newJob.setDirectory( this.getDirectory() ); return newJob; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/AuthenticateRequest.java0000644000175000017500000000702111757531137027561 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; /** * The object that describes the authenticate request. It specifies the mode of * authentication and the contact string of the resource. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class AuthenticateRequest extends Data { /** * The type identifying that the resource to authenticate against is a job * manager. */ public static final char JOBMANAGER_RESOURCE = 'j'; /** * The type identifying that the resource to authenticate against is a grid * ftp server. */ public static final char GRIDFTP_RESOURCE = 'g'; /** * Specifies what type of resource you are authenticating to. */ private char mType; /** * The id of the pool at which the resource resides. */ private String mPool; /** * The contact string to the resource. */ private String mResourceContact; /** * Default Constructor. */ private AuthenticateRequest() { } /** * Overloaded Constructor. * * @param type the type of resource to authenticate against. * @param pool the id of the pool at which the resource resides. * @param url the contact string to the resource. */ public AuthenticateRequest(char type, String pool,String url) { mType = type; mPool = pool; mResourceContact = url; } /** * Returns the type of the request. */ public char getResourceType(){ return mType; } /** * Returns the url of the resource to contact. */ public String getResourceContact(){ return mResourceContact; } /** * Returns the pool id of the associated resource in this request. */ public String getPool(){ return mPool; } /** * Returns a string version of this. */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append("TYPE").append("-->").append(mType).append(" ") .append(" Pool").append("-->").append(mPool) .append(" URL").append("-->").append(mResourceContact); return sb.toString(); } /** * Returns a clone of the object. */ public Object clone(){ AuthenticateRequest ar = new AuthenticateRequest(); ar.mType = this.mType; ar.mResourceContact = new String(this.mResourceContact); ar.mPool = new String(this.mPool); return ar; } /** * Checks if the request is invalid or not. It is invalid if the resource * contact is null or empty, or the type is an invalid type. * * * @return boolean true if the request is invalid.
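 *
 * <p>Illustrative sketch (not part of the original source), using a made-up
 * pool id and contact string:
 * <pre>
 *   AuthenticateRequest ar = new AuthenticateRequest(
 *       AuthenticateRequest.GRIDFTP_RESOURCE, "isi", "gsiftp://host.example.org" );
 *   boolean bad = ar.requestInvalid();   // false: type 'g' with a non-empty contact
 * </pre>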
*/ public boolean requestInvalid() { String c = this.getResourceContact(); //sanity check first if (c == null || c.length() == 0) { return true; } boolean val = true; switch (this.getResourceType()) { case 'g': case 'j': val = false; break; default: val = true; } return val; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/ReplicaStore.java0000644000175000017500000002031611757531137026170 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import java.util.Collection; import java.util.Set; import java.util.HashSet; import java.util.List; /** * A Replica Store that allows us to store the entries from a replica catalog. * The store map is indexed by LFN's and values stored are ReplicaLocation * objects. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2687 $ * * @see org.griphyn.common.catalog.ReplicaCatalogEntry */ public class ReplicaStore extends Data implements Cloneable{ /** * The replica store. */ private Map mStore; /** * Default constructor. */ public ReplicaStore(){ mStore = new HashMap(); } /** * Overloaded constructor. * Intializes the member variables to the values passed. * * @param rces map indexed by LFN's and each value is a collection * of replica catalog entries for the LFN. */ public ReplicaStore( Map rces ){ mStore = new HashMap( rces.size() ); store( rces ); } /** * Stores replica catalog entries into the store. It overwrites any * existing entries with the same LFN's. The ReplicaCatlogEntry * ends up being stored as a ReplicaLocation object. * * @param rces map indexed by LFN's and each value is a collection * of replica catalog entries for the LFN. */ public void store( Map rces ){ String lfn; Map.Entry entry; Collection values; //traverse through all the entries and render them into //ReplicaLocation objects before storing in the store for( Iterator it = rces.entrySet().iterator(); it.hasNext(); ){ entry = ( Map.Entry )it.next(); lfn = ( String )entry.getKey(); values = ( Collection )entry.getValue(); //only put in if the values are not empty if( !values.isEmpty() ){ put(lfn, new ReplicaLocation(lfn, values)); } } } /** * Adds ReplicaCatalogEntries into the store. Any existing * mapping of the same LFN and PFN will be replaced, including all its * attributes. The ReplicaCatlogEntry * ends up being stored as a ReplicaLocation object. * * @param rces map indexed by LFN's and each value is a collection * of replica catalog entries for the LFN. 
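 *
 * <p>Illustrative sketch (not part of the original source); the map shape is
 * hypothetical but follows the javadoc above:
 * <pre>
 *   Map rces = new HashMap();
 *   Collection tuples = new HashSet();
 *   tuples.add( new ReplicaCatalogEntry( "gsiftp://host.example.org/f.a" ) );
 *   rces.put( "f.a", tuples );
 *   store.add( rces );   // merges into any existing ReplicaLocation for f.a
 * </pre>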
*/ public void add( Map rces ){ String lfn; Map.Entry entry; Collection values; ReplicaLocation rl; //traverse through all the entries and render them into //ReplicaLocation objects before storing in the store for( Iterator it = rces.entrySet().iterator(); it.hasNext(); ){ entry = ( Map.Entry )it.next(); lfn = ( String )entry.getKey(); values = ( Collection )entry.getValue(); add( lfn, values ); } } /** * Adds replica catalog entries into the store. Any existing * mapping of the same LFN and PFN will be replaced, including all its * attributes. * * @param lfn the lfn. * @param tuples list of ReplicaCatalogEntry containing the PFN and the * attributes. */ public void add( String lfn, Collection tuples ){ //add only if tuples is not empty if( tuples.isEmpty() ){ return; } this.add( new ReplicaLocation( lfn, tuples ) ); } /** * Adds replica catalog entries into the store. Any existing * mapping of the same LFN and PFN will be replaced, including all its * attributes. * * @param rl the ReplicaLocation containing a pfn and all * the attributes. */ public void add( ReplicaLocation rl ){ String lfn = rl.getLFN(); if( this.containsLFN( lfn ) ){ //add to the existing Replica Location ReplicaLocation existing = this.get( lfn ); existing.addPFNs( rl.getPFNList() ); } else{ //store directly in the store. put( lfn, rl ); } } /** * Returns a ReplicaLocation corresponding to the LFN. * * @param lfn the lfn for which the ReplicaLocation is required. * * @return ReplicaLocation if entry exists else null. */ public ReplicaLocation getReplicaLocation( String lfn ){ return get( lfn ); } /** * Returns an iterator to the list of ReplicaLocation * objects stored in the store. * * @return Iterator. */ public Iterator replicaLocationIterator(){ return this.mStore.values().iterator(); } /** * Returns the set of LFN's for which the mappings are stored in the store. * * @return Set */ public Set getLFNs(){ return this.mStore.keySet(); } /** * Returns a Set of lfns for which the mappings are stored in * the store, amongst the Set passed as input. * * @param lfns the collections of lfns * * @return Set */ public Set getLFNs( Set lfns ){ Set s = new HashSet(); String lfn; for( Iterator it = lfns.iterator(); it.hasNext(); ){ lfn = (String)it.next(); if( this.containsLFN( lfn ) ){ s.add( lfn ); } } return s; } /** * Returns a boolean indicating whether a store is empty or not. * * @return boolean */ public boolean isEmpty(){ return this.getLFNCount() == 0; } /** * Returns the number of LFN's for which the mappings are stored in the * store. * * @return int */ public int getLFNCount(){ return this.mStore.size(); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ //clone is not implemented fully. throw new RuntimeException( "Clone not implemented for " + this.getClass().getName() ); // return rc; } /** * Returns the textual description of the data class. * * @return the textual description. */ public String toString(){ StringBuffer sb = new StringBuffer(); for( Iterator it = this.replicaLocationIterator(); it.hasNext(); ){ sb.append( it.next() ); sb.append( "\n" ); } return sb.toString(); } /** * Returns a boolean indicating whether the store has a mapping for a * particular LFN or not. * * @param lfn the logical file name of the file. * * @return boolean */ public boolean containsLFN( String lfn ){ return mStore.containsKey( lfn ); } /** * Inserts entry in the store overwriting any existing entry. * * @param key the key * @param value ReplicaLocation object. 
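 *
 * <p>Illustrative note (not part of the original source): put() backs the
 * add() methods above; a typical lookup once the store is populated is
 * <pre>
 *   ReplicaLocation rl = store.getReplicaLocation( "f.a" );
 *   int pfns = ( rl == null ) ? 0 : rl.getPFNList().size();
 * </pre>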
* * @return Object */ protected Object put( String key, ReplicaLocation value ){ return mStore.put( key, value ); } /** * Returns an entry corresponding to the LFN * * @param key the LFN * * @return ReplicaLocation object if exists, else null. */ protected ReplicaLocation get( String key ){ Object result = mStore.get( key ); return ( result == null )? null : ( ReplicaLocation )result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/PegasusFile.java0000644000175000017500000004307411757531137026011 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import java.util.BitSet; import edu.isi.pegasus.planner.dax.File; /** * The logical file object that contains the logical filename which is got from * the DAX, and the associated set of flags specifying the transient * characteristics. * It ends up associating the following information with a lfn * -type of the file (data or executable) * -optionality of a file * -transient attributes of a file (dontTransfer and dontRegister) * * @author Gaurang Mehta * @author Karan Vahi * @version $Revision: 4258 $ */ public class PegasusFile extends Data { /** * Enumeration for denoting type of linkage */ public static enum LINKAGE { INPUT, OUTPUT, INOUT, NONE }; /** * The index of the flags field which when set indicates that the file * is to be considered optional. */ public static final int TRANSIENT_OPTIONAL_FLAG = 0; /** * The index of the flags field which when set indicates that the file is * not to be registered in the RLS/ RC. */ public static final int TRANSIENT_REGISTRATION_FLAG = 1; /** * The number of transient flags. This is the length of the BitSet in the * flags fields. */ public static final int NO_OF_TRANSIENT_FLAGS = 2; /** * The mode where the transfer for this file to the pool * is constructed and the transfer job fails if the transfer fails. * The corresponding dT (dontTransfer) value is false. */ public static final int TRANSFER_MANDATORY = 0; /** * The mode where the transfer for this file to the pool is constructed, * but the transfer job should not fail if the transfer fails. * The corresponding dT (dontTransfer) value is optional. */ public static final int TRANSFER_OPTIONAL = 1; /** * The mode where the transfer for this file is not constructed. * The corresponding dT (dontTransfer) value is true. */ public static final int TRANSFER_NOT = 2; /** * The string value of a file that is of type data. * @see #DATA_FILE */ public static final String DATA_TYPE = "data"; /** * The string value of a file that is of type executable. * @see #DATA_FILE */ public static final String EXECUTABLE_TYPE = "executable"; /** * The string value of a file that is of type other. * @see #OTHER_FILE */ public static final String OTHER_TYPE = "other"; /** * The type denoting that a logical file is a data file. */ public static final int DATA_FILE = 0; /** * The type denoting that a logical file is a executable file. 
*/ public static final int EXECUTABLE_FILE = 1; /** * The type denoting that a logical file is an other file. */ public static final int OTHER_FILE = 2; /** * The logical name of the file. */ protected String mLogicalFile; /** * The type associated with the file. It can either be a data file or an * executable file. * * @see #DATA_FILE * @see #EXECUTABLE_FILE * @see #OTHER_FILE */ protected int mType; /** * Linkage of the file. Only used for parsers */ protected LINKAGE mLink; /** * The transfer flag associated with the file containing tristate of * transfer,dontTransfer and optional. * * @see #TRANSFER_MANDATORY * @see #TRANSFER_OPTIONAL * @see #TRANSFER_NOT */ protected int mTransferFlag; /** * The transient flags field which is kept as a bit field. It keeps track * of the dontRegister and optional attributes associated with the filename * in the dax. */ protected BitSet mFlags; /** * The size of the file. */ protected double mSize; /** * The default constructor. */ public PegasusFile() { super(); mFlags = new BitSet(NO_OF_TRANSIENT_FLAGS); mLogicalFile = new String(); //by default the type is DATA //and transfers are mandatory mType = DATA_FILE; mTransferFlag= this.TRANSFER_MANDATORY; mSize = -1; mLink = LINKAGE.NONE; } /** * The overloaded constructor. * * @param logName the logical name of the file. */ public PegasusFile(String logName) { super(); mFlags = new BitSet(NO_OF_TRANSIENT_FLAGS); mLogicalFile = logName; //by default the type is DATA //and transfers are mandatory mType = DATA_FILE; mTransferFlag= this.TRANSFER_MANDATORY; mSize = -1; mLink = LINKAGE.NONE; } /** * Sets the linkage for the file during parsing. * * @param link linkage type */ public void setLinkage( LINKAGE link ){ mLink = link; } /** * Returns the linkage for the file during parsing. * * @return the linkage */ public LINKAGE getLinkage(){ return mLink; } /** * It returns the lfn of the file that is associated with this transfer. * * @return the lfn associated with the transfer */ public String getLFN(){ return this.mLogicalFile; } /** * It sets the logical filename of the file that is being transferred. * * @param lfn the logical name of the file that this transfer is associated * with. */ public void setLFN(String lfn){ mLogicalFile = lfn; } /** * Sets the size for the file. * * @param size the size of the file. */ public void setSize( double size ) { mSize = size; } /** * Sets the size for the file. * * @param size the size of the file. */ public void setSize( String size ) { if( size == null ){ mSize = -1; } else{ mSize = Double.parseDouble( size ); } } /** * Returns the size for the file. Can be -1 if not set. * * @return size if set else -1. */ public double getSize(){ return mSize; } /** * Returns whether the type of file value is valid or not. * * @param type the value for the type of file. * * @return true if the value is in range. * false if the value is not in range. */ public boolean typeValid(int type){ return (type >= PegasusFile.DATA_FILE && type <= PegasusFile.OTHER_FILE ); } /** * Returns whether the transfer value for the mode is in range or not. * * @param transfer the value for the transfer. * * @return true if the value is in range. * false if the value is not in range. */ public boolean transferInRange(int transfer){ return (transfer >= PegasusFile.TRANSFER_MANDATORY && transfer <= PegasusFile.TRANSFER_NOT); } /** * Sets the type flag to value passed. * * @param type valid transfer value. * @exception IllegalArgumentException if the transfer mode is outside * its legal range. 
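 *
 * <p>Illustrative sketch (not part of the original source):
 * <pre>
 *   PegasusFile pf = new PegasusFile( "f.a" );
 *   pf.setType( PegasusFile.EXECUTABLE_FILE );   // accepted, within range
 *   pf.setType( 42 );                            // throws IllegalArgumentException
 * </pre>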
* * @see #DATA_FILE * @see #EXECUTABLE_FILE */ public void setType(int type) throws IllegalArgumentException{ if(typeValid(type)){ mType = type; } else{ throw new IllegalArgumentException(); } } /** * Sets the transient transfer flag to value passed. * * @param type valid transfer value. * @exception IllegalArgumentException if the transfer mode is outside * its legal range. * * @see #DATA_FILE * @see #EXECUTABLE_FILE */ public void setType( String type) throws IllegalArgumentException{ if( type == null || type.length() == 0) throw new IllegalArgumentException( "Invalid Type passed " + type ); if( type.equals( PegasusFile.DATA_TYPE )){ mType = PegasusFile.DATA_FILE; } else if( type.equals( PegasusFile.EXECUTABLE_TYPE )){ mType = PegasusFile.EXECUTABLE_FILE; } else if( type.equals( PegasusFile.OTHER_TYPE )){ mType = PegasusFile.OTHER_FILE; } else{ throw new IllegalArgumentException( "Invalid Type passed " + type ); } } /** * Sets the transient transfer flag to value passed. * * @param transfer valid transfer value. * @exception IllegalArgumentException if the transfer mode is outside * its legal range. * * @see #TRANSFER_MANDATORY * @see #TRANSFER_NOT * @see #TRANSFER_OPTIONAL */ public void setTransferFlag(int transfer) throws IllegalArgumentException{ if(this.transferInRange(transfer)){ mTransferFlag = transfer; } else{ throw new IllegalArgumentException(); } } /** * Sets the transient transfer flag corresponding to the string * value of transfer mode passed. The legal range of transfer values is * true|false|optional. * * @param flag tri-state transfer value as got from dontTransfer flag. * * @exception IllegalArgumentException if the transfer mode is outside * its legal range. * * @see #TRANSFER_MANDATORY * @see #TRANSFER_NOT * @see #TRANSFER_OPTIONAL */ public void setTransferFlag( String flag ) throws IllegalArgumentException{ this.setTransferFlag( flag, false ); } /** * Sets the transient transfer flag corresponding to the string * value of transfer mode passed. The legal range of transfer values is * true|false|optional. * * @param flag tri-state transfer value as got from dontTransfer flag. * @param doubleNegative indicates whether a double negative or not. * * @exception IllegalArgumentException if the transfer mode is outside * its legal range. * * @see #TRANSFER_MANDATORY * @see #TRANSFER_NOT * @see #TRANSFER_OPTIONAL */ public void setTransferFlag( String flag, boolean doubleNegative ) throws IllegalArgumentException{ if( flag == null || flag.length() == 0){ //set to default value. //throw new IllegalArgumentException(); mTransferFlag = this.TRANSFER_MANDATORY; return; } if( flag.equals("true") ){ mTransferFlag = (doubleNegative) ? this.TRANSFER_NOT : this.TRANSFER_MANDATORY; } else if( flag.equals("false")){ mTransferFlag = ( doubleNegative ) ? this.TRANSFER_MANDATORY: this.TRANSFER_NOT; } else if( flag.equals("optional")) mTransferFlag = this.TRANSFER_OPTIONAL; else{ throw new IllegalArgumentException( "Invalid transfer value passed " + flag ); } } /** * Returns whether the transfer is transient or not. By transient we mean * no transfer. * * @return true if transfer mode is TRANSFER_NOT * false if transfer mandatory or optional. */ public boolean getTransientTransferFlag(){ return (mTransferFlag == this.TRANSFER_NOT); } /** * Sets the transient registration flag to true. * * @deprecated * @see #setRegisterFlag( boolean ) */ public void setTransientRegFlag(){ mFlags.set(TRANSIENT_REGISTRATION_FLAG); } /** * Sets the transient registration flag to value specified. 
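 * Note that the underlying bit records the dontRegister semantics, hence
 * the negation in the implementation.
 *
 * <p>Illustrative sketch (not part of the original source):
 * <pre>
 *   pf.setRegisterFlag( false );        // sets the TRANSIENT_REGISTRATION_FLAG bit
 *   boolean r = pf.getRegisterFlag();   // false
 * </pre>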
* * @param value the value to set to */ public void setRegisterFlag( boolean value ){ mFlags.set( TRANSIENT_REGISTRATION_FLAG, !value ); } /** * Sets the optionalflag denoting the file to be optional to true. */ public void setFileOptional(){ mFlags.set(TRANSIENT_OPTIONAL_FLAG); } /** * Returns optionalflag denoting the file to be optional or not. * * @return true denoting the file is optional. * false denoting that file is not optional. */ public boolean fileOptional(){ return mFlags.get(TRANSIENT_OPTIONAL_FLAG); } /** * Returns the tristate transfer mode that is associated with the file. * * @return the int value denoting the type. * * @see #DATA_FILE * @see #EXECUTABLE_FILE * @see #OTHER_FILE */ public int getType(){ return mType; } /** * Returns the tristate transfer mode that is associated with the file. * * @return the int value denoting the tristate. * * @see #TRANSFER_MANDATORY * @see #TRANSFER_NOT * @see #TRANSFER_OPTIONAL */ public int getTransferFlag(){ return mTransferFlag; } /** * Returns the value of the register flag * * @return true denoting the file needs be registered into the replica * catalog. * false denoting that file does not need to be registered. */ public boolean getRegisterFlag(){ return !mFlags.get(TRANSIENT_REGISTRATION_FLAG); } /** * Returns the transient registration flag (the value of dontRegister). * * @return true denoting the file need not be registered into the replica * catalog. * false denoting that file needs to be registered. */ public boolean getTransientRegFlag(){ return mFlags.get(TRANSIENT_REGISTRATION_FLAG); } /** * Returns the bit fields that contain the transient flags (dR and optional). * * * @see #NO_OF_TRANSIENT_FLAGS * @see #TRANSIENT_OPTIONAL_FLAG * @see #TRANSIENT_REGISTRATION_FLAG */ public BitSet getFlags(){ return mFlags; } /** * Checks if an object is similar to the one referred to by this class. * We compare the primary key to determine if it is the same or not. * * @return true if the primary key (lfn,transfer flag,transient flag) match. * else false. */ /* public boolean equals(Object o){ if(o instanceof PegasusFile){ PegasusFile file = (PegasusFile) o; return (file.mLogicalFile.equals(this.mLogicalFile) && (file.getTransientRegFlag() == this.getTransientRegFlag()) && (file.getTransferFlag() == this.getTransferFlag())); } return false; } */ /** * Checks if an object is similar to the one referred to by this class. * We compare the primary key to determine if it is the same or not. * * @return true if the primary key (lfn) matches. * else false. */ public boolean equals( Object o ){ if(o instanceof PegasusFile){ PegasusFile file = (PegasusFile) o; return (file.mLogicalFile.equals(this.mLogicalFile)); } return false; } /** * Calculate a hash code value for the object to support hash tables. * * @return a hash code value for the object. */ public int hashCode() { return this.mLogicalFile.hashCode(); } /** * Returns a copy of the existing data object. * * @return clone of the object. */ public Object clone(){ PegasusFile pf = new PegasusFile(); pf.mLogicalFile = new String(mLogicalFile); pf.mFlags = (BitSet)this.mFlags.clone(); pf.mType = mType; pf.mTransferFlag = mTransferFlag; pf.mSize = mSize; return pf; } /** * Returns the type associated with the logical file. * * @return type of the file. 
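 *
 * <p>Illustrative note (not part of the original source): this is the string
 * counterpart of the integer type, e.g. EXECUTABLE_FILE maps back to
 * "executable", the same constant accepted by setType(String).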
*/ public String typeToString(){ String result = null; switch( mType ){ case DATA_FILE: result = DATA_TYPE; break; case EXECUTABLE_FILE: result = EXECUTABLE_TYPE; break; case OTHER_FILE: result = OTHER_TYPE; break; } return result; } /** * Returns the String version of the data object, which is in human readable * form. * * @return the dump of the data object into a string. */ public String toString(){ String st = "\n Logical Name :" + this.mLogicalFile + "\n Type :" + typeToString() + "\n Size :" + mSize + "\n Transient Flags (transfer,optional,dontRegister):" + " ( "; st += getTransferFlag() + ","; for(int i = 0; i < NO_OF_TRANSIENT_FLAGS; i ++) { st += mFlags.get(i) ; if( i < NO_OF_TRANSIENT_FLAGS) st += ","; } st += ")"; return st; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/Data.java0000644000175000017500000000541411757531137024447 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.common.logging.LogManagerFactory; import java.util.Enumeration; import java.util.Iterator; import java.util.Set; import java.util.Vector; import edu.isi.pegasus.common.logging.LogManager; /** * This is the container for all the Data classes. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2572 $ */ public abstract class Data implements Cloneable { /** * The LogManager object which is used to log all the messages. * */ public LogManager mLogger = LogManagerFactory.loadSingletonInstance( ); /** * The String which stores the message to be stored. */ public String mLogMsg; /** * The default constructor. */ public Data(){ mLogMsg = new String(); } /** * Returns the String version of the data object, which is in human readable * form. */ public abstract String toString(); /** * It converts the contents of the Vector to a String and returns it. * For this to work , all the objects making up the vector should be having * a valid toString() method. * * @param heading The heading you want to give * to the text which is printed * * @param vector The Vector whose * elements you want to print */ public String vectorToString(String heading,Vector vector){ Enumeration e = vector.elements(); String st = "\n" + heading; while(e.hasMoreElements()){ st += " " + e.nextElement().toString(); } return st; } /** * A small helper method that displays the contents of a Set in a String. * * @param delim The delimited between the members of the set. * @return String */ public String setToString(Set s, String delim){ Iterator it = s.iterator(); String st = new String(); while(it.hasNext()){ st += (String)it.next() + delim; } st = (st.length() > 0)? 
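                  //illustrative comment (not in the original source): strip the
                  //trailing delimiter that the loop above always appends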
st.substring(0,st.lastIndexOf(delim)): st; return st; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/TransferJob.java0000644000175000017500000000610411757531137026012 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; /** * This is a data class that stores the contents of the transfer job that * transfers the data. Later on, stdin etc. would be stored in it as well. * * @author Karan Vahi vahi@isi.edu * @author Gaurang Mehta gmehta@isi.edu * @version $Revision: 2590 $ */ public class TransferJob extends Job { /** * The site at which the transfer job would have run, had it been running * in a peer 2 peer transfer mode (non third party mode). */ private String mNonTPTSite; /** * The default constructor. */ public TransferJob() { super(); mNonTPTSite = null; } /** * The overloaded constructor that constructs a transfer job by wrapping * around the Job object. * * @param job the original job description. */ public TransferJob(Job job){ super(job); mNonTPTSite = null; } /** * Returns the site at which the job would have run if the transfer job was * being run in non third party mode. If the job is run in a non third party * mode, the result should be the same as the site where the transfer job * has been scheduled. * * @return the site at which the job would have run in a non third party mode, * null if not set. */ public String getNonThirdPartySite(){ return mNonTPTSite; } /** * Sets the non third party site for the transfer job. This is the site * at which the job would have run if the transfer job was being run in * non third party mode. * * @param site the name of the site */ public void setNonThirdPartySite(String site){ mNonTPTSite = site; } /** * Returns a textual description of the Transfer Job. * * @return the textual description. */ public String toString(){ StringBuffer sb = new StringBuffer(super.toString()); sb.append("\n").append(" Non TPT Site :"). append(getNonThirdPartySite()); return sb.toString(); } /** * Returns a new copy of the Object. The implementation is faulty: the * profiles are only copied shallowly, so the clone retains references to * the profile objects of the original. * * @return Object */ public Object clone(){ TransferJob newJob = new TransferJob((Job)super.clone()); newJob.setNonThirdPartySite(this.getNonThirdPartySite()); return newJob; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/Profile.java0000644000175000017500000001722111757531137025175 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.common.util.Escape; import java.util.ArrayList; import java.util.List; /** * This class holds information about the profiles associated with a * transformation catalog (tc) entry.
 * * @author Gaurang Mehta gmehta@isi.edu * @author Karan Vahi vahi@isi.edu * * @version $Revision: 2572 $ */ public class Profile extends Data { /** * A private static handle to the escape class. */ private static Escape mEscape = new Escape(); public static final String CONDOR = "condor"; public static final String GLOBUS = "globus"; public static final String VDS = "pegasus"; public static final String DAGMAN = "dagman"; public static final String HINTS = "hints"; public static final String ENV = "env"; public static final String STAT = "stat"; public static final String SELECTOR = "selector"; private String mNamespace; private String mKey; private String mValue; /** * Returns the unknown namespace message. * * @param namespace the namespace. * @return the message */ public static final String unknownNamespaceMessage( String namespace ){ StringBuffer sb = new StringBuffer(); sb.append( "Unknown namespace type " ).append( namespace ). append( " . Valid types are " ).append( validTypesToString() ); return sb.toString(); } /** * Returns a boolean indicating whether the namespace is valid or not. * * @param namespace the namespace * * @return true if valid namespace */ public static boolean namespaceValid( String namespace ){ boolean valid = false; //sanity checks if( namespace == null || namespace.length() < 2){ return valid; } return ( namespace.equalsIgnoreCase( CONDOR ) || namespace.equalsIgnoreCase( GLOBUS ) || namespace.equalsIgnoreCase( VDS ) || namespace.equalsIgnoreCase( DAGMAN ) || namespace.equalsIgnoreCase( HINTS ) || namespace.equalsIgnoreCase( ENV ) || namespace.equalsIgnoreCase( SELECTOR ) || namespace.equalsIgnoreCase( STAT ) ) ; } /** * Returns a comma separated string containing the valid namespace types. * * @return comma separated list. */ public static String validTypesToString( ){ StringBuffer sb = new StringBuffer(); //keep this list in sync with namespaceValid() above sb.append( CONDOR ).append( ',' ).append( GLOBUS ).append( ',' ). append( VDS ).append( ',' ).append( DAGMAN ).append( ',' ). append( HINTS ).append( ',' ).append( ENV ).append( ',' ). append( STAT ).append( ',' ).append( SELECTOR ); return sb.toString(); } /** * Default constructor for the class. */ public Profile() { mNamespace = null; mKey = null; mValue = null; } /** * This constructor sets the namespace, key and value of the Profile. * * @param namespace Takes a String as the namespace. Has to be one of the predefined types. * @param key Takes a String as the key. * @param value The value for the key as String * @throws RuntimeException if the namespace is not a valid one. */ public Profile( String namespace, String key, String value ) { if ( namespaceValid( namespace ) ){ mNamespace = new String( namespace ); mKey = new String( key ); mValue = new String( value ); } else { throw new RuntimeException( unknownNamespaceMessage( namespace ) ); } } /** * This method sets the namespace, key and value of the Profile. * * @param namespace Takes a String as the namespace. Has to be one of the predefined types. * @param key Takes a String as the key.
 * @param value The value for the key as String * * @throws RuntimeException if the namespace is not a valid one. */ public void setProfile( String namespace, String key, String value ) { if ( namespaceValid( namespace ) ){ mNamespace = new String( namespace ); mKey = new String( key ); mValue = new String( value ); } else { throw new RuntimeException( unknownNamespaceMessage( namespace ) ); } } /** * Returns the Profile (namespace, key and value); * @return ArrayList */ public List getProfile() { ArrayList m_profile = new ArrayList( 3 ); m_profile.add( mNamespace ); m_profile.add( mKey ); m_profile.add( mValue ); return m_profile; } /** * Sets the NameSpace of the Profile * * @param namespace the namespace * * @throws RuntimeException in case of an invalid namespace */ public void setProfileNamespace( String namespace ) { if( namespaceValid( namespace ) ){ mNamespace = namespace; } else { throw new RuntimeException( unknownNamespaceMessage( namespace ) ); } } /** * Returns the NameSpace of the Profile * @return String */ public String getProfileNamespace() { return mNamespace; } /** * Sets the profile key * * @param key the profile key */ public void setProfileKey( String key ) { mKey = key; } /** * Returns the Key of the Profile * @return String */ public String getProfileKey() { return mKey; } /** * Sets the profile value * * @param value the profile value */ public void setProfileValue( String value ) { mValue = value; } /** * Returns the Value for the profile * @return String */ public String getProfileValue() { return mValue; } /** * Returns the textual description of the contents of Profile * object in the multiline format. * * @return the textual description in multiline format. */ public String toMultiLine() { return this.toString(); } /** * This method returns a string of the contents of this object. * The values are always escaped. * * @return String * @see org.griphyn.common.util.Escape */ public String toString() { String output = "profile " + mNamespace + " \"" + mKey + "\" \"" + mEscape.escape(mValue) + "\""; // System.out.println(output); return output; } /** * This method returns an xml representation of the contents of this object. * @return String. */ public String toXML() { String output = "<profile namespace=\"" + mNamespace + "\" key=\"" + mKey + "\">" + mValue + "</profile>"; // System.out.println(output); return output; } /** * Returns a copy of the object. * * @return copy of the object. */ public Object clone() { Profile newprofile=null; try { newprofile = new Profile( mNamespace, mKey, mValue ); } catch ( Exception e ) { //the namespace was already validated, so this cannot happen } return newprofile; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/CompoundTransformation.java0000644000175000017500000001342511757531137030312 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.dax.Invoke; import java.util.Collection; import java.util.List; import java.util.LinkedList; /** * * A data class to contain compound transformations.
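 * For example (an illustrative reading, not from the original source), a
 * compound transformation "pegasus::preprocess:1.0" bundles the executable
 * and data files that the transformation depends on, along with any
 * notifications to invoke.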
* * @author Karan Vahi * @version $Revision: 3664 $ */ public class CompoundTransformation { /** * The namespace of the compound transformation. */ protected String mNamespace; /** * The name of the tranformation. */ protected String mName; /** * The version */ protected String mVersion; /** * The list of dependant executables */ protected List mUses; /** * All the notifications associated with the job */ protected Notifications mNotifications; /** * Constructor * * @param name of transformation */ public CompoundTransformation(String name) { this("", name, ""); } /** * Overloaded Constructor * * @param namespace namespace * @param name name * @param version version */ public CompoundTransformation(String namespace, String name, String version) { mNamespace = (namespace == null) ? "" : namespace; mName = (name == null) ? "" : name; mVersion = (version == null) ? "" : version; mUses = new LinkedList(); mNotifications = new Notifications(); } /** * Returns name of compound transformation. * * @return name */ public String getName() { return mName; } /** * Returns the namespace * * @return namespace */ public String getNamespace() { return mNamespace; } /** * Returns the version * * @return version */ public String getVersion() { return mVersion; } /** * Adds a dependant file. * * @param pf */ public void addDependantFile( PegasusFile pf ){ this.mUses.add( pf ); } /** * Returns the List of dependant files * * @return List of Dependant Files */ public List getDependantFiles() { return this.mUses; } /** * Adds a Invoke object correpsonding to a notification. * * @param invoke the invoke object containing the notification */ public void addNotification( Invoke invoke ){ this.mNotifications.add(invoke); } /** * Adds all the notifications passed to the underlying container. * * @param invokes the notifications to be added */ public void addNotifications( Notifications invokes ){ this.mNotifications.addAll(invokes); } /** * Returns a collection of all the notifications that need to be * done for a particular condition * * @param when the condition * * @return */ public Collection getNotifications( Invoke.WHEN when ){ return this.mNotifications.getNotifications(when); } /** * Returns all the notifications associated with the job. * * @return the notifications */ public Notifications getNotifications( ){ return this.mNotifications; } /** * Returns whether two objects are equal or not on the basis of the * complete name of the transformation. * * @param obj the reference object with which to compare. * * @return true, if the primary keys match, false otherwise. */ public boolean equals( Object obj ){ // ward against null if ( obj == null ) return false; // shortcut if ( obj == this ) return true; // compare similar objects only if ( ! (obj instanceof CompoundTransformation) ) return false; // now we can safely cast CompoundTransformation c = (CompoundTransformation) obj; return this.getCompleteName().equals( c.getCompleteName() ); } /** * Calculate a hash code value for the object to support hash tables. * The hashcode value is computed only on basis of namespace, name and version * fields * * @return a hash code value for the object. */ public int hashCode(){ return this.getCompleteName().hashCode(); } /** * Returns the complete name for the transformation. 
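 * <p>Illustrative note (not part of the original source): with namespace
 * "pegasus", name "preprocess" and version "1.0" (made-up values),
 * Separator.combine() yields the fully qualified form "pegasus::preprocess:1.0".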
 * * @return the complete name */ public String getCompleteName(){ return Separator.combine(mNamespace, mName, mVersion); } /** * Converts the object to a String * * @return the textual description */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( "Transformation -> " ).append( this.getCompleteName() ).append( "\n" ); for( PegasusFile pf : this.getDependantFiles() ){ sb.append( "\t " ); sb.append( pf.getType() == PegasusFile.DATA_FILE ? "data" : "executable" ). append( " -> ").append( pf ). append( "\n" ); } sb.append( "Notifications -> " ).append( "\n" ). append( this.getNotifications() ); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/Notifications.java0000644000175000017500000001151211757531137026403 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.planner.dax.Invoke; import edu.isi.pegasus.planner.dax.Invoke.WHEN; import java.util.Collection; import java.util.List; import java.util.EnumMap; import java.util.LinkedList; /** * A container class that stores all the notifications that need to be done * indexed by the various conditions. * * @author Karan Vahi * @version $Revision: 3833 $ */ public class Notifications extends Data{ /** * An enum map that associates the various notification events with the list * of actions that need to be taken. */ private EnumMap<Invoke.WHEN, List<Invoke>> mInvokeMap; /** * The default constructor. */ public Notifications() { reset(); } /** * Resets the internal invoke map. */ public void reset() { mInvokeMap = new EnumMap<Invoke.WHEN, List<Invoke>>( Invoke.WHEN.class ); Invoke.WHEN[] values = Invoke.WHEN.values(); for ( int i = 0; i < values.length; i++ ){ mInvokeMap.put( values[i], new LinkedList<Invoke>() ); } } /** * Adds an Invoke object corresponding to a notification. * * @param notification the notification object */ public void add( Invoke notification ){ if(notification == null){ return; // do nothing } //retrieve the list for the appropriate condition and then add List l = ( List )mInvokeMap.get( Invoke.WHEN.valueOf( notification.getWhen() )); l.add( notification); } /** * Adds all the notifications passed to the underlying container. * * @param notifications the notification object */ public void addAll( Notifications notifications ){ if(notifications == null){ return; // do nothing } for( Invoke.WHEN when : Invoke.WHEN.values() ){ this.addAll( when, notifications.getNotifications(when) ); } } /** * Returns a collection of all the notifications that need to be * done for a particular condition * * @param when the condition * * @return the collection of Invoke objects for that condition */ public Collection getNotifications( Invoke.WHEN when ){ return this.mInvokeMap.get(when); } /** * Returns a boolean indicating whether the notifications object is empty or not.
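 *
 * <p>Illustrative sketch (not part of the original source); the Invoke
 * constructor form, the WHEN value and the "/bin/notify-me" executable are
 * assumptions:
 * <pre>
 *   Notifications n = new Notifications();
 *   n.add( new Invoke( Invoke.WHEN.on_error, "/bin/notify-me" ) );
 *   boolean empty = n.isEmpty();   // false
 * </pre>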
 * * @return true if empty else false */ public boolean isEmpty(){ Invoke.WHEN[] values = Invoke.WHEN.values(); for ( int i = 0; i < values.length; i++ ){ if(!mInvokeMap.get( values[i]).isEmpty()) return false; } return true; } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ Notifications inv; try { inv = (Notifications)super.clone(); } catch (CloneNotSupportedException e) { //somewhere in the hierarchy chain clone is not implemented throw new RuntimeException( "Clone not implemented in the base class of " + this.getClass().getName(), e); } //super.clone() copies the reference to the invoke map. reset it so that //the copy gets its own lists instead of aliasing (and duplicating into) //the lists of the original inv.reset(); //traverse through all the enum keys for ( Invoke.WHEN when : Invoke.WHEN.values() ){ Collection c = this.getNotifications( when ); inv.addAll(when, c); } return inv; } /** * Returns a String description of the object * * @return the String description */ public String toString() { StringBuffer sb = new StringBuffer(); for( Invoke.WHEN when : Invoke.WHEN.values() ){ Collection c = this.getNotifications(when); for( Invoke invoke : c ){ sb.append( invoke.toString() ); } } return sb.toString(); } /** * Convenience method to add all the notifications corresponding * to a particular event * * @param when when the event happens * @param invokes the collection of notifications to add */ private void addAll(WHEN when, Collection invokes) { Collection c = this.mInvokeMap.get(when); c.addAll( invokes ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/DagInfo.java0000644000175000017500000004666311757531137025110 0ustar ryngerynge /** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Currently; import edu.isi.pegasus.common.util.Version; import java.io.File; import java.util.Enumeration; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.HashSet; import java.util.TreeMap; import java.util.Vector; /** * Holds the information needed to make one dag file corresponding to an Abstract * Dag. It holds the information to generate the .dag file which is submitted to * Condor. * * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4003 $ */ public class DagInfo extends Data { /** * The default name for the ADag object, if not supplied in the DAX. */ private static final String DEFAULT_NAME = "PegasusRun"; /** * Vector of String objects containing the jobname_id of jobs making * the abstract dag. */ public Vector dagJobs; /** * Captures the parent child relations making up the DAG. It is a Vector of * PCRelation objects. */ public Vector relations; /** * The name of the Abstract Dag taken from the adag element of the DAX * generated by the Abstract Planner. */ public String nameOfADag; /** * Refers to the number of the Abstract Dags which are being sent to the * Concrete Planner in response to the user's request. */ public String count; /** * Refers to the number of the Dag. Index can vary from 0 to count - 1.
*/ public String index; /** * It is a unique identifier identifying the concrete DAG generated by Pegasus. * It consists of the dag name and the timestamp. * * @see #flowIDName * @see #mFlowTimestamp */ public String flowID; /** * It is the name of the dag as generated by Chimera in the dax. If none is * specified then a default name of PegasusRun is assigned. */ public String flowIDName; /** * The ISO timestamp corresponding to the time when Pegasus is invoked for a * dax. It is used to generate the random directory names also if required. */ private String mFlowTimestamp; /** * Keeps the last modified time of the DAX. */ private String mDAXMTime; /** * Identifies the release version of the VDS software that was * used to generate the workflow. It is populated from Version.java. * * @see org.griphyn.common.util.Version */ public String releaseVersion; /** * The workflow metric objects that contains metrics about the workflow being * planned. */ private WorkflowMetrics mWFMetrics; /** * Contains a unique ordered listing of the logical names referred * to by the dag. The TreeMap implementation guarentees us a log(n) execution * time for the basic operations. Hence should scale well. The key for the * map is the lfn name. The value is a String flag denoting whether this * file is an input(i) or output(o) or both (b) or none(n). A value of * none(n) would denote an error condition. */ public TreeMap lfnMap; /** * The DAX Version */ private String mDAXVersion; //for scripts later /** * The default constructor. */ public DagInfo() { dagJobs = new Vector(); relations = new Vector(); nameOfADag = new String(); count = new String(); index = new String(); flowID = new String(); flowIDName = new String(); mFlowTimestamp = new String(); mDAXMTime = new String(); releaseVersion = new String(); mDAXVersion = new String(); lfnMap = new TreeMap(); mWFMetrics = new WorkflowMetrics(); } /** * Adds a new job to the dag. * * @param job the job to be added */ public void addNewJob( Job job ) { dagJobs.add( job.getID() ); //increment the various metrics mWFMetrics.increment( job ); } /** * Adds a new PCRelation pair to the Vector of PCRelation * pairs. Since we are adding a new relation the isDeleted parameter should * be false * * @param relation the relation to be added */ public void addNewRelation(PCRelation relation) { relations.addElement( relation ); } /** * Adds a new PCRelation pair to the Vector of PCRelation * pairs. Since we are adding a new relation the isDeleted parameter should * be false. * * @param parent The parent in the relation pair * @param child The child in the relation pair * * @see #relations */ public void addNewRelation(String parent, String child) { PCRelation newRelation = new PCRelation(parent, child); relations.addElement(newRelation); } /** * Adds a new PCRelation pair to the Vector of PCRelation pairs. * * @param parent The parent in the relation pair * @param child The child in the relation pair * @param isDeleted Whether the relation has been deleted due to the * reduction algorithm or not * * @see #relations */ public void addNewRelation(String parent, String child, boolean isDeleted) { PCRelation newRelation = new PCRelation(parent, child, isDeleted); relations.addElement(newRelation); } /** * Removes a job from the dag/graph structure. It however does not * delete the relations the edges that refer to the job. * * @param job the job to be removed * * @return boolean indicating whether removal was successful or not. 
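 *
 * <p>Illustrative note (not part of the original source): only the job id is
 * removed from dagJobs and the workflow metrics are decremented; any edges in
 * relations that mention the job are, as documented above, left in place.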
*/ public boolean remove( Job job ){ mWFMetrics.decrement( job ); return dagJobs.remove( job.getID() ); } /** * It returns the list of lfns referred to by the DAG. The list is unique * as it is gotten from iterating through the lfnMap. * * @return a Set of String objects corresponding to the * logical filenames */ public Set getLFNs(){ return this.getLFNs( false ); } /** * Returns the list of lfns referred to by the DAG. The list is unique * as it is gotten from iterating through the lfnMap. The contents of the list * are determined on the basis of the command line options passed by the user * at runtime. For e.g. if the user has specified force, then one needs to * search only for the input files. * * @param onlyInput a boolean flag indicating that you need only the input * files to the whole workflow * * @return a set of logical filenames. */ public Set getLFNs( boolean onlyInput ) { Set lfns = onlyInput ? new HashSet( lfnMap.size()/3 ): new HashSet( lfnMap.size() ); String key = null; String val = null; //if the force option is set we //need to search only for the //input files in the dag i.e //whose link is set to input in //the dag. if ( onlyInput ){ for (Iterator it = lfnMap.keySet().iterator(); it.hasNext(); ) { key = (String) it.next(); val = (String) lfnMap.get(key); if ( val.equals( "i" ) ) { lfns.add( key ); } } } else { lfns=new HashSet( lfnMap.keySet() ); } return lfns; } /** * Returns the label of the workflow, that was specified in the DAX. * * @return the label of the workflow. */ public String getLabel(){ return (nameOfADag == null)? this.DEFAULT_NAME: nameOfADag; } /** * Returns the dax version * * @return teh dax version. */ public String getDAXVersion( ) { return this.mDAXVersion; } /** * Returns the last modified time for the file containing the workflow * description. * * @return the MTime */ public String getMTime(){ return mDAXMTime; } /** * Returns the flow timestamp for the workflow. * * @return the flowtimestamp */ public String getFlowTimestamp(){ return mFlowTimestamp; } /** * Sets the flow timestamp for the workflow. * * @param timestamp the flowtimestamp */ public void setFlowTimestamp( String timestamp ){ mFlowTimestamp = timestamp; } /** * Returns the number of jobs in the dag on the basis of number of elements * in the dagJobs Vector. * * @return the number of the jobs. */ public int getNoOfJobs() { return dagJobs.size(); } /** * Gets all the parents of a particular node. * * @param node the name of the job whose parents are to be found. * * @return Vector corresponding to the parents of the node. */ public Vector getParents(String node) { //getting the parents of that node Enumeration ePcRel = this.relations.elements(); Vector vParents = new Vector(); PCRelation currentRelPair; while (ePcRel.hasMoreElements()) { currentRelPair = (PCRelation) ePcRel.nextElement(); if (currentRelPair.child.trim().equalsIgnoreCase(node)) { vParents.addElement(new String(currentRelPair.parent)); } } return vParents; } /** * Get all the children of a particular node. * * @param node the name of the node whose children we want to find. * * @return Vector containing the children of the node. 
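 *
 * <p>Illustrative note (not part of the original source): like getParents(),
 * this is a linear scan over the relations Vector; querying every node this
 * way is what gives getLeafNodes() and getRootNodes() their quadratic cost.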
     */
    public Vector getChildren(String node) {
        Enumeration ePcRel = this.relations.elements();
        Vector vChildren = new Vector();
        PCRelation currentRelPair;
        while (ePcRel.hasMoreElements()) {
            currentRelPair = (PCRelation) ePcRel.nextElement();
            if (currentRelPair.parent.trim().equalsIgnoreCase(node)) {
                vChildren.addElement(new String(currentRelPair.child));
            }
        }
        return vChildren;
    }

    /**
     * This returns all the leaf nodes of the dag. The way the structure of Dag
     * is specified in terms of the parent child relationship pairs, the
     * determination of the leaf nodes can be computationally intensive. The
     * complexity is of order n^2.
     *
     * @return Vector of String corresponding to the job names of
     *         the leaf nodes.
     *
     * @see org.griphyn.cPlanner.classes.PCRelation
     * @see org.griphyn.cPlanner.classes.DagInfo#relations
     */
    public Vector getLeafNodes() {
        Vector leafJobs = new Vector();
        Vector vJobs = this.dagJobs;
        Vector vRelations = this.relations;
        Enumeration eRel;
        String job;
        PCRelation pcRel;
        boolean isLeaf = false;
        //search for all the jobs which are leaves, i.e. never appear as a parent in a relation
        Enumeration e = vJobs.elements();
        while (e.hasMoreElements()) {
            //traverse through all the relations
            job = (String) e.nextElement();
            eRel = vRelations.elements();
            isLeaf = true;
            while (eRel.hasMoreElements()) {
                pcRel = (PCRelation) eRel.nextElement();
                if (pcRel.parent.equalsIgnoreCase(job)) {
                    //the job is a parent, hence not a leaf
                    isLeaf = false;
                    break;
                }
            }
            //adding if leaf to vector
            if (isLeaf) {
                mLogger.log("Leaf job is " + job, LogManager.DEBUG_MESSAGE_LEVEL);
                leafJobs.addElement(new String(job));
            }
        }
        return leafJobs;
    }

    /**
     * It determines the root nodes for the ADag looking at the relation pairs
     * of the adag. The way the structure of Dag is specified in terms
     * of the parent child relationship pairs, the determination of the root
     * nodes can be computationally intensive. The complexity is of
     * order n^2.
     *
     * @return the root jobs of the Adag
     *
     * @see org.griphyn.cPlanner.classes.PCRelation
     * @see org.griphyn.cPlanner.classes.DagInfo#relations
     */
    public Vector getRootNodes() {
        Vector rootJobs = new Vector();
        Vector vJobs = this.dagJobs;
        Vector vRelations = this.relations;
        Enumeration eRel;
        String job;
        PCRelation pcRel;
        boolean isRoot = false;
        //search for all the jobs which are roots, i.e. never appear as a child in a relation
        Enumeration e = vJobs.elements();
        while (e.hasMoreElements()) {
            //traverse through all the relations
            job = (String) e.nextElement();
            eRel = vRelations.elements();
            isRoot = true;
            while (eRel.hasMoreElements()) {
                pcRel = (PCRelation) eRel.nextElement();
                if (pcRel.child.equalsIgnoreCase(job)) {
                    //the job is a child, hence not a root
                    isRoot = false;
                    break;
                }
            }
            //adding if root to vector
            if (isRoot) {
                mLogger.log("Root job is " + job, LogManager.DEBUG_MESSAGE_LEVEL);
                rootJobs.addElement(new String(job));
            }
        }
        return rootJobs;
    }

    /**
     * Returns the workflow metrics so far.
     *
     * @return the workflow metrics
     */
    public WorkflowMetrics getWorkflowMetrics(){
        return this.mWFMetrics;
    }

    /**
     * Generates the flow id for this current run. It is made of the name of the
     * dag and a timestamp. It is a simple concatenation of the flowIDName and
     * the mFlowTimestamp. For it to work correctly the function needs to be
     * called after the flow name and timestamp have been generated.
     */
    public void generateFlowID() {
        StringBuffer sb = new StringBuffer(40);
        sb.append(flowIDName).append("-").append(mFlowTimestamp);
        flowID = sb.toString();
    }

    /**
     * Generates the name of the flow. It is the same as the nameOfADag if
     * specified in the dax generated by Chimera.
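     *
     * <p>Illustrative sketch (values assumed, not from the original source):
     * with nameOfADag = "diamond" and index = "0", generateFlowName() yields
     * the flowIDName "diamond-0"; a subsequent generateFlowID() then appends
     * the timestamp, e.g. "diamond-0-20120530T120000-0700".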
     */
    public void generateFlowName(){
        StringBuffer sb = new StringBuffer();
        if (nameOfADag != null)
            sb.append(nameOfADag);
        else
            sb.append(this.DEFAULT_NAME);
        //append the count. that is important for deferred planning
        sb.append("-").append(index);
        flowIDName = sb.toString();
    }

    /**
     * Sets the label for the workflow.
     *
     * @param label the label to be assigned to the workflow
     */
    public void setLabel(String label){
        this.nameOfADag = label;
        mWFMetrics.setLabel( label );
    }

    /**
     * Sets the dax version
     *
     * @param version the version of the DAX
     */
    public void setDAXVersion( String version ) {
        mDAXVersion = version;
    }

    /**
     * Sets the mtime (last modified time) for the DAX. It is the time when
     * the DAX file was last modified. If the DAX file does not exist or an
     * IO error occurs, the MTime is set to 0L, i.e. the epoch. The DAX mTime is
     * always generated in an extended format. Generating it not in the extended
     * format leads to the XML parser tripping while parsing the invocation
     * record generated by Kickstart.
     *
     * @param f the file descriptor to the DAX|PDAX file.
     */
    public void setDAXMTime( File f ){
        long time = f.lastModified();
        mDAXMTime = Currently.iso8601(false,true,false, new java.util.Date(time));
    }

    /**
     * Sets the mtime (last modified time) for the DAX. It is the time when
     * the DAX file was last modified. If the DAX file does not exist or an
     * IO error occurs, the MTime is set to 0L, i.e. the epoch. The DAX mTime is
     * always generated in an extended format. Generating it not in the extended
     * format leads to the XML parser tripping while parsing the invocation
     * record generated by Kickstart.
     *
     * @param time iso formatted time string indicating the last modified time
     *             of DAX
     */
    public void setDAXMTime( String time ){
        mDAXMTime = time;
    }

    /**
     * Grabs the release version from the VDS.Properties file.
     *
     * @see org.griphyn.common.util.Version
     */
    public void setReleaseVersion() {
        this.releaseVersion = Version.instance().toString();
    }

    /**
     * Updates the lfn map, that contains the mapping of an lfn with the type.
     *
     * @param lfn  the logical file name.
     * @param type the type of lfn (i|o|b). usually a character.
     */
    public void updateLFNMap(String lfn,String type){
        Object entry = lfnMap.get(lfn);
        if(entry == null){
            lfnMap.put(lfn,type);
            return;
        }
        else{
            //there is a preexisting entry in the map, check if it needs to be
            //updated
            if(!(entry.equals("b") || entry.equals(type))){
                //types do not match. so upgrade the type to both
                lfnMap.put(lfn,"b");
            }
        }
    }

    /**
     * Returns a new copy of the Object.
     *
     * @return a copy of the object.
     */
    public Object clone() {
        DagInfo dag = new DagInfo();
        dag.dagJobs = (Vector)this.dagJobs.clone();
        dag.relations = (Vector)this.relations.clone();
        dag.nameOfADag = new String(this.nameOfADag);
        dag.count = new String(this.count);
        dag.index = new String(this.index);
        dag.mDAXVersion = this.mDAXVersion;
        dag.flowID = new String(this.flowID);
        dag.flowIDName = new String(this.flowIDName);
        dag.mFlowTimestamp = new String(this.mFlowTimestamp);
        dag.mDAXMTime = new String(this.mDAXMTime);
        dag.releaseVersion = new String(this.releaseVersion);
        dag.lfnMap = (TreeMap)this.lfnMap.clone();
        dag.mWFMetrics = ( WorkflowMetrics )this.mWFMetrics.clone();
        return dag;
    }

    /**
     * Returns a textual description of the object.
     *
     * @return textual description.
*/ public String toString() { String st = "\n " + "\n Name of ADag : " + this.nameOfADag + "\n Index : " + this.index + " Count :" + this.count + "\n DAX Version : " + this.mDAXVersion + //"\n FlowId : " + this.flowID + "\n FlowName : " + this.flowIDName + "\n FlowTimestamp: " + this.mFlowTimestamp + "\n Release Ver : " + this.releaseVersion + vectorToString(" Relations making the Dag ", this.relations) + "\n LFN List is " + this.lfnMap; return st; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/ADag.java0000644000175000017500000005145411757531137024377 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.planner.catalog.transformation.classes.TransformationStore; import edu.isi.pegasus.planner.dax.Invoke; import java.util.Enumeration; import java.util.Iterator; import java.util.Set; import java.util.TreeSet; import java.util.Vector; import java.io.Writer; import java.io.StringWriter; import java.io.IOException; import java.util.Collection; import java.util.UUID; /** * This class object contains the info about a Dag. * DagInfo object contains the information to create the .dax file. * vJobSubInfos is a Vector containing Job objects of jobs making * the Dag. * Each subinfo object contains information needed to generate a submit * file for that job. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 3970 $ * * @see DagInfo * @see Job */ public class ADag extends Data { /** * The DagInfo object which contains the information got from parsing the * dax file. */ public DagInfo dagInfo; /** * Vector of Job objects. Each Job object contains * information corresponding to the submit file for one job. */ public Vector vJobSubInfos; /** * The root of the submit directory hierarchy for the DAG. This is the * directory where generally the DAG related files like the log files, * .dag and dagman output files reside. */ private String mSubmitDirectory; /** * The optional request ID associated with the DAX. */ private String mRequestID; /** * Handle to the replica store that stores the replica catalog * user specifies in the DAX */ protected ReplicaStore mReplicaStore; /** * Handle to the transformation store that stores the transformation catalog * user specifies in the DAX */ protected TransformationStore mTransformationStore; /** * The Root Workflow UUID. */ protected String mRootWorkflowUUID; /** * The UUID associated with the workflow. */ protected String mWorkflowUUID; /** * Boolean indicating whether the refinement process on the workflow has * started or not. */ protected boolean mWorkflowRefinementStarted; /** * All the notifications associated with the job */ protected Notifications mNotifications; /** * Initialises the class member variables. 
     */
    public ADag() {
        dagInfo = new DagInfo();
        vJobSubInfos = new Vector();
        mSubmitDirectory = ".";
        mWorkflowUUID = generateWorkflowUUID();
        mRootWorkflowUUID = null;
        mWorkflowRefinementStarted = false;
        mNotifications = new Notifications();
        resetStores();
    }

    /**
     * Overloaded constructor.
     *
     * @param dg    the DagInfo
     * @param vSubs the jobs in the workflow.
     */
    public ADag (DagInfo dg, Vector vSubs){
        this.dagInfo = (DagInfo)dg.clone();
        this.vJobSubInfos = (Vector)vSubs.clone();
        mSubmitDirectory = ".";
        mWorkflowUUID = generateWorkflowUUID();
        mRootWorkflowUUID = null;
        mWorkflowRefinementStarted = false;
        mNotifications = new Notifications();
        resetStores();
    }

    /**
     * Adds an Invoke object corresponding to a notification.
     *
     * @param invoke the invoke object containing the notification
     */
    public void addNotification( Invoke invoke ){
        this.mNotifications.add(invoke);
    }

    /**
     * Adds all the notifications passed to the underlying container.
     *
     * @param invokes the notifications to be added
     */
    public void addNotifications( Notifications invokes ){
        this.mNotifications.addAll(invokes);
    }

    /**
     * Returns a collection of all the notifications that need to be
     * done for a particular condition
     *
     * @param when the condition
     *
     * @return the collection of notifications for that condition
     */
    public Collection getNotifications( Invoke.WHEN when ){
        return this.mNotifications.getNotifications(when);
    }

    /**
     * Returns all the notifications associated with the job.
     *
     * @return the notifications
     */
    public Notifications getNotifications( ){
        return this.mNotifications;
    }

    /**
     * Resets the replica and transformation stores.
     */
    public void resetStores( ){
        this.mReplicaStore = new ReplicaStore();
        this.mTransformationStore = new TransformationStore();
    }

    /**
     * Returns a new copy of the Object.
     *
     * @return the clone of the object.
     */
    public Object clone(){
        ADag newAdag = new ADag();
        newAdag.dagInfo = (DagInfo)this.dagInfo.clone();
        newAdag.vJobSubInfos = (Vector)this.vJobSubInfos.clone();
        newAdag.setBaseSubmitDirectory( this.mSubmitDirectory );
        newAdag.setRequestID( this.mRequestID );
        newAdag.setRootWorkflowUUID( this.getRootWorkflowUUID() );
        newAdag.setWorkflowRefinementStarted( this.mWorkflowRefinementStarted );
        //the stores are not a true clone
        newAdag.setReplicaStore(mReplicaStore);
        newAdag.setTransformationStore(mTransformationStore);
        newAdag.setWorkflowUUID( this.getWorkflowUUID() );
        newAdag.addNotifications( this.getNotifications() );
        return newAdag;
    }

    /**
     * Returns the UUID for the Root workflow
     *
     * @return the UUID of the workflow
     */
    public String getRootWorkflowUUID() {
        return this.mRootWorkflowUUID;
    }

    /**
     * Sets the root UUID for the workflow
     *
     * @param uuid the UUID of the workflow
     */
    public void setRootWorkflowUUID( String uuid ) {
        this.mRootWorkflowUUID = uuid;
    }

    /**
     * Returns the UUID for the workflow
     *
     * @return the UUID of the workflow
     */
    public String getWorkflowUUID() {
        return this.mWorkflowUUID;
    }

    /**
     * Sets the UUID for the workflow
     *
     * @param uuid the UUID of the workflow
     */
    public void setWorkflowUUID( String uuid ) {
        this.mWorkflowUUID = uuid;
    }

    /**
     * Generates the UUID for the workflow
     *
     * @return the UUID of the workflow
     */
    protected String generateWorkflowUUID() {
        return UUID.randomUUID().toString();
    }

    /**
     * Returns a boolean indicating whether the workflow refinement has started
     * or not
     *
     * @return boolean
     */
    public boolean hasWorkflowRefinementStarted(){
        return this.mWorkflowRefinementStarted;
    }

    /**
     * Sets whether the workflow refinement has started or not
     *
     * @param state the boolean value
     */
    public void setWorkflowRefinementStarted( boolean state ){
        this.mWorkflowRefinementStarted = state;
    }

    /**
     * Returns the String description of the dag associated with this object.
     *
     * @return textual description.
     */
    public String toString(){
        String st = "\n Submit Directory " + this.mSubmitDirectory +
                    "\n Root Workflow UUID " + this.getRootWorkflowUUID() +
                    "\n Workflow UUID " + this.getWorkflowUUID() +
                    "\n Workflow Refinement Started " + this.hasWorkflowRefinementStarted() +
                    "\n" + this.dagInfo.toString() +
                    vectorToString("\n Jobs making the DAG ",this.vJobSubInfos);
        return st;
    }

    /**
     * This adds a new job to the ADAG object. It ends up adding both the job name
     * and the job description to the internal structure.
     *
     * @param job the new job that is to be added to the ADag.
     */
    public void add(Job job){
        //add to the dagInfo
        dagInfo.addNewJob(job );
        vJobSubInfos.addElement(job);
    }

    /**
     * Removes all the jobs from the workflow, and all the edges between
     * the jobs. The only thing that remains is the meta data about the
     * workflow.
     */
    public void clearJobs(){
        vJobSubInfos.clear();
        dagInfo.dagJobs.clear();
        dagInfo.relations.clear();
        dagInfo.lfnMap.clear();
        //reset the workflow metrics also
        this.getWorkflowMetrics().reset();
    }

    /**
     * Returns whether the workflow is empty or not.
     *
     * @return boolean
     */
    public boolean isEmpty(){
        return vJobSubInfos.isEmpty();
    }

    /**
     * Removes a particular job from the workflow. It however does not
     * delete the relations, i.e. the edges, that refer to the job.
     *
     * @param job the Job object containing the job description.
     *
     * @return boolean indicating whether the removal was successful or not.
     */
    public boolean remove(Job job){
        boolean a = dagInfo.remove( job );
        boolean b = vJobSubInfos.remove(job);
        return a && b;
    }

    /**
     * Returns the number of jobs in the dag on the basis of number of elements
     * in the dagJobs Vector.
     *
     * @return the number of jobs.
     */
    public int getNoOfJobs(){
        return this.dagInfo.getNoOfJobs();
    }

    /**
     * Sets the request id.
     *
     * @param id the request id.
     */
    public void setRequestID( String id ){
        mRequestID = id;
    }

    /**
     * Returns the request id.
     *
     * @return the request id.
     */
    public String getRequestID( ){
        return mRequestID;
    }

    /**
     * Returns the workflow id
     *
     * @return the abstract workflow id
     */
    public String getAbstractWorkflowName(){
        StringBuffer id = new StringBuffer();
        id.append( this.dagInfo.getLabel() ).append( "_" ).append( this.dagInfo.index );
        return id.toString();
    }

    /**
     * Returns the workflow id
     *
     * @return the executable workflow id
     */
    public String getExecutableWorkflowName(){
        StringBuffer id = new StringBuffer();
        id.append( this.dagInfo.getLabel() ).append( "_" ).append( this.dagInfo.index ).
           append( "." ).append( "dag");
        return id.toString();
    }

    /**
     * Adds a new PCRelation pair to the Vector of PCRelation
     * pairs. For the new relation the isDeleted parameter is set to false.
     *
     * @param parent The parent in the relation pair
     * @param child  The child in the relation pair
     *
     * @see org.griphyn.cPlanner.classes.PCRelation
     */
    public void addNewRelation(String parent, String child){
        PCRelation newRelation = new PCRelation(parent,child);
        this.dagInfo.relations.addElement(newRelation);
    }

    /**
     * Adds a new PCRelation pair to the Vector of PCRelation
     * pairs.
     *
     * @param parent    The parent in the relation pair
     * @param child     The child in the relation pair
     * @param isDeleted Whether the relation has been deleted due to the reduction
     *                  algorithm or not.
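     *
     * <p>Illustrative sketch (job identifiers assumed, not from the original
     * source):
     * <pre>{@code
     *   adag.add( preprocessJob );
     *   adag.add( findrangeJob );
     *   adag.addNewRelation( "preprocess_ID000001", "findrange_ID000002" );
     * }</pre>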
     *
     * @see org.griphyn.cPlanner.classes.PCRelation
     */
    public void addNewRelation(String parent, String child, boolean isDeleted){
        PCRelation newRelation = new PCRelation(parent,child,isDeleted);
        this.dagInfo.relations.addElement(newRelation);
    }

    /**
     * Sets the submit directory for the workflow.
     *
     * @param dir the submit directory.
     */
    public void setBaseSubmitDirectory(String dir){
        this.mSubmitDirectory = dir;
    }

    /**
     * Returns the label of the workflow, that was specified in the DAX.
     *
     * @return the label of the workflow.
     */
    public String getLabel(){
        return this.dagInfo.getLabel();
    }

    /**
     * Returns the dax version
     *
     * @return the dax version.
     */
    public String getDAXVersion( ) {
        return this.dagInfo.getDAXVersion();
    }

    /**
     * Returns the last modified time for the file containing the workflow
     * description.
     *
     * @return the MTime
     */
    public String getMTime(){
        return this.dagInfo.getMTime();
    }

    /**
     * Returns the root of submit directory hierarchy for the workflow.
     *
     * @return the directory.
     */
    public String getBaseSubmitDirectory(){
        return this.mSubmitDirectory;
    }

    /**
     * Gets all the parents of a particular node
     *
     * @param node the name of the job whose parents are to be found.
     *
     * @return Vector corresponding to the parents of the node
     */
    public Vector getParents(String node){
        return this.dagInfo.getParents(node);
    }

    /**
     * Get all the children of a particular node.
     *
     * @param node the name of the node whose children we want to find.
     *
     * @return Vector containing the children of the node
     */
    public Vector getChildren(String node){
        return this.dagInfo.getChildren(node);
    }

    /**
     * Returns all the leaf nodes of the dag. The way the structure of Dag is
     * specified, in terms of the parent child relationship pairs, the
     * determination of the leaf nodes can be computationally intensive. The
     * complexity is of order n^2
     *
     * @return Vector of String corresponding to the job names of
     *         the leaf nodes.
     *
     * @see org.griphyn.cPlanner.classes.PCRelation
     * @see org.griphyn.cPlanner.classes.DagInfo#relations
     */
    public Vector getLeafNodes(){
        return this.dagInfo.getLeafNodes();
    }

    /**
     * It returns a unique list of the execution sites that the Planner
     * has mapped the dax to so far in its stage of planning. This is a
     * subset of the pools specified by the user at runtime.
     *
     * @return a TreeSet containing a list of siteID's of the sites where the
     *         dag has to be run.
     */
    public Set getExecutionSites(){
        Set set = new TreeSet();
        Job sub = null;
        for(Iterator it = this.vJobSubInfos.iterator();it.hasNext();){
            sub = (Job)it.next();
            set.add(sub.executionPool);
        }
        //remove the stork pool
        set.remove("stork");
        return set;
    }

    /**
     * Sets the Replica Store
     *
     * @param store the Replica Store
     */
    public void setReplicaStore( ReplicaStore store ){
        this.mReplicaStore = store;
    }

    /**
     * Returns the Replica Store
     *
     * @return the Replica Store
     */
    public ReplicaStore getReplicaStore( ){
        return this.mReplicaStore;
    }

    /**
     * Sets the Transformation Store
     *
     * @param store the Transformation Store
     */
    public void setTransformationStore( TransformationStore store ){
        this.mTransformationStore = store;
    }

    /**
     * Returns the Transformation Store
     *
     * @return the Transformation Store
     */
    public TransformationStore getTransformationStore( ){
        return this.mTransformationStore;
    }

    /**
     * It determines the root nodes for the ADag looking at the relation pairs
     * of the adag. The way the structure of Dag is specified in terms
     * of the parent child relationship pairs, the determination of the root
     * nodes can be computationally intensive. The complexity is of order n^2.
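     *
     * <p>For illustration (a sketch, not from the original source): with edges
     * (A,B) and (B,C), getRootNodes() returns [A] while getLeafNodes()
     * returns [C].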
     *
     * @return the root jobs of the Adag
     *
     * @see org.griphyn.cPlanner.classes.PCRelation
     * @see org.griphyn.cPlanner.classes.DagInfo#relations
     */
    public Vector getRootNodes(){
        return this.dagInfo.getRootNodes();
    }

    /**
     * Returns an iterator for traversing through the jobs in the workflow.
     *
     * @return Iterator
     */
    public Iterator jobIterator(){
        return this.vJobSubInfos.iterator();
    }

    /**
     * This returns a Job object corresponding to the job by looking through
     * all the subInfos.
     *
     * @param job jobName of the job for which we need the subInfo object.
     *
     * @return the Job object corresponding to the job
     */
    public Job getSubInfo(String job){
        Job sub = null;
        //System.out.println("Job being considered is " + job);
        for ( Enumeration e = this.vJobSubInfos.elements(); e.hasMoreElements(); ){
            sub = (Job)e.nextElement();
            if(job.equalsIgnoreCase(sub.jobName)){
                return sub;
            }
        }
        throw new RuntimeException("Can't find the sub info object for job " + job);
    }

    /**
     * Returns the metrics about the workflow.
     *
     * @return the WorkflowMetrics
     */
    public WorkflowMetrics getWorkflowMetrics(){
        return this.dagInfo.getWorkflowMetrics();
    }

    /**
     * Returns the DOT description of the object. This is used for visualizing
     * the workflow.
     *
     * @return String containing the workflow description in DOT format.
     *
     * @exception IOException if something fishy happens to the stream.
     */
    public String toDOT() throws IOException{
        Writer writer = new StringWriter(32);
        toDOT( writer, "" );
        return writer.toString();
    }

    /**
     * Returns the DOT description of the object. This is used for visualizing
     * the workflow.
     *
     * @param stream is a stream opened and ready for writing. This can also
     *               be a StringWriter for efficient output.
     * @param indent is a String of spaces used for pretty
     *               printing. The initial amount of spaces should be an empty
     *               string. The parameter is used internally for the recursive
     *               traversal.
     *
     * @exception IOException if something fishy happens to the stream.
     */
    public void toDOT( Writer stream, String indent ) throws IOException {
        String newLine = System.getProperty( "line.separator", "\r\n" );
        String newIndent = (indent == null ) ? "\t" : indent + "\t";
        //write out the dot header
        writeDOTHeader( stream, null );
        //traverse through the jobs
        for( Iterator it = jobIterator(); it.hasNext(); ){
            ( (Job)it.next() ).toDOT( stream, newIndent );
        }
        stream.write( newLine );
        //traverse through the edges
        for( Iterator it = dagInfo.relations.iterator(); it.hasNext(); ){
            ( (PCRelation)it.next() ).toDOT( stream, newIndent );
        }
        //write out the tail
        stream.write( "}" );
        stream.write( newLine );
    }

    /**
     * Writes out the static DOT Header.
     *
     * @param stream is a stream opened and ready for writing. This can also
     *               be a StringWriter for efficient output.
     * @param indent is a String of spaces used for pretty
     *               printing. The initial amount of spaces should be an empty
     *               string. The parameter is used internally for the recursive
     *               traversal.
     *
     * @exception IOException if something fishy happens to the stream.
     */
    public void writeDOTHeader( Writer stream, String indent ) throws IOException {
        String newLine = System.getProperty( "line.separator", "\r\n" );
        String newIndent = ( indent == null ) ?
null : indent + "\t"; //write out the header and static stuff for now if ( indent != null && indent.length() > 0 ) {stream.write( indent ) ;} stream.write( "digraph E {"); stream.write( newLine ); //write out the size of the image if ( newIndent != null && newIndent.length() > 0 ) { stream.write( newIndent );} stream.write( "size=\"8.0,10.0\""); stream.write( newLine ); //write out the ratio if ( newIndent != null && newIndent.length() > 0 ) { stream.write( newIndent );} stream.write( "ratio=fill"); stream.write( newLine ); //write out what the shape of the nodes need to be like if ( newIndent != null && newIndent.length() > 0 ) { stream.write( newIndent );} stream.write( "node [shape=ellipse]"); stream.write( newLine ); //write out how edges are to be rendered. if ( newIndent != null && newIndent.length() > 0 ) { stream.write( newIndent );} stream.write( "edge [arrowhead=normal, arrowsize=1.0]"); stream.write( newLine ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/NameValue.java0000644000175000017500000000757511757531137025465 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; /** * The object of this class holds the name value pair. * At present to be used for environment variables. Will be used more * after integration of Spitfire. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2572 $ */ public class NameValue extends Data implements Comparable{ /** * stores the name of the pair. */ private String name; /** * stores the corresponding value to the name in the pair. */ private String value; /** * the default constructor which initialises the class member variables. */ public NameValue(){ name = new String(); value = new String(); } /** * Initialises the class member variables to the values passed in the * arguments. * * @param name corresponds to the name in the NameValue pair. * @param value corresponds to the value for the name in the NameValue pair. */ public NameValue(String name,String value){ this.name = name; this.value = value; } /** * Sets the key associated with this tuple. * * @param key the key associated with the tuple. */ public void setKey( String key ){ this.name = key; } /** * Sets the value associated with this tuple. * * @param value the value associated with the tuple. */ public void setValue( String value ){ this.value = value; } /** * Returns the key associated with this tuple. * * @return the key associated with the tuple. */ public String getKey(){ return this.name; } /** * Returns the value associated with this tuple. * * @return value associated with the tuple. */ public String getValue(){ return this.value; } /** * Returns a copy of this object * * @return object containing a cloned copy of the tuple. */ public Object clone(){ NameValue nv = new NameValue(this.name,this.value) ; return nv; } /** * Writes out the contents of the class to a String * in form suitable for displaying. * * @return the textual description. 
     */
    public String toString(){
        String str = this.getKey() + "=" + this.getValue();
        return str;
    }

    /**
     * Implementation of the {@link java.lang.Comparable} interface.
     * Compares this object with the specified object for order. Returns a
     * negative integer, zero, or a positive integer as this object is
     * less than, equal to, or greater than the specified object. The
     * NameValue objects are compared by their keys.
     *
     * @param o is the object to be compared
     * @return a negative number, zero, or a positive number, if the
     *         object compared against is less than, equals or greater than
     *         this object.
     * @exception ClassCastException if the specified object's type
     *            prevents it from being compared to this Object.
     */
    public int compareTo( Object o ){
        if ( o instanceof NameValue ) {
            NameValue nv = (NameValue) o;
            return this.name.compareTo(nv.name);
        } else {
            throw new ClassCastException( "Object is not a NameValue" );
        }
    }
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/DAXJob.java0000644000175000017500000001115211757531137024641 0ustar ryngerynge/**
 * Copyright 2007-2008 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.isi.pegasus.planner.classes;

/**
 * This is a data class that stores the contents of the DAX job in a DAX conforming
 * to schema 3.0 or higher.
 *
 * @author Karan Vahi
 *
 * @version $Revision: 3415 $
 */
public class DAXJob extends Job {

    /**
     * The prefix to be attached for the DAX jobs
     */
    public static final String JOB_PREFIX = "subdax_";

    /**
     * The DAX LFN.
     */
    private String mDAXLFN;

    /**
     * The DAX File that the job refers to.
     */
    private String mDAXFile;

    /**
     * The directory in which the DAX needs to execute.
     */
    private String mDirectory;

    /**
     * The default constructor.
     */
    public DAXJob() {
        super();
        mDAXFile = null;
        mDirectory = null;
        this.setJobType( Job.DAX_JOB );
    }

    /**
     * The overloaded constructor that constructs a DAX job by wrapping around
     * the Job job.
     *
     * @param job the original job description.
     */
    public DAXJob(Job job){
        super(job);
        mDAXFile = null;
        this.setJobType( Job.DAX_JOB );
    }

    /**
     * Sets the DAX file LFN
     *
     * @param lfn the LFN of the DAX file.
     */
    public void setDAXLFN(String lfn ){
        mDAXLFN = lfn ;
    }

    /**
     * Returns the lfn for the DAXFile the job refers to.
     *
     * @return the lfn
     */
    public String getDAXLFN(){
        return mDAXLFN;
    }

    /**
     * Sets the DAX file
     *
     * @param file the path to the DAX file.
     */
    public void setDAXFile(String file ){
        mDAXFile = file ;
    }

    /**
     * Returns the DAXFile the job refers to.
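     *
     * <p>For illustration (values assumed, not from the original source): a job
     * may have the DAXLFN "black.dax" (the logical name) while getDAXFile()
     * returns the resolved path, e.g. "/scratch/run0001/black.dax".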
     *
     * @return the dax file
     */
    public String getDAXFile(){
        return mDAXFile;
    }

    /**
     * Generates a name for the job that serves as the primary id for the job
     *
     * @param prefix any prefix that needs to be applied while constructing the
     *               job name
     *
     * @return the id for the job
     */
    public String generateName( String prefix ){
        StringBuffer sb = new StringBuffer();
        //prepend a job prefix to job if required
        if (prefix != null) {
            sb.append( prefix );
        }
        String lfn = this.getDAXLFN();
        String lid = this.getLogicalID();
        if( lfn == null || this.getLogicalID() == null ){
            //sanity check
            throw new RuntimeException( "Generate name called for job before setting the DAXLFN/Logicalid" +
                                        lfn + "," + lid );
        }
        if( lfn.contains( "." ) ){
            lfn = lfn.substring( 0, lfn.lastIndexOf( "." ) );
        }
        sb.append( DAXJob.JOB_PREFIX ).append( lfn ).append( "_" ).append( lid );
        return sb.toString();
    }

    /**
     * Sets the directory in which the dag needs to execute.
     *
     * @param directory the directory where dag needs to execute
     */
    public void setDirectory( String directory ){
        mDirectory = directory ;
    }

    /**
     * Returns the directory the job refers to.
     *
     * @return the directory.
     */
    public String getDirectory(){
        return mDirectory;
    }

    /**
     * Returns a textual description of the DAX Job.
     *
     * @return the textual description.
     */
    public String toString(){
        StringBuffer sb = new StringBuffer(super.toString());
        return sb.toString();
    }

    /**
     * Returns a new copy of the Object. The implementation is faulty.
     * There is a shallow copy for the profiles. That is, the clone retains
     * references to the original object.
     *
     * @return Object
     */
    public Object clone(){
        DAXJob newJob = new DAXJob((Job)super.clone());
        newJob.setDAXLFN( this.getDAXLFN() );
        newJob.setDAXFile( this.getDAXFile() );
        newJob.setDirectory( this.getDirectory() );
        return newJob;
    }
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/FileTransfer.java0000644000175000017500000003603211757531137026162 0ustar ryngerynge/**
 * Copyright 2007-2008 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.isi.pegasus.planner.classes;

import edu.isi.pegasus.planner.common.PegRandom;

import java.io.File;
import java.util.ArrayList;
import java.util.BitSet;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

/**
 * This is a container for storing the transfers that are required in
 * between sites. It refers to one lfn, but can contain more than one source
 * and destination url. All the source url's are presumed to be identical.
 * The destination urls can in effect be used to refer to TFN's for a lfn on
 * different pools.
 *
 * @author Karan Vahi
 * @author Gaurang Mehta
 * @version $Revision: 4258 $
 */
public class FileTransfer extends PegasusFile {

    /**
     * The logical name of the associated VDS super node, with which the file
     * is associated.
     * The name of the job can be of the job that generates that
     * file (while doing interpool or transferring output files to output pool)
     * or of a job for which the file is an input (getting an input file from
     * the Replica Services).
     */
    private String mJob;

    /**
     * The map containing all the source urls keyed by the pool id/name.
     * Corresponding to each pool, a list of url's is stored that contain
     * the URL's for that pool. All url's not associated with a pool are
     * associated with an undefined pool.
     */
    private Map mSourceMap;

    /**
     * The map containing all the destination urls keyed by the pool id/name.
     * Corresponding to each pool, a list of url's is stored that contain
     * the URL's for that pool. All url's not associated with a pool are
     * associated with an undefined pool.
     */
    private Map mDestMap;

    /**
     * Default constructor.
     */
    public FileTransfer(){
        super();
        mJob = new String();
        mFlags = new BitSet(NO_OF_TRANSIENT_FLAGS);
        mSourceMap = new HashMap();
        mDestMap = new HashMap();
    }

    /**
     * The overloaded constructor.
     *
     * @param pf PegasusFile object containing the transiency
     *           attributes, and the logical name of the file.
     */
    public FileTransfer(PegasusFile pf){
        this.mLogicalFile = pf.mLogicalFile;
        this.mTransferFlag = pf.mTransferFlag;
        this.mSize = pf.mSize;
        this.mFlags = pf.getFlags();
        this.mType = pf.getType();
        this.mJob = new String();
        this.mSourceMap = new HashMap();
        this.mDestMap = new HashMap();
    }

    /**
     * The overloaded constructor.
     *
     * @param lfn The logical name of the file that has to be transferred.
     * @param job The name of the job with which the transfer is
     *            associated.
     */
    public FileTransfer(String lfn, String job){
        super(lfn);
        mJob = job;
        mSourceMap = new HashMap();
        mDestMap = new HashMap();
        mFlags = new BitSet(NO_OF_TRANSIENT_FLAGS);
    }

    /**
     * The overloaded constructor.
     *
     * @param lfn   The logical name of the file that has to be transferred.
     * @param job   The name of the job with which the transfer is
     *              associated.
     * @param flags the BitSet flags.
     */
    public FileTransfer(String lfn, String job, BitSet flags){
        mLogicalFile = lfn;
        mJob = job;
        mSourceMap = new HashMap();
        mDestMap = new HashMap();
        mFlags = (BitSet)flags.clone();
    }

    /**
     * It returns the name of the main/compute job making up the VDS supernode
     * with which this transfer is related.
     *
     * @return the name of associated job
     */
    public String getJobName(){
        return this.mJob;
    }

    /**
     * Adds a source URL for the transfer.
     *
     * @param nv the NameValue object containing the name of the site as the key
     *           and URL as the value.
     */
    public void addSource(NameValue nv){
        this.addSource(nv.getKey(),nv.getValue());
    }

    /**
     * Adds a source URL for the transfer.
     *
     * @param pool the pool from which the source file is being transferred.
     * @param url  the source url.
     */
    public void addSource(String pool, String url){
        List l = null;
        if(mSourceMap.containsKey(pool)){
            //add the url to the existing list
            l = (List)mSourceMap.get(pool);
            //add the entry to the list
            l.add(url);
        }
        else{
            //add a new list
            l = new ArrayList(3);
            l.add(url);
            mSourceMap.put(pool,l);
        }
    }

    /**
     * Adds a destination URL for the transfer.
     *
     * @param nv the NameValue object containing the name of the site as the key
     *           and URL as the value.
     */
    public void addDestination(NameValue nv){
        this.addDestination(nv.getKey(),nv.getValue());
    }

    /**
     * Adds a destination URL for the transfer.
     *
     * @param pool the pool to which the destination file is being transferred.
     * @param url  the destination url.
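     *
     * <p>Illustrative sketch (site names and URLs assumed, not from the
     * original source):
     * <pre>{@code
     *   FileTransfer ft = new FileTransfer( "f.a", "preprocess_ID000001" );
     *   ft.addSource( "local", "gsiftp://serverA.example.org/storage/f.a" );
     *   ft.addDestination( "isi", "gsiftp://serverB.example.org/scratch/f.a" );
     * }</pre>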
     */
    public void addDestination(String pool, String url){
        List l = null;
        if(mDestMap.containsKey(pool)){
            //add the url to the existing list
            l = (List)mDestMap.get(pool);
            //add the entry to the list
            l.add(url);
        }
        else{
            //add a new list
            l = new ArrayList(3);
            l.add(url);
            mDestMap.put(pool,l);
        }
    }

    /**
     * Returns a single source url associated with the transfer.
     * The source url returned is the first entry from the key set of the
     * underlying map.
     *
     * @return NameValue where the name would be the pool on which the URL is
     *         and value the URL.
     *         null if no urls are associated with the object.
     */
    public NameValue getSourceURL(){
        return getSourceURL( false );
    }

    /**
     * Returns a single source url associated with the transfer.
     * If random is set to false, then the source url returned is the first
     * entry from the key set of the underlying map.
     *
     * @param random boolean indicating if a random entry needs to be picked.
     *
     * @return NameValue where the name would be the pool on which the URL is
     *         and value the URL.
     *         null if no urls are associated with the object.
     */
    public NameValue getSourceURL( boolean random ){
        return getURL( mSourceMap , random );
    }

    /**
     * Returns a single destination url associated with the transfer.
     * The destination url returned is the first entry from the key set of the
     * underlying map.
     *
     * @return NameValue where the name would be the pool on which the URL is
     *         and value the URL.
     *         null if no urls are associated with the object.
     */
    public NameValue getDestURL(){
        return getDestURL( false );
    }

    /**
     * Returns a single destination url associated with the transfer.
     * If random is set to false, then the destination url returned is the
     * first entry from the key set of the underlying map.
     *
     * @param random boolean indicating if a random entry needs to be picked.
     *
     * @return NameValue where the name would be the pool on which the URL is
     *         and value the URL.
     *         null if no urls are associated with the object.
     */
    public NameValue getDestURL( boolean random ){
        return getURL( mDestMap, random );
    }

    /**
     * Removes a single source url associated with the transfer.
     * The source url removed is the first entry from the key set of the
     * underlying map.
     *
     * @return NameValue where the name would be the pool on which the URL is
     *         and value the URL.
     *         null if no urls are associated with the object.
     */
    public NameValue removeSourceURL(){
        return removeURL(mSourceMap);
    }

    /**
     * Removes a single destination url associated with the transfer.
     * The destination url removed is the first entry from the key set of the
     * underlying map.
     *
     * @return NameValue where the name would be the pool on which the URL is
     *         and value the URL.
     *         null if no urls are associated with the object.
     */
    public NameValue removeDestURL(){
        return removeURL(mDestMap);
    }

    /**
     * Returns a boolean indicating if a file that is being staged is an
     * executable or not (i.e. is a data file).
     *
     * @return boolean indicating whether a file is executable or not.
     */
    public boolean isTransferringExecutableFile(){
        return (this.mType == this.EXECUTABLE_FILE);
    }

    /**
     * Returns a single url from the map passed. If the random parameter is set,
     * then a random url is returned from the values for the first site.
     *
     * Fix Me: Random set to true should also lead to randomness on the sites.
     *
     * @param m      the map containing the url's
     * @param random boolean indicating that a random url is to be picked up.
     *
     * @return NameValue where the name would be the pool on which the URL is
     *         and value the URL.
     *         null if no urls are associated with the object.
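     *
     * <p>Sketch of the behaviour (illustrative, not from the original source):
     * with m = { "isi" : [u1, u2] }, getURL( m, false ) always returns
     * NameValue( "isi", u1 ), while getURL( m, true ) returns either u1 or u2.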
*/ private NameValue getURL( Map m, boolean random ){ if(m == null || m.keySet().isEmpty()){ return null; } //Return the first url from the EntrySet Iterator it = m.entrySet().iterator(); Map.Entry entry = ( Map.Entry )it.next(); List urls = ( List )entry.getValue(); String site = ( String )entry.getKey(); return ( random ) ? //pick a random value new NameValue( site, ( String ) urls.get( PegRandom.getInteger( 0, urls.size() -1 )) ): //returning the first element. No need for a check as //population of the list is controlled new NameValue( site, ( String )( urls.get(0) ) ); } /** * Removes a single url from the map passed. * * @param m the map containing the url's * * @return NameValue where the name would be the pool on which the URL is * and value the URL. * null if no urls are assoiciated with the object. */ private NameValue removeURL(Map m){ if(m == null || m.keySet().isEmpty()){ return null; } //Return the first url from the EntrySet Iterator it = m.entrySet().iterator(); Map.Entry entry = (Map.Entry)it.next(); //remove this entry it.remove(); //returning the first element. No need for a check as //population of the list is controlled return new NameValue( (String)entry.getKey(), (String)( ((List)entry.getValue()).get(0) ) ); } /** * Constructs a URL with the prefix as the poolname enclosed in #. * * @param site the site * @param directory the directory * @param filename the filename * * @return String */ private String constructURL(String site, String directory, String filename ){ StringBuffer sb = new StringBuffer(); sb/*.append("#").append(pool).append("#\n")*/ .append( directory ).append(File.separatorChar).append(filename); return sb.toString(); } /** * Returns a boolean value of whether the source url and the destination * url members of this object match or not. */ /*public boolean URLsMatch(){ if(mSourceURL.trim().equalsIgnoreCase(mDestURL.trim())){ return true; } return false; }*/ /** * Returns a clone of the object. * * @return clone of the object. */ public Object clone() { FileTransfer ft = new FileTransfer(); ft.mLogicalFile = new String(this.mLogicalFile); ft.mFlags = (BitSet)this.mFlags.clone(); ft.mTransferFlag = this.mTransferFlag; ft.mJob = new String(this.mJob); //the maps are not cloned underneath return ft; } /** * Determines whether the transfer contained in this container is valid or * not. It is deemed valid if there is at least one source url and one * destination url. * * @return true if valid, else false. */ public boolean isValid(){ return !(mSourceMap.isEmpty() || mDestMap.isEmpty()); } /** * Returns a textual interpretation of the object. The method outputs * in a T2 compatible format. Each FileTransfer object can refer to one * section in the T2 format. * * @return the textual description. */ public String toString() { StringBuffer sb = new StringBuffer(); String mode = (mTransferFlag == this.TRANSFER_OPTIONAL)? 
"optional" : "any"; Iterator it = null; Map.Entry entry = null; List l = null; sb.append(mLogicalFile).append(" ").append(mode); //writing out all the sources it = mSourceMap.entrySet().iterator(); //sb.append("\n").append(" "); while(it.hasNext()){ entry = (Map.Entry) it.next(); //inserting the source pool sb.append("\n").append("#").append(entry.getKey()); l = (List)entry.getValue(); Iterator it1 = l.iterator(); while(it1.hasNext()){ //write out the source url's //each line starts with a single whitespace sb.append("\n").append(" ").append(it1.next()); } } //writing out all the destinations it = mDestMap.entrySet().iterator(); //sb.append("\n").append(" "); while(it.hasNext()){ entry = (Map.Entry) it.next(); //inserting the destination pool sb.append("\n").append("# ").append(entry.getKey()); l = (List)entry.getValue(); Iterator it1 = l.iterator(); while(it1.hasNext()){ //write out the source url's //each line starts with a two whitespaces sb.append("\n").append(" ").append(" ").append(it1.next()); } } return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/classes/Job.java0000644000175000017500000016241611757531137024316 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.classes; import edu.isi.pegasus.common.credential.CredentialHandler; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.catalog.classes.Profiles.NAMESPACES; import edu.isi.pegasus.planner.namespace.Namespace; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.namespace.ENV; import edu.isi.pegasus.planner.namespace.Globus; import edu.isi.pegasus.planner.namespace.Hints; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.partitioner.graph.GraphNodeContent; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.dax.Invoke; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.HashSet; import java.io.Writer; import java.io.StringWriter; import java.io.IOException; import java.util.Collection; /** * The object of this class holds the information to generate a submit file about * one particular job making the Dag. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4784 $ */ public class Job extends Data implements GraphNodeContent{ /** * Denotes a job that does not fall into the other categories. It might * denote an error condition or a faulty logic in the planner. */ public static final int UNASSIGNED_JOB = 0; /** * Denotes a compute job. Generally these are the jobs that are specified in * the DAX. 
     */
    public static final int COMPUTE_JOB = 1;

    /**
     * Denotes a job that is used to stage in the input files for a compute job.
     */
    public static final int STAGE_IN_JOB = 2;

    /**
     * Denotes a job that transfers the data generated by a compute job to the
     * output pool specified by the user.
     */
    public static final int STAGE_OUT_JOB = 3;

    /**
     * Denotes a job that registers in the replica mechanism the materialized
     * files.
     */
    public static final int REPLICA_REG_JOB = 4;

    /**
     * Denotes a job that transfers the output of a compute node to the site
     * where the child compute node is to be generated.
     */
    public static final int INTER_POOL_JOB = 5;

    /**
     * Denotes a job that creates directories at the remote pools.
     */
    public static final int CREATE_DIR_JOB = 6;

    /**
     * Denotes a job that stages the worker package.
     */
    public static final int STAGE_IN_WORKER_PACKAGE_JOB = 7;

    /**
     * Denotes a job for which the executable has been staged as part of the
     * workflow.
     */
//    public static final int STAGED_COMPUTE_JOB = 7;

    /**
     * Denotes a cleanup job that removes files from the remote
     * working directories of the remote sites.
     */
    public static final int CLEANUP_JOB = 8;

    /**
     * Denotes a chmod job that sets the xbit on the remote end.
     */
    public static final int CHMOD_JOB = 9;

    /**
     * Denotes a job that refers to a sub workflow specified as a DAX.
     */
    public static final int DAX_JOB = 10;

    /**
     * Denotes a job that refers to a sub workflow specified as a DAG.
     */
    public static final int DAG_JOB = 11;

    /**
     * Returns an appropriate grid gateway job type corresponding to a job type
     *
     * @param type the job type
     *
     * @return corresponding GridGateway job type
     */
    private static GridGateway.JOB_TYPE jobType2GridGatewayJobType( int type ){
        //sanity check
        if ( !( typeInRange(type) ) ){
            throw new IllegalArgumentException("Invalid Job type " + type);
        }
        GridGateway.JOB_TYPE jtype ;
        switch( type ){
            case Job.COMPUTE_JOB:
            case Job.DAG_JOB:
            case Job.DAX_JOB:
                jtype = GridGateway.JOB_TYPE.compute;
                break;
            case Job.STAGE_IN_JOB:
                jtype = GridGateway.JOB_TYPE.transfer;
                break;
            case Job.STAGE_OUT_JOB:
                jtype = GridGateway.JOB_TYPE.transfer;
                break;
            case Job.REPLICA_REG_JOB:
                jtype = GridGateway.JOB_TYPE.register;
                break;
            case Job.INTER_POOL_JOB:
                jtype = GridGateway.JOB_TYPE.transfer;
                break;
            case Job.CREATE_DIR_JOB:
            case Job.CHMOD_JOB:
                jtype = GridGateway.JOB_TYPE.auxillary;
                break;
            case Job.CLEANUP_JOB:
                jtype = GridGateway.JOB_TYPE.cleanup;
                break;
/*          case Job.SYMLINK_STAGE_IN_JOB:
                jtype = GridGateway.JOB_TYPE.transfer;
                break;
*/
            case Job.UNASSIGNED_JOB:
            default:
                jtype = GridGateway.JOB_TYPE.compute;
        }
        return jtype;
    }

    /**
     * The delimiter that has to be used to combine the name for the staged
     * executable.
     */
    private static String DELIMITER = PegasusProperties.getInstance().getStagingDelimiter();

    /**
     * The type of the job. Pegasus tags the jobs according to the function of
     * the job. The jobs are tagged according to the functionality they serve in
     * the Pegasus super node. The job class can be
     *
     * unassigned
     * compute job
     * stage-in
     * stage-out
     * replica registration
     * inter-pool transfer
     * create-dir job
     * staged-compute job
     */
    public int jobClass;

    /**
     * Identifies which Pegasus Super Node a job is associated with.
     * A Pegasus Supernode is identified by the jobName of the compute node in
     * the super node.
     */
    public String jobID;

    /**
     * The name of the job.
     */
    public String jobName;

    /**
     * The logical name of the transformation which is executed as a part of
     * this job. Note: The tc is looked up by namespace__logicalName_version.
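     *
     * <p>For illustration (an assumed reading of the note above, not from the
     * original source): a transformation with namespace "pegasus", logical name
     * "transfer" and version "1.0" would be looked up under a key of the form
     * "pegasus__transfer_1.0".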
     */
    public String logicalName;

    /**
     * The logical id of the job as referred to in the dax.
     */
    public String logicalId;

    /**
     * The namespace to which the transformation is bound.
     */
    public String namespace;

    /**
     * The version of the transformation.
     */
    public String version;

    /**
     * The name of the derivation in Chimera that generated the job.
     */
    public String dvName;

    /**
     * The namespace to which the derivation is bound.
     */
    public String dvNamespace;

    /**
     * The version of the derivation.
     */
    public String dvVersion;

    /**
     * The globus Scheduler for the job.
     */
    public String globusScheduler;

    /**
     * The path of the executable on the machine at which the job is executed.
     */
    public String executable;

    /**
     * The universe in which the job has to be executed. Can be standard,
     * vanilla or globus.
     */
    public String condorUniverse;

    /**
     * File which contains stdin (keyboard input).
     */
    public String stdIn;

    /**
     * File which contains stdout.
     */
    public String stdOut;

    /**
     * File which contains standard error.
     */
    public String stdErr;

    /**
     * The arguments for the job. This string is put in the arguments in the
     * Condor Submit File.
     */
    public String strargs;

    /**
     * Contains the input files for the submit file. They are a vector of
     * PegasusFile objects which store the transiency information of each
     * logical file.
     *
     * @see org.griphyn.cPlanner.classes.PegasusFile
     */
    public Set inputFiles;

    /**
     * Contains the output files for the submit file. They are a vector of
     * PegasusFile objects which store the transiency information of each
     * logical file.
     *
     * @see org.griphyn.cPlanner.classes.PegasusFile
     */
    public Set outputFiles;

    /**
     * The pool on which this job has been decided to be executed by the
     * Interpool Engine.
     */
    public String executionPool;

    //the namespace variables

    /**
     * The namespace object containing the globus rsl attributes which the user
     * specifies in the dax, or the pool file or the properties file.
     */
    public Globus globusRSL;

    /**
     * For Condor Namespace. This contains the extra Condor options which one
     * may want to specify. These are copied straightaway to the Submit file.
     */
    public Condor condorVariables;

    /**
     * To accommodate the environment variables which might need to be set.
     */
    public ENV envVariables;

    /**
     * The DAGMAN namespace profile variable holding the dagman profiles.
     * It holds the prescript and the postscripts for the jobs.
     */
    public Dagman dagmanVariables;

    /**
     * To accommodate all the hints that may be passed through the DAX.
     */
    public Hints hints;

    /**
     * The Pegasus namespace variable.
     */
    public Pegasus vdsNS;

    /**
     * Identifies the level of the job in the dax. The level is bottom up
     * from the final child node.
     */
    public int level;

    /**
     * The expected runtime for a job.
     */
    private double mRuntime;

    /**
     * Boolean indicating whether the job executables were staged for it or not.
     */
    private boolean mJobExecutablesStaged;

    /**
     * All the notifications associated with the job
     */
    private Notifications mNotifications;

    /**
     * The staging site associated with the job
     */
    private String mStagingSite;

    /**
     * The directory in which the job should run.
     */
    private String mDirectory;

    /**
     * The relative path to the submit directory for the job, from the workflows
     * base submit directory.
     */
//    private String submitDirectory;

    /**
     * Set of credential types required by a job.
     */
    private Set mCredentialsType;

    /**
     * Initialises the member variables.
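     *
     * <p>Illustrative sketch of typical setup after construction (values
     * assumed, not from the original source):
     * <pre>{@code
     *   Job j = new Job();
     *   j.setJobType( Job.COMPUTE_JOB );
     *   j.setSiteHandle( "local" );
     * }</pre>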
*/ public Job() { jobName = ""; namespace = ""; logicalName = ""; logicalId = ""; version = ""; dvName = null; dvNamespace = null; dvVersion = null; jobID = ""; globusScheduler = ""; executable = ""; condorUniverse = ""; stdIn = ""; stdOut = ""; stdErr = ""; inputFiles = new HashSet(); outputFiles = new HashSet(); strargs = ""; envVariables = new ENV(); executionPool = new String(); globusRSL = new Globus(); condorVariables = new Condor(); dagmanVariables = new Dagman(); hints = new Hints(); vdsNS = new Pegasus(); jobClass = UNASSIGNED_JOB; level = -1; mRuntime = -1; mJobExecutablesStaged = false; mNotifications = new Notifications(); mStagingSite = null; mDirectory = null; mCredentialsType = new HashSet(); // submitDirectory = null; } /** * Overloaded constructor. Does a shallow copy of the job object passed. * * @param job the Job object containing the job description. */ public Job(Job job){ jobName = job.jobName; namespace = job.namespace; logicalName = job.logicalName; logicalId = job.logicalId; version = job.version; dvName = job.dvName; dvNamespace = job.dvNamespace; dvVersion = job.dvVersion; jobID = job.jobID; globusScheduler = job.globusScheduler; executable = job.executable; condorUniverse = job.condorUniverse; stdIn = job.stdIn; stdOut = job.stdOut; stdErr = job.stdErr; inputFiles = job.inputFiles; outputFiles = job.outputFiles; strargs = job.strargs; envVariables = job.envVariables; executionPool = job.executionPool; globusRSL = job.globusRSL; condorVariables = job.condorVariables; dagmanVariables = job.dagmanVariables; hints = job.hints; vdsNS = job.vdsNS; jobClass = job.getJobType(); level = job.level; mRuntime = job.mRuntime; mJobExecutablesStaged = job.mJobExecutablesStaged; mNotifications = job.mNotifications; mStagingSite = job.mStagingSite; mDirectory = job.mDirectory; mCredentialsType = new HashSet(); // submitDirectory = job.submitDirectory; } /** * Returns a new copy of the Object. * * @return clone of the object. */ public Object clone(){ Job newSub = new Job(); newSub.condorUniverse = this.condorUniverse; newSub.envVariables = (ENV)this.envVariables.clone(); newSub.executable = this.executable; newSub.globusScheduler= this.globusScheduler; for(Iterator it = this.inputFiles.iterator(); it.hasNext(); ){ newSub.addInputFile( (PegasusFile)((PegasusFile)it.next()).clone()); } for(Iterator it = this.outputFiles.iterator(); it.hasNext(); ){ newSub.addOutputFile( (PegasusFile)((PegasusFile)it.next()).clone()); } newSub.jobName = this.jobName; newSub.logicalName = this.logicalName; newSub.logicalId = this.logicalId; newSub.stdErr = this.stdErr; newSub.stdIn = this.stdIn; newSub.stdOut = this.stdOut; newSub.strargs = this.strargs; newSub.executionPool = this.executionPool; newSub.globusRSL = this.globusRSL == null ? null : (Globus)this.globusRSL.clone(); newSub.condorVariables= this.condorVariables == null ? null : (Condor)this.condorVariables.clone(); newSub.dagmanVariables= this.dagmanVariables == null ? null : (Dagman)this.dagmanVariables.clone(); newSub.vdsNS = this.vdsNS == null ? null :(Pegasus)this.vdsNS.clone(); newSub.hints = (Hints)this.hints.clone(); newSub.jobID = this.jobID; newSub.jobClass = this.jobClass; newSub.dvName = this.dvName; newSub.namespace = this.namespace; newSub.version = this.version; newSub.dvNamespace = this.dvNamespace; newSub.dvVersion = this.dvVersion; newSub.level = this.level; newSub.mRuntime = this.mRuntime; // newSub.submitDirectory = this.submitDirectory == null ? 
null : new String(this.submitDirectory); newSub.mJobExecutablesStaged = this.mJobExecutablesStaged; newSub.mNotifications = (Notifications)this.getNotifications().clone(); newSub.mStagingSite = this.mStagingSite; newSub.mDirectory = this.mDirectory; for( CredentialHandler.TYPE type : this.getCredentialTypes() ){ newSub.addCredentialType( type ); } return newSub; } /** * Sets the expected runtime for the job. * * @param runtime the runtime for the job. */ public void setRuntime( String runtime ) { if( runtime == null ){ mRuntime = -1; } else{ mRuntime = Double.parseDouble( runtime ); } } /** * Sets the expected runtime for the job. * * @param runtime the runtime for the job. */ public void setRuntime( double runtime ) { mRuntime = runtime; } /** * Returns the expected runtime for the job that is set using the * setRuntime method. * * @return the runtime for the job. */ public double getRuntime( ) { return mRuntime; } /** * Returns the runtime associated with the job. If the runtime for the job * is set to a negative value, it also attempts to look up the value * specified in the Pegasus profile key runtime for the job. If a value is * associated with the profile key, the runtime is set to it using the * setRuntime( String ) method. * * @return the expected runtime. * @see org.griphyn.cPlanner.namespace.Pegasus#RUNTIME_KEY */ public double computeRuntime( ){ if( mRuntime < 0 ){ //attempt to look up the value from pegasus profile String value = this.vdsNS.getStringValue( Pegasus.RUNTIME_KEY ); if( value == null ){ return -1; } else{ setRuntime( value ); } } return mRuntime; } /** * Sets the universe associated with the job. * * @param universe the universe to be associated. */ public void setUniverse( String universe ){ this.condorUniverse = universe; } /** * Returns the universe associated with the job. * * @return the universe associated with the job. */ public String getUniverse( ){ return this.condorUniverse; } /** * Sets the executable staging flag in the job to the value passed. * * @param value the boolean value. */ public void setExecutableStagingForJob( boolean value ){ mJobExecutablesStaged = value; } /** * Returns whether user executables need to be staged for the job or not. * * @return boolean indicating whether user executables are staged. */ public boolean userExecutablesStagedForJob(){ return mJobExecutablesStaged ; } /** * Adds an input file to the underlying collection of input files * associated with the job. * * @param file the PegasusFile containing the input file. */ public void addInputFile(PegasusFile file){ this.inputFiles.add(file); } /** * Sets the input files associated with the job. * * @param ipFiles Set of PegasusFile objects containing the input files. */ public void setInputFiles( Set ipFiles ){ this.inputFiles = ipFiles; } /** * Returns the set of input files associated with the job. * * @return Set of PegasusFile objects containing the input files. */ public Set getInputFiles( ){ return this.inputFiles; } /** * Resets the notifications associated with the job. */ public void resetNotifications( ){ this.mNotifications = new Notifications(); } /** * Adds an Invoke object corresponding to a notification. * * @param invoke the invoke object containing the notification */ public void addNotification( Invoke invoke ){ this.mNotifications.add(invoke); } /** * Adds all the notifications specified in the TransformationCatalogEntry * to the underlying job notifications.
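*
* <p>A hedged usage sketch (how the entry is obtained is hypothetical):
* <pre>
*   TransformationCatalogEntry entry = ...; // entry matched for this job in the TC
*   job.addNotifications( entry );          // copies over entry.getNotifications()
* </pre>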
* * @param entry the TransformationCatalogEntry object */ public void addNotifications( TransformationCatalogEntry entry ){ this.mNotifications.addAll( entry.getNotifications() ); } /** * Adds all the notifications passed to the underlying container. * * @param invokes the notifications to be added */ public void addNotifications( Notifications invokes ){ this.mNotifications.addAll(invokes); } /** * Returns a collection of all the notifications that need to be * done for a particular condition. * * @param when the condition * * @return the collection of notifications for the condition */ public Collection getNotifications( Invoke.WHEN when ){ return this.mNotifications.getNotifications(when); } /** * Returns all the notifications associated with the job. * * @return the notifications */ public Notifications getNotifications( ){ return this.mNotifications; } /** * Looks at a URL to determine whether a credential should be associated with * a job or not. * * @param url the url for which a credential needs to be added */ public void addCredentialType( String url ){ //sanity check if( url == null ){ return; } if( url.startsWith( "gsiftp" ) ){ this.addCredentialType( CredentialHandler.TYPE.x509 ); } else if( url.startsWith( "s3" ) ){ this.addCredentialType( CredentialHandler.TYPE.s3 ); } else if( url.startsWith( "irods" ) ){ this.addCredentialType( CredentialHandler.TYPE.irods ); } else if( url.startsWith( "scp" ) ){ this.addCredentialType( CredentialHandler.TYPE.ssh ); } } /** * Adds a type of credential that will be required by a job. * * @param type the credential type. */ public void addCredentialType( CredentialHandler.TYPE type ){ this.mCredentialsType.add( type ); } /** * Returns the various credential types required by a job. * * @return the set of credentials required. */ public Set getCredentialTypes( ){ return this.mCredentialsType; } /** * Resets the credential types required by a job. * */ public void resetCredentialTypes(){ this.mCredentialsType.clear(); } /** * Adds an output file to the underlying collection of output files * associated with the job. * * @param file the PegasusFile containing the output file. */ public void addOutputFile(PegasusFile file){ this.outputFiles.add(file); } /** * Sets the output files associated with the job. * * @param opFiles Set of PegasusFile objects containing the * output files. */ public void setOutputFiles( Set opFiles ){ this.outputFiles = opFiles; } /** * Returns the set of output files associated with the job. * * @return Set of PegasusFile objects containing the output files. */ public Set getOutputFiles( ){ return this.outputFiles; } /** * Sets the type of the job. * * @param type the type of the job. * @exception IllegalArgumentException if the job type is outside its legal * range. * * @see #UNASSIGNED_JOB * @see #COMPUTE_JOB * @see #STAGE_IN_JOB * @see #STAGE_OUT_JOB * @see #REPLICA_REG_JOB * @see #INTER_POOL_JOB * @see #CREATE_DIR_JOB * @see #STAGED_COMPUTE_JOB * @see #CLEANUP_JOB */ public void setJobType(int type){ if(typeInRange(type)){ jobClass = type; } else{ throw new IllegalArgumentException("Invalid Job type " + type); } } /** * Sets the site handle of the site where the job is to be executed. * * @param site the site handle. */ public void setSiteHandle(String site){ this.executionPool = site; } /** * Returns the handle of the site where the job is scheduled. * * @return site handle. */ public String getSiteHandle(){ return executionPool; } /** * Sets the path to the executable on the remote grid site.
This executable * is invoked whenever a job is run on the remote grid site. * * @param path the path to the underlying transformation on the remote grid * site. * * @see #getSiteHandle() */ public void setRemoteExecutable( String path ){ this.executable = path; } /** * Returns the path of the underlying executable on the remote grid site. * * @return the path to the executable if set. */ public String getRemoteExecutable( ){ return this.executable; } /** * Sets the remote jobmanager on which the job has to run. * * @param jobmanager the jobmanager url. * * @see #getJobManager() */ public void setJobManager( String jobmanager ){ this.globusScheduler = jobmanager; } /** * Returns the remote jobmanager on which the job has to run. * * @return the jobmanager url. * * @see #setJobManager(java.lang.String) */ public String getJobManager( ){ return this.globusScheduler; } /** * Sets the file to which the stdout of the job needs to be written * at the remote grid site. Should be just the basename. The file appears * in the remote working directory for that job. * * @param fileName the basename of the file. */ public void setStdOut( String fileName ){ this.stdOut = fileName; } /** * Returns the file to which the stdout of the job is written. * * @return the basename of the file. */ public String getStdOut( ){ return this.stdOut; } /** * Sets the file to which the stderr of the job needs to be written * at the remote grid site. Should be just the basename. The file appears * in the remote working directory for that job. * * @param fileName the basename of the file. */ public void setStdErr( String fileName ){ this.stdErr = fileName; } /** * Returns the file to which the stderr of the job is written. * * @return the basename of the file. */ public String getStdErr( ){ return this.stdErr; } /** * Sets the file from which to pick up the stdin for the job. The file * is tracked via the Replica Catalog, and is staged to the remote grid site. * * @param fileName the basename of the file. */ public void setStdIn( String fileName ){ this.stdIn = fileName; } /** * Returns the file from which the stdin is picked up. * * @return the basename of the file. */ public String getStdIn( ){ return this.stdIn; } /** * Returns the ID associated with the job. Unfortunately currently it is * the job name. * * @return the ID of the job. */ public String getID(){ return getName(); } /** * Sets the staging site. * * @param site the staging site. */ public void setStagingSiteHandle( String site ){ this.mStagingSite = site; } /** * Returns the staging site. * * @return the staging site. */ public String getStagingSiteHandle( ){ return this.mStagingSite; } /** * Returns the name of the job. * * @return String */ public String getName(){ return jobName; } /** * Setter method to set the name of the job. * * @param name the name of the job. */ public void setName(String name){ jobName = name; } /** * Returns the directory where the job runs. * * @return String */ public String getDirectory(){ return mDirectory ; } /** * Setter method to set the directory where the job runs. * * @param directory the directory where the job runs. */ public void setDirectory( String directory ){ mDirectory = directory; } /** * Returns the logical id of the job.
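*
* <p>This is the id attribute that the job carried in the DAX, e.g.
* (illustrative):
* <pre>
*   &lt;job id="ID000001" namespace="pegasus" name="preprocess" version="1.0"&gt;
* </pre>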
* * @return String */ public String getLogicalID(){ return logicalId; } /** * Returns the DAX ID for the job if it appeared in the DAX, else null * * @return the id of the job in the DAX if present , else null */ public String getDAXID(){ StringBuffer sb = new StringBuffer(); int type = this.getJobType(); if( type == Job.COMPUTE_JOB || type == Job.DAG_JOB || type == Job.DAX_JOB ) { //dax and dag jobs actually are never launched //via kickstart as of now. //pass the logical id in the DAX sb.append( this.getLogicalID() ); } else{ //for all auxillary jobs pass null sb.append( (String)null ); } return sb.toString(); } /** * Setter method to set the logical id of the job. * * @param id the logical id of the job. */ public void setLogicalID(String id){ logicalId = id; } /** * Returns the name of the compute job of Pegasus supernode containing this job. * * @return String */ public String getVDSSuperNode(){ return this.jobID; } /** * Setter method to the name of the compute job of Pegasus supernode containing * this job. * * @param name the name of the job. */ public void setVDSSuperNode( String name ){ this.jobID = name; } /** * Returns the type of the job. Returns the value matching the jobClass. * * @return int value of job class. */ public int getJobType(){ return this.jobClass; } /** * Returns the corresponding grid gateway job type * * @return grid gateway job type */ public GridGateway.JOB_TYPE getGridGatewayJobType(){ // JIRA PM-277 // return Job.jobType2GridGatewayJobType( this.getJobType() ); return this.hints.containsKey( Hints.JOBMANAGER_UNIVERSE_KEY ) ? GridGateway.JOB_TYPE.valueOf( (String)this.hints.get( Hints.JOBMANAGER_UNIVERSE_KEY ) ): Job.jobType2GridGatewayJobType( this.getJobType() ); } /** * Gets the textual description of the type associated with the job. * * @return the textual description of the type associated with the job. */ public String getJobTypeDescription(){ return getJobTypeDescription(this.jobClass); } /** * Gets the textual description of the type that can be associated * with a job. * * @param type the type of the job. * * @return the textual description of the type associated with the job. */ public String getJobTypeDescription(int type){ String desc = null; switch (type){ case COMPUTE_JOB: desc = "compute"; break; case STAGE_IN_JOB: desc = "stage-in-tx"; break; case STAGE_OUT_JOB: desc = "stage-out-tx"; break; case INTER_POOL_JOB: desc = "inter-site-tx"; break; case REPLICA_REG_JOB: desc = "registration"; break; case UNASSIGNED_JOB: desc = "unassigned"; break; case CREATE_DIR_JOB: desc = "create-dir"; break; case CLEANUP_JOB: desc = "cleanup"; break; case CHMOD_JOB: desc = "chmod"; break; case DAX_JOB: desc = "dax"; break; case DAG_JOB: desc = "dag"; break; default: desc = "unknown"; break; } return desc; } /** * Returns the namespace of the underlying transformation. * * @return namespace */ public String getTXNamespace(){ return namespace; } /** * Sets the transformation namespace to be associated with the job. * * @param ns the namespace. */ public void setTXNamespace( String ns ){ this.namespace = ns; } /** * Returns the name of the underlying transformation. * * @return name */ public String getTXName(){ return logicalName; } /** * Sets the transformation name of the underlying transformation. * * @param name the logical name of the transformation. */ public void setTXName(String name){ this.logicalName = name; } /** * Returns the version of the underlying transformation. 
* * @return version */ public String getTXVersion(){ return version; } /** * Sets the version of the underlying transformation. * * @param vs the version. */ public void setTXVersion(String vs){ this.version = vs; } /** * Sets the various attributes of underlying transformation. * * @param ns the namespace of the transformation. * @param name the logical name of the transformation. * @param vs the version of the transformation. */ public void setTransformation( String ns, String name, String vs){ this.setTXNamespace(ns); this.setTXName(name); this.setTXVersion(vs); } /** * Constructs the fully qualified name of a transformation with * which to query the TC, including the namespace and version. * * @return the complete tranformation name. */ public String getCompleteTCName(){ return Separator.combine(namespace,logicalName,version); } /** * Returns the namespace of the underlying derivation. * * @return namespace */ public String getDVNamespace(){ return dvNamespace; } /** * Sets the derivation namespace to be associated with the job. * * @param ns the namespace. */ public void setDVNamespace( String ns ){ this.dvNamespace = ns; } /** * Returns the name of the underlying derivation. * * @return name */ public String getDVName(){ return dvName; } /** * Sets the derivation name of the underlying derivation. * * @param name the logical name of the derivation. */ public void setDVName(String name){ this.dvName = name; } /** * Returns the version of the underlying derivation. * * @return version */ public String getDVVersion(){ return dvVersion; } /** * Sets the version of the underlying derivation. * * @param vs the version. */ public void setDVVersion(String vs){ this.dvVersion = vs; } /** * Sets the various attributes of underlying derivation. * * @param ns the namespace of the derivation. * @param name the logical name of the derivation. * @param vs the version of the derivation. */ public void setDerivation( String ns, String name, String vs){ this.setDVNamespace(ns); this.setDVName(name); this.setDVVersion(vs); } /** * Returns the level associated with the job. * * @return int designating the level */ public int getLevel( ){ return level; } /** * Sets the level for the job. * * @param value the level */ public void setLevel( int value ){ level = value; } /** * Constructs the fully qualified name of the corresponding derivation used * to generate this job in Chimera including the namespace and version. * * @return the complete derivation name. */ public String getCompleteDVName(){ return (dvName == null) ? null: Separator.combine(dvNamespace,dvName,dvVersion); } /** * Returns the basename for the staged executable corresponding to the * job. * * @return the staged executable basename */ public String getStagedExecutableBaseName(){ return getStagedExecutableBaseName( namespace, logicalName, version ); } /** * Returns the basename for the staged executable corresponding to the * job. * * @param txNamespace is the namespace in which the TR resides, may be null. * @param txName is the base name of the transformation, must not be null. * @param txVersion is the version of the TR, may be null * * @return the staged executable basename */ public static String getStagedExecutableBaseName( String txNamespace, String txName, String txVersion ){ return combine( txNamespace, txName, txVersion); } /** * Returns the submit directory path relative to the workflow submit * directory. * * @return the directory name, if set else null. 
*/ // public String getSubmitDirectory(){ // return this.submitDirectory; // } /** * Sets the submit directory path relative to the workflow submit * directory. * * @param directory the directory name. */ // public void setSubmitDirectory(String directory){ // this.submitDirectory = directory; // } /** * Returns the argument string with which the job has to be invoked. * * @return the argument string. */ public String getArguments(){ return this.strargs; } /** * Sets the argument string with which the job has to be invoked. * * @param arguments the argument string. */ public void setArguments(String arguments){ this.strargs = arguments; } /** * Combines the three components together into a single string as * namespace-name-version. * * @param namespace is the namespace in which the TR resides, may be null. * @param name is the base name of the transformation, must not be null. * @param version is the version of the TR, may be null. * * @return the concatenated form . */ private static String combine(String namespace, String name, String version) { StringBuffer result = new StringBuffer(32); if ( namespace != null && namespace.length() > 0 ) { result.append( namespace ).append( DELIMITER); } result.append(name); if (version != null && version.length() > 0 ) { result.append( DELIMITER ).append( version ); } return result.toString(); } /** * It sets the prescript for the job. The argument string is assumed to be * empty. * * @param path the path to the script that has to be run as a prescript. */ public void setPreScript(String path) { setPreScript( path , ""); } /** * It sets the prescript for the job. * * @param path the path to the script that has to be run as a prescript. * @param arguments the arguments to the prescript, */ public void setPreScript(String path, String arguments){ //this.preScript = script; //construct directly as we know keys are valid. dagmanVariables.construct( Dagman.PRE_SCRIPT_KEY, path ); dagmanVariables.construct( Dagman.PRE_SCRIPT_ARGUMENTS_KEY, arguments); } /** * Returns the path to the prescript for the job if set. * * @return the path to the script that has to be run as a prescript, else * null if no prescript has been set. */ public String getPreScriptPath(){ Object obj = dagmanVariables.get( Dagman.PRE_SCRIPT_KEY ); return (obj == null)? null: (String)obj; } /** * Returns the arguments to the prescript for the job if set. * * @return the argumetns to the prescript script that has to be run as a * prescript, else null if no prescript has been set. */ public String getPreScriptArguments(){ Object obj = dagmanVariables.get( Dagman.PRE_SCRIPT_ARGUMENTS_KEY ); return (obj == null)? null: (String)obj; } /** * Returns whether the job is recursive or not. * * @return boolean */ public boolean typeRecursive(){ return this.vdsNS.containsKey( Pegasus.TYPE_KEY )? this.vdsNS.getStringValue( Pegasus.TYPE_KEY ).equals( "recursive" ): false; } /** * Sets the job to be recursive. */ public void setTypeRecursive(){ this.vdsNS.construct( Pegasus.TYPE_KEY, "recursive" ); } /** * Returns whether the job type value for the job is in range or not. * * @param type the job type. * * @return true if the value is in range. * false if the value is not in range. */ public static boolean typeInRange(int type){ return ( type >= Job.UNASSIGNED_JOB && type <= Job.DAG_JOB ); } /** * Updates all the profile namespaces with the information associated in * the transformation catalog for this job. 
* It ends up updating already existing information, and adds supplemental * new information if present in the transformation catalog. * The method does not explicitly check whether the data object passed refers * to this job or not. The calling method should ensure this. * * @param entry the TCEntry object corresponding to the * entry in the Transformation Catalog for the job. */ public void updateProfiles(TransformationCatalogEntry entry){ condorVariables.checkKeyInNS(entry); dagmanVariables.checkKeyInNS(entry); globusRSL.checkKeyInNS(entry); envVariables.checkKeyInNS(entry); vdsNS.checkKeyInNS(entry); } /** * Updates all the profile namespaces with the information specified by the * user in the properties file that applies to this job. * It ends up updating already existing information, and adds supplemental * new information if present in the properties file. * The method does not explicitly check whether the data object passed refers * to this job or not. The calling method should ensure this. * * @param properties the PegasusProperties object containing * the user properties. */ public void updateProfiles(PegasusProperties properties){ condorVariables.checkKeyInNS(properties,executionPool); dagmanVariables.checkKeyInNS(properties,executionPool); globusRSL.checkKeyInNS(properties,executionPool); envVariables.checkKeyInNS(properties,executionPool); vdsNS.checkKeyInNS(properties,executionPool); } /** * Updates all the profile namespaces with the information specified in * the list of profile objects passed. The pool catalog returns profile * information as a list of Profile objects that need to be propagated to * the job. * It ends up updating already existing information, and adds supplemental * new information if present in the properties file. * * * @param profiles The Profiles that need to be incorporated in * the job's profile namespaces. */ public void updateProfiles( Profiles profiles){ if( profiles == null ){ //nothing to put in the namespaces return; } String key = null; Namespace n = profiles.get( NAMESPACES.condor ); for( Iterator it = n.getProfileKeyIterator(); it.hasNext(); ){ key = (String)it.next(); condorVariables.checkKeyInNS( key, (String)n.get( key ) ); } n = profiles.get( NAMESPACES.globus ); for( Iterator it = n.getProfileKeyIterator(); it.hasNext(); ){ key = (String)it.next(); globusRSL.checkKeyInNS( key, (String)n.get( key ) ); } n = profiles.get( NAMESPACES.env ); for( Iterator it = n.getProfileKeyIterator(); it.hasNext(); ){ key = (String)it.next(); envVariables.checkKeyInNS( key, (String)n.get( key ) ); } n = profiles.get( NAMESPACES.pegasus ); for( Iterator it = n.getProfileKeyIterator(); it.hasNext(); ){ key = (String)it.next(); vdsNS.checkKeyInNS( key, (String)n.get( key ) ); } n = profiles.get( NAMESPACES.dagman ); for( Iterator it = n.getProfileKeyIterator(); it.hasNext(); ){ key = (String)it.next(); dagmanVariables.checkKeyInNS( key, (String)n.get( key ) ); } } /** * Updates all the profile namespaces with the information specified in * the list of profile objects passed. The pool catalog returns profile * information as a list of Profile objects that need to be propagated to * the job. * It ends up updating already existing information, and adds supplemental * new information if present in the properties file. * * * @param profiles the list of Profile objects that need to be * incorporated in the job's profile namespaces.
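*
* <p>A hedged usage sketch (the Profile constructor signature shown is an
* assumption, not verified against the class):
* <pre>
*   List profiles = new ArrayList();
*   profiles.add( new Profile( Profile.ENV, "PEGASUS_HOME", "/opt/pegasus" ) );
*   job.updateProfiles( profiles );
* </pre>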
*/ public void updateProfiles(List profiles){ if(profiles == null || profiles.isEmpty()){ //nothing to put in the namespaces return; } Profile profile = null; for( Iterator it = profiles.iterator(); it.hasNext(); ){ profile = (Profile)it.next(); if(profile.getProfileNamespace().equals(Profile.CONDOR)) condorVariables.checkKeyInNS(profile); else if(profile.getProfileNamespace().equals(Profile.GLOBUS)) globusRSL.checkKeyInNS(profile); else if(profile.getProfileNamespace().equals(Profile.ENV)) envVariables.checkKeyInNS(profile); else if(profile.getProfileNamespace().equals(Profile.VDS)) vdsNS.checkKeyInNS(profile); else if(profile.getProfileNamespace().equals(Profile.DAGMAN)) dagmanVariables.checkKeyInNS(profile); else{ //unknown profile. mLogger.log("Unknown Profile: " + profile + " for job " + this.jobName,LogManager.WARNING_MESSAGE_LEVEL); } } } /** * Merges profiles from another job to this job in a controlled fashion. * The merging of a profile is dependent upon the namespace to which it * belongs. Some profiles may be overridden, others may be summed up, etc. * * @param job the Job object containing the job description * for the job whose profiles have to be merged into this job. * */ public void mergeProfiles( Job job ){ this.globusRSL.merge( job.globusRSL ); this.envVariables.merge( job.envVariables ); this.condorVariables.merge( job.condorVariables ); this.dagmanVariables.merge( job.dagmanVariables ); this.vdsNS.merge( job.vdsNS ); this.hints.merge( job.hints ); } /** * Checks if an object is similar to the one referred to by this class. * We compare the primary key to determine if it is the same or not. * * @param obj the object with which to compare. * * @return true if the primary keys (jobName) match, * else false. */ public boolean equals(Object obj){ if(obj instanceof Job){ Job job = (Job) obj; return job.jobName.equals(this.jobName) ? true : false; } //objects are of different type. cannot be compared return false; } /** * Returns a boolean value denoting whether the job is MPI or not. * If no job type is specified in the globus rsl for the job, the job is * assumed to be non-MPI. * * @return boolean true if jobtype=mpi is set in the globus rsl, * false in all other cases. */ public boolean isMPIJob(){ boolean mpi = false; //sanity checks if(this.globusRSL == null || !(globusRSL.containsKey("jobtype")) ){ return false; } return( ((String)globusRSL.get("jobtype")).equalsIgnoreCase("mpi") )? true : //the job type is set to mpi false; } /** * Returns whether a job should be run in the work directory or not. * If a job is not run in the work directory, then it should be run * in the submit directory. That would be the case if the job has been * scheduled to site "local" and the class of the job corresponds to the * auxiliary jobs that have been created by Pegasus. * * @return boolean true to indicate the job can run in the work directory, * false if it cannot. */ public boolean runInWorkDirectory(){ return !(executionPool != null && executionPool.equalsIgnoreCase("local") && (jobClass > this.COMPUTE_JOB && jobClass <= this.CREATE_DIR_JOB)); } /** * Resets all the profiles associated with the job. */ public void resetProfiles(){ envVariables = new ENV(); globusRSL = new Globus(); condorVariables = new Condor(); dagmanVariables = new Dagman(); hints = new Hints(); vdsNS = new Pegasus(); } /** * Returns a textual description of the object. * * @return textual description of the job. */ public String toString(){ String str = this.globusRSL == null ?
null : this.globusRSL.toString(); String cVar = this.condorVariables == null ? null : this.condorVariables.toString(); String envStr = this.envVariables == null ? null : this.envVariables.toString(); StringBuffer sb = new StringBuffer(); String newline = System.getProperty( "line.separator", "\r\n" ); sb.append("["); append( sb, "Job Name", this.jobName , newline ); append( sb, "Logical Id", this.logicalId , newline ); append( sb, "Transformation", this.getCompleteTCName() , newline ); append( sb, "Derivation", this.getCompleteDVName() , newline ); append( sb, "Level", new Integer(this.level).toString() , newline ); append( sb, "Job Type Description", getJobTypeDescription(this.jobClass) , newline ); append( sb, "Job Id" , this.jobID , newline ); append( sb, "Runtime", this.mRuntime, newline ); append( sb, "Executable" , this.executable , newline ); append( sb, "Directory", this.mDirectory, newline ); append( sb, "Condor Universe" , this.condorUniverse , newline ); append( sb, "Globus Scheduler" , this.globusScheduler , newline ); append( sb, "Standard Output" , this.stdOut , newline ); append( sb, "Standard Input" , this.stdIn , newline ); append( sb, "Standard Error" , this.stdErr , newline ); append( sb, "Argument String" , this.strargs , newline ); append( sb, "Execution Site", this.executionPool , newline ); append( sb, "Staging Site", this.mStagingSite , newline ); append( sb, "Globus RSL" , str , newline ); append( sb, "Environment Variables" , envStr , newline ); append( sb, "Dagman Variables" , this.dagmanVariables , newline ); append( sb, "Hints" , this.hints , newline ); append( sb, "Input Files " , this.inputFiles , newline ); append( sb, "Output Files ", this.outputFiles , newline ); append( sb, "Condor Variables\n" , cVar , newline ); append( sb, "VDS Profiles" , vdsNS , newline ); append( sb, "Notifications", this.mNotifications, newline ); append( sb, "Credentials", this.mCredentialsType, newline ); sb.append("]"); return sb.toString(); } /** * Returns the DOT description of the object. This is used for visualizing * the workflow. * * @return String containing the Partition object in XML. * * @exception IOException if something fishy happens to the stream. */ public String toDOT() throws IOException{ Writer writer = new StringWriter(32); toDOT( writer, "" ); return writer.toString(); } /** * Returns the DOT description of the object. This is used for visualizing * the workflow. * * @param stream is a stream opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty * string. The parameter is used internally for the recursive * traversal. * * * @exception IOException if something fishy happens to the stream. */ public void toDOT( Writer stream, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); //write out the node stream.write( indent ); stream.write( "\"" ); stream.write( getID() ); stream.write( "\"" ); stream.write( " " ); stream.write( "[" ); //write out the color for the node stream.write( "color=" ); stream.write( getDOTColor() ); stream.write( "," ); //write out the style stream.write( "style=filled," ); //write out the label stream.write( "label=" ); stream.write( "\"" ); stream.write( getID() ); stream.write( "\"" ); stream.write( "]" ); stream.write( newLine ); stream.flush(); } /** * Returns the color with which DOT should color the node representing the * job. 
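*
* <p>For a compute job, the node line that toDOT() writes would look like
* (illustrative):
* <pre>
*   "preprocess_ID000001" [color=blueviolet,style=filled,label="preprocess_ID000001"]
* </pre>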
* * @return the color. */ protected String getDOTColor(){ int type = this.getJobType(); String color; switch ( type ){ case 1: //this.COMPUTE_JOB: color = "blueviolet"; break; case 2: //this.STAGE_IN_JOB: color = "gold"; break; case 3: //this.STAGE_OUT_JOB: color = "goldenrod"; break; case 5: //this.INTER_POOL_JOB: color = "goldenrod4"; break; case 4: //this.REPLICA_REG_JOB: color = "orange"; break; case 6: //this.CREATE_DIR_JOB: color = "darkturquoise"; break; case 7: //this.STAGED_COMPUTE_JOB: color = "violet"; break; case 8: //this.CLEANUP_JOB: color = "deepskyblue"; break; default: color = "grey"; } return color; } /** * Appends a key value mapping to the StringBuffer. * * @param sb StringBuffer to which the mapping has to be appended. * @param key the field. * @param value the value of the field. * @param newLine the newLineSeparator to be used. */ private void append(StringBuffer sb, String key, Object value, String newLine){ String openingBrace = "{"; String closingBrace = "}"; String pointsTo = " -> "; String separator = ","; sb.append(newLine).append(openingBrace).append(key).append(pointsTo). append(value).append(closingBrace).append(separator); } /** * Adds a profile to the job object * * @param p the profile to be added */ public void addProfile( Profile p ) { String namespace = p.getProfileNamespace(); String key = p.getProfileKey(); String value = p.getProfileValue(); switch( namespace.charAt(0) ){ case 'c'://condor this.condorVariables.checkKeyInNS( key, value ); break; case 'd'://dagman this.dagmanVariables.checkKeyInNS(key, value ); break; case 'e'://env this.envVariables.checkKeyInNS(key, value ); break; case 'g'://globus this.globusRSL.checkKeyInNS(key, value ); break; case 'h'://hint this.hints.checkKeyInNS(key, value ); break; case 'p'://pegasus this.vdsNS.checkKeyInNS(key, value ); break; default: //ignore should not come here ever. mLogger.log("Namespace not supported. ignoring "+ namespace, LogManager.WARNING_MESSAGE_LEVEL); break; } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/0000755000175000017500000000000011757531667023315 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/Measurement.java0000644000175000017500000000353111757531137026437 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize; import java.util.Date; /** * An empty interface that is the super interface for all measuremnts we take * from the kickstart records. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2563 $ */ public interface Measurement { /** * Returns the job for which the measurement was taken. * * @return the name of the job. */ public String getJobName(); /** * Returns the time at which the measurement was taken. * * @return the Date object representing the time. */ public Date getTime(); /** * Returns the value of the measurement. * * @return the value. 
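* Implementations typically return the parsed reading, for instance a size
* string such as "4.0K" for space usage (illustrative; the concrete type is
* left to the implementation).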
*/ public Object getValue(); /** * Sets the job for which the measurement was taken. * * @param name set the name of the job. */ public void setJobName( String name ); /** * Sets the time at which the measurement was taken. * * @param time the Date object representing the time. */ public void setTime( Date time ); /** * Sets the value of the measurement. * * @param value the value to be associated with measurement. */ public void setValue( Object value ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/spaceusage/0000755000175000017500000000000011757531667025435 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/spaceusage/Plot.java0000644000175000017500000000337711757531137027220 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize.spaceusage; import java.io.IOException; import java.util.List; /** * A plot interface that allows us to plot the SpaceUsage in different * formats. * * @author Karan Vahi * @version $Revision: 2563 $ */ public interface Plot { /** * The version of this API */ public static final String VERSION = "1.3"; /** * Initializer method. * * @param directory the directory where the plots need to be generated. * @param basename the basename for the files that are generated. * @param useStatInfo boolean indicating whether to use stat info or not. */ public void initialize( String directory , String basename, boolean useStatInfo ); /** * Plot out the space usage. * * @param su the SpaceUsage. * @param u the size unit. * @param timeUnits the time unit. * * @return List of file pathnames for the files that are written out. * * @exception IOException in case of unable to write to the file. */ public List plot( SpaceUsage su, char u , String timeUnits) throws IOException; } ././@LongLink0000000000000000000000000000015300000000000011564 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/spaceusage/KickstartOutputFilenameFilter.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/spaceusage/KickstartOutputFilenameFilte0000644000175000017500000000465411757531137033166 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize.spaceusage; import java.io.FilenameFilter; import java.io.File; import java.util.regex.Pattern; /** * A filename filter for identifying the kickstart output files. 
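*
* <p>A hedged usage sketch:
* <pre>
*   File dir = new File( "/path/to/submit/dir" );  // hypothetical directory
*   String[] outs = dir.list( new KickstartOutputFilenameFilter() );
* </pre>
* Names such as "preprocess_ID000001.out.000" are accepted, while names
* starting with "chmod" are ignored.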
* * @author Karan Vahi vahi@isi.edu * @version $Revision: 2563 $ */ public class KickstartOutputFilenameFilter implements FilenameFilter { /** * Stores the regular expression necessary to match kickstart output files. */ private static final String mRegexExpression = "[a-zA-Z_0-9]*(.out)([.][0-9][0-9][0-9])*"; /** * Stores the compiled pattern at first use, quasi-Singleton. */ private static Pattern mPattern = null; /*** * Tests if a specified file should be included in a file list. * * @param dir the directory in which the file was found. * @param name the name of the file. * * @return true if and only if the name should be included in the file list, * false otherwise. * * */ public boolean accept( File dir, String name) { //compile the pattern only once. if( mPattern == null ){ mPattern = Pattern.compile( mRegexExpression ); } boolean result = mPattern.matcher( name ).matches(); //we want to ignore jobs starting with chmod return result ? !name.startsWith( "chmod" ) : result; } public static void main( String[] args){ KickstartOutputFilenameFilter f = new KickstartOutputFilenameFilter(); System.out.println( f.accept( new java.io.File("."), "rc_tx_preprocess_ID000001_0.out.000" )); System.out.println( f.accept( new java.io.File("."), "blackdiamond-0.dag.dagman.out " )); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/spaceusage/JobSpace.java0000644000175000017500000000674611757531137027763 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize.spaceusage; import java.util.List; import java.util.Iterator; import java.util.ArrayList; /** * A data class that associates at most three space readings with the job, * corresponding to the GRIDSTART_PREJOB, GRIDSTART_MAINJOB and GRIDSTART_POSTJOB. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2563 $ */ public class JobSpace { /** * The PREJOB data index. */ public static final int GRIDSTART_PREJOB_EVENT_TYPE = 0; /** * The MAINJOB data index. */ public static final int GRIDSTART_MAINJOB_EVENT_TYPE = 1; /** * The POSTJOB data index. */ public static final int GRIDSTART_POSTJOB_EVENT_TYPE = 2; /** * The name of the job. */ private String mName; /** * The list of Space reading objects. */ private List mSpaceList; /** * The default constructor. */ public JobSpace(){ mSpaceList = new ArrayList( 3 ); for( int i = 0; i < 3; i++ ){ mSpaceList.add( null ); } } /** * The overloaded constructor. * * @param name the name of the job */ public JobSpace( String name ){ this(); mName = name; } /** * Adds a space record for a particular event type. * * @param space the space record. * @param type the event type. */ public void addSpaceReading( Space space, int type ){ if ( !typeInRange( type ) ){ throw new NumberFormatException( "Event type specified is not in range " + type ); } mSpaceList.set( type, space ); } /** * Returns the space reading for a particular type of event. * * @param type the event type.
* * @return Space object if data exists else null */ public Space getSpaceReading( int type ){ if ( !typeInRange( type ) ){ throw new NumberFormatException( "Event type specified is not in range " + type ); } Object obj = mSpaceList.get( type ); return ( obj == null ) ? null : (Space)obj; } /** * Returns the readings iterator. Values can be null. * * @return iterator to space readings. */ public Iterator spaceReadingsIterator(){ return mSpaceList.iterator(); } /** * Returns a boolean indicating whether the event type is in range of not. * * @param type the type value */ public boolean typeInRange( int type ){ return ( type >= GRIDSTART_PREJOB_EVENT_TYPE && type <= GRIDSTART_POSTJOB_EVENT_TYPE ); } /** * Returns a textual description of the object. * * @return description */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( mName ).append( " ").append( mSpaceList ); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/spaceusage/Ploticus.java0000644000175000017500000003551511757531137030103 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize.spaceusage; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import java.util.List; import java.util.Iterator; import java.io.PrintWriter; import java.io.FileWriter; import java.io.File; import java.io.IOException; import java.util.List; import java.util.ArrayList; import java.text.NumberFormat; import java.text.DecimalFormat; /** * An implementation that plots in the Ploticus format. * * @author Karan Vahi * @version $Revision: 2563 $ */ public class Ploticus implements Plot { /** * The size of an empty directory as reported by du -s */ public static final String EMPTY_DIRECTORY_SIZE = "4K"; /** * The default timing units. */ public static final String DEFAULT_TIMING_UNITS = "seconds"; /** * The minutes unit for the x axis. */ public static final String MINUTES_TIMING_UNITS = "minutes"; /** * The minutes unit for the x axis. */ public static final String HOURS_TIMING_UNITS = "hours"; /** * The directory where the files are to be generated. * */ private String mDirectory; /** * The basename of the files. */ private String mBasename; /** * A boolean indicating whether to use stat info or not. */ private boolean mUseStatInfo; /** * The handle to the logging object. */ private LogManager mLogger; /** * The number formatter to format the float entries. */ private NumberFormat mNumFormatter; /** * The time units. */ private String mTimeUnits; /** * The default constructor. * */ public Ploticus(){ mLogger = LogManagerFactory.loadSingletonInstance(); mDirectory = "."; mBasename = "ploticus" ; mNumFormatter = new DecimalFormat( "0.000" ); mTimeUnits = this.DEFAULT_TIMING_UNITS; } /** * Initializer method. * * @param directory the directory where the plots need to be generated. * @param basename the basename for the files that are generated. 
* @param useStatInfo boolean indicating whether to use stat info or not. */ public void initialize( String directory , String basename, boolean useStatInfo ){ mDirectory = directory; mBasename = basename; mUseStatInfo = useStatInfo; } /** * Plot out the space usage. Writes out a Ploticus data file. * * @param su the SpaceUsage. * @param unit the unit in which we need to plot. (K,M,G) * @param timeUnits the time unit. * * @return List of file pathnames for the files that are written out. * * @exception IOException in case of unable to write to the file. */ public List plot( SpaceUsage su, char unit, String timeUnits ) throws IOException{ //first let us sort on the timestamps su.sort(); String site; List result = new ArrayList( 2 ); //sanity check on time units mTimeUnits = ( timeUnits == null )? this.DEFAULT_TIMING_UNITS : timeUnits; //get the size of the empty directory in appropriate units float empty = new Space( new java.util.Date(), this.EMPTY_DIRECTORY_SIZE).getSize( unit ) ; //go thru space usage for each site. for( Iterator it = su.siteIterator(); it.hasNext(); ){ site = ( String ) it.next(); String dataFile = getFilename( site, ".dat" ); String scriptFile = getFilename( site, ".pl" ); result.add( dataFile ); result.add( scriptFile ); PrintWriter dataPW = new PrintWriter( new FileWriter( dataFile ) ); mLogger.log( "Will write out to " + dataFile + "," + scriptFile, LogManager.DEBUG_MESSAGE_LEVEL ); float cummulative_cln_size = 0;//tracks the space that has been cleaned up float curr_size = 0; //stores the current size float clnup = 0; boolean first = true; long minTime = 0,absTime, time = 0; //in seconds float maxSpace = 0; float cTime = 0; //go through space usage for a particular site for ( Iterator sizeIT = su.getSizes( site ).iterator(); sizeIT.hasNext(); ){ Space s = (Space) sizeIT.next(); absTime = s.getDate().getTime(); curr_size = s.getSize( unit ); if ( first ) { minTime = absTime; first = false; } //if the difference is >0 means data was cleaned up //add to cummulative size //cummulative_cln_size += ( diff > 0 ) ? diff : 0; //calculate the relative time in seconds time = ( absTime - minTime ) / 1000; //convert time from seconds to units specified cTime = convertFromSecondsTo( time, timeUnits ); //if data is regarding amount cleaned up add to cummulative size if( s.getCleanupFlag() ){ //subtract 4K overhead of directory size //only if not use statinfo clnup = mUseStatInfo ? curr_size : curr_size - empty; cummulative_cln_size += clnup; mLogger.log( cTime + " job " + s.getAssociatedJob() + " cleans up " + clnup + unit , LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( " Cummulative cleaned up size is now " + cummulative_cln_size, LogManager.DEBUG_MESSAGE_LEVEL ); //do not log just proceed continue; } //update the max space if ( cummulative_cln_size + curr_size > maxSpace ){ maxSpace = cummulative_cln_size + curr_size; } //log the entry in the data file. String entry = constructEntry( s.getAssociatedJob(), cTime, curr_size, ( cummulative_cln_size + curr_size)); mLogger.log( entry, LogManager.DEBUG_MESSAGE_LEVEL ); dataPW.println( entry ); } //the value in time right now it the max time generateScriptFile( scriptFile, dataFile, new Character(unit).toString(), cTime, maxSpace ); //close and flush to file per site dataPW.close(); } return result; } /** * Generates the script file required to give as input to ploticus. * * @param name the path to the script file. * @param dataFile the path to corresponding data file. * @param yUnits the units for the space value. 
* @param maxTime the time in seconds. * @param maxSpace the maximum space. */ public void generateScriptFile( String name, String dataFile, String yUnits, float maxTime, float maxSpace ) throws IOException{ PrintWriter writer = new PrintWriter( new FileWriter( name ) ); //write the page proc writer.println( "#proc page" ); writer.println( "#if @DEVICE in png,gif" ); writer.println( "\t scale: 0.6" ); writer.println( "#endif" ); writer.println(); //write the getdata proc writer.println( "#proc getdata" ); writer.print( "file: "); writer.println( new File(dataFile).getName() ); writer.println( " fieldnames: time with_cleanup without_cleanup jobname" ); writer.println(); //write out area defn writer.println( "#proc areadef" ); writer.println( "title: Remote Storage used over time" ); writer.println( "titledetails: size=14 align=C" ); writer.println( "rectangle: 1 1 8 4" ); /* we let ploticus worry about ranges */ // writer.print( "xrange: 0 " ); // //round to the latest 100 // long modTime = ( maxTime/100 + 1 )* 100 ; // //round space to latest 100 if > 0 // float modSpace = maxSpace > 1 ? // (new Float(maxSpace/100).intValue() + 1)* 100: // maxSpace; // writer.println( modTime ); // writer.print( "yrange: 0 " ); // writer.println( modSpace ); // writer.println(); writer.println( "xautorange datafield=1" ); writer.println( "yautorange datafield=3 lowfix=0" );//y axis always starts from 0 writer.println(); //round to the latest 100 float modTime = ( maxTime/100 + 1 )* 100 ; //round space to latest 100 if > 0 float modSpace = maxSpace > 1 ? (new Float(maxSpace/100).intValue() + 1)* 100: maxSpace; //we want 15-16 points on the x axis float xIncrement = ( (modTime/150) + 1 ) * 10; writer.println( "#proc xaxis" ); writer.print( "stubs: inc " ); writer.println( (int)xIncrement ); writer.print( "minorticinc: " ); writer.println( (int)(xIncrement/2) ); writer.print( "label: time in " ); writer.println( mTimeUnits ); writer.println(); //we want 10 points on the y axis float yIncrement = modSpace > 1? 
( (modSpace/100) + 1 ) * 10: modSpace/10; writer.println( "#proc yaxis" ); writer.print( "stubs: inc " ); writer.println( yIncrement ); writer.print( "minorticinc: " ); writer.println( yIncrement/2 ); writer.println( "gridskip: min" ); //writer.println( "ticincrement: 100 1000" ); writer.println( "label: space used in " + yUnits ); writer.println( "labeldistance: 0.6" ); writer.println(); writer.println( "#proc lineplot" ); writer.println( "xfield: time" ); writer.println( "yfield: with_cleanup" ); writer.println( "linedetails: color=blue width=.5" ); writer.println( "legendlabel: with cleanup " ); writer.println(); //generate the cleanup jobs using a scatter plot writer.println( "#proc scatterplot" ); writer.println( "xfield: time" ); writer.println( "yfield: with_cleanup" ); writer.println( "symbol: shape=circle fillcolor=red radius=0.04" ); writer.println( "select: @@jobname like clean_up_*" ); writer.println( "legendlabel: cleanup nodes" ); writer.println( ); // using scatter plot now // //we want only the cleanup jobs to appear // writer.println( "pointsymbol: shape=circle fillcolor=blue radius=0.0" ); // writer.println( "altsymbol: shape=circle fillcolor=red radius=0.04" ); // writer.println( "altwhen: @@jobname like cln_*" );//only plot points for cleanup jobs // writer.println(); writer.println( "#proc lineplot" ); writer.println( "xfield: time" ); writer.println( "yfield: without_cleanup" ); writer.println( "linedetails: style=1 dashscale=3 color=green width=.5" ); writer.println( "legendlabel: without cleanup " ); writer.println(); writer.println( "#proc legend" ); writer.println( "location: min+1 max+0.5" ); writer.println( "format: singleline" ); writer.close(); } /** * Returns the filename of the ploticus file to be generated. * * @param site the site handle. * @param suffix the suffix to be applied to the file. * * @return the path to the file. */ protected String getFilename( String site, String suffix ){ StringBuffer sb = new StringBuffer(); sb.append( mDirectory ).append( File.separator ).append( mBasename ). append( "-" ).append( site ).append( suffix ); return sb.toString(); } /** * Returns an entry that needs to be plotted in the graph. * * @param job the name of the associated job. * @param time the time. * @param clnup_size the size with cleanup. * @param no_clnup_size the size without cleanup. * * @return the entry to be logged */ protected String constructEntry( String job, float time, float clnup_size, float no_clnup_size ){ StringBuffer sb = new StringBuffer(); sb.append( mNumFormatter.format( time ) ).append( "\t" ) .append( mNumFormatter.format( clnup_size ) ).append( "\t" ) .append( mNumFormatter.format( no_clnup_size ) ).append( "\t" ) .append( job ); return sb.toString(); } /** * Converts from seconds to one of the units specified. * * @param time the time. * @param units the units. * * @return the converted value as a float. */ private float convertFromSecondsTo( long time, String units ){ if( !validTimeUnits( units) ){ throw new RuntimeException( "Unsupported time units " + units); } //compare by value, not by reference, as the caller may pass a string //that is not the interned constant if( units.equals( this.DEFAULT_TIMING_UNITS ) ){ return time; } float result; float factor = ( units.equals( this.MINUTES_TIMING_UNITS ) ) ? 60 : (units.equals( HOURS_TIMING_UNITS ) )? 3600: -1; result = ( time/(int)factor + (time % factor)/factor ); return result; } /** * Returns a boolean indicating whether the unit passed is a valid time unit * or not. * * @param units the time unit.
* * @return boolean */ private boolean validTimeUnits( String units ){ return ( units.equals( this.DEFAULT_TIMING_UNITS) || units.equals( this.MINUTES_TIMING_UNITS ) || units.equals( this.HOURS_TIMING_UNITS ) ) ; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/spaceusage/TailStatd.java0000644000175000017500000001746311757531137030174 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize.spaceusage; import edu.isi.pegasus.planner.visualize.Callback; import edu.isi.pegasus.common.logging.LogManager; import java.io.File; import java.io.FileReader; import java.io.BufferedReader; import java.io.IOException; import java.util.StringTokenizer; import java.util.List; import java.util.Map; import java.util.HashMap; import java.util.Date; /** * The callback parses the jobstate.log file to order the events of how * the jobs executed. * * @author Karan Vahi vahi@isi.edu * * @version $Revision: 2563 $ */ public class TailStatd extends SpaceUsageCallback{ /** * The name of the tailstatd file. */ public static final String JOBSTATE_LOG = "jobstate.log"; /** * The state in the jobstate that is taken to designate the GRIDSTART_PREJOB * time. */ public static final String GRIDSTART_PREJOB_STATE = "EXECUTE"; /** * The state in the jobstate that is taken to designate the GRIDSTART_MAINJOB * time. */ public static final String GRIDSTART_MAINJOB_STATE = "EXECUTE"; /** * The state in the jobstate that is taken to designate the GRIDSTART_POSTJOB * time. */ public static final String GRIDSTART_POSTJOB_STATE = "JOB_TERMINATED"; /** * The directory where all the files reside. */ private String mDirectory ; /** * A Map store that stores JobSpace objects indexed by the name of the jobs. */ private Map mJobSpaceStore; /** * The handle to the logging object */ private LogManager mLogger; /** * The default constructor. */ public TailStatd(){ super(); mDirectory = "."; mJobSpaceStore = new HashMap(); } /** * Initializes the callback. * * @param directory the directory where all the files reside. * @param useStatInfo boolean indicating whether to use stat info or not. 
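*
* <p>The jobstate.log lines consumed here start with three tokens, the epoch
* timestamp, the job name and the job state, e.g. (illustrative values):
* <pre>
*   1238108782 preprocess_ID000001 EXECUTE ...
*   1238108795 preprocess_ID000001 JOB_TERMINATED ...
* </pre>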
*/ public void initialize( String directory, boolean useStatInfo ){ super.initialize( directory, useStatInfo ); mDirectory = directory; File jobstate = new File( directory, this.JOBSTATE_LOG ); //some sanity checks on file if ( jobstate.exists() ){ if ( !jobstate.canRead() ){ throw new RuntimeException( "Unable to read the jobstate file " + jobstate ); } } else{ throw new RuntimeException( "The jobstate file does not exist " + jobstate ); } BufferedReader reader ; try{ reader = new BufferedReader(new FileReader(jobstate)); String line, time = null, job = null, state = null, token; int count = 0; StringTokenizer st; while ( (line = reader.readLine()) != null) { //parse the line contents st = new StringTokenizer( line ); count = 1; while( st.hasMoreTokens() ){ token = ( String )st.nextToken(); switch ( count ){ case 1: time = token; break; case 2: job = token; break; case 3: state = token; break; default: } count ++; } //guard against lines seen before any state token has been parsed if ( state == null || !validState( state ) ){ //ignore and move to next line continue; } JobSpace js = ( mJobSpaceStore.containsKey( job ) )? (JobSpace)mJobSpaceStore.get( job ): new JobSpace( job ); Date d = new Date( Long.parseLong( time ) * 1000 ); Space s = new Space( d ); s.setAssociatedJob( job ); js.addSpaceReading( s, this.getEventType( state )); //specific quirk because we use the same trigger for the pre job and main job if( state.equals( this.GRIDSTART_PREJOB_STATE ) ){ //add the same event reading for the main job js.addSpaceReading( (Space)s.clone(), JobSpace.GRIDSTART_MAINJOB_EVENT_TYPE ); } //add the js back mJobSpaceStore.put( job, js ); } } catch( IOException ioe ){ throw new RuntimeException( "While reading jobstate file " + jobstate, ioe ); } //System.out.println( "Job space store is " + mJobSpaceStore ); } /** * Callback for the starting of an invocation record. * * @param job the job/file being parsed. * @param resource the site id where the job was executed. */ public void cbInvocationStart( String job, String resource) { mMainJob = job; mSite = resource; mJobOutSize = 0; //get the one from store! mJobSpace = (JobSpace)mJobSpaceStore.get( job ); } /** * Parses the content and stores it in a SpaceUsage object. * The date is taken from the jobstate.log instead of the date in the record. * * @param header the header from which the content was collected. * @param content the Content. * * @return Space */ protected Space parseContent( String header, String content ){ String date = null; String size = null; for ( StringTokenizer st = new StringTokenizer( content ); st.hasMoreTokens(); ){ if ( date == null ) { date = st.nextToken(); } else{ size = st.nextToken(); break; } } JobSpace js = (JobSpace)mJobSpaceStore.get( mMainJob ); Space s = js.getSpaceReading( this.getEventTypeForHeader( header ) ); if( s == null ){ throw new RuntimeException( "JobState " + js + " did not contain information about header " + header ); } s.setSize( size ); return s; } /** * Returns a boolean indicating whether the state is valid or not.
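* The valid states are EXECUTE (which doubles as both the prejob and mainjob
* trigger) and JOB_TERMINATED.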
* * @param state the state * * @return boolean */ protected boolean validState( String state ){ return ( state.equals( this.GRIDSTART_MAINJOB_STATE ) || state.equals( this.GRIDSTART_POSTJOB_STATE ) || state.equals( this.GRIDSTART_PREJOB_STATE ) ); } /** * Returns the event type matching a particular job type * * @param state the state of the job * * @return the corresponding event type */ private int getEventType( String state ){ int event = -1; if ( state.equals( this.GRIDSTART_PREJOB_STATE ) ){ event = JobSpace.GRIDSTART_PREJOB_EVENT_TYPE; } else if ( state.equals( this.GRIDSTART_MAINJOB_STATE ) ){ event = JobSpace.GRIDSTART_MAINJOB_EVENT_TYPE; } else if ( state.equals( this.GRIDSTART_POSTJOB_STATE ) ){ event = JobSpace.GRIDSTART_POSTJOB_EVENT_TYPE; } return event; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/spaceusage/SpaceUsage.java0000644000175000017500000001073411757531137030315 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize.spaceusage; import java.util.Map; import java.util.HashMap; import java.util.List; import java.util.LinkedList; import java.util.Comparator; import java.util.Collections; import java.util.Iterator; /** * A container object that stores the space usage for each site. * * @author Karan Vahi * @version $Revision: 2564 $ */ public class SpaceUsage { /** * The map that stores the list of space objects indexed by site name. */ private Map mStore; /** * The default store. */ public SpaceUsage() { mStore = new HashMap(); } /** * Returns an iterator to list of String site identifiers * for which data is available. * * @return Iterator */ public Iterator siteIterator(){ return mStore.keySet().iterator(); } /** * Returns the list of Space objects corresponding to a * particular site. * * @param site the site for which sizes are required. * * @return List */ public List getSizes( String site ) { return (mStore.containsKey( site ) ? (List) mStore.get( site ) : new LinkedList()); } /** * Add a Space record to the store. * * @param site the site for which the record is logged. * @param record the SpaceUsage record. */ public void addRecord( String site, Space record ){ List l = ( mStore.containsKey( site ) ) ? (List) mStore.get( site ): new LinkedList(); l.add( record ); mStore.put( site, l ); } /** * Sorts the records for each site. */ public void sort(){ SpaceComparator s = new SpaceComparator(); for( Iterator it = mStore.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry) it.next(); List l = (List)entry.getValue(); Collections.sort( l, s ); } } /** * Returns textual description of the object. 
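* The readings are listed per site; a store rendered by this method takes roughly the following form (site name and reading purely illustrative): <pre> { isi_viz -> Thu Apr 12 10:19:06 PDT 2007 120.5K ID000001 , } </pre>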
* * @return the textual description */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( "{\n "); for( Iterator it = mStore.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry) it.next(); List l = (List)entry.getValue(); sb.append( entry.getKey() ).append( " -> " ); for( Iterator lIT = l.iterator(); lIT.hasNext(); ){ sb.append( "\n\t"); sb.append( lIT.next() ); sb.append( " , "); } } sb.append( "\n}" ); return sb.toString(); } } /** * Comparator for Space objects that allows us to sort on time. * */ class SpaceComparator implements Comparator{ /** * Implementation of the {@link java.lang.Comparable} interface. * Compares this object with the specified object for order. Returns a * negative integer, zero, or a positive integer as this object is * less than, equal to, or greater than the specified object. The * definitions are compared by their type, and by their short ids. * * @param o1 is the object to be compared * @param o2 is the object to be compared with o1. * * @return a negative number, zero, or a positive number, if the * object compared against is less than, equals or greater than * this object. * @exception ClassCastException if the specified object's type * prevents it from being compared to this Object. */ public int compare( Object o1, Object o2 ) { if ( o1 instanceof Space && o2 instanceof Space ) { Space s1 = (Space) o1; Space s2 = (Space) o2; return s1.getDate().compareTo( s2.getDate() ); } else { throw new ClassCastException( "object is not a Space" ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/spaceusage/SpaceUsageCallback.java0000644000175000017500000004103211757531137031725 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize.spaceusage; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Currently; import edu.isi.pegasus.planner.visualize.Callback; import edu.isi.pegasus.planner.invocation.StatInfo; import java.util.List; import java.util.Iterator; import java.util.Map; import java.util.StringTokenizer; import edu.isi.pegasus.planner.invocation.Machine; /** * Implements callback interface to calculate space usage. * * @author not attributable * @version 1.0 */ public class SpaceUsageCallback implements Callback { /** * The marker for the PREJOB. The stdout corresponding to the * PREJOB is enclosed within this marker. */ public static final String PREJOB_MARKER = "@@@PREJOB@@@"; /** * The marker for the MAINJOB. The stdout corresponding to the * MAINJOB is enclosed within this marker. */ public static final String MAINJOB_MARKER = "@@@MAINJOB@@@"; /** * The marker for the POSTJOB. The stdout corresponding to the * POSTJOB is enclosed within this marker. */ public static final String POSTJOB_MARKER = "@@@POSTJOB@@@"; /** * The logical site where the job was run. 
*/ protected String mSite; /** * The SpaceUsage object created during the callback construction. */ protected SpaceUsage mSpaceStore; /** * The main job whose record is being parsed. */ protected String mMainJob; /** * The handle to the logger. */ protected LogManager mLogger; /** * Boolean indicating whether to use stat data or not for computing directory * sizes. */ protected boolean mUseStatInfo; /** * Stores in bytes the size of all the output files for a job. */ protected long mJobOutSize; /** * Stores in bytes the size of all the input files for a job. */ protected long mJobInSize; /** * Stores all the space readings for the current invocation record. */ protected JobSpace mJobSpace; /** * The default constructor. */ public SpaceUsageCallback() { mSpaceStore = new SpaceUsage(); mLogger = LogManagerFactory.loadSingletonInstance(); mUseStatInfo = false; mJobOutSize = 0; mJobInSize = 0; } /** * Initializes the callback. * * @param directory the directory where all the files reside. * @param useStatInfo boolean indicating whether to use stat info or not. */ public void initialize( String directory , boolean useStatInfo){ mUseStatInfo = useStatInfo; } /** * Callback for the starting of an invocation record. * * @param job the job/file being parsed. * @param resource the site id where the job was executed. */ public void cbInvocationStart( String job, String resource) { mMainJob = job; mSite = resource; mJobOutSize = 0; mJobSpace = new JobSpace( job ); } public void cbStdIN(List jobs, String data) { } public void cbStdOut(List jobs, String data) { this.parseData( data ); } public void cbStdERR(List jobs, String data) { } /** * Callback function for when stat information for an input file is * encountered. Empty for time being. * * @param filename the name of the file. * @param info the StatInfo about the file. * */ public void cbInputFile( String filename, StatInfo info ){ if( mUseStatInfo ){ //sanity check if( info == null){ //log a warning mLogger.log( "No stat info for input file " + filename, LogManager.WARNING_MESSAGE_LEVEL ); return; } //increment the size to the already stored size. mJobInSize += info.getSize(); mLogger.log( "\tInput file is " + filename + " of size " + info.getSize() , LogManager.DEBUG_MESSAGE_LEVEL); } } /** * Callback function for when stat information for an output file is * encountered. The size of the file is computed and stored. * * @param filename the name of the file. * @param info the StatInfo about the file. * */ public void cbOutputFile( String filename, StatInfo info ){ if( mUseStatInfo ){ //sanity check if( info == null){ //log a warning mLogger.log( "No stat info for output file " + filename, LogManager.WARNING_MESSAGE_LEVEL ); return; } //increment the size to the already stored size. mJobOutSize += info.getSize(); mLogger.log( "\tOutput file is " + filename + " of size " + info.getSize() , LogManager.DEBUG_MESSAGE_LEVEL); } } /** * Callback signalling that an invocation record has been parsed. * Stores the total compute size, somewhere in the space structure * for the jobs. 
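* When stat information is used, a cleanup job records its total input size as space freed, while any other job records its total output size as space consumed; sizes are tracked in kilobytes.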
* * */ public void cbInvocationEnd() { //if we are using statinfo if( mUseStatInfo ){ //we get just the post job record, and put the mJobOut size in //there Space s = mJobSpace.getSpaceReading( JobSpace.GRIDSTART_POSTJOB_EVENT_TYPE ); if( s == null ){ mLogger.log( "No space reading for job " + mMainJob, LogManager.WARNING_MESSAGE_LEVEL ); return; } if( cleanupJob( mMainJob ) ){ float size = (float)mJobInSize; size = size/1024; //for cleanup jobs we take input size s.setCleanupFlag( true ); s.setSize( size , 'K' ); mLogger.log( "For job " + mMainJob + " total input file size in K " + size, LogManager.DEBUG_MESSAGE_LEVEL ); mSpaceStore.addRecord( mSite, s ) ; //we add a duplicate space reading that gets populated //later with the reading of directory after the cleanup job Space s1 = (Space)s.clone(); s1.setCleanupFlag( false ); s1.setSize( "0" ); mSpaceStore.addRecord( mSite, s1 ); } else{ //for all other jobs we take output sizes float size = (float)mJobOutSize; size = size/1024; s.setSize( size , 'K' ); mSpaceStore.addRecord( mSite, s ) ; mLogger.log( "For job " + mMainJob + " total output file size in K " + size, LogManager.DEBUG_MESSAGE_LEVEL ); } } else{ //we put all the valid records into the space store for( Iterator it = mJobSpace.spaceReadingsIterator(); it.hasNext(); ){ Object obj = it.next(); if( obj == null ){ //go to next reading continue; } mSpaceStore.addRecord( mSite, (Space)obj ); } } //reset per site data mSite = ""; mMainJob = ""; mJobOutSize = 0; mJobInSize = 0; mJobSpace = null; } /** * Returns the SpaceUsage store built. * * @return SpaceUsage */ public Object getConstructedObject() { return mSpaceStore; } /** * Parses the data in the data section. * * @param data String */ private void parseData ( String data ) { //sanity check if ( data == null ){ return; } //System.out.println( "DATA is " + data ); //parse through String token; String header = null; StringBuffer content = new StringBuffer(); boolean start = false; for( StringTokenizer st = new StringTokenizer(data); st.hasMoreTokens(); ){ token = st.nextToken(); if ( validHeader( token ) ){ //if start is true we have one job data if( start ){ //token needs to match the previous header if( token.equals( header ) ){ mLogger.log( "Content before parsing " + content, LogManager.DEBUG_MESSAGE_LEVEL ); Space s = parseContent( header, content.toString() ); //if the content was set for the MAINJOB //set the marker to be true if ( token.equals( this.MAINJOB_MARKER ) ){ s.setCleanupFlag( true ); } // mSpaceStore.addRecord( mSite, s ) ; mJobSpace.addSpaceReading( s, this.getEventTypeForHeader( header )); mLogger.log( "Content after parsing is " + s, LogManager.DEBUG_MESSAGE_LEVEL ); start = !start; content = new StringBuffer(); } else{ /* error */ throw new RuntimeException( "Incorrect placement of markers in stdout (" + header + " , " + token + " )" ); } continue; } //token is a valid header header = token; //header is matched. start = !start; } else if ( start ){ //we have already matched a header. content.append( token ).append( " " ); } } } /** * Callback signalling that we are done with the parsing of the files. */ public void done(){ mSpaceStore.sort(); //we have all the records. //need to do some mischief in case of using statinfo if( mUseStatInfo ){ //go thru space usage for each site. 
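//running total per site: cleanup readings subtract their size from the directory occupancy, while all other readings add to it and are then overwritten with the running total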
for( Iterator it = mSpaceStore.siteIterator(); it.hasNext(); ){ String site = (String) it.next(); float dir_size = 0; //go through space usage for a particular site for (Iterator sizeIT = mSpaceStore.getSizes( site ).iterator(); sizeIT.hasNext(); ) { Space s = (Space) sizeIT.next(); if( s.getCleanupFlag() ){ //subtract directory size dir_size -= s.getSize( 'K' ); } else{ dir_size += s.getSize( 'K' ); //set the size back s.setSize( dir_size, 'K' ); } mLogger.log( "Directory size after job " + s.getAssociatedJob() + " in K is " + dir_size, LogManager.DEBUG_MESSAGE_LEVEL ); } } } } /** * Parses the data in the data section. * * @param data String */ /* private void parseData ( String data ) { int length = ( data == null ) ? 0 : data.length(); System.out.println( "Data is " + data ); String header = PREJOB_MARKER; StringBuffer content = new StringBuffer(); boolean start = false; boolean end = true; for ( int i = 0; i < length; i++){ char c = data.charAt( i ); if ( c == '@' ){ //see if look ahead matches if ( i + header.length() < length && ( data.substring( i, i + header.length()).equals( header ) ) ){ //if start is true we have one job data if ( start ) { //we are capturing date for post jobs only if ( header.equalsIgnoreCase( POSTJOB_MARKER ) ){ System.out.println("Content before parsing " + content); Space s = parseContent(content.toString()); mSpaceStore.addRecord(mSite, s); System.out.println("CONTENT IS " + s); } content = new StringBuffer(); start = false; end = true; //skip to the character after the header i = i + header.length() - 1; header = POSTJOB_MARKER; continue; } //header is matched. start = !start; end = !end; //skip to the character after the header i = i + header.length() - 1; } else if ( start ) { content.append( c ); } } else if ( start ){ //add to content content.append( c ); } } } */ /** * Returns a boolean indicating whether the token passed matches * a header or not. In the specific case of using statinfo, for calculating * directory sizes, we only mainjob and postjob markers are valid. * * @param token the token to be matched. * * @return boolean */ protected boolean validHeader( String token ){ return ( this.mUseStatInfo ) ? //only two headers are valid. (token.equals( this.MAINJOB_MARKER ) || token.equals( this.POSTJOB_MARKER )): //all three headers are valid. (token.equals( this.MAINJOB_MARKER ) || token.equals( this.PREJOB_MARKER ) || token.equals( this.POSTJOB_MARKER )); } /** * Returns boolean indicating whether the job is a cleanup job or not. * Does it on the basis of the name of the job. * * @param name the name of the job. * * @return boolean */ public boolean cleanupJob( String name ){ return name.startsWith( "clean_up_" ); } /** * Parses the content and stores it in a Space object. * * @param header the header from which the content was collected. * @param content the Content. * * @return Space */ protected Space parseContent( String header, String content ){ String date = null; String size = null; Space s ; for ( StringTokenizer st = new StringTokenizer( content ); st.hasMoreTokens(); ){ if ( date == null ) { date = st.nextToken(); } else{ size = st.nextToken(); break; } } s = new Space( Currently.parse(date), size ); s.setAssociatedJob( mMainJob ); return s; } /** * Returns the event type matching a header. 
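* The PREJOB, MAINJOB and POSTJOB markers map to the corresponding JobSpace event types, and -1 is returned for an unrecognized marker.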
* * @param marker the marker * * @return the corresponding event type */ protected int getEventTypeForHeader( String marker ){ int event = -1; if ( marker.equals( this.PREJOB_MARKER ) ){ event = JobSpace.GRIDSTART_PREJOB_EVENT_TYPE; } if ( marker.equals( this.MAINJOB_MARKER ) ){ event = JobSpace.GRIDSTART_MAINJOB_EVENT_TYPE; } if ( marker.equals( this.POSTJOB_MARKER ) ){ event = JobSpace.GRIDSTART_POSTJOB_EVENT_TYPE; } return event; } /** * Callback for the metadata retrieved from the kickstart record. * * @param metadata */ public void cbMetadata( Map metadata ){ } /** * Callback to pass the machine information on which the job is executed. * * @param machine */ public void cbMachine( Machine machine ){ } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/spaceusage/Space.java0000644000175000017500000001666311757531137027337 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize.spaceusage; import java.util.Date; /** * A data class that stores the space usage. * * @author Karan Vahi * @version $Revision: 2563 $ */ public class Space implements Cloneable{ /** * Holds the timestamp when usage was taken. */ private Date mDate; /** * The size. */ private float mSize; /** * The units in which the size is stored. * M for megabyte, K for Kilobytes, T for terabytes. * The default unit is K */ private char mUnit; /** * index to do conversions between units. */ private int mIndex; /** * Indicates whether the space denoted is the amount of space * cleaned up. */ private boolean mCleanupSpace; /** * The jobname for which the reading was taken. */ private String mAssociatedJob; /** * The overloaded constructor. * * @param d the date. */ public Space( Date d ){ mDate = d; mUnit = 'K'; mIndex = getIndex( mUnit ); mCleanupSpace = false; } /** * The overloaded constructor. * * @param d Date * @param size the size with the last character denoting the unit. */ public Space( Date d, String size ) { mDate = d; mUnit = 'K'; mIndex = getIndex( mUnit ); setSize( size ); mCleanupSpace = false; } /** * Sets boolean indicating that space denoted is the amount cleaned. * * @param cleanup boolean indicating that value is cleaned up. */ public void setCleanupFlag( boolean cleanup ){ mCleanupSpace = cleanup; } /** * Sets the associated job for which the reading was taken. * * @param job the associated job. */ public void setAssociatedJob( String job ){ mAssociatedJob = job; } /** * Returns the associated job. * * @return the associated job. */ public String getAssociatedJob(){ return mAssociatedJob; } /** * Returns boolean indicating whether the space denoted is the amount cleaned * or not. * * @return boolean indicating that value is cleaned up. */ public boolean getCleanupFlag( ){ return mCleanupSpace; } /** * Sets the size. * * @param size the size optionally with the units. 
* @param unit the unit of the size */ public void setSize( float size, char unit ) { mSize = size; mUnit = unit; mIndex = getIndex( unit ); } /** * Sets the size. * * @param size the size optionally with the units. */ public void setSize( String size ) { size = size.trim(); char c = size.charAt( size.length() - 1); if ( Character.isLetter( c ) ){ if ( validUnit(c)){ mUnit = c; mIndex = this.getIndex( c ); mSize = Float.parseFloat( size.substring( 0, size.length() - 1)); } else { throw new RuntimeException( "Invalid unit " + c ); } } else{ mSize = Float.parseFloat(size); mUnit = 'K'; mIndex = this.getIndex( mUnit ); } } /** * Returns the time at which the Space reading was recorded. * * @return Date */ public Date getDate( ){ return mDate; } /** * Returns the size in the units associated with it. * * @return size in float */ public float getSize(){ return mSize; } /** * Returns the size in a particular unit. * * @param u the unit. * * @return size in float */ public float getSize( char u ){ int index = getIndex( u ); //System.out.println( "difference is " + (mIndex - index) ); //System.out.println( "multiplying factor is " + this.getMultiplyingFactor( 1024, mIndex - index)); //return mSize * (float) Math.pow( 1024, mIndex - index ); return mSize * this.getMultiplyingFactor( 1024, mIndex -index ); } /** * Returns the units associated with the size. * * @return the unit */ public char getUnits(){ return mUnit; } /** * Returns whether a character is a valid unit or not. * * @param c the unit * * @return boolean */ public boolean validUnit( char c ){ return ( c == 'K' || c == 'M' || c == 'G'); } /** * Returns a textual description of the content. * * @return String. */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( mDate ).append( " " ).append( mSize ).append( mUnit ); sb.append( " " ).append( getAssociatedJob() ); if( mCleanupSpace ){ sb.append( " cleaned up space" ); } return sb.toString(); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ Space s; try{ s = ( Space) super.clone(); } catch( CloneNotSupportedException e ){ //somewhere in the hierarchy chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } s.mAssociatedJob = this.mAssociatedJob; s.mCleanupSpace = this.mCleanupSpace; s.mDate = (Date)this.mDate.clone(); s.mIndex = this.mIndex; s.mSize = this.mSize; s.mUnit = this.mUnit; return s; } /** * Returns the index for the associated unit. * * @return the index. */ private int getIndex(){ return mIndex; } /** * Returns the index for a unit. * * @param u the unit * @return the index. */ private int getIndex( char u ){ int index = -1; switch ( u ){ case 'K': index = 1; break; case 'M': index = 2; break; case 'G': index = 3; break; default: throw new RuntimeException( "Invalid unit scheme " + u ); } return index; } /** * Returns the multiplying factor for conversion. * Simulates the ^ operator. * * @param base the base value. * @param power the power to raise the base to. 
* * @return multiplying value */ private float getMultiplyingFactor( int base, int power){ float result = 1; if ( power >= 0 ){ for( int i = 0; i < power; i++){ result *= base; } } else{ for( int i = 0; i < -power; i++){ result /= base; } } return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/JobMeasurements.java0000644000175000017500000001040311757531137027251 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize; import java.util.Date; import java.util.List; import java.util.Iterator; import java.util.ArrayList; /** * A data class that associates at most three measurements with the job * corresponding to the GRIDSTART_PREJOB, GRIDSTART_MAINJOB and GRIDSTART_POSTJOB. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2563 $ */ public class JobMeasurements { /** * The PREJOB data index. */ public static final int GRIDSTART_PREJOB_EVENT_TYPE = 0; /** * The MAINJOB data index. */ public static final int GRIDSTART_MAINJOB_EVENT_TYPE = 1; /** * The POSTJOB data index. */ public static final int GRIDSTART_POSTJOB_EVENT_TYPE = 2; /** * The name of the job. */ private String mName; /** * The list of Measurement objects. */ private List mMeasurementList; /** * The corresponding list of Date objects signifying the times at which * an event happened. */ private List mTimeList; /** * The default constructor. */ public JobMeasurements(){ mMeasurementList = new ArrayList( 3 ); for( int i = 0; i < 3; i++ ){ mMeasurementList.add( null ); } mTimeList = new ArrayList( 3 ); for( int i = 0; i < 3; i++ ){ mTimeList.add( null ); } } /** * The overloaded constructor. * * @param name the name of the job */ public JobMeasurements( String name ){ this(); mName = name; } /** * Adds a measurement for a particular event type. * * @param measurement the measurement to be associated * @param type the event type */ public void setMeasurement( Measurement measurement, int type ){ if ( !typeInRange( type ) ){ throw new NumberFormatException( "Event type specified is not in range " + type ); } mMeasurementList.set( type, measurement ); //mTimeList.set( type, measurement.getTime() ); } /** * Adds a time for a particular event type. * * @param time the time * @param type the event type */ public void setTime( Date time, int type ){ if ( !typeInRange( type ) ){ throw new NumberFormatException( "Event type specified is not in range " + type ); } mTimeList.set( type, time ); } /** * Returns the measurement corresponding to the event type. * * @param type event type. * * @return Measurement object if data exists else null */ public Measurement getMeasurement( int type ){ if ( !typeInRange( type ) ){ throw new NumberFormatException( "Event type specified is not in range " + type ); } Object obj = mMeasurementList.get( type ); return ( obj == null ) ? null : ( Measurement )obj; } /** * Returns the readings iterator. Values can be null. * * @return iterator to measurements. 
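* Each entry is either a Measurement or <code>null</code> if no reading was recorded for that event type.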
*/ public Iterator measurementsIterator(){ return mMeasurementList.iterator(); } /** * Returns a boolean indicating whether the event type is in range of not. * * @param type the type value */ public boolean typeInRange( int type ){ return ( type >= GRIDSTART_PREJOB_EVENT_TYPE && type <= GRIDSTART_POSTJOB_EVENT_TYPE ); } /** * Returns a textual description of the object. * * @return description */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( mName ).append( " ").append( mMeasurementList ); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/WorkflowMeasurements.java0000644000175000017500000001130111757531137030347 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize ; import java.util.Map; import java.util.HashMap; import java.util.List; import java.util.LinkedList; import java.util.Comparator; import java.util.Collections; import java.util.Iterator; /** * A container object that stores the measurements for each site on which * the workflow was executed. * * @author Karan Vahi * @version $Revision: 2564 $ */ public class WorkflowMeasurements { /** * The map that stores the list of Measurement indexed by site name. */ private Map mStore; /** * The default constructor. */ public WorkflowMeasurements() { mStore = new HashMap(); } /** * Returns an iterator to list of String site identifiers * for which data is available. * * @return Iterator */ public Iterator siteIterator(){ return mStore.keySet().iterator(); } /** * Returns the list of Measurement objects corresponding to a * particular site. * * @param site the site for which Measurements are required. * * @return List */ public List getMeasurements( String site ) { return (mStore.containsKey( site ) ? (List) mStore.get( site ) : new LinkedList()); } /** * Add a Measurement to the store. * * @param site the site for which the record is logged. * @param record the Measurement record. */ public void addMeasurement( String site, Measurement record ){ List l = ( mStore.containsKey( site ) ) ? (List) mStore.get( site ): new LinkedList(); l.add( record ); mStore.put( site, l ); } /** * Sorts the records for each site. */ public void sort(){ MeasurementComparator s = new MeasurementComparator(); for( Iterator it = mStore.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry) it.next(); List l = (List)entry.getValue(); Collections.sort( l, s ); } } /** * Returns textual description of the object. 
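* * <p>For example, a store populated as follows (site and values illustrative) can be rendered with this method: <pre> WorkflowMeasurements wm = new WorkflowMeasurements(); wm.addMeasurement( "isi_condor", new NumJobsMeasurement( new Date(), new Integer(1), "ID000001" ) ); wm.sort(); </pre>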
* * @return the textual description */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( "{\n "); for( Iterator it = mStore.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry) it.next(); List l = (List)entry.getValue(); sb.append( entry.getKey() ).append( " -> " ); for( Iterator lIT = l.iterator(); lIT.hasNext(); ){ sb.append( "\n\t"); sb.append( lIT.next() ); sb.append( " , "); } } sb.append( "\n}" ); return sb.toString(); } } /** * Comparator for Measurement objects that allows us to sort on time. * */ class MeasurementComparator implements Comparator{ /** * Implementation of the {@link java.lang.Comparable} interface. * Compares this object with the specified object for order. Returns a * negative integer, zero, or a positive integer as this object is * less than, equal to, or greater than the specified object. The * measurements are compared by their timestamps. * * @param o1 is the object to be compared * @param o2 is the object to be compared with o1. * * @return a negative number, zero, or a positive number, if the * object compared against is less than, equals or greater than * this object. * @exception ClassCastException if the specified object's type * prevents it from being compared to this Object. */ public int compare( Object o1, Object o2 ) { if ( o1 instanceof Measurement && o2 instanceof Measurement ) { Measurement s1 = (Measurement) o1; Measurement s2 = (Measurement) o2; return s1.getTime().compareTo( s2.getTime() ); } else { throw new ClassCastException( "object is not a Measurement" ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/KickstartParser.java0000644000175000017500000002234211757531137027267 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import org.griphyn.vdl.util.ChimeraProperties; import org.griphyn.vdl.directive.ParseKickstart; import edu.isi.pegasus.planner.invocation.InvocationRecord; import edu.isi.pegasus.planner.invocation.Job; import edu.isi.pegasus.planner.invocation.JobStatus; import edu.isi.pegasus.planner.invocation.JobStatusRegular; import edu.isi.pegasus.planner.invocation.StatCall; import edu.isi.pegasus.planner.invocation.Data; import edu.isi.pegasus.planner.invocation.Regular; import edu.isi.pegasus.planner.invocation.Status; import edu.isi.pegasus.planner.parser.InvocationParser; import java.util.List; import java.util.ArrayList; import java.util.Iterator; import java.util.Map; import java.io.File; import java.io.IOException; import java.io.StringReader; import java.io.StringWriter; import java.util.HashMap; /** * A helper class that parses the kickstart records and has calls to callbacks * for working on data sections of standard out, standard error, and standard * input. 
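* * <p>A hypothetical parse sequence (the file path is an assumption, and the callback must be set before parsing): <pre> KickstartParser parser = new KickstartParser(); parser.setCallback( new SpaceUsageCallback() ); parser.parseKickstartFile( "/path/to/run/directory/ID000001.out.000" ); </pre>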
* * @author Karan Vahi vahi@isi.edu * @version $Revision: 2587 $ */ public class KickstartParser { /** * The parser class that is used to parse a kickstart record and return * the invocation record. */ private ParseKickstart mParseKickstart; /** * The handle to the logging object. */ private LogManager mLogger; /** * The callback object. */ private Callback mCallback; /** * Semi-singleton, dynamically instantiated once for the lifetime. * The properties determine which Xerces parser is being used. */ private InvocationParser mInvocationParser; /** * The default constructor. */ public KickstartParser(){ mLogger = LogManagerFactory.loadSingletonInstance(); } /** * Sets the callback to which to callout to while parsing a kickstart * record. * * @param c the Callback to call out to. */ public void setCallback( Callback c ){ mCallback = c; mLogger = LogManagerFactory.loadSingletonInstance(); } /** * */ public List parseKickstartFile( String file ) throws IOException{ List result = new ArrayList(); //sanity check if ( mCallback == null ){ throw new RuntimeException( "Callback not initialized" ); } //initialize the parser if required if ( mParseKickstart == null ){ mParseKickstart = new ParseKickstart( ); } // get access to the invocation parser if ( mInvocationParser == null ) { ChimeraProperties props = ChimeraProperties.instance(); String psl = props.getPTCSchemaLocation(); mLogger.log( "Using XML schema location " + psl, LogManager.DEBUG_MESSAGE_LEVEL ); mInvocationParser = new InvocationParser( psl ); } //do some sanity checks for the file. //extract to memory File f = new java.io.File( file ); List extract = mParseKickstart.extractToMemory( f ); edu.isi.pegasus.planner.invocation.File invocationFile = null; // testme: for each record obtained, work on it for ( int j=1; j-1 < extract.size(); ++j ) { String temp = (String) extract.get( j-1 ); // test 5: try to parse XML InvocationRecord invocation = mInvocationParser.parse( new StringReader(temp) ); mCallback.cbInvocationStart( getJobName( f.getName() ), invocation.getResource() ); //get the data about the various jobs List jobs = invocation.getJobList(); //////////////////////////////////////////////////////////////////////////////////// //Map metadata = mCallback.cbMetadata(); Map metadata = new HashMap(); ArrayList exitcodes = new ArrayList(); ArrayList executables = new ArrayList(); ArrayList arguments = new ArrayList(); for ( Iterator it = jobs.iterator(); it.hasNext(); ){ Job job = (Job) it.next(); Status status = job.getStatus(); JobStatus js = status.getJobStatus(); int exitcode = ((JobStatusRegular) js).getExitCode(); exitcodes.add(new Integer(exitcode)); executables.add(((Regular)job.getExecutable().getFile()).getFilename()); arguments.add(job.getArguments().getValue()); } metadata.put("exitcodes", exitcodes); metadata.put("directory", invocation.getWorkingDirectory().getValue() ); metadata.put("executables", executables); metadata.put("arguments", arguments); metadata.put("version", invocation.getVersion()); metadata.put("start", invocation.getStart()); metadata.put("duration", invocation.getDuration()); metadata.put("transformation",invocation.getTransformation()); metadata.put("derivation", invocation.getDerivation()); metadata.put("resource", invocation.getResource()); metadata.put("hostaddr", invocation.getHostAddress()); metadata.put("hostname", invocation.getHostname()); metadata.put("pid", invocation.getPID()); metadata.put("uid", invocation.getUID()); metadata.put("user", invocation.getUser()); metadata.put("gid", 
invocation.getGID()); metadata.put("group", invocation.getGroup()); metadata.put("umask", invocation.getUMask()); metadata.put("resource", invocation.getResource()); mCallback.cbMetadata(metadata); ////////////////////////////////////////////////////////////////////////////////////// //callback for the data sections of various streams List stats = invocation.getStatList(); StringWriter writer = new StringWriter(); for ( Iterator it = stats.iterator(); it.hasNext(); ){ StatCall statC = ( StatCall )it.next(); String handle = statC.getHandle(); invocationFile = statC.getFile(); //call out appropriate callback functions with the data char c = handle.charAt( 0 ); Data data = statC.getData(); String value = ( data == null ) ? "" : data.getValue(); switch ( c ){ case 's': //stdout, //stderr,//stdin if ( handle.equals( "stdout" ) ){ mCallback.cbStdOut( jobs, value ); } else if( handle.equals( "stdin" ) ){ mCallback.cbStdIN( jobs, value ); } else if( handle.equals( "stderr" ) ){ mCallback.cbStdERR( jobs, value ); } break; case 'i'://initial if ( handle.equals( "initial" ) ){ if( invocationFile instanceof Regular ){ //we are interested in Regular files only mCallback.cbInputFile( ((Regular)invocationFile).getFilename() , statC.getStatInfo() ); } } break; case 'f'://final if ( handle.equals( "final" ) ){ if( invocationFile instanceof Regular ){ //we are interested in Regular files only mCallback.cbOutputFile( ((Regular)invocationFile).getFilename() , statC.getStatInfo() ); } } default: break; } } //callback with the machine information if present if( invocation.getMachine() != null ){ mCallback.cbMachine( invocation.getMachine() ); } //successfully done with an invocation record mCallback.cbInvocationEnd(); } return result; } /** * Returns the name of the job from the kickstart output filename. * * @param outName the name of the out file. * * @return the job name. */ protected String getJobName( String outName ){ return outName.substring( 0, outName.indexOf( '.' )); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/nodeusage/0000755000175000017500000000000011757531667025267 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/nodeusage/NodeUsageCallback.java0000644000175000017500000002635211757531137031421 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.visualize.nodeusage; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Currently; import edu.isi.pegasus.planner.visualize.Callback; import edu.isi.pegasus.planner.visualize.JobMeasurements; import edu.isi.pegasus.planner.visualize.WorkflowMeasurements; import edu.isi.pegasus.planner.invocation.StatInfo; import java.util.List; import java.util.Iterator; import java.util.StringTokenizer; import java.util.Map; import java.util.HashMap; import java.util.Date; import java.io.File; import java.io.FileReader; import java.io.BufferedReader; import java.io.IOException; import edu.isi.pegasus.planner.invocation.Machine; /** * Implements callback interface to calculate node usage or number of jobs * over time. * * @author Karan Vahi * @version $Revision: 2587 $ */ public class NodeUsageCallback implements Callback { /** * The name of the tailstatd file. */ public static final String JOBSTATE_LOG = "jobstate.log"; /** * The state in the jobstate that is taken to designate the GRIDSTART_PREJOB * time. */ public static final String GRIDSTART_PREJOB_STATE = "EXECUTE"; /** * The state in the jobstate that is taken to designate the GRIDSTART_MAINJOB * time. */ public static final String GRIDSTART_MAINJOB_STATE = "EXECUTE"; /** * The state in the jobstate that is taken to designate the GRIDSTART_POSTJOB * time. */ public static final String GRIDSTART_POSTJOB_STATE = "JOB_TERMINATED"; /** * The logical site where the job was run. */ protected String mSite; /** * The WorkflowMeasurements object created during the callback construction. */ protected WorkflowMeasurements mWFMeasurements; /** * The main job whose record is being parsed. */ protected String mMainJob; /** * The handle to the logger. */ protected LogManager mLogger; /** * Stores all the space readings for the current invocation record. */ protected JobMeasurements mJobMeasurements; /** * A Map store that stores JobMeasurements objects indexed by the name of the jobs. */ private Map mJMStore; /** * The directory where all the files reside. */ private String mDirectory ; /** * The number of jobs executing at any given time per site. */ private Map mNumJobsStore; /** * The default constructor. */ public NodeUsageCallback() { mWFMeasurements = new WorkflowMeasurements(); mJMStore = new HashMap(); mNumJobsStore = new HashMap(); mLogger = LogManagerFactory.loadSingletonInstance(); } /** * Initializes the callback. * * @param directory the directory where all the files reside. * @param useStatInfo boolean indicating whether to use stat info or not. 
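* * <p>A minimal sketch (the run directory is hypothetical; it must contain a jobstate.log file): <pre> NodeUsageCallback callback = new NodeUsageCallback(); callback.initialize( "/path/to/run/directory", false ); </pre>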
*/ public void initialize( String directory , boolean useStatInfo){ mDirectory = directory; File jobstate = new File( directory, this.JOBSTATE_LOG ); //some sanity checks on file if ( jobstate.exists() ){ if ( !jobstate.canRead() ){ throw new RuntimeException( "Unable to read the jobstate file " + jobstate ); } } else{ throw new RuntimeException( "The jobstate file does not exist " + jobstate ); } BufferedReader reader ; try{ reader = new BufferedReader( new FileReader( jobstate ) ); String line, time = null, job = null, state = null, token; int count = 0; StringTokenizer st; while ( (line = reader.readLine()) != null) { String site = null; //parse the line contents st = new StringTokenizer( line ); count = 1; while( st.hasMoreTokens() ){ token = ( String )st.nextToken(); switch ( count ){ case 1: time = token; break; case 2: job = token; break; case 3: state = token; break; case 5: site = token; break; default: break; } count ++; } if ( !validState( state ) ){ //ignore and move to next line continue; } JobMeasurements js = ( mJMStore.containsKey( job ) )? (JobMeasurements)mJMStore.get( job ): new JobMeasurements( job ); Date d = new Date( Long.parseLong( time ) * 1000 ); //add date for that particular event js.setTime( d, this.getEventType( state ) ); if( state.equals( this.GRIDSTART_MAINJOB_STATE ) ){ //increment value int num = this.getCurrentNumOfJobs( site ); mNumJobsStore.put( site, new Integer( ++num ) ); mWFMeasurements.addMeasurement( site, new NumJobsMeasurement(d, new Integer(num), job ) ); } else if( state.equals( this.GRIDSTART_POSTJOB_STATE ) ){ //decrement value int num = this.getCurrentNumOfJobs( site ); mNumJobsStore.put( site, new Integer( --num ) ); mWFMeasurements.addMeasurement( site, new NumJobsMeasurement(d, new Integer(num), job ) ); } // Space s = new Space( d ); // s.setAssociatedJob( job ); // js.addSpaceReading( s, this.getEventType( state )); //specific quirk because i am using same trigger for pre job and main job // if( state.equals( this.GRIDSTART_PREJOB_STATE ) ){ // //add the same event reading for the main job // js.addSpaceReading( (Space)s.clone(), JobMeasurements.GRIDSTART_MAINJOB_EVENT_TYPE ); // } //add the js back mJMStore.put( job, js ); } } catch( IOException ioe ){ throw new RuntimeException( "While reading jobstate file " + jobstate, ioe ); } //System.out.println( "Job space store is " + mJMStore ); } public void cbInvocationStart( String job, String resource) { mMainJob = job; mSite = resource; mJobMeasurements = new JobMeasurements( job ); } public void cbStdIN(List jobs, String data) { } public void cbStdOut(List jobs, String data) { } public void cbStdERR(List jobs, String data) { } /** * Returns the number of jobs that are executing for a particular site. * * @param site the name of the site. * * @return number of jobs */ private int getCurrentNumOfJobs( String site ){ if( site == null ){ throw new RuntimeException( "Null site specified");} int value = 0; if( mNumJobsStore.containsKey( site) ){ value = ((Integer)mNumJobsStore.get( site )).intValue(); } else{ mNumJobsStore.put( site, new Integer(value) ); } return value; } /** * Callback function for when stat information for an input file is * encountered. Empty for time being. * * @param filename the name of the file. * @param info the StatInfo about the file. * */ public void cbInputFile( String filename, StatInfo info ){ //do nothing } /** * Callback function for when stat information for an output file is * encountered. Empty for time being. 
* * @param filename the name of the file. * @param info the StatInfo about the file. * */ public void cbOutputFile( String filename, StatInfo info ){ //do nothing } /** * Callback signalling that an invocation record has been parsed. * Stores the total compute size, somewhere in the space structure * for the jobs. * * */ public void cbInvocationEnd() { } /** * Returns the SpaceUsage store built. * * @return SpaceUsage */ public Object getConstructedObject() { return mWFMeasurements; } /** * Callback signalling that we are done with the parsing of the files. */ public void done(){ mWFMeasurements.sort(); } /** * Returns a boolean indicating whether the state is valid or not. * * @param state the state * * @return boolean */ protected boolean validState( String state ){ return ( state.equals( this.GRIDSTART_MAINJOB_STATE ) || state.equals( this.GRIDSTART_POSTJOB_STATE ) || state.equals( this.GRIDSTART_PREJOB_STATE ) ); } /** * Returns the event type matching a particular job type * * @param state the state of the job * * @return the corresponding event type */ private int getEventType( String state ){ int event = -1; if ( state.equals( this.GRIDSTART_PREJOB_STATE ) ){ event = JobMeasurements.GRIDSTART_PREJOB_EVENT_TYPE; } else if ( state.equals( this.GRIDSTART_MAINJOB_STATE ) ){ event = JobMeasurements.GRIDSTART_MAINJOB_EVENT_TYPE; } else if ( state.equals( this.GRIDSTART_POSTJOB_STATE ) ){ event = JobMeasurements.GRIDSTART_POSTJOB_EVENT_TYPE; } return event; } /** * Returns boolean indicating whether the job is a cleanup job or not. * Does it on the basis of the name of the job. * * @param name the name of the job. * * @return boolean */ public boolean cleanupJob( String name ){ return name.startsWith( "clean_up_" ); } /** * Callback for the metadata retrieved from the kickstart record. * * @param metadata */ public void cbMetadata( Map metadata ){ } /** * Callback to pass the machine information on which the job is executed. * * @param machine */ public void cbMachine( Machine machine ){ } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/nodeusage/NumJobsMeasurement.java0000644000175000017500000000565111757531137031714 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize.nodeusage; import edu.isi.pegasus.planner.visualize.Measurement; import java.util.Date; /** * Stores the number of jobs as an integer. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2563 $ */ public class NumJobsMeasurement implements Measurement { /** * Holds the timestamp when usage was taken. */ private Date mDate; /** * The number of jobs. */ private Integer mNum; /** * The jobname for which the reading was taken. */ private String mAssociatedJob; /** * The overloaded constructor. * * @param d the date. * @param num the number * @param name the jobname. 
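* * <p>For example (values purely illustrative): <pre> Measurement m = new NumJobsMeasurement( new Date(), new Integer(5), "ID000003" ); </pre>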
*/ public NumJobsMeasurement( Date d, Integer num , String name){ mDate = d; mNum = num; mAssociatedJob = name; } /** * Returns the job for which the measurement was taken. * * @return the name of the job. */ public String getJobName(){ return this.mAssociatedJob; } /** * Returns the time at which the measurement was taken. * * @return the Date object representing the time. */ public Date getTime(){ return mDate; } /** * Returns the value of the measurement. * * @return the value. */ public Object getValue(){ return mNum; } /** * Sets the job for which the measurement was taken. * * @param name sets the name of the job. */ public void setJobName( String name ){ this.mAssociatedJob = name; } /** * Sets the time at which the measurement was taken. * * @param time the Date object representing the time. */ public void setTime( Date time ){ this.mDate = time; } /** * Sets the value of the measurement. * * @param value the value to be associated with measurement. */ public void setValue( Object value ){ this.mNum = (Integer)value; } /** * Returns textual description. * * @return String */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( mDate ).append( " " ).append( getValue() ); sb.append( " " ).append( getJobName() ); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/nodeusage/Ploticus.java0000644000175000017500000002744211757531137027735 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize.nodeusage; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.visualize.WorkflowMeasurements; import edu.isi.pegasus.planner.visualize.Measurement; import java.util.List; import java.util.Iterator; import java.io.PrintWriter; import java.io.FileWriter; import java.io.File; import java.io.IOException; import java.util.List; import java.util.ArrayList; import java.text.NumberFormat; import java.text.DecimalFormat; /** * An implementation that plots in the Ploticus format. * * @author Karan Vahi * @version $Revision: 2563 $ */ public class Ploticus { /** * The default timing units. */ public static final String DEFAULT_TIMING_UNITS = "seconds"; /** * The minutes unit for the x axis. */ public static final String MINUTES_TIMING_UNITS = "minutes"; /** * The minutes unit for the x axis. */ public static final String HOURS_TIMING_UNITS = "hours"; /** * The directory where the files are to be generated. * */ private String mDirectory; /** * The basename of the files. */ private String mBasename; /** * A boolean indicating whether to use stat info or not. */ private boolean mUseStatInfo; /** * The handle to the logging object. */ private LogManager mLogger; /** * The number formatter to format the float entries. */ private NumberFormat mNumFormatter; /** * The time units. */ private String mTimeUnits; /** * The default constructor. 
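* Instances are expected to be configured via {@link #initialize(String,String,boolean)} before {@link #plot(WorkflowMeasurements,char,String)} is invoked.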
* */ public Ploticus(){ mLogger = LogManagerFactory.loadSingletonInstance(); mDirectory = "."; mBasename = "ploticus" ; mNumFormatter = new DecimalFormat( "0.000" ); mTimeUnits = this.DEFAULT_TIMING_UNITS; } /** * Initializer method. * * @param directory the directory where the plots need to be generated. * @param basename the basename for the files that are generated. * @param useStatInfo boolean indicating whether to use stat info or not. */ public void initialize( String directory , String basename, boolean useStatInfo ){ mDirectory = directory; mBasename = basename; mUseStatInfo = useStatInfo; } /** * Plot out the space usage. Writes out a Ploticus data file. * * @param wm the workflow measurements. * @param unit the unit in which we need to plot the number. * @param timeUnits units in which to plot time. * * @return List of file pathnames for the files that are written out. * * @exception IOException in case of unable to write to the file. */ public List plot( WorkflowMeasurements wm, char unit , String timeUnits ) throws IOException{ //first let us sort on the timestamps wm.sort(); String site; List result = new ArrayList( 2 ); //sanity check on time units mTimeUnits = ( timeUnits == null )? this.DEFAULT_TIMING_UNITS : timeUnits; //go thru space usage for each site. for( Iterator it = wm.siteIterator(); it.hasNext(); ){ site = ( String ) it.next(); String dataFile = getFilename( site, "_nu.dat" ); String scriptFile = getFilename( site, "_nu.pl" ); result.add( dataFile ); result.add( scriptFile ); PrintWriter dataPW = new PrintWriter( new FileWriter( dataFile ) ); mLogger.log( "Will write out to " + dataFile + "," + scriptFile, LogManager.DEBUG_MESSAGE_LEVEL ); long min = 0; boolean first = true; long absTime, time = 0; //in seconds long currJobs = 0; long maxJobs = 0; float cTime = 0; //go through space usage for a particular site for ( Iterator sizeIT = wm.getMeasurements( site ).iterator(); sizeIT.hasNext(); ){ Measurement m = ( Measurement ) sizeIT.next(); absTime = m.getTime().getTime(); currJobs = ((Integer)m.getValue()).intValue(); if ( first ) { min = absTime; first = false; } //calculate the relative time in seconds time = ( absTime - min ) / 1000; //convert time from seconds to units specified cTime = convertFromSecondsTo( time, timeUnits ); //update the max space if ( currJobs > maxJobs ){ maxJobs = currJobs; } //log the entry in the data file. String entry = constructEntry( m.getJobName(), cTime, currJobs); mLogger.log( entry, LogManager.DEBUG_MESSAGE_LEVEL ); dataPW.println( entry ); } //the value in time right now it the max time generateScriptFile( scriptFile, dataFile, new Character(unit).toString(), cTime, maxJobs ); //close and flush to file per site dataPW.close(); } return result; } /** * Generates the script file required to give as input to ploticus. * * @param name the path to the script file. * @param dataFile the path to corresponding data file. * @param yUnits the units for the space value. * @param maxX the time in seconds. * @param maxY the maximum space. 
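* * @exception IOException in case of unable to write to the file.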
*/ public void generateScriptFile( String name, String dataFile, String yUnits, float maxX, float maxY ) throws IOException{ PrintWriter writer = new PrintWriter( new FileWriter( name ) ); //write the page proc writer.println( "#proc page" ); writer.println( "#if @DEVICE in png,gif" ); writer.println( "\t scale: 0.6" ); writer.println( "#endif" ); writer.println(); //write the getdata proc writer.println( "#proc getdata" ); writer.print( "file: "); writer.println( new File(dataFile).getName() ); writer.println( " fieldnames: time number_of_jobs" ); writer.println(); //write out area defn writer.println( "#proc areadef" ); writer.println( "title: Number of jobs running over time" ); writer.println( "titledetails: size=14 align=C" ); writer.println( "rectangle: 1 1 8 4" ); /* we let ploticus worry about ranges */ // writer.print( "xrange: 0 " ); // //round to the latest 100 // long modTime = ( maxTime/100 + 1 )* 100 ; // //round space to latest 100 if > 0 // float modSpace = maxSpace > 1 ? // (new Float(maxSpace/100).intValue() + 1)* 100: // maxSpace; // writer.println( modTime ); // writer.print( "yrange: 0 " ); // writer.println( modSpace ); // writer.println(); writer.println( "xautorange datafield=1" ); writer.println( "yautorange datafield=2 lowfix=0" );//y axis always starts from 0 //round to the latest 100 float modTime = ( maxX/100 + 1 )* 100 ; //round space to latest 100 if > 0 float modSpace = maxY > 1 ? (new Float(maxY/10).intValue() + 1)* 10: maxY; //we want 15-16 points on the x axis float xIncrement = ( (modTime/150) + 1 ) * 10; writer.println( "#proc xaxis" ); writer.print( "stubs: inc " ); writer.println( xIncrement ); writer.print( "minorticinc: " ); writer.println( xIncrement/2 ); writer.print( "label: time in " ); writer.println( mTimeUnits ); writer.println(); //we want 10 points on the y axis float yIncrement = modSpace/10; writer.println( "#proc yaxis" ); writer.print( "stubs: inc " ); writer.println( yIncrement ); writer.print( "minorticinc: " ); writer.println( yIncrement/2 ); writer.println( "gridskip: min" ); //writer.println( "ticincrement: 100 1000" ); writer.println( "label: number of jobs running " ); writer.println(); writer.println( "#proc lineplot" ); writer.println( "xfield: time" ); writer.println( "yfield: number_of_jobs" ); writer.println( "linedetails: color=blue width=.5" ); writer.println(); writer.println( "#proc legend" ); writer.println( "location: max-1 max" ); writer.println( "seglen: 0.2" ); writer.close(); } /** * Returns the filename of the ploticus file to be generated. * * @param site the site handle. * @param suffix the suffix to be applied to the file. * * @return the path to the file. */ protected String getFilename( String site, String suffix ){ StringBuffer sb = new StringBuffer(); sb.append( mDirectory ).append( File.separator ).append( mBasename ). append( "-" ).append( site ).append( suffix ); return sb.toString(); } /** * Returns an entry that needs to be plotted in the graph. * * @param job the name of the associated job. * @param time the time * @param measurement measurement * * @return the entry to be logged */ protected String constructEntry( String job, float time, long measurement ){ StringBuffer sb = new StringBuffer(); sb.append( mNumFormatter.format( time ) ).append( "\t" ) .append( measurement ).append( "\t" ) .append( job ); return sb.toString(); } /** * Converts from seconds to one of the units specified. * * @param time the time. * @param units the units * * @return converted value in long. 
*/ private float convertFromSecondsTo( long time, String units ){ if( !validTimeUnits( units) ){ throw new RuntimeException( "Unsupported time units " + units); } //compare the unit strings by value, not by reference if( units.equals( this.DEFAULT_TIMING_UNITS ) ){ return time; } float result; float factor = ( units.equals( this.MINUTES_TIMING_UNITS ) ) ? 60 : (units.equals( HOURS_TIMING_UNITS ) )? 3600: -1; //the whole units plus the fractional remainder result = ( time/(int)factor + (time % factor)/factor ); return result; } /** * Returns a boolean indicating whether a time unit is valid or not. * * @param units the time unit. * * @return boolean */ private boolean validTimeUnits( String units ){ return ( units.equals( this.DEFAULT_TIMING_UNITS) || units.equals( this.MINUTES_TIMING_UNITS ) || units.equals( this.HOURS_TIMING_UNITS ) ) ; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/visualize/Callback.java0000644000175000017500000001020511757531137025642 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.visualize; import edu.isi.pegasus.planner.invocation.Data; import edu.isi.pegasus.planner.invocation.Job; import edu.isi.pegasus.planner.invocation.Machine; import edu.isi.pegasus.planner.invocation.StatInfo; import java.util.List; import java.util.Map; /** * This callback interface has methods to handle the data sections * for stdout, stderr and stdin. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2587 $ */ public interface Callback { /** * The version of the API. */ public static final String VERSION ="1.5"; /** * Initializes the callback. * * @param directory the directory where all the files reside. * @param useStatInfo boolean indicating whether to use stat info or not. */ public void initialize( String directory, boolean useStatInfo ); /** * Callback for the starting of an invocation record. * * @param job the job/file being parsed. * @param resource the site id where the job was executed. */ public void cbInvocationStart( String job, String resource ); /** * Callback function for the data section of stdin. Since the jobs * ( setup, prejob, main, postjob, cleanup) * do not have separate stdout etc, all are passed. * * @param jobs all the jobs specified in the kickstart record. * @param data the data contents as String. * */ public void cbStdIN( List jobs, String data ); /** * Callback function for the data section of stdout. Since the jobs * ( setup, prejob, main, postjob, cleanup) * do not have separate stdout etc, all are passed. * * @param jobs all the jobs specified in the kickstart record. * @param data the data contents as String. * */ public void cbStdOut( List jobs, String data ); /** * Callback function for the data section of stderr. Since the jobs * ( setup, prejob, main, postjob, cleanup) * do not have separate stdout etc, all are passed. * * @param jobs all the jobs specified in the kickstart record. * @param data the data contents as String.
* */ public void cbStdERR( List jobs, String data ); /** * Callback function for when stat information for an input file is * encountered * * @param filename the name of the file. * @param info the StatInfo about the file. * */ public void cbInputFile( String filename, StatInfo info ); /** * Callback function for when stat information for an output file is * encountered * * @param filename the name of the file. * @param info the StatInfo about the file. * */ public void cbOutputFile( String filename, StatInfo info ); /** * Callback signalling that an invocation record has been parsed. * */ public void cbInvocationEnd( ); /** * Callback signalling that we are done with the parsing of the files. */ public void done(); /** * Returns the object constructed. * * @return the Object constructed. */ public Object getConstructedObject(); /** * Callback for the metadata retrieved from the kickstart record. * * @param metadata */ public void cbMetadata( Map metadata ); /** * Callback to pass the machine information on which the job is executed. * * @param machine */ public void cbMachine( Machine machine ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/0000755000175000017500000000000011757531667023236 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/Dagman.java0000644000175000017500000005326711757531137025275 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.common.PegasusProperties; import java.util.Iterator; import java.util.Map; import java.util.TreeMap; /** * This profile namespace is the placeholder for the keys that go into the .dag * file . Keys like RETRY that trigger retries in dagman in the event of a job * failing would go in here. * All the keys stored in it are in UPPERCASE irrespective of the case specified * by the user in the various catalogs. To specify a post script or a pre script * use POST and PRE keys. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4061 $ */ public class Dagman extends Namespace { /** * The name of the namespace that this class implements. */ public static final String NAMESPACE_NAME = Profile.DAGMAN; /** * The name of the key that determines what post script is to be invoked * when the job completes. */ public static final String POST_SCRIPT_KEY = "POST"; /** * The name of the key that determines the arguments that need to be passed * to the postscript. 
* */ public static final String POST_SCRIPT_ARGUMENTS_KEY = "POST.ARGUMENTS"; /** * The key prefix that determines the path to a postscript */ public static final String POST_SCRIPT_PATH_PREFIX = "POST.PATH"; /** * The key that determines the scope of a postscript */ public static final String POST_SCRIPT_SCOPE_KEY = "POST.SCOPE"; /** * The default value for the arguments passed to postscript */ public static final String DEFAULT_POST_SCRIPT_ARGUMENTS_KEY_VALUE = ""; /** * The name of the key that determines what pre script is to be invoked * when the job is run. */ public static final String PRE_SCRIPT_KEY = "PRE"; /** * The name of the key that determines the arguments that need to be passed * to the prescript. */ public static final String PRE_SCRIPT_ARGUMENTS_KEY = "PRE.ARGUMENTS"; /** * The name of the key that determines the file on the submit host on * which the postscript is to be invoked. */ public static final String OUTPUT_KEY = "OUTPUT"; /** * The default value for the arguments passed to prescript */ public static final String DEFAULT_PRE_SCRIPT_ARGUMENTS_KEY_VALUE = ""; /** * The name of the key that determines how many times DAGMan should retry * the job. */ public static final String RETRY_KEY = "RETRY"; /** * The default value for the job retries */ public static final String DEFAULT_RETRY_VALUE = "3"; /** * The name of the key that determines the category to which the job * belongs. */ public static final String CATEGORY_KEY = "CATEGORY"; /** * The name of the key that determines the priority a job is assigned. */ public static final String PRIORITY_KEY = "PRIORITY"; /** * The name of the key that indicates the path to the corresponding * submit file for the job. */ public static final String JOB_KEY = "JOB"; /** * The name of the key that indicates the path to the external subdag */ public static final String SUBDAG_EXTERNAL_KEY = "SUBDAG EXTERNAL"; /** * The name of the key that indicates the directory in which the * DAG has to be executed */ public static final String DIRECTORY_EXTERNAL_KEY = "DIR"; /** * The key name for the post script that is put in the .dag file. */ private static final String POST_SCRIPT_REPLACEMENT_KEY = "SCRIPT POST"; /** * The key name for the pre script that is put in the .dag file. */ private static final String PRE_SCRIPT_REPLACEMENT_KEY = "SCRIPT PRE"; /** * The prefix for the max keys */ public static final String MAX_KEYS_PREFIX = "MAX"; /** * The key name for max pre setting for dagman */ public static final String MAXPRE_KEY = "MAXPRE"; /** * The key name for max post setting for dagman */ public static final String MAXPOST_KEY = "MAXPOST"; /** * The key name for max idle setting for dagman */ public static final String MAXIDLE_KEY = "MAXIDLE"; /** * The key name for max jobs setting for dagman */ public static final String MAXJOBS_KEY = "MAXJOBS"; /** * Determines whether a key is category related or not. * * @param key the key in question * * @return boolean indicating whether the key is category related. */ public static boolean categoryRelatedKey(String key) { boolean result = true; int dotIndex = -1; if( (dotIndex = key.indexOf( "." )) != -1 && dotIndex != key.length() - 1 ){ //the key has a . in it if( key.equals( Dagman.POST_SCRIPT_ARGUMENTS_KEY) || key.equals( Dagman.POST_SCRIPT_SCOPE_KEY) || key.equals( Dagman.PRE_SCRIPT_ARGUMENTS_KEY ) || key.startsWith( Dagman.POST_SCRIPT_PATH_PREFIX) ){ //these are not category related keys return !result; } } else{ return !result; } return result; } /** * The name of the job (jobname) to which the profiles for this namespace * belong.
* * @see org.griphyn.cPlanner.classes.SubInfo#jobName */ private String mJobName; /** * The name of the implementing namespace. It should be one of the valid * namespaces always. * * @see Namespace#isNamespaceValid(String) */ protected String mNamespace; /** * The default constructor. * We always initialize the map, as the map is guaranteed to store at least * the postscript value for a job. */ public Dagman() { mProfileMap = new TreeMap(); mNamespace = NAMESPACE_NAME; mJobName = null; } /** * The overloaded constructor. * * @param mp the initial map containing the profile keys for this namespace. */ public Dagman(Map mp) { this(); mProfileMap = new TreeMap(mp); } /** * The overloaded constructor. * * @param mp the initial map containing the profile keys for this namespace. * @param name name of the job with which these profile keys are associated. */ public Dagman(Map mp, String name) { this( mp ); mJobName = name; } /** * Returns the name of the namespace associated with the profile implementations. * * @return the namespace name. * @see #NAMESPACE_NAME */ public String namespaceName(){ return mNamespace; } /** * It sets the name of the job that is associated with the profiles contained * in this placeholder. * * @param name name of the job with which these profile keys are associated. */ public void setJobName(String name){ mJobName = name; } /** * Constructs a new element of the format (key=value). * The underlying map is allocated memory in the constructors always. * All the keys are converted to UPPER CASE before storing. * * @param key is the left-hand-side * @param value is the right hand side */ public void construct(String key, String value) { //convert the key to uppercase before storing mProfileMap.put(key.toUpperCase(), value); } /** * This checks whether a key value pair specified is valid in the current * namespace by calling the checkKey function, and on the basis of * the value returned puts the pair into the associated map in the class. * * @param key key that needs to be checked in the namespace for validity. * @param value value of the key * */ public void checkKeyInNS(String key, String value){ //convert key to upper case key = key.toUpperCase(); //special handling for category related keys if( categoryRelatedKey( key ) ){ //category related key is ignored mLogger.log( "Dagman category related key cannot be associated with job " + key, LogManager.DEBUG_MESSAGE_LEVEL ); return; } int rslVal = checkKey(key,value); switch (rslVal){ case Namespace.MALFORMED_KEY: //key is malformed ignore malformedKey(key,value); break; case Namespace.NOT_PERMITTED_KEY: notPermitted(key); break; case Namespace.UNKNOWN_KEY: unknownKey(key, value); break; case Namespace.VALID_KEY: construct(key, value); break; case Namespace.DEPRECATED_KEY: deprecatedKey(key,value); break; case Namespace.EMPTY_KEY: emptyKey( key ); break; } } /** * This checks whether the key passed by the user is valid in the current * namespace or not. All keys are assumed valid currently.
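     * As an illustrative sketch: RETRY, PRIORITY or MAXJOBS would be
     * classified as valid, while an arbitrary key such as FOO (a
     * hypothetical name) falls through to NOT_PERMITTED_KEY.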
* * @param key (left hand side) * @param value (right hand side) * * @return Namespace.VALID_KEY, Namespace.NOT_PERMITTED_KEY or Namespace.MALFORMED_KEY * */ public int checkKey(String key, String value) { //some keys ( POST, PRE ) are written out to the .dag file as //SCRIPT POST and SCRIPT PRE; that replacement is handled at //write time in replacementKey() int res = 0; //a null or empty key, or a null value, cannot be classified further if (key == null || key.length() == 0 || value == null) { return MALFORMED_KEY; } switch (key.charAt(0)) { case 'C': if ( key.compareTo( Dagman.CATEGORY_KEY ) == 0 ){ res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; case 'D': if( key.compareTo( Dagman.DIRECTORY_EXTERNAL_KEY) == 0 ){ res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; case 'J': if (key.compareTo(Dagman.JOB_KEY) == 0) { res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; case 'M': if( key.startsWith( MAX_KEYS_PREFIX ) ){ res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; case 'O': if (key.compareTo(Dagman.OUTPUT_KEY) == 0) { res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; case 'P': if ( ( key.compareTo(Dagman.POST_SCRIPT_KEY) == 0) || ( key.compareTo(Dagman.POST_SCRIPT_ARGUMENTS_KEY) == 0)|| ( key.compareTo(Dagman.PRE_SCRIPT_KEY) == 0) || ( key.compareTo(Dagman.PRE_SCRIPT_ARGUMENTS_KEY) == 0 ) || ( key.compareTo(Dagman.POST_SCRIPT_SCOPE_KEY) == 0 ) || ( key.compareTo( Dagman.PRIORITY_KEY) == 0 ) || ( key.startsWith( Dagman.POST_SCRIPT_PATH_PREFIX ) ) ) { res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; case 'R': if (key.compareTo(Dagman.RETRY_KEY) == 0) { res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; case 'S': if (key.compareTo( Dagman.SUBDAG_EXTERNAL_KEY ) == 0) { res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; default: res = NOT_PERMITTED_KEY; } return res; } /** * Returns the path to the postscript of a particular type. * * @param type the type of postscript. * * @return the path */ public String getPOSTScriptPath( String type ){ StringBuffer property = new StringBuffer(); property.append( Dagman.POST_SCRIPT_PATH_PREFIX ). append( "." ).append( type.toUpperCase() ); return (String) this.get( property.toString() ); } /** * It puts in the namespace specific information specified in the properties * file into the namespace. The profile information is populated only if the * corresponding key does not exist in the object already. * * @param properties the PegasusProperties object containing * all the properties that the user specified at various * places (like .chimerarc, properties file, command line). * @param pool the pool name where the job is scheduled to run. */ public void checkKeyInNS(PegasusProperties properties, String pool){ //retrieve the relevant profiles from properties //and merge them into the existing.
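        //for illustration (exact property names are an assumption and are
        //not verified here): a dagman profile specified in the properties
        //file, such as a retry count, is picked up by the assimilate call
        //below; since profiles from properties have the lowest priority,
        //it only takes effect if the job does not already carry its own
        //RETRY profile.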
this.assimilate( properties , Profiles.NAMESPACES.dagman ); //check if the arguments for the //post script are specified or not //System.out.println( this.mProfileMap ); if(!this.containsKey(Dagman.POST_SCRIPT_ARGUMENTS_KEY)){ //push in the default arguments for the post script this.checkKeyInNS( Dagman.POST_SCRIPT_ARGUMENTS_KEY, Dagman.DEFAULT_POST_SCRIPT_ARGUMENTS_KEY_VALUE ); } //check if the arguments for the //pre script are specified or not if(!this.containsKey(Dagman.PRE_SCRIPT_ARGUMENTS_KEY)){ //push in the default arguments for the pre script this.checkKeyInNS( Dagman.PRE_SCRIPT_ARGUMENTS_KEY, Dagman.DEFAULT_PRE_SCRIPT_ARGUMENTS_KEY_VALUE ); } //what type of postscript needs to be invoked for the job /* if( !this.containsKey( this.POST_SCRIPT_KEY ) ){ //get one from the properties String ps = properties.getPOSTScript(); if( ps != null ){ checkKeyInNS( this.POST_SCRIPT_KEY, properties.getPOSTScript() ); } } */ } /** * Assimilate the profiles in the namespace in a controlled manner. * During assimilation all category related keys are ignored. * * @param properties the PegasusProperties containing the profiles specified * in the properties file. * @param namespace the namespace for which the profiles need to be assimilated. */ public void assimilate( PegasusProperties properties, Profiles.NAMESPACES namespace ){ Namespace profiles = properties.getProfiles( namespace ) ; for ( Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ){ String key = (String)it.next(); //profiles assimilated from properties have lowest priority if( !this.containsKey(key) ){ this.checkKeyInNS( key, (String)profiles.get( key ) ); } } //profiles in properties have lowest priority //we associate default retry only if user did //not specify in properties if( !this.containsKey( Dagman.RETRY_KEY ) ){ this.construct( RETRY_KEY, DEFAULT_RETRY_VALUE ); } } /** * Merge the profiles in the namespace in a controlled manner. * In case of intersection, the new profile value overrides the existing * profile value. * * @param profiles the Namespace object containing the profiles. */ public void merge( Namespace profiles ){ //check if we are merging profiles of same type if (!(profiles instanceof Dagman )){ //throw an error throw new IllegalArgumentException( "Profiles mismatch while merging" ); } String key; for ( Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ){ //construct directly. bypassing the checks! key = (String)it.next(); this.construct( key, (String)profiles.get( key )); } } /** * Converts the contents of the map into the string that can be put in the * Condor file for printing. * * @return the textual description. */ public String toCondor() { return toString(mJobName); } /** * Converts the contents of the map into the string that can be put in the * Condor file for printing. * * @param name the name of the condor job that contains these variables. * * @return the textual description. */ public String toString(String name) { StringBuffer sb = new StringBuffer(); if(mProfileMap == null){ //no profile keys were stored in here return sb.toString(); } String key = null; for(Iterator it = mProfileMap.keySet().iterator();it.hasNext();){ key = (String) it.next(); //continue to next if the key has to be ignored. if( ignore(key) ){ continue;} append( sb, replacementKey( key ), name, replacementValue( key ) ); // sb.append( replacementKey(key) ).append(" ").append(name). // append(" "). // /*append((String)mProfileMap.get(key))*/ // append( replacementValue(key)).
// append("\n"); } //add the category key in the end if required if( this.containsKey( Dagman.CATEGORY_KEY ) ){ append( sb, replacementKey( Dagman.CATEGORY_KEY ), name, replacementValue( Dagman.CATEGORY_KEY ) ); } return sb.toString(); } protected StringBuffer append ( StringBuffer sb, String key, String name, String value ){ return sb.append( key ).append(" ").append( name ). append(" ").append( value). append("\n"); } /** * Helper method to decide whether a key has to be ignored or not. * * @param key the key * * @return boolean */ private boolean ignore(String key){ return key.equals( Dagman.POST_SCRIPT_ARGUMENTS_KEY ) || key.equals( Dagman.PRE_SCRIPT_ARGUMENTS_KEY) || key.equals( Dagman.OUTPUT_KEY ) || key.equals( Dagman.CATEGORY_KEY ) || key.equals ( Dagman.POST_SCRIPT_SCOPE_KEY ) || key.startsWith( Dagman.POST_SCRIPT_PATH_PREFIX ) || key.startsWith( Dagman.MAX_KEYS_PREFIX ); } /** * Returns the replacement key that needs to be printed in .dag file in * lieu of the key. * * @param key the key * * @return the replacement key. */ private String replacementKey(String key){ String replacement = key; if(key.equalsIgnoreCase(Dagman.POST_SCRIPT_KEY)){ replacement = Dagman.POST_SCRIPT_REPLACEMENT_KEY; } else if(key.equalsIgnoreCase(Dagman.PRE_SCRIPT_KEY)){ replacement = Dagman.PRE_SCRIPT_REPLACEMENT_KEY; } return replacement; } /** * Returns the replacement value that needs to be printed in .dag file for * a key. This helps us tie the post script path to the arguments, and same * for prescript. * * @param key the key * * @return the replacement value */ private String replacementValue(String key){ StringBuffer value = new StringBuffer(); //append the value for the key value.append( (String)mProfileMap.get(key)); //for postscript and prescript in addition put in the arguments. if(key.equalsIgnoreCase(Dagman.POST_SCRIPT_KEY)){ //append the postscript arguments value.append(" ").append( (String)this.get( Dagman.POST_SCRIPT_ARGUMENTS_KEY) ); //append the output file value.append(" ").append( (String)this.get( Dagman.OUTPUT_KEY ) ); } else if(key.equalsIgnoreCase(Dagman.PRE_SCRIPT_KEY)){ //append the prescript arguments value.append(" "). append( (String)this.get( Dagman.PRE_SCRIPT_ARGUMENTS_KEY)); } return value.toString(); } /** * Returns a copy of the current namespace object. * * @return the Cloned object */ public Object clone() { Dagman ns = (mProfileMap == null) ? new Dagman():new Dagman(this.mProfileMap); ns.mJobName = (mJobName == null)? null : new String(this.mJobName); return ns; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/Condor.java0000644000175000017500000006575311757531137025335 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.namespace; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.common.PegasusProperties; import java.util.Iterator; import java.util.Map; import java.util.TreeMap; /** * This helper class helps in handling the arguments specified in the * Condor namespace by the user, either through the DAX or through profiles * in the pool configuration. * * @author Karan Vahi * @version $Revision: 5266 $ */ public class Condor extends Namespace{ /** * The name of the namespace that this class implements. */ public static final String NAMESPACE_NAME = Profile.CONDOR; /** * The name of the key that denotes the arguments of the job. */ public static final String ARGUMENTS_KEY = "arguments"; /** * The name of the key that denotes the executable of the job. */ public static final String EXECUTABLE_KEY = "executable"; /** * The name of the key that denotes the requirements of the job. */ public static final String REQUIREMENTS_KEY = "requirements"; /** * The name of the key that denotes the condor universe key. */ public static final String UNIVERSE_KEY = "universe"; /** * The name of the key that denotes the remote condor universe key. */ public static final String REMOTE_UNIVERSE_KEY = "remote_universe"; /** * The name of the key that denotes the File System Domain. Is actually * propagated to the expression for the Requirements Key. * * @see #REQUIREMENTS_KEY */ public static final String FILE_SYSTEM_DOMAIN_KEY = "filesystemdomain"; /** * The name of the key that specifies the grid job type. */ public static final String GRID_JOB_TYPE_KEY = "grid_type"; /** * The name of the key that specifies the jobmanager type. */ public static final String JOBMANAGER_TYPE_KEY = "jobmanager_type"; /** * The name of the key that designates that files should be transferred * via Condor File Transfer mechanism. */ public static final String SHOULD_TRANSFER_FILES_KEY = "should_transfer_files"; /** * The corresponding remote key name that designates that files should be * transferred via Condor File Transfer mechanism. */ public static final String REMOTE_SHOULD_TRANSFER_FILES_KEY = "+remote_ShouldTransferFiles"; /** * The name of the key that designates when to transfer output. */ public static final String WHEN_TO_TRANSFER_OUTPUT_KEY = "when_to_transfer_output"; /** * The corresponding name of the remote key that designates when to transfer output. */ public static final String REMOTE_WHEN_TO_TRANSFER_OUTPUT_KEY = "+remote_WhenToTransferOutput"; /** * The name of the key that specifies whether to stream stderr or not */ public static final String STREAM_STDERR_KEY = "stream_error"; /** * The name of the key that specifies whether to stream stdout or not */ public static final String STREAM_STDOUT_KEY = "stream_output"; /** * The name of the key that specifies transfer of input files. */ public static final String TRANSFER_IP_FILES_KEY = "transfer_input_files"; /** * The name of the key that specifies transfer of output files. */ public static final String TRANSFER_OP_FILES_KEY = "transfer_output_files"; /** * The name of the key that specifies transfer of the executable */ public static final String TRANSFER_EXECUTABLE_KEY = "transfer_executable"; /** * The name of the key that specifies the priority for the job.
*/ public static final String PRIORITY_KEY = "priority"; /** * The name of the key that specifies the periodic release */ public static final String PERIODIC_RELEASE_KEY = "periodic_release"; /** * The name of the key that specifies the periodic remove */ public static final String PERIODIC_REMOVE_KEY = "periodic_remove"; /** * The condor key for designating the grid_resource. */ public static final String GRID_RESOURCE_KEY = "grid_resource"; /** * The key that designates the collector associated with the job. */ public static final String COLLECTOR_KEY = "condor_collector"; /** * The key that overrides the default x509 proxy location. */ public static final String X509USERPROXY_KEY = "x509userproxy"; //new condor keys starting 7.8.0 /** * The Condor Key designating the number of cpus to request. */ public static final String REQUEST_CPUS_KEY = "request_cpus"; /** * The Condor Key designating the amount of memory to request. */ public static final String REQUEST_MEMORY_KEY = "request_memory"; /** * The Condor Key designating the amount of disk to request. */ public static final String REQUEST_DISK_KEY = "request_disk"; /** * The condor universe key value for vanilla universe. */ public static final String VANILLA_UNIVERSE = "vanilla"; /** * The condor universe key value for grid universe. */ public static final String GRID_UNIVERSE = "grid"; /** * The condor key for using the local environment */ public static final String GET_ENV_KEY = "getenv"; /** * The condor universe key value for globus universe. */ public static final String GLOBUS_UNIVERSE = "globus"; /** * The condor universe key value for scheduler universe. */ public static final String SCHEDULER_UNIVERSE = "scheduler"; /** * The condor universe key value for standard universe. */ public static final String STANDARD_UNIVERSE = "standard"; /** * The condor universe key value for local universe. */ public static final String LOCAL_UNIVERSE = "local"; /** * The condor universe key value for parallel universe. */ public static final String PARALLEL_UNIVERSE = "parallel"; /** * The name of the implementing namespace. It should be one of the valid * namespaces always. * * @see Namespace#isNamespaceValid(String) */ protected String mNamespace; /** * The default constructor. */ public Condor(){ mProfileMap = new TreeMap(); mNamespace = NAMESPACE_NAME; } /** * The overloaded constructor * * @param mp map containing the profile keys. */ public Condor(Map mp){ mProfileMap = new TreeMap(mp); mNamespace = NAMESPACE_NAME; } /** * Returns the name of the namespace associated with the profile implementations. * * @return the namespace name. * @see #NAMESPACE_NAME */ public String namespaceName(){ return mNamespace; } /** * Returns a comma separated list of files that are designated * for transfer via condor file transfer mechanism for the job. * * @return a comma separated list of files, else null */ public String getIPFilesForTransfer(){ return ( this.containsKey( Condor.TRANSFER_IP_FILES_KEY ) )? (String)this.get(Condor.TRANSFER_IP_FILES_KEY): null; } /** * Returns a comma separated list of files that are designated * for transfer via condor file transfer mechanism for the job. * * @return a comma separated list of files, else null */ public String getOutputFilesForTransfer(){ return ( this.containsKey( Condor.TRANSFER_OP_FILES_KEY ) )? (String)this.get(Condor.TRANSFER_OP_FILES_KEY ): null; } /** * Remove the input files that were designated for transfer using * Condor File Transfer Mechanism.
*/ public void removeIPFilesForTransfer() { Object obj = this.removeKey( Condor.TRANSFER_IP_FILES_KEY ); if( obj != null ){ //delete stf and wto only if no output files tx //and transfer_executable is not set if( !this.containsKey( Condor.TRANSFER_OP_FILES_KEY ) && !this.containsKey( Condor.TRANSFER_EXECUTABLE_KEY )){ this.removeKey( "should_transfer_files" ); this.removeKey( "when_to_transfer_output" ); } } } /** * Remove the output files that were designated for transfer using * Condor File Transfer Mechanism. */ public void removeOutputFilesForTransfer() { Object obj = this.removeKey( Condor.TRANSFER_OP_FILES_KEY ); if( obj != null ){ //delete stf and wto only if no output files tx //and transfer_executable is not set if( !this.containsKey( Condor.TRANSFER_IP_FILES_KEY ) && !this.containsKey( Condor.TRANSFER_EXECUTABLE_KEY )){ this.removeKey( "should_transfer_files" ); this.removeKey( "when_to_transfer_output" ); } } } /** * Adds the executable for transfer via the condor file transfer mechanism. * * */ public void setExecutableForTransfer( ){ this.construct( Condor.TRANSFER_EXECUTABLE_KEY, "true" ); this.construct("should_transfer_files","YES"); this.construct("when_to_transfer_output","ON_EXIT"); } /** * Adds an input file that is to be transferred from the submit host via * the Condor File Transfer Mechanism. It also sets the associated condor * keys like when_to_transfer and should_transfer_files. * * @param file the path to the file on the submit host. */ public void addIPFileForTransfer(String file){ //sanity check if(file == null || file.length() == 0){ return ; } String files; //check if the key is already set. if(this.containsKey(Condor.TRANSFER_IP_FILES_KEY)){ //update the existing list. files = (String)this.get(Condor.TRANSFER_IP_FILES_KEY); files = files + "," + file; } else{ files = file; //set the additional keys only once this.construct("should_transfer_files","YES"); this.construct("when_to_transfer_output","ON_EXIT"); } this.construct(Condor.TRANSFER_IP_FILES_KEY,files); } /** * Adds an output file that is to be transferred from the submit host via * the Condor File Transfer Mechanism. It also sets the associated condor * keys like when_to_transfer and should_transfer_files. * * @param file the path to the file on the submit host. */ public void addOPFileForTransfer( String file ){ //sanity check if(file == null || file.length() == 0){ return ; } String files; //check if the key is already set. if(this.containsKey( Condor.TRANSFER_OP_FILES_KEY )){ //update the existing list. files = (String)this.get( Condor.TRANSFER_OP_FILES_KEY ); files = files + "," + file; } else{ files = file; //set the additional keys only once this.construct("should_transfer_files","YES"); this.construct("when_to_transfer_output","ON_EXIT"); } this.construct( Condor.TRANSFER_OP_FILES_KEY, files ); } /** * Additional method to handle the Condor namespace with * convenience mappings. Currently the following keys * are not supported keys as they clash with Pegasus * internals * *
     * arguments	- not supported, got from the arguments tag in DAX
     * copy_to_spool    - supported, limited to LCG sites at present where one needs
     *                    to stage in the kickstart. Pegasus sets it to false by default
     *                    for arch start stuff on the local pool, unless the user
     *                    overrides it.
     * environment	- not supported, use the env namespace for this
     * executable       - not supported, this is got from the transformation catalog
     * FileSystemDomain - supported, but is propogated to the classad expression
     *                    for requirements.
     * globusscheduler  - not supported, Pegasus determines this on the basis of
     *                    its planning strategy
     * globusrsl        - not supported, rsl is populated through the Globus namespace.
     * grid_type        - OK (like gt2, gt4, condor)
     * getenv           - OK
     * log              - not supported, as it has to be same for the whole dag
     * notification     - OK
     * noop_job         - OK (used for synchronizing jobs in graph)
     * noop_job_exit_signal - OK
     * noop_job_exit_code - OK
     * periodic_release - OK
     * periodic_remove  - OK
     * priority         - OK
     * queue		- required; always added
     * remote_initialdir- not allowed, the working directory is picked up from
     *                    pool file and properties file
     * request_cpus     - number of cpu's required. New in Condor 7.8.0
     * request_memory   - amount of memory required . New in Condor 7.8.0
     * request_disk     - amount of disk required. New in Condor 7.8.0.
     * stream_error     - supported, however it is applicable only for globus jobs.
     *
     * stream_output    - supported, however it is applicable only for globus jobs.
     *
     * transfer_executable  - supported, limited to LCG sites at present where one needs
     *                        to stage in the kickstart.
     * transfer_input_files - supported, especially used to transfer proxies in
     *                        case of glide in pools.
     * universe         - supported, especially used to incorporate glide in pools.
     * x509userproxy    - supported, overrides x509 default proxy and proxy transfers in
     *                    for glideins and vanilla jobs
     * +xxxx            - supported, this is used to add extra classads with the jobs.
     * 
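     *
     * As an illustrative sketch based on the table above: priority and
     * periodic_release would be accepted as valid keys, executable would
     * be rejected as not permitted, and a custom classad such as
     * +MyClassAd (a hypothetical name) would be passed through verbatim.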
* * @param key is the key within the globus namespace, must be lowercase! * @param value is the value for the given key. * * @return MALFORMED_KEY * VALID_KEY * UNKNOWN_KEY * NOT_PERMITTED_KEY * DEPRECATED_KEY * EMPTY_KEY */ public int checkKey(String key, String value) { // sanity checks first int res = 0; if (key == null || key.length() < 2 ) { res = MALFORMED_KEY ; return res; } if( value == null || value.length() < 1 ){ res = EMPTY_KEY; return res; } //before checking convert the key to lower case key = key.toLowerCase(); switch (key.charAt(0)) { case 'a': if (key.compareTo("arguments") == 0) { res = NOT_PERMITTED_KEY; } else { res = UNKNOWN_KEY; } break; case 'c': if (key.compareTo("copy_to_spool") == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'e': if (key.compareTo("environment") == 0 || key.compareTo("executable") == 0) { res = NOT_PERMITTED_KEY; } else { res = UNKNOWN_KEY; } break; case 'f': //want to preserve case if (key.compareTo(FILE_SYSTEM_DOMAIN_KEY) == 0 ) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'g': if (key.compareTo(GRID_JOB_TYPE_KEY) == 0){ res = VALID_KEY; } else if ( key.compareTo(GET_ENV_KEY) == 0 ){ res = VALID_KEY; } else if (key.compareTo("globusscheduler") == 0 || key.compareTo("globusrsl") == 0) { res = NOT_PERMITTED_KEY; } else { res = UNKNOWN_KEY; } break; case 'j': if (key.compareTo(JOBMANAGER_TYPE_KEY) == 0){ res = VALID_KEY; } else{ res = UNKNOWN_KEY; } break; case 'l': if (key.compareTo("log") == 0) { res = NOT_PERMITTED_KEY; } else { res = UNKNOWN_KEY; } break; case 'n': if (key.compareTo("notification") == 0 || key.compareTo("noop_job") == 0 || key.compareTo("noop_job_exit_code") == 0 || key.compareTo("noop_job_exit_signal") == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'p': if (key.compareTo( Condor.PERIODIC_RELEASE_KEY ) == 0 || key.compareTo( Condor.PERIODIC_REMOVE_KEY ) == 0 || key.compareTo( Condor.PRIORITY_KEY ) == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'q': if (key.compareTo("queue") == 0) { res = NOT_PERMITTED_KEY; } else { res = UNKNOWN_KEY; } break; case 'r': if (key.compareTo("remote_initialdir") == 0) { res = NOT_PERMITTED_KEY; } else if( key.compareTo( "requirements" ) == 0 ){ res = VALID_KEY; } else if( key.compareTo( "rank" ) == 0 ){ res = VALID_KEY; } else if ( key.compareTo( Condor.REQUEST_CPUS_KEY ) == 0 || key.compareTo( Condor.REQUEST_MEMORY_KEY) == 0 || key.compareTo( Condor.REQUEST_DISK_KEY) == 0 ){ res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 's': if (key.compareTo("stream_error") == 0 || key.compareTo("stream_output") == 0) { res = VALID_KEY; } else if( key.compareTo( "should_transfer_files" ) == 0 ){ res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 't': if (key.compareTo(TRANSFER_EXECUTABLE_KEY) == 0 || key.compareTo(TRANSFER_IP_FILES_KEY) == 0){ res = VALID_KEY; } else if( key.compareTo( "transfer_output" ) == 0 ){ res = VALID_KEY; } else if( key.compareTo( "transfer_error" ) == 0 ){ res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'u': if (key.compareTo(UNIVERSE_KEY) == 0 ){ res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'w': if (key.compareTo( Condor.WHEN_TO_TRANSFER_OUTPUT_KEY ) == 0 ){ res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'x': if (key.compareTo( Condor.X509USERPROXY_KEY ) == 0 ){ res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case '+': res = VALID_KEY; break; default: res = UNKNOWN_KEY; } return res; } /** * It puts in the namespace specific information specified in 
the properties * file into the namespace. The name of the pool is also passed, as many of * the properties specified in the properties file are on a per pool basis. * It handles the periodic_remove and periodic_release characteristics for * condor jobs. * * @param properties the PegasusProperties object containing * all the properties that the user specified at various * places (like .chimerarc, properties file, command line). * @param pool the pool name where the job is scheduled to run. */ public void checkKeyInNS(PegasusProperties properties, String pool){ //retrieve the relevant profiles from properties //and merge them into the existing. this.assimilate( properties , Profiles.NAMESPACES.condor ); } /** * Returns a boolean value indicating whether the string passed is an integer or not. * * @param value the value * * @return boolean */ public boolean isInteger( String value ){ boolean result = true; try{ Integer.parseInt(value); } catch( Exception e ){ result = false; } return result; } /** * This checks whether a key value pair specified is valid in the current * namespace by calling the checkKey function, and on the basis of * the value returned puts the pair into the associated map in the class. * In addition it transfers the FILE_SYSTEM_DOMAIN_KEY to the * REQUIREMENTS_KEY. * * @param key key that needs to be checked in the namespace for validity. * @param value value of the key * */ public void checkKeyInNS(String key, String value){ int rslVal = checkKey(key,value); switch (rslVal){ case Namespace.MALFORMED_KEY: //key is malformed ignore malformedKey(key,value); break; case Namespace.NOT_PERMITTED_KEY: notPermitted(key); break; case Namespace.UNKNOWN_KEY: unknownKey(key, value); break; case Namespace.VALID_KEY: if(key.equalsIgnoreCase(FILE_SYSTEM_DOMAIN_KEY)){ //set it to the REQUIREMENTS_KEY key = REQUIREMENTS_KEY; //construct the classad expression value = FILE_SYSTEM_DOMAIN_KEY + " == " + "\"" + value + "\""; } construct(key, value); break; case Namespace.EMPTY_KEY: emptyKey( key ); break; } } /** * Merge the profiles in the namespace in a controlled manner. * In case of intersection, the new profile value overrides the existing * profile value. * * @param profiles the Namespace object containing the profiles. */ public void merge( Namespace profiles ){ //check if we are merging profiles of same type if (!(profiles instanceof Condor )){ //throw an error throw new IllegalArgumentException( "Profiles mismatch while merging" ); } String key, value; for ( Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ){ //construct directly. bypassing the checks! key = (String)it.next(); value = (String) profiles.get(key); //do not override transfer_ip_files. merge into the existing list if( key.equals( this.TRANSFER_IP_FILES_KEY )){ //add to existing this.addIPFileForTransfer( value ); //the list was merged in place. move on to the next key continue; } //overriding the arguments makes no sense. if( key.equals( this.ARGUMENTS_KEY )){ continue; } else{ this.construct(key, value ); } } } /** * Constructs a new element of the format (key=value). All the keys * are converted to lower case before storing. * * @param key is the left-hand-side * @param value is the right hand side */ public void construct(String key, String value) { mProfileMap.put(key.toLowerCase(), value); } /** * Returns the boolean value that a particular key is mapped to in this * namespace. If the key is mapped to a non boolean * value, or the key is not populated in the namespace, false is returned. * * @param key The key whose boolean value you desire.
* * @return boolean */ public boolean getBooleanValue(Object key){ boolean value; if(mProfileMap.containsKey(key)){ value = Boolean.valueOf((String)mProfileMap.get(key)).booleanValue(); } else{ //the key is not in the namespace //return false return false; } return value; } /** * Converts the contents of the map into the string that can be put in the * Condor file for printing. * * @return the textual description */ public String toCondor(){ StringBuffer st = new StringBuffer(); String key = null; String value = null; Iterator it = mProfileMap.keySet().iterator(); while(it.hasNext()){ key = (String)it.next(); value = (String)mProfileMap.get(key); if( value == null || value.equals( "" ) ){ continue; } st.append(key).append(" = ").append(value).append("\n"); } return st.toString(); } /** * Returns a copy of the current namespace object. * * @return the Cloned object */ public Object clone(){ return new Condor(this.mProfileMap); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/aggregator/0000755000175000017500000000000011757531667025360 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/aggregator/Abstract.java0000644000175000017500000000323111757531137027755 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace.aggregator; /** * An abstract implementation of the Profile Aggregators. * * @author Karan Vahi * @version $Revision: 2550 $ */ public abstract class Abstract implements Aggregator{ /** * Formats the String value as an integer. If the String is NaN then the * default value is assigned. * * @param value the value to be converted to integer. * @param dflt the default value to be used in case value is NaN or null. * * @return the integer value * * @throws NumberFormatException in the case when default value cannot be * converted to an int. */ protected int parseInt( String value, String dflt ) throws NumberFormatException{ int val = Integer.parseInt( dflt ); //check for null and apply default if( value == null ) return val; //try to parse the value try{ val = Integer.parseInt( value ); } catch( Exception e ){ /*ignore for now*/ } return val; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/aggregator/MAX.java0000644000175000017500000000312411757531137026640 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.namespace.aggregator; /** * An implementation of the Aggregator interface that takes the maximum of the * profile values. In case either of the profile values is not a valid * integer, the default value is picked up. * * @author Karan Vahi * @version $Revision: 2550 $ */ public class MAX extends Abstract{ /** * Returns the maximum of two values. * * @param oldValue the existing value for the profile. * @param newValue the new value being added to the profile. * @param dflt the default value to be used in case the values * are not of the correct type. * * @return the computed value as a String. */ public String compute( String oldValue, String newValue, String dflt ){ int val1 = parseInt( oldValue, dflt ); int val2 = parseInt( newValue, dflt ); return ( val2 > val1 )? Integer.toString( val2 ) : Integer.toString( val1 ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/aggregator/Update.java0000644000175000017500000000266711757531137027450 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace.aggregator; /** * An implementation of the Aggregator interface that always takes the * new profile value. Updates the old value with the new value. * * @author Karan Vahi * @version $Revision: 2550 $ */ public class Update extends Abstract{ /** * Returns the new value, ignoring the old value. * * @param oldValue the existing value for the profile. * @param newValue the new value being added to the profile. * @param dflt the default value to be used in case the values * are not of the correct type. * * @return the computed value as a String. */ public String compute( String oldValue, String newValue, String dflt ){ //always return the new value. no sanity checks return newValue; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/aggregator/MIN.java0000644000175000017500000000312611757531137026640 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace.aggregator; /** * An implementation of the Aggregator interface that takes the minimum of the * profile values. In case either of the profile values is not a valid * integer, the default value is picked up. * * @author Karan Vahi * @version $Revision: 2550 $ */ public class MIN extends Abstract{ /** * Returns the minimum of two values. * * @param oldValue the existing value for the profile.
* @param newValue the new value being added to the profile. * @param dflt the default value to be used in case the values * are not of the correct type. * * @return the computed value as a String. */ public String compute( String oldValue, String newValue, String dflt ){ int val1 = parseInt( oldValue, dflt ); int val2 = parseInt( newValue, dflt ); return ( val2 < val1 )? Integer.toString( val2 ) : Integer.toString( val1 ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/aggregator/Sum.java0000644000175000017500000000277211757531137026767 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace.aggregator; /** * An implementation of the Aggregator interface that sums the profile values. * In the case of either of the profile values not valid integers, the * default value is picked up. * * * @author Karan Vahi * @version $Revision: 2550 $ */ public class Sum extends Abstract{ /** * Sums up the values. * * @param oldValue the existing value for the profile. * @param newValue the new value being added to the profile. * @param dflt the default value to be used in case the values * are not of the correct type. * * @return the computed value as a String. */ public String compute( String oldValue, String newValue, String dflt ){ return Integer.toString( parseInt( oldValue, dflt ) + parseInt(newValue, dflt ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/aggregator/Aggregator.java0000644000175000017500000000274211757531137030302 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace.aggregator; /** * An internal interface, that allows us to perform aggregation functions * on profiles during merging of profiles. * * @author Karan Vahi * @version $Revision: 2550 $ */ public interface Aggregator{ /** * The version number associated with this API of Profile Aggregators. */ public static final String VERSION = "1.0"; /** * Do the aggregation function on the profile values. * * @param oldValue the existing value for the profile. * @param newValue the new value being added to the profile. * @param dflt the default value to be used in case the values * are not of the correct type. * * @return the computed value as a String. 
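     *
     * As an illustrative sketch of the implementations above: given
     * oldValue "5", newValue "3" and dflt "0", MAX computes "5", MIN
     * computes "3", Sum computes "8", and Update always returns the new
     * value "3".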
*/ public String compute( String oldValue, String newValue, String dflt ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/TestNamespace.java0000644000175000017500000000212511757531137026625 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace; /** * Test Class for namespaces. * * @author Karan Vahi * @version $Revision: 2550 $ */ public class TestNamespace { public TestNamespace() { } public static void main(String[] args){ Condor c = new Condor(); Pegasus v = new Pegasus(); System.out.println(v.namespaceName() + " \n" + v.deprecatedTable()); System.out.println(c.namespaceName() + " \n" + c.deprecatedTable()); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/Hints.java0000644000175000017500000002060011757531137025154 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.common.PegasusProperties; import java.util.Map; import java.util.TreeMap; import java.util.Iterator; /** * An empty mechanical implementation for the * namespace. At present we do not * know what the meaning is. The meaning is * is determined at the point of writing the * submit files. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2786 $ */ public class Hints extends Namespace { /** * The name of the namespace that this class implements. */ public static final String NAMESPACE_NAME = Profile.HINTS; /** * The jobmanager universe key. */ public static final String JOBMANAGER_UNIVERSE_KEY = "jobmanager.universe"; /** * The execution pool key */ public static final String EXECUTION_POOL_KEY = "executionPool"; /** * The globus scheduler hints key */ public static final String GLOBUS_SCHEDULER_KEY = "globusScheduler"; /** * The pfnHint key */ public static final String PFN_HINT_KEY = "pfnHint"; /** * The name of the implementing namespace. It should be one of the valid * namespaces always. * * @see Namespace#isNamespaceValid(String) */ protected String mNamespace; /** * The default constructor. * Note that the map is not allocated memory at this stage. It is done so * in the overloaded construct function. */ public Hints() { mProfileMap = null; mNamespace = NAMESPACE_NAME; } /** * The overloaded constructor. 
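     * A minimal usage sketch (the "local" key value is assumed purely for
     * illustration):
     * <pre>
     *    Map mp = new TreeMap();
     *    mp.put( Hints.EXECUTION_POOL_KEY, "local" );
     *    Hints hints = new Hints( mp );
     * </pre>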
* * @param mp the map containing the profiles to be prepopulated with. */ public Hints(Map mp) { mProfileMap = new TreeMap(mp); mNamespace = NAMESPACE_NAME; } /** * Returns the name of the namespace associated with the profile implementations. * * @return the namespace name. * @see #NAMESPACE_NAME */ public String namespaceName(){ return mNamespace; } /** * Provides an iterator to traverse the profiles by their keys. * * @return an iterator over the keys to walk the profile list. */ public Iterator getProfileKeyIterator() { return ( this.mProfileMap == null ) ? new EmptyIterator() : this.mProfileMap.keySet().iterator(); } /** * Constructs a new element of the format (key=value). It first checks if * the map has been initialised or not. If not then allocates memory first. * * @param key is the left-hand-side. * @param value is the right hand side. */ public void construct(String key, String value) { if(mProfileMap == null) mProfileMap = new TreeMap(); mProfileMap.put(key, value); } /** * Returns true if the namespace contains a mapping for the specified key. * More formally, returns true if and only if this map contains at a mapping * for a key k such that (key==null ? k==null : key.equals(k)). * (There can be at most one such mapping.) * It also returns false if the map does not exist. * * @param key The key that you want to search for * in the namespace. * * @return boolean */ public boolean containsKey(Object key){ return (mProfileMap == null)? false : mProfileMap.containsKey(key); } /** * This checks whether the key passed by the user is valid in the current * namespace or not. At present, for this namespace only a limited number of * keys have been assigned semantics. * * @param key (left hand side) * @param value (right hand side) * * @return Namespace.VALID_KEY * @return Namespace.NOT_PERMITTED_KEY * */ public int checkKey(String key, String value) { // sanity checks first int res = 0; if (key == null || key.length() < 2 || value == null || value.length() < 2) { res = MALFORMED_KEY ; } switch (key.charAt(0)) { case 'e': if (key.compareTo( Hints.EXECUTION_POOL_KEY ) == 0) { res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; case 'g': if (key.compareTo( Hints.GLOBUS_SCHEDULER_KEY ) == 0) { res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; case 'j': if (key.compareTo( Hints.JOBMANAGER_UNIVERSE_KEY ) == 0 ) { res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; case 'p': if (key.compareTo( Hints.PFN_HINT_KEY ) == 0 /*|| key.compareTo("pfnUniverse") == 0*/) { res = VALID_KEY; } else { res = NOT_PERMITTED_KEY; } break; default: res = NOT_PERMITTED_KEY; } return res; } /** * It puts in the namespace specific information specified in the properties * file into the namespace. The name of the pool is also passed, as many of * the properties specified in the properties file are on a per pool basis. * An empty implementation for the timebeing. * * @param properties the PegasusProperties object containing * all the properties that the user specified at various * places (like .chimerarc, properties file, command line). * @param pool the pool name where the job is scheduled to run. */ public void checkKeyInNS(PegasusProperties properties, String pool){ //retrieve the relevant profiles from properties //and merge them into the existing. this.assimilate( properties, Profiles.NAMESPACES.hints ); } /** * Merge the profiles in the namespace in a controlled manner. * In case of intersection, the new profile value overrides, the existing * profile value. 
* * @param profiles the Namespace object containing the profiles. */ public void merge( Namespace profiles ){ //check if we are merging profiles of same type if (!(profiles instanceof Hints )){ //throw an error throw new IllegalArgumentException( "Profiles mismatch while merging" ); } String key; for ( Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ){ //construct directly. bypassing the checks! key = (String)it.next(); this.construct( key, (String)profiles.get( key ) ); } } /** * Converts the contents of the map into the string that can be put in the * Condor file for printing. * * @return String */ public String toCondor() { StringBuffer st = new StringBuffer(); String key = null; String value = null; if(mProfileMap == null) return ""; for ( Iterator it = mProfileMap.keySet().iterator(); it.hasNext(); ){ key = (String)it.next(); value = (String)mProfileMap.get(key); st.append(key).append(" = ").append(value).append("\n"); } return st.toString(); } /** * Returns a copy of the current namespace object. * * @return the Cloned object */ public Object clone() { return (mProfileMap == null)?new Hints():new Hints(this.mProfileMap); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/Namespace.java0000644000175000017500000004170611757531137025775 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace; import edu.isi.pegasus.planner.classes.Data; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; /** * The base namespace class that all the othernamepsace handling classes extend. * Some constants are defined. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2802 $ */ public abstract class Namespace /*extends Data*/{ /** * The LogManager object which is used to log all the messages. * */ public LogManager mLogger = LogManagerFactory.loadSingletonInstance( ); /** * The version number associated with this API of Profile Namespaces. */ public static final String VERSION = "1.2"; //constants for whether the key //is valid in the namespace or not /** * Either the key or the value specified is null or malformed. */ public static final int MALFORMED_KEY = -1; /** * The key is a valid key and can be put in the profiles. */ public static final int VALID_KEY = 0; /** * The key is unknown. Upto the profile namespace implementation whether to * use it or not. */ public static final int UNKNOWN_KEY = 1; /** * The key is not permitted in as it clashes with default Pegasus constructs. */ public static final int NOT_PERMITTED_KEY = 2; /** * The key is deprecated. 
Support is for a limited time. */ public static final int DEPRECATED_KEY = 3; /** * The key value is empty . */ public static final int EMPTY_KEY = 4; /** * The Map object that contains the profiles for a particular namespace. * The Map is indexed by profile key. Each value, is a profile value. */ protected Map mProfileMap; /** * Checks if the namespace specified is valid or not. * * @param namespace The namespace you want to check */ public static boolean isNamespaceValid(String namespace){ return Profile.namespaceValid( namespace ); } /** * This checks the whether a key value pair specified is valid in the current * namespace or not, and whether it clashes with other key value pairs that * might have been set by Pegasus internally. * * @return MALFORMED_KEY * VALID_KEY * UNKNOWN_KEY * NOT_PERMITTED_KEY */ public abstract int checkKey(String key, String value); /** * Merge the profiles in the namespace in a controlled manner. * The profiles should be merged only if the namespace object containing them * matches to the current namespace. * * @param profiles the Namespace object containing the profiles. */ public abstract void merge( Namespace profiles ); /** * Returns the name of the namespace associated with the profile implementations. * * @return the namespace name. */ public abstract String namespaceName(); /** * Returns the contents as String. Currently, it returns condor compatible * string that can be put in the condor submit file * * @return textual description */ public String toString(){ return this.toCondor(); } /** * Returns a condor description that can be used to put the contents of the * namespace into the condor submit file during code generation. * * @return String */ public abstract String toCondor(); /** * Provides an iterator to traverse the profiles by their keys. * * @return an iterator over the keys to walk the profile list. */ public Iterator getProfileKeyIterator() { return ( mProfileMap == null )? new EmptyIterator() : this.mProfileMap.keySet().iterator(); } /** * Singleton access to the deprecated table that holds the deprecated keys, * and the keys that replace them. It should be overriden in the namespaces, * that have deprecated keys. * * @return Map */ public Map deprecatedTable() { throw new UnsupportedOperationException("No Deprecation support in the namespace " + namespaceName()); } /** * It puts in the namespaces keys from another namespace instance. * * @param nm the namespace to be assimilated */ public void checkKeyInNS( Namespace nm ) { if( !nm.namespaceName().equals( this.namespaceName() ) ){ //mismatch of namespaces throw new RuntimeException( "Mismatch of namespaces " + this.namespaceName() + " " + nm.namespaceName() ); } for( Iterator it = nm.getProfileKeyIterator(); it.hasNext() ; ){ String key = ( String )it.next(); this.checkKeyInNS( key, (String) nm.get(key) ); } } /** * It puts in the namespace specific information from the Transformation * Catalog into the namespace. * * @param entry the TCEntry object containing the result from * the Transformation Catalog. */ public void checkKeyInNS(TransformationCatalogEntry entry){ //sanity check if(entry == null ) { return; } //pass down the list of Profile objects to be sucked in. checkKeyInNS(entry.getProfiles(this.namespaceName())); } /** * It takes in a Profiles object and puts them into the namespace after * checking if the namespace in the Profile object is same as the namepsace * implementation. * * @param profile the Profile object containing the key and * value. 
* * @exception IllegalArgumentException if the namespace in the profile * is not the same as the profile namepsace in which the profile * is being incorporated. * * @see org.griphyn.cPlanner.classes.Profile */ public void checkKeyInNS(Profile profile) throws IllegalArgumentException{ if(profile.getProfileNamespace().equals(this.namespaceName())){ checkKeyInNS(profile.getProfileKey(), profile.getProfileValue()); } else{ //throw an exception for the illegal Profile Argument throw new IllegalArgumentException("Illegal Profile " + profile); } } /** * It takes in a list of profiles and puts them into the namespace after * checking if they are valid or not. Note, there are no checks on the * namespace however. The user should ensure that each Profile object in * the list is of the same namespace type. * * @param vars List of Profile objects, each referring * to a key value for the profile. * * * @see org.griphyn.cPlanner.classes.Profile */ public void checkKeyInNS(List vars){ if(vars == null || vars.isEmpty()){ //no variables to insert return; } Profile p = null; for( Iterator it = vars.iterator(); it.hasNext(); ){ p = (Profile)it.next(); checkKeyInNS(p.getProfileKey(),p.getProfileValue()); } } /** * It puts in the namespace specific information specified in the properties * file into the namespace. The name of the pool is also passed, as many of * the properties specified in the properties file are on a per pool basis. * * @param properties the PegasusProperties object containing * all the properties that the user specified at various * places (like .chimerarc, properties file, command line). * @param pool the pool name where the job is scheduled to run. */ public abstract void checkKeyInNS(PegasusProperties properties, String pool); /** * This checks the whether a key value pair specified is valid in the current * namespace or not by calling the checkKey function and then on the basis of * the values returned puts them into the associated map in the class. * * @param key key that needs to be checked in the namespace for validity. * @param value value of the key * */ public void checkKeyInNS(String key, String value){ int rslVal = checkKey(key,value); switch (rslVal){ case Namespace.MALFORMED_KEY: //key is malformed ignore malformedKey(key,value); break; case Namespace.NOT_PERMITTED_KEY: notPermitted(key); break; case Namespace.UNKNOWN_KEY: unknownKey(key, value); break; case Namespace.VALID_KEY: construct(key, value); break; case Namespace.DEPRECATED_KEY: deprecatedKey(key,value); break; case Namespace.EMPTY_KEY: emptyKey( key ); break; } } /** * Assimilate the profiles in the namespace in a controlled manner. * In case of intersection, the new profile value overrides, the existing * profile value. * * @param profiles the Namespace object containing the profiles. * @param namespace the namespace for which the profiles need to be assimilated. */ public void assimilate( PegasusProperties properties, Profiles.NAMESPACES namespace ){ Namespace profiles = properties.getProfiles( namespace ) ; for ( Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ){ String key = (String)it.next(); //profiles assimilated from properties have lowest priority if( !this.containsKey(key) ){ this.checkKeyInNS( key, (String)profiles.get( key ) ); } } } /** * Returns true if the namespace contains a mapping for the specified key. * More formally, returns true if and only if this map contains at a mapping * for a key k such that (key==null ? k==null : key.equals(k)). * (There can be at most one such mapping.) 
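* <p>
* A small sketch (the key name and the ns instance are hypothetical):
* containsKey distinguishes a key mapped to null from an absent key,
* which a bare get cannot:
* <pre>
* if ( ns.containsKey( "somekey" ) ) {
*     Object v = ns.get( "somekey" );   // may still be null
* }
* </pre>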
* * @param key The key that you want to search for * in the namespace. */ public boolean containsKey(Object key){ return (mProfileMap == null)? false: mProfileMap.containsKey(key); } /** * Constructs a new element of the format (key=value). * * @param key is the left-hand-side * @param value is the right hand side */ public void construct(String key, String value) { mProfileMap.put(key, value); } /** * Removes the key from the namespace. * * @param key The key you want to remove. * * @return the value object if it exists. * null if the key does not exist in the namespace. */ public Object removeKey(Object key){ return mProfileMap.remove(key); } /** * Returns the key set associated with the namespace. * * @return key set if the mProfileMap is populated. * null if the associated mProfileMap is not populated. */ public Set keySet(){ return (mProfileMap == null) ? null: mProfileMap.keySet(); } /** * Returns a boolean indicating if the object is empty. * * The object is empty if the underlying map's key set is empty. * * @return */ public boolean isEmpty(){ return ( mProfileMap == null )? true : mProfileMap.keySet().isEmpty(); } /** * Returns the value to which this namespace maps the specified key. * Returns null if the map contains no mapping for this key. A return value * of null does not necessarily indicate that the map contains no mapping for * the key; it's also possible that the map explicitly maps the key to null. * The containsKey operation may be used to distinguish these two cases. * * @param key The key whose value you want. * */ public Object get(Object key){ return mProfileMap.get(key); } /** * Warns about an unknown profile key and constructs it anyway. * Constructs a new RSL element of the format (key=value). * * @param key is the left-hand-side * @param value is the right hand side */ public void unknownKey(String key, String value) { mLogger.log("unknown profile " + namespaceName() + "." + key + ", using anyway", LogManager.WARNING_MESSAGE_LEVEL); construct(key, value); } /** * Warns about a deprecated profile key. It constructs the corresponding * replacement key. * * @param key is the left-hand-side * @param value is the right hand side * * @see #deprecatedTable() */ public void deprecatedKey(String key, String value) { String replacement = (String)deprecatedTable().get(key); if(replacement == null){ //no replacement key for the deprecated //profile! Fatal Internal Error StringBuffer error = new StringBuffer(); error.append( "No replacement key exists for deprecated profile "). append( namespaceName() ).append( "." ).append( key ); throw new RuntimeException( error.toString() ); } mLogger.log( "profile " + namespaceName() + "." + key + " is deprecated. Replacing with " + namespaceName() + "." + replacement, LogManager.WARNING_MESSAGE_LEVEL); if(containsKey(replacement)){ //replacement key already exists. //use that ! might break profile overriding ?? } else{ construct(replacement,value); } } /** * Warns about a namespace profile key that cannot be permitted. * * @param key is the key that induced the warning. */ public void notPermitted(String key) { mLogger.log( "profile " + namespaceName() + "." + key + " is not permitted, ignoring!", LogManager.WARNING_MESSAGE_LEVEL); } /** * Deletes the key from the namespace. * * @param key the key with empty value */ public void emptyKey( String key ) { mLogger.log( "profile " + namespaceName() + "." 
+ key + " is empty, Removing!", LogManager.WARNING_MESSAGE_LEVEL); this.removeKey( key ); } /** * Warns about a namespace profile key-value pair that is malformed. * * @param key is the key that induced the warning. * @param value is the corresponding value of the key. */ public void malformedKey(String key, String value) { mLogger.log( "profile " + namespaceName() + "." + key + " with value " + value + " is malformed, ignoring!", LogManager.WARNING_MESSAGE_LEVEL); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ Namespace obj; try{ obj = ( Namespace ) super.clone(); for( Iterator it = this.getProfileKeyIterator(); it.hasNext(); ){ String key = ( String )it.next(); obj.construct( key, (String)this.get( key )); } } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } return obj; } /** * An empty iterator that allows me to traverse in case of null objects. */ protected class EmptyIterator implements Iterator{ /** * Always returns false, as an empty iterator. * * @return false */ public boolean hasNext(){ return false; } /** * Returns a null as we are iterating over nothing. * * @return null */ public Object next(){ return null; } /** * Returns a false, as no removal * */ public void remove(){ } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/ENV.java0000644000175000017500000002051611757531137024525 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace; import edu.isi.pegasus.planner.catalog.classes.Profiles; import java.util.Iterator; import java.util.Map; import java.util.StringTokenizer; import java.util.TreeMap; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.common.PegasusProperties; /** * The environment namespace, that puts in the environment variables for the * transformation that is being run, through Condor. At present on the occurence * of a clash between the values of an environment variable the values are * overwritten with the order of preference in decreasing order being users * local properties, transformation catalog, pool file and the dax (vdl). * Later on operations like append , prepend would also be supported. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4077 $ */ public class ENV extends Namespace { /** * The name of the namespace that this class implements. */ public static final String NAMESPACE_NAME = Profile.ENV; /** * The name of the environment variable that specifies the path to the * proxy. */ public static final String X509_USER_PROXY_KEY = "X509_USER_PROXY"; /** * The name of the environment variable that specifies the Gridstart PREJOB. 
*/ public static final String GRIDSTART_PREJOB = "GRIDSTART_PREJOB"; /** * The name of the environment variable that specifies the s3cfg path */ public static final String S3CFG = "S3CFG"; /** * The name of the implementing namespace. It should be one of the valid * namespaces always. * * @see Namespace#isNamespaceValid(String) */ protected String mNamespace; /** * The default constructor. * Note that the map is not allocated memory at this stage. It is done so * in the overloaded construct function. */ public ENV() { mProfileMap = null; mNamespace = NAMESPACE_NAME; } /** * The overloaded constructor. * * @param mp map (possibly empty). */ public ENV(Map mp) { mProfileMap = new TreeMap(mp); mNamespace = NAMESPACE_NAME; } /** * Returns the value to which this namespace maps the specified key. * Returns null if the map contains no mapping for this key. A return value * of null does not necessarily indicate that the map contains no mapping for * the key; it's also possible that the map explicitly maps the key to null. * The containsKey operation may be used to distinguish these two cases. * * @param key The key whose value you want. * */ public Object get(Object key){ return ( mProfileMap == null )? null : mProfileMap.get(key); } /** * Returns the name of the namespace associated with the profile implementations. * * @return the namespace name. * @see #NAMESPACE_NAME */ public String namespaceName(){ return mNamespace; } /** * Constructs a new element of the format (key=value). It first checks if * the map has been initialised or not. If not then allocates memory first. * * @param key is the left-hand-side * @param value is the right hand side */ public void construct(String key, String value) { if(mProfileMap == null) mProfileMap = new TreeMap(); mProfileMap.put(key, value); } /** * This checks whether the key passed by the user is valid in the current * namespace or not. At present, for this namespace all the keys are * construed as valid as long as the value passed is not null. * * @param key (left hand side) * @param value (right hand side) * * @return Namespace.VALID_KEY * @return Namespace.NOT_PERMITTED_KEY * */ public int checkKey(String key, String value) { if(key == null || value == null) return Namespace.NOT_PERMITTED_KEY; return Namespace.VALID_KEY; } /** * Converts the contents of the map into the string that can be put in the * Condor file for printing. * * @return String . */ public String toCondor() { StringBuffer st = new StringBuffer(); String key = null; String value = null; Iterator it = (mProfileMap == null) ? null: mProfileMap.keySet().iterator(); if(it == null) return null; st.append("environment = "); while(it.hasNext()){ key = (String)it.next(); value = (String)mProfileMap.get(key); st.append(key).append("=").append(value).append(";"); } st.append("\n"); return st.toString(); } /** * It puts in the namespace specific information specified in the properties * file into the namespace. The name of the pool is also passed, as many of * the properties specified in the properties file are on a per pool basis. * * @param properties the PegasusProperties object containing * all the properties that the user specified at various * places (like .chimerarc, properties file, command line). * @param pool the pool name where the job is scheduled to run. */ public void checkKeyInNS(PegasusProperties properties, String pool){ /* //get from the properties for pool local String prop = pool.equalsIgnoreCase("local") ? 
//check if property in props file properties.getLocalPoolEnvVar() : null; if (prop != null) { checkKeyInNS(prop); } */ //retrieve the relevant profiles from properties //and merge them into the existing. this.assimilate( properties , Profiles.NAMESPACES.env ); } /** * It takes in key=value pairs separated by a ; and puts them into the * namespace after checking if they are valid or not. * * @param envString the String containing the environment variables and * their values separated by a semi colon. */ public void checkKeyInNS(String envString){ //sanity check if(envString == null) return; StringTokenizer st = new StringTokenizer(envString,";"); String name; String value; String keyValPair; while(st.hasMoreTokens()){ keyValPair = (String)st.nextToken(";"); if(keyValPair.trim().equalsIgnoreCase("null")){ return; } StringTokenizer st1 = new StringTokenizer(keyValPair); name = st1.nextToken("="); value= st1.nextToken(); checkKeyInNS(name,value); } } /** * Merge the profiles in the namespace in a controlled manner. * In case of intersection, the new profile value overrides, the existing * profile value. * * @param profiles the Namespace object containing the profiles. */ public void merge( Namespace profiles ){ //check if we are merging profiles of same type if (!(profiles instanceof ENV )){ //throw an error throw new IllegalArgumentException( "Profiles mismatch while merging" ); } String key; for ( Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ){ //construct directly. bypassing the checks! key = (String)it.next(); this.construct( key, (String)profiles.get( key ) ); } } /** * Returns a copy of the current namespace object. * * @return the Cloned object */ public Object clone() { return ( mProfileMap == null ? new ENV() : new ENV(this.mProfileMap) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/Stat.java0000644000175000017500000001476011757531137025014 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace; import java.util.Iterator; import java.util.Map; import java.util.StringTokenizer; import java.util.TreeMap; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.common.PegasusProperties; /** * The stat namespace object. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class Stat extends Namespace { /** * The name of the namespace that this class implements. */ public static final String NAMESPACE_NAME = Profile.STAT; /** * The name of the implementing namespace. It should be one of the valid * namespaces always. * * @see Namespace#isNamespaceValid(String) */ protected String mNamespace; /** * The default constructor. * Note that the map is not allocated memory at this stage. It is done so * in the overloaded construct function. */ public Stat() { mProfileMap = null; mNamespace = NAMESPACE_NAME; } /** * The overloaded constructor. * * @param mp map (possibly empty). 
*/ public Stat(Map mp) { mProfileMap = new TreeMap(mp); mNamespace = NAMESPACE_NAME; } /** * Returns the value to which this namespace maps the specified key. * Returns null if the map contains no mapping for this key. A return value * of null does not necessarily indicate that the map contains no mapping for * the key; it's also possible that the map explicitly maps the key to null. * The containsKey operation may be used to distinguish these two cases. * * @param key The key whose value you want. * */ public Object get(Object key){ return ( mProfileMap == null )? null : mProfileMap.get(key); } /** * Returns the name of the namespace associated with the profile implementations. * * @return the namespace name. * @see #NAMESPACE_NAME */ public String namespaceName(){ return mNamespace; } /** * Constructs a new element of the format (key=value). It first checks if * the map has been initialised or not. If not then allocates memory first. * * @param key is the left-hand-side * @param value is the right hand side */ public void construct(String key, String value) { if(mProfileMap == null) mProfileMap = new TreeMap(); mProfileMap.put(key, value); } /** * This checks whether the key passed by the user is valid in the current * namespace or not. At present, for this namespace all the keys are * construed as valid as long as the value passed is not null. * * @param key (left hand side) * @param value (right hand side) * * @return Namespace.VALID_KEY * @return Namespace.NOT_PERMITTED_KEY * */ public int checkKey(String key, String value) { if(key == null || value == null) return Namespace.NOT_PERMITTED_KEY; return Namespace.VALID_KEY; } /** * It puts in the namespace specific information specified in the properties * file into the namespace. The name of the pool is also passed, as many of * the properties specified in the properties file are on a per pool basis. * * @param properties the PegasusProperties object containing * all the properties that the user specified at various * places (like .chimerarc, properties file, command line). * @param pool the pool name where the job is scheduled to run. */ public void checkKeyInNS(PegasusProperties properties, String pool){ //do nothing for time being. } /** * It takes in key=value pairs separated by a ; and puts them into the * namespace after checking if they are valid or not. * * @param envString the String containing the environment variables and * their values separated by a semi colon. */ public void checkKeyInNS(String envString){ //sanity check if(envString == null) return; StringTokenizer st = new StringTokenizer(envString,";"); String name; String value; String keyValPair; while(st.hasMoreTokens()){ keyValPair = (String)st.nextToken(";"); if(keyValPair.trim().equalsIgnoreCase("null")){ return; } StringTokenizer st1 = new StringTokenizer(keyValPair); name = st1.nextToken("="); value= st1.nextToken(); checkKeyInNS(name,value); } } /** * Merge the profiles in the namespace in a controlled manner. * In case of intersection, the new profile value overrides, the existing * profile value. * * @param profiles the Namespace object containing the profiles. */ public void merge( Namespace profiles ){ //check if we are merging profiles of same type if (!(profiles instanceof ENV )){ //throw an error throw new IllegalArgumentException( "Profiles mismatch while merging" ); } String key; for ( Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ){ //construct directly. bypassing the checks! 
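// NB: the instanceof guard above tests for ENV rather than Stat, so merging
// one Stat namespace into another throws IllegalArgumentException; the check
// appears to have been carried over verbatim from ENV.merge.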
key = (String)it.next(); this.construct( key, (String)profiles.get( key ) ); } } /** * Converts the contents of the map into the string that can be put in the * Condor file for printing. * * @return the textual description. */ public String toCondor() { return ""; } /** * Returns a copy of the current namespace object. * * @return the Cloned object */ public Object clone() { return ( mProfileMap == null ? new Stat() : new Stat(this.mProfileMap) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/Globus.java0000644000175000017500000003105111757531137025324 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.aggregator.Aggregator; import edu.isi.pegasus.planner.namespace.aggregator.MIN; import edu.isi.pegasus.planner.namespace.aggregator.MAX; import edu.isi.pegasus.planner.namespace.aggregator.Sum; import edu.isi.pegasus.planner.namespace.aggregator.Update; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import java.util.TreeMap; /** * This helper class helps in handling the globus rsl key value pairs that * come through profile information for namespace Globus. * The information can either come in through transformation catalog, site catalog * or through profile tags in DAX. * * @author Karan Vahi * @version $Revision: 2798 $ */ public class Globus extends Namespace { /** * The name of the namespace that this class implements. */ public static final String NAMESPACE_NAME = Profile.GLOBUS; /** * The table that maps the various globus profile keys to their aggregator * functions. * * */ public static Map mAggregatorTable; /** * The default aggregator to be used for profile aggregation, if none specified * in the aggregator table; */ public static Aggregator mDefaultAggregator = new Update(); /** * Initializer block that populates the Aggregator table just once. */ static{ mAggregatorTable = new HashMap( 5 ); Aggregator max = new MAX(); Aggregator sum = new Sum(); //all the times need to be added mAggregatorTable.put( "maxtime", sum ); mAggregatorTable.put( "maxcputime", sum ); mAggregatorTable.put( "maxwalltime", sum ); //for the memory rsl params we take max mAggregatorTable.put( "maxmemory", max ); mAggregatorTable.put( "minmemory", max ); } /** * The name of the implementing namespace. It should be one of the valid * namespaces always. * * @see Namespace#isNamespaceValid(String) */ protected String mNamespace; /** * The default constructor. */ public Globus(){ mProfileMap = new TreeMap(); mNamespace = NAMESPACE_NAME; } /** * The overloaded constructor * * @param map a possibly empty map. 
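* <p>
* Illustrative sketch (values hypothetical): merging two Globus namespaces
* combines the time keys via the Sum aggregator registered in the static
* initializer, instead of simply overwriting them.
* <pre>
* Globus a = new Globus();
* a.construct( "maxwalltime", "60" );
* Globus b = new Globus();
* b.construct( "maxwalltime", "30" );
* a.merge( b );   // maxwalltime aggregates to "90"
* </pre>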
*/ public Globus(Map map){ mProfileMap = new TreeMap(map); mNamespace = NAMESPACE_NAME; } /** * Returns the name of the namespace associated with the profile * implementations. * * @return the namespace name. * @see #NAMESPACE_NAME */ public String namespaceName(){ return mNamespace; } /** * Constructs a new element of the format (key=value). All the keys * are converted to lower case before storing. * * @param key is the left-hand-side * @param value is the right hand side */ public void construct(String key, String value) { mProfileMap.put(key.toLowerCase(), value); } /** * Additional method to handle the globus namespace with * convenience mappings. Currently supported keys are: * *
* <pre>
* arch		- OK
* arguments	- not supported, clashes with Condor
     * count		- OK
     * directory	- not supported, clashes with Pegasus
     * dryRun		- OK, beware the consequences!
     * environment	- not supported, use env namespace
     * executable	- not supported, clashes with Condor
     * gramMyjob	- OK
     * hostCount	- OK
     * jobType		- OK to handle MPI jobs
     * maxCpuTime	- OK
     * maxMemory	- OK
     * maxTime		- OK
     * maxWallTime	- OK
     * minMemory	- OK
     * project		- OK
     * queue		- OK
     * stdin		- not supported, clashes with Pegasus
     * stdout		- not supported, clashes with Pegasus
     * stderr		- not supported, clashes with Pegasus
     *
* rsl		- OK: Chimera's generic extension (AOB)
* </pre>
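* <p>
* A short validation sketch (values hypothetical), using the result codes
* defined in Namespace:
* <pre>
* Globus g = new Globus();
* int rc = g.checkKey( "maxwalltime", "120" );   // Namespace.VALID_KEY
* rc = g.checkKey( "arguments", "-v" );          // Namespace.NOT_PERMITTED_KEY
* </pre>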
* * @param key is the key within the globus namespace, must be lowercase! * @param value is the value for the given key. * * @return MALFORMED_KEY * VALID_KEY * UNKNOWN_KEY * NOT_PERMITTED_KEY */ public int checkKey(String key, String value) { // sanity checks first int res = 0; if (key == null || key.length() < 2 ) { res = MALFORMED_KEY ; return res; } if( value == null || value.length() < 1 ){ res = EMPTY_KEY; return res; } //before checking convert the key to lower case key = key.toLowerCase(); switch (key.charAt(0)) { case 'a': if( ( key.compareTo( "arch" ) == 0 ) ){ res = VALID_KEY; } else if ( (key.compareTo("arguments") == 0) ){ res = NOT_PERMITTED_KEY; } else { res = UNKNOWN_KEY; } break; case 'c': if (key.compareTo("count") == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'd': if (key.compareTo("directory") == 0) { res = NOT_PERMITTED_KEY; } else if (key.compareTo("dryrun") == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'e': if (key.compareTo("environment") == 0 || key.compareTo("executable") == 0) { res = NOT_PERMITTED_KEY; } else { res = UNKNOWN_KEY; } break; case 'g': if (key.compareTo("grammyjob") == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'h': if (key.compareTo("hostcount") == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'j': if (key.compareTo("jobtype") == 0) { // FIXME: Gaurang? res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'm': if (key.compareTo("maxcputime") == 0 || key.compareTo("maxmemory") == 0 || key.compareTo("maxtime") == 0 || key.compareTo("maxwalltime") == 0 || key.compareTo("minmemory") == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'p': if (key.compareTo("project") == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'q': if (key.compareTo("queue") == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'r': if (key.compareTo("rsl") == 0) { // our own extension mechanism, no warnings here // Note: The value IS the RSL!!! new String(value); } else { res = UNKNOWN_KEY; } break; case 's': if (key.compareTo("stdin") == 0 || key.compareTo("stdout") == 0 || key.compareTo("stderr") == 0) { res = NOT_PERMITTED_KEY; } else { res = UNKNOWN_KEY; } default: res = UNKNOWN_KEY; } return res; } /** * Merge the profiles in the namespace in a controlled manner. * In case of intersection, the new profile value overrides, the existing * profile value. * * @param profiles the Namespace object containing the profiles. */ public void merge( Namespace profiles ){ //check if we are merging profiles of same type if (!(profiles instanceof Globus )){ //throw an error throw new IllegalArgumentException( "Profiles mismatch while merging" ); } String key; Aggregator agg; for ( Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ){ key = (String)it.next(); agg = this.aggregator( key ); //load the appropriate aggregator to merge the profiles this.construct( key, agg.compute( (String)get( key ), (String)profiles.get( key ), "0" ) ); } } /** * It puts in the namespace specific information specified in the properties * file into the namespace. The name of the pool is also passed, as many of * the properties specified in the properties file are on a per pool basis. * An empty implementation for the timebeing. It is handled in the submit * writer. * * @param properties the PegasusProperties object containing * all the properties that the user specified at various * places (like .chimerarc, properties file, command line). 
* @param pool the pool name where the job is scheduled to run. */ public void checkKeyInNS(PegasusProperties properties, String pool){ //retrieve the relevant profiles from properties //and merge them into the existing. this.assimilate( properties ,Profiles.NAMESPACES.globus ) ; } /** * Converts the contents of the map into the string that can be put in the * Condor file for printing. * * @return the textual description. */ public String toCondor(){ return convert(mProfileMap); } /** * Returns a copy of the current namespace object * * @return the Cloned object */ public Object clone(){ return new Globus(this.mProfileMap); } /** * Returns the aggregator to be used for the profile key while merging. * If no aggregator is found, the then default Aggregator (Update) is used. * * @param key the key for which the aggregator is found. * * @return the aggregator for the profile key. */ protected Aggregator aggregator( String key ){ Object aggregator = this.mAggregatorTable.get( key ); return ( aggregator == null )? mDefaultAggregator : (Aggregator)aggregator; } /** * Converts a map with RSL kv-pairs into an RSL string. * * @param rsl is the RSL map to convert * @return the new string to use in globusrsl of Condor. */ private String convert(java.util.Map rsl) { StringBuffer result = new StringBuffer(); for (Iterator i = rsl.keySet().iterator(); i.hasNext(); ) { String key = (String) i.next(); String value = (String) rsl.get(key); if (value != null && value.length() > 0) { if (key.compareTo("rsl") == 0) { result.append(value); } else { result.append('(').append(key).append('=').append(value). append(')'); } } } return result.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/Pegasus.java0000644000175000017500000005711611757531137025512 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace; import java.util.Iterator; import java.util.Map; import java.util.TreeMap; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.aggregator.Aggregator; import edu.isi.pegasus.planner.namespace.aggregator.Sum; /** * A Planner specific namespace. It defines profiles that are used to fine * tune Pegasus behaviour on a per job basis if required. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4850 $ */ public class Pegasus extends Namespace { /** * The name of the namespace that this class implements. */ public static final String NAMESPACE_NAME = Profile.VDS; /** * The name of the key that sets a remote initial dir for a condor globus * job. */ public static final String REMOTE_INITIALDIR_KEY = "workdir"; /** * The name of the key that if set, determines the number of super jobs * that are made corresponding to a logical transformation and an execution * pool. 
It overrides the collapse key if set. * * @see #COLLAPSE_KEY */ public static final String BUNDLE_KEY = "clusters.num"; /** * The name of the key that if set in the Pegasus namespace determines the * number of jobs that are collapsed into the super job. */ public static final String COLLAPSE_KEY = "clusters.size"; /** * The name of the key that if set in the Pegasus namespace specifies the * approximate runtime of the job. This key is used in while clustering jobs * according to run times. */ public static final String JOB_RUN_TIME = "job.runtime"; /** * The name of the key that if set in the Pegasus namespace specifies the * maximum amount of time for which a cluster should run. This key is used * while clustering jobs horizontally. Only those jobs are grouped together * whose combined runtime is less than or equal to the max runtime. */ public static final String MAX_RUN_TIME = "clusters.maxruntime"; /** * The name of the key that determines the collapser executable to be used * to run the merged/collapsed job. */ public static final String COLLAPSER_KEY = "collapser"; /** * The name of the profile key in vds namespace that does the grouping. */ public static final String GROUP_KEY = "group"; /** * The name of the profile key in vds namespace that does the labelling * by default. */ public static final String LABEL_KEY = "label"; /** * The name of the profile key that determines the launching executable * to be used to launch a job on the grid. */ public static final String GRIDSTART_KEY = "gridstart"; /** * The name of the profile key, that determines the arguments with which * the GridStart that is used to launch a job on the remote site is invoked * with. The arguments are appended to the ones constructed by default * by the GridStart implementation. */ public static final String GRIDSTART_ARGUMENTS_KEY = "gridstart.arguments"; /** * The name of the profile key that designates the path to a gridstart. */ public static final String GRIDSTART_PATH_KEY = "gridstart.path"; /** * The deprecated change dir key. * @see #CHANGE_DIR_KEY */ public static final String DEPRECATED_CHANGE_DIR_KEY = "change_dir"; /** * The name of the profile key that triggers the kickstart to change directory * before launching an executable instead of launching the executable from * the directory where kickstart is being run. */ public static final String CHANGE_DIR_KEY = "change.dir"; /** * The name of the profile key that triggers the kickstart to create and * then the working directory to it before launching an executable. */ public static final String CREATE_AND_CHANGE_DIR_KEY = "create.dir"; /** * The number of cores that are associated with the job. To be used for * multiplying the job runtimes accordingly. This does not set the corresponding * globus profiles automatically. Is only used for stampede purposes. */ public static final String CORES_KEY = "cores"; /** * The deprecated bundle stagein key. * @see #CHANGE_DIR_KEY */ public static final String DEPRECATED_BUNDLE_STAGE_IN_KEY = "bundle.stagein"; /** * The name of the key that determines the bundling parameter for the * stagein transfer node. */ public static final String BUNDLE_STAGE_IN_KEY = "stagein.clusters"; /** * The name of the key that determines the bundling parameter for the * remote stagein transfer node. */ public static final String BUNDLE_REMOTE_STAGE_IN_KEY = "stagein.remote.clusters"; /** * The name of the key that determines the bundling parameter for the * local stagein transfer node. 
*/ public static final String BUNDLE_LOCAL_STAGE_IN_KEY = "stagein.local.clusters"; /** * The name of the key that determines the bundling parameter for the * remote stagein transfer node. */ public static final String BUNDLE_REMOTE_STAGE_OUT_KEY = "stageout.remote.clusters"; /** * The name of the key that determines the bundling parameter for the * local stagein transfer node. */ public static final String BUNDLE_LOCAL_STAGE_OUT_KEY = "stageout.local.clusters"; /** * The name of the key that determines the bundling parameter for the * stageout transfer node. */ public static final String BUNDLE_STAGE_OUT_KEY = "stageout.clusters"; /** * The name of the key that determines the clustering parameter for the * stagein transfer node. */ public static final String CLUSTER_STAGE_IN_KEY = "stagein.clusters"; /** * The name of the key that determines the clustering parameter for the * stagein transfer node. */ public static final String CLUSTER_REMOTE_STAGE_IN_KEY = "stagein.remote.clusters"; /** * The name of the key that determines the cluster parameter for the * local stagein transfer node. */ public static final String CLUSTER_LOCAL_STAGE_IN_KEY = "stagein.local.clusters"; /** * The name of the key that determines the clustering parameter for the * stageout transfer node. */ public static final String CLUSTER_STAGE_OUT_KEY = "stageout.clusters"; /** * The name of the key that determines the bundling parameter for the * remote stagein transfer node. */ public static final String CLUSTER_REMOTE_STAGE_OUT_KEY = "stageout.remote.clusters"; /** * The name of the key that determines the cluster parameter for the * local stagein transfer node. */ public static final String CLUSTER_LOCAL_STAGE_OUT_KEY = "stageout.local.clusters"; /** * The name of the key that determines the number of chains of stagein * nodes that are to be created per site. */ public static final String CHAIN_STAGE_IN_KEY = "chain.stagein"; /** * The name of the profile key if associated with a job, results in an explicit * transfer of the proxy from the submit host to the remote site, instead of * banking upon CondorG to transfer the proxy. */ public static final String TRANSFER_PROXY_KEY = "transfer.proxy"; /** * The name of the profile key, that when associated with transfer jobs * determines the arguments with which the transfer executable is invoked. */ public static final String TRANSFER_ARGUMENTS_KEY = "transfer.arguments"; /** * The name of the profile key when associated with a transformation in the * transformation catalog gives expected runtime in seconds. */ public static final String RUNTIME_KEY = "runtime"; /** * The directory in which job needs to execute on worker node tmp. */ public static final String WORKER_NODE_DIRECTORY_KEY = "wntmp"; /** * The name of the key, that denotes the style of the dag that is constructed. * Possible styles can be * -condor(glidein,flocking,submitting directly to condor pool) * -globus(condorg) */ public static final String STYLE_KEY = "style"; /** * The name of the key that denotes the type of the job. Whether it is * recursive or not. Still protypical. */ public static final String TYPE_KEY = "type"; /** * The style indicating that the submit files are to be generated for * a vanilla condor execution. */ public static final String CONDOR_STYLE = "condor"; /** * The style indicating that the submit files are to be generated for * a CondorC submission to remote schedds. 
*/ public static final String CONDORC_STYLE = "condorc"; /** * The style indicating that the submit files are to be generated for * a CondorG execution. */ public static final String GLOBUS_STYLE = "globus"; /** * The style indicating that the submit files are to be generated for a * glidein execution. */ public static final String GLIDEIN_STYLE = "glidein"; /** * The style indicating that the submit files are to be generated for a * glideinwms execution. */ public static final String GLIDEINWMS_STYLE = "glideinwms"; /** * The style indicating that the submit files are to be generated for a * glite execution. */ public static final String GLITE_STYLE = "glite"; /** * Static Handle to the sum aggregator. */ private static Aggregator SUM_AGGREGATOR = new Sum(); /** * The name of the implementing namespace. It should be one of the valid * namespaces always. * * @see Namespace#isNamespaceValid(String) */ protected String mNamespace; /** * The table containing the mapping of the deprecated keys to the newer keys. */ protected static Map mDeprecatedTable = null; /** * The default constructor. * Note that the map is not allocated memory at this stage. It is done so * in the overloaded construct function. */ public Pegasus() { mProfileMap = null; mNamespace = NAMESPACE_NAME; } /** * The overloaded constructor. * * @param mp the initial map. */ public Pegasus(Map mp) { mProfileMap = new TreeMap(mp); mNamespace = NAMESPACE_NAME; } /** * Returns the name of the namespace associated with the profile implementations. * * @return the namespace name. * @see #NAMESPACE_NAME */ public String namespaceName(){ return mNamespace; } /** * Constructs a new element of the format (key=value). * It first checks if the map has been initialised or not. If not then * allocates memory first. It converts the key to lower case before storing. * * @param key is the left-hand-side * @param value is the right hand side */ public void construct(String key, String value) { if(mProfileMap == null) mProfileMap = new TreeMap(); mProfileMap.put(key.toLowerCase(), value); } /** * This checks whether the key passed by the user is valid in the current * namespace or not. 
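* <p>
* For example (values hypothetical), when driven through checkKeyInNS:
* <pre>
* Pegasus p = new Pegasus();
* p.checkKeyInNS( Pegasus.COLLAPSE_KEY, "5" );   // valid: clusters.size
* p.checkKeyInNS( Pegasus.DEPRECATED_CHANGE_DIR_KEY, "true" );
*                 // deprecated: remapped to change.dir via deprecatedTable()
* </pre>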
* * @param key (left hand side) * @param value (right hand side) * * @return Namespace.VALID_KEY * Namespace.UNKNOWN_KEY * Namespace.EMPTY_KEY * */ public int checkKey(String key, String value) { int res = 0; if (key == null || key.length() < 2 ) { res = MALFORMED_KEY ; return res; } if( value == null || value.length() < 1 ){ res = EMPTY_KEY; return res; } //convert key to lower case key = key.toLowerCase(); switch (key.charAt(0)) { case 'b': if ( (key.compareTo(BUNDLE_KEY) == 0) || (key.compareTo(BUNDLE_STAGE_IN_KEY) == 0) || (key.compareTo(BUNDLE_STAGE_OUT_KEY) == 0 ) || (key.compareTo( BUNDLE_REMOTE_STAGE_IN_KEY) == 0 )) { res = VALID_KEY; } else if( key.compareTo(DEPRECATED_BUNDLE_STAGE_IN_KEY) == 0){ res = DEPRECATED_KEY; } else { res = UNKNOWN_KEY; } break; case 'c': if ((key.compareTo( COLLAPSE_KEY ) == 0) || (key.compareTo( COLLAPSER_KEY ) == 0) || (key.compareTo( CHANGE_DIR_KEY ) == 0) || (key.compareTo( CHAIN_STAGE_IN_KEY ) == 0) || (key.compareTo( MAX_RUN_TIME ) == 0) || (key.compareTo(CREATE_AND_CHANGE_DIR_KEY ) == 0 ) || (key.compareTo( CORES_KEY ) == 0 ) ) { res = VALID_KEY; } else if(key.compareTo(DEPRECATED_CHANGE_DIR_KEY) == 0){ res = DEPRECATED_KEY; } else { res = UNKNOWN_KEY; } break; case 'g': if (key.compareTo( GROUP_KEY ) == 0 || key.compareTo( GRIDSTART_KEY ) == 0 || key.compareTo( GRIDSTART_PATH_KEY ) == 0 || key.compareTo( GRIDSTART_ARGUMENTS_KEY ) == 0 ) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'j': if (key.compareTo( JOB_RUN_TIME ) == 0) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; case 'l': if( key.compareTo( LABEL_KEY ) == 0 ){ res = VALID_KEY; } else{ res = UNKNOWN_KEY; } break; case 'r': if( key.compareTo( RUNTIME_KEY ) == 0 ){ res = VALID_KEY; } else{ res = UNKNOWN_KEY; } break; case 's': if(key.compareTo(STYLE_KEY) == 0){ res = VALID_KEY; } else{ res = UNKNOWN_KEY; } break; case 't': if ((key.compareTo(TRANSFER_PROXY_KEY) == 0) || (key.compareTo(TRANSFER_ARGUMENTS_KEY) == 0)){ res = VALID_KEY; } else{ res = UNKNOWN_KEY; } break; case 'w': if ( (key.compareTo(REMOTE_INITIALDIR_KEY) == 0) || (key.compareTo(WORKER_NODE_DIRECTORY_KEY) == 0) ) { res = VALID_KEY; } else { res = UNKNOWN_KEY; } break; default: res = UNKNOWN_KEY; } return res; } /** * It puts in the namespace specific information specified in the properties * file into the namespace. The name of the pool is also passed, as many of * the properties specified in the properties file are on a per pool basis. * This is used to load the appropriate collapser for the job. * Any preexisting profile is preferred over the one in the property file. * * @param properties the PegasusProperties object containing * all the properties that the user specified at various * places (like .chimerarc, properties file, command line). * @param pool the pool name where the job is scheduled to run. * * @see #COLLAPSER_KEY * @see #TRANSFER_PROXY_KEY */ public void checkKeyInNS(PegasusProperties properties, String pool){ this.assimilate( properties ,Profiles.NAMESPACES.pegasus ) ; /* //get the value that might have been populated //from other profile sources String value = (String)get(this.COLLAPSER_KEY); value = (value == null)? //load the global from the properties file properties.getJobAggregator(): //prefer the existing one value; //no strict type check required //populate directly this.construct(this.COLLAPSER_KEY,value); value = (String)get(this.TRANSFER_PROXY_KEY); value = (value == null) ? 
//load the property from the properties file Boolean.toString(properties.transferProxy()): //prefer the existing one value; //no strict type check required //populate directly this.construct(this.TRANSFER_PROXY_KEY,value); value = (String)get(this.TRANSFER_ARGUMENTS_KEY); value = (value == null) ? //load the property from the properties file properties.getTransferArguments(): //prefer the existing one value; if(value!=null){ //no strict type check required //populate directly this.construct(Pegasus.TRANSFER_ARGUMENTS_KEY,value); } value = (String)get( this.GRIDSTART_PATH_KEY ); value = ( value == null ) ? */ } /** * Merge the profiles in the namespace in a controlled manner. * In case of intersection, the new profile value (except for key runtime where * the values are summed ) overrides, the existing * profile value. * * @param profiles the Namespace object containing the profiles. */ public void merge( Namespace profiles ){ //check if we are merging profiles of same type if (!(profiles instanceof Pegasus )){ //throw an error throw new IllegalArgumentException( "Profiles mismatch while merging" ); } String key; for ( Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ){ //construct directly. bypassing the checks! key = (String)it.next(); if( key.equals( Pegasus.RUNTIME_KEY ) ){ this.construct( key, SUM_AGGREGATOR.compute((String)get( key ), (String)profiles.get( key ), "0" ) ); } else{ this.construct( key, (String)profiles.get( key ) ); } } } /** * Singleton access to the deprecated table that holds the deprecated keys, * and the keys that replace them. * * @return Map */ public java.util.Map deprecatedTable() { if ( mDeprecatedTable == null ) { // only initialize once and only once, as needed. mDeprecatedTable = new java.util.TreeMap(); mDeprecatedTable.put(DEPRECATED_BUNDLE_STAGE_IN_KEY, BUNDLE_STAGE_IN_KEY); mDeprecatedTable.put(DEPRECATED_CHANGE_DIR_KEY, CHANGE_DIR_KEY); } return mDeprecatedTable; } /** * Converts the contents of the map into the string that can be put in the * Condor file for printing. * * @return the textual description. */ public String toCondor() { StringBuffer st = new StringBuffer(); String key = null; String value = null; if(mProfileMap == null) return ""; Iterator it = mProfileMap.keySet().iterator(); while(it.hasNext()){ key = (String)it.next(); value = (String)mProfileMap.get(key); st.append(key).append(" = ").append(value).append("\n"); } return st.toString(); } /** * Warns about an unknown profile key and constructs it anyway. * Constructs a new RSL element of the format (key=value). * * @param key is the left-hand-side * @param value is the right hand side */ public void unknownKey(String key, String value) { //mLogger.log("unknown profile " + mNamespace + "." + key + // ", using anyway", LogManager.DEBUG_MESSAGE_LEVEL); construct(key, value); } /** * Returns true if the namespace contains a mapping * for the specified key. More formally, returns true * if and only if this map contains at a mapping for a * key k such that (key==null ? k==null : key.equals(k)). * (There can be at most one such mapping.) * It also returns false if the map does not exist. * * @param key The key that you want to search for * in the namespace. * * @return boolean */ public boolean containsKey(Object key){ return (mProfileMap == null)? false : mProfileMap.containsKey(key); } /** * Returns the value to which this namespace maps the specified key. * Returns null if the map contains no mapping for this key. 
A return value * of null does not necessarily indicate that the map contains no mapping for * the key; it's also possible that the map explicitly maps the key to null. * The containsKey operation may be used to distinguish these two cases. * * @param key The key whose value you want. * * @return the object */ public Object get(Object key){ return (mProfileMap == null) ? null : mProfileMap.get(key); } /** * Returns a boolean value, that a particular key is mapped to in this * namespace. If the key is mapped to a non boolean * value or the key is not populated in the namespace false is returned. * * @param key The key whose boolean value you desire. * * @return boolean */ public boolean getBooleanValue(Object key){ boolean value = false; if(mProfileMap != null && mProfileMap.containsKey(key)){ value = Boolean.valueOf((String)mProfileMap.get(key)).booleanValue(); } return value; } /** * Returns a String value, that a particular key is mapped to in this * namespace. If is not populated in the namespace null is returned. * * @param key The key whose boolean value you desire. * * @return String if key is in the namespace * null otherwise. */ public String getStringValue(Object key){ return containsKey(key)? get(key).toString(): null; } /** * Returns a copy of the current namespace object * * @return the Cloned object */ public Object clone() { return (mProfileMap == null)?new Pegasus() : new Pegasus(this.mProfileMap); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/namespace/Selector.java0000644000175000017500000001501411757531137025652 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.namespace; import java.util.Iterator; import java.util.Map; import java.util.StringTokenizer; import java.util.TreeMap; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.common.PegasusProperties; /** * The selector namespace object. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class Selector extends Namespace { /** * The name of the namespace that this class implements. */ public static final String NAMESPACE_NAME = Profile.SELECTOR; /** * The name of the implementing namespace. It should be one of the valid * namespaces always. * * @see Namespace#isNamespaceValid(String) */ protected String mNamespace; /** * The default constructor. * Note that the map is not allocated memory at this stage. It is done so * in the overloaded construct function. */ public Selector() { mProfileMap = null; mNamespace = NAMESPACE_NAME; } /** * The overloaded constructor. * * @param mp map (possibly empty). */ public Selector(Map mp) { mProfileMap = new TreeMap(mp); mNamespace = NAMESPACE_NAME; } /** * Returns the value to which this namespace maps the specified key. * Returns null if the map contains no mapping for this key. A return value * of null does not necessarily indicate that the map contains no mapping for * the key; it's also possible that the map explicitly maps the key to null. 
     *
     * @param key The key whose value you want.
     *
     * @return the value to which this namespace maps the key, or null.
     */
    public Object get(Object key){
        return ( mProfileMap == null ) ? null : mProfileMap.get(key);
    }

    /**
     * Returns the name of the namespace associated with the profile implementations.
     *
     * @return the namespace name.
     * @see #NAMESPACE_NAME
     */
    public String namespaceName(){
        return mNamespace;
    }

    /**
     * Constructs a new element of the format (key=value). It first checks if
     * the map has been initialised or not. If not, it allocates memory first.
     *
     * @param key   is the left hand side.
     * @param value is the right hand side.
     */
    public void construct(String key, String value) {
        if(mProfileMap == null)
            mProfileMap = new TreeMap();
        mProfileMap.put(key, value);
    }

    /**
     * This checks whether the key passed by the user is valid in the current
     * namespace or not. At present, for this namespace all the keys are
     * construed as valid as long as the value passed is not null.
     *
     * @param key   (left hand side)
     * @param value (right hand side)
     *
     * @return Namespace.VALID_KEY
     * @return Namespace.NOT_PERMITTED_KEY
     */
    public int checkKey(String key, String value) {
        if(key == null || value == null)
            return Namespace.NOT_PERMITTED_KEY;
        return Namespace.VALID_KEY;
    }

    /**
     * It puts in the namespace specific information specified in the properties
     * file into the namespace. The name of the pool is also passed, as many of
     * the properties specified in the properties file are on a per pool basis.
     *
     * @param properties the PegasusProperties object containing
     *                   all the properties that the user specified at various
     *                   places (like .chimerarc, properties file, command line).
     * @param pool       the pool name where the job is scheduled to run.
     */
    public void checkKeyInNS(PegasusProperties properties, String pool){
        //do nothing for the time being.
    }

    /**
     * It takes in key=value pairs separated by a ; and puts them into the
     * namespace after checking if they are valid or not.
     *
     * @param envString the String containing the environment variables and
     *                  their values separated by a semicolon.
     */
    public void checkKeyInNS(String envString){
        //sanity check
        if(envString == null)
            return;

        StringTokenizer st = new StringTokenizer(envString,";");
        String name;
        String value;
        String keyValPair;
        while(st.hasMoreTokens()){
            keyValPair = (String)st.nextToken(";");
            if(keyValPair.trim().equalsIgnoreCase("null")){
                return;
            }
            StringTokenizer st1 = new StringTokenizer(keyValPair);
            name  = st1.nextToken("=");
            value = st1.nextToken();
            checkKeyInNS(name,value);
        }
    }

    /**
     * Merge the profiles in the namespace in a controlled manner.
     * In case of intersection, the new profile value overrides the existing
     * profile value.
     *
     * @param profiles the Namespace object containing the profiles.
     */
    public void merge( Namespace profiles ){
        //check if we are merging profiles of the same type
        if (!(profiles instanceof Selector)){
            //throw an error
            throw new IllegalArgumentException( "Profiles mismatch while merging" );
        }
        String key;
        for ( Iterator it = profiles.getProfileKeyIterator(); it.hasNext(); ){
            //construct directly. bypassing the checks!
            key = (String)it.next();
            this.construct( key, (String)profiles.get( key ) );
        }
    }

    /**
     * Converts the contents of the map into the string that can be put in the
     * Condor file for printing.
     *
     * @return the textual description.
     */
    public String toCondor() {
        return "";
    }

    /**
     * Returns a copy of the current namespace object.
     *
     * @return the cloned object
     */
    public Object clone() {
        return ( mProfileMap == null ?
new Selector() : new Selector(this.mProfileMap) );
    }
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/0000755000175000017500000000000011757531667022560 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/Executable.java0000644000175000017500000002652711757531137025504 0ustar ryngerynge/**
 * Copyright 2007-2008 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.isi.pegasus.planner.client;

import java.io.File;
import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.DecimalFormat;
import java.text.NumberFormat;

import edu.isi.pegasus.common.logging.LogManager;
import edu.isi.pegasus.common.logging.LogManagerFactory;
import edu.isi.pegasus.common.util.FactoryException;
import edu.isi.pegasus.common.util.Version;
import edu.isi.pegasus.planner.common.PegasusProperties;

import gnu.getopt.Getopt;
import gnu.getopt.LongOpt;

import java.util.MissingResourceException;

/**
 * The abstract base class which defines all the methods any executable
 * should implement.
 *
 * @author GAURANG MEHTA
 * @author KARAN VAHI
 * @version $Revision: 4665 $
 */
public abstract class Executable {

    /**
     * The LogManager object which is used to log all the messages.
     */
    protected LogManager mLogger ;

    /**
     * The object holding all the properties pertaining to Pegasus.
     */
    protected PegasusProperties mProps;

    /**
     * It stores the version of the Griphyn Virtual Data System software.
     */
    protected String mVersion;

    /**
     * The error message to be logged.
     */
    protected String mLogMsg;

    /**
     * The command line options passed to the executable.
     */
    private String[] commandLineOpts;

    /**
     * The default constructor.
     */
    public Executable(){
        this( null );
    }

    /**
     * The constructor which ends up initialising the PegasusProperties object.
     *
     * @param logger the logger to use. Can be null.
     */
    public Executable( LogManager logger ) {
        mLogger = logger;
    }

    /**
     * Looks up the conf property in the arguments passed to the executable.
     *
     * @param opts     command line arguments passed to the executable
     * @param confChar the short option corresponding to the conf property
     *
     * @return the path to the property file if one was specified, else null
     */
    protected String lookupConfProperty(String[] opts , char confChar){
        LongOpt[] longOptions = new LongOpt[ 1 ];
        longOptions[ 0 ] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, confChar );
        Getopt g = new Getopt("Executable", opts, confChar + ":", longOptions, false);
        g.setOpterr(false);
        String propertyFilePath = null;
        int option = 0;
        while ( ( option = g.getopt() ) != -1 ) {
            if(option == confChar){
                propertyFilePath = g.getOptarg();
                break;
            }
        }
        return propertyFilePath;
    }

    /**
     * Initialize the executable object.
     *
     * @param opts     the command line arguments passed by the user
     * @param confChar the short option corresponding to the conf property.
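     *
     * <p>
     * Illustrative call sequence (a sketch, not from the original source;
     * <code>MyTool</code> is a hypothetical subclass of this class):
     * <pre>
     *   public static void main( String[] args ) {
     *       MyTool me = new MyTool();
     *       me.initialize( args, 'c' ); // scans args for -c/--conf up front
     *   }
     * </pre>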
*/ protected void initialize(String[] opts , char confChar){ this.commandLineOpts = opts; String propertyFile =lookupConfProperty(getCommandLineOptions(), confChar); mProps = PegasusProperties.getInstance(propertyFile); mVersion = Version.instance().toString(); //setup logging before doing anything with properties try{ setupLogging( mLogger , mProps ); }catch(IOException ioe){ throw new RuntimeException("Unable to initialize the logger " , ioe); } mLogMsg = new String(); sanityCheckOnProperties( ); loadProperties(); } /** * Initialize the executable object * @param opts the command line argument passed to the executable */ protected void initialize(String[] opts) { initialize(opts, 'c'); } /** * Returns an error message that chains all the lower order error messages * that might have been thrown. * * @param e the Exception for which the error message has to be composed. * * @return the error message. */ public static String convertException( Exception e ){ return Executable.convertException( e, LogManager.TRACE_MESSAGE_LEVEL ); } /** * Returns an error message that chains all the lower order error messages * that might have been thrown. * * @param e the Exception for which the error message has to be composed. * @param logLevel the user specified level for the logger * * @return the error message. */ public static String convertException( Exception e , int logLevel ){ StringBuffer message = new StringBuffer(); int i = 0; //check if we want to throw the whole stack trace if( logLevel >= LogManager.TRACE_MESSAGE_LEVEL ){ //we want the stack trace to a String Writer. StringWriter sw = new StringWriter(); e.printStackTrace( new PrintWriter( sw ) ); return sw.toString(); } //append all the causes for(Throwable cause = e; cause != null ; cause = cause.getCause()){ if( cause instanceof FactoryException ){ //do the specialized convert for Factory Exceptions message.append(((FactoryException)cause).convertException(i)); break; } message.append("\n [").append( Integer.toString(++i)).append("] "). append(cause.getClass().getName()).append(": "). append(cause.getMessage()); //append just one elment of stack trace for each exception message.append( " at " ).append( cause.getStackTrace()[0] ); } return message.toString(); } /** * Sets up the logging options for this class. Looking at the properties * file, sets up the appropriate writers for output and stderr. * * @param logger the logger to use. Can be null. * @param properties reference of pegasus properties object. */ protected void setupLogging( LogManager logger , PegasusProperties properties ) throws IOException{ if( logger != null ){ mLogger = logger; return; } //setup the logger for the default streams. mLogger = LogManagerFactory.loadSingletonInstance( properties ); mLogger.logEventStart( "event.pegasus.planner", "planner.version", mVersion ); //get the logging value set in properties String value = properties.getProperty("pegasus.log.*"); //use defaults if nothing is set. if( value == null){ mLogger.log("Logging to default streams", LogManager.DEBUG_MESSAGE_LEVEL); return; } else{ //take a backup of the log if required. File f = new File( value ); File dir = f.getParentFile(); String basename = f.getName(); NumberFormat formatter = new DecimalFormat( "000" ); File backupFile = null; //start from 000 onwards and check for existence for( int i = 0; i < 999 ; i++ ){ StringBuffer backup = new StringBuffer(); backup.append( basename ).append( "." ).append( formatter.format(i) ); //check if backup file exists. 
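                //   e.g. with pegasus.log.* = /tmp/run.log the candidates are
                //   run.log.000, run.log.001, ... and the first name not yet
                //   on disk becomes the file the writers log to
                //   (the path and property value above are illustrative only)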
backupFile = new File( dir, backup.toString() ); if( !backupFile.exists() ){ break; } } //log both output and error messages to value specified mLogger.setWriters(backupFile.getAbsolutePath ()); } } /** * Loads all the properties that would be needed by the Toolkit classes. */ public abstract void loadProperties(); /** * This method is used to print the long version of the command. */ public abstract void printLongVersion(); /** * This is used to print the short version of the command. */ public abstract void printShortVersion(); /** * This function is passed command line arguments. In this function you * generate the valid options and parse the options specified at run time. */ //public abstract void executeCommand(String[] args); /** * Generates an array of valid LongOpt objects which contain * all the valid options to the Executable. */ public abstract LongOpt[] generateValidOptions(); /** * Returns the version of the Griphyn Virtual Data System. */ public String getGVDSVersion() { StringBuffer sb = new StringBuffer(); sb.append( "Pegasus Release Version " ).append(mVersion); return sb.toString(); } /** * Logs messages to the singleton logger. * * @param msg is the message itself. * @param level is the level to generate the log message for. */ public void log( String msg, int level ){ mLogger.log( msg, level ); } /** * Get the value of the environment variable. * * @param envVariable the environment variable whose value you want. * * @return String corresponding to the value of the environment * variable if it is set. * null if the environment variable is not set */ public String getEnvValue(String envVariable) { String value = null; value = System.getProperty(envVariable); return value; } /** * Returns the command line arguments passed to the executable * @return command line arguments passed to the executable */ protected String[] getCommandLineOptions(){ String[] optsClone = new String[commandLineOpts.length]; for(int i =0; i< commandLineOpts.length;i++){ optsClone[i] = commandLineOpts[i]; } return optsClone; } /** * Does a sanity check on the properties to make sure that all the * required properties are loaded. * */ protected void sanityCheckOnProperties() { // check required properties if ( mProps.getProperty( "pegasus.home.bindir" ) == null ) { throw new MissingResourceException( "The pegasus.home.bindir property was not set ", "java.util.Properties", "pegasus.home.bindir" ); } if ( mProps.getProperty( "pegasus.home.schemadir" ) == null ) { throw new MissingResourceException( "The pegasus.home.schemadir property was not set ", "java.util.Properties", "pegasus.home.schemadir" ); } if ( mProps.getProperty( "pegasus.home.sharedstatedir" ) == null ) { throw new MissingResourceException( "The pegasus.home.sharedstatedir property was not set ", "java.util.Properties", "pegasus.home.sharedstatedir" ); } if ( mProps.getProperty( "pegasus.home.sysconfdir" ) == null ) { throw new MissingResourceException( "The pegasus.home.sysconfdir property was not set ", "java.util.Properties", "pegasus.home.sysconfdir" ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/VersionNumber.java0000644000175000017500000001036611757531137026217 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
package edu.isi.pegasus.planner.client;

import java.io.*;
import edu.isi.pegasus.common.util.Version;
import gnu.getopt.*;

/**
 * This class just prints the current version number on stdout.
 *
 * @author Jens-S. Vöckler
 * @author Yong Zhao
 * @version $Revision: 4981 $
 */
public class VersionNumber {
    /**
     * The application's own name.
     */
    private String m_application = null;

    /**
     * ctor: Constructs a new instance with the given application name.
     * @param appName is the name of the application
     */
    public VersionNumber( String appName ) {
        m_application = appName;
    }

    /**
     * Prints the usage string.
     */
    public void showUsage() {
        String linefeed = System.getProperty( "line.separator", "\r\n" );
        System.out.println(
            "$Id: VersionNumber.java 4981 2012-02-23 00:28:17Z rynge $" + linefeed +
            "PEGASUS version " + Version.instance().toString() + linefeed );
        System.out.println( "Usage: " + m_application + " [-f | -V ]" );
        System.out.println( linefeed + "Options:" + linefeed +
            " -V|--version print version information about itself and exit." + linefeed +
            " --verbose    increases the verbosity level (ignored)." + linefeed +
            " -f|--full    also shows the internal built time stamp." + linefeed +
            " -l|--long    alias for --full." + linefeed + linefeed +
            "The following exit codes are produced:" + linefeed +
            " 0 :-) Success" + linefeed +
            " 2 :-( Runtime error detected, please read the message." + linefeed +
            " 3 8-O Fatal error merits a program abortion." + linefeed );
    }

    /**
     * Creates a set of options. Note that "build" is an alias for "full";
     * both map onto the same short option 'f'.
     * @return the assembled long option list
     */
    protected LongOpt[] generateValidOptions() {
        // six long options are defined; sizing the array exactly avoids
        // handing null entries to Getopt
        LongOpt[] lo = new LongOpt[6];
        lo[0] = new LongOpt( "version", LongOpt.NO_ARGUMENT, null, 'V' );
        lo[1] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' );
        lo[2] = new LongOpt( "verbose", LongOpt.NO_ARGUMENT, null, 1 );
        lo[3] = new LongOpt( "full", LongOpt.NO_ARGUMENT, null, 'f' );
        lo[4] = new LongOpt( "long", LongOpt.NO_ARGUMENT, null, 'l' );
        lo[5] = new LongOpt( "build", LongOpt.NO_ARGUMENT, null, 'f' );
        return lo;
    }

    /**
     * Print the version information onto stdout.
     *
     * @param v is the version information class.
     * @param build if true, also show build information with version.
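     *
     * <p>
     * With <code>build</code> set, the version string is followed by the
     * platform and the build time stamp, along the lines of (illustrative
     * values only):
     * <pre>
     *   4.0.1-x86_64_rhel_5-20120223002817Z
     * </pre>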
*/ public static void showVersion( Version v, boolean build ) { System.out.print( v.toString() ); if ( build ) System.out.print( '-' + v.determinePlatform() + '-' + v.determineBuilt() ); System.out.println(); } public static void main( String args[] ) { int result = 0; VersionNumber me = null; try { me = new VersionNumber("pegasus-version"); Getopt opts = new Getopt( me.m_application, args, "Vflhmq", me.generateValidOptions() ); opts.setOpterr(false); String installed = null; String internal = null; boolean build = false; Version v = Version.instance(); int option = 0; while ( (option = opts.getopt()) != -1 ) { switch ( option ) { case 1: break; case 'V': System.out.println( "$Id: VersionNumber.java 4981 2012-02-23 00:28:17Z rynge $" ); System.out.println( "PEGASUS version " + v.toString() ); return; case 'l': case 'f': build = true; break; case 'h': default: me.showUsage(); return; } } showVersion(v,build); } catch ( RuntimeException rte ) { System.err.println( "ERROR: " + rte.getMessage() ); result = 2; } catch( Exception e ) { e.printStackTrace(); System.err.println( "FATAL: " + e.getMessage() ); result = 3; } if ( result != 0 ) System.exit(result); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/NetloggerExitcode.java0000644000175000017500000003142311757531137027031 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.client; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.visualize.Callback; import edu.isi.pegasus.planner.visualize.KickstartParser; import edu.isi.pegasus.planner.provenance.NetloggerCallback; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.common.util.FactoryException; import org.griphyn.vdl.toolkit.FriendlyNudge; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; import java.io.IOException; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import edu.isi.pegasus.common.util.CommonProperties; /** * This parses the kickstart records and logs via Log4j the kickstart record * in the Netlogger Format. * * * @author Karan Vahi * * @version $Revision: 3538 $ */ public class NetloggerExitcode extends Executable{ /** * The logging level to be used. */ private int mLoggingLevel; /** * The kickstart file being parsed. */ private String mFilename; /** * The id of the job. */ private String mJobID; /** * The workflow id. */ private String mWorkflowID; /** * Default constructor. */ public NetloggerExitcode(){ super(); } /** * Initialize the NetloggerExitCode object * @param opts the command line argument passed to the NetloggerExitCode */ public void initialize(String [] opts){ super.initialize(opts); mLogMsg = new String(); mVersion = Version.instance().toString(); mLoggingLevel = 0; } /** * The main program. * * * @param args the main arguments passed to the plotter. 
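     *
     * <p>
     * A typical invocation looks like (file name and ids are illustrative):
     * <pre>
     *   netlogger-exitcode -f job.out.000 -w wf-20120101 -j ID000001
     * </pre>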
     */
    public static void main(String[] args) {
        NetloggerExitcode me = new NetloggerExitcode();
        int result = 0;
        double starttime = new Date().getTime();
        double execTime = -1;

        try{
            me.initialize(args);
            result = me.executeCommand( );
        }
        catch ( FactoryException fe){
            me.log( fe.convertException() , LogManager.FATAL_MESSAGE_LEVEL);
            result = 2;
        }
        catch ( Exception e ) {
            //unaccounted for exceptions
            me.log(e.getMessage(), LogManager.FATAL_MESSAGE_LEVEL );
            e.printStackTrace();
            result = 7;
        }
        finally {
            double endtime = new Date().getTime();
            execTime = (endtime - starttime)/1000;
        }

        // warn about non zero exit code
        if ( result != 0 ) {
            me.log("Non-zero exit-code " + result, LogManager.WARNING_MESSAGE_LEVEL );
        }
        else{
            //log the time taken to execute
            me.log("Time taken to execute is " + execTime + " seconds",
                   LogManager.INFO_MESSAGE_LEVEL);
        }

        me.log( "Exiting with exitcode " + result, LogManager.CONSOLE_MESSAGE_LEVEL );
        me.mLogger.logEventCompletion();
        System.exit(result);
    }

    /**
     * Sets up the logging options for this class. Looking at the properties
     * file, sets up the appropriate writers for output and stderr.
     */
    protected void setupLogging(){
        //setup the logger for the default streams.
        mLogger = LogManagerFactory.loadSingletonInstance( mProps );
        mLogger.logEventStart( "event.pegasus.netlogger-exitcode",
                               "postscript.version", mVersion );
    }

    /**
     * Executes the command on the basis of the options specified.
     *
     * @return the exitcode to exit with
     */
    public int executeCommand() {
        int result = 0;
        parseCommandLineArguments(getCommandLineOptions());

        //set logging level only if explicitly set by user
        if( mLoggingLevel > 0 ) {
            mLogger.setLevel( mLoggingLevel );
        }
        else{
            mLogger.setLevel(LogManager.WARNING_MESSAGE_LEVEL);
        }

        //do sanity check on the input file
        if( mFilename == null ){
            throw new RuntimeException( "You need to specify the file containing kickstart records");
        }

        KickstartParser su = new KickstartParser();
        Callback c = new NetloggerCallback();
        c.initialize( null, true );
        su.setCallback( c );

        try{
            Map eventIDMap = new HashMap();
            eventIDMap.put( LoggingKeys.DAG_ID, mWorkflowID );
            eventIDMap.put( LoggingKeys.JOB_ID , mJobID );
            mLogger.logEventStart( LoggingKeys.EVENT_WORKFLOW_JOB_STATUS, eventIDMap );

            log( "Parsing file " + mFilename , LogManager.DEBUG_MESSAGE_LEVEL );
            su.parseKickstartFile( mFilename );

            //grab the list of map objects
            List<Map<String, String>> records =
                (List<Map<String, String>>) c.getConstructedObject();

            //iterate through all the records and log them.
            for( Map<String, String> m : records ){
                for( String key : m.keySet() ){
                    mLogger.add( key, m.get(key) );
                }
                //add job.kickstart for easier debugging
                mLogger.add( "job.kickstart", mFilename );
                int exitcode = Integer.parseInt( m.get( "job.exitcode" ) );
                if( exitcode != 0 ){
                    result = 8;
                }
                mLogger.logAndReset( LogManager.INFO_MESSAGE_LEVEL );
            }
        }
        catch (IOException ioe) {
            log( "Unable to parse kickstart file " + mFilename +
                 convertException( ioe, mLogger.getLevel() ),
                 LogManager.DEBUG_MESSAGE_LEVEL);
            result = 5;
        }
        catch( FriendlyNudge fn ){
            mLogger.add( "job.exitcode" , "9" );
            log( "Problem parsing file " + mFilename +
                 convertException( fn , mLogger.getLevel()),
                 LogManager.WARNING_MESSAGE_LEVEL );
            result = 9;
        }
        finally{
            //we are done with parsing
            c.done();
            mLogger.logEventCompletion();
        }

        return result;
    }

    /**
     * Parses the command line arguments using GetOpt and populates the
     * member variables with the options passed by the user at the command
     * line.
     *
     * @param args the arguments passed by the user at command line.
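     *
     * <p>
     * For instance (an illustrative sketch),
     * <code>-f job.out -j ID000001 -w wf1 -vv</code> selects the kickstart
     * file, the job id and the workflow id, and raises the logging level
     * twice.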
     */
    public void parseCommandLineArguments(String[] args){
        LongOpt[] longOptions = generateValidOptions();
        Getopt g = new Getopt( "plot-node-usage", args, "f:w:j:c:hvV",
                               longOptions, false);
        g.setOpterr(false);

        int option = 0;
        while( (option = g.getopt()) != -1){
            //System.out.println("Option tag " + (char)option);
            switch (option) {
                case 'f':
                    mFilename = g.getOptarg();
                    break;

                case 'j':
                    mJobID = g.getOptarg();
                    break;

                case 'w':
                    mWorkflowID = g.getOptarg();
                    break;

                case 'h'://help
                    printLongVersion();
                    System.exit( 0 );
                    return;

                case 'c': // conf
                    //do nothing
                    break;

                case 'v'://verbose
                    mLoggingLevel++;
                    break;

                case 'V'://version
                    mLogger.log(getGVDSVersion(),LogManager.INFO_MESSAGE_LEVEL);
                    System.exit(0);

                default:
                    //same as help
                    printShortVersion();
                    throw new RuntimeException("Incorrect option or option usage " +
                                               (char)g.getOptopt());
            }
        }
    }

    /**
     * Logs messages to the logger. Adds the workflow id.
     *
     * @param msg   is the message itself.
     * @param level is the level to generate the log message for.
     */
    public void log( String msg, int level ){
        mLogger.add( msg );
        mLogger.logAndReset(level);
    }

    /**
     * It generates the LongOpt objects which contain all the valid options
     * that the command will accept.
     *
     * @return array of LongOpt objects, corresponding to the valid
     *         options
     */
    public LongOpt[] generateValidOptions(){
        LongOpt[] longopts = new LongOpt[7];
        longopts[0] = new LongOpt( "file", LongOpt.REQUIRED_ARGUMENT, null, 'f' );
        longopts[1] = new LongOpt( "wf-id", LongOpt.REQUIRED_ARGUMENT, null, 'w' );
        longopts[2] = new LongOpt( "verbose", LongOpt.NO_ARGUMENT, null, 'v' );
        longopts[3] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' );
        longopts[4] = new LongOpt( "version", LongOpt.NO_ARGUMENT, null, 'V' );
        longopts[5] = new LongOpt( "job-id", LongOpt.REQUIRED_ARGUMENT, null, 'j' );
        longopts[6] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' );
        return longopts;
    }

    /**
     * Prints out a short description of what the command does.
     */
    public void printShortVersion(){
        String text =
            "\n $Id: NetloggerExitcode.java 3538 2011-04-21 01:50:41Z prasanth $ " +
            "\n " + getGVDSVersion() +
            "\n Usage : netlogger-exitcode [-Dprop [..]] -f <file> " +
            " -w <wf-id> -j <job-id> [-c <conf file>] [-v] [-V] [-h]";

        System.out.println(text);
    }

    /**
     * Prints the long description, displaying in detail what the various options
     * to the command stand for.
     */
    public void printLongVersion(){
        String text =
           "\n $Id: NetloggerExitcode.java 3538 2011-04-21 01:50:41Z prasanth $ " +
           "\n " + getGVDSVersion() +
           "\n netlogger-exitcode - Parses the kickstart output and logs relevant information using the pegasus logger." +
           "\n The Pegasus Logger can be configured by specifying the following properties " +
           "\n pegasus.log.manager " +
           "\n pegasus.log.formatter ." +
           "\n Usage: netlogger-exitcode [-Dprop [..]] --file|-f <file> " +
           "\n --wf-id <wf-id> --job-id <job-id> [--conf <conf file>] [--version] [--verbose] [--help]" +
           "\n" +
           "\n Mandatory Options " +
           "\n --file <file>       the kickstart output file to be parsed. May contain multiple invocation records." +
           "\n Other Options " +
           "\n -w <wf-id> |--wf-id  the workflow id to use while logging." +
           "\n -j <job-id>|--job-id the job id to use while logging." +
           "\n -c <file>  |--conf   path to property file" +
           "\n -v |--verbose        increases the verbosity of messages about what is going on" +
           "\n -V |--version        displays the version of the Pegasus Workflow Planner" +
           "\n -h |--help           generates this help." +
           "\n " +
           "\n 0 remote application ran to conclusion with exit code zero." +
           "\n 2 an error occurred while loading a specific module implementation at runtime" +
           "\n 1 remote application concluded with a non-zero exit code."
+ "\n 5 invocation record has an invalid state, unable to parse." + "\n 7 illegal state, stumbled over an exception, try --verbose for details. " + "\n 8 multiple 0..5 failures during parsing of multiple records" + "\n 9 probably an empty kickstart output." + "\n"; System.out.println(text); //mLogger.log(text,LogManager.INFO_MESSAGE_LEVEL); } /** * Loads all the properties that would be needed by the Toolkit classes. */ public void loadProperties(){ //empty for time being } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/SCClient.java0000644000175000017500000005422411757531137025066 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.client; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.PoolConfig; import edu.isi.pegasus.planner.parser.ScannerException; import edu.isi.pegasus.planner.parser.SiteCatalogTextParser; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.planner.catalog.site.SiteFactory; import edu.isi.pegasus.planner.catalog.site.classes.SiteInfo2SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.StringTokenizer; //import javax.naming.NamingEnumeration; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.common.util.FactoryException; //import javax.naming.ldap.LdapContext; /** * A client to convert site catalog between different formats. * * @author Karan Vahi * @author Gaurang Mehta gmehta@isi.edu * * @version $Revision: 3532 $ */ public class SCClient extends Executable { /** * The default output format. */ private static String DEFAULT_OUTPUT_FORMAT = "XML3"; /** * The XML format. */ private static String XML_FORMAT = "XML"; /** * The textual format. */ private static String TEXT_FORMAT = "Text"; private static final String XML_NAMESPACE="http://pegasus.isi.edu/schema"; private static final String XML_VERSION="2.0"; //private boolean mText; /** * The input files. */ private List mInputFiles; /** * The output file that is written out. */ private String mOutputFile; /** * The output format for the site catalog. */ private String mOutputFormat; /** * The input format for the site catalog. */ private String mInputFormat; /** * Denotes the logging level that is to be used for logging the messages. */ private int mLoggingLevel; /** * The default constructor. 
*/ public SCClient() { super(); } public void initialize(String[] opts){ super.initialize(opts); //the output format is whatever user specified in the properties mOutputFormat = mProps.getPoolMode(); mInputFormat = SCClient.TEXT_FORMAT; mLoggingLevel = LogManager.WARNING_MESSAGE_LEVEL; //mText = false; mInputFiles = null; mOutputFile = null; } /** * Sets up the logging options for this class. Looking at the properties * file, sets up the appropriate writers for output and stderr. */ protected void setupLogging(){ //setup the logger for the default streams. mLogger = LogManagerFactory.loadSingletonInstance( mProps ); mLogger.logEventStart( "event.pegasus.pegasus-sc-converter", "pegasus.version", mVersion ); } /** * Loads all the properties * that would be needed * by the Toolkit classes */ public void loadProperties() { } public LongOpt[] generateValidOptions() { LongOpt[] longopts = new LongOpt[ 11 ]; longopts[ 0 ] = new LongOpt( "text", LongOpt.NO_ARGUMENT, null, 't' ); longopts[ 1 ] = new LongOpt( "files", LongOpt.REQUIRED_ARGUMENT, null, 'f' ); longopts[ 2 ] = new LongOpt( "input", LongOpt.REQUIRED_ARGUMENT, null, 'i' ); longopts[ 3 ] = new LongOpt( "iformat", LongOpt.REQUIRED_ARGUMENT, null, 'I' ); longopts[ 4 ] = new LongOpt( "output", LongOpt.REQUIRED_ARGUMENT, null, 'o' ); longopts[ 5 ] = new LongOpt( "oformat", LongOpt.REQUIRED_ARGUMENT, null, 'O' ); longopts[ 6 ] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' ); longopts[ 7 ] = new LongOpt( "version", LongOpt.NO_ARGUMENT, null, 'V' ); longopts[ 8 ] = new LongOpt( "verbose", LongOpt.NO_ARGUMENT, null, 'v' ); longopts[ 9 ] = new LongOpt( "quiet", LongOpt.NO_ARGUMENT, null, 'q' ); longopts[ 10 ] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' ); return longopts; } /** * Call the correct commands depending on options. 
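     * <p>
     * For example, the conversion documented in the help text further below
     * boils down to (illustrative file names):
     * <pre>
     *   pegasus-sc-converter -i sites.txt -I Text -o sites.xml -O XML3
     * </pre>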
* @param opts Command options */ public void executeCommand() throws IOException { LongOpt[] longOptions = generateValidOptions(); Getopt g = new Getopt( "SCClient", getCommandLineOptions(), "lthvqVi:I:o:O:f:c:", longOptions, false ); int option = 0; while ( ( option = g.getopt() ) != -1 ) { switch ( option ) { case 't': //text //mText = true; mOutputFormat = SCClient.TEXT_FORMAT; break; case 'f': //files StringTokenizer st = new StringTokenizer( g.getOptarg(), "," ); mInputFiles = new ArrayList( st.countTokens() ); while ( st.hasMoreTokens() ) { mInputFiles.add( st.nextToken() ); } break; case 'i': //input StringTokenizer str = new StringTokenizer( g.getOptarg(), "," ); mInputFiles = new ArrayList( str.countTokens() ); while ( str.hasMoreTokens() ) { mInputFiles.add( str.nextToken() ); } break; case 'I': //iformat mInputFormat = g.getOptarg(); break; case 'o': //output mOutputFile = g.getOptarg(); break; case 'O': //oformat mOutputFormat = g.getOptarg(); break; case 'h': //help printLongVersion(); System.exit( 0 ); break; case 'V': //version System.out.println(getGVDSVersion()); System.exit( 0 ); break; /* case 'l': // Precedence for local or remote mLocalPrec = true; break; */ case 'v': //Verbose mode incrementLogging(); break; case 'q': //Quiet mode decrementLogging(); break; case 'c': // conf //do nothing break; default: mLogger.log( "Unrecognized option or Invalid argument to option : " + (char)g.getOptopt(), LogManager.FATAL_MESSAGE_LEVEL ); printShortVersion(); System.exit( 1 ); } } if(getLoggingLevel() >= 0){ //set the logging level only if -v was specified //else bank upon the the default logging level mLogger.setLevel(getLoggingLevel()); } else{ //set log level to FATAL only mLogger.setLevel( LogManager.FATAL_MESSAGE_LEVEL ); } if(mInputFiles==null || mInputFiles.isEmpty()|| mOutputFile==null || mOutputFile.isEmpty()){ mLogger.log("Please provide the input and the output file",mLogger.ERROR_MESSAGE_LEVEL); this.printShortVersion(); System.exit(1); } String result = this.parseInputFiles( mInputFiles, mInputFormat, mOutputFormat ); //write out the result to the output file this.toFile( mOutputFile, result ); } /** * Increments the logging level by 1. */ public void incrementLogging(){ mLoggingLevel++; } /** * Decrements the logging level by 1. */ public void decrementLogging(){ mLoggingLevel--; } /** * Returns the logging level. * * @return the logging level. */ public int getLoggingLevel(){ return mLoggingLevel; } /** * Parses the input files in the input format and returns a String in the * output format. * * @param inputFiles list of input files that need to be converted * @param inputFormat input format of the input files * @param outputFormat output format of the output file * * @return String in output format * * @throws java.io.IOException */ public String parseInputFiles( List inputFiles, String inputFormat, String outputFormat ) throws IOException{ //sanity check if ( inputFiles == null || inputFiles.isEmpty() ){ throw new IOException( "Input files not specified. 
Specify the --input option" ); } mLogger.log( "Input format detected is " + inputFormat , LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Output format detected is " + outputFormat , LogManager.DEBUG_MESSAGE_LEVEL ); //check if support for backward compatibility applies boolean backwardCompatibility = mInputFormat.equals( SCClient.TEXT_FORMAT ) && mOutputFormat.equals( SCClient.XML_FORMAT ) ; if( backwardCompatibility ){ return parseInputFilesForBackwardCompatibility( inputFiles, inputFormat, outputFormat ); } //sanity check for output format if ( !outputFormat.equals( SCClient.DEFAULT_OUTPUT_FORMAT )){ throw new RuntimeException( "Only XML3 output format is currently supported"); } SiteStore result = new SiteStore(); for( String inputFile : inputFiles ){ //switch on input format. if( inputFormat.equals( "XML" ) ){ SiteCatalog catalog = null; /* load the catalog using the factory */ try{ mProps.setProperty( "pegasus.catalog.site.file", inputFile ); mProps.setProperty( SiteCatalog.c_prefix, mInputFormat ); catalog = SiteFactory.loadInstance( mProps ); /* load all sites in site catalog */ List s = new ArrayList(1); s.add( "*" ); mLogger.log( "Loaded " + catalog.load( s ) + " number of sites ", LogManager.DEBUG_MESSAGE_LEVEL ); /* query for the sites, and print them out */ mLogger.log( "Sites loaded are " + catalog.list( ) , LogManager.DEBUG_MESSAGE_LEVEL ); for( String site : catalog.list() ){ result.addEntry( catalog.lookup( site ) ); } } finally{ /* close the connection */ try{ catalog.close(); }catch( Exception e ){} } }//end of input format xml else if ( inputFormat.equals( "Text" ) ){ //do a two step process. //1. convert to PoolConfig //2. convert to SiteCatalogEntry PoolConfig config = this.getTextToPoolConfig( inputFile ); //iterate through each entry for( Iterator it = config.getSites().values().iterator(); it.hasNext(); ){ SiteInfo s = (SiteInfo)it.next(); //convert and add to site store result.addEntry( SiteInfo2SiteCatalogEntry.convert( s , mLogger ) ); } }//end of input format Text }//end of iteration through input files. return result.toXML(); } /** * Parses the input files in the input format and returns a String in the old XML * output format. * * @param inputFiles list of input files that need to be converted * @param inputFormat input format of the input files * @param outputFormat output format of the output file * * @return String in output format ( old XML ) * * @throws java.io.IOException */ private String parseInputFilesForBackwardCompatibility( List inputFiles, String inputFormat, String outputFormat ) { PoolConfig result = new PoolConfig(); for( String inputFile : inputFiles ){ PoolConfig config = this.getTextToPoolConfig( inputFile ); result.add( config ); } return this.toXML( result ); } /** * Returns the short help. 
 *
 *
 */
    public void printShortVersion() {
        String text =
            "\n $Id: SCClient.java 3532 2011-04-20 22:51:51Z prasanth $ " +
            "\n " + getGVDSVersion() +
            "\n Usage: pegasus-sc-converter [-Dprop [..]] -i <list of input files> -o <output file to write> " +
            "\n [-I <input format>] [-O <output format>] [-c <path to property file>] [-v] [-q] [-V] [-h]\n" ;

        System.out.print(text);
    }

    public void printLongVersion() {
        String text =
           "\n $Id: SCClient.java 3532 2011-04-20 22:51:51Z prasanth $ " +
           "\n " + getGVDSVersion() +
           "\n pegasus-sc-converter - Parses the site catalogs in old format ( Text and XML3 ) and generates site catalog in new format ( XML3 )" +
           "\n " +
           "\n Usage: pegasus-sc-converter [-Dprop [..]] --input <list of input files> --output <output file to write> " +
           "\n [--iformat <input format>] [--oformat <output format>] [--conf <path to property file>] [--verbose] [--quiet] [--version] [--help]" +
           "\n" +
           "\n" +
           "\n Mandatory Options " +
           "\n" +
           "\n -i <list of input files> |--input   comma separated list of input files to convert " +
           "\n -o <output file>         |--output  the output file to which the output needs to be written to." +
           "\n" +
           "\n" +
           "\n Other Options " +
           "\n" +
           "\n -I <input format>  |--iformat the input format for the files. Can be [XML, Text] " +
           "\n -O <output format> |--oformat the output format of the file. Usually [XML3] " +
           "\n -c <path to file>  |--conf    path to property file" +
           "\n -v |--verbose increases the verbosity of messages about what is going on" +
           "\n -q |--quiet   decreases the verbosity of messages about what is going on" +
           "\n -V |--version displays the version of the Pegasus Workflow Planner" +
           "\n -h |--help    generates this help." +
           "\n" +
           "\n" +
           "\n Deprecated Options " +
           "\n" +
           "\n --text | -t  To convert an xml site catalog file to the multiline site catalog file." +
           "\n              Use --iformat instead " +
           "\n" +
           "\n --files | -f The local text site catalog file|files to be converted to " +
           "\n              xml or text. This file needs to be in multiline textual " +
           "\n              format not the single line or in xml format if converting " +
           "\n              to text format. See $PEGASUS_HOME/etc/sample.sites.txt. " +
           "\n" +
           "\n" +
           "\n Example Usage " +
           "\n" +
           "\n pegasus-sc-converter -i sites.xml -I XML -o sites.xml.new -O XML3 -vvvvv" +
           "\n" +
           "\n" +
           "\n Deprecated Usage. Exists only for backward compatibility " +
           "\n" +
           "\n pegasus-sc-converter --files sites.txt --output sites.xml\n" ;

        System.out.print(text);
    }

    /**
     * Generates the old site catalog object, reading in from a text file.
     *
     * @param file text file to parse.
     *
     * @return PoolConfig
     */
    public PoolConfig getTextToPoolConfig( String file ) {
        PoolConfig result = new PoolConfig();
        try {
            mLogger.log( "Reading " + file, LogManager.INFO_MESSAGE_LEVEL);
            SiteCatalogTextParser p = new SiteCatalogTextParser( new FileReader( file ) );
            result.add(p.parse());
            mLogger.log( "Reading " + file + " -DONE", LogManager.INFO_MESSAGE_LEVEL);
        }
        catch ( ScannerException pce ) {
            mLogger.log( file + ": " + pce.getMessage() , LogManager.ERROR_MESSAGE_LEVEL);
            mLogger.log( " ignoring rest, skipping to next file", LogManager.ERROR_MESSAGE_LEVEL );
        }
        catch ( IOException ioe ) {
            mLogger.log( file + ": " + ioe.getMessage() , LogManager.ERROR_MESSAGE_LEVEL);
            mLogger.log("ignoring rest, skipping to next file", LogManager.ERROR_MESSAGE_LEVEL );
        }
        catch ( Exception e ) {
            mLogger.log( file + ": " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL );
            mLogger.log("ignoring rest, skipping to next file", LogManager.ERROR_MESSAGE_LEVEL );
        }
        return result;
    }

    /**
     * Returns the XML description of the contents of the PoolConfig
     * object passed, conforming to the pool config schema found at
     * http://pegasus.isi.edu/schema/sc-2.0.xsd.
     *
     * @param cfg the PoolConfig object whose xml description is
     *            desired.
     *
     * @return the xml description.
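     *
     * <p>
     * A sketch of the envelope, assuming the root element and its attributes
     * follow the sc-2.0 schema referenced above (the details here are
     * assumptions, not taken from this source):
     * <pre>
     *   &lt;sitecatalog version="2.0" ...&gt;
     *     ... output of cfg.toXML() ...
     *   &lt;/sitecatalog&gt;
     * </pre>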
*/ public String toXML( PoolConfig cfg ) { String output = "\n"; output += "\n"; output += "\n"; output += "\n"; output += cfg.toXML(); output += ""; return output; } /** * Returns the String description of the contents of PoolConfig * object passed. * * @param cfg the PoolConfig object whose description is * desired. * * @return the String description. */ public String toMultiLine( PoolConfig cfg ) { String output = "#Text version of site catalog\n"; output += "#Generated by SCClient\n"; output += cfg.toMultiLine(); output += "\n"; return output; } /** * Writes out to a file, a string. * * @param filename the fully qualified path name to the file. * @param output the text that needs to be written to the file. * * @throws IOException */ public void toFile( String filename, String output ) throws IOException { if( filename == null ){ throw new IOException( "Please specify a file to write the output to using --output option "); } File outfile = new File( filename ); PrintWriter pw = new PrintWriter( new BufferedWriter( new FileWriter( outfile ) ) ); pw.println( output ); pw.close(); mLogger.log( "Written out the converted file to " + filename, LogManager.CONSOLE_MESSAGE_LEVEL ); } public static void main( String[] args ) throws Exception { SCClient me = new SCClient(); int result = 0; double starttime = new Date().getTime(); double execTime = -1; try{ me.initialize(args); me.executeCommand( ); } catch ( IOException ioe ){ me.log( convertException(ioe,me.getLoggingLevel()), LogManager.FATAL_MESSAGE_LEVEL); result = 1; } catch ( FactoryException fe){ me.log( convertException(fe, me.getLoggingLevel()) , LogManager.FATAL_MESSAGE_LEVEL); result = 2; } catch ( Exception e ) { //unaccounted for exceptions me.log(convertException(e,me.getLoggingLevel()), LogManager.FATAL_MESSAGE_LEVEL ); result = 3; } finally { double endtime = new Date().getTime(); execTime = (endtime - starttime)/1000; } // warn about non zero exit code if ( result != 0 ) { me.log("Non-zero exit-code " + result, LogManager.WARNING_MESSAGE_LEVEL ); } else{ //log the time taken to execute me.log("Time taken to execute is " + execTime + " seconds", LogManager.INFO_MESSAGE_LEVEL); } me.log( "Exiting with exitcode " + result, LogManager.DEBUG_MESSAGE_LEVEL ); me.mLogger.logEventCompletion(); System.exit(result); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/TCConverter.java0000644000175000017500000006400211757531137025613 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.client; import java.io.File; import java.io.IOException; import java.sql.SQLException; import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.StringTokenizer; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.TransformationFactory; import edu.isi.pegasus.planner.catalog.transformation.TransformationFactoryException; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.TransformationStore; import edu.isi.pegasus.planner.catalog.transformation.impl.CreateTCDatabase; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; /** * A client to convert transformation catalog between different formats. * * @author Prasanth Thomas * @version $Revision: 4507 $ */ public class TCConverter extends Executable { /** * The default database . */ private static String DEFAULT_DATABASE = "MySQL"; /** * The database format. */ private static String DATABASE_FORMAT = "Database"; /** * The File format. */ private static String FILE_FORMAT = "File"; /** * The textual format. */ private static String TEXT_FORMAT = "Text"; /** * The supported transformation formats. */ private static final String[] SUPPORTED_TRANSFORMATION_FORMAT = {TEXT_FORMAT ,FILE_FORMAT,DATABASE_FORMAT}; /** * List of sql initialization files */ private static final String [] TC_INITIALIZATION_FILES ={"create-my-init.sql","create-my-tc.sql"}; /** * The input files. */ private List mInputFiles; /** * The output file that is written out. */ private String mOutputFile; /** * The output format for the transformation catalog. */ private String mOutputFormat; /** * The input format for the transformation catalog. */ private String mInputFormat; /** * The database type. */ private String mDatabaseURL; /** * The database type. */ private String mDatabase; /** * The database name. */ private String mDatabaseName; /** * The database user name. */ private String mDatabaseUserName; /** * The database user password. */ private String mDatabasePassword; /** * The database host . */ private String mDatabaseHost; /** * Denotes the logging level that is to be used for logging the messages. */ private int mLoggingLevel; /** * The default constructor. */ public TCConverter() { super(); } protected void initialize(String[] opts){ super.initialize(opts); //the output format is whatever user specified in the properties mOutputFormat = mProps.getTCMode(); mInputFormat = TCConverter.TEXT_FORMAT; mDatabase = TCConverter.DEFAULT_DATABASE; mDatabaseHost ="localhost"; mInputFiles = null; mOutputFile = null; mLoggingLevel = LogManager.WARNING_MESSAGE_LEVEL; } /** * Sets up the logging options for this class. Looking at the properties * file, sets up the appropriate writers for output and stderr. */ protected void setupLogging(){ //setup the logger for the default streams. 
mLogger = LogManagerFactory.loadSingletonInstance( mProps ); mLogger.logEventStart( "event.pegasus.pegasus-tc-converter", "pegasus.version", mVersion ); } /** * Loads all the properties * that would be needed * by the Toolkit classes */ public void loadProperties() { } /** * Generates the list of valid options for the tc-converter client * * @return LongOpt[] list of valid options */ public LongOpt[] generateValidOptions() { LongOpt[] longopts = new LongOpt[ 13 ]; longopts[ 0 ] = new LongOpt( "input", LongOpt.REQUIRED_ARGUMENT, null, 'i' ); longopts[ 1 ] = new LongOpt( "iformat", LongOpt.REQUIRED_ARGUMENT, null, 'I' ); longopts[ 2 ] = new LongOpt( "output", LongOpt.REQUIRED_ARGUMENT, null, 'o' ); longopts[ 3 ] = new LongOpt( "oformat", LongOpt.REQUIRED_ARGUMENT, null, 'O' ); longopts[ 4 ] = new LongOpt( "db-user-name", LongOpt.REQUIRED_ARGUMENT, null, 'N' ); longopts[ 5 ] = new LongOpt( "db-user-password", LongOpt.REQUIRED_ARGUMENT, null, 'P' ); longopts[ 6 ] = new LongOpt( "db-url", LongOpt.REQUIRED_ARGUMENT, null, 'U' ); longopts[ 7 ] = new LongOpt( "db-host", LongOpt.REQUIRED_ARGUMENT, null, 'H' ); longopts[ 8 ] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' ); longopts[ 9 ] = new LongOpt( "version", LongOpt.NO_ARGUMENT, null, 'V' ); longopts[ 10 ] = new LongOpt( "verbose", LongOpt.NO_ARGUMENT, null, 'v' ); longopts[ 11 ] = new LongOpt( "quiet", LongOpt.NO_ARGUMENT, null, 'q' ); longopts[ 12 ] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' ); return longopts; } /** * Call the correct commands depending on options. * @param opts Command options */ public void executeCommand() throws IOException { String[] opts = getCommandLineOptions(); if(opts.length == 0){ mLogger.log("Please provide the required options.",LogManager.ERROR_MESSAGE_LEVEL); this.printShortVersion(); System.exit(1); } LongOpt[] longOptions = generateValidOptions(); Getopt g = new Getopt( "TCConverter", opts, "hVvqI:i:O:o:U:P:N:H:c:", longOptions, false ); int option = 0; int noOfOptions = 0; while ( ( option = g.getopt() ) != -1 ) { switch ( option ) { case 'i': //input StringTokenizer str = new StringTokenizer( g.getOptarg(), "," ); mInputFiles = new ArrayList( str.countTokens() ); while ( str.hasMoreTokens() ) { mInputFiles.add( str.nextToken() ); } break; case 'I': //iformat mInputFormat = g.getOptarg(); break; case 'o': //output mOutputFile = g.getOptarg(); break; case 'O': //oformat mOutputFormat = g.getOptarg(); break; case 'N': //name mDatabaseUserName = g.getOptarg(); break; case 'P': //password mDatabasePassword = g.getOptarg(); break; case 'U': //url mDatabaseURL = g.getOptarg(); break; case 'H': //host mDatabaseHost = g.getOptarg(); break; case 'h': //help printLongVersion(); System.exit( 0 ); break; case 'V': //version System.out.println(getGVDSVersion()); System.exit( 0 ); break; case 'v': //Verbose mode incrementLogging(); break; case 'q': //Quiet mode decrementLogging(); break; case 'c': //do nothing break; default: mLogger.log( "Unrecognized option or Invalid argument to option : " + (char)g.getOptopt(), LogManager.FATAL_MESSAGE_LEVEL ); printShortVersion(); System.exit( 1 ); } } if(getLoggingLevel() >= 0){ //set the logging level only if -v was specified //else bank upon the the default logging level mLogger.setLevel(getLoggingLevel()); }else{ //set log level to FATAL only mLogger.setLevel( LogManager.FATAL_MESSAGE_LEVEL ); } convertTC(); } /** * Increments the logging level by 1. */ public void incrementLogging(){ mLoggingLevel++; } /** * Decrements the logging level by 1. 
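     * Each <code>-q|--quiet</code> option seen on the command line results in
     * one call, mirroring how <code>-v|--verbose</code> invokes
     * <code>incrementLogging()</code>.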
*/ public void decrementLogging(){ mLoggingLevel--; } /** * Returns the logging level. * * @return the logging level. */ public int getLoggingLevel(){ return mLoggingLevel; } /** * Converts transformation catalog from one format to another * @throws IOException */ private void convertTC() throws IOException{ mLogger.log( "Input format detected is " + mInputFormat , LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Output format detected is " + mOutputFormat , LogManager.DEBUG_MESSAGE_LEVEL ); //check if format is supported if(!isSupportedFormat(mInputFormat)){ StringBuffer error = new StringBuffer(); error.append( "Format not supported ! The supported input formats are [" ); for( String format : SUPPORTED_TRANSFORMATION_FORMAT ){ error.append( format ).append( " " ); } error.append( "]" ); throw new RuntimeException( error.toString() ); } if(!isSupportedFormat(mOutputFormat)){ StringBuffer error = new StringBuffer(); error.append( "Format not supported ! The supported output formats are [" ); for( String format : SUPPORTED_TRANSFORMATION_FORMAT ){ error.append( format ).append( " " ); } error.append( "]" ); throw new RuntimeException( error.toString() ); } TransformationStore result = this.convertTCEntryFrom( mInputFiles, mInputFormat ); //write out the result to the output file this.convertTCEntryTo( result ,mOutputFormat ,mOutputFile); } /** * Parses the input files in the input format and returns the output as a TransformationStore instance * * @param inputFiles list of input files that need to be converted * @param inputFormat input format of the input files * * @return TransformationStore reference to the TransformationStore object , null if no transformation catalog entry exists. * * @throws java.io.IOException */ private TransformationStore convertTCEntryFrom( List inputFiles, String inputFormat ) throws IOException{ //sanity check if(!inputFormat.equals(DATABASE_FORMAT)){ if ( inputFiles == null || inputFiles.isEmpty() ){ throw new IOException( "Input files not specified. Specify the --input option" ); } }else { // Checks if db values are passed,else take the values from the properties file if(mDatabaseURL != null && mDatabaseUserName != null && mDatabasePassword != null){ mProps.setProperty( "pegasus.catalog.transformation.db", mDatabase ); mProps.setProperty( "pegasus.catalog.transformation.db.driver", mDatabase ); mProps.setProperty( "pegasus.catalog.transformation.db.url", mDatabaseURL ); mProps.setProperty( "pegasus.catalog.transformation.db.user", mDatabaseUserName ); mProps.setProperty( "pegasus.catalog.transformation.db.password", mDatabasePassword ); } } TransformationStore result = new TransformationStore(); List entries = null; mProps.setProperty( "pegasus.catalog.transformation", inputFormat ); if(inputFormat.equals(DATABASE_FORMAT)){ entries = parseTC(mProps); if(entries != null){ for( TransformationCatalogEntry site : entries ){ result.addEntry( site ); } } }else{ // Sanity check for( String inputFile : inputFiles ){ File input = new File(inputFile); if(!input.canRead()){ throw new IOException( "File not found or cannot be read." + inputFile ); } } for( String inputFile : inputFiles ){ mProps.setProperty( "pegasus.catalog.transformation.file", inputFile ); entries = parseTC(mProps); if(entries != null){ for( TransformationCatalogEntry site : entries ){ result.addEntry( site ); } } }//end of iteration through input files. 
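            // result now aggregates the entries parsed from every input file
            // (the database branch above populates it in one shot instead)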
} return result; } /** * Parses the input format specified in the properties file and returns list of TransfromationCatalogEntry * @param pegasusProperties input format specified in the properties file * @return list of TransfromationCatalogEntry */ private List parseTC(PegasusProperties pegasusProperties) { //switch on input format. TransformationCatalog catalog = null; List entries = null; try{ /* load the catalog using the factory */ catalog = TransformationFactory.loadInstance( pegasusProperties ); /* load all sites in transformation catalog */ entries = (List )catalog.getContents(); mLogger.log( "Loaded " + entries.size() + " number of transformations ", LogManager.DEBUG_MESSAGE_LEVEL ); /* query for the sites, and print them out */ mLogger.log( "Transformation loaded are " + catalog.getContents( ) , LogManager.DEBUG_MESSAGE_LEVEL ); } catch (TransformationFactoryException ife){ throw ife; } catch (Exception e) { throw new RuntimeException("Failed to parse transformation catalog " + e.getMessage()); } finally{ /* close the connection */ if(catalog != null){ catalog.close(); } } return entries; } /** * Checks if it is a supported transformation catalog format * @param format the format * @return true , if format is supported, false otherwise. */ private boolean isSupportedFormat(String format){ for(String sformat : SUPPORTED_TRANSFORMATION_FORMAT ){ if(sformat.equals(format)) return true; } return false; } /** * Prints the short help. * * */ public void printShortVersion() { String text = "\n $Id: TCConverter.java 4507 2011-08-29 16:13:32Z rynge $ " + "\n " + getGVDSVersion() + "\n Usage: pegasus-tc-converter [-Dprop [..]] -I -O " + "\n [-i ] [-o ] " + "\n [-N ] [-P ] [-U ] [-H ] " + "\n [-c ] [-v] [-q] [-V] [-h] \n Type 'pegasus-tc-converter --help' for more help."; System.out.println(text); } public void printLongVersion() { StringBuffer text = new StringBuffer(); text.append("\n $Id: TCConverter.java 4507 2011-08-29 16:13:32Z rynge $ " ); text.append("\n " + getGVDSVersion() ); text.append("\n pegasus-tc-converter - Parses the transformation catalogs in given input format ( Text ,File ,Database ) and generates transformation catalog into given output format ( Text ,File ,Database )" ); text.append("\n " ); text.append("\n Usage: pegasus-tc-converter [-Dprop [..]] [--iformat ] [--oformat ]" ); text.append("\n [--input ] [--output ] "); text.append("\n [--db-user-name ] [--db-user-pwd ] [--db-url ] [--db-host ]"); text.append("\n [--conf ] [--verbose] [--quiet][--Version] [--help]" ); text.append("\n" ); text.append("\n" ); text.append("\n Mandatory Options " ); text.append("\n" ); text.append("\n -I |--iformat the input format for the files . Can be [Text ,File ,Database] " ); text.append("\n -O |--oformat the output format of the file. Can be [Text ,File ,Database] " ); text.append("\n -i |--input comma separated list of input files to convert.This option is mandatory when input format is Text or file " ); text.append("\n -o |--output the output file to which the output needs to be written to. 
This option is mandatory when the output format is Text or File " ); text.append("\n" ); text.append("\n" ); text.append("\n Other Options " ); text.append("\n" ); text.append("\n -N <database user name> |--db-user-name the database user name " ); text.append("\n -P <database user password> |--db-user-pwd the database user password " ); text.append("\n -U <database url> |--db-url the database url " ); text.append("\n -H <database host> |--db-host the database host " ); text.append("\n -c <path to property file> |--conf path to the property file" ); text.append("\n -v |--verbose increases the verbosity of messages about what is going on" ); text.append("\n -q |--quiet decreases the verbosity of messages about what is going on" ); text.append("\n -V |--version displays the version of the Pegasus Workflow Planner" ); text.append("\n -h |--help generates this help." ); text.append("\n" ); text.append("\n" ); text.append("\n Example Usage " ); text.append("\n Text to File format conversion: " ); text.append(" pegasus-tc-converter -i tc.txt -I Text -o tc.data -O File -v"); text.append("\n File to Database (new) format conversion: " ); text.append(" pegasus-tc-converter -i tc.data -I File -N mysql_user -P mysql_pwd -U jdbc:mysql://localhost:3306/tc -H localhost -O Database -v" ); text.append("\n Database (existing, specified in the properties file) to Text format conversion:" ); text.append(" pegasus-tc-converter -I Database -o tc.txt -O Text -vvvvv"); System.out.println(text.toString()); } /** * Converts the TransformationStore to the given output format. * * @param output the reference to the TransformationStore object * @param format the output format to convert to. * @param filename the output file name, null if the format is Database. * * @throws IOException */ private void convertTCEntryTo(TransformationStore output, String format, String filename) throws IOException { TransformationCatalog catalog = null; if (format.equals(FILE_FORMAT) || format.equals(TEXT_FORMAT)) { if (filename == null) { throw new IOException("Please specify a file to write the output to using the --output option "); } mProps.setProperty("pegasus.catalog.transformation.file", filename); } else { if (mDatabaseURL != null && mDatabaseUserName != null && mDatabasePassword != null) { CreateTCDatabase jdbcTC; try { jdbcTC = new CreateTCDatabase(mDatabase, mDatabaseURL, mDatabaseUserName, mDatabasePassword, mDatabaseHost); } catch (ClassNotFoundException e1) { throw new RuntimeException("Failed to load driver " + mDatabase); } catch (SQLException e1) { throw new RuntimeException("Failed to get connection " + mDatabaseURL); } mDatabaseName = jdbcTC.getDatabaseName(mDatabaseURL); if (mDatabaseName != null) { try { if (!jdbcTC.checkIfDatabaseExists(mDatabaseName)) { if (!jdbcTC.createDatabase(mDatabaseName)) { throw new RuntimeException("Failed to create database " + mDatabaseName); } String initFilePath = mProps.getSharedDir() + File.separator + "sql" + File.separator; for (String name : TC_INITIALIZATION_FILES) { if (!jdbcTC.initializeDatabase(mDatabaseName, initFilePath + name)) { jdbcTC.deleteDatabase(mDatabaseName); throw new RuntimeException("Failed to initialize database " + mDatabaseName); } } mProps.setProperty("pegasus.catalog.transformation.db", mDatabase); mProps.setProperty("pegasus.catalog.transformation.db.driver", mDatabase); mProps.setProperty("pegasus.catalog.transformation.db.url", mDatabaseURL); mProps.setProperty("pegasus.catalog.transformation.db.user", mDatabaseUserName); mProps.setProperty("pegasus.catalog.transformation.db.password", mDatabasePassword); } else { mLogger.log("Database " + mDatabaseName + " already exists", 
LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException("Cannot overwrite an existing database " + mDatabaseName); } } catch (SQLException e) { mLogger.log("Failed connection with the database " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException("Connection failed " + mDatabaseName); } } else { mLogger.log("Unable to detect database name in the URL", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException("Unable to detect database name in the URL " + mDatabaseURL); } } } mProps.setProperty( "pegasus.catalog.transformation", format ); catalog = TransformationFactory.loadInstance( mProps ); List entries = output.getEntries(null, (TCType)null); for(TransformationCatalogEntry tcentry:entries){ try { // Related to JIRA PM-228 if(tcentry.getType().equals(TCType.STATIC_BINARY)){ tcentry.setType(TCType.STAGEABLE); } catalog.insert(tcentry); } catch (Exception e) { mLogger.log( "Failed to add transformation " + tcentry.toString() , LogManager.ERROR_MESSAGE_LEVEL ); } } //close the connection to the catalog catalog.close(); mLogger.log( "Successfully converted Transformation Catalog from "+ mInputFormat +" to " + mOutputFormat , LogManager.CONSOLE_MESSAGE_LEVEL ); if( filename != null ){ mLogger.log( "The output transformation catalog is in file "+ new java.io.File(filename).getAbsolutePath() , LogManager.CONSOLE_MESSAGE_LEVEL ); } } /** * The main function. * * @param args arguments passed at runtime * * @throws java.lang.Exception */ public static void main( String[] args ) throws Exception { TCConverter me = new TCConverter(); int result = 0; double starttime = new Date().getTime(); double execTime = -1; try{ me.initialize(args); me.executeCommand(); } catch ( IOException ioe ){ me.log(convertException( ioe,me.mLogger.getLevel()), LogManager.FATAL_MESSAGE_LEVEL); result = 1; } catch ( FactoryException fe){ me.log( convertException(fe,me.mLogger.getLevel()) , LogManager.FATAL_MESSAGE_LEVEL); result = 2; } catch ( Exception e ) { //unaccounted for exceptions me.log(convertException(e,me.mLogger.getLevel()), LogManager.FATAL_MESSAGE_LEVEL ); result = 3; } finally { double endtime = new Date().getTime(); execTime = (endtime - starttime)/1000; } // warn about non zero exit code if ( result != 0 ) { me.log("Non-zero exit-code " + result, LogManager.WARNING_MESSAGE_LEVEL ); } else{ //log the time taken to execute me.log("Time taken to execute is " + execTime + " seconds", LogManager.INFO_MESSAGE_LEVEL); } me.log( "Exiting with exitcode " + result, LogManager.DEBUG_MESSAGE_LEVEL ); me.mLogger.logEventCompletion(); System.exit(result); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/VDS2PegasusProperties.java0000644000175000017500000011310611757531137027540 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.client; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.common.util.Currently; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; import java.io.File; import java.io.IOException; import java.io.FileInputStream; import java.io.PrintWriter; import java.io.FileWriter; import java.util.Properties; import java.util.Enumeration; import java.util.Iterator; import java.util.Map; import java.util.TreeMap; import java.util.HashMap; import java.util.regex.Pattern; /** * A Central Properties class that keeps track of all the properties used by * Pegasus. All other classes access the methods in this class to get the value * of the property. It access the VDSProperties class to read the property file. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 4507 $ * * @see org.griphyn.common.util.VDSProperties */ public class VDS2PegasusProperties extends Executable { /** * The handle to the internal map, that maps vds properties to pegasus * properties. */ private static Map mVDSToPegasusPropertiesTable; /** * An internal table that resolves the old transfer mode property, to * the corresponding transfer implementation. */ private static Map mTXFERImplTable; /** * An internal table that resolves the old transfer mode property, to * the corresponding transfer refiner. */ private static Map mTXFERRefinerTable; /** * Store the regular expressions necessary to match the * properties. */ private static final String mRegexExpression[]={ "(vds.replica.)([a-zA-Z_0-9]*[-]*)+(.prefer.stagein.sites)", "(vds.replica.)([a-zA-Z_0-9]*[-]*)+(.ignore.stagein.sites)", "(vds.site.selector.env.)([a-zA-Z_0-9]*[-]*)+", "(vds.exitcode.path.)([a-zA-Z_0-9]*[-]*)+", "(vds.partitioner.horizontal.bundle.)([a-zA-Z_0-9]*[-]*)+", "(vds.partitioner.horizontal.collapse.)([a-zA-Z_0-9]*[-]*)+", "(vds.transfer.rft.)([a-zA-Z_0-9]*[-]*)+", "(vds.transfer.crft.)([a-zA-Z_0-9]*[-]*)+", //"(vds.db.)([a-zA-Z_0-9]*[-]*)+(.)([a-zA-Z_0-9.]*[-]*)+" "(vds.db.tc.driver)[.]+([a-zA-Z_0-9]*[-]*)+", "(vds.db.ptc.driver)[.]+([a-zA-Z_0-9]*[-]*)+", "(vds.db.\\*.driver)[.]+([a-zA-Z_0-9]*[-]*)+", }; /** * Replacement 2 D Array for the above properties. */ private static final String mStarReplacements [][] ={ { "vds.replica.", "pegasus.selector.replica." }, { "vds.replica.", "pegasus.selector.replica." }, { "vds.site.selector.env.", "pegasus.selector.site.env."}, { "vds.exitcode.path.", "pegasus.exitcode.path." }, { "vds.partitioner.horizontal.bundle", "pegasus.partitioner.horizontal.bundle."}, { "vds.partitioner.horizontal.collapse", "pegasus.partitioner.horizontal.collapse."}, { "vds.transfer.rft.", "pegasus.transfer.rft."}, { "vds.transfer.crft.", "pegasus.transfer.crft."}, //{ "vds.db.", "pegasus.db." } { "vds.db.tc.driver.", "pegasus.catalog.transformation.db." }, { "vds.db.ptc.driver.", "pegasus.catalog.provenance.db." }, { "vds.db.\\*.driver.", "pegasus.catalog.*.db." }, }; /** * Stores compiled patterns at first use, quasi-Singleton. */ private static Pattern mCompiledPatterns[] = null; /** * The input directory containing the kickstart records. */ private String mInputFile; /** * The output directory where to generate the ploticus output. */ private String mOutputDir; /** * The default constructor. Compiles the patterns only once. 
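* <p>The patterns compiled here are the mRegexExpression entries above; they match key-specific VDS properties (for example the vds.exitcode.path. and vds.transfer.rft. families) that cannot be handled by the fixed lookup table and are instead rewritten by matchForStarProperties().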
*/ public VDS2PegasusProperties(){ // initialize the compiled expressions once if ( mCompiledPatterns == null ) { mCompiledPatterns = new Pattern[ mRegexExpression.length ]; for (int i = 0; i < mRegexExpression.length; i++) mCompiledPatterns[i] = Pattern.compile( mRegexExpression[i] ); } } /** * Initializes the tool and delegates command line parsing to the base class. * * @param opts the command line arguments. */ public void initialize(String[] opts){ super.initialize(opts); } /** * Singleton access to the transfer implementation table. * Contains the mapping of the old transfer property value to the * new transfer implementation property value. * * @return map */ private static Map transferImplementationTable(){ //singleton access if(mTXFERImplTable == null){ mTXFERImplTable = new HashMap(13); mTXFERImplTable.put("Bundle","Transfer"); mTXFERImplTable.put("Chain","Transfer"); mTXFERImplTable.put("CRFT","CRFT"); mTXFERImplTable.put("GRMS","GRMS"); mTXFERImplTable.put("multiple","Transfer"); mTXFERImplTable.put("Multiple","Transfer"); mTXFERImplTable.put("MultipleTransfer","Transfer"); mTXFERImplTable.put("RFT","RFT"); mTXFERImplTable.put("single","OldGUC"); mTXFERImplTable.put("Single","OldGUC"); mTXFERImplTable.put("SingleTransfer","OldGUC"); mTXFERImplTable.put("StorkSingle","Stork"); mTXFERImplTable.put("T2","T2"); } return mTXFERImplTable; } /** * Singleton access to the transfer refiner table. * Contains the mapping of the old transfer property value to the * new transfer refiner property value. * * @return map */ private static Map transferRefinerTable(){ //singleton access if(mTXFERRefinerTable == null){ mTXFERRefinerTable = new HashMap(13); mTXFERRefinerTable.put("Bundle","Bundle"); mTXFERRefinerTable.put("Chain","Chain"); mTXFERRefinerTable.put("CRFT","Default"); mTXFERRefinerTable.put("GRMS","GRMS"); mTXFERRefinerTable.put("multiple","Default"); mTXFERRefinerTable.put("Multiple","Default"); mTXFERRefinerTable.put("MultipleTransfer","Default"); mTXFERRefinerTable.put("RFT","Default"); mTXFERRefinerTable.put("single","SDefault"); mTXFERRefinerTable.put("Single","SDefault"); mTXFERRefinerTable.put("SingleTransfer","SDefault"); mTXFERRefinerTable.put("StorkSingle","Single"); mTXFERRefinerTable.put("T2","Default"); } return mTXFERRefinerTable; } /** * Singleton access to the table that maps old VDS property names to the * corresponding Pegasus property names. 
* * @return map */ private static Map vdsToPegasusPropertiesTable(){ //return the already existing one if possible if( mVDSToPegasusPropertiesTable != null ){ return mVDSToPegasusPropertiesTable; } //PROPERTIES RELATED TO SCHEMAS associate( "vds.schema.dax", "pegasus.schema.dax" ); associate( "vds.schema.pdax", "pegasus.schema.pdax" ); associate( "vds.schema.poolconfig", "pegasus.schema.sc" ); associate( "vds.schema.sc", "pegasus.schema.sc" ); associate( "vds.db.ptc.schema", "pegasus.catalog.provenance" ); //PROPERTIES RELATED TO DIRECTORIES associate( "vds.dir.exec", "pegasus.dir.exec" ); associate( "vds.dir.storage", "pegasus.dir.storage" ); associate( "vds.dir.create.mode", "pegasus.dir.create" ); associate( "vds.dir.create", "pegasus.dir.create" ); associate( "vds.dir.timestamp.extended", "pegasus.dir.timestamp.extended" ); //PROPERTIES RELATED TO THE TRANSFORMATION CATALOG associate( "vds.tc.mode", "pegasus.catalog.transformation" ); associate( "vds.tc", "pegasus.catalog.transformation" ); associate( "vds.tc.file", "pegasus.catalog.transformation.file" ); associate( "vds.tc.mapper", "pegasus.catalog.transformation.mapper" ); //REPLICA CATALOG PROPERTIES associate( "vds.replica.mode", "pegasus.catalog.replica" ); associate( "vds.rc", "pegasus.catalog.replica" ); associate( "vds.rls.url", "pegasus.catalog.replica.url" ); associate( "vds.rc.url", "pegasus.catalog.replica.url" ); associate( "vds.rc.lrc.ignore", "pegasus.catalog.replica.lrc.ignore" ); associate( "vds.rc.lrc.restrict", "pegasus.catalog.replica.lrc.restrict" ); associate( "vds.cache.asrc", "pegasus.catalog.replica.cache.asrc" ); associate( "vds.rls.query", "" ); associate( "vds.rls.query.attrib","" ); associate( "vds.rls.exit", "" ); associate( "vds.rc.rls.timeout", "" ); //SITE CATALOG PROPERTIES associate( "vds.pool.mode", "pegasus.catalog.site" ); associate( "vds.sc", "pegasus.catalog.site" ); associate( "vds.pool.file", "pegasus.catalog.site.file" ); associate( "vds.sc.file", "pegasus.catalog.site.file" ); //PROPERTIES RELATED TO SELECTION associate( "vds.transformation.selector", "pegasus.selector.transformation" ); associate( "vds.rc.selector", "pegasus.selector.replica" ); associate( "vds.replica.selector", "pegasus.selector.replica" ); // associate( "vds.replica.*.prefer.stagein.sites", "pegasus.selector.replica.*.prefer.stagein.sites" ); associate( "vds.rc.restricted.sites", "pegasus.selector.replica.*.ignore.stagein.sites" ); // associate( "vds.replica.*.ignore.stagein.sites", "pegasus.selector.replica.*.ignore.stagein.sites" ); associate( "vds.site.selector", "pegasus.selector.site" ); associate( "vds.site.selector.path", "pegasus.selector.site.path" ); associate( "vds.site.selector.timeout", "pegasus.selector.site.timeout" ); associate( "vds.site.selector.keep.tmp", "pegasus.selector.site.keep.tmp" ); //TRANSFER MECHANISM PROPERTIES associate( "vds.transfer.*.impl", "pegasus.transfer.*.impl" ); associate( "vds.transfer.stagein.impl", "pegasus.transfer.stagein.impl" ); associate( "vds.transfer.stageout.impl", "pegasus.transfer.stageout.impl" ); associate( "vds.transfer.stagein.impl", "pegasus.transfer.inter.impl" ); associate( "vds.transfer.refiner", "pegasus.transfer.refiner" ); associate( "vds.transfer.single.quote", "pegasus.transfer.single.quote" ); associate( "vds.transfer.throttle.processes", "pegasus.transfer.throttle.processes" ); associate( "vds.transfer.throttle.streams", "pegasus.transfer.throttle.streams" ); associate( "vds.transfer.force", "pegasus.transfer.force" ); associate( 
"vds.transfer.mode.links", "pegasus.transfer.links" ); associate( "vds.transfer.links", "pegasus.transfer.links" ); associate( "vds.transfer.thirdparty.sites", "pegasus.transfer.*.thirdparty.sites" ); associate( "vds.transfer.thirdparty.pools", "pegasus.transfer.*.thirdparty.sites" ); associate( "vds.transfer.*.thirdparty.sites", "pegasus.transfer.*.thirdparty.sites" ); associate( "vds.transfer.stagein.thirdparty.sites", "pegasus.transfer.stagein.thirdparty.sites" ); associate( "vds.transfer.stageout.thirdparty.sites", "pegasus.transfer.stageout.thirdparty.sites" ); associate( "vds.transfer.inter.thirdparty.sites", "pegasus.transfer.inter.thirdparty.sites" ); associate( "vds.transfer.staging.delimiter", "pegasus.transfer.staging.delimiter" ); associate( "vds.transfer.disable.chmod.sites","pegasus.transfer.disable.chmod.sites" ); associate( "vds.transfer.proxy", "pegasus.transfer.proxy"); associate( "vds.transfer.arguments", "pegasus.transfer.arguments" ); associate( "vds.transfer.*.priority", "pegasus.transfer.*.priority" ); associate( "vds.transfer.stagein.priority", "pegasus.transfer.stagein.priority" ); associate( "vds.transfer.stageout.priority", "pegasus.transfer.stageout.priority" ); associate( "vds.transfer.inter.priority", "pegasus.transfer.inter.priority" ); associate( "vds.scheduler.stork.cred", "pegasus.transfer.stork.cred" ); //PROPERTIES RELATED TO KICKSTART AND EXITCODE associate( "vds.gridstart", "pegasus.gridstart" ); associate( "vds.gridstart.invoke.always", "pegasus.gristart.invoke.always" ); associate( "vds.gridstart.invoke.length", "pegasus.gridstart.invoke.length" ); associate( "vds.gridstart.kickstart.stat", "pegasus.gridstart.kickstart.stat" ); associate( "vds.gridstart.label", "pegasus.gristart.label" ); associate( "vds.exitcode.impl", "pegasus.exitcode.impl" ); associate( "vds.exitcode.mode", "pegasus.exitcode.scope" ); associate( "vds.exitcode", "pegasus.exitcode.scope" ); // associate( "vds.exitcode.path.[value]","pegasus.exitcode.path.[value]" ); associate( "vds.exitcode.arguments", "pegasus.exitcode.arguments" ); associate( "vds.exitcode.debug", "pegasus.exitcode.debug" ); associate( "vds.prescript.arguments", "pegasus.prescript.arguments" ); //PROPERTIES RELATED TO REMOTE SCHEDULERS associate( "vds.scheduler.remote.projects", "pegasus.remote.scheduler.projects" ); associate( "vds.scheduler.remote.queues", "pegasus.remote.scheduler.queues" ); // associate( "vds.scheduler.remote.maxwalltimes", "pegasus.remote.scheduler.maxwalltimes" ); associate( "vds.scheduler.remote.min.maxtime", "pegasus.remote.scheduler.min.maxtime" ); associate( "vds.scheduler.remote.min.maxwalltime", "pegasus.remote.scheduler.min.maxwalltime" ); associate( "vds.scheduler.remote.min.maxcputime", "pegasus.remote.scheduler.min.maxcputime" ); //PROPERTIES RELATED TO Condor and DAGMAN associate( "vds.scheduler.condor.release", "pegasus.condor.release" ); associate( "vds.scheduler.condor.remove", "pegasus.condor.remove" ); associate( "vds.scheduler.condor.arguments.quote", "pegasus.condor.arguments.quote" ); associate( "vds.scheduler.condor.output.stream", "pegasus.condor.output.stream" ); associate( "vds.scheduler.condor.error.stream", "pegasus.condor.error.stream" ); associate( "vds.scheduler.condor.retry", "pegasus.dagman.retry" ); //JOB CLUSTERING associate( "vds.exec.node.collapse", "pegasus.clusterer.nodes" ); associate( "vds.job.aggregator", "pegasus.clusterer.job.aggregator" ); associate( "vds.job.aggregator.seqexec.isgloballog", "pegasus.clusterer.job.aggregator.hasgloballog" ); 
associate( "vds.clusterer.label.key", "pegasus.clusterer.label.key" ); //MISCELLANEOUS associate( "vds.auth.gridftp.timeout", "pegasus.auth.gridftp.timeout" ); associate( "vds.submit.mode", "pegasus.submit" ); associate( "vds.job.priority", "pegasus.job.priority" ); associate( "vds.dax.callback", "pegasus.parser.dax.callback" ); associate( "vds.label.key", "pegasus.partitioner.label.key" ); associate( "vds.partitioner.label.key", "pegasus.partitioner.label.key" ); associate( "vds.partition.parser.mode", "pegasus.partitioner.parser.load" ); // associate( "vds.partitioner.horizontal.bundle.", "pegasus.partitioner.horizontal.bundle." ); // associate( "vds.partitioner.horizontal.collapse.", "pegasus.partitioner.horizontal.collapse." ); //SOME DB DRIVER PROPERTIES associate( "vds.db.*.driver", "pegasus.catalog.*.db.driver" ); associate( "vds.db.tc.driver", "pegasus.catalog.transformation.db.driver" ); associate( "vds.db.ptc.driver", "pegasus.catalog.provenance.db.driver" ); //WORK DB PROPERTIES associate( "work.db", "pegasus.catalog.work.db" ); associate( "work.db.hostname", "pegasus.catalog.work.db.hostname" ); associate( "work.db.database", "pegasus.catalog.work.db.database" ); associate( "work.db.user", "pegasus.catalog.work.db.user" ); associate( "work.db.password", "pegasus.catalog.work.db.password" ); return mVDSToPegasusPropertiesTable; } /** * Convert a VDS Properties file to Pegasus properties. * * @param input the path to the VDS Properties file. * @param directory the directory where the Pegasus properties file needs to be written out to. * * @return path to the properties file that is written. * * @exception IOException */ public String convert( String input, String directory ) throws IOException{ File dir = new File(directory); //sanity check on the directory sanityCheck( dir ); //we only want to write out the VDS properties for time being Properties ipProperties = new Properties( ); ipProperties.load( new FileInputStream(input) ); Properties vdsProperties = this.matchingSubset( ipProperties, "vds", true ); //traverse through the VDS properties and convert them to //the new names Properties temp = new Properties(); for( Iterator it = vdsProperties.keySet().iterator(); it.hasNext(); ){ String vds = ( String )it.next(); String vdsValue = (String)vdsProperties.get( vds ); String pgs = ( String )vdsToPegasusPropertiesTable().get( vds ); //if pgs is not null store the pgs with the vds value //if null then barf if( pgs == null ){ //match for star properties pgs = matchForStarProperties( vds ); if ( pgs == null ){ System.err.println("Unable to associate VDS property " + vds ); continue; } } else{ if( pgs.length() == 0 ){ //ignore continue; } } //put the pegasus property with the vds value temp.setProperty( pgs, vdsValue ); } //put the properties in temp into PegasusProperties in a sorted order //does not work, as the store method does not store it in that manner Map pegasusProperties = new TreeMap(); for( Iterator it = temp.keySet().iterator(); it.hasNext(); ){ String key = (String)it.next(); pegasusProperties.put( key, (String)temp.get( key )); } //create a temporary file in directory File f = File.createTempFile( "pegasus.", ".properties", dir ); PrintWriter pw = new PrintWriter( new FileWriter( f ) ); //the header of the file StringBuffer header = new StringBuffer(64); header.append( "############################################################################\n" ); header.append( "# PEGASUS USER PROPERTIES GENERATED FROM VDS PROPERTY FILE \n" ) .append( "# ( " + input + " ) \n" ) 
.append( "# GENERATED AT ").append( Currently.iso8601( false, true, false, new java.util.Date() )).append( "\n" ); header.append( "############################################################################" ); pw.println( header.toString() ); for( Iterator it = pegasusProperties.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = ( Map.Entry )it.next(); String line = entry.getKey() + " = " + entry.getValue(); pw.println( line ); } pw.close(); /* //the header of the file StringBuffer header = new StringBuffer(64); header.append( "############################################################################\n" ); header.append( "# PEGASUS USER PROPERTIES GENERATED FROM VDS PROPERTY FILE \n#( " + input + " ) \n" ) .append( "# ESCAPES IN VALUES ARE INTRODUCED \n"); header.append( "############################################################################" ); //create an output stream to this file and write out the properties OutputStream os = new FileOutputStream( f ); pegasusProperties.store( os, header.toString() ); os.close(); //convert the properties file into a sorted properties file convertToSorted( f, dir ); */ return f.getAbsolutePath(); } /** * Returns a matching pegasus property for a VDS star property. * * @param vds the vds property. * * @return the new Pegasus Property if found else, null. */ protected String matchForStarProperties( String vds ){ String pgs = null; // match against pattern for ( int i=0; i< mRegexExpression.length; i++ ) { //if a vds property matches against existing patterns if( mCompiledPatterns[i].matcher( vds ).matches() ){ //get the replacement value pgs = vds.replaceFirst( mStarReplacements[i][0], mStarReplacements[i][1] ); System.out.println( "The matching pegasus * property for " + vds + " is " + pgs ); break; } } return pgs; } /** * The main test program. * * @param args the arguments to the program. */ public static void main( String[] args ){ VDS2PegasusProperties me = new VDS2PegasusProperties(); int result = 0; try{ me.initialize(args); me.executeCommand(); } catch ( FactoryException fe){ me.log( fe.convertException() , LogManager.FATAL_MESSAGE_LEVEL); result = 2; } catch ( RuntimeException rte ) { //catch all runtime exceptions including our own that //are thrown that may have chained causes me.log( convertException(rte , me.mLogger.getLevel()), LogManager.FATAL_MESSAGE_LEVEL ); result = 1; } catch ( Exception e ) { //unaccounted for exceptions me.log(e.getMessage(), LogManager.FATAL_MESSAGE_LEVEL ); e.printStackTrace(); result = 3; } // warn about non zero exit code if ( result != 0 ) { me.log("Non-zero exit-code " + result, LogManager.WARNING_MESSAGE_LEVEL ); } System.exit(result); } /** * Executes the command on the basis of the options specified. * * @param args the command line options. */ public void executeCommand() { parseCommandLineArguments(getCommandLineOptions()); //sanity check on output directory mOutputDir = ( mOutputDir == null ) ? "." : mOutputDir; File dir = new File( mOutputDir ); if( dir.exists() ){ //directory already exists. 
if ( dir.isDirectory() ){ if ( !dir.canWrite() ){ throw new RuntimeException( "Cannot write out to output directory " + mOutputDir ); } } else{ //directory is a file throw new RuntimeException( mOutputDir + " is not a directory "); } } else{ dir.mkdirs(); } String output; try{ output = this.convert(mInputFile, mOutputDir ); System.out.println( "Pegasus properties written out to file " + output ); } catch( IOException ioe ){ throw new RuntimeException( "Unable to convert properties file ", ioe ); } } /** * Parses the command line arguments using GetOpt and sets the instance * members corresponding to the options passed by the * user at the command line. * * @param args the arguments passed by the user at command line. */ public void parseCommandLineArguments(String[] args){ LongOpt[] longOptions = generateValidOptions(); Getopt g = new Getopt( "properties-converter", args, "i:o:c:h", longOptions, false); g.setOpterr(false); int option = 0; while( (option = g.getopt()) != -1){ //System.out.println("Option tag " + (char)option); switch (option) { case 'i'://input this.mInputFile = g.getOptarg(); break; case 'h'://help printLongVersion(); System.exit( 0 ); return; case 'o'://output directory this.mOutputDir = g.getOptarg(); break; case 'c': //do nothing break; default: //same as help printShortVersion(); throw new RuntimeException("Incorrect option or option usage " + (char)g.getOptopt()); } } } /** * It generates the LongOpt objects which contain the valid options that the command * will accept. * * @return array of LongOpt objects, corresponding to the valid * options */ public LongOpt[] generateValidOptions(){ LongOpt[] longopts = new LongOpt[4]; longopts[0] = new LongOpt( "input", LongOpt.REQUIRED_ARGUMENT, null, 'i' ); longopts[1] = new LongOpt( "output", LongOpt.REQUIRED_ARGUMENT, null, 'o' ); longopts[2] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' ); longopts[3] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' ); return longopts; } /** * Prints out a short description of what the command does. */ public void printShortVersion(){ String text = "\n $Id: VDS2PegasusProperties.java 4507 2011-08-29 16:13:32Z rynge $ " + "\n " + getGVDSVersion() + "\n Usage: properties-converter [-Dprop [..]] -i <path to vds properties file> " + " [-o output directory] [-c <path to property file>] [-h]"; System.out.println(text); } /** * Prints the long description, displaying in detail what the various options * to the command stand for. */ public void printLongVersion(){ String text = "\n $Id: VDS2PegasusProperties.java 4507 2011-08-29 16:13:32Z rynge $ " + "\n " + getGVDSVersion() + "\n properties-converter - A tool that converts the VDS properties file to " + "\n the corresponding Pegasus properties file " + "\n Usage: properties-converter [-Dprop [..]] --input <path to vds properties file> " + "\n [--output output directory] [--conf <path to property file>] [--help] " + "\n" + "\n Mandatory Options " + "\n --input the path to the VDS properties file." + "\n Other Options " + "\n -o <output directory> |--output the output directory where to generate the pegasus property file." + "\n -c <path to property file> |--conf path to the property file" + "\n -h |--help generates this help." + "\n "; System.out.println(text); //mLogger.log(text,LogManager.INFO_MESSAGE_LEVEL); } /** * Loads all the properties that would be needed by the Toolkit classes. */ public void loadProperties(){ //empty for time being } /** * Returns the transfer implementation. * * @param property property name. * * @return the transfer implementation, * else the one specified by "pegasus.transfer.*.impl", * else the DEFAULT_TRANSFER_IMPLEMENTATION. 
*/ /* public String getTransferImplementation(String property){ String value = mProps.getProperty(property, getDefaultTransferImplementation()); if(value == null){ //check for older deprecated properties value = mProps.getProperty("pegasus.transfer"); value = (value == null)? mProps.getProperty("pegasus.transfer.mode"): value; //convert a non null value to the corresponding //transfer implementation if(value != null){ value = (String)transferImplementationTable().get(value); logDeprecatedWarning("pegasus.transfer","pegasus.transfer.*.impl and " + "pegasus.transfer.refiner"); } } //put in default if still we have a non null value = (value == null)? DEFAULT_TRANSFER_IMPLEMENTATION: value; return value; } */ /** * Returns the transfer refiner that is to be used for adding in the * transfer jobs in the workflow * * Referred to by the "pegasus.transfer.refiner" property. * * @return the transfer refiner, else the DEFAULT_TRANSFER_REFINER. * * @see #DEFAULT_TRANSFER_REFINER */ /* public String getTransferRefiner(){ String value = mProps.getProperty("pegasus.transfer.refiner"); if(value == null){ //check for older deprecated properties value = mProps.getProperty("pegasus.transfer"); value = (value == null)? mProps.getProperty("pegasus.transfer.mode"): value; //convert a non null value to the corresponding //transfer refiner if(value != null){ value = (String)transferRefinerTable().get(value); logDeprecatedWarning("pegasus.transfer","pegasus.transfer.impl and " + "pegasus.transfer.refiner"); } } //put in default if still we have a non null value = (value == null)? DEFAULT_TRANSFER_REFINER: value; return value; } */ //SOME LOGGING PROPERTIES /** * Returns the file to which all the logging needs to be directed to. * * Referred to by the "vds.log.*" property. * * @return the value of the property that is specified, else * null */ // public String getLoggingFile(){ // return mProps.getProperty("vds.log.*"); // } /** * Returns the location of the local log file where you want the messages to * be logged. Not used for the moment. * * Referred to by the "vds.log4j.log" property. * * @return the value specified in the property file,else null. */ // public String getLog4JLogFile() { // return getProperty( "vds.log.file", "vds.log4j.log" ); // } /** * Return returns the environment string specified for the local pool. If * specified the registration jobs are set with these environment variables. * * Referred to by the "vds.local.env" property * * @return the environment string for local pool in properties file if * defined, else null. */ // public String getLocalPoolEnvVar() { // return mProps.getProperty( "vds.local.env" ); // } /** * Returns a boolean indicating whether to treat the entries in the cache * files as a replica catalog or not. * * @return boolean */ // public boolean treatCacheAsRC(){ // return Boolean.parse(mProps.getProperty( "vds.cache.asrc"), // false); // } /** * Checks the destination location for existence, if it can * be created, if it is writable etc. * * @param dir is the new base directory to optionally create. * * @throws IOException in case of error while writing out files. 
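*
* <p>Illustrative behavior for a hypothetical path: sanityCheck(new File("/tmp/out")) returns silently if /tmp/out is a writable directory, creates it (including missing parents) if it does not exist, and throws an IOException if it is a plain file or cannot be written to.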
*/ protected static void sanityCheck( File dir ) throws IOException{ if ( dir.exists() ) { // location exists if ( dir.isDirectory() ) { // ok, is a directory if ( dir.canWrite() ) { // can write, all is well return; } else { // all is there, but I cannot write to dir throw new IOException( "Cannot write to existing directory " + dir.getPath() ); } } else { // exists but not a directory throw new IOException( "Destination " + dir.getPath() + " already " + "exists, but is not a directory." ); } } else { // does not exist, try to make it if ( ! dir.mkdirs() ) { throw new IOException( "Unable to create directory destination " + dir.getPath() ); } } } /** * Extracts a specific property key subset from the known properties. * The prefix may be removed from the keys in the resulting dictionary, * or it may be kept. In the latter case, exact matches on the prefix * will also be copied into the resulting dictionary. * * @param properties is the properties from where to get the subset. * @param prefix is the key prefix to filter the properties by. * @param keepPrefix if true, the key prefix is kept in the resulting * dictionary. As a side effect, a key that matches the prefix * exactly will also be copied. If false, the resulting * dictionary's keys are shortened by the prefix. An * exact prefix match will not be copied, as it would * result in an empty string key. * * @return a property dictionary matching the filter key. May be * an empty dictionary, if no prefix matches were found. * * */ public Properties matchingSubset( Properties properties, String prefix, boolean keepPrefix ) { Properties result = new Properties(); // sanity check if ( prefix == null || prefix.length() == 0 ) return result; String prefixMatch; // match prefix strings with this String prefixSelf; // match self with this if ( prefix.charAt(prefix.length()-1) != '.' ) { // prefix does not end in a dot prefixSelf = prefix; prefixMatch = prefix + '.'; } else { // prefix does end in one dot, remove for exact matches prefixSelf = prefix.substring( 0, prefix.length()-1 ); prefixMatch = prefix; } // POSTCONDITION: prefixMatch and prefixSelf are initialized! // now add all matches into the resulting properties. // Remark 1: #propertyNames() will contain the System properties! // Remark 2: We need to give priority to System properties. This is done // automatically by calling this class's getProperty method. String key; for ( Enumeration e = properties.propertyNames(); e.hasMoreElements(); ) { key = (String) e.nextElement(); if ( keepPrefix ) { // keep full prefix in result, also copy direct matches if ( key.startsWith(prefixMatch) || key.equals(prefixSelf) ) result.setProperty( key, (String)properties.get(key) ); } else { // remove full prefix in result, don't copy direct matches if ( key.startsWith(prefixMatch) ) result.setProperty( key.substring( prefixMatch.length() ), (String)properties.get(key) ); } } // done return result; } /** * Associates a VDS property with the new Pegasus property. * * @param vdsProperty the old VDS property. * @param pegasusProperty the new Pegasus property. 
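*
* <p>Example (taken from the mapping table above): associate("vds.tc.file", "pegasus.catalog.transformation.file") records that the old key is to be rewritten to the new key during conversion, with its value left untouched.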
* */ private static void associate( String vdsProperty, String pegasusProperty ){ if( mVDSToPegasusPropertiesTable == null ){ mVDSToPegasusPropertiesTable = new HashMap(13); } mVDSToPegasusPropertiesTable.put( vdsProperty, pegasusProperty ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/PlotNodeUsage.java0000644000175000017500000003332011757531137026125 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.client; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.planner.visualize.Callback; import edu.isi.pegasus.planner.visualize.KickstartParser; import edu.isi.pegasus.planner.visualize.spaceusage.KickstartOutputFilenameFilter; import edu.isi.pegasus.planner.visualize.WorkflowMeasurements; import edu.isi.pegasus.planner.visualize.nodeusage.NodeUsageCallback; import edu.isi.pegasus.planner.visualize.nodeusage.Ploticus; import org.griphyn.vdl.toolkit.FriendlyNudge; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.Date; import java.util.Iterator; import java.util.List; /** * This parses the kickstart records and generates input files for ploticus, * to visualize. * * * @author Karan Vahi * * @version $Revision: 3540 $ */ public class PlotNodeUsage extends Executable{ /** * The default output directory. */ public static final String DEFAULT_OUTPUT_DIR = "."; /** * The default timing source. */ public static final String DEFAULT_TIMING_SOURCE = "Kickstart"; /** * The tailstatd timing source. */ public static final String TAILSTATD_TIMING_SOURCE = "Tailstatd"; /** * The input directory containing the kickstart records. */ private String mInputDir; /** * The output directory where to generate the ploticus output. */ private String mOutputDir; /** * The default basename given to the files. */ private String mBasename; /** * The logging level to be used. */ private int mLoggingLevel; /** * The time units. */ private String mTimeUnits; /** * Default constructor. */ public PlotNodeUsage(){ super(); } /** * Initialize the PlotNodeUsage object * @param opts the command line argument passed to the PlotNodeUsage */ public void initialize(String[] opts){ super.initialize(opts); mLogMsg = new String(); mVersion = Version.instance().toString(); mOutputDir = this.DEFAULT_OUTPUT_DIR; mLoggingLevel = 0; mBasename = "ploticus"; } /** * The main program. * * * @param args the main arguments passed to the plotter. 
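*
* <p>Typical invocation (illustrative arguments): plot-node-usage -i run0001 -o plots -T minutes parses all kickstart output files found under run0001 and writes the ploticus input files into the plots directory.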
*/ public static void main(String[] args) { PlotNodeUsage me = new PlotNodeUsage(); int result = 0; double starttime = new Date().getTime(); double execTime = -1; try{ me.initialize(args); me.executeCommand(); } catch ( FactoryException fe){ me.log( fe.convertException() , LogManager.FATAL_MESSAGE_LEVEL); result = 2; } catch ( RuntimeException rte ) { //catch all runtime exceptions including our own that //are thrown that may have chained causes me.log( convertException(rte, me.mLogger.getLevel()), LogManager.FATAL_MESSAGE_LEVEL ); result = 1; } catch ( Exception e ) { //unaccounted for exceptions me.log(e.getMessage(), LogManager.FATAL_MESSAGE_LEVEL ); e.printStackTrace(); result = 3; } finally { double endtime = new Date().getTime(); execTime = (endtime - starttime)/1000; } // warn about non zero exit code if ( result != 0 ) { me.log("Non-zero exit-code " + result, LogManager.WARNING_MESSAGE_LEVEL ); } else{ //log the time taken to execute me.log("Time taken to execute is " + execTime + " seconds", LogManager.INFO_MESSAGE_LEVEL); } System.exit(result); } /** * Executes the command on the basis of the options specified. * * @param args the command line options. */ public void executeCommand() { parseCommandLineArguments(getCommandLineOptions()); //set logging level to warning if the level is not set by user if( mLoggingLevel > 0 ) { mLogger.setLevel( mLoggingLevel ); } else{mLogger.setLevel(LogManager.WARNING_MESSAGE_LEVEL);} //do sanity check on input directory if( mInputDir == null ){ throw new RuntimeException( "You need to specify the directory containing kickstart records"); } File dir = new File( mInputDir ); if ( dir.isDirectory() ){ //see if it is readable if ( !dir.canRead() ){ throw new RuntimeException( "Cannot read directory " + mInputDir); } } else{ throw new RuntimeException( mInputDir + " is not a directory " ); } //sanity check on output directory dir = new File( mOutputDir ); if( dir.exists() ){ //directory already exists. 
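//as with the input directory check above, only a writable directory is acceptable here; an existing plain file of the same name is rejected below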
if ( dir.isDirectory() ){ if ( !dir.canWrite() ){ throw new RuntimeException( "Cannot write out to output directory " + mOutputDir ); } } else{ //directory is a file throw new RuntimeException( mOutputDir + " is not a directory "); } } else{ dir.mkdirs(); } KickstartParser su = new KickstartParser(); Callback c = new NodeUsageCallback(); c.initialize( mInputDir, true ); su.setCallback( c ); //String dir = "/usr/sukhna/work/test/dags/ivdgl1/blackdiamond/run0004"; File directory = new File( mInputDir ); String[] files = directory.list( new KickstartOutputFilenameFilter() ); for( int i = 0; i < files.length; i++){ String file = mInputDir + File.separator + files[i]; try { log( "Parsing file " + file , LogManager.DEBUG_MESSAGE_LEVEL ); su.parseKickstartFile(file); } catch (IOException ioe) { log( "Unable to parse kickstart file " + file + convertException( ioe , mLogger.getLevel()), LogManager.DEBUG_MESSAGE_LEVEL); } catch( FriendlyNudge fn ){ log( "Problem parsing file " + file + convertException( fn , mLogger.getLevel()), LogManager.WARNING_MESSAGE_LEVEL ); } } //we are done with parsing c.done(); WorkflowMeasurements wm = ( WorkflowMeasurements )c.getConstructedObject(); wm.sort(); log( " Workflow Measurements is \n" + wm, LogManager.CONSOLE_MESSAGE_LEVEL); //generate the ploticus format Ploticus plotter = new Ploticus(); plotter.initialize( mOutputDir, mBasename , true); try{ List result = plotter.plot( wm, '0', mTimeUnits ); for( Iterator it = result.iterator(); it.hasNext(); ){ mLogger.log( "Written out file " + it.next(), LogManager.CONSOLE_MESSAGE_LEVEL ); } } catch (IOException ioe) { log( "Unable to plot the files " + convertException( ioe, mLogger.getLevel() ), LogManager.ERROR_MESSAGE_LEVEL); } } /** * Parses the command line arguments using GetOpt and returns a * PlannerOptions contains all the options passed by the * user at the command line. * * @param args the arguments passed by the user at command line. */ public void parseCommandLineArguments(String[] args){ LongOpt[] longOptions = generateValidOptions(); Getopt g = new Getopt( "plot-node-usage", args, "b:i:o:T:c:hvV", longOptions, false); g.setOpterr(false); int option = 0; while( (option = g.getopt()) != -1){ //System.out.println("Option tag " + (char)option); switch (option) { case 'b'://the basename this.mBasename = g.getOptarg(); break; case 'i'://dir this.mInputDir = g.getOptarg(); break; case 'h'://help printLongVersion(); System.exit( 0 ); return; case 'c': // conf //do nothing break; case 'o'://output directory this.mOutputDir = g.getOptarg(); break; case 'T'://time units this.mTimeUnits = g.getOptarg(); break; case 'v'://verbose mLoggingLevel++; break; case 'V'://version mLogger.log(getGVDSVersion(),LogManager.INFO_MESSAGE_LEVEL); System.exit(0); default: //same as help printShortVersion(); throw new RuntimeException("Incorrect option or option usage " + (char)g.getOptopt()); } } } /** * Tt generates the LongOpt which contain the valid options that the command * will accept. 
* * @return array of LongOpt objects, corresponding to the valid * options */ public LongOpt[] generateValidOptions(){ LongOpt[] longopts = new LongOpt[8]; longopts[0] = new LongOpt( "input", LongOpt.REQUIRED_ARGUMENT, null, 'i' ); longopts[1] = new LongOpt( "output", LongOpt.REQUIRED_ARGUMENT, null, 'o' ); longopts[2] = new LongOpt( "verbose", LongOpt.NO_ARGUMENT, null, 'v' ); longopts[3] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' ); longopts[4] = new LongOpt( "Version", LongOpt.NO_ARGUMENT, null, 'V' ); longopts[5] = new LongOpt( "basename", LongOpt.REQUIRED_ARGUMENT, null, 'b' ); longopts[6] = new LongOpt( "time-units", LongOpt.REQUIRED_ARGUMENT, null, 'T' ); longopts[7] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' ); return longopts; } /** * Prints out a short description of what the command does. */ public void printShortVersion(){ String text = "\n $Id: PlotNodeUsage.java 3540 2011-04-21 02:10:27Z prasanth $ " + "\n " + getGVDSVersion() + "\n Usage: plot-node-usage [-Dprop [..]] -i <input directory> " + " [-o output directory] [-b basename] [-T time units] [-c <path to property file>] [-v] [-V] [-h]"; System.out.println(text); } /** * Prints the long description, displaying in detail what the various options * to the command stand for. */ public void printLongVersion(){ String text = "\n $Id: PlotNodeUsage.java 3540 2011-04-21 02:10:27Z prasanth $ " + "\n " + getGVDSVersion() + "\n plot-node-usage - A plotting tool that plots out the number of jobs running on remote clusters over time" + "\n Usage: plot-node-usage [-Dprop [..]] --input <input directory> [--basename basename] " + "\n [--output output directory] [-T time units] [--conf <path to property file>] [--verbose] [--Version] [--help] " + "\n" + "\n Mandatory Options " + "\n --input the directory where the kickstart records reside." + "\n Other Options " + "\n -b <basename> |--basename the basename prefix for constructing the ploticus files." + "\n -o <output directory> |--output the output directory where to generate the ploticus files." + "\n -T <time units> |--time-units the units in which you want the x axis to be plotted (seconds|minutes|hours) " + "\n Defaults to seconds." + "\n -c <path to property file> |--conf path to the property file" + "\n -v |--verbose increases the verbosity of messages about what is going on" + "\n -V |--version displays the version of the Pegasus Workflow Planner" + "\n -h |--help generates this help." + "\n The following exit codes are produced" + "\n 0 plotter was able to generate plots" + "\n 1 an error occurred. In most cases, the error message logged should give a" + "\n clear indication as to where things went wrong." + "\n 2 an error occurred while loading a specific module implementation at runtime" + "\n "; System.out.println(text); //mLogger.log(text,LogManager.INFO_MESSAGE_LEVEL); } /** * Loads all the properties that would be needed by the Toolkit classes. */ public void loadProperties(){ //empty for time being } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/RCClient.java0000644000175000017500000010045411757531137025062 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.client; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.FileReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.LineNumberReader; import java.lang.reflect.InvocationTargetException; import java.util.ArrayList; import java.util.Collection; import java.util.Enumeration; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.MissingResourceException; import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.apache.log4j.PatternLayout; import org.apache.log4j.Priority; import org.griphyn.vdl.toolkit.Toolkit; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.CommonProperties; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogException; import edu.isi.pegasus.planner.catalog.replica.ReplicaFactory; import edu.isi.pegasus.planner.common.PegasusProperties; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; /** * This class interfaces the with the replica catalog API to delve into the * underlying true catalog without knowing (once instantiated) which one it is. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 3619 $ * * @see edu.isi.pegasus.planner.catalog.replica.ReplicaCatalog * @see edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry * @see edu.isi.pegasus.planner.catalog.replica.impl.JDBCRC */ public class RCClient extends Toolkit { /** * The message for LFN's not found. */ private static final String LFN_DOES_NOT_EXIST_MSG = "LFN doesn't exist:"; /** * The default chunk factor that is used for biting off chunks of large * files. */ private static final int DEFAULT_CHUNK_FACTOR = 1000; /** * Maintains the interface to the replica catalog implementation. */ private ReplicaCatalog m_rc; /** * Maintains instance-local settings on user preferences. */ private Map m_prefs; /** * Keeps track of log4j's root logger as singleton. */ private static Logger m_root; /** * Logger for RLS implementation for the time being. */ private LogManager m_rls_logger; /** * The number of lines that are to be parsed for chunking up large input * files. */ private int m_chunk_factor; /** * The total number of lines on which the client has worked on till yet. */ private int m_total_lines_worked; /** * The total number of lines on which the client has successfully worked on * till yet. */ private int m_total_lines_succ_worked; /** * Indication of batch mode. */ private boolean m_batch; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties m_pegasus_props; /** * Reference to the property file passed using the --conf option */ private String m_conf_property_file = null; /** * Initializes the root logger when this class is loaded. 
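*
* <p>The console appender installed here uses the log4j pattern "%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%c{1}] %m%n": an ISO-style timestamp, the padded priority, the short category name in brackets, and then the message.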
*/ static { if ((m_root = Logger.getRootLogger()) != null) { m_root.removeAllAppenders(); // clean house m_root.addAppender(new ConsoleAppender(new PatternLayout( "%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%c{1}] %m%n"))); m_root.setLevel(Level.INFO); m_root.debug("starting"); } } /** * Sets a logging level. * * @param level * is the new level to achieve. */ public void setLevel(int level) { doSet(Level.toLevel(level)); } /** * Logs messages from main() method. * * @param level * is the log4j level to generate the log message for * @param msg * is the message itself. * * @see org.apache.log4j.Category#log(Priority, Object ) */ public static void log(Level level, String msg) { m_root.log(level, msg); } /** * Our own logger. */ private Logger m_log; private void doSet(Level level) { m_root.setLevel(level); m_log.setLevel(level); m_rls_logger.setLevel(level); } /** * Adds a preference to the instance preferences settings. * * @param key * is a key into the preference map. * @param value * is the new value to add. * @return the previous value, or null if no such value exists. */ public Object enter(String key, String value) { String newkey = key.toLowerCase(); Object result = m_prefs.put(newkey, value); return result; } /** * ctor: Constructs a new instance of the commandline interface to replica * catalogs. * * @param appName * is the name of to print in usage records. */ public RCClient(String appName) { super(appName); } /** * Initialize the RCClient object * @param opts the command line argument passed by the user * @param confChar the short option corresponding the conf property. */ private void initialize(String [] opts , char confChar){ m_rc = null; m_prefs = new HashMap(); m_batch = false; m_total_lines_worked = 0; m_total_lines_succ_worked = 0; // private logger m_log = Logger.getLogger(RCClient.class); String propertyFile =lookupConfProperty(opts, confChar); m_pegasus_props = PegasusProperties.getInstance(propertyFile); m_rls_logger = LogManagerFactory.loadSingletonInstance(m_pegasus_props); m_rls_logger.setLevel(Level.WARN); m_rls_logger.logEventStart("pegasus-rc-client", "planner.version", Version.instance().toString()); m_log.debug("starting instance"); determineChunkFactor(); } /** * Prints the usage string on stdout. */ public void showUsage() { String linefeed = System.getProperty("line.separator", "\r\n"); System.out .println("$Id: RCClient.java 3619 2011-04-29 00:41:50Z prasanth $" + linefeed + "Pegasus version " + Version.instance().toString() + linefeed); System.out .println("Usage: " + this.m_application + " [-p k=v] [ [-f fn] | [-i|-d fn] | [cmd [args]] ]" + linefeed + " -h|--help print this help text" + linefeed + " -V|--version print some version identification string and exit" + linefeed + " -f|--file fn uses non-interactive mode, reading from file fn." + linefeed + " The special filename hyphen reads from pipes" + linefeed + " -c|--conf fn path to the property file" + linefeed + " -v|--verbose increases the verbosity level" + linefeed + " -p|--pref k=v enters the specified mapping into preferences (multi-use)." + linefeed + " remember quoting, e.g. -p 'format=%l %p %a'" + linefeed + " -i|--insert fn the path to the file containing the mappings to be inserted." + linefeed + " Each line in the file denotes one mapping of format [k=v [..]]" + linefeed + " -d|--delete fn the path to the file containing the mappings to be deleted." + linefeed + " Each line in the file denotes one mapping of format [k=v [..]]." 
+ linefeed + " -l|--lookup fn the path to the file containing the LFN's to be looked up." + linefeed + " Each line in the file denotes one LFN" + linefeed + " For now attributes are not matched to determine the entries to delete." + linefeed + " cmd [args] exactly one of the commands below with arguments."); showHelp(); System.out .println("FIXME list:" + linefeed + " o permit input to span multiple lines (format free input)" + linefeed + " o permit whitespaces within PFNs (but not in SITE nor LFN)" + linefeed + " o permit commands to deal with values that contain whitespaces (quoting)" + linefeed + " o add some missing out-of-bounds checks to the format string" + linefeed); } /** * Creates a set of GNU long options. * * @return an initialized array with the options */ protected LongOpt[] generateValidOptions() { LongOpt[] lo = new LongOpt[9]; lo[0] = new LongOpt("help", LongOpt.NO_ARGUMENT, null, 'h'); lo[1] = new LongOpt("version", LongOpt.NO_ARGUMENT, null, 'V'); lo[2] = new LongOpt("file", LongOpt.REQUIRED_ARGUMENT, null, 'f'); lo[3] = new LongOpt("pref", LongOpt.REQUIRED_ARGUMENT, null, 'p'); lo[4] = new LongOpt("insert", LongOpt.REQUIRED_ARGUMENT, null, 'i'); lo[5] = new LongOpt("delete", LongOpt.REQUIRED_ARGUMENT, null, 'd'); lo[6] = new LongOpt("lookup", LongOpt.REQUIRED_ARGUMENT, null, 'l'); lo[7] = new LongOpt("verbose", LongOpt.NO_ARGUMENT, null, 'v'); lo[8] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' ); return lo; } /** * Connects the interface with the replica catalog implementation. The * choice of backend is configured through properties. * * @exception ClassNotFoundException * if the schema for the database cannot be loaded. You might * want to check your CLASSPATH, too. * @exception NoSuchMethodException * if the schema's constructor interface does not comply with * the database driver API. * @exception InstantiationException * if the schema class is an abstract class instead of a * concrete implementation. * @exception IllegalAccessException * if the constructor for the schema class it not publicly * accessible to this package. * @exception InvocationTargetException * if the constructor of the schema throws an exception while * being dynamically loaded. * @exception IOException * @exception MissingResourceException * * @see org.griphyn.vdl.util.ChimeraProperties */ void connect(PegasusProperties properties ) throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, MissingResourceException { m_rc = ReplicaFactory.loadInstance(properties); // auto-disconnect, should we forget it, or die in an orderly fashion Runtime.getRuntime().addShutdownHook(new Thread() { public void run() { try { // log for the batch mode if (m_batch) { // log on stderr to prevent clobbing System.err.println("#Successfully worked on : " + m_total_lines_succ_worked + " lines."); System.err.println("#Worked on total number of : " + m_total_lines_worked + " lines."); } // disconnect from the replica catalog close(); } catch (Exception e) { e.printStackTrace(); } } }); } /** * Frees resources taken by the instance of the replica catalog. This method * is safe to be called on failed or already closed catalogs. */ void close() { if (m_rc != null) { m_rc.close(); m_rc = null; } } /** * Escapes quotes and backslashes by backslashing them. Identity s == * unescape(escape(s)) is preserved. * * @param s * is the string to escape * @return a string with escaped special characters. 
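* <p>Illustrative round trip on a hypothetical value: escape() turns a "quoted" \ name into a \"quoted\" \\ name, and unescape() restores the original text.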
* @see #unescape(String ) */ private String escape(String s) { StringBuffer result = new StringBuffer(s.length()); for (int i = 0; i < s.length(); ++i) { char ch = s.charAt(i); if (ch == '"' || ch == '\\') result.append('\\'); result.append(ch); } return result.toString(); } /** * Unescapes previously backslashed characters. Identity s == * unescape(escape(s)) is preserved. * * @param s * is the string to escape * @return a string with unescaped special characters. * @see #escape(String ) */ private String unescape(String s) { StringBuffer result = new StringBuffer(s.length()); int state = 0; for (int i = 0; i < s.length(); ++i) { char ch = s.charAt(i); if (state == 0) { if (ch == '\\') state = 1; else result.append(ch); } else { result.append(ch); state = 0; } } return result.toString(); } /** * Removes a pair of outer quotes, which are optional. * * @param s * is a string which may start and end in quotes * @return a string without the optional quotes, or the string itself. */ private String noquote(String s) { int len = s.length(); // remove outer quotes, if they exist return ((s.charAt(0) == '"' && s.charAt(len - 1) == '"') ? s.substring( 1, len - 1) : s); } /** * Writes out a message about LFN not existing. * * @param lfn * the lfn. */ private void lfnDoesNotExist(String lfn) { System.err.println(LFN_DOES_NOT_EXIST_MSG + " " + lfn); } /** * Preliminary implementation of output method. * * @param lfn * is the logical filename to show * @param rce * is the replica catalog entry to show. It contains at minimum * the physical filename, and may contain any number of key-value * pairs. */ private void show(String lfn, ReplicaCatalogEntry rce) { System.out.print(lfn + " " + rce.getPFN()); for (Iterator i = rce.getAttributeIterator(); i.hasNext();) { String key = (String) i.next(); Object val = rce.getAttribute(key); System.out.print(" " + key + "=\"" + escape(val.toString()) + "\""); } System.out.println(); } /** * Prints internal command help. */ public void showHelp() { String linefeed = System.getProperty("line.separator", "\r\n"); System.out.println(linefeed + "Commands and their respective arguments, line-by-line:" + linefeed + " help" + linefeed + " quit" + linefeed + " exit" + linefeed + " clear" + linefeed + " insert LFN PFN [k=v [..]]" + linefeed + " delete LFN PFN [k=v [..]]" + linefeed + " remove LFN [LFN [..]]" + linefeed + " lookup LFN [LFN [..]]" + linefeed + " list [lfn ] [pfn ] [ ]" + linefeed + " set [var [value]]" + linefeed); } /** * Works on the command contained within chunk of lines. * * @param lines * is a list of lines with each line being a list of words that * is split appropriately * @param command * the command to be invoked. * * @return number of entries affected, or -1 to stop processing. 
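* <p>
* Each inner list is one tokenized mapping line. For insert and delete, a
* hypothetical line {@code f.a gsiftp://host/path/f.a site=local} splits
* into an LFN, a PFN, and optional k=v attribute tokens; for lookup, each
* line carries a single LFN.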
 */
public int work(List lines, String command) {
    // sanity checks
    if (command == null)
        throw new RuntimeException(
                "The command to be applied to the file contents not specified");
    if (lines == null || lines.isEmpty())
        return 0;

    String c_argnum = "Illegal number of arguments, ignoring!";
    int result = 0;
    // a map indexed by lfn
    Map entries = new HashMap();

    if (command.equals("insert") || command.equals("delete")) {
        for (Iterator it = lines.iterator(); it.hasNext();) {
            List words = (List) it.next();
            if (words.size() < 2) {
                m_log.warn(c_argnum);
            } else {
                Iterator i = words.listIterator();
                String lfn = (String) i.next();
                ReplicaCatalogEntry rce = new ReplicaCatalogEntry(
                        (String) i.next());

                while (i.hasNext()) {
                    String attr = (String) i.next();
                    int pos = attr.indexOf('=');
                    if (pos == -1) {
                        m_log.error("attribute \"" + attr
                                + "\" without assignment, "
                                + "assuming resource handle");
                        rce.setResourceHandle(attr);
                    } else {
                        rce.setAttribute(attr.substring(0, pos),
                                unescape(noquote(attr.substring(pos + 1))));
                    }
                }

                // check to see if the lfn is already there
                // not doing a contains check as most of
                // the times lfn is expected to be unique
                // add all the old pfn's to the existing collection
                Collection c = new ArrayList(1);
                c.add(rce);
                Object old = entries.put(lfn, c);
                if (old != null)
                    c.addAll((Collection) old);
            }
        }// end of iteration over the lines

        if (command.equals("insert")) {
            result = m_rc.insert(entries);
            m_log.info("inserted " + result + " entries");
        } else {
            result = m_rc.delete(entries, false);
            m_log.info("deleted " + result + " entries");
        }
    } else if (command.equals("lookup")) {
        Set<String> lfns = new HashSet<String>();
        // each line has a single LFN
        for (Iterator it = lines.iterator(); it.hasNext();) {
            List words = (List) it.next();
            if (words.size() != 1) {
                m_log.warn(c_argnum);
            }
            String lfn = (String) words.get(0);
            lfns.add(lfn);
        }

        Map<String, Collection<ReplicaCatalogEntry>> results = m_rc.lookup(lfns);
        result = results.size();

        // display results for LFN
        for (Iterator<Map.Entry<String, Collection<ReplicaCatalogEntry>>> it = results
                .entrySet().iterator(); it.hasNext();) {
            Map.Entry<String, Collection<ReplicaCatalogEntry>> entry = it.next();
            String lfn = entry.getKey();
            Collection rces = entry.getValue();
            for (Iterator j = rces.iterator(); j.hasNext();) {
                show(lfn, (ReplicaCatalogEntry) j.next());
            }
        }

        // try and figure out LFN's for which mappings were not found
        // and display them
        lfns.removeAll(results.keySet());
        for (String lfn : lfns) {
            lfnDoesNotExist(lfn);
        }
    }
    return result;
}

/**
 * Works on the command contained within one line.
 *
 * @param words
 *            is a list of the arguments, split appropriately
 * @return number of entries affected, or -1 to stop processing.
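 * <p>
 * The first word selects the command; hypothetical examples would be
 * {@code insert f.a gsiftp://host/path/f.a site=local} or
 * {@code lookup f.a}. See {@link #showHelp()} for the full grammar.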
*/ public int work(List words) { String c_argnum = "Illegal number of arguments, ignoring!"; int result = 0; // sanity check if (words == null || words.size() == 0) return result; // separate command from arguments String cmd = ((String) words.remove(0)).toLowerCase(); if (cmd.equals("help")) { showHelp(); } else if (cmd.equals("lookup")) { for (Iterator i = words.iterator(); i.hasNext();) { String lfn = (String) i.next(); Collection c = m_rc.lookup(lfn); m_log.info("found " + c.size() + " matches"); for (Iterator j = c.iterator(); j.hasNext();) { show(lfn, (ReplicaCatalogEntry) j.next()); result++; } } } else if (cmd.equals("list")) { Map m = new HashMap(); for (Iterator i = words.iterator(); i.hasNext();) { String key = ((String) i.next()).toLowerCase(); if (i.hasNext()) { String val = (String) i.next(); m.put(key, val); } } Map lfns = m_rc.lookup(m); if (lfns.size() > 0) { for (Iterator i = lfns.keySet().iterator(); i.hasNext();) { String lfn = (String) i.next(); for (Iterator j = ((List) lfns.get(lfn)).iterator(); j .hasNext();) { show(lfn, (ReplicaCatalogEntry) j.next()); result++; } } m_log.info("found " + result + " matches"); } else { m_log.info("no matches found"); } } else if (cmd.equals("insert") || cmd.equals("delete")) { if (words.size() < 2) { m_log.warn(c_argnum); } else { Iterator i = words.listIterator(); String lfn = (String) i.next(); ReplicaCatalogEntry rce = new ReplicaCatalogEntry( (String) i.next()); while (i.hasNext()) { String attr = (String) i.next(); int pos = attr.indexOf('='); if (pos == -1) { m_log.error("attribute \"" + attr + "\" without assignment, " + "assuming resource handle"); rce.setResourceHandle(attr); } else { rce.setAttribute(attr.substring(0, pos), unescape(noquote(attr.substring(pos + 1)))); } } if (cmd.equals("insert")) { result = m_rc.insert(lfn, rce); m_log.info("inserted " + result + " entries"); } else { result = rce.getAttributeCount() == 0 ? m_rc.delete(lfn, rce.getPFN()) : m_rc.delete(lfn, rce); m_log.info("deleted " + result + " entries"); } } } else if (cmd.equals("remove")) { // do it the slow way, better debugging for (Iterator i = words.iterator(); i.hasNext();) { String lfn = (String) i.next(); int count = m_rc.remove(lfn); result += count; if (count > 0) { m_log.info("removed LFN " + lfn); } else { m_log.info("ignoring unknown LFN " + lfn); } } } else if (cmd.equals("clear")) { result = m_rc.clear(); m_log.info("removed " + result + " entries"); } else if (cmd.equals("quit") || cmd.equals("exit")) { result = -1; m_log.info("Good-bye"); } else if (cmd.equals("set")) { String key, value; switch (words.size()) { case 0: // show all for (Iterator i = m_prefs.keySet().iterator(); i.hasNext();) { key = (String) i.next(); value = (String) m_prefs.get(key); System.out.println("set " + key + " " + value); result++; } break; case 1: // show one key = ((String) words.get(0)).toLowerCase(); if (m_prefs.containsKey(key)) { value = (String) m_prefs.get(key); System.out.println("set " + key + " " + value); result++; } else { m_log.warn("no such preference"); } break; case 2: // set one enter((String) words.get(0), (String) words.get(1)); result++; break; default: // other m_log.warn(c_argnum); break; } } else { // unknown command m_log.warn("Unknown command: " + cmd + ", ignoring!"); } return result; } /** * Consumes commands that control the replica management. * * @param filename * is the file to read from. If null, use stdin. 
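 * <p>
 * A minimal interactive session (hypothetical values, commands as
 * listed by {@link #showHelp()}) might look like:
 * <pre>
 * rc&gt; insert f.a gsiftp://host/path/f.a site=local
 * rc&gt; lookup f.a
 * rc&gt; exit
 * </pre>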
 * @exception IOException
 */
public void parse(String filename) throws IOException {
    boolean prompt = (filename == null);
    LineNumberReader lnr = null;
    if (filename != null) {
        // connect to file, use non-interactive mode
        if (filename.equals("-"))
            // reading from a pipe, don't prompt
            lnr = new LineNumberReader(new InputStreamReader(System.in));
        else
            // reading from a file, don't prompt
            lnr = new LineNumberReader(new FileReader(filename));
    } else {
        // connect to stdin
        lnr = new LineNumberReader(new InputStreamReader(System.in));
    }

    int pos, result = 0;
    String line;
    StringTokenizer st;
    List words = new ArrayList();

    if (prompt)
        System.out.print("rc> ");
    while ((line = lnr.readLine()) != null) {
        // do away with superfluous whitespaces and comments
        if ((pos = line.indexOf('#')) != -1)
            line = line.substring(0, pos);
        line = line.trim();

        // skip empty lines
        if (line.length() == 0)
            continue;

        // repeat what we are working on now
        m_log.debug("LINE " + lnr.getLineNumber() + ": " + line);
        words.clear();
        st = new StringTokenizer(line);
        while (st.hasMoreTokens())
            words.add(st.nextToken());

        try {
            if (work(words) == -1)
                break;
        } catch (ReplicaCatalogException rce) {
            do {
                RCClient.log(Level.ERROR, rce.getMessage());
                rce = (ReplicaCatalogException) rce.getNextException();
            } while (rce != null);
            result = 1;
        } catch (RuntimeException rte) {
            do {
                RCClient.log(Level.ERROR, rte.getClass() + " "
                        + rte.getMessage());
                rte = (RuntimeException) rte.getCause();
            } while (rte != null);
            result = 1;
        }

        if (prompt)
            System.out.print("rc> ");
    }

    // done
    if (prompt && line == null)
        System.out.println();
    lnr.close();

    // tell me, if something went wrong
    if (result == 1)
        throw new RuntimeException("Errors while processing input file");
}

/**
 * Consumes commands that control the replica management.
 *
 * @param filename
 *            is the file to read from.
 * @param command
 *            is the command that needs to be applied to the file contents
 *
 * @exception IOException
 */
public void parse(String filename, String command) throws IOException {
    LineNumberReader lnr = null;
    int chunk = m_chunk_factor;
    int lines_succ_worked = 0;
    if (command == null) {
        // throw an exception
        throw new RuntimeException(
                "The command to be applied to the file contents not specified");
    }
    if (filename != null) {
        // connect to file, use non-interactive mode
        // reading from a file
        lnr = new LineNumberReader(new FileReader(filename));
    } else {
        // throw an exception
        throw new RuntimeException(
                "File containing the mappings not specified");
    }

    int pos, result = 0;
    String line = null;
    StringTokenizer st;
    List words;

    // set the batch mode to true
    m_batch = true;

    // contains the number of valid lines read so far in the current block
    int counter = 0;
    List mappings = new ArrayList(chunk);
    while (true) {
        while (counter < chunk && (line = lnr.readLine()) != null) {
            // do away with superfluous whitespaces and comments
            if ((pos = line.indexOf('#')) != -1)
                line = line.substring(0, pos);
            line = line.trim();

            // skip empty lines
            if (line.length() == 0)
                continue;

            // repeat what we are working on now
            m_total_lines_worked = lnr.getLineNumber();
            m_log.debug("LINE " + m_total_lines_worked + ": " + line);
            words = new ArrayList(chunk);
            st = new StringTokenizer(line);
            while (st.hasMoreTokens())
                words.add(st.nextToken());

            // add to the mappings
            counter++;
            mappings.add(words);
        }

        // hand off the mappings for work
        try {
            lines_succ_worked = work(mappings, command);
            m_total_lines_succ_worked += lines_succ_worked;
        } catch (ReplicaCatalogException rce) {
            do {
                RCClient.log(Level.ERROR, rce.getMessage());
                rce = (ReplicaCatalogException) rce.getNextException();
            } while (rce != null);
            result = 1;
        } catch (RuntimeException rte) {
            RCClient.log(Level.ERROR, rte.getMessage());
            result = 1;
        } finally {
            // log the number of lines successfully worked
            m_log.info("Successfully worked on "
                    + m_total_lines_succ_worked + " lines.");
            mappings.clear();
        }
        m_log.info("Worked till line " + m_total_lines_worked);
        // System.out.println();

        // get out of the loop if end
        if (line == null)
            break;
        else
            counter = 0;
    }

    // done
    lnr.close();

    // tell me, if something went wrong
    if (result == 1)
        throw new RuntimeException("Errors while processing input file");
}

/**
 * Looks up the conf property in the command line arguments passed to the
 * RCClient.
 *
 * @param opts command line arguments
 * @param confChar short char corresponding to the conf property
 * @return path to the property file
 */
private String lookupConfProperty(String[] opts, char confChar) {
    LongOpt[] longOptions = new LongOpt[1];
    longOptions[0] = new LongOpt("conf", LongOpt.REQUIRED_ARGUMENT, null,
            confChar);
    // Create a clone before passing it to the GetOpts,
    // as Getopt changes the ordering of the array.
    String[] optsClone = new String[opts.length];
    for (int i = 0; i < opts.length; i++) {
        optsClone[i] = opts[i];
    }
    Getopt g = new Getopt("RCClient", optsClone, confChar + ":",
            longOptions, false);
    g.setOpterr(false);
    String propertyFilePath = null;
    int option = 0;
    while ((option = g.getopt()) != -1) {
        if (option == confChar) {
            propertyFilePath = g.getOptarg();
            break;
        }
    }
    return propertyFilePath;
}

/**
 * Manipulate entries in a given replica catalog implementation.
 *
 * @param args
 *            are the commandline arguments.
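 * <p>
 * Typical invocations (hypothetical paths and values):
 * <pre>
 * pegasus-rc-client insert f.a gsiftp://host/path/f.a site=local
 * pegasus-rc-client -i mappings.txt
 * </pre>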
*/ public static void main(String[] args) { int result = 0; int level = Level.FATAL_INT; RCClient me = null; try { // create an instance of self me = new RCClient("pegasus-rc-client"); me.initialize(args ,'c'); if (args.length == 0) { me.m_log.error("Please provide the required options."); me.showUsage(); System.exit(1); } // get the command line options Getopt opts = new Getopt(me.m_application, args, "f:hp:vVi:d:l:c:", me.generateValidOptions()); opts.setOpterr(false); String arg; String filename = null; int pos, option = -1; boolean interactive = false; String command = null; while ((option = opts.getopt()) != -1) { switch (option) { case 'V': System.out .println("$Id: RCClient.java 3619 2011-04-29 00:41:50Z prasanth $"); System.out.println("Pegasus version " + Version.instance().toString()); return; case 'v': level -= 10000; break; case 'f': arg = opts.getOptarg(); interactive = true; if (arg != null) filename = arg; break; case 'p': arg = opts.getOptarg(); if (arg != null && (pos = arg.indexOf('=')) != -1) me.enter(arg.substring(0, pos), arg.substring(pos + 1)); break; case 'i': arg = opts.getOptarg(); command = "insert"; if (arg != null) filename = arg; break; case 'd': arg = opts.getOptarg(); command = "delete"; if (arg != null) filename = arg; break; case 'l': arg = opts.getOptarg(); command = "lookup"; if (arg != null) filename = arg; break; case 'c': // conf // do nothing break; case 'h': default: me.showUsage(); return; } } // Set verbosity level me.setLevel(level); // now work with me me.connect(me.m_pegasus_props); RCClient.log(Level.DEBUG, "connected to backend"); // are there any remaining CLI arguments? if (opts.getOptind() < args.length) { // there are CLI arguments if (filename != null) { // you must not use -f and CLI extra args throw new RuntimeException( "The -f|-i|-d|-l option and CLI arguments " + "are mutually exclusive"); } else { // just work on one (virtual, already shell-spit) line List words = new ArrayList(); for (int i = opts.getOptind(); i < args.length; ++i) words.add(args[i]); me.work(words); RCClient.log(Level.DEBUG, "done with CLI commands"); } } else { // no CLI args, use single command or interactive mode if (interactive && command != null) { throw new RuntimeException( "The -f and -i|-d|-l options are mutually exclusive"); } // in interactive mode parse each line if (interactive) me.parse(filename); // in the command mode parse chunks of lines together else if (command != null) me.parse(filename, command); RCClient.log(Level.DEBUG, "done parsing commands"); } } catch (ReplicaCatalogException rce) { do { RCClient.log(Level.ERROR, rce.getMessage()); rce = (ReplicaCatalogException) rce.getNextException(); } while (rce != null); result = 1; } catch (RuntimeException rte) { do { RCClient.log(Level.ERROR, rte.getClass() + " " + rte.getMessage()); rte = (RuntimeException) rte.getCause(); } while (rte != null); result = 1; } catch (Exception e) { RCClient.log(Level.ERROR, e.getMessage()); e.printStackTrace(); result = 2; } finally { me.close(); RCClient.log(Level.DEBUG, "disconnected from backend"); } // log event completion in rls logger me.m_rls_logger.logEventCompletion(); // get out if (result != 0) { RCClient.log(Level.WARN, "non-zero exit-code " + result); System.exit(result); } } /** * Sets the chunk factor for chunking up large input files. 
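* The factor defaults to {@code RCClient.DEFAULT_CHUNK_FACTOR} and may be
* overridden through the batch key ({@code ReplicaCatalog.BATCH_KEY}) in
* the replica catalog property subset; any lookup or parse failure
* silently falls back to the default.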
* */ private void determineChunkFactor() { int size = RCClient.DEFAULT_CHUNK_FACTOR; try { Properties properties = CommonProperties.instance().matchingSubset( ReplicaCatalog.c_prefix, false); String s = properties.getProperty(ReplicaCatalog.BATCH_KEY); size = Integer.parseInt(s); } catch (Exception e) { } m_chunk_factor = size; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/TCClient.java0000644000175000017500000004153111757531137025064 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.client; import java.util.HashMap; import java.util.Map; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationFactory; import edu.isi.pegasus.planner.catalog.transformation.client.TCAdd; import edu.isi.pegasus.planner.catalog.transformation.client.TCDelete; import edu.isi.pegasus.planner.catalog.transformation.client.TCQuery; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; /** * A common client to add, modify, delete, query any Transformation Catalog * implementation. * * @author Gaurang Mehta * @version $Revision: 4091 $ */ public class TCClient extends Executable{ public String classname; private int add = 0; private int query = 0; private int delete = 0; private int bulk = 0; private int islfn = 0; private int ispfn = 0; private int isresource = 0; private int isprofile = 0; private int istype = 0; private int issysinfo = 0; private boolean isxml = false; private boolean isoldformat = false; private String lfn = null; private String pfn = null; private String profile = null; private String type = null; private String resource = null; private String system = null; private String file = null; private TransformationCatalog tc = null; private Map argsmap = null; private Version version = Version.instance(); public TCClient() { super(); } public void initialize(String[] opts){ super.initialize(opts); } public void loadProperties() { } /** * Sets up the logging options for this class. Looking at the properties * file, sets up the appropriate writers for output and stderr. */ protected void setupLogging(){ //setup the logger for the default streams. 
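// (note: loadSingletonInstance hands back the shared LogManager instance,
// so the level configured here governs subsequent logging in this client)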
mLogger = LogManagerFactory.loadSingletonInstance( mProps ); mLogger.logEventStart( "event.pegasus.tc-client", "client.version", mVersion, LogManager.DEBUG_MESSAGE_LEVEL ); } public LongOpt[] generateValidOptions() { LongOpt[] longopts = new LongOpt[16 ]; longopts[ 0 ] = new LongOpt( "add", LongOpt.NO_ARGUMENT, null, 'a' ); longopts[ 1 ] = new LongOpt( "delete", LongOpt.NO_ARGUMENT, null, 'd' ); longopts[ 2 ] = new LongOpt( "query", LongOpt.NO_ARGUMENT, null, 'q' ); longopts[ 3 ] = new LongOpt( "lfn", LongOpt.REQUIRED_ARGUMENT, null, 'l' ); longopts[ 4 ] = new LongOpt( "pfn", LongOpt.REQUIRED_ARGUMENT, null, 'p' ); longopts[ 5 ] = new LongOpt( "profile", LongOpt.REQUIRED_ARGUMENT, null, 'e' ); longopts[ 6 ] = new LongOpt( "type", LongOpt.REQUIRED_ARGUMENT, null, 't' ); longopts[ 7 ] = new LongOpt( "file", LongOpt.REQUIRED_ARGUMENT, null, 'f' ); longopts[ 8 ] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' ); longopts[ 9 ] = new LongOpt( "version", LongOpt.NO_ARGUMENT, null, 'V' ); longopts[ 10 ] = new LongOpt( "verbose", LongOpt.NO_ARGUMENT, null, 'v' ); longopts[ 11 ] = new LongOpt( "resource", LongOpt.REQUIRED_ARGUMENT, null, 'r' ); longopts[ 12 ] = new LongOpt( "system", LongOpt.REQUIRED_ARGUMENT, null, 's' ); longopts[ 13 ] = new LongOpt( "xml", LongOpt.NO_ARGUMENT, null, 'x' ); longopts[ 14 ] = new LongOpt( "oldformat", LongOpt.NO_ARGUMENT, null, 'o' ); longopts[ 15 ] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' ); return longopts; } /** * Call the correct commands depending on options. * @param opts String[] The arguments obtained from the command line. */ public void executeCommand() { String[] opts = getCommandLineOptions(); if(opts.length == 0){ mLogger.log("Please provide the required options.",LogManager.ERROR_MESSAGE_LEVEL); this.printShortVersion(); System.exit(1); } LongOpt[] longOptions = generateValidOptions(); Getopt g = new Getopt( "TCClient", opts, "adqhvxoVLPERTBSs:t:l:p:r:e:f:c:", longOptions, false ); int option = 0; int level = 0; while ( ( option = g.getopt() ) != -1 ) { switch ( option ) { case 'q': //output query = 1; break; case 'a': add = 2; break; case 'd': delete = 4; break; case 'B': bulk = 1; break; case 'L': islfn = 2; break; case 'P': ispfn = 4; break; case 'R': isresource = 8; break; case 'E': isprofile = 16; break; case 'T': istype = 32; break; case 'S': issysinfo = 64; break; case 't': type = g.getOptarg(); break; case 's': system = g.getOptarg(); break; case 'l': lfn = g.getOptarg(); break; case 'p': pfn = g.getOptarg(); break; case 'e': if ( profile != null ) { profile = profile + ";" + g.getOptarg(); } else { profile = g.getOptarg(); } break; case 'f': file = g.getOptarg(); break; case 'r': resource = g.getOptarg(); break; case 'h': //help printLongVersion(); System.exit( 0 ); break; case 'V': //version System.out.println(version.toString()); System.exit( 0 ); case 'v': //Verbose mode level++; break; case 'x': //Is XML isxml = true; if(isoldformat){ throw new IllegalArgumentException("Error: Illegal Argument passed. Options -x and -o cannot be set at the same time"); } break; case 'o': //Is Old format isoldformat = true; if(isxml){ throw new IllegalArgumentException("Error: Illegal Argument passed. 
Options -x and -o cannot be set at the same time");
}
break;

case 'c': //do nothing
    break;

default:
    mLogger.log("Unrecognized option or Invalid argument to option "
            + (char) g.getOptopt(), LogManager.FATAL_MESSAGE_LEVEL);
    printShortVersion();
    System.exit(1);
    break;
}
}
if (level > 0) {
    mLogger.setLevel(level);
} else {
    mLogger.setLevel(LogManager.WARNING_MESSAGE_LEVEL);
}

//calculating the value of the trigger
int trigger = bulk + islfn + ispfn + isresource + isprofile + istype
        + issysinfo;
argsmap = new HashMap(11);
argsmap.put("trigger", new java.lang.Integer(trigger));
argsmap.put("lfn", lfn);
argsmap.put("pfn", pfn);
argsmap.put("profile", profile);
argsmap.put("type", type);
argsmap.put("resource", resource);
argsmap.put("system", system);
argsmap.put("file", file);
argsmap.put("isxml", new Boolean(isxml));
argsmap.put("isoldformat", new Boolean(isoldformat));

//Select what operation is to be performed.
int operationcase = query + add + delete;

//load the transformation catalog if required
try {
    if (operationcase == 1 || operationcase == 4 || operationcase == 2) {
        this.mProps.setProperty(TransformationCatalog.MODIFY_FOR_FILE_URLS_KEY, "false");
        tc = TransformationFactory.loadInstance(this.mProps);
    }
} catch (FactoryException fe) {
    mLogger.log(convertException(fe, mLogger.getLevel()),
            LogManager.FATAL_MESSAGE_LEVEL);
    System.exit(2);
}

try {
    switch (operationcase) {
        case 1: //QUERY OPERATION SELECTED
            TCQuery tcquery = new TCQuery(tc, mLogger, argsmap);
            tcquery.doQuery();
            break;

        case 2: //ADD OPERATION SELECTED
            TCAdd tcadd = new TCAdd(tc, mLogger, argsmap);
            tcadd.doAdds();
            break;

        case 4: //DELETE OPERATION SELECTED
            TCDelete tcdelete = new TCDelete(tc, mLogger, argsmap);
            tcdelete.doDelete();
            break;

        default: //ERROR IN SELECTION OPERATION
            mLogger.log("Please specify the correct operation for the client. "
                    + "Only one operation can be done at a time.",
                    LogManager.FATAL_MESSAGE_LEVEL);
            this.printShortVersion();
            System.exit(-1);
    }
} finally {
    if (tc != null) {
        if (!tc.isClosed()) {
            tc.close();
        }
    }
}
mLogger.logEventCompletion(LogManager.DEBUG_MESSAGE_LEVEL);
}

public void printShortVersion() {
    String text = "\n " + version.toString()
            + "\n Usage : pegasus-tc-client [ operation ] [ operation arguments ]"
            + "\n Type pegasus-tc-client -h for more details";
    System.out.println(text);
    System.exit(1);
}

public void printLongVersion() {
    String text = "\n" + version.toString() + "\n"
            + "\n pegasus-tc-client - This client is used to add, delete, and query any Transformation Catalog implementing the TC interface."
            + "\n"
            + "\n Usage: pegasus-tc-client [Operation] [Triggers] [Options]...."
            + "\n"
            + "\n Operations :"
            + "\n ------------"
            + "\n Exactly one of these operations has to be specified."
            + "\n"
            + "\n -a | --add Perform addition operations on the TC."
            + "\n -d | --delete Perform delete operations on the TC."
            + "\n -q | --query Perform query operations on the TC."
            + "\n"
            + "\n Triggers :"
            + "\n ----------"
            + "\n"
            + "\n -L Triggers an operation on a logical transformation"
            + "\n -P Triggers an operation on a physical transformation"
            + "\n -R Triggers an operation on a resource"
            + "\n -E Triggers an operation on a Profile"
            + "\n -T Triggers an operation on a Type"
            + "\n -B Triggers a bulk operation."
            + "\n"
            + "\n Options :"
            + "\n ---------"
            + "\n"
            + "\n -l | --lfn The logical transformation to be added in the format NAMESPACE::NAME:VERSION."
+ "\n (The name is always required, namespace and version are optional.)" + "\n -p | ---pfn The physical transfromation to be added. " + "\n For INSTALLED executables its a local file path, for all others its a url." + "\n -t | --type The type of physical transformation. Valid values are :" + "\n INSTALLED, STAGEABLE. " + "\n -r | --resource The Id of the resource where the transformation is located. " + "\n -e | --profile The profiles belonging to the transformation." + "\n Mulitple profiles of same namespace can be added simultaneously" + "\n by seperating them with a comma \",\"." + "\n Each profile section is written as NAMESPACE::KEY=VALUE,KEY2=VALUE2 " + "\n e.g. ENV::JAVA_HOME=/usr/bin/java2,PEGASUS_HOME=/usr/local/vds" + "\n To add muliple namespaces you need to repeat the -e option for each namespace." + "\n e.g -e ENV::JAVA_HOME=/usr/bin/java -e GLOBUS::JobType=MPI,COUNT=10" + "\n -s | --system The architecture,os and glibc if any for the executable." + "\n Each system info is written in the form ARCH::OS:OSVER:GLIBC" + "\n The allowed ARCH's are x86, x86_64, ppc, ppc_64, ia64, sparcv7, sparcv9, amd64" + "\n The allowed OS's are LINUX, SUNOS, AIX, MACOSX, WINDOWS" + "\n" + "\n Other Options :" + "\n ---------------" + "\n" + "\n --xml | -x Generates the output in the xml format " + "\n --oldformat | -o Generates the output in the old single line format " + "\n --conf | -c path to property file" + "\n --verbose | -v increases the verbosity level" + "\n --version | -V Displays the version number of the Griphyn Virtual Data System software " + "\n --help | -h Generates this help" + "\n" + "\n Valid Combinations :" + "\n --------------------" + "\n ADD" + "\n ---" + "\n " + "\n\tAdd TC Entry : -a -l -p -r [-t ] [-s ] [-e ....]" + "\n\tAdd PFN Profile : -a -P -E -p -t -r -e ...." + "\n\tAdd LFN Profile : -a -L -E -l -e ...." + "\n\tAdd Bulk Entries : -a -B -f " + "\n" + "\n DELETE" + "\n ------" + "\n" + "\n\tDelete all TC : -d -BPRELST " + "\n\t (!!!WARNING : THIS DELETES THE ENTIRE TC!!!)" + "\n\tDelete by LFN : -d -L -l [-r ] [-t ]" + "\n\tDelete by PFN : -d -P -l -p [-r ] [-t type]" + "\n\tDelete by Type : -d -T -t [-r ]" + "\n\tDelete by Resource : -d -R -r " + "\n\tDelete by SysInfo : -d -S -s " + "\n\tDelete Pfn Profile : -d -P -E -p -r -t [-e ....]" + "\n\tDelete Lfn Profile : -d -L -E -l [-e .....]" + "\n" + "\n QUERY" + "\n -----" + "\n " + "\n\tQuery Bulk : -q -B" + "\n\tQuery LFN : -q -L [-r ] [-t ]" + "\n\tQuery PFN : -q -P -l [-r ] [-t ]" + "\n\tQuery Resource : -q -R -l [-t ]" + "\n\tQuery Lfn Profile : -q -L -E -l " + "\n\tQuery Pfn Profile : -q -P -E -p -r -t " + "\n"; System.out.println(text); System.exit( 0 ); } public static void main( String[] args ) throws Exception { TCClient client = new TCClient(); client.initialize(args); client.executeCommand( ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/PegasusGetSites.java0000644000175000017500000003465411757531137026506 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.client; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.planner.catalog.site.impl.OSGMM; import edu.isi.pegasus.planner.catalog.site.SiteCatalogException; import edu.isi.pegasus.planner.catalog.site.SiteFactory; import edu.isi.pegasus.planner.catalog.site.SiteFactoryException; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.Version; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; import java.io.File; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.IOException; import java.io.OutputStream; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Date; import java.util.Iterator; import java.util.List; import java.util.Properties; /** * The client that replaces the perl based pegasus-get-sites. * It generates a Site Catalog by querying VORS. * * @author Atul Kumar * @author Karan Vahi * @version $Revision: 4507 $ */ public class PegasusGetSites extends Executable{ private String mVO = null; private String mGrid = null; /** * The default source to query for generating a site catalog. */ private String mSource="OSGMM"; private SiteCatalog mCatalog = null; private String mSCFile = null; private String mPropertiesFilename; /** * The default constructor. */ public PegasusGetSites(){ super(); } /** * Initialize the PegasusGetSites object * @param opts the command line argument passed to the PegasusGetSites */ public void intialize(String [] opts){ super.initialize(opts); mLogMsg = new String(); mVersion = Version.instance().toString(); } /** * The main program * * @param args */ public static void main( String[] args ){ PegasusGetSites me = new PegasusGetSites(); int result = 0; double starttime = new Date().getTime(); double execTime = -1; try{ me.initialize(args); me.executeCommand( ); } catch ( RuntimeException rte ) { //catch all runtime exceptions including our own that //are thrown that may have chained causes me.log( convertException(rte), LogManager.FATAL_MESSAGE_LEVEL ); result = 1; } catch ( Exception e ) { //unaccounted for exceptions me.log(e.getMessage(), LogManager.FATAL_MESSAGE_LEVEL ); result = 3; } finally { double endtime = new Date().getTime(); execTime = (endtime - starttime)/1000; } // warn about non zero exit code if ( result != 0 ) { me.log("Non-zero exit-code " + result, LogManager.WARNING_MESSAGE_LEVEL ); } else{ //log the time taken to execute me.log("Time taken to execute is " + execTime + " seconds", LogManager.INFO_MESSAGE_LEVEL); } me.mLogger.logEventCompletion(); System.exit( result ); } /** * An empty implementation. */ public void loadProperties() { } /** * Prints out the long help. */ public void printLongVersion() { StringBuffer text = new StringBuffer(); text.append( "\n" ).append( "$Id: PegasusGetSites.java 4507 2011-08-29 16:13:32Z rynge $ "). append( "\n" ).append( getGVDSVersion() ). append( "\n" ).append( "Usage : pegasus-get-sites --source --grid --vo --sc --properties " ). append( "\n" ).append( "[--conf ] [-v] [-h]" ). append( "\n" ). append( "\n Mandatory Options " ). append( "\n --source the source to query for information. Defaults to OSGMM" ). append( "\n" ). 
append( "\n Other Options " ). append( "\n -g |--grid the grid for which to generate the site catalog "). append( "\n -o |--vo the virtual organization to which the user belongs " ). append( "\n -s |--sc the path to the created site catalog file" ). append( "\n -p |--properties the properties file to be created" ). append( "\n -c |--conf path to property file"). append( "\n -v |--verbose increases the verbosity of messages about what is going on" ). append( "\n -V |--version displays the version of the Pegasus Workflow Management System" ). append( "\n -h |--help generates this help." ); System.out.println( text.toString() ); } /** * The short help version. */ public void printShortVersion() { StringBuffer text = new StringBuffer(); text.append( "\n" ).append( "$Id: PegasusGetSites.java 4507 2011-08-29 16:13:32Z rynge $ "). append( "\n" ).append( getGVDSVersion() ). append( "\n" ).append( "Usage : pegasus-get-sites -source -g -o -s -p " ). append( "\n" ).append( "[-c ] [-v] [-h]" ); System.out.println( text.toString() ); } /** * Executes the command on the basis of the command line options passed. * * @param args */ public void executeCommand() { parseCommandLineArguments(getCommandLineOptions()); PegasusProperties p = PegasusProperties.getInstance(lookupConfProperty(getCommandLineOptions(), 'c')); p.setProperty( "pegasus.catalog.site", mSource ); if(mSCFile == null){ //no sc path is passed using command line //sc path is not set in the properties file go to default File f = new File(p.getSysConfDir(), "/sites.xml"); mSCFile = f.getAbsolutePath(); } //pass on the VO and Grid information as properties //to site catalog back end. if(mVO != null ){ p.setProperty( getPropertyKey( mSource, "vo" ), mVO ); } if(mGrid != null ){ p.setProperty( getPropertyKey( mSource, "grid" ), mGrid ); } try{ mCatalog = SiteFactory.loadInstance( p); } catch ( SiteFactoryException e ){ System.out.println( e.convertException() ); System.exit( 2 ); } SiteStore store = new SiteStore(); /* load all sites in site catalog */ try{ List s = new ArrayList(1); s.add( "*" ); mCatalog.load( s ); List toLoad = new ArrayList( mCatalog.list() ); toLoad.add( "local" ); //load into SiteStore from the catalog. int num = 0; for( Iterator it = toLoad.iterator(); it.hasNext(); ){ SiteCatalogEntry se = mCatalog.lookup( it.next() ); if( se != null ){ store.addEntry( se ); num++; } } mLogger.log( "Loaded " + num + " sites ", LogManager.INFO_MESSAGE_LEVEL ); //write DAX to file FileWriter scFw = new FileWriter( mSCFile ); mLogger.log( "Writing out site catalog to " + new File( mSCFile ).getAbsolutePath() , LogManager.CONSOLE_MESSAGE_LEVEL ); store.toXML( scFw, "" ); scFw.close(); //generate the SRM properties file only if //interfacing with OSGMM. if( mCatalog instanceof OSGMM ){ Properties properties = ((OSGMM)mCatalog).generateSRMProperties(); mLogger.log( "Number of SRM Properties retrieved " + properties.entrySet().size() , LogManager.CONSOLE_MESSAGE_LEVEL ); mLogger.log( properties.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); File file = ( mPropertiesFilename == null )? //default one in the working directory //create a temporary file in directory File.createTempFile( "pegasus.", ".properties", new File( "." 
) ): new File( mPropertiesFilename ); OutputStream os = new FileOutputStream( file ); mLogger.log( "Writing out properties to " + file.getAbsolutePath() , LogManager.CONSOLE_MESSAGE_LEVEL ); properties.store( os, "Pegasus SRM Properties" ); os.close(); } } catch ( SiteCatalogException e ){ e.printStackTrace(); } catch( IOException ioe ){ ioe.printStackTrace(); } } /** * Returns the full name of the property key with the appropriate prefix * * @param source the source i.e type of site catalog * @param key the basename of the key * * @return the property key. */ protected String getPropertyKey( String source, String key ){ //"pegasus.catalog.site.vors.grid" String lSource = source.toLowerCase(); StringBuffer property = new StringBuffer(); property.append( "pegasus.catalog.site" ).append( "." ). append( lSource ).append( "." ). append( key ); return property.toString(); } /** * Sets up the logging options for this class. Looking at the properties * file, sets up the appropriate writers for output and stderr. */ protected void setupLogging(){ //setup the logger for the default streams. mLogger = LogManagerFactory.loadSingletonInstance( mProps ); mLogger.logEventStart( "event.pegasus.pegasus-get-sites", "pegasus.version", mVersion ); } /** * Parses the command line arguments using GetOpt and sets the class * member variables. * * @param args the arguments passed by the user at command line. * * */ public void parseCommandLineArguments(String[] args){ LongOpt[] longOptions = generateValidOptions(); Getopt g = new Getopt("pegasus-get-sites", args, "1:g:o:s:p:c:hvV", longOptions, false); g.setOpterr(false); int option = 0; int level = 0; while ( (option = g.getopt()) != -1) { //System.out.println("Option tag " + (char)option); switch (option) { case '1': //--source mSource = g.getOptarg(); break; case 'g': //--grid mGrid = g.getOptarg(); break; case 'o': //--vo mVO = g.getOptarg(); break; case 's': //--sc mSCFile = g.getOptarg(); break; case 'p': //--properties mPropertiesFilename = g.getOptarg(); break; case 'c': // conf //do nothing break; case 'v': //--verbose level++; break; case 'V'://--version mLogger.log(getGVDSVersion(),LogManager.INFO_MESSAGE_LEVEL); System.exit(0); case 'h'://--help printLongVersion(); System.exit( 0 ); break; default: //same as help printShortVersion(); for( int i =0 ; i < args.length ; i++ ) System.out.println( args[i] ); throw new RuntimeException("Incorrect option or option usage " + (char)g.getOptopt()); } } if(level >0){ mLogger.setLevel( level ); } else{ mLogger.setLevel(LogManager.WARNING_MESSAGE_LEVEL); } if(mSource == null || mSource.isEmpty()){ mLogger.log("Please provide the source to query for information",LogManager.ERROR_MESSAGE_LEVEL); this.printShortVersion(); System.exit(1); } } /** * Generates valid LongOpts. 
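* <p>
* Each long option pairs with the short flag handled in
* {@link #parseCommandLineArguments(String[])}; --source has no natural
* single-letter flag, so it is bound to the synthetic short option '1'.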
* * @return LongOpt[] */ public LongOpt[] generateValidOptions() { LongOpt[] longopts = new LongOpt[9]; longopts[0] = new LongOpt( "source", LongOpt.REQUIRED_ARGUMENT, null, '1' ); longopts[1] = new LongOpt( "grid", LongOpt.REQUIRED_ARGUMENT, null, 'g' ); longopts[2] = new LongOpt( "vo", LongOpt.REQUIRED_ARGUMENT, null, 'o' ); longopts[3] = new LongOpt( "sc", LongOpt.REQUIRED_ARGUMENT, null, 's' ); longopts[4] = new LongOpt( "version", LongOpt.NO_ARGUMENT, null, 'V' ); longopts[5] = new LongOpt( "verbose", LongOpt.NO_ARGUMENT, null, 'v' ); longopts[6] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' ); longopts[7] = new LongOpt( "properties", LongOpt.REQUIRED_ARGUMENT, null, 'p' ); longopts[8] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' ); return longopts; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/DAXValidator.java0000644000175000017500000005064711757531137025711 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.client; import java.lang.System; import java.io.File; import java.text.DecimalFormat; import java.io.IOException; import org.xml.sax.Attributes; import org.xml.sax.XMLReader; import org.xml.sax.Locator; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import org.xml.sax.SAXNotRecognizedException; import org.xml.sax.SAXNotSupportedException; import org.xml.sax.helpers.DefaultHandler; import org.xml.sax.helpers.XMLReaderFactory; import org.apache.xerces.impl.Version; /** * This class reads to validate a DAX document. It requires at least Xerces-J 2.10. * * @author: Jens-S. Vöckler * @version: $Id: DAXValidator.java 3679 2011-05-05 22:55:54Z voeckler $ */ public class DAXValidator extends DefaultHandler { /** * Default parser is the Xerces parser. */ protected static final String vendorParserClass = "org.apache.xerces.parsers.SAXParser"; /** * URI namespace for DAX schema. */ public static final String SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/DAX"; /** * what is the name of the schema file in the filename hint? */ private String m_schemafile = "dax-3.3.xsd"; /** * Holds the instance of a {@link org.xml.sax.XMLReader} class. */ private XMLReader m_reader; /** * Keep the location within the document. */ private Locator m_location; /** * How verbose should we be? */ protected boolean m_verbose; /** * Counts the number of warnings. */ protected int m_warnings; /** * Counts the number of errors. */ protected int m_errors; /** * Counts the number of fatal errors. */ protected int m_fatals; /** * Sets a feature while capturing failed features right here. * * @param uri is the feature's URI to modify * @param flag is the new value to set. 
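* (For illustration: the constructor below uses this to toggle features,
* e.g. {@code set("http://xml.org/sax/features/validation", true)}.)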
* @return true, if the feature could be set, false for an exception */ private boolean set( String uri, boolean flag ) { boolean result = false; try { this.m_reader.setFeature( uri, flag ); result = true; } catch ( SAXNotRecognizedException e ) { System.err.println( "Unrecognized feature " + uri + ": " + e ); } catch ( SAXNotSupportedException e ) { System.err.println( "Unsupported feature " + uri + ": " + e ); } catch ( SAXException e ) { System.err.println( "Parser feature error: " + e ); } return result; } /** * Sets a SAX property while capturing failed features right here. * * @param uri is the property's URI to modify * @param value is the new value to set. * @return true, if the feature could be set, false for an exception */ private boolean prop( String uri, Object value ) { boolean result = false; try { this.m_reader.setProperty( uri, value ); result = true; } catch ( SAXNotRecognizedException e ) { System.err.println( "Unrecognized property " + uri + ": " + e ); } catch ( SAXNotSupportedException e ) { System.err.println( "Unsupported property " + uri + ": " + e ); } catch ( SAXException e ) { System.err.println( "Parser property error: " + e ); } return result; } /** * default c'tor */ public DAXValidator( boolean verbose ) throws Exception { m_reader = XMLReaderFactory.createXMLReader(vendorParserClass); m_reader.setContentHandler(this); m_reader.setErrorHandler(this); m_verbose = verbose; m_warnings = m_errors = m_fatals = 0; if ( m_verbose ) { System.err.println( "# XMLReader is " + org.apache.xerces.impl.Version.getVersion() ); } // // turn on almost all features that we can safely turn on. // WARNING: The features below assume Xerces-J 2.10 or greater. // // Perform namespace processing: prefixes will be stripped off // element and attribute names and replaced with the corresponding // namespace URIs. By default, the two will simply be concatenated, // but the namespace-sep core property allows the application to // specify a delimiter string for separating the URI part and the // local part. set( "http://xml.org/sax/features/namespaces", true ); // The methods of the org.xml.sax.ext.EntityResolver2 interface will // be used when an object implementing this interface is registered // with the parser using setEntityResolver. // // If the disallow DOCTYPE declaration feature is set to true // org.xml.sax.ext.EntityResolver2.getExternalSubset() will not be // called when the document contains no DOCTYPE declaration. set( "http://xml.org/sax/features/use-entity-resolver2", true ); // Validate the document and report validity errors. // // If this feature is set to true, the document must specify a // grammar. By default, validation will occur against DTD. For more // information, please, refer to the FAQ. If this feature is set to // false, and document specifies a grammar that grammar might be // parsed but no validation of the document contents will be // performed. set( "http://xml.org/sax/features/validation", true ); // true: The parser will validate the document only if a grammar is // specified. // false: Validation is determined by the state of the validation // feature. set( "http://apache.org/xml/features/validation/dynamic", false ); // Turn on XML Schema validation by inserting an XML Schema // validator into the pipeline. // // Validation errors will only be reported if the validation feature // is set to true. For more information, please, refer to the FAQ. 
// // Checking of constraints on a schema grammar which are either // time-consuming or memory intensive such as unique particle // attribution will only occur if the schema full checking feature // is set to true. set( "http://apache.org/xml/features/validation/schema", true ); // Enable full schema grammar constraint checking, including // checking which may be time-consuming or memory // intensive. Currently, unique particle attribution constraint // checking and particle derivation restriction checking are // controlled by this option. // // This feature checks the Schema grammar itself for additional // errors that are time-consuming or memory intensive. It does not // affect the level of checking performed on document instances that // use Schema grammars. set( "http://apache.org/xml/features/validation/schema-full-checking", true ); // Expose via SAX and DOM XML Schema normalized values for // attributes and elements. // // XML Schema normalized values will be exposed only if both schema // validation and validation features are set to true. set( "http://apache.org/xml/features/validation/schema/normalized-value", true ); // Send XML Schema element default values via characters(). // // XML Schema default values will be send via characters() if both // schema validation and validation features are set to true. set( "http://apache.org/xml/features/validation/schema/element-default", true ); // Augment Post-Schema-Validation-Infoset. // // This feature can be turned off to improve parsing performance. set( "http://apache.org/xml/features/validation/schema/augment-psvi", true ); // xsi:type attributes will be ignored until a global element // declaration has been found, at which point xsi:type attributes // will be processed on the element for which the global element // declaration was found as well as its descendants. set( "http://apache.org/xml/features/validation/schema/ignore-xsi-type-until-elemdecl", true ); // Enable generation of synthetic annotations. A synthetic // annotation will be generated when a schema component has // non-schema attributes but no child annotation. set( "http://apache.org/xml/features/generate-synthetic-annotations", true ); // Schema annotations will be laxly validated against available // schema components. set( "http://apache.org/xml/features/validate-annotations", true ); // All schema location hints will be used to locate the components // for a given target namespace. set( "http://apache.org/xml/features/honour-all-schemaLocations", true ); // Include external general entities. set( "http://xml.org/sax/features/external-general-entities", true ); // Include external parameter entities and the external DTD subset. set( "http://xml.org/sax/features/external-parameter-entities", true ); // Construct an optimal representation for DTD content models to // significantly reduce the likelihood a StackOverflowError will // occur when large content models are processed. // // Enabling this feature may cost your application some performance // when DTDs are processed so it is recommended that it only be // turned on when necessary. set( "http://apache.org/xml/features/validation/balance-syntax-trees", true ); // Enable checking of ID/IDREF constraints. // // This feature only applies to schema validation. set( "http://apache.org/xml/features/validation/id-idref-checking", true ); // Enable identity constraint checking. 
set( "http://apache.org/xml/features/validation/identity-constraint-checking", true ); // Check that each value of type ENTITY matches the name of an // unparsed entity declared in the DTD. // // This feature only applies to schema validation. set( "http://apache.org/xml/features/validation/unparsed-entity-checking", true ); // Report a warning when a duplicate attribute is re-declared. set( "http://apache.org/xml/features/validation/warn-on-duplicate-attdef", true ); // Report a warning if an element referenced in a content model is // not declared. set( "http://apache.org/xml/features/validation/warn-on-undeclared-elemdef", true ); // Report a warning for duplicate entity declaration. set( "http://apache.org/xml/features/warn-on-duplicate-entitydef", true ); // Do not allow Java encoding names in XMLDecl and TextDecl line. // // A true value for this feature allows the encoding of the file to // be specified as a Java encoding name as well as the standard ISO // encoding name. Be aware that other parsers may not be able to use // Java encoding names. If this feature is set to false, an error // will be generated if Java encoding names are used. set( "http://apache.org/xml/features/allow-java-encodings", false ); // Attempt to continue parsing after a fatal error. // // The behavior of the parser when this feature is set to true is // undetermined! Therefore use this feature with extreme caution // because the parser may get stuck in an infinite loop or worse. set( "http://apache.org/xml/features/continue-after-fatal-error", true ); // Load the DTD and use it to add default attributes and set // attribute types when parsing. // // This feature is always on when validation is on. set( "http://apache.org/xml/features/nonvalidating/load-dtd-grammar", true ); // Load the external DTD. // // This feature is always on when validation is on. set( "http://apache.org/xml/features/nonvalidating/load-external-dtd", true ); // Notifies the handler of character reference boundaries in the // document via the start/endEntity callbacks. set( "http://apache.org/xml/features/scanner/notify-char-refs", false ); // Notifies the handler of built-in entity boundaries (e.g &) in // the document via the start/endEntity callbacks. set( "http://apache.org/xml/features/scanner/notify-builtin-refs", false ); // A fatal error is thrown if the incoming document contains a // DOCTYPE declaration. set( "http://apache.org/xml/features/disallow-doctype-decl", true ); // Requires that a URI has to be provided where a URI is expected. // // It's recommended to set this feature to true if you want your // application/documents to be truly portable across different XML // processors. set( "http://apache.org/xml/features/standard-uri-conformant", true ); // Report the original prefixed names and attributes used for // namespace declarations. set( "http://xml.org/sax/features/namespace-prefixes", true ); // All element names, prefixes, attribute names, namespace URIs, and // local names are internalized using the // java.lang.String#intern(String):String method. set( "http://xml.org/sax/features/string-interning", true ); // Report the beginning and end of parameter entities to a // registered LexicalHandler. 
set( "http://xml.org/sax/features/lexical-handler/parameter-entities", true ); // set( "http://apache.org/xml/features/xinclude", true ); // set( "http://apache.org/xml/features/xinclude/fixup-base-uris", true ); // set( "http://apache.org/xml/features/xinclude/fixup-language", true ); // // set( "http://xml.org/sax/features/is-standalone", true ); // set( "http://xml.org/sax/features/unicode-normalization-checking", true ); // set( "http://xml.org/sax/features/use-attributes2", true ); // set( "http://xml.org/sax/features/use-locator2", true ); // The system identifiers passed to the notationDecl, // unparsedEntityDecl, and externalEntityDecl events will be // absolutized relative to their base URIs before reporting. // // This feature does not apply to EntityResolver.resolveEntity(), // which is not used to report declarations, or to // LexicalHandler.startDTD(), which already provides the // non-absolutized URI. set( "http://xml.org/sax/features/resolve-dtd-uris", true ); // true: When the namespace-prefixes feature is set to true, namespace // declaration attributes will be reported as being in the // http://www.w3.org/2000/xmlns/ namespace. // false: Namespace declaration attributes are reported as having no // namespace. set( "http://xml.org/sax/features/xmlns-uris", true ); String schemaLocation = null; String pegasus_home = System.getenv("PEGASUS_HOME"); if ( pegasus_home != null ) { File sl = new File( new File( pegasus_home, "etc" ), m_schemafile ); if ( sl.canRead() ) { schemaLocation = sl.toString(); } else { System.err.println( "Warning: Unable to read " + sl ); } } // The XML Schema Recommendation explicitly states that the // inclusion of schemaLocation/noNamespaceSchemaLocation attributes // is only a hint; it does not mandate that these attributes must be // used to locate schemas. Similar situation happens to // element in schema documents. This property allows the user to // specify a list of schemas to use. If the targetNamespace of a // schema (specified using this property) matches the // targetNamespace of a schema occurring in the instance document in // schemaLocation attribute, or if the targetNamespace matches the // namespace attribute of element, the schema specified by // the user using this property will be used (i.e., the // schemaLocation attribute in the instance document or on the // element will be effectively ignored). // // The syntax is the same as for schemaLocation attributes in // instance documents: e.g, "http://www.example.com // file_name.xsd". The user can specify more than one XML Schema in // the list. if ( schemaLocation != null ) { prop( "http://apache.org/xml/properties/schema/external-schemaLocation", SCHEMA_NAMESPACE + " " + schemaLocation ); if ( m_verbose ) System.err.println( "# will use " + schemaLocation ); } else { if ( m_verbose ) System.err.println( "# will use document schema hint" ); } // The size of the input buffer in the readers. This determines how // many bytes to read for each chunk. Some tests indicate that a // bigger buffer size can improve the parsing performance for // relatively large files. The default buffer size in Xerces is // 2K. This would give a good performance for small documents (less // than 10K). For documents larger than 10K, specifying the buffer // size to 4K or 8K will significantly improve the performance. But // it's not recommended to set it to a value larger than 16K. For // really tiny documents (1K, for example), you can also set it to a // value less than 2K, to get the best performance. 
prop( "http://apache.org/xml/properties/input-buffer-size", 16384 ); } // --- ErrorHandler --- public void warning(SAXParseException ex) throws SAXException { m_warnings++; System.err.println("WARNING in " + full_where() + ": " + ex.getMessage()); } public void error(SAXParseException ex) throws SAXException { m_errors++; System.err.println("ERROR in " + full_where() + ": " + ex.getMessage()); } public void fatalError(SAXParseException ex) throws SAXException { m_fatals++; System.err.println("FATAL in " + full_where() + ": " + ex.getMessage()); } // --- ContentHandler --- public void setDocumentLocator( Locator locator ) { this.m_location = locator; } private String full_where() { return ( "line " + m_location.getLineNumber() + ", col " + m_location.getColumnNumber() ); } private String where() { return ( m_location.getLineNumber() + ":" + m_location.getColumnNumber() ); } public void startDocument() throws SAXException { if ( m_verbose ) { System.out.println( where() + " *** start of document ***" ); } } public void endDocument() { if ( m_verbose ) { System.out.println( where() + " *** end of document ***" ); } } public void startElement( String nsURI, String localName, String qName, Attributes attrs ) throws SAXException { if ( m_verbose ) { System.out.print( where() + " <" + qName ); for ( int i=0; i < attrs.getLength(); i++ ) { System.out.print( " " + attrs.getQName(i) ); System.out.print( "=\"" + attrs.getValue(i) + "\"" ); } System.out.println(">"); } } public void endElement( String nsURI, String localName, String qName ) throws SAXException { if ( m_verbose ) { System.out.println( where() + " " ); } } public void characters( char[] ch, int start, int length ) throws SAXException { if ( m_verbose ) { String s = new String( ch, start, length ).trim(); if ( s.length() > 0 ) System.out.println( where() + " \"" + s + "\"" ); } } public void ignorableWhitespace( char[] ch, int start, int length ) throws SAXException { // if ( m_verbose ) { // String s = new String( ch, start, length ).trim(); // if ( s.length() > 0 ) System.out.println( where() + " \"" + s + "\"" ); // } } public void parse( String filename ) throws Exception { m_reader.parse(filename); } /** * Show how many warnings, errors and fatals were shown. * * @return true, if we should transmit an error exit code. */ public boolean statistics() { System.out.println(); System.out.print( m_warnings + " warnings, " ); System.out.print( m_errors + " errors, and " ); System.out.println( m_fatals + " fatal errors detected." ); return ( m_errors > 0 || m_fatals > 0 ); } // --- main --- public static void main( String args[] ) throws Exception { boolean fail = true; if ( args.length > 0 ) { try { DAXValidator validator = new DAXValidator( args.length > 1 ); validator.parse( args[0] ); fail = validator.statistics(); } catch ( IOException ioe ) { System.err.println( ioe ); } catch ( SAXException spe ) { System.err.println( spe ); } } if ( fail ) System.exit(1); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/PlotSpaceUsage.java0000644000175000017500000004020011757531137026266 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.client; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.planner.visualize.Callback; import edu.isi.pegasus.planner.visualize.KickstartParser; import edu.isi.pegasus.planner.visualize.spaceusage.Plot; import edu.isi.pegasus.planner.visualize.spaceusage.Ploticus; import edu.isi.pegasus.planner.visualize.spaceusage.KickstartOutputFilenameFilter; import edu.isi.pegasus.planner.visualize.spaceusage.SpaceUsage; import edu.isi.pegasus.planner.visualize.spaceusage.SpaceUsageCallback; import edu.isi.pegasus.planner.visualize.spaceusage.TailStatd; import org.griphyn.vdl.toolkit.FriendlyNudge; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.util.Date; import java.util.Iterator; import java.util.List; /** * This parses the kickstart records and generates input files for ploticus, * to visualize. * * * @author Karan Vahi * * @version $Revision: 3540 $ */ public class PlotSpaceUsage extends Executable{ /** * The default output directory. */ public static final String DEFAULT_OUTPUT_DIR = "."; /** * The default timing source. */ public static final String DEFAULT_TIMING_SOURCE = "Kickstart"; /** * The tailstatd timing source. */ public static final String TAILSTATD_TIMING_SOURCE = "Tailstatd"; /** * The input directory containing the kickstart records. */ private String mInputDir; /** * The output directory where to generate the ploticus output. */ private String mOutputDir; /** * The default basename given to the files. */ private String mBasename; /** * The logging level to be used. */ private int mLoggingLevel; /** * The size units. */ private String mSizeUnits; /** * The time units. */ private String mTimeUnits; /** * The timing source used to order the events. */ private String mTimingSource; /** * A boolean indicating to use stat information for estimating * directory sizes. */ private boolean mUseStatInfo; /** * Default constructor. */ public PlotSpaceUsage(){ super(); } /** * Initialize the PlotSpaceUsage object * @param opts the command line argument passed to the PlotSpaceUsage */ public void initialize(String[] opts){ super.initialize(opts); mLogMsg = new String(); mVersion = Version.instance().toString(); mOutputDir = this.DEFAULT_OUTPUT_DIR; mLoggingLevel = 0; mSizeUnits = "K"; mTimeUnits = null; mBasename = "ploticus"; mTimingSource = this.DEFAULT_TIMING_SOURCE; mUseStatInfo = false; } /** * The main program. * * * @param args the main arguments passed to the plotter. 
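     *
     * An illustrative invocation (option values assumed, not taken from the
     * original source):
     *   plot-space-usage -i run0001 -o plots -s M -t kickstart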
*/ public static void main(String[] args) { PlotSpaceUsage me = new PlotSpaceUsage(); int result = 0; double starttime = new Date().getTime(); double execTime = -1; try{ me.initialize(args); me.executeCommand(); } catch ( FactoryException fe){ me.log( fe.convertException() , LogManager.FATAL_MESSAGE_LEVEL); result = 2; } catch ( RuntimeException rte ) { //catch all runtime exceptions including our own that //are thrown that may have chained causes me.log( convertException(rte, me.mLogger.getLevel()), LogManager.FATAL_MESSAGE_LEVEL ); result = 1; } catch ( Exception e ) { //unaccounted for exceptions me.log(e.getMessage(), LogManager.FATAL_MESSAGE_LEVEL ); e.printStackTrace(); result = 3; } finally { double endtime = new Date().getTime(); execTime = (endtime - starttime)/1000; } // warn about non zero exit code if ( result != 0 ) { me.log("Non-zero exit-code " + result, LogManager.WARNING_MESSAGE_LEVEL ); } else{ //log the time taken to execute me.log("Time taken to execute is " + execTime + " seconds", LogManager.INFO_MESSAGE_LEVEL); } System.exit(result); } /** * Executes the command on the basis of the options specified. * * @param args the command line options. */ public void executeCommand() { parseCommandLineArguments(getCommandLineOptions()); //set logging level to warning if the level is not set by user if( mLoggingLevel > 0 ) { mLogger.setLevel( mLoggingLevel ); } else{mLogger.setLevel(LogManager.WARNING_MESSAGE_LEVEL);} //do sanity check on units mSizeUnits = mSizeUnits.trim(); if ( mSizeUnits.length() != 1 ){ throw new RuntimeException( "The valid size units can be K or M or G" ); } //do sanity check on input directory if( mInputDir == null ){ throw new RuntimeException( "You need to specify the directory containing kickstart records"); } File dir = new File( mInputDir ); if ( dir.isDirectory() ){ //see if it is readable if ( !dir.canRead() ){ throw new RuntimeException( "Cannot read directory " + mInputDir); } } else{ throw new RuntimeException( mInputDir + " is not a directory " ); } //sanity check on output directory dir = new File( mOutputDir ); if( dir.exists() ){ //directory already exists. 
            if ( dir.isDirectory() ){
                if ( !dir.canWrite() ){
                    throw new RuntimeException( "Cannot write out to output directory " + mOutputDir );
                }
            }
            else{
                //directory is a file
                throw new RuntimeException( mOutputDir + " is not a directory ");
            }
        }
        else{
            dir.mkdirs();
        }

        KickstartParser su = new KickstartParser();

        //determining the callback on the basis of timing source
        Callback c;
        if( mTimingSource.equalsIgnoreCase( this.DEFAULT_TIMING_SOURCE )){
            c = new SpaceUsageCallback();
        }
        else if ( mTimingSource.equalsIgnoreCase( this.TAILSTATD_TIMING_SOURCE )){
            c = new TailStatd();
        }
        else{
            throw new RuntimeException( "No callback available for timing source " + mTimingSource );
        }
        mLogger.log( "Timing Source being used is " + mTimingSource ,
                     LogManager.DEBUG_MESSAGE_LEVEL );

        c.initialize( mInputDir, mUseStatInfo );
        su.setCallback( c );

        //String dir = "/usr/sukhna/work/test/dags/ivdgl1/blackdiamond/run0004";
        File directory = new File( mInputDir );
        String[] files = directory.list( new KickstartOutputFilenameFilter() );
        for( int i = 0; i < files.length; i++){
            String file = mInputDir + File.separator + files[i];
            try {
                log( "Parsing file " + file , LogManager.DEBUG_MESSAGE_LEVEL );
                su.parseKickstartFile(file);
            }
            catch (IOException ioe) {
                log( "Unable to parse kickstart file " + file + convertException( ioe, mLogger.getLevel() ),
                     LogManager.DEBUG_MESSAGE_LEVEL);
            }
            catch( FriendlyNudge fn ){
                log( "Problem parsing file " + file + convertException( fn, mLogger.getLevel() ),
                     LogManager.WARNING_MESSAGE_LEVEL );
            }
        }

        //we are done with parsing
        c.done();

        SpaceUsage s = (SpaceUsage)c.getConstructedObject();
        s.sort();
        log( " Space Store is \n" + c.getConstructedObject(),
             LogManager.CONSOLE_MESSAGE_LEVEL);

        //generate the ploticus format
        Plot plotter = new Ploticus();
        plotter.initialize( mOutputDir, mBasename , mUseStatInfo);
        try{
            List result = plotter.plot( s, mSizeUnits.charAt( 0 ), mTimeUnits );
            for( Iterator it = result.iterator(); it.hasNext(); ){
                mLogger.log( "Written out file " + it.next(),
                             LogManager.CONSOLE_MESSAGE_LEVEL );
            }
        }
        catch (IOException ioe) {
            log( "Unable to plot the files " + convertException( ioe , mLogger.getLevel()),
                 LogManager.ERROR_MESSAGE_LEVEL);
        }
    }

    /**
     * Parses the command line arguments using GetOpt and sets the
     * corresponding member variables of this class.
     *
     * @param args the arguments passed by the user at command line.
     */
    public void parseCommandLineArguments(String[] args){
        LongOpt[] longOptions = generateValidOptions();
        Getopt g = new Getopt( "plot-space-usage", args, "b:i:o:s:t:T:c:uhvV", longOptions, false);
        g.setOpterr(false);
        int option = 0;
        while( (option = g.getopt()) != -1){
            //System.out.println("Option tag " + (char)option);
            switch (option) {
                case 'b'://the basename
                    this.mBasename = g.getOptarg();
                    break;
                case 'i'://dir
                    this.mInputDir = g.getOptarg();
                    break;
                case 'h'://help
                    printLongVersion();
                    System.exit( 0 );
                    return;
                case 'o'://output directory
                    this.mOutputDir = g.getOptarg();
                    break;
                case 's'://size-units
                    this.mSizeUnits = g.getOptarg();
                    break;
                case 't'://timing source
                    this.mTimingSource = g.getOptarg();
                    break;
                case 'T'://time units
                    this.mTimeUnits = g.getOptarg();
                    break;
                case 'u'://use-stat
                    this.mUseStatInfo = true;
                    break;
                case 'c': // conf
                    //do nothing
                    break;
                case 'v'://verbose
                    mLoggingLevel++;
                    break;
                case 'V'://version
                    mLogger.log(getGVDSVersion(),LogManager.INFO_MESSAGE_LEVEL);
                    System.exit(0);
                default: //same as help
                    printShortVersion();
                    throw new RuntimeException("Incorrect option or option usage " + (char)g.getOptopt());
            }
        }
    }

    /**
     * It generates the LongOpt objects which contain the valid options that the
     * command will accept.
     *
     * @return array of LongOpt objects, corresponding to the valid options
     */
    public LongOpt[] generateValidOptions(){
        LongOpt[] longopts = new LongOpt[11];
        longopts[0] = new LongOpt( "input", LongOpt.REQUIRED_ARGUMENT, null, 'i' );
        longopts[1] = new LongOpt( "output", LongOpt.REQUIRED_ARGUMENT, null, 'o' );
        longopts[2] = new LongOpt( "size-units", LongOpt.REQUIRED_ARGUMENT, null, 's' );
        longopts[3] = new LongOpt( "verbose", LongOpt.NO_ARGUMENT, null, 'v' );
        longopts[4] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' );
        longopts[5] = new LongOpt( "Version", LongOpt.NO_ARGUMENT, null, 'V' );
        longopts[6] = new LongOpt( "basename", LongOpt.REQUIRED_ARGUMENT, null, 'b' );
        longopts[7] = new LongOpt( "timing-source", LongOpt.REQUIRED_ARGUMENT, null, 't');
        longopts[8] = new LongOpt( "use-stat", LongOpt.NO_ARGUMENT, null, 'u' );
        longopts[9] = new LongOpt( "time-units", LongOpt.REQUIRED_ARGUMENT, null, 'T' );
        longopts[10] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' );
        return longopts;
    }

    /**
     * Prints out a short description of what the command does.
     */
    public void printShortVersion(){
        String text =
          "\n $Id: PlotSpaceUsage.java 3540 2011-04-21 02:10:27Z prasanth $ " +
          "\n " + getGVDSVersion() +
          "\n Usage : plot-space-usage [-Dprop [..]] -i <input directory> " +
          " [-o output directory] [-b basename] [-s size units] [-t timing source] " +
          " [-T time units] [-c <path to property file>] [-u] [-v] [-V] [-h]";
        System.out.println(text);
    }

    /**
     * Prints the long description, displaying in detail what the various options
     * to the command stand for.
     */
    public void printLongVersion(){
        String text =
           "\n $Id: PlotSpaceUsage.java 3540 2011-04-21 02:10:27Z prasanth $ "
           + "\n " + getGVDSVersion()
           + "\n plot-space-usage - A plotting tool that plots out the space usage on remote clusters over time"
           + "\n Usage: plot_space_usage [-Dprop [..]] --dir <input directory> [--base basename] "
           + "\n [--output output directory] [--timing-source source] [--use-stat] [--conf <path to property file>] [--verbose] [--Version] [--help] "
           + "\n"
           + "\n Mandatory Options "
           + "\n --input the directory where the kickstart records reside."
           + "\n Other Options "
           + "\n -b <basename> |--basename the basename prefix for constructing the ploticus files."
           + "\n -o <output directory> |--output the output directory where to generate the ploticus files."
+ "\n -s |--size-units the units in which you want the filesizes to be plotted (can be K or M or G)." + "\n -t |--timing-source the source from which the ordering of events is determined. " + "\n Can be kickstart or tailstatd. Defaults to kickstart." + "\n -T |--time-units the units in which you want the x axis to be plotted (seconds|minutes|hours) Defaults to seconds." + "\n -u |--use-stat use the file stat information in kickstart records to estimate directory usage" + "\n -c |--conf path to property file" + "\n -v |--verbose increases the verbosity of messages about what is going on" + "\n -V |--version displays the version of the Pegasus Workflow Planner" + "\n -h |--help generates this help." + "\n The following exitcodes are produced" + "\n 0 plotter was able to generate plots" + "\n 1 an error occured. In most cases, the error message logged should give a" + "\n clear indication as to where things went wrong." + "\n 2 an error occured while loading a specific module implementation at runtime" + "\n "; System.out.println(text); //mLogger.log(text,LogManager.INFO_MESSAGE_LEVEL); } /** * Loads all the properties that would be needed by the Toolkit classes. */ public void loadProperties(){ //empty for time being } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/PartitionDAX.java0000644000175000017500000003335111757531137025726 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.client; import edu.isi.pegasus.planner.parser.dax.DAXParser2; import edu.isi.pegasus.planner.parser.dax.Callback; import edu.isi.pegasus.planner.parser.dax.DAX2Graph; import edu.isi.pegasus.planner.parser.dax.DAX2LabelGraph; import edu.isi.pegasus.planner.parser.DAXParserFactory; import edu.isi.pegasus.planner.partitioner.WriterCallback; import edu.isi.pegasus.planner.partitioner.Partitioner; import edu.isi.pegasus.planner.partitioner.PartitionerFactory; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.planner.parser.Parser; import edu.isi.pegasus.planner.parser.dax.DAXParser; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; import java.io.File; import java.util.Date; import java.util.Map; /** * The class ends up partitioning the dax into smaller daxes according to the * various algorithms/criteria, to be used for deferred planning. * * * @author Karan Vahi * @version $Revision: 3778 $ */ public class PartitionDAX extends Executable { /** * The name of the default partitioner that is loaded, if none is specified * by the user. */ public static final String DEFAULT_PARTITIONER_TYPE = PartitionerFactory.DEFAULT_PARTITIONING_CLASS; /** * The path to the dax file that is to be partitioned. 
*/ private String mDAXFile; /** * The directory in which the partition daxes are generated. */ private String mDirectory; /** * The type of the partitioner to be used. Is the same as the name of the * implementing class. */ private String mType; /** * The default constructor. */ public PartitionDAX() { } /** * Initialize the PartitionDax object * @param opts the command line argument passed to the PartitionDax */ public void initalize(String[] opts){ super.initialize(opts); mDAXFile = null; mDirectory = "."; mType = DEFAULT_PARTITIONER_TYPE; } /** * The main function of the class, that is invoked by the jvm. It calls * the executeCommand function. * * @param args array of arguments. */ public static void main(String[] args){ PartitionDAX pdax = new PartitionDAX(); pdax.initalize(args); pdax.executeCommand(); } /** * Executes the partition dax on the basis of the options given by the * user. * * @param args the arguments array populated by the user options. */ public void executeCommand() { int option = 0; LongOpt[] longOptions = generateValidOptions(); Getopt g = new Getopt("PartitionDAX", getCommandLineOptions(), "vhVD:d:t:c:", longOptions, false); boolean help = false; boolean version = false; int status = 0; //log the starting time double starttime = new Date().getTime(); int level = 0; while ( (option = g.getopt()) != -1) { //System.out.println("Option tag " + option); switch (option) { case 'd': //dax mDAXFile = g.getOptarg(); break; case 'D': //dir mDirectory = g.getOptarg(); break; case 't': //type mType = g.getOptarg(); break; case 'c': // conf //do nothing break; case 'v': //verbose //set the verbose level in the logger level++; break; case 'V': //version version = true; break; case 'h': //help help = true; break; default: //same as help mLogger.log("Incorrect option or option usage " + (char)g.getOptopt(), LogManager.FATAL_MESSAGE_LEVEL); printShortVersion(); System.exit(1); break; } } if ( level > 0 ) { //set the logging level only if -v was specified //else bank upon the the default logging level mLogger.setLevel( level ); }else{ // default level is warning mLogger.setLevel(LogManager.WARNING_MESSAGE_LEVEL); } if ( ( help && version ) || help ) { printLongVersion(); System.exit( status ); } else if ( version ) { //print the version message mLogger.log( getGVDSVersion(), LogManager.INFO_MESSAGE_LEVEL ); System.exit( status ); } try{ String pdax = partitionDAX( mProps, mDAXFile, mDirectory, mType ); mLogger.log( "Partitioned DAX written out " + pdax, LogManager.CONSOLE_MESSAGE_LEVEL ); } catch( Exception e ){ mLogger.log( "", e, LogManager.FATAL_MESSAGE_LEVEL ); status = 1; } //log the end time and time execute double endtime = new Date().getTime(); double execTime = (endtime - starttime)/1000; mLogger.log("Time taken to execute is " + execTime + " seconds", LogManager.INFO_MESSAGE_LEVEL); System.exit( status ); } /** * @param properties the PegasusProperties * @param daxFile String * @param directory the directory where paritioned daxes reside * @param type the type of partitioning to use. * * @return the path to the pdax file. 
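     *
     * A typical call (illustrative values, not from the original source):
     * partitionDAX( properties, "blackdiamond.dax", ".", "label" ), where the
     * type argument must name a partitioner known to PartitionerFactory.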
*/ public String partitionDAX( PegasusProperties properties , String daxFile, String directory, String type ){ int status = 0; //sanity check for the dax file if ( daxFile == null || daxFile.length() == 0 ) { mLogger.log( "The dax file that is to be partitioned not " + "specified", LogManager.FATAL_MESSAGE_LEVEL ); printShortVersion(); status = 1; throw new RuntimeException( "Unable to partition" ); } //always try to make the directory //referred to by the directory File dir = new File( directory ); dir.mkdirs(); //build up the partition graph //String callbackClass = ( type.equalsIgnoreCase("label") ) ? // "DAX2LabelGraph": //graph with labels populated // "DAX2Graph"; //load the appropriate partitioner Callback callback = null; Partitioner partitioner = null; String daxName = null; int state = 0; try{ callback = DAXParserFactory.loadDAXParserCallback( type, properties, daxFile ); //set the appropriate key that is to be used for picking up the labels if( callback instanceof DAX2LabelGraph ){ ((DAX2LabelGraph)callback).setLabelKey( properties.getPartitionerLabelKey() ); } state = 1; PegasusBag bag = new PegasusBag(); bag.add( PegasusBag.PEGASUS_PROPERTIES, properties ); bag.add( PegasusBag.PEGASUS_LOGMANAGER, mLogger ); // DAXParser2 d = new DAXParser2( daxFile, bag, callback ); Parser p = (Parser)DAXParserFactory.loadDAXParser( bag, callback, daxFile ); p.startParser( daxFile ); state = 2; //get the graph map Map graphMap = (Map) callback.getConstructedObject(); //get the fake dummy root node GraphNode root = (GraphNode) graphMap.get( DAX2Graph.DUMMY_NODE_ID ); daxName = ( (DAX2Graph) callback).getNameOfDAX(); state = 3; partitioner = PartitionerFactory.loadInstance( properties, root, graphMap, type ); } catch ( FactoryException fe){ mLogger.log( fe.convertException() , LogManager.FATAL_MESSAGE_LEVEL); System.exit( 2 ); } catch( Exception e ){ int errorStatus = 1; switch(state){ case 0: mLogger.log( "Unable to load the DAXCallback", e, LogManager.FATAL_MESSAGE_LEVEL ); errorStatus = 2; break; case 1: mLogger.log( "Error while parsing the DAX file", e , LogManager.FATAL_MESSAGE_LEVEL ); errorStatus = 1; break; case 2: mLogger.log( "Error while determining the root of the parsed DAX", e, LogManager.FATAL_MESSAGE_LEVEL ); errorStatus = 1; break; case 3: mLogger.log( "Unable to load the partitioner", e, LogManager.FATAL_MESSAGE_LEVEL ); errorStatus = 2; break; default: mLogger.log( "Unknown Error", e, LogManager.FATAL_MESSAGE_LEVEL ); errorStatus = 1; break; } status = errorStatus; } if( status > 0 ){ throw new RuntimeException( "Unable to partition" ); } //load the writer callback that writes out //the partitioned daxes and PDAX WriterCallback cb = new WriterCallback(); cb.initialize( properties, daxFile, daxName, directory ); //start the partitioning of the graph partitioner.determinePartitions( cb ); return cb.getPDAX(); } /** * Generates the short version of the help on the stdout. */ public void printShortVersion() { String text = "\n $Id: PartitionDAX.java 3778 2011-05-16 22:51:46Z vahi $ " + "\n" + getGVDSVersion() + "\n Usage :partitiondax -d [-D ] " + " -t [-c ] [-v] [-V] [-h]"; mLogger.log(text,LogManager.ERROR_MESSAGE_LEVEL); } /** * Generated the long version of the help on the stdout. */ public void printLongVersion() { String text = "\n " + getGVDSVersion() + "\n CPlanner/partitiondax - The tool that is used to partition the dax " + "\n into smaller daxes for use in deferred planning." 
+ "\n " + "\n Usage :partitiondax --dax [--dir ] " + "\n --type [--conf ] [--verbose] [--version] " + "\n [--help]" + "\n" + "\n Mandatory Options " + "\n -d|--dax fn the dax file that has to be partitioned into smaller daxes." + "\n Other Options " + "\n -t|--type type the partitioning technique that is to be used for partitioning." + "\n -D|--dir dir the directory in which the partitioned daxes reside (defaults to " + "\n current directory)"+ "\n -c|--conf path to property file" + "\n -v|--verbose increases the verbosity of messages about what is going on." + "\n -V|--version displays the version number of the Griphyn Virtual Data System." + "\n -h|--help generates this help"; System.out.println(text); } /** * Tt generates the LongOpt which contain the valid options that the command * will accept. * * @return array of LongOpt objects , corresponding to the valid * options */ public LongOpt[] generateValidOptions() { LongOpt[] longopts = new LongOpt[7]; longopts[0] = new LongOpt("dir",LongOpt.REQUIRED_ARGUMENT,null,'D'); longopts[1] = new LongOpt("dax",LongOpt.REQUIRED_ARGUMENT,null,'d'); longopts[2] = new LongOpt("type",LongOpt.REQUIRED_ARGUMENT,null,'t'); longopts[3] = new LongOpt("verbose",LongOpt.NO_ARGUMENT,null,'v'); longopts[4] = new LongOpt("version",LongOpt.NO_ARGUMENT,null,'V'); longopts[5] = new LongOpt("help",LongOpt.NO_ARGUMENT,null,'h'); longopts[6] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' ); return longopts; } /** * Loads all the properties that are needed by this class. */ public void loadProperties(){ } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/RankDAX.java0000644000175000017500000003712611757531137024654 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.client; import edu.isi.pegasus.planner.catalog.site.SiteFactory; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.ranking.GetDAX; import edu.isi.pegasus.planner.ranking.Rank; import edu.isi.pegasus.planner.ranking.Ranking; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationFactory; import gnu.getopt.LongOpt; import gnu.getopt.Getopt; import java.util.StringTokenizer; import java.util.Collection; import java.util.List; import java.util.LinkedList; import java.util.Iterator; import java.io.File; import java.io.FileWriter; import java.io.PrintWriter; import java.io.IOException; import java.util.Date; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.planner.catalog.transformation.Mapper; /** * A client that ranks the DAX'es corresponding to the request id. 
* * * @author Karan Vahi * @version $Revision: 3541 $ */ public class RankDAX extends Executable { /** * The base directory where the ranked daxes are kept. */ private String mBaseDir; /** * The list of grid sites where the daxes can run. */ private List mSites; /** * The output file that lists the daxes in sorted order. */ private String mOutputFile; /** * The request id to get the daxes. */ private String mRequestID; /** * The bag of objects that Pegasus requires. */ private PegasusBag mBag; /** * The options to be passed ahead to pegasus plan. */ private PlannerOptions mPlannerOptions; /** * The top n workflows to execute and put in the rankings file */ private int mTopNum; /** * The default constructor. */ public RankDAX() { super(); } public void initialize(String[] opts){ super.initialize(opts); mBag = new PegasusBag(); mBag.add( PegasusBag.PEGASUS_LOGMANAGER, mLogger ); mBag.add( PegasusBag.PEGASUS_PROPERTIES, mProps ); mTopNum = Integer.MAX_VALUE; } /** * The main program for the CPlanner. * * * @param args the main arguments passed to the planner. */ public static void main(String[] args) { RankDAX me = new RankDAX(); int result = 0; double starttime = new Date().getTime(); double execTime = -1; try{ me.initialize(args); me.executeCommand(); } catch ( FactoryException fe){ me.log( fe.convertException() , LogManager.FATAL_MESSAGE_LEVEL); result = 2; } catch ( RuntimeException rte ) { //catch all runtime exceptions including our own that //are thrown that may have chained causes me.log( convertException(rte, me.mLogger.getLevel()), LogManager.FATAL_MESSAGE_LEVEL ); rte.printStackTrace(); result = 1; } catch ( Exception e ) { //unaccounted for exceptions me.log(e.getMessage(), LogManager.FATAL_MESSAGE_LEVEL ); result = 3; } finally { double endtime = new Date().getTime(); execTime = (endtime - starttime)/1000; } // warn about non zero exit code if ( result != 0 ) { me.log("Non-zero exit-code " + result, LogManager.WARNING_MESSAGE_LEVEL ); } else{ //log the time taken to execute me.log("Time taken to execute is " + execTime + " seconds", LogManager.INFO_MESSAGE_LEVEL); } System.exit( result ); } /** * Parses the command line arguments using GetOpt and sets the class * member variables. * * @param args the arguments passed by the user at command line. 
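     *
     * e.g. (illustrative values, not from the original source):
     *   rank-dax -r request-42 -d /tmp/daxes -s isi_viz,local -t 5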
* * */ public void parseCommandLineArguments(String[] args){ LongOpt[] longOptions = generateValidOptions(); Getopt g = new Getopt("rank-dax", args, "vhr:d:s:o:r:f:t:c:", longOptions, false); g.setOpterr(false); int option = 0; int level = 0; while ( (option = g.getopt()) != -1) { //System.out.println("Option tag " + (char)option); switch (option) { case 'd': //base directory mBaseDir = g.getOptarg(); break; case 's': //comma separated list of sites mSites = this.generateList( g.getOptarg() ); break; case 'o': //the output file where the ranked list is kept mOutputFile = g.getOptarg(); break; case 'r': //the request id mRequestID = g.getOptarg(); break; case 'v': //sets the verbosity level level++; break; case 'f'://the options to be passed to pegasus-plan mPlannerOptions = new CPlanner().parseCommandLineArguments( g.getOptarg().split( "\\s" ) ); mBag.add( PegasusBag.PLANNER_OPTIONS , mPlannerOptions ); break; case 't'://rank top t mTopNum = new Integer( g.getOptarg() ).intValue(); break; case 'c': // conf //do nothing break; case 'h': printShortHelp(); System.exit( 0 ); break; default: //same as help printShortHelp(); for( int i =0 ; i < args.length ; i++ ) System.out.println( args[i] ); throw new RuntimeException("Incorrect option or option usage " + (char)g.getOptopt()); } } if( level > 0 ){ mLogger.setLevel( level ); }else{ mLogger.setLevel(LogManager.WARNING_MESSAGE_LEVEL); } } /** * Executes the command on the basis of the options specified. * * @param args the command line options. */ public void executeCommand() { parseCommandLineArguments(getCommandLineOptions()); if( mRequestID == null ){ mLogger.log( "\nNeed to specify the request id.", LogManager.INFO_MESSAGE_LEVEL ); this.printShortVersion(); return; } if( mPlannerOptions == null ){ mPlannerOptions = new PlannerOptions(); } //set the request id in the properties mProps.setProperty( "pegasus.wings.request.id", mRequestID ); //override the sites if any are set in the forward options mPlannerOptions.setExecutionSites( mSites ); //load the site catalog using the factory // PoolInfoProvider sCatalog = SiteFactory.loadInstance( mProps, false ); // mBag.add( PegasusBag.SITE_CATALOG, sCatalog ); SiteStore s = SiteFactory.loadSiteStore( mSites, mBag ); mBag.add( PegasusBag.SITE_STORE, s ); //load the transformation catalog using the factory TransformationCatalog tCatalog = TransformationFactory.loadInstance( mBag ); mBag.add( PegasusBag.TRANSFORMATION_CATALOG, tCatalog ); //initialize the transformation mapper mBag.add( PegasusBag.TRANSFORMATION_MAPPER, Mapper.loadTCMapper( mProps.getTCMapperMode(), mBag ) ); //write out the daxes to the directory File dir = new File( mBaseDir, mRequestID ); Collection daxes; GetDAX getDax = new GetDAX(); try{ //log( "Writing daxes to directory " + dir, // LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_RANKING_RETRIEVE_DAX, LoggingKeys.REQUEST_ID, mRequestID ); getDax.connect( mProps ); daxes = getDax.get( mRequestID, dir.getAbsolutePath() ); mLogger.log( "Number of DAX'es retrieved " + daxes.size(), LogManager.CONSOLE_MESSAGE_LEVEL ); mLogger.logEventCompletion( ); mLogger.log( "Writing daxes to directory " + dir, LogManager.CONSOLE_MESSAGE_LEVEL); } finally{ getDax.close(); getDax = null; } //now rank the daxes Rank rank = new Rank(); rank.initialize( mBag, (List)mSites, mRequestID ); Collection rankings = rank.rank( daxes ); //write out the rankings file File f = null; if( mOutputFile == null ){ mLogger.log( "Output file not specified. 
Writing out ranked file in dir " + dir, LogManager.CONSOLE_MESSAGE_LEVEL ); f = new File( dir, "ranked_daxes.txt" ); } else{ f = new File( mOutputFile ); } log( "Writing out the ranking file " + f, LogManager.CONSOLE_MESSAGE_LEVEL ); try{ writeOutRankings( f, rankings ); }catch( IOException ioe ){ throw new RuntimeException( "Unable to write to file " + f , ioe ); } } /** * Writes out the ranking to the file. If the file is null then it is written * out to a file named ranked_daxes.txt in the directory where the daxes * reside * * @param file String * @param rankings Collection * * @throws IOException */ protected void writeOutRankings( File file , Collection rankings ) throws IOException{ //do a sanity check on the directory for the file specified File dir = file.getParentFile(); if( dir == null ){ dir = new File( "." ); mLogger.log( "Writing out ranking file to current workdir " + dir.getAbsolutePath(), LogManager.DEBUG_MESSAGE_LEVEL ); } sanityCheck( dir ); //write out the ranked daxes. PrintWriter pw = new PrintWriter( new FileWriter( file ) ); //write out header pw.println( "#\t DAX\tRANK\tRUNTIME " ); int i = 1; Iterator it = rankings.iterator(); while( it.hasNext() && i <= mTopNum ) { pw.println( it.next() ); i++; //pw.println( mPlannerOptions.toOptions() ); } //write out all the remaining as comments while( it.hasNext() ){ pw.println( "#" + it.next() ); } pw.close(); } /** * Checks the destination location for existence, if it can * be created, if it is writable etc. * * @param dir is the new base directory to optionally create. * * @throws IOException in case of error while writing out files. */ protected static void sanityCheck( File dir ) throws IOException{ if ( dir.exists() ) { // location exists if ( dir.isDirectory() ) { // ok, isa directory if ( dir.canWrite() ) { // can write, all is well return; } else { // all is there, but I cannot write to dir throw new IOException( "Cannot write to existing directory " + dir.getPath() ); } } else { // exists but not a directory throw new IOException( "Destination " + dir.getPath() + " already " + "exists, but is not a directory." ); } } else { // does not exist, try to make it if ( ! dir.mkdirs() ) { throw new IOException( "Unable to create directory " + dir.getPath() ); } } } /** * Loads all the properties that would be needed by the Toolkit classes. * Empty implementation. */ public void loadProperties(){ } /** * This method is used to print the long version of the command. */ public void printLongVersion(){ printShortHelp(); } /** * This is used to print the short version of the command. */ public void printShortVersion(){ printShortHelp(); } /** * This is used to print the short version of the command. */ public void printShortHelp(){ StringBuffer text = new StringBuffer(); text.append( "\n" ).append( " $Id: RankDAX.java 3541 2011-04-21 02:17:34Z prasanth $ "). append( "\n" ).append( getGVDSVersion() ). append( "\n" ).append( "Usage : rank-dax [-Dprop [..]] -r -f -d " ). append( "\n" ).append( " [-s site[,site[..]]] [-o ] [-t execute top t] [-c ] [-v] [-h]" ); System.out.println( text.toString() ); } /** * It generates the LongOpt which contain the valid options that the command * will accept. 
* * @return array of LongOpt objects , corresponding to the valid * options */ public LongOpt[] generateValidOptions(){ LongOpt[] longopts = new LongOpt[9]; longopts[0] = new LongOpt( "dir", LongOpt.REQUIRED_ARGUMENT, null, 'd' ); longopts[1] = new LongOpt( "sites", LongOpt.REQUIRED_ARGUMENT, null, 's' ); longopts[2] = new LongOpt( "output", LongOpt.REQUIRED_ARGUMENT, null, 'o' ); longopts[3] = new LongOpt( "verbose", LongOpt.NO_ARGUMENT, null, 'v' ); longopts[4] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' ); longopts[5] = new LongOpt( "request-id", LongOpt.OPTIONAL_ARGUMENT, null, 'r' ); longopts[6] = new LongOpt( "forward", LongOpt.REQUIRED_ARGUMENT, null, 'f' ); longopts[7] = new LongOpt( "top", LongOpt.REQUIRED_ARGUMENT, null, 't' ); longopts[8] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, 'c' ); return longopts; } /** * Generates a List by parsing a comma separated string. * * @param str the comma separted String. * * @return List containing the parsed values, in case of a null string * an empty List is returned. */ private List generateList( String str ){ List l = new LinkedList(); //check for null if( str == null ) { return l; } for ( StringTokenizer st = new StringTokenizer(str,","); st.hasMoreElements(); ){ l.add( st.nextToken().trim() ); } return l; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/PasoaProvenanceClient.java0000644000175000017500000016352511757531137027652 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.client; import java.io.BufferedReader; import java.io.StringReader; import java.io.FileReader; import java.net.URL; import java.util.LinkedList; import java.io.File; import java.util.List; import java.util.ArrayList; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.DocumentBuilder; import org.pasoa.common.Constants; import org.pasoa.pstructure.Record; import org.pasoa.pstructure.ActorStatePAssertion; import org.pasoa.pstructure.GlobalPAssertionKey; import org.pasoa.pstructure.InteractionKey; import org.pasoa.pstructure.InteractionPAssertion; import org.pasoa.pstructure.ObjectID; import org.pasoa.pstructure.RelationshipPAssertion; import org.pasoa.storeclient.ClientLib; import org.pasoa.util.httpsoap.WSAddressEndpoint; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.NodeList; import org.w3c.dom.Node; import org.xml.sax.InputSource; import java.io.StringWriter; import java.io.IOException; import java.io.Reader; import edu.isi.pegasus.planner.transfer.Refiner; import org.xml.sax.InputSource; public class PasoaProvenanceClient { /** change this to connect to the preserv server **/ public static String URL = "http://localhost:8080/preserv-1.0"; public static String XMLHEADER =""; public static String CONDOR= "www.cs.wisc.edu/condor"; public long filecount=0; public static String documentationStyle = "http://www.pasoa.org/schemas/pegasus"; public ClientLib clientLib = null; public URL provenanceStore = null; public String jobname=null; public String wf_label=null; public String wf_planned_time=null; public String transformation=null; public Element docelement=null; public Element daxelement=null; // public List input=null; // public List output=null; public List parents=null; public List children=null; public Map input = null; public Map output = null; public PasoaProvenanceClient(String url){ clientLib=new ClientLib(); try{ provenanceStore = new URL(url + "/record"); }catch(Exception e){ System.err.println("Bad Bad Bad url"); } } public PasoaProvenanceClient(){ clientLib=new ClientLib(); try{ provenanceStore = new URL(URL + "/record"); }catch(Exception e){ System.err.println("Bad Bad Bad url"); } } public static void main(String[] args) throws Exception { PasoaProvenanceClient cle=null; String jobfile=null; String daxfile=null; String dagfile=null; String url=null; if(args.length<3){ System.err.println("Usage: Client daxfile dagfile outfile"); // System.err.println("Usage: Client daxfile dagfile preservurl"); System.exit(1); }else if(args.length==3){ jobfile=args[2]; daxfile=args[0]; dagfile=args[1]; cle = new PasoaProvenanceClient(); } /*}else { jobfile=args[0]; daxfile=args[0]; dagfile=args[2]; url=args[3]; cle = new PasoaProvenanceClient(url); }*/ try{ cle.jobname=(new File(jobfile)).getName().split("\\.out")[0]; System.out.println("Processing job --- "+ cle.jobname); cle.parseKickstartRecord(jobfile); cle.parseDag(dagfile); List newlist=new ArrayList(); if(cle.parents!=null && !cle.parents.isEmpty()){ System.out.println("Adding parents "+ cle.parents); newlist.addAll(cle.parents); } if(cle.children!=null && !cle.children.isEmpty()){ System.out.println("Adding children "+ cle.children); newlist.addAll(cle.children); } System.out.println("Adding job "+ cle.jobname); newlist.add(cle.jobname); System.out.println("Job List is "+ newlist); cle.parseFiles(newlist); // cle.parseDaxFile(daxfile,newlist); // cle.parseInput(); 
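            // Sketch of the assumed state at this point (illustrative, not from
            // the original source): parseFiles() fills cle.input / cle.output
            // with job name -> logical file name lists, e.g.
            //   input  : { "preprocess_ID000001" : [ "f.a" ] }
            //   output : { "preprocess_ID000001" : [ "f.b1", "f.b2" ] }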
System.out.println("Inputs == "+cle.input); System.out.println("Outputs == "+cle.output); if(cle.jobname.startsWith( Refiner.STAGE_IN_PREFIX )|| (cle.jobname.startsWith(Refiner.STAGE_OUT_PREFIX))){ InteractionKey ik = cle.transferInvocationInteraction(); cle.transferCompletionInteraction(ik); } else if(cle.jobname.startsWith("new_rc_register")){ InteractionKey ik = cle.registerInvocationInteraction(); cle.registerCompletionInteraction(ik); } else if(cle.jobname.startsWith("create_dir")) { //write this handler } else if(cle.jobname.startsWith("clean_up")){ //write this handler }else if(cle.jobname.startsWith("pegasus_concat")){ //write this handler }else{ InteractionKey ik = cle.jobInvocationInteraction(); cle.jobCompletionInteraction(ik); } }catch (Exception e){ e.printStackTrace(); } } private void parseDag(String file) throws Exception{ BufferedReader bf = new BufferedReader(new FileReader(file)); String line = null; while((line=bf.readLine())!=null){ String[] list = null; if (line.startsWith("PARENT")){ list = line.split(" "); } if(list!=null){ if(list[1].equalsIgnoreCase(jobname)){ if(children==null){ children=new ArrayList(); } children.add(list[3]); } if(list[3].equalsIgnoreCase(jobname)){ if(parents==null){ parents=new ArrayList(); } parents.add(list[1]); } } } bf.close(); } private void parseKickstartRecord(String file) throws Exception{ DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); List records=extractToMemory(new File(file)); if(records!=null){ for (Iterator i=records.iterator();i.hasNext();){ Document msgDoc = db.parse(new InputSource(new StringReader((String)i.next()))); docelement = msgDoc.getDocumentElement(); transformation = docelement.getAttribute("transformation"); wf_label=docelement.getAttribute("wf-label"); wf_planned_time=docelement.getAttribute("wf-stamp"); } } } public List extractToMemory( java.io.File input ) throws Exception { List result = new ArrayList(); StringWriter out = null; // open the files int p1, p2, state = 0; try { BufferedReader in = new BufferedReader( new FileReader(input) ); out = new StringWriter(4096); String line = null; while ( (line = in.readLine()) != null ) { if ( (state & 1) == 0 ) { // try to copy the XML line in any case if ( (p1 = line.indexOf( " -1 ) if ( (p2 = line.indexOf( "?>", p1 )) > -1 ) { // out.write( line, p1, p2+2 ); System.out.println( "state=" + state + ", seen " ); } // start state with the correct root element if ( (p1 = line.indexOf( " -1 ) { if ( p1 > 0 ) line = line.substring( p1 ); System.out.println( "state=" + state + ", seen " ); out.write(XMLHEADER); ++state; } } if ( (state & 1) == 1 ) { out.write( line ); if ( (p1 = line.indexOf("")) > -1 ) { System.out.println( "state=" + state + ", seen " ); ++state; out.flush(); out.close(); result.add( out.toString() ); out = new StringWriter(4096); } } } in.close(); out.close(); } catch ( IOException ioe ) { throw new Exception( "While copying " + input.getPath() + " into temp. 
file: " + ioe.getMessage() ); } // some sanity checks if ( state == 0 ) throw new Exception( "File " + input.getPath() + " does not contain invocation records," + " assuming failure"); if ( (state & 1) == 1 ) throw new Exception( "File " + input.getPath() + " contains an incomplete invocation record," + " assuming failure" ); // done return result; } private void parseFiles(List jobs)throws Exception{ File infile = null; File outfile = null; List ilist = null; List temp = new ArrayList(jobs); for (Iterator i = temp.iterator(); i.hasNext(); ) { String job = (String) i.next(); if (job.startsWith( Refiner.STAGE_IN_PREFIX )) { //this is for stagein jobs outfile = new File(job + ".out.lof"); if (outfile.exists() && outfile.canRead() && outfile.length() != 0) { try { BufferedReader in = new BufferedReader(new FileReader(outfile)); String str; while ( (str = in.readLine()) != null) { if (output == null) { output = new HashMap(); } if (!output.containsKey(job)) { output.put(job, new ArrayList()); } ilist = (List) output.get(job); ilist.add(str); } in.close(); } catch (IOException e) { } } }else if (job.startsWith( Refiner.STAGE_OUT_PREFIX )) { //this is for stageout/inter tx jobs outfile = new File(job + ".out.lof"); if (outfile.exists() && outfile.canRead() && outfile.length() != 0) { try { BufferedReader in = new BufferedReader(new FileReader(outfile)); String str; while ( (str = in.readLine()) != null) { if (input == null) { input = new HashMap(); } if (!input.containsKey(job)) { input.put(job, new ArrayList()); } ilist = (List) input.get(job); ilist.add(str); } in.close(); } catch (IOException e) { } } }else if(job.startsWith( Refiner.INTER_POOL_PREFIX )){ outfile = new File(job + ".out.lof"); if (outfile.exists() && outfile.canRead() && outfile.length() != 0) { try { BufferedReader in = new BufferedReader(new FileReader(outfile)); String str; while ( (str = in.readLine()) != null) { if (output == null) { output = new HashMap(); } if (!output.containsKey(job)) { output.put(job, new ArrayList()); } ilist = (List) output.get(job); ilist.add(str); if (input == null) { input = new HashMap(); } if (!input.containsKey(job)) { input.put(job, new ArrayList()); } ilist = (List) input.get(job); ilist.add(str); } in.close(); } catch (IOException e) { } } } else if(job.startsWith("new_rc_register")){ BufferedReader bf =new BufferedReader(new FileReader(new File(job+".in"))); String line = null; while((line=bf.readLine())!=null){ String lfn=null; lfn= line.split(" ")[0]; if(input==null){ input=new HashMap(); } if(!input.containsKey(job)){ input.put(job, new ArrayList()); } ilist=(List)input.get(job); ilist.add(lfn); } bf.close(); }else if (job.startsWith("cln_")) { //this is for cleanup jobs infile = new File(job + ".in.lof"); if (infile.exists() && infile.canRead() && infile.length() != 0) { try { BufferedReader in = new BufferedReader(new FileReader(infile)); String str; while ( (str = in.readLine()) != null) { if (input == null) { input = new HashMap(); } if (!input.containsKey(job)) { input.put(job, new ArrayList()); } ilist = (List) input.get(job); ilist.add(str); } in.close(); } catch (IOException e) { } } } else if (!job.endsWith("_cdir")) { //this is a regular job outfile = new File(job + ".out.lof"); if (outfile.exists() && outfile.canRead() && outfile.length() != 0) { try { BufferedReader in = new BufferedReader(new FileReader(outfile)); String str; while ( (str = in.readLine()) != null) { if (output == null) { output = new HashMap(); } if (!output.containsKey(job)) { output.put(job, new 
ArrayList()); } ilist = (List) output.get(job); ilist.add(str); } in.close(); } catch (IOException e) { } } infile = new File(job + ".in.lof"); if (infile.exists() && infile.canRead() && infile.length() != 0) { try { BufferedReader in = new BufferedReader(new FileReader(infile)); String str; while ( (str = in.readLine()) != null) { if (input == null) { input = new HashMap(); } if (!input.containsKey(job)) { input.put(job, new ArrayList()); } ilist = (List) input.get(job); ilist.add(str); } in.close(); } catch (IOException e) { } } } } } private void parseDaxFile(String file, List jobs)throws Exception { DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); Document msgDoc = db.parse(new File(file)); NodeList nlist = msgDoc.getElementsByTagName("job"); List temp = new ArrayList(jobs); input = new HashMap(); output = new HashMap(); for (int i =0;i and obtaining the source urls BufferedReader bf =new BufferedReader(new FileReader(new File(jobname+".in"))); String line = null; StringBuffer message = new StringBuffer(""); while((bf.readLine())!=null){ line=bf.readLine(); filecount++; if(!jobname.startsWith(Refiner.STAGE_OUT_PREFIX)){ message.append(""+line+""); } else { String lfn= line.split("run\\d{4}/")[1]; message.append(""+line+""); } bf.readLine(); bf.readLine(); } bf.close(); message.append(""); // Convert it into a DOM Element DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); Document msgDoc = db.parse(new InputSource(new StringReader(message.toString()))); Element messageBody = msgDoc.getDocumentElement(); InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody); return ipa; } private InteractionPAssertion createTransferCompletionInteractionPAssertion() throws Exception { // Create an interaction p-assertion // First we make a local p-assertion id and then // we make a documentationStyle. In this case we // call it verbatium. // // In most cases, you'll be grabing the messageBody from the message // being sent between parties. So a SOAP message, or a CORBA message. // With this example we'll just use a hard coded message body. String localPAssertionId = "1"; // this message content will be obtained by parsing the transfer input files and obtaining the destination urls BufferedReader bf =new BufferedReader(new FileReader(new File(jobname+".in"))); String line = null; StringBuffer message = new StringBuffer(""); while((line=bf.readLine())!=null){ bf.readLine(); bf.readLine(); line = bf.readLine(); filecount++; if(jobname.startsWith(Refiner.STAGE_OUT_PREFIX)){ message.append(""+line+""); }else { String lfn= line.split("run\\d{4}/")[1]; message.append(""+line+""); } } bf.close(); message.append(""); // Convert it into a DOM Element DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); Document msgDoc = db.parse(new InputSource(new StringReader(message.toString()))); Element messageBody = msgDoc.getDocumentElement(); InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody); return ipa; } private ActorStatePAssertion createActorStatePAssertion(long count) throws Exception { // Create an actor state p-assertion // Just like the interaction p-assertion this p-assertion // needs a local p-assertion id. Remember, all the p-assertions // in one view need a different id. 
Therefore, we give this assertion // the id of "2" instead of "1". // // Again you'll typically be getting some state from the actor, // translating it to XML to create the actor state p-assertion // In this example, we just use a hard coded string. String localPAssertionId = "aspa-"+count; ActorStatePAssertion asa = new ActorStatePAssertion(localPAssertionId, docelement); return asa; } private RelationshipPAssertion createTransferRelationshipPAssertion(InteractionKey invocationik, long index) throws Exception { // Create a relationship p-assertion // Again a different local p-assertion id // // We'll create a "usage" relationship between the interaction p-assertion // and the actor state p-assertion. This relationship says that // message represented by interaction p-assertion "1" used the actor state // represented by actor state p-assertion "2". // There are no data accessors or links so we pass null. // Create the information to identify the subject of the relationship // Remember, parameter names must be identified and they need to be URIs String localPAssertionId = "rpa"+index; String subjectLocalPAssertionId = "1"; String subjectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/outputfile"; // Create the information to identify the object of the relationship String objectLocalPAssertionId = "1"; // points to the interaction p-assertion of the invocation interaction receiver GlobalPAssertionKey gpak = new GlobalPAssertionKey(invocationik, "receiver", objectLocalPAssertionId); String objectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/inputfile"; Element dataAccessor= createTransferDataAccessor(index); ObjectID objId = new ObjectID(gpak, objectParameterName, dataAccessor, null); // We add the objId to the list of objects. We only have one objectId here // but when making another type of relationship more than one objectId may // be required LinkedList objectIds = new LinkedList(); objectIds.add(objId); // Create the "use" relation. Again this should be a URI String relation = "http://pegasus.isi.edu/pasoa/relation/transfer/copy-of"; dataAccessor= createTransferDataAccessor( index); // Finally, create the relationship object and return it. RelationshipPAssertion rel = new RelationshipPAssertion(localPAssertionId, subjectLocalPAssertionId, dataAccessor, subjectParameterName, relation, objectIds); return rel; } //will have to do for handling merged jobs correctly. 
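    // Sketch (not part of the original source) of how the PASOA pieces used
    // above fit together when a view is recorded; the sink endpoint and job
    // name are illustrative:
    //
    //   WSAddressEndpoint src  = new WSAddressEndpoint( CONDOR );
    //   WSAddressEndpoint sink = new WSAddressEndpoint( "preprocess_ID000001" );
    //   InteractionKey key = new InteractionKey( src.getElement(), sink.getElement(),
    //                                            wf_label + wf_planned_time + "preprocess_ID000001" );
    //   GlobalPAssertionKey gpak = new GlobalPAssertionKey( key, "receiver", "1" );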
private InteractionPAssertion createMergedJobInvocationInteractionPAssertion() throws Exception{ String localPAssertionId = "1"; // this message content will be obtained by parsing the transfer input files and obtaining the source urls StringBuffer message = new StringBuffer(""); if(input!=null){ if(input.containsKey(jobname)){ List inputs = (List) input.get(jobname); for (Iterator i = inputs.iterator(); i.hasNext(); ) { message.append("" + (String) i.next() + ""); } } } message.append(""); // Convert it into a DOM Element DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); Document msgDoc = db.parse(new InputSource(new StringReader(message.toString()))); Element messageBody = msgDoc.getDocumentElement(); InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody); return ipa; } private InteractionPAssertion createJobInvocationInteractionPAssertion() throws Exception{ String localPAssertionId = "1"; // this message content will be obtained by parsing the transfer input files and obtaining the source urls StringBuffer message = new StringBuffer(""); if(input!=null){ if(input.containsKey(jobname)){ List inputs = (List) input.get(jobname); for (Iterator i = inputs.iterator(); i.hasNext(); ) { message.append("" + (String) i.next() + ""); } } } message.append(""); // Convert it into a DOM Element DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); Document msgDoc = db.parse(new InputSource(new StringReader(message.toString()))); Element messageBody = msgDoc.getDocumentElement(); InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody); return ipa; } private InteractionPAssertion createJobCompletionInteractionPAssertion() throws Exception{ String localPAssertionId = "1"; // this message content will be obtained by parsing the transfer input files and obtaining the source urls StringBuffer message = new StringBuffer(""); if(output!=null){ if(output.containsKey(jobname)){ List outputs = (List) output.get(jobname); for (Iterator i = outputs.iterator(); i.hasNext(); ) { message.append("" + (String) i.next() + ""); } } } message.append(""); // Convert it into a DOM Element DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); Document msgDoc = db.parse(new InputSource(new StringReader(message.toString()))); Element messageBody = msgDoc.getDocumentElement(); InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody); return ipa; } private RelationshipPAssertion createJobRelationshipPAssertion(InteractionKey invocationik, String filename, long index) throws Exception { String localPAssertionId = "rpa"+index; String subjectLocalPAssertionId = "1"; String subjectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/outputfile"; // Create the information to identify the object of the relationship String objectLocalPAssertionId = "1"; // points to the interaction p-assertion of the invocation interaction receiver GlobalPAssertionKey gpak = new GlobalPAssertionKey(invocationik, "receiver", objectLocalPAssertionId); String objectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/inputfile"; LinkedList objectIds = new LinkedList(); for(Iterator i=((List)input.get(jobname)).iterator();i.hasNext();){ Element dataAccessor= createLFNDataAccessor((String)i.next()); // We add the 
objId to the list of objects. We only have one objectId here // but when making another type of relationship more than one objectId may // be required objectIds.add(new ObjectID(gpak, objectParameterName, dataAccessor, null)); } // Create the "use" relation. Again this should be a URI String relation = "http://pegasus.isi.edu/pasoa/relation/transformation/product-of"; Element dataAccessor= createLFNDataAccessor(filename); // Finally, create the relationship object and return it. RelationshipPAssertion rel = new RelationshipPAssertion(localPAssertionId, subjectLocalPAssertionId, dataAccessor, subjectParameterName, relation, objectIds); return rel; } private RelationshipPAssertion createJobToTransferRelationshipPAssertion(String filename,String parentjob,int index) throws Exception{ String localPAssertionId = "rpa"+index; String subjectLocalPAssertionId = "1"; String subjectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/inputfile"; // Create the information to identify the object of the relationship String objectLocalPAssertionId = "1"; // points to the interaction p-assertion of the invocation interaction receiver // interaction. WSAddressEndpoint source = new WSAddressEndpoint(parentjob); WSAddressEndpoint sink = new WSAddressEndpoint(CONDOR); String interactionId = wf_label+wf_planned_time+parentjob; InteractionKey ik = new InteractionKey(source.getElement(), sink.getElement(), interactionId); GlobalPAssertionKey gpak = new GlobalPAssertionKey(ik, "receiver", objectLocalPAssertionId); String objectParameterName = "http://pegasus.isi.edu/schema/pasoa/type/outputfile"; Element dataAccessor= createLFNDataAccessor(filename); ObjectID objId = new ObjectID(gpak, objectParameterName, dataAccessor, null); // We add the objId to the list of objects. We only have one objectId here // but when making another type of relationship more than one objectId may // be required LinkedList objectIds = new LinkedList(); objectIds.add(objId); // Create the "use" relation. Again this should be a URI String relation = "http://pegasus.isi.edu/pasoa/relation/transfer/same-as"; // dataAccessor=createNameValueDataAccessor(filename); // Finally, create the relationship object and return it. RelationshipPAssertion rel = new RelationshipPAssertion(localPAssertionId, subjectLocalPAssertionId, dataAccessor, subjectParameterName, relation, objectIds); return rel; } private Element createTransferDataAccessor(long index){ Map namespaces = new HashMap(); namespaces.put("tr", "http://pegasus.isi.edu/schema/pasoa/content/transfer"); return new org.pasoa.accessors.snxpath.SingleNodeXPathManager().createAccessor("/tr:transfer[0]/tr:filename[" + index + "]", namespaces); } private Element createLFNDataAccessor(String value){ return new org.pasoa.accessors.lfn.LFNAccessorManager().createLFNAccessor(value); } private InteractionPAssertion createRegisterInvocationInteractionPAssertion() throws Exception { // Create an interaction p-assertion // First we make a local p-assertion id and then // we make a documentationStyle. In this case we // call it verbatium. // // In most cases, you'll be grabing the messageBody from the message // being sent between parties. So a SOAP message, or a CORBA message. // With this example we'll just use a hard coded message body. 
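        // For orientation (assumed input format, not from the original
        // source), a registration line that the loop below splits on a single
        // space would look like
        //   f.a gsiftp://host.example.org/scratch/run0001/f.a pool="local"
        // i.e. lfn[0] is the logical file name and lfn[1] the physical URL.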
String localPAssertionId = "1"; BufferedReader bf =new BufferedReader(new FileReader(new File(jobname+".in"))); String line = null; StringBuffer message = new StringBuffer(""); while((line=bf.readLine())!=null){ filecount++; String[] lfn= line.split(" "); message.append(""+lfn[1]+""); } message.append(""); // Convert it into a DOM Element DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); Document msgDoc = db.parse(new InputSource(new StringReader(message.toString()))); Element messageBody = msgDoc.getDocumentElement(); InteractionPAssertion ipa = new InteractionPAssertion(localPAssertionId, documentationStyle, messageBody); return ipa; } public InteractionKey registerInvocationInteraction() throws Exception{ System.out.println("We now create the transfer Invocation interaction key"); // Create addresses for the source and sink of the // interaction. WSAddressEndpoint source = new WSAddressEndpoint(CONDOR); WSAddressEndpoint sink = new WSAddressEndpoint(jobname); String interactionId = wf_label+wf_planned_time+jobname; InteractionKey ik = new InteractionKey(source.getElement(), sink.getElement(), interactionId); System.out.println("Building p-assertions..."); InteractionPAssertion ipa = createRegisterInvocationInteractionPAssertion(); List records=new ArrayList(); //setting sender type System.out.println("We are the sender/client view of the interaction"); String vk = Constants.SENDER_VIEW_TYPE; System.out.println(); //set asserter to CONDOR WSAddressEndpoint asserter = new WSAddressEndpoint(CONDOR); System.out.println("Creating Record objects for each p-assertion"); Record recIpa = new Record(ipa, ik, vk, asserter.getElement()); records.add(recIpa); Record recRpa = null; String tempparent=null; if(parents !=null || !parents.isEmpty()){ tempparent=(String)parents.get(0); } for(int i=0; i 0 ) wf_flag |= 1; else wf_label = null; } break; case 'm': if ( (wf_mtime = Currently.parse( opts.getOptarg() )) != null ) wf_flag |= 2; break; case 'n': noDBase = true; break; case 'N': ignoreDBFail = true; break; case 'v': verbose = me.increaseVerbosity(); break; case '?': System.out.println( "Invalid option '" + (char) opts.getOptopt() + "'" ); default: case 'h': me.showUsage(); return; } } // print usage information String arg0 = null; if ( opts.getOptind() >= args.length ) { System.out.println( "missing necessary file argument" ); me.showUsage(); return ; } // check for -m and -l if ( wf_flag != 0 && wf_flag != 3 ) { me.m_logger.log( "default", 0, "Warning: Options -m and -l should be used together!" ); } if ( wf_label != null && wf_label.length() > 32 ) { wf_label = wf_label.substring( 0, 32 ); me.m_logger.log( "default", 0, "Warning: Truncating workflow label to \"" + wf_label + "\"" ); } ChimeraProperties props = ChimeraProperties.instance(); DatabaseSchema dbschema = null; String ptcSchemaName = props.getPTCSchemaName(); if ( ptcSchemaName == null ) noDBase = true; if ( ! 
noDBase ) { try { Connect connect = new Connect(); dbschema = connect.connectDatabase(ptcSchemaName); } catch ( Exception e ) { if ( ignoreDBFail ) { // if dbase errors are not fatal, just record the fact String cls = e.getClass().getName(); String msg = e.getMessage(); if ( msg == null ) { Throwable t = e.getCause(); if ( t != null ) { cls = t.getClass().getName(); msg = t.getMessage(); } } me.m_logger.log( "default", 0, "While connecting to dbase: " + cls + ": " + msg + ", ignoring" ); dbschema = null; } else { // re-throw, if dbase errors are fatal (default) throw e; } } // check for invocation record support if ( dbschema == null || ! (dbschema instanceof PTC) ) { me.m_logger.log( "default", 0, "Your database cannot store invocation records" + ", assuming -n mode" ); noDBase = true; } } // instantiate parser pks = new ParseKickstart( dbschema, emptyFail ); pks.setNoDBase( noDBase ); pks.setIgnoreDBFail( ignoreDBFail ); pks.setWorkflowLabel( wf_label ); // null ok pks.setWorkflowTimestamp( wf_mtime ); // null ok dbschema = null; // decrease reference counter // for all files specified for ( int i=opts.getOptind(); i < args.length; ++i ) { List l = pks.parseFile( args[i] ); // determine result code if ( failOver ) { for ( Iterator j=l.iterator(); j.hasNext(); ) { int status = ((Integer) j.next()).intValue(); me.m_logger.log( "app", 1, "exit status = " + status ); if ( status != 0 ) result = ( result==0 ? status : 8 ); } } if ( result != 0 && earlyFail ) break; } // for } catch ( FriendlyNudge fn ) { me.m_logger.log( "default", 0, fn.getMessage() ); if ( failOver ) result = fn.getResult(); } catch ( Exception e ) { String cls = e.getClass().getName(); String msg = e.getMessage(); if ( msg == null ) { // another try Throwable t = e.getCause(); if ( t != null ) { msg = t.getMessage(); cls = t.getClass().getName(); } } if ( verbose > 0 ) e.printStackTrace(); System.err.println(cls + ": " + msg); result = 7; } finally { try { if ( pks != null ) pks.close(); } catch ( Exception e ) { me.m_logger.log( "default", 0, "ERROR: " + e.getMessage() ); } } // Java will return with 0 unless exit is used. Unfortunately, using // System.exit sometimes has some unwanted side-effects on d'tors, // thus avoid using it unless strictly necessary. // me.showThreads(); if ( result != 0 ) System.exit(result); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/client/CPlanner.java0000644000175000017500000023057411757531137025130 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.client; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.planner.catalog.site.SiteCatalogException; import edu.isi.pegasus.planner.catalog.site.SiteFactory; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.code.CodeGenerator; import edu.isi.pegasus.planner.code.CodeGeneratorFactory; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.DagInfo; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.planner.classes.PlannerMetrics; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.StreamGobbler; import edu.isi.pegasus.common.util.DefaultStreamGobblerCallback; import edu.isi.pegasus.planner.common.RunDirectoryFilenameFilter; import edu.isi.pegasus.planner.refiner.MainEngine; import edu.isi.pegasus.planner.parser.dax.Callback; import edu.isi.pegasus.planner.parser.DAXParserFactory; import edu.isi.pegasus.planner.parser.pdax.PDAXCallbackFactory; import edu.isi.pegasus.planner.parser.PDAXParser; import edu.isi.pegasus.planner.catalog.transformation.TransformationFactory; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.common.PegasusConfiguration; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.parser.Parser; import edu.isi.pegasus.planner.parser.dax.DAXParser; import gnu.getopt.Getopt; import gnu.getopt.LongOpt; import java.io.File; import java.io.IOException; import java.io.FileOutputStream; import java.nio.channels.FileLock; import java.util.Collection; import java.util.List; import java.util.Date; import java.util.Map; import java.util.Iterator; import java.text.NumberFormat; import java.text.DecimalFormat; import java.util.ArrayList; import java.util.Enumeration; import java.util.HashSet; import java.util.LinkedList; import java.util.Set; import java.io.BufferedReader; import java.io.FilenameFilter; import java.io.InputStream; import java.io.InputStreamReader; import java.util.logging.Level; import java.util.logging.Logger; import java.util.regex.Pattern; /** * This is the main program for the Pegasus. It parses the options specified * by the user and calls out to the appropriate components to parse the abstract * plan, concretize it and then write the submit files. * * @author Gaurang Mehta * @author Karan Vahi * @version $Revision: 5255 $ */ public class CPlanner extends Executable{ /** * The default megadag mode that is used for generation of megadags in * deferred planning. */ public static final String DEFAULT_MEGADAG_MODE = "dag"; /** * The basename of the directory that contains the submit files for the * cleanup DAG that for the concrete dag generated for the workflow. */ public static final String CLEANUP_DIR = "cleanup"; /** * The prefix for the NoOP jobs that are created. */ public static final String NOOP_PREFIX = "noop_"; /** * The name of the property key that determines whether pegasus-run * should monitord or not. 
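     *
     * For example (illustrative), a properties file written out for a
     * sub-workflow can contain
     * <pre>
     * pegasus.monitord = false
     * </pre>
     * to prevent pegasus-run from launching pegasus-monitord for it.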
*/ public static final String PEGASUS_MONITORD_LAUNCH_PROPERTY_KEY = "pegasus.monitord" ; /** * The regex used to match against a java property that is set using * -Dpropertyname=value in the argument string */ public static final String JAVA_COMMAND_LINE_PROPERTY_REGEX = "(env|condor|globus|dagman|pegasus)\\..*=.*" ; /** * The final successful message that is to be logged. */ private static final String EMPTY_FINAL_WORKFLOW_MESSAGE = "\n\n\n" + "The executable workflow generated contains only a single NOOP job.\n" + "It seems that the output files are already at the output site. \n"+ "To regenerate the output data from scratch specify --force option.\n" + "\n\n\n"; /** * The message to be logged in case of empty executable workflow. */ private static final String SUCCESS_MESSAGE = "\n\n\n" + "I have concretized your abstract workflow. The workflow has been entered \n" + "into the workflow database with a state of \"planned\". The next step is \n" + "to start or execute your workflow. The invocation required is" + "\n\n\n"; /** * The object containing all the options passed to the Concrete Planner. */ private PlannerOptions mPOptions; /** * The object containing the bag of pegasus objects */ private PegasusBag mBag; /** * The PlannerMetrics object storing the metrics about this planning instance. */ private PlannerMetrics mPMetrics; /** * The number formatter to format the run submit dir entries. */ private NumberFormat mNumFormatter; /** * The user name of the user running Pegasus. */ private String mUser; /** * Default constructor. */ public CPlanner(){ this( null ); } /** * The overload constructor. * * @param logger the logger object to use. can be null. */ public CPlanner( LogManager logger ){ super( logger ); } public void initialize(String [] opts , char confChar){ super.initialize(opts , confChar); mLogMsg = new String(); mVersion = Version.instance().toString(); mNumFormatter = new DecimalFormat( "0000" ); this.mPOptions = new PlannerOptions(); mPOptions.setSubmitDirectory( ".", null ); mPOptions.setExecutionSites(new java.util.HashSet()); mPOptions.setOutputSite(""); mUser = mProps.getProperty( "user.name" ) ; if ( mUser == null ){ mUser = "user"; } mPMetrics = new PlannerMetrics(); mPMetrics.setUser( mUser ); mBag = new PegasusBag(); } /** * The main program for the CPlanner. * * * @param args the main arguments passed to the planner. 
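     *
     * A typical invocation (illustrative; the site names are hypothetical):
     * <pre>
     * pegasus-plan --dax workflow.dax --sites condorpool --output local --dir dags --submit
     * </pre>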
*/ public static void main(String[] args) { CPlanner cPlanner = new CPlanner(); int result = 0; double starttime = new Date().getTime(); double execTime = -1; try{ cPlanner.initialize(args , '6'); cPlanner.executeCommand(); } catch ( FactoryException fe){ cPlanner.log( fe.convertException() , LogManager.FATAL_MESSAGE_LEVEL); result = 2; } catch ( RuntimeException rte ) { //catch all runtime exceptions including our own that //are thrown that may have chained causes cPlanner.log( convertException(rte, cPlanner.mLogger.getLevel() ), LogManager.FATAL_MESSAGE_LEVEL ); result = 1; } catch ( Exception e ) { //unaccounted for exceptions cPlanner.log( convertException(e, cPlanner.mLogger.getLevel() ), LogManager.FATAL_MESSAGE_LEVEL ); result = 3; } finally { double endtime = new Date().getTime(); execTime = (endtime - starttime)/1000; } // warn about non zero exit code if ( result != 0 ) { cPlanner.log("Non-zero exit-code " + result, LogManager.WARNING_MESSAGE_LEVEL ); } else{ //log the time taken to execute cPlanner.log("Time taken to execute is " + execTime + " seconds", LogManager.CONSOLE_MESSAGE_LEVEL ); } cPlanner.mLogger.logEventCompletion(); System.exit(result); } /** * Loads all the properties that are needed by this class. */ public void loadProperties(){ } /** * Executes the command on the basis of the options specified. * * @param args the command line options. */ public void executeCommand( ) { executeCommand( parseCommandLineArguments( getCommandLineOptions() ) ); } /** * Executes the command on the basis of the options specified. * * @param options the command line options. * * * @return the Collection of File objects for the files written * out. */ public Collection executeCommand( PlannerOptions options ) { String message = new String(); mPOptions = options; mBag.add( PegasusBag.PEGASUS_PROPERTIES, mProps ); mBag.add( PegasusBag.PLANNER_OPTIONS, mPOptions ); mBag.add( PegasusBag.PEGASUS_LOGMANAGER, mLogger ); Collection result = null; //print help if asked for if( mPOptions.getHelp() ) { printLongVersion(); return result; } //set the logging level only if -v was specified if(mPOptions.getLoggingLevel() >= 0){ mLogger.setLevel(mPOptions.getLoggingLevel()); } else{ //set log level to FATAL only mLogger.setLevel( LogManager.FATAL_MESSAGE_LEVEL ); } PegasusConfiguration configurator = new PegasusConfiguration( mLogger ); configurator.loadConfigurationPropertiesAndOptions( mProps , mPOptions ); //do sanity check on dax file String dax = mPOptions.getDAX(); String pdax = mPOptions.getPDAX(); String baseDir = mPOptions.getBaseSubmitDirectory(); if( dax == null && pdax == null ){ mLogger.log( "\nNeed to specify either a dax file ( using --dax ) or a pdax file (using --pdax) to plan", LogManager.CONSOLE_MESSAGE_LEVEL); this.printShortVersion(); return result; } if( mPOptions.getPartitioningType() != null ){ // partition and plan the workflow doPartitionAndPlan( mProps, options ); return result; } //check if sites set by user. If user has not specified any sites then //load all sites from site catalog. Collection eSites = mPOptions.getExecutionSites(); Set toLoad = new HashSet(); mLogger.log( "All sites will be loaded from the site catalog", LogManager.DEBUG_MESSAGE_LEVEL ); toLoad.add( "*" ); if( eSites.isEmpty() ) { mLogger.log("No sites given by user. 
Will use sites from the site catalog", LogManager.DEBUG_MESSAGE_LEVEL); eSites.add( "*" ); } //load the site catalog and transformation catalog accordingly SiteStore s = loadSiteStore( toLoad ); s.setForPlannerUse( mProps, mPOptions); if( eSites.contains( "*" ) ){ //set execution sites to all sites that are loaded into site store //only if a user passed * option on command line or did not specify eSites.addAll( s.list() ); } mLogger.log( "Execution sites are " + eSites, LogManager.DEBUG_MESSAGE_LEVEL ); //sanity check to make sure that output site is loaded if( mPOptions.getOutputSite() != null ){ String site = mPOptions.getOutputSite(); if( !s.list().contains( site ) ){ StringBuffer error = new StringBuffer( ); error.append( "The output site [" ).append( site ). append( "] not loaded from the site catalog." ); throw new RuntimeException( error.toString() ); } } mBag.add( PegasusBag.SITE_STORE, s ); mBag.add( PegasusBag.TRANSFORMATION_CATALOG, TransformationFactory.loadInstance( mBag ) ); //populate planner metrics mPMetrics.setStartTime( new Date() ); mPMetrics.setVOGroup( mPOptions.getVOGroup() ); mPMetrics.setBaseSubmitDirectory( mPOptions.getSubmitDirectory() ); mPMetrics.setDAX( mPOptions.getDAX() ); //try to get hold of the vds properties //set in the jvm that user specifed at command line mPOptions.setVDSProperties(mProps.getMatchingProperties("pegasus.",false)); List allVDSProps = mProps.getMatchingProperties("pegasus.",false); mLogger.log("Pegasus Properties set by the user",LogManager.CONFIG_MESSAGE_LEVEL ); for(java.util.Iterator it = allVDSProps.iterator(); it.hasNext();){ NameValue nv = (NameValue)it.next(); mLogger.log(nv.toString(),LogManager.CONFIG_MESSAGE_LEVEL); } if(dax == null && pdax != null && !eSites.isEmpty()){ //do the deferreed planning by parsing //the partition graph in the pdax file. result = doDeferredPlanning(); } else if(pdax == null && dax != null && !eSites.isEmpty()){ Parser p = (Parser)DAXParserFactory.loadDAXParser( mBag, "DAX2CDAG", dax ); Callback cb = ((DAXParser)p).getDAXCallback(); p.startParser( dax ); ADag orgDag = (ADag)cb.getConstructedObject(); //generate the flow ids for the classads information orgDag.dagInfo.generateFlowName(); orgDag.dagInfo.setFlowTimestamp( mPOptions.getDateTime( mProps.useExtendedTimeStamp() )); orgDag.dagInfo.setDAXMTime( new File(dax) ); orgDag.dagInfo.generateFlowID(); orgDag.dagInfo.setReleaseVersion(); //set out the root workflow id orgDag.setRootWorkflowUUID( determineRootWorkflowUUID( orgDag, this.mPOptions, this.mProps ) ); //log id hiearchy message //that connects dax with the jobs logIDHierarchyMessage( orgDag , LoggingKeys.DAX_ID, orgDag.getAbstractWorkflowName() ); //write out a the relevant properties to submit directory int state = 0; String relativeSubmitDir; //the submit directory relative to the base specified try{ //determine the relative submit directory relativeSubmitDir = ( mPOptions.getRelativeSubmitDirectory() == null )? 
//create our own relative dir determineRelativeSubmitDirectory( orgDag, baseDir, mUser, mPOptions.getVOGroup(), mProps.useTimestampForDirectoryStructure() ): mPOptions.getRelativeSubmitDirectory(); mPOptions.setSubmitDirectory( baseDir, relativeSubmitDir ); if( options.partOfDeferredRun() ){ if( !mPOptions.getForceReplan() ){ //if --force-replan is not set handle //rescue dags boolean rescue = handleRescueDAG( orgDag, mPOptions ); if( rescue ){ result = new LinkedList( ); result.add( new File ( mPOptions.getSubmitDirectory(), this.getDAGFilename( orgDag, mPOptions))); return result; } } //replanning case. rescues already accounted for earlier. //the relativeSubmitDir is to be a symlink to relativeSubmitDir.XXX relativeSubmitDir = doBackupAndCreateSymbolicLinkForSubmitDirectory( baseDir , relativeSubmitDir ); //update the submit directory again. mPOptions.setSubmitDirectory( baseDir, relativeSubmitDir ); mLogger.log( "Setting relative submit dir to " + relativeSubmitDir, LogManager.DEBUG_MESSAGE_LEVEL ); } else{ //create the relative submit directory if required sanityCheck( new File( baseDir, relativeSubmitDir) ); } state++; mProps.writeOutProperties( mPOptions.getSubmitDirectory() ); mPMetrics.setRelativeSubmitDirectory( mPOptions.getRelativeSubmitDirectory() ); //also log in the planner metrics where the properties are mPMetrics.setProperties( mProps.getPropertiesInSubmitDirectory() ); } catch( IOException ioe ){ String error = ( state == 0 ) ? "Unable to write to directory" : "Unable to write out properties to directory"; throw new RuntimeException( error + mPOptions.getSubmitDirectory() , ioe ); } //check if a random directory is specified by the user if(mPOptions.generateRandomDirectory() && mPOptions.getRandomDir() == null){ //user has specified the random dir name but wants //to go with default name which is the flow id //for the workflow unless a basename is specified. mPOptions.setRandomDir(getRandomDirectory(orgDag)); } else if( mPOptions.getRandomDir() != null ){ //keep the name that the user passed } else if( mPOptions.getRelativeDirectory() != null ){ //the relative-dir option is used to construct //the remote directory name mPOptions.setRandomDir( mPOptions.getRelativeDirectory() ); } else if( relativeSubmitDir != null ){ //the relative directory constructed on the submit host //is the one required for remote sites mPOptions.setRandomDir( relativeSubmitDir ); //also for time being set the relative dir option to //same as the relative submit directory. //Eventually we should have getRelativeExecDir function also //SLS interfaces use getRelativeDir for time being. mPOptions.setRelativeDirectory( relativeSubmitDir ); } //before starting the refinement process load //the stampede event generator and generate events for the dax generateStampedeEventsForAbstractWorkflow( orgDag , mBag ); //populate the singleton instance for user options //UserOptions opts = UserOptions.getInstance(mPOptions); MainEngine cwmain = new MainEngine( orgDag, mBag ); ADag finalDag = cwmain.runPlanner(); DagInfo ndi = finalDag.dagInfo; //store the workflow metrics from the final dag into //the planner metrics mPMetrics.setWorkflowMetrics( finalDag.getWorkflowMetrics() ); //we only need the script writer for daglite megadag generator mode CodeGenerator codeGenerator = null; codeGenerator = CodeGeneratorFactory. 
loadInstance( cwmain.getPegasusBag() ); //before generating the codes for the workflow check //for emtpy workflows boolean emptyWorkflow = false; if( finalDag.isEmpty() ){ mLogger.log( "Adding a noop job to the empty workflow ", LogManager.DEBUG_MESSAGE_LEVEL ); finalDag.add( this.createNoOPJob( getNOOPJobName( finalDag ) )); emptyWorkflow = true; } message = "Generating codes for the concrete workflow"; log( message, LogManager.INFO_MESSAGE_LEVEL ); try{ result = codeGenerator.generateCode(finalDag); //connect the DAX and the DAG via the hieararcy message List l = new ArrayList(1); l.add( finalDag.getExecutableWorkflowName() ); mLogger.logEntityHierarchyMessage( LoggingKeys.DAX_ID, finalDag.getAbstractWorkflowName(), LoggingKeys.DAG_ID, l ); //connect the jobs and the DAG via the hierarchy message this.logIDHierarchyMessage( finalDag, LoggingKeys.DAG_ID, finalDag.getExecutableWorkflowName() ); } catch ( Exception e ){ throw new RuntimeException( "Unable to generate code", e ); } finally{ //close the connection to transient replica catalog mBag.getHandleToTransientReplicaCatalog().close(); } mLogger.log( message + " -DONE", LogManager.INFO_MESSAGE_LEVEL ); // CLEANUP WORKFLOW GENERATION IS DISABLED FOR 3.2 // JIRA PM-529 // // //create the submit files for cleanup dag if // //random dir option specified // if(mPOptions.generateRandomDirectory() && !emptyWorkflow ){ // ADag cleanupDAG = cwmain.getCleanupDAG(); // // //the cleanup dags are never part of hierarichal workflows // //for time being // cleanupDAG.setRootWorkflowUUID( cleanupDAG.getWorkflowUUID() ); // // //set the refinement started flag to get right events // //generated for stampede for cleanup workflow // cleanupDAG.setWorkflowRefinementStarted( true ); // // PlannerOptions cleanupOptions = (PlannerOptions)mPOptions.clone(); // // //submit files are generated in a subdirectory // //of the submit directory // message = "Generating code for the cleanup workflow"; // mLogger.log( message, LogManager.INFO_MESSAGE_LEVEL ); // //set the submit directory in the planner options for cleanup wf // cleanupOptions.setSubmitDirectory( cleanupOptions.getSubmitDirectory(), CPlanner.CLEANUP_DIR ); // PegasusBag bag = cwmain.getPegasusBag(); // bag.add( PegasusBag.PLANNER_OPTIONS, cleanupOptions ); // // //create a separate properties file for the cleanup workflow. // //pegasus run should not launch monitord for the cleanup workflow // PegasusProperties cleanupWFProperties = PegasusProperties.nonSingletonInstance(); // cleanupWFProperties.setProperty( PEGASUS_MONITORD_LAUNCH_PROPERTY_KEY, "false" ); // try { // cleanupWFProperties.writeOutProperties(cleanupOptions.getSubmitDirectory()); // } catch (IOException ex) { // throw new RuntimeException( "Unable to write out properties for the cleanup workflow ", ex ); // } // bag.add( PegasusBag.PEGASUS_PROPERTIES , cleanupWFProperties ); // // codeGenerator = CodeGeneratorFactory. // loadInstance( bag ); // // try{ // codeGenerator.generateCode(cleanupDAG); // } // catch ( Exception e ){ // throw new RuntimeException( "Unable to generate code", e ); // } // // mLogger.log(message + " -DONE",LogManager.INFO_MESSAGE_LEVEL); // } // END OF COMMENTED OUT CODE //write out the planner metrics to global log mPMetrics.setEndTime( new Date() ); writeOutMetrics( mPMetrics ); if( mPOptions.submitToScheduler() ){//submit the jobs StringBuffer invocation = new StringBuffer(); //construct the path to the bin directory invocation.append( mProps.getBinDir() ).append( File.separator ). 
append( getPegasusRunInvocation ( ) ); boolean submit = submitWorkflow( invocation.toString() ); if ( !submit ){ throw new RuntimeException( "Unable to submit the workflow using pegasus-run" ); } } else{ //log the success message this.logSuccessfulCompletion( emptyWorkflow); } } else{ printShortVersion(); throw new RuntimeException("Invalid combination of arguments passed"); } return result; } /** * Returns the name of the noop job. * * @param dag the workflow * * @return the name */ public String getNOOPJobName( ADag dag ){ StringBuffer sb = new StringBuffer(); sb.append( CPlanner.NOOP_PREFIX ).append( dag.getLabel() ). append( "_" ).append( dag.dagInfo.index ); return sb.toString(); } /** * It creates a NoOP job that runs on the submit host. * * @param name the name to be assigned to the noop job * * @return the noop job. */ protected Job createNoOPJob( String name ) { Job newJob = new Job(); //jobname has the dagname and index to indicate different //jobs for deferred planning newJob.setName( name ); newJob.setTransformation( "pegasus", "noop", "1.0" ); newJob.setDerivation( "pegasus", "noop", "1.0" ); // newJob.setUniverse( "vanilla" ); newJob.setUniverse( GridGateway.JOB_TYPE.auxillary.toString()); //the noop job does not get run by condor //even if it does, giving it the maximum //possible chance newJob.executable = "/bin/true"; //construct noop keys newJob.setSiteHandle( "local" ); newJob.setJobType( Job.CREATE_DIR_JOB ); construct(newJob,"noop_job","true"); construct(newJob,"noop_job_exit_code","0"); //we do not want the job to be launched //by kickstart, as the job is not run actually newJob.vdsNS.checkKeyInNS( Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[GridStartFactory.NO_GRIDSTART_INDEX] ); return newJob; } /** * Constructs a condor variable in the condor profile namespace * associated with the job. Overrides any preexisting key values. * * @param job contains the job description. * @param key the key of the profile. * @param value the associated value. */ protected void construct(Job job, String key, String value){ job.condorVariables.checkKeyInNS(key,value); } /** * Parses the command line arguments using GetOpt and returns a * PlannerOptions contains all the options passed by the * user at the command line. * * @param args the arguments passed by the user at command line. * * @return the options. */ public PlannerOptions parseCommandLineArguments( String[] args ){ return parseCommandLineArguments( args, true ); } /** * Parses the command line arguments using GetOpt and returns a * PlannerOptions contains all the options passed by the * user at the command line. * * @param args the arguments passed by the user at command line. * @param sanitizePath whether to sanitize path during construction of options * * * @return the options. 
*/ public PlannerOptions parseCommandLineArguments( String[] args, boolean sanitizePath ){ LongOpt[] longOptions = generateValidOptions(); //store the args with which planner was invoked PlannerOptions options = new PlannerOptions(); options.setSanitizePath( sanitizePath ); options.setOriginalArgString( args ); Getopt g = new Getopt("pegasus-plan",args, "vqhfSnzpVr::aD:d:s:o:P:c:C:b:g:2:j:3:F:X:4:5:6:78:9:", longOptions,false); g.setOpterr(false); int option = 0; //construct the property matcher regex Pattern propertyPattern = Pattern.compile( CPlanner.JAVA_COMMAND_LINE_PROPERTY_REGEX ); while( (option = g.getopt()) != -1){ //System.out.println("Option tag " + (char)option); switch (option) { case 1://monitor options.setMonitoring( true ); break; case 'z'://deferred options.setPartOfDeferredRun( true ); break; case 'a'://authenticate options.setAuthentication(true); break; case 'b'://optional basename options.setBasenamePrefix(g.getOptarg()); break; case 'c'://cache options.setCacheFiles( g.getOptarg() ); break; case 'C'://cluster options.setClusteringTechnique( g.getOptarg() ); break; case '6':// conf //do nothing break; case 'd'://dax options.setDAX(g.getOptarg()); break; case 'D': // -Dpegasus.blah= String optarg = g.getOptarg(); //if( optarg.matches( "pegasus\\..*=.*" ) ){ if( propertyPattern.matcher( optarg ).matches() ){ options.setProperty( optarg ); } else{ //JIRA PM-390 dont accept -D for --dir //log warning StringBuffer sb = new StringBuffer(); sb.append( "Submit Directory can only be set by specifying the --dir option now. " ). append( "Setting -D to " ).append( optarg ).append(" does not work" ); mLogger.log( sb.toString(), LogManager.WARNING_MESSAGE_LEVEL ); } break; case '8'://dir option options.setSubmitDirectory( g.getOptarg(), null ); break; case '2'://relative-dir options.setRelativeDirectory( g.getOptarg() ); break; case '3'://rescue options.setNumberOfRescueTries( g.getOptarg() ); break; case '4'://relative-submit-dir options.setRelativeSubmitDirectory( g.getOptarg() ); break; case 'f'://force options.setForce(true); break; case '7'://force replan options.setForceReplan( true ); break; case 'F'://forward options.addToForwardOptions( g.getOptarg() ); break; case 'g': //group options.setVOGroup( g.getOptarg() ); break; case 'h'://help options.setHelp(true); break; case '5'://inherited-rc-files options.setInheritedRCFiles( g.getOptarg() ); break; case 'j'://job-prefix options.setJobnamePrefix( g.getOptarg() ); break; case 'm'://megadag option options.setMegaDAGMode(g.getOptarg()); break; case 'n'://nocleanup option options.setCleanup( false ); break; case 'o'://output options.setOutputSite(g.getOptarg()); break; case 'p'://partition and plan options.setPartitioningType( "Whole" ); break; case 'P'://pdax file options.setPDAX(g.getOptarg()); break; case 'q'://quiet options.decrementLogging(); break; case 'r'://randomdir options.setRandomDir(g.getOptarg()); break; case 'S'://submit option options.setSubmitToScheduler( true ); break; case 's'://sites options.setExecutionSites( g.getOptarg() ); break; case '9'://staging-sites options.addToStagingSitesMappings( g.getOptarg() ); break; case 'v'://verbose options.incrementLogging(); break; case 'V'://version mLogger.log(getGVDSVersion(),LogManager.CONSOLE_MESSAGE_LEVEL ); System.exit(0); case 'X'://jvm options options.addToNonStandardJavaOptions( g.getOptarg() ); break; default: //same as help printShortVersion(); throw new RuntimeException("Incorrect option or option usage " + (char)g.getOptopt()); } } return options; } /** * 
Submits the workflow for execution using pegasus-run, a wrapper around * pegasus-submit-dag. * * @param invocation the pegasus run invocation * * @return boolean indicating whether could successfully submit the workflow or not. */ public boolean submitWorkflow( String invocation ){ boolean result = false; try{ //set the callback and run the pegasus-run command Runtime r = Runtime.getRuntime(); mLogger.log( "Executing " + invocation, LogManager.DEBUG_MESSAGE_LEVEL ); Process p = r.exec( invocation ); //spawn off the gobblers with the already initialized default callback StreamGobbler ips = new StreamGobbler( p.getInputStream(), new DefaultStreamGobblerCallback( LogManager.CONSOLE_MESSAGE_LEVEL )); StreamGobbler eps = new StreamGobbler( p.getErrorStream(), new DefaultStreamGobblerCallback( LogManager.ERROR_MESSAGE_LEVEL)); ips.start(); eps.start(); //wait for the threads to finish off ips.join(); eps.join(); //get the status int status = p.waitFor(); mLogger.log( "Submission of workflow exited with status " + status, LogManager.DEBUG_MESSAGE_LEVEL ); result = (status == 0) ?true : false; } catch(IOException ioe){ mLogger.log("IOException while running tailstatd ", ioe, LogManager.ERROR_MESSAGE_LEVEL); } catch( InterruptedException ie){ //ignore } return result; } /** * Partitions and plans the workflow. First step of merging DAGMan and * Condor * * @param properties the properties passed to the planner. * @param options the options passed to the planner. */ protected void doPartitionAndPlan( PegasusProperties properties, PlannerOptions options ){ PegasusBag bag = new PegasusBag(); bag.add( PegasusBag.PEGASUS_LOGMANAGER, this.mLogger ); bag.add( PegasusBag.PEGASUS_PROPERTIES, properties ); bag.add( PegasusBag.PLANNER_OPTIONS, options ); String dax = options.getDAX(); Parser p = (Parser)DAXParserFactory.loadDAXParser( bag, "DAX2Metadata" , dax ); Callback cb = ((DAXParser)p).getDAXCallback(); try{ p.startParser( dax ); }catch( Exception e ){ //ignore } Map metadata = ( Map ) cb.getConstructedObject(); String label = (String) metadata.get( "name" ); String baseDir = options.getBaseSubmitDirectory(); String relativeDir = null; //construct the submit directory structure try{ relativeDir = (options.getRelativeDirectory() == null) ? 
//create our own relative dir
                         determineRelativeSubmitDirectory(label, baseDir, mUser, options.getVOGroup(), properties.useTimestampForDirectoryStructure()) : options.getRelativeDirectory(); //create the relative submit directory if required sanityCheck( new File( baseDir, relativeDir) ); } catch( IOException ioe ){ String error = "Unable to write to directory" ; throw new RuntimeException( error + options.getSubmitDirectory() , ioe ); } options.setSubmitDirectory( baseDir, relativeDir ); mLogger.log( "Submit Directory for workflow is " + options.getSubmitDirectory() , LogManager.DEBUG_MESSAGE_LEVEL ); //now let us run partitiondax //mLogger.log( "Partitioning Workflow" , LogManager.INFO_MESSAGE_LEVEL ); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_PARTITION, LoggingKeys.DAX_ID, options.getDAX() ); PartitionDAX partitionDAX = new PartitionDAX(); File dir = new File( options.getSubmitDirectory(), "dax" ); String pdax = partitionDAX.partitionDAX( properties, options.getDAX(), dir.getAbsolutePath(), options.getPartitioningType() ); mLogger.log( "PDAX file generated is " + pdax , LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.logEventCompletion(); //now run pegasus-plan with pdax option CPlanner pegasusPlan = new CPlanner(); options.setDAX( null ); options.setPDAX( pdax ); options.setPartitioningType( null ); pegasusPlan.executeCommand( options ); } /** * Sets the basename of the random directory that is created on the remote * sites per workflow. The name is generated by default from the flow ID, * unless a basename prefix is specified at runtime in the planner options. * * @param dag the DAG containing the abstract workflow. * * @return the basename of the random directory. */ protected String getRandomDirectory(ADag dag){ //constructing the name of the dagfile StringBuffer sb = new StringBuffer(); String bprefix = mPOptions.getBasenamePrefix(); if( bprefix != null){ //the prefix is not null, use it sb.append(bprefix); sb.append("-"); //append timestamp to generate some uniqueness sb.append(dag.dagInfo.getFlowTimestamp()); } else{ //use the flow ID that contains both the timestamp and the name. sb.append(dag.dagInfo.flowID); } return sb.toString(); } /** * It generates the LongOpt objects which contain the valid options that the command * will accept. 
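     * Each entry maps a long option name to the short option character that
     * the getopt loop in parseCommandLineArguments() dispatches on; for
     * example, the entry
     * <pre>
     * new LongOpt( "dax", LongOpt.REQUIRED_ARGUMENT, null, 'd' )
     * </pre>
     * makes --dax fn equivalent to -d fn.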
* * @return array of LongOpt objects , corresponding to the valid * options */ public LongOpt[] generateValidOptions(){ LongOpt[] longopts = new LongOpt[32]; longopts[0] = new LongOpt( "dir", LongOpt.REQUIRED_ARGUMENT, null, '8' ); longopts[1] = new LongOpt( "dax", LongOpt.REQUIRED_ARGUMENT, null, 'd' ); longopts[2] = new LongOpt( "sites", LongOpt.REQUIRED_ARGUMENT, null, 's' ); longopts[3] = new LongOpt( "output", LongOpt.REQUIRED_ARGUMENT, null, 'o' ); longopts[4] = new LongOpt( "verbose", LongOpt.NO_ARGUMENT, null, 'v' ); longopts[5] = new LongOpt( "help", LongOpt.NO_ARGUMENT, null, 'h' ); longopts[6] = new LongOpt( "force", LongOpt.NO_ARGUMENT, null, 'f' ); longopts[7] = new LongOpt( "submit", LongOpt.NO_ARGUMENT, null, 'S' ); longopts[8] = new LongOpt( "version", LongOpt.NO_ARGUMENT, null, 'V' ); longopts[9] = new LongOpt( "randomdir", LongOpt.OPTIONAL_ARGUMENT, null, 'r' ); longopts[10] = new LongOpt( "authenticate", LongOpt.NO_ARGUMENT, null, 'a' ); longopts[11] = new LongOpt( "conf", LongOpt.REQUIRED_ARGUMENT, null, '6' ); //deferred planning options longopts[12] = new LongOpt( "pdax", LongOpt.REQUIRED_ARGUMENT, null, 'P' ); longopts[13] = new LongOpt( "cache", LongOpt.REQUIRED_ARGUMENT, null, 'c' ); longopts[14] = new LongOpt( "megadag", LongOpt.REQUIRED_ARGUMENT, null, 'm' ); //collapsing for mpi longopts[15] = new LongOpt( "cluster", LongOpt.REQUIRED_ARGUMENT, null, 'C' ); //more deferred planning stuff longopts[16] = new LongOpt( "basename", LongOpt.REQUIRED_ARGUMENT, null, 'b' ); longopts[17] = new LongOpt( "monitor", LongOpt.NO_ARGUMENT, null , 1 ); longopts[18] = new LongOpt( "nocleanup", LongOpt.NO_ARGUMENT, null, 'n' ); longopts[19] = new LongOpt( "group", LongOpt.REQUIRED_ARGUMENT, null, 'g' ); longopts[20] = new LongOpt( "deferred", LongOpt.NO_ARGUMENT, null, 'z'); longopts[21] = new LongOpt( "relative-dir", LongOpt.REQUIRED_ARGUMENT, null, '2' ); longopts[22] = new LongOpt( "pap", LongOpt.NO_ARGUMENT, null, 'p' ); longopts[23] = new LongOpt( "job-prefix", LongOpt.REQUIRED_ARGUMENT, null, 'j' ); longopts[24] = new LongOpt( "rescue", LongOpt.REQUIRED_ARGUMENT, null, '3'); longopts[25] = new LongOpt( "forward", LongOpt.REQUIRED_ARGUMENT, null, 'F'); longopts[26] = new LongOpt( "X", LongOpt.REQUIRED_ARGUMENT, null, 'X' ); longopts[27] = new LongOpt( "relative-submit-dir", LongOpt.REQUIRED_ARGUMENT, null, '4' ); longopts[28] = new LongOpt( "quiet", LongOpt.NO_ARGUMENT, null, 'q' ); longopts[29] = new LongOpt( "inherited-rc-files", LongOpt.REQUIRED_ARGUMENT, null, '5' ); longopts[30] = new LongOpt( "force-replan" , LongOpt.NO_ARGUMENT, null, '7' ); longopts[31] = new LongOpt( "staging-site", LongOpt.REQUIRED_ARGUMENT, null, '9' ); return longopts; } /** * Prints out a short description of what the command does. */ public void printShortVersion(){ String text = "\n $Id: CPlanner.java 5255 2012-05-21 22:20:28Z vahi $ " + "\n " + getGVDSVersion() + "\n Usage : pegasus-plan [-Dprop [..]] -d " + " [-s site[,site[..]]] [--staging-site s1=ss1[,s2=ss2[..]][-b prefix] [-c f1[,f2[..]]] [--conf ] "+ "\n [-f] [--force-replan] [-b basename] [-C t1[,t2[..]] [--dir ] [-j ] " + "\n [--relative-dir ] [--relative-submit-dir ]" + "\n [--inherited-rc-files f1[,f2[..]]] " + "\n [-g ] [-o ] [-r[dir name]] [-F option[=value] ] " + //"[--rescue ]" "\n [-S] [-n] [-v] [-q] [-V] [-X[non standard jvm option] [-h]"; System.out.println(text); } /** * Prints the long description, displaying in detail what the various options * to the command stand for. 
*/ public void printLongVersion(){ String text = "\n $Id: CPlanner.java 5255 2012-05-21 22:20:28Z vahi $ " + "\n " + getGVDSVersion() + "\n pegasus-plan - The main class which is used to run Pegasus. " + "\n Usage: pegasus-plan [-Dprop [..]] --dax|--pdax [--sites ] " + "\n [--staging-site s1=ss1[,s2=ss2[..]] [--basename prefix] [--cache f1[,f2[..]] [--cluster t1[,t2[..]] [--conf ]" + "\n [--dir ] [--force] [--force-replan] [--forward option=[value] ] [--group vogroup] [--nocleanup] " + "\n [--output output site] [--randomdir=[dir name]] [--verbose] [--version][--help] " + "\n" + "\n Mandatory Options " + "\n -d fn "+ "\n --dax the path to the dax file containing the abstract workflow " + "\n Other Options " + "\n -b |--basename the basename prefix while constructing the per workflow files like .dag etc." + "\n -c |--cache comma separated list of replica cache files." + "\n --inherited-rc-files comma separated list of replica files. Locations mentioned in these have a lower priority than the locations in the DAX file" + "\n -C |--cluster comma separated list of clustering techniques to be applied to the workflow to " + "\n to cluster jobs in to larger jobs, to avoid scheduling overheads." + "\n --conf the path to the properties file to use for planning. " + "\n --dir the directory where to generate the concrete workflow." + "\n --relative-dir the relative directory to the base directory where to generate the concrete workflow." + "\n --relative-submit-dir the relative submit directory where to generate the concrete workflow. Overrids --relative-dir ." + "\n -f |--force skip reduction of the workflow, resulting in build style dag." + "\n --force-replan force replanning for sub workflows in case of failure. " + "\n -F |--forward any options that need to be passed ahead to pegasus-run in format option[=value] " + "\n where value can be optional. e.g -F nogrid will result in --nogrid . The option " + "\n can be repeated multiple times." + "\n -g |--group the VO Group to which the user belongs " + "\n -j |--job-prefix the prefix to be applied while construction job submit filenames " + "\n -o |--output the output site where the data products during workflow execution are transferred to." + "\n -s |--sites comma separated list of executions sites on which to map the workflow." + "\n --staging-site comma separated list of key=value pairs , where the key is the execution site and value is the staging site for that execution site." + "\n -r |--randomdir create random directories on remote execution sites in which jobs are executed" + // "\n --rescue the number of times rescue dag should be submitted for sub workflows before triggering re-planning" + "\n can optionally specify the basename of the remote directories" + "\n -n |--nocleanup generates only the separate cleanup workflow. Does not add cleanup nodes to the concrete workflow." + "\n -S |--submit submit the executable workflow generated" + "\n --staging-site comma separated list of key=value pairs, where key is the execution site and value is the staging site" + "\n -v |--verbose increases the verbosity of messages about what is going on" + "\n -q |--quiet decreases the verbosity of messages about what is going on" + "\n -V |--version displays the version of the Pegasus Workflow Management System" + "\n -X[non standard java option] pass to jvm a non standard option . e.g. -Xmx1024m -Xms512m" + "\n -h |--help generates this help." 
+ "\n The following exitcodes are produced" + "\n 0 concrete planner planner was able to generate a concretized workflow" + "\n 1 an error occured. In most cases, the error message logged should give a" + "\n clear indication as to where things went wrong." + "\n 2 an error occured while loading a specific module implementation at runtime" + "\n "; System.out.println(text); //mLogger.log(text,LogManager.INFO_MESSAGE_LEVEL); } /** * Determines the workflow uuid for a workflow * * @param dag the workflow * @param options the options passed to the planner * @param properties the properties passed to the planner * * @return uuid for the root workflow instance */ private String determineRootWorkflowUUID(ADag dag, PlannerOptions options, PegasusProperties properties ) { //figure out the root workflow uuid to put for pegasus-state //JIRA PM-396 String uuid = null; if( options.partOfDeferredRun() ){ //in recursive workflow we are not on the root , but some level //we have to retrive from properties uuid = properties.getRootWorkflowUUID(); } else{ //the root workflow uuid is the uuid of the workflow //being planned right now. We are on the root level of the recursive //workflows uuid = dag.getWorkflowUUID(); } if ( uuid == null ){ //something amiss throw new RuntimeException( "Unable to determine Root Workflow UUID" ); } return uuid; } /** * This ends up invoking the deferred planning code, that generates * the MegaDAG that is used to submit the partitioned daxes in layers. * * @return the Collection of File objects for the files written * out. */ private Collection doDeferredPlanning(){ String mode = mPOptions.getMegaDAGMode(); mode = (mode == null)? DEFAULT_MEGADAG_MODE: mode; String file = mPOptions.getPDAX(); //get the name of the directory from the file String directory = new File(file).getParent(); //System.out.println("Directory in which partitioned daxes are " + directory); int errorStatus = 1; ADag megaDAG = null; Collection result = null; try{ //load the correct callback handler edu.isi.pegasus.planner.parser.pdax.Callback c = PDAXCallbackFactory.loadInstance(mProps, mPOptions, directory); errorStatus = 2; //this is a bug. Should not be called. To be corrected by Karan //Commented for new Site catalog // UserOptions y = UserOptions.getInstance(mPOptions); //intialize the bag of objects and load the site selector PegasusBag bag = new PegasusBag(); bag.add( PegasusBag.PEGASUS_LOGMANAGER, mLogger ); bag.add( PegasusBag.PEGASUS_PROPERTIES, mProps ); bag.add( PegasusBag.PLANNER_OPTIONS, mPOptions ); // bag.add( PegasusBag.TRANSFORMATION_CATALOG, TCMode.loadDAXParserCallback() ); //bag.add( PegasusBag.TRANSFORMATION_MAPPER, mTCMapper ); bag.add( PegasusBag.PEGASUS_LOGMANAGER, mLogger ); bag.add( PegasusBag.SITE_STORE, mBag.getHandleToSiteStore() ); bag.add( PegasusBag.TRANSFORMATION_CATALOG, mBag.getHandleToTransformationCatalog() ); //start the parsing and let the fun begin PDAXParser p = new PDAXParser( file , mProps ); p.setCallback(c); p.startParser(file); megaDAG = (ADag)c.getConstructedObject(); CodeGenerator codeGenerator = null; //load the Condor Writer that understands HashedFile Factories. codeGenerator = CodeGeneratorFactory.loadInstance( bag ); errorStatus = 3; result = codeGenerator.generateCode( megaDAG ); } catch(FactoryException fe){ //just rethrow for time being. 
we need error status as 2 throw fe; } catch(Exception e){ String message; switch(errorStatus){ case 1: message = "Unable to load the PDAX Callback "; break; case 2: message = "Unable to parse the PDAX file "; break; case 3: message = "Unable to generate the code for the MegaDAG"; break; default: //unreachable code message = "Unknown Error " ; break; } throw new RuntimeException(message, e); } this.logSuccessfulCompletion( false ); return result; } /** * Creates the submit directory for the workflow. This is not thread safe. * * @param dag the workflow being worked upon. * @param dir the base directory specified by the user. * @param user the username of the user. * @param vogroup the vogroup to which the user belongs to. * @param timestampBased boolean indicating whether to have a timestamp based dir or not * * @return the directory name created relative to the base directory passed * as input. * * @throws IOException in case of unable to create submit directory. */ protected String determineRelativeSubmitDirectory( ADag dag, String dir, String user, String vogroup, boolean timestampBased ) throws IOException { return determineRelativeSubmitDirectory( dag.getLabel(), dir, user, vogroup, timestampBased ); } /** * Creates the submit directory for the workflow. This is not thread safe. * * @param label the label of the workflow * @param dir the base directory specified by the user. * @param user the username of the user. * @param vogroup the vogroup to which the user belongs to. * @param timestampBased boolean indicating whether to have a timestamp based dir or not * * @return the directory name created relative to the base directory passed * as input. * * @throws IOException in case of unable to create submit directory. */ protected String determineRelativeSubmitDirectory( String label, String dir, String user, String vogroup, boolean timestampBased ) throws IOException { File base = new File( dir ); StringBuffer result = new StringBuffer(); //do a sanity check on the base sanityCheck( base ); //add the user name if possible base = new File( base, user ); result.append( user ).append( File.separator ); //add the vogroup base = new File( base, vogroup ); sanityCheck( base ); result.append( vogroup ).append( File.separator ); //add the label of the DAX base = new File( base, label ); sanityCheck( base ); result.append( label ).append( File.separator ); //create the directory name StringBuffer leaf = new StringBuffer(); if( timestampBased ){ leaf.append( mPOptions.getDateTime( mProps.useExtendedTimeStamp() ) ); } else{ //get all the files in this directory String[] files = base.list( new RunDirectoryFilenameFilter() ); //find the maximum run directory int num, max = 1; for( int i = 0; i < files.length ; i++ ){ num = Integer.parseInt( files[i].substring( RunDirectoryFilenameFilter.SUBMIT_DIRECTORY_PREFIX.length() ) ); if ( num + 1 > max ){ max = num + 1; } } //create the directory name leaf.append( RunDirectoryFilenameFilter.SUBMIT_DIRECTORY_PREFIX ).append( mNumFormatter.format( max ) ); } result.append( leaf.toString() ); base = new File( base, leaf.toString() ); mLogger.log( "Directory to be created is " + base.getAbsolutePath(), LogManager.DEBUG_MESSAGE_LEVEL ); return result.toString(); } /** * Checks the destination location for existence, if it can * be created, if it is writable etc. * * @param dir is the new base directory to optionally create. * * @throws IOException in case of error while writing out files. 
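     *
     * For example (illustrative), the planner invokes it as
     * <pre>
     * sanityCheck( new File( baseDir, relativeSubmitDir ) );
     * </pre>
     * before any submit files are written to the directory.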
*/ protected static void sanityCheck( File dir ) throws IOException{ if ( dir.exists() ) { // location exists if ( dir.isDirectory() ) { // ok, isa directory if ( dir.canWrite() ) { // can write, all is well return; } else { // all is there, but I cannot write to dir throw new IOException( "Cannot write to existing directory " + dir.getPath() ); } } else { // exists but not a directory throw new IOException( "Destination " + dir.getPath() + " already " + "exists, but is not a directory." ); } } else { // does not exist, try to make it if ( ! dir.mkdirs() ) { //try to get around JVM bug. JIRA PM-91 if( dir.getPath().endsWith( "." ) ){ //just try to create the parent directory if( !dir.getParentFile().mkdirs() ){ throw new IOException( "Unable to create directory " + dir.getPath() ); } return; } throw new IOException( "Unable to create directory " + dir.getPath() ); } } } /** * Writes out the planner metrics to the global log. * * @param pm the metrics to be written out. * * @return boolean */ protected boolean writeOutMetrics( PlannerMetrics pm ){ boolean result = false; if ( mProps.writeOutMetrics() ) { File log = new File( mProps.getMetricsLogFile() ); //do a sanity check on the directory try{ sanityCheck( log.getParentFile() ); //open the log file in append mode FileOutputStream fos = new FileOutputStream( log ,true ); //get an exclusive lock FileLock fl = fos.getChannel().lock(); try{ mLogger.log( "Logging Planner Metrics to " + log, LogManager.DEBUG_MESSAGE_LEVEL ); //write out to the planner metrics to fos fos.write( pm.toString().getBytes() ); } finally{ fl.release(); fos.close(); } } catch( IOException ioe ){ mLogger.log( "Unable to write out planner metrics ", ioe, LogManager.DEBUG_MESSAGE_LEVEL ); return false; } result = true; } return result; } /** * Returns the basename of the dag file * * @param dag the dag that was parsed. * @param options the planner options * * @return boolean true means submit the rescue * false do the planning operation */ protected String getDAGFilename( ADag dag, PlannerOptions options ){ //determine the name of the .dag file that will be written out. //constructing the name of the dagfile StringBuffer sb = new StringBuffer(); String bprefix = options.getBasenamePrefix(); if( bprefix != null){ //the prefix is not null using it sb.append(bprefix); } else{ //generate the prefix from the name of the dag sb.append(dag.dagInfo.nameOfADag).append("-"). append(dag.dagInfo.index); } //append the suffix sb.append( ".dag" ); return sb.toString(); } /** * Checks for rescue dags, and determines whether to plan or not. * * * @param dag the dag that was parsed. * @param options the planner options * * @return boolean true means submit the rescue * false do the planning operation */ protected boolean handleRescueDAG( ADag dag, PlannerOptions options ) { return this.handleRescueDAG( getDAGFilename( dag, options ), options.getSubmitDirectory(), options.getNumberOfRescueTries() ); } /** * Checks for rescue dags, and determines whether to submit a rescue dag * or not. * * * @param dag the dag file for the dax * @param dir the submit directory. * @param numOfRescues the number of rescues to handle. 
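     *
     * Rescue files follow the DAGMan naming scheme, numbered with the
     * DecimalFormat "000" used below (the dag file name is illustrative):
     * <pre>
     * blackdiamond-0.dag.rescue001, blackdiamond-0.dag.rescue002, ...
     * </pre>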
     *
     * @return true means submit the rescue,
     *         false means do the planning operation
     */
    protected boolean handleRescueDAG( String dag, String dir, int numOfRescues ) {
        boolean result = false;

        //sanity check
        if( numOfRescues < 1 ){
            return result;
        }

        //check for existence of dag file
        //if it does not exist it means we need to plan
        File dagFile = new File( dir, dag );
        mLogger.log( "Determining existence of dag file " + dagFile.getAbsolutePath(),
                     LogManager.DEBUG_MESSAGE_LEVEL );
        if ( !dagFile.exists() ){
            return result;
        }

        /*
        //if it is the default max value, then return true always
        if( numOfRescues == PlannerOptions.DEFAULT_NUMBER_OF_RESCUE_TRIES ){
            return true;
        }
        */

        int largestRescue = 0;
        String largestRescueFile = null;
        //check for existence of latest rescue file.
        NumberFormat nf = new DecimalFormat( "000" );
        for( int i = 1; i <= numOfRescues + 1; i++ ){
            String rescue = dag + ".rescue" + nf.format( i );
            File rescueFile = new File( dir, rescue );
            mLogger.log( "Determining existence of rescue file " + rescueFile.getAbsolutePath(),
                         LogManager.DEBUG_MESSAGE_LEVEL );
            if( rescueFile.exists() ){
                largestRescue = i;
                largestRescueFile = rescue;
            }
            else{
                break;
            }
        }

        if( largestRescue == 0 ){
            //no rescue dag, but the dag still exists
            mLogger.log( "No planning attempted. Existing DAG will be submitted " + dagFile,
                         LogManager.CONSOLE_MESSAGE_LEVEL );
            return true;
        }

        if( largestRescue == numOfRescues + 1 ){
            //we need to start planning now
            mLogger.log( "Reached user specified limit of rescue retries " + numOfRescues +
                         ". Replanning will be triggered", LogManager.CONFIG_MESSAGE_LEVEL );
            return false;
        }

        if( largestRescueFile != null ){
            //a rescue file was detected, so log that
            mLogger.log( "Rescue DAG will be submitted. Largest Rescue File detected was " +
                         largestRescueFile, LogManager.CONSOLE_MESSAGE_LEVEL );
        }

        return true;
    }

    /**
     * This method generates a symlink between two files.
     *
     * @param source the file that has to be symlinked
     * @param destination the destination of the symlink
     * @param directory the directory in which to execute the command
     * @param logErrorToDebug whether to log the message at debug level or not
     *
     * @return boolean indicating if creation of symlink was successful or not
     *
     */
    protected boolean createSymbolicLink( String source, String destination , File directory, boolean logErrorToDebug ) {
        try{
            Runtime rt = Runtime.getRuntime();
            String command = "ln -sf " + source + " " + destination;
            mLogger.log( "Creating symlink between " + source + " " + destination,
                         LogManager.DEBUG_MESSAGE_LEVEL);
            Process p = ( directory == null )?
rt.exec( command, null )://dont specify the directory to execute in rt.exec( command, null, directory ); // set up to read subprogram output InputStream is = p.getInputStream(); InputStreamReader isr = new InputStreamReader(is); BufferedReader br = new BufferedReader(isr); // set up to read subprogram error InputStream er = p.getErrorStream(); InputStreamReader err = new InputStreamReader(er); BufferedReader ebr = new BufferedReader(err); // read output from subprogram // and display it String s,se=null; while ( ((s = br.readLine()) != null) || ((se = ebr.readLine()) != null ) ) { if(s!=null){ mLogger.log(s,LogManager.DEBUG_MESSAGE_LEVEL); } else { if( logErrorToDebug ){ mLogger.log( se, LogManager.DEBUG_MESSAGE_LEVEL ); } else{ mLogger.log(se,LogManager.ERROR_MESSAGE_LEVEL ); } } } br.close(); return true; } catch(Exception ex){ if( logErrorToDebug ){ mLogger.log("Unable to create symlink to the log file" , ex, LogManager.DEBUG_MESSAGE_LEVEL ); }else{ mLogger.log("Unable to create symlink to the log file" , ex, LogManager.ERROR_MESSAGE_LEVEL); } return false; } } /** * Generates events for the abstract workflow. * * @param workflow the parsed dax * @param bag the initialized object bag */ private void generateStampedeEventsForAbstractWorkflow(ADag workflow, PegasusBag bag ) { CodeGenerator codeGenerator = CodeGeneratorFactory.loadInstance( bag, CodeGeneratorFactory.STAMPEDE_EVENT_GENERATOR_CLASS ); String message = "Generating Stampede Events for Abstract Workflow"; log( message, LogManager.INFO_MESSAGE_LEVEL ); try{ Collection result = codeGenerator.generateCode( workflow ); for( Iterator it = result.iterator(); it.hasNext() ;){ mLogger.log("Written out stampede events for the abstract workflow to " + it.next(), LogManager.DEBUG_MESSAGE_LEVEL); } } catch ( Exception e ){ throw new RuntimeException( "Unable to generate stampede events for abstract workflow", e ); } mLogger.log( message + " -DONE", LogManager.INFO_MESSAGE_LEVEL ); } /** * Loads the sites from the site catalog into the site store * * @param sites * @return SiteStore object containing the information about the sites. */ private SiteStore loadSiteStore( Set sites ) { SiteStore result = new SiteStore(); SiteCatalog catalog = null; /* load the catalog using the factory */ catalog = SiteFactory.loadInstance( mProps ); Set toLoad = new HashSet( sites ); /* we want to load the staging sites mentioned for the execution sites */ for( String eSite: sites ){ String ss = this.mPOptions.getStagingSite( eSite ); if( eSite != null ){ toLoad.add( ss ); } } /* always load local site */ toLoad.add( "local" ); /* load the sites in site catalog */ try{ catalog.load( new LinkedList( toLoad) ); //load into SiteStore from the catalog. 
if( toLoad.contains( "*" ) ){ //we need to load all sites into the site store toLoad.addAll( catalog.list() ); } for( Iterator it = toLoad.iterator(); it.hasNext(); ){ SiteCatalogEntry s = catalog.lookup( it.next() ); if( s != null ){ result.addEntry( s ); } } /* query for the sites, and print them out */ mLogger.log( "Sites loaded are " + result.list( ) , LogManager.DEBUG_MESSAGE_LEVEL ); } catch ( SiteCatalogException e ){ throw new RuntimeException( "Unable to load from site catalog " , e ); } finally{ /* close the connection */ try{ catalog.close(); }catch( Exception e ){} } return result; } /** * Logs a message that connects the jobs with DAX/DAG * * * @param dag the DAG object * @param parentType the parent type * @param parentID the parent id */ private void logIDHierarchyMessage(ADag dag, String parentType, String parentID ) { //log the create id hierarchy message that //ties the DAX with the jobs in it. //in bunches of 1000 Enumeration e = dag.vJobSubInfos.elements(); while( e.hasMoreElements() ){ List l = new LinkedList(); for( int i = 0; e.hasMoreElements() && i++ < 1000; ){ Job job = (Job)e.nextElement(); l.add( job.getID() ); } mLogger.logEntityHierarchyMessage( parentType, parentID, LoggingKeys.JOB_ID, l ); } } /** * Logs the successful completion message. * * @param emptyWorkflow indicates whether the workflow created was empty or not. */ private void logSuccessfulCompletion( boolean emptyWorkflow ){ StringBuffer message = new StringBuffer(); message.append( emptyWorkflow ? CPlanner.EMPTY_FINAL_WORKFLOW_MESSAGE : CPlanner.SUCCESS_MESSAGE ). append( "" ).append( getPegasusRunInvocation( ) ). append( "\n\n" ); mLogger.log( message.toString(), LogManager.CONSOLE_MESSAGE_LEVEL ); } /** * Returns the pegasus-run invocation on the workflow planned. * * * @return the pegasus-run invocation */ private String getPegasusRunInvocation( ){ StringBuffer result = new StringBuffer(); result.append( "pegasus-run "); //check if we need to add any other options to pegasus-run for( Iterator it = mPOptions.getForwardOptions().iterator(); it.hasNext() ; ){ NameValue nv = it.next(); result.append( " --" ).append( nv.getKey() ); if( nv.getValue() != null ){ result.append( " " ).append( nv.getValue() ); } } result.append( " " ).append( mPOptions.getSubmitDirectory() ); return result.toString(); } protected String doBackupAndCreateSymbolicLinkForSubmitDirectory( String baseDir, String relativeSubmitDir) throws IOException { //find the maximum run directory //get the parent of the current relativeSubmitDir File f = new File( relativeSubmitDir ); String relativeParentSubmitDir = f.getParent(); File parent = ( relativeParentSubmitDir == null ) ? new File( baseDir ): new File( baseDir, relativeParentSubmitDir ); String basename = f.getName(); int num, max = 0; String prefix = basename + "."; //check if parent exists. first time around the submit directory for //sub workflow may not exist if( parent.exists() ){ String[] files = parent.list( new SubmitDirectoryFilenameFilter( basename ) ); for( int i = 0; i < files.length ; i++ ){ num = Integer.parseInt( files[i].substring( prefix.length() ) ); if ( num + 1 > max ){ max = num + 1; } } } //create the directory name NumberFormat formatter = new DecimalFormat( "000" ); //prefix is just the basename of relativeSubmitDir.XXX prefix = prefix + formatter.format( max ) ; String relativeSubmitDirXXX = ( relativeParentSubmitDir == null ) ?
new File( prefix ).getPath(): new File( relativeParentSubmitDir, prefix ).getPath(); //create the relativeSubmitDirXXX File fRelativeSubmitDirXXX = new File( baseDir, relativeSubmitDirXXX ); sanityCheck( fRelativeSubmitDirXXX ); //we have to create a symlink between relativeSubmitDir and relativeSubmitDir.xxx //and update relativeSubmitDir to be relativeSubmitDir.xxx File destination = new File( baseDir, relativeSubmitDir); if( destination.exists() ){ //delete existing file //no way in java to detect if a file is a symbolic link destination.delete(); } //we want symlinks to be created in parent directory //without absolute paths createSymbolicLink( fRelativeSubmitDirXXX.getName(), destination.getName(), parent, true ); return relativeSubmitDirXXX; } } /** * A filename filter for identifying the submit directory * * @author Karan Vahi vahi@isi.edu */ class SubmitDirectoryFilenameFilter implements FilenameFilter { /** * Stores the regular expression used to match backed up submit directory names */ private String mRegexExpression; /** * Stores compiled patterns at first use, quasi-Singleton. */ private Pattern mPattern = null; /** * Overloaded constructor. * * @param prefix prefix for the submit directory */ public SubmitDirectoryFilenameFilter( String prefix ){ mRegexExpression = "(" + prefix + ")([\\.][0-9][0-9][0-9])"; mPattern = Pattern.compile( mRegexExpression ); } /** * Tests if a specified file should be included in a file list. * * @param dir the directory in which the file was found. * @param name the name of the file. * * @return true if and only if the name should be included in the file list, * false otherwise. * * */ public boolean accept( File dir, String name) { return mPattern.matcher( name ).matches(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provisioner/0000755000175000017500000000000011757531667023661 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provisioner/Edge.java0000644000175000017500000000557011757531137025367 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.provisioner; import java.util.*; /** * An instance of this class represents an edge of a workflow, i.e. a * data dependency between two tasks. The class therefore holds references * to the two dependent nodes (the From node and the To node) and information about the * data between them: file name, data size and data transfer time.
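* With the shipped defaults an edge's transfer cost is fileSize/DEFAULT_BPS + DEFAULT_LATENCY, so with DEFAULT_BPS = 1 and DEFAULT_LATENCY = 0 the cost simply equals the file size.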
* * @author Eunkyu Byun */ public class Edge { public static final long DEFAULT_SIZE = 0; public static final long DEFAULT_BPS = 1; public static final long DEFAULT_LATENCY = 0; Node fromNode; Node toNode; long cost; String fileName; long fileSize; /** * Constructor * @param from the node this edge connects that generates the data * @param to the node this edge connects that receives the data * @param fileName the stored name of data * @param fileSize the size of the data, DTT is calculated according to BPS and Latency */ public Edge(Node from, Node to, String fileName, long fileSize) { this.fromNode = from; this.toNode = to; this.fileName = fileName; this.fileSize = fileSize; this.cost = (fileSize/DEFAULT_BPS) + DEFAULT_LATENCY; } /** * initializes the completion flag and completion time variables used by the HEFT algorithm */ public void init() { this.complete = false; this.compTime = 0; } /** * return the 'from' task */ public Node getFrom() { return fromNode; } /** * return the 'to' task */ public Node getTo() { return toNode; } /** * set the 'from' task * @param e the from task to be set */ public void setFrom(Node e) { this.fromNode = e; } /** * set the 'to' task * @param e the to task to be set */ public void setTo(Node e) { this.toNode = e; } public String getID() { return fileName; } public long getCost(long bandwidth,long latency) { return (fileSize/bandwidth) + latency; } public long getCost() { return cost; } public void setCost(long c) { this.cost = c; } public void print() { System.out.println("Edge_"+fileName+"("+fileSize+"KB) "+fromNode.getID()+" -> " + toNode.getID() ); } public boolean complete = false; public long compTime = 0; public boolean deleted = false; } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provisioner/Estimator.java0000644000175000017500000002774411757531137026475 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.provisioner; import java.io.*; import java.util.*; import javax.xml.parsers.*; import org.w3c.dom.*; /** * This Estimator is used to find the near-optimal number of processors * required to complete a workflow within a given RFT(requested finish time). * This estimator reads the workflow from a DAX file, and the user can select * one estimation method among BTS, DSC, and IterHEFT. The user also * needs to provide the RFT and the precision of the predicted execution time. * * @author Eunkyu Byun */ public class Estimator { private String fileName; private String method; private long RFT; private int prec; private Node topNode; private Node bottomNode; private HashSet edges; private HashMap nodes; private long totalET = 0; /** * Constructor * @param fileName DAX file describing the workflow * @param methodID one of BTS, DSC, or IterHEFT * @param RFT requested finish time.
i.e., deadline * @param prec The precision of the predicted execution time */ public Estimator(String fileName, String methodID, long RFT, int prec) { this.fileName = fileName; this.prec = prec; this.method = methodID; edges = new HashSet(); nodes = new HashMap(); topNode = new Node("TOP","NullTask", 0); bottomNode = new Node("BOTTOM", "NullTask", 0); nodes.put("TOP", topNode); nodes.put("BOTTOM", bottomNode); } private void readDAX() throws Exception { // for dummy input DocumentBuilder db = DocumentBuilderFactory.newInstance().newDocumentBuilder(); Element dax = db.parse(new File(fileName)).getDocumentElement(); if( !dax.getTagName().equals("adag") ) { throw new Exception("This input is not a adag file"); } NodeList nodelist = dax.getChildNodes(); int nodeCount = nodelist.getLength(); HashMap fileMap = new HashMap(); HashSet output = new HashSet(); for(int i = 0 ; i< nodeCount ; i++) { org.w3c.dom.Node cur = nodelist.item(i); String node = (String)cur.getNodeName(); if(node.equals("filename")) { String name = cur.getAttributes().getNamedItem("file").getNodeValue(); fileMap.put(name, topNode); } if(node.equals("job")) { String id = cur.getAttributes().getNamedItem("id").getNodeValue(); String name = cur.getAttributes().getNamedItem("name").getNodeValue(); long wei = Node.DEFAULT_WEIGHT; if(cur.getAttributes().getNamedItem("weight")!= null) { wei = Long.parseLong(cur.getAttributes().getNamedItem("weight").getNodeValue()); } Node newNode = new Node(id, name, wei); totalET += wei; nodes.put(id, newNode); NodeList usesList = cur.getChildNodes(); boolean noInput = true; boolean noOutput = true; for(int j = 0; j < usesList.getLength() ;j++) { org.w3c.dom.Node uses = usesList.item(j); if( uses.getNodeName().equals("uses") ) { String file = uses.getAttributes().getNamedItem("file").getNodeValue(); String link = uses.getAttributes().getNamedItem("link").getNodeValue(); if( link.equals("input") ) { Node from = (Node)fileMap.get(file); if( from == null ) { from = topNode; fileMap.put(file, topNode); } Edge target = new Edge(from, newNode, file, Edge.DEFAULT_SIZE ); edges.add(target); from.addOut(target); newNode.addIn(target); output.remove(file); noInput = false; } else if ( link.equals("output") ) { fileMap.put(file, newNode); output.add(file); noOutput = false; } } } if( noInput ) { Edge nullIn = new Edge(topNode, newNode, id+"_null_input_edge", 0); topNode.addOut(nullIn); newNode.addIn(nullIn); } if( noOutput ) { Edge nullOut = new Edge(newNode,bottomNode,id+"_null_output_edge", 0); newNode.addOut(nullOut); bottomNode.addIn(nullOut); } } } // connect input edges to TOP node, and output edges to BOTTOM node Iterator iter = output.iterator(); while( iter.hasNext() ) { String fileName = (String)iter.next(); Node from = (Node)fileMap.get(fileName); Edge target = new Edge(from, bottomNode, fileName, Edge.DEFAULT_SIZE); edges.add(target); from.addOut(target); bottomNode.addIn(target); } } private void calculateSlotSize() { } private void updateETs() { Iterator iter = nodes.values().iterator(); while( iter.hasNext() ) { Node n = (Node)iter.next(); long old = n.evalWeight(); n.setWeight( (long)Math.ceil( (double)old/this.prec ) ); } iter = edges.iterator(); while( iter.hasNext() ) { Edge e = (Edge)iter.next(); long old = e.getCost(); e.setCost( (long)Math.ceil( (double)old/this.prec ) ); } } private int BTS() { topNode.buildDescendants(); bottomNode.buildAncestors(); calculateSlotSize(); updateETs(); bottomNode.getUpLen(); topNode.getDownLen(); RFT = Math.max( RFT, topNode.getDownLen() ); 
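// illustrative example (values assumed, not from the source): with RFT = 10, a task whose UpLen = 3, DownLen = 4 and weight = 2 gets the window [lb, rb] = [3, 6] below, so its start time may range over [lb, rb - weight] = [3, 4]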
OccupationDiagram od = new OccupationDiagram(RFT); Iterator iter = nodes.values().iterator(); while( iter.hasNext() ) { Node nextn = (Node)iter.next(); nextn.init(); if( nextn.isTop() || nextn.isBottom() ) continue; nextn.olb = nextn.lb = nextn.getUpLen(); nextn.orb = nextn.rb = RFT - nextn.getDownLen(); nextn.tempST = -1; nextn.tempFT = -1; nextn.stacked = false; od.add( nextn ); } topNode.olb = topNode.lb = 0; topNode.orb = topNode.rb = 0; topNode.tempST = 0; topNode.tempFT = 0; bottomNode.olb = bottomNode.lb = RFT; bottomNode.orb = bottomNode.rb = RFT; bottomNode.tempST = RFT; bottomNode.tempFT = RFT; return od.stack(false); } private int DSC() { topNode.getDownLen(); bottomNode.getUpLen(); int totalNodes = nodes.size(); long[] clusters = new long[totalNodes+2]; for(int i = 0 ; i < clusters.length ; i++) clusters[i] = 0; int ccnt = 0; LinkedList freeTasks = new LinkedList(); freeTasks.add(topNode); while(freeTasks.size() > 0 ) { long max = -1; Node ft = null; for(int i = 0 ; i < freeTasks.size() ; i++) { Node cn = (Node)freeTasks.get(i); long prio = cn.tlevel + cn.getDownLen(); if( prio > max ) { max = prio; ft = cn; } } freeTasks.remove(ft); LinkedList parents = ft.getIn(); long otl = 0; long temp = 0; for(int i = 0 ; i < parents.size() ; i++ ) { Edge ce = (Edge)parents.get(i); Node p = (Node)ce.getFrom(); long newtl = p.tlevel+p.evalWeight()+ce.getCost(); if( clusters[p.cluster] <= ft.tlevel ) { otl = Math.max(otl, temp); ft.tlevel = clusters[p.cluster]; ft.cluster = p.cluster; temp = newtl; } else { otl = Math.max(otl, newtl); } } if( ft.cluster < 0 ) { ft.cluster = ccnt; clusters[ccnt] = ft.tlevel + ft.evalWeight(); ccnt++; } else { ft.tlevel = Math.max(otl, clusters[ft.cluster]); clusters[ft.cluster] = ft.tlevel + ft.evalWeight(); } ft.examined = true; LinkedList childs = ft.getOut(); for(int i = 0 ; i < childs.size() ; i++) { Edge ce = (Edge)childs.get(i); Node c = (Node)ce.getTo(); c.tlevel = Math.max( c.tlevel, ft.tlevel + ft.evalWeight() + ce.getCost() ); if( c.isFree() && !freeTasks.contains(c) ) { freeTasks.add(c); } } } return ccnt; } private int IterHEFT() { topNode.getDownLen(); RFT = Math.max(RFT, bottomNode.getUpLen()); int lb = (int)(totalET/RFT); int i = lb; int tasks = nodes.size(); for(i = lb; i <= tasks ; i++) { // i = the number of hosts long makespan = HEFT(i); if( RFT >= makespan) { break; } } return i; } // HEFT algorithm for homogeneous resources private long HEFT(int size) { //clean up edges and nodes Iterator iter = edges.iterator(); while( iter.hasNext() ) { Edge next = (Edge)iter.next(); next.init(); } iter = nodes.values().iterator(); while( iter.hasNext() ) { Node nextn = (Node)iter.next(); nextn.st = 0; } long[] resTable = new long[size]; //time table for 'size' resources for(int i = 0 ; i < size; i++) resTable[i] = 0; LinkedList readySet = new LinkedList(); // queue of tasks whose all parents are finished topNode.initOut(true,0); readySet.add(topNode); while(readySet.size() > 0 ) { Node cur = (Node)readySet.removeFirst(); //schedule long min = Long.MAX_VALUE; long st = cur.st; //startable time long et = 0; //end time int target = 0; boolean sched = false; for(int i = 0 ; i< size; i++) { if( resTable[i] <= st ) { //if resource(i) is available at the time st, //schedule the task on it resTable[i] = st + cur.evalWeight(); sched = true; et = resTable[i]; break; } if( resTable[i] < min ) { // find the fastest time when a resource become free min = resTable[i]; target = i; } } if( !sched ) { // schedule on the freed resource resTable[target]+= 
cur.evalWeight(); et = resTable[target]; } // System.out.println(cur.getID()+" ends at "+et); cur.initOut(true,et); //notify finish time to child tasks Iterator ite = cur.getOut().iterator(); while( ite.hasNext() ) { Edge nex = (Edge)ite.next(); Node no = nex.getTo(); if( readySet.contains(no) ) continue; if(no.checkIn() && !no.isBottom() ) { boolean mid = false; // sorting ready set (based on HEFT algorithm definition) for(int i = 0 ; i < readySet.size() ; i++) { Node tm = (Node)readySet.get(i); if( (tm.getDownLen()+tm.evalWeight()) < (no.getDownLen()+no.evalWeight()) ) { readySet.add(i,no); mid = true; break; } } if( !mid ) readySet.add(no); no.initOut(false,0); } } } long max = 0; for(int i = 0 ; i< size; i++) { max = Math.max(max, resTable[i]); } return max; } /** * Estimates the number of processors and return the value. * @return Estimated number of processors */ public int estimate() throws RuntimeException { int result = -1; try { readDAX(); } catch (Exception e) { throw new RuntimeException("Invalid DAX file"); } if( this.method.equals("BTS") ) { result = BTS(); } else if ( this.method.equals("IterHEFT") ) { result = IterHEFT(); } else if ( this.method.equals("DSC") ) { result = DSC(); } else { throw new RuntimeException("Invalid estimate method"); } return result; } public static void main(String[] args) { if(args.length < 3) { System.out.println("Usage: <dax file> <estimation method> [<RFT>] [<precision>]"); System.out.println(" <estimation method> = BTS : IterHEFT : DSC"); System.out.println(" or <estimation method> = 1 (for BTS) : 2 (for IterHEFT) : 3 (for DSC)"); return; } int prec = 1; if( args.length > 3 ) { try { prec = Integer.parseInt(args[3]); } catch (Exception e) {} } long RFT = -1; if( args.length > 2 ) { try { RFT = Long.parseLong(args[2]); } catch(Exception e) {} } Estimator body = new Estimator(args[0], args[1], RFT, prec); try { int estimate = body.estimate(); System.out.println(estimate); } catch (Exception e) { System.out.println( e.getMessage() ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provisioner/OccupationDiagram.java0000644000175000017500000001747711757531137030115 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.provisioner; import java.util.*; /** * This class keeps the structure of an Occupation Diagram and conducts the BTS algorithm * * @author Eunkyu Byun */ public class OccupationDiagram { TreeSet nodes; long RFT; LinkedList[] timeMap; int max; int maxIndex; /** * Constructor * @param rft Requested finish time(deadline) of algorithm.
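* Each of the rft time slots in [0, rft) holds the list of tasks currently placed at that slot.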
*/ public OccupationDiagram(long rft) { nodes = new TreeSet(new NodeComp()); this.RFT = rft; max = 0; maxIndex = 0; timeMap = new LinkedList[(int)rft]; for(int i = 0 ; i < rft ; i++) { timeMap[i] = new LinkedList(); } } /** * Comparator that compares the schedulable duration of two nodes */ private class NodeComp implements Comparator { // compare schedulable duration of two nodes public int compare(Node a, Node b) { if( (a.rb - a.lb) > (b.rb - b.lb) ) return 1; if( (a.rb - a.lb) < (b.rb - b.lb) ) return -1; // tie is broken by the number of dependent nodes if( a.getDepET() > b.getDepET() ) return 1; if( a.getID() == b.getID() ) return 0; return -1; } } /** * Adds a task into the occupation diagram. Task placement is not conducted yet. * @param node Node to be added */ public void add( Node node ) { // the node with the narrower schedulable duration has priority if( node.evalWeight() > 0 ) { nodes.add( node ); } } private void printNodes() { System.out.println("******************"); Iterator iter = nodes.iterator(); while( iter.hasNext() ) { Node n = (Node)iter.next(); System.out.println(n.getID()+" lb:"+n.lb+" rb:"+n.rb+" diff:"+(n.rb-n.lb)+" dep:"+n.getDepET() ); } } /** * Run the Task placement phase of the BTS algorithm * @param print if true print debug message to stdout * @return maximum height of the Occupation diagram when Task placement is done */ public int stack(boolean print) { while( nodes.size() > 0 ) { double t00 = Calendar.getInstance().getTimeInMillis(); // printNodes(); Node cn = (Node)nodes.first(); nodes.remove(cn); cn.stacked = true; int leftB = (int)cn.lb; int rightB = (int)cn.rb; int et = (int)cn.evalWeight(); if( et <= 0 ) continue; boolean rightBias = (cn.getDesSize() < cn.getAncSize()); int min = Integer.MAX_VALUE; int cursor = leftB; for(int i = leftB ; i <= (rightB - et) ; i++) { int localMax = 0; for(int j = i ; j < (i+et) ; j++) { localMax = Math.max(localMax, timeMap[j].size()); } if( min > localMax ) { min = localMax; cursor = i; } if( min == localMax && rightBias) { cursor = i; } } for(int i = cursor ; i < (cursor+et) ; i++) { timeMap[i].add(cn); max = Math.max(max, timeMap[i].size()); } cn.tempST = (long)cursor; cn.tempFT = (long)(cursor + et); // System.out.println(cn.getID()+"(lb:"+leftB+",rb:"+rightB+",et:"+et+") -> <"+cn.tempST+","+cn.tempFT+"> height:"+(min+1)); LinkedList parents = cn.getIn(); for(int i = 0; i < parents.size() ;i++) { Edge ce = (Edge)parents.get(i); Node n = ce.getFrom(); n.updateRightBound(cn.tempST - ce.getCost(), nodes); } LinkedList childs = cn.getOut(); for(int i = 0; i < childs.size() ;i++) { Edge ce = (Edge)childs.get(i); Node n = ce.getTo(); n.updateLeftBound(cn.tempFT + ce.getCost(), nodes); } double t01 = Calendar.getInstance().getTimeInMillis(); // System.out.println("stack :"+cn.getID()+" takes " + (t01-t00) ); } return max; } /** * Run the Task redistribution phase of the BTS algorithm * @param goal The redistribution loop stops if the maximum height of the OD reaches this goal value.
* @param print if true print debug message to stdout * @return maxinum height of Occupation diagram when Task redistribution is done */ public int balance(int goal, boolean print) { int max = 0; int maxIndexL = 0; int maxIndexR = 0; int cntWhile = 0; int cntNPt = 0; int cntNP = 0; int cntPt = 0; int cntP = 0; LinkedList sortTemp = null; while(true) { cntWhile++; max = 0; // find maximum time slot for(int i = 0 ; i < RFT ; i++) { // System.out.println("ts: "+i+ " h: "+ timeMap[i].size() ); if( timeMap[i].size() == max ) maxIndexR = i; if( timeMap[i].size() > max ) { max = timeMap[i].size(); maxIndexL = i; maxIndexR = i; } } if( max <= goal ) break; // System.out.println("RFT: "+ RFT+" max: "+max+" maxIndexL: "+maxIndexL+" maxIndexR: "+maxIndexR); boolean reduced = false; // non propagated balancing for(int k = 0 ; k < max ; k++ ) { cntNPt++; Node cn = (Node)timeMap[maxIndexL].get(k); if( cn.NPbalance(timeMap, max, false) ) { reduced = true; cntNP++; break; } cn = (Node)timeMap[maxIndexR].get(k); if( cn.NPbalance(timeMap, max, false) ) { reduced = true; cntNP++; break; } } if( reduced ) continue; // propagated balancing // redistribute to left direction sortTemp = new LinkedList(); for(int k = 0 ; k < max ;k++) { // among tasks in the highest time slot Node cn = (Node)timeMap[maxIndexL].get(k); if( (cn.tempST-cn.olb) >= cn.evalWeight() ) { //check whether the tasks can be moved or not boolean mid = false; for(int ii = 0 ; ii < sortTemp.size(); ii++ ) { //sorting movable tasks with the number ancestor tasks and ET Node tn = (Node)sortTemp.get(ii); if( tn.getAncET() > cn.getAncET() ) { sortTemp.add(ii, cn); mid = true; break; } else if ( tn.getAncET() < cn.getAncET() ) { continue; } else if( tn.evalWeight() > cn.evalWeight() ) { //tie break sortTemp.add(ii, cn); mid = true; } } if( !mid ) sortTemp.add(cn); } } boolean succ = false; for(int k = 0 ; k < sortTemp.size() ; k++ ) { Node cn = (Node)sortTemp.get(k); succ = cn.moveLeft(timeMap, max, maxIndexL); if( succ ) break; } if( succ ) continue; // redistribute to right direction sortTemp = new LinkedList(); for(int k = 0 ; k < max ;k++) { // among tasks in the highest time slot Node cn = (Node)timeMap[maxIndexR].get(k); if( (cn.orb - cn.tempFT) >= cn.evalWeight() ) { //check whether the tasks can be moved or not boolean mid = false; for(int ii = 0 ; ii < sortTemp.size(); ii++ ) { //sorting movable tasks with the number descendant tasks and ET Node tn = (Node)sortTemp.get(ii); if( tn.getDesET() > cn.getDesET() ) { sortTemp.add(ii, cn); mid = true; break; } else if ( tn.getDesET() < cn.getDesET() ) { continue; } else if( tn.evalWeight() > cn.evalWeight() ) { //tie break sortTemp.add(ii, cn); mid = true; } } if( !mid ) sortTemp.add(cn); } } for(int k = 0 ; k < sortTemp.size() ; k++ ) { Node cn = (Node)sortTemp.get(k); succ = cn.moveRight(timeMap, max, maxIndexR+1); if( succ ) break; } if( !succ ) break; } // System.out.println("cntWhile: "+cntWhile+" cntNP: "+cntNP+" cntNPt:"+cntNPt+" cntP: "+cntP+" cntPt:"+cntPt+" max:"+max); return max; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provisioner/Node.java0000644000175000017500000004251411757531137025407 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.provisioner; import java.util.*; /** * An instance of this class represents an independent task of a workflow. * A task has a PET(predicted execution time), a list of input data * sources(parent tasks), and a list of output data(child tasks). This class * is also used by the BTS algorithm: it holds state such as the EST, LFT and scheduled start/finish times, * and the core methods of the BTS algorithm used by the OccupationDiagram class. * * @author Eunkyu Byun * */ public class Node { public static final long DEFAULT_WEIGHT = 1; private LinkedList inEdges; private LinkedList outEdges; private Edge critEdge; private String id; private String taskName; private long weight; private long upLen; private long downLen; private HashSet ancestors; private HashSet descendants; public void init() { this.st = 0; this.ancET = -1; this.desET = -1; } /** * constructor * @param id the unique ID of this task * @param name task name(executable name) * @param w predicted execution time */ public Node(String id, String name, long w) { this(id, name, -1, w); } public Node(String id, String name, int n, long w) { this.upLen = -1; this.downLen = -1; this.id = id; this.taskName = name; this.weight = w; inEdges = new LinkedList(); outEdges = new LinkedList(); ancestors = new HashSet(); descendants = new HashSet(); init(); } public Node(String id) { this(id, "NullTask", DEFAULT_WEIGHT); } /** * set execution time of this task * @param w execution time */ public void setWeight(long w) { this.weight = w; } /** * return execution time of this task * @return execution time */ public long evalWeight() { return weight; } /** * add a data dependency edge to this task * @param e the edge to be added */ public void addIn(Edge e) { if( !inEdges.contains(e) ) { inEdges.add(e); } } /** * remove a specified data dependency edge from this task * @param e the edge to be removed */ public void removeIn(Edge e) { // System.out.println("in "+id+" edge "+e.getID() + " is removedIn"); inEdges.remove(e); } /** * add a data dependency edge from this task * @param e the edge to be added */ public void addOut(Edge e) { if( !outEdges.contains(e) ) { outEdges.add(e); } } /** * remove a specified data dependency edge from this task * @param e the edge to be removed */ public void removeOut(Edge e) { // System.out.println("in "+id+" edge "+e.getID() + " is removedOut"); outEdges.remove(e); } /** * check whether all parent tasks are finished. * @return true if all parent tasks are finished. */ public boolean checkIn() { boolean ret = true; Iterator iter = inEdges.iterator(); while(iter.hasNext()) { Edge next =(Edge)iter.next(); ret &= next.complete; st = Math.max(st, next.compTime + next.getCost() ); } return ret; } /** * return the list of incoming edges. * @return LinkedList of edges. */ public LinkedList getIn() { return inEdges; } /** * return the list of outgoing edges. * @return LinkedList of edges.
*/ public LinkedList getOut() { return outEdges; } /** notify child nodes that this task will finish at time et * @param et finish time */ public void initOut(boolean s,long et) { Iterator iter = outEdges.iterator(); while(iter.hasNext()) { Edge next =(Edge)iter.next(); next.complete = s; if(s) { next.compTime = Math.max(next.compTime,et); } else { next.compTime = et; } } } public long st = 0; //scheduled time, used for HEFT algorithm // Calculate UpLength private void updateUpLen() { if( isTop() ) { this.upLen = 0; return; } long maxUpLen = -1; Iterator parents = inEdges.iterator(); while( parents.hasNext() ) { Edge next = (Edge)parents.next(); Node cur = next.getFrom(); long curUpLen = cur.getUpLen() + cur.evalWeight() + next.getCost(); if( maxUpLen < curUpLen ) { maxUpLen = curUpLen; critEdge = next; } } this.upLen = maxUpLen; } /** * return the UpLength of this task. UpLength is the longest path from the entry task * @return UpLength */ public long getUpLen() { if( this.upLen < 0 ) { updateUpLen(); } return this.upLen; } private void updateDownLen() { if( isBottom() ) { downLen = 0; return; } Iterator iter = outEdges.iterator(); while( iter.hasNext() ) { Edge ce = (Edge)iter.next(); Node cn = ce.getTo(); downLen = Math.max( downLen, cn.getDownLen() + cn.evalWeight() + ce.getCost()); } } /** * return the DnLength of this task. DnLength is the longest path to the exit task * @return DnLength */ public long getDownLen() { if( downLen == -1 ) updateDownLen(); return this.downLen; } /** * Check whether this task is the entry task or not. * @return true or false */ public boolean isTop() { return (inEdges.size() == 0); } /** * Check whether this task is the exit task or not. * @return true or false */ public boolean isBottom() { return (outEdges.size() == 0); } public String getID() { return id; } /** * update EST(earliest start time) of this task. * @param nlb new EST * @param set set of ancestor tasks */ public void updateLeftBound(long nlb, /*LinkedList*/TreeSet set) { // System.out.print("updateLeftBound in "+id+" from "+lb); if( nlb > lb ) { // System.out.print(" to "+nlb+ "\r\n"); boolean contain = false; if( set != null ) contain = set.remove(this); this.lb = nlb; if(contain) { // System.out.println(id+ " is removed in updateLeftBound"); boolean test = set.add(this); // System.out.println(test); } for(int i = 0 ; i < outEdges.size() ; i++) { Edge ce = (Edge)outEdges.get(i); Node n = ce.getTo(); n.updateLeftBound(nlb + weight + ce.getCost(), set); } } //else System.out.print("\r\n"); } /** * update LFT(latest finish time) of this task. 
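* A tightened bound is propagated recursively to the ancestors through the incoming edges.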
* @param nrb new LFT * @param set set of descendant tasks */ public void updateRightBound(long nrb, /*LinkedList*/TreeSet set) { // System.out.print("updateRightBound in "+id+" from "+rb); if( nrb < rb ) { // System.out.print(" to "+nrb+"\r\n"); boolean contain = false; if( set != null ) contain = set.remove(this); this.rb = nrb; if(contain) { // System.out.println(id+ " is removed in updateRightBound"); boolean test = set.add(this); // System.out.println(test); } for(int i = 0 ; i < inEdges.size() ; i++) { Edge ce = (Edge)inEdges.get(i); Node n = ce.getFrom(); n.updateRightBound(nrb - weight - ce.getCost() , set); } } //else System.out.print("\r\n"); } public boolean stacked = false; //bit for stacked in Occupation diagram public long lb; //left bound (EST) public long rb; //right bound (LFT) public long olb; //original left bound public long orb; //original right bound public long tempST; //scheduled start time public long tempFT; //scheduled finish time /** * Non-propagated redistribution of this task * @param timeMap List of scheduled tasks at each time slot. i.e., Occupation diagram itself * @param limit maximum height of the Occupation diagram; the redistribution must not make the diagram taller than this limit. * @param force Not used. * @return true if this task is redistributed. */ public boolean NPbalance(LinkedList[] timeMap, int limit, boolean force) { if( weight == 0 ) return false; if( !force && (tempST - lb) < weight && (rb - tempFT) < weight ) return false; int min = limit; int cursor = (int)lb; boolean rightBias = (getDesSize() < getAncSize()); for(int i = (int)lb ; i <= (int)(rb - weight) ; i++) { int localMax = 0; for(int j = i ; j < (i+weight); j++) { int overlap = (j >= tempST && j < tempFT) ? 1 : 0; localMax = Math.max(localMax, timeMap[j].size() - overlap); } if( min > localMax ) { min = localMax; cursor = i; } if( min == localMax && rightBias) { cursor = i; } } if( (min+1) >= limit && !force ) return false; // System.out.println("NPbalance ID:"+id+" limit:"+limit+" from: "+ tempST+" to: " + cursor); for(int i = (int)tempST ; i < tempFT ; i++) timeMap[i].remove(this); tempST = cursor; tempFT = cursor+weight; for(int i = (int)tempST ; i < tempFT ; i++) timeMap[i].add(this); for(int i = 0; i < outEdges.size() ;i++) { Edge ce = (Edge)outEdges.get(i); Node n = ce.getTo(); n.updateLeftBound(tempFT+ce.getCost(),null); } for(int i = 0; i < inEdges.size() ;i++) { Edge ce = (Edge)inEdges.get(i); Node n = ce.getFrom(); n.updateRightBound(tempST-ce.getCost(),null); } return (min < limit); } /** * Redistribute this task to an earlier time. This may cause ancestor tasks to be redistributed too. * @param timeMap List of scheduled tasks at each time slot. i.e., Occupation diagram itself * @param limit maximum height of the Occupation diagram; the redistribution must not make the diagram taller than this limit. * @param bound latest candidate start time for this task. * @return true if this task is redistributed. */ public boolean moveLeft(LinkedList[] timeMap, int limit, long bound) { boolean found = false; int cursor = (int)bound; for(int i = (int)bound ; i >= olb ; i-- ) { int localMax = 0; for(int j = i ; j < (i+weight); j++) { int overlap = (j >= tempST && j < tempFT) ? 1 : 0; localMax = Math.max(localMax, timeMap[j].size() - overlap); } if( limit > (localMax + 1 - pushedParents(i) ) ) { cursor = i; found = true; break; } } // System.out.println("moveLeft ID:"+id+" limit:"+limit + " bound:"+ bound +" found:" + found+ " " + cursor); if( !found ) return false; //not possible to move // push parent nodes for(int i = 0; i < inEdges.size() && found ;i++) { Edge ce = (Edge)inEdges.get(i); Node n = ce.getFrom(); if( cursor >= lb ) { n.updateRightBound(cursor - ce.getCost(), null); } else { found &= n.moveLeft( timeMap, limit, cursor - ce.getCost() ); } } if( !found ) return false; //blocked by a parent
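// every blocking parent has been pushed out of the way at this point, so the task itself can now be re-slotted at the cursor found above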
// re-schedule lb = Math.min(lb, cursor); for(int i = (int)tempST; i < tempFT ; i++) timeMap[i].remove(this); tempST = cursor; tempFT = cursor + weight; for(int i = (int)tempST; i < tempFT ; i++) timeMap[i].add(this); //release leftbound of child nodes for(int i = 0; i < outEdges.size() ;i++) { Edge ce = (Edge)outEdges.get(i); Node n = ce.getTo(); n.updateLeftBound(tempFT+ce.getCost(),null); } return true; } /** * Redistribute this task to a later time. This may cause descendant tasks to be redistributed too. * @param timeMap List of scheduled tasks at each time slot. i.e., Occupation diagram itself * @param limit maximum height of the Occupation diagram; the redistribution must not make the diagram taller than this limit. * @param bound earliest candidate start time for this task. * @return true if this task is redistributed. */ public boolean moveRight(LinkedList[] timeMap, int limit, long bound) { boolean found = false; int cursor = (int)bound; for(int i = (int)bound ; i <= (orb-weight) ; i++ ) { int localMax = 0; for(int j = i ; j < (i+weight); j++) { int overlap = (j >= tempST && j < tempFT) ? 1 : 0; localMax = Math.max(localMax, timeMap[j].size() - overlap); } if( limit > (localMax + 1 - pushedChildren(i + weight)) ) { cursor = i; found = true; break; } } // System.out.println("moveRight ID:"+id+" limit:"+limit + " bound:"+ bound +" found:" + found+ " " + cursor); if( !found ) return false; //not possible to move // push child nodes for(int i = 0; i < outEdges.size() && found ;i++) { Edge ce = (Edge)outEdges.get(i); Node n = ce.getTo(); if( cursor+weight <= rb ) { n.updateLeftBound(cursor + weight + ce.getCost(), null); } else { found &= n.moveRight( timeMap, limit, cursor + weight + ce.getCost() ); } } if( !found ) return false; //blocked by a child // re-schedule rb = Math.max(rb, cursor); for(int i = (int)tempST; i < tempFT ; i++) timeMap[i].remove(this); tempST = cursor; tempFT = cursor + weight; for(int i = (int)tempST; i < tempFT ; i++) timeMap[i].add(this); //release the right bound of parent nodes for(int i = 0; i < inEdges.size() ;i++) { Edge ce = (Edge)inEdges.get(i); Node n = ce.getFrom(); n.updateRightBound(tempST - ce.getCost(),null); } return true; } private int pushedParents(long timeLimit) { int result = 0; for(int i = 0 ; i < inEdges.size(); i++) { Edge ce = (Edge)inEdges.get(i); Node cn = ce.getFrom(); if( timeLimit < cn.tempFT ) { result++; } } return result; } private int pushedChildren(long timeLimit) { int result = 0; for(int i = 0 ; i < outEdges.size(); i++) { Edge ce = (Edge)outEdges.get(i); Node cn = ce.getTo(); if( timeLimit > cn.tempST ) { result++; } } return result; } /** * checks that every child task's start time is scheduled after the finish time of this task */ public void checkIntegrity() { for(int i = 0 ; i < outEdges.size(); i++ ) { Edge ce = (Edge)outEdges.get(i); Node cn = ce.getTo(); if( (tempFT + ce.getCost()) > cn.tempST ) { System.out.println("Violation!
from:"+id+"("+tempFT+") to "+cn.getID()+"("+cn.tempST+")"); } cn.checkIntegrity(); } } /** * build the list of descendant tasks * @return the list of descensant tasks */ public Set buildDescendants() { if( !isBottom() && descendants.size() == 0 ) { Iterator iter = outEdges.iterator(); while( iter.hasNext() ) { Edge ce = (Edge)iter.next(); Node cn = ce.getTo(); if( !cn.isBottom() ) { descendants.add(cn); descendants.addAll(cn.buildDescendants()); } } } return descendants; } /** * build the list of ancestor tasks * @return the list of ancestor tasks */ public Set buildAncestors() { if( !isTop() && ancestors.size() == 0 ) { Iterator iter = inEdges.iterator(); while( iter.hasNext() ) { Edge ce = (Edge)iter.next(); Node cn = ce.getFrom(); if( !cn.isTop() ) { ancestors.add(cn); ancestors.addAll(cn.buildAncestors()); } } } return ancestors; } public int getDepSize() { return descendants.size() + ancestors.size(); } public int getAncSize() { return ancestors.size(); } public int getDesSize() { return descendants.size(); } private long ancET; private long desET; /** * @return the sum of execution time of all ancestor tasks */ public long getAncET() { if( ancET < 0 ) { ancET = 0; Iterator iter = ancestors.iterator(); while( iter.hasNext() ) { Node cn = (Node)iter.next(); ancET += cn.evalWeight(); } } return ancET; } /** * @return the sum of execution time of all descendant tasks */ public long getDesET() { if( desET < 0 ) { desET = 0; Iterator iter = descendants.iterator(); while( iter.hasNext() ) { Node cn = (Node)iter.next(); desET += cn.evalWeight(); } } return desET; } /** * @return the sum of execution time of all dependent tasks */ public long getDepET() { return getAncET() + getDesET(); } public boolean isAnc(Node n) { return ancestors.contains(n); } public boolean isDes(Node n) { return descendants.contains(n); } public void print() { String inList = ""; Iterator iter = inEdges.iterator(); while(iter.hasNext() ) { inList += ((Edge)iter.next()).getID() + ","; } String outList = ""; iter = outEdges.iterator(); while(iter.hasNext() ) { outList += ((Edge)iter.next()).getID() + ","; } // System.out.println("Node_"+id+"("+ taskName +","+ weight +"sec) IN:"+inList+", OUT:"+outList); System.out.println("Node_"+id+"("+ taskName +","+ weight +") upLen:"+upLen+", downLen:" + downLen ); } public int cluster = -1; public long tlevel = 0; public boolean examined = false; /** * used for DSC algorithm */ public boolean isFree() { if( examined ) return false; boolean result = true; for(int i = 0 ; i < inEdges.size() ; i++ ) { Edge ce = (Edge)inEdges.get(i); Node cn = ce.getFrom(); if( !cn.examined ) { result = false; break; } } return result; } public boolean equals(Object e) { // System.out.println("comparison is called this"); if( e instanceof Node ) { Node cn = (Node)e; boolean same = cn.getID().equals(id); // System.out.println(id+ " " + cn.getID() + " " + same); return same; } else return false; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/0000755000175000017500000000000011757531667023122 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/0000755000175000017500000000000011757531667024066 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/Abstract.java0000644000175000017500000000542011757531137026465 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.partitioner.graph.Adapter; import edu.isi.pegasus.planner.selector.SiteSelector; import edu.isi.pegasus.planner.catalog.transformation.Mapper; import java.util.List; /** * The Abstract Site selector. * * @author Karan Vahi * @author Jens-S. Vöckler * @author Gaurang Mehta * * * @version $Revision: 2576 $ */ public abstract class Abstract implements SiteSelector { /** * The properties passed to Pegasus at runtime. */ protected PegasusProperties mProps; /** * The handle to the logger. */ protected LogManager mLogger; /** * The handle to the site catalog. */ // protected PoolInfoProvider mSCHandle; protected SiteStore mSiteStore; /** * The handle to the TCMapper object. */ protected Mapper mTCMapper; /** * The bag of Pegasus objects. */ protected PegasusBag mBag; /** * Initializes the site selector. * * @param bag the bag of objects that is useful for initialization. * */ public void initialize( PegasusBag bag ){ mBag = bag; mProps = ( PegasusProperties )bag.get( PegasusBag.PEGASUS_PROPERTIES ); mLogger = ( LogManager )bag.get( PegasusBag.PEGASUS_LOGMANAGER ); mSiteStore = bag.getHandleToSiteStore(); mTCMapper = ( Mapper )bag.get( PegasusBag.TRANSFORMATION_MAPPER ); } /** * Maps the jobs in the workflow to the various grid sites. * The jobs are mapped by setting the site handle for the jobs. * * @param workflow the workflow. * * @param sites the list of String objects representing the * execution sites that can be used. */ public void mapWorkflow( ADag workflow, List sites ){ mapWorkflow( Adapter.convert( workflow ), sites ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/heft/0000755000175000017500000000000011757531667025014 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/heft/Site.java0000644000175000017500000001035311757531137026555 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site.heft; import java.util.List; import java.util.LinkedList; import java.util.ListIterator; /** * A data class that models a site as a collection of processors. * The number of processors can only be specified in the constructor. 
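* For example (illustrative), new Site( "siteX", 4 ) models a site with four processors available to the scheduler.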
* * @author Karan Vahi * @version $Revision: 2050 $ */ public class Site { /** * The number of processors making up a site. */ private int mNumProcessors; /** * A list of processors making up the site. */ private List mProcessors; /** * The index to the processor that is to be used for scheduling a job. */ private int mCurrentProcessorIndex; /** * The logical name assigned to the site. */ private String mName; /** * The default constructor. * * @param name the name to be assigned to the site. */ public Site( String name ) { mName = name; mNumProcessors = 0; mProcessors = new LinkedList(); mCurrentProcessorIndex = 0; } /** * The overloaded constructor. * * @param name the name to be assigned to the site. * @param num the number of processors. */ public Site( String name, int num ){ mName = name; mNumProcessors = num; mCurrentProcessorIndex = -1; mProcessors = new LinkedList( ); } /** * Returns the earliest time the site is available for scheduling * a job. It is non insertion based scheduling policy. * * @param start the time at which to start the search. * * @return long */ public long getAvailableTime( long start ){ int num = 0; //each processor is checked for start of list long result = Long.MAX_VALUE; long current; ListIterator it; for( it = mProcessors.listIterator( ); it.hasNext(); num++ ){ Processor p = ( Processor ) it.next(); current = p.getAvailableTime( start ); if( current < result ){ //tentatively schedule a job on the processor result = current; mCurrentProcessorIndex = num; } } if( result > start && num < mNumProcessors ){ //tentatively schedule a job to an unused processor as yet. result = start; mCurrentProcessorIndex = num++; //if using a normal iterator //could use addLast() method it.add( new Processor () ); } //sanity check if( result == Long.MAX_VALUE ){ throw new RuntimeException( "Unable to scheduled to site" ); } return result; } /** * Schedules a job to the site. * * @param start the start time of the job. * @param end the end time for the job */ public void scheduleJob( long start, long end ){ //sanity check if( mCurrentProcessorIndex == -1 ){ throw new RuntimeException( "Invalid State. The job needs to be tentatively scheduled first!" ); } Processor p = ( Processor )mProcessors.get( mCurrentProcessorIndex ); p.scheduleJob( start, end ); //reset the index mCurrentProcessorIndex = -1; } /** * Returns the name of the site. * * @return name of the site. */ public String getName(){ return mName; } /** * Returns the number of available processors. * * @return number of available processors. */ public int getAvailableProcessors( ){ return this.mNumProcessors; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/heft/Algorithm.java0000644000175000017500000006516411757531137027611 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.selector.site.heft; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.partitioner.graph.Adapter; import edu.isi.pegasus.planner.partitioner.graph.Bag; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.Mapper; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import java.util.List; import java.util.Map; import java.util.HashMap; import java.util.LinkedList; import java.util.Iterator; import java.util.Comparator; import java.util.Collections; import edu.isi.pegasus.planner.classes.Profile; /** * The HEFT based site selector. The runtime for the job in seconds is picked * from the pegasus profile key runtime in the transformation catalog for a * transformation. * * The data communication costs between jobs if scheduled on different sites * is assumed to be fixed. Later on if required, the ability to specify this * value will be exposed via properties. * * The number of processors in a site is picked by the attribute idle-nodes * associated with the vanilla jobmanager for a site in the site catalog. * * There are two important differences with the algorithm cited in the * HEFT paper. *
 *    - Our implementation uses downward ranks instead of the upward ranks as
 *      mentioned in the paper. The formulas have been updated accordingly.
 *
 *    - During the processor selection phase, we do the simple selection and
 *      not follow the insertion based approach.
 * 
* * @author Karan Vahi * @version $Revision: 4402 $ * * @see #AVERAGE_BANDWIDTH * @see #RUNTIME_PROFILE_KEY * @see #DEFAULT_NUMBER_OF_FREE_NODES * @see #AVERAGE_DATA_SIZE_BETWEEN_JOBS * @see org.griphyn.cPlanner.classes.JobManager#IDLE_NODES * */ public class Algorithm { /** * The pegasus profile key that gives us the expected runtime. */ public static final String RUNTIME_PROFILE_KEY = Pegasus.RUNTIME_KEY; /** * The property that designates which Process catalog impl to pick up. */ public static final String PROCESS_CATALOG_IMPL_PROPERTY = "pegasus.catalog.transformation.windward"; /** * The average bandwidth between the sites. In mega bytes/per second. */ public static final float AVERAGE_BANDWIDTH = 5; /** * The average data that is transferred in between 2 jobs in the workflow. * In megabytes. */ public static final float AVERAGE_DATA_SIZE_BETWEEN_JOBS = 2; /** * The default number of nodes that are associated with a site if not found * in the site catalog. */ public static final int DEFAULT_NUMBER_OF_FREE_NODES = 10; /** * The maximum finish time possible for a job. */ public static final long MAXIMUM_FINISH_TIME = Long.MAX_VALUE; /** * The average communication cost between nodes. */ private float mAverageCommunicationCost; /** * The workflow in the graph format, that needs to be scheduled. */ private Graph mWorkflow; /** * Handle to the site catalog. */ // private PoolInfoProvider mSiteHandle; private SiteStore mSiteStore; /** * The list of sites where the workflow can run. */ private List mSites; /** * Map containing the number of free nodes for each site. The key is the site * name, and value is a Site object. */ private Map mSiteMap; /** * Handle to the TCMapper. */ protected Mapper mTCMapper; /** * The handle to the LogManager */ private LogManager mLogger; /** * The handle to the properties. */ private PegasusProperties mProps; //TANGRAM related variables /** * The request id associated with the DAX. */ private String mRequestID; /** * The label of the workflow. */ private String mLabel; /** * The handle to the transformation catalog. */ private TransformationCatalog mTCHandle; /** * The default constructor. * * @param bag the bag of Pegasus related objects. */ public Algorithm( PegasusBag bag ) { mProps = ( PegasusProperties ) bag.get( PegasusBag.PEGASUS_PROPERTIES ); mRequestID = mProps.getWingsRequestID(); mTCHandle = ( TransformationCatalog )bag.get( PegasusBag.TRANSFORMATION_CATALOG ); mTCMapper = ( Mapper )bag.get( PegasusBag.TRANSFORMATION_MAPPER ); mLogger = ( LogManager )bag.get( PegasusBag.PEGASUS_LOGMANAGER ); // mSiteHandle = ( PoolInfoProvider )bag.get( PegasusBag.SITE_CATALOG ); mSiteStore = bag.getHandleToSiteStore(); mAverageCommunicationCost = (this.AVERAGE_BANDWIDTH / this.AVERAGE_DATA_SIZE_BETWEEN_JOBS); } /** * Schedules the workflow using the heft. * * @param dag the ADag object containing the abstract workflow * that needs to be mapped. * @param sites the list of candidate sites where the workflow can potentially * execute. */ public void schedule( ADag dag , List sites ){ //metadata about the DAG needs to go to Graph object //mLabel = dag.getLabel(); //convert the dag into a graph representation schedule( Adapter.convert( dag ), sites, dag.getLabel() ); } /** * Schedules the workflow according to the HEFT algorithm. * * @param workflow the workflow that has to be scheduled. * @param sites the list of candidate sites where the workflow can potentially * execute. 
* @param label the label of the workflow * */ public void schedule( Graph workflow , List sites, String label ){ mLabel = label; mWorkflow = workflow; populateSiteMap( sites ); //compute weighted execution times for each job for( Iterator it = workflow.nodeIterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); Job job = (Job)node.getContent(); //add the heft bag to a node Float averageComputeTime = new Float( calculateAverageComputeTime( job ) ); HeftBag b = new HeftBag(); b.add( HeftBag.AVG_COMPUTE_TIME, averageComputeTime ); node.setBag( b ); mLogger.log( "Average Compute Time " + node.getID() + " is " + averageComputeTime, LogManager.DEBUG_MESSAGE_LEVEL ); } //add a dummy root Bag bag; GraphNode dummyRoot = new GraphNode( "dummy", "dummy" ); workflow.addRoot( dummyRoot ); bag = new HeftBag(); //downward rank for the root is set to 0 bag.add( HeftBag.DOWNWARD_RANK, new Float( 0 ) ); dummyRoot.setBag( bag ); //do a breadth first traversal and compute the downward ranks Iterator it = workflow.iterator(); dummyRoot = ( GraphNode )it.next(); //we have the dummy root Float drank; //the dummy root has a downward rank of 0 dummyRoot.getBag().add( HeftBag.DOWNWARD_RANK, new Float( 0 ) ); //stores the nodes in sorted ascending order List sortedNodes = new LinkedList(); while ( it.hasNext() ){ GraphNode node = ( GraphNode ) it.next(); drank = new Float( computeDownwardRank( node ) ); bag = node.getBag(); bag.add( HeftBag.DOWNWARD_RANK , drank ); sortedNodes.add( node ); mLogger.log( "Downward rank for node " + node.getID() + " is " + drank, LogManager.DEBUG_MESSAGE_LEVEL ); } //sort the node Collections.sort( sortedNodes, new HeftGraphNodeComparator() ); //the start time and end time for the dummy root is 0 dummyRoot.getBag().add( HeftBag.ACTUAL_START_TIME, new Long( 0 ) ); dummyRoot.getBag().add( HeftBag.ACTUAL_FINISH_TIME, new Long( 0 ) ); //schedule out the sorted order of the nodes for( it = sortedNodes.iterator(); it.hasNext(); ){ GraphNode current = (GraphNode) it.next(); bag = current.getBag(); mLogger.log("Scheduling node " + current.getID(), LogManager.DEBUG_MESSAGE_LEVEL); //figure out the sites where a job can run Job job = (Job) current.getContent(); List runnableSites = mTCMapper.getSiteList( job.getTXNamespace(), job.getTXName(), job.getTXVersion(), mSites); //for each runnable site get the estimated finish time //and schedule job on site that minimizes the finish time String site; long est_result[ ]; long result[] = new long[ 2 ]; result [ 1 ] = this.MAXIMUM_FINISH_TIME; for( Iterator rit = runnableSites.iterator(); rit.hasNext(); ){ site = (String) rit.next(); est_result = calculateEstimatedStartAndFinishTime( current, site ); //if existing EFT is greater than the returned EFT //set existing EFT to the returned EFT if( result[ 1 ] > est_result[ 1 ] ){ result[ 0 ] = est_result[ 0 ]; result[ 1 ] = est_result[ 1 ]; //tentatively schedule the job for that site bag.add( HeftBag.SCHEDULED_SITE , site ); } } //update the site selected with the job bag.add( HeftBag.ACTUAL_START_TIME, new Long( result[ 0 ] )); bag.add( HeftBag.ACTUAL_FINISH_TIME, new Long( result[ 1 ] ) ); site = (String)bag.get( HeftBag.SCHEDULED_SITE ); scheduleJob( site, result[ 0 ], result[ 1 ] ); //log the information StringBuffer sb = new StringBuffer(); sb.append( "Scheduled job " ).append( current.getID() ). append( " to site " ).append( site ). append( " with from ").append( result[ 0 ] ). 
append( " till " ).append( result[ 1 ] ); mLogger.log( sb.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); }//end of going through all the sorted nodes //remove the dummy root mWorkflow.remove( dummyRoot.getID() ); } /** * Returns the makespan of the scheduled workflow. It is maximum of the * actual finish times for the leaves of the scheduled workflow. * * @return long the makespan of the workflow. */ public long getMakespan( ){ long result = -1; //compute the maximum of the actual end times of leaves for( Iterator it = mWorkflow.getLeaves().iterator(); it.hasNext() ; ){ GraphNode node = ( GraphNode )it.next(); Long endTime = ( Long ) node.getBag().get( HeftBag.ACTUAL_FINISH_TIME ); //sanity check if( endTime == null ){ throw new RuntimeException( "Looks like the leave node is unscheduled " + node.getID()); } if( endTime > result ){ result = endTime; } } return result; } /** * Estimates the start and finish time of a job on a site. * * @param node the node that is being scheduled * @param site the site for which the finish time is reqd. * * @return long[0] the estimated start time. * long[1] the estimated finish time. */ protected long[] calculateEstimatedStartAndFinishTime( GraphNode node, String site ){ Job job = ( Job )node.getContent(); long[] result = new long[2]; //calculate the ready time for the job //that is time by which all the data needed //by the job has reached the site. long readyTime = 0; for( Iterator it = node.getParents().iterator(); it.hasNext(); ){ GraphNode parent = ( GraphNode )it.next(); long current = 0; //add the parent finish time to current current += (Long)parent.getBag().get( HeftBag.ACTUAL_FINISH_TIME ); //if the parent was scheduled on another site //add the average data transfer time. if( !parent.getBag().get( HeftBag.SCHEDULED_SITE ).equals( site ) ){ current += this.mAverageCommunicationCost; } if ( current > readyTime ){ //ready time is maximum of all currents readyTime = current; } } //the estimated start time is the maximum //of the ready time and available time of the site //using non insertion based policy for time being result[ 0 ] = getAvailableTime( site , readyTime ); // do not need it, as available time is always >= ready time // if ( result[ 0 ] < readyTime ){ // result[ 0 ] = readyTime; // } //the estimated finish time is est + compute time on site List entries = mTCMapper.getTCList( job.getTXNamespace(), job.getTXName(), job.getTXVersion(), site ); //pick the first one for time being TransformationCatalogEntry entry = ( TransformationCatalogEntry ) entries.get( 0 ); result[ 1 ] = result[ 0 ] + getExpectedRuntime( job, entry ); //est now stores the estimated finish time return result; } /** * Computes the downward rank of a node. * * The downward rank of node i is * _ ___ * max { rank( n ) + w + c } * j E pred( i ) d j j ji * * * * @param node the GraphNode whose rank needs to be computed. * * @return computed rank. */ protected float computeDownwardRank( GraphNode node ){ float result = 0; //value needs to be computed for each parent separately //float value = 0; for( Iterator it = node.getParents().iterator(); it.hasNext(); ){ GraphNode p = (GraphNode)it.next(); Bag pbag = p.getBag(); float value = 0; value += ( getFloatValue ( pbag.get( HeftBag.DOWNWARD_RANK ) )+ getFloatValue ( pbag.get( HeftBag.AVG_COMPUTE_TIME ) ) + mAverageCommunicationCost ); if( value > result ){ result = value; } } return result; } /** * Returns the average compute time in seconds for a job. * * @param job the job whose average compute time is to be computed. 
* * @return the weighted compute time in seconds. */ protected float calculateAverageComputeTime( Job job ){ //get all the TC entries for the sites where a job can run List runnableSites = mTCMapper.getSiteList( job.getTXNamespace(), job.getTXName(), job.getTXVersion(), mSites ); //sanity check if( runnableSites == null || runnableSites.isEmpty() ){ throw new RuntimeException( "No runnable site for job " + job.getName() ); } mLogger.log( "Runnables sites for job " + job.getName() + " " + runnableSites , LogManager.DEBUG_MESSAGE_LEVEL ); //for each runnable site get the expected runtime String site; int total_nodes = 0; int total = 0; for( Iterator it = runnableSites.iterator(); it.hasNext(); ){ site = ( String ) it.next(); int nodes = getFreeNodesForSite( site ); List entries = mTCMapper.getTCList( job.getTXNamespace(), job.getTXName(), job.getTXVersion(), site ); //pick the first one for time being TransformationCatalogEntry entry = ( TransformationCatalogEntry ) entries.get( 0 ); int jobRuntime = getExpectedRuntime( job, entry ); total_nodes += nodes; total += jobRuntime * nodes; } return total/total_nodes; } /** * Return expected runtime. * * @param job the job in the workflow. * @param entry the TransformationCatalogEntry object. * * @return the runtime in seconds. */ protected int getExpectedRuntime( Job job, TransformationCatalogEntry entry ){ int result = -1; //try and fetch the expected runtime from the Windward AC double pcresult = getExpectedRuntimeFromAC( job , entry ); if( pcresult == 0.0 ){ mLogger.log( "PC returned a value of 0 for job" + job.getID(), LogManager.WARNING_MESSAGE_LEVEL ); result = 1; } else if( pcresult > 0.0 && pcresult < 1.0 ){ mLogger.log( "PC returned a value between 0 and 1" + pcresult + " for job " + job.getID(), LogManager.WARNING_MESSAGE_LEVEL ); result = 1; } else{ result = (int)pcresult; } // if(result == 0){ // mLogger.log("PC returned 0 as runtime. Returning 1", LogManager.ERROR_MESSAGE_LEVEL); // return result=1; // } if( result >= 1 ){ return result; } //else try and get the runtime from the profiles List profiles = entry.getProfiles( Profile.VDS ); mLogger.log( "Fetching runtime information from profiles for job " + job.getName(), LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Profiles are " + profiles, LogManager.DEBUG_MESSAGE_LEVEL); if( profiles != null ){ for (Iterator it = profiles.iterator(); it.hasNext(); ) { Profile p = (Profile) it.next(); if (p.getProfileKey().equals(this.RUNTIME_PROFILE_KEY)) { result = Integer.parseInt(p.getProfileValue()); break; } } } //if no information . try from profiles in dax if( result < 1 ){ mLogger.log( "Fetching runtime information from profiles for job " + job.getName(), LogManager.DEBUG_MESSAGE_LEVEL ); for (Iterator it = job.vdsNS.getProfileKeyIterator(); it.hasNext(); ) { String key = (String) it.next(); if ( key.equals(this.RUNTIME_PROFILE_KEY)) { result = Integer.parseInt( job.vdsNS.getStringValue( key ) ); break; } } } //sanity check for time being if( result < 1 ){ throw new RuntimeException( "Invalid or no runtime specified for job " + job.getID() ); } return result; } /** * Return expected runtime from the AC only if the process catalog is * initialized. Since Pegasus 3.0 release it always returns -1. * * @param job the job in the workflow. * @param entry the TC entry * * @return the runtime in seconds. 
*/ protected double getExpectedRuntimeFromAC( Job job, TransformationCatalogEntry entry ){ double result = -1; return result; } /** * Populates the number of free nodes for each site, by querying the * Site Catalog. * * @param sites list of sites. */ @SuppressWarnings({"unchecked", "unchecked"}) protected void populateSiteMap( List sites ){ mSiteMap = new HashMap(); //for testing purposes mSites = sites; String value = null; int nodes = 0; for( Iterator it = mSites.iterator(); it.hasNext(); ){ // SiteInfo s = mSiteHandle.getPoolEntry( site, "vanilla" ); // JobManager manager = s.selectJobManager( "vanilla", true ); // value = (String)manager.getInfo( JobManager.IDLE_NODES ); String site = (String)it.next(); SiteCatalogEntry eSite = mSiteStore.lookup( site ); GridGateway jobManager = eSite.selectGridGateway( GridGateway.JOB_TYPE.compute ); try { nodes = jobManager.getIdleNodes(); if( nodes == -1 ){ mLogger.log( "Picking up total nodes for site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); nodes = jobManager.getTotalNodes(); if( nodes == -1 ){ mLogger.log( "Picking up default free nodes for site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); nodes = Algorithm.DEFAULT_NUMBER_OF_FREE_NODES; } } }catch( Exception e ){ nodes = this.DEFAULT_NUMBER_OF_FREE_NODES; } mLogger.log( "Available nodes set for site " + site + " " + nodes, LogManager.DEBUG_MESSAGE_LEVEL ); mSiteMap.put( site, new Site( site, nodes ) ); } } /** * Returns the freenodes for a site. * * @param site the site identifier. * * @return number of nodes */ protected int getFreeNodesForSite( String site ){ if( mSiteMap.containsKey( site ) ){ return ( ( Site )mSiteMap.get( site )).getAvailableProcessors(); } else{ throw new RuntimeException( "The number of free nodes not available for site " + site ); } } /** * Schedules a job to a site. * * @param site the site at which to schedule * @param start the start time for job * @param end the end time of job */ protected void scheduleJob( String site, long start , long end ){ Site s = ( Site )mSiteMap.get( site ); s.scheduleJob( start, end ); } /** * Returns the available time for a site. * * @param site the site at which you want to schedule the job. * @param readyTime the time at which all the data reqd by the job will arrive at site. * * @return the available time of the site. */ protected long getAvailableTime( String site , long readyTime ){ if( mSiteMap.containsKey( site ) ){ return ( ( Site )mSiteMap.get( site )).getAvailableTime( readyTime ); } else{ throw new RuntimeException( "Site information unavailable for site " + site ); } } /** * This method returns a String describing the site selection technique * that is being implemented by the implementing class. * * @return String */ public String description() { return "Heft based Site Selector"; } /** * The call out to the site selector to determine on what pool the job * should be scheduled. * * @param job Job the Job object corresponding to * the job whose execution pool we want to determine. * @param pools the list of String objects representing the * execution pools that can be used. * @return if the pool is found to which the job can be mapped, a string * of the form executionpool:jobmanager where the * jobmanager can be null. If the pool is not found, then set * poolhandle to NONE. null - if some error occured . */ public String mapJob2ExecPool(Job job, List pools) { return ""; } /** * A convenience method to get the intValue for the object passed. 
* * @param key the key to be converted * * @return the floatt value if object an integer, else -1 */ private float getFloatValue( Object key ){ float k = -1; //try{ k = ( (Float) key).floatValue(); //} //catch( Exception e ){} return k; } } /** * Comparator for GraphNode objects that allow us to sort on basis of * the downward rank computed. */ class HeftGraphNodeComparator implements Comparator{ /** * Implementation of the {@link java.lang.Comparable} interface. * Compares this object with the specified object for order. Returns a * negative integer, zero, or a positive integer as this object is * less than, equal to, or greater than the specified object. The * definitions are compared by their type, and by their short ids. * * @param o1 is the object to be compared * @param o2 is the object to be compared with o1. * * @return a negative number, zero, or a positive number, if the * object compared against is less than, equals or greater than * this object. * @exception ClassCastException if the specified object's type * prevents it from being compared to this Object. */ public int compare( Object o1, Object o2 ) { if ( o1 instanceof GraphNode && o2 instanceof GraphNode ) { GraphNode g1 = ( GraphNode ) o1; GraphNode g2 = ( GraphNode ) o2; float drank1 = (( Float )g1.getBag().get( HeftBag.DOWNWARD_RANK ));//.floatValue(); float drank2 = (( Float )g2.getBag().get( HeftBag.DOWNWARD_RANK ));//.floatValue(); return (int)(drank1 - drank2); } else { throw new ClassCastException( "object is not a GraphNode" ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/heft/Processor.java0000644000175000017500000000340211757531137027625 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site.heft; /** * A data class that is used to simulate a processor on a site. * * @author Karan Vahi * @version $Revision: 2050 $ */ public class Processor { /** * The start time of the current scheduled job. */ private long mStartTime; /** * The end time of the current scheduled job. */ private long mEndTime; /** * The default constructor. */ public Processor() { mStartTime = 0; mEndTime = 0; } /** * Returns the earliest time the processor is available for scheduling * a job. It is non insertion based scheduling policy. * * @param start the time at which to start the search. * * @return long */ public long getAvailableTime( long start ){ return ( mEndTime > start )? mEndTime : start; } /** * Schedules a job on to a processor. * * @param start the start time of the job. 
* @param end the end time for the job */ public void scheduleJob( long start, long end ){ mStartTime = start; mEndTime = end; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/heft/HeftBag.java0000644000175000017500000001437111757531137027155 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site.heft; import edu.isi.pegasus.planner.partitioner.graph.Bag; /** * A data class that implements the Bag interface and stores the extra information * that is required by the HEFT algorithm for each node. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class HeftBag implements Bag { /** * Array storing the names of the attributes that are stored with the * site. */ public static final String HEFTINFO[] = { "avg-compute-time", "downward-rank", "upward-rank", "start-time", "end-time", "scheduled-site" }; /** * The constant to be passed to the accessor functions to get or set the * average compute time for the node. */ public static final Integer AVG_COMPUTE_TIME = new Integer( 0 ); /** * The constant to be passed to the accessor functions to get or set the * downward rank for a node. */ public static final Integer DOWNWARD_RANK = new Integer( 1 ); /** * The constant to be passed to the accessor functions to get or set the * upward rank for a node. */ public static final Integer UPWARD_RANK = new Integer( 2 ); /** * The constant to be passed to the accessor functions to get or set the * actual start time for a job. */ public static final Integer ACTUAL_START_TIME = new Integer( 3 ); /** * The constant to be passed to the accessor functions to get or set the * actual end time for a job. */ public static final Integer ACTUAL_FINISH_TIME = new Integer( 4 ); /** * The site where the job is scheduled. */ public static final Integer SCHEDULED_SITE = new Integer( 5 ); /** * The average compute time for a node. */ private float mAvgComputeTime; /** * The downward rank for a node. */ private float mDownwardRank; /** * The upward rank for a node. */ private float mUpwardRank; /** * The estimated start time for a job. */ private long mStartTime; /** * The estimated end time for a job. */ private long mEndTime; /** * The site where a job is scheduled to run. */ private String mScheduledSite; /** * The default constructor. */ public HeftBag() { mAvgComputeTime = 0; mDownwardRank = 0; mUpwardRank = 0; mStartTime = 0; mEndTime = 0; mScheduledSite = ""; } /** * Adds an object to the underlying bag corresponding to a particular key. * * @param key the key with which the value has to be associated. * @param value the value to be associated with the key. * * @return boolean indicating if insertion was successful. 
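 *
 * As a usage sketch (values hypothetical), mirroring how the HEFT
 * Algorithm class populates the bag:
 *
 *   Bag b = new HeftBag();
 *   b.add( HeftBag.AVG_COMPUTE_TIME, new Float( 42.0f ) );
 *   b.add( HeftBag.SCHEDULED_SITE, "siteX" );
 *   Float avg = (Float) b.get( HeftBag.AVG_COMPUTE_TIME );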
* */ public boolean add( Object key, Object value ) { boolean result = true; int k = getIntValue( key ); float fv = 0; long lv = 0; //short cut for scheduled site if( k == this.SCHEDULED_SITE ){ mScheduledSite = (String)value; return result; } //parse the value correctly switch( k ){ case 0: case 1: case 2: fv = ( (Float) value).floatValue(); break; case 3: case 4: lv = ( (Long) value).longValue(); break; default: } switch ( k ) { case 0: this.mAvgComputeTime = fv; break; case 1: this.mDownwardRank = fv; break; case 2: this.mUpwardRank = fv; break; case 3: this.mStartTime = lv; break; case 4: this.mEndTime = lv; break; default: result = false; } return result; } /** * Returns true if the namespace contains a mapping for the specified key. * * @param key The key that you want to search for in the bag. * * @return boolean */ public boolean containsKey(Object key) { int k = -1; try{ k = ( (Integer) key).intValue(); } catch( Exception e ){} return ( k >= this.AVG_COMPUTE_TIME.intValue() && k <= this.UPWARD_RANK.intValue() ); } /** * Returns an objects corresponding to the key passed. * * @param key the key corresponding to which the objects need to be * returned. * * @return the object that is found corresponding to the key or null. */ public Object get( Object key ) { int k = getIntValue( key ); switch ( k ) { case 0: return this.mAvgComputeTime; case 1: return this.mDownwardRank; case 2: return this.mUpwardRank; case 3: return this.mStartTime; case 4: return this.mEndTime; case 5: return this.mScheduledSite; default: throw new RuntimeException( " Wrong Heft key. Please use one of the predefined key types " + key ); } } /** * A convenience method to get the intValue for the object passed. * * @param key the key to be converted * * @return the int value if object an integer, else -1 */ private int getIntValue( Object key ){ int k = -1; try{ k = ( (Integer) key).intValue(); } catch( Exception e ){} return k; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/Heft.java0000644000175000017500000001157211757531137025615 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.selector.site.heft.HeftBag; import edu.isi.pegasus.planner.selector.site.heft.Algorithm; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.util.List; import java.util.Iterator; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.partitioner.graph.Adapter; /** * The HEFT based site selector. The runtime for the job in seconds is picked * from the pegasus profile key runtime in the transformation catalog for a * transformation. * * The data communication costs between jobs if scheduled on different sites * is assumed to be fixed. 
Later on if required, the ability to specify this * value will be exposed via properties. * * The number of processors in a site is picked by the attribute idle-nodes * associated with the vanilla jobmanager for a site in the site catalog. * * @author Karan Vahi * @version $Revision: 2590 $ * * @see Algorithm#AVERAGE_BANDWIDTH * @see Algorithm#RUNTIME_PROFILE_KEY * @see Algorithm#DEFAULT_NUMBER_OF_FREE_NODES * @see Algorithm#AVERAGE_DATA_SIZE_BETWEEN_JOBS * @see org.griphyn.cPlanner.classes.JobManager#IDLE_NODES */ public class Heft extends Abstract { /** * An instance of the class that implements the HEFT algorithm. */ private Algorithm mHeftImpl; /** * The default constructor. */ public Heft() { super(); } /** * Initializes the site selector. * * @param bag the bag of objects that is useful for initialization. */ public void initialize( PegasusBag bag ){ super.initialize( bag ); mHeftImpl = new Algorithm( bag ); } /** * Maps the jobs in the workflow to the various grid sites. * The jobs are mapped by setting the site handle for the jobs. * * @param workflow the workflow. * * @param sites the list of String objects representing the * execution sites that can be used. */ public void mapWorkflow( ADag workflow, List sites ){ this.mapWorkflow( Adapter.convert( workflow ), sites, workflow.getLabel() ); } /** * Maps the jobs in the workflow to the various grid sites. * The jobs are mapped by setting the site handle for the jobs. * * @param workflow the workflow in a Graph form. * * @param sites the list of String objects representing the * execution sites that can be used. * */ public void mapWorkflow( Graph workflow, List sites ){ throw new UnsupportedOperationException( "Heft needs the DAX label to work" ); } /** * Maps the jobs in the workflow to the various grid sites. * The jobs are mapped by setting the site handle for the jobs. * * @param workflow the workflow in a Graph form. * * @param sites the list of String objects representing the * execution sites that can be used. * * @param label the label of the workflow */ public void mapWorkflow( Graph workflow, List sites, String label ){ //schedule the workflow, till i fix the interface mHeftImpl.schedule( workflow, sites, label ); //get the makespan of the workflow mLogger.log( "Makespan of scheduled workflow is " + mHeftImpl.getMakespan() , LogManager.DEBUG_MESSAGE_LEVEL ); //iterate through the jobs and just set the site handle //accordingly for( Iterator it = workflow.nodeIterator(); it.hasNext(); ){ GraphNode node = ( GraphNode ) it.next(); Job job = ( Job ) node.getContent(); job.setSiteHandle( (String)node.getBag().get( HeftBag.SCHEDULED_SITE ) ); } } /** * This method returns a String describing the site selection technique * that is being implemented by the implementing class. * * @return String */ public String description() { return "Heft based Site Selector"; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/SiteSelectorFactoryException.java0000644000175000017500000000654411757531137032546 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating SiteSelector implementations. * * @author Karan Vahi * @version $Revision: 2079 $ */ public class SiteSelectorFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Site Selector"; /** * Constructs a SiteSelectorFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public SiteSelectorFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a SiteSelectorFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public SiteSelectorFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a SiteSelectorFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public SiteSelectorFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a SiteSelectorFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public SiteSelectorFactoryException(String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/RoundRobin.java0000644000175000017500000001665211757531137027014 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site; import edu.isi.pegasus.planner.classes.Job; import java.util.Iterator; import java.util.List; import java.util.ListIterator; /** * This ends up scheduling the jobs in a round robin manner. In order to avoid * starvation, the jobs are scheduled in a round robin manner per level, and * the queue is initialised for each level. 
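 *
 * For illustration (site names hypothetical): with candidate sites A and B,
 * three jobs on the same level map to A, B, A in turn, leaving usage counts
 * A:2 and B:1; when the first job of the next level arrives, the counts are
 * reset to zero and the rotation starts afresh.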
* * @author Karan Vahi * @version $Revision: 2590 $ */ public class RoundRobin extends AbstractPerJob { /** * The current level in the abstract workflow. It is level that is designated * by Chimera while constructing the graph bottom up. */ private int mCurrentLevel; /** * The list of pools that have been given by the user at run time or has been * authenticated against. At present these are the same as the list of pools * that is passed for site selection at each function. */ private java.util.LinkedList mExecPools; /** * The default constructor. Not to be used. */ public RoundRobin() { mCurrentLevel = -1; } /** * Returns a brief description of the site selection techinque implemented by * this class. * * @return String */ public String description() { String st = "Round Robin Scheduling per level of the workflow"; return st; } /** * Maps a job in the workflow to an execution site. * * @param job the job to be mapped. * @param sites the list of String objects representing the * execution sites that can be used. * */ public void mapJob( Job job, List sites ){ ListIterator it; NameValue current; NameValue next; if ( mExecPools == null ) { initialiseList( sites ); } if ( job.level != mCurrentLevel ) { //reinitialize stuff mCurrentLevel = job.level; it = mExecPools.listIterator(); while ( it.hasNext() ) { ( ( NameValue ) it.next() ).setValue( 0 ); } } //go around the list and schedule it to the first one where it can it = mExecPools.listIterator(); String mapping = null; while ( it.hasNext() ) { current = ( NameValue ) it.next(); //check if job can run on pool if ( mTCMapper.isSiteValid( job.namespace, job.logicalName, job.version, current.getName() ) ) { mapping = current.getName(); //update the the number of times used and place it at the //correct position in the list current.increment(); //the current element stays at it's place if it is the only one //in the list or it's value is less than the next one. if ( it.hasNext() ) { next = ( NameValue ) it.next(); if ( current.getValue() <= next.getValue() ) { break; } else { current = ( NameValue ) it.previous(); current = ( NameValue ) it.previous(); System.out.print( "" ); } } it.remove(); //now go thru the list and insert in the correct position while ( it.hasNext() ) { next = ( NameValue ) it.next(); if ( current.getValue() <= next.getValue() ) { //current has to be inserted before next next = ( NameValue ) it.previous(); it.add( current ); break; } } //current goes to the end of the list it.add( current ); break; } } //means no pool has been found to which the job could be mapped to. job.setSiteHandle( mapping ); } /** * It initialises the internal list. A node in the list corresponds to a pool * that can be used, and has the value associated with it which is the * number of jobs in the current level have been scheduled to it. * * @param pools List */ private void initialiseList( List pools ) { if ( mExecPools == null ) { mExecPools = new java.util.LinkedList(); Iterator it = pools.iterator(); while ( it.hasNext() ) { mExecPools.add( new NameValue( ( String ) it.next(), 0 ) ); } } } /** * A inner name value class that associates a string with an int value. * This is used to populate the round robin list that is used by this * scheduler. */ class NameValue { /** * Stores the name of the pair (the left handside of the mapping). */ private String name; /** * Stores the corresponding value to the name in the pair. */ private int value; /** * The default constructor which initialises the class member variables. 
*/ public NameValue() { name = new String(); value = -1; } /** * Initialises the class member variables to the values passed in the * arguments. * * @param name corresponds to the name in the NameValue pair * * @param value corresponds to the value for the name in the NameValue pair */ public NameValue( String name, int value ) { this.name = name; this.value = value; } /** * The set method to set the value. * @param value int */ public void setValue( int value ) { this.value = value; } /** * Returns the value associated with this pair. * @return int */ public int getValue() { return this.value; } /** * Returns the key of this pair, i.e the left hand side of the mapping. * @return String */ public String getName() { return this.name; } /** * Increments the int value by one. */ public void increment() { value += 1; } /** * Returns a copy of this object. * @return Object */ public Object clone() { NameValue nv = new NameValue( this.name, this.value ); return nv; } /** * Writes out the contents of the class to a String in form suitable for * displaying. * @return String */ public String toString() { String str = name + "-->" + value; return str; } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/AbstractPerJob.java0000644000175000017500000000433511757531137027573 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.util.List; import java.util.Iterator; import edu.isi.pegasus.planner.namespace.Hints; /** * The base class for the site selectors that want to map one job at a time. * * @author Karan Vahi * @version $Revision: 2590 $ */ public abstract class AbstractPerJob extends Abstract { /** * Maps the jobs in the workflow to the various grid sites. * * @param workflow the workflow in a Graph form. * @param sites the list of String objects representing the * execution sites that can be used. * */ public void mapWorkflow(Graph workflow, List sites) { //iterate through the jobs in BFS for (Iterator it = workflow.iterator(); it.hasNext(); ) { GraphNode node = (GraphNode) it.next(); Job job = (Job) node.getContent(); //only map a job for which execute site hint //is not specified in the DAX if( !job.hints.containsKey( Hints.EXECUTION_POOL_KEY ) ){ mapJob( job, sites); } } } /** * Maps a job in the workflow to the various grid sites. * * @param job the job to be mapped. * @param sites the list of String objects representing the * execution sites that can be used. 
* */ public abstract void mapJob( Job job, List sites ) ; } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/Random.java0000644000175000017500000000647711757531137026157 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import java.util.List; /** * A random site selector that maps to a job to a random pool, amongst the subset * of pools where that particular job can be executed. * * @author Karan Vahi * @version $Revision: 2590 $ */ public class Random extends AbstractPerJob { /** * The default constructor. Should not be called. Call the overloaded one. */ public Random() { } /** * Initializes the site selector. * * @param bag the bag of objects that is useful for initialization. * */ public void initialize( PegasusBag bag ){ super.initialize( bag ); } /** * Maps a job in the workflow to an execution site. * * @param job the job to be mapped. * @param sites the list of String objects representing the * execution sites that can be used. * */ public void mapJob( Job job, List sites ){ List rsites = mTCMapper.getSiteList( job.getTXNamespace(),job.getTXName(), job.getTXVersion(), sites ); if( rsites == null || rsites.isEmpty() ){ job.setSiteHandle( null ); } else{ job.setSiteHandle(selectRandomSite(rsites)); StringBuffer message = new StringBuffer( ); message.append( "[Random Selector] Mapped " ).append( job.getID() ).append( " to " ).append( job.getSiteHandle() ); mLogger.log( message.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); } } /** * Returns a brief description of the site selection technique being used. * * @return String */ public String description() { String st = "Random Site Selection"; return st; } /** * The random selection that selects randomly one of the records returned by * the transformation catalog. * * @param sites List of Stringobjects. * * @return String */ private String selectRandomSite(List sites) { double randNo; int noOfRecs = sites.size(); //means we have to choose a random location between 0 and (noOfLocs -1) randNo = Math.random() * noOfRecs; int recSelected = new Double(randNo).intValue(); /* String message = "Random Site selected is " + (recSelected + 1) + " amongst " + noOfRecs + " possible"; mLogger.logMessage(message, 1, false); */ return (String)sites.get(recSelected); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/NonJavaCallout.java0000644000175000017500000007133011757531137027605 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.catalog.site.classes.FileServer; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPServer; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.catalog.site.impl.old.PoolInfoProvider; import java.io.BufferedReader; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.StringTokenizer; import edu.isi.pegasus.planner.partitioner.graph.Adapter; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; /** * This is the class that implements a call-out to a site selector which * is an application or executable script. In order to use the site * selector implemented by this class, the property * pegasus.selector.site must be set to value * NonJavaCallout.

* * This site selector implements a popen()-like call to an * external application that conforms to the API laid out here. The name * of the application to run is specified by the property * pegasus.selector.site.path. Its value points to a locally * available application.

* * If the external executable requires certain environment variables to * be set for execution, these can be specified in the property files, * using the prefix pegasus.selector.site.env. The name of the * environment variable is obtained by stripping the prefix. For * example, to set the variable PATH to a certain value, use the * following entry in your user property file:

 *
 *   pegasus.selector.site.env.PATH = /usr/bin:/bin:/usr/X11R6/bin
 *
* * The site selector populates the environment of the external * application with the following default properties, which can * be overwritten by user-specified properties:

 *   key            value
 *   ------------   --------------------------------
 *   PEGASUS_HOME   As set by the system
 *   CLASSPATH      From java.class.path
 *   JAVA_HOME      From java.home
 *   USER           From user.name, if present
 *   LOGNAME        From user.name, if present
 *   HOME           From user.home, if present
 *   TMP            From java.io.tmpdir, if present
 *   TZ             From user.timezone, if present

* * The user can override these defaults, or add further environment * variables, by specifying properties with the pegasus.selector.site.env. * prefix. For example, a user can override the default user.home value by * setting the property pegasus.selector.site.env.user.home.
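 *
 * For example, a user property file might contain (values hypothetical;
 * the property names are the ones documented in this class):
 *
 *   pegasus.selector.site.path = /opt/selectors/my-selector
 *   pegasus.selector.site.timeout = 120
 *   pegasus.selector.site.env.PATH = /usr/bin:/bin
 *   pegasus.selector.site.env.TZ = UTC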

* * The external application is invoked with one commandline argument. * This argument is the name of a temporary file. The temporary file is * created for each invocation anew by the site selecting caller. Being * temporary, the file is deleted after the site selector returns with * success. The deletion of the file is governed by the property * pegasus.selector.site.keep.tmp. It can have a tristate value with the valid * values being *

 *              ALWAYS
 *              NEVER
 *              ONERROR
 *

* * The external application is expected to write one line to stdout. * The line starts with the string SOLUTION:, followed * by the chosen site handle. Optionally, separated by a colon, the * name of a jobmanager for the site can be provided by the site * selector. Two examples for successful site selections are:

 *
 *   SOLUTION:mysite:my.job.mgr/jobmanager-batch
 *   SOLUTION:siteY
 *
* * Note that these are just two examples. The site selector only returns one * line with the appropriate solution. If no site is found to be eligible, the * pool handle should be set to NONE by the site selector.

* * The temporary file is the cornerstone of the communication * between the site selecting caller and the external site selector. It is a * collection of key-value pairs. In each pair, key and value are separated by * an equals (=) sign, and each pair stands on a line of its own. No multi-line * values are permitted.

* * The following pairs are generated for the site selector temporary file:

 *   #  key             value
 *   -  --------------  ----------------------------------------------------
 *   1  version         The version of the site selector API, currently 2.0.
 *   1  transformation  The fully-qualified definition identifier for the
 *                      TR, ns::id:vs.
 *   1  derivation      The fully-qualified definition identifier for the
 *                      DV, ns::id:vs.
 *   1  job.level       The job's depth in the DFS tree of the workflow DAG.
 *   1  job.id          The job's ID, as used in the DAX file.
 *   N  resource.id     A pool handle, followed by a whitespace, followed by
 *                      a gridftp server. Typically, each gridftp server is
 *                      enumerated once, so you may have multiple occurrences
 *                      of the same site.
 *   M  input.lfn       An input LFN, optionally followed by a whitespace
 *                      and filesize.
 *   1  wf.name         The label of the DAX, as found in the DAX's root
 *                      element.
 *   1  wf.index        The DAX index, which is incremented for each
 *                      partition.
 *   1  wf.time         The mtime of the workflow.
 *   1  wf.manager      The name of the workflow manager to be used, e.g.
 *                      dagman.
 *   1  vo.name         Unused at present; the name of the virtual
 *                      organization that runs this WF.
 *   1  vo.group        Unused at present; usage not clear.
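 *
 * Putting it together, a generated temporary file might read as follows
 * (transformation, site, and file names hypothetical):
 *
 *   version=2.0
 *   transformation=pegasus::preprocess:1.0
 *   derivation=pegasus::preprocess:1.0
 *   job.level=1
 *   job.id=ID000001
 *   resource.id=siteX gsiftp://siteX.example.org/scratch
 *   input.lfn=f.a
 *   wf.name=diamond
 *   wf.index=0
 *   wf.manager=dagman
 *   vo.name=NONE
 *   vo.group=NONE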

* * In order to detect malfunctioning site selectors, a timeout is * attached to each site selector invocation, see property * pegasus.selector.site.timeout. By default, a site selector * is given up on after 60 seconds.
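 *
 * For illustration, a minimal external selector obeying this API could be
 * the following sketch (class name hypothetical; only the key=value input
 * format and the SOLUTION: output line are mandated by the API above):
 *
 *   import java.io.IOException;
 *   import java.nio.file.Files;
 *   import java.nio.file.Paths;
 *
 *   public class EchoSiteSelector {
 *       public static void main( String[] args ) throws IOException {
 *           // args[0] is the temporary key=value file written by Pegasus
 *           String site = null;
 *           for ( String line : Files.readAllLines( Paths.get( args[0] ) ) ) {
 *               if ( line.startsWith( "resource.id=" ) ) {
 *                   // value is "<site-handle> <server-url>"; keep the handle
 *                   site = line.substring( "resource.id=".length() ).split( "\\s+" )[0];
 *                   break;
 *               }
 *           }
 *           // answer with the first candidate site, or NONE if none offered
 *           System.out.println( "SOLUTION:" + ( site == null ? "NONE" : site ) );
 *       }
 *   }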

* * @author Karan Vahi * @author Jens Vöckler * * @version $Revision: 4507 $ * * @see java.lang.Runtime * @see java.lang.Process */ public class NonJavaCallout extends AbstractPerJob { /** * The prefix to be used while creating a temporary file to pass to * the external siteselector. */ public static final String PREFIX_TEMPORARY_FILE = "pegasus"; /** * The suffix to be used while creating a temporary file to pass to * the external siteselector. */ public static final String SUFFIX_TEMPORARY_FILE = null; /** * The prefix of the property names that specify the environment * variables that need to be set before calling out to the site * selector. */ public static final String PREFIX_PROPERTIES = "pegasus.selector.site.env."; /** * The prefix that the site selector writes out on its stdout to * designate that it is sending a solution. */ public static final String SOLUTION_PREFIX = "SOLUTION:"; /** * The version number associated with this API of non java callout * site selection. */ public static final String VERSION = "2.0"; //tristate variables for keeping the temporary files generated /** * The state denoting never to keep the temporary files. */ public static final int KEEP_NEVER = 0; /** * The state denoting to keep the temporary files only in case of error. */ public static final int KEEP_ONERROR = 1; /** * The state denoting always to keep the temporary files. */ public static final int KEEP_ALWAYS = 2; /** * The description of the site selector. */ private static final String mDescription = "External call-out to a site-selector application"; /** * The map that contains the environment variables including the * default ones that are set while calling out to the site selector * unless they are overridden by the values set in the properties * file. */ private Map mEnvVar; /** * The timeout value in seconds after which to timeout, in the case * where the external site selector does nothing (nothing on stdout * nor stderr). */ private int mTimeout; /** * The tristate value for whether keeping the temporary files generated or * not. */ private int mKeepTMP; /** * The path to the site selector. */ private String mSiteSelectorPath; /** * The abstract DAG. */ private ADag mAbstractDag; /** * The default constructor. */ public NonJavaCallout(){ super(); // set the default timeout to 60 seconds mTimeout = 60; //default would be onerror mKeepTMP = KEEP_ONERROR; } /** * Initializes the site selector. * * @param bag the bag of objects that is useful for initialization. * */ public void initialize( PegasusBag bag ){ super.initialize( bag ); mTimeout = mProps.getSiteSelectorTimeout(); mSiteSelectorPath = mProps.getSiteSelectorPath(); // load the environment variables from the properties file // and the default values. this.loadEnvironmentVariables(); //get the value from the properties file. mKeepTMP = getKeepTMPValue(mProps.getSiteSelectorKeep()); } /** * Maps the jobs in the workflow to the various grid sites. * The jobs are mapped by setting the site handle for the jobs. * * @param workflow the workflow. * * @param sites the list of String objects representing the * execution sites that can be used. */ public void mapWorkflow( ADag workflow, List sites ){ mAbstractDag = workflow; mapWorkflow( Adapter.convert( workflow ), sites ); } /** * Returns a brief description of the site selection technique * implemented by this class. * * @return a self-description of this site selector. */ public String description(){ return mDescription; } /** * Calls out to the external site selector. 
The method converts a * Job object into an API-compliant temporary file. * The file's name is provided as single commandline argument to the * site selector executable when it is invoked. The executable, * representing the external site selector, provides its answer * on stdout. The answer is captures, and returned. * * @param job is a representation of the DAX compute job whose site of * execution need to be determined. * * @param sites the list of String objects representing the * execution sites that can be used. * * * * FIXME: Some site selector return an empty string on failures. Also: * NONE could be a valid site name. * * @see org.griphyn.cPlanner.classes.Job */ public void mapJob( Job job, List sites ){ Runtime rt = Runtime.getRuntime(); // prepare the temporary file that needs to be sent to the // Site Selector via command line. File ipFile = prepareInputFile( job, sites ); // sanity check if(ipFile == null){ job.setSiteHandle( null ); return; } // prepare the environment to call out the site selector String command = this.mSiteSelectorPath; if ( command == null ) { // delete the temporary file generated ipFile.delete(); throw new RuntimeException( "Site Selector: Please set the path to the external site " + "selector in the properties! " ); } try { command += " " + ipFile.getAbsolutePath(); // get hold of all the environment variables that are to be set String[] envArr = this.getEnvArrFromMap(); mLogger.log( "Calling out to site selector " + command, LogManager.DEBUG_MESSAGE_LEVEL); Process p = rt.exec( command , envArr ); // set up to read subprogram output InputStream is = p.getInputStream(); InputStreamReader isr = new InputStreamReader(is); BufferedReader br = new BufferedReader(isr); // set up to read subprogram error InputStream er = p.getErrorStream(); InputStreamReader err = new InputStreamReader(er); BufferedReader ebr = new BufferedReader(err); // pipe the process stdout and stderr to standard stdout/stderr // FIXME: Really? I thought we want to capture stdout? String s = null; String se = null; // set the variable to check if the timeout needs to be set or not boolean notTimeout = ( mTimeout <= 0 ); boolean stdout = false; boolean stderr = false; int time = 0; while( ( (stdout =br.ready()) || (stderr = ebr.ready()) ) || notTimeout || time < mTimeout){ if ( ! ( stdout || stderr ) ) { // nothing on either streams // sleep for some time try { time +=5; mLogger.log("main thread going to sleep " + time, LogManager.DEBUG_MESSAGE_LEVEL); Thread.sleep(5000); mLogger.log("main thread woken up", LogManager.DEBUG_MESSAGE_LEVEL); } catch ( InterruptedException e ) { // do nothing // we potentially loose time here. 
} } else { // we hearing something from selector // reset the time counter time = 0; if ( stdout ) { s = br.readLine(); mLogger.log("[Site Selector stdout] " + s, LogManager.DEBUG_MESSAGE_LEVEL); // parse the string to get the output if ( parseStdOut( job, s ) ){ break; } } if ( stderr ) { se = ebr.readLine(); mLogger.log("[Site Selector stderr] " + se, LogManager.ERROR_MESSAGE_LEVEL); } } } // while // close the streams br.close(); ebr.close(); if ( time >= mTimeout ) { mLogger.log( "External Site Selector timeout after " + mTimeout + " seconds", LogManager.ERROR_MESSAGE_LEVEL); p.destroy(); // no use closing the streams as it would be probably hung job.setSiteHandle( null ); return; } // the site selector seems to have worked without any errors // delete the temporary file that was generated only if the // process exited with a status of 0 // FIXME: Who is going to clean up after us? int status = p.waitFor(); if ( status != 0){ // let the user know site selector exited with non zero mLogger.log("Site Selector exited with non zero exit " + "status " + status, LogManager.DEBUG_MESSAGE_LEVEL); } //delete the temporary file on basis of keep value if((status == 0 && mKeepTMP < KEEP_ALWAYS) || (status != 0 && mKeepTMP == KEEP_NEVER )){ //deleting the file if ( ! ipFile.delete() ) mLogger.log("Unable to delete temporary file " + ipFile.getAbsolutePath(),LogManager.WARNING_MESSAGE_LEVEL); } } catch ( IOException e ) { mLogger.log("[Site selector] " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); } catch ( InterruptedException e ) { mLogger.log("Waiting for site selector to exit: " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); } return; } /** * Writes job knowledge into the temporary file passed to the external * site selector. The job knowledge derives from the contents of the * DAX job's Job record, and the a list of site * candidates. The format of the file is laid out in the class's * introductory documentation. * * @param job is a representation of the DAX compute job whose site of * execution need to be determined. * * @param pools is a list of site candidates. The items of the list are * String objects. * * @return the temporary input file was successfully prepared. A value * of null implies that an error occured while writing * the file. * * @see #getTempFilename() */ private File prepareInputFile( Job job, List pools ) { File f = new File( this.getTempFilename() ); PrintWriter pw; try { pw = new PrintWriter(new FileWriter(f)); // write out the version of the api pw.println("version=" + this.VERSION); // fw.write("\nvds_job_name=" + job.jobName); pw.println("transformation=" + job.getCompleteTCName()); pw.println("derivation=" + job.getCompleteDVName()); // write out the job id and level as gotten from dax pw.println("job.level=" + job.level); pw.println("job.id=" + job.logicalId); //at present Pegasus always asks to schedule compute jobs //User should be able to specify through vdl or the pool config file. //Karan Feb 10 3:00 PM PDT //pw.println("vds_scheduler_preference=regular"); // write down the list of exec Pools and their corresponding grid // ftp servers if ( pools.isEmpty() ) { // just write out saying illustrating no exec pool or grid ftp // server passed to site selector. Upto the selector to do what // it wants. // FIXME: We need to define this part of the interface. If there // are not site candidates, should it ever reach this part of // the code? If now, insert assertion and abort here. If yes, we // need to define this case! 
But just silently write the below // will not site will with our set of site selectors. pw.println("resource.id=NONE NONE"); } else { String st, pool; for ( Iterator i = pools.iterator(); i.hasNext(); ) { pool = (String) i.next(); st = "resource.id=" + pool + " "; // get handle to pool config /* List l = mSCHandle.getGridFTPServers(pool); if (l == null || l.isEmpty()) { // FIXME: How hard should this error be? mLogger.log("Site " + pool + " has no grid ftp" + "servers associated with it", LogManager.WARNING_MESSAGE_LEVEL); // append a NONE grid ftp server pw.println(st + "NONE"); } else { for ( Iterator j=l.iterator(); j.hasNext(); ) { pw.println(st + ( (GridFTPServer) j.next()). getInfo(GridFTPServer.GRIDFTP_URL)); } } */ SiteCatalogEntry site = mSiteStore.lookup( pool ); for( Iterator it = site.getHeadNodeFS().getScratch().getSharedDirectory().getFileServersIterator(); it.hasNext();){ pw.println(st + ( (FileServer) it.next()).getURLPrefix() ); } } // for } // write the input files for ( Iterator i=job.inputFiles.iterator(); i.hasNext(); ) pw.println("input.lfn=" + ((PegasusFile)i.next()).getLFN()); // write workflow related metadata if ( this.mAbstractDag != null ) { pw.println("wf.name=" + mAbstractDag.dagInfo.nameOfADag); pw.println("wf.index=" + mAbstractDag.dagInfo.index); // pw.println("workflow.time=" + mAbstractDag.dagInfo.time??); // FIXME: Try File.lastModified() on the DAX file // should actually be picked up from the properties file pw.println("wf.manager=" + "dagman"); } // uninitialized values pw.println("vo.name=" + "NONE"); pw.println("vo.group=" + "NONE"); // done pw.flush(); pw.close(); } catch ( IOException e ) { mLogger.log("While writing to the temporary file :" + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); return null; } catch ( Exception ex ) { //an unknown exception mLogger.log("Unknown error while writing to the temp file :" + ex.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); return null; } return f; } /** * Extracts the chosen site from the site selector's answer. Parses * the stdout sent by the selector, to see, if the execution * pool and the jobmanager were sent or not. * * @param job the job that has to be mapped. * @param s is the stdout received from the site selector. * * @return boolean indicating if the stdout was succesfully parsed and * job populated. * * */ private boolean parseStdOut( Job job, String s ){ String val = null; s = s.trim(); boolean result = false; if(s.startsWith(SOLUTION_PREFIX)){ s = s.substring(SOLUTION_PREFIX.length()); StringTokenizer st = new StringTokenizer(s,":"); while(st.hasMoreTokens()){ result = true; job.setSiteHandle( (String)st.nextToken() ); job.setJobManager( st.hasMoreTokens() ? st.nextToken(): null ); } } // HMMM: String.indexOf() functions can be used in Jens HO. return result; } /** * Creates a temporary file and obtains its name. This method returns * the absolute path to a temporary file in the system's TEMP * directory. The file is guarenteed to be unique for the current * invocation of the virtual machine. * * FIXME: However, since we return a filename and not an opened file, race * conditions are still possible. * * @return the absolute path of a newly created temporary file. 
*/ private String getTempFilename(){ File f = null; try { f = File.createTempFile(PREFIX_TEMPORARY_FILE,SUFFIX_TEMPORARY_FILE); return f.getAbsolutePath(); } catch ( IOException e ) { throw new RuntimeException( "Unable to get handle to a temporary file :" + e.getMessage()); } } /** * Initializes the internal hash that collects environment variables. * These variables are set up to run the external helper application. * Environment variables come from two source. * *

 *
 *   1. Default environment variables, fixed, hard-coded.
 *   2. User environment variables, from properties.
*/ private void loadEnvironmentVariables(){ // load the default environment variables String value = null; mEnvVar = new HashMap(); mEnvVar.put("CLASSPATH",mProps.getProperty("java.class.path")); mEnvVar.put("JAVA_HOME",mProps.getProperty("java.home")); // set $LOGNAME and $USER if corresponding property set in JVM if ( (value = mProps.getProperty("user.name")) != null ) { mEnvVar.put("USER",value); mEnvVar.put("LOGNAME",value); } // set the $HOME if user.home is set if ( (value = mProps.getProperty("user.home")) != null ) mEnvVar.put("HOME",value); // set the $TMP if java.io.tmpdir is set if ( (value = mProps.getProperty("java.io.tmpdir")) != null ) mEnvVar.put("TMP",value); // set $TZ if user.timezone is set if ( (value = mProps.getProperty("user.timezone")) != null ) mEnvVar.put("TZ",value); // get hold of the environment variables that user might have set // and put them in the map overriding the variables already set. mEnvVar.putAll( mProps.matchingSubset(PREFIX_PROPERTIES,false) ); } /** * Generates an array of environment variables. The variables are kept * in an internal map. Converts the environment variables in the map * to the array format. * * @return array of enviroment variables set, or null if * the map is empty. * @see #loadEnvironmentVariables() */ private String[] getEnvArrFromMap(){ String result[] = null; // short-cut if ( mEnvVar == null || mEnvVar.isEmpty() ) return result; else result = new String[mEnvVar.size()]; Iterator it = mEnvVar.entrySet().iterator(); int i = 0; while(it.hasNext()){ Map.Entry entry = (Map.Entry)it.next(); result[i] = entry.getKey() + "=" + entry.getValue(); i++; } return result; } /** * Returns the int value corresponding to the string value passed. * * @param value the string value for keeping the temporary files. * * @return the corresponding int value. * @see #KEEP_ALWAYS * @see #KEEP_NEVER * @see #KEEP_ONERROR */ private int getKeepTMPValue(String value){ //default value is keep on error int val = KEEP_ONERROR; //sanity check of the string value if(value == null || value.length() == 0){ //return the default value return val; } value = value.trim(); if(value.equalsIgnoreCase("always")) val = KEEP_ALWAYS; if(value.equalsIgnoreCase("never")) val = KEEP_NEVER; return val; } /** * The main program that allows you to test. * FIXME: Test programs should have prefix Test.....java * * @param args the arguments * */ public static void main( String[] args ){ LogManagerFactory.loadSingletonInstance().setLevel(LogManager.DEBUG_MESSAGE_LEVEL); NonJavaCallout nj = new NonJavaCallout( ); Job s = new Job(); s.logicalName = "test"; s.namespace = "pegasus"; s.version = "1.01"; s.jobName = "test_ID00001"; List pools = new java.util.ArrayList(); pools.add("isi-condor");pools.add("isi-lsf"); nj.mapJob( s,pools ); System.out.println("Exec Pool return by site selector is " + s.getSiteHandle() ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/SiteSelectorFactory.java0000644000175000017500000000743511757531137030667 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.site; import edu.isi.pegasus.planner.selector.SiteSelector; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; /** * A factory class to load the appropriate type of Site Selector, as * specified by the user at runtime in properties. Each invocation of the * factory results in a SiteSelector being instantiated. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class SiteSelectorFactory { /** * The default package where the all the implementing classes provided with * the VDS reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.selector.site"; /** * The name of the class in the DEFAULT package, that corresponds to the * default site selector. */ public static final String DEFAULT_SITE_SELECTOR = "Random"; /** * Loads the implementing class corresponding to the mode specified by the user * at runtime in the properties file. A default replica selector is loaded * if property is not specified in the properties. * * @param bag the bag of objects that is required. * * @return the instance of the class implementing this interface. * * @exception SiteSelectorFactoryException that chains any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME * @see #DEFAULT_SITE_SELECTOR */ public static SiteSelector loadInstance( PegasusBag bag ) throws SiteSelectorFactoryException { PegasusProperties properties = ( PegasusProperties )bag.get( PegasusBag.PEGASUS_PROPERTIES ); String className = null; SiteSelector selector; //sanity check try{ if (properties == null) { throw new RuntimeException("Invalid properties passed"); } //figure out the implementing class //that needs to be instantiated. className = properties.getSiteSelectorMode(); className = ( className == null || className.trim().length() < 2) ? DEFAULT_SITE_SELECTOR : className; //prepend the package name if required className = (className.indexOf('.') == -1)? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className: //load directly className; //try loading the class dynamically DynamicLoader dl = new DynamicLoader(className); selector = ( SiteSelector ) dl.instantiate( new Object[ 0 ] ); selector.initialize( bag ); } catch(Exception e){ //chain the exception caught into the appropriate Factory Exception throw new SiteSelectorFactoryException( "Instantiating SiteSelector ", className, e ); } return selector; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/site/Group.java0000644000175000017500000001730211757531137026020 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.selector.site; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.TreeMap; /** * A site selector that groups jobs together on the basis of an identifier * specified in the DAX for the jobs, and schedules them on to the same site. * Currently, the identifier is the key group in the pegasus * profile namespace. All the jobs that do not have a group associated with them * are put in one default group and end up being scheduled on the same pool. * A limitation of this site selector is that it does not check whether all the * jobs in a group can be scheduled on a particular pool or not. It just checks whether * the first job can be or not. The reason for that is that after the grouping, the * selector just hands the first job in each group to the other site selectors * that work on jobs. Currently, it hands it to the Random Site Selector. * * In the DAX, a job tagged with a group will look as follows *
 * <pre>
 *    <profile namespace="pegasus" key="group">group-1</profile>
 *    <argument>-a top -T 6  -i <filename file="..."/>  -o <filename file="..."/></argument>
 *    ...
 * </pre>
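 *
 * A minimal usage sketch (assuming the site selector is chosen via the
 * pegasus.selector.site property, which SiteSelectorFactory resolves through
 * getSiteSelectorMode(); the bare class name Group is resolved against the
 * selector's default package):
 * <pre>
 *        pegasus.selector.site  Group
 * </pre>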
* * @author Karan Vahi * @author Gaurang Mehta * @author Mei-Hui Su * * @version $Revision: 2590 $ */ public class Group extends Abstract { /** * The description of the site selector. */ private static final String mDescription = "Site selector doing clustering on the basis of key group in pegasus namespace"; /** * The name of the group into which jobs are grouped if no group is * specified in the dax. */ private static final String mDefaultGroup = "default"; /** * The map containing the the jobs grouped by the key group. */ private Map mGroupMap; /** * The handle to the internal site selector that is used to schedule jobs * amongst the groups. */ private AbstractPerJob mSelector; /** * The default constructor. */ public Group() { mGroupMap = new TreeMap(); mSelector = new Random(); // mLogger = LogManager.getInstance(); } /** * Initializes the site selector. * * @param bag the bag of objects that is useful for initialization. * */ public void initialize( PegasusBag bag ){ super.initialize( bag ); mSelector.initialize( bag ); } /** * Returns the description of the site selector. * * @return description. */ public String description() { return mDescription; } /** * The call out to map a list of jobs on to the execution pools. A default * implementation is provided that internally calls mapJob2ExecPool(Job, * String,String,String) to map each of the jobs sequentially to an execution site. * The reason for this method is to support site selectors that * make their decision on a group of jobs i.e use backtracking to reach a good * decision. * The implementation that calls out to an executable using Runtime does not * implement this method, but relies on the default implementation defined * here. * * @param workflow the workflow that needs to be scheduled. * @param sites the list of String objects representing the * execution pools that can be used. * */ public void mapWorkflow( Graph workflow, List sites) { Job job; List l = null; int i = 0; for(Iterator it = workflow.nodeIterator();it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); job = ( Job )node.getContent(); //put the jobs into the map grouped by key VDS_GROUP_KEY insert(job); } //traverse through the group map and send off the first job //in each group to the internal site selector. for(Iterator it = mGroupMap.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); boolean defaultGroup = entry.getKey().equals( mDefaultGroup ); mLogger.log("[Group Selector]Mapping jobs in group " + entry.getKey(), LogManager.DEBUG_MESSAGE_LEVEL); l = (List)entry.getValue(); String msg = "\t{"; boolean first = true; for(Iterator it1 = l.iterator();it1.hasNext();){ msg += (first)? "" : ","; msg += ((Job)it1.next()).jobName ; first = false; } msg += "}"; mLogger.log(msg,LogManager.DEBUG_MESSAGE_LEVEL); //hand of the first job to the internal selector job = (Job)l.get(0); mSelector.mapJob( job, sites ); //traverse thru the remaining jobs in the group for(Iterator it1 = l.iterator();it1.hasNext();){ Job j = (Job)it1.next(); if ( defaultGroup ){ //each job in the group has to be //mapped individually mSelector.mapJob( j, sites); } else{ //mapping same as the one for //for the first job in group j.setSiteHandle( job.getSiteHandle() ); } } } } /** * Inserts the job into the group map. * * @param job the job to be inserted. */ private void insert(Job job){ Object obj = job.vdsNS.get(Pegasus.GROUP_KEY); if(obj != null && ((String)obj).equalsIgnoreCase(mDefaultGroup)){ //throw an exception? 
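 // yes - a user-assigned group literally named "default" would collide with
 // the selector's internal default bucket, so fail fast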
throw new RuntimeException( "The group name " + mDefaultGroup + " is a reserved keyword for the selector." + " Use another group name in your DAX" ); } String key = (obj == null)? //no group specified. set to default mDefaultGroup: //get the value from the profile (String)obj; if(mGroupMap.containsKey(key)){ //there is already a group associated. List l = (List)mGroupMap.get(key); l.add(job); } else{ //insert a new entry to the map List l = new LinkedList(); l.add(job); mGroupMap.put(key,l); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/SiteSelector.java0000644000175000017500000000505211757531137026364 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.partitioner.graph.Graph; import java.util.List; /** * * The interface for the Site Selector. Allows us to maps the workflows * to different sites. * * @author Karan Vahi * @author Jens-S. Vöckler * @author Gaurang Mehta * * * @version $Revision: 2576 $ */ public interface SiteSelector { /** * The version of the API of the Site Selector. */ public static final String VERSION = "2.0"; /** * The value for the pool handle, when the pool is not found. */ public static final String SITE_NOT_FOUND = "NONE"; /** * Initializes the site selector. * * @param bag the bag of objects that is useful for initialization. * */ public void initialize( PegasusBag bag ); /** * Maps the jobs in the workflow to the various grid sites. * The jobs are mapped by setting the site handle for the jobs. * * @param workflow the workflow in a Graph form. * * @param sites the list of String objects representing the * execution sites that can be used. */ public void mapWorkflow( Graph workflow, List sites ); /** * Maps the jobs in the workflow to the various grid sites. * The jobs are mapped by setting the site handle for the jobs. * * @param workflow the workflow. * * @param sites the list of String objects representing the * execution sites that can be used. */ public void mapWorkflow( ADag workflow, List sites ); /** * This method returns a String describing the site selection technique * that is being implemented by the implementing class. * * @return a short description */ public String description(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/TransformationSelector.java0000644000175000017500000000627311757531137030474 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector; /** * * This class is an abstract class for the Transformation Catalog Selector. * Its purpose is to provide a generic API to select one valid transformation * among the many transformations. * @author Gaurang Mehta * @version $Revision: 2079 $ * */ import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.common.util.FactoryException; import java.util.List; public abstract class TransformationSelector { public static final String PACKAGE_NAME = "edu.isi.pegasus.planner.selector.transformation"; protected LogManager mLogger; public TransformationSelector() { mLogger = LogManagerFactory.loadSingletonInstance(); } /** * Takes a list of TransformationCatalogEntry objects and returns 1 or many * TransformationCatalogEntry objects as a list, depending on the type of selection algorithm. * The Random and RoundRobin implementations ensure that only one entry is * returned, and should be run last when chaining multiple selectors. * @param tcentries List * @return List */ public abstract List getTCEntry( List tcentries ); /** * Loads the implementing class corresponding to the mode specified by the * user at runtime in the properties file. * * @param className String The name of the class that implements the mode. * It is the name of the class, not the complete name with * package. That is added by itself. * * @return TransformationSelector * * @throws FactoryException that nests any error that * might occur during the instantiation of the implementation. */ public static TransformationSelector loadTXSelector( String className ) throws FactoryException { //prepend the package name className = PACKAGE_NAME + "." + className; //try loading the class dynamically TransformationSelector ss = null; DynamicLoader dl = new DynamicLoader( className ); try { Object argList[] = new Object[0]; //argList[ 0 ] = ( path == null ) ? new String() : path; ss = ( TransformationSelector ) dl.instantiate( argList ); } catch ( Exception e ) { throw new FactoryException( "Instantiating Transformation Selector", className, e ); } return ss; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/ReplicaSelector.java0000644000175000017500000000505211757531137026364 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.selector; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; /** * A prototypical interface for a replica selector. It would be changed when * Pegasus interfaces with the new RC API. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2572 $ */ public interface ReplicaSelector { /** * The version of this API. */ public static final String VERSION ="1.5"; /** * The local site handle. */ public static final String LOCAL_SITE_HANDLE = "local"; /** * Selects a replica amongst all the replicas returned by the implementing * Replica Mechanism. It should select all the locations for which the site * attribute matches to the preffered site passed. If no match on the * preffered site is found, it is upto the implementation to select a replica * and return it. * * This function is called to determine if a file does exist on the output * pool or not beforehand. We need all the locations to ensure that we are * able to make a match if it so exists. * * @param rl the ReplicaLocation object containing all * the pfn's associated with that LFN. * @param prefferedSite the preffered site for picking up the replicas. * @param allowLocalFileURLs indicates whether Replica Selector can select a replica * on the local site / submit host. * * @return ReplicaLocation corresponding to the replicas selected. * * @see org.griphyn.cPlanner.classes.ReplicaLocation */ public abstract ReplicaLocation selectReplicas( ReplicaLocation rl, String prefferedSite, boolean allowLocalFileURLs ); /** * Selects a single replica amongst all the replicas returned by the implementing * Replica Mechanism. If more than one replica is found to be matching the * preffered site, a random replica is picked up from the matching replicas. * Else, in case of no match any replica maybe returned. * * @param rl the ReplicaLocation object containing all * the pfn's associated with that LFN. * @param prefferedSite the preffered site for picking up the replicas. * @param allowLocalFileURLs indicates whether Replica Selector can select a replica * on the local site / submit host. * * @return ReplicaCatalogEntry corresponding to the location selected. * * @see org.griphyn.cPlanner.classes.ReplicaLocation */ public abstract ReplicaCatalogEntry selectReplica( ReplicaLocation rl, String prefferedSite, boolean allowLocalFileURLs ); /** * Returns a short description of the replica selector, that is being * implemented by the implementing class. * * @return string corresponding to the description. */ public abstract String description(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/replica/0000755000175000017500000000000011757531667024541 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/replica/Local.java0000644000175000017500000002172211757531137026432 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.selector.replica; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.selector.ReplicaSelector; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.common.PegRandom; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import java.util.ArrayList; import java.util.Iterator; /** * This replica selector prefers only replicas that reside on the local host * and start with a file: URL scheme. It is useful when you want to stage in * files to a remote site from your submit host using the Condor file transfer * mechanism. * *

 * <p>
 * In order to use the replica selector implemented by this class,
 * <pre>
 *        - the property pegasus.selector.replica must be set to value Local
 * </pre>
* * * @see org.griphyn.cPlanner.transfer.implementation.Condor * * @author Karan Vahi * @version $Revision: 4292 $ */ public class Local implements ReplicaSelector { /** * A short description of the replica selector. */ private static final String mDescription = "Local from submit host"; /** * Sanity Check Error Message. */ public static final String SANITY_CHECK_ERROR_MESSAGE_PREFIX = "Local Replica Selector selects only local file URL's. Set transfers to run on submit host."; /** * The scheme name for file url. */ protected static final String FILE_URL_SCHEME = "file:"; /** * The handle to the logging object that is used to log the various debug * messages. */ protected LogManager mLogger; /** * The properties object containing the properties passed to the planner. */ protected PegasusProperties mProps; /** * The overloaded constructor, that is called by load method. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * * */ public Local( PegasusProperties properties ){ mProps = properties; mLogger = LogManagerFactory.loadSingletonInstance( properties ); } /** * Selects a random replica from all the replica's that have their * site handle set to local and the pfn's start with a file url scheme. * * @param rl the ReplicaLocation object containing all * the pfn's associated with that LFN. * @param preferredSite the preffered site for picking up the replicas. * @param allowLocalFileURLs indicates whether Replica Selector can select a replica * on the local site / submit host. * * @return ReplicaCatalogEntry corresponding to the location selected. * * @see org.griphyn.cPlanner.classes.ReplicaLocation */ public ReplicaCatalogEntry selectReplica( ReplicaLocation rl, String preferredSite, boolean allowLocalFileURLs ){ //sanity check if( !allowLocalFileURLs && !preferredSite.equals( ReplicaSelector.LOCAL_SITE_HANDLE )){ StringBuffer message = new StringBuffer(); message.append( SANITY_CHECK_ERROR_MESSAGE_PREFIX ). append( "For LFN " ).append( rl.getLFN() ). append( " (preferred site , allow local urls) is set to "). append( "(").append( preferredSite ).append( "," ).append( allowLocalFileURLs ).append( ")" ); throw new RuntimeException( message.toString() ); } ReplicaCatalogEntry rce; ArrayList prefPFNs = new ArrayList(); int locSelected; String site = null; // mLogger.log("Selecting a pfn for lfn " + lfn + "\n amongst" + locations , // LogManager.DEBUG_MESSAGE_LEVEL); for ( Iterator it = rl.pfnIterator(); it.hasNext(); ) { rce = ( ReplicaCatalogEntry ) it.next(); site = rce.getResourceHandle(); if( site == null ){ //skip to next replica continue; } //check if has pool attribute as local, and at same time //start with a file url scheme if( site.equals( "local" ) && rce.getPFN().startsWith( FILE_URL_SCHEME ) ){ prefPFNs.add( rce ); } } if ( prefPFNs.isEmpty() ) { //select a random location from //all the matching locations //in all likelihood all the urls were file urls and none //were associated with the preference pool. 
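 // prefPFNs is empty: no replica combined a "local" site handle with a
 // file: URL, so fail loudly rather than return an unusable replica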
throw new RuntimeException( "Unable to select any location on local site from " + "the list passed for lfn " + rl.getLFN() ); } else { //select a random location //amongst the locations //on the preference pool int length = prefPFNs.size(); //System.out.println("No of locations found at pool " + prefPool + " are " + length); locSelected = PegRandom.getInteger( length - 1 ); rce = ( ReplicaCatalogEntry ) prefPFNs.get( locSelected ); } return rce; } /** * This chooses a location amongst all the locations returned by the * Replica Mechanism. If a location is found with re/pool attribute same * as the preference pool, it is taken. This returns all the locations which * match to the preference pool. This function is called to determine if a * file does exist on the output pool or not beforehand. We need all the * location to ensure that we are able to make a match if it so exists. * Else a random location is selected and returned * * @param rl the ReplicaLocation object containing all * the pfn's associated with that LFN. * @param preferredSite the preffered site for picking up the replicas. * @param allowLocalFileURLs indicates whether Replica Selector can select a replica * on the local site / submit host. * * @return ReplicaLocation corresponding to the replicas selected. * * @see org.griphyn.cPlanner.classes.ReplicaLocation */ public ReplicaLocation selectReplicas( ReplicaLocation rl, String preferredSite, boolean allowLocalFileURLs ){ //sanity check if( !allowLocalFileURLs && !preferredSite.equals( ReplicaSelector.LOCAL_SITE_HANDLE )){ StringBuffer message = new StringBuffer(); message.append( SANITY_CHECK_ERROR_MESSAGE_PREFIX ). append( "For LFN " ).append( rl.getLFN() ). append( " (preferred site , allow local urls) is set to "). append( "(").append( preferredSite ).append( "," ).append( allowLocalFileURLs ).append( ")" ); throw new RuntimeException( message.toString() ); } String lfn = rl.getLFN(); ReplicaLocation result = new ReplicaLocation(); result.setLFN( rl.getLFN() ); ReplicaCatalogEntry rce; String site; int noOfLocs = 0; for ( Iterator it = rl.pfnIterator(); it.hasNext(); ) { noOfLocs++; rce = ( ReplicaCatalogEntry ) it.next(); site = rce.getResourceHandle(); if ( site != null && site.equals( preferredSite )) { result.addPFN( rce ); } else if ( site == null ){ mLogger.log( " pool attribute not specified for the location objects" + " in the Replica Catalog", LogManager.WARNING_MESSAGE_LEVEL); } } if ( result.getPFNCount() == 0 ) { //means we have to choose a random location between 0 and (noOfLocs -1) int locSelected = PegRandom.getInteger( noOfLocs - 1 ); rce = ( ReplicaCatalogEntry ) rl.getPFN(locSelected ); result.addPFN( rce ); } return result; } /** * Returns a short description of the replica selector. * * @return string corresponding to the description. */ public String description(){ return mDescription; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/replica/Default.java0000644000175000017500000002757311757531137026776 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.replica; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.selector.ReplicaSelector; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.common.PegRandom; import edu.isi.pegasus.planner.common.Utility; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import java.util.ArrayList; import java.util.Iterator; /** * The default replica selector that is used if none is specified by the user. * This gives preference to a replica residing on the same site as the site * where it is required to be staged to. If there is no such replica, then a * random replica is selected. * *

 * <p>
 * In order to use the replica selector implemented by this class,
 * <pre>
 *        - the property pegasus.selector.replica must be set to value Default, or
 *          the property should be left undefined in the properties.
 * </pre>
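 *
 * For example (illustrative, with hypothetical site names): if the preferred
 * site is isi and the catalog holds PFNs on isi and ncsa, a random isi replica
 * is chosen; a file: URL on isi is returned as soon as it is encountered.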
* * @author Karan Vahi * @version $Revision: 2572 $ */ public class Default implements ReplicaSelector { /** * A short description of the replica selector. */ private static String mDescription = "Default"; /** * The scheme name for file url. */ protected static final String FILE_URL_SCHEME = "file:"; /** * The handle to the logging object that is used to log the various debug * messages. */ protected LogManager mLogger; /** * The properties object containing the properties passed to the planner. */ protected PegasusProperties mProps; /** * The overloaded constructor, that is called by load method. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * * */ public Default( PegasusProperties properties ){ mProps = properties; mLogger = LogManagerFactory.loadSingletonInstance( properties ); } /** * This chooses a location amongst all the locations returned by the replica * location service. If a location is found with re attribute same as the * preference pool, it is taken. Else a random location is selected and * returned. If more than one location for the lfn is found at the preference * pool, then also a random location amongst the ones at the preference pool * is selected. * * @param rl the ReplicaLocation object containing all * the pfn's associated with that LFN. * @param preferredSite the preffered site for picking up the replicas. * @param allowLocalFileURLs indicates whether Replica Selector can select a replica * on the local site / submit host. * * @return ReplicaCatalogEntry corresponding to the location selected. * * @see org.griphyn.cPlanner.classes.ReplicaLocation */ public ReplicaCatalogEntry selectReplica( ReplicaLocation rl, String preferredSite, boolean allowLocalFileURLs ){ ReplicaCatalogEntry rce; ArrayList prefPFNs = new ArrayList(); int locSelected; String site = null; //create a shallow clone as we will be removing //using Iterator.remove() methods rl = (ReplicaLocation)rl.clone(); mLogger.log("Selecting a pfn for lfn " + rl.getLFN() + "\n amongst" + rl.getPFNList() , LogManager.DEBUG_MESSAGE_LEVEL); for ( Iterator it = rl.pfnIterator(); it.hasNext(); ) { rce = ( ReplicaCatalogEntry ) it.next(); site = rce.getResourceHandle(); //check if equal to the execution pool if ( site != null && site.equals( preferredSite ) ) { prefPFNs.add( rce ); //return the one with file url for ligo stuff //is temporary till new api coded if ( rce.getPFN().startsWith( FILE_URL_SCHEME ) ) { //this is the one which is reqd for ligo //return instead of break; return rce; } } //check if we need to remove a file url or not else if ( removeFileURL(rce, preferredSite, allowLocalFileURLs) ){ it.remove(); } /* mLogger.log( "pool attribute not specified for the location objects" + " in the Replica Catalog",LogManager.WARNING_MESSAGE_LEVEL); */ } int noOfLocs = rl.getPFNCount(); if ( noOfLocs == 0 ) { //in all likelihood all the urls were file urls and none //were associated with the preference pool. 
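 // noOfLocs == 0 means removeFileURL() pruned every PFN during the scan
 // above, leaving nothing to choose from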
throw new RuntimeException( "Unable to select any location from " + "the list passed for lfn " + rl.getLFN() ); } if ( prefPFNs.isEmpty() ) { //select a random location from //all the matching locations locSelected = PegRandom.getInteger( noOfLocs - 1 ); rce = ( ReplicaCatalogEntry ) rl.getPFN( locSelected ); } else { //select a random location //amongst the locations //on the preference pool int length = prefPFNs.size(); //System.out.println("No of locations found at pool " + prefPool + " are " + length); locSelected = PegRandom.getInteger( length - 1 ); rce = ( ReplicaCatalogEntry ) prefPFNs.get( locSelected ); //user has specified that //he wants to create symbolic //links instead of going thru the //grid ftp server //create symbolic links instead of going through gridftp server //moved to Transfer Engine Karan June 8th, 2009 /* if (mUseSymLinks) { rce = replaceProtocolFromURL( rce ); } */ } return rce; } /** * This chooses a location amongst all the locations returned by the * Replica Mechanism. If a location is found with re/pool attribute same * as the preference pool, it is taken. This returns all the locations which * match to the preference pool. This function is called to determine if a * file does exist on the output pool or not beforehand. We need all the * location to ensure that we are able to make a match if it so exists. * Else a random location is selected and returned * * @param rl the ReplicaLocation object containing all * the pfn's associated with that LFN. * @param preferredSite the preffered site for picking up the replicas. * @param allowLocalFileURLs indicates whether Replica Selector can select a replica * on the local site / submit host. * * @return ReplicaLocation corresponding to the replicas selected. * * @see org.griphyn.cPlanner.classes.ReplicaLocation */ public ReplicaLocation selectReplicas( ReplicaLocation rl, String preferredSite, boolean allowLocalFileURLs ){ String lfn = rl.getLFN(); ReplicaLocation result = new ReplicaLocation(); result.setLFN( rl.getLFN() ); ReplicaCatalogEntry rce; String site; String ucAttrib; int noOfLocs = 0; for ( Iterator it = rl.pfnIterator(); it.hasNext(); ) { noOfLocs++; rce = ( ReplicaCatalogEntry ) it.next(); site = rce.getResourceHandle(); if ( site != null && site.equals( preferredSite )) { result.addPFN( rce ); } else if ( site == null ){ mLogger.log( " pool attribute not specified for the location objects" + " in the Replica Catalog", LogManager.WARNING_MESSAGE_LEVEL); } } if ( result.getPFNCount() == 0 ) { //means we have to choose a random location between 0 and (noOfLocs -1) int locSelected = PegRandom.getInteger( noOfLocs - 1 ); rce = ( ReplicaCatalogEntry ) rl.getPFN(locSelected ); result.addPFN( rce ); } return result; } /** * A convenience function that determines whether we should be removing a * file URL from replica selection or not. The file urls make sense only * *
     * <pre>
     *      - if associated with the preference site, or
     *      - if local file URLs are allowed and the rce is associated
     *        with the local site
     * </pre>
* * @param rce the ReplicaCatalogEntry object. * @param preferredSite the preferred site. * @param allowLocalFileURLs indicates whether Replica Selector can select a replica * on the local site / submit host. * * @return boolean */ public boolean removeFileURL( ReplicaCatalogEntry rce, String preferredSite, boolean allowLocalFileURLs ){ return this.removeFileURL( rce.getPFN(), rce.getResourceHandle(), preferredSite, allowLocalFileURLs ); } /** * A convenience function that determines whether we should be removing a * file URL from replica selection or not. The file urls make sense only * *
     * <pre>
     *      - if associated with the preference site, or
     *      - if local file URLs are allowed and the replica is associated
     *        with the local site
     * </pre>
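     *
     * For example (illustrative values):
     * <pre>
     *   for pfn = "file:///scratch/f.a" :
     *     site = "local", preferredSite = "local"                         -> kept
     *     site = "local", preferredSite = "isi", allowLocalFileURLs=true  -> kept
     *     site = null                                                     -> removed
     * </pre>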
* * @param pfn the file url * @param site the site associated with the pfn. * @param preferredSite the preferred site. * @param allowLocalFileURLs indicates whether Replica Selector can select a replica * on the local site / submit host. * * @return boolean */ protected boolean removeFileURL( String pfn, String site, String preferredSite, boolean allowLocalFileURLs ){ boolean result = false; if ( !pfn.startsWith( FILE_URL_SCHEME ) ){ //not a file url . dont remove return result; } if( site == null ){ //remove the url and continue //nothing can be done result = true; } else if( !site.equalsIgnoreCase( preferredSite ) ){ //the URL is not from a preferred site. //we can still use it if local file urls are allowed //and url is from a local site. result = !( allowLocalFileURLs && site.equals( LOCAL_SITE_HANDLE ) ); } return result; } /** * Returns a short description of the replica selector. * * @return string corresponding to the description. */ public String description(){ return mDescription; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/replica/ReplicaSelectorFactory.java0000644000175000017500000001412511757531137032007 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.replica; import edu.isi.pegasus.planner.selector.ReplicaSelector; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; /** * A factory class to load the appropriate type of Replica Selector, as * specified by the user at runtime in properties. Each invocation of the * factory results in a ReplicaSelector being instantiated. * * @author Karan Vahi * @version $Revision: 2567 $ */ public class ReplicaSelectorFactory { /** * The default package where the all the implementing classes provided with * the VDS reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.selector.replica"; /** * The name of the class in the DEFAULT package, that corresponds to the * default replica selector. */ public static final String DEFAULT_REPLICA_SELECTOR = "Default"; /** * A no hassle factory method that loads the replica selector specified * in the properties. The properties are obtained from the property * singleton. A default replica selector is loaded if property is not * specified in the properties. * * @return the instance of the class implementing this interface. * * @exception ReplicaSelectorFactoryException that chains any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME * @see #DEFAULT_REPLICA_SELECTOR */ public static ReplicaSelector loadInstance() throws ReplicaSelectorFactoryException { return loadInstance( PegasusProperties.getInstance()); } /** * Loads the implementing class corresponding to the mode specified by the user * at runtime in the properties file. A default replica selector is loaded * if property is not specified in the properties. 
* * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * * @return the instance of the class implementing this interface. * * @exception ReplicaSelectorFactoryException that chains any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME * @see #DEFAULT_REPLICA_SELECTOR */ public static ReplicaSelector loadInstance(PegasusProperties properties) throws ReplicaSelectorFactoryException { String className = null; //sanity check try{ if(properties == null){ throw new RuntimeException("Invalid properties passed"); } //figure out the implementing class //that needs to be instantiated. className = properties.getReplicaSelector(); className = (className == null || className.trim().length() < 2)? DEFAULT_REPLICA_SELECTOR: className; } catch( Exception e ){ throw new ReplicaSelectorFactoryException( "Instantiating ReplicaSelector ",e ); } return loadInstance(properties,className); } /** * Loads the implementing class corresponding to the class. If the package * name is not specified with the class, then class is assumed to be * in the DEFAULT_PACKAGE. The properties object passed should not be null. * * @param className the name of the class that implements the mode. It is the * name of the class, not the complete name with package. That * is added by itself. * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * * @return the instance of the class implementing this interface. * * @exception ReplicaSelectorFactoryException that chains any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static ReplicaSelector loadInstance(PegasusProperties properties, String className) throws ReplicaSelectorFactoryException{ ReplicaSelector rs = null; try{ //some sanity checks if(properties == null){ throw new RuntimeException("Invalid properties passed"); } if(className == null){ throw new RuntimeException("Invalid className specified"); } //prepend the package name className = (className.indexOf('.') == -1)? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className: //load directly className; //try loading the class dynamically DynamicLoader dl = new DynamicLoader(className); Object argList[] = new Object[1]; argList[0] = properties; rs = (ReplicaSelector) dl.instantiate(argList); } catch(Exception e){ //chain the exception caught into the appropriate Factory Exception throw new ReplicaSelectorFactoryException( "Instantiating ReplicaSelector ", className, e ); } return rs; } } ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/replica/ReplicaSelectorFactoryException.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/replica/ReplicaSelectorFactoryException.0000644000175000017500000000661111757531137033025 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.selector.replica; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating ReplicaSelector implementations. * * @author Karan Vahi * @version $Revision: 2079 $ */ public class ReplicaSelectorFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Replica Selector"; /** * Constructs a ReplicaSelectorFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public ReplicaSelectorFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a ReplicaSelectorFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public ReplicaSelectorFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a ReplicaSelectorFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public ReplicaSelectorFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a ReplicaSelectorFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public ReplicaSelectorFactoryException(String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/replica/Restricted.java0000644000175000017500000003254411757531137027514 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.selector.replica; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.common.PegRandom; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import java.util.ArrayList; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.Set; import java.util.StringTokenizer; /** * A replica selector, that allows the user to specify good sites and bad sites * for staging in data to a compute site. * *

 * <p>
 * A good site for a compute site X is a preferred site from which replicas
 * should be staged to site X. If more than one good site has a particular
 * replica, then a random site is selected amongst these preferred sites.
 * <p>
 * A bad site for a compute site X is a site from which replicas should not be
 * staged. The reason for not accessing a replica from a bad site can vary from
 * the link being down, to the user not having permissions on that site's data.
 * <p>
 * The good | bad sites are specified by the properties
 * pegasus.selector.replica.*.prefer.stagein.sites | pegasus.selector.replica.*.ignore.stagein.sites,
 * where the * in the property name denotes the name of the compute site.
 * A * in the property key is taken to mean all sites.
 * <p>
 * The pegasus.selector.replica.*.prefer.stagein.sites property takes precedence over
 * the pegasus.selector.replica.*.ignore.stagein.sites property i.e. if for a site X, a site Y is
 * specified both in the ignored and the preferred set, then site Y is treated
 * only as a preferred site for site X.
 * <p>
 * In order to use the replica selector implemented by this class,
 * <pre>
 *        - the property pegasus.selector.replica must be set to value Restricted.
 * </pre>
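 *
 * An illustrative snippet (the site names isi and ncsa are hypothetical):
 * <pre>
 *        pegasus.selector.replica.isi.prefer.stagein.sites    ncsa
 *        pegasus.selector.replica.*.ignore.stagein.sites      ncsa
 * </pre>
 * With these settings, ncsa replicas are preferred when staging to isi, and
 * ignored when staging to every other site.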
* * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2572 $ */ public class Restricted extends Default { /** * A short description of the replica selector. */ private static final String mDescription = "Restricted"; /** * The property prefix for all properties used by this selector. */ private static final String PROPERTY_PREFIX = "pegasus.selector.replica"; /** * The property suffix for determining the preferred sites for a site x. */ private static final String PROPERTY_PREFER_SUFFIX = "prefer.stagein.sites"; /** * The property suffix for determining the ignored sites for a site x. */ private static final String PROPERTY_IGNORE_SUFFIX = "ignore.stagein.sites"; /** * A Map indexed by site handles, that contains a set of site handles. * The sites in the set are the sites from which to prefer data transfers to * the site referred to by key of the map. */ private Map mPreferredSitesMap; /** * The set of preferred sites, that are preferred stagein sites for all sites. * Referred to by "pegasus.selector.replica.*.prefer.sites" property. */ private Set mGlobalPreferredSites; /** * A Map indexed by site handles, that contains a set of site handles. * The sites in the set are the sites from which to ignore data transfers to * the site referred to by key of the map. */ private Map mIgnoredSitesMap; /** * The Set of ignored sites, that are ignored for selecting replicas for all * sites. Referred to by "pegasus.selector.replica.*.default.sites" property. */ private Set mGlobalIgnoredSites; /** * The overloaded constructor, that is called by load method. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. */ public Restricted(PegasusProperties properties) { super(properties); mIgnoredSitesMap = new HashMap(15); mPreferredSitesMap = new HashMap(15); mGlobalIgnoredSites = getSitesSet(mProps.getAllIgnoredSites()); mGlobalPreferredSites = getSitesSet(mProps.getAllPreferredSites()); } /** * This chooses a location amongst all the locations returned by the replica * location service. If a location is found with re attribute same as the * preference pool, it is taken. Else a random location is selected and * returned. If more than one location for the lfn is found at the preference * pool, then also a random location amongst the ones at the preference pool * is selected. * * @param rl the ReplicaLocation object containing all * the pfn's associated with that LFN. * @param preferredSite the preffered site for picking up the replicas. * @param allowLocalFileURLs indicates whether Replica Selector can select a replica * on the local site / submit host. * * @return ReplicaCatalogEntry corresponding to the location selected. 
* * @see org.griphyn.cPlanner.classes.ReplicaLocation */ public ReplicaCatalogEntry selectReplica( ReplicaLocation rl, String preferredSite, boolean allowLocalFileURLs ){ String lfn = rl.getLFN(); String site; ArrayList prefLocs = new ArrayList(); int locSelected; //create a shallow clone as we will be removing //using Iterator.remove() methods rl = (ReplicaLocation)rl.clone(); //build state on the basis of preferred sites populateSiteMaps( preferredSite ); mLogger.log( "[RestrictedReplicaSelector] Selecting a pfn for lfn " + lfn + "\n amongst" + rl , LogManager.DEBUG_MESSAGE_LEVEL ); ReplicaCatalogEntry rce; for ( Iterator it = rl.pfnIterator(); it.hasNext(); ) { rce = ( ReplicaCatalogEntry ) it.next(); site = rce.getResourceHandle(); //check if equal to the execution site //or site is preferred to stage to execution site. if ( prefer( site, preferredSite ) ) { //check for file URL if( this.removeFileURL(rce, preferredSite, allowLocalFileURLs) ){ it.remove(); } else{ if ( rce.getPFN().startsWith( FILE_URL_SCHEME ) ) { //this is the one which is reqd for ligo //return the location instead of breaking return rce; } prefLocs.add( rce ); } } //remove a URL with a site that is //to be ignored for staging data to any site. // or if it is a file url else if ( ignore( site, preferredSite ) || this.removeFileURL( rce, preferredSite, allowLocalFileURLs)) { it.remove(); } } int noOfLocs = rl.getPFNCount(); if ( noOfLocs == 0 ) { //in all likelihood all the urls were file urls and //none were associated with the preference pool. //replica not selected throw new RuntimeException( "Unable to select any location from " + "the list passed for lfn " + lfn ); } if ( prefLocs.isEmpty() ) { //select a random location from all the matching locations locSelected = PegRandom.getInteger( noOfLocs - 1 ); rce = rl.getPFN( locSelected ); } else { //select a random location amongst all the preferred locations int preferredSize = prefLocs.size(); locSelected = PegRandom.getInteger( preferredSize - 1 ); rce = ( ReplicaCatalogEntry ) prefLocs.get( locSelected ); //create symbolic links instead of going through gridftp server //moved to Transfer Engine Karan June 8th, 2009 /* if (mUseSymLinks) { rce = replaceProtocolFromURL( rce ); } */ } return rce; } /** * Returns a short description of the replica selector. * * @return string corresponding to the description. */ public String description(){ return mDescription; } /** * Returns a boolean indicating whether a source site is to be preffered for * staging to a destination site * * @param source the source site. * @param destination the destination site. * * @return true if source is a preferred site for staging to destination, * else false. */ protected boolean prefer(String source, String destination){ boolean result = false; Set s; if(mPreferredSitesMap.containsKey(destination)){ s = (Set)mPreferredSitesMap.get(destination); result = s.contains(source); } if(!result){ //check for source in global preferred sites result = globallyPreferred(source); } return result; } /** * Returns a boolean indicating whether a source site is to be ignored for * staging to a destination site * * @param source the source site. * @param destination the destination site. * * @return true if source is tp be ignored while staging to destination, * else false. 
*/ protected boolean ignore(String source, String destination){ boolean result = false; Set s; if(mIgnoredSitesMap.containsKey(destination)){ s = (Set)mIgnoredSitesMap.get(destination); result = s.contains(source); } if(!result){ //check for source in global preferred sites result = globallyIgnored(source); } return result; } /** * Returns a boolean indicating whether a site is a preferred replica source * for all compute sites. * * @param site the site to test for. * * @return boolean. */ protected boolean globallyPreferred(String site){ return mGlobalPreferredSites.contains(site); } /** * Returns a boolean indicating whether a site is to be ignored as a replica * source for all compute sites. * * @param site the site to test for. * * @return boolean. */ protected boolean globallyIgnored(String site){ return mGlobalIgnoredSites.contains(site); } /** * Returns the name of the property, for a particular site X. The value of * the property contains a comma separated list of site handles that are * to be ignored|preferred while selecting replicas to stage to the site X. * * @param site the site X. * @param suffix the property suffix to be applied. * * @return the name of the property. */ protected String getProperty(String site, String suffix){ StringBuffer sb = new StringBuffer(); sb.append(this.PROPERTY_PREFIX).append('.') .append(site).append('.').append(suffix); return sb.toString(); } /** * Builds up the set of preferred and ignored sites for a site. * * @param site the site for which to identify the preferred and ignored * sites. * */ private void populateSiteMaps(String site){ //check to see if we already have an entry if(mPreferredSitesMap.containsKey(site)){ //we already have computed the site return; } //build up preferred sites for site String name = getProperty(site,this.PROPERTY_PREFER_SUFFIX); Set p = this.getSitesSet(mProps.getProperty(name)); mPreferredSitesMap.put(site,p); //build up ignored sites for site name = getProperty(site,this.PROPERTY_IGNORE_SUFFIX); Set i = this.getSitesSet(mProps.getProperty(name)); mIgnoredSitesMap.put(site,i); } /** * Returns a set of third party sites. An empty set is returned if value is * null. * * @param value the comma separated list in the properties file. * * @return Set containing the names of the pools. */ private Set getSitesSet(String value) { Set set = new LinkedHashSet(); String site; if (value == null || value.length() == 0) { return set; } for (StringTokenizer st = new StringTokenizer(value, ","); st.hasMoreTokens();){ site = (String) st.nextToken(); set.add(site); } return set; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/replica/Regex.java0000644000175000017500000002642111757531137026453 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.selector.replica; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.common.PegRandom; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import java.util.Properties; import java.util.ArrayList; import java.util.Iterator; import java.util.SortedSet; import java.util.TreeSet; import java.util.regex.Pattern; /** * A replica selector that allows the user to specify regex expressions that * can be used to rank the various PFNs returned from the Replica Catalog for a * particular LFN. This replica selector selects the highest ranked PFN i.e. the * replica with the lowest rank value. * *

 * <p>
 * The regular expressions are assigned different ranks that determine
 * the order in which the expressions are employed. The rank values for
 * the regexes can be expressed in user properties using the property
 * pegasus.selector.replica.regex.rank.[value]
 *
 * The value is an integer that denotes the rank of an expression, with
 * a rank value of 1 being the highest rank.
 * <p>
 * A thing to note is that before applying any regular expressions on the PFNs,
 * the file URLs that don't match the preferred site are explicitly filtered out.
 * <p>
 * In order to use the replica selector implemented by this class,
 * <pre>
 *        - the property pegasus.selector.replica must be set to value Regex
 * </pre>
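 *
 * An illustrative snippet (the expressions and ranks are hypothetical):
 * <pre>
 *        pegasus.selector.replica.regex.rank.1    file://.*
 *        pegasus.selector.replica.regex.rank.2    gsiftp://.*
 * </pre>
 * Here a PFN matching file://.* outranks one matching gsiftp://.*, since a
 * rank value of 1 is the highest rank.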
* * @author Karan Vahi * * @version $Revision: 2572 $ */ public class Regex extends Default { /** * A short description of the replica selector. */ private static final String mDescription = "Regex"; /** * The property prefix for all Regex rank property. */ private static final String PROPERTY_PREFIX = "pegasus.selector.replica.regex.rank."; /** * The highest value of rank. In terms of integers , the lower the int higher * the rank with 1 being the highest value. */ private static final int HIGHEST_RANK_VALUE = 1; /** * The lowest rank value */ private static final int LOWEST_RANK_VALUE = Integer.MAX_VALUE; /** * The Set of regular expressions that orders the regex expressions to use * in ascending order. */ private SortedSet mSortedRegexSet; /** * The overloaded constructor, that is called by load method. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. */ public Regex(PegasusProperties properties) { super(properties); mSortedRegexSet = getRegexSet( properties.matchingSubset( Regex.PROPERTY_PREFIX, false )); mLogger.log( "[RegexReplicaSelector] User Provided Ranked regexes are " + mSortedRegexSet, LogManager.DEBUG_MESSAGE_LEVEL ); } /** * This chooses a location amongst all the locations returned by the replica * location service. If a location is found with re attribute same as the * preference pool, it is taken. Else a random location is selected and * returned. If more than one location for the lfn is found at the preference * pool, then also a random location amongst the ones at the preference pool * is selected. * * @param rl the ReplicaLocation object containing all * the pfn's associated with that LFN. * @param preferredSite the preffered site for picking up the replicas. * @param allowLocalFileURLs indicates whether Replica Selector can select a replica * on the local site / submit host. * * @return ReplicaCatalogEntry corresponding to the location selected. * * @see org.griphyn.cPlanner.classes.ReplicaLocation */ public ReplicaCatalogEntry selectReplica( ReplicaLocation rl, String preferredSite, boolean allowLocalFileURLs ){ String lfn = rl.getLFN(); String site; ArrayList prefLocs = new ArrayList(); int locSelected; //create a shallow clone as we will be removing //using Iterator.remove() methods rl = (ReplicaLocation)rl.clone(); //log message StringBuffer sb = new StringBuffer(); sb.append( "[RegexReplicaSelector] Selecting a pfn for lfn ").append( lfn ). append( " at site ").append( preferredSite ).append( "\n amongst "). append( rl ); mLogger.log( sb.toString() , LogManager.DEBUG_MESSAGE_LEVEL ); ReplicaCatalogEntry selectedRCE = null; Rank lowestRank = new Rank( Regex.LOWEST_RANK_VALUE, ".*" ); for ( Iterator it = rl.pfnIterator(); it.hasNext(); ) { ReplicaCatalogEntry rce = ( ReplicaCatalogEntry ) it.next(); site = rce.getResourceHandle(); String pfn = rce.getPFN(); //if a PFN starts with file url and does //not match the preferredSite ignore. 
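 // removeFileURL() (inherited from Default) flags file: URLs that are
 // unusable from the preferred site; such PFNs are dropped before ranking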
if( this.removeFileURL( rce, preferredSite, allowLocalFileURLs) ){ //remove the url and continue it.remove(); continue; } //System.out.println( "PFN is " + pfn ); //apply the various regexes till you get the lowest rank value of 1 int lowestRankValue = lowestRank.getRank(); for( Iterator<Rank> regIt = mSortedRegexSet.iterator(); regIt.hasNext(); ){ Rank r = regIt.next(); //System.out.println( "Applying regex " + r ); if( r.matches( pfn ) ){ //System.out.println( "Rank for pfn " + pfn + " is " + r.getRank() ); if( r.getRank() < lowestRankValue ){ selectedRCE = rce; lowestRank = r; lowestRankValue = r.getRank(); //if the lowest rank value is 1, we //have the highest ranked replica if( lowestRankValue == Regex.HIGHEST_RANK_VALUE ){ break; } } } } } int numLocs = rl.getPFNCount(); if ( selectedRCE == null ){ if ( numLocs == 0 ) { //in all likelihood all the urls were file urls and //none were associated with the preference pool. //replica not selected throw new RuntimeException( "Unable to select any location from " + "the list passed for lfn " + lfn ); } else{ //select a random location from all the matching locations selectedRCE = rl.getPFN( PegRandom.getInteger( numLocs - 1 ) ); } } //log message sb = new StringBuffer(); sb.append( "[RegexReplicaSelector] Selected for LFN " ).append( rl.getLFN() ). append( " " ).append( selectedRCE ).append( " matching " ).append( lowestRank.getRegex() ); mLogger.log( sb.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); return selectedRCE; } /** * Returns a short description of the replica selector. * * @return string corresponding to the description. */ public String description(){ return mDescription; } /** * Returns a sorted set containing the various Patterns pre-compiled. * The order in the set determines the order in which the patterns are * applied on the PFNs. * * @param properties with the key being an integer specifying the rank * and the value being the regex expression to be applied. * * @return SortedSet of Rank objects, sorted by ascending rank value. */ private SortedSet<Rank> getRegexSet( Properties properties ) { SortedSet<Rank> result = new TreeSet<Rank>(); //traverse through the properties and put them //in a sorted set for( Iterator it = properties.keySet().iterator(); it.hasNext() ; ){ String key = (String)it.next(); result.add( new Rank( Integer.parseInt(key), properties.getProperty( key ))); } return result; } /** * A data class that allows us to compile a regex expression * and associate a rank value with it. */ private class Rank implements Comparable{ /** * The rank value. */ private int mRank; /** * The compiled regex expression. */ private Pattern mPattern; /** * The default constructor. * * @param rank The rank value. * @param regex The regex expression. */ public Rank( int rank, String regex ){ mRank = rank; mPattern = Pattern.compile( regex ); } /** * Matches a string against the compiled regex expression. * * @param input the input string to be matched * * @return boolean indicating whether input matches or not. */ public boolean matches( String input ){ return mPattern.matcher(input).matches(); } /** * Returns the underlying regex pattern associated with the Rank object. * * @return the regex pattern */ public Pattern getRegex(){ return mPattern; } /** * Returns the rank associated with it. * * @return the int value of the rank */ public int getRank(){ return mRank; } /** * Compares the Rank object with another rank object. * * @param o the object to be compared.
* @return a negative integer, zero, or a positive integer as this Rank * is less than, equal to, or greater than the specified Rank. */ public int compareTo(Object o) { if ( o instanceof Rank ) { Rank r = ( Rank ) o; return ( this.getRank() - r.getRank()); } else { throw new ClassCastException( "Object is not of class Regex.Rank" ); } } /** * Returns the textual representation of this object. */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( "( value => " ).append( getRank() ).append( " expr => ").append( getRegex() ).append( ")" ); return sb.toString(); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/package.html0000644000175000017500000000231211757531137025371 0ustar ryngerynge Provides the interface and call-out to the site selector mechanism.

Package Specification

The classes in this package provide a site selection implementation to select eligible sites from a list of candidates during deferred planning.

TODO: describe here how it works, e.g. show how the c'tor and mapJob2ExecPool( SubInfo, List ) work together, what defaults there are, and link to non-Java call-out for info on using external applications. A rough sketch follows below.
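Illustrative sketch only: the concrete selector and the String return type below are assumptions, not verified against this API; only mapJob2ExecPool( SubInfo, List ) is named above.

    SiteSelector selector = new RoundRobin();                      // hypothetical concrete site selector
    String site = selector.mapJob2ExecPool( job, candidateSites ); // job is a SubInfo, candidateSites a List of candidate sites

Under these assumptions, the selector examines the job and the candidate list and returns the handle of the chosen execution site.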

Related Documentation

For overviews, tutorials, examples, guides, and tool documentation, please see: @see org.griphyn.cPlanner.common.PegasusProperties @see org.griphyn.cPlanner.engine.InterPoolEngine @since 1.2.3 pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/transformation/0000755000175000017500000000000011757531667026170 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/transformation/RoundRobin.java0000644000175000017500000000237111757531137031107 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.transformation; import edu.isi.pegasus.planner.selector.TransformationSelector; import java.util.LinkedList; import java.util.List; /** * This implementation of the Selector selects a transformation from a list in a round-robin fashion. * * @author Gaurang Mehta * @version $Revision: 2050 $ */ public class RoundRobin extends TransformationSelector { private LinkedList tclist; public RoundRobin() { } /** * * @param tcentries List * @return List, currently always null as round-robin selection is not yet implemented */ public List getTCEntry( List tcentries ) { //NOTE: round-robin selection is not yet implemented; //this selector currently selects no entry. return null; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/transformation/Installed.java0000644000175000017500000000402711757531137030745 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.transformation; import edu.isi.pegasus.planner.selector.TransformationSelector; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import java.util.ArrayList; import java.util.Iterator; import java.util.List; /** * This implementation of the Selector returns a list of TransformationCatalogEntry objects of type INSTALLED. * * @author Gaurang Mehta * * @version $Revision: 2076 $ */ public class Installed extends TransformationSelector { /** * Returns a list of TransformationCatalogEntry objects of type INSTALLED * from a List of valid TCEntries. * @param tcentries List The original list containing TransformationCatalogEntries. * @return List returns a List of TransformationCatalogEntry objects of type INSTALLED * */ public List getTCEntry( List tcentries ) { List results = null; for ( Iterator i = tcentries.iterator(); i.hasNext(); ) { TransformationCatalogEntry tc = ( TransformationCatalogEntry ) i.
next(); if ( tc.getType().equals( TCType.INSTALLED ) ) { if ( results == null ) { results = new ArrayList( 5 ); } results.add( tc ); } } return results; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/transformation/Submit.java0000644000175000017500000000414011757531137030265 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.transformation; import edu.isi.pegasus.planner.selector.TransformationSelector; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import java.util.ArrayList; import java.util.Iterator; import java.util.List; /** * This implementation of the Selector selects a transformation of type STAGEABLE that is available only on the submit site. * @author Gaurang Mehta * @version $Revision: 2638 $ */ public class Submit extends TransformationSelector { /** * This method returns a list of TransformationCatalogEntry objects of type * STAGEABLE that are available only on the Submit machine ( the "local" site ). * * @param tcentries the original list of TransformationCatalogEntry objects * on which the selector needs to run. * * @return List */ public List getTCEntry( List tcentries ) { List results = null; for ( Iterator i = tcentries.iterator(); i.hasNext(); ) { TransformationCatalogEntry tc = ( TransformationCatalogEntry ) i. next(); if ( ( tc.getType().equals( TCType.STAGEABLE ) ) && ( tc.getResourceId().equalsIgnoreCase( "local" ) ) ) { if ( results == null ) { results = new ArrayList( 5 ); } results.add( tc ); } } return results; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/transformation/Random.java0000644000175000017500000000364011757531137030246 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.transformation; import edu.isi.pegasus.planner.selector.TransformationSelector; import edu.isi.pegasus.common.logging.LogManager; import java.util.ArrayList; import java.util.List; /** * This implementation of the TCSelector selects a random * TransformationCatalogEntry from a List of entries. * * @author Gaurang Mehta * @version $Revision: 2050 $ */ public class Random extends TransformationSelector { public Random() { } /** * This method randomly selects one of the records from numerous valid * Transformation Catalog Entries returned by the TCMapper.
* * @param tcentries List TransformationCatalogEntry objects returned by the TCMapper. * @return List containing the single randomly selected TransformationCatalogEntry object */ public List getTCEntry( List tcentries ) { int no_of_entries = tcentries.size(); int recSelected = (int) ( Math.random() * no_of_entries ); String message = "Random TC Record selected is " + ( recSelected + 1 ) + " amongst " + no_of_entries + " possible"; mLogger.log( message,LogManager.DEBUG_MESSAGE_LEVEL); List result = new ArrayList( 1 ); result.add( tcentries.get( recSelected ) ); return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/selector/transformation/Staged.java0000644000175000017500000000364611757531137030233 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.selector.transformation; import edu.isi.pegasus.planner.selector.TransformationSelector; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import java.util.ArrayList; import java.util.Iterator; import java.util.List; /** * This implementation of the Selector selects a transformation of type STAGEABLE on all sites. * * @author Gaurang Mehta * @version $Revision: 2638 $ */ public class Staged extends TransformationSelector { /** * Takes a list of TransformationCatalogEntry objects and returns one or * more TransformationCatalogEntry objects as a list, selecting only stageable binaries. * * @param tcentries List * @return List */ public List getTCEntry( List tcentries ) { List results = null; for ( Iterator i = tcentries.iterator(); i.hasNext(); ) { TransformationCatalogEntry tc = ( TransformationCatalogEntry ) i. next(); if ( tc.getType().equals( TCType.STAGEABLE ) ) { if ( results == null ) { results = new ArrayList( 5 ); } results.add( tc ); } } return results; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/0000755000175000017500000000000011757531667023442 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/NetloggerCallback.java0000644000175000017500000002023411757531137027641 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
*/ package edu.isi.pegasus.planner.provenance; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.visualize.Callback; import edu.isi.pegasus.planner.invocation.StatInfo; import java.util.List; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Map; import edu.isi.pegasus.planner.invocation.HasText; import edu.isi.pegasus.planner.invocation.Stamp; import edu.isi.pegasus.planner.invocation.Uname; import edu.isi.pegasus.planner.invocation.Machine; import edu.isi.pegasus.planner.invocation.MachineSpecific; import edu.isi.pegasus.planner.invocation.MachineInfo; /** * Implements the callback interface to collect, for each invocation record, * the key-value pairs that are to be logged via Netlogger. * * @author not attributable * @version 1.0 */ public class NetloggerCallback implements Callback { /** * The prefix for machine information keys. */ public static final String MACHINE_INFO_PREFIX = "job.machine."; /** * The logical site where the job was run. */ protected String mSite; /** * The main job whose record is being parsed. */ protected String mMainJob; /** * The handle to the logger. */ protected LogManager mLogger; /** * The Map of key value pairs that are to be logged via Netlogger. */ protected Map mInvocationMap; /** * List of Invocation maps. */ protected List<Map> mInvocationList; /** * The counter to track the number of invocation records. */ protected int counter; /** * The default constructor. */ public NetloggerCallback() { mLogger = LogManagerFactory.loadSingletonInstance(); mInvocationList = new LinkedList<Map>(); counter = 0; } /** * Initializes the callback. * * @param directory the directory where all the files reside. * @param useStatInfo boolean indicating whether to use stat info or not. */ public void initialize( String directory , boolean useStatInfo){ } /** * Callback for the starting of an invocation record. * * @param job the job/file being parsed. * @param resource the site id where the job was executed. */ public void cbInvocationStart( String job, String resource) { counter ++; mInvocationMap = new LinkedHashMap(); mInvocationMap.put( "job.resource", resource ); } public void cbStdIN(List jobs, String data) { } public void cbStdOut(List jobs, String data) { } public void cbStdERR(List jobs, String data) { } /** * Callback function for when stat information for an input file is * encountered. Empty for the time being. * * @param filename the name of the file. * @param info the StatInfo about the file. * */ public void cbInputFile( String filename, StatInfo info ){ } /** * Callback function for when stat information for an output file is * encountered. Empty for the time being. * * @param filename the name of the file. * @param info the StatInfo about the file. * */ public void cbOutputFile( String filename, StatInfo info ){ } /** * Callback signalling that an invocation record has been parsed. * Adds the map of key-value pairs for the record to the list of * invocation maps. * * */ public void cbInvocationEnd() { mInvocationList.add(mInvocationMap); } /** * Returns a List of Map objects where each map captures information in one * invocation record. * * @return List<Map> */ public Object getConstructedObject() { return mInvocationList; } /** * Callback signalling that we are done with the parsing of the files.
*/ public void done(){ for( Map m : mInvocationList ){ for( Iterator it = m.keySet().iterator(); it.hasNext() ; ){ String key = (String) it.next(); //System.out.println( key + " " + m.get(key) ); } } } /** * Callback for the metadata retrieved from the kickstart record. * * @param metadata the metadata map parsed out of the kickstart record. */ public void cbMetadata( Map metadata ){ mInvocationMap.put( "job.counter", Integer.toString(counter) ); mInvocationMap.put( "job.exitcode", getListValueFromMetadata( metadata, "exitcodes" ) ); mInvocationMap.put( "job.executable", getListValueFromMetadata( metadata, "executables" ) ); mInvocationMap.put( "job.arguments", getListValueFromMetadata( metadata, "arguments") ); mInvocationMap.put( "job.directory", metadata.get( "directory" ).toString() ); mInvocationMap.put( "job.duration", metadata.get( "duration" ).toString() ); mInvocationMap.put( "job.hostname", metadata.get( "hostname" ).toString() ); mInvocationMap.put( "job.hostaddress", metadata.get( "hostaddr" ).toString() ); mInvocationMap.put( "job.user", metadata.get( "user" ).toString() ); mInvocationMap.put( "job.starttime", metadata.get( "start" ).toString() ); } /** * Callback to pass the machine information on which the job is * executed. Iterates through the machine info objects and puts the * keys and values in the internal map. * * @param machine the Machine object to traverse. */ public void cbMachine( Machine machine ) { // go through the values in a Machine String prefix = MACHINE_INFO_PREFIX + machine.getElementName(); // stamp element Stamp stamp = machine.getStamp(); mInvocationMap.put( prefix + "." + stamp.getElementName(), stamp.getValue() ); // uname element Uname uname = machine.getUname(); String specific = prefix + "." + uname.getElementName(); if ( uname instanceof HasText ) { mInvocationMap.put( specific, uname.getValue() ); } for ( Iterator ai = uname.getAttributeKeysIterator(); ai.hasNext(); ) { String akey = (String) ai.next(); mInvocationMap.put( specific + "." + akey, uname.get(akey) ); } // machine-specific group MachineSpecific ms = machine.getMachineSpecific(); specific = prefix + "." + ms.getElementName(); for ( Iterator it = ms.getMachineInfoIterator(); it.hasNext(); ) { MachineInfo info = (MachineInfo) it.next(); String key = specific + "." + info.getElementName(); if ( info instanceof HasText ) { mInvocationMap.put( key, ((HasText)info).getValue() ); } key += "."; // put in all the attribute keys and values for ( Iterator ai = info.getAttributeKeysIterator(); ai.hasNext() ; ){ String akey = (String) ai.next(); mInvocationMap.put( key + akey , info.get(akey) ); } } } /** * Returns the first value from the List of values for a key. * * @param m the metadata map. * @param key the key whose values are to be looked up. * * @return the first value.
*/ private String getListValueFromMetadata(Map m, String key ) { Object obj = m.get( key ); if( obj == null ){ return ""; } if( !( obj instanceof List )){ throw new RuntimeException( "Value corresponding to key " + key + " is not a List" ); } //in case of windward there are no pre jobs or post jobs return ((List)obj).get( 0 ).toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/0000755000175000017500000000000011757531667024545 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/pps/0000755000175000017500000000000011757531667025347 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/pps/PPSFactory.java0000644000175000017500000000674011757531137030203 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.provenance.pasoa.pps; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.provenance.pasoa.PPS; /** * The factory for instantiating a PPS implementation. * * @author Karan Vahi * @version $Revision: 2567 $ */ public class PPSFactory { /** * The default package where all the implementations reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.provenance.pasoa.pps"; /** * The default PPS implementation to be used. */ public static final String DEFAULT_PPS_PROVIDER = "Empty"; /** * The default Pasoa PPS implementation to be used. */ public static final String PASOA_PPS_PROVIDER = "Pasoa"; /** * The singleton instance of the PPS implementation that is returned. */ private static PPS mInstance = null; /** * Loads the appropriate PPS implementation on the basis of the property set in the * properties. * * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * * @return the instance of the appropriate PPS implementation. * * @throws PPSFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static PPS loadPPS( PegasusProperties properties ) throws PPSFactoryException{ //sanity check if( properties == null ){ throw new PPSFactoryException( "No properties passed to factory " ); } //check for singleton if( mInstance != null ){ return mInstance; } String className = properties.getRefinementProvenanceStore(); if( className == null ){ className = DEFAULT_PPS_PROVIDER; } else if ( className.equalsIgnoreCase( "pasoa" ) ){ className = PASOA_PPS_PROVIDER; } PPS pps = null; try{ //prepend the package name if required className = ( className.indexOf('.') == -1 )? //pick up from the default package DEFAULT_PACKAGE_NAME + "."
+ className: //load directly className; //try loading the class dynamically DynamicLoader dl = new DynamicLoader( className ); pps = ( PPS ) dl.instantiate( new Object[0] ); } catch( Exception e ){ throw new PPSFactoryException( " Unable to instantiate PPS ", className, e ); } mInstance = pps; return pps; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/pps/Empty.java0000644000175000017500000000763411757531137027312 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.provenance.pasoa.pps; import java.util.*; import edu.isi.pegasus.planner.refiner.Refiner; import edu.isi.pegasus.planner.provenance.pasoa.PPS; /** * The default empty implementation to be used. * * @author Karan Vahi * @version $Revision: 2582 $ */ public class Empty implements PPS { public Empty() { } /** * * @return The ID used for the whole refinement process of this workflow * @param refiner workflow Refiner * @param refinementStepName String * @param firstStep boolean * @throws Exception */ public String beginWorkflowRefinementStep( Refiner refiner, String refinementStepName, boolean firstStep ) throws Exception { // System.out.println( "Start of Refiner- " + refinementStepName ); // System.out.println( "First Step " + firstStep ); // System.out.println( refiner.getXMLProducer().toXML() ); return ""; } /** * clusteringOf * * @param clusteredJob String * @param jobs List * @throws Exception */ public void clusteringOf( String clusteredJob, List jobs ) throws Exception { // System.out.println( "Clustered Job " + clusteredJob ); // System.out.println( " contains " + jobs ); } /** * endWorkflowRefinementStep * * @param refiner workflow Refiner * @throws Exception */ public void endWorkflowRefinementStep( Refiner refiner ) throws Exception { // System.out.println( "End of Refiner" ); // System.out.println( refiner.getXMLProducer().toXML() ); } /** * isIdenticalTo * * @param afterNode String * @param beforeNode String * @throws Exception */ public void isIdenticalTo(String afterNode, String beforeNode) throws Exception { // System.out.println( beforeNode + " identical to " + afterNode ); } /** * isPartitionOf * * @param afterNode String * @param beforeNode List * @throws Exception */ public void isPartitionOf(String afterNode, List beforeNode) throws Exception { } /** * registrationIntroducedFor * * @param registrationNode String * @param dataStagingNode String * @throws Exception */ public void registrationIntroducedFor( String registrationNode, String dataStagingNode ) throws Exception { // System.out.println( "registration node " + registrationNode + " for " + dataStagingNode ); } /** * siteSelectionFor * * @param afterNode String * @param beforeNode String * @throws Exception */ public void siteSelectionFor(String afterNode, String beforeNode) throws Exception { // System.out.print( " Site Selection for " + beforeNode ); // System.out.println( " is " + afterNode ); } /** * stagingIntroducedFor * * @param 
stagingNodes List * @param appNode String * @throws Exception */ public void stagingIntroducedFor(List stagingNodes, String appNode) throws Exception { // System.out.println( "Staging done by " + stagingNodes + " for " + appNode); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/pps/PPSFactoryException.java0000644000175000017500000000640511757531137032060 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.provenance.pasoa.pps; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating PPS implementations. * * @author Karan Vahi * @version $Revision: 2561 $ */ public class PPSFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "PPS"; /** * Constructs a PPSFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public PPSFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a PPSFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public PPSFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a PPSFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public PPSFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a PPSFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public PPSFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/pps/Pasoa.java0000644000175000017500000004145611757531137027257 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.provenance.pasoa.pps; import edu.isi.pegasus.planner.provenance.pasoa.PPS; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.refiner.Refiner; import org.pasoa.common.BestPractice; import org.pasoa.common.Constants; import org.pasoa.pstructure.GlobalPAssertionKey; import org.pasoa.pstructure.InteractionKey; import org.pasoa.pstructure.InteractionPAssertion; import org.pasoa.pstructure.ObjectID; import org.pasoa.pstructure.PAssertion; import org.pasoa.pstructure.Record; import org.pasoa.pstructure.RelationshipPAssertion; import org.pasoa.pstructure.SubjectID; import org.pasoa.storeclient.ClientLib; import org.pasoa.util.httpsoap.WSAddressEndpoint; import java.io.IOException; import java.io.StringReader; import java.net.URL; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import org.w3c.dom.Element; import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** * Implements the PPS interface for recording documentation from a Pegasus refinement. */ public class Pasoa implements PPS { // The current workflow XML serialisation (except for the final footer part: see _xmlFooter below) // This is built up cumulatively over time by the refiners providing XML fragments to add private String _workflowXML; // A count of the number of relationship p-assertions recorded (used to create unique p-assertion IDs) private int _relationshipPAssertionCounter; // The key for the interaction in which a refiner is invoked private InteractionKey _causeKey; // The key for the interaction in which a refiner completes private InteractionKey _effectKey; // The name (URI) of the current refinement step private String _refinement; // The unique name of the current refinement process, generated from system time private String _refinementID; // The suffix to the XML workflow serialisation //private static final String _xmlFooter = ""; private static final String _xmlFooter = ""; /** * On initialisation, create a ClientLib object for communication with a * store, set the store URL and create a namespace-aware DOM document parser. */ public Pasoa () throws Exception { _storeProxy = new ClientLib (); String storeURL = "http://localhost:8080/preserv-1.0"; _storeRecordURL = new URL (storeURL + "/record"); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance (); factory.setNamespaceAware (true); _builder = factory.newDocumentBuilder (); } /** * On initialisation, create a ClientLib object for communication with a * store, set the store URL and create a namespace-aware DOM document parser. 
*/ public Pasoa (String storeURL) throws Exception { _storeProxy = new ClientLib (); _storeRecordURL = new URL (storeURL + "/record"); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance (); factory.setNamespaceAware (true); _builder = factory.newDocumentBuilder (); } // PPS methods // public String beginWorkflowRefinementStep (Refiner workflow, String refinementStepName, boolean firstStep) throws Exception { if (firstStep) { _workflowXML = workflow.getXMLProducer ().toXML (); _refinementID = Long.toString (System.currentTimeMillis ()); _causeKey = createInteractionKey (_refinementID, refinementStepName, true); } else { // Record relationships between output of one refinement, input of the one being started _causeKey = _effectKey; _effectKey = createInteractionKey (_refinementID, refinementStepName, true); for (Iterator it = workflow.getWorkflow ().jobIterator (); it.hasNext ();){ Job job = (Job) it.next (); String name = job.getName (); isIdenticalTo (name, name); } // Now move on to refinement itself _causeKey = _effectKey; } _effectKey = createInteractionKey (_refinementID, refinementStepName, false); _refinement = refinementStepName; _relationshipPAssertionCounter = 0; // Record the initial invocation of the refiner recordInteraction (_workflowXML + _xmlFooter, _causeKey, _refinement, true); return _refinementID; } public void isIdenticalTo (String afterNode, String beforeNode) throws Exception { recordRelationship (_relationshipPAssertionCounter, afterNode, _identicalParameter, _identicalRelation, _causeKey, beforeNode, _identicalParameter, _refinement); _relationshipPAssertionCounter += 1; } public void siteSelectionFor (String afterNode, String beforeNode) throws Exception { recordRelationship (_relationshipPAssertionCounter, afterNode, _siteSelectionOutputParameter, _siteSelectionRelation, _causeKey, beforeNode, _siteSelectionInputParameter, _refinement); _relationshipPAssertionCounter += 1; } public void stagingIntroducedFor (List stagingNodes, String appNode) throws Exception { for (Object stagingNode : stagingNodes) { recordRelationship (_relationshipPAssertionCounter, stagingNode.toString (), _stagingParameter, _stagingRelation, _causeKey, appNode, _stagedForParameter, _refinement); _relationshipPAssertionCounter += 1; } } public void registrationIntroducedFor (String registrationNode, String dataStagingNode) throws Exception { recordRelationship (_relationshipPAssertionCounter, dataStagingNode, _registrationParameter, _registrationRelation, _causeKey, registrationNode, _registrationOfParameter, _refinement); _relationshipPAssertionCounter += 1; } public void clusteringOf (String clusteredJob, List jobs) throws Exception { for (Object inCluster : jobs) { recordRelationship (_relationshipPAssertionCounter, clusteredJob, _clusterParameter, _clusteredRelation, _causeKey, inCluster.toString (), _inClusterParameter, _refinement); _relationshipPAssertionCounter += 1; } } public void isPartitionOf (String afterNode, List beforeNode) { throw new UnsupportedOperationException (); } public void endWorkflowRefinementStep (Refiner workflow) throws Exception { _workflowXML += workflow.getXMLProducer ().toXML (); recordInteraction (_workflowXML + _xmlFooter, _effectKey, _refinement, false); } // Utility constants and methods // /** * A namespace we can use to identify relationships and concepts defined for Pegasus' provenance data */ //private static final String _namespace = "http://www.isi.edu/pasoa"; // Relations: // Relationships are asserted between workflow nodes before 
a refinement and // those after the refinement. The former are 'objects' of the relationship, // the latter are 'subjects'. Every relationship has a type which is identified // by a URI. // // For each subject and object of a relationship, the role that each plays // in the relationship must be declared, the role type being called the // 'parameter name' and identified by a URI. /** * The identicalTo relationship relates a workflow node before and after a * refinement that has not changed during that refinement. */ public static final String _identicalRelation = NAMESPACE + "/relations#identicalTo"; /* * In an identical relationship both subject and object play the role of * 'item', as in 'this item is identical to that item'. */ public static final String _identicalParameter = NAMESPACE + "/parameters#item"; /** * The site selection relationship relates a job that has had its site selected * to that same job before site selection. */ public static final String _siteSelectionRelation = NAMESPACE + "/relations#siteSelectionOf"; /** * The job before site selection plays the 'preselection' role. */ public static final String _siteSelectionInputParameter = NAMESPACE + "/parameters#preselection"; /** * The job after site selection plays the 'postselection' role. */ public static final String _siteSelectionOutputParameter = NAMESPACE + "/parameters#postselection"; public static final String _stagingRelation = NAMESPACE + "/relations#staging"; public static final String _stagedForParameter = NAMESPACE + "/parameters#stagedFor"; public static final String _stagingParameter = NAMESPACE + "/parameters#staging"; public static final String _registrationRelation = NAMESPACE + "/relations#registration"; public static final String _registrationOfParameter = NAMESPACE + "/parameters#registrationOf"; public static final String _registrationParameter = NAMESPACE + "/parameters#registration"; public static final String _clusteredRelation = NAMESPACE + "/relations#clustered"; public static final String _inClusterParameter = NAMESPACE + "/parameters#inCluster"; public static final String _clusterParameter = NAMESPACE + "/parameters#cluster"; /** * A partially refined workflow is specified as an XML document. * We represent this as a String object, and for convenience this is * the closing tag of that document. */ private static final String _workflowPostfix = ""; /** ClientLib is the primary class by which a client communicates with a provenance store */ private ClientLib _storeProxy; /** The URL of the provenance store Web Service (recording port) */ private URL _storeRecordURL; /** A pre-created DOM XML parser (expensive to create so we do just once) */ private DocumentBuilder _builder; /** * Conventionally, we use WS-Addressing to identify the endpoints of an * interaction between actors, and this method constructs an XML (DOM) fragment * in the WS-Addressing schema for a particular URL. * * @param address The URL of the endpoint * @return An XML (DOM) fragment in WS-Addressing endpoint schema containing the address */ public static Element addressToElement (String address) { return new WSAddressEndpoint (address).getElement (); } /** * Individual jobs in a workflow are identified by an XML document fragment, * called a data accessor, and this method constructs the fragment for a given * job ID.
* * @param jobID The job ID * @return An XML (DOM) fragment representing a reference to that job in an XML workflow representation */ public Element createDataAccessor (String jobID) throws IOException, SAXException { return toElement ("" + jobID + ""); } /** * Creates an interaction p-assertion asserting that a given partially * refined workflow was exchanged between actors. * * @param workflow The (XML) content of the partially refined workflow * @return A JavaBean representation of an interaction p-assertion containing the workflow */ public InteractionPAssertion createInteractionPAssertion (String workflow) throws IOException, SAXException { return new InteractionPAssertion ("1", BestPractice.VERBATIM_STYLE, toElement (workflow + _workflowPostfix)); } /** * Creates an interaction key to identify an interaction between two actors. * * @param refinementID The unique identifier for this workflow refinement (run of Pegasus) * @param refinementAddress The URI of the particular refinement step (site selection, cluster etc.) * @param preRefinement True if the interaction is pre-refinement, i.e. from Pegasus to a refiner, rather than the other way round */ public InteractionKey createInteractionKey (String refinementID, String refinementAddress, boolean preRefinement) { if (preRefinement) { return new InteractionKey (addressToElement (PEGASUS), addressToElement (refinementAddress), refinementID + "Start"); } else { return new InteractionKey (addressToElement (refinementAddress), addressToElement (PEGASUS), refinementID + "End"); } } /** * Creates a relationship p-assertion between nodes in two partially refined workflows. * * @param count The index of this relationship p-assertion in the interaction (to support the requirement that each p-assertion has a unique ID) * @param effectJobID The job ID of the subject (effect) of the relationship * @param effectParameter The role played by the subject of the relationship * @param relationType The type of the relationship * @param causeKey The interaction key of the object of the relationship * @param causeJobID The job ID of the object (cause) of the relationship * @param causeParameter The role played by the object of the relationship * @return A RelationshipPAssertion JavaBean representing the relationship p-assertion with the given arguments */ public RelationshipPAssertion createRelationship (int count, String effectJobID, String effectParameter, String relationType, InteractionKey causeKey, String causeJobID, String causeParameter) throws IOException, SAXException { List objectIDs = new LinkedList (); ObjectID objectID = new ObjectID ( new GlobalPAssertionKey (causeKey, Constants.RECEIVER_VIEW_TYPE, "1"), effectParameter, createDataAccessor (causeJobID), null); objectIDs.add (objectID); return new RelationshipPAssertion ("RPA" + count, new SubjectID ("1", createDataAccessor (effectJobID), effectParameter), relationType, objectIDs); } public void record (PAssertion passertion, InteractionKey interactionKey, boolean isSender, String asserterURL) throws Exception { if (isSender) { _storeProxy.record (new Record (passertion, interactionKey, Constants.SENDER_VIEW_TYPE, addressToElement (asserterURL)), _storeRecordURL); } else { _storeProxy.record (new Record (passertion, interactionKey, Constants.RECEIVER_VIEW_TYPE, addressToElement (asserterURL)), _storeRecordURL); } } public void recordInteraction (InteractionPAssertion passertion, InteractionKey interactionKey, String refinerType, boolean refinementInput) throws Exception { if (refinementInput) { 
record (passertion, interactionKey, true, PEGASUS); record (passertion, interactionKey, false, refinerType); } else { record (passertion, interactionKey, true, refinerType); record (passertion, interactionKey, false, PEGASUS); } } public void recordInteraction (String workflow, InteractionKey interactionKey, String refinerType, boolean refinementInput) throws Exception { recordInteraction (createInteractionPAssertion (workflow), interactionKey, refinerType, refinementInput); } public RelationshipPAssertion recordRelationship (int count, String effectJobID, String effectParameter, String relationType, InteractionKey causeKey, String causeJobID, String causeParameter, String asserterURL) throws Exception { RelationshipPAssertion passertion = createRelationship (count, effectJobID, effectParameter, relationType, causeKey, causeJobID, causeParameter); record (passertion, _effectKey, true, asserterURL); return passertion; } /** * Convenience method to parse string-represented XML into a DOM XML fragment representation */ public Element toElement (String xmlAsString) throws IOException, SAXException { //System.out.println( "XML as string is " + xmlAsString ); return _builder.parse (new InputSource (new StringReader (xmlAsString))).getDocumentElement (); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/PPS.java0000644000175000017500000000661211757531137026047 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.provenance.pasoa; import java.util.List; import edu.isi.pegasus.planner.refiner.Refiner; /** * Pegasus P-assertion Support interface. * * Classes that implement this interface assist in the creation of p-assertions for the Pegasus workflow refinement system. * This interface follows a builder pattern. * * Using this interface proceeds as follows: * 1. At the beginning of a refinement step the beginWorkflowRefinementStep method should be called. * 2. As nodes are transformed the particular refinement operation method (siteSelectionFor, isPartitionOf, ...) should be called. * 3. When the refinement step is complete the endWorkflowRefinementStep method should be called. * 4. At this point identicalTo relationships are automatically created between the resulting workflow and the input workflow. * * A note on PHeaders: * For the first refinement step, the p-header can be passed in as null.
* For each subsequent refinement step, the p-header provided by the endWorkflowRefinementStep method * should be passed into the beginWorkflowRefinementStep method. */ public interface PPS { /** * A namespace we can use to identify relationships and concepts defined for Pegasus' provenance data */ public static final String NAMESPACE = "http://www.isi.edu/pasoa"; // Actors: Every refinement step and Pegasus itself is given an identifying URI public static final String PEGASUS = NAMESPACE + "/actors#pegasus"; public static final String REFINEMENT_CLUSTER = NAMESPACE + "/actors#cluster"; public static final String REFINEMENT_REDUCE = NAMESPACE + "/actors#reduce"; public static final String REFINEMENT_REGISTER = NAMESPACE + "/actors#register"; public static final String REFINEMENT_SITE_SELECT = NAMESPACE + "/actors#siteSelect"; public static final String REFINEMENT_STAGE = NAMESPACE + "/actors#stage"; /** * @return The ID used for the whole refinement process of this workflow */ public String beginWorkflowRefinementStep ( Refiner refiner, String refinementStepName, boolean firstStep) throws Exception; public void isIdenticalTo (String afterNode, String beforeNode) throws Exception; public void siteSelectionFor (String afterNode, String beforeNode) throws Exception; public void stagingIntroducedFor (List stagingNodes, String appNode) throws Exception; public void registrationIntroducedFor (String registrationNode, String dataStagingNode) throws Exception; public void clusteringOf (String clusteredJob, List jobs) throws Exception; public void isPartitionOf (String afterNode, List beforeNode) throws Exception; public void endWorkflowRefinementStep ( Refiner refiner ) throws Exception; } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/producer/0000755000175000017500000000000011757531667026370 5ustar ryngerynge././@LongLink0000000000000000000000000000015600000000000011567 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/producer/XMLProducerFactoryException.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/producer/XMLProducerFactoryExcep0000644000175000017500000000664111757531137032773 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.provenance.pasoa.producer; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.planner.provenance.pasoa.XMLProducer; /** * Class to notify of failures while instantiating XMLProducer implementations. * * @author Karan Vahi * @version $Revision: 2561 $ */ public class XMLProducerFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "XMLProducer"; /** * Constructs an XMLProducerFactoryException with no detail * message. The associated classname is set to the value specified by * DEFAULT_NAME. * * @param msg the detailed message.
* * @see #DEFAULT_NAME */ public XMLProducerFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs an XMLProducerFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of the class that was being instantiated or * some other signifier like a module name. */ public XMLProducerFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs an XMLProducerFactoryException with the * specified detailed message and a cause. The associated classname is set * to the value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public XMLProducerFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs an XMLProducerFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of the class that was being instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public XMLProducerFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/producer/XMLProducerFactory.java0000644000175000017500000000555111757531137032725 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.provenance.pasoa.producer; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.provenance.pasoa.XMLProducer; /** * * The factory for instantiating an XMLProducer. * * @author Karan Vahi * @version $Revision: 2567 $ */ public class XMLProducerFactory { /** * The default package where all the implementations reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.provenance.pasoa.producer"; /** * The default XML producer implementation to be used. */ public static final String DEFAULT_XML_PRODUCER = "InMemory"; /** * Loads the appropriate XMLProducer implementation. Currently the default * XML producer is always loaded, irrespective of the properties passed. * * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * * @return the instance of the appropriate XML Producer.
* * @throws XMLProducerFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static XMLProducer loadXMLProducer( PegasusProperties properties ) throws XMLProducerFactoryException{ //the XML producer implementation is currently fixed to the default String className = DEFAULT_XML_PRODUCER; XMLProducer producer = null; try{ //prepend the package name if required className = ( className.indexOf('.') == -1 )? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className: //load directly className; //try loading the class dynamically DynamicLoader dl = new DynamicLoader( className ); producer = ( XMLProducer ) dl.instantiate( new Object[0] ); } catch ( Exception e ){ throw new XMLProducerFactoryException( " Unable to instantiate XMLProducer ", className, e ); } return producer; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/producer/InMemory.java0000644000175000017500000000517611757531137030773 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.provenance.pasoa.producer; import edu.isi.pegasus.planner.provenance.pasoa.XMLProducer; import java.io.*; /** * An implementation of the XMLProducer interface backed by a StringBuffer. * It does not check for any well-formedness of the XML. It is basically a * data store. * * @author Karan Vahi * @version $Revision: 2561 $ */ public class InMemory implements XMLProducer { /** * The StringBuffer store. */ private StringBuffer mStore; /** * The initial size of the buffer. */ private int mSize; /** * The default constructor. */ public InMemory() { mSize = 32; reset(); } /** * The overloaded constructor. * * @param size the initial number of characters it can store. */ public InMemory( int size ){ mSize = size; reset(); } /** * Adds to the internal XML representation. * * @param xml the XML fragment to be added. * */ public void add( String xml ) { mStore.append( xml ); } /** * Clears the internal state. * * */ public void clear() { reset(); } /** * Returns the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @throws IOException if something fishy happens to the stream. */ public void toXML( Writer writer ) throws IOException { writer.write( mStore.toString() ); } /** * Returns the interaction assertions as an XML blob. * * @return String * @throws IOException if something fishy happens to the stream. */ public String toXML() throws IOException { Writer writer = new StringWriter( mSize ); toXML( writer ); return writer.toString(); } /** * Resets the internal store.
*/ private void reset(){ mStore = new StringBuffer( mSize ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/provenance/pasoa/XMLProducer.java0000644000175000017500000000341611757531137027550 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.provenance.pasoa; import java.io.IOException; import java.io.Writer; /** * A PASOA specific interface to generate various assertions as XML. * * @author Karan Vahi * @version $Revision: 2561 $ */ public interface XMLProducer { /** * Clears the internal state. */ public void clear(); /** * Adds an XML fragment to the internal XML store * * @param xml the XML fragment to be added. * */ public void add( String xml ); /** * Returns the xml description of the object. This is used for generating * the partition graph. That is no longer done. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer ) throws IOException ; /** * Returns the interaction assertions as a XML blob. * * @return String * * @exception IOException if something fishy happens to the stream. */ public String toXML() throws IOException; } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/0000755000175000017500000000000011757531667023453 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Status.java0000644000175000017500000001235411757531137025576 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class encapsulates the exit code or reason of termination for * a given job. The class itself contains the raw exit code. It also * aggregates an instance of the JobStatus interface, which describes * more clearly failure, regular execution, signal and suspension. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class Status extends Invocation // implements Cloneable { /** * The raw exit code, unparsed and unprepared. There are several * interpretation of the value. Usually, it is interpreted as * unsigned 16 bit value. The high byte contains the exit code. * The low byte has the core dump flag as MSB, and the rest denote * the signal number. 
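* For example, under this wait(2)-style layout the raw value may be decoded
* as follows (editor's sketch; <code>status</code> is a hypothetical
* Status instance):
* <pre>
*   int raw      = status.getStatus();
*   int exitCode = raw / 256;   // high byte: the exit code
*   int low      = raw % 256;   // low byte: core-dump flag (MSB) plus signal number
* </pre>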
A value of -1 denotes a failure from the * grid launcher before starting the job. */ private int m_status; /** * This member variable contains the real status of the job. */ private JobStatus m_jobStatus; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public Status() { m_status = 0; m_jobStatus = null; } /** * Constructs a layer with the raw exit code. * @param raw is the raw exit code to store. */ public Status( int raw ) { m_status = raw; m_jobStatus = null; } /** * Constructs the complete class with raw exit code * and a status child describing the exit code. * @param raw is the raw exit status * @param status is the description of the kind of exit. */ public Status( int raw, JobStatus status ) { m_status = raw; m_jobStatus = status; } /** * Accessor * * @see #setStatus(int) */ public int getStatus() { return this.m_status; } /** * Accessor. * * @param status * @see #getStatus() */ public void setStatus( int status ) { this.m_status = status; } /** * Accessor * * @see #setJobStatus( JobStatus ) */ public JobStatus getJobStatus() { return this.m_jobStatus; } /** * Accessor. * * @param jobStatus is an instance of the class describing * the real reason for program termination on the remote end. * @see #getJobStatus() */ public void setJobStatus( JobStatus jobStatus ) { this.m_jobStatus = jobStatus; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":status" : "status"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " raw=\"", Integer.toString(m_status) ); stream.write( ">" ); // dump content String newindent = indent==null ? 
null : indent+" "; if ( m_jobStatus != null ) m_jobStatus.toXML( stream, newindent, namespace ); else throw new RuntimeException( "unknown state of job status" ); // close tag stream.write( "</" ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/StatCall.java0000644000175000017500000001466611757531137026032 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is the container for a complete call to stat() or fstat(). * It contains information about the file or descriptor. Optionally, it * may also contain some data from the file or descriptor. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class StatCall extends Invocation // implements Cloneable { /** * optional handle for stat calls of the invocation record */ private String m_handle; /** * optional logical filename associated with this stat call */ private String m_lfn; /** * value of errno after calling any stat function, or -1 for failure. */ private int m_errno; /** * the object (fn,fd) that the stat call was taken on. */ private File m_file; /** * the stat information itself, only present for unfailed calls. */ private StatInfo m_statinfo; /** * Optional data gleaned from stdout or stderr. */ private Data m_data; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public StatCall() { m_handle = m_lfn = null; m_file = null; m_statinfo = null; m_data = null; } /** * Construct a specific but empty stat call object. * @param handle is the identifier to give this specific stat call. */ public StatCall( String handle ) { m_handle = handle; m_lfn = null; m_file = null; m_statinfo = null; m_data = null; } /** * Accessor * * @see #setHandle(String) */ public String getHandle() { return this.m_handle; } /** * Accessor. * * @param handle * @see #getHandle() */ public void setHandle( String handle ) { this.m_handle = handle; } /** * Accessor * * @see #setLFN(String) */ public String getLFN() { return this.m_lfn; } /** * Accessor. * * @param lfn * @see #getLFN() */ public void setLFN( String lfn ) { this.m_lfn = lfn; } /** * Accessor * * @see #setError(int) */ public int getError() { return this.m_errno; } /** * Accessor. * * @param errno * @see #getError() */ public void setError( int errno ) { this.m_errno = errno; } /** * Accessor * * @see #setFile(File) */ public File getFile() { return this.m_file; } /** * Accessor. * * @param file * @see #getFile() */ public void setFile( File file ) { this.m_file = file; } /** * Accessor * * @see #setStatInfo(StatInfo) */ public StatInfo getStatInfo() { return this.m_statinfo; } /** * Accessor.
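* <p>(Editor's sketch of typical use; <code>info</code> is a hypothetical
* StatInfo instance gathered elsewhere.)
* <pre>
*   StatCall sc = new StatCall( "stdout" );
*   sc.setError( 0 );
*   sc.setStatInfo( info );
* </pre>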
* * @param statinfo * @see #getStatInfo() */ public void setStatInfo( StatInfo statinfo ) { this.m_statinfo = statinfo; } /** * Accessor * * @see #setData(String) */ public Data getData() { return this.m_data; } /** * Accessor. * * @param data * @see #getData() */ public void setData( String data ) { this.m_data = new Data(data); } /** * Conversion accessor. * * @param data * @see #getData() * @see #setData( String ) */ public void setData( Data data ) { this.m_data = data; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":statcall" : "statcall"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " error=\"", Integer.toString(m_errno) ); if ( m_handle != null ) writeAttribute( stream, " id=\"", m_handle ); if ( m_lfn != null ) writeAttribute( stream, " lfn=\"", m_lfn ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); // dump content String newindent = indent==null ? null : indent+" "; m_file.toXML( stream, newindent, namespace ); if ( m_statinfo != null ) m_statinfo.toXML( stream, newindent, namespace ); if ( m_data != null ) m_data.toXML( stream, newindent, namespace ); // close tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/HasFilename.java0000644000175000017500000000255711757531137026473 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any).
* * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; /** * This interface defines a common base for all File elements in an invocation * record that carry a filename in their values. It exists primarily for * grouping purposes and for easier access for the database manager. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public interface HasFilename { /** * Accessor: Obtains the name of the file * @return the name of the file objects. Null is legal. * * @see #setFilename(String) */ public String getFilename(); /** * Accessor: Sets the name of an file object. * @param filename is the new name to store as filename. * @see #getFilename() */ public void setFilename( String filename ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/JobStatusSuspend.java0000644000175000017500000001351411757531137027572 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is transient for XML parsing. The data value will be * incorporated into the job status classes. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see StatCall */ public class JobStatusSuspend extends JobStatus implements HasText { /** * This is the data contained between the tags. A null * value is not valid. */ private String m_value; /** * This is the signal number that led to the suspension. */ private short m_signo; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public JobStatusSuspend() { m_signo = 0; m_value = null; } /** * Constructs an error number without reason text. * @param signo is the signal number for the suspension. */ public JobStatusSuspend( short signo ) { m_signo = signo; m_value = null; } /** * Constructs a piece of data. * @param signo is the signal number for the suspension. * @param value is the textual error reason. */ public JobStatusSuspend( short signo, String value ) { m_signo = signo; m_value = value; } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_value == null ) this.m_value = new String(fragment); else this.m_value += fragment; } } /** * Accessor * * @see #setSignalNumber(short) */ public short getSignalNumber() { return this.m_signo; } /** * Accessor. * * @param signo * @see #getSignalNumber() */ public void setSignalNumber( short signo ) { this.m_signo = signo; } /** * Accessor * * @see #setValue(String) */ public String getValue() { return this.m_value; } /** * Accessor. * * @param value is the new value to set. 
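* (Editor's note: the value becomes the element text; a hypothetical
* record might serialize as
* <pre>
*   &lt;suspended signal="19"&gt;stopped&lt;/suspended&gt;
* </pre>
* when written out via toXML.)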
* @see #getValue() */ public void setValue( String value ) { this.m_value = value; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dumps the state of the current element as XML output. This function * can return the necessary data more efficiently, thus overwriting * the inherited method. * * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * * @return a String which contains the state of the current class and * its siblings using XML. Note that these strings might become large. */ public String toXML( String indent ) { StringBuffer result = new StringBuffer(36); // good for no content result.append( "<suspended signal=\"" ); result.append( Short.toString(m_signo) ); if ( m_value == null ) { // no content result.append( "\"/>" ); } else { // yes, content result.append( "\">" ); result.append( quote(m_value,false) ); result.append( "</suspended>" ); } return result.toString(); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":suspended" : "suspended"; // open tag stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " signal=\"", Short.toString(m_signo) ); if ( m_value == null ) { // no content stream.write( "/>" ); } else { // yes, content stream.write( '>' ); stream.write( quote(m_value,false) ); // close tag stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Fifo.java0000644000175000017500000001113511757531137025172 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved.
*/ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is the container for a FIFO object. A FIFO, also known as * named pipe, does not consume space on the filesystem except for an * inode. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class Fifo extends Temporary { /** * optional message count for the FIFO. */ protected int m_count; /** * optional number of bytes read from FIFO. */ protected long m_rsize; /** * optional number of bytes written - but not to the FIFO. This * has to do with the message size that was created from the * original input message. */ protected long m_wsize; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public Fifo() { super(); m_count = 0; m_rsize = m_wsize = 0; } /** * Constructs a FIFO object. * @param filename is the name of the file that stat was invoked * @param descriptor is a valid file descriptor number. */ public Fifo( String filename, int descriptor ) { super(filename,descriptor); m_count = 0; m_rsize = m_wsize = 0; } /** * Accessor * * @see #setCount(int) */ public int getCount() { return this.m_count; } /** * Accessor. * * @param count * @see #getCount() */ public void setCount( int count ) { this.m_count = count; } /** * Accessor * * @see #setInputSize(long) */ public long getInputSize() { return this.m_rsize; } /** * Accessor. * * @param rsize * @see #getInputSize() */ public void setInputSize( long rsize ) { this.m_rsize = rsize; } /** * Accessor * * @see #setOutputSize(long) */ public long getOutputSize() { return this.m_wsize; } /** * Accessor. * * @param wsize * @see #getOutputSize() */ public void setOutputSize( long wsize ) { this.m_wsize = wsize; } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? 
namespace + ":fifo" : "fifo"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " name=\"", m_filename ); writeAttribute( stream, " descriptor=\"", Integer.toString(m_descriptor) ); writeAttribute( stream, " count=\"", Integer.toString(m_count) ); writeAttribute( stream, " rsize=\"", Long.toString(m_rsize) ); writeAttribute( stream, " wsize=\"", Long.toString(m_wsize) ); if ( m_hexbyte != null && m_hexbyte.length() > 0 ) { // yes, content stream.write( '>' ); stream.write( m_hexbyte ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } else { // no content stream.write( "/>" ); } if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/HasDescriptor.java0000644000175000017500000000260211757531137027060 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; /** * This interface defines a common base for all File elements in an invocation * record that carry a descriptor in their values. It exists primarily for * grouping purposes and for easier access for the database manager. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public interface HasDescriptor { /** * Accessor: Obtains the descriptor of the file handle * @return the descriptor number, or -1 for none. * * @see #setDescriptor(int) */ public int getDescriptor(); /** * Accessor: Sets the descriptor number of a file object. * @param descriptor is the new descriptor to store. * @see #getDescriptor() */ public void setDescriptor( int descriptor ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Boot.java0000644000175000017500000000515611757531137025220 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; /** * The boot element. * * @author Karan Vahi * @version $Revision: 2587 $ */ public class Boot extends MachineInfo implements HasText{ /** * The text value */ private StringBuffer mValue; /** * The element name */ public static final String ELEMENT_NAME = "boot"; /** * The default constructor */ public Boot(){ super(); mValue = null; } /** * Constructs a piece of data. * * @param value is the data to remember. The string may be empty, * but it must not be null.
* @exception NullPointerException if the argument was null. */ public Boot( String value ) { this(); if ( value == null ) { throw new NullPointerException( "the value to the tag constructor must not be null" ); } else { mValue = new StringBuffer( value ); } } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return ELEMENT_NAME; } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue(String fragment) { if (fragment != null) { if (this.mValue == null) { this.mValue = new StringBuffer(fragment); } else { this.mValue.append(fragment); } } } /** * Accessor * * @see #setValue(String) */ public String getValue() { return (mValue == null ? null : mValue.toString()); } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue(String value) { this.mValue = (value == null ? null : new StringBuffer(value)); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/CommandLine.java0000644000175000017500000001424711757531137026504 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class maintains the application that was run, and the * arguments to the commandline that were actually passed on to * the application. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see Job */ public class CommandLine extends Invocation implements HasText { /** * This is the executable that was run. */ private String m_executable; /** * This is the data contained between the tags. There may be * no data. */ private StringBuffer m_value; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public CommandLine() { m_executable = null; m_value = null; } /** * Constructs an applications without arguments. * @param executable is the name of the application. */ public CommandLine( String executable ) { m_executable = executable; m_value = null; } /** * Constructs an applications with arguments. * @param executable is the name of the application. * @param value represents the argument line passed. */ public CommandLine( String executable, String value ) { m_executable = executable; m_value = new StringBuffer(value); } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_value == null ) this.m_value = new StringBuffer(fragment); else this.m_value.append( fragment ); } } /** * Accessor * * @see #setExecutable(String) */ public String getExecutable() { return this.m_executable; } /** * Accessor. 
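* <p>(Editor's sketch; the command line is hypothetical and
* <code>writer</code> is assumed to be an open Writer.)
* <pre>
*   CommandLine cl = new CommandLine( "/bin/echo", "hello" );
*   cl.toXML( writer, "", null );
*   // emits: &lt;command-line executable="/bin/echo"&gt;hello&lt;/command-line&gt;
* </pre>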
* * @param executable * @see #getExecutable() */ public void setExecutable( String executable ) { this.m_executable = executable; } /** * Accessor * * @see #setValue(String) */ public String getValue() { return ( m_value == null ? null : m_value.toString() ); } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ) { this.m_value = ( value == null ? null : new StringBuffer(value) ); } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dumps the state of the current element as XML output. This function * can return the necessary data more efficiently, thus overwriting * the inherited method. * * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * * @return a String which contains the state of the current class and * its siblings using XML. Note that these strings might become large. */ public String toXML( String indent ) { StringBuffer result = new StringBuffer(32 + m_executable.length()); result.append( "<command-line executable=\"" ); result.append( quote(m_executable,true) ); if ( m_value == null ) { // no content result.append( "\"/>" ); } else { // yes, content result.append( "\">" ); result.append( quote(getValue(),false) ); result.append( "</command-line>" ); } return result.toString(); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":command-line" : "command-line"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " executable=\"", m_executable ); if ( m_value != null ) { // yes, content stream.write( '>' ); stream.write( quote(getValue(),false) ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } else { // no content stream.write( "/>" ); } if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Task.java0000644000175000017500000000225711757531137025216 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; /** * The task element. * * @author Karan Vahi * @version $Revision: 2587 $ */ public class Task extends MachineInfo { /** * The element name */ public static final String ELEMENT_NAME = "task"; /** * The default constructor */ public Task(){ super(); } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return ELEMENT_NAME; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/gen0000644000175000017500000000102111757531137024131 0ustar ryngerynge#!/usr/bin/env perl require 5.005; use strict; use warnings; $|=1; my ($name,$type,$var,$line); do { print STDERR "method type varname: "; chomp($line=<STDIN>); last if $line eq ''; ($name,$type,$var) = split(/\s+/,$line); print << "EOF"; /** * Accessor * * \@see #set$name($type) */ public $type get$name() { return this.m_$var; } /** * Accessor. * * \@param $var * \@see #get$name() */ public void set$name( $type $var ) { this.m_$var = $var; } EOF } while ( 1 ); pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Stamp.java0000644000175000017500000000516311757531137025377 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; /** * The Stamp element. * * @author Karan Vahi * @version $Revision: 2587 $ */ public class Stamp extends MachineInfo implements HasText{ /** * The text value */ private StringBuffer mValue; /** * The element name */ public static final String ELEMENT_NAME = "stamp"; /** * The default constructor */ public Stamp(){ super(); mValue = null; } /** * Constructs a piece of data. * * @param value is the data to remember. The string may be empty, * but it must not be null. * @exception NullPointerException if the argument was null. */ public Stamp( String value ) { this(); if ( value == null ) { throw new NullPointerException( "the value to the tag constructor must not be null" ); } else { mValue = new StringBuffer( value ); } } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return ELEMENT_NAME; } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue(String fragment) { if (fragment != null) { if (this.mValue == null) { this.mValue = new StringBuffer(fragment); } else { this.mValue.append(fragment); } } } /** * Accessor * * @see #setValue(String) */ public String getValue() { return (mValue == null ?
null : mValue.toString()); } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue(String value) { this.mValue = (value == null ? null : new StringBuffer(value)); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/JobStatusFailure.java0000644000175000017500000001345111757531137027540 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is transient for XML parsing. The data value will be * incorporated into the job status classes. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see Job * @see JobStatus */ public class JobStatusFailure extends JobStatus implements HasText { /** * This is the errno value with the reason that the invocation * of the application failed. */ private int m_errno; /** * This is the data contained between the tags. A null * value is not valid. */ private String m_value; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public JobStatusFailure() { m_errno = 0; m_value = null; } /** * Constructs an error number without reason text. * @param errno is the error number */ public JobStatusFailure( int errno ) { m_errno = errno; m_value = null; } /** * Constructs a piece of data. * @param errno is the error number . * @param value is the textual error reason. */ public JobStatusFailure( int errno, String value ) { m_errno = errno; m_value = value; } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_value == null ) this.m_value = new String(fragment); else this.m_value += fragment; } } /** * Accessor * * @see #setError(int) */ public int getError() { return this.m_errno; } /** * Accessor. * * @param errno * @see #getError() */ public void setError( int errno ) { this.m_errno = errno; } /** * Accessor * * @see #setValue(String) */ public String getValue() { return this.m_value; } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ) { this.m_value = value; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dumps the state of the current element as XML output. This function * can return the necessary data more efficiently, thus overwriting * the inherited method. 
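* <p>(Editor's example of the serialized form, using hypothetical values:
* errno 2 with a reason text.)
* <pre>
*   &lt;failure error="2"&gt;No such file or directory&lt;/failure&gt;
* </pre>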
* * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * * @return a String which contains the state of the current class and * its siblings using XML. Note that these strings might become large. */ public String toXML( String indent ) { StringBuffer result = new StringBuffer(32); // good for no content result.append( "<failure error=\"" ); result.append( Integer.toString(m_errno) ); if ( m_value == null ) { // no content result.append( "\"/>" ); } else { // yes, content result.append( "\">" ); result.append( quote(m_value,false) ); result.append( "</failure>" ); } return result.toString(); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":failure" : "failure"; // open tag stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " error=\"", Integer.toString(m_errno) ); if ( m_value == null ) { // no content stream.write( "/>" ); } else { // yes, content stream.write( '>' ); stream.write( quote(m_value,false) ); // close tag stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Proc.java0000644000175000017500000000225711757531137025217 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; /** * The proc element. * * @author Karan Vahi * @version $Revision: 2587 $ */ public class Proc extends MachineInfo { /** * The element name */ public static final String ELEMENT_NAME = "proc"; /** * The default constructor */ public Proc(){ super(); } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return ELEMENT_NAME; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Environment.java0000644000175000017500000001335411757531137026620 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html.
This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class maintains the environment settings, as key-value pairs, that * were in effect for the application that was run. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see Job */ public class Environment extends Invocation { /** * Mappings of keys to values */ private Map m_environment; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public Environment() { m_environment = new HashMap(); } /** * Adds an environment entry, effectively a key value pair, to the * current environment settings. * * @param entry is the environment entry to add * @return the old entry including null. * @see #addEntry( String, String ) */ public String addEntry( EnvEntry entry ) { String key = entry.getKey(); if ( key != null ) { String value = entry.getValue(); if ( value == null ) value = new String(); return (String) m_environment.put( key, value ); } else { return null; // evil! } } /** * Adds an environment entry, effectively a key value pair, to the * current environment settings. * * @param key is the identifier for the environment setting. * @param value is the value associated with the key. * @return the old entry including null. * @see #addEntry( EnvEntry ) */ public String addEntry( String key, String value ) { if ( key != null ) { if ( value == null ) value = new String(); return (String) m_environment.put( key, value ); } else { return null; } } /** * Retrieves the value for a given key * * @param key is the identifier in the map to retrieve the key for * @return the value for the given key, which may include null. */ public String get( String key ) { return (String) m_environment.get(key); } /** * Creates a sorted iterator * * @return an iterator over sorted keys */ public Iterator iterator() { Set result = new TreeSet( m_environment.keySet() ); return result.iterator(); } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated.
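* <p>(Editor's sketch of the output shape for a single hypothetical
* entry.)
* <pre>
*   &lt;environment&gt;
*     &lt;env key="PEGASUS_HOME"&gt;/opt/pegasus&lt;/env&gt;
*   &lt;/environment&gt;
* </pre>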
* @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":environment" : "environment"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); if ( m_environment.size() == 0 ) { // no content stream.write( "/>" ); if ( indent != null ) stream.write( newline ); } else { // yes, content String newindent = ( indent == null ) ? null : indent + " "; String envtag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":env" : "env"; stream.write( '>' ); if ( indent != null ) stream.write( newline ); for ( Iterator i=this.iterator(); i.hasNext(); ) { String key = (String) i.next(); String value = this.get(key); if ( newindent != null && newindent.length() > 0 ) stream.write( newindent ); stream.write( '<' ); stream.write( envtag ); writeAttribute( stream, " key=\"", key ); stream.write( '>' ); if ( value != null ) stream.write( quote(value,false) ); stream.write( "</" ); stream.write( envtag ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); } stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Load.java0000644000175000017500000000225611757531137025172 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; /** * The load element. * * @author Karan Vahi * @version $Revision: 2587 $ */ public class Load extends MachineInfo { /** * The element name */ public static final String ELEMENT_NAME = "load"; /** * The default constructor */ public Load(){ super(); } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return ELEMENT_NAME; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/HasText.java0000644000175000017500000000307611757531137025674 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved.
*/ package edu.isi.pegasus.planner.invocation; /** * This interface defines a common base for all elements in an invocation * record that can carry text in their values. It exists primarily for * grouping purposes and for easier access through the character SAX * callback. Due to the fact that SAX may present text in several chunks, * all text-carrying classes must also provide the append function. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public interface HasText { /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue( String fragment ); /** * Accessor * * @see #setValue(String) */ public String getValue(); /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/MachineInfo.java0000644000175000017500000001275111757531137026474 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; import java.io.IOException; import java.io.Writer; import java.util.Map; import java.util.HashMap; import java.util.Iterator; import java.util.List; /** * An abstract class that is used for all the child elements that appear * in the machine element. * * @author Karan Vahi * @version $Revision: 2587 $ */ public abstract class MachineInfo extends Invocation { /** * An internal map that is indexed by attribute keys. */ protected Map<String, String> mAttributeMap; /** * Default constructor. */ public MachineInfo( ){ mAttributeMap = new HashMap<String, String>(); } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public abstract String getElementName(); /** * Adds an attribute. * * @param key the attribute key * @param value the attribute value */ public void addAttribute( String key, String value ){ mAttributeMap.put( key, value ); } /** * Add multiple attributes to the machine info element. * * @param keys List of keys * @param values Corresponding List of values */ public void addAttributes( List keys, List values ){ for ( int i=0; i< keys.size(); ++i ) { String name = (String) keys.get(i); String value = (String) values.get(i); addAttribute( name, value ); } } /** * Returns Iterator for attribute keys. * * @return iterator */ public Iterator getAttributeKeysIterator(){ return mAttributeMap.keySet().iterator(); } /** * Returns attribute value for a key * * @param key * * @return value */ public String get( String key ){ return mAttributeMap.get(key); } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output.
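* <p>(Editor's note: concrete subclasses are populated through the
* attribute map; the attribute key below is hypothetical.)
* <pre>
*   MachineInfo load = new Load();
*   load.addAttribute( "min1", "0.25" );
* </pre>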
*/ public void toString(Writer stream) throws IOException { throw new IOException( "method not implemented, please contact pegasus-support@isi.edu" ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = (namespace != null && namespace.length() > 0) ? namespace + ":" : ""; tag = tag + getElementName() ; // if (this.m_value != null) { // open tag if (indent != null && indent.length() > 0) { stream.write(indent); } stream.write('<'); stream.write(tag); stream.write( " " ); //write out all the attributes for( Iterator it = mAttributeMap.entrySet().iterator(); it.hasNext(); ){ Map.Entry<String, String> entry = ( Map.Entry<String, String> )it.next(); writeAttribute( stream, " " + entry.getKey() + "=\"", quote( entry.getValue(),true) ); //writeAttribute( stream, entry.getKey(), entry.getValue() ); } // dump content if required if( this instanceof HasText ){ stream.write( ">" ); HasText ht = (HasText)this; stream.write( quote(ht.getValue(), false ) ); //close tag stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } else{ stream.write( "/>" ); } if (indent != null) { stream.write(System.getProperty("line.separator", "\r\n")); } } //} } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Ignore.java0000644000175000017500000000671211757531137025537 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is transient for XML parsing. It does not store * anything, just goes through the motions to satisfy the API. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see StatCall */ public class Ignore extends Invocation implements HasText { /** * Default c'tor. */ public Ignore() { // empty } /** * Constructs a piece of data. * @param value is the data to remember. The string may be empty, * but it must not be null. * @exception NullPointerException if the argument was null. */ public Ignore( String value ) { // ignore } /** * Appends a piece of text to the existing text.
* @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue( String fragment ) { // ignore } /** * Accessor * * @see #setValue(String) */ public String getValue() { return ""; } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ) { // ignore } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/MachineSpecific.java0000644000175000017500000001135011757531137027320 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; import java.io.IOException; import java.io.Writer; import java.util.Iterator; import java.util.List; import java.util.LinkedList; /** * This class collects the various OS-specific elements that we are capturing * machine information for. * * @author Jens-S. Vöckler * @version $Revision: 2587 $ */ public class MachineSpecific extends Invocation { /** * This is the tag to group the machine-specific information. Usually, * it is one of "darwin", "sunos", "linux" or "basic". */ private String m_tag; /** * The List of MachineInfo elements associated with the machine. */ private List m_info; /** * Default constructor. */ public MachineSpecific( String tag ) { m_tag = tag; m_info = new LinkedList(); } /** * Accessor * * @see #setTag(String) */ public String getTag() { return this.m_tag; } /** * Accessor. 
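*
* <p>The tag doubles as the XML element name reported by
* {@link #getElementName()}; for illustration, a tag of {@code "linux"}
* makes {@link #toXML} emit a grouping element of roughly this shape
* (children elided):</p>
* <pre>{@code
* <linux>
*   ...
* </linux>
* }</pre>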
* * @param tag * @see #getTag() */ public void setTag( String tag ) { this.m_tag = tag; } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return this.m_tag; } /** * Add a MachineInfo element. * * @param info the machine info element */ public void addMachineInfo( MachineInfo info ) { m_info.add( info ); } /** * Returns an iterator for the machine info objects * * @return Iterator for MachineInfo objects. */ public Iterator getMachineInfoIterator() { return m_info.iterator(); } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString(Writer stream) throws IOException { throw new IOException( "method not implemented, please contact pegasus-support@isi.edu" ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String tag = (namespace != null && namespace.length() > 0) ? namespace + ":" + m_tag : m_tag; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); if ( m_info.isEmpty() ) { stream.write( "/>" ); } else { stream.write( '>' ); if ( indent != null ) stream.write( newline ); // dump content -- MachineInfo elements String newIndent = ( indent == null ) ? null : indent + " "; for ( Iterator it = m_info.iterator(); it.hasNext(); ) { MachineInfo mi = ( MachineInfo ) it.next(); mi.toXML( stream, newIndent, namespace ); } // close tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/JobStatus.java0000644000175000017500000000211011757531137026226 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved.
*/ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This abstract class is the interface for all classes that describe * the job exit, which describes more clearly failure, regular * execution, signal and suspension. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public abstract class JobStatus extends Invocation // implements Cloneable { // empty } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Machine.java0000644000175000017500000001302611757531137025654 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; import java.io.IOException; import java.io.Writer; /** * The Machine element groups a time stamp, the page size, the generic * utsname information, and a machine-specific content collecting element. * * @author Karan Vahi * @author Jens-S. Vöckler * @version $Revision: 2587 $ */ public class Machine extends Invocation { /** * element name */ public static final String ELEMENT_NAME = "machine"; /** * The only attribute to the machine element is required. */ private long m_pagesize; /** * The time when the snapshot was taken. */ private Stamp m_stamp; /** * The uname child element is mandatory. */ private Uname m_uname; /** * This is a grouping element for the remaining machine-specific * items. */ private MachineSpecific m_specific; /** * Default constructor. */ public Machine() { m_pagesize = 0; m_stamp = null; m_uname = null; m_specific = null; } /** * Sets the page size. * * @param size is the remote page size in byte. */ public void setPageSize( long size ) { m_pagesize = size; } /** * Obtains the page size information. * * @return pagesize in byte */ public long getPageSize() { return m_pagesize; } /** * Sets the time stamp when the machine info was obtained. * * @param stamp is the time stamp */ public void setStamp( Stamp stamp ) { m_stamp = stamp; } /** * Obtains the time stamp information when the remote machine element * was recorded. * * @return stamp is a time stamp */ public Stamp getStamp() { return m_stamp; } /** * Sets the utsname generic system information record. * * @param uname is the utsname record */ public void setUname( Uname uname ) { m_uname = uname; } /** * Obtains the utsname generic system information record. * * @return uname is the utsname record */ public Uname getUname() { return m_uname; } /** * Sets the machine-specific grouping element. * * @param m is the machine specific grouping element */ public void setMachineSpecific( MachineSpecific m ) { m_specific = m; } /** * Obtains the machine-specific grouping element. * * @return machine */ public MachineSpecific getMachineSpecific() { return m_specific; } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return ELEMENT_NAME; } /** * Converts the active state into something meant for human consumption. 
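*
* <p>For orientation, the XML rendering produced by {@link #toXML} has
* roughly the following shape; all values are illustrative only, and the
* child elements are written by {@code Stamp}, {@code Uname} and
* {@code MachineSpecific} respectively:</p>
* <pre>{@code
* <machine page-size="4096">
*   <stamp>2012-05-21T10:00:00-07:00</stamp>
*   <uname system="linux" nodename="node1" release="2.6.32"
*          machine="x86_64">...</uname>
*   <linux>...</linux>
* </machine>
* }</pre>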
* The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString(Writer stream) throws IOException { throw new IOException( "method not implemented, please contact pegasus-support@isi.edu" ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newLine = System.getProperty("line.separator", "\r\n"); String tag = (namespace != null && namespace.length() > 0) ? namespace + ":" : ""; tag = tag + getElementName(); if ( indent != null && indent.length() > 0 ) stream.write(indent); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " page-size=\"", Long.toString(m_pagesize) ); stream.write( '>' ); if ( indent != null ) stream.write(newLine); // dump content String newIndent = ( indent == null ) ? null : indent + " "; m_stamp.toXML( stream, newIndent, namespace ); m_uname.toXML( stream, newIndent, namespace ); m_specific.toXML( stream, newIndent, namespace ); // close element stream.write( "</" ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( newLine ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/WorkingDir.java0000644000175000017500000001316011757531137026366 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is transient for XML parsing. The data value will be * incorporated into the job classes. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see StatCall */ public class WorkingDir extends Invocation implements HasText { /** * This is the data contained between the <cwd> tags. A null * value is not valid. */ private StringBuffer m_value; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public WorkingDir() { m_value = null; } /** * Constructs a piece of data. * @param value is the data to remember.
The string may be empty, * but it must not be null. * @exception NullPointerException if the argument was null. */ public WorkingDir( String value ) { if ( value == null ) throw new NullPointerException("the value to the <cwd> tag constructor must not be null" ); else m_value = new StringBuffer(value); } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_value == null ) this.m_value = new StringBuffer(fragment); else this.m_value.append(fragment); } } /** * Accessor * * @see #setValue(String) */ public String getValue() { return ( m_value == null ? null : m_value.toString() ); } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ) { this.m_value = ( value == null ? null : new StringBuffer(value) ); } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dumps the state of the current element as XML output. This function * can return the necessary data more efficiently, thus overriding * the inherited method. * * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * * @return a String which contains the state of the current class and * its siblings using XML. Note that these strings might become large. */ public String toXML( String indent ) { if ( m_value != null ) { String newline = System.getProperty( "line.separator", "\r\n" ); StringBuffer result = new StringBuffer( m_value.length() + 15 + newline.length() ); if ( indent != null && indent.length() > 0 ) result.append( indent ); result.append("<cwd>"); result.append(quote(getValue(),false)); result.append("</cwd>"); if ( indent != null ) result.append(newline); return result.toString(); } else { return new String(); } } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ?
namespace + ":cwd" : "cwd"; if ( this.m_value != null ) { // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); stream.write( '>' ); // dump content stream.write( quote(getValue(),false) ); // close tag stream.write( "' ); if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Regular.java0000644000175000017500000000660711757531137025720 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is the container for a regular file object. A regular * file object contains just a filename. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class Regular extends File implements HasFilename { /** * Name of the file. */ protected String m_filename; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public Regular() { super(); m_filename = null; } /** * Constructs a regular file. * @param filename is the name of the file that stat was invoked */ public Regular( String filename ) { super(); m_filename = filename; } /** * Accessor * * @see #setFilename(String) */ public String getFilename() { return this.m_filename; } /** * Accessor. * * @param filename * @see #getFilename() */ public void setFilename( String filename ) { this.m_filename = filename; } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? 
namespace + ":file" : "file"; if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " name=\"", m_filename ); if ( m_hexbyte != null && m_hexbyte.length() > 0 ) { // yes, content stream.write( '>' ); stream.write( m_hexbyte ); stream.write( "' ); } else { // no content stream.write( "/>" ); } if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Usage.java0000644000175000017500000003104311757531137025353 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.text.*; import java.io.Writer; import java.io.IOException; /** * This class is contains some excerpt from the getrusage call. Due to * Linux not populating a lot of records, the amount of information is * restricted. Adjustments to LP64 architecture may be necessary. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class Usage extends Invocation // implements Cloneable { /** * user time - time spent in user mode, seconds with fraction. */ private double m_utime; /** * system time - time spent in system mode, seconds with fraction. */ private double m_stime; /** * minor page faults - sometimes also recovered pages. */ private int m_minflt; /** * major page faults - incurred subsystem IO, sometimes includes swap. */ private int m_majflt; /** * number of swap operations - unused in Linux unless kernel patched. */ private int m_nswap; /** * number of signals sent to process. */ private int m_nsignals; /** * voluntary context switches. */ private int m_nvcsw; /** * involuntary conext switches. */ private int m_nivcsw; /** * maximum resident set size. */ private int m_maxrss; /** * integral shared memory size. */ private int m_ixrss; /** * integral unshared data size, or private integral resident set size. */ private int m_idrss; /** * integral stoack size. */ private int m_isrss; /** * block input operations. */ private int m_inblock; /** * block output operations. */ private int m_outblock; /** * messages sent. */ private int m_msgsnd; /** * messages received. */ private int m_msgrcv; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public Usage() { m_utime = m_stime = 0.0; m_minflt = m_majflt = m_nswap = m_nsignals = m_nvcsw = m_nivcsw = 0; m_maxrss = m_ixrss = m_idrss = m_isrss = 0; m_inblock = m_outblock = m_msgsnd = m_msgrcv = 0; } /** * Full c'tor: All values are provided. * * @param utime is the time spent in user mode * @param stime is the time spent in system mode * @param minflt are minor page faults and page reclaims * @param majflt are major page faults and s.t. 
swaps * @param nswap are the number of swap operations * @param nsignals are the number of signals sent * @param nvcsw are voluntary context switches * @param nivcsw are involuntary context switches * @param maxrss is the maximum resident set size * @param ixrss is the integral shared memory size * @param idrss is the integral unshared data size * @param isrss is the integral unshared stack size * @param inblock are block input operations * @param outblock are block output operations * @param msgsnd are messages sent * @param msgrcv are messages received */ public Usage( double utime, double stime, int minflt, int majflt, int nswap, int nsignals, int nvcsw, int nivcsw, int maxrss, int ixrss, int idrss, int isrss, int inblock, int outblock, int msgsnd, int msgrcv ) { m_utime = utime; m_stime = stime; m_minflt = minflt; m_majflt = majflt; m_nswap = nswap; m_nsignals = nsignals; m_nvcsw = nvcsw; m_nivcsw = nivcsw; m_maxrss = maxrss; m_ixrss = ixrss; m_idrss = idrss; m_isrss = isrss; m_inblock = inblock; m_outblock = outblock; m_msgsnd = msgsnd; m_msgrcv = msgrcv; } /** * Accessor: Obtains the user time from the object. * * @return the time spent in user mode. * @see #setUserTime(double) */ public double getUserTime() { return this.m_utime; } /** * Accessor: Obtains the system time from the object. * * @return the time spent in system mode. * @see #setSystemTime(double) */ public double getSystemTime() { return this.m_stime; } /** * Accessor: Obtains the minfor page faults. * * @return the number of page reclaims. * @see #setMinorFaults(int) */ public int getMinorFaults() { return this.m_minflt; } /** * Accessor: Obtains the major page faults. * * @return the number of major page faults. * @see #setMajorFaults(int) */ public int getMajorFaults() { return this.m_majflt; } /** * Accessor: Obtains number of swap operations. * * @return the number of swaps. * @see #setSwaps(int) */ public int getSwaps() { return this.m_nswap; } /** * Accessor: Obtains the system signals sent. * * @return the number of signals sent to the process. * @see #setSignals(int) */ public int getSignals() { return this.m_nsignals; } /** * Accessor: Obtains the voluntary context switches. * * @return the number of voluntary context switches. * @see #setVoluntarySwitches(int) */ public int getVoluntarySwitches() { return this.m_nvcsw; } /** * Accessor: Obtains the involuntary context switches. * * @return the number of involuntary context switches. * @see #setInvoluntarySwitches(int) */ public int getInvoluntarySwitches() { return this.m_nivcsw; } /** * Accessor: Sets the user time. * * @param utime is the new user time in seconds with fraction. * @see #getUserTime() */ public void setUserTime( double utime ) { this.m_utime = utime; } /** * Accessor: Sets the system time. * * @param stime is the new user time in seconds with fraction. * @see #getSystemTime() */ public void setSystemTime( double stime ) { this.m_stime = stime; } /** * Accessor: Sets the number of minor faults. * * @param minflt is the new number of minor faults. * @see #getMinorFaults() */ public void setMinorFaults( int minflt ) { this.m_minflt = minflt; } /** * Accessor: Sets the number of major page faults. * * @param majflt is the new number of major page faults. * @see #getMajorFaults() */ public void setMajorFaults( int majflt ) { this.m_majflt = majflt; } /** * Accessor: Sets the number of swap ops. * * @param nswap is the new number of swap operations. 
* @see #getSwaps() */ public void setSwaps( int nswap ) { this.m_nswap = nswap; } /** * Accessor: Sets the number of signalss sent. * * @param nsignals is the new number of signals. * @see #getSignals() */ public void setSignals( int nsignals ) { this.m_nsignals = nsignals; } /** * Accessor: Sets the number of voluntary context switches. * * @param nvcsw is the new number voluntary context switches. * @see #getVoluntarySwitches() */ public void setVoluntarySwitches( int nvcsw ) { this.m_nvcsw = nvcsw; } /** * Accessor: Sets the number of involuntary context switches. * * @param nivcsw is the new number involuntary context switches. * @see #getInvoluntarySwitches() */ public void setInvoluntarySwitches( int nivcsw ) { this.m_nivcsw = nivcsw; } /** * Accessor. * * @see #setMaximumRSS(int) */ public int getMaximumRSS() { return this.m_maxrss; } /** * Accessor. * * @param maxrss * @see #getMaximumRSS() */ public void setMaximumRSS( int maxrss ) { this.m_maxrss = maxrss; } /** * Accessor. * * @see #setSharedRSS(int) */ public int getSharedRSS() { return this.m_ixrss; } /** * Accessor. * * @param ixrss * @see #getSharedRSS() */ public void setSharedRSS( int ixrss ) { this.m_ixrss = ixrss; } /** * Accessor. * * @see #setUnsharedRSS(int) */ public int getUnsharedRSS() { return this.m_idrss; } /** * Accessor. * * @param idrss * @see #getUnsharedRSS() */ public void setUnsharedRSS( int idrss ) { this.m_idrss = idrss; } /** * Accessor. * * @see #setStackRSS(int) */ public int getStackRSS() { return this.m_isrss; } /** * Accessor. * * @param isrss * @see #getStackRSS() */ public void setStackRSS( int isrss ) { this.m_isrss = isrss; } /** * Accessor. * * @see #setInputBlocks(int) */ public int getInputBlocks() { return this.m_inblock; } /** * Accessor. * * @param inblock * @see #getInputBlocks() */ public void setInputBlocks( int inblock ) { this.m_inblock = inblock; } /** * Accessor. * * @see #setOutputBlocks(int) */ public int getOutputBlocks() { return this.m_outblock; } /** * Accessor. * * @param outblock * @see #getOutputBlocks() */ public void setOutputBlocks( int outblock ) { this.m_outblock = outblock; } /** * Accessor. * * @see #setSent(int) */ public int getSent() { return this.m_msgsnd; } /** * Accessor. * * @param msgsnd * @see #getSent() */ public void setSent( int msgsnd ) { this.m_msgsnd = msgsnd; } /** * Accessor. * * @see #setReceived(int) */ public int getReceived() { return this.m_msgrcv; } /** * Accessor. * * @param msgrcv * @see #getReceived() */ public void setReceived( int msgrcv ) { this.m_msgrcv = msgrcv; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vdl-support@griphyn.org" ); } /** * Dump the state of the current element as XML output. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. 
If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { DecimalFormat d = new DecimalFormat("0.000"); // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); if ( namespace != null && namespace.length() > 0 ) { stream.write( namespace ); stream.write( ':' ); } stream.write( "usage" ); writeAttribute( stream, " utime=\"", d.format(m_utime) ); writeAttribute( stream, " stime=\"", d.format(m_stime) ); writeAttribute( stream, " minflt=\"", Integer.toString(m_minflt) ); writeAttribute( stream, " majflt=\"", Integer.toString(m_majflt) ); writeAttribute( stream, " nswap=\"", Integer.toString(m_nswap) ); writeAttribute( stream, " nsignals=\"", Integer.toString(m_nsignals) ); writeAttribute( stream, " nvcsw=\"", Integer.toString(m_nvcsw) ); writeAttribute( stream, " nivcsw=\"", Integer.toString(m_nivcsw) ); writeAttribute( stream, " maxrss=\"", Integer.toString(m_maxrss) ); writeAttribute( stream, " ixrss=\"", Integer.toString(m_ixrss) ); writeAttribute( stream, " idrss=\"", Integer.toString(m_idrss) ); writeAttribute( stream, " isrss=\"", Integer.toString(m_isrss) ); writeAttribute( stream, " inblock=\"", Integer.toString(m_inblock) ); writeAttribute( stream, " outblock=\"", Integer.toString(m_outblock) ); writeAttribute( stream, " msgsnd=\"", Integer.toString(m_msgsnd) ); writeAttribute( stream, " msgrcv=\"", Integer.toString(m_msgrcv) ); // done stream.write( "/>" ); if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/SimpleServerThread.java0000644000175000017500000000640511757531137030063 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ package edu.isi.pegasus.planner.invocation; import java.net.*; import java.io.*; import java.util.Date; import org.apache.log4j.*; public class SimpleServerThread extends Thread { private String m_remote = null; private Socket m_socket = null; private SimpleServer m_server = null; private static Logger c_logger = null; static int c_count = 0; static int c_cdone = 0; public void log( Level l, String msg ) { c_logger.log( l, m_remote + ": " + msg ); } public SimpleServerThread( SimpleServer me, Socket socket ) { super( "SimpleServerThread#" + ++c_count ); this.m_server = me; this.m_socket = socket; if ( c_logger == null ) { // Singleton-like init c_logger = Logger.getLogger( SimpleServerThread.class ); c_logger.setLevel( Level.DEBUG ); c_logger.addAppender( new ConsoleAppender( new PatternLayout("%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%t] %m%n") ) ); c_logger.setAdditivity( false ); } InetSocketAddress remote = (InetSocketAddress) m_socket.getRemoteSocketAddress(); this.m_remote = remote.getAddress().getHostAddress() + ":" + remote.getPort(); } public void run() { String line = null; log( Level.INFO, "starting" ); try { PrintWriter out = new PrintWriter( m_socket.getOutputStream(), true ); BufferedReader in = new BufferedReader( new InputStreamReader( m_socket.getInputStream())); while ( (line = in.readLine()) != null ) { if ( c_logger.isDebugEnabled() ) log( Level.DEBUG, "received >>" + line + "<<" ); if ( line.startsWith("PARSE") ) { // request to parse a given file String[] request = line.split("[ \t]",3); if ( request.length != 3 ) { out.println( "400 Illegal request format" ); continue; } if ( ! request[2].equals("ECP/1.0") ) { out.println( "501 Unrecognized version" ); continue; } int result = m_server.checkFile( request[1] ); out.println( "300 Result code " + result ); } else if ( line.equals("QUIT") ) { // done out.println( "200 Good-bye" ); break; } else if ( line.equals("SHUTDOWN") ) { out.println( "200 Shutting down server, good-bye" ); SimpleServer.setTerminate( true ); m_server.m_server.close(); // close server socket break; } else { // illegal request out.println( "500 Illegal request" ); break; } } out.close(); in.close(); m_socket.close(); synchronized ( m_server ) { ++c_cdone; m_server.notifyAll(); } log( Level.INFO, "finished [" + c_count + ":" + c_cdone + "]" ); } catch (IOException e) { log( Level.WARN, "I/O error: " + e.getMessage() ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Uname.java0000644000175000017500000001105711757531137025357 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; /** * The uname element. 
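*
* <p>Rendered through the inherited {@link MachineInfo#toXML}, it looks
* roughly like the following (values are illustrative only):</p>
* <pre>{@code
* <uname system="linux" nodename="node1" release="2.6.32"
*        machine="x86_64">additional uname output</uname>
* }</pre>
* <p>The {@link #unameToArchitecture} adapter copies exactly these
* attributes, plus the text value, into an {@code Architecture} object.</p>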
* * @author Karan Vahi * @version $Revision: 2587 $ */ public class Uname extends MachineInfo implements HasText{ /** * The element name */ public static final String ELEMENT_NAME = "uname"; /** * The system attribute for the uname */ public static final String SYSTEM_ATTRIBUTE_KEY = "system"; /** * The nodename attribute for the uname */ public static final String NODENAME_ATTRIBUTE_KEY = "nodename"; /** * The release attribute for the uname */ public static final String RELEASE_ATTRIBUTE_KEY = "release"; /** * The machine attribute for the uname */ public static final String MACHINE_ATTRIBUTE_KEY = "machine"; /** * The archmode attribute for the uname */ public static final String ARCHMODE_ATTRIBUTE_KEY = "archmode"; /** * The undefined value for archmode attribute. */ public static final String UNDEFINED_ARCHMODE_VALUE = "UNDEFINED"; /** * An adapter method to convert the uname object to the architecture object. * * @param uname the object to be converted * * @return the converted Architecture object */ public static Architecture unameToArchitecture( Uname uname ){ Architecture arch = new Architecture(); arch.setMachine( uname.get( Uname.MACHINE_ATTRIBUTE_KEY ) ); arch.setNodeName( uname.get( Uname.NODENAME_ATTRIBUTE_KEY ) ); arch.setRelease( uname.get( Uname.RELEASE_ATTRIBUTE_KEY ) ); arch.setSystemName( uname.get( Uname.SYSTEM_ATTRIBUTE_KEY ) ); arch.setArchMode( uname.get( Uname.ARCHMODE_ATTRIBUTE_KEY) == null ? Uname.UNDEFINED_ARCHMODE_VALUE: uname.get( Uname.ARCHMODE_ATTRIBUTE_KEY ) ); arch.setValue( uname.getValue() ); return arch; } /** * The text value */ private StringBuffer mValue; /** * The default constructor */ public Uname(){ super(); mValue = null; } /** * Constructs a piece of data. * * @param value is the data to remember. The string may be empty, * but it must not be null. * @exception NullPointerException if the argument was null. */ public Uname( String value ) { this(); if ( value == null ) { throw new NullPointerException( "the value to the <uname> tag constructor must not be null" ); } else { mValue = new StringBuffer( value ); } } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return ELEMENT_NAME; } /** * Appends a piece of text to the existing text. * * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue(String fragment) { if (fragment != null) { if (this.mValue == null) { this.mValue = new StringBuffer(fragment); } else { this.mValue.append(fragment); } } } /** * Accessor * * @see #setValue(String) */ public String getValue() { return (mValue == null ? null : mValue.toString()); } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue(String value) { this.mValue = (value == null ? null : new StringBuffer(value)); } /** * An adapter method to convert the uname object to the architecture object. * * * @return the converted Architecture object */ public Architecture toArchitecture() { return Uname.unameToArchitecture( this ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/StatInfo.java0000644000175000017500000002103311757531137026034 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification.
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import edu.isi.pegasus.common.util.Currently; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is the container for the results of a call to either * stat() or fstat(). Not all stat information is kept. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class StatInfo extends Invocation // implements Cloneable { /** * Is the number for the file mode. This is originally an octal string. */ private int m_mode; /** * Denotes the size of the file. Files can grow rather large. */ private long m_size; /** * We store the inode number, which let's us reference a file uniquely * per filesystem. */ private long m_inode; /** * Stores the number of hard links to the file. */ private long m_nlink; /** * Stores the blocksize of the file. */ private long m_blksize; /** * Stores the number of blocks of the file. */ private long m_blocks; /** * Contains the last access time timestamp. */ private Date m_atime; /** * Contains the creation time timestamp. */ private Date m_ctime; /** * Contains the last modification time timestamp; */ private Date m_mtime; /** * user id of the owner of the file. */ private int m_uid; /** * symbolical user name of the effective user. */ private String m_user; /** * group id of the owner of the file. */ private int m_gid; /** * symbolical group name of the effective user. */ private String m_group; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public StatInfo() { m_uid = m_gid = -1; m_atime = m_ctime = m_mtime = new Date(); } /** * Accessor * * @see #setMode(int) */ public int getMode() { return this.m_mode; } /** * Accessor. * * @param mode * @see #getMode() */ public void setMode( int mode ) { this.m_mode = mode; } /** * Accessor * * @see #setSize(long) */ public long getSize() { return this.m_size; } /** * Accessor. * * @param size * @see #getSize() */ public void setSize( long size ) { this.m_size = size; } /** * Accessor * * @see #setINode(long) */ public long getINode() { return this.m_inode; } /** * Accessor. * * @param inode * @see #getINode() */ public void setINode( long inode ) { this.m_inode = inode; } /** * Accessor * * @see #setLinkCount(long) */ public long getLinkCount() { return this.m_nlink; } /** * Accessor. * * @param nlink * @see #getLinkCount() */ public void setLinkCount( long nlink ) { this.m_nlink = nlink; } /** * Accessor * * @see #setBlockSize(long) */ public long getBlockSize() { return this.m_blksize; } /** * Accessor. * * @param blksize * @see #getBlockSize() */ public void setBlockSize( long blksize ) { this.m_blksize = blksize; } /** * Accessor * * @see #setBlocks(long) */ public long getBlocks() { return this.m_blocks; } /** * Accessor. * * @param blocks * @see #getBlocks() */ public void setBlocks( long blocks ) { this.m_blocks = blocks; } /** * Accessor * * @see #setAccessTime(Date) */ public Date getAccessTime() { return this.m_atime; } /** * Accessor. 
* * @param atime * @see #getAccessTime() */ public void setAccessTime( Date atime ) { this.m_atime = atime; } /** * Accessor * * @see #setCreationTime(Date) */ public Date getCreationTime() { return this.m_ctime; } /** * Accessor. * * @param ctime * @see #getCreationTime() */ public void setCreationTime( Date ctime ) { this.m_ctime = ctime; } /** * Accessor * * @see #setModificationTime(Date) */ public Date getModificationTime() { return this.m_mtime; } /** * Accessor. * * @param mtime * @see #getModificationTime() */ public void setModificationTime( Date mtime ) { this.m_mtime = mtime; } /** * Accessor * * @see #setUID(int) */ public int getUID() { return this.m_uid; } /** * Accessor. * * @param uid * @see #getUID() */ public void setUID( int uid ) { this.m_uid = uid; } /** * Accessor * * @see #setUser(String) */ public String getUser() { return this.m_user; } /** * Accessor. * * @param user * @see #getUser() */ public void setUser( String user ) { this.m_user = user; } /** * Accessor * * @see #setGID(int) */ public int getGID() { return this.m_gid; } /** * Accessor. * * @param gid * @see #getGID() */ public void setGID( int gid ) { this.m_gid = gid; } /** * Accessor * * @see #setGroup(String) */ public String getGroup() { return this.m_group; } /** * Accessor. * * @param group * @see #getGroup() */ public void setGroup( String group ) { this.m_group = group; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. 
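*
* <p>Example of the element this produces (values illustrative only;
* note that the mode attribute is rendered in octal with a leading
* zero):</p>
* <pre>{@code
* <statinfo mode="0100644" size="2048" inode="1234567" nlink="1"
*           blksize="4096" blocks="8" mtime="2012-05-21T10:00:00-07:00"
*           atime="2012-05-21T10:00:00-07:00"
*           ctime="2012-05-21T10:00:00-07:00"
*           uid="500" user="rynge" gid="500" group="rynge"/>
* }</pre>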
*/ public void toXML( Writer stream, String indent, String namespace ) throws IOException { if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); if ( namespace != null && namespace.length() > 0 ) { stream.write( namespace ); stream.write( ':' ); } stream.write( "statinfo" ); writeAttribute( stream, " mode=\"0", Integer.toOctalString(m_mode) ); writeAttribute( stream, " size=\"", Long.toString(m_size) ); writeAttribute( stream, " inode=\"", Long.toString(m_inode) ); writeAttribute( stream, " nlink=\"", Long.toString(m_nlink) ); writeAttribute( stream, " blksize=\"", Long.toString(m_blksize) ); writeAttribute( stream, " blocks=\"", Long.toString(m_blocks) ); writeAttribute( stream, " mtime=\"", Currently.iso8601(false,true,false,m_mtime) ); writeAttribute( stream, " atime=\"", Currently.iso8601(false,true,false,m_atime) ); writeAttribute( stream, " ctime=\"", Currently.iso8601(false,true,false,m_ctime) ); writeAttribute( stream, " uid=\"", Integer.toString(m_uid) ); if ( this.m_user != null && this.m_user.length() > 0 ) writeAttribute( stream, " user=\"", this.m_user ); writeAttribute( stream, " gid=\"", Integer.toString(m_gid) ); if ( this.m_group != null && this.m_group.length() > 0 ) writeAttribute( stream, " group=\"", this.m_group ); // done stream.write( "/>" ); if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/ArgString.java0000644000175000017500000001261111757531137026207 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class maintains the application that was run, and the * arguments to the commandline that were actually passed on to * the application. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see Job */ public class ArgString extends Arguments implements HasText { /** * This is the data contained between the tags. There may be * no data. */ private StringBuffer m_value; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public ArgString() { super(); m_value = null; } /** * Constructs an applications without arguments. * @param executable is the name of the application. */ public ArgString( String executable ) { super(executable); m_value = null; } /** * Constructs an applications with arguments. * @param executable is the name of the application. * @param value represents the argument line passed. */ public ArgString( String executable, String value ) { super(executable); m_value = new StringBuffer(value); } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. 
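*
* <p>A short usage sketch (hypothetical values), mirroring how a SAX
* parser delivers one argument line in several chunks:</p>
* <pre>{@code
* ArgString args = new ArgString( "/bin/echo" );
* args.appendValue( "hello " );
* args.appendValue( "world" );   // getValue() now yields "hello world"
* }</pre>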
*/ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_value == null ) this.m_value = new StringBuffer(fragment); else this.m_value.append( fragment ); } } /** * Accessor * * @see #setValue(String) */ public String getValue() { return ( m_value == null ? null : m_value.toString() ); } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ) { this.m_value = ( value == null ? null : new StringBuffer(value) ); } /** * Dumps the state of the current element as XML output. This function * can return the necessary data more efficiently, thus overriding * the inherited method. * * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * * @return a String which contains the state of the current class and * its siblings using XML. Note that these strings might become large. */ public String toXML( String indent ) { StringBuffer result = new StringBuffer(64); result.append( "<arguments" ); if ( m_executable != null ) { result.append( " executable=\"" ); result.append( quote(m_executable,true) ); result.append( '"' ); } if ( m_value == null ) { // no content result.append( "/>" ); } else { // yes, content result.append( '>' ); result.append( quote(getValue(),false) ); result.append( "</arguments>" ); } return result.toString(); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":arguments" : "arguments"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); if ( m_executable != null ) writeAttribute( stream, " executable=\"", m_executable ); if ( m_value != null ) { // yes, content stream.write( '>' ); stream.write( quote(getValue(),false) ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } else { // no content stream.write( "/>" ); } if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/InvocationRecord.java0000644000175000017500000006010611757531137027561 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California.
All rights reserved. */ package edu.isi.pegasus.planner.invocation; import edu.isi.pegasus.common.util.Currently; import java.util.*; import java.net.InetAddress; import java.io.Writer; import java.io.IOException; /** * This class is the container for an invocation record. The record * itself contains information about the job or jobs that ran, the total * usage, and information about central files that were used. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class InvocationRecord extends Invocation // implements Cloneable { /** * The "official" namespace URI of the invocation record schema. */ public static final String SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/invocation"; /** * The "not-so-official" location URL of the invocation record definition. */ public static final String SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/iv-2.1.xsd"; /** * protocol version information. */ private String m_version; /** * start of gridlaunch timestamp. */ private Date m_start; /** * total duration of call. */ private double m_duration; /** * Name of the: Transformation that produced this invocation. */ private String m_transformation; /** * Name of the Derivation that produced this invocation. */ private String m_derivation; /** * Records the physical memory on the remote machine, if available. */ private long m_pmem = -1; /** * process id of gridlaunch itself. */ private int m_pid; /** * host address where gridlaunch ran (primary interface). */ private InetAddress m_hostaddr; /** * Symbolic hostname where gridlaunch ran (primary interface). */ private String m_hostname; /** * Symbolic name of primary interface we used to determine host-name and -address. */ private String m_interface; /** * numerical user id of the effective user. */ private int m_uid; /** * symbolical user name of the effective user. */ private String m_user; /** * numerical group id of the effective user. */ private int m_gid; /** * symbolical group name of the effective user. */ private String m_group; /** * Working directory at startup. */ private WorkingDir m_cwd; /** * Architectural information. */ private Architecture m_uname; /** * Total resource consumption by gridlaunch and all siblings. */ private Usage m_usage; /** * Job records: prejob, main job, postjob */ private List m_job; /** * Array with stat() and fstat() information about various files. */ private List m_stat; /** * Resource, site or pool at which the jobs was run. */ private String m_resource; /** * Workflow label, currently optional? */ private String m_wf_label; /** * Workflow timestamp to make the label more unique. */ private Date m_wf_stamp; /** * Environment settings. */ private Environment m_environment; /** * Currently active umask while kickstart was executing. This * is available with new kickstart, older version will have -1 * at this API point. */ private int m_umask = -1; /** * The Machine object capturing machine information. */ private Machine m_machine; /** * Accessor * * @see #setUMask(int) */ public int getUMask() { return this.m_umask; } /** * Accessor. * * @param umask * @see #getUMask() */ public void setUMask( int umask ) { this.m_umask = umask; } /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public InvocationRecord() { m_stat = new ArrayList(5); m_job = new ArrayList(3); m_environment = null; } /** * Accessor * * @see #setVersion(String) */ public String getVersion() { return this.m_version; } /** * Accessor. 
* * @param version * @see #getVersion() */ public void setVersion( String version ) { this.m_version = version; } /** * Accessor * * @see #setStart(Date) */ public Date getStart() { return this.m_start; } /** * Accessor. * * @param start * @see #getStart() */ public void setStart( Date start ) { this.m_start = start; } /** * Accessor * * @see #setDuration(double) */ public double getDuration() { return this.m_duration; } /** * Accessor. * * @param duration * @see #getDuration() */ public void setDuration( double duration ) { this.m_duration = duration; } /** * Accessor. * * @param machine * @see #getMachine() */ public void setMachine( Machine machine ) { this.m_machine = machine; } /** * Accessor. * * @return machine * @see #setMachine(org.griphyn.vdl.invocation.Machine) */ public Machine getMachine( ) { return this.m_machine; } /** * Accessor * * @see #setTransformation(String) */ public String getTransformation() { return this.m_transformation; } /** * Accessor. * * @param transformation * @see #getTransformation() */ public void setTransformation( String transformation ) { this.m_transformation = transformation; } /** * Accessor * * @see #setDerivation(String) */ public String getDerivation() { return this.m_derivation; } /** * Accessor. * * @param derivation * @see #getDerivation() */ public void setDerivation( String derivation ) { this.m_derivation = derivation; } /** * Accessor * * @see #setPID(int) */ public int getPID() { return this.m_pid; } /** * Accessor. * * @param pid * @see #getPID() */ public void setPID( int pid ) { this.m_pid = pid; } /** * Accessor * * @see #setHostAddress(InetAddress) */ public InetAddress getHostAddress() { return this.m_hostaddr; } /** * Accessor. * * @param hostaddr * @see #getHostAddress() */ public void setHostAddress( InetAddress hostaddr ) { this.m_hostaddr = hostaddr; } /** * Accessor * * @see #setHostname(String) */ public String getHostname() { return this.m_hostname; } /** * Accessor. * * @param hostname * @see #getHostname() */ public void setHostname( String hostname ) { this.m_hostname = hostname; } /** * Accessor. * * @see #setInterface(String) */ public String getInterface() { return this.m_interface; } /** * Accessor. * * @param p_interface * @see #getInterface() */ public void setInterface( String p_interface ) { this.m_interface = p_interface; } /** * Accessor * * @see #setUID(int) */ public int getUID() { return this.m_uid; } /** * Accessor. * * @param uid * @see #getUID() */ public void setUID( int uid ) { this.m_uid = uid; } /** * Accessor * * @see #setUser(String) */ public String getUser() { return this.m_user; } /** * Accessor. * * @param user * @see #getUser() */ public void setUser( String user ) { this.m_user = user; } /** * Accessor * * @see #setGID(int) */ public int getGID() { return this.m_gid; } /** * Accessor. * * @param gid * @see #getGID() */ public void setGID( int gid ) { this.m_gid = gid; } /** * Accessor * * @see #setGroup(String) */ public String getGroup() { return this.m_group; } /** * Accessor. * * @param group * @see #getGroup() */ public void setGroup( String group ) { this.m_group = group; } /** * Accessor * * @see #setUsage(Usage) */ public Usage getUsage() { return this.m_usage; } /** * Accessor. * * @param usage * @see #getUsage() */ public void setUsage( Usage usage ) { this.m_usage = usage; } /** * Accessor * * @see #setArchitecture(Architecture) */ public Architecture getArchitecture() { return this.m_uname; } /** * Accessor. 
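*
* <p>Typically populated from the parsed uname element; a sketch with a
* hypothetical {@code record} and {@code uname}:
* {@code record.setArchitecture( uname.toArchitecture() );}</p>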
* * @param uname * @see #getArchitecture() */ public void setArchitecture( Architecture uname ) { this.m_uname = uname; } /** * Accessor * * @see #setResource( String ) */ public String getResource() { return this.m_resource; } /** * Accessor. * * @param resource * @see #getResource() */ public void setResource( String resource ) { this.m_resource = resource; } /** * Accessor * * @see #setWorkflowLabel( String ) */ public String getWorkflowLabel() { return this.m_wf_label; } /** * Accessor. * * @param label * @see #getWorkflowLabel() */ public void setWorkflowLabel( String label ) { this.m_wf_label = label; } /** * Accessor * * @see #setWorkflowTimestamp( Date ) */ public Date getWorkflowTimestamp() { return this.m_wf_stamp; } /** * Accessor. * * @param stamp * @see #getResource() */ public void setWorkflowTimestamp( Date stamp ) { this.m_wf_stamp = stamp; } /** * Accessor * * @see #setEnvironment(Environment) */ public Environment getEnvironment() { return this.m_environment; } /** * Accessor. * * @param environment * @see #getEnvironment() */ public void setEnvironment( Environment environment ) { this.m_environment = environment; } // /** // * Parses an ISO 8601 timestamp? // * // * @param stamp // * @see #getResource() // */ // public void setWorkflowTimestamp( String stamp ) // { this.m_wf_stamp = stamp; } /** * Accessor: Appends a job to the list of jobs. * * @param job is the job to append to the list. */ public void addJob( Job job ) { this.m_job.add(job); } /** * Accessor: Inserts a Job into a specific position of the job list. * * @param index is the position to insert the item into * @param job is the job to insert into the list. */ public void addJob( int index, Job job ) { this.m_job.add( index, job ); } /** * Accessor: Obtains a job at a certain position in the job list. * * @param index is the position in the list to obtain a job from * @return the job at that position. * @throws IndexOutOfBoundsException if the index points to an element * in the list that does not contain any elments. */ public Job getJob( int index ) throws IndexOutOfBoundsException { //-- check bound for index if ((index < 0) || (index >= this.m_job.size())) throw new IndexOutOfBoundsException(); return (Job) this.m_job.get(index); } /** * Accessor: Obtains the size of the job list. * * @return number of elements that an external array needs to be sized to. */ public int getJobCount() { return this.m_job.size(); } /** * Accessor: Gets an array of all values that constitute the current * content. This list is read-only. * * @return a list of jobs. */ public java.util.List getJobList() { return Collections.unmodifiableList(this.m_job); } /** * Accessor: Enumerates the internal values that constitute the content * of the job list. * * @return an iterator to walk the list with. */ public Iterator iterateJob() { return this.m_job.iterator(); } /** * Accessor: Enumerates the internal values that constitute the content * of the job list. * * @return a list iterator to walk the list with. */ public ListIterator listIterateJob() { return this.m_job.listIterator(); } /** * Accessor: Removes all values from the job list. */ public void removeAllJob() { this.m_job.clear(); } /** * Accessor: Removes a specific job from the job list. * @param index is the position at which an element is to be removed. * @return the job that was removed. */ public Job removeJob( int index ) { return (Job) this.m_job.remove(index); } /** * Accessor: Overwrites a job at a certain position. 
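* (A hedged sketch: {@code record.setJob(0, mySetupJob)} would replace
* the first job of a record that already holds at least one job;
* {@code mySetupJob} is a hypothetical, previously built Job.)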
* * @param index position to overwrite an elment in. * @param job is the Job to replace with. * @throws IndexOutOfBoundsException if the position pointed to is invalid. */ public void setJob( int index, Job job ) throws IndexOutOfBoundsException { //-- check bounds for index if ((index < 0) || (index >= this.m_job.size())) { throw new IndexOutOfBoundsException(); } this.m_job.set(index, job); } /** * Accessor: Overwrites internal list with an external list * representing jobs. * * @param jobs is the external list of job to overwrite with. */ public void setJob( Collection jobs ) { this.m_job.clear(); this.m_job.addAll(jobs); } /** * Accessor: Appends a stat to the list of stats. * * @param stat is the stat to append to the list. */ public void addStatCall( StatCall stat ) { this.m_stat.add(stat); } /** * Accessor: Inserts a StatCall into a specific position of the stat list. * * @param index is the position to insert the item into * @param stat is the stat to insert into the list. */ public void addStatCall( int index, StatCall stat ) { this.m_stat.add( index, stat ); } /** * Accessor: Obtains a stat at a certain position in the stat list. * * @param index is the position in the list to obtain a stat from * @return the stat at that position. * @throws IndexOutOfBoundsException if the index points to an element * in the list that does not contain any elments. */ public StatCall getStatCall( int index ) throws IndexOutOfBoundsException { //-- check bound for index if ((index < 0) || (index >= this.m_stat.size())) throw new IndexOutOfBoundsException(); return (StatCall) this.m_stat.get(index); } /** * Accessor: Obtains the size of the stat list. * * @return number of elements that an external array needs to be sized to. */ public int getStatCount() { return this.m_stat.size(); } /** * Accessor: Gets an array of all values that constitute the current * content. This list is read-only. * * @return a list of stats. */ public java.util.List getStatList() { return Collections.unmodifiableList(this.m_stat); } /** * Accessor: Enumerates the internal values that constitute the content * of the stat list. * * @return an iterator to walk the list with. */ public Iterator iterateStatCall() { return this.m_stat.iterator(); } /** * Accessor: Enumerates the internal values that constitute the content * of the stat list. * * @return a list iterator to walk the list with. */ public ListIterator listIterateStatCall() { return this.m_stat.listIterator(); } /** * Accessor: Removes all values from the stat list. */ public void removeAllStatCall() { this.m_stat.clear(); } /** * Accessor: Removes a specific stat from the stat list. * @param index is the position at which an element is to be removed. * @return the stat that was removed. */ public StatCall removeStatCall( int index ) { return (StatCall) this.m_stat.remove(index); } /** * Accessor: Overwrites a stat at a certain position. * * @param index position to overwrite an elment in. * @param stat is the StatCall to replace with. * @throws IndexOutOfBoundsException if the position pointed to is invalid. */ public void setStatCall( int index, StatCall stat ) throws IndexOutOfBoundsException { //-- check bounds for index if ((index < 0) || (index >= this.m_stat.size())) { throw new IndexOutOfBoundsException(); } this.m_stat.set(index, stat); } /** * Accessor: Overwrites internal list with an external list * representing stats. * * @param stats is the external list of stat to overwrite with. 
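* A minimal hedged sketch; {@code record} and {@code someStatCall} are
* hypothetical locals, not part of this API:
* <pre>{@code
* List calls = new ArrayList();
* calls.add(someStatCall);    // a StatCall obtained from the parser
* record.setStatCall(calls);  // discards the old stat list
* }</pre>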
*/ public void setStatCall( Collection stats ) { this.m_stat.clear(); this.m_stat.addAll(stats); } /** * Accessor * * @see #setWorkingDirectory(WorkingDir) * @see #setWorkingDirectory(String) */ public WorkingDir getWorkingDirectory() { return this.m_cwd; } /** * Accessor. * * @param cwd * @see #getWorkingDirectory() * @see #setWorkingDirectory(WorkingDir) */ public void setWorkingDirectory( String cwd ) { this.m_cwd = new WorkingDir(cwd); } /** * Accessor. * * @param cwd * @see #getWorkingDirectory() * @see #setWorkingDirectory(String) */ public void setWorkingDirectory( WorkingDir cwd ) { this.m_cwd = cwd; } /** * Accessor. * * @return the recorded physical memory in byte, or -1 if not available. * @see #setPhysicalMemory( long ) */ public long getPhysicalMemory() { return this.m_pmem; } /** * Accessor. * * @param pmem * @see #getPhysicalMemory() */ public void setPhysicalMemory( long pmem ) { this.m_pmem = pmem; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Writes the header of the XML output. The output contains the special * strings to start an XML document, some comments, and the root element. * The latter points to the XML schema via XML Instances. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. 
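* Callers normally do not invoke this method directly; the
* {@code toXML(Writer,String,String)} method below writes the header
* first. A hedged usage sketch, where {@code record} is a hypothetical,
* fully populated InvocationRecord:
* <pre>{@code
* java.io.Writer w = new java.io.StringWriter();
* record.toXML(w, "", null);   // header, children, closing tag
* System.out.println(w.toString());
* }</pre>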
*/ public void writeXMLHeader( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" );
/* the three string literals below were destroyed by markup stripping; they are reconstructed here, per the adjacent comments, as a plausible XML declaration and two generator comments */
// intro
if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>" ); stream.write( newline );
// when was this document generated
if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "<!-- generated: " + Currently.iso8601(false,true,true,new Date()) + " -->" ); stream.write( newline );
// who generated this document
if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "<!-- generated by: " + System.getProperty( "user.name", "unknown" ) + " -->" ); stream.write( newline );
// root element with elementary attributes
if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); if ( namespace != null && namespace.length() > 0 ) { stream.write( namespace ); stream.write( ':' ); } stream.write( "invocation xmlns" ); if ( namespace != null && namespace.length() > 0 ) { stream.write( ':' ); stream.write( namespace ); } stream.write( "=\""); stream.write( SCHEMA_NAMESPACE ); stream.write( "\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"" ); stream.write( SCHEMA_NAMESPACE ); stream.write( ' ' ); stream.write( SCHEMA_LOCATION ); stream.write( "\""); writeAttribute( stream, " version=\"", this.m_version ); writeAttribute( stream, " start=\"", Currently.iso8601(false,true,true,this.m_start) ); writeAttribute( stream, " duration=\"", Double.toString(this.m_duration) ); if ( this.m_transformation != null && this.m_transformation.length() > 0 ) writeAttribute( stream, " transformation=\"", this.m_transformation ); if ( this.m_derivation != null && this.m_derivation.length() > 0 ) writeAttribute( stream, " derivation=\"", this.m_derivation ); if ( this.m_pmem != -1 ) writeAttribute( stream, " ram=\"", Long.toString(this.m_pmem) ); writeAttribute( stream, " pid=\"", Integer.toString( this.m_pid ) ); if ( this.m_resource != null && this.m_resource.length() > 0 ) writeAttribute( stream, " resource=\"", this.m_resource ); if ( this.m_wf_label != null && this.m_wf_label.length() > 0 ) writeAttribute( stream, " wf-label=\"", this.m_wf_label ); if ( this.m_wf_stamp != null ) writeAttribute( stream, " wf-stamp=\"", Currently.iso8601(false,true,true,this.m_wf_stamp) ); writeAttribute( stream, " hostaddr=\"", this.m_hostaddr.getHostAddress() ); if ( this.m_hostname != null && this.m_hostname.length() > 0 ) writeAttribute( stream, " hostname=\"", this.m_hostname ); writeAttribute( stream, " uid=\"", Integer.toString( this.m_uid ) ); if ( this.m_user != null && this.m_user.length() > 0 ) writeAttribute( stream, " user=\"", this.m_user ); writeAttribute( stream, " gid=\"", Integer.toString( this.m_gid ) ); if ( this.m_group != null && this.m_group.length() > 0 ) writeAttribute( stream, " group=\"", this.m_group ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); }
/** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */
public void toXML( Writer stream, String indent, String namespace ) throws IOException {
// write prefix
writeXMLHeader( stream, indent, namespace );
// part 1: jobs
String newindent = indent==null ? null : indent+" "; for ( Iterator i=this.m_job.iterator(); i.hasNext(); ) { ((Job) i.next()).toXML( stream, newindent, namespace ); }
// part 2: cwd and total usage
m_cwd.toXML( stream, newindent, namespace ); m_usage.toXML( stream, newindent, namespace ); m_uname.toXML( stream, newindent, namespace );
// machine if not null
if ( m_machine != null ) { m_machine.toXML( stream, newindent, namespace ); /* was "indent"; siblings use newindent */ }
// part 3: statcall records
for ( Iterator i=this.m_stat.iterator(); i.hasNext(); ) { ((StatCall) i.next()).toXML( stream, newindent, namespace ); }
// part 4: environment and resources
if ( m_environment != null ) m_environment.toXML( stream, newindent, namespace );
// close tag
if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); if ( namespace != null && namespace.length() > 0 ) { stream.write( namespace ); stream.write( ':' ); } stream.write( "invocation>" ); stream.write( System.getProperty( "line.separator", "\r\n" ) ); stream.flush(); // this is the only time we flush
} } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/EnvEntry.java0000644000175000017500000001102211757531137026054 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException;
/** * This class pushes an environmental entry into the environment map. * This class is expected to be transient to the parsing process only. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */
public class EnvEntry extends Invocation implements HasText {
/** * environment handle */ private String m_key;
/** * environment value */ private StringBuffer m_value;
/** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public EnvEntry() { m_key = null; m_value = null; }
/** * C'tor: Prepares a given key for accepting a value later on. * @param key is the key to prepare */ public EnvEntry( String key ) { m_key = key; m_value = null; }
/** * C'tor: Fully initializes the class * @param key is the key to prepare * @param value is the value to remember */ public EnvEntry( String key, String value ) { m_key = key; m_value = new StringBuffer(value); }
/** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop.
*/ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_value == null ) this.m_value = new StringBuffer(fragment); else this.m_value.append(fragment); } }
/** * Accessor * * @see #setKey(String) */ public String getKey() { return this.m_key; }
/** * Accessor. * * @param m_key * @see #getKey() */ public void setKey( String m_key ) { this.m_key = m_key; }
/** * Accessor * * @see #setValue(String) */ public String getValue() { return ( m_value == null ? null : m_value.toString() ); }
/** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ) { this.m_value = ( value == null ? null : new StringBuffer(value) ); }
/** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { stream.write( m_key ); stream.write( '=' ); stream.write( getValue() ); }
/** * Dumps the state of the current element as XML output. However, for * the given instance, this class is ludicrous. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */
public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":env" : "env";
// open tag
if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " key=\"", quote(getKey(),true) ); stream.write( '>' ); stream.write( quote(getValue(),false) ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/File.java0000644000175000017500000000470311757531137025171 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException;
/** * This class is the base class for a file object. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */
public abstract class File extends Invocation implements HasText {
/** * optional first 16 byte of file, or less if shorter.
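* (Assumed interpretation, from the field name only: the head of the
* file is kept hex-encoded, so a script beginning with "#!/bin/sh"
* would be stored as "23212f62696e2f7368".)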
*/ protected String m_hexbyte; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public File() { m_hexbyte = null; } /** * C'tor: Constructs the value in the base class. * @param value is all or part of the hex bytes. */ public File( String value ) { m_hexbyte = value; } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_hexbyte == null ) this.m_hexbyte = new String(fragment); else this.m_hexbyte += fragment; } } /** * Accessor * * @see #setValue(String) */ public String getValue() { return this.m_hexbyte; } /** * Accessor. * * @param hexbyte * @see #getValue() */ public void setValue( String hexbyte ) { this.m_hexbyte = hexbyte; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Data.java0000644000175000017500000001526411757531137025167 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is transient for XML parsing. The data value will be * incorporated into the StatCall class. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see StatCall */ public class Data extends Invocation implements HasText { /** * This is the data contained between the tags. A null * value is not valid. */ private StringBuffer m_value; /** * Indicates, if the data is only partial. */ private boolean m_truncated; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public Data() { m_value = null; m_truncated = false; } /** * Constructs a piece of data. * @param value is the data to remember. The string may be empty, * but it must not be null. * @exception NullPointerException if the argument was null. */ public Data( String value ) { if ( value == null ) throw new NullPointerException("the value to the tag constructor must not be null" ); else m_value = new StringBuffer(value); } /** * Constructs a piece of data. * @param value is the data to remember. The string may be empty, * but it must not be null. * @param truncated is a flag to indicate that the data is partial. * @exception NullPointerException if the argument was null. 
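* A hedged sketch with a hypothetical literal:
* <pre>{@code
* Data d = new Data("first 16k of captured stdout ...", true);  // truncated blob
* }</pre>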
*/ public Data( String value, boolean truncated ) { if ( value == null ) throw new NullPointerException("the value to the tag constructor must not be null" ); else m_value = new StringBuffer(value); m_truncated = truncated; }
/** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_value == null ) this.m_value = new StringBuffer(fragment); else this.m_value.append(fragment); } }
/** * Accessor * * @see #setTruncated(boolean) */ public boolean getTruncated() { return this.m_truncated; }
/** * Accessor. * * @param truncated * @see #getTruncated() */ public void setTruncated( boolean truncated ) { this.m_truncated = truncated; }
/** * Accessor * * @see #setValue(String) */ public String getValue() { return ( m_value == null ? null : m_value.toString() ); }
/** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ) { this.m_value = ( value == null ? null : new StringBuffer(value) ); }
/** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); }
/** * Dumps the state of the current element as XML output. This function * can return the necessary data more efficiently, thus overwriting * the inherited method. * * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * * @return a String which contains the state of the current class and * its siblings using XML. Note that these strings might become large. */
public String toXML( String indent ) { if ( m_value != null ) { String newline = System.getProperty( "line.separator", "\r\n" ); StringBuffer result = new StringBuffer( m_value.length() + 24 ); if ( indent != null && indent.length() > 0 ) result.append( indent );
/* assumed: the open and close tags below were lost to markup stripping; reconstructed to mirror the Writer-based toXML() that follows */
result.append( "<data truncated=\"" ).append( Boolean.toString(m_truncated) ).append( "\">" ).append( quote(getValue(),false) ).append("</data>");
if ( indent != null ) result.append(newline); return result.toString(); } else { return new String(); } }
/** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */
public void toXML( Writer stream, String indent, String namespace ) throws IOException { if ( this.m_value != null ) { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":data" : "data";
// open tag
if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " truncated=\"", Boolean.toString(m_truncated) ); stream.write( '>' );
// dump content
stream.write( quote(getValue(),false) );
// close tag
stream.write( "</" ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Arguments.java0000644000175000017500000000502511757531137026255 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException;
/** * This class maintains the application that was run, and the * arguments to the commandline that were actually passed on to * the application. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see Job */
public abstract class Arguments extends Invocation {
/** * This is the executable that was run. */ protected String m_executable;
/** * This abstract method is called by higher-level functions to * obtain a single string representation of the arguments. * * @return string representing arguments, or null * if there is no such string. The empty string is also possible. */ public abstract String getValue();
/** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public Arguments() { m_executable = null; }
/** * Constructs an application without arguments. * @param executable is the name of the application. */ public Arguments( String executable ) { m_executable = executable; }
/** * Accessor * * @see #setExecutable(String) */ public String getExecutable() { return this.m_executable; }
/** * Accessor. * * @param executable * @see #getExecutable() */ public void setExecutable( String executable ) { this.m_executable = executable; }
/** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Descriptor.java0000644000175000017500000000673611757531137026434 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException;
/** * This class is the container for a file descriptor object. A file * descriptor object contains just the descriptor number. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */
public class Descriptor extends File implements HasDescriptor {
/** * Descriptor of the file. */ private int m_descriptor;
/** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public Descriptor() { super(); m_descriptor = -1; }
/** * Constructs a file descriptor. * @param descriptor is a valid file descriptor number. */ public Descriptor( int descriptor ) { super(); m_descriptor = descriptor; }
/** * Accessor * * @see #setDescriptor(int) */ public int getDescriptor() { return this.m_descriptor; }
/** * Accessor. * * @param descriptor * @see #getDescriptor() */ public void setDescriptor( int descriptor ) { this.m_descriptor = descriptor; }
/** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */
public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":descriptor" : "descriptor";
// open tag
if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " number=\"", Integer.toString(m_descriptor) ); if ( m_hexbyte != null && m_hexbyte.length() > 0 ) { // yes, content
stream.write( '>' ); stream.write( m_hexbyte ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } else { // no content
stream.write( "/>" ); } if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/CPU.java0000644000175000017500000000515111757531137024737 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; /** * The CPU element. * * @author Karan Vahi * @version $Revision: 2587 $ */ public class CPU extends MachineInfo implements HasText{ /** * The text value */ private StringBuffer mValue; /** * The element name */ public static final String ELEMENT_NAME = "cpu"; /** * The default constructor */ public CPU(){ super(); mValue = null; } /** * Constructs a piece of data. * * @param value is the data to remember. The string may be empty, * but it must not be null. * @exception NullPointerException if the argument was null. */ public CPU( String value ) { this(); if ( value == null ) { throw new NullPointerException( "the value to the tag constructor must not be null" ); } else { mValue = new StringBuffer( value ); } } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return ELEMENT_NAME; } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue(String fragment) { if (fragment != null) { if (this.mValue == null) { this.mValue = new StringBuffer(fragment); } else { this.mValue.append(fragment); } } } /** * Accessor * * @see #setValue(String) */ public String getValue() { return (mValue == null ? null : mValue.toString()); } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue(String value) { this.mValue = (value == null ? null : new StringBuffer(value)); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Swap.java0000644000175000017500000000225711757531137025226 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; /** * The swap element. * * @author Karan Vahi * @version $Revision: 2587 $ */ public class Swap extends MachineInfo { /** * The element name */ public static final String ELEMENT_NAME = "swap"; /** * The default constructor */ public Swap(){ super(); } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return ELEMENT_NAME; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/ArgEntry.java0000644000175000017500000001130611757531137026042 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). 
* * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class carries an argument vector entry for the argument vector. * This calls is expected to be transient to the parsing process only. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class ArgEntry extends Invocation implements HasText { /** * argument vector position */ private int m_position; /** * argument vector value */ private StringBuffer m_value; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public ArgEntry() { m_position = -1; m_value = null; } /** * C'tor: Prepares a given key for accepting a value later on. * @param position is the location to use. */ public ArgEntry( int position ) { m_position = position; m_value = null; } /** * C'tor: Fully initializes the class * @param position is the location to use. * @param value is the value to remember */ public ArgEntry( int position, String value ) { m_position = position; m_value = new StringBuffer(value); } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_value == null ) this.m_value = new StringBuffer(fragment); else this.m_value.append(fragment); } } /** * Accessor * @return the position of this entry in the argument vector. * @see #setPosition(int) */ public int getPosition() { return this.m_position; } /** * Accessor. * * @param position * @see #getPosition() */ public void setPosition( int position ) { this.m_position = position; } /** * Accessor * * @see #setValue(String) */ public String getValue() { return ( m_value == null ? null : m_value.toString() ); } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ) { this.m_value = ( value == null ? null : new StringBuffer(value) ); } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { stream.write( '[' ); stream.write( m_position ); stream.write( "]=" ); stream.write( getValue() ); } /** * Dumps the state of the current element as XML output. However, for * the given instance, this class is ludicrious. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? 
namespace + ":arg" : "arg"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " id=\"", Integer.toString(m_position) ); stream.write( '>' ); stream.write( quote(getValue(),false) ); stream.write( "' ); if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/ArgVector.java0000644000175000017500000001575011757531137026212 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class maintains the application that was run, and the * arguments to the commandline that were actually passed on to * the application. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see Job */ public class ArgVector extends Arguments { /** * This is the (new) alternative explicit argument vector. The reason * for using a map is that I cannot random access an ArrayList. */ private Map m_argv; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public ArgVector() { super(); m_argv = new TreeMap(); } /** * Constructs an applications without arguments. * @param executable is the name of the application. */ public ArgVector( String executable ) { super(executable); m_argv = new TreeMap(); } /** * Returns the full argument vector as one single string. * * @return a single string joining all arguments with a single space. * @see #setValue(int,String) */ public String getValue() { StringBuffer result = new StringBuffer(128); if ( m_argv.size() > 0 ) { boolean flag = false; for ( Iterator i=m_argv.keySet().iterator(); i.hasNext(); ) { Integer key = (Integer) i.next(); String value = (String) m_argv.get(key); if ( value != null ) { if ( flag ) result.append(' '); else flag = true; // FIXME: Use single quotes around value, if it contains ws. // FIXME: escape contained apostrophes and esc characters. result.append( value ); } } } return result.toString(); } /** * Sets the argument vector at the specified location. * @param position is the position at which to set the entry. * @param entry is the argument vector position * Setting null is a noop. */ public void setValue( int position, String entry ) { if ( position >= 0 ) { if ( entry == null ) { m_argv.put( new Integer(position), new String() ); } else { m_argv.put( new Integer(position), entry ); } } } /** * Dumps the state of the current element as XML output. This function * can return the necessary data more efficiently, thus overwriting * the inherited method. * * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. 
* * @return a String which contains the state of the current class and * its siblings using XML. Note that these strings might become large. */ public String toXML( String indent ) { StringBuffer result = new StringBuffer(64); String newline = System.getProperty( "line.separator", "\r\n" ); result.append( "" ); } else { // yes, content String newindent = indent == null ? null : indent + " "; result.append( '>' ); if ( indent != null ) result.append( newline ); for ( Iterator i=m_argv.keySet().iterator(); i.hasNext(); ) { Integer key = (Integer) i.next(); String entry = (String) m_argv.get(key); if ( entry != null ) { if ( newindent != null ) result.append( newindent ); result.append( "" ); result.append( quote(entry,false) ); result.append( "" ); if ( newindent != null ) result.append( newline ); } } result.append( "" ); } return result.toString(); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":argument-vector" : "argument-vector"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); if ( m_executable != null ) writeAttribute( stream, " executable=\"", m_executable ); if ( m_argv.size() > 0 ) { // yes, new content String newindent = indent == null ? null : indent + " "; String newtag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":arg" : "arg"; stream.write( '>' ); if ( indent != null ) stream.write( newline ); for ( Iterator i=m_argv.keySet().iterator(); i.hasNext(); ) { String key = (String) i.next(); String entry = (String) m_argv.get(key); if ( entry != null ) { if ( newindent != null && newindent.length() > 0 ) stream.write(newindent); stream.write( '<' ); stream.write( newtag ); writeAttribute( stream, " nr=\"", key ); stream.write( '>' ); stream.write( quote(entry,false) ); stream.write( "' ); if ( indent != null ) stream.write( newline ); } } if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "' ); } else { // no content stream.write( "/>" ); } if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/SimpleServer.java0000644000175000017500000002537411757531137026741 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.net.*; import java.io.*; import java.sql.SQLException; import java.util.Iterator; import edu.isi.pegasus.planner.parser.InvocationParser; import org.griphyn.vdl.toolkit.*; import org.griphyn.vdl.dbschema.*; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.util.ChimeraProperties; import org.griphyn.vdl.directive.*; import org.apache.log4j.Logger; import org.apache.log4j.Level; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.PatternLayout; public class SimpleServer extends Toolkit { private static final int port = 65533; public static boolean c_terminate = false; public static Logger c_logger = null; private boolean m_emptyFail = true; private boolean m_noDBase; DatabaseSchema m_dbschema; InvocationParser m_parser; ServerSocket m_server; public static void setTerminate( boolean b ) { c_terminate = b; } public static boolean getTerminate() { return c_terminate; } public void showUsage() { // empty for now } public SimpleServer( int port ) throws Exception { super("SimpleServer"); // stand up the connection to the PTC this.m_noDBase = false; ChimeraProperties props = ChimeraProperties.instance(); String ptcSchemaName = props.getPTCSchemaName(); if ( ptcSchemaName == null ) m_noDBase = true; if ( ! m_noDBase ) { // ignore -d option for now - grumbl, why?! Connect connect = new Connect(); this.m_dbschema = connect.connectDatabase(ptcSchemaName); // check for invocation record support if ( ! (m_dbschema instanceof PTC) ) { c_logger.warn( "Your database cannot store invocation records" + ", assuming no-database-mode" ); m_noDBase = true; } } // create one XML parser -- once m_parser = new InvocationParser( props.getPTCSchemaLocation() ); // setup socket this.m_server = null; try { byte[] loopback = { 127, 0, 0, 1 }; this.m_server = new ServerSocket( port, 5, InetAddress.getByAddress(loopback) ); // new ServerSocket( port, 5, InetAddress..getLocalHost() ); // new ServerSocket( port, 5 ); } catch ( UnknownHostException e ) { c_logger.fatal( "Unable to determine own hostname: " + e.getMessage() ); System.exit(1); } catch ( IOException e ) { c_logger.fatal( "Could not listen on port " + port + ": " + e.getMessage() ); System.exit(1); } } /** * Copy the content of the file into memory. The copy operation also * weeds out anything that may have been added by the remote batch * scheduler. For instance, PBS is prone to add headers and footers. * * @param input is the file instance from which to read contents. 
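* A hypothetical input, sketching what the copy keeps and weeds out:
* <pre>
*   Begin PBS Prologue ...                     (weeded out)
*   &lt;?xml version="1.0" ... ?&gt;         (copied)
*   &lt;invocation ...&gt; ... &lt;/invocation&gt;   (copied)
*   End PBS Epilogue ...                       (weeded out)
* </pre>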
* @return the result code from reading the file */
private String extractToMemory( java.io.File input ) throws FriendlyNudge { StringWriter out = null;
// open the files
int p1, p2, state = 0; try { BufferedReader in = new BufferedReader( new FileReader(input) ); out = new StringWriter(4096); String line = null; while ( (line = in.readLine()) != null ) { if ( (state & 1) == 0 ) {
// try to copy the XML line in any case
if ( (p1 = line.indexOf( "<?xml" )) > -1 ) if ( (p2 = line.indexOf( "?>", p1 )) > -1 ) out.write( line, p1, p2+2 );
// start state with the correct root element
if ( (p1 = line.indexOf( "<invocation" )) > -1 ) { if ( p1 > 0 ) line = line.substring( p1 ); ++state; } } if ( (state & 1) == 1 ) { out.write( line ); if ( (p1 = line.indexOf("</invocation>")) > -1 ) ++state; } }
/* assumed: the three search literals above were lost to markup stripping and are reconstructed from the surrounding state machine */
in.close(); out.flush(); out.close(); } catch ( IOException ioe ) { throw new FriendlyNudge( "While copying " + input.getPath() + " into temp. file: " + ioe.getMessage(), 5 ); }
// some sanity checks
if ( state == 0 ) throw new FriendlyNudge( "File " + input.getPath() + " does not contain invocation records, assuming failure", 5 ); if ( (state & 1) == 1 ) throw new FriendlyNudge( "File " + input.getPath() + " contains an incomplete invocation record, assuming failure", 5 );
// done
return out.toString(); }
/** * Determines the exit code of an invocation record. Currently, * we will determine the exit code from the main job only. * * @param ivr is the invocation record to put into the database * @return the status code as exit code to signal failure etc.
* <pre>
*   0   regular exit with exit code 0
*   1   regular exit with exit code > 0
*   2   failure to run program from kickstart
*   3   application had died on signal
*   4   application was suspended (should not happen)
*   5   failure in exit code parsing
*   6   impossible case
* </pre>
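* A hedged sketch of the internal call site (mirroring checkFile below):
* <pre>{@code
* int status = determineExitStatus(invocation);  // 0 on overall success
* }</pre>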
*/ private int determineExitStatus( InvocationRecord ivr ) { boolean seen = false; for ( Iterator i=ivr.iterateJob(); i.hasNext(); ) { Job job = (Job) i.next(); // clean-up jobs don't count in failure modes if ( job.getTag().equals("cleanup") ) continue; // obtains status from job Status status = job.getStatus(); if ( status == null ) return 6; JobStatus js = status.getJobStatus(); if ( js == null ) { // should not happen return 6; } else if ( js instanceof JobStatusRegular ) { // regular exit code - success or failure? int exitcode = ((JobStatusRegular) js).getExitCode(); if ( exitcode != 0 ) return 1; else seen = true; // continue, if exitcode of 0 to implement chaining !!!! } else if ( js instanceof JobStatusFailure ) { // kickstart failure return 2; } else if ( js instanceof JobStatusSignal ) { // died on signal return 3; } else if ( js instanceof JobStatusSuspend ) { // suspended??? return 4; } else { // impossible/unknown case return 6; } } // success, or no [matching] jobs return seen ? 0 : 5; } /** * Reads the contents of the specified file, and returns with the * remote exit code contained in the job chain. * * @param filename is the name of the file with the kickstart record. * @return the exit code derived from the remote exit code. */ public int checkFile( String filename ) { int result = 0; try { // check input file java.io.File check = new java.io.File(filename); // test 1: file exists if ( ! check.exists() ) throw new FriendlyNudge( "file does not exist " + filename + ", assuming failure", 5 ); // test 2: file is readable if ( ! check.canRead() ) throw new FriendlyNudge( "unable to read file " + filename + ", assuming failure", 5 ); // test 3: file has nonzero size if ( check.length() == 0 ) { if ( m_emptyFail ) { throw new FriendlyNudge( "file " + filename + " has zero length" + ", assuming failure", 5 ); } else { throw new FriendlyNudge( "file " + filename + " has zero length" + ", assuming success", 0 ); } } // test 4: extract XML into tmp file String temp = extractToMemory(check); // test 5: try to parse XML -- but there is only one parser InvocationRecord invocation = null; synchronized ( m_parser ) { c_logger.info( "starting to parse invocation" ); invocation = m_parser.parse( new StringReader(temp) ); }; if ( invocation == null ) throw new FriendlyNudge( "invalid XML invocation record in " + filename + ", assuming failure", 5 ); else c_logger.info( "invocation was parsed successfully" ); // insert into database. This trickery works, because we already // checked previously that the dbschema does support invocations. // However, there is only one database connection at a time. if ( ! m_noDBase ) { PTC ptc = (PTC) m_dbschema; synchronized ( ptc ) { // FIXME: (start,host,pid) may not be a sufficient secondary key if ( ptc.getInvocationID( invocation.getStart(), invocation.getHostAddress(), invocation.getPID() ) == -1 ) { c_logger.info( "adding invocation to database" ); // may throw SQLException ptc.saveInvocation( invocation ); } else { c_logger.info( "invocation already exists, skipping!" 
); } } } // determine result code, just look at the main job for now c_logger.info( "determining exit status of main job" ); result = determineExitStatus( invocation ); c_logger.info( "exit status = " + result ); } catch ( FriendlyNudge fn ) { c_logger.warn( fn.getMessage() ); result = fn.getResult(); } catch ( Exception e ) { c_logger.warn( e.getMessage() ); result = 5; } // done return result; } public static void main( String args[] ) throws IOException { // setup logging System.setProperty( "log4j.defaultInitOverride", "true" ); Logger root = Logger.getRootLogger(); root.addAppender( new ConsoleAppender( new PatternLayout("%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%c{1}] %m%n") ) ); root.setLevel( Level.INFO ); c_logger = Logger.getLogger( SimpleServer.class ); c_logger.info( "starting" ); SimpleServer me = null; try { me = new SimpleServer(port); } catch ( Exception e ) { c_logger.fatal( "Unable to instantiate a server: " + e.getMessage() ); System.exit(1); } // run forever try { while ( ! c_terminate ) { new SimpleServerThread( me, me.m_server.accept() ).start(); } } catch ( SocketException se ) { // ignore -- closing the server socket in a thread during shutdown // will have accept fail with a socket exception in main() } // done c_logger.info( "received shutdown" ); // count your threads, and the last one locks the // door and switches off the light... Grrr. synchronized ( me ) { while ( SimpleServerThread.c_count > SimpleServerThread.c_cdone ) { try { me.wait(5000); } catch ( InterruptedException e ) { // ignore } } } try { me.m_dbschema.close(); } catch ( Exception e ) { c_logger.warn( "During database disconnect: " + e.getMessage() ); } c_logger.warn( "finished shutdown" ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Temporary.java0000644000175000017500000000777611757531137026311 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is the container for a temporary file object. A temporary * file object contains a filename and a file descriptor. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class Temporary extends File implements HasDescriptor, HasFilename { /** * Name of the file. */ protected String m_filename; /** * Descriptor of the file. */ protected int m_descriptor; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public Temporary() { super(); m_filename = null; m_descriptor = -1; } /** * Constructs a temporary file object. * @param filename is the name of the file that stat was invoked * @param descriptor is a valid file descriptor number. 
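* A hedged sketch with hypothetical values:
* <pre>{@code
* Temporary t = new Temporary("/tmp/ks.stdout.XXXXXX", 3);
* }</pre>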
*/ public Temporary( String filename, int descriptor ) { super(); m_filename = filename; m_descriptor = descriptor; } /** * Accessor * * @see #setFilename(String) */ public String getFilename() { return this.m_filename; } /** * Accessor. * * @param filename * @see #getFilename() */ public void setFilename( String filename ) { this.m_filename = filename; } /** * Accessor * * @see #setDescriptor(int) */ public int getDescriptor() { return this.m_descriptor; } /** * Accessor. * * @param descriptor * @see #getDescriptor() */ public void setDescriptor( int descriptor ) { this.m_descriptor = descriptor; } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":temporary" : "temporary"; if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " name=\"", m_filename ); writeAttribute( stream, " descriptor=\"", Integer.toString(m_descriptor) ); if ( m_hexbyte != null && m_hexbyte.length() > 0 ) { // yes, content stream.write( '>' ); stream.write( m_hexbyte ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } else { // no content stream.write( "/>" ); } if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Invocation.java0000644000175000017500000000223511757531137026421 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import org.griphyn.vdl.Chimera; import java.io.Serializable; /** * This abstract class defines a common base for all invocation record * related Java objects. Since all necessary functionality is described * in {@link Chimera}, this class is empty. It exists for grouping * purposes. * * @author Jens-S.
Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public abstract class Invocation extends Chimera implements Serializable { // empty class, existence just for grouping purposes } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Architecture.java0000644000175000017500000001776711757531137026752 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is transient for XML parsing. The data value will be * incorporated into the job classes. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see StatCall */ public class Architecture extends Invocation implements HasText { /** * This is the data contained between the tags. A null * value is not valid. */ private StringBuffer m_value; /** * Describes the architecture runtime mode. For instance, on a SPARC * can run in ILP32 or LP64 mode, an IA64 may have a * backward-compatible 32bit mode (IA32), etc. */ private String m_archmode; /** * Describes the operating system name. For instance: linux, sunos, ... */ private String m_sysname; /** * Describes the machine's network node hostname. Note that * incorrect host setup may include the domainname into this. */ private String m_nodename; /** * Contains the operating system's version string. */ private String m_release; /** * Contains the machine's hardware description. For instance: i686, * sun4u, ... */ private String m_machine; /** * Contains the optional domain name on the network. Note that incorrect * setup of the host name may contain the domain portion there. */ private String m_domainname; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public Architecture() { m_value = null; m_sysname = m_archmode = m_nodename = m_release = m_machine = m_domainname = null; } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop. */ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_value == null ) this.m_value = new StringBuffer(fragment); else this.m_value.append(fragment); } } /** * Accessor * * @see #setArchMode(String) */ public String getArchMode() { return this.m_archmode; } /** * Accessor. * * @param archmode * @see #getArchMode() */ public void setArchMode( String archmode ) { this.m_archmode = archmode; } /** * Accessor * * @see #setValue(String) */ public String getValue() { return ( m_value == null ? null : m_value.toString() ); } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ) { this.m_value = ( value == null ? null : new StringBuffer(value) ); } /** * Accessor * * @see #setSystemName(String) */ public String getSystemName() { return this.m_sysname; } /** * Accessor. 
* * @param sysname * @see #getSystemName() */ public void setSystemName( String sysname ) { this.m_sysname = sysname; } /** * Accessor * * @see #setNodeName(String) */ public String getNodeName() { return this.m_nodename; } /** * Accessor. * * @param nodename * @see #getNodeName() */ public void setNodeName( String nodename ) { this.m_nodename = nodename; this.normalize(); } /** * Accessor * * @see #setRelease(String) */ public String getRelease() { return this.m_release; } /** * Accessor. * * @param release * @see #getRelease() */ public void setRelease( String release ) { this.m_release = release; } /** * Accessor * * @see #setDomainName(String) */ public String getDomainName() { return this.m_domainname; } /** * Accessor. * * @param domainname * @see #getDomainName() */ public void setDomainName( String domainname ) { this.m_domainname = domainname; } /** * Accessor * * @see #setMachine(String) */ public String getMachine() { return this.m_machine; } /** * Accessor. * * @param machine * @see #getMachine() */ public void setMachine( String machine ) { this.m_machine = machine; } /** * Normalizes a misconfigured nodename that contains a domainname. */ public void normalize() { int pos = this.m_nodename.indexOf('.'); if ( pos != -1 && this.m_domainname == null ) { // normalize domain portion this.m_domainname = this.m_nodename.substring(pos+1); this.m_nodename = this.m_nodename.substring(0,pos); } } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Quotes an input string for XML attributes while converting nulls. * @param s is the attribute string, may be null * @return the XML-quoted string, or an empty-but-not-null string. */ private String myquote( String s ) { if ( s == null ) return new String(); else return quote(s,true); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? 
namespace + ":uname" : "uname"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); stream.write( " sysname=\"" ); stream.write( myquote(m_sysname) ); stream.write( '\"' ); if ( m_archmode != null ) writeAttribute( stream, " archmode=\"", m_archmode ); stream.write( " nodename=\"" ); stream.write( myquote(m_nodename) ); stream.write( "\" release=\"" ); stream.write( myquote(m_release) ); stream.write( "\" machine=\"" ); stream.write( myquote(m_machine) ); stream.write( '\"' ); if ( this.m_domainname != null ) writeAttribute( stream, " domainname=\"", m_domainname ); if ( this.m_value != null ) { stream.write( '>' ); stream.write( quote(getValue(),false) ); stream.write( "' ); } else { // no content stream.write( "/>" ); } if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/RAM.java0000644000175000017500000000225311757531137024727 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.invocation; /** * The RAM element. * * @author Karan Vahi * @version $Revision: 2587 $ */ public class RAM extends MachineInfo { /** * The element name */ public static final String ELEMENT_NAME = "ram"; /** * The default constructor */ public RAM(){ super(); } /** * Returns the name of the xml element corresponding to the object. * * @return name */ public String getElementName() { return ELEMENT_NAME; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/Job.java0000644000175000017500000001466611757531137025035 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import edu.isi.pegasus.common.util.Currently; import java.util.*; import java.text.DecimalFormat; import java.io.Writer; import java.io.IOException; /** * This class is contains the record from each jobs that ran in every * invocation. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class Job extends Invocation // implements Cloneable { /** * This is the tag to produce the job for. Usually, it is one of * "mainjob", "prejob", "postjob", or "cleanup". */ private String m_tag; /** * Start time of this job. */ private Date m_start; /** * Duration of the job. */ private double m_duration; /** * Process id assigned to the job. 
*/ private int m_pid; /** * Resource usage of this job. */ private Usage m_usage; /** * Exit condition of the job. */ private Status m_status; /** * Stat call of the executable. */ private StatCall m_executable; /** * Command-line arguments. */ private Arguments m_arguments; public Job( String tag ) { m_tag = tag; m_usage = null; m_status = null; m_executable = null; m_arguments = null; } /** * Accessor * * @see #setTag(String) */ public String getTag() { return this.m_tag; } /** * Accessor. * * @param tag * @see #getTag() */ public void setTag( String tag ) { this.m_tag = tag; } /** * Accessor * * @see #setStart(Date) */ public Date getStart() { return this.m_start; } /** * Accessor. * * @param start * @see #getStart() */ public void setStart( Date start ) { this.m_start = start; } /** * Accessor * * @see #setDuration(double) */ public double getDuration() { return this.m_duration; } /** * Accessor. * * @param duration * @see #getDuration() */ public void setDuration( double duration ) { this.m_duration = duration; } /** * Accessor * * @see #setPID(int) */ public int getPID() { return this.m_pid; } /** * Accessor. * * @param pid * @see #getPID() */ public void setPID( int pid ) { this.m_pid = pid; } /** * Accessor * * @see #setUsage(Usage) */ public Usage getUsage() { return this.m_usage; } /** * Accessor. * * @param usage * @see #getUsage() */ public void setUsage( Usage usage ) { this.m_usage = usage; } /** * Accessor * * @see #setStatus(Status) */ public Status getStatus() { return this.m_status; } /** * Accessor. * * @param status * @see #getStatus() */ public void setStatus( Status status ) { this.m_status = status; } /** * Accessor * * @see #setExecutable(StatCall) */ public StatCall getExecutable() { return this.m_executable; } /** * Accessor. * * @param executable * @see #getExecutable() */ public void setExecutable( StatCall executable ) { this.m_executable = executable; } /** * Accessor * * @see #setArguments(Arguments) */ public Arguments getArguments() { return this.m_arguments; } /** * Accessor. * * @param arguments * @see #getArguments() */ public void setArguments( Arguments arguments ) { this.m_arguments = arguments; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dumps the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. 
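* As an illustration, the element written for a main job may look like
* this (hypothetical attribute values; the children are produced by the
* usage, status, statcall and arguments siblings):
* <pre>
*   <mainjob start="2012-05-16T10:21:40.899-07:00" duration="1.234" pid="4711">
*     ...
*   </mainjob>
* </pre>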
*/ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); DecimalFormat d = new DecimalFormat("#.###"); String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":" + m_tag : m_tag; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " start=\"", Currently.iso8601(false,true,true,m_start) ); writeAttribute( stream, " duration=\"", d.format(m_duration) ); writeAttribute( stream, " pid=\"", Integer.toString(m_pid) ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); // dump content String newindent = indent==null ? null : indent+"  "; m_usage.toXML( stream, newindent, namespace ); m_status.toXML( stream, newindent, namespace ); m_executable.toXML( stream, newindent, namespace ); m_arguments.toXML( stream, newindent, namespace ); // close tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/JobStatusSignal.java0000644000175000017500000001517111757531137027367 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is transient for XML parsing. The data value will be * incorporated into the job status classes. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see StatCall */ public class JobStatusSignal extends JobStatus implements HasText { /** * This is the signal number that led to the process termination. */ private short m_signo; /** * This flag signals, if a core file was generated. Not all systems * support core files. */ private boolean m_core; /** * This is the signal name or message contained between the tags. */ private String m_value; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public JobStatusSignal() { m_signo = 0; m_core = false; m_value = null; } /** * Constructs an error number without reason text. * @param signo is the error number */ public JobStatusSignal( short signo ) { m_signo = signo; m_core = false; m_value = null; } /** * Constructs a piece of data. * @param signo is the error number. * @param core is the core flag. */ public JobStatusSignal( short signo, boolean core ) { m_signo = signo; m_core = core; m_value = null; } /** * Constructs a piece of data. * @param signo is the error number. * @param core is the core flag. * @param value is the textual error reason. */ public JobStatusSignal( short signo, boolean core, String value ) { m_signo = signo; m_core = core; m_value = value; } /** * Appends a piece of text to the existing text. * @param fragment is a piece of text to append to existing text. * Appending null is a noop.
*/ public void appendValue( String fragment ) { if ( fragment != null ) { if ( this.m_value == null ) this.m_value = new String(fragment); else this.m_value += fragment; } } /** * Accessor * * @see #setSignalNumber(short) */ public short getSignalNumber() { return this.m_signo; } /** * Accessor. * * @param signo * @see #getSignalNumber() */ public void setSignalNumber( short signo ) { this.m_signo = signo; } /** * Accessor * * @see #setCoreFlag(boolean) */ public boolean getCoreFlag() { return this.m_core; } /** * Accessor. * * @param core * @see #getCoreFlag() */ public void setCoreFlag( boolean core ) { this.m_core = core; } /** * Accessor * * @see #setValue(String) */ public String getValue() { return this.m_value; } /** * Accessor. * * @param value is the new value to set. * @see #getValue() */ public void setValue( String value ) { this.m_value = value; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dumps the state of the current element as XML output. This function * can return the necessary data more efficiently, thus overwriting * the inherited method. * * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * * @return a String which contains the state of the current class and * its siblings using XML. Note that these strings might become large. */ public String toXML( String indent ) { StringBuffer result = new StringBuffer(40); // good for no content result.append( "<signalled signal=\"" ).append( Short.toString(m_signo) ); result.append( "\" corefile=\"" ).append( Boolean.toString(m_core) ); if ( m_value == null ) { // no content result.append( "\"/>" ); } else { // yes, content result.append( "\">" ); result.append( quote(m_value,false) ); result.append( "</signalled>" ); } return result.toString(); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ?
namespace + ":signalled" : "signalled"; stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " signal=\"", Short.toString(m_signo) ); writeAttribute( stream, " corefile=\"", Boolean.toString(m_core) ); if ( m_value == null ) { // no content stream.write( "/>" ); } else { // yes, content stream.write( '>' ); stream.write( quote(m_value,false) ); // close tag stream.write( "' ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/invocation/JobStatusRegular.java0000644000175000017500000001045211757531137027550 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.invocation; import java.util.*; import java.io.Writer; import java.io.IOException; /** * This class is transient for XML parsing. The data value will be * incorporated into the job status classes. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * @see StatCall */ public class JobStatusRegular extends JobStatus // implements Cloneable { /** * This is exit code returned by the application. */ private short m_exitcode; /** * Default c'tor: Construct a hollow shell and allow further * information to be added later. */ public JobStatusRegular() { m_exitcode = 0; } /** * Constructs an exit code. * @param exitcode is the exit code of the application. */ public JobStatusRegular( short exitcode ) { m_exitcode = exitcode; } /** * Accessor * * @see #setExitCode(short) */ public short getExitCode() { return this.m_exitcode; } /** * Accessor. * * @param exitcode * @see #getExitCode() */ public void setExitCode( short exitcode ) { this.m_exitcode = exitcode; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { throw new IOException( "method not implemented, please contact vds-support@griphyn.org" ); } /** * Dumps the state of the current element as XML output. This function * can return the necessary data more efficiently, thus overwriting * the inherited method. * * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * * @return a String which contains the state of the current class and * its siblings using XML. Note that these strings might become large. */ public String toXML( String indent ) { StringBuffer result = new StringBuffer(32); // good for no content result.append( "" ); return result.toString(); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently. 
* * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { stream.write( '<' ); if ( namespace != null && namespace.length() > 0 ) { stream.write( namespace ); stream.write( ':' ); } stream.write( "regular" ); writeAttribute( stream, " exitcode=\"", Short.toString(m_exitcode) ); stream.write( "/>" ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/0000755000175000017500000000000011757531667023642 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/Horizontal.java0000644000175000017500000003755011757531137026640 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.partitioner.graph.Bag; import edu.isi.pegasus.planner.partitioner.graph.LabelBag; import java.util.Collections; import java.util.Comparator; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.Set; import java.util.HashSet; /** * Horizontal based partitioning scheme, that allows the user to configure the * number of partitions per transformation name per level. * To set the size of the partition per transformation, the following properties * need to be set *
* <pre>
*       pegasus.partitioner.horizontal.collapse.[txName]
*       pegasus.partitioner.horizontal.bundle.[txName]
* </pre>
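* For example (hypothetical transformation name and values):
* <pre>
*       pegasus.partitioner.horizontal.bundle.preprocess = 2
*       pegasus.partitioner.horizontal.collapse.preprocess = 3
* </pre>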
* * The bundle value designates the number of partitions per transformation per level. * The collapse values designates the number of nodes in a partitioning referring * to a particular transformation. If both are specified, then bundle value takes * precedence. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class Horizontal extends BFS{ /** * A short description about the partitioner. */ public static final String DESCRIPTION = "Configurable Level Based Partitioning"; /** * The default collapse factor for collapsing jobs with same logical name * scheduled onto the same execution pool. */ public static final int DEFAULT_COLLAPSE_FACTOR = 3; /** * A map indexed by the partition ID. Each value is a partition object. */ private Map mPartitionMap; /** * A static instance of GraphNode comparator. */ private GraphNodeComparator mNodeComparator; /** * The global counter that is used to assign ID's to the partitions. */ private int mIDCounter; /** * Singleton access to the job comparator. * * @return the job comparator. */ private Comparator nodeComparator(){ return (mNodeComparator == null)? new GraphNodeComparator(): mNodeComparator; } /** * The overloaded constructor. * * @param root the dummy root node of the graph. * @param graph the map containing all the nodes of the graph keyed by * the logical id of the nodes. * @param properties the properties passed to the planner. */ public Horizontal(GraphNode root, Map graph, PegasusProperties properties) { super(root,graph,properties); mIDCounter = 0; mPartitionMap = new HashMap( 10 ); } /** * Returns a textual description of the partitioner implementation. * * @return a short textual description */ public String description(){ return this.DESCRIPTION; } /** * Given a list of jobs, constructs (one or more) partitions out of it. * Calls out to the partitioner callback, for each of the partitions * constructed. * * @param c the parititoner callback * @param nodes the list of GraphNode objects on a particular level. * @param level the level as determined from the root of the workflow. */ protected void constructPartitions( Callback c, List nodes, int level ){ //group the nodes by their logical names Collections.sort( nodes, nodeComparator() ); //traverse through the list and collapse jobs //referring to same logical transformation GraphNode previous = null; List clusterList = new LinkedList(); GraphNode node = null; for(Iterator it = nodes.iterator();it.hasNext();){ node = (GraphNode)it.next(); if( previous == null || node.getName().equals( previous.getName() ) ){ clusterList.add( node ); } else{ //at boundary collapse jobs constructPartitions( c, clusterList, level, previous.getName() ); clusterList = new LinkedList(); clusterList.add( node ); } previous = node; } //cluster the last clusterList if(previous != null){ constructPartitions( c, clusterList, level, previous.getName() ); } } /** * Given a list of jobs, constructs (one or more) partitions out of it. * Calls out to the partitioner callback, for each of the partitions * constructed. * * @param c the parititoner callback * @param nodes the list of GraphNode objects on a particular level, * referring to the same transformation underneath. * @param level the level as determined from the root of the workflow. 
* @param name the transformation name */ protected void constructPartitions( Callback c, List nodes, int level, String name ){ //figure out number of jobs that go into one partition int[] cFactor = new int[2]; cFactor[0] = 0; cFactor[1] = 0; int size = nodes.size(); cFactor = this.getCollapseFactor( name, size ); StringBuffer message = new StringBuffer(); if( cFactor[0] == 0 && cFactor[1] == 0 ){ message.append( "\t Collapse factor of ").append( cFactor[0] ). append( "," ).append( cFactor[1] ). append( " determined for transformation ").append( name ); mLogger.log( message.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); return; } message.append( "Partitioning jobs of type " ).append( name ).append( " at level "). append( level ).append(" wth collapse factor "). append( cFactor[0] ).append( "," ).append( cFactor[1] ); mLogger.log( message.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); Partition p; if( cFactor[0] >= size ){ //means put all the nodes in one partition //we want to ignore the dummy node partition p = createPartition( nodes ); c.cbPartition( p ); } else{ //do collapsing in chunks of cFactor int increment = 0; int toIndex; for( int i = 0; i < size; i = i + increment ){ //compute the increment and decrement cFactor[1] increment = (cFactor[1] > 0) ? cFactor[0] + 1: cFactor[0]; cFactor[1]--; //determine the toIndex for creating the partition toIndex = ( (i + increment) < size) ? i + increment : size; p = createPartition( nodes.subList( i, toIndex ) ); c.cbPartition( p ); } } } /** * Calls out to the callback with appropriate relations between the partitions * constructed for the levels. This is an empty implementation, as we * do our own book-keeping in this partitioner to determine the relations * between the partitions. * * @param c the parititoner callback * @param parent the parent level * @param child the child level. * * @see #done( Callback ) */ protected void constructLevelRelations( Callback c, int parent, int child ){ } /** * Indicates that we are done with the traversal of the graph. Determines * the relations between the partitions constructed and calls out to the * appropriate callback function * * @param c the partitioner callback */ protected void done( Callback c ){ GraphNode node; GraphNode parent; mLogger.log( "Determining relations between partitions", LogManager.INFO_MESSAGE_LEVEL ); //construct the relations for( Iterator it = mPartitionMap.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry) it.next(); Partition p = (Partition) entry.getValue(); List roots = p.getRootNodes(); Set parentPartitions = new HashSet( roots.size() ); //get the Root nodes for each partition and //for each root, determine the partitions of it's parents for( Iterator rootIt = roots.iterator(); rootIt.hasNext(); ){ node = (GraphNode)rootIt.next(); for( Iterator parentsIt = node.getParents().iterator(); parentsIt.hasNext(); ){ parent = (GraphNode)parentsIt.next(); //the parents partition id is parent for the //partition containing the root parentPartitions.add( parent.getBag().get( LabelBag.PARTITION_KEY ) ); } } //write out all the parents of the partition if(!parentPartitions.isEmpty()){ c.cbParents( p.getID(), new ArrayList( parentPartitions ) ); } } mLogger.log( "Determining relations between partitions - DONE", LogManager.INFO_MESSAGE_LEVEL ); //done with the partitioning c.cbDone(); } /** * Returns the collapse factor, that is used to determine the number of nodes * going in a partition. 
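* As a worked example (hypothetical numbers, following the arithmetic
* in the implementation): for 10 jobs and a bundle value of 3, the
* method returns {3, 1}, which the caller expands into partitions of
* sizes 4, 3 and 3.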
The collapse factor is determined by * getting the collapse and the bundle values specified for the transformations * in the properties file. * * There are two orthogonal notions of bundling and collapsing. In case the * bundle key is specified, it ends up overriding the collapse key, and * the bundle value is used to generate the collapse values. * * If both are not specified or null, then collapseFactor is set to size. * * @param txName the logical transformation name * @param size the number of jobs that refer to the same logical * transformation and are scheduled on the same execution pool. * * @return int array of size 2 where :- * int[0] is the the collapse factor (number of nodes in a partition) * int[1] is the number of parititons for whom collapsing is int[0] + 1. */ protected int[] getCollapseFactor(String txName, int size){ String factor = null; String bundle = null; int result[] = new int[2]; result[1] = 0; //the job should have the collapse key from the TC if //by the user specified try{ //ceiling is (x + y -1)/y bundle = mProps.getHorizontalPartitionerBundleValue(txName); if (bundle != null) { int b = Integer.parseInt(bundle); result[0] = size / b; result[1] = size % b; return result; //doing no boundary condition checks //return (size + b -1)/b; } factor = mProps.getHorizontalPartitionerCollapseValue(txName); //return the appropriate value result[0] = (factor == null) ? size : //then collapse factor is same as size Integer.parseInt(factor); //use the value in the prop file } catch( NumberFormatException e ){ //set bundle to size StringBuffer error = new StringBuffer(); if( factor == null ){ error.append( "Bundle value (" ).append( bundle ).append( ")" ); } else{ error.append( "Collapse value (").append(factor).append( ")" ); } error.append( " for transformation "). append( txName ).append(" is not a number" ); mLogger.log( error.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); result[0] = size; } return result; } /** * Creates a partition out of a list of nodes. Also stores it in the internal * partition map to track partitions later on. Associates the partition ID * with each of the nodes making the partition also. * * @param nodes the list of GraphNodes making the partition. * * @return the partition out of those nodes. */ protected Partition createPartition( List nodes ){ //increment the ID counter before getting the ID this.incrementIDCounter(); String id = getPartitionID( this.idCounter() ); Partition p = new Partition( nodes, id ); p.setIndex( this.idCounter() ); p.constructPartition(); mPartitionMap.put( p.getID(), p ); //associate the ID with all the nodes for( Iterator it = nodes.iterator(); it.hasNext(); ){ GraphNode node = (GraphNode)it.next(); Bag b = new LabelBag(); b.add( LabelBag.PARTITION_KEY, id ); node.setBag( b ); } //log a message StringBuffer message = new StringBuffer(); message.append( "Partition " ).append( p.getID() ).append(" is :"). append( p.getNodeIDs() ); mLogger.log( message.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); return p; } /** * Increments the ID counter by 1. */ private void incrementIDCounter(){ mIDCounter++; } /** * Returns the current value of the ID counter. */ private int idCounter(){ return mIDCounter; } /** * Constructs the id for the partition. * * @param id an integer ID. * * @return the ID for the Partition. 
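* For example, an id of 1 yields the partition ID "ID1".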
*/ private String getPartitionID( int id ){ StringBuffer sb = new StringBuffer(5); sb.append("ID").append( id ); return sb.toString(); } /** * A GraphNode comparator, that allows me to compare nodes according to the * transformation logical names. It is applied to group jobs in a particular partition, * according to the underlying transformation that is referred. * */ private class GraphNodeComparator implements Comparator{ /** * Compares this object with the specified object for order. Returns a * negative integer, zero, or a positive integer if the first argument is * less than, equal to, or greater than the specified object. The * SubInfo are compared by their transformation name. * * This implementation is not consistent with the * SubInfo.equals(Object) method. Hence, should not be used in sorted * Sets or Maps. * * @param o1 is the first object to be compared. * @param o2 is the second object to be compared. * * @return a negative number, zero, or a positive number, if the * object compared against is less than, equals or greater than * this object. * @exception ClassCastException if the specified object's type * prevents it from being compared to this Object. */ public int compare(Object o1, Object o2) { if (o1 instanceof GraphNode && o2 instanceof GraphNode) { return ( ( GraphNode) o1).getName().compareTo( ( ( GraphNode) o2).getName()); } else { throw new ClassCastException("Objects being compared are not GraphNode"); } } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/One2One.java0000644000175000017500000001242111757531137025742 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; /** * This partitioning technique considers each of the job in the dax as a * separate partition. This is used for Euryale style mode of operation in * Pegasus. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class One2One extends Partitioner { /** * A short description about the partitioner. */ public static final String DESCRIPTION = "One to One Partitioning"; /** * The overloaded constructor. * * @param root the dummy root node of the graph. * @param graph the map containing all the nodes of the graph keyed by * the logical id of the nodes. * @param properties the properties passed to the planner. */ public One2One( GraphNode root, Map graph, PegasusProperties properties ) { super( root, graph, properties ); } /** * This ends up writing out a partition for each job in the dax. It is a * one 2 one mapping from the jobs in the dax to the corresponding * partitions in the pdax. The ids of the partitions in pdax is same * as the ids of the corresponding jobs in the dax. 
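* For instance, a dax job with logical id ID000001 (hypothetical) maps
* to a partition whose ID and sole member are both ID000001.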
* * @param c the callback object to callout to while partitioning. */ public void determinePartitions( Callback c ) { //we just traverse the graph via an iterator, as we do not //need to any particular graph traversal for this mode. String key = null; GraphNode node = null; int currentIndex = 0; for( Iterator it = mGraph.keySet().iterator(); it.hasNext(); ){ //the key is the logical id of the node specified in the dax key = (String)it.next(); node = (GraphNode)mGraph.get(key); //we have to ignore the dummy root node. if( node.getID().equals( mRoot.getID() ) ){ //we go to next node mLogger.log( "Ignoring node " + node.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); continue; } currentIndex++; //construct the partition for this node //the partition has just one node with id same as the id //of the corresponding id of the job in the dax List levelList = new ArrayList(1); levelList.add( node ); Partition p = new Partition( levelList, node.getID() ); p.setIndex( currentIndex ); // p.setName(mDAXWriter.getPartitionName()); p.constructPartition(); mLogger.log( "Partition is " + p.getNodeIDs(), LogManager.DEBUG_MESSAGE_LEVEL ); c.cbPartition( p ); } //writing out the relations between the partitions in the file mLogger.log( "Building Relations between partitions ", LogManager.DEBUG_MESSAGE_LEVEL ); for(Iterator it = mGraph.keySet().iterator(); it.hasNext();){ //the key is the logical id of the node specified in the dax key = (String)it.next(); node = (GraphNode)mGraph.get(key); List parents = node.getParents(); //we have to ignore the dummy root node. //and the node with no parents if( node.getID().equals(mRoot.getID()) || parents == null ){ //we go to next node mLogger.log( "Ignoring node " + node.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); continue; } //get the parents of the node and write out to the pdax file. List partitionIDs = new java.util.ArrayList( parents.size() ); for( Iterator it1 = parents.iterator(); it1.hasNext(); ) { //the jobs in the dax have same id as corresponding paritions partitionIDs.add( ( (GraphNode) it1.next()).getID()); } //write out to the pdax file c.cbParents( key, partitionIDs ); partitionIDs = null; } mLogger.log("Building Relations between partitions - DONE", LogManager.DEBUG_MESSAGE_LEVEL); //we are done with the partitioning c.cbDone(); } /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String description(){ return this.DESCRIPTION; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/WriterCallback.java0000644000175000017500000002252511757531137027374 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import java.util.List; import java.util.Iterator; import java.util.List; import java.util.Set; import java.io.File; import java.io.IOException; /** * This callback writes out a DAX file for each of the partitions, * and also writes out a PDAX file that captures the relations * between the partitions. * * @author not attributable * @version $Revision: 2576 $ */ public class WriterCallback implements Callback { /** * The handle to the partition graph writer. */ protected PDAXWriter mPDAXWriter; /** * The handle to the dax writer that writes out the dax corresponding to the * partition identified. The base name of the partition is gotten from it. */ protected DAXWriter mDAXWriter; /** * The path to the PDAX file written out. */ protected String mPDAX; /** * Handle to the properties available. */ protected PegasusProperties mProps; /** * The handle to the logger object. */ protected LogManager mLogger; /** * A boolean indicating that the partitioning has started. This is set, * by the first call to the cbPartition( Partition ) callback. */ protected boolean mPartitioningStarted; /** * The default constructor. * */ public WriterCallback(){ //mLogger = LogManager.getInstance(); } /** * Initializes the Writer Callback. * * @param properties the properties passed to the planner. * @param daxFile the path to the DAX file that is being partitioned. * @param daxName the namelabel of the DAX as set in the root element of the DAX. * @param directory the directory where the partitioned daxes have to reside. */ public void initialize (PegasusProperties properties, String daxFile, String daxName, String directory ){ mProps = properties; mLogger = LogManagerFactory.loadSingletonInstance( properties ); //load the writer for the partitioned daxes mDAXWriter = DAXWriter.loadInstance( properties, daxFile, directory ); mDAXWriter.setPartitionName( daxName ); //name of pdax file is same as the dax file //meaning the name attribute in root element are same. mPDAXWriter = getHandletoPDAXWriter( daxFile, daxName, directory ) ; //write out the XML header for the PDAX file mPDAXWriter.writeHeader(); } /** * Callback for when a partitioner determines that partition has been * constructed. A DAX file is written out for the partition. * * @param p the constructed partition. * * @throws RuntimeException in case of any error while writing out the DAX or * the PDAX files. */ public void cbPartition( Partition p ) { mPartitioningStarted = true; //not sure if we still need it p.setName( mDAXWriter.getPartitionName() ); //for time being do a localize catch //till i change the interface try{ //write out the partition information to the PDAX file mLogger.log( "Writing to the pdax file for partition " + p.getID(), LogManager.DEBUG_MESSAGE_LEVEL); mPDAXWriter.write( p ); mLogger.log( "Writing to the pdax file for partition -DONE" + p.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); //write out the DAX file mDAXWriter.writePartitionDax( p ); } catch( IOException ioe ){ //wrap and throw in Runtime Exception throw new RuntimeException( "Writer Callback for partition " + p.getID(), ioe ); } } /** * Callback for when a partitioner determines the relations between partitions * that it has previously constructed. * * @param child the id of a partition. 
* @param parents the list of String objects that contain * the id's of the parents of the partition. * * * @throws RuntimeException in case of any error while writing out the DAX or * the PDAX files. */ public void cbParents( String child, List parents ) { mPDAXWriter.write( partitionRelation2XML( child, parents ) ); } /** * Callback for the partitioner to signal that it is done with the processing. * This internally closes all the handles to the DAX and PDAX writers. * */ public void cbDone(){ //change internal state to signal //that we are done with partitioning. mPartitioningStarted = false; mPDAXWriter.close(); mDAXWriter.close(); } /** * Returns the name of the pdax file written out. * Will be null if the partitioning has not completed. * * @return path to the pdax file. */ public String getPDAX(){ return this.mPDAX; } /** * Returns the name of the partition, that needs to be set while creating * the Partition object corresponding to each partition. * * @return the name of the partition. */ protected String getPartitionName(){ return mDAXWriter.getPartitionName(); } /** * It returns the handle to the writer for writing out the pdax file * that contains the relations amongst the partitions and the jobs making * up the partitions. * * @param daxFile the path to the DAX file that is being partitioned. * @param name the name/label that is to be assigned to the pdax file. * @param directory the directory where the partitioned daxes have to reside. * * @return handle to the writer of pdax file. */ protected PDAXWriter getHandletoPDAXWriter( String daxFile, String name, String directory ){ String pdaxPath; //get the name of dax file sans the path String daxName = new java.io.File( daxFile ).getName(); //construct the basename of the pdax file pdaxPath = (daxName == null)? "partition": ((daxName.indexOf('.') > 0)? daxName.substring(0,daxName.indexOf('.')): daxName) ; //now the complete path pdaxPath = directory + File.separator + pdaxPath + ".pdax"; //System.out.println("Name is " + nameOfPDAX); mPDAX = pdaxPath; return new PDAXWriter( name, pdaxPath ); } /** * Returns the xml description of a relation between 2 partitions. * * @param childID the ID of the child. * @param parentID the ID of the parent. * * @return the XML description of child parent relation. */ protected String partitionRelation2XML( String childID , String parentID ){ StringBuffer sb = new StringBuffer(); sb.append("\n\t<child ref=\"").append( childID ).append( "\">" ); sb.append("\n\t\t<parent ref=\"").append( parentID ).append( "\"/>" ); sb.append("\n\t</child>"); return sb.toString(); } /** * Returns the xml description of a relation between 2 partitions. * * @param childID the ID of the child * @param parentIDs List of parent IDs. * * @return the XML description of child parent relations. */ protected String partitionRelation2XML( String childID , List parentIDs ){ StringBuffer sb = new StringBuffer(); sb.append("\n\t<child ref=\"").append( childID ).append( "\">" ); for( Iterator it = parentIDs.iterator(); it.hasNext(); ){ sb.append("\n\t\t<parent ref=\"").append( it.next() ).append( "\"/>" ); } sb.append("\n\t</child>"); return sb.toString(); } /** * Returns the xml description of a relation between 2 partitions. * * @param childID the ID of the child * @param parentIDs Set of parent IDs. * * @return the XML description of child parent relations.
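* For example, a child ID3 with parents {ID1, ID2} (hypothetical IDs)
* yields output of the following shape:
* <pre>
*	<child ref="ID3">
*		<parent ref="ID1"/>
*		<parent ref="ID2"/>
*	</child>
* </pre>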
*/ protected String partitionRelation2XML( String childID , Set parentIDs ){ StringBuffer sb = new StringBuffer(); sb.append("\n\t<child ref=\"").append( childID ).append( "\">" ); for ( Iterator it = parentIDs.iterator(); it.hasNext(); ){ sb.append("\n\t\t<parent ref=\"").append( it.next() ).append( "\"/>" ); } sb.append("\n\t</child>"); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/Partitioner.java0000644000175000017500000000714611757531137027005 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.util.Map; /** * The abstract class that lays out the api to do the partitioning of the dax * into smaller daxes. It defines additional functions to get and set the name * of the partitions etc. * * @author Karan Vahi * @version $Revision: 2576 $ */ public abstract class Partitioner { /** * The package name where the implementing classes of this interface reside. */ public static final String PACKAGE_NAME = "org.griphyn.cPlanner.partitioner"; /** * The version number associated with this API of Code Generator. */ public static final String VERSION = "1.2"; /** * The root node of the graph from where to start the BFS. */ protected GraphNode mRoot; /** * The map containing all the graph nodes. The key to the map are the logical * id's of the jobs as identified in the dax and the values are the * corresponding Graph Node objects. */ protected Map mGraph; /** * The handle to the internal logging object. */ protected LogManager mLogger; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The overloaded constructor. * * @param root the dummy root node of the graph. * @param graph the map containing all the nodes of the graph keyed by * the logical id of the nodes. * @param properties the properties passed out to the planner. */ public Partitioner(GraphNode root, Map graph, PegasusProperties properties) { mRoot = root; mGraph = graph; mLogger = LogManagerFactory.loadSingletonInstance( properties ); mProps = properties; //set a default name to the partition dax //mPDAXWriter = null; } /** * The main function that ends up traversing the graph structure corresponding * to the dax and creates the smaller dax files(one dax file per partition) * and the .pdax file that illustrates the partition graph. It is recommended * that the implementing classes use the already initialized handles to the * DAXWriter and PDAXWriter interfaces to write out the xml files. The * advantage of using these preinitialized handles is that they already * are correctly configured for the directories where Pegasus expects the * submit files and dax files to reside. * * * @param c the callback object that the partitioner calls out to.
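* A typical callback sequence, as implemented by the partitioners in
* this package, is one cbPartition( p ) call per constructed partition,
* followed by cbParents( child, parents ) calls for the relations, and
* a final cbDone().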
*/ public abstract void determinePartitions( Callback c ); /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public abstract String description(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/graph/0000755000175000017500000000000011757531667024743 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/graph/MapGraph.java0000644000175000017500000003513211757531137027301 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner.graph; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.classes.Data; import edu.isi.pegasus.common.logging.LogManager; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.HashMap; /** * An implementation of the Graph that is backed by a Map. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2679 $ */ public class MapGraph implements Graph{ /** * The map indexed by the id of the GraphNode, used for storing * the nodes of the Graph. The value for each key is the corresponding * GraphNode of the class. * */ protected Map mStore; /** * The handle to the logging manager. */ private LogManager mLogger; /** * The default constructor. */ public MapGraph(){ mStore = new HashMap(); mLogger = LogManagerFactory.loadSingletonInstance(); } /** * Adds a node to the Graph. It overwrites an already existing node with the * same ID. * * @param node the node to be added to the Graph. */ public void addNode( GraphNode node ){ mStore.put( node.getID(), node ); } /** * Returns the node matching the id passed. * * @param identifier the id of the node. * * @return the node matching the ID else null. */ public GraphNode getNode( String identifier ){ Object obj = get ( identifier ); return ( obj == null ) ? null : (GraphNode)obj; } /** * Adds a single root node to the Graph. All the exisitng roots of the * Graph become children of the root. * * @param root the GraphNode to be added as a root. * * @throws RuntimeException if a node with the same id already exists. */ public void addRoot( GraphNode root ){ //sanity check if( mStore.containsKey( root.getID() ) ){ throw new RuntimeException( "Node with ID already exists:" + root.getID() ); } List existingRoots = getRoots(); root.setChildren( existingRoots ); //for existing root nodes, add a parent as the new Root for( Iterator it = existingRoots.iterator(); it.hasNext(); ){ GraphNode existing = ( GraphNode ) it.next(); existing.addParent( root ); } //add the new root into the graph addNode( root ); } /** * Removes a node from the Graph. * * @param identifier the id of the node to be removed. * * @return boolean indicating whether the node was removed or not. */ public boolean remove( String identifier ){ Object obj = get( identifier ); if ( obj == null ){ //node does not exist only. 
return false; } GraphNode removalNode = ( GraphNode )obj; // the parents of the node now become parents of the children List parents = removalNode.getParents(); List children = removalNode.getChildren(); for( Iterator pIt = parents.iterator(); pIt.hasNext(); ){ GraphNode parent = (GraphNode)pIt.next(); //for the parent the removal node is no longer a child parent.removeChild( removalNode ); //for each child make the parent its parent instead of the removed node for ( Iterator cIt = children.iterator(); cIt.hasNext(); ){ GraphNode child = (GraphNode)cIt.next(); child.removeParent( removalNode ); child.addParent( parent ); parent.addChild( child ); } } //we have the correct linkages now //remove the node from the store. mStore.remove( identifier ); return true; } /** * Returns the root nodes of the Graph. * * @return a list containing GraphNode objects corresponding to the * root nodes. */ public List getRoots(){ List rootNodes = new LinkedList(); for( Iterator it = mStore.entrySet().iterator(); it.hasNext(); ){ GraphNode gn = (GraphNode)( (Map.Entry)it.next()).getValue(); if(gn.getParents() == null || gn.getParents().isEmpty()){ rootNodes.add(gn); } } return rootNodes; //Not Generating a dummy node //add a dummy node that is a root to all these nodes. // String rootId = this.DUMMY_NODE_ID; // mRoot = new GraphNode(rootId,rootId); // mRoot.setChildren(rootNodes); // put(rootId,mRoot); //System.out.println(dummyNode); } /** * Returns the leaf nodes of the Graph. * * @return a list containing GraphNode objects corresponding to the * leaf nodes. */ public List getLeaves(){ List leaves = new LinkedList(); for( Iterator it = mStore.entrySet().iterator(); it.hasNext(); ){ GraphNode gn = (GraphNode)( (Map.Entry)it.next()).getValue(); if( gn.getChildren() == null || gn.getChildren().isEmpty() ){ leaves.add(gn); } } return leaves; } /** * Adds an edge between two already existing nodes in the graph. * * @param parent the parent node ID. * @param child the child node ID. */ public void addEdge( String parent, String child ){ //sanity check if( parent.equals( child )){ throw new IllegalArgumentException( "Invalid Edge Specification. An Edge specified from a node to itself for " + parent ); } GraphNode childNode = (GraphNode)getNode( child ); GraphNode parentNode = (GraphNode)getNode( parent ); String notExist = ( childNode == null )? child : ( parentNode == null ) ? parent : null; if ( notExist != null ) { /* should be replaced by Graph Exception */ throw new RuntimeException( "The node with identifier doesn't exist " + notExist ); } childNode.addParent( parentNode ); parentNode.addChild( childNode ); } /** * A convenience method that allows for bulk addition of edges between * already existing nodes in the graph. * * @param child the child node ID * @param parents list of parent identifiers as String. */ public void addEdges( String child, List parents ){ //sanity check if( parents.contains( child )){ throw new IllegalArgumentException( "Invalid Edge Specification.
Parents " + parents + " include the child " + child ); } GraphNode childNode = (GraphNode)getNode( child ); if( childNode == null ) { /* should be replaced by Graph Exception */ throw new RuntimeException( "The node with identifier doesnt exist " + child ); } String parentId; List parentList = new LinkedList(); //construct the references to the parent nodes for( Iterator it = parents.iterator(); it.hasNext(); ){ parentId = ( String )it.next(); GraphNode parentNode = (GraphNode)get( parentId ); if( parentNode == null ) { /* should be replaced by Graph Exception */ throw new RuntimeException( "The node with identifier doesnt exist " + parentId ); } parentList.add( parentNode ); //add the child to the parent's child list parentNode.addChild( childNode ); } childNode.setParents( parentList ); } /** * Returns the number of nodes in the graph. * * @return the number of nodes */ public int size(){ return mStore.values().size(); } /** * Returns an iterator for the nodes in the Graph. * * @return Iterator */ public Iterator nodeIterator(){ return mStore.values().iterator(); } /** * Returns an iterator that traverses through the graph using a graph * traversal algorithm. At any one time, only one iterator can * iterate through the graph. * * @return Iterator through the nodes of the graph. */ public Iterator iterator(){ return new MapGraphIterator(); } /** * Returns an iterator for the graph that traverses in topological sort * order. * * @return Iterator through the nodes of the graph. */ public Iterator topologicalSortIterator(){ return new TopologicalSortIterator( this ); } /** * The textual representation of the graph node. * * @return textual description. */ public String toString() { String newLine = System.getProperty( "line.separator", "\r\n" ); String indent = "\t"; StringBuffer sb = new StringBuffer( 32 ); GraphNode node; for( Iterator it = nodeIterator(); it.hasNext(); ){ node = ( GraphNode )it.next(); sb.append( newLine ).append( indent ).append( "Job ->" ).append( node.getID() ); //write out the node children sb.append(" Children's {"); for( Iterator it1 = node.getChildren().iterator(); it1.hasNext(); ){ sb.append( ((GraphNode)it1.next()).getID() ).append(','); } sb.append("}"); //write out the node's parents sb.append(" Parents {"); for( Iterator it1 = node.getParents().iterator(); it1.hasNext(); ){ sb.append( ((GraphNode)it1.next()).getID() ).append(','); } sb.append("}"); } return sb.toString(); } /** * Returns a boolean if there are no nodes in the graph. * * @return boolean */ public boolean isEmpty(){ return this.mStore.isEmpty(); } /** * Returns a copy of the object. * * @return clone of the object. */ public Object clone(){ return new java.lang.CloneNotSupportedException( "Clone() not implemented in GraphNode"); } /** * It returns the value associated with the key in the map. * * @param key the key to the entry in the map. */ public Object get( Object key ){ return mStore.get(key); } /** * An inner iterator class that traverses through the Graph. * The traversal of the graph is a modified BFS. A node is added to * the queue only when all it's parents have been added to the queue. */ protected class MapGraphIterator implements Iterator{ /** * The first in first out queue, that manages the set of gray vertices in a * breadth first search. */ private LinkedList mQueue; /** * The current depth of the nodes that are being traversed in the BFS. */ private int mCurrentDepth; /** * A temporary list that stores all the nodes on a particular level. 
*/ private List mLevelList; /** * The default constructor. */ public MapGraphIterator(){ mQueue = new LinkedList(); mLevelList = new LinkedList(); mCurrentDepth = -1; //sanity initialization of the depth of all nodes for( Iterator it = nodeIterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); node.setDepth( mCurrentDepth ); } //initialize the depth of all the root nodes to 0 //and put them in the queue mCurrentDepth = 0; for( Iterator it = getRoots().iterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); node.setDepth( mCurrentDepth ); mQueue.add( node ); } } /** * Returns whether there are more nodes to traverse. * * @return true if there are still nodes in the queue. */ public boolean hasNext(){ return !mQueue.isEmpty(); } /** * Returns the next object in the traversal order. * * @return the next GraphNode in the modified BFS order. */ public Object next(){ GraphNode node = (GraphNode)mQueue.getFirst(); int depth = node.getDepth(); if( mCurrentDepth < depth ){ if( mCurrentDepth > 0 ){ //we are done with one level! //that is when the callback should happen } //a new level starts mCurrentDepth++; mLevelList.clear(); } //mLogger.log( "Adding to level " + mCurrentDepth + " " + node.getID(), // LogManager.DEBUG_MESSAGE_LEVEL); mLevelList.add( node ); node.setColor( GraphNode.BLACK_COLOR ); //add the children to the queue only if all the parents //of the child nodes have been traversed. for( Iterator it = node.getChildren().iterator(); it.hasNext(); ){ GraphNode child = (GraphNode)it.next(); if(!child.isColor( GraphNode.GRAY_COLOR ) && child.parentsColored( GraphNode.BLACK_COLOR )){ //mLogger.log( "Adding to queue " + child.getID(), // LogManager.DEBUG_MESSAGE_LEVEL ); child.setDepth( depth + 1 ); child.setColor( GraphNode.GRAY_COLOR ); mQueue.addLast( child ); } } node = (GraphNode)mQueue.removeFirst(); //mLogger.log( "Removed " + node.getID(), // LogManager.DEBUG_MESSAGE_LEVEL); return node; } /** * Method is not supported. */ public void remove(){ throw new java.lang.UnsupportedOperationException( "Method remove() not supported" ); } }//end of internal iterator class } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/graph/Bag.java0000644000175000017500000000363411757531137026275 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner.graph; /** * An interface to define a BAG of objects. The bag can then be associated * with other data structures, like Graph Nodes. * * @author Karan Vahi * @version $Revision: 2576 $ */ public interface Bag { /** * Returns the object corresponding to the key passed. * * @param key the key corresponding to which the objects need to be returned. * * @return the object that is found corresponding to the key or null. */ public Object get(Object key); /** * Adds an object to the underlying bag corresponding to a particular key. * * @param key the key with which the value has to be associated. * @param value the value to be associated with the key.
* * @return boolean indicating if insertion was successful. */ public boolean add(Object key, Object value); /** * Returns true if the namespace contains a mapping for the specified key. * More formally, returns true if and only if this map contains a mapping * for a key k such that (key==null ? k==null : key.equals(k)). * (There can be at most one such mapping.) * * @param key The key that you want to search for in the bag. * * @return boolean indicating whether a mapping for the key exists. */ public boolean containsKey(Object key); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/graph/Adapter.java0000644000175000017500000000376611757531137027162 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner.graph; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PCRelation; import java.util.Iterator; /** * An Adapter class that converts an ADag to a Graph and * vice versa. * * @author Karan Vahi * @version $Revision: 2590 $ */ public class Adapter { /** * Converts the ADag to a Graph instance. * * @param dag the ADag object. * * @return its representation as a Graph instance. */ public static Graph convert( ADag dag ){ Graph graph = new MapGraph(); //iterate through the list of jobs and populate the nodes in the graph for( Iterator it = dag.vJobSubInfos.iterator(); it.hasNext(); ){ //populate the job as a node in the graph Job job = ( Job )it.next(); GraphNode node = new GraphNode( job.getID(), job ); graph.addNode( node ); } //add the edges between the nodes in the graph for( Iterator it = dag.dagInfo.relations.iterator(); it.hasNext(); ){ PCRelation rel = (PCRelation) it.next(); graph.addEdge( rel.getParent(), rel.getChild() ); } return graph; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/graph/Graph.java0000644000175000017500000000776011757531137026645 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner.graph; import java.util.Iterator; import java.util.List; /** * The interface for the Graph class. It extends the GraphNodeContent interface. * This allows us to associate Graphs with the nodes in a Graph i.e. a graph of * graphs.
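 * <p>
 * A minimal sketch of the intended use, via the MapGraph implementation in
 * this package (the node ids and names are made up):
 * <pre>
 *    Graph g = new MapGraph();
 *    g.addNode( new GraphNode( "a", "preprocess" ) );
 *    g.addNode( new GraphNode( "b", "analyze" ) );
 *    g.addEdge( "a", "b" ); //a is the parent of b
 *    for( Iterator it = g.topologicalSortIterator(); it.hasNext(); ){
 *        GraphNode n = (GraphNode)it.next(); //visits a before b
 *    }
 * </pre>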
* * @author Karan Vahi * @version $Revision: 2576 $ */ public interface Graph extends GraphNodeContent { //allows us to have graphs as nodes of a graph /** * The version number associated with this Graph API. */ public static final String VERSION = "1.3"; /** * Adds a node to the Graph. It overwrites an already existing node with the * same ID. * * @param node the node to be added to the Graph. */ public void addNode( GraphNode node ); /** * Adds an edge between two already existing nodes in the graph. * * @param parent the parent node ID. * @param child the child node ID. */ public void addEdge( String parent, String child ); /** * A convenience method that allows for bulk addition of edges between * already existing nodes in the graph. * * @param child the child node ID * @param parents list of parent identifiers as String. */ public void addEdges( String child, List parents ); /** * Returns the node matching the id passed. * * @param identifier the id of the node. * * @return the node matching the ID else null. */ public GraphNode getNode( String identifier ); /** * Adds a single root node to the Graph. All the existing roots of the * Graph become children of the root. * * @param root the GraphNode to be added as a root. * * @throws RuntimeException if a node with the same id already exists. */ public void addRoot( GraphNode root ); /** * Removes a node from the Graph. * * @param identifier the id of the node to be removed. * * @return boolean indicating whether the node was removed or not. */ public boolean remove( String identifier ); /** * Returns an iterator for the nodes in the Graph. These iterators are * fail-fast and should not be used while the graph is being modified. * * @return Iterator */ public Iterator nodeIterator(); /** * Returns an iterator that traverses through the graph using a graph * traversal algorithm. * * @return Iterator through the nodes of the graph. */ public Iterator iterator(); /** * Returns an iterator for the graph that traverses in topological sort * order. * * @return Iterator through the nodes of the graph. */ public Iterator topologicalSortIterator(); /** * Returns the number of nodes in the graph. * * @return the number of nodes */ public int size(); /** * Returns the root nodes of the Graph. * * @return a list containing GraphNode objects corresponding to the * root nodes. */ public List getRoots(); /** * Returns the leaf nodes of the Graph. * * @return a list containing GraphNode objects corresponding to the * leaf nodes. */ public List getLeaves(); /** * Returns a boolean if there are no nodes in the graph. * * @return boolean */ public boolean isEmpty(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/graph/GraphNodeContent.java0000644000175000017500000000167611757531137031002 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner.graph; /** * This interface defines a common base for all the classes that can reside in * a GraphNode object.
* * @author Karan Vahi vahi@isi.edu * @version $Revision: 2576 $ */ public interface GraphNodeContent { //an empty interface for grouping purposes } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/graph/LabelBag.java0000644000175000017500000000673111757531137027236 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner.graph; /** * A bag implementation that just holds a particular value for the label key. * This bag contains just one value per key, and a null value is associated * by default with the label. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class LabelBag implements Bag { /** * The default key that is associated with the label. */ public static String LABEL_KEY = "label"; /** * The key that designates the partition to which a node belongs. */ public static final String PARTITION_KEY = "partition"; /** * The value for the label. */ private Object mValue; /** * The value for the partition key. */ private Object mPartitionID; /** * Sets the label key that is to be associated with the bag. * * @param key the label key. */ public static void setLabelKey(String key){ LABEL_KEY = key; } /** * The default constructor. */ public LabelBag(){ mValue = null; mPartitionID = null; } /** * Returns the object corresponding to the key passed. * * @param key the key corresponding to which the objects need to be returned. * * @return the object that is found corresponding to the key or null. */ public Object get(Object key){ return (key.equals(this.LABEL_KEY)?mValue: key.equals(this.PARTITION_KEY)? mPartitionID:null); } /** * Adds an object to the underlying bag corresponding to a particular key. * * @param key the key with which the value has to be associated. * @param value the value to be associated with the key. * * @return boolean indicating if insertion was successful. */ public boolean add(Object key, Object value){ boolean result = false; if(key.equals(LABEL_KEY)){ mValue = value; result = true; } else if(key.equals(PARTITION_KEY)){ mPartitionID = value; result = true; } return result; } /** * Returns true if the namespace contains a mapping for the specified key. * More formally, returns true if and only if this map contains a mapping * for a key k such that (key==null ? k==null : key.equals(k)). * (There can be at most one such mapping.) * * @param key The key that you want to search for in the bag. * * @return boolean indicating whether the key is the label or the partition key. */ public boolean containsKey(Object key){ return key.equals(this.LABEL_KEY) || key.equals(this.PARTITION_KEY); } /** * Returns a textual description of the Bag.
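 * <p>
 * For orientation (hypothetical values), a freshly populated bag renders as
 * a brace delimited pair:
 * <pre>
 *    LabelBag bag = new LabelBag();
 *    bag.add( LabelBag.LABEL_KEY, "cluster_1" );
 *    bag.add( LabelBag.PARTITION_KEY, "ID1" );
 *    String text = bag.toString(); //yields {cluster_1,ID1}
 * </pre>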
* * @return String */ public String toString(){ StringBuffer sb = new StringBuffer(32); sb.append('{').append(mValue).append(',').append(mPartitionID).append('}'); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/graph/GraphNode.java0000644000175000017500000002117411757531137027452 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner.graph; import edu.isi.pegasus.planner.classes.Data; import java.util.Iterator; import java.util.List; /** * Data class that allows us to construct information about the nodes * in the abstract graph. Contains for each node the references to its * parents and children. The direction of the edges usually follows the * children from a node. Parents are kept to facilitate bottom up traversals. * * @author Karan Vahi * @version $Revision: 3366 $ */ public class GraphNode extends Data { //the constants for the color of the nodes public static final int WHITE_COLOR = 0; public static final int GRAY_COLOR = 1; public static final int BLACK_COLOR = 2; /** * The logical id of the job as identified in the dax. */ private String mLogicalID; /** * The logical name of the node as identified in the dax. */ private String mLogicalName; /** * The depth of the node from the root or any arbitrary node. */ private int mDepth; /** * The color of the node. */ private int mColor; /** * The list of parents of the job/node in the abstract graph. Each element * of the list is a GraphNode object. */ private List mParents; /** * The list of children of the job/node in the abstract graph. Each element * of the list is a GraphNode object. */ private List mChildren; /** * The content associated with this node. */ private GraphNodeContent mContent; /** * A Bag of objects that may be associated with the node. * * @see Bag */ private Bag mBag; /** * The default constructor. */ public GraphNode() { mLogicalID = ""; mParents = new java.util.LinkedList(); mChildren = new java.util.LinkedList(); mDepth = -1; mLogicalName = ""; mColor = this.WHITE_COLOR; mBag = null; } /** * The overloaded constructor. * * @param id the id of the node in the graph. * @param content the content to be associated with the node. */ public GraphNode( String id, GraphNodeContent content ){ this(); mLogicalID = id; mContent = content; } /** * The overloaded constructor. * * @param id the logical id of the node. * @param name the name of the node. */ public GraphNode(String id, String name) { mLogicalID = id; mParents = new java.util.LinkedList(); mChildren = new java.util.LinkedList(); mDepth = -1; mLogicalName = name; mColor = this.WHITE_COLOR; } /** * Sets the bag of objects associated with the node. Overwrites the previous * bag if existing. * * @param bag the Bag to be associated with the node. */ public void setBag( Bag bag ) { mBag = bag; } /** * Sets the content associated with the node. Overwrites the previous * content if existing.
* * @param content the GraphNodeContent to be associated with the node. */ public void setContent( GraphNodeContent content ) { mContent = content; } /** * It sets the parents of the node. It ends up overwriting all the existing * parents if some already exist. * * @param parents the list of parent GraphNode objects. */ public void setParents( List parents ) { mParents = parents; } /** * It sets the children of the node. It ends up overwriting all the existing * children if some already exist. * * @param children the list of child GraphNode objects. */ public void setChildren( List children ) { mChildren = children; } /** * Sets the depth associated with the node. * * @param depth the depth of the node. */ public void setDepth( int depth ) { mDepth = depth; } /** * Returns the bag of objects associated with the node. * * @return the bag or null if no bag associated */ public Bag getBag(){ return mBag; } /** * Returns the content associated with the node. * * @return the content or null if no content associated */ public GraphNodeContent getContent(){ return mContent; } /** * Returns a list of GraphNode objects that are parents of the node. * * @return list of GraphNode objects. */ public List getParents() { return mParents; } /** * Returns a list of GraphNode objects that are children of the * node. * * @return list of GraphNode objects. */ public List getChildren() { return mChildren; } /** * Adds a child to the end of the child list. * * @param child the child to be added to the node. */ public void addChild( GraphNode child ) { mChildren.add( child ); } /** * Adds a parent to the end of the parent list. * * @param parent the parent to be added to the node. */ public void addParent( GraphNode parent ) { mParents.add( parent ); } /** * Removes a child linkage to the node. * * @param child child to be removed. */ public void removeChild( GraphNode child ){ mChildren.remove( child ); } /** * Removes a parent linkage to the node. * * @param parent parent to be removed. */ public void removeParent( GraphNode parent ){ mParents.remove( parent ); } /** * Returns the logical id of the graph node. * * @return the logical id. */ public String getID() { return mLogicalID; } /** * Returns the logical name of the graph node. * * @return the logical name. */ public String getName() { return mLogicalName; } /** * Returns the depth of the node in the graph. * * @return the depth. */ public int getDepth() { return mDepth; } /** * Returns whether the color of the node matches the one specified. * * @param color color that node should be. */ public boolean isColor( int color ){ return mColor == color; } /** * Sets the color of the node to the color specified. * * @param color color that node should be. */ public void setColor( int color ){ mColor = color; } /** * Returns if all the parents of that node have the color that is specified. * * @param color the color of the node. * * @return true if there are no parents or all parents are of the color. * false in all other cases. */ public boolean parentsColored( int color ) { boolean colored = true; GraphNode par; if (mParents == null) { return colored; } Iterator it = mParents.iterator(); while (it.hasNext() && colored) { par = (GraphNode) it.next(); colored = par.isColor(color); } return colored; } /** * The textual representation of the graph node. * * @return textual description. */ public String toString() { StringBuffer sb = new StringBuffer(); Iterator it; sb.append( "ID->" ).append(mLogicalID).append( " name->" ).
append( mLogicalName ).append( " parents->{" ); if (mParents != null) { it = mParents.iterator(); while (it.hasNext()) { sb.append( ( (GraphNode) it.next()).getID()).append(','); } } sb.append( "} children->{" ); it = mChildren.iterator(); while (it.hasNext()) { sb.append( ( (GraphNode) it.next()).getID()).append(','); } sb.append( "}" ); sb.append( " Content-{" ).append( getContent() ).append( "}" ); sb.append( " Bag-{" ).append( getBag() ).append( "}" ); return sb.toString(); } /** * Returns a copy of the object. Cloning is currently not supported. * * @throws RuntimeException always, as clone() is not implemented. */ public Object clone(){ throw new RuntimeException( "Clone() not implemented in GraphNode"); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/graph/TopologicalSortIterator.java0000644000175000017500000001145311757531137032440 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner.graph; import java.util.List; import java.util.LinkedList; import java.util.Map; import java.util.HashMap; import java.util.Iterator; /** * Does a topological sort on the Graph. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class TopologicalSortIterator implements Iterator { /** * The graph that has to be sorted. */ private Graph mGraph; /** * An array that contains the number of incoming edges to a node. */ private int[] mInDegree; /** * A Map that returns the index into the mInDegree array for a particular node * in the graph. Maps the ID of a node to an int value, which is the index * into the array containing the in degree for each node. * * @see #mInDegree */ private Map mIndexMap; /** * The internal list that holds the nodes to be traversed. */ private List mQueue ; /** * The number of nodes in the graph. */ private int mOrder; /** * The overloaded constructor. * * @param graph the graph that has to be sorted. */ public TopologicalSortIterator( Graph graph ){ mGraph = graph; initialize(); mOrder = mGraph.size(); mQueue = new LinkedList(); //add all the root nodes to the queue first for( Iterator it = this.mGraph.getRoots().iterator(); it.hasNext(); ){ mQueue.add( (GraphNode)it.next() ); } } /** * Initializes the in degree for each node of the graph. */ public void initialize(){ //build up an inDegree map for each node. int order = mGraph.size(); mInDegree = new int[ order ]; mIndexMap = new HashMap( order ); int index = 0; //each of the root nodes has an in degree of 0 for ( Iterator it = mGraph.getRoots().iterator(); it.hasNext(); ){ GraphNode root = (GraphNode)it.next(); mIndexMap.put( root.getID(), new Integer( index ) ); mInDegree[ index++ ] = 0; } //determine the in degree for the other nodes //the in degree for a node is the number of incoming edges/parents of the node for( Iterator it = mGraph.nodeIterator(); it.hasNext(); ){ GraphNode node = (GraphNode)it.next(); if( node.getParents().isEmpty() ){ //node is a root, in degree already assigned
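/* Rough trace of the in degree bookkeeping (hypothetical 4 node DAG):
   for edges A -> B, A -> C, B -> D, C -> D the counts start out as
   A:0, B:1, C:1, D:2. next() repeatedly emits a node whose count has
   dropped to 0 and decrements the count of each of its children, so one
   valid emission order is A, B, C, D (Kahn's algorithm). */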
continue; } mIndexMap.put( node.getID(), new Integer( index ) ); mInDegree[ index++ ] = node.getParents().size(); } //sanity check if( index != order ){ throw new RuntimeException( "Index does not match the order of the graph" ); } } /** * Returns whether there are more nodes to be traversed in the graph or not. * * @return boolean */ public boolean hasNext() { return !mQueue.isEmpty(); } /** * Returns the next node to be traversed. * * @return the next GraphNode in topological sort order. */ public Object next() { GraphNode node = (GraphNode)mQueue.remove( 0 ); String nodeID = node.getID(); //traverse all the children of the node // GraphNode n = null; for( Iterator it = node.getChildren().iterator(); it.hasNext() ;){ GraphNode child = (GraphNode)it.next(); String childID = child.getID(); //remove the edge from node to child by decrementing the inDegree int index = index(childID); mInDegree[ index ] -= 1; if( mInDegree[ index ] == 0 ){ //add the child to the queue mQueue.add( child ); } } return node; } /** * Removes a node from the graph. Operation not supported as yet. */ public void remove() { throw new UnsupportedOperationException("Not supported yet."); } /** * Returns the index of a particular node. The index is used as an index into * arrays. * * @param id the id of the node. * * @return the index */ private int index( String id ){ return ((Integer)mIndexMap.get( id )).intValue(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/PartitionerFactoryException.java0000644000175000017500000000652311757531137032212 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Partitioner implementations. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class PartitionerFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Partitioner"; /** * Constructs a PartitionerFactoryException with the specified detail * message. The associated classname is set to the value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public PartitionerFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a PartitionerFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public PartitionerFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a PartitionerFactoryException with the * specified detailed message and a cause. The associated classname is set * to the value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged.
* @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public PartitionerFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a PartitionerFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public PartitionerFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/PartitionAndPlan.java0000644000175000017500000004643111757531137027714 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.parser.DAXParserFactory; import edu.isi.pegasus.planner.parser.dax.Callback; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import java.io.BufferedReader; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.common.RunDirectoryFilenameFilter; import edu.isi.pegasus.planner.parser.dax.DAXParser2; import edu.isi.pegasus.planner.client.CPlanner; import edu.isi.pegasus.planner.client.PartitionDAX; import edu.isi.pegasus.planner.parser.Parser; import java.util.Map; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.text.NumberFormat; import java.text.DecimalFormat; import java.util.Collection; /** * The class that triggers the partition and plan structure in pegasus. * * * @author Karan Vahi * @version $Revision$ */ public class PartitionAndPlan{ /** * The username of the user running the program. */ private String mUser; /** * The number formatter to format the run submit dir entries. */ private NumberFormat mNumFormatter; /** * The object containing all the options passed to the Concrete Planner. */ private PlannerOptions mPegasusPlanOptions; /** * The handle to Pegasus Properties. */ private PegasusProperties mProps; /** * Handle to the logging manager. */ private LogManager mLogger; /** * Bag of Pegasus objects */ private PegasusBag mBag; /** * The default constructor. */ public PartitionAndPlan() { mNumFormatter = new DecimalFormat( "0000" ); } /** * Initializes the class. 
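 * <p>
 * A minimal calling sketch, assuming a PegasusBag that the planner has
 * already populated (the bag variable and the argument string are made up
 * for illustration):
 * <pre>
 *    PartitionAndPlan pap = new PartitionAndPlan();
 *    pap.initialize( bag );
 *    Collection result = pap.doPartitionAndPlan( "--dax inner.dax --sites local" );
 * </pre>
 *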
* * @param bag the bag of objects required for initialization */ public void initialize( PegasusBag bag ){ mBag = bag; mProps = bag.getPegasusProperties(); mLogger = bag.getLogger(); this.mPegasusPlanOptions = bag.getPlannerOptions(); mUser = mProps.getProperty( "user.name" ) ; if ( mUser == null ){ mUser = "user"; } //hardcoded options for time being. mPegasusPlanOptions.setPartitioningType( "Whole" ); } /** * This function is passed command line arguments. In this function you * generate the valid options and parse the options specified at run time. * * @param arguments the arguments passed at runtime * * @return the Collection of File objects for the files written * out. */ public Collection doPartitionAndPlan( String arguments ){ String [] args = arguments.split( " " ); mLogger.log( "Arguments passed to Partition and Plan are " + arguments, LogManager.DEBUG_MESSAGE_LEVEL ); //convert the args to pegasus-plan options PlannerOptions options = new CPlanner().parseCommandLineArguments( args, false ); String submit = options.getSubmitDirectory(); mLogger.log( "Submit directory in dax specified is " + submit, LogManager.DEBUG_MESSAGE_LEVEL ); if( submit == null || !submit.startsWith( File.separator ) ){ //then set the submit directory relative to the parent workflow basedir String innerBase = mPegasusPlanOptions.getBaseSubmitDirectory(); String innerRelative = mPegasusPlanOptions.getRelativeDirectory(); innerRelative = ( innerRelative == null && mPegasusPlanOptions.partOfDeferredRun() )? mPegasusPlanOptions.getRandomDir(): //the random dir is the relative submit dir? innerRelative; //FIX for JIRA bug 65 to ensure innerRelative is resolved correctly //in case of innerRelative being ./ . We dont want inner relative //to compute to .// Instead we want it to compute to ././ //innerRelative += File.separator + submit ; innerRelative = new File( innerRelative, submit ).getPath(); //options.setSubmitDirectory( mPegasusPlanOptions.getSubmitDirectory(), submit ); options.setSubmitDirectory( innerBase, innerRelative ); mLogger.log( "Base Submit directory for inner workflow set to " + innerBase, LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Relative Submit Directory for inner workflow set to " + innerRelative, LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Submit directory for inner workflow set to " + options.getSubmitDirectory(), LogManager.DEBUG_MESSAGE_LEVEL ); } if( options.getExecutionSites().isEmpty() ){ //for JIRA feature request PM-64 //no sites are specified. use the execution sites for //the parent workflow mLogger.log( "Setting list of execution sites to the same as outer workflow", LogManager.DEBUG_MESSAGE_LEVEL ); options.getExecutionSites().addAll( mPegasusPlanOptions.getExecutionSites() ); } options.setPartitioningType( "Whole" ); //do some sanitization of the path to the dax file. //if it is a relative path, then ??? // options.setSanitizePath( true ); return this.doPartitionAndPlan( mProps, options ); } /** * Partitions and plans the workflow. First step of merging DAGMan and * Condor * * @param properties the properties passed to the planner. * @param options the options passed to the planner. * * @return the Collection of File objects for the files written * out. 
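 *
 * <p>
 * In outline, as implemented below: the DAX is parsed just far enough to
 * recover its label, a run specific submit directory is created, a shallow
 * .pdax file plus a symlink to the DAX are written into its dax subdirectory,
 * and pegasus-plan is then invoked on that .pdax file.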
*/ public Collection doPartitionAndPlan( PegasusProperties properties, PlannerOptions options ) { //we first need to get the label of DAX Callback cb = DAXParserFactory.loadDAXParserCallback( properties, options.getDAX(), "DAX2Metadata" ); try{ // DAXParser2 daxParser = new DAXParser2(options.getDAX(), mBag, cb); Parser p = (Parser)DAXParserFactory.loadDAXParser( mBag, cb , options.getDAX()); p.startParser( options.getDAX() ); } catch( RuntimeException e ){ //check explicity for file not found exception if( e.getCause() != null && e.getCause() instanceof java.io.IOException){ //rethrow throw e; } //ignore only if the parsing is completed mLogger.log( e.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL ); } Map metadata = ( Map ) cb.getConstructedObject(); String label = (String) metadata.get( "name" ); String baseDir = options.getBaseSubmitDirectory(); String relativeDir = null; //construct the submit directory structure try{ relativeDir = (options.getRelativeDirectory() == null) ? //create our own relative dir createSubmitDirectory(label, baseDir, mUser, options.getVOGroup(), properties.useTimestampForDirectoryStructure()) : options.getRelativeDirectory(); } catch( IOException ioe ){ String error = "Unable to write to directory" ; throw new RuntimeException( error + options.getSubmitDirectory() , ioe ); } options.setSubmitDirectory( baseDir, relativeDir ); mLogger.log( "Submit Directory for workflow is " + options.getSubmitDirectory() , LogManager.DEBUG_MESSAGE_LEVEL ); //now let us run partitiondax mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_PARTITION, LoggingKeys.DAX_ID, label ); PartitionDAX partitionDAX = new PartitionDAX(); File dir = new File( options.getSubmitDirectory(), "dax" ); String pdax = null; //bypass partitioning for time being //as the dax is already created boolean partitionWorkflow = false; if( partitionWorkflow ){ pdax = partitionDAX.partitionDAX( properties, options.getDAX(), dir.getAbsolutePath(), options.getPartitioningType() ); } else{ try { //create a shallow pdax file and a symbolic link //to the dax in the dax directory referred to by the dir variable String dax = options.getDAX(); String daxBasename = new File(dax).getName(); WriterCallback callback = new WriterCallback(); //create the dir if it does not exist sanityCheck( dir ); callback.initialize(properties, dax, daxBasename, dir.getAbsolutePath()); pdax = callback.getPDAX(); PDAXWriter writer = callback.getHandletoPDAXWriter(dax, label, dir.getAbsolutePath()); writer.writeHeader(); //create an empty Partition Partition p = new Partition(); String id = "1"; String partitionLabel = "partition_" + label; p.setName(partitionLabel); p.setIndex( Integer.parseInt( id ) ); p.setID( id ); p.constructPartition(); //write out the partition and close writer.write(p); writer.close(); //we have the partition written out //now create a symlink to the DAX file // partition_blackdiamond_1.dax StringBuffer destinationDAX = new StringBuffer(); destinationDAX.append( dir ).append( File.separator ). append( "partition_" ).append( label ). 
append( "_" ).append( id ).append( ".dax" ); if ( !createSymbolicLink( dax , destinationDAX.toString() ) ){ throw new RuntimeException( "Unable to create symbolic link between " + dax + " and " + destinationDAX.toString() ); } } catch ( IOException ioe ) { String error = "Unable to generate the pdax file" ; throw new RuntimeException( error + options.getSubmitDirectory() , ioe ); } } mLogger.log( "PDAX file generated is " + pdax , LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.logEventCompletion(); //now run pegasus-plan with pdax option CPlanner pegasusPlan = new CPlanner(); options.setDAX( null ); options.setPDAX( pdax ); options.setPartitioningType( null ); return pegasusPlan.executeCommand( options ); //we still need to create the condor submit file for submitting //the outer level dag created by pap } /** * Creates the submit directory for the workflow. This is not thread safe. * * @param dag the workflow being worked upon. * @param dir the base directory specified by the user. * @param user the username of the user. * @param vogroup the vogroup to which the user belongs to. * @param timestampBased boolean indicating whether to have a timestamp based dir or not * * @return the directory name created relative to the base directory passed * as input. * * @throws IOException in case of unable to create submit directory. */ protected String createSubmitDirectory( ADag dag, String dir, String user, String vogroup, boolean timestampBased ) throws IOException { return createSubmitDirectory( dag.getLabel(), dir, user, vogroup, timestampBased ); } /** * Creates the submit directory for the workflow. This is not thread safe. * * @param label the label of the workflow * @param dir the base directory specified by the user. * @param user the username of the user. * @param vogroup the vogroup to which the user belongs to. * @param timestampBased boolean indicating whether to have a timestamp based dir or not * * @return the directory name created relative to the base directory passed * as input. * * @throws IOException in case of unable to create submit directory. 
*/ protected String createSubmitDirectory( String label, String dir, String user, String vogroup, boolean timestampBased ) throws IOException { File base = new File( dir ); StringBuffer result = new StringBuffer(); //do a sanity check on the base sanityCheck( base ); //add the user name if possible base = new File( base, user ); result.append( user ).append( File.separator ); //add the vogroup base = new File( base, vogroup ); sanityCheck( base ); result.append( vogroup ).append( File.separator ); //add the label of the DAX base = new File( base, label ); sanityCheck( base ); result.append( label ).append( File.separator ); //create the directory name StringBuffer leaf = new StringBuffer(); if( timestampBased ){ leaf.append( mPegasusPlanOptions.getDateTime( mProps.useExtendedTimeStamp() ) ); } else{ //get all the files in this directory String[] files = base.list( new RunDirectoryFilenameFilter() ); //find the maximum run directory int num, max = 1; for( int i = 0; i < files.length ; i++ ){ num = Integer.parseInt( files[i].substring( RunDirectoryFilenameFilter.SUBMIT_DIRECTORY_PREFIX.length() ) ); if ( num + 1 > max ){ max = num + 1; } } //create the directory name leaf.append( RunDirectoryFilenameFilter.SUBMIT_DIRECTORY_PREFIX ).append( mNumFormatter.format( max ) ); } result.append( leaf.toString() ); base = new File( base, leaf.toString() ); mLogger.log( "Directory to be created is " + base.getAbsolutePath(), LogManager.DEBUG_MESSAGE_LEVEL ); sanityCheck( base ); return result.toString(); } /** * Checks the destination location for existence, if it can * be created, if it is writable etc. * * @param dir is the new base directory to optionally create. * * @throws IOException in case of error while writing out files. */ protected static void sanityCheck( File dir ) throws IOException{ if ( dir.exists() ) { // location exists if ( dir.isDirectory() ) { // ok, isa directory if ( dir.canWrite() ) { // can write, all is well return; } else { // all is there, but I cannot write to dir throw new IOException( "Cannot write to existing directory " + dir.getPath() ); } } else { //try to get around JVM bug. JIRA PM-91 if( dir.getPath().endsWith( "." ) ){ //just try to create the parent directory if( !dir.getParentFile().mkdirs() ){ //tried everything and failed throw new IOException( "Unable to create directory " + dir.getPath() ); } return; } // exists but not a directory throw new IOException( "Destination " + dir.getPath() + " already " + "exists, but is not a directory." ); } } else { // does not exist, try to make it if ( ! 
dir.mkdirs() ) { throw new IOException( "Unable to create directory " + dir.getPath() ); } } } /** * This method generates a symlink between two files * * @param source the file that has to be symlinked * @param destination the destination of the symlink * * @return boolean indicating if creation of symlink was successful or not */ protected boolean createSymbolicLink( String source, String destination ) { try{ Runtime rt = Runtime.getRuntime(); String command = "ln -sf " + source + " " + destination; mLogger.log( "Creating symlink between " + source + " " + destination, LogManager.DEBUG_MESSAGE_LEVEL); Process p = rt.exec( command, null ); // set up to read subprogram output InputStream is = p.getInputStream(); InputStreamReader isr = new InputStreamReader(is); BufferedReader br = new BufferedReader(isr); // set up to read subprogram error InputStream er = p.getErrorStream(); InputStreamReader err = new InputStreamReader(er); BufferedReader ebr = new BufferedReader(err); // read output from subprogram // and display it String s,se=null; while ( ((s = br.readLine()) != null) || ((se = ebr.readLine()) != null ) ) { if(s!=null){ mLogger.log(s,LogManager.DEBUG_MESSAGE_LEVEL); } else { mLogger.log(se,LogManager.ERROR_MESSAGE_LEVEL ); } } br.close(); return true; } catch(Exception ex){ mLogger.log("Unable to create symlink to the log file" , ex, LogManager.ERROR_MESSAGE_LEVEL); return false; } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/DAXWriter.java0000644000175000017500000002643111757531137026314 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.common.util.FactoryException; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; /** * The abstract class that identifies the interface for writing out a dax * corresponding to a partition. The interface stipulates that the jobs making * up the partition and relations between those jobs in the partition are * identified when invoking it. However all the job details are to be gotten * by the implementing classes by parsing the original dax. * * @author Karan Vahi * @version $Revision: 2576 $ */ public abstract class DAXWriter { /** * The prefix added to the name of the dax to identify it is a partitioned * dax. */ public static final String PARTITION_PREFIX = "partition_"; /** * The name of the package in which the writers are implemented. */ public static final String PACKAGE_NAME = "org.griphyn.cPlanner.partitioner"; /** * The dax file that is being partitioned. The dax file is the repository * for all the jobs in the partitioned daxes. 
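 * <p>
 * A sketch of the call sequence a concrete writer is driven through
 * (hypothetical variables; the writer class actually loaded is decided by
 * loadInstance below):
 * <pre>
 *    DAXWriter writer = DAXWriter.loadInstance( props, "/path/to/orig.dax", daxDir );
 *    writer.setPartitionName( "blackdiamond" );
 *    boolean done = writer.writePartitionDax( partition ); //one dax per partition
 *    writer.close();
 * </pre>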
*/ protected String mDaxFile; /** * The directory in which the daxes corresponding to the partition are * generated. */ protected String mPDAXDirectory; /** * The name of the partition dax that are generated. */ protected String mPartitionName; /** * The handle to the logging object. */ protected LogManager mLogger; /** * The write handle to the xml file being written. */ protected PrintWriter mWriteHandle; /** * The default constructor */ protected DAXWriter(){ mDaxFile = null; mPDAXDirectory = null; mLogger = LogManagerFactory.loadSingletonInstance(); mPartitionName = null; } /** * The overloaded constructor. * * @param daxFile the path to the dax file that is being partitioned. * @param directory the directory in which the partitioned daxes are to be * generated. */ protected DAXWriter(String daxFile, String directory) { mLogger = LogManagerFactory.loadSingletonInstance(); mDaxFile = daxFile; mPDAXDirectory = directory; mPartitionName = null; } /** * It writes out a dax consisting of the jobs as specified in the partition. * * @param partition the partition object containing the relations and id's * of the jobs making up the partition. * * @return boolean true if dax successfully generated and written. * false in case of error. */ public boolean writePartitionDax( Partition partition ){ return writePartitionDax( partition, partition.getIndex() ); } /** * It writes out a dax consisting of the jobs as specified in the partition. * * @param partition the partition object containing the relations and id's * of the jobs making up the partition. * @param index the index of the partition. * * @return boolean true if dax successfully generated and written. * false in case of error. */ public abstract boolean writePartitionDax( Partition partition, int index ); /** * The ends up loading the PDAXWriter. It selects the writer as specified by * the vds.partition.parse.mode property. * * @param properties the handle to the properties visible to Pegasus. * @param daxFile the path to the dax file that is being partitioned. * @param directory the directory in which the partitioned daxes are to be * generated. */ public static DAXWriter loadInstance( PegasusProperties properties, String daxFile, String directory) { String className = properties.getPartitionParsingMode(); className = (className.equalsIgnoreCase("single"))?"SingleLook": (className.equalsIgnoreCase("multiple"))?"MultipleLook": className; return loadInstance( className, properties, daxFile, directory ); } /** * Loads the implementing PDAXWriter. The name of the class that is to be * loaded is passed and can be complete(with package name) or just the name * of the class, in which case the class is loaded from the default package. * * @param properties the handle to the properties visible to Pegasus. * @param className the name of the class with or without the package name. * @param daxFile the path to the dax file that is being partitioned. * @param directory the directory in which the partitioned daxes are to be * generated. * * @throws FactoryException that nests any error that * might occur during the instantiation of the implementation. */ public static DAXWriter loadInstance( String className, PegasusProperties properties, String daxFile, String directory) throws FactoryException{ if(className.indexOf('.') == -1){ //prepend the default package name className = PACKAGE_NAME + "." + className; } //sanity and default checks directory = (directory == null)? "." 
: directory; //try loading the class dynamically DAXWriter writer = null; DynamicLoader dl = new DynamicLoader( className); try { Object argList[] = new Object[2]; argList[0] = daxFile; argList[1] = directory; writer = (DAXWriter) dl.instantiate(argList); } catch ( Exception e ) { throw new FactoryException( "Instantiating DAXWriter", className, e ); } return writer; } /** * It constructs the name of the partitioned dax file that has to be written * corresponding to a partition of the dax. The dax name returned has no * prefix added to it. * * @param daxName the name attribute in the adag element of the dax. * @param index the partition number of the partition. */ public static String getPDAXFilename(String daxName, int index){ return getPDAXFilename(daxName,index,false); } /** * It constructs the name of the partitioned dax file that has to be written * corresponding to a partition of the dax. * * @param daxName the name attribute in the adag element of the dax. * @param index the partition number of the partition. * @param addPrefix whether you want to addPrefix or not. */ public static String getPDAXFilename(String daxName, int index, boolean addPrefix){ StringBuffer sb = new StringBuffer(32); //get the partition name sb.append(constructPartitionName(daxName,addPrefix)); //add the suffix sb.append("_").append(index).append(".dax"); return sb.toString(); } /** * It constructs the partition name given the daxName. It only ends up adding * the prefix if the addPrefix parameter is set. * * @param daxName the name attribute in the adag element of the dax. * @param addPrefix whether to add prefix or not. */ private static String constructPartitionName(String daxName, boolean addPrefix){ StringBuffer sb = new StringBuffer(); //append the partition prefix to it. if(addPrefix) sb.append(PARTITION_PREFIX); //construct a partition name sb = (daxName == null)? // set it to the default name sb.append("test") : sb.append(daxName); return sb.toString(); } /** * It sets the name of the partition in the dax that is generated. It suffixes * PARTITION_PREFIX to the name of the dax. * * @param daxName the name attribute in the adag element of the dax. */ public void setPartitionName(String daxName){ //yes we want the partition prefix to be added mPartitionName = constructPartitionName(daxName,true); } /** * It returns the name of the partitioned dax, that the object is * currently writing or initialized to write. By the name, one means the * value that is set to the name attribute in the adag element. */ public String getPartitionName(){ return mPartitionName; } /** * This initializes the write handle a file in directory specified * when creating the instance of this class. The name of the file is * constructed by default, by looking at the partition name that is * assigned to the name attribute for the adag element. * * @param index the partition number of the partition. */ public void initializeWriteHandle(int index){ //check if partition name is set if(mPartitionName == null){ //set it to default setPartitionName(null); } String name = mPartitionName + "_" + index + ".dax"; initializeWriteHandle(name); } /** * This initializes the write handle to the file in directory specified * when creating the instance of this class. * * @param fileName the name of the file that is to be written in the * directory. 
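 *
 * <p>
 * For instance, with the dax name "blackdiamond" and index 2,
 * getPDAXFilename( daxName, index, true ) above yields
 * "partition_blackdiamond_2.dax", and passing that name here opens
 * mPDAXDirectory/partition_blackdiamond_2.dax for writing.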
*/ public void initializeWriteHandle(String fileName){ String completeName = mPDAXDirectory + File.separator + fileName; try{ //if the write handle was not explicitly closed, closing it if(mWriteHandle != null) this.close(); mWriteHandle = new PrintWriter(new BufferedWriter(new FileWriter(completeName))); } catch(IOException e){ throw new RuntimeException( "Unable to write to file " + completeName + " :", e); } } /** * Writes out to the file. */ public void writeln(String st){ mWriteHandle.println(st); } /** * Close the write handle to the file that is written. */ public void close(){ if( mWriteHandle != null ){ mWriteHandle.close(); mWriteHandle = null; } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/ClustererCallback.java0000644000175000017500000000773111757531137030072 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.planner.cluster.Clusterer; import edu.isi.pegasus.planner.cluster.ClustererException; import edu.isi.pegasus.planner.common.PegasusProperties; import java.util.List; /** * A Callback implementation that passes the partitions detected during the * partitioning of the worflow to a Clusterer for clustering. The clusterer * is passed off to the callback during the callback initialization. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class ClustererCallback implements Callback { /** * The handle to the clusterer that does the clustering. */ private Clusterer mClusterer; /** * The handle to the properties object. */ private PegasusProperties mProps; /** * The default constructor. */ public ClustererCallback(){ } /** * Initializes the callback. * * @param properties the properties passed to the planner. * @param clusterer the clusterer that has to be called out, in the callback * methods. */ public void initialize (PegasusProperties properties, Clusterer clusterer){ mProps = properties; mClusterer = clusterer; } /** * Callback for when a partitioner determines that partition has been * constructed. The partition is passed off to the clusterer that the * callback has been initialized with. * * @param p the constructed partition. * * @throws RuntimeException in case of callback not being initialized, or * a ClustererException being thrown during the Clusterer operation. */ public void cbPartition( Partition p ) { //sanity check if( mClusterer == null ){ throw new RuntimeException( "Callback needs to be initialized before being used"); } //shallow wrap of exception for time being try{ mClusterer.determineClusters(p); } catch ( ClustererException e ){ throw new RuntimeException( "ClustererCallback cbPartition( Partition ) ", e ); } } /** * Callback for when a partitioner determines the relations between partitions * that it has previously constructed. * * @param child the id of a partition. * @param parents the list of String objects that contain * the id's of the parents of the partition. 
* * * @throws RuntimeException in case of callback not being initialized, or * a ClustererException being thrown during the Clusterer operation. */ public void cbParents(String child, List parents) { //sanity check if( mClusterer == null ){ throw new RuntimeException( "Callback needs to be initialized before being used"); } //shallow wrap of exception for time being try{ mClusterer.parents(child, parents); } catch ( ClustererException e ){ throw new RuntimeException( "ClustererCallback cbParents( String, List ) ", e ); } } /** * Callback for the partitioner to signal that it is done with the processing. * * */ public void cbDone(){ } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/PartitionerFactory.java0000644000175000017500000001221711757531137030330 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.util.Map; /** * A Factory class to load the right type of partitioner at runtime, as * specified by the Properties. Each invocation, results in a new partitioner * being loaded. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class PartitionerFactory { /** * Package to prefix "just" class names with. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.partitioner"; /** * The name of the class that does level based partitioning. */ public static final String LEVEL_BASED_PARTITIONING_CLASS = "BFS"; /** * The name of the class that does label based partitioning. */ public static final String LABEL_BASED_PARTITIONING_CLASS = "Label"; /** * The name of the class that does horizontal based partitioning. */ public static final String HORIZONTAL_PARTITIONING_CLASS = "Horizontal"; /** * The name of the class that does level based partitioning. */ public static final String DEFAULT_PARTITIONING_CLASS = LEVEL_BASED_PARTITIONING_CLASS; /** * An array of known partitioning classes. */ private static final String[] PARTITIONING_CLASSES = { LEVEL_BASED_PARTITIONING_CLASS, LABEL_BASED_PARTITIONING_CLASS , HORIZONTAL_PARTITIONING_CLASS }; /** * Loads the implementing class corresponding to the type specified by the user. * The properties object passed should not be null. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param root the dummy root node of the graph. * @param graph the map containing all the nodes of the graph keyed by * the logical id of the nodes. * @param className the name of the implementing class. * * @return the instance of the class implementing this interface. 
* * @throws PartitionerFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static Partitioner loadInstance(PegasusProperties properties, GraphNode root, Map graph, String className) throws PartitionerFactoryException{ //sanity check if(properties == null){ throw new NullPointerException("Invalid properties passed"); } if( className.indexOf( '.' ) == -1 ){ //compare with the known classes to ensure classnames //passed are case insensitive for( int i = 0; i < PARTITIONING_CLASSES.length; i++ ){ if( className.equalsIgnoreCase( PARTITIONING_CLASSES[i] )){ className = PARTITIONING_CLASSES[i]; break; } } className = DEFAULT_PACKAGE_NAME + "." + className; } //try loading the class dynamically Partitioner partitioner = null; try{ DynamicLoader dl = new DynamicLoader(className); Object argList[] = new Object[3]; Class classList[] = new Class[3]; argList[0] = root; //classList[0] = Class.forName( "org.griphyn.cPlanner.partitioner.GraphNode" ); classList[0] = new GraphNode().getClass();//to circumvent root being null argList[1] = graph; classList[1] = Class.forName("java.util.Map"); argList[2] = properties; classList[2] = Class.forName( "edu.isi.pegasus.planner.common.PegasusProperties"); partitioner = (Partitioner) dl.instantiate(classList, argList); } catch( Exception e ){ throw new PartitionerFactoryException("Instantiating Partitioner ", className, e); } return partitioner; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/PDAXWriter.java0000644000175000017500000000747611757531137026444 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LogManagerFactory; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Currently; /** * It writes out the partition graph in xml form. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class PDAXWriter { /** * The version of the associated xml schema, to which the pdax files being * written conform to. */ public static final String XML_VERSION = "2.0"; public static final String XML_NAMESPACE="http://pegasus.isi.edu/schema"; /** * The write handle to the xml file being written. */ private PrintWriter mWriteHandle; /** * The handle to the logging object. */ private LogManager mLogger; /** * The name assigned to the pdax file being written. */ private String mName; /** * The fully qaulified path to the file being written. */ private String mFileName; /** * The overloaded constructor. * * @param name the name that is assigned to the pdax. * @param fileName the path to the xml file that has to be written. 
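     *
     * A minimal usage sketch (the name and the file path are illustrative):
     * <pre>
     *    PDAXWriter writer = new PDAXWriter( "blackdiamond", "/tmp/blackdiamond.pdax" );
     *    writer.writeHeader();
     *    //writer.write( partition ) for each partition
     *    writer.close();
     * </pre>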
*/ public PDAXWriter(String name, String fileName) { mLogger = LogManagerFactory.loadSingletonInstance(); mFileName = fileName; mName = name; try{ mWriteHandle = new PrintWriter(new BufferedWriter(new FileWriter(fileName))); } catch(IOException e){ throw new RuntimeException( "Unable to write to file " + fileName + " :", e); } } /** * Writes out the opening element of the xml document. */ public void writeHeader(){ String name = new File(mFileName).getName(); writeln(""); writeln(""); writeln(""); } /** * Writes out a partition to the associate XML stream. * * @param p the partition to be written to the stream. * * @exception IOException if something fishy happens to the stream. */ public void write( Partition p ) throws IOException{ p.toXML( mWriteHandle ); } /** * Writes out to the file. * @param st String */ public void write(String st){ mWriteHandle.write(st); } /** * Writes out to the file. * @param st String */ public void writeln(String st){ mWriteHandle.println(st); } /** * Close the xml file that is written. */ public void close(){ write("\n"); mWriteHandle.close(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/Whole.java0000644000175000017500000000725211757531137025561 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.util.LinkedList; import java.util.Iterator; import java.util.List; import java.util.Map; /** * This partitioning technique considers the whole DAX as a single partition. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class Whole extends Partitioner { /** * A short description about the partitioner. */ public static final String DESCRIPTION = "Whole DAX as a Single Partition"; /** * The overloaded constructor. * * @param root the dummy root node of the graph. * @param graph the map containing all the nodes of the graph keyed by * the logical id of the nodes. * @param properties the properties passed to the planner. */ public Whole( GraphNode root, Map graph, PegasusProperties properties ) { super( root, graph, properties ); } /** * This ends up writing out a partition for each job in the dax. It is a * one 2 one mapping from the jobs in the dax to the corresponding * partitions in the pdax. The ids of the partitions in pdax is same * as the ids of the corresponding jobs in the dax. * * @param c the callback object to callout to while partitioning. */ public void determinePartitions( Callback c ) { //we just traverse the graph via an iterator, as we do not //need to any particular graph traversal for this mode. 
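        //note: all the non root nodes end up in one single partition with
        //index 1, which is handed to the callback in one cbPartition() call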
String key = null; GraphNode node = null; int currentIndex = 1; List levelList = new LinkedList(); for( Iterator it = mGraph.keySet().iterator(); it.hasNext(); ){ //the key is the logical id of the node specified in the dax key = (String)it.next(); node = (GraphNode)mGraph.get(key); //we have to ignore the dummy root node. if( node.getID().equals( mRoot.getID() ) ){ //we go to next node mLogger.log( "Ignoring node " + node.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); continue; } //construct the partition for this node //the partition has just one node with id same as the id //of the corresponding id of the job in the dax levelList.add( node ); } Partition p = new Partition( levelList, Integer.toString( currentIndex ) ); p.setIndex( currentIndex ); p.constructPartition(); mLogger.log( "Partition is " + p.getNodeIDs(), LogManager.DEBUG_MESSAGE_LEVEL ); c.cbPartition( p ); //we are done with the partitioning c.cbDone(); } /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String description(){ return this.DESCRIPTION; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/Label.java0000644000175000017500000002235511757531137025523 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.partitioner.graph.Bag; import edu.isi.pegasus.planner.partitioner.graph.LabelBag; import java.util.Map; import java.util.HashMap; import java.util.List; import java.util.ArrayList; import java.util.LinkedList; import java.util.Iterator; import java.util.Set; import java.util.HashSet; /** * This partitioner partitions the DAX into smaller partitions as specified by * the labels associated with the jobs. If no label is specified, then the * partitioner puts the job into a unique partition corresponding to the job * ID. * * @author Karan Vahi * @version $Revision: 2576 $ * * */ public class Label extends Partitioner { /** * The default label that is associated with the job in case of no label * being specified. */ // public static final String DEFAULT_LABEL = "default"; /** * A short description about the partitioner. */ public static final String DESCRIPTION = "Label Based Partitioning"; /** * A map indexed by the label. Each value is a partition object * consisting of jobs with that label. */ private Map mPartitionMap; /** * The first in first out queue, that manages the set of gray vertices in a * breadth first search. */ private LinkedList mQueue; /** * The handle to the Logging object. */ private LogManager mLogger; /** * The overloaded constructor. * * @param root the dummy root node of the graph. 
* @param graph the map containing all the nodes of the graph keyed by * the logical id of the nodes. * @param properties the properties passed to the planner. */ public Label(GraphNode root, Map graph, PegasusProperties properties) { super(root, graph, properties); mPartitionMap = new HashMap(10); mQueue = new LinkedList(); mLogger = LogManagerFactory.loadSingletonInstance( properties ); } /** * Partitions the graph passed in the constructor, on the basis of the labels * associated with the nodes in the graph. All the nodes, with the same label * are deemed to be in the same partition. * * @param c the callback for the partitioner. */ public void determinePartitions( Callback c ){ int currentDepth = 0; GraphNode node; GraphNode parent; GraphNode child; int depth = 0; List levelList = new java.util.LinkedList(); String currentLabel = null; int i = 0,partitionNum = 0; mLogger.log( "Starting Graph Traversal", LogManager.INFO_MESSAGE_LEVEL ); //set the depth of the dummy root as 0 mRoot.setDepth( currentDepth ); mQueue.addLast( mRoot ); while( !mQueue.isEmpty() ){ node = (GraphNode)mQueue.getFirst(); depth = node.getDepth(); currentLabel = getLabel( node ); if(currentDepth < depth){ //a new level starts currentDepth++; levelList.clear(); } //get the partition for the label Partition p = null; if( mPartitionMap.containsKey( currentLabel ) ){ p = (Partition)mPartitionMap.get( currentLabel ); } else { p = new Partition(); if( currentDepth > 0 ){ partitionNum++; p.setIndex( partitionNum ); p.setID(getPartitionID( partitionNum )); mPartitionMap.put( currentLabel, p ); } } if( p.lastAddedNode()!= null && depth > p.lastAddedNode().getDepth() + 1 ){ throw new RuntimeException( "Invalid labelled graph" ); /* //partition with current label has been fully //constructed. 
write out the existing partition //create a new partition Partition newp = new Partition(); newp.addNode(node); mPartitionMap.put(currentLabel,newp); */ } else if(currentDepth > 0){ //add to the existing partition for the current label p.addNode(node); //also associate the partition id with the node node.getBag().add( LabelBag.PARTITION_KEY,p.getID() ); } mLogger.log("Adding to level " + currentDepth + " " + node.getID(),LogManager.DEBUG_MESSAGE_LEVEL); levelList.add( node ); node.setColor( GraphNode.BLACK_COLOR ); for( Iterator it = node.getChildren().iterator(); it.hasNext(); ){ child = (GraphNode)it.next(); if(!child.isColor( GraphNode.GRAY_COLOR ) && child.parentsColored( GraphNode.BLACK_COLOR ) ){ mLogger.log( "Adding to queue " + child.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); child.setDepth( depth + 1 ); child.setColor( GraphNode.GRAY_COLOR ); mQueue.addLast( child ); } } node = (GraphNode)mQueue.removeFirst(); mLogger.log( "Removed " + node.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); } mLogger.log( "Starting Graph Traversal - DONE", LogManager.INFO_MESSAGE_LEVEL ); for( Iterator it = mPartitionMap.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry)it.next(); Partition p = (Partition)entry.getValue(); p.constructPartition(); mLogger.log( "Partition is " + p.getNodeIDs() + " corresponding to label " + entry.getKey(), LogManager.DEBUG_MESSAGE_LEVEL ); c.cbPartition( p ); } mLogger.log( "Determining relations between partitions", LogManager.INFO_MESSAGE_LEVEL ); //construct the relations for( Iterator it = mPartitionMap.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry) it.next(); Partition p = (Partition) entry.getValue(); List roots = p.getRootNodes(); Set parentPartitions = new HashSet( roots.size() ); //get the Root nodes for each partition and //for each root, determine the partitions of it's parents for( Iterator rootIt = roots.iterator(); rootIt.hasNext(); ){ node = (GraphNode)rootIt.next(); for( Iterator parentsIt = node.getParents().iterator(); parentsIt.hasNext(); ){ parent = (GraphNode)parentsIt.next(); //the parents partition id is parent for the //partition containing the root parentPartitions.add( parent.getBag().get( LabelBag.PARTITION_KEY ) ); } } //write out all the parents of the partition if(!parentPartitions.isEmpty()){ c.cbParents( p.getID(), new ArrayList( parentPartitions ) ); } } mLogger.log( "Determining relations between partitions - DONE", LogManager.INFO_MESSAGE_LEVEL ); //done with the partitioning c.cbDone(); } /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String description(){ return this.DESCRIPTION; } /** * Returns the label for the node. If no label is associated with the node, * then the ID of the node is assumed as the label. * * @param node the node for which the label is required. * * @return the label associated with the job, else the id of the node. */ private String getLabel(GraphNode node){ Bag b = (LabelBag)node.getBag(); Object obj = b.get( LabelBag.LABEL_KEY ); return (obj == null )? node.getID() /*this.DEFAULT_LABEL*/ : (String)obj; } /** * Constructs the id for the partition. * * @param id the integer id. * * @return the ID of the partition. 
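     * For example, an id of 3 results in the partition ID "ID3".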
*/ private String getPartitionID( int id ){ StringBuffer sb = new StringBuffer(5); sb.append( "ID" ).append( id ); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/Partition.java0000644000175000017500000003111511757531137026447 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.planner.classes.Data; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.io.Writer; import java.io.StringWriter; import java.io.IOException; import java.util.HashMap; import java.util.LinkedHashSet; import java.util.Iterator; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.Set; /** * This is an abstract container for a partition in the graph. This used for * the generation of the partition element in the partition graph, and identifies * the relations between the jobs in the partition if any. * * * @author Karan Vahi * @version $Revision: 2753 $ */ public class Partition extends Data { /** * The set of node id's in the partition. */ private Set mNodeSet; /** * A map containing a node and it's parents ids in the partition. * A node id's is the key and the corresponding value is the list of * String id's of it's parents. The map only contain those nodes for * which there is a parent. */ private Map mParentsMap; /** * The list of GraphNode objects corresponding to the nodes * making the partiition. */ private List mNodeList; /** * The partition id of the partition. */ private String mID; /** * The index associated with the partition. In most cases the ID of the * partition is constructed using this index. */ private int mIndex; /** * The name of the partition. */ private String mName; /** * A pointer to the last added node to the partition. */ private GraphNode mLastAddedNode; /** * The default constructor. */ public Partition(){ mID = null; mName = "test"; mIndex = -1; mNodeSet = new LinkedHashSet(); mParentsMap = new HashMap(); mNodeList = new java.util.LinkedList(); mLastAddedNode = null; } /** * The overloaded constructor. * * @param nodeList list of GraphNode objects. * @param id the partition id of the partition. */ public Partition(List nodeList, String id) { mNodeList = nodeList; mID = id; mParentsMap = new HashMap(nodeList.size()); mNodeSet = new LinkedHashSet(nodeList.size()); mIndex = -1; //default to test mName = "test"; mLastAddedNode = null; for( Iterator it = mNodeList.iterator(); it.hasNext(); ){ mNodeSet.add(((GraphNode)it.next()).getID()); } } /** * Adds a node to the partition. It ends up adding it to the underneath * node list. * * @param node the GraphNode object corresponding to the job * that is to be added. */ public void addNode(GraphNode node){ mNodeList.add(node); //also add it to the underlying job set mNodeSet.add(node.getID()); mLastAddedNode = node; } /** * Returns the last added node to the partition. 
* * @return the last added node, or null in case partition is empty */ public GraphNode lastAddedNode(){ return mLastAddedNode; } /** * Returns a list of nodes making up the partition. * * @return List of GraphNode objects. */ public List getNodes(){ return this.mNodeList; } /** * Returns the root nodes in the partition. They can only be determined, after * the constructPartition() has been called. * * @return List of GraphNode objects that are the root. */ public List getRootNodes(){ List l = new ArrayList(10); Map m = this.getRelations(); for(Iterator it = getNodes().iterator();it.hasNext();){ GraphNode gn = (GraphNode)it.next(); if(!m.containsKey(gn.getID())){ l.add(gn); } } return l; } /** * It while looking at the node list constructs the relations between * the jobs in the partition, that can be gotten through * getRelationsInPartition(). */ public void constructPartition(){ //traverse through all the nodes in the partition for(Iterator it = mNodeList.iterator();it.hasNext();){ GraphNode node = (GraphNode)it.next(); List parents = node.getParents(); if(parents == null){ continue; } //traverse through all the parents of the node, in //the original DAX/Graph,that may or maynot be in //this partition List partitionParents = new java.util.LinkedList(); for(Iterator pIt = parents.iterator();pIt.hasNext();){ GraphNode parent = (GraphNode)pIt.next(); if(mNodeSet.contains(parent.getID())){ //relation between 2 nodes in the same partition. partitionParents.add(parent.getID()); } } //only add if there are any parents if(!partitionParents.isEmpty()){ mParentsMap.put(node.getID(), partitionParents); } } } /** * It sets the partition name to the value passed. * * @param name the name to which the partition name needs to be set to. */ public void setName(String name){ mName = name; } /** * It returns the name of the partition. */ public String getName(){ return mName; } /** * It sets the index associated with this partition to the value passed. * * @param index the index value. */ public void setIndex(int index){ mIndex = index; } /** * It returns the index to number of the partition. */ public int getIndex(){ return mIndex; } /** * It returns the unique id that is associated with the partition. */ public String getID(){ return mID; } /** * It sets the id of the partition. * * @param id the id of the partition. */ public void setID(String id){ mID = id; } /** * Returns the number of nodes in the partition. * * @return the number of nodes. */ public int size(){ return mNodeList.size(); } /** * Returns a String version of the object. */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append("Partition ID ->").append(mID); for( Iterator it = this.mNodeList.iterator(); it.hasNext(); ){ GraphNode gn = (GraphNode)it.next(); String id = (String)gn.getID(); sb.append( "\nJob ->").append(id); sb.append( "\nBag ->" ).append( gn.getBag() ); List l = (List)mParentsMap.get(id); if( l == null) continue; Iterator it1 = l.iterator(); sb.append(" Parents {"); while(it1.hasNext()){ sb.append(it1.next()).append(','); } sb.append("}"); } return sb.toString(); } /** * Returns the xml description of the object. This is used for generating * the partition graph. That is no longer done. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * * @exception IOException if something fishy happens to the stream. 
*/ public void toXML( Writer writer ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String indent = "\t"; //write out the partition xml element writer.write( indent ); writer.write( "" ); writer.write( newLine ); //write out all the jobs making up the partition String newIndent = indent + "\t"; for ( Iterator it = mNodeList.iterator(); it.hasNext() ;){ GraphNode gn = (GraphNode)it.next(); writer.write( newIndent ); writer.write( "" ); writer.write( newLine ); } //write out all the dependencies amongst the jobs. String id; for ( Iterator it = mNodeSet.iterator(); it.hasNext() ; ){ id = (String)it.next(); List l = (List)mParentsMap.get(id); if( l == null || l.isEmpty()) continue; //write out the child writer.write( newIndent ); writer.write( ""); writer.write( newLine ); //write out all the parents of the child String parentIndent = newIndent + "\t"; for( Iterator it1 = l.iterator(); it1.hasNext(); ){ writer.write( parentIndent ); writer.write( "" ); writer.write( newLine ); } writer.write( newIndent ); writer.write( "" ); writer.write( newLine ); } writer.write( indent ); writer.write( "" ); writer.write( newLine ); } /** * Returns the xml description of the object. This is used for generating * the partition graph. That is no longer done. * * @return String containing the Partition object in XML. * * @exception IOException if something fishy happens to the stream. */ public String toXML() throws IOException{ Writer writer = new StringWriter(32); toXML( writer ); return writer.toString(); } /** * Writes an attribute to the stream. Wraps the value in quotes as required * by XML. * * @param writer * @param key * @param value * * @exception IOException if something fishy happens to the stream. */ private void writeAttribute( Writer writer, String key, String value ) throws IOException{ writer.write( " " ); writer.write( key ); writer.write( "=\""); writer.write( value ); writer.write( "\"" ); } /** * It returns the set of the job ids making up the partition. */ public Set getNodeIDs(){ return mNodeSet; } /** * Ends up assigning the parents to a particular node. It does assign * the parents to the node, if the node is in the partition. It however * does not check if the parents are in the partition or not. * * @param node the id of the node for which you want to add the parents. * @param parents list of id's of the parents of the nodes. */ public void addParents(String node, List parents){ //check if node is in the node set if(mNodeSet.contains(node)){ //add to the graph mParentsMap.put(node,parents); } } /** * A function to return the child-parent relations for the jobs making up the * partition. The child parent relations are only returned for the jobs * that have parents in the partition. * * @return Map containing the job id's as the keys and the values as the * list of the parent id's in the partition. */ public Map getRelations(){ return mParentsMap; } /** * Returns a copy of the object */ public Object clone() throws CloneNotSupportedException { throw new CloneNotSupportedException("Clone method not implemented in Partition"); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/Topological.java0000644000175000017500000001364111757531137026756 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.util.List; import java.util.ArrayList; import java.util.LinkedList; import java.util.Map; import java.util.HashMap; import java.util.Iterator; /** * Does a topological sort on the Partition. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class Topological //implements Iterator { /** * The partition that has to be sorted. */ private Partition mPartition; /** * An array that contains the number of incoming edges to a node. */ private int[] mInDegree; /** * A Map that returns the index into mInDegree map for a particular node * in graph. Maps a ID of the node to an int value, which is the index to * to the array containing the in degree for each node. * * @see #mInDegree */ private Map mIndexMap; /** * The overloaded constructor. * * @param p the partition that has to be sorted. */ public Topological( Partition p ){ mPartition = p; initialize(); } /** * Initializes the inDegree for each node of the partition. * */ public void initialize(){ //build up a inDegree map for each node. int order = mPartition.size(); mInDegree = new int[ order ]; mIndexMap = new HashMap( order ); int index = 0; //each of the root nodes have in degree of 0 for ( Iterator it = mPartition.getRootNodes().iterator(); it.hasNext(); ){ GraphNode root = (GraphNode)it.next(); mIndexMap.put( root.getID(), new Integer( index ) ); mInDegree[ index++ ] = 0; } //determine inDegree for other nodes for( Iterator it = mPartition.getRelations().entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry) it.next(); mIndexMap.put( entry.getKey(), new Integer( index) ); mInDegree[ index++ ] = ((List) entry.getValue()).size(); } //sanity check if( index != order){ throw new RuntimeException( "Index does not match order of partition " + mPartition.getID()); } } /** * Topologically sorts the partition and returns a List of * GraphNode elements. The iterator of the list, returns * the elements in the topological order. 
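     *
     * A minimal usage sketch (p is an illustrative, already constructed
     * Partition; note that the returned list holds the String IDs of the
     * nodes, added in topological order):
     * <pre>
     *    Topological t = new Topological( p );
     *    List sorted = t.sort();
     * </pre>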
* * @return List of GraphNode objects */ public List sort(){ List l = new LinkedList( ); int order = mPartition.size(); //get all the adjaceny list representation Map relations = this.childrenRepresentation(); List queue = new LinkedList(); //add all the root nodes to queue first for( Iterator it = this.mPartition.getRootNodes().iterator(); it.hasNext(); ){ queue.add( ((GraphNode)it.next()).getID() ); } int index; while( !queue.isEmpty() ){ String nodeID = (String)queue.remove(0); l.add( nodeID ); //traverse all the children of the node if( relations.containsKey( nodeID) ){ for( Iterator it = ( (List)relations.get( nodeID )).iterator(); it.hasNext();){ String childID = (String)it.next(); //remove the edge from node to child by decrementing inDegree index = index(childID); mInDegree[ index ] -= 1; if( mInDegree[ index ] == 0 ){ //add the node to the queue queue.add( childID ); } } } } //sanity check if( l.size() != order ){ throw new RuntimeException( " Partition " + mPartition.getID() + " has a cycle"); } return l; } /** * Returns a map that is index by GraphNode ID's and each value is the list * of ID's of children of that GraphNode. * * @return Map that contains adjacency list's for each node. */ protected Map childrenRepresentation(){ //adjacency list where List contains parents Map m = new HashMap( mPartition.size() ); for( Iterator it = mPartition.getRelations().entrySet().iterator(); it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); Object node = entry.getKey(); List parents = (List)entry.getValue(); List children = null; for( Iterator pit = parents.iterator(); pit.hasNext(); ){ Object parent = pit.next(); //the node should be in parents adjacency list if( m.containsKey( parent )){ children = (List)m.get(parent); children.add( node ); } else{ children = new ArrayList(5); children.add( node ); m.put( parent, children); } } } return m; } /** * Returns the index of a particular node. The index is used as an index into * arrays. * * @param id the id of the node. * * @return the index */ private int index( String id ){ return ((Integer)mIndexMap.get( id )).intValue(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/Callback.java0000644000175000017500000000334111757531137026172 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import java.util.List; /** * This interface defines the callback calls from the partitioners. The * partitioners call out to the appropriate callback methods as and when they * determine that a partition has been constructed. * * * @author Karan Vahi * * @version $Revision: 2576 $ */ public interface Callback { /** * Callback for when a partitioner determines that partition has been * constructed. * * @param partition the constructed partition. */ public void cbPartition( Partition partition ) ; /** * Callback for when a partitioner determines the relations between partitions * that it has previously constructed. 
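     * For example, a level based partitioner may call
     * cbParents( "ID2", parents ) with parents containing just "ID1",
     * to indicate that partition ID2 depends on partition ID1.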
* * @param child the id of a partition. * @param parents the list of String objects that contain * the id's of the parents of the partition. */ public void cbParents( String child, List parents ); /** * Callback for the partitioner to signal that it is done with the processing. * * */ public void cbDone(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/BFS.java0000644000175000017500000001703511757531137025115 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.ArrayList; import java.util.Map; /** * This does a modified breadth first search of the graph to identify the levels. * A node is put in a level only if all the parents of that node are already * assigned a level. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class BFS extends Partitioner{ /** * A short description about the partitioner. */ public static final String DESCRIPTION = "Level Based Partitioning"; /** * The first in first out queue, that manages the set of gray vertices in a * breadth first search. */ private LinkedList mQueue; /** * The current depth of the nodes that are being traversed in the BFS. */ private int mCurrentDepth; /** * The overloaded constructor. * * @param root the dummy root node of the graph. * @param graph the map containing all the nodes of the graph keyed by * the logical id of the nodes. * @param properties the properties passed to the planner. */ public BFS(GraphNode root, Map graph, PegasusProperties properties) { super(root,graph,properties); mQueue = new LinkedList(); mCurrentDepth = -1; } /** * Does a constrained breadth first search to identify the partitions, and * calls out to write out the partition graph. * * @param c the callback for the partitioner. */ public void determinePartitions( Callback c ){ mCurrentDepth = 0; GraphNode node; GraphNode child; int depth = 0; List levelList = new java.util.LinkedList(); int i = 0; //they contain those nodes whose parents have not been traversed as yet //but the BFS did it. List orphans = new java.util.LinkedList(); //set the depth of the dummy root as 0 mRoot.setDepth( mCurrentDepth ); mQueue.addLast( mRoot ); while( !mQueue.isEmpty() ){ node = (GraphNode)mQueue.getFirst(); depth = node.getDepth(); if( mCurrentDepth < depth ){ if( mCurrentDepth > 0 ){ //we are done with one level! constructPartitions( c, levelList, mCurrentDepth ); } //a new level starts mCurrentDepth++; levelList.clear(); } mLogger.log( "Adding to level " + mCurrentDepth + " " + node.getID(), LogManager.DEBUG_MESSAGE_LEVEL); levelList.add( node ); //look at the orphans first to see if any //of the dependency has changed or not. 
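            //the orphan handling below is currently disabled; the bumping
            //of children whose parents have not all been traversed is
            //commented out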
/*it = orphans.iterator(); while(it.hasNext()){ child = (GraphNode)it.next(); if(child.parentsBlack()){ child.setDepth(depth + 1); System.out.println("Set depth of " + child.getID() + " to " + child.getDepth()); child.traversed(); mQueue.addLast(child); } //remove the child from the orphan it.remove(); }*/ node.setColor( GraphNode.BLACK_COLOR ); for( Iterator it = node.getChildren().iterator(); it.hasNext(); ){ child = (GraphNode)it.next(); if(!child.isColor( GraphNode.GRAY_COLOR ) && child.parentsColored( GraphNode.BLACK_COLOR )){ mLogger.log( "Adding to queue " + child.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); child.setDepth( depth + 1 ); child.setColor( GraphNode.GRAY_COLOR ); mQueue.addLast( child ); } /*else if(!child.isTraversed() && !child.parentsBlack()){ //we have to do the bumping effect System.out.println("Bumping child " + child); orphans.add(child); }*/ } node = (GraphNode)mQueue.removeFirst(); mLogger.log( "Removed " + node.getID(), LogManager.DEBUG_MESSAGE_LEVEL); } //handle the last level of the BFS constructPartitions( c, levelList, mCurrentDepth ); //all the partitions are dependant sequentially for( i = mCurrentDepth; i > 1; i-- ){ constructLevelRelations( c, i - 1, i ); } done( c ); } /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String description(){ return this.DESCRIPTION; } /** * Given a list of jobs, constructs (one or more) partitions out of it. * Calls out to the partitioner callback, for each of the partitions * constructed. * * @param c the parititoner callback * @param nodes the list of GraphNode objects on a particular level. * @param level the level as determined from the root of the workflow. */ protected void constructPartitions( Callback c, List nodes, int level ){ //we want to ignore the dummy node partition String id = getPartitionID( mCurrentDepth ); Partition p = new Partition( nodes, id ); p.setIndex( mCurrentDepth ); p.constructPartition(); mLogger.log( "Partition " + p.getID() + " is :" + p.getNodeIDs(), LogManager.DEBUG_MESSAGE_LEVEL ); c.cbPartition( p ); } /** * Calls out to the callback with appropriate relations between the partitions * constructed for the levels. * * @param c the parititoner callback * @param parent the parent level * @param child the child level. */ protected void constructLevelRelations( Callback c, int parent, int child ){ String childID = getPartitionID( child ); String parentID = getPartitionID( parent ); List parents = new ArrayList(1); parents.add( parentID ); c.cbParents( childID, parents ); } /** * Indicates that we are done with the partitioning. * Calls out to the appropriate callback function */ protected void done( Callback c ){ //done with the partitioning c.cbDone(); } /** * Constructs the id for the partition. * * @param level the depth from the root of the graph. * * @return the ID for the Partition. */ private String getPartitionID(int level){ StringBuffer sb = new StringBuffer(5); sb.append("ID").append(level); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/SingleLook.java0000644000175000017500000002264011757531137026547 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.common.logging.LogManager; import org.griphyn.vdl.classes.LFN; import org.griphyn.vdl.dax.ADAG; import org.griphyn.vdl.dax.Filename; import org.griphyn.vdl.dax.Job; import org.griphyn.vdl.euryale.Callback; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; /** * This class ends up writing a partitioned dax, that corresponds to one * partition as defined by the Partitioner. It looks up the dax once when * it is initialized, stores it in memory and then refers the memory to look * up the job details for the jobs making up a particular partition. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class SingleLook extends DAXWriter{ /** * The set of job id's in the partition. */ private Set mNodeSet; /** * A map containing the relations between the jobs making up the partition. */ private Map mRelationsMap; /** * The ADAG object containing the partitioned dax. */ private ADAG mPartADAG; /** * The number of jobs that are in the partition. */ private int mNumOfJobs; /** * The number of jobs about which the callback interface has knowledge. */ private int mCurrentNum; /** * The flag to identify that dax is in memory. */ private boolean mDAXInMemory; /** * The map containing all the jobs in the dax indexed by the job id. */ private Map mJobMap; /** * The overloaded constructor. * * @param daxFile the path to the dax file that is being partitioned. * @param directory the directory in which the partitioned daxes are to be * generated. */ public SingleLook(String daxFile, String directory){ super(daxFile,directory); mDAXInMemory = false; mJobMap = null; } /** * It writes out a dax consisting of the jobs as specified in the partition. * * @param partition the partition object containing the relations and id's * of the jobs making up the partition. * @param index the index of the partition. * * @return boolean true if dax successfully generated and written. * false in case of error. */ public boolean writePartitionDax(Partition partition, int index){ Iterator it; List fileList = null; List parentIDs = null; //do the cleanup from the previous partition write mPartADAG = null; mNodeSet = null; mRelationsMap = null; //get from the partition object the set of jobs //and relations between them mNodeSet = partition.getNodeIDs(); mRelationsMap = partition.getRelations(); mNumOfJobs = mNodeSet.size(); //set the current number of jobs whose information we have mCurrentNum = 0; if(!mDAXInMemory){ mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_PARSE_DAX, LoggingKeys.DAX_ID, mDaxFile ); //dax is not in memory. 
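            //parse the dax once, and cache all its jobs in mJobMap.
            //subsequent calls to writePartitionDax() reuse the in memory
            //map instead of reparsing the dax file.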
mJobMap = new java.util.HashMap(); //Callback takes care of putting dax in memory Callback callback = new MyCallBackHandler(); org.griphyn.vdl.euryale.DAXParser d = new org.griphyn.vdl.euryale.DAXParser(null); d.setCallback(callback); //start the parsing of the dax d.parse(mDaxFile); mDAXInMemory = true; mLogger.logEventCompletion(); } mPartADAG = new ADAG(0,index,mPartitionName); //get the job information for the jobs in the partiton. it = mNodeSet.iterator(); while(it.hasNext()){ String id = (String)it.next(); Job job = (Job)mJobMap.get(id); if(job == null){ throw new RuntimeException( "Unable to find information about job" + id + "while constructing partition" ); } //add the job to ADAG mPartADAG.addJob(job); //build up the files used by the partition fileList = job.getUsesList(); //iterate through the file list //populate it in the ADAG object Iterator fileIt = fileList.iterator(); while(fileIt.hasNext()){ Filename file = (Filename)fileIt.next(); mPartADAG.addFilename(file.getFilename(), (file.getLink() == LFN.INPUT)?true:false, file.getTemporary(), file.getDontRegister(),file.getDontTransfer()); } } //put in the relations amongst //jobs in the partition //add the relations between the jobs in the partition to the ADAG it = mRelationsMap.keySet().iterator(); while(it.hasNext()){ String childID = (String)it.next(); parentIDs = (List)mRelationsMap.get(childID); //get all the parents of the children and populate them in the //ADAG object Iterator it1 = parentIDs.iterator(); while(it1.hasNext()){ mPartADAG.addChild(childID,(String)it1.next()); } } mLogger.log("Writing out the DAX File for partition " + partition.getID(), LogManager.DEBUG_MESSAGE_LEVEL); //do the actual writing to the file this.initializeWriteHandle(index); try{ mPartADAG.toXML(mWriteHandle, new String()); } catch(IOException e){ mLogger.log("Error while writing out a partition dax :" + e.getMessage(),LogManager.ERROR_MESSAGE_LEVEL); return false; } this.close(); mLogger.log("Writing out the DAX File for partition - DONE" + partition.getID(), LogManager.DEBUG_MESSAGE_LEVEL); //generation was successful return true; } /** * The internal callback handler for the DAXParser in Euryale. It stores * all the jobs making up the dax in an internal map, which is then referred * to get the job information for the jobs making up the partition. */ private class MyCallBackHandler implements Callback { /** * The empty constructor. */ public MyCallBackHandler(){ } /** * Callback when the opening tag was parsed. The attribute maps each * attribute to its raw value. The callback initializes the DAG * writer. * * @param attributes is a map of attribute key to attribute value */ public void cb_document(Map attributes) { //do nothing at the moment } /** * Callback for the filename from section 1 filenames. * Does nothing as the filenames for the partitioned dax are * constructed from the jobs. */ public void cb_filename(Filename filename) { //an empty implementation } /** * Callback for the job from section 2 jobs. This ends up storing all * the jobs in the memory to be used for writing out the partition dax. * * @param job the object containing the job information. */ public void cb_job(Job job) { String id = job.getID(); //put it in hashmap and also check for duplicate if(mJobMap.put(id,job) != null){ //warn for the duplicate entry mLogger.log("Entry for the job already in ", LogManager.WARNING_MESSAGE_LEVEL); } if(mCurrentNum == mNumOfJobs){ //exit or stop the parser. 
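                //signal completion. note that mCurrentNum is never
                //incremented by this handler, so this early exit only
                //triggers for an empty partition.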
cb_done(); } } /** * Callback for child and parent relationships from section 3. * This is an empty implementation, as the Partition object * contains the relations amongst the jobs making up the partition. * * @param child is the IDREF of the child element. * @param parents is a list of IDREFs of the included parents. */ public void cb_parents(String child, List parents) { //an empty implementation } /** * Callback when the parsing of the document is done. While this state * could also be determined from the return of the invocation of the * parser, that return may be hidden in another place of the code. * This callback can be used to free callback-specific resources. */ public void cb_done(){ //an empty implementation } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/partitioner/MultipleLook.java0000644000175000017500000001535311757531137027124 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.partitioner; import org.griphyn.vdl.classes.LFN; import edu.isi.pegasus.common.logging.LogManager; import org.griphyn.vdl.dax.ADAG; import org.griphyn.vdl.dax.Filename; import org.griphyn.vdl.dax.Job; import org.griphyn.vdl.euryale.Callback; import java.io.IOException; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; /** * This class ends up writing a partitioned dax, that corresponds to one * partition as defined by the Partitioner. Whenever it is called to write * out a dax corresponding to a partition it looks up the dax i.e parses the * dax and gets the information about the jobs making up the partition. * * @author Karan Vahi * @version $Revision: 2576 $ */ public class MultipleLook extends DAXWriter{ /** * The set of job id's in the partition. */ private Set mNodeSet; /** * A map containing the relations between the jobs making up the partition. */ private Map mRelationsMap; /** * The ADAG object containing the partitioned dax. */ private ADAG mPartADAG; /** * The number of jobs that are in the partition. */ private int mNumOfJobs; /** * The number of jobs about which the callback interface has knowledge. */ private int mCurrentNum; /** * The index of the partition that is being written out. */ private int mIndex; /** * The overloaded constructor. * * @param daxFile the path to the dax file that is being partitioned. * @param directory the directory in which the partitioned daxes are to be * generated. */ public MultipleLook(String daxFile, String directory){ super(daxFile,directory); mIndex = -1; } /** * It writes out a dax consisting of the jobs as specified in the partition. * * @param partition the partition object containing the relations and id's * of the jobs making up the partition. * * @return boolean true if dax successfully generated and written. * false in case of error. 
*/ public boolean writePartitionDax(Partition partition, int index){ //do the cleanup mPartADAG = null; mNodeSet = null; mRelationsMap = null; mIndex = index; //get from the partition object the set of jobs //and relations between them mNodeSet = partition.getNodeIDs(); mRelationsMap = partition.getRelations(); mNumOfJobs = mNodeSet.size(); //set the current number of jobs whose information we have mCurrentNum = 0; mPartADAG = new ADAG(0,index,mPartitionName); Callback callback = new MyCallBackHandler(); org.griphyn.vdl.euryale.DAXParser d = new org.griphyn.vdl.euryale.DAXParser(null); d.setCallback(callback); d.parse(mDaxFile); //do the actual writing to the file this.initializeWriteHandle(mIndex); try{ mPartADAG.toXML(mWriteHandle, new String()); } catch(IOException e){ mLogger.log("Error while writing out a partition dax :" + e.getMessage(),LogManager.ERROR_MESSAGE_LEVEL); return false; } this.close(); return true; } /** * The internal callback handler for the DAXParser in Euryale. It only * stores the jobs that are part of the dax, that are then populated into * the internal ADAG object that is used to write out the dax file * corresponding to the partition. */ private class MyCallBackHandler implements Callback { /** * The empty constructor. */ public MyCallBackHandler(){ } /** * Callback when the opening tag was parsed. The attribute maps each * attribute to its raw value. The callback initializes the DAG * writer. * * @param attributes is a map of attribute key to attribute value */ public void cb_document(Map attributes) { //do nothing at the moment } public void cb_filename(Filename filename) { } /** * */ public void cb_job(Job job) { List fileList = null; Iterator it; if(mNodeSet.contains(job.getID())){ mCurrentNum++; mPartADAG.addJob(job); fileList = job.getUsesList(); //iterate through the file list //populate it in the ADAG object it = fileList.iterator(); while(it.hasNext()){ Filename file = (Filename)it.next(); mPartADAG.addFilename(file.getFilename(), (file.getLink() == LFN.INPUT)?true:false, file.getTemporary(), file.getDontRegister(),file.getDontTransfer()); } } } public void cb_parents(String child, List parents) { } public void cb_done() { List parentIDs; //print the xml generated so far if(mCurrentNum != mNumOfJobs){ //throw an error and exit. throw new RuntimeException( "Could not find information about all the jobs" + " in the dax for partition " + mNodeSet); } //add the relations between the jobs in the partition to the ADAG Iterator it = mRelationsMap.keySet().iterator(); while(it.hasNext()){ String childID = (String)it.next(); parentIDs = (List)mRelationsMap.get(childID); //get all the parents of the children and populate them in the //ADAG object Iterator it1 = parentIDs.iterator(); while(it1.hasNext()){ mPartADAG.addChild(childID,(String)it1.next()); } } } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/common/0000755000175000017500000000000011757531667022572 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/common/PegRandom.java0000644000175000017500000000546211757531137025310 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.common; import java.util.Random; /** * A helper class that returns random values * using the java.util.Random class. It is a singleton * instance, and all functions in Pegasus call it * to get random values. The singleton ensures * that successive values are random relative * to one another. If it is not used, and each caller * does a new Random() instead, all the calls effectively * end up setting the same seed value and, by extension, * one gets the same value. Using just one Random object * we hope to bypass the problem. * *
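* <p>A minimal usage sketch (the values shown are illustrative):</p>
* <pre>
*   double d = PegRandom.nextDouble();       // uniform in [0.0, 1.0)
*   int i    = PegRandom.getInteger( 0, 9 ); // uniform integer in [0, 9]
* </pre>
*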

* <p>Copyright: Copyright (c) 2002</p>
* <p>Company: USC/ISI</p>
* @author Gaurang Mehta * @author Karan Vahi * @version $Revision: 2567 $ */ public class PegRandom { /** * The object containing the instance * of the java.util.Random class. */ private static Random mRandom; /** * The static initializer. It is executed * only once, when the class is first loaded. */ static { mRandom = new Random(); } /** * Returns a uniformly distributed double value between * 0.0 (inclusive) and 1.0 (exclusive). */ public static double nextDouble(){ return mRandom.nextDouble(); } /** * Returns a normally distributed (Gaussian) random variable with * mean 0.0 and standard deviation 1.0. */ public static double nextGaussian(){ return mRandom.nextGaussian(); } /** * This calls the next double function * and returns an integer between * 0 and the upper index, both inclusive. */ public static int getInteger(int upperIndex){ return getInteger(0,upperIndex); } /** * This calls the next double function * and returns an integer between the * lower index and the upper index, both inclusive. */ public static int getInteger(int lowerIndex, int upperIndex){ double value = nextDouble(); int val = 0; //adding one, as the cast to int //truncates the value instead of //rounding it off. upperIndex += 1; value = lowerIndex + ( (upperIndex - lowerIndex) * value); val = (int)value; if(val == upperIndex) //get the one lower value val -= 1; return val; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/common/Utility.java0000644000175000017500000000653511757531137025101 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.common; import java.net.URL; import java.util.StringTokenizer; /** * A utility class that contains a few common utility/helper functions used in * Pegasus. At present they are preliminary URL decomposition functions. * * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2567 $ */ public class Utility { /** * This returns the host name of the server, given a url prefix. * * @param urlPrefix the urlPrefix of the server. * @return the host name of the server. */ public static String getHostName(String urlPrefix) { StringTokenizer st = new StringTokenizer(urlPrefix); String hostName = ""; String token = ""; int count = 0; while (st.hasMoreTokens()) { token = st.nextToken("/"); count++; if (count == 2) { hostName = token.trim(); //System.out.println("host name " + hostName); return hostName; } } return hostName; } /** * Prunes the url prefix to ensure that only the url prefix as wanted * by Pegasus goes through. This is due to the different manner in which * the url prefix was used earlier. * * For example, when passed the url * gsiftp://dataserver.phys.uwm.edu/~/griphyn_test/ligodemo_output/ * the function returns gsiftp://dataserver.phys.uwm.edu. * * @param url the url prefix. * @return the pruned url prefix. */ public static String pruneURLPrefix(String url) { String hostName = getHostName(url); url = url.substring(0, url.lastIndexOf(hostName) + hostName.length()).trim(); return url; } /** * It returns the absolute path of the url. The absolute path is the * directory path in the URL.
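* For example, reusing the gsiftp url from the method above, for
* gsiftp://dataserver.phys.uwm.edu/~/griphyn_test/ligodemo_output/ the
* returned path would be /~/griphyn_test/ligodemo_output/ (an
* illustrative value, worked out from the fallback logic below).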
In the GVDS lingo, it refers to the mount * points too. * * @param url the url whose path component is required. * @return the absolute path of the url. */ public static String getAbsolutePath( String url ) { String hostName = null; URL u = null; //try using the java url class to get mount point try { u = new URL( url ); } catch ( Exception e ) { //a malformed exception was caught, most probably due to an //invalid protocol/schema, e.g. the gsiftp trigger u = null; //use our own method to get the host name hostName = getHostName( url ); } return ( u == null ) ? //malformed url. try to do some inhouse magic url.substring( url.indexOf( hostName ) + hostName.length() ).trim() : //the url was parsed successfully. use its path component u.getPath(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/common/PegasusConfiguration.java0000644000175000017500000002161711757531137027573 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.common; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.transfer.sls.SLSFactory; import java.util.Iterator; import java.util.Properties; import java.util.Set; /** * A utility class that returns the JAVA Properties that need to be set based * on a configuration value. * * @author Karan Vahi * @version $Revision: 4935 $ */ public class PegasusConfiguration { /** * The property key for pegasus configuration. */ public static final String PEGASUS_CONFIGURATION_PROPERTY_KEY = "pegasus.data.configuration"; /** * The deprecated value for the S3 configuration. */ public static final String DEPRECATED_S3_CONFIGURATION_VALUE = "S3"; /** * The value for the shared filesystem configuration. */ public static final String SHARED_FS_CONFIGURATION_VALUE = "sharedfs"; /** * The value for the non shared filesystem configuration. */ public static final String NON_SHARED_FS_CONFIGURATION_VALUE = "nonsharedfs"; /** * The value for the condor configuration. */ public static final String CONDOR_CONFIGURATION_VALUE = "condorio"; /** * The deprecated value for the condor configuration. */ public static final String DEPRECATED_CONDOR_CONFIGURATION_VALUE = "Condor"; /** * The logger to use. */ private LogManager mLogger; /** * Overloaded Constructor * * @param logger the logger to use. */ public PegasusConfiguration( LogManager logger ){ mLogger = logger; } /** * Loads configuration specific properties into PegasusProperties, * and adjusts planner options accordingly. * * @param properties the Pegasus Properties * @param options the PlannerOptions.
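*
* <p>For example (the site name is illustrative): when worker node execution
* is enabled and the SLS transfer implementation is Condor, as set by the
* <code>condorio</code> data configuration, an execution site with no staging
* site mapped gets its staging site set to <code>local</code>; a staging site
* other than <code>local</code> triggers a RuntimeException.</p>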
*/ public void loadConfigurationPropertiesAndOptions( PegasusProperties properties, PlannerOptions options ){ this.loadConfigurationProperties( properties ); //sanitize on the planner options if( properties.executeOnWorkerNode() ){ String slsImplementor = properties.getSLSTransferImplementation(); if( slsImplementor == null ){ slsImplementor = SLSFactory.DEFAULT_SLS_IMPL_CLASS; } //check for the sls implementation if( slsImplementor.equalsIgnoreCase( DEPRECATED_CONDOR_CONFIGURATION_VALUE ) ){ for( String site : (Set)options.getExecutionSites() ){ //sanity check to make sure staging site is set to local String stagingSite = options.getStagingSite( site ); if( stagingSite == null ){ stagingSite = "local"; //set it to local site mLogger.log( "Setting staging site for " + site + " to " + stagingSite , LogManager.CONFIG_MESSAGE_LEVEL ); options.addToStagingSitesMappings( site , stagingSite ); } else if (!( stagingSite.equalsIgnoreCase( "local" ) )){ StringBuffer sb = new StringBuffer(); sb.append( "Mismatch in the between execution site ").append( site ). append( " and staging site " ).append( stagingSite ). append( " . For Condor IO staging site should be set to local . " ); throw new RuntimeException( sb.toString() ); } } } } } /** * Loads configuration specific properties into PegasusProperties * * @param properties the Pegasus Properties. */ private void loadConfigurationProperties( PegasusProperties properties ){ String configuration = properties.getProperty( PEGASUS_CONFIGURATION_PROPERTY_KEY ) ; Properties props = this.getConfigurationProperties(configuration); for( Iterator it = props.keySet().iterator(); it.hasNext(); ){ String key = (String) it.next(); String value = props.getProperty( key ); this.checkAndSetProperty( properties, key, value ); } } /** * Returns Properties corresponding to a particular configuration. * * @param configuration the configuration value. 
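*
* <p>For instance, per the implementation below, a value of
* <code>condorio</code>, or the deprecated <code>Condor</code>, yields:</p>
* <pre>
*   pegasus.transfer.sls.*.impl = Condor
*   pegasus.execute.*.filesystem.local = true
*   pegasus.gridstart = PegasusLite
*   pegasus.transfer.worker.package = true
* </pre>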
* * @return Properties */ public Properties getConfigurationProperties( String configuration ){ //sanity check if( configuration == null ){ //default is the sharedfs configuration = SHARED_FS_CONFIGURATION_VALUE; } Properties p = new Properties( ); if( configuration.equalsIgnoreCase( DEPRECATED_S3_CONFIGURATION_VALUE ) || configuration.equalsIgnoreCase( NON_SHARED_FS_CONFIGURATION_VALUE ) ){ //throw warning for deprecated value if( configuration.equalsIgnoreCase( DEPRECATED_S3_CONFIGURATION_VALUE ) ){ mLogger.log( deprecatedValueMessage( PEGASUS_CONFIGURATION_PROPERTY_KEY,DEPRECATED_S3_CONFIGURATION_VALUE ,NON_SHARED_FS_CONFIGURATION_VALUE ), LogManager.WARNING_MESSAGE_LEVEL ); } p.setProperty( "pegasus.execute.*.filesystem.local", "true" ); p.setProperty( "pegasus.gridstart", "PegasusLite" ); //we want the worker package to be staged, unless user sets it to false explicitly p.setProperty( PegasusProperties.PEGASUS_TRANSFER_WORKER_PACKAGE_PROPERTY, "true" ); } else if ( configuration.equalsIgnoreCase( CONDOR_CONFIGURATION_VALUE ) || configuration.equalsIgnoreCase( DEPRECATED_CONDOR_CONFIGURATION_VALUE ) ){ //throw warning for deprecated value if( configuration.equalsIgnoreCase( DEPRECATED_CONDOR_CONFIGURATION_VALUE ) ){ mLogger.log( deprecatedValueMessage( PEGASUS_CONFIGURATION_PROPERTY_KEY,DEPRECATED_CONDOR_CONFIGURATION_VALUE ,CONDOR_CONFIGURATION_VALUE ), LogManager.WARNING_MESSAGE_LEVEL ); } p.setProperty( "pegasus.transfer.sls.*.impl", "Condor" ); p.setProperty( "pegasus.execute.*.filesystem.local", "true" ); p.setProperty( "pegasus.gridstart", "PegasusLite" ); //we want the worker package to be staged, unless user sets it to false explicitly p.setProperty( PegasusProperties.PEGASUS_TRANSFER_WORKER_PACKAGE_PROPERTY, "true" ); } else if( configuration.equalsIgnoreCase( SHARED_FS_CONFIGURATION_VALUE ) ){ p.setProperty( "pegasus.execute.*.filesystem.local", "false" ); } return p; } /** * Checks for a property, if it does not exist then sets the property to * the value passed * * @param key the property key * @param value the value to set to */ protected void checkAndSetProperty( PegasusProperties properties, String key, String value ) { String propValue = properties.getProperty( key ); if( propValue == null ){ //set the value properties.setProperty( key, value ); } else{ //log a warning StringBuffer sb = new StringBuffer(); sb.append( "Property Key " ).append( key ).append( " already set to " ). append( propValue ).append( ". Will not be set to - ").append( value ); mLogger.log( sb.toString(), LogManager.WARNING_MESSAGE_LEVEL ); } } /** * Returns the deperecated value message * * @param property the property * @param deprecatedValue the deprecated value * @param updatedValue the updated value * * @return message */ protected String deprecatedValueMessage(String property, String deprecatedValue, String updatedValue) { StringBuffer sb = new StringBuffer(); sb.append( " The property " ).append( property ) .append( " = " ).append( deprecatedValue ). append( " is deprecated. Replace with ").append( property ) .append( " = " ). 
append( updatedValue ); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/common/logger.sh0000644000175000017500000000334411757531137024401 0ustar ryngerynge#!/bin/bash maxsize=1000000 # MAX FILE SIZE = 1MB maxfiles=5 #MAX LOG FILES = 5 prefix=`date +%s` #VDS_LOG_FILE="/var/Pegasus/logging/rls.log" #VDS_LOG_FILE="./rls.log" if [ -z "${VDS_LOG_FILE}" ] then VDS_LOG_FILE="/var/Pegasus/logging/rls.log" fi if [ -z "${JAVA_HOME}" ] then echo "JAVA_HOME is not defined" exit 1 fi if [ ! -e "$VDS_LOG_FILE" ] then echo > "${VDS_LOG_FILE}" chmod 666 "$VDS_LOG_FILE" fi if [ ! -w "${VDS_LOG_FILE}" ] then echo "you do not have write permissions for $VDS_LOG_FILE" exit 1 fi logdir=`dirname $VDS_LOG_FILE` logname=`basename $VDS_LOG_FILE` logsize="`ls -l $VDS_LOG_FILE | awk '{print $5}'`" if [[ $logsize -ge $maxsize ]] #rotate logs then numfiles=`ls -l $logdir | grep $logname | wc | awk '{print $1}'` if [[ $numfiles -ge $maxfiles ]] #keep at most $maxfiles log files then min=9999999999 for i in `ls $logdir | grep "$logname\."`; do #find oldest file time=`echo $i | cut -d "." -f 3` if [[ $time -le $min ]] then min=$time fi done rm "$VDS_LOG_FILE.$min" #remove oldest file fi mv $VDS_LOG_FILE "$VDS_LOG_FILE.$prefix" #archive current log echo > $VDS_LOG_FILE #create new log file fi for f in "$@"; do echo $'\n' >> $VDS_LOG_FILE # log rls stats date >> $VDS_LOG_FILE $JAVA_HOME/bin/java org/griphyn/cPlanner/common/StatRLS $f >> $VDS_LOG_FILE done #echo done pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/common/UserOptions.java0000644000175000017500000001166411757531137025727 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.common; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.PlannerOptions; import java.util.Collection; import java.util.Set; import java.util.Vector; /** * A Singleton wrapper around the * PlannerOptions class to get hold * of the options specified by the * user to run Pegasus. This allows * us to get hold of the options anywhere, * without having to pass them through * the constructors of the various classes. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2572 $ * * @see org.griphyn.cPlanner.classes.PlannerOptions */ public class UserOptions { /** * The object holding the * Singleton instance. */ private static UserOptions mPOptions = null; /** * The Logger object used to * log the messages. */ private LogManager mLogger; /** * The object containing all the * user options. */ private PlannerOptions mPlannerOpts = null; /** * The method used to invoke the * singleton instance the first * time, from the CPlanner class. * * @param opt the object containing * the options to the Planner. */ public static UserOptions getInstance(PlannerOptions opt){ if(mPOptions == null && opt != null){ mPOptions = new UserOptions(opt); } return mPOptions; } /** * The method used to invoke the * singleton instance from the * other classes.
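*
* <p>A minimal usage sketch (the options object is hypothetical):</p>
* <pre>
*   UserOptions.getInstance( plannerOptions ); // seeded once, from CPlanner
*   String dax = UserOptions.getInstance().getDaxFile();
* </pre>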
*/ public static UserOptions getInstance(){ return getInstance(null); } /** * The private constructor that initializes the user options. * This should be invoked from the CPlanner class in the toolkit * package. * * @throws RuntimeException in case of incorrect initialization. */ private UserOptions(PlannerOptions opt) { if(opt == null){ mLogger.log("Wrong Invocation of the Singleton Instance", LogManager.FATAL_MESSAGE_LEVEL); throw new RuntimeException( "Wrong Invocation of the Singleton Instance" ); } mPlannerOpts = opt; } /** * Returns the dax file, that * was generated by Chimera * containing the description of * the Abstract plan. */ public String getDaxFile(){ return this.mPlannerOpts.getDAX(); } /** * Returns output pool where the * user wants the data products * to be transferred to. */ public String getOutputPool(){ return this.mPlannerOpts.getOutputSite(); } /** * Returns the options passed to the planner. * * @return PlannerOptions */ public PlannerOptions getOptions(){ return this.mPlannerOpts; } /** * Returns the directory where the * user wants the submit files to be * generated. */ public String getSubmitFileDir(){ return this.mPlannerOpts.getSubmitDirectory(); } /** * Returns the list of execution pools * where the user wants the data * products to be computed. */ public Collection getExecutionSites(){ //return new Vector(this.mPlannerOpts.getExecutionSites()); return this.mPlannerOpts.getExecutionSites(); } /** * Sets the execution sites. * * @param sites the Set containing the site names. */ public void setExecutionSites(Collection sites){ this.mPlannerOpts.setExecutionSites(sites); } /** * Returns whether we want to create * a Random Directory or not. */ public boolean generateRandomDirectory(){ return this.mPlannerOpts.generateRandomDirectory(); } /** * It returns the set of cache files. */ public Set getCacheFiles(){ return this.mPlannerOpts.getCacheFiles(); } /** * Returns the name of the random * directory, only if the generate * Random Dir flag is set. * Else it returns null. */ public String getRandomDirName(){ if ( this .generateRandomDirectory()){ return this.mPlannerOpts.getRandomDir(); } return null; } /** * Returns true if the user had asked * for the force option. */ public boolean getForceOption(){ return this.mPlannerOpts.getForce(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/common/PegasusProperties.java0000644000175000017500000025725111757531137027125 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ package edu.isi.pegasus.planner.common; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.common.util.CommonProperties; import edu.isi.pegasus.common.util.Boolean; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.namespace.Namespace; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.io.FileOutputStream; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.MissingResourceException; import java.util.Properties; import java.util.Set; import java.util.HashSet; import java.util.Map; /** * A Central Properties class that keeps track of all the properties used by * Pegasus. All other classes access the methods in this class to get the value * of the property. It access the CommonProperties class to read the property file. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 4935 $ * * @see org.griphyn.common.util.CommonProperties */ public class PegasusProperties { /** * the name of the property to disable invoke functionality */ public static final String DISABLE_INVOKE_PROPERTY = "pegasus.gridstart.invoke.disable"; public static final String PEGASUS_WORKER_NODE_EXECUTION_PROPERTY = "pegasus.execute.*.filesystem.local"; public static final String PEGASUS_TRANSFER_WORKER_PACKAGE_PROPERTY = "pegasus.transfer.worker.package"; //Replica Catalog Constants public static final String DEFAULT_RC_COLLECTION = "GriphynData"; public static final String DEFAULT_RLI_URL = null; public static final String DEFAULT_RLS_QUERY_MODE = "bulk"; public static final String DEFAULT_RLS_EXIT_MODE = "error"; //public static final String DEFAULT_REPLICA_MODE = "rls"; public static final String DEFAULT_RLS_QUERY_ATTRIB = "false"; public static final String DEFAULT_LRC_IGNORE_URL = null; public static final String DEFAULT_RLS_TIMEOUT = "30"; public static final String DEFAULT_EXEC_DIR = ""; public static final String DEFAULT_STORAGE_DIR = ""; public static final String DEFAULT_TC_MODE = "Text"; public static final String TC_TEXT_FILE = "tc.text"; public static final String TC_DATA_FILE = "tc.data"; public static final String DEFAULT_POOL_MODE = "XML3"; public static final String DEFAULT_CONDOR_BIN_DIR = ""; public static final String DEFAULT_CONDOR_CONFIG_DIR = ""; public static final String SC_XML_FILE = "sites.xml"; public static final String SC_XML3_FILE = "sites.xml3"; public static final String CONDOR_KICKSTART = "kickstart-condor"; //transfer constants public static final String DEFAULT_STAGING_DELIMITER = "-"; public static final String DEFAULT_TRANSFER_PROCESSES = "4"; public static final String DEFAULT_TRANSFER_STREAMS = "1"; //grid start constants public static final String DEFAULT_GRIDSTART_MODE = "Kickstart"; public static final String DEFAULT_INVOKE_LENGTH = "4000"; //site selector constants public static final String DEFAULT_SITE_SELECTOR = "Random"; public static final String DEFAULT_SITE_SELECTOR_TIMEOUT = "300"; public static final String DEFAULT_SITE_SELECTOR_KEEP = "onerror"; ///some simulator constants that are used public static final String DEFAULT_DATA_MULTIPLICATION_FACTOR = "1"; public static final String DEFAULT_COMP_MULTIPLICATION_FACTOR = "1"; public static final String DEFAULT_COMP_ERROR_PERCENTAGE = "0"; public static final String DEFAULT_COMP_VARIANCE_PERCENTAGE = "0"; //collapsing constants public static final String DEFAULT_JOB_AGGREGATOR = "SeqExec"; //some tranformation catalog constants public static final String 
DEFAULT_TC_MAPPER_MODE = "All"; public static final String DEFAULT_TX_SELECTOR_MODE = "Random"; //logging constants public static final String DEFAULT_LOGGING_FILE = "stdout"; /** * Default properties that applies priorities to all kinds of transfer * jobs. */ public static final String ALL_TRANSFER_PRIORITY_PROPERTY_KEY = "pegasus.transfer.*.priority"; /** * The property key designated the root workflow uuid. */ public static final String ROOT_WORKFLOW_UUID_PROPERTY_KEY = "pegasus.workflow.root.uuid"; /** * An enum defining The scope for cleanup algorithm */ public enum CLEANUP_SCOPE{ fullahead, deferred }; /** * The default DAXCallback that is loaded, if none is specified by the user. */ private static final String DEFAULT_DAX_CALLBACK = "DAX2Graph"; /** * Ensures only one object is created always. Implements the Singleton. */ private static PegasusProperties pegProperties = null; /** * The value of the PEGASUS_HOME environment variable. */ private String mPegasusHome; /** * The object holding all the properties pertaining to the VDS system. */ private CommonProperties mProps; /** * The Logger object. */ // private LogManager mLogger; /** * The String containing the messages to be logged. */ private String mLogMsg; /** * The default path to the transformation catalog. */ private String mDefaultTC; /** * The default path to the pool file. */ private String mDefaultPoolFile; /** * The default transfer priority that needs to be applied to the transfer * jobs. */ private String mDefaultTransferPriority; /** * The set containing the deprecated properties specified by the user. */ private Set mDeprecatedProperties; /** * The pointer to the properties file that is written out in the submit directory. */ private String mPropsInSubmitDir; /** * Profiles that are specified in the properties */ private Profiles mProfiles; private static Map mNamepsaceToPropertiesPrefix; public Map namespaceToPropertiesPrefix(){ if( mNamepsaceToPropertiesPrefix == null ){ mNamepsaceToPropertiesPrefix = new HashMap(); mNamepsaceToPropertiesPrefix.put( Profiles.NAMESPACES.condor, "condor" ); mNamepsaceToPropertiesPrefix.put( Profiles.NAMESPACES.dagman, "dagman" ); mNamepsaceToPropertiesPrefix.put( Profiles.NAMESPACES.globus, "globus" ); mNamepsaceToPropertiesPrefix.put( Profiles.NAMESPACES.env, "env" ); mNamepsaceToPropertiesPrefix.put( Profiles.NAMESPACES.hints, "hints" ); mNamepsaceToPropertiesPrefix.put( Profiles.NAMESPACES.pegasus, "pegasus" ); mNamepsaceToPropertiesPrefix.put( Profiles.NAMESPACES.selector, "selector" ); mNamepsaceToPropertiesPrefix.put( Profiles.NAMESPACES.stat, "stat" ); } return mNamepsaceToPropertiesPrefix; } /** * Returns an instance to this properties object. * * @return a handle to the Properties class. */ public static PegasusProperties getInstance( ){ return getInstance( null ); } /** * Returns an instance to this properties object. * * @param confProperties the path to conf properties, that supersede the * loading of properties from $PEGASUS_HOME/.pegasusrc * * @return a handle to the Properties class. */ public static PegasusProperties getInstance( String confProperties ){ return nonSingletonInstance( confProperties ); } /** * To get a reference to the the object. The properties file that is loaded is * from the path specified in the argument. * This is *not implemented* as singleton. However the invocation of this * does modify the internally held singleton object. 
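*
* <p>A typical entry point is the related getInstance method; the path
* shown is illustrative:</p>
* <pre>
*   PegasusProperties props = PegasusProperties.getInstance( "/path/to/pegasus.properties" );
* </pre>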
* * @param confProperties the path to conf properties, that supersede the * loading of properties from $PEGASUS_HOME/.pegasusrc * * @return a handle to the Properties class. */ protected static PegasusProperties nonSingletonInstance( String confProperties ) { return new PegasusProperties( confProperties ); } /** * To get a reference to the the object. The properties file that is loaded is * from the path specified in the argument. * * This is *not implemented* as singleton. However the invocation of this * does modify the internally held singleton object. * * * @return a handle to the Properties class. */ public static PegasusProperties nonSingletonInstance() { //return nonSingletonInstance( CommonProperties.PROPERTY_FILENAME ); return nonSingletonInstance( null ); } /** * The constructor that constructs the default paths to the various * configuration files, and populates the singleton instance as required. If * the properties file passed is null, then the singleton instance is * invoked, else the non singleton instance is invoked. * * @param confProperties the path to conf properties, that supersede the * loading of properties from $PEGASUS_HOME/.pegasusrc */ private PegasusProperties( String confProperties ) { // mLogger = LogManager.getInstance(); mDeprecatedProperties = new HashSet(5); initializePropertyFile( confProperties ); mDefaultPoolFile = getDefaultPathToSC(); mDefaultTC = getDefaultPathToTC(); mDefaultTransferPriority= getDefaultTransferPriority(); } /** * Retrieves profiles from the properties * * @param properties the common properties so far * * @return profiles object. */ public Profiles retrieveProfilesFromProperties( ) { //retrieve up all the profiles that are specified in //the properties if( mProfiles == null ){ mProfiles = retrieveProfilesFromProperties( mProps ); //System.out.println( mProfiles ); } return mProfiles; } /** * Retrieves profiles from the properties * * @param properties the common properties so far * * @return profiles object. */ protected Profiles retrieveProfilesFromProperties( CommonProperties properties ) { Profiles profiles = new Profiles( ); //retrieve some matching properties first //traverse through all the enum keys for ( Profiles.NAMESPACES n : Profiles.NAMESPACES.values() ){ Properties p = properties.matchingSubset( namespaceToPropertiesPrefix().get( n ), false ); for( Map.Entry entry : p.entrySet() ){ profiles.addProfile( n, (String)entry.getKey(), (String)entry.getValue() ); } } return profiles; } /** * Accessor to the bin directory of the Pegasus install * * @return the "etc" directory of the VDS runtime system. */ public File getBinDir() { return mProps.getBinDir(); } /** * Accessor to the schema directory of the Pegasus install * * @return the "etc" directory of the VDS runtime system. */ public File getSchemaDir() { return mProps.getSchemaDir(); } /** * Accessor to the bin directory of the Pegasus install * * @return the "etc" directory of the VDS runtime system. */ public File getSharedDir() { return mProps.getSharedStateDir(); } /** * Returns all the profiles relevant to a particular namespace * * @param ns the namespace corresponding to which you need the profiles */ public Namespace getProfiles( Profiles.NAMESPACES ns ){ return this.retrieveProfilesFromProperties().get( ns ); } /** * Returns the default path to the transformation catalog. 
Currently the * default path defaults to $PEGASUS_HOME/etc/tc.text if transformation * type is Text else $PEGASUS_HOME/etc/tc.data * * @return the default path to transformation catalog file */ public String getDefaultPathToTC() { String name = (getTCMode().equalsIgnoreCase( DEFAULT_TC_MODE ))? PegasusProperties.TC_TEXT_FILE: PegasusProperties.TC_DATA_FILE; File f = new File( mProps.getSysConfDir(),name); //System.err.println("Default Path to SC is " + f.getAbsolutePath()); return f.getAbsolutePath(); } /** * Returns the default path to the site catalog file. * The default path is constructed on the basis of the mode set by * the user. * * @return $PEGASUS_HOME/etc/sites.xml3 if the pool mode is XML3, else * $PEGASUS_HOME/etc/sites.xml * * @see #getPoolMode() */ public String getDefaultPathToSC() { String name = (getPoolMode().equalsIgnoreCase( DEFAULT_POOL_MODE ))? PegasusProperties.SC_XML3_FILE: PegasusProperties.SC_XML_FILE; File f = new File( mProps.getSysConfDir(),name); //System.err.println("Default Path to SC is " + f.getAbsolutePath()); return f.getAbsolutePath(); } /** * Returns the default path to the condor kickstart. Currently the path * defaults to $PEGASUS_HOME/bin/kickstart-condor. * * @return default path to kickstart condor. */ public String getDefaultPathToCondorKickstart() { StringBuffer sb = new StringBuffer( 50 ); sb.append( mPegasusHome ); sb.append( File.separator ); sb.append( "bin" ); sb.append( File.separator ); sb.append( CONDOR_KICKSTART ); return sb.toString(); } /** * Gets the handle to the properties file. The singleton instance is * invoked if the properties file is null (partly due to the way CommonProperties * is implemented ), else the non singleton is invoked. * * @param confProperties the path to conf properties, that supersede the * loading of properties from $PEGASUS_HOME/.pegasusrc * * */ private void initializePropertyFile( String confProperties ) { try { /* mProps = ( confProperties == null ) ? //invoke the singleton instance CommonProperties.instance() : //invoke the non singleton instance CommonProperties.nonSingletonInstance( confProperties ); */ //we always load non singleton instance? //Karan April 27, 2011 mProps = CommonProperties.nonSingletonInstance( confProperties ); } catch ( IOException e ) { mLogMsg = "unable to read property file: " + e.getMessage(); System.err.println( mLogMsg ); System.exit( 1 ); } catch ( MissingResourceException e ) { mLogMsg = "A required property is missing: " + e.getMessage(); System.err.println( mLogMsg ); System.exit( 1 ); } } /** * It allows you to get any property from the property file without going * through the corresponding accesor function in this class. For coding * and clarity purposes, the function should be used judiciously, and the * accessor function should be used as far as possible. * * @param key the property whose value is desired. * @return String */ public String getProperty( String key ) { return mProps.getProperty( key ); } /** * Returns the CommonProperties that this object encapsulates. Use only when * absolutely necessary. Use accessor methods whereever possible. * * @return CommonProperties */ public CommonProperties getVDSProperties(){ return this.mProps; } /** * Accessor: Overwrite any properties from within the program. * * @param key is the key to look up * @param value is the new property value to place in the system. * @return the old value, or null if it didn't exist before. 
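*
* <pre>
*   // an illustrative override; any property key may be set this way,
*   // the value shown is an assumption, not taken from this file
*   Object old = props.setProperty( "pegasus.transfer.refiner", "Bundle" );
* </pre>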
*/ public Object setProperty( String key, String value ) { return mProps.setProperty( key, value ); } /** * Extracts a specific property key subset from the known properties. * The prefix may be removed from the keys in the resulting dictionary, * or it may be kept. In the latter case, exact matches on the prefix * will also be copied into the resulting dictionary. * * @param prefix is the key prefix to filter the properties by. * @param keepPrefix if true, the key prefix is kept in the resulting * dictionary. As side-effect, a key that matches the prefix exactly * will also be copied. If false, the resulting dictionary's keys are * shortened by the prefix. An exact prefix match will not be copied, * as it would result in an empty string key. * * @return a property dictionary matching the filter key. May be * an empty dictionary, if no prefix matches were found. * * @see #getProperty( String ) is used to assemble matches */ public Properties matchingSubset( String prefix, boolean keepPrefix ) { return mProps.matchingSubset( prefix, keepPrefix ); } /** * Returns the properties matching a particular prefix as a list of * sorted name value pairs, where name is the full name of the matching * property (including the prefix) and value is it's value in the properties * file. * * @param prefix the prefix for the property names. * @param system boolean indicating whether to match only System properties * or all including the ones in the property file. * * @return list of NameValue objects corresponding to the matched * properties sorted by keys. * null if no matching property is found. */ public List getMatchingProperties( String prefix, boolean system ) { //sanity check if ( prefix == null ) { return null; } Properties p = (system)? System.getProperties(): matchingSubset(prefix,true); java.util.Enumeration e = p.propertyNames(); List l = ( e.hasMoreElements() ) ? new java.util.ArrayList() : null; while ( e.hasMoreElements() ) { String key = ( String ) e.nextElement(); NameValue nv = new NameValue( key, p.getProperty( key ) ); l.add( nv ); } Collections.sort(l); return ( l.isEmpty() ) ? null : l; } /** * Accessor to $PEGASUS_HOME/etc. The files in this directory have a low * change frequency, are effectively read-only, they reside on a * per-machine basis, and they are valid usually for a single user. * * @return the "etc" directory of the VDS runtime system. */ public File getSysConfDir() { return mProps.getSysConfDir(); } //PROPERTIES RELATED TO SCHEMAS /** * Returns the location of the schema for the DAX. * * Referred to by the "pegasus.schema.dax" property. * * @return location to the DAX schema. */ public String getDAXSchemaLocation() { return this.getDAXSchemaLocation( null ); } /** * Returns the location of the schema for the DAX. * * Referred to by the "pegasus.schema.dax" property. * * @param defaultLocation the default location to the schema. * * @return location to the DAX schema specified in the properties file, * else the default location if no value specified. */ public String getDAXSchemaLocation( String defaultLocation ) { return mProps.getProperty( "pegasus.schema.dax", defaultLocation ); } /** * Returns the location of the schema for the PDAX. * * Referred to by the "pegasus.schema.pdax" property * * @param defaultLocation the default location to the schema. * * @return location to the PDAX schema specified in the properties file, * else the default location if no value specified. 
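*
* <p>Example (the path is illustrative):
* <code>pegasus.schema.dax = /opt/pegasus/etc/dax-3.2.xsd</code></p>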
*/ public String getPDAXSchemaLocation( String defaultLocation ) { return mProps.getProperty( "pegasus.schema.pdax", defaultLocation ); } //DIRECTORY CREATION PROPERTIES /** * Returns the name of the class that the user wants, to insert the * create directory jobs in the graph in case of creating random * directories. * * Referred to by the "pegasus.dir.create.strategy" property. * * @return the create dir classname if specified in the properties file, * else Tentacles. */ public String getCreateDirClass() { return getProperty( "pegasus.dir.create.strategy", "pegasus.dir.create", "Tentacles" ); } /** * Returns the name of the class that the user wants, to render the directory * creation jobs. It dictates what mechanism is used to create the directory * for a workflow. * * Referred to by the "pegasus.dir.create.impl" property. * * @return the create dir classname if specified in the properties file, * else DefaultImplementation. */ public String getCreateDirImplementation() { return mProps.getProperty( "pegasus.dir.create.impl", "DefaultImplementation" ); } /** * It specifies whether to use the extended timestamp format for generation * of timestamps that are used to create the random directory name, and for * the classads generation. * * Referred to by the "pegasus.dir.timestamp.extended" property. * * @return the value specified in the properties file if valid boolean, else * false. */ public boolean useExtendedTimeStamp() { return Boolean.parse(mProps.getProperty( "pegasus.dir.timestamp.extended"), false ); } /** * Returns a boolean indicating whether to use timestamp for directory * name creation or not. * * Referred to by "pegasus.dir.useTimestamp" property. * * @return the boolean value specified in the properties files, else false. */ public boolean useTimestampForDirectoryStructure(){ return Boolean.parse( mProps.getProperty( "pegasus.dir.useTimestamp" ), false ); } /** * Returns the execution directory suffix or absolute specified * that is appended/replaced to the exec-mount-point specified in the * pool catalog for the various pools. * * Referred to by the "pegasus.dir.exec" property * * @return the value specified in the properties file, * else the default suffix. * * @see #DEFAULT_EXEC_DIR */ public String getExecDirectory() { return mProps.getProperty( "pegasus.dir.exec", DEFAULT_EXEC_DIR ); } /** * Returns the the path to the logs directory on the submit host. * This is the directory where the condor logs for the workflows are * created. The logs directory should be on the local filesystem else * condor may complain * * Referred to by the "pegasus.dir.submit.logs" property * * @return the value in the properties file, else null */ public String getSubmitLogsDirectory(){ return mProps.getProperty( "pegasus.dir.submit.logs" ); } /** * Returns a boolean indicating whether the submit directory for the sub * workflows should include the label of the sub workflow or not. * * Referred to by the "pegasus.dir.submit.subwf.labelbased" property * * @return the value in the properties file, else false */ public boolean labelBasedSubmitDirectoryForSubWorkflows(){ return Boolean.parse( mProps.getProperty( "pegasus.dir.submit.subwf.labelbased" ), false ); } /** * Returns the storage directory suffix or absolute specified * that is appended/replaced to the storage-mount-point specified in the * pool catalog for the various pools. * * Referred to by the "pegasus.dir.storage" property. * * @return the value specified in the properties file, * else the default suffix. 
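*
* <p>Example (the value is illustrative):
* <code>pegasus.dir.storage = /data/runs</code> is appended to, or replaces,
* the storage mount point from the site catalog.</p>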
* * @see #DEFAULT_STORAGE_DIR */ public String getStorageDirectory() { return mProps.getProperty( "pegasus.dir.storage", DEFAULT_STORAGE_DIR ); } /** * Returns a boolean indicating whether to have a deep storage directory * structure or not while staging out data to the output site. * * Referred to by the "pegasus.dir.storage.deep" property. * * @return the boolean value specified in the properties files, else false. */ public boolean useDeepStorageDirectoryStructure(){ return Boolean.parse( mProps.getProperty( "pegasus.dir.storage.deep" ), false ); } //PROPERTIES RELATED TO CLEANUP /** * Returns the name of the Strategy class that the user wants, to insert the * cleanup jobs in the graph. * * Referred to by the "pegasus.file.cleanup.strategy" property. * * @return the create dir classname if specified in the properties file, * else InPlace. */ public String getCleanupStrategy() { return mProps.getProperty( "pegasus.file.cleanup.strategy", "InPlace" ); } /** * Returns the name of the class that the user wants, to render the cleanup * jobs. It dictates what mechanism is used to remove the files on a remote * system. * * Referred to by the "pegasus.file.cleanup.impl" property. * * @return the cleanup implementation classname if specified in the properties file, * else Cleanup. */ public String getCleanupImplementation() { return mProps.getProperty( "pegasus.file.cleanup.impl", "Cleanup" ); } /** * Returns the scope for file cleanup. It is used to trigger cleanup in case * of deferred planning. The vaild property values accepted are * - fullahead * - deferred * * Referred to by the property "pegasus.file.cleanup.scope" * * * @return the value in property file if specified, else fullahead */ public CLEANUP_SCOPE getCleanupScope(){ CLEANUP_SCOPE scope = CLEANUP_SCOPE.fullahead; String value = mProps.getProperty( "pegasus.file.cleanup.scope" ); if( value == null ){ return scope; } //try to assign a cleanup value try{ scope = CLEANUP_SCOPE.valueOf( value ); }catch( IllegalArgumentException iae ){ //ignore do nothing. } return scope; } //PROPERTIES RELATED TO THE TRANSFORMATION CATALOG /** * Returns the mode to be used for accessing the Transformation Catalog. * * Referred to by the "pegasus.catalog.transformation" property. * * @return the value specified in properties file, * else DEFAULT_TC_MODE. * * @see #DEFAULT_TC_MODE */ public String getTCMode() { return mProps.getProperty( "pegasus.catalog.transformation", DEFAULT_TC_MODE ); } /** * Returns the location of the transformation catalog. * * Referred to by "pegasus.catalog.transformation.file" property. * * @return the value specified in the properties file, * else default path specified by mDefaultTC. * * @see #mDefaultTC */ public String getTCPath() { return mProps.getProperty( "pegasus.catalog.transformation.file", mDefaultTC ); } /** * Returns the mode for loading the transformation mapper that sits in * front of the transformation catalog. * * Referred to by the "pegasus.catalog.transformation.mapper" property. * * @return the value specified in the properties file, * else default tc mapper mode. * * @see #DEFAULT_TC_MAPPER_MODE */ public String getTCMapperMode() { return mProps.getProperty( "pegasus.catalog.transformation.mapper", DEFAULT_TC_MAPPER_MODE ); } //REPLICA CATALOG PROPERTIES /** * Returns the replica mode. It identifies the ReplicaMechanism being used * by Pegasus to determine logical file locations. * * Referred to by the "pegasus.catalog.replica" property. 
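*
* <p>Example (the value is illustrative):
* <code>pegasus.catalog.replica = RLS</code></p>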
* * @return the replica mode, that is used to load the appropriate * implementing class if property is specified, * else null */ public String getReplicaMode() { return mProps.getProperty( "pegasus.catalog.replica" ); } /** * Returns the url to the RLI of the RLS. * * Referred to by the "pegasus.rls.url" property. * * @return the value specified in properties file, * else DEFAULT_RLI_URL. * * @see #DEFAULT_RLI_URL */ public String getRLIURL() { return mProps.getProperty( "pegasus.catalog.replica.url", DEFAULT_RLI_URL ); } /** * It returns the timeout value in seconds after which to timeout in case of * no activity from the RLS. * * Referred to by the "pegasus.rc.rls.timeout" property. * * @return the timeout value if specified else, * DEFAULT_RLS_TIMEOUT. * * @see #DEFAULT_RLS_TIMEOUT */ public int getRLSTimeout() { String prop = mProps.getProperty( "pegasus.catalog.replica.rls.timeout", DEFAULT_RLS_TIMEOUT ); int val; try { val = Integer.parseInt( prop ); } catch ( Exception e ) { return Integer.parseInt( DEFAULT_RLS_TIMEOUT ); } return val; } //PROPERTIES RELATED TO SITE CATALOG /** * Returns the mode to be used for accessing the pool information. * * Referred to by the "pegasus.catalog.site" property. * * @return the pool mode, that is used to load the appropriate * implementing class if the property is specified, * else default pool mode specified by DEFAULT_POOL_MODE * * @see #DEFAULT_POOL_MODE */ public String getPoolMode() { return mProps.getProperty( "pegasus.catalog.site", DEFAULT_POOL_MODE ); } /** * Returns the path to the pool file. * * Referred to by the "pegasus.catalog.site.file" property. * * @return the path to the pool file specified in the properties file, * else the default path specified by mDefaultPoolFile. * * @see #mDefaultPoolFile */ public String getPoolFile() { return mProps.getProperty( "pegasus.catalog.site.file", mDefaultPoolFile ); } /** * Returns the location of the schema for the DAX. * * Referred to by the "pegasus.schema.sc" property. * * @return the location of pool schema if specified in properties file, * else null. */ public String getPoolSchemaLocation() { return this.getPoolSchemaLocation( null ); } /** * Returns the location of the schema for the site catalog file. * * Referred to by the "pegasus.schema.sc" property * * @param defaultLocation the default location where the schema should be * if no other location is specified. * * @return the location specified by the property, * else defaultLocation. */ public String getPoolSchemaLocation( String defaultLocation ) { return mProps.getProperty("pegasus.schema.sc", defaultLocation ); } //PROVENANCE CATALOG PROPERTIES /** * Returns the provenance store to use to log the refiner actions. * * Referred to by the "pegasus.catalog.provenance.refinement" property. * * @return the value set in the properties, else null if not set. */ public String getRefinementProvenanceStore( ){ return mProps.getProperty( "pegasus.catalog.provenance.refinement" ); } //TRANSFER MECHANISM PROPERTIES /** * Returns the transfer implementation that is to be used for constructing * the transfer jobs. * * Referred to by the "pegasus.transfer.*.impl" property. * * @return the transfer implementation */ public String getTransferImplementation(){ return getTransferImplementation( "pegasus.transfer.*.impl" ); } /** * Returns the sls transfer implementation that is to be used for constructing * the transfer jobs. * * Referred to by the "pegasus.transfer.sls.*.impl" property. 
* * @return the transfer implementation * */ public String getSLSTransferImplementation(){ return getTransferImplementation( "pegasus.transfer.sls.*.impl" ); } /** * Returns the transfer implementation. * * @param property property name. * * @return the transfer implementation, * else the one specified by "pegasus.transfer.*.impl", */ public String getTransferImplementation( String property ){ return mProps.getProperty( property, getDefaultTransferImplementation()); } /** * Returns a boolean indicating whether to stage sls files via Pegasus * First Level Staging or let Condor do it. * * Referred to by the property "pegasus.transfer.stage.sls.file" * * @return boolean value mentioned in the properties or else the default * value which is true. */ public boolean stageSLSFilesViaFirstLevelStaging( ){ return Boolean.parse( mProps.getProperty( "pegasus.transfer.stage.sls.file" ), false ); } /** * Returns the default list of third party sites. * * Referred to by the "pegasus.transfer.*.thirdparty.sites" property. * * @return the value specified in the properties file, else * null. */ private String getDefaultThirdPartySites(){ return mProps.getProperty("pegasus.transfer.*.thirdparty.sites"); } /** * Returns the default transfer implementation to be picked up for * constructing transfer jobs. * * Referred to by the "pegasus.transfer.*.impl" property. * * @return the value specified in the properties file, else * null. */ private String getDefaultTransferImplementation(){ return mProps.getProperty("pegasus.transfer.*.impl"); } /** * Returns the default priority for the transfer jobs if specified in * the properties file. * * @return the value specified in the properties file, else null if * non integer value or no value specified. */ private String getDefaultTransferPriority(){ String prop = mProps.getProperty( this.ALL_TRANSFER_PRIORITY_PROPERTY_KEY); int val = -1; try { val = Integer.parseInt( prop ); } catch ( Exception e ) { return null; } return Integer.toString( val ); } /** * Returns the base source URL where pointing to the directory where the * worker package executables for pegasus releases are kept. * * Referred to by the "pegasus.transfer.setup.source.base.url * * @return the value in the property file, else null */ public String getBaseSourceURLForSetupTransfers( ){ return mProps.getProperty( "pegasus.transfer.setup.source.base.url" ); } /** * Returns the transfer refiner that is to be used for adding in the * transfer jobs in the workflow * * Referred to by the "pegasus.transfer.refiner" property. * * @return the transfer refiner, else null */ public String getTransferRefiner(){ return mProps.getProperty("pegasus.transfer.refiner"); } /** * Returns whether to introduce quotes around url's before handing to * g-u-c and condor. * * Referred to by "pegasus.transfer.single.quote" property. * * @return boolean value specified in the properties file, else * true in case of non boolean value being specified or property * not being set. */ public boolean quoteTransferURL() { return Boolean.parse(mProps.getProperty( "pegasus.transfer.single.quote"), true); } /** * It returns the number of processes of g-u-c that the transfer script needs to * spawn to do the transfers. This is applicable only in the case where the * transfer executable has the capability of spawning processes. It should * not be confused with the number of streams that each process opens. * By default it is set to 4. In case a non integer value is specified in * the properties file it returns the default value. 
* * Referred to by "pegasus.transfer.throttle.processes" property. * * @return the number of processes specified in properties file, else * DEFAULT_TRANSFER_PROCESSES * * @see #DEFAULT_TRANSFER_PROCESSES */ public String getNumOfTransferProcesses() { String prop = mProps.getProperty( "pegasus.transfer.throttle.processes", DEFAULT_TRANSFER_PROCESSES ); int val = -1; try { val = Integer.parseInt( prop ); } catch ( Exception e ) { return DEFAULT_TRANSFER_PROCESSES; } return Integer.toString( val ); } /** * It returns the number of streams that each transfer process uses to do the * ftp transfer. By default it is set to 1.In case a non integer * value is specified in the properties file it returns the default value. * * Referred to by "pegasus.transfer.throttle.streams" property. * * @return the number of streams specified in the properties file, else * DEFAULT_TRANSFER_STREAMS. * * @see #DEFAULT_TRANSFER_STREAMS */ public String getNumOfTransferStreams() { String prop = mProps.getProperty( "pegasus.transfer.throttle.streams", DEFAULT_TRANSFER_STREAMS ); int val = -1; try { val = Integer.parseInt( prop ); } catch ( Exception e ) { return DEFAULT_TRANSFER_STREAMS; } return Integer.toString( val ); } /** * It specifies whether the underlying transfer mechanism being used should * use the force option if available to transfer the files. * * Referred to by "pegasus.transfer.force" property. * * @return boolean value specified in the properties file,else * false in case of non boolean value being specified or * property not being set. */ public boolean useForceInTransfer() { return Boolean.parse(mProps.getProperty( "pegasus.transfer.force"), false); } /** * It returns whether the use of symbolic links in case where the source * and destination files happen to be on the same file system. * * Referred to by "pegasus.transfer.links" property. * * @return boolean value specified in the properties file, else * false in case of non boolean value being specified or * property not being set. */ public boolean getUseOfSymbolicLinks() { String value = mProps.getProperty( "pegasus.transfer.links" ); return Boolean.parse(value,false); } /** * Returns the comma separated list of third party sites, specified in the * properties. * * @param property property name. * * @return the comma separated list of sites. */ public String getThirdPartySites(String property){ String value = mProps.getProperty(property); return value; } /** * Returns the comma separated list of third party sites for which * the third party transfers are executed on the remote sites. * * * @param property property name. * * @return the comma separated list of sites. */ public String getThirdPartySitesRemote(String property){ return mProps.getProperty(property); } /** * Returns the delimiter to be used for constructing the staged executable * name, during transfer of executables to remote sites. * * Referred to by the "pegasus.transfer.staging.delimiter" property. * * @return the value specified in the properties file, else * DEFAULT_STAGING_DELIMITER * * @see #DEFAULT_STAGING_DELIMITER */ public String getStagingDelimiter(){ return mProps.getProperty("pegasus.transfer.staging.delimiter", DEFAULT_STAGING_DELIMITER); } /** * Returns the list of sites for which the chmod job creation has to be * disabled for executable staging. * * Referred to by the "pegasus.transfer.disable.chmod" property. * * @return a comma separated list of site names. 
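*
* <p>Example (the site names are illustrative); note that the key actually
* read by the implementation below is
* <code>pegasus.transfer.disable.chmod.sites</code>:</p>
* <pre>
*   pegasus.transfer.disable.chmod.sites = isi_viz,local
* </pre>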
*/ public String getChmodDisabledSites() { return mProps.getProperty( "pegasus.transfer.disable.chmod.sites" ); } /** * It specifies if the worker package needs to be staged to the remote site * or not. * * Referred to by "pegasus.transfer.worker.package" property. * * @return boolean value specified in the properties file,else * false in case of non boolean value being specified or * property not being set. */ public boolean transferWorkerPackage() { return Boolean.parse( mProps.getProperty( PEGASUS_TRANSFER_WORKER_PACKAGE_PROPERTY ), false ); } /** * Returns the arguments with which the transfer executable needs * to be invoked. * * Referred to by "pegasus.transfer.arguments" property. * * @return the arguments specified in the properties file, * else null if property is not specified. */ public String getTransferArguments() { return mProps.getProperty("pegasus.transfer.arguments"); } /** * Returns the extra arguments with which the SLS transfer executables needs * to be invoked. * * Referred to by "pegasus.transfer.sls.arguments" property. * * @return the arguments specified in the properties file, * else null if property is not specified. */ public String getSLSTransferArguments() { return mProps.getProperty("pegasus.transfer.sls.arguments"); } /** * Returns the priority to be set for the stage in transfer job. * * Referred to by "pegasus.transfer.stagein.priority" property if set, * else by "pegasus.transfer.*.priority" property. * * @return the priority as String if a valid integer specified in the * properties, else null. */ public String getTransferStageInPriority(){ return getTransferPriority("pegasus.transfer.stagein.priority"); } /** * Returns the priority to be set for the stage out transfer job. * * Referred to by "pegasus.transfer.stageout.priority" property if set, * else by "pegasus.transfer.*.priority" property. * * @return the priority as String if a valid integer specified in the * properties, else null. */ public String getTransferStageOutPriority(){ return getTransferPriority("pegasus.transfer.stageout.priority"); } /** * Returns the priority to be set for the interpool transfer job. * * Referred to by "pegasus.transfer.inter.priority" property if set, * else by "pegasus.transfer.*.priority" property. * * @return the priority as String if a valid integer specified in the * properties, else null. */ public String getTransferInterPriority(){ return getTransferPriority("pegasus.transfer.inter.priority"); } /** * Returns the transfer priority. * * @param property property name. * * @return the priority as String if a valid integer specified in the * properties as value to property, else null. */ private String getTransferPriority(String property){ String value = mProps.getProperty(property, mDefaultTransferPriority); int val = -1; try { val = Integer.parseInt( value ); } catch ( Exception e ) { } //if value in properties file is corrupted //again use the default transfer priority return ( val < 0 ) ? mDefaultTransferPriority : Integer.toString( val ); } //REPLICA SELECTOR FUNCTIONS /** * Returns the mode for loading the transformation selector that selects * amongst the various candidate transformation catalog entry objects. * * Referred to by the "pegasus.selector.transformation" property. * * @return the value specified in the properties file, * else default transformation selector. 
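*
* <p>Example (spelling out the default):
* <code>pegasus.selector.transformation = Random</code> picks one of the
* candidate transformation catalog entries at random.</p>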
* * @see #DEFAULT_TX_SELECTOR_MODE */ public String getTXSelectorMode() { return mProps.getProperty( "pegasus.selector.transformation", DEFAULT_TX_SELECTOR_MODE ); } /** * Returns the name of the selector to be used for selection amongst the * various replicas of a single lfn. * * Referred to by the "pegasus.selector.replica" property. * * @return the name of the selector if the property is specified, * else null */ public String getReplicaSelector(){ return mProps.getProperty( "pegasus.selector.replica" ); } /** * Returns a comma separated list of sites that are restricted in terms of * data movement from the site. * * Referred to by the "pegasus.rc.restricted.sites" property. * * @return comma separated list of sites. */ // public String getRestrictedSites(){ // return mProps.getProperty("pegasus.rc.restricted.sites",""); // } /** * Returns a comma separated list of sites, from which to prefer data * transfers for all sites. * * Referred to by the "pegasus.selector.replica.*.prefer.stagein.sites" property. * * @return comma separated list of sites. */ public String getAllPreferredSites(){ return mProps.getProperty( "pegasus.selector.replica.*.prefer.stagein.sites",""); } /** * Returns a comma separated list of sites, from which to ignore data * transfers for all sites. Replaces the old pegasus.rc.restricted.sites * property. * * Referred to by the "pegasus.selector.replica.*.ignore.stagein.sites" property. * * @return comma separated list of sites. */ public String getAllIgnoredSites(){ return mProps.getProperty("pegasus.selector.replica.*.ignore.stagein.sites", ""); } //SITE SELECTOR PROPERTIES /** * Returns the class name of the site selector that needs to be invoked to do * the site selection. * * Referred to by the "pegasus.selector.site" property. * * @return the classname corresponding to the site selector that needs to be * invoked if specified in the properties file, else the default * selector specified by DEFAULT_SITE_SELECTOR. * * @see #DEFAULT_SITE_SELECTOR */ public String getSiteSelectorMode() { return mProps.getProperty( "pegasus.selector.site", DEFAULT_SITE_SELECTOR ); } /** * Returns the path to the external site selector that needs to be called * out to make the decision of site selection. * * Referred to by the "pegasus.selector.site.path" property. * * @return the path to the external site selector if specified in the * properties file, else null. */ public String getSiteSelectorPath() { return mProps.getProperty( "pegasus.selector.site.path" ); } /** * It returns the timeout value in seconds after which to timeout in case of * no activity from the external site selector. * * Referred to by the "pegasus.selector.site.timeout" property. * * @return the timeout value if specified, else * DEFAULT_SITE_SELECTOR_TIMEOUT. * * @see #DEFAULT_SITE_SELECTOR_TIMEOUT */ public int getSiteSelectorTimeout() { String prop = mProps.getProperty( "pegasus.selector.site.timeout", DEFAULT_SITE_SELECTOR_TIMEOUT ); int val; try { val = Integer.parseInt( prop ); } catch ( Exception e ) { return Integer.parseInt( DEFAULT_SITE_SELECTOR_TIMEOUT ); } return val; } /** * Returns a value designating whether we need to keep the temporary files * that are passed to the external site selectors. The check for the valid * tristate value should be done at the calling function end. This just * passes on the value the user specified in the properties file. * * Referred to by the "pegasus.selector.site.keep.tmp" property.
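* <p>A hedged consumption sketch (the variable and value shown are
* illustrative; the valid tristate values are validated by the caller, not
* here):
* <pre>{@code
* String keep = props.getSiteSelectorKeep(); // e.g. "onerror" (illustrative value)
* }</pre>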
* * @return the value of the property if specified, else * DEFAULT_SITE_SELECTOR_KEEP * * @see #DEFAULT_SITE_SELECTOR_KEEP */ public String getSiteSelectorKeep() { return mProps.getProperty( "pegasus.selector.site.keep.tmp", DEFAULT_SITE_SELECTOR_KEEP ); } //PROPERTIES RELATED TO KICKSTART AND EXITCODE /** * Returns the GRIDSTART that is to be used to launch the jobs on the grid. * * Referred to by the "pegasus.gridstart" property. * * @return the value specified in the property file, * else DEFAULT_GRIDSTART_MODE * * @see #DEFAULT_GRIDSTART_MODE */ public String getGridStart(){ return mProps.getProperty("pegasus.gridstart",DEFAULT_GRIDSTART_MODE); } /** * Returns a boolean indicating whether kickstart should set x bit on * staged executables before launching them. * * Referred to by the "pegasus.gridstart.kickstart.set.xbit" property. * * @return the value specified in the property file, * else false * */ public boolean setXBitWithKickstart(){ return Boolean.parse( mProps.getProperty( "pegasus.gridstart.kickstart.set.xbit" ), false ); } /** * Return a boolean indicating whether to turn the stat option for kickstart * on or not. By default it is turned off. * * Referred to by the "pegasus.gridstart.kickstart.stat" property. * * @return the boolean value specified in the property file, * else false if not specified or non boolean specified. */ public boolean doStatWithKickstart(){ return Boolean.parse( mProps.getProperty( "pegasus.gridstart.kickstart.stat"), false ); } /** * Return a boolean indicating whether to generate the LOF files for the jobs * or not. This is used to generate LOF files, but not trigger the stat option. * * Referred to by the "pegasus.gridstart.generate.lof" property. * * @return the boolean value specified in the property file, * else false if not specified or non boolean specified. */ public boolean generateLOFFiles(){ return Boolean.parse( mProps.getProperty( "pegasus.gridstart.generate.lof"), false ); } /** * Returns a boolean indicating whether to use invoke in kickstart always * or not. * * Referred to by the "pegasus.gridstart.invoke.always" property. * * @return the boolean value specified in the property file, * else false if not specified or non boolean specified. */ public boolean useInvokeInGridStart(){ return Boolean.parse( mProps.getProperty( "pegasus.gridstart.invoke.always"), false ); } /** * Returns a boolean indicating whether to disable use of invoke or not. * * Referred to by the "pegasus.gridstart.invoke.disable" property. * * @return the boolean value specified in the property file, * else false if not specified or non boolean specified. */ public boolean disableInvokeInGridStart(){ return Boolean.parse( mProps.getProperty( PegasusProperties.DISABLE_INVOKE_PROPERTY ), false ); } /** * Returns the trigger value for launching an application through kickstart * using invoke. If the arguments value being constructed in the condor * submit file is more than this value, then invoke is used to pass the * arguments to the remote end. Helps in bypassing the Condor 4K limit. * * Referred to by "pegasus.gridstart.invoke.length" property.
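* <p>Illustrative properties entry, sized against the documented Condor 4K
* limit (the exact value is an assumption for the example):
* <pre>
* pegasus.gridstart.invoke.length = 4096
* </pre>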
* * @return the long value specified in the properties files, else * DEFAULT_INVOKE_LENGTH * * @see #DEFAULT_INVOKE_LENGTH */ public long getGridStartInvokeLength(){ long value = new Long(this.DEFAULT_INVOKE_LENGTH).longValue(); String st = mProps.getProperty( "pegasus.gridstart.invoke.length", this.DEFAULT_INVOKE_LENGTH ); try { value = new Long( st ).longValue(); } catch ( Exception e ) { //ignore malformed values from //the property file } return value; } /** * Returns a boolean indicating whether to pass extra options to kickstart * or not. The extra options have appeared only in VDS version 1.4.2 (like -L * and -T). * * Referred to by "pegasus.gridstart.label" property. * * @return the boolean value specified in the property file, * else true if not specified or non boolean specified. */ public boolean generateKickstartExtraOptions(){ return Boolean.parse( mProps.getProperty( "pegasus.gridstart.label"), true ); } /** * Returns the mode for adding the postscripts for the jobs. At present it takes * only two values, all or none, the default being none. * * Referred to by the "pegasus.exitcode.scope" property. * * @return the mode specified by the property, else * DEFAULT_POSTSCRIPT_MODE * * @see #DEFAULT_POSTSCRIPT_MODE */ /* public String getPOSTScriptScope() { return mProps.getProperty( "pegasus.exitcode.scope", DEFAULT_POSTSCRIPT_MODE ); } */ /** * Returns the postscript to use with the jobs in the workflow. They * may be overridden by values specified in the profiles. * * Referred to by the "pegasus.exitcode.impl" property. * * @return the postscript to use for the workflow, else null if not * specified in the properties. */ /* public String getPOSTScript(){ return mProps.getProperty( "pegasus.exitcode.impl" ); } */ /** * Returns the path to the exitcode executable to be used. * * Referred to by the "pegasus.exitcode.path.[value]" property, where [value] * is replaced by the value passed as input to this function. * * @param value the short name of the postscript whose path we want. * * @return the path to the postscript if specified in properties file. */ /* public String getPOSTScriptPath( String value ){ value = ( value == null ) ? "*" : value; StringBuffer key = new StringBuffer(); key.append( "pegasus.exitcode.path." ).append( value ); return mProps.getProperty( key.toString() ); } */ /** * Returns the argument string containing the arguments by which exitcode is * invoked. * * Referred to by the "pegasus.exitcode.arguments" property. * * @return String containing the arguments, else empty string. */ /* public String getPOSTScriptArguments() { return mProps.getProperty( "pegasus.exitcode.arguments", ""); } */ /** * Returns a boolean indicating whether to turn debug on or not for exitcode. * By default false is returned. * * Referred to by the "pegasus.exitcode.debug" property. * * @return boolean value. */ public boolean setPostSCRIPTDebugON(){ return Boolean.parse( mProps.getProperty( "pegasus.exitcode.debug"), false ); } /** * Returns the argument string containing the arguments by which prescript is * invoked. * * Referred to by the "pegasus.prescript.arguments" property. * * @return String containing the arguments. * null if not specified. */ /* public String getPrescriptArguments() { return mProps.getProperty( "pegasus.prescript.arguments","" ); } */ //PROPERTIES RELATED TO REMOTE SCHEDULERS /** * Returns the project names that need to be appended to the RSL String * while creating the submit files. Referred to by the * pegasus.remote.scheduler.projects property.
If present, Pegasus ends up * inserting an RSL string (project = value) in the submit file. * * @return a comma separated list of key value pairs if property specified, * else null. */ // public String getRemoteSchedulerProjects() { // return mProps.getProperty( "pegasus.remote.scheduler.projects" ); // } /** * Returns the queue names that need to be appended to the RSL String while * creating the submit files. Referred to by the pegasus.remote.queues * property. If present, Pegasus ends up inserting an RSL string * (project = value) in the submit file. * * @return a comma separated list of key value pairs if property specified, * else null. */ // public String getRemoteSchedulerQueues() { // return mProps.getProperty( "pegasus.remote.scheduler.queues" ); // } /** * Returns the maxwalltimes for the various pools that need to be appended * to the RSL String while creating the submit files. Referred to by the * pegasus.scheduler.remote.queues property. If present, Pegasus ends up * inserting an RSL string (project = value) in the submit file. * * * @return a comma separated list of key value pairs if property specified, * else null. */ // public String getRemoteSchedulerMaxWallTimes() { // return mProps.getProperty( "pegasus.remote.scheduler.min.maxwalltime" ); // } /** * Returns the minimum walltimes that need to be enforced. * * Referred to by "pegasus.scheduler.remote.min.[key]" property. * * @param key the appropriate globus RSL key. Generally are * maxtime|maxwalltime|maxcputime * * @return the integer value as specified, -1 in case of no value being specified. */ // public int getMinimumRemoteSchedulerTime( String key ){ // StringBuffer property = new StringBuffer(); // property.append( "pegasus.remote.scheduler.min." ).append( key ); // // int val = -1; // // try { // val = Integer.parseInt( mProps.getProperty( property.toString() ) ); // } catch ( Exception e ) { // } // return val; // } //PROPERTIES RELATED TO CONDOR /** * Completely disable placing a symlink for Condor common log (indiscriminately). * * @return true if we want a symlink for Condor common log. */ public boolean symlinkCommonLog() { return Boolean.parse( mProps.getProperty( "pegasus.condor.logs.symlink"), true ); } /** * Returns a boolean indicating whether we want to Condor Quote the * arguments of the job or not. * * Referred to by the "pegasus.condor.arguments.quote" property. * * @return boolean */ public boolean useCondorQuotingForArguments(){ return Boolean.parse( mProps.getProperty("pegasus.condor.arguments.quote"), true); } /** * Returns the number of times Condor should retry running a job in case * of failure. The retry ends up reinvoking the prescript, that can change * the site selection decision in case of failure. * * Referred to by the "pegasus.dagman.retry" property. * * @return an int denoting the number of times to retry. * null if not specified or invalid entry. */ /* public String getCondorRetryValue() { String prop = mProps.getProperty( "pegasus.dagman.retry" ); int val = -1; try { val = Integer.parseInt( prop ); } catch ( Exception e ) { return null; } return Integer.toString( val ); } */ /** * Tells whether to stream condor output or not. By default it is true , * meaning condor streams the output from the remote hosts back to the submit * hosts, instead of staging it. This helps in saving filedescriptors at the * jobmanager end. * * If it is set to false, output is not streamed back. The line * "stream_output = false" should be added in the submit files for kickstart * jobs. 
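* <p>For illustration, the submit-file line the description above refers to
* (quoted from this documentation; hedged, since the surrounding method is
* commented out in this release):
* <pre>
* stream_output = false
* </pre>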
* * Referred to by the "pegasus.condor.output.stream" property. * * @return the boolean value specified by the property, else * false in case of invalid value or property not being specified. * */ /* public boolean streamCondorOutput() { return Boolean.parse(mProps.getProperty( "pegasus.condor.output.stream"), false ); } */ /** * Tells whether to stream condor error or not. By default it is true , * meaning condor streams the error from the remote hosts back to the submit * hosts instead of staging it in. This helps in saving filedescriptors at * the jobmanager end. * * Referred to by the "pegasus.condor.error.stream" property. * * If it is set to false, output is not streamed back. The line * "stream_output = false" should be added in the submit files for kickstart * jobs. * * @return the boolean value specified by the property, else * false in case of invalid value or property not being specified. */ /* public boolean streamCondorError() { return Boolean.parse(mProps.getProperty( "pegasus.condor.error.stream"), false ); } */ //PROPERTIES RELATED TO STORK /** * Returns the credential name to be used for the stork transfer jobs. * * Referred to by the "pegasus.transfer.stork.cred" property. * * @return the credential name if specified by the property, * else null. */ public String getCredName() { return mProps.getProperty( "pegasus.transfer.stork.cred" ); } //SOME LOGGING PROPERTIES /** * Returns the log manager to use. * * Referred to by the "pegasus.log.manager" property. * * @return the value in the properties file, else Default */ public String getLogManager() { return mProps.getProperty( "pegasus.log.manager", "Default" ); } /** * Returns the log formatter to use. * * Referred to by the "pegasus.log.formatter" property. * * @return the value in the properties file, else Simple */ public String getLogFormatter() { return mProps.getProperty( "pegasus.log.formatter", "Simple" ); } /** * Returns the http url for log4j properties for windward project. * * Referred to by the "log4j.configuration" property. * * @return the value in the properties file, else null */ public String getHttpLog4jURL() { //return mProps.getProperty( "pegasus.log.windward.log4j.http.url" ); return mProps.getProperty( "log4j.configuration" ); } /** * Returns the file to which all the logging needs to be directed to. * * Referred to by the "pegasus.log.*" property. * * @return the value of the property that is specified, else * null */ public String getLoggingFile(){ return mProps.getProperty("pegasus.log.*"); } /** * Returns the location of the local log file where you want the messages to * be logged. Not used for the moment. * * Referred to by the "pegasus.log4j.log" property. * * @return the value specified in the property file,else null. */ public String getLog4JLogFile() { return mProps.getProperty( "pegasus.log4j.log" ); } /** * Returns a boolean indicating whether to write out the planner metrics * or not. * * Referred to by the "pegasus.log.metrics" property. * * @return boolean in the properties, else true */ public boolean writeOutMetrics(){ return Boolean.parse( mProps.getProperty( "pegasus.log.metrics" ), true ) && (this.getMetricsLogFile() != null); } /** * Returns the path to the file that is used to be logging metrics * * Referred to by the "pegasus.log.metrics.file" property. 
* * @return path to the metrics file if specified, else rundir/pegasus.metrics */ public String getMetricsLogFile(){ String file = mProps.getProperty( "pegasus.log.metrics.file" ); return file; } //SOME MISCELLANEOUS PROPERTIES /** * Returns a boolean indicating whether to assign default priorities * to the jobs or not. * * Referred to by the "pegasus.job.priority.assign" property. * * @return boolean value specified in properties else true. */ public boolean assignDefaultJobPriorities() { return Boolean.parse( mProps.getProperty( "pegasus.job.priority.assign" ) , true ); } /** * Returns the environment string specified for the local pool. If * specified, the registration jobs are set with these environment variables. * * Referred to by the "pegasus.local.env" property * * @return the environment string for local pool in properties file if * defined, else null. */ public String getLocalPoolEnvVar() { return mProps.getProperty( "pegasus.local.env" ); } /** * Returns a boolean indicating whether to have jobs executing on worker * node tmp or not. * * Referred to by the "pegasus.execute.*.filesystem.local" property. * * @return boolean value in the properties file, else false if not specified * or an invalid value specified. */ public boolean executeOnWorkerNode( ){ return Boolean.parse( mProps.getProperty( PegasusProperties.PEGASUS_WORKER_NODE_EXECUTION_PROPERTY ) , false ); } /** * Returns a boolean indicating whether to treat the entries in the cache * files as a replica catalog or not. * * @return boolean */ public boolean treatCacheAsRC(){ return Boolean.parse(mProps.getProperty( "pegasus.catalog.replica.cache.asrc" ), false); } /** * Returns a boolean indicating whether to preserve line breaks. * * Referred to by the "pegasus.parser.dax.preserve.linebreaks" property. * * @return boolean value in the properties file, else false if not specified * or an invalid value specified. */ public boolean preserveParserLineBreaks( ){ return Boolean.parse( mProps.getProperty( "pegasus.parser.dax.preserve.linebreaks" ), false) ; } /** * Returns the path to the wings properties file. * * Referred to by the "pegasus.wings.properties" property. * * @return value in the properties file, else null. */ public String getWingsPropertiesFile( ){ return mProps.getProperty( "pegasus.wings.properties" ) ; } /** * Returns the request id. * * Referred to by the "pegasus.wings.request.id" property. * * @return value in the properties file, else null. */ public String getWingsRequestID( ){ return mProps.getProperty( "pegasus.wings.request.id" ) ; } /** * Returns the timeout value in seconds after which to timeout while * opening sockets to a grid ftp server. * * Referred to by the "pegasus.auth.gridftp.timeout" property. * * @return the timeout value if specified, else * null. */ public String getGridFTPTimeout(){ return mProps.getProperty("pegasus.auth.gridftp.timeout"); } /** * Returns the submit mode to be used to submit the jobs to the grid. * * Referred to by the "pegasus.code.generator" property. * * @return the submit mode specified in the property file, * else the default i.e. condor. */ public String getSubmitMode() { return mProps.getProperty( "pegasus.code.generator", "condor" ); } /** * Returns the mode for parsing the dax while writing out the partitioned * daxes. * * Referred to by the "pegasus.partition.parser.load" property. * * @return the value specified in the properties file, else * the default value i.e. single.
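* <p>Explicitly pinning the documented default, shown for illustration:
* <pre>
* pegasus.partition.parser.load = single
* </pre>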
*/ public String getPartitionParsingMode() { return mProps.getProperty( "pegasus.partition.parser.load", "single" ); } //JOB COLLAPSING PROPERTIES /** * Returns a comma separated list for the node collapsing criteria for the * execution pools. This determines how many jobs one fat node gobbles up. * * Referred to by the "pegasus.clusterer.nodes" property. * * @return the value specified in the properties file, else null. */ public String getCollapseFactors() { return mProps.getProperty( "pegasus.clusterer.nodes" ); } /** * Returns the user's horizontal clustering preference. This property * determines how to cluster horizontal jobs. If this property is set with a * value of runtime, the jobs will be grouped into clusters * according to their runtimes as specified by the job.runtime * property. For all other cases the default horizontal clustering approach * will be used. * * @return the value specified in the properties file, else null. */ public String getHorizontalClusterPreference() { return mProps.getProperty( "pegasus.clusterer.preference" ); } /** * Returns what job aggregator is to be used to aggregate multiple * compute jobs into a single condor job. * * Referred to by the "pegasus.clusterer.job.aggregator" property. * * @return the value specified in the properties file, else * DEFAULT_JOB_AGGREGATOR * * @see #DEFAULT_JOB_AGGREGATOR */ public String getJobAggregator(){ return mProps.getProperty("pegasus.clusterer.job.aggregator",DEFAULT_JOB_AGGREGATOR); } /** * Returns whether the seqexec job aggregator should log progress to a log file or not. * * Referred to by the "pegasus.clusterer.job.aggregator.seqexec.log" property. * * @return the value specified in the properties file, else false * */ public boolean logJobAggregatorProgress(){ return Boolean.parse( getProperty( "pegasus.clusterer.job.aggregator.seqexec.log" ), false ); } /** * Returns whether the seqexec job aggregator should write to a global log or not. * This comes into play only if "pegasus.clusterer.job.aggregator.seqexec.log" * is set to true. * * Referred to by the "pegasus.clusterer.job.aggregator.seqexec.log.global" property. * * @return the value specified in the properties file, else true * */ public boolean logJobAggregatorProgressToGlobal(){ return Boolean.parse( getProperty( "pegasus.clusterer.job.aggregator.seqexec.log.global", "pegasus.clusterer.job.aggregator.seqexec.hasgloballog"), true ); } /** * Returns a boolean indicating whether seqexec trips on the first job failure. * * Referred to by the "pegasus.clusterer.job.aggregator.seqexec.firstjobfail" property. * * @return the value specified in the properties file, else true * */ public boolean abortOnFirstJobFailure(){ return Boolean.parse( mProps.getProperty( "pegasus.clusterer.job.aggregator.seqexec.firstjobfail" ), true ); } //DEFERRED PLANNING PROPERTIES /** * Returns the root workflow UUID if defined in the properties, else null * * Referred to by the "pegasus.workflow.root.uuid" property. * * @return the value in the properties file, else null */ public String getRootWorkflowUUID() { return mProps.getProperty( ROOT_WORKFLOW_UUID_PROPERTY_KEY, null ); } /** * Returns the DAXCallback that is to be used while parsing the DAX. * * Referred to by the "pegasus.partitioner.parser.dax.callback" property.
* * @return the value specified in the properties file, else * DEFAULT_DAX_CALLBACK * * @see #DEFAULT_DAX_CALLBACK */ public String getPartitionerDAXCallback(){ return mProps.getProperty("pegasus.partitioner.parser.dax.callback",DEFAULT_DAX_CALLBACK); } /** * Returns the key that is to be used as a label key, for labelled * partitioning. * * Referred to by the "pegasus.partitioner.label.key" property. * * @return the value specified in the properties file. */ public String getPartitionerLabelKey(){ return mProps.getProperty( "pegasus.partitioner.label.key" ); } /** * Returns the bundle value for a particular transformation. * * Referred to by the "pegasus.partitioner.horizontal.bundle.[txname]" property, * where [txname] is replaced by the name passed as input to this function. * * @param name the logical name of the transformation. * * @return the bundle value if specified in the properties file, * else null. */ public String getHorizontalPartitionerBundleValue( String name ){ StringBuffer key = new StringBuffer(); key.append( "pegasus.partitioner.horizontal.bundle." ).append( name ); return mProps.getProperty( key.toString() ); } /** * Returns the collapse value for a particular transformation. * * Referred to by the "pegasus.partitioner.horizontal.collapse.[txname]" property, * where [txname] is replaced by the name passed as input to this function. * * @param name the logical name of the transformation. * * @return the collapse value if specified in the properties file, * else null. */ public String getHorizontalPartitionerCollapseValue( String name ){ StringBuffer key = new StringBuffer(); key.append( "pegasus.partitioner.horizontal.collapse." ).append( name ); return mProps.getProperty( key.toString() ); } /** * Returns the key that is to be used as a label key, for labelled * clustering. * * Referred to by the "pegasus.clusterer.label.key" property. * * @return the value specified in the properties file. */ public String getClustererLabelKey(){ return mProps.getProperty( "pegasus.clusterer.label.key"); } /** * Returns the path to the property file that has been written out in * the submit directory. * * @return path to the property file * * @exception RuntimeException in case of file not being generated. */ public String getPropertiesInSubmitDirectory( ){ if ( mPropsInSubmitDir == null || mPropsInSubmitDir.length() == 0 ){ throw new RuntimeException( "Properties file does not exist in directory " ); } return mPropsInSubmitDir; } /** * Writes out the properties to a temporary file in the directory passed. * * @param directory the directory in which the properties file needs to * be written to. * * @return the absolute path to the properties file written in the directory. * * @throws IOException in case of error while writing out file. */ public String writeOutProperties( String directory ) throws IOException{ return this.writeOutProperties( directory, true ); } /** * Writes out the properties to a temporary file in the directory passed. * * @param directory the directory in which the properties file needs to * be written to. * @param sanitizePath boolean indicating whether to sanitize paths for * certain properties or not. * * @return the absolute path to the properties file written in the directory. * * @throws IOException in case of error while writing out file.
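* <p>A minimal usage sketch (the directory path is hypothetical):
* <pre>{@code
* String path = props.writeOutProperties( "/tmp/run0001", true );
* }</pre>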
*/ public String writeOutProperties( String directory , boolean sanitizePath ) throws IOException{ return this.writeOutProperties( directory, sanitizePath, true ); } /** * Writes out the properties to a temporary file in the directory passed. * * @param directory the directory in which the properties file needs to * be written to. * @param sanitizePath boolean indicating whether to sanitize paths for * certain properties or not. * @param setInternalVariable whether to set the internal variable that stores * the path to the properties file. * * @return the absolute path to the properties file written in the directory. * * @throws IOException in case of error while writing out file. */ public String writeOutProperties( String directory , boolean sanitizePath, boolean setInternalVariable ) throws IOException{ File dir = new File(directory); //sanity check on the directory sanityCheck( dir ); //we only want to write out the Pegasus properties for time being //and any profiles that were mentioned in the properties. Properties properties = new Properties(); for ( Profiles.NAMESPACES n : Profiles.NAMESPACES.values() ){ Properties p = this.mProps.matchingSubset( namespaceToPropertiesPrefix().get( n ), true ); properties.putAll( p ); } //check if we need to sanitize paths for certain properties or not if( sanitizePath ){ sanitizePathForProperty( properties, "pegasus.catalog.site.file" ); sanitizePathForProperty( properties, "pegasus.catalog.replica.file" ); sanitizePathForProperty( properties, "pegasus.catalog.transformation.file" ); } //create a temporary file in directory File f = File.createTempFile( "pegasus.", ".properties", dir ); //the header of the file StringBuffer header = new StringBuffer(64); header.append("Pegasus USER PROPERTIES AT RUNTIME \n") .append("#ESCAPES IN VALUES ARE INTRODUCED"); //create an output stream to this file and write out the properties OutputStream os = new FileOutputStream(f); properties.store( os, header.toString() ); os.close(); //also set it to the internal variable if ( setInternalVariable ){ mPropsInSubmitDir = f.getAbsolutePath(); return mPropsInSubmitDir; } else { return f.getAbsolutePath(); } } /** * Santizes the value in the properties . Ensures that the path is absolute. * * @param properties the properties * @param key the key whose value needs to be sanitized */ private void sanitizePathForProperty(Properties properties, String key ) { if( properties.containsKey(key) ){ String value = properties.getProperty( key ); if( value != null ){ properties.setProperty( key, new File( value ).getAbsolutePath() ); } } } /** * Checks the destination location for existence, if it can * be created, if it is writable etc. * * @param dir is the new base directory to optionally create. * * @throws IOException in case of error while writing out files. */ protected static void sanityCheck( File dir ) throws IOException{ if ( dir.exists() ) { // location exists if ( dir.isDirectory() ) { // ok, isa directory if ( dir.canWrite() ) { // can write, all is well return; } else { // all is there, but I cannot write to dir throw new IOException( "Cannot write to existing directory " + dir.getPath() ); } } else { // exists but not a directory throw new IOException( "Destination " + dir.getPath() + " already " + "exists, but is not a directory." ); } } else { // does not exist, try to make it if ( ! dir.mkdirs() ) { //try to get around JVM bug. JIRA PM-91 if( dir.getPath().endsWith( "." 
) ){ //just try to create the parent directory if( !dir.getParentFile().mkdirs() ){ throw new IOException( "Unable to create directory " + dir.getPath() ); } return; } throw new IOException( "Unable to create directory destination " + dir.getPath() ); } } } /** * This function is used to check whether a deprecated property is used or * not. If a deprecated property is used,it logs a warning message specifying * the new property. If both properties are not set by the user, the function * returns the default property. If no default property then null. * * @param newProperty the new property that should be used. * @param deprecatedProperty the deprecated property that needs to be * replaced. * * @return the appropriate value. */ private String getProperty( String newProperty, String deprecatedProperty ) { return this.getProperty( newProperty, deprecatedProperty, null ); } /** * This function is used to check whether a deprecated property is used or * not. If a deprecated property is used,it logs a warning message specifying * the new property. If both properties are not set by the user, the * function returns the default property. If no default property then null. * * * @param newProperty the new property that should be used. * @param deprecatedProperty the deprecated property that needs to be * replaced. * @param defaultValue the default value that should be returned. * * @return the appropriate value. */ private String getProperty( String newProperty, String deprecatedProperty, String defaultValue ) { String value = null; //try for the new property //first value = mProps.getProperty( newProperty ); if ( value == null ) { //try the deprecated property if set value = mProps.getProperty( deprecatedProperty ); //if the value is not null if ( value != null ) { //print the warning message logDeprecatedWarning(deprecatedProperty,newProperty); return value; } else { //else return the default value return defaultValue; } } return value; } /** * Logs a warning about the deprecated property. Logs a warning only if * it has not been displayed before. * * @param deprecatedProperty the deprecated property that needs to be * replaced. * @param newProperty the new property that should be used. */ private void logDeprecatedWarning(String deprecatedProperty, String newProperty){ if(!mDeprecatedProperties.contains(deprecatedProperty)){ //log only if it had already not been logged StringBuffer sb = new StringBuffer(); sb.append( "The property " ).append( deprecatedProperty ). append( " has been deprecated. Use " ).append( newProperty ). append( " instead." ); // mLogger.log(sb.toString(),LogManager.WARNING_MESSAGE_LEVEL ); System.err.println( "[WARNING] " + sb.toString() ); //push the property in to indicate it has already been //warned about mDeprecatedProperties.add(deprecatedProperty); } } /** * Returns a boolean indicating whether to use third party transfers for * all types of transfers or not. * * Referred to by the "pegasus.transfer.*.thirdparty" property. * * @return the boolean value in the properties files, * else false if no value specified, or non boolean specified. */ // private boolean useThirdPartyForAll(){ // return Boolean.parse("pegasus.transfer.*.thirdparty", // false); // } /** * Gets the reference to the internal singleton object. This method is * invoked with the assumption that the singleton method has been invoked once * and has been populated. Also that it has not been disposed by the garbage * collector. Can be potentially a buggy way to invoke. 
* * @return a handle to the Properties class. */ // public static PegasusProperties singletonInstance() { // return singletonInstance( null ); // } /** * Gets a reference to the internal singleton object. * * @param propFileName name of the properties file to picked * from $PEGASUS_HOME/etc/ directory. * * @return a handle to the Properties class. */ // public static PegasusProperties singletonInstance( String propFileName ) { // if ( pegProperties == null ) { // //only the default properties file // //can be picked up due to the way // //Singleton implemented in CommonProperties.??? // pegProperties = new PegasusProperties( null ); // } // return pegProperties; // } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/common/RunDirectoryFilenameFilter.java0000644000175000017500000000374111757531137030672 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.common; import java.io.FilenameFilter; import java.util.regex.Pattern; import java.io.File; /** * A filename filter for identifying the run directory * * @author Karan Vahi vahi@isi.edu */ public class RunDirectoryFilenameFilter implements FilenameFilter { /** * The prefix for the submit directory. */ public static final String SUBMIT_DIRECTORY_PREFIX = "run"; /** * Store the regular expressions necessary to parse kickstart output files */ private static final String mRegexExpression = "(" + SUBMIT_DIRECTORY_PREFIX + ")([0-9][0-9][0-9][0-9])"; /** * Stores compiled patterns at first use, quasi-Singleton. */ private static Pattern mPattern = null; /*** * Tests if a specified file should be included in a file list. * * @param dir the directory in which the file was found. * @param name - the name of the file. * * @return true if and only if the name should be included in the file list * false otherwise. * * */ public boolean accept( File dir, String name) { //compile the pattern only once. if( mPattern == null ){ mPattern = Pattern.compile( mRegexExpression ); } return mPattern.matcher( name ).matches(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/0000755000175000017500000000000011757531667023126 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/SLS.java0000644000175000017500000001643311757531137024431 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.transfer; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Job; import java.io.File; import java.util.Collection; /** * This interface defines the second level staging process that manages * the transfer of files from the headnode to the worker node temp directory and back. * * * @author Karan Vahi * @version $Revision: 4605 $ */ public interface SLS { /** * The version associated with the API. */ public static final String VERSION = "1.2"; /** * Initializes the SLS implementation. * * @param bag the bag of objects. Contains access to catalogs etc. */ public void initialize( PegasusBag bag ); /** * Returns a boolean whether the SLS implementation does a condor based * modification or not. By condor based modification we mean whether it * uses condor specific classads to achieve the second level staging or not. * * @return boolean */ public boolean doesCondorModifications(); /** * Constructs a command line invocation for a job, with a given sls file. * The SLS file may be null. In the case where the SLS impl does not read from a file, * it is advised to create a file in generateSLSXXX methods, and then read * the file in this function and put it on the command line. * * @param job the job that is being sls enabled * @param slsFile the slsFile that is accessible on the worker node. Can be null * * @return invocation string */ public String invocationString( Job job, File slsFile ); /** * Returns a boolean indicating whether it requires an input file for a job * to do the transfers. Transfer reads from stdin the file transfers that * it needs to do. * * @param job the job being detected. * * @return true */ public boolean needsSLSInputTransfers( Job job ); /** * Returns a boolean indicating whether it requires an output file for a job * to do the transfers. Transfer reads from stdin the file transfers that * it needs to do. * * @param job the job being detected. * * @return true */ public boolean needsSLSOutputTransfers( Job job ); /** * Returns the LFN of sls input file. * * @param job Job * * @return the name of the sls input file. */ public String getSLSInputLFN( Job job ); /** * Returns the LFN of sls output file. * * @param job Job * * @return the name of the sls output file. */ public String getSLSOutputLFN( Job job ); /** * Generates a second level staging file of the input files to the worker node * directory. It should be consistent with the function needsSLSInputTransfers( Job ) * * @param job the job for which the file is being created * @param fileName the name of the file that needs to be written out. * @param submitDir the submit directory where it has to be written out. * @param stagingSiteDirectory the directory on the head node of the staging site. * @param workerNodeDirectory the worker node directory * * @return a Collection of FileTransfer objects listing the transfers that * need to be done. * * @see #needsSLSInputTransfers( Job) */ public Collection determineSLSInputTransfers( Job job, String fileName, String submitDir, String stagingSiteDirectory, String workerNodeDirectory ); /** * Generates a second level staging file of the output files from the worker node * directory. It should be consistent with the function needsSLSOutputTransfers( Job ) * * @param job the job for which the file is being created * @param fileName the name of the file that needs to be written out. * @param submitDir the submit directory where it has to be written out.
* @param stagingSiteDirectory the directory on the head node of the staging site. * @param workerNodeDirectory the worker node directory * * @return a Collection of FileTransfer objects listing the transfers that * need to be done. * * @see #needsSLSOutputTransfers( Job) */ public Collection determineSLSOutputTransfers( Job job, String fileName, String submitDir, String stagingSiteDirectory, String workerNodeDirectory ); /** * Modifies a job for the first level staging to headnode. This is to add * any files that need to be staged to the head node for a job specific * to the SLS implementation. If any file needs to be added, a FileTransfer * object should be created and added as an input or an output file. * * * @param job the job * @param submitDir the submit directory * @param slsInputLFN the sls input file if required, that is used for * staging in from the head node to worker node directory. * @param slsOutputLFN the sls output file if required, that is used * for staging in from the head node to worker node directory. * @return boolean */ public boolean modifyJobForFirstLevelStaging( Job job, String submitDir, String slsInputLFN, String slsOutputLFN ); /** * Modifies a compute job for second level staging. * * @param job the job to be modified. * @param stagingSiteURLPrefix the url prefix for the server on the staging site * @param stagingSitedirectory the directory on the staging site, where the input data is * read from and the output data written out. * @param workerNodeDirectory the directory in the worker node tmp * * @return boolean indicating whether job was successfully modified or not. */ public boolean modifyJobForWorkerNodeExecution( Job job, String stagingSiteURLPrefix, String stagingSitedirectory, String workerNodeDirectory ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/TestTPT.java0000644000175000017500000000310411757531137025270 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer; import edu.isi.pegasus.planner.common.Utility; import edu.isi.pegasus.planner.transfer.TPT; /** * Client for testing the TPT class.
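* <p>Typical invocation (illustrative; assumes the Pegasus classes are on the
* classpath):
* <pre>
* java edu.isi.pegasus.planner.transfer.TestTPT
* </pre>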
* * @author Karan Vahi * @version $Revision: 2567 $ */ public class TestTPT { public static void main(String[] args) { TPT tpt = new TPT(); //build the TPT map tpt.buildState(); //print it out tpt.print(); System.out.println(tpt.stageInThirdParty("X")); System.out.println(tpt.interThirdParty("X")); System.out.println(tpt.stageOutThirdParty("X")); String url = "file:///gpfs-wan/karan.txt"; System.out.println("Hostname is " + Utility.getHostName(url)); try{ System.out.println("Java hostanme is " + new java.net.URL(url).getHost()); }catch(Exception e){} System.out.println("Mount point is " + Utility.getAbsolutePath(url)); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/SingleFTPerXFERJobRefiner.java0000644000175000017500000000722711757531137030546 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.transfer.implementation.ImplementationFactory; import edu.isi.pegasus.planner.transfer.implementation.TransferImplementationFactoryException; import java.util.Collection; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The refiner interface, that determines the functions that need to be * implemented to add various types of transfer nodes to the workflow. * The single in the name indicates that the refiner works with the * implementation that handles one file transfer per transfer job. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2590 $ */ public abstract class SingleFTPerXFERJobRefiner extends AbstractRefiner{ /** * The overloaded constructor. * * @param dag the workflow to which transfer nodes need to be added. * @param bag the bag of initialization objects */ public SingleFTPerXFERJobRefiner( ADag dag, PegasusBag bag ){ super( dag, bag ); } /** * Loads the appropriate implementations that is required by this refinement * strategy for different types of transfer jobs. It calls to the factory * method to load the appropriate Implementor. * * Loads the implementing class corresponding to the mode specified by the user * at runtime in the properties file. The properties object passed should not * be null. * * * @param bag the bag of initialization objects * * @exception TransferImplementationFactoryException that nests any error that * might occur during the instantiation. 
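* <p>A hedged sketch of how a concrete refiner subclass might wire this up
* (the class name is hypothetical and the remaining abstract Refiner methods
* are elided):
* <pre>{@code
* public class MyRefiner extends SingleFTPerXFERJobRefiner {
*     public MyRefiner( ADag dag, PegasusBag bag ) {
*         super( dag, bag );
*         loadImplementations( bag ); // instantiates stage-in, inter and stage-out implementations
*     }
*     // ... abstract Refiner methods omitted in this sketch
* }
* }</pre>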
*/ public void loadImplementations( PegasusBag bag ) throws TransferImplementationFactoryException{ //this can work with any Implementation Factory this.mTXStageInImplementation = ImplementationFactory.loadInstance( bag, ImplementationFactory.TYPE_STAGE_IN); this.mTXStageInImplementation.setRefiner(this); this.mTXInterImplementation = ImplementationFactory.loadInstance( bag, ImplementationFactory.TYPE_STAGE_INTER); this.mTXInterImplementation.setRefiner(this); this.mTXStageOutImplementation = ImplementationFactory.loadInstance( bag, ImplementationFactory.TYPE_STAGE_OUT); this.mTXStageOutImplementation.setRefiner(this); //log config messages message super.logConfigMessages(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/sls/0000755000175000017500000000000011757531667023727 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/sls/Condor.java0000644000175000017500000002562211757531137026015 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.sls; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.transfer.SLS; import edu.isi.pegasus.planner.namespace.Pegasus; import java.io.File; import java.util.Iterator; import java.util.Collection; /** * This uses the Condor File Transfer mechanism for the second level staging. * * It will work only if the Pegasus Style profile ( pegasus::style ) has a value * of condor. * * @author Karan Vahi * @version $Revision: 4605 $ */ public class Condor implements SLS { /** * A short description of the transfer implementation. */ public static final String DESCRIPTION = "Condor File Transfer Mechanism"; /** * The handle to the site catalog. */ // protected PoolInfoProvider mSiteHandle; protected SiteStore mSiteStore; /** * The handle to the properties. */ protected PegasusProperties mProps; /** * The handle to the planner options. */ protected PlannerOptions mPOptions; /** * The handle to the logging manager. */ protected LogManager mLogger; /** * The default constructor. */ public Condor() { } /** * Initializes the SLS implementation. * * @param bag the bag of objects. Contains access to catalogs etc. */ public void initialize( PegasusBag bag ) { mProps = bag.getPegasusProperties(); mPOptions = bag.getPlannerOptions(); mLogger = bag.getLogger(); // mSiteHandle = bag.getHandleToSiteCatalog(); mSiteStore = bag.getHandleToSiteStore(); } /** * Returns a boolean whether the SLS implementation does a condor based * modification or not. 
By condor based modification we mean whether it * uses condor specific classads to achieve the second level staging or not. * * @return true */ public boolean doesCondorModifications(){ return true; } /** * Constructs a command line invocation for a job, with a given sls file. * The SLS file may be null. In the case where the SLS impl does not read from a file, * it is advised to create a file in generateSLSXXX methods, and then read * the file in this function and put it on the command line. * * @param job the job that is being sls enabled * @param slsFile the slsFile can be null * * @return invocation string */ public String invocationString( Job job, File slsFile ){ return null; } /** * Returns a boolean indicating whether it requires an input file for a job * to do the transfers. Transfer reads from stdin the file transfers that * it needs to do. For the Condor file transfer mechanism no SLS input file * is required, so this always returns false. * * @param job the job being detected. * * @return false */ public boolean needsSLSInputTransfers( Job job ) { return false; } /** * Returns a boolean indicating whether it requires an output file for a job * to do the transfers. Transfer reads from stdin the file transfers that * it needs to do. * * @param job the job being detected. * * @return false */ public boolean needsSLSOutputTransfers( Job job ) { return false; } /** * Returns the LFN of sls input file. * * @param job Job * * @return the name of the sls input file. */ public String getSLSInputLFN( Job job ){ return null; } /** * Returns the LFN of sls output file. * * @param job Job * * @return the name of the sls output file. */ public String getSLSOutputLFN( Job job ){ return null; } /** * Generates a second level staging file of the input files to the worker * node directory. * * @param job job for which the file is being created * @param fileName name of the file that needs to be written out. * @param submitDir submit directory where it has to be written out. * @param stagingSiteDirectory directory on the head node of the compute site. * @param workerNodeDirectory worker node directory * * @return a Collection of FileTransfer objects listing the transfers that * need to be done. * * @see #needsSLSInputTransfers( Job) */ public Collection determineSLSInputTransfers( Job job, String fileName, String submitDir, String stagingSiteDirectory, String workerNodeDirectory ) { return null; } /** * Generates a second level staging file of the output files from the worker * node directory. * * @param job the job for which the file is being created * @param fileName the name of the file that needs to be written out. * @param submitDir the submit directory where it has to be written out. * @param stagingSiteDirectory the directory on the head node of the * compute site. * @param workerNodeDirectory the worker node directory * * @return a Collection of FileTransfer objects listing the transfers that * need to be done. * * @see #needsSLSOutputTransfers( Job) */ public Collection determineSLSOutputTransfers(Job job, String fileName, String submitDir, String stagingSiteDirectory, String workerNodeDirectory) { return null; } /** * Modifies a job for the first level staging to headnode. This is to add * any files that need to be staged to the head node for a job specific * to the SLS implementation. If any file needs to be added, a FileTransfer * object should be created and added as an input or an output file.
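* <p>A hypothetical sketch of such an addition (the FileTransfer constructor
* arguments are assumed for illustration, not taken from this codebase):
* <pre>{@code
* FileTransfer ft = new FileTransfer( lfn, job.getName() );
* job.getInputFiles().add( ft ); // register it as a first level staging input
* }</pre>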
* * * @param job the job * @param submitDir the submit directory * @param slsInputLFN the sls input file if required, that is used for * staging in from the head node to worker node directory. * @param slsOutputLFN the sls output file if required, that is used * for staging in from the head node to worker node directory. * @return boolean */ public boolean modifyJobForFirstLevelStaging( Job job, String submitDir, String slsInputLFN, String slsOutputLFN ) { return true; } /** * Modifies a compute job for second level staging. Adds the appropriate * condor classads. It assumes that all the files are being moved to and from * the submit directory directly. Ignores any headnode parameters passed. * * * @param job the job to be modified. * @param stagingSiteURLPrefix the url prefix for the server on the staging site * @param stagingSitedirectory the directory on the staging site, where the nput data * is read from and the output data written out. * @param workerNodeDirectory the directory in the worker node tmp * * @return boolean indicating whether job was successfully modified or * not. * */ public boolean modifyJobForWorkerNodeExecution( Job job, String stagingSiteURLPrefix, String stagingSiteDirectory, String workerNodeDirectory ) { //sanity check on style of the job //handle the -w option that asks kickstart to change //directory before launching an executable. String style = (String)job.vdsNS.get( Pegasus.STYLE_KEY ); if( style == null || !( style.equals( Pegasus.CONDOR_STYLE ) || style.equals( Pegasus.GLIDEIN_STYLE ) ) ){ mLogger.log( "Invalid style for the job " + job.getName(), LogManager.ERROR_MESSAGE_LEVEL ); return false; } //remove any directory. let condor figure it out job.condorVariables.removeKey( "remote_initialdir" ); //set the initial dir to the headnode directory //as this is the directory where we are staging //the input and output data job.condorVariables.construct( "initialdir", stagingSiteDirectory ); //iterate through all the input files for( Iterator it = job.getInputFiles().iterator(); it.hasNext(); ){ PegasusFile pf = ( PegasusFile )it.next(); /* not reqd for 3.2 //ignore any input files of FileTransfer as they are first level //staging put in by Condor Transfer refiner if( pf instanceof FileTransfer ){ continue; } */ String lfn = pf.getLFN(); //add an input file for transfer //job.condorVariables.addIPFileForTransfer( stagingSiteDirectory + File.separator + lfn ); //we add just the lfn as we are setting initialdir job.condorVariables.addIPFileForTransfer( lfn ); } //iterate and add output files for transfer back for( Iterator it = job.getOutputFiles().iterator(); it.hasNext(); ){ PegasusFile pf = ( PegasusFile )it.next(); String lfn = pf.getLFN(); //ignore any input files of FileTransfer as they are first level //staging put in by Condor Transfer refiner if( pf instanceof FileTransfer ){ continue; } //add an input file for transfer job.condorVariables.addOPFileForTransfer( lfn ); } return true; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/sls/SLSFactoryException.java0000644000175000017500000000643211757531137030437 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.sls; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating SLS implementations. * * @author Karan Vahi * @version $Revision: 2258 $ */ public class SLSFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Second Level Staging Implementor"; /** * Constructs a SLSFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public SLSFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a SLSFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public SLSFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a SLSFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public SLSFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a SLSFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public SLSFactoryException( String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/sls/Transfer.java0000644000175000017500000006605011757531137026355 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.transfer.sls; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.transfer.SLS; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.code.gridstart.PegasusLite; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.namespace.ENV; import edu.isi.pegasus.planner.common.PegasusProperties; import java.io.File; import java.util.Iterator; import java.util.Set; import java.util.List; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; /** * This uses the transfer executable distributed with Pegasus to do the * second level staging. * * @author Karan Vahi * @version $Revision: 4780 $ */ public class Transfer implements SLS { /** * The transformation namespace for the transfer job. */ public static final String TRANSFORMATION_NAMESPACE = "pegasus"; /** * The name of the underlying transformation that is queried for in the * Transformation Catalog. */ public static final String TRANSFORMATION_NAME = "transfer"; /** * The version number for the transfer job. */ public static final String TRANSFORMATION_VERSION = null; /** * The derivation namespace for the transfer job. */ public static final String DERIVATION_NAMESPACE = "pegasus"; /** * The name of the underlying derivation. */ public static final String DERIVATION_NAME = "transfer"; /** * The derivation version number for the transfer job. */ public static final String DERIVATION_VERSION = "1.0"; /** * A short description of the transfer implementation. */ public static final String DESCRIPTION = "Pegasus Transfer Wrapper around GUC"; /** * The executable basename. */ public static final String EXECUTABLE_BASENAME = "pegasus-transfer"; /** * The handle to the site catalog. */ protected SiteStore mSiteStore; /** * The handle to the transformation catalog. */ protected TransformationCatalog mTCHandle; /** * The handle to the properties. */ protected PegasusProperties mProps; /** * The handle to the logging manager. */ protected LogManager mLogger; /** * The local url prefix for the submit host. */ protected String mLocalURLPrefix; /** * The handle to the transient replica catalog. */ protected ReplicaCatalog mTransientRC; /** * Any extra arguments that need to be passed ahead to the transfer client invocation. */ protected String mExtraArguments; /** * Boolean to track whether to stage the sls file or not. */ protected boolean mStageSLSFile; /** * Boolean to track whether the gridstart used is PegasusLite or not. */ protected boolean mSeqExecGridStartUsed; /** * The default constructor. */ public Transfer() { } /** * Initializes the SLS implementation. * * @param bag the bag of objects. Contains access to catalogs etc.
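* <p>
* For illustration, an implementation is normally not instantiated
* directly; the factory drives this method (a sketch, with bag assumed to
* be a fully populated PegasusBag):
* <pre>
*   SLS sls = SLSFactory.loadInstance( bag ); // internally calls sls.initialize( bag )
* </pre>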
*/ public void initialize( PegasusBag bag ) { mProps = bag.getPegasusProperties(); mLogger = bag.getLogger(); mSiteStore = bag.getHandleToSiteStore(); mTCHandle = bag.getHandleToTransformationCatalog(); mLocalURLPrefix = mSiteStore.lookup( "local" ).getHeadNodeFS().selectScratchSharedFileServer().getURLPrefix( ); mTransientRC = bag.getHandleToTransientReplicaCatalog(); mExtraArguments = mProps.getSLSTransferArguments(); mStageSLSFile = mProps.stageSLSFilesViaFirstLevelStaging(); mSeqExecGridStartUsed = mProps.getGridStart().equals( PegasusLite.CLASSNAME ); } /** * Returns a boolean indicating whether the SLS implementation does a condor based * modification or not. By condor based modification we mean whether it * uses condor specific classads to achieve the second level staging or not. * * @return false */ public boolean doesCondorModifications(){ return false; } /** * Constructs a command line invocation for a job, with a given sls file. * The SLS file may be null. In the case where the SLS impl does not read from a file, * it is advised to create a file in generateSLSXXX methods, and then read * the file in this function and put it on the command line. * * @param job the job that is being sls enabled * @param slsFile the slsFile can be null * * @return invocation string */ public String invocationString( Job job, File slsFile ){ StringBuffer invocation = new StringBuffer(); TransformationCatalogEntry entry = this.getTransformationCatalogEntry( job.getSiteHandle() ); String executable = ( entry == null )? this.getExecutableBasename() ://nothing in the transformation catalog, rely on the executable basename entry.getPhysicalTransformation();//rely on what is in the transformation catalog invocation.append( executable ); //append any extra arguments set by user //in properties if( mExtraArguments != null ){ invocation.append( " " ).append( mExtraArguments ); } if( slsFile != null ){ //add the required arguments to transfer invocation.append( " -f " ); //we add absolute path if the sls files are staged via //first level staging if( this.mStageSLSFile ){ invocation.append( slsFile.getAbsolutePath() ); } else{ //only the basename invocation.append( slsFile.getName() ); } } return invocation.toString(); } /** * Returns a boolean indicating whether it requires an input file for a job * to do the transfers. Transfer reads from stdin the file transfers that * it needs to do. Always returns true, as we need to transfer the proxy * always. * * @param job the job being detected. * * @return true */ public boolean needsSLSInputTransfers( Job job ) { return true; } /** * Returns a boolean indicating whether it requires an output file for a job * to do the transfers. Transfer reads from stdin the file transfers that * it needs to do. * * @param job the job being detected. * * @return true */ public boolean needsSLSOutputTransfers( Job job ) { Set files = job.getOutputFiles(); return !(files == null || files.isEmpty()); } /** * Returns the LFN of sls input file. * * @param job Job * * @return the name of the sls input file. */ public String getSLSInputLFN( Job job ){ StringBuffer lfn = new StringBuffer(); lfn.append( "sls_" ).append( job.getName() ).append( ".in" ); return lfn.toString(); } /** * Returns the LFN of sls output file. * * @param job Job * * @return the name of the sls output file.
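* For example, for a hypothetical job named merge_ID000002 the constructed
* LFN would be
* <pre>
*   sls_merge_ID000002.out
* </pre>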
*/ public String getSLSOutputLFN( Job job ){ StringBuffer lfn = new StringBuffer(); lfn.append( "sls_" ).append( job.getName() ).append( ".out" ); return lfn.toString(); } /** * Generates a second level staging file of the input files to the worker * node directory. * * @param job job for which the file is being created * @param fileName name of the file that needs to be written out. * @param submitDir submit directory where it has to be written out. * @param stagingSiteDirectory directory on the head node of the staging site. * @param workerNodeDirectory worker node directory * * @return a Collection of FileTransfer objects listing the transfers that * need to be done. * * @see #needsSLSInputTransfers( Job) */ public Collection determineSLSInputTransfers( Job job, String fileName, String submitDir, String stagingSiteDirectory, String workerNodeDirectory ) { //sanity check if ( !needsSLSInputTransfers( job ) ){ mLogger.log( "Not Writing out a SLS input file for job " + job.getName() , LogManager.DEBUG_MESSAGE_LEVEL ); return null; } Set files = job.getInputFiles(); // To handle for null conditions? // File sls = null; Collection result = new LinkedList(); //figure out the remote site's headnode gridftp server //and the working directory on it. //the below should be cached somehow String sourceURLPrefix = mSiteStore.lookup( job.getStagingSiteHandle() ).getHeadNodeFS().selectScratchSharedFileServer().getURLPrefix( ); String sourceDir = stagingSiteDirectory; String destDir = workerNodeDirectory; PegasusFile pf; //To do. distinguish the sls file from the other input files for( Iterator it = files.iterator(); it.hasNext(); ){ pf = ( PegasusFile ) it.next(); String lfn = pf.getLFN(); if( lfn.equals( ENV.X509_USER_PROXY_KEY ) ){ //ignore the proxy file for time being //as we are picking it from the head node directory continue; } //check if the input file is in the transient RC //all files in the DAX should be in the transient RC String transientPFN = mTransientRC.lookup( lfn, job.getSiteHandle() ); FileTransfer ft = new FileTransfer(); if( transientPFN == null ){ //create the default path from the directory //on the head node StringBuffer url = new StringBuffer(); url.append( sourceURLPrefix ).append( File.separator ); url.append( sourceDir ).append( File.separator ); url.append( lfn ); ft.addSource( job.getStagingSiteHandle(), url.toString() ); } else{ //use the location specified in //the transient replica catalog // input.write( transientPFN ); ft.addSource( job.getStagingSiteHandle(), transientPFN ); } //destination StringBuffer url = new StringBuffer(); url.append( "file://" ).append( destDir ).append( File.separator ). append( pf.getLFN() ); ft.addDestination( job.getSiteHandle(), url.toString() ); result.add( ft ); } return result; } /** * Generates a second level staging file of the output files from the worker * node directory. * * @param job the job for which the file is being created * @param fileName the name of the file that needs to be written out. * @param submitDir the submit directory where it has to be written out. * @param stagingSiteDirectory the directory on the head node of the * staging site. * @param workerNodeDirectory the worker node directory * * @return a Collection of FileTransfer objects listing the transfers that * need to be done.
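* For example, for an output file with LFN f.out the resulting FileTransfer
* pairs a file URL on the worker node with a URL on the staging site server
* (the hostname and paths below are illustrative only):
* <pre>
*   source      file:///worker/node/tmp/f.out
*   destination gsiftp://staging.example.edu/scratch/run0001/f.out
* </pre>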
* * @see #needsSLSOutputTransfers( Job) */ public Collection determineSLSOutputTransfers( Job job, String fileName, String submitDir, String stagingSiteDirectory, String workerNodeDirectory ) { //sanity check if ( !needsSLSOutputTransfers( job ) ){ mLogger.log( "Not Writing out a SLS output file for job " + job.getName() , LogManager.DEBUG_MESSAGE_LEVEL ); return null; } // To handle for null conditions? // File sls = null; Collection result = new LinkedList(); Set files = job.getOutputFiles(); //figure out the remote site's headnode gridftp server //and the working directory on it. //the below should be cached somehow String destURLPrefix = mSiteStore.lookup( job.getStagingSiteHandle() ).getHeadNodeFS().selectScratchSharedFileServer().getURLPrefix(); String destDir = stagingSiteDirectory; String sourceDir = workerNodeDirectory; PegasusFile pf; //To do. distinguish the sls file from the other input files for( Iterator it = files.iterator(); it.hasNext(); ){ pf = ( PegasusFile ) it.next(); FileTransfer ft = new FileTransfer(); //source StringBuffer url = new StringBuffer(); url.append( "file://" ).append( sourceDir ).append( File.separator ). append( pf.getLFN() ); ft.addSource( job.getSiteHandle(), url.toString() ); //destination url = new StringBuffer(); url.append( destURLPrefix ).append( File.separator ); url.append( destDir ).append( File.separator ); url.append( pf.getLFN() ); ft.addDestination( job.getStagingSiteHandle(), url.toString() ); result.add(ft); } return result; } /** * Modifies a job for the first level staging to headnode.This is to add * any files that needs to be staged to the head node for a job specific * to the SLS implementation. If any file needs to be added, a FileTransfer * object should be created and added as an input or an output file. * * * @param job the job * @param submitDir the submit directory * @param slsInputLFN the sls input file if required, that is used for * staging in from the head node to worker node directory. * @param slsOutputLFN the sls output file if required, that is used * for staging in from the head node to worker node directory. * @return boolean */ public boolean modifyJobForFirstLevelStaging( Job job, String submitDir, String slsInputLFN, String slsOutputLFN ) { String separator = File.separator; //holds the externally accessible path to the directory on the staging site String externalWorkDirectoryURL = mSiteStore.getExternalWorkDirectoryURL( job.getStagingSiteHandle() ); //sanity check if( !this.mStageSLSFile ){ //add condor file transfer keys if input and output lfs are not null if( slsInputLFN != null ){ job.condorVariables.addIPFileForTransfer( submitDir + File.separator + slsInputLFN ); } if( slsOutputLFN != null ){ job.condorVariables.addIPFileForTransfer( submitDir + File.separator + slsOutputLFN ); } return true; } //incorporate the sls input file if required if( slsInputLFN != null ){ FileTransfer ft = new FileTransfer( slsInputLFN, job.getName()); //the source sls is to be sent across from the local site //using the grid ftp server at local site. StringBuffer sourceURL = new StringBuffer(); sourceURL.append( mLocalURLPrefix ).append( separator ). append( submitDir ).append(separator). append( slsInputLFN ); ft.addSource("local", sourceURL.toString()); //the destination URL is the working directory on the filesystem //on the head node where the job is to be run. StringBuffer destURL = new StringBuffer(); destURL.append( externalWorkDirectoryURL ). append( separator ). 
append( slsInputLFN ); ft.addDestination( job.getStagingSiteHandle(), destURL.toString() ); //add this as input file for the job job.addInputFile( ft ); } //add the sls out file as input to the job if( slsOutputLFN != null ){ FileTransfer ft = new FileTransfer( slsOutputLFN, job.getName() ); //the source sls is to be sent across from the local site //using the grid ftp server at local site. StringBuffer sourceURL = new StringBuffer(); sourceURL.append( mLocalURLPrefix ).append( separator ). append( submitDir ).append( separator ). append( slsOutputLFN ); ft.addSource( "local" , sourceURL.toString() ); //the destination URL is the working directory on the filesystem //on the head node where the job is to be run. StringBuffer destURL = new StringBuffer(); destURL.append( externalWorkDirectoryURL ) .append( separator ). append( slsOutputLFN ); ft.addDestination( job.getStagingSiteHandle(), destURL.toString() ); //add this as input file for the job job.addInputFile( ft ); } return true; } /** * Modifies a compute job for second level staging. The only modification * it does is add the appropriate environment variables to the job. * * @param job the job to be modified. * @param stagingSiteURLPrefix the url prefix for the server on the staging site * @param stagingSitedirectory the directory on the staging site, where the input data * is read from and the output data written out. * @param workerNodeDirectory the directory in the worker node tmp * * @return boolean indicating whether job was successfully modified or * not. * */ public boolean modifyJobForWorkerNodeExecution( Job job, String stagingSiteURLPrefix, String stagingSitedirectory, String workerNodeDirectory ) { List envs = this.getEnvironmentVariables( job.getSiteHandle() ); if( envs == null || envs.isEmpty()){ //no hard failure. mLogger.log( "No special environment set for " + Separator.combine( this.TRANSFORMATION_NAMESPACE, this.TRANSFORMATION_NAME, this.TRANSFORMATION_VERSION ) + " for job " + job.getID(), LogManager.TRACE_MESSAGE_LEVEL ); return true; } for( Iterator it = envs.iterator(); it.hasNext(); ){ job.envVariables.checkKeyInNS( (Profile)it.next() ); } return true; } /** * Retrieves the transformation catalog entry for the executable that is * being used to transfer the files in the implementation. If an entry is * not specified in the Transformation Catalog, then null is returned. * * @param siteHandle the handle of the site where the transformation is * to be searched. * * @return the transformation catalog entry if found, else null. */ public TransformationCatalogEntry getTransformationCatalogEntry(String siteHandle){ List tcentries = null; try { //namespace and version are null for time being tcentries = mTCHandle.lookup( Transfer.TRANSFORMATION_NAMESPACE, Transfer.TRANSFORMATION_NAME, Transfer.TRANSFORMATION_VERSION, siteHandle, TCType.INSTALLED); } catch (Exception e) { mLogger.log( "Unable to retrieve entry from TC for " + Separator.combine( Transfer.TRANSFORMATION_NAMESPACE, Transfer.TRANSFORMATION_NAME, Transfer.TRANSFORMATION_VERSION ) + " Cause:" + e, LogManager.DEBUG_MESSAGE_LEVEL ); } return ( tcentries == null ) ? null: (TransformationCatalogEntry) tcentries.get(0); } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog.
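* The path to the executable is constructed from the site's environment;
* for example, if PEGASUS_HOME on the site is /opt/pegasus (an illustrative
* value), the constructed entry points to
* <pre>
*   /opt/pegasus/bin/pegasus-transfer
* </pre>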
* * @param namespace the namespace of the transfer transformation * @param name the logical name of the transfer transformation * @param version the version of the transfer transformation * @param executableBasename the basename of the executable * @param site the site for which the default entry is required. * * * @return the default entry. */ protected TransformationCatalogEntry defaultTCEntry( String namespace, String name, String version, String executableBasename, String site ){ TransformationCatalogEntry defaultTCEntry = null; //check if PEGASUS_HOME is set String home = mSiteStore.getPegasusHome( site ); //if PEGASUS_HOME is not set, use VDS_HOME home = ( home == null )? mSiteStore.getVDSHome( site ): home; mLogger.log( "Creating a default TC entry for " + Separator.combine( namespace, name, version ) + " at site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); //if home is still null if ( home == null ){ //cannot create default TC mLogger.log( "Unable to create a default entry for " + Separator.combine( namespace, name, version ) + " as PEGASUS_HOME or VDS_HOME is not set in Site Catalog" , LogManager.DEBUG_MESSAGE_LEVEL ); //set the flag back to true return defaultTCEntry; } //remove trailing / if specified home = ( home.charAt( home.length() - 1 ) == File.separatorChar )? home.substring( 0, home.length() - 1 ): home; //construct the path to it StringBuffer path = new StringBuffer(); path.append( home ).append( File.separator ). append( "bin" ).append( File.separator ). append( Transfer.EXECUTABLE_BASENAME ); defaultTCEntry = new TransformationCatalogEntry( namespace, name, version ); defaultTCEntry.setPhysicalTransformation( path.toString() ); defaultTCEntry.setResourceId( site ); defaultTCEntry.setType( TCType.INSTALLED ); defaultTCEntry.setSysInfo( this.mSiteStore.lookup( site ).getSysInfo() ); //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug. as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.DEBUG_MESSAGE_LEVEL ); } mLogger.log( "Created entry with path " + defaultTCEntry.getPhysicalTransformation(), LogManager.DEBUG_MESSAGE_LEVEL ); return defaultTCEntry; } /** * Returns the environment profiles that are required for the default * entry to sensibly work. Tries to retrieve the following variables * *
* <pre>
* PEGASUS_HOME
* GLOBUS_LOCATION
* LD_LIBRARY_PATH
* </pre>
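* If GLOBUS_LOCATION is set for the site but LD_LIBRARY_PATH is not, a
* default library path is constructed by appending lib; for example, with
* the illustrative value /opt/globus:
* <pre>
*   GLOBUS_LOCATION=/opt/globus  yields  LD_LIBRARY_PATH=/opt/globus/lib
* </pre>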
* * * @param site the site where the job is going to run. * * @return List of environment variables, else empty list if none are found */ protected List getEnvironmentVariables( String site ){ List result = new ArrayList(2) ; String pegasusHome = mSiteStore.getEnvironmentVariable( site, "PEGASUS_HOME" ); if( pegasusHome != null ){ //add the PEGASUS_HOME environment variable result.add( new Profile( Profile.ENV, "PEGASUS_HOME", pegasusHome ) ); } String globus = mSiteStore.getEnvironmentVariable( site, "GLOBUS_LOCATION" ); if( globus != null ){ //check for LD_LIBRARY_PATH String ldpath = mSiteStore.getEnvironmentVariable( site, "LD_LIBRARY_PATH" ); if ( ldpath == null ){ //construct a default LD_LIBRARY_PATH ldpath = globus; //remove trailing / if specified ldpath = ( ldpath.charAt( ldpath.length() - 1 ) == File.separatorChar )? ldpath.substring( 0, ldpath.length() - 1 ): ldpath; ldpath = ldpath + File.separator + "lib"; mLogger.log( "Constructed default LD_LIBRARY_PATH " + ldpath, LogManager.DEBUG_MESSAGE_LEVEL ); } //we have both the environment variables result.add( new Profile( Profile.ENV, "GLOBUS_LOCATION", globus) ); result.add( new Profile( Profile.ENV, "LD_LIBRARY_PATH", ldpath) ); } return result; } /** * Returns the basename of the transfer executable used. * * @return the executable basename. */ protected String getExecutableBasename() { return Transfer.EXECUTABLE_BASENAME; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/sls/SLSFactory.java0000644000175000017500000001125111757531137026553 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.sls; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.transfer.SLS; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; /** * A factory class to load the appropriate type of SLS Implementation to do * the Second Level Staging. * * @author Karan Vahi * @version $Revision: 4612 $ */ public class SLSFactory { /** * The default package where all the implementing classes are supposed to * reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.transfer.sls"; /** * The basename of the class that is used as the default SLS implementation. */ public static final String DEFAULT_SLS_IMPL_CLASS = "Transfer"; /** * This method loads the appropriate SLS implementation as specified by the * user at runtime. The name of the implementing class is picked up from the * properties, and defaults to the Transfer implementation if no property is * specified. * * * @param bag the bag of initialization objects. * * @return the instance of the class implementing this interface. * * @exception SLSFactoryException that nests any error that * might occur during the instantiation of the implementation.
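* <p>
* Typical usage is along these lines (a sketch, with bag assumed to be a
* fully populated PegasusBag):
* <pre>
*   SLS sls = SLSFactory.loadInstance( bag );
*   if( sls.needsSLSInputTransfers( job ) ){
*       Collection transfers = sls.determineSLSInputTransfers( job, sls.getSLSInputLFN( job ), submitDir, stagingSiteDirectory, workerNodeDirectory );
*   }
* </pre>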
* * @see #DEFAULT_PACKAGE_NAME */ public static SLS loadInstance( PegasusBag bag ) throws SLSFactoryException{ PegasusProperties properties = bag.getPegasusProperties(); PlannerOptions options = bag.getPlannerOptions(); //sanity check if(properties == null){ throw new SLSFactoryException( "Invalid properties passed" ); } if(options == null){ throw new SLSFactoryException( "Invalid Options specified" ); } String className = properties.getSLSTransferImplementation(); if( className == null ){ className = DEFAULT_SLS_IMPL_CLASS; //resort to the default implementation } return loadInstance( bag, className ); } /** * This method loads the appropriate SLS implementation as specified by the * user at runtime. * * * @param bag the bag of initialization objects. * @param className the name of the implementing class. * * @return the instance of the class implementing this interface. * * @exception SLSFactoryException that nests any error that * might occur during the instantiation of the implementation. * * @see #DEFAULT_PACKAGE_NAME */ public static SLS loadInstance( PegasusBag bag, String className) throws SLSFactoryException{ PegasusProperties properties = bag.getPegasusProperties(); PlannerOptions options = bag.getPlannerOptions(); //sanity check if (properties == null) { throw new SLSFactoryException( "Invalid properties passed" ); } if (className == null) { throw new SLSFactoryException( "Invalid className specified" ); } //prepend the package name if classname is actually just a basename className = (className.indexOf('.') == -1) ? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className : //load directly className; //try loading the class dynamically SLS sls = null; try { DynamicLoader dl = new DynamicLoader( className ); sls = ( SLS ) dl.instantiate( new Object[0] ); //initialize the loaded implementation sls.initialize( bag ); } catch ( Exception e ) { throw new SLSFactoryException( "Instantiating SLS Implementor ", className, e ); } return sls; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/SingleFTPerXFERJob.java0000644000175000017500000000172411757531137027227 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer; /** * An empty interface, that allows for grouping of implementations that can * handle only one file transfer per transfer job like old guc and Stork. * * @author Karan Vahi * @version $Revision: 2258 $ */ public interface SingleFTPerXFERJob /*extends Implementation */{ //empty for time being } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/MultipleFTPerXFERJob.java0000644000175000017500000000172711757531137027604 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer; /** * An empty interface, that allows for grouping of implementations that can * handle multiple file transfers per transfer job like old guc and Stork. * * @author Karan Vahi * @version $Revision: 2258 $ */ public interface MultipleFTPerXFERJob /*extends Implementation*/ { //empty for time being } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/RemoteTransfer.java0000644000175000017500000003222211757531137026722 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import java.util.Iterator; import java.util.TreeMap; import java.util.Map; import java.util.Set; import java.util.HashSet; import java.util.StringTokenizer; /** * A common class, that builds up the state from the properties to determine * whether a user wants certain type of transfer jobs for particular site to * run remotely. This allows a user to override the default behavior of how * Pegasus decides whether a transfer job runs locally ( on the submit host) or * remotely. * * * @author Karan Vahi * @version $Revision: 2567 $ */ public class RemoteTransfer { /** * The constant to apply to all sites. */ public static final String ALL_SITES = "*"; /** * The property name to get the sites for which all transfers need to * be executed remotely. */ public static final String ALL_TRANSFERS_REMOTE_PROPERTY = "pegasus.transfer.*.remote.sites"; /** * The property name to get the sites for which stage-in transfers need to * be executed remotely. */ public static final String STAGE_IN_TRANSFERS_REMOTE_PROPERTY = "pegasus.transfer.stagein.remote.sites"; /** * The property name to get the sites for which inter site transfers need * to be executed remotely. */ public static final String INTER_TRANSFERS_REMOTE_PROPERTY = "pegasus.transfer.inter.remote.sites"; /** * The property name to get the sites for which stage-out transfers need to * be executed remotely. */ public static final String STAGE_OUT_TRANSFERS_REMOTE_PROPERTY = "pegasus.transfer.stageout.remote.sites"; /** * An internal table that maps remote transfer type to the corresponding * property. */ private static Map mPropertyTable; /** * The handle to the properties object holding the properties relevant to * Pegasus. */ private PegasusProperties mProps; /** * The handle to the logging object. 
*/ private LogManager mLogger; /** * The map indexed by site name, that contains the state for all the sites. */ private Map mStateMap; /** * Singleton access to the type table * Contains the mapping of a property to the third party transfer type * * @return map */ private static Map propertyTable(){ //singleton access if (mPropertyTable == null) { mPropertyTable = new TreeMap(); mPropertyTable.put(new Integer(TransferState.STAGE_IN_REMOTE_TYPE), STAGE_IN_TRANSFERS_REMOTE_PROPERTY); mPropertyTable.put(new Integer(TransferState.INTER_REMOTE_TYPE), INTER_TRANSFERS_REMOTE_PROPERTY); mPropertyTable.put(new Integer(TransferState.STAGE_OUT_REMOTE_TYPE), STAGE_OUT_TRANSFERS_REMOTE_PROPERTY); mPropertyTable.put(new Integer(TransferState.ALL_REMOTE_TYPE), ALL_TRANSFERS_REMOTE_PROPERTY); } return mPropertyTable; } /** * The default constructor. */ public RemoteTransfer() { mProps = PegasusProperties.getInstance(); mLogger = LogManagerFactory.loadSingletonInstance( ); mStateMap = new TreeMap(); } /** * The overloaded constructor. * * @param properties handle to the properties required. */ public RemoteTransfer(PegasusProperties properties) { mProps = properties; mLogger = LogManagerFactory.loadSingletonInstance( properties ); mStateMap = new TreeMap(); } /** * Builds up the remote transfers state for all the sites. This reflects what is * set in the properties file. */ public void buildState(){ String site; Set sites; //build for stagein transfers buildState(TransferState.STAGE_IN_REMOTE_TYPE); //build for inter site transfers buildState(TransferState.INTER_REMOTE_TYPE); //build for stage out transfers buildState(TransferState.STAGE_OUT_REMOTE_TYPE); //build for all transfers buildState(TransferState.ALL_REMOTE_TYPE); //put the all sites (site = *) entry TransferState allState; if(containsKey(ALL_SITES)){ allState = get(ALL_SITES); } else{ allState = new TransferState(); put(ALL_SITES,allState); } if(allState.getState() != 0x0){ //apply the state to all sites for(Iterator it = mStateMap.values().iterator();it.hasNext();){ TransferState state = (TransferState)it.next(); state.set(allState.getState()); } } } /** * Adds to the existing state table, state information for a particular * type of transfers. * * @param type the type of transfer. */ private void buildState(int type){ String property = (String)propertyTable().get(new Integer(type)); Set sites = getThirdPartySites( (type > TransferState.ALL_REMOTE_TYPE)? mProps.getThirdPartySitesRemote(property): mProps.getThirdPartySites(property) ); String site; for(Iterator it = sites.iterator();it.hasNext();){ site = (String)it.next(); TransferState state = containsKey(site)? get(site): new TransferState(); state.set(type); put(site, state); } } /** * Returns a boolean indicating whether to execute stage-in transfers on * remote site or not. * * @return boolean */ public boolean stageInOnRemoteSite(String site){ return containsKey(site)? get(site).get(TransferState.STAGE_IN_REMOTE_TYPE): //return the value for all sites get(ALL_SITES).get(TransferState.STAGE_IN_REMOTE_TYPE); } /** * Returns a boolean indicating whether to execute inter site transfers on * remote site or not. * * @return boolean */ public boolean interOnRemoteSite(String site){ return containsKey(site)? get(site).get(TransferState.INTER_REMOTE_TYPE): //return the value for all sites get(ALL_SITES).get(TransferState.INTER_REMOTE_TYPE); } /** * Returns a boolean indicating whether to execute stage-out transfers on * remote site or not. 
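* For example, with the property (site names illustrative)
* <pre>
*   pegasus.transfer.stageout.remote.sites = siteA,siteB
* </pre>
* this returns true for siteA and siteB, and for any other site falls back
* to the state recorded for the * entry.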
* * @return boolean */ public boolean stageOutOnRemoteSite(String site){ return containsKey(site)? get(site).get(TransferState.STAGE_OUT_REMOTE_TYPE): //return the value for all sites get(ALL_SITES).get(TransferState.STAGE_OUT_REMOTE_TYPE); } /** * Prints out the remote transfer state for the various sites. */ public void print(){ StringBuffer sb = new StringBuffer(); TransferState allSitesState = null; Object key; sb.append("Site | SI_REMOTE, IN_REMOTE, SO_REMOTE"); for(Iterator it = mStateMap.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = entry.getKey(); if(key.equals(ALL_SITES)){ //store value for printing in the end allSitesState = (TransferState)entry.getValue(); } else{ sb.append('\n').append(key).append(" | ").append(entry.getValue()); } } if(allSitesState != null){ sb.append('\n').append(ALL_SITES).append(" ").append(" | ").append(allSitesState); } System.out.println(sb.toString()); } /** * Returns whether there is an entry for a particular site or not. * * @param site the site handle for a site. * * @return boolean */ private boolean containsKey(String site){ return mStateMap.containsKey(site); } /** * Inserts an entry in to the State Map, that maintains state of various * sites. * * @param site the site handle for a site. * @param state the remote transfer state for the site. */ private void put(String site,TransferState state){ mStateMap.put(site, state); } /** * Returns the remote transfer state for a particular site. * * @param site the site handle for the site. * @return state the remote transfer state for the site if there is an entry, * else null. */ private TransferState get(String site){ Object state = mStateMap.get(site); return (state == null)?null:(TransferState)state; } /** * Returns a set of sites parsed from a comma separated property value. An * empty set is returned if value is * null. * * @param value the value in the properties file. * * @return Set containing the names of the sites. */ private Set getThirdPartySites(String value) { HashSet set = new HashSet(); String site; if (value == null) { return set; } for (StringTokenizer st = new StringTokenizer(value, ",");st.hasMoreTokens();){ site = (String) st.nextToken(); /* mLogger.log(site + " is a third party enabled site " + "for " + desc + " transfers", LogManager.DEBUG_MESSAGE_LEVEL); */ set.add(site); } return set; } /** * An inner class that holds the state for a particular site, as to whether to * execute transfers remotely or not. * */ private class TransferState{ /** * The constant to denote that a stage-in transfer is to be executed remotely */ public static final int STAGE_IN_REMOTE_TYPE= 0x1; //000001 /** * The constant to denote that an inter site transfer is to be executed remotely */ public static final int INTER_REMOTE_TYPE = 0x2; //000010 /** * The constant to denote that a stage-out transfer is to be executed remotely */ public static final int STAGE_OUT_REMOTE_TYPE = 0x4;//000100 /** * The constant to denote that all transfers are to be executed remotely */ public static final int ALL_REMOTE_TYPE = 0x7; //000111 /** * Stores the state as an integer. */ private int mState; /** * The default constructor. */ public TransferState(){ mState = 0x0; } /** * Returns the state. * * @return the state as an int */ public int getState(){ return mState; } /** * Sets a type of transfer to be executed remotely.
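* Since the state is a bitmask, successive calls accumulate; for example
* <pre>
*   set( STAGE_IN_REMOTE_TYPE );  // mState == 0x1
*   set( INTER_REMOTE_TYPE );     // mState == 0x3
* </pre>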
* * @param type the type of transfer to be set TPT */ public void set(int type){ //no type checking for time being mState = mState | type; } /** * Returns a boolean indicating whether the attribute passed is set * in the transfer state or not. * The attribute types are as constants in this class. * * @param type the attribute type. */ public boolean get(int type){ return ((mState & type) == type); } /** * Returns a textual description of the state as * (stageinRemote,interRemote,stageoutRemote). * * @return the textual description. */ public String toString(){ StringBuffer sb = new StringBuffer(36); sb.append('('). append(this.get(TransferState.STAGE_IN_REMOTE_TYPE)).append(" ").append(','). append(this.get(TransferState.INTER_REMOTE_TYPE)).append(" ").append(','). append(this.get(TransferState.STAGE_OUT_REMOTE_TYPE)). append(')'); return sb.toString(); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/0000755000175000017500000000000011757531667026153 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/Abstract.java0000644000175000017500000011427611757531137030564 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.implementation; import java.io.File; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.StringTokenizer; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.credential.impl.Irods; import edu.isi.pegasus.common.credential.impl.Proxy; import edu.isi.pegasus.common.credential.impl.S3CFG; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.common.Utility; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.namespace.ENV; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.transfer.Implementation; import edu.isi.pegasus.planner.transfer.Refiner; /** * An abstract implementation that implements some of the common functions in * the Implementation Interface that are required by all the implementations. 
* * @author Karan Vahi * @version $Revision: 4778 $ */ public abstract class Abstract implements Implementation{ /** * The logical name of the transformation that creates directories on the * remote execution pools. */ public static final String CHANGE_XBIT_TRANSFORMATION = "chmod"; /** * The basename of the pegasus dirmanager executable. */ public static final String XBIT_EXECUTABLE_BASENAME = "chmod"; /** * The transformation namespace for the setXBit jobs. */ public static final String XBIT_TRANSFORMATION_NS = "system"; /** * The version number for the derivations for setXBit jobs. */ public static final String XBIT_TRANSFORMATION_VERSION = null; /** * The derivation namespace for the setXBit jobs. */ public static final String XBIT_DERIVATION_NS = "system"; /** * The version number for the derivations for setXBit jobs. */ public static final String XBIT_DERIVATION_VERSION = null; /** * The prefix for the jobs which are added to set X bit for the staged * executables. */ public static final String SET_XBIT_PREFIX = "chmod_"; /** * The prefix for the NoOP jobs that are created. */ public static final String NOOP_PREFIX = "noop_"; /** * The path to the user proxy on the submit host (local pool), that is picked * up for use in transfer of proxies. */ protected String mLocalUserProxy; /** * The basename of the user proxy , that is picked up for use in transfer of * proxies. */ protected String mLocalUserProxyBasename; /** * The path to the s3cfg file on the submit host (local pool). */ protected String mLocalS3cfg; /** * The basename of the user s3cfg file */ protected String mLocalS3cfgBasename; /** * The path to the irodsEnv file on the submit host (local pool). */ protected String mLocalIrodsEnv; /** * The basename of the user irodsEnv file */ protected String mLocalIrodsEnvBasename; /** * The handle to the properties object holding the properties relevant to * Pegasus. */ protected PegasusProperties mProps; /** * Contains the various options to the Planner as passed by the user at * runtime. */ protected PlannerOptions mPOptions; /** * The handle to the Site Catalog. It is instantiated in this class. */ // protected PoolInfoProvider mSCHandle; /** * The handle to the Pool Info Provider. It is instantiated in this class */ //protected PoolInfoProvider mPoolHandle; protected SiteStore mSiteStore; /** * The handle to the Transformation Catalog. It must be instantiated in the * implementing class */ protected TransformationCatalog mTCHandle; /** * The handle to the refiner that loaded this implementation. */ protected Refiner mRefiner; /** * The logging object which is used to log all the messages. * */ protected LogManager mLogger; /** * The set of sites for which chmod job creation has to be disabled while * doing executable staging. */ protected Set mDisabledChmodSites; /** * A boolean indicating whether chmod is disabled for all sites. */ protected boolean mChmodDisabledForAllSites; /** * A boolean indicating whether chmod jobs should be created that set the * xbit in case of executable staging. */ protected boolean mAddNodesForSettingXBit; /** * The overloaded constructor, that is called by the Factory to load the * class. * * @param bag the bag of initialization objects. 
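* <p>
* For example (illustrative value), disabling chmod job creation on all
* sites via the corresponding property would be reflected here as
* <pre>
*   mDisabledChmodSites = { * }   and   mChmodDisabledForAllSites == true
* </pre>
* with the actual property name left to getChmodDisabledSites() in
* PegasusProperties.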
*/ public Abstract( PegasusBag bag ){ mProps = bag.getPegasusProperties(); mPOptions = bag.getPlannerOptions(); mLogger = bag.getLogger(); mSiteStore = bag.getHandleToSiteStore(); mTCHandle = bag.getHandleToTransformationCatalog(); //build up the set of disabled chmod sites mDisabledChmodSites = determineDisabledChmodSites( mProps.getChmodDisabledSites() ); mChmodDisabledForAllSites = mDisabledChmodSites.contains( "*" ); //from pegasus release 3.2 onwards xbit jobs are not added //for worker node execution/Pegasus Lite mAddNodesForSettingXBit = !mProps.executeOnWorkerNode(); Proxy p = new Proxy(); p.initialize(bag); mLocalUserProxy = p.getPath(); //set the path to user proxy only if the proxy exists if( !new File( mLocalUserProxy).exists() ){ mLogger.log( "The user proxy does not exist - " + mLocalUserProxy, LogManager.DEBUG_MESSAGE_LEVEL ); mLocalUserProxy = null; } mLocalUserProxyBasename = (mLocalUserProxy == null) ? null : new File(mLocalUserProxy).getName(); S3CFG s3cfg = new S3CFG(); s3cfg.initialize(bag); mLocalS3cfg = s3cfg.getPath(); //set the path to s3cfg only if the scfg exists if( mLocalS3cfg != null && !new File(mLocalS3cfg).exists() ){ mLogger.log( "The s3cfg file does not exist - " + mLocalS3cfg, LogManager.DEBUG_MESSAGE_LEVEL ); mLocalS3cfg = null; } mLocalS3cfgBasename = (mLocalS3cfg == null) ? null : new File(mLocalS3cfg).getName(); // irods Irods irods = new Irods(); irods.initialize(bag); mLocalIrodsEnv = irods.getPath(); //set the path to irodsEnv file only if the file exists if( mLocalIrodsEnv != null && !new File(mLocalIrodsEnv).exists() ){ mLogger.log( "The irodsEnv file does not exist - " + mLocalIrodsEnv, LogManager.ERROR_MESSAGE_LEVEL ); mLocalIrodsEnv = null; } mLocalIrodsEnvBasename = (mLocalIrodsEnv == null) ? null : new File(mLocalIrodsEnv).getName(); } /** * Applies priorities to the transfer jobs if a priority is specified * in the properties file. * * @param job the transfer job . */ public void applyPriority(TransferJob job){ String priority = this.getPriority(job); if(priority != null){ job.condorVariables.construct(Condor.PRIORITY_KEY, priority); } } /** * Determines if there is a need to transfer proxy for the transfer * job or not. If there is a need to transfer proxy, then the job is * modified to create the correct condor commands to transfer the proxy. * Proxy is usually transferred if the Pegasus profile TRANSFER_PROXY is set, * or the job is being run in the condor vanilla universe. The proxy is * transferred from the submit host (i.e site local). The location is * determined from the value of the X509_USER_PROXY profile key associated * in the env namespace. * * @param job the transfer job . * * @return boolean true job was modified to transfer the proxy, else * false when job is not modified. * * @deprecated */ public boolean checkAndTransferProxy(TransferJob job){ boolean transfer = false; //not handling for third party transfers correctly. String style = job.vdsNS.containsKey(Pegasus.STYLE_KEY)? (String)job.vdsNS.get(Pegasus.STYLE_KEY): Pegasus.GLOBUS_STYLE; String universe = job.condorVariables.containsKey(Condor.UNIVERSE_KEY)? 
(String)job.condorVariables.get(Condor.UNIVERSE_KEY): //empty ""; boolean condition1 = job.vdsNS.getBooleanValue(Pegasus.TRANSFER_PROXY_KEY) ; boolean condition2 = ((style.equalsIgnoreCase(Pegasus.CONDOR_STYLE))|| (style.equalsIgnoreCase(Pegasus.GLIDEIN_STYLE))|| (job.executionPool.equalsIgnoreCase("local") && (universe.equalsIgnoreCase(Condor.VANILLA_UNIVERSE) || universe.equalsIgnoreCase(Condor.STANDARD_UNIVERSE)) ) ); //condition1 is explicit request for transfer of proxy //condition2 is determination of the glide in case if(condition1 || condition2){ if(mLocalUserProxyBasename != null){ //set the transfer of proxy from the submit host //to the remote execution pool, using internal //condor transfer mechanism //add condor key transfer_input_files //and other required condor keys /* job.condorVariables.checkKeyInNS(Condor.TRANSFER_IP_FILES_KEY, mLocalUserProxy); job.condorVariables.construct("should_transfer_files","YES"); job.condorVariables.construct("when_to_transfer_output","ON_EXIT"); */ job.condorVariables.addIPFileForTransfer(mLocalUserProxy); //set the environment variable to basefile name //only for transfer jobs that dont execute on the local site if( job.getSiteHandle().equalsIgnoreCase( "local" ) ){ //the full path job.envVariables.checkKeyInNS(ENV.X509_USER_PROXY_KEY, this.mLocalUserProxy ); } else{ //just the basename job.envVariables.checkKeyInNS(ENV.X509_USER_PROXY_KEY, mLocalUserProxyBasename); } if(!condition2){ //means the transfer job is not being run in //condor vanilla universe. This means, that in //all probability the proxy is being transferred //by gass_cache, and that does not preserve file //permissions correctly if( job.getSiteHandle().equalsIgnoreCase( "local" ) ){ //the full path job.envVariables.checkKeyInNS( ENV.GRIDSTART_PREJOB, "/bin/chmod 600 " + mLocalUserProxy ); } else{ job.envVariables.checkKeyInNS(ENV.GRIDSTART_PREJOB, "/bin/chmod 600 " + mLocalUserProxyBasename); } } if(!condition1){ //for glide in jobs also tag we are //transferring proxy job.vdsNS.checkKeyInNS(Pegasus.TRANSFER_PROXY_KEY,"true"); } //we want the transfer job to be run in the //directory that Condor or GRAM decided to run job.condorVariables.removeKey("remote_initialdir"); transfer = true; } } return transfer; } /** * Determines if there is a need to transfer the irodsEnvFile for the transfer * job or not. If there is a need to transfert the file, then the job is * modified to create the correct condor commands to transfer the file. * The file is transferred from the submit host (i.e site local). * * @param job the transfer job . * * @return boolean true job was modified to transfer the irodsEnvFile, else * false when job is not modified. * @deprecated */ public boolean checkAndTransferIrodsEnvFile(TransferJob job){ // for remote execution, transfer the irodsEnvFile file if ( ! job.getSiteHandle().equalsIgnoreCase( "local" ) && mLocalIrodsEnv != null && ! job.envVariables.containsKey(Irods.IRODSENVFILE) ) { job.condorVariables.addIPFileForTransfer(mLocalIrodsEnv); //just the basename job.envVariables.checkKeyInNS(Irods.IRODSENVFILE, mLocalIrodsEnvBasename); } return true; } /** * Sets the callback to the refiner, that has loaded this implementation. * * @param refiner the transfer refiner that loaded the implementation. */ public void setRefiner(Refiner refiner){ mRefiner = refiner; } /** * Adds the dirmanager to the workflow, that do a chmod on the files * being staged. * * @param computeJob the computeJob for which the files are being staged. 
* @param txJob the transfer job that is staging the files. * @param execFiles the executable files that are being staged. * * @return boolean indicating whether any XBitJobs were successfully added or * not. */ protected boolean addSetXBitJobs(Job computeJob, Job txJob, Collection execFiles){ return this.addSetXBitJobs( computeJob, txJob.getName(), execFiles, txJob.getJobType() ); } /** * Adds the dirmanager job to the workflow, that does a chmod on the files * being staged. * * @param computeJob the computeJob for which the files are * being staged. * @param txJobName the name of the transfer job that is staging the files. * @param execFiles the executable files that are being staged. * @param transferClass the class of transfer job * * @return boolean indicating whether any XBitJobs were successfully added or * not. */ public boolean addSetXBitJobs( Job computeJob, String txJobName, Collection execFiles, int transferClass ){ boolean added = false; String computeJobName = computeJob.getName(); String site = computeJob.getSiteHandle(); //sanity check if(execFiles == null || execFiles.isEmpty()){ return added; } if(transferClass != Job.STAGE_IN_JOB){ //extra check. throw an exception throw new RuntimeException("Invalid Transfer Type (" + txJobName + "," + transferClass + ") for staging executable files "); } //figure out whether we need to create a chmod or noop boolean noop = this.disableChmodJobCreation( site ); //add setXBit jobs into the workflow int counter = 0; for( Iterator it = execFiles.iterator(); it.hasNext(); counter++ ){ FileTransfer execFile = (FileTransfer)it.next(); String xBitJobName = this.getSetXBitJobName( computeJobName, counter );//create a chmod job Job xBitJob = noop ? this.createNoOPJob( xBitJobName ) : //create a NOOP job this.createSetXBitJob( execFile, xBitJobName ); //create a chmod job if( xBitJob == null ){ //error occurred while creating the job throw new RuntimeException("Unable to create setXBitJob " + "corresponding to compute job " + computeJobName + " and transfer" + " job " + txJobName); } else{ added = true; mRefiner.addJob( xBitJob ); //add the relation txJob->XBitJob->ComputeJob mRefiner.addRelation( txJobName, xBitJob.getName(), xBitJob.getSiteHandle(), true); mRefiner.addRelation( xBitJob.getName(), computeJobName ); } } return added; } /** * Creates a job, that does a chmod on the files * being staged. * * @param computeJob the computeJob for which the files are * being staged. * @param execFiles the executable files that are being staged. * @param transferClass the class of transfer job * @param xbitIndex index to be used for creating the name of XBitJob. * * @return the job object for the xBitJob */ public Job createSetXBitJob( Job computeJob, Collection execFiles, int transferClass, int xbitIndex ){ String computeJobName = computeJob.getName(); String site = computeJob.getSiteHandle(); if(transferClass != Job.STAGE_IN_JOB){ //extra check. throw an exception throw new RuntimeException( "Invalid Transfer Type (" + transferClass + ") for staging executable files for job " + computeJob.getName() ); } //figure out whether we need to create a chmod or noop boolean noop = this.disableChmodJobCreation( site ); //add setXBit jobs into the workflow int counter = 0; String xBitJobName = this.getSetXBitJobName( computeJobName, xbitIndex );//create a chmod job Job xBitJob = noop ?
this.createNoOPJob( xBitJobName ) : //create a NOOP job this.createSetXBitJob( execFiles, xBitJobName, computeJob.getSiteHandle() ); //create a chmod job if( xBitJob == null ){ //error occured while creating the job throw new RuntimeException("Unable to create setXBitJob " + "corresponding to compute job " + computeJobName ); } return xBitJob; } /** * Adds the dirmanager job to the workflow, that do a chmod on the files * being staged. * * @param computeJob the computeJob for which the files are * being staged. * @param txJobName the name of the transfer job that is staging the files. * @param execFiles the executable files that are being staged. * @param transferClass the class of transfer job * @param xbitIndex index to be used for creating the name of XBitJob. * * @return boolean indicating whether any XBitJobs were succesfully added or * not. */ public boolean addSetXBitJobs( Job computeJob, String txJobName, Collection execFiles, int transferClass, int xbitIndex ){ boolean added = false; String computeJobName = computeJob.getName(); String site = computeJob.getSiteHandle(); //sanity check if(execFiles == null || execFiles.isEmpty()){ return added; } if(transferClass != Job.STAGE_IN_JOB){ //extra check. throw an exception throw new RuntimeException("Invalid Transfer Type (" + txJobName + "," + transferClass + ") for staging executable files "); } //figure out whether we need to create a chmod or noop boolean noop = this.disableChmodJobCreation( site ); //add setXBit jobs into the workflow int counter = 0; for( Iterator it = execFiles.iterator(); it.hasNext(); counter++ ){ FileTransfer execFile = (FileTransfer)it.next(); String xBitJobName = this.getSetXBitJobName( computeJobName, xbitIndex );//create a chmod job Job xBitJob = noop ? this.createNoOPJob( xBitJobName ) : //create a NOOP job this.createSetXBitJob( execFile, xBitJobName ); //create a chmod job if( xBitJob == null ){ //error occured while creating the job throw new RuntimeException("Unable to create setXBitJob " + "corresponding to compute job " + computeJobName + " and transfer" + " job " + txJobName); } else{ added = true; mRefiner.addJob( xBitJob ); //add the relation txJob->XBitJob->ComputeJob mRefiner.addRelation( txJobName, xBitJob.getName(), xBitJob.getSiteHandle(), true); mRefiner.addRelation( xBitJob.getName(), computeJobName ); } } return added; } /** * Generates the name of the setXBitJob , that is unique for the given * workflow. * * @param name the name of the compute job * @param counter the index for the setXBit job. * * @return the name of the setXBitJob . */ public String getSetXBitJobName(String name, int counter){ StringBuffer sb = new StringBuffer(); sb.append(this.SET_XBIT_PREFIX).append(name). append("_").append(counter); return sb.toString(); } /** * Generates the name of the noop job , that is unique for the given * workflow. * * @param name the name of the compute job * @param counter the index for the noop job. * * @return the name of the setXBitJob . */ public String getNOOPJobName( String name, int counter ){ StringBuffer sb = new StringBuffer(); sb.append( this.NOOP_PREFIX ).append( name ). append( "_" ).append( counter ); return sb.toString(); } /** * It creates a NoOP job that runs on the submit host. * * @param name the name to be assigned to the noop job * * @return the noop job. 
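* For illustration, the constructed job essentially reduces to the
* following settings (see the method body):
* <pre>
*   site       : local
*   executable : /bin/true
*   profiles   : noop_job = true, noop_job_exit_code = 0, kickstart disabled
* </pre>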
*/ public Job createNoOPJob( String name ) { Job newJob = new Job(); List entries = null; String execPath = null; //jobname has the dagname and index to indicate different //jobs for deferred planning newJob.setName( name ); newJob.setTransformation( "pegasus", "noop", "1.0" ); newJob.setDerivation( "pegasus", "noop", "1.0" ); // newJob.setUniverse( "vanilla" ); newJob.setUniverse( GridGateway.JOB_TYPE.auxillary.toString()); //the noop job does not get run by condor //even if it does, giving it the maximum //possible chance newJob.executable = "/bin/true"; //construct noop keys newJob.setSiteHandle( "local" ); newJob.setJobType( Job.CREATE_DIR_JOB ); construct(newJob,"noop_job","true"); construct(newJob,"noop_job_exit_code","0"); //we do not want the job to be launched //by kickstart, as the job is not run actually newJob.vdsNS.checkKeyInNS( Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[GridStartFactory.NO_GRIDSTART_INDEX] ); return newJob; } /** * Creates a dirmanager job, that does a chmod on the file being staged. * The file being staged should be of type executable. Though no explicit * check is made for that. The staged file is the one whose X bit would be * set on execution of this job. The site at which job is executed, is * determined from the site associated with the destination URL. * * @param files the collection FileTransfer containing the file that has * to be X Bit Set. * @param name the name that has to be assigned to the job. * @param site the site at which the job has to be created * * @return the chmod job, else null if it is not able to be created * for some reason. */ protected Job createSetXBitJob( Collection files, String name, String site ){ Job xBitJob = new Job(); TransformationCatalogEntry entry = null; // GridGateway jobManager = null; String eSiteHandle = site; List entries; try { entries= mTCHandle.lookup( Abstract.XBIT_TRANSFORMATION_NS, Abstract.CHANGE_XBIT_TRANSFORMATION, Abstract.XBIT_TRANSFORMATION_VERSION, eSiteHandle, TCType.INSTALLED); } catch (Exception e) { //non sensical catching mLogger.log("Unable to retrieve entries from TC " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL ); return null; } entry = ( entries == null ) ? this.defaultXBitTCEntry( eSiteHandle ): //try using a default one (TransformationCatalogEntry) entries.get(0); if( entry == null ){ //NOW THROWN AN EXCEPTION //should throw a TC specific exception StringBuffer error = new StringBuffer(); error.append("Could not find entry in tc for lfn "). append( Separator.combine( Abstract.XBIT_TRANSFORMATION_NS, Abstract.CHANGE_XBIT_TRANSFORMATION, Abstract.XBIT_TRANSFORMATION_VERSION )). 
append(" at site ").append( eSiteHandle ); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } SiteCatalogEntry eSite = mSiteStore.lookup( eSiteHandle ); /* JIRA PM-277 jobManager = eSite.selectGridGateway( GridGateway.JOB_TYPE.transfer ); */ StringBuffer arguments = new StringBuffer(); arguments.append( " +x " ); for( FileTransfer file : files ){ NameValue destURL = (NameValue)file.getDestURL(); arguments.append( " " ); arguments.append( Utility.getAbsolutePath(destURL.getValue()) ); } xBitJob.jobName = name; xBitJob.logicalName = Abstract.CHANGE_XBIT_TRANSFORMATION; xBitJob.namespace = Abstract.XBIT_TRANSFORMATION_NS; xBitJob.version = Abstract.XBIT_TRANSFORMATION_VERSION; xBitJob.dvName = Abstract.CHANGE_XBIT_TRANSFORMATION; xBitJob.dvNamespace = Abstract.XBIT_DERIVATION_NS; xBitJob.dvVersion = Abstract.XBIT_DERIVATION_VERSION; /* JIRA PM-277 xBitJob.setUniverse( GridGateway.JOB_TYPE.auxillary.toString()); xBitJob.globusScheduler = jobManager.getContact(); */ xBitJob.executable = entry.getPhysicalTransformation(); xBitJob.executionPool = eSiteHandle; xBitJob.strargs = arguments.toString(); xBitJob.jobClass = Job.CHMOD_JOB; xBitJob.jobID = name; //the profile information from the pool catalog needs to be //assimilated into the job. xBitJob.updateProfiles( eSite.getProfiles() ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. xBitJob.updateProfiles( entry ); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. xBitJob.updateProfiles( mProps ); return xBitJob; } /** * Creates a dirmanager job, that does a chmod on the file being staged. * The file being staged should be of type executable. Though no explicit * check is made for that. The staged file is the one whose X bit would be * set on execution of this job. The site at which job is executed, is * determined from the site associated with the destination URL. * * @param file the FileTransfer containing the file that has * to be X Bit Set. * @param name the name that has to be assigned to the job. * * @return the chmod job, else null if it is not able to be created * for some reason. */ protected Job createSetXBitJob(FileTransfer file, String name){ Job xBitJob = new Job(); TransformationCatalogEntry entry = null; GridGateway jobManager = null; NameValue destURL = (NameValue)file.getDestURL(); String eSiteHandle = destURL.getKey(); List entries; try { entries= mTCHandle.lookup( Abstract.XBIT_TRANSFORMATION_NS, Abstract.CHANGE_XBIT_TRANSFORMATION, Abstract.XBIT_TRANSFORMATION_VERSION, eSiteHandle, TCType.INSTALLED); } catch (Exception e) { //non sensical catching mLogger.log("Unable to retrieve entries from TC " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL ); return null; } entry = ( entries == null ) ? this.defaultXBitTCEntry( eSiteHandle ): //try using a default one (TransformationCatalogEntry) entries.get(0); if( entry == null ){ //NOW THROWN AN EXCEPTION //should throw a TC specific exception StringBuffer error = new StringBuffer(); error.append("Could not find entry in tc for lfn "). append( Separator.combine( Abstract.XBIT_TRANSFORMATION_NS, Abstract.CHANGE_XBIT_TRANSFORMATION, Abstract.XBIT_TRANSFORMATION_VERSION )). 
append(" at site ").append( eSiteHandle ); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } SiteCatalogEntry eSite = mSiteStore.lookup( eSiteHandle ); jobManager = eSite.selectGridGateway( GridGateway.JOB_TYPE.transfer ); String arguments = " -X -f " + Utility.getAbsolutePath(destURL.getValue()); xBitJob.jobName = name; xBitJob.logicalName = Abstract.CHANGE_XBIT_TRANSFORMATION; xBitJob.namespace = Abstract.XBIT_TRANSFORMATION_NS; xBitJob.version = Abstract.XBIT_TRANSFORMATION_VERSION; xBitJob.dvName = Abstract.CHANGE_XBIT_TRANSFORMATION; xBitJob.dvNamespace = Abstract.XBIT_DERIVATION_NS; xBitJob.dvVersion = Abstract.XBIT_DERIVATION_VERSION; // xBitJob.condorUniverse = "vanilla"; xBitJob.setUniverse( GridGateway.JOB_TYPE.auxillary.toString()); xBitJob.globusScheduler = jobManager.getContact(); xBitJob.executable = entry.getPhysicalTransformation(); xBitJob.executionPool = eSiteHandle; xBitJob.strargs = arguments; xBitJob.jobClass = Job.CREATE_DIR_JOB; xBitJob.jobID = name; //the profile information from the pool catalog needs to be //assimilated into the job. xBitJob.updateProfiles( eSite.getProfiles() ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. xBitJob.updateProfiles( entry ); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. xBitJob.updateProfiles( mProps ); return xBitJob; } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. * * @param site the site for which the default entry is required. * * * @return the default entry. */ private TransformationCatalogEntry defaultXBitTCEntry( String site ){ TransformationCatalogEntry defaultTCEntry = null; //construct the path to it StringBuffer path = new StringBuffer(); path.append( File.separator ).append( "bin" ).append( File.separator ). append( Abstract.XBIT_EXECUTABLE_BASENAME ); defaultTCEntry = new TransformationCatalogEntry( Abstract.XBIT_TRANSFORMATION_NS, Abstract.CHANGE_XBIT_TRANSFORMATION, Abstract.XBIT_TRANSFORMATION_VERSION ); defaultTCEntry.setPhysicalTransformation( path.toString() ); defaultTCEntry.setResourceId( site ); defaultTCEntry.setType( TCType.INSTALLED ); defaultTCEntry.setSysInfo( this.mSiteStore.lookup( site ).getSysInfo() ); //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug. as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.DEBUG_MESSAGE_LEVEL ); } return defaultTCEntry; } /** * Builds up a set of disabled chmod sites * * @param sites comma separated list of sites. * * @return a Set containing the site names. */ protected Set determineDisabledChmodSites( String sites ){ Set s = new HashSet(); //sanity checks if( sites == null || sites.length() == 0 ) { return s;} for( StringTokenizer st = new StringTokenizer( sites ); st.hasMoreTokens() ; ){ s.add( st.nextToken() ); } return s; } /** * Returns a boolean indicating whether to disable chmod job creation for * a site or not. 
* * @param site the name of the site * * @return boolean */ protected boolean disableChmodJobCreation( String site ){ return this.mChmodDisabledForAllSites || this.mDisabledChmodSites.contains( site ); } /** * Returns the priority for the transfer job as specified in the properties * file. * * @param job the Transfer job. * * @return the priority of the job as determined from properties, can be null * if invalid value passed or property not set. */ protected String getPriority(TransferJob job){ String priority; int type = job.jobClass; switch(type){ case Job.STAGE_IN_JOB: priority = mProps.getTransferStageInPriority(); break; case Job.STAGE_OUT_JOB: priority = mProps.getTransferStageOutPriority(); break; case Job.INTER_POOL_JOB: priority = mProps.getTransferInterPriority(); break; default: priority = null; } return priority; } /** * Constructs a condor variable in the condor profile namespace * associated with the job. Overrides any preexisting key values. * * @param job contains the job description. * @param key the key of the profile. * @param value the associated value. */ protected void construct(Job job, String key, String value){ job.condorVariables.checkKeyInNS(key,value); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/GUC.java0000644000175000017500000004146011757531137027431 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.implementation; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import java.util.List; import java.util.Collection; import java.util.Iterator; import java.io.File; import java.io.FileWriter; import java.util.ArrayList; /** * The implementation that is used to create transfer jobs that callout to * the new globus-url-copy client, that support multiple file transfers * *

* In order to use the transfer implementation provided by this class, *

 *        - the property pegasus.transfer.*.impl must be set to value GUC.
 * 
* *

* There should be an entry in the transformation catalog with the fully qualified * name globus::guc for all the sites where the workflow is run, * or on the local site in case of third party transfers. * * Pegasus can automatically construct the path to the globus-url-copy client if * the environment variable GLOBUS_LOCATION is specified in the site catalog for * the site. * *

* The arguments with which the client is invoked can be specified *

 *       - by specifying the property pegasus.transfer.arguments
 *       - associating the Pegasus profile key transfer.arguments
 * 
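 *
 * For example, a minimal illustrative setup (the value given for
 * pegasus.transfer.arguments and all URLs below are hypothetical):
 *
 *        pegasus.transfer.*.impl    = GUC
 *        pegasus.transfer.arguments = -p 4
 *
 * The stdin file handed to globus-url-copy via its -f option then lists
 * one source/destination URL pair per line, in the format written out by
 * writeStdInAndAssociateCredentials:
 *
 *        # local isi
 *        file:///scratch/f.a gsiftp://host.isi.edu/data/f.a
 *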
* * @author Karan Vahi * @version $Revision: 145 $ */ public class GUC extends AbstractMultipleFTPerXFERJob { /** * The transformation namespace for the transfer job. */ public static final String TRANSFORMATION_NAMESPACE = "globus"; /** * The name of the underlying transformation that is queried for in the * Transformation Catalog. */ public static final String TRANSFORMATION_NAME = "guc"; /** * The version number for the transfer job. */ public static final String TRANSFORMATION_VERSION = null; /** * The derivation namespace for for the transfer job. */ public static final String DERIVATION_NAMESPACE = "globus"; /** * The name of the underlying derivation. */ public static final String DERIVATION_NAME = "guc"; /** * The derivation version number for the transfer job. */ public static final String DERIVATION_VERSION = null; /** * A short description of the transfer implementation. */ public static final String DESCRIPTION = "GUC client that supports multiple file transfers. Available in globus 4.x series"; /** * The number of streams that each g-u-c process opens to do the ftp transfer. */ protected String mNumOfTXStreams; /** * Whether to use force option for the transfer executable or not. */ protected boolean mUseForce; /** * The overloaded constructor, that is called by the Factory to load the * class. * * @param bag the bag of Pegasus initialization objects. */ public GUC( PegasusBag bag ){ super( bag ); mNumOfTXStreams = mProps.getNumOfTransferStreams(); mUseForce = mProps.useForceInTransfer(); } /** * Return a boolean indicating whether the transfers to be done always in * a third party transfer mode. A value of false, results in the * direct or peer to peer transfers being done. *

* A value of false does not preclude third party transfers. They still can * be done, by setting the property "pegasus.transfer.*.thirdparty.sites". * * @return boolean indicating whether to always use third party transfers * or not. * */ public boolean useThirdPartyTransferAlways(){ return false; } /** * Returns a boolean indicating whether the transfer protocol being used by * the implementation preserves the X Bit or not while staging. * * @return boolean */ public boolean doesPreserveXBit(){ return false; } /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String getDescription(){ return this.DESCRIPTION; } /** * Retrieves the transformation catalog entry for the executable that is * being used to transfer the files in the implementation. * * @param siteHandle the handle of the site where the transformation is * to be searched. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * stage-in worker transfer * * @return the transformation catalog entry if found, else null. */ public TransformationCatalogEntry getTransformationCatalogEntry( String siteHandle, int jobClass ){ List tcentries = null; try { //namespace and version are null for time being tcentries = mTCHandle.lookup(this.TRANSFORMATION_NAMESPACE, this.TRANSFORMATION_NAME, this.TRANSFORMATION_VERSION, siteHandle, TCType.INSTALLED); } catch (Exception e) { mLogger.log( "Unable to retrieve entry from TC for " + getCompleteTCName() + " Cause:" + e, LogManager.DEBUG_MESSAGE_LEVEL ); } return ( tcentries == null ) ? this.defaultTCEntry( this.TRANSFORMATION_NAMESPACE, this.TRANSFORMATION_NAME, this.TRANSFORMATION_VERSION, siteHandle ): //try using a default one (TransformationCatalogEntry) tcentries.get(0); } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. * * @param namespace the namespace of the transfer transformation * @param name the logical name of the transfer transformation * @param version the version of the transfer transformation * * @param site the site for which the default entry is required. * * * @return the default entry. 
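 *
 * For example (the location is illustrative): with GLOBUS_LOCATION set to
 * /opt/globus in the site catalog, the constructed default entry points at
 * /opt/globus/bin/globus-url-copy and is registered back into the
 * transformation catalog.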
*/ protected TransformationCatalogEntry defaultTCEntry( String namespace, String name, String version, String site ){ TransformationCatalogEntry defaultTCEntry = null; mLogger.log( "Creating a default TC entry for " + Separator.combine( namespace, name, version ) + " at site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); //get the essential environment variables required to get //it to work correctly List envs = this.getEnvironmentVariables( site ); if( envs == null ){ //cannot create default TC mLogger.log( "Unable to create a default entry for as could not construct necessary environment " + Separator.combine( namespace, name, version ) , LogManager.DEBUG_MESSAGE_LEVEL ); //set the flag back to true return defaultTCEntry; } //get the GLOBUS_LOCATION PROFILE String globusLocation = null; for( Iterator it = envs.iterator(); it.hasNext(); ){ Profile p = ( Profile )it.next(); if( p.getProfileKey( ).equals( "GLOBUS_LOCATION" ) ){ globusLocation = p.getProfileValue(); break; } } //if home is still null if ( globusLocation == null ){ //cannot create default TC mLogger.log( "Unable to create a default entry for " + Separator.combine( namespace, name, version ) + " as GLOBUS_LOCATION is not set in Site Catalog" , LogManager.WARNING_MESSAGE_LEVEL ); //set the flag back to true return defaultTCEntry; } //remove trailing / if specified globusLocation = ( globusLocation.charAt( globusLocation.length() - 1 ) == File.separatorChar )? globusLocation.substring( 0, globusLocation.length() - 1 ): globusLocation; //construct the path to it StringBuffer path = new StringBuffer(); path.append( globusLocation ).append( File.separator ). append( "bin" ).append( File.separator ). append( "globus-url-copy" ); defaultTCEntry = new TransformationCatalogEntry( namespace, name, version ); defaultTCEntry.setPhysicalTransformation( path.toString() ); defaultTCEntry.setResourceId( site ); defaultTCEntry.setType( TCType.INSTALLED ); defaultTCEntry.addProfiles( envs ); defaultTCEntry.setSysInfo( this.mSiteStore.lookup( site ).getSysInfo() ); //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug. as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.DEBUG_MESSAGE_LEVEL ); } mLogger.log( "Created entry with path " + defaultTCEntry.getPhysicalTransformation(), LogManager.DEBUG_MESSAGE_LEVEL ); return defaultTCEntry; } /** * Returns the environment profiles that are required for the default * entry to sensibly work. * * @param site the site where the job is going to run. * * @return List of environment variables, else null in case where the * required environment variables could not be found. */ protected List getEnvironmentVariables( String site ){ List result = new ArrayList(2) ; //create the CLASSPATH from home String globus = mSiteStore.getEnvironmentVariable( site, "GLOBUS_LOCATION" ); if( globus == null ){ mLogger.log( "GLOBUS_LOCATION not set in site catalog for site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); return null; } //check for LD_LIBRARY_PATH String ldpath = mSiteStore.getEnvironmentVariable( site, "LD_LIBRARY_PATH" ); if ( ldpath == null ){ //construct a default LD_LIBRARY_PATH ldpath = globus; //remove trailing / if specified ldpath = ( ldpath.charAt( ldpath.length() - 1 ) == File.separatorChar )? 
ldpath.substring( 0, ldpath.length() - 1 ): ldpath; ldpath = ldpath + File.separator + "lib"; mLogger.log( "Constructed default LD_LIBRARY_PATH " + ldpath, LogManager.DEBUG_MESSAGE_LEVEL ); } //we have both the environment variables result.add( new Profile( Profile.ENV, "GLOBUS_LOCATION", globus) ); result.add( new Profile( Profile.ENV, "LD_LIBRARY_PATH", ldpath) ); return result; } /** * Returns the namespace of the derivation that this implementation * refers to. * * @return the namespace of the derivation. */ protected String getDerivationNamespace(){ return this.DERIVATION_NAMESPACE; } /** * Returns the logical name of the derivation that this implementation * refers to. * * @return the name of the derivation. */ protected String getDerivationName(){ return this.DERIVATION_NAME; } /** * Returns the version of the derivation that this implementation * refers to. * * @return the version of the derivation. */ protected String getDerivationVersion(){ return this.DERIVATION_VERSION; } /** * It constructs the arguments to the transfer executable that need to be passed * to the executable referred to in this transfer mode. * * @param job the object containing the transfer node. * @return the argument string */ protected String generateArgumentString( TransferJob job ) { StringBuffer sb = new StringBuffer(); if(job.vdsNS.containsKey( Pegasus.TRANSFER_ARGUMENTS_KEY ) ){ sb.append( job.vdsNS.removeKey(Pegasus.TRANSFER_ARGUMENTS_KEY) ); } else{ //just add the default -p option sb.append(" -p ").append( mNumOfTXStreams ); } //always append -cd option and verbose option sb.append( " -cd -vb" ); sb.append(" -f ").append( job.getStdIn() ); return sb.toString(); } /** * Makes sure the stdin is transferred by the Condor File Transfer * Mechanism. In addition, the stdin is set to null, after the file has * been marked for transfer by Condor File Transfer Mechanism. * * @param job the TransferJob that has been created. */ public void postProcess( TransferJob job ){ super.postProcess(job); File f = new File( mPOptions.getSubmitDirectory(), job.getStdIn() ); //add condor key transfer_input_files to transfer the file job.condorVariables.addIPFileForTransfer( f.getAbsolutePath() ); job.setStdIn( "" ); } /** * Writes to a FileWriter stream the stdin which goes into the magic script * via standard input * * @param job the transfer job. * @param writer the writer to the stdin file. * @param files Collection of FileTransfer objects containing * the information about sourceam fin and destURL's. * @param stagingSite the site where the data will be populated by first * level staging jobs. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * * @throws Exception */ protected void writeStdInAndAssociateCredentials(TransferJob job, FileWriter writer, Collection files, String stagingSite, int jobClass ) throws Exception { for(Iterator it = files.iterator();it.hasNext();){ FileTransfer ft = (FileTransfer) it.next(); NameValue source = ft.getSourceURL(); //we want to leverage multiple dests if possible NameValue dest = ft.getDestURL( true ); StringBuffer entry = new StringBuffer(); entry.append( "#" ).append( source.getKey() ).append( " " ).append( dest.getKey() ).append( "\n" ). 
append( source.getValue() ).append( " " ).append( dest.getValue() ).append( "\n" ); writer.write( entry.toString() ); writer.flush(); //associate any credential required , both with destination // and the source urls job.addCredentialType( source.getValue() ); job.addCredentialType( dest.getValue() ); } } /** * Returns the complete name for the transformation. * * @return the complete name. */ protected String getCompleteTCName(){ return Separator.combine(GUC.TRANSFORMATION_NAMESPACE, GUC.TRANSFORMATION_NAME, GUC.TRANSFORMATION_VERSION); } } ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/ImplementationFactory.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/ImplementationFactory.jav0000644000175000017500000001654111757531137033171 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.implementation; import edu.isi.pegasus.planner.transfer.Implementation; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The factory class that loads an appropriate Transfer Immplementation class, * as specified by the properties. * * @author Karan Vahi * @version $Revision: 4717 $ */ public class ImplementationFactory { /** * The default package where the implementations reside, which this factory * loads. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.transfer.implementation"; public static final String DEFAULT_TRANSFER_IMPLEMENTATION = "Transfer"; public static final String DEFAULT_SETUP_TRANSFER_IMPLEMENTATION = "Transfer"; /** * The constant designating the implementation be loaded for stage in jobs. */ public static final int TYPE_STAGE_IN = 0; /** * The constant designating the implementation be loaded for inter pool jobs. */ public static final int TYPE_STAGE_INTER = 1; /** * The constant designating the implementation be loaded for stage out jobs. */ public static final int TYPE_STAGE_OUT = 2; /** * The constant designating the implementation to be loaded for setup tx jobs. */ public static final int TYPE_SETUP = 3; /** * The constant designating the implementation to be loaded for symbolic tx jobs. */ public static final int TYPE_SYMLINK_STAGE_IN = 4; /** * Loads the implementing class corresponding to the type specified by the user. * The type is used to determine what property to be picked up from the * properties file. The properties object passed should not be null. * * @param bag the bag of initialization objects. * @param type the type. * * @return the instance of the class implementing this interface. * * @exception TransferImplementationFactoryException that nests any error that * might occur during the instantiation. 
* * @see #DEFAULT_PACKAGE_NAME */ public static Implementation loadInstance( PegasusBag bag, int type) throws TransferImplementationFactoryException{ String key = getPropertyKey(type); String implementingClass = bag.getPegasusProperties().getTransferImplementation( key ); if( implementingClass == null ){ //User did not define anything in the properties file. implementingClass = ( type == ImplementationFactory.TYPE_SETUP ) ? DEFAULT_SETUP_TRANSFER_IMPLEMENTATION: DEFAULT_TRANSFER_IMPLEMENTATION ; } return loadInstance( implementingClass, bag ); } /** * Loads the implementing class corresponding to the mode specified by the user * at runtime in the properties file. The properties object passed should not * be null. * * @param bag bag of initialization objects. * * @return the instance of the class implementing this interface. * * @exception TransferImplementationFactoryException that nests any error that * might occur during the instantiation. * * @see #DEFAULT_PACKAGE_NAME */ /*public static Implementation loadInstance( PegasusBag bag ) throws TransferImplementationFactoryException{ return loadInstance( bag.getPegasusProperties().getTransferImplementation(), bag ); }*/ /** * Loads the implementing class corresponding to the class. If the package * name is not specified with the class, then class is assumed to be * in the DEFAULT_PACKAGE. The properties object passed should not be null. * * @param className the name of the class that implements the mode.It can or * cannot be with the package name. Can be null to get * the factory to load the default implementation * * @param bag the bag of initialization objects. * * @return the instance of the class implementing this interface. * * @exception TransferImplementationFactoryException that nests any error that * might occur during the instantiation. * * @see #DEFAULT_PACKAGE_NAME */ private static Implementation loadInstance( String className, PegasusBag bag ) throws TransferImplementationFactoryException{ Implementation implementation = null; try{ //sanity check if ( bag.getPegasusProperties() == null) { throw new RuntimeException("Invalid properties passed"); } //prepend the package name className = (className.indexOf('.') == -1) ? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className : //load directly className; //try loading the class dynamically DynamicLoader dl = new DynamicLoader(className); Object argList[] = new Object[1]; argList[0] =bag; implementation = (Implementation) dl.instantiate(argList); } catch(Exception e){ throw new TransferImplementationFactoryException( "Instantiating Transfer Impelmentation ", className, e ); } return implementation; } /** * Returns the name of the property that needs to be loaded for a particular * type. * * @param type the type of implementation to be loaded. 
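 *
 * The mapping implemented below is:
 *
 *        TYPE_STAGE_IN          -  pegasus.transfer.stagein.impl
 *        TYPE_STAGE_INTER       -  pegasus.transfer.inter.impl
 *        TYPE_STAGE_OUT         -  pegasus.transfer.stageout.impl
 *        TYPE_SETUP             -  pegasus.transfer.setup.impl
 *        TYPE_SYMLINK_STAGE_IN  -  pegasus.transfer.symlink.impl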
* * @return the name of the property * @throws IllegalArgumentException */ private static String getPropertyKey(int type) throws IllegalArgumentException{ String property; if(type == TYPE_STAGE_IN){ property = "pegasus.transfer.stagein.impl"; } else if(type == TYPE_STAGE_INTER){ property = "pegasus.transfer.inter.impl"; } else if(type == TYPE_STAGE_OUT){ property = "pegasus.transfer.stageout.impl"; } else if( type == TYPE_SETUP ){ property = "pegasus.transfer.setup.impl"; } else if( type == TYPE_SYMLINK_STAGE_IN ){ property = "pegasus.transfer.symlink.impl"; } else{ throw new java.lang.IllegalArgumentException( "Invalid implementation type passed to factory " + type); } return property; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/TPTGUC.java0000644000175000017500000001014111757531137030011 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.implementation; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.namespace.Pegasus; import java.io.File; /** * The implementation that is used to create transfer jobs that callout to * the new globus-url-copy client, that support multiple file transfers * *

* In order to use the transfer implementation provided by this class, *

 *        - the property pegasus.transfer.*.impl must be set to value TPTGUC.
 * 
* *

* There should be an entry in the transformation catalog with the fully qualified * name globus::guc for all the sites where the workflow is run, * or on the local site in case of third party transfers. * * Pegasus can automatically construct the path to the globus-url-copy client if * the environment variable GLOBUS_LOCATION is specified in the site catalog for * the site. * *

* The arguments with which the client is invoked can be specified *

 *       - by specifying the property pegasus.transfer.arguments
 *       - associating the Pegasus profile key transfer.arguments
 * 
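 *
 * For example, a minimal illustrative properties snippet:
 *
 *        pegasus.transfer.*.impl = TPTGUC
 *
 * Because useThirdPartyTransferAlways() below returns true, every transfer
 * job created by this implementation runs in third party mode.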
* * @author Karan Vahi * @version $Revision: 4720 $ */ public class TPTGUC extends GUC { /** * The overloaded constructor, that is called by the Factory to load the * class. * * @param bag the bag of Pegasus initialization objects. */ public TPTGUC( PegasusBag bag ){ super( bag ); } /** * Return a boolean indicating whether the transfers to be done always in * a third party transfer mode. A value of false, results in the * direct or peer to peer transfers being done. *

* A value of false does not preclude third party transfers. They still can * be done, by setting the property "pegasus.transfer.*.thirdparty.sites". * * @return true always * */ public boolean useThirdPartyTransferAlways(){ return true; } /** * It constructs the arguments to the transfer executable that need to be passed * to the executable referred to in this transfer mode. * * @param job the object containing the transfer node. * @return the argument string */ protected String generateArgumentString( TransferJob job ) { StringBuffer sb = new StringBuffer(); if(job.vdsNS.containsKey( Pegasus.TRANSFER_ARGUMENTS_KEY ) ){ sb.append( job.vdsNS.removeKey(Pegasus.TRANSFER_ARGUMENTS_KEY) ); } else{ //just add the default -p option sb.append(" -p ").append( mNumOfTXStreams ); } //always append -cd option and verbose option sb.append( " -cd -vb" ); //specify the name of the stdin file on command line //since this transfer mode only executes on submit node //we can give full path to the stdin File f = new File( mPOptions.getSubmitDirectory(), job.getStdIn() ); sb.append( " -f " ).append( f.getAbsolutePath() ); return sb.toString(); } /** * Makes sure the stdin is transferred by the Condor File Transfer * Mechanism. In addition, the stdin is set to null, after the file has * been marked for transfer by Condor File Transfer Mechanism. * * @param job the TransferJob that has been created. */ public void postProcess( TransferJob job ){ super.postProcess(job); job.setStdIn( "" ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/Transfer.java0000644000175000017500000003404411757531137030577 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.implementation; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.classes.Job; import java.io.FileWriter; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.ArrayList; import java.io.File; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The implementation that creates transfer jobs referring to the python based * transfer script distributed with Pegasus since version 3.0 * *

* Transfer is distributed as part of the Pegasus worker package and can be found at * $PEGASUS_HOME/bin/pegasus-transfer. * *

* It leads to the addition of setup chmod jobs to the workflow, which appear * as parents of compute jobs in case the transfer implementation does not * preserve the X bit on the file being transferred. This is required for * staging of executables as part of the workflow. The setup jobs are only added * as children of the stage in jobs. *

* In order to use the transfer implementation provided by this class, the * property pegasus.transfer.*.impl must be set to * value Transfer. * * The arguments with which the pegasus-transfer client is invoked can be specified *

 *       - by specifying the property pegasus.transfer.arguments
 *       - associating the Pegasus profile key transfer.arguments
 * 
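 *
 * For example, a minimal illustrative properties snippet:
 *
 *        pegasus.transfer.*.impl = Transfer
 *
 * The stdin file handed to pegasus-transfer lists numbered source and
 * destination URL pairs in the format written out by
 * writeStdInAndAssociateCredentials (site handles and URLs below are
 * hypothetical):
 *
 *        # src 1 isi
 *        gsiftp://host.isi.edu/data/f.a
 *        # dst 1 local
 *        file:///scratch/f.a
 *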
* * @author Karan Vahi * @version $Revision: 4778 $ */ public class Transfer extends AbstractMultipleFTPerXFERJob { /** * The transformation namespace for the transfer job. */ public static final String TRANSFORMATION_NAMESPACE = "pegasus"; /** * The name of the underlying transformation that is queried for in the * Transformation Catalog. */ public static final String TRANSFORMATION_NAME = "transfer"; /** * The version number for the transfer job. */ public static final String TRANSFORMATION_VERSION = null; /** * The derivation namespace for for the transfer job. */ public static final String DERIVATION_NAMESPACE = "pegasus"; /** * The name of the underlying derivation. */ public static final String DERIVATION_NAME = "transfer"; /** * The derivation version number for the transfer job. */ public static final String DERIVATION_VERSION = "1.0"; /** * A short description of the transfer implementation. */ public static final String DESCRIPTION = "Python based Transfer Script"; /** * The executable basename for the transfer executable. */ public static final String EXECUTABLE_BASENAME = "pegasus-transfer"; /** * The overloaded constructor, that is called by the Factory to load the * class. * * @param bag the bag of initialization objects. */ public Transfer( PegasusBag bag ){ super( bag ); } /** * Return a boolean indicating whether the transfers to be done always in * a third party transfer mode. A value of false, results in the * direct or peer to peer transfers being done. *

* A value of false does not preclude third party transfers. They still can * be done, by setting the property "pegasus.transfer.*.thirdparty.sites". * * @return boolean indicating whether to always use third party transfers * or not. * */ public boolean useThirdPartyTransferAlways(){ return false; } /** * Returns a boolean indicating whether the transfer protocol being used by * the implementation preserves the X Bit or not while staging. * * @return boolean */ public boolean doesPreserveXBit(){ return false; } /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String getDescription(){ return Transfer.DESCRIPTION; } /** * Retrieves the transformation catalog entry for the executable that is * being used to transfer the files in the implementation. * * @param siteHandle the handle of the site where the transformation is * to be searched. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * stage-in worker transfer * * * @return the transformation catalog entry if found, else null. */ public TransformationCatalogEntry getTransformationCatalogEntry(String siteHandle, int jobClass ){ if( jobClass == Job.STAGE_IN_WORKER_PACKAGE_JOB && !siteHandle.equalsIgnoreCase( "local") ){ //PM-538 //construct an entry for the local site and transfer it. return this.defaultTCEntry( Transfer.TRANSFORMATION_NAMESPACE, Transfer.TRANSFORMATION_NAME, Transfer.TRANSFORMATION_VERSION, Transfer.EXECUTABLE_BASENAME, "local" ); } List tcentries = null; try { //namespace and version are null for time being tcentries = mTCHandle.lookup( Transfer.TRANSFORMATION_NAMESPACE, Transfer.TRANSFORMATION_NAME, Transfer.TRANSFORMATION_VERSION, siteHandle, TCType.INSTALLED); } catch (Exception e) { mLogger.log( "Unable to retrieve entry from TC for " + getCompleteTCName() + " Cause:" + e, LogManager.DEBUG_MESSAGE_LEVEL ); } TransformationCatalogEntry entry = ( tcentries == null ) ? //attempt to create a default entry on the basis of //PEGASUS_HOME defined in the site catalog this.defaultTCEntry( Transfer.TRANSFORMATION_NAMESPACE, Transfer.TRANSFORMATION_NAME, Transfer.TRANSFORMATION_VERSION, Transfer.EXECUTABLE_BASENAME, siteHandle ): //get what was returned in the transformation catalog (TransformationCatalogEntry) tcentries.get(0); return entry; } /** * An optional method that allows the derived classes to do their own * post processing on the the transfer job before it is returned to * the calling module. * * @param job the TransferJob that has been created. */ public void postProcess( TransferJob job ){ if( job.getJobType() == Job.STAGE_IN_WORKER_PACKAGE_JOB ){ //all stage worker jobs are classified as stage in jobs //for further use in the planner job.setJobType( Job.STAGE_IN_JOB ); if( !job.getSiteHandle().equalsIgnoreCase( "local" ) ){ //PM-538 //executable for remote stage worker jobs is transferred //from local site. job.condorVariables.setExecutableForTransfer(); } } } /** * Returns the environment profiles that are required for the default * entry to sensibly work. Tries to retrieve the following variables * *

     * PEGASUS_HOME
     * GLOBUS_LOCATION
     * LD_LIBRARY_PATH
     * 
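 *
 * For example (paths illustrative): with GLOBUS_LOCATION set to
 * /opt/globus in the site catalog and LD_LIBRARY_PATH unset, the method
 * falls back to the constructed default
 *
 *        LD_LIBRARY_PATH = /opt/globus/lib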
* * * @param site the site where the job is going to run. * * @return List of environment variables, else empty list if none are found */ protected List getEnvironmentVariables( String site ){ List result = new ArrayList(2) ; String pegasusHome = mSiteStore.getEnvironmentVariable( site, "PEGASUS_HOME" ); if( pegasusHome != null ){ //we have both the environment variables result.add( new Profile( Profile.ENV, "PEGASUS_HOME", pegasusHome ) ); } String globus = mSiteStore.getEnvironmentVariable( site, "GLOBUS_LOCATION" ); if( globus != null && globus.length() > 1 ){ //check for LD_LIBRARY_PATH String ldpath = mSiteStore.getEnvironmentVariable( site, "LD_LIBRARY_PATH" ); if ( ldpath == null ){ //construct a default LD_LIBRARY_PATH ldpath = globus; //remove trailing / if specified ldpath = ( ldpath.charAt( ldpath.length() - 1 ) == File.separatorChar )? ldpath.substring( 0, ldpath.length() - 1 ): ldpath; ldpath = ldpath + File.separator + "lib"; mLogger.log( "Constructed default LD_LIBRARY_PATH " + ldpath, LogManager.DEBUG_MESSAGE_LEVEL ); } //we have both the environment variables result.add( new Profile( Profile.ENV, "GLOBUS_LOCATION", globus) ); result.add( new Profile( Profile.ENV, "LD_LIBRARY_PATH", ldpath) ); } return result; } /** * Returns the namespace of the derivation that this implementation * refers to. * * @return the namespace of the derivation. */ protected String getDerivationNamespace(){ return Transfer.DERIVATION_NAMESPACE; } /** * Returns the logical name of the derivation that this implementation * refers to. * * @return the name of the derivation. */ protected String getDerivationName(){ return Transfer.DERIVATION_NAME; } /** * Returns the version of the derivation that this implementation * refers to. * * @return the version of the derivation. */ protected String getDerivationVersion(){ return Transfer.DERIVATION_VERSION; } /** * It constructs the arguments to the transfer executable that need to be passed * to the executable referred to in this transfer mode. * * @param job the object containing the transfer node. * @return the argument string */ protected String generateArgumentString(TransferJob job) { StringBuffer sb = new StringBuffer(); if(job.vdsNS.containsKey(Pegasus.TRANSFER_ARGUMENTS_KEY)){ sb.append( job.vdsNS.removeKey(Pegasus.TRANSFER_ARGUMENTS_KEY) ); } return sb.toString(); } /** * Writes to a FileWriter stream the stdin which goes into the magic script * via standard input * * @param job the transfer job * @param writer the writer to the stdin file. * @param files Collection of FileTransfer objects containing * the information about sourceam fin and destURL's. * @param stagingSite the site where the data will be populated by first * level staging jobs. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * * @throws Exception */ protected void writeStdInAndAssociateCredentials(TransferJob job, FileWriter writer, Collection files, String stagingSite, int jobClass ) throws Exception { int num = 1; for( Iterator it = files.iterator(); it.hasNext(); ){ FileTransfer ft = (FileTransfer) it.next(); NameValue source = ft.getSourceURL(); //we want to leverage multiple dests if possible NameValue dest = ft.getDestURL( true ); //write to the file one URL pair at a time StringBuffer urlPair = new StringBuffer( ); urlPair.append( "# " ).append( "src " ).append( num ).append( " " ).append( source.getKey() ).append( "\n" ). append( source.getValue() ).append( "\n" ). 
append( "# " ).append( "dst " ).append( num ).append( " " ).append( dest.getKey() ).append( "\n" ). append( dest.getValue() ).append( "\n" ); writer.write( urlPair.toString() ); writer.flush(); num++; //associate any credential required , both with destination // and the source urls job.addCredentialType( source.getValue() ); job.addCredentialType( dest.getValue() ); } } /** * Returns the complete name for the transformation. * * @return the complete name. */ protected String getCompleteTCName(){ return Separator.combine( Transfer.TRANSFORMATION_NAMESPACE, Transfer.TRANSFORMATION_NAME, Transfer.TRANSFORMATION_VERSION); } } ././@LongLink0000000000000000000000000000015300000000000011564 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/AbstractSingleFTPerXFERJob.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/AbstractSingleFTPerXFERJo0000644000175000017500000002721511757531137032661 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.implementation; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.transfer.SingleFTPerXFERJob; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import java.util.Collection; import java.util.HashSet; import java.util.Iterator; /** * An abstract implementation for implementations that can handle only a single * file transfer in a single file transfer job. * * @author Karan Vahi * @version $Revision: 4778 $ */ public abstract class AbstractSingleFTPerXFERJob extends Abstract implements SingleFTPerXFERJob { /** * The overloaded constructor, that is called by the Factory to load the * class. * * @param bag the bag of Pegasus initialization objects */ public AbstractSingleFTPerXFERJob( PegasusBag bag ) { super( bag ); } /** * Constructs a general transfer job that handles single transfers per * transfer job. There are appropriate callouts to generate the implementation * specific details. It throws an error if asked to create a transfer job * for more than one transfer. * * @param job the Job object for the job, in relation to which * the transfer node is being added. Either the transfer * node can be transferring this jobs input files to * the execution pool, or transferring this job's output * files to the output pool. * @param site the site where the transfer job should run. * @param files collection of FileTransfer objects * representing the data files and staged executables to be * transferred. 
* @param execFiles subset collection of the files parameter, that identifies * the executable files that are being transferred. * @param txJobName the name of transfer node. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * * @return the created TransferJob. */ public TransferJob createTransferJob( Job job, String site, Collection files, Collection execFiles, String txJobName, int jobClass) { if(files.size() > 1){ //log an error //should throw an exception! StringBuffer error = new StringBuffer(); error.append( "Transfer Implementation ").append( this.getDescription()). append(" supports single transfer per transfer job "); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } Iterator it = files.iterator(); FileTransfer ft = (FileTransfer)it.next(); TransferJob txJob = this.createTransferJob(job, site, ft,execFiles,txJobName,jobClass); //to get the file stat information we need to put //the files as output files of the transfer job txJob.outputFiles = new HashSet( files ); return txJob; } /** * Constructs a general transfer job that handles single transfers per * transfer job. There are appropriate callouts to generate the implementation * specific details. * * @param job the Job object for the job, in relation to which * the transfer node is being added. Either the transfer * node can be transferring this jobs input files to * the execution pool, or transferring this job's output * files to the output pool. * @param site the site where the transfer job should run. * @param file collection of FileTransfer objects * representing the data files and staged executables to be * transferred. * @param execFiles subset collection of the files parameter, that identifies * the executable files that are being transferred. * @param txJobName the name of transfer node. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * * @return the created TransferJob. */ public TransferJob createTransferJob( Job job, String site, FileTransfer file, Collection execFiles, String txJobName, int jobClass) { TransferJob txJob = new TransferJob(); SiteCatalogEntry ePool; GridGateway jobmanager; //site where the transfer is scheduled //to be run. For thirdparty site it makes //sense to schedule on the local host unless //explicitly designated to run TPT on remote site /* String tPool = mRefiner.isSiteThirdParty(job.getSiteHandle(),jobClass) ? //check if third party have to be run on remote site mRefiner.runTPTOnRemoteSite(job.getSiteHandle(),jobClass) ? job.getSiteHandle() : "local" :job.getSiteHandle(); */ String tPool = site; //the non third party site for the transfer job is //always the job execution site for which the transfer //job is being created. txJob.setNonThirdPartySite( job.getStagingSiteHandle() ); //we first check if there entry for transfer universe, //if no then go for globus ePool = mSiteStore.lookup( tPool ); txJob.jobName = txJobName; txJob.executionPool = tPool; txJob.setUniverse( GridGateway.JOB_TYPE.transfer.toString() ); TransformationCatalogEntry tcEntry = this.getTransformationCatalogEntry(tPool, jobClass ); if(tcEntry == null){ //should throw a TC specific exception StringBuffer error = new StringBuffer(); error.append( "Could not find entry in tc for lfn " ).append( getCompleteTCName() ). 
append(" at site " ).append( txJob.getSiteHandle()); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } txJob.namespace = tcEntry.getLogicalNamespace(); txJob.logicalName = tcEntry.getLogicalName(); txJob.version = tcEntry.getLogicalVersion(); txJob.dvName = this.getDerivationName(); txJob.dvNamespace = this.getDerivationNamespace(); txJob.dvVersion = this.getDerivationVersion(); //this should in fact only be set // for non third party pools /* JIRA PM-277 jobmanager = ePool.selectGridGateway( GridGateway.JOB_TYPE.transfer ); txJob.globusScheduler = (jobmanager == null) ? null : jobmanager.getContact(); */ txJob.jobClass = jobClass; txJob.jobID = job.jobName; txJob.stdErr = ""; txJob.stdOut = ""; txJob.executable = tcEntry.getPhysicalTransformation(); //the i/p and o/p files remain empty //as we doing just copying urls txJob.inputFiles = new HashSet(); txJob.outputFiles = new HashSet(); //no stdin file is written out //the profile information from the pool catalog needs to be //assimilated into the job. txJob.updateProfiles( ePool.getProfiles() ); //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 txJob.addNotifications( tcEntry ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. txJob.updateProfiles(tcEntry); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. txJob.updateProfiles(mProps); //take care of transfer of proxies this.checkAndTransferProxy(txJob); //take care of transfer of irods files this.checkAndTransferIrodsEnvFile(txJob); //apply the priority to the transfer job this.applyPriority(txJob); //constructing the arguments to transfer script //they only have to be incorporated after the //profile incorporation txJob.strargs = this.generateArgumentStringAndAssociateCredentials(txJob,file); if(execFiles != null){ //we need to add setup jobs to change the XBit super.addSetXBitJobs(job,txJob,execFiles); } return txJob; } /** * Returns the namespace of the derivation that this implementation * refers to. * * @return the namespace of the derivation. */ protected abstract String getDerivationNamespace(); /** * Returns the logical name of the derivation that this implementation * refers to. * * @return the name of the derivation. */ protected abstract String getDerivationName(); /** * Returns the version of the derivation that this implementation * refers to. * * @return the version of the derivation. */ protected abstract String getDerivationVersion(); /** * It constructs the arguments to the transfer executable that need to be passed * to the executable referred to in this transfer mode. * * @param job the job containing the transfer node. * @param file the FileTransfer that needs to be done. * @return the argument string */ protected abstract String generateArgumentStringAndAssociateCredentials( TransferJob job, FileTransfer file); /** * Returns the complete name for the transformation that the implementation * is using. * * @return the complete name. 
*/ protected abstract String getCompleteTCName(); } ././@LongLink0000000000000000000000000000015500000000000011566 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/AbstractMultipleFTPerXFERJob.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/AbstractMultipleFTPerXFER0000644000175000017500000004205311757531137032737 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.implementation; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.transfer.MultipleFTPerXFERJob; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.common.util.Separator; import java.io.File; import java.io.FileWriter; import java.util.Collection; import java.util.HashSet; import java.util.List; import edu.isi.pegasus.planner.classes.Profile; /** * An abstract implementation for implementations that can handle multiple * file transfers in a single file transfer job. * * @author Karan Vahi * @version $Revision: 4778 $ */ public abstract class AbstractMultipleFTPerXFERJob extends Abstract implements MultipleFTPerXFERJob { /** * The overloaded constructor, that is called by the Factory to load the * class. * * @param bag the bag of Pegasus initialization objects */ public AbstractMultipleFTPerXFERJob( PegasusBag bag ) { super( bag ); } /** * Constructs a general transfer job that handles multiple transfers per * transfer job. There are appropriate callouts to generate the implementation * specific details. * * @param job the Job object for the job, in relation to which * the transfer node is being added. Either the transfer * node can be transferring this jobs input files to * the execution pool, or transferring this job's output * files to the output pool. * @param site the site where the transfer job should run. * @param files collection of FileTransfer objects * representing the data files and staged executables to be * transferred. * @param execFiles subset collection of the files parameter, that identifies * the executable files that are being transferred. * @param txJobName the name of transfer node. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * stage-in worker transfer * * @return the created TransferJob. 
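 *
 * When executable files are among the transfers, the chmod setup nodes
 * are wired in by addSetXBitJobs of the Abstract base class, yielding the
 * dependency chain (job names hypothetical):
 *
 *        stage_in_j1  ->  chmod_preprocess_0  ->  preprocess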
*/ public TransferJob createTransferJob( Job job, String site, Collection files, Collection execFiles, String txJobName, int jobClass ) { TransferJob txJob = new TransferJob(); SiteCatalogEntry ePool; GridGateway jobmanager; //site where the transfer is scheduled //to be run. For thirdparty site it makes //sense to schedule on the local host unless //explicitly designated to run TPT on remote site /*String tPool = mRefiner.isSiteThirdParty(job.getSiteHandle(),jobClass) ? //check if third party have to be run on remote site mRefiner.runTPTOnRemoteSite(job.getSiteHandle(),jobClass) ? job.getSiteHandle() : "local" :job.getSiteHandle();*/ String tPool = site; //the non third party site for the transfer job is //always the job execution site for which the transfer //job is being created. txJob.setNonThirdPartySite( job.getStagingSiteHandle() ); //we first check if there is an entry for the transfer universe, //if not then go for globus ePool = mSiteStore.lookup( tPool ); txJob.jobName = txJobName; txJob.executionPool = tPool; txJob.setUniverse( GridGateway.JOB_TYPE.transfer.toString() ); TransformationCatalogEntry tcEntry = this.getTransformationCatalogEntry( tPool, jobClass ); if(tcEntry == null){ //should throw a TC specific exception StringBuffer error = new StringBuffer(); error.append( "Could not find entry in tc for lfn " ).append( getCompleteTCName() ). append(" at site " ).append( txJob.getSiteHandle()); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } txJob.namespace = tcEntry.getLogicalNamespace(); txJob.logicalName = tcEntry.getLogicalName(); txJob.version = tcEntry.getLogicalVersion(); txJob.dvName = this.getDerivationName(); txJob.dvNamespace = this.getDerivationNamespace(); txJob.dvVersion = this.getDerivationVersion(); //this should in fact only be set // for non third party pools /* JIRA PM-277 jobmanager = ePool.selectGridGateway( GridGateway.JOB_TYPE.transfer ); txJob.globusScheduler = (jobmanager == null) ? null : jobmanager.getContact(); */ txJob.jobClass = jobClass; txJob.jobID = job.jobName; txJob.stdErr = ""; txJob.stdOut = ""; txJob.executable = tcEntry.getPhysicalTransformation(); //the i/p files remain empty //as we are just copying URLs txJob.inputFiles = new HashSet(); //to get the file stat information we need to put //the files as output files of the transfer job txJob.outputFiles = new HashSet( files ); try{ //credentials are handled generically now when the stdin is //written out txJob.stdIn = prepareSTDINAndAssociateCredentials( txJob, files, job.getSiteHandle(), jobClass ); } catch (Exception e) { mLogger.log("Unable to write the stdIn file for job " + txJob.getCompleteTCName() + " " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); mLogger.log( "Files that were being written out " + files, LogManager.ERROR_MESSAGE_LEVEL ); } //the profile information from the pool catalog needs to be //assimilated into the job. txJob.updateProfiles( ePool.getProfiles() ); //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 txJob.addNotifications( tcEntry ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. txJob.updateProfiles(tcEntry); //the profile information from the properties file //is assimilated overriding the one from transformation //catalog. 
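//(the net precedence of profiles, lowest to highest, is thus: //site catalog, transformation catalog, properties file) 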
txJob.updateProfiles(mProps); //apply the priority to the transfer job this.applyPriority(txJob); //constructing the arguments to transfer script //they only have to be incorporated after the //profile incorporation txJob.strargs = this.generateArgumentString(txJob); if(execFiles != null && this.mAddNodesForSettingXBit ){ //we need to add setup jobs to change the XBit super.addSetXBitJobs(job,txJob,execFiles); } //a callout that allows the derived transfer implementation //classes to do their own specific stuff on the job this.postProcess( txJob ); return txJob; } /** * Returns a default TC entry to be used in case an entry is not found in the * transformation catalog. * * @param namespace the namespace of the transfer transformation * @param name the logical name of the transfer transformation * @param version the version of the transfer transformation * @param executableBasename the basename of the executable * @param site the site for which the default entry is required. * * @return the default entry. */ protected TransformationCatalogEntry defaultTCEntry( String namespace, String name, String version, String executableBasename, String site ){ TransformationCatalogEntry defaultTCEntry = null; //check if PEGASUS_HOME is set String home = mSiteStore.getPegasusHome( site ); mLogger.log( "Creating a default TC entry for " + Separator.combine( namespace, name, version ) + " at site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); //if home is still null if ( home == null ){ //cannot create default TC mLogger.log( "Unable to create a default entry for " + Separator.combine( namespace, name, version ) + " as PEGASUS_HOME or VDS_HOME is not set in Site Catalog" , LogManager.DEBUG_MESSAGE_LEVEL ); return defaultTCEntry; } //get the essential environment variables required to get //it to work correctly List envs = this.getEnvironmentVariables( site ); if( envs == null ){ //cannot create default TC mLogger.log( "Unable to create a default entry for " + Separator.combine( namespace, name, version ) + " as the necessary environment could not be constructed" , LogManager.DEBUG_MESSAGE_LEVEL ); return defaultTCEntry; } //remove trailing / if specified home = ( home.charAt( home.length() - 1 ) == File.separatorChar )? home.substring( 0, home.length() - 1 ): home; //construct the path to it StringBuffer path = new StringBuffer(); path.append( home ).append( File.separator ). append( "bin" ).append( File.separator ). append( executableBasename ); defaultTCEntry = new TransformationCatalogEntry( namespace, name, version ); defaultTCEntry.setPhysicalTransformation( path.toString() ); defaultTCEntry.setResourceId( site ); defaultTCEntry.setType( TCType.INSTALLED ); defaultTCEntry.addProfiles( envs ); defaultTCEntry.setSysInfo( this.mSiteStore.lookup( site ).getSysInfo() ); //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug, as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.DEBUG_MESSAGE_LEVEL ); } mLogger.log( "Created entry with path " + defaultTCEntry.getPhysicalTransformation(), LogManager.DEBUG_MESSAGE_LEVEL ); return defaultTCEntry; } /** * Returns the environment profiles that are required for the default * entry to sensibly work. 
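 * 
 * A minimal sketch of a concrete implementation (illustrative; it mirrors
 * the pattern used by the bundled T2 implementation, and assumes the site
 * catalog carries a GLOBUS_LOCATION environment profile for the site):
 * 
 *        protected List getEnvironmentVariables( String site ){
 *            List result = new ArrayList(1);
 *            String globus = mSiteStore.getEnvironmentVariable( site, "GLOBUS_LOCATION" );
 *            if( globus == null ){ return null; }
 *            result.add( new Profile( Profile.ENV, "GLOBUS_LOCATION", globus ) );
 *            return result;
 *        }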
* * @param site the site where the job is going to run. * * @return List of environment variables, else null in case where the * required environment variables could not be found. */ protected abstract List getEnvironmentVariables( String site ); /** * An optional method that allows the derived classes to do their own * post processing on the transfer job before it is returned to * the calling module. * * @param job the TransferJob that has been created. */ public void postProcess( TransferJob job ){ //JIRA PM-538 // change the type of stage worker job back to stage in job if( job.getJobType() == Job.STAGE_IN_WORKER_PACKAGE_JOB ){ job.setJobType( Job.STAGE_IN_JOB ); } } /** * Prepares the stdin for the transfer job. Usually involves writing out a * text file that Condor transfers to the remote end. Additionally, it associates * credentials with the job that are required for the transfers. * * @param job the transfer job. * @param files Collection of FileTransfer objects containing * the information about the source and destination URLs. * @param stagingSite the site where the data will be populated by first * level staging jobs. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * * @return the path to the prepared stdin file. * * @throws Exception in case of error. */ protected String prepareSTDINAndAssociateCredentials(TransferJob job, Collection files, String stagingSite, int jobClass )throws Exception{ //writing the stdin file FileWriter stdIn; String basename = job.getName() + ".in"; stdIn = new FileWriter(new File(mPOptions.getSubmitDirectory(), basename)); writeStdInAndAssociateCredentials(job, stdIn, files, stagingSite, jobClass ); //close the stdin stream stdIn.close(); return basename; } /** * Returns the namespace of the derivation that this implementation * refers to. * * @return the namespace of the derivation. */ protected abstract String getDerivationNamespace(); /** * Returns the logical name of the derivation that this implementation * refers to. * * @return the name of the derivation. */ protected abstract String getDerivationName(); /** * Returns the version of the derivation that this implementation * refers to. * * @return the version of the derivation. */ protected abstract String getDerivationVersion(); /** * Constructs the arguments that need to be passed to the transfer * executable referred to in this transfer mode. * * @param job the object containing the transfer node. * @return the argument string */ protected abstract String generateArgumentString(TransferJob job); /** * Writes to a FileWriter stream the stdin that is fed to the transfer * client via standard input * * @param job the transfer job. * @param stdIn the writer to the stdin file. * @param files Collection of FileTransfer objects containing * the information about the source and destination URLs. * @param stagingSite the site where the data will be populated by first * level staging jobs. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * * @throws Exception */ protected abstract void writeStdInAndAssociateCredentials( TransferJob job, FileWriter stdIn, Collection files, String stagingSite, int jobClass ) throws Exception ; /** * Returns the complete name for the transformation that the implementation * is using. * * @return the complete name. 
*/ protected abstract String getCompleteTCName(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/Stork.java0000644000175000017500000003062111757531137030112 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.implementation; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.JobManager; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.common.util.Separator; import java.util.Collection; import java.util.Iterator; import java.util.HashSet; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The implementation that creates transfer jobs referring to the stork data * placement scheduler that can handle only one transfer per job. * *

* Stork is directly invoked by DAGMAN. The appropriate Stork modules need to * be installed on the submit host. * *

* It leads to the addition of setup chmod jobs to the workflow, which appear * as parents of compute jobs in case the transfer implementation does not * preserve the X bit on the file being transferred. This is required for * staging of executables as part of the workflow. The setup jobs are only added * as children of the stage-in jobs. * *

* In order to use the transfer implementation provided by this class, the * property vds.transfer.*.impl must be set to * value Stork. * * @author Karan Vahi * @version $Revision: 4778 $ */ public class Stork extends AbstractSingleFTPerXFERJob { /** * The transformation namespace for the transfer job. */ public static final String TRANSFORMATION_NAMESPACE = null; /** * The name of the underlying transformation that is queried for in the * Transformation Catalog. */ public static final String TRANSFORMATION_NAME = "stork"; /** * The version number for the transfer job. */ public static final String TRANSFORMATION_VERSION = null; /** * The derivation namespace for the transfer job. */ public static final String DERIVATION_NAMESPACE = "condor"; /** * The name of the underlying derivation. */ public static final String DERIVATION_NAME = "stork"; /** * The derivation version number for the transfer job. */ public static final String DERIVATION_VERSION = "1.0"; /** * A short description of the transfer implementation. */ public static final String DESCRIPTION = "Stork Data Placement Scheduler that does only one transfer per invocation"; /** * The overloaded constructor, that is called by the Factory to load the * class. * * @param bag bag of initialization objects. */ public Stork( PegasusBag bag ){ super( bag ); } /** * Returns a boolean indicating whether the transfers are always to be done * in third party transfer mode. A value of false results in * direct or peer to peer transfers being done. *

* A value of false does not preclude third party transfers. They still can * be done, by setting the property "vds.transfer.*.thirdparty.sites". * * @return boolean indicating whether to always use third party transfers * or not. * */ public boolean useThirdPartyTransferAlways(){ return true; } /** * Returns a boolean indicating whether the transfer protocol being used by * the implementation preserves the X Bit or not while staging. * * @return boolean */ public boolean doesPreserveXBit(){ return false; } /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String getDescription(){ return this.DESCRIPTION; } /** * Constructs a general transfer job that handles single transfers per * transfer job. There are appropriate callouts to generate the implementation * specific details. It throws an error if asked to create a transfer job * for more than one transfer. * * @param job the Job object for the job, in relation to which * the transfer node is being added. Either the transfer * node can be transferring this job's input files to * the execution pool, or transferring this job's output * files to the output pool. * @param file the FileTransfer object * representing the data file or staged executable to be * transferred. * @param execFiles subset collection of the files parameter, that identifies * the executable files that are being transferred. * @param txJobName the name of transfer node. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * * @return the created TransferJob. */ public TransferJob createTransferJob(Job job, FileTransfer file, Collection execFiles, String txJobName, int jobClass) { TransferJob txJob = new TransferJob(); SiteInfo ePool; JobManager jobmanager; //Stork does the transfer. Hence set the transfer pool to stork String tPool = "stork"; //the non third party site for the transfer job is //always the job execution site for which the transfer //job is being created. txJob.setNonThirdPartySite( job.getStagingSiteHandle() ); //we first check if there is an entry for the transfer universe, //if not then go for globus // ePool = mSCHandle.getTXPoolEntry(tPool); txJob.jobName = txJobName; txJob.executionPool = tPool; // txJob.condorUniverse = "globus"; txJob.setUniverse( GridGateway.JOB_TYPE.transfer.toString() ); txJob.namespace = this.TRANSFORMATION_NAMESPACE; txJob.logicalName = this.TRANSFORMATION_NAME; txJob.version = null; txJob.dvName = this.getDerivationName(); txJob.dvNamespace = this.getDerivationNamespace(); txJob.dvVersion = this.getDerivationVersion(); //this should in fact only be set // for non third party pools // jobmanager = ePool.selectJobManager(this.TRANSFER_UNIVERSE,true); // txJob.globusScheduler = (jobmanager == null) ? // null : // jobmanager.getInfo(JobManager.URL); txJob.jobClass = jobClass; txJob.jobID = job.jobName; txJob.stdErr = ""; txJob.stdOut = ""; txJob.executable = null; //the i/p and o/p files remain empty //as we are just copying URLs txJob.inputFiles = new HashSet(); txJob.outputFiles = new HashSet(); //no stdin file is written out //the profile information from the pool catalog needs to be //assimilated into the job. // txJob.updateProfiles(mSCHandle.getPoolProfile(tPool)); txJob.updateProfiles( mSiteStore.lookup(tPool).getProfiles() ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. 
//txJob.updateProfiles(tcEntry); //the profile information from the properties file //is assimilated overriding the one from transformation //catalog. txJob.updateProfiles(mProps); //take care of transfer of proxies this.checkAndTransferProxy(txJob); //apply the priority to the transfer job this.applyPriority(txJob); //constructing the arguments to transfer script //they only have to be incorporated after the //profile incorporation txJob.strargs = this.generateArgumentStringAndAssociateCredentials(txJob,file); if(execFiles != null){ //we need to add setup jobs to change the XBit super.addSetXBitJobs(job,txJob,execFiles); } return txJob; } /** * Retrieves the transformation catalog entry for the executable that is * being used to transfer the files in the implementation. The entry * does not refer to any physical path. * * @param siteHandle the handle of the site where the transformation is * to be searched. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * stage-in worker transfer * * @return the transformation catalog entry if found, else null. */ public TransformationCatalogEntry getTransformationCatalogEntry(String siteHandle, int jobClass ){ return new TransformationCatalogEntry(this.TRANSFORMATION_NAMESPACE, this.TRANSFORMATION_NAME, this.TRANSFORMATION_VERSION); } /** * Returns the namespace of the derivation that this implementation * refers to. * * @return the namespace of the derivation. */ protected String getDerivationNamespace(){ return this.DERIVATION_NAMESPACE; } /** * Returns the logical name of the derivation that this implementation * refers to. * * @return the name of the derivation. */ protected String getDerivationName(){ return this.DERIVATION_NAME; } /** * Returns the version of the derivation that this implementation * refers to. * * @return the version of the derivation. */ protected String getDerivationVersion(){ return this.DERIVATION_VERSION; } /** * Constructs the arguments that need to be passed to the transfer * executable referred to in this transfer mode. * * @param job the transfer job that is being created. * @param file the FileTransfer that needs to be done. * @return the argument string */ protected String generateArgumentStringAndAssociateCredentials(TransferJob job,FileTransfer file){ StringBuffer sb = new StringBuffer(); if(job.vdsNS.containsKey(Pegasus.TRANSFER_ARGUMENTS_KEY)){ sb.append( job.vdsNS.removeKey(Pegasus.TRANSFER_ARGUMENTS_KEY) ); } String source = ((NameValue)file.getSourceURL()).getValue(); String dest = ((NameValue)file.getDestURL()).getValue(); sb.append( source ).append("\n"). append( dest ); job.addCredentialType( source ); job.addCredentialType( dest ); return sb.toString(); } /** * Returns the complete name for the transformation. * * @return the complete name. */ protected String getCompleteTCName(){ return Separator.combine(this.TRANSFORMATION_NAMESPACE, this.TRANSFORMATION_NAME, this.TRANSFORMATION_VERSION); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/T2.java0000644000175000017500000003211111757531137027271 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.implementation; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.classes.NameValue; import java.io.FileWriter; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.ArrayList; import java.io.File; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The implementation that creates transfer jobs referring to the T2 * executable distributed with Pegasus. T2 extends upon the multiple transfers * provided by the transfer executable, providing for conditional/optional transfers, * and per-URL retry in case alternative sources or destinations are specified. *

* The T2 client is generally invoked on the remote execution sites, unless the * user uses the thirdparty transfer option, in which case T2 is invoked on * the submit host. Hence there should be an entry in the transformation catalog * for the logical transformation T2 at the execution sites. * T2 is distributed as part of the Pegasus worker package and can be found at * $PEGASUS_HOME/bin/T2. *

* It leads to the addition of setup chmod jobs to the workflow, which appear * as parents of compute jobs in case the transfer implementation does not * preserve the X bit on the file being transferred. This is required for * staging of executables as part of the workflow. The setup jobs are only added * as children of the stage-in jobs. * *

* In order to use the transfer implementation provided by this class, *

 *        - the property pegasus.transfer.*.impl must be set to value T2.
 * 
* *

* There should be an entry in the transformation catalog with the fully qualified * name pegasus::T2 for all the sites where the workflow is run, * or on the local site in case of third party transfers. * *

* The arguments with which the client is invoked can be specified *

 *       - by specifying the property pegasus.transfer.arguments
 *       - associating the Pegasus profile key transfer.arguments
 * 
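 * 
 * For example, a typical properties snippet (values illustrative only)
 * would be:
 * 
 *        pegasus.transfer.*.impl    = T2
 *        pegasus.transfer.arguments = -P 4 -p 8
 * 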
* * @author Karan Vahi * @version $Revision: 4778 $ */ public class T2 extends AbstractMultipleFTPerXFERJob { /** * The transformation namespace for the transfer job. */ public static final String TRANSFORMATION_NAMESPACE = "pegasus"; /** * The name of the underlying transformation that is queried for in the * Transformation Catalog. */ public static final String TRANSFORMATION_NAME = "T2"; /** * The version number for the transfer job. */ public static final String TRANSFORMATION_VERSION = null; /** * The derivation namespace for the transfer job. */ public static final String DERIVATION_NAMESPACE = "pegasus"; /** * The name of the underlying derivation. */ public static final String DERIVATION_NAME = "T2"; /** * The derivation version number for the transfer job. */ public static final String DERIVATION_VERSION = "1.0"; /** * A short description of the transfer implementation. */ public static final String DESCRIPTION = "Pegasus T2"; /** * The number of g-u-c processes that are spawned to transfer the files in * one transfer job. */ protected String mNumOfTXProcesses; /** * The number of streams that each g-u-c process opens to do the ftp transfer. */ protected String mNumOfTXStreams; /** * Whether to use force option for the transfer executable or not. */ protected boolean mUseForce; /** * The overloaded constructor, that is called by the Factory to load the * class. * * @param bag the bag of initialization objects. */ public T2( PegasusBag bag ){ super( bag ); mNumOfTXProcesses = mProps.getNumOfTransferProcesses(); mNumOfTXStreams = mProps.getNumOfTransferStreams(); mUseForce = mProps.useForceInTransfer(); } /** * Returns a boolean indicating whether the transfers are always to be done * in third party transfer mode. A value of false results in * direct or peer to peer transfers being done. *

* A value of false does not preclude third party transfers. They still can * be done, by setting the property "pegasus.transfer.*.thirdparty.sites". * * @return boolean indicating whether to always use third party transfers * or not. */ public boolean useThirdPartyTransferAlways(){ return false; } /** * Returns a boolean indicating whether the transfer protocol being used by * the implementation preserves the X Bit or not while staging. * * @return boolean */ public boolean doesPreserveXBit(){ return false; } /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String getDescription(){ return T2.DESCRIPTION; } /** * Retrieves the transformation catalog entry for the executable that is * being used to transfer the files in the implementation. * * @param siteHandle the handle of the site where the transformation is * to be searched. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * stage-in worker transfer * * @return the transformation catalog entry if found, else null. */ public TransformationCatalogEntry getTransformationCatalogEntry(String siteHandle, int jobClass ){ List tcentries = null; try { //namespace and version are null for time being tcentries = mTCHandle.lookup(T2.TRANSFORMATION_NAMESPACE, T2.TRANSFORMATION_NAME, T2.TRANSFORMATION_VERSION, siteHandle, TCType.INSTALLED); } catch (Exception e) { mLogger.log( "Unable to retrieve entry from TC for " + getCompleteTCName() + " Cause:" + e, LogManager.DEBUG_MESSAGE_LEVEL ); } return ( tcentries == null ) ? this.defaultTCEntry( T2.TRANSFORMATION_NAMESPACE, T2.TRANSFORMATION_NAME, T2.TRANSFORMATION_VERSION, T2.TRANSFORMATION_NAME, siteHandle ): //try using a default one (TransformationCatalogEntry) tcentries.get(0); } /** * Returns the namespace of the derivation that this implementation * refers to. * * @return the namespace of the derivation. */ protected String getDerivationNamespace(){ return T2.DERIVATION_NAMESPACE; } /** * Returns the logical name of the derivation that this implementation * refers to. * * @return the name of the derivation. */ protected String getDerivationName(){ return T2.DERIVATION_NAME; } /** * Returns the version of the derivation that this implementation * refers to. * * @return the version of the derivation. */ protected String getDerivationVersion(){ return T2.DERIVATION_VERSION; } /** * Constructs the arguments that need to be passed to the transfer * executable referred to in this transfer mode. * * @param job the object containing the transfer node. * @return the argument string */ protected String generateArgumentString(TransferJob job) { StringBuffer sb = new StringBuffer(); if(job.vdsNS.containsKey(Pegasus.TRANSFER_ARGUMENTS_KEY)){ sb.append( job.vdsNS.removeKey(Pegasus.TRANSFER_ARGUMENTS_KEY) ); } else{ sb.append(" -P ").append(mNumOfTXProcesses). append(" -p ").append(mNumOfTXStreams); sb = (this.mUseForce)? sb.append(" -f ") : sb; } sb.append(" base-uri se-mount-point"); return sb.toString(); } /** * Writes to a FileWriter stream the stdin that T2 takes via standard input. * * @param job the transfer job. * @param writer the writer to the stdin file. * @param files Collection of FileTransfer objects containing * the information about the source and destination URLs. * @param stagingSite the site where the data will be populated by first * level staging jobs. * @param jobClass the job Class for the newly added job. 
Can be one of the * following: * stage-in * stage-out * inter-pool transfer * * @see org.griphyn.cPlanner.classes.FileTransfer#toString() * @throws java.lang.Exception */ protected void writeStdInAndAssociateCredentials( TransferJob job, FileWriter writer, Collection files, String stagingSite, int jobClass ) throws Exception { for(Iterator it = files.iterator();it.hasNext();){ FileTransfer ft = (FileTransfer)it.next(); //the FileTransfer object writes out in T2 compatible format writer.write(ft.toString()); writer.write("\n"); writer.flush(); NameValue source = ft.getSourceURL(); //we want to leverage multiple dests if possible NameValue dest = ft.getDestURL( true ); //associate any credential required , both with destination // and the source urls job.addCredentialType( source.getValue() ); job.addCredentialType( dest.getValue() ); } } /** * Returns the complete name for the transformation. * * @return the complete name. */ protected String getCompleteTCName(){ return Separator.combine(T2.TRANSFORMATION_NAMESPACE, T2.TRANSFORMATION_NAME, T2.TRANSFORMATION_VERSION); } /** * Returns the environment profiles that are required for the default * entry to sensibly work. * * @param site the site where the job is going to run. * * @return List of environment variables, else null in case where the * required environment variables could not be found. */ protected List getEnvironmentVariables( String site ){ List result = new ArrayList(2) ; //create the CLASSPATH from home String globus = mSiteStore.getEnvironmentVariable( site, "GLOBUS_LOCATION" ); if( globus == null ){ mLogger.log( "GLOBUS_LOCATION not set in site catalog for site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); return null; } //check for LD_LIBRARY_PATH String ldpath = mSiteStore.getEnvironmentVariable( site, "LD_LIBRARY_PATH" ); if ( ldpath == null ){ //construct a default LD_LIBRARY_PATH ldpath = globus; //remove trailing / if specified ldpath = ( ldpath.charAt( ldpath.length() - 1 ) == File.separatorChar )? ldpath.substring( 0, ldpath.length() - 1 ): ldpath; ldpath = ldpath + File.separator + "lib"; mLogger.log( "Constructed default LD_LIBRARY_PATH " + ldpath, LogManager.DEBUG_MESSAGE_LEVEL ); } //we have both the environment variables result.add( new Profile( Profile.ENV, "GLOBUS_LOCATION", globus) ); result.add( new Profile( Profile.ENV, "LD_LIBRARY_PATH", ldpath) ); return result; } } ././@LongLink0000000000000000000000000000016700000000000011571 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/TransferImplementationFactoryException.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/implementation/TransferImplementationFac0000644000175000017500000000710411757531137033174 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.implementation; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Transfer Implementations. 
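 * 
 * For example (illustrative), a factory may wrap an instantiation failure
 * as:
 * 
 *        throw new TransferImplementationFactoryException(
 *            "Unable to instantiate Transfer Implementation ",
 *            classname, e );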
* * @author Karan Vahi * @version $Revision: 2258 $ */ public class TransferImplementationFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Transfer Implementation"; /** * Constructs a TransferImplementationFactoryException with the specified * detail message. The associated classname is set to the value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public TransferImplementationFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a TransferImplementationFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public TransferImplementationFactoryException( String msg, String classname) { super( msg , classname ); } /** * Constructs a TransferImplementationFactoryException with the * specified detailed message and a cause. The associated classname is set * to the value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public TransferImplementationFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a TransferImplementationFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public TransferImplementationFactoryException(String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/TPT.java0000644000175000017500000004152311757531137024435 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import java.util.Iterator; import java.util.TreeMap; import java.util.Map; import java.util.Set; import java.util.HashSet; import java.util.StringTokenizer; /** * A common class that builds up the third party state for the sites from * the properties file. * * * @author Karan Vahi * @version $Revision: 2567 $ */ public class TPT { /** * The constant to apply to all sites. 
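 * 
 * For example (illustrative), setting the property
 * 
 *        pegasus.transfer.*.thirdparty.sites = *
 * 
 * marks transfers for every site as third party, since the state recorded
 * against this constant is applied to all sites in buildState().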
*/ public static final String ALL_SITES = "*"; /** * The property name to get the sites for which all transfers need to * be TPT. */ public static final String ALL_TPT_PROPERTY = "pegasus.transfer.*.thirdparty.sites"; /** * The property name to get the sites for which stage-in transfers need to * be TPT. */ public static final String STAGE_IN_TPT_PROPERTY = "pegasus.transfer.stagein.thirdparty.sites"; /** * The property name to get the sites for which inter site transfers need * to be TPT. */ public static final String INTER_TPT_PROPERTY = "pegasus.transfer.inter.thirdparty.sites"; /** * The property name to get the sites for which stage-out transfers need to * be TPT. */ public static final String STAGE_OUT_TPT_PROPERTY = "pegasus.transfer.stageout.thirdparty.sites"; /** * The property name to get the sites for which all TPT transfers need to * be executed on the remote site. */ public static final String ALL_TPT_REMOTE_PROPERTY = "pegasus.transfer.*.thirdparty.remote"; /** * The property name to get the sites for which stage-in TPT transfers need to * be executed on the remote site. */ public static final String STAGE_IN_TPT_REMOTE_PROPERTY = "pegasus.transfer.stagein.thirdparty.remote"; /** * The property name to get the sites for which inter site TPT transfers need to * be executed on the remote site. */ public static final String INTER_TPT_REMOTE_PROPERTY = "pegasus.transfer.inter.thirdparty.remote"; /** * The property name to get the sites for which stage-out TPT transfers need to * be executed on the remote site. */ public static final String STAGE_OUT_TPT_REMOTE_PROPERTY = "pegasus.transfer.stageout.thirdparty.remote"; /** * An internal table that maps third party transfer type to the corresponding * property. */ private static Map mPropertyTable; /** * The handle to the properties object holding the properties relevant to * Pegasus. */ private PegasusProperties mProps; /** * The handle to the logging object. */ private LogManager mLogger; /** * The map indexed by site name, that contains the state for all the sites. */ private Map mStateMap; /** * Singleton access to the type table * Contains the mapping of a property to the third party transfer type * * @return map */ private static Map propertyTable(){ //singleton access if (mPropertyTable == null) { mPropertyTable = new TreeMap(); mPropertyTable.put(new Integer(TPTState.STAGE_IN_TPT_TYPE), STAGE_IN_TPT_PROPERTY); mPropertyTable.put(new Integer(TPTState.INTER_TPT_TYPE), INTER_TPT_PROPERTY); mPropertyTable.put(new Integer(TPTState.STAGE_OUT_TPT_TYPE), STAGE_OUT_TPT_PROPERTY); mPropertyTable.put(new Integer(TPTState.ALL_TPT_TYPE), ALL_TPT_PROPERTY); mPropertyTable.put(new Integer(TPTState.STAGE_IN_TPT_REMOTE), STAGE_IN_TPT_REMOTE_PROPERTY); mPropertyTable.put(new Integer(TPTState.INTER_TPT_REMOTE), INTER_TPT_REMOTE_PROPERTY); mPropertyTable.put(new Integer(TPTState.STAGE_OUT_TPT_REMOTE), STAGE_OUT_TPT_REMOTE_PROPERTY); mPropertyTable.put(new Integer(TPTState.ALL_TPT_REMOTE), ALL_TPT_REMOTE_PROPERTY); } return mPropertyTable; } /** * The default constructor. */ public TPT() { mProps = PegasusProperties.getInstance(); mLogger = LogManagerFactory.loadSingletonInstance( ); mStateMap = new TreeMap(); } /** * The overloaded constructor. * * @param properties handle to the properties required. */ public TPT(PegasusProperties properties) { mProps = properties; mLogger = LogManagerFactory.loadSingletonInstance( properties ); mStateMap = new TreeMap(); } /** * Builds up the third party state for all the sites. 
This reflects what is * set in the properties file. */ public void buildState(){ String site; Set sites; //build for stagein transfers buildState(TPTState.STAGE_IN_TPT_TYPE); buildState(TPTState.STAGE_IN_TPT_REMOTE); //build for inter site transfers buildState(TPTState.INTER_TPT_TYPE); buildState(TPTState.INTER_TPT_REMOTE); //build for stage out transfers buildState(TPTState.STAGE_OUT_TPT_TYPE); buildState(TPTState.STAGE_OUT_TPT_REMOTE); //build for all transfers buildState(TPTState.ALL_TPT_TYPE); buildState(TPTState.ALL_TPT_REMOTE); //put the all sites (site = *) entry TPTState allState; if(containsKey(ALL_SITES)){ allState = get(ALL_SITES); } else{ allState = new TPTState(); put(ALL_SITES,allState); } if(allState.getState() != 0x0){ //apply the state to all sites for(Iterator it = mStateMap.values().iterator();it.hasNext();){ TPTState state = (TPTState)it.next(); state.set(allState.getState()); } } } /** * Adds to the existing state table, state information for a particular * type of transfers. * * @param type the type of transfer. */ private void buildState(int type){ String property = (String)propertyTable().get(new Integer(type)); Set sites = getThirdPartySites( (type > TPTState.ALL_TPT_TYPE)? mProps.getThirdPartySitesRemote(property): mProps.getThirdPartySites(property) ); String site; for(Iterator it = sites.iterator();it.hasNext();){ site = (String)it.next(); TPTState state = containsKey(site)? get(site): new TPTState(); state.set(type); put(site, state); } } /** * Returns a boolean indicating whether to use third party transfers for * stage-in transfers or not. * * @return boolean */ public boolean stageInThirdParty(String site){ return containsKey(site)? get(site).get(TPTState.STAGE_IN_TPT_TYPE): //return the value for all sites get(ALL_SITES).get(TPTState.STAGE_IN_TPT_TYPE); } /** * Returns a boolean indicating whether to use third party transfers for * inter site transfers or not. * * @return boolean */ public boolean interThirdParty(String site){ return containsKey(site)? get(site).get(TPTState.INTER_TPT_TYPE): //return the value for all sites get(ALL_SITES).get(TPTState.INTER_TPT_TYPE); } /** * Returns a boolean indicating whether to use third party transfers for * stage-out transfers or not. * * @return boolean */ public boolean stageOutThirdParty(String site){ return containsKey(site)? get(site).get(TPTState.STAGE_OUT_TPT_TYPE): //return the value for all sites get(ALL_SITES).get(TPTState.STAGE_OUT_TPT_TYPE); } /** * Returns a boolean indicating whether to execute third party transfers for * stage-in on remote site or not. * * @return boolean */ public boolean stageInThirdPartyRemote(String site){ return containsKey(site)? get(site).get(TPTState.STAGE_IN_TPT_REMOTE): //return the value for all sites get(ALL_SITES).get(TPTState.STAGE_IN_TPT_REMOTE); } /** * Returns a boolean indicating whether to execute third party transfers for * inter site on remote site or not. * * @return boolean */ public boolean interThirdPartyRemote(String site){ return containsKey(site)? get(site).get(TPTState.INTER_TPT_REMOTE): //return the value for all sites get(ALL_SITES).get(TPTState.INTER_TPT_REMOTE); } /** * Returns a boolean indicating whether to execute third party transfers for * stage-out on remote site or not. * * @return boolean */ public boolean stageOutThirdPartyRemote(String site){ return containsKey(site)? 
get(site).get(TPTState.STAGE_OUT_TPT_REMOTE): //return the value for all sites get(ALL_SITES).get(TPTState.STAGE_OUT_TPT_REMOTE); } /** * Prints out the third party state for the various sites. */ public void print(){ StringBuffer sb = new StringBuffer(); TPTState allSitesState = null; Object key; sb.append("Site | SI_TPT_R, INTER_TPT_R, SO_TPT_R, SI_TPT, IN_TPT , SO_TPT"); for(Iterator it = mStateMap.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = entry.getKey(); if(key.equals(ALL_SITES)){ //store value for printing in the end allSitesState = (TPTState)entry.getValue(); } else{ sb.append('\n').append(key).append(" | ").append(entry.getValue()); } } if(allSitesState != null){ sb.append('\n').append(ALL_SITES).append(" ").append(" | ").append(allSitesState); } System.out.println(sb.toString()); } /** * Returns whether there is an entry for a particular site or not. * * @param site the site handle for a site. * * @return boolean */ private boolean containsKey(String site){ return mStateMap.containsKey(site); } /** * Inserts an entry in to the State Map, that maintains state of various * sites. * * @param site the site handle for a site. * @param state the thirdparty state for the site. */ private void put(String site,TPTState state){ mStateMap.put(site, state); } /** * Returns the TPT state for a particular site. * * @param site the site handle for the site. * @return state the third party state for the site if there is an entry, * else null. */ private TPTState get(String site){ Object state = mStateMap.get(site); return (state == null)?null:(TPTState)state; } /** * Returns a set of third party sites. An empty set is returned if value is * null. * * @param value the value in the properties file. * * @return Set containing the names of the pools. */ private Set getThirdPartySites(String value) { HashSet set = new HashSet(); String site; if (value == null) { return set; } for (StringTokenizer st = new StringTokenizer(value, ",");st.hasMoreTokens();){ site = (String) st.nextToken(); /* mLogger.log(site + " is a third party enabled site " + "for " + desc + " transfers", LogManager.DEBUG_MESSAGE_LEVEL); */ set.add(site); } return set; } /** * An inner class that holds the third party state for a particular site. * It designates whether a transfer needs to be third party or not, and in * addition whether it needs to be executed remotely or locally. */ private class TPTState{ /** * The constant to denote that a stage-in transfer is to be third party. */ public static final int STAGE_IN_TPT_TYPE= 0x1; //000001 /** * The constant to denote that an inter site transfer is to be third party. */ public static final int INTER_TPT_TYPE = 0x2; //000010 /** * The constant to denote that a stage-out transfer is to be third party. */ public static final int STAGE_OUT_TPT_TYPE = 0x4;//000100 /** * The constant to denote that all transfers are to be third party. */ public static final int ALL_TPT_TYPE = 0x7; //000111 /** * The constant to denote that a stage-in transfer is to be executed * on the remote site. */ public static final int STAGE_IN_TPT_REMOTE= 0x8; //001000 /** * The constant to denote that an inter site transfer is to be executed * on the remote site. */ public static final int INTER_TPT_REMOTE = 0x10; //010000 /** * The constant to denote that a stage-out transfer is to be executed * on the remote site. */ public static final int STAGE_OUT_TPT_REMOTE = 0x20;//100000 /** * The constant to denote that all transfers are to be executed * on the remote site. 
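 * 
 * (For illustration: this mask is simply the bitwise OR of the three
 * remote flags above, i.e. 0x8 | 0x10 | 0x20 == 0x38.)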
*/ public static final int ALL_TPT_REMOTE = 0x38; //111000 /** * Stores the state as an integer. */ private int mState; /** * The default constructor. */ public TPTState(){ mState = 0x0; } /** * Returns the state. * * @return the state as an int */ public int getState(){ return mState; } /** * Sets a type of transfer to be third party. * * @param type the type of transfer to be set TPT */ public void set(int type){ //no type checking for time being mState = mState | type; } /** * Returns a boolean indicating whether the attribute passed is set * in the transfer state or not. * The attribute types are as constants in this class. * * @param type the attribute type. */ public boolean get(int type){ return ((mState & type) == type); } /** * Returns a textual description of the state as * (stageinRemote,interRemote,stageoutRemote,stageinTPT,interTPT,stageOutTPT). * * @return the textual description. */ public String toString(){ StringBuffer sb = new StringBuffer(36); sb.append('('). append(this.get(this.STAGE_IN_TPT_REMOTE)).append(" ").append(','). append(this.get(this.INTER_TPT_REMOTE)).append(" ").append(','). append(this.get(this.STAGE_OUT_TPT_REMOTE)).append(" ").append(','). append(this.get(this.STAGE_IN_TPT_TYPE)).append(" ").append(','). append(this.get(this.INTER_TPT_TYPE)).append(" ").append(','). append(this.get(this.STAGE_OUT_TPT_TYPE)). append(')'); return sb.toString(); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/MultipleFTPerXFERJobRefiner.java0000644000175000017500000001172411757531137031115 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.transfer.implementation.ImplementationFactory; import edu.isi.pegasus.planner.transfer.implementation.TransferImplementationFactoryException; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The refiner interface, that determines the functions that need to be * implemented to add various types of transfer nodes to the workflow. * The multiple in the name indicates that the refiner works only with the * implementation that handles multiple file transfer per transfer job. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2572 $ */ public abstract class MultipleFTPerXFERJobRefiner extends AbstractRefiner { /** * The overloaded constructor. * * @param dag the workflow to which transfer nodes need to be added. * @param bag the bag of initialization objects. */ public MultipleFTPerXFERJobRefiner( ADag dag, PegasusBag bag ){ super( dag, bag ); } /** * Loads the appropriate implementations that is required by this refinement * strategy for different types of transfer jobs. It calls to the factory * method to load the appropriate Implementor. * * Loads the implementing class corresponding to the mode specified by the user * at runtime in the properties file. The properties object passed should not * be null. 
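 * 
 * Each transfer type is loaded via the factory; a sketch of the underlying
 * call pattern (as used in the method body below) is:
 * 
 *        Implementation stageIn = ImplementationFactory.loadInstance(
 *            bag, ImplementationFactory.TYPE_STAGE_IN );
 *        stageIn.setRefiner( this );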
* * @param bag the bag of initialization objects. * * @exception TransferImplementationFactoryException that nests any error that * might occur during the instantiation. * @exception ClassCastException in case an incompatible implementation is * loaded */ public void loadImplementations( PegasusBag bag ) throws TransferImplementationFactoryException{ //load this.mTXStageInImplementation = ImplementationFactory.loadInstance( bag, ImplementationFactory.TYPE_STAGE_IN); this.mTXStageInImplementation.setRefiner(this); checkCompatibility(this.mTXStageInImplementation); this.mTXInterImplementation = ImplementationFactory.loadInstance( bag, ImplementationFactory.TYPE_STAGE_INTER); this.mTXInterImplementation.setRefiner(this); checkCompatibility(this.mTXInterImplementation); this.mTXStageOutImplementation = ImplementationFactory.loadInstance( bag, ImplementationFactory.TYPE_STAGE_OUT); this.mTXStageOutImplementation.setRefiner(this); checkCompatibility(this.mTXStageOutImplementation); this.mTXSymbolicLinkImplementation = ImplementationFactory.loadInstance( bag, ImplementationFactory.TYPE_SYMLINK_STAGE_IN ); this.mTXSymbolicLinkImplementation.setRefiner( this ); checkCompatibility( this.mTXSymbolicLinkImplementation ); //log config messages super.logConfigMessages(); } /** * Checks whether the implementation loaded is compatible with the refiner. * If not, throws a ClassCastException. * * @param implementation the implementation whose compatibility needs to * be checked. * * @exception ClassCastException in case the implementation is incompatible. */ private void checkCompatibility(Implementation implementation) throws ClassCastException{ //check if refiner loaded is of special type boolean condition1 = !this.getClass().getName(). equalsIgnoreCase("org.griphyn.cPlanner.transfer.refiner.Chain"); //check if implementation loaded is of right type if(condition1 && !(implementation instanceof MultipleFTPerXFERJob)){ throw new ClassCastException("Wrong type of transfer implementation loaded " + implementation.getDescription() + " for refiner " + this.getDescription()); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/Implementation.java0000644000175000017500000002474411757531137026755 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import java.util.Collection; import edu.isi.pegasus.planner.classes.FileTransfer; /** * The interface defines the functions that a particular Transfer Implementation * should implement. The functions deal with the creation of a TransferJob that * can transfer files using the transfer tool to which it refers. 
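 * 
 * A hypothetical custom implementation would normally extend one of the
 * abstract base classes in the implementation sub-package rather than
 * implement this interface directly; an abbreviated sketch (class name
 * illustrative, remaining abstract callouts as in the bundled T2
 * implementation):
 * 
 *        public class MyTransfer extends AbstractMultipleFTPerXFERJob {
 *            public MyTransfer( PegasusBag bag ){ super( bag ); }
 *            public boolean useThirdPartyTransferAlways(){ return false; }
 *            public boolean doesPreserveXBit(){ return false; }
 *            public String getDescription(){ return "My transfer tool"; }
 *            //... remaining abstract methods elided
 *        }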
* * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4720 $ */ public interface Implementation { /** * The version number associated with this API */ public static final String VERSION = "1.6"; /** * The universe that applies for the transfer jobs. Used for querying * the Site Catalog. */ public static final String TRANSFER_UNIVERSE = "transfer"; /** * Sets the callback to the refiner that has loaded this implementation. * * @param refiner the transfer refiner that loaded the implementation. */ public void setRefiner(Refiner refiner); /** * This constructs the Job object for the transfer node. The transfer is * supposed to occur at the job execution site. It should lead to the addition * of setup chmod jobs to the workflow, which appear as parents of compute * jobs in case the transfer implementation does not preserve the X bit * on the file being transferred. This is required for staging of executables * as part of the workflow. * * @param job the Job object for the job, in relation to which * the transfer node is being added. Either the transfer * node can be transferring this job's input files to * the execution pool, or transferring this job's output * files to the output pool. * @param site the site where the transfer job should run. * @param files collection of FileTransfer objects * representing the data files and staged executables to be * transferred. * @param execFiles subset collection of the files parameter, that identifies * the executable files that are being transferred. * @param txJobName the name of transfer node. * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * * @return the created TransferJob. */ public TransferJob createTransferJob(Job job, String site, Collection files, Collection execFiles, String txJobName, int jobClass ) ; /** * Returns a boolean indicating whether the transfer protocol being used by * the implementation preserves the X Bit or not while staging. If it does * not, then it should extend the Abstract implementation of this interface, * that allows for adding of a setup job after the stagein job that changes * the X Bit. */ public boolean doesPreserveXBit(); /** * Adds dirmanager jobs to the workflow that do a chmod on the files * being staged. * * @param computeJob the computeJob for which the files are * being staged. * @param txJobName the name of the transfer job that is staging the files. * @param execFiles the executable files that are being staged. * @param transferClass the class of transfer job * * @return boolean indicating whether any XBitJobs were successfully added or * not. */ /*public boolean addSetXBitJobs( Job computeJob, String txJobName, Collection execFiles, int transferClass ); */ /** * Adds dirmanager jobs to the workflow that do a chmod on the files * being staged. * * @param computeJob the computeJob for which the files are * being staged. * @param txJobName the name of the transfer job that is staging the files. * @param execFiles the executable files that are being staged. * @param transferClass the class of transfer job * @param xbitIndex index to be used for creating the name of XBitJob. * * @return boolean indicating whether any XBitJobs were successfully added or * not. */ public boolean addSetXBitJobs( Job computeJob, String txJobName, Collection execFiles, int transferClass, int xbitIndex ); /** * Adds a dirmanager job to the workflow that does a chmod on the files * being staged. 
* * @param computeJob the computeJob for which the files are * being staged. * @param execFiles the executable files that are being staged. * @param transferClass the class of transfer job * @param xbitIndex index to be used for creating the name of XBitJob. * * @return the job object for the xBitJob */ public Job createSetXBitJob( Job computeJob, Collection execFiles, int transferClass, int xbitIndex ); /** * Adds the dirmanager job to the workflow, that do a chmod on the executable * files that are being staged. It should be empty for the implementations * that preserve the X bit while staging files. * * @param computeJobName the name pf the computeJob for which the files are * being staged. * @param txJobName the name of the transfer job that is staging the files. * @param execFiles the executable files that are being staged. * @param transferClass the class of transfer job * * @return boolean indicating whether any XBitJobs were succesfully added or * not. */ // public boolean addSetXBitJobs(String computeJobName, // String txJobName, // Collection execFiles, // int transferClass); /** * Generates the name of the setXBitJob , that is unique for the given * workflow. If the implementation preserve the X bit, then it should * return null. * * @param name the name of the compute job for which the executable is * being staged. * @param counter the index for the setXBit job. * * @return the name of the setXBitJob, null in case the implementation * preserves the XBit. */ public String getSetXBitJobName(String name, int counter); /** * Retrieves the transformation catalog entry for the executable that is * being used to transfer the files in the implementation. * * @param siteHandle the handle of the site where the transformation is * to be searched. * * @param jobClass the job Class for the newly added job. Can be one of the * following: * stage-in * stage-out * inter-pool transfer * stage-in worker transfer * * @return the transformation catalog entry if found, else null. */ public TransformationCatalogEntry getTransformationCatalogEntry(String siteHandle, int jobClass ); /** * Return a boolean indicating whether the transfers to be done always in * a third party transfer mode. A value of false, results in the * direct or peer to peer transfers being done. *

* A value of false does not preclude third party transfers. They still can * be done, by setting the property "pegasus.transfer.*.thirdparty.sites". * * @return boolean indicating whether to always use third party transfers * or not. * * @see PegasusProperties#getThirdPartySites(String) */ public boolean useThirdPartyTransferAlways(); /** * Applies priorities to the transfer jobs if a priority is specified * in the properties file. * * @param job the transfer job . */ public void applyPriority(TransferJob job); /** * Determines if there is a need to transfer proxy for the transfer * job or not. If there is a need to transfer proxy, then the job is * modified to create the correct condor commands to transfer the proxy. * Proxy is usually transferred if the VDS profile TRANSFER_PROXY is set, * or the job is being run in the condor vanilla universe. The proxy is * transferred from the submit host (i.e site local). The location is * determined from the value of the X509_USER_PROXY profile key associated * in the env namespace. * * @param job the transfer job . * * @return boolean true job was modified to transfer the proxy, else * false when job is not modified. */ public boolean checkAndTransferProxy(TransferJob job); /** * Returns a textual description of the transfer implementation. * * @return a short textual description */ public String getDescription(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/AbstractRefiner.java0000644000175000017500000003225411757531137027045 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer; import edu.isi.pegasus.planner.transfer.Refiner; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import java.util.Collection; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.provenance.pasoa.XMLProducer; import edu.isi.pegasus.planner.provenance.pasoa.producer.XMLProducerFactory; /** * An abstract implementation that implements some of the common functions * in the Refiner Interface and member variables that are required by all the * refiners. * * @author Karan Vahi * @version $Revision: 2590 $ */ public abstract class AbstractRefiner implements Refiner{ /** * The stage-in transfer implementation that the refiner requires. */ protected Implementation mTXStageInImplementation ; /** * The stage-in symbolic link transfer implementation that refiner requires. */ protected Implementation mTXSymbolicLinkImplementation; /** * The inter transfer implementation that the refiner requires. */ protected Implementation mTXInterImplementation ; /** * The stage-out transfer implementation that the refiner requires. 
*/ protected Implementation mTXStageOutImplementation ; /** * The ADag object associated with the Dag. This is the object to * which the transfer nodes are added. This object is initialised in the * TransferEngine. */ protected ADag mDAG; /** * The handle to the properties object holding the properties relevant to * Pegasus. */ protected PegasusProperties mProps; /** * The options passed to the planner at runtime. */ protected PlannerOptions mPOptions; /** * The logging object which is used to log all the messages. * */ protected LogManager mLogger; /** * The handle to the Third Party State machinery. */ protected TPT mTPT; /** * The handle to the Remote Transfers State machinery. */ protected RemoteTransfer mRemoteTransfers; /** * The XML Producer object that records the actions. */ protected XMLProducer mXMLStore; /** * The overloaded constructor. * * @param dag the workflow to which transfer nodes need to be added. * @param bag the bag of initialization objects. */ public AbstractRefiner( ADag dag, PegasusBag bag ){ mLogger = bag.getLogger(); mDAG = dag; mProps = bag.getPegasusProperties(); mPOptions = bag.getPlannerOptions(); mTPT = new TPT( mProps ); mTPT.buildState(); mRemoteTransfers = new RemoteTransfer( mProps ); mRemoteTransfers.buildState(); mXMLStore = XMLProducerFactory.loadXMLProducer( mProps ); } /** * Returns a reference to the workflow that is being refined by the refiner. * * * @return ADAG object. */ public ADag getWorkflow(){ return this.mDAG; } /** * Default behaviour to preserve backward compatibility when the stage in * and symbolic link jobs were not separated. The symlink transfer files * are added back into the files collection and passed onto * legacy implementations. Refiners that want to distinguish between * symlink and stagein jobs should over ride this method. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param symlinkFiles Collection of FileTransfer objects containing * source and destination file url's for symbolic linking * on compute site. */ public void addStageInXFERNodes( Job job, Collection files, Collection symlinkFiles ){ files.addAll( symlinkFiles ); addStageInXFERNodes( job, files ); } /** * Default behaviour to preserve backward compatibility when the stage in * and symbolic link jobs were not separated. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. */ public void addStageInXFERNodes( Job job, Collection files ){ throw new UnsupportedOperationException( "Refiner does not implement the function addStageInXFERNodes( SubInfo, Collection)"); } /** * Returns a reference to the XMLProducer, that generates the XML fragment * capturing the actions of the refiner. This is used for provenace * purposes. * * @return XMLProducer */ public XMLProducer getXMLProducer(){ return this.mXMLStore; } /** * Boolean indicating whether the Transfer Refiner has a preference for * where a transfer job is run. By default, Refiners dont advertise any * preference as to where transfer jobs run. * * @return false */ public boolean refinerPreferenceForTransferJobLocation( ){ return false; } /** * Boolean indicating Refiner preference for transfer jobs to run locally. 
* This method should be called only if refinerPreferenceForTransferJobLocation * is true for a refiner. * * @param type the type of transfer job for which the URL is being constructed. * Should be one of the following: * stage-in * stage-out * inter-pool transfer * * @return boolean refiner preference for transfer job to run locally or not. */ public boolean refinerPreferenceForLocalTransferJobs( int type ){ throw new UnsupportedOperationException( "Refiner does not advertise preference for local transfer jobs "); } /** * Returns whether a Site prefers transfers to be run on it i.e remote transfers * enabled. * * @param site the name of the site. * @param type the type of transfer job for which the URL is being constructed. * Should be one of the following: * stage-in * stage-out * inter-pool transfer * * @return true if site is setup for remote transfers * * @see Job#STAGE_IN_JOB * @see Job#INTER_POOL_JOB * @see Job#STAGE_OUT_JOB */ public boolean runTransferRemotely( String site, int type ) { Implementation implementation; //the value from the properties file //later on maybe picked up as profiles boolean runTransferRemotely = false; if(type == Job.STAGE_IN_JOB ){ implementation = mTXStageInImplementation; runTransferRemotely = mRemoteTransfers.stageInOnRemoteSite( site ); } else if(type == Job.INTER_POOL_JOB){ implementation = mTXInterImplementation; runTransferRemotely = mRemoteTransfers.interOnRemoteSite( site ); } else if(type == Job.STAGE_OUT_JOB){ implementation = mTXStageOutImplementation; runTransferRemotely = mRemoteTransfers.stageOutOnRemoteSite( site ); }/* else if(type == Job.SYMLINK_STAGE_IN_JOB){ implementation = mTXSymbolicLinkImplementation; runTransferRemotely = true; }*/ else{ throw new java.lang.IllegalArgumentException( "Invalid implementation type passed " + type); } return runTransferRemotely; } /** * Returns whether a Site is third party enabled or not. * * @param site the name of the site. * @param type the type of transfer job for which the URL is being constructed. * Should be one of the following: * stage-in * stage-out * inter-pool transfer * * @return true pool is third party enabled * false pool is not third party enabled. * * @see Job#STAGE_IN_JOB * @see Job#INTER_POOL_JOB * @see Job#STAGE_OUT_JOB * * @throws IllegalArgumentException */ public boolean isSiteThirdParty(String site,int type){ Implementation implementation; //the value from the properties file //later on maybe picked up as profiles boolean useTPT = false; if(type == Job.STAGE_IN_JOB ){ implementation = mTXStageInImplementation; useTPT = mTPT.stageInThirdParty(site); } else if(type == Job.INTER_POOL_JOB){ implementation = mTXInterImplementation; useTPT = mTPT.interThirdParty(site); } else if(type == Job.STAGE_OUT_JOB){ implementation = mTXStageOutImplementation; useTPT = mTPT.stageOutThirdParty(site); }/* else if(type == Job.SYMLINK_STAGE_IN_JOB){ implementation = mTXSymbolicLinkImplementation; useTPT = false; }*/ else{ throw new java.lang.IllegalArgumentException( "Invalid implementation type passed " + type); } return implementation.useThirdPartyTransferAlways()|| useTPT; } /** * Returns whether the third party transfers for a particular site are to * be run on the remote site or the submit host. * * @param site the name of the site. * @param type the type of transfer job for which the URL is being constructed. * Should be one of the following: * stage-in * stage-out * inter-pool transfer * * @return true if the transfers are to be run on remote site, else false. 
* * @see Job#STAGE_IN_JOB * @see Job#INTER_POOL_JOB * @see Job#STAGE_OUT_JOB */ public boolean runTPTOnRemoteSite(String site,int type){ //Implementation implementation; //the value from the properties file //later on maybe picked up as profiles boolean remoteTPT = false; if(type == Job.STAGE_IN_JOB ){ //implementation = mTXStageInImplementation; remoteTPT = mTPT.stageInThirdPartyRemote(site); } else if(type == Job.INTER_POOL_JOB){ //implementation = mTXInterImplementation; remoteTPT = mTPT.interThirdPartyRemote(site); } else if(type == Job.STAGE_OUT_JOB){ //implementation = mTXStageOutImplementation; remoteTPT = mTPT.stageOutThirdPartyRemote(site); } else{ throw new java.lang.IllegalArgumentException( "Invalid implementation type passed " + type); } return remoteTPT; } /** * Logs configuration messages regarding the type of implementations loaded * for various type of transfer node creations. */ protected void logConfigMessages(){ //log a message mLogger.log("Transfer Implementation loaded for Stage-In [" + mTXStageInImplementation.getDescription() + "]", LogManager.CONFIG_MESSAGE_LEVEL); mLogger.log("Transfer Implementation loaded for symbolic linking Stage-In [" + mTXSymbolicLinkImplementation.getDescription() + "]", LogManager.CONFIG_MESSAGE_LEVEL); mLogger.log("Transfer Implementation loaded for Inter Site [" + mTXInterImplementation.getDescription() + "]", LogManager.CONFIG_MESSAGE_LEVEL); mLogger.log("Transfer Implementation loaded for Stage-Out [" + mTXStageOutImplementation.getDescription() + "]", LogManager.CONFIG_MESSAGE_LEVEL); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/Refiner.java0000644000175000017500000003016511757531137025360 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.refiner.ReplicaCatalogBridge; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.transfer.implementation.TransferImplementationFactoryException; import java.util.Collection; import java.io.IOException; import java.lang.reflect.InvocationTargetException; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The refiner interface, that determines the functions that need to be * implemented to add various types of transfer nodes to the workflow. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2590 $ */ public interface Refiner extends edu.isi.pegasus.planner.refiner.Refiner {//need to extend it for the PASOA integration /** * The prefix for all local transfer jobs. 
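 * Combined with a job type prefix this yields transfer job names such as
 * stage_in_local_preprocess_0 ( an illustrative name; the exact composition
 * is left to the refiner implementation ).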
*/ public static final String LOCAL_PREFIX = "local_"; /** * The prefix for all remote transfer jobs */ public static final String REMOTE_PREFIX = "remote_"; /** * The prefix for the jobs which are added to transfer the files to a job's * execution pool from the location returned from the replica mechanism. * the new job's name is FROM_RC_PREFIX + nameofjob + _+ counter. */ public static final String STAGE_IN_PREFIX = "stage_in_"; /** * The prefix for the jobs that symlink against existing input data on a * compute site. */ //public static final String SYMBOLIC_LINK_PREFIX = "symlink_"; /** * The prefix for the jobs which are added to transfer the files generated by * a job on an execution pool to the output pool. The new job's name is * TO_RC_PREFIX + nameofjob + _+ counter. * */ public static final String STAGE_OUT_PREFIX = "stage_out_"; /** * The prefix for the jobs which are added to transfer the files generated by * the parents of a job to the jobs execution pool. The new job's name is * INTER_POOL_PREFIX + nameofjob + _+ counter. */ public static final String INTER_POOL_PREFIX = "stage_inter_"; /** * The prefix for the jobs which register the newly materialized files in the * Replica Catalog. The job's name should be RC_REGISTER_PREFIX + nameofjob, * where nameofjob is the job that generates these materialized files. */ public static final String REGISTER_PREFIX = "register_"; /** * Loads the appropriate implementations that is required by this refinement * strategy for different types of transfer jobs. It calls to the factory * method to load the appropriate Implementor. * * Loads the implementing class corresponding to the mode specified by the user * at runtime in the properties file. The properties object passed should not * be null. * * @param bag the bag of initialization objects. * @throws org.griphyn.cPlanner.transfer.implementation.TransferImplementationFactoryException * */ public void loadImplementations( PegasusBag bag ) throws TransferImplementationFactoryException; /** * Adds the inter pool transfer nodes that are required for transferring * the output files of the parents to the jobs execution site. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param localTransfer boolean indicating that associated transfer job will run * on local site. * */ public void addInterSiteTXNodes( Job job, Collection files, boolean localTransfer ); /** * Adds the stageout transfer nodes, that stage data to an output site * specified by the user. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param rcb bridge to the Replica Catalog. Used for creating registration * nodes in the workflow. * @param localTransfer boolean indicating that associated transfer job will run * on local site. * */ public void addStageOutXFERNodes( Job job, Collection files, ReplicaCatalogBridge rcb, boolean localTransfer ) ; /** * Adds the stageout transfer nodes, that stage data to an output site * specified by the user. It also adds the registration nodes to register * the data in the replica catalog if required. * * @param job Job object corresponding to the node to * which the files are to be transferred to. 
* @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param rcb bridge to the Replica Catalog. Used for creating registration * nodes in the workflow. * @param localTransfer boolean indicating that associated transfer job will run * on local site. * @param deletedLeaf to specify whether the node is being added for * a deleted node by the reduction engine or not. * default: false */ public abstract void addStageOutXFERNodes(Job job, Collection files, ReplicaCatalogBridge rcb, boolean localTransfer, boolean deletedLeaf ); /** * Adds the stage in transfer nodes which transfer the input files for a job, * from the location returned from the replica catalog to the job's execution * pool. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param symLinkFiles Collection of FileTransfer objects containing * source and destination file url's for symbolic linking * on compute site. */ public void addStageInXFERNodes( Job job, Collection files, Collection symLinkFiles); /** * Signals that the traversal of the workflow is done. This would allow * the transfer mechanisms to clean up any state that they might be keeping * that needs to be explicitly freed. */ public void done(); /** * Boolean indicating whether the Transfer Refiner has a preference for * where a transfer job is run. * * @return boolean */ public boolean refinerPreferenceForTransferJobLocation( ); /** * Boolean indicating Refiner preference for transfer jobs to run locally. * This method should be called only if refinerPreferenceForTransferJobLocation * is true for a refiner. * * @param type the type of transfer job for which the URL is being constructed. * Should be one of the following: * stage-in * stage-out * inter-pool transfer * * @return boolean refiner preference for transfer job to run locally or not. */ public boolean refinerPreferenceForLocalTransferJobs( int type ); /** * Returns whether a Site prefers transfers to be run on it i.e remote transfers * enabled. * * @param site the name of the site. * @param type the type of transfer job for which the URL is being constructed. * Should be one of the following: * stage-in * stage-out * inter-pool transfer * * @return true if site is setup for remote transfers * * @see Job#STAGE_IN_JOB * @see Job#INTER_POOL_JOB * @see Job#STAGE_OUT_JOB */ public boolean runTransferRemotely( String site, int type ) ; /** * Returns whether a Site is third party enabled or not. * * @param site the name of the site. * @param type the type of transfer job for which the URL is being constructed. * Should be one of the following: * stage-in * stage-out * inter-pool transfer * * @return true pool is third party enabled * false pool is not third party enabled. * * @see Job#STAGE_IN_JOB * @see Job#INTER_POOL_JOB * @see Job#STAGE_OUT_JOB */ public boolean isSiteThirdParty(String site, int type) ; /** * Returns whether the third party transfers for a particular site are to * be run on the remote site or the submit host. * * @param site the name of the site. * @param type the type of transfer job for which the URL is being constructed. * Should be one of the following: * stage-in * stage-out * inter-pool transfer * * @return true if the transfers are to be run on remote site, else false. 
* * @see Job#STAGE_IN_JOB * @see Job#INTER_POOL_JOB * @see Job#STAGE_OUT_JOB */ public boolean runTPTOnRemoteSite(String site,int type); /** * Add a new job to the workflow being refined. * * @param job the job to be added. */ public void addJob(Job job); /** * Adds a new relation to the workflow being refiner. * * @param parent the jobname of the parent node of the edge. * @param child the jobname of the child node of the edge. */ public void addRelation(String parent, String child); /** * Adds a new relation to the workflow. In the case when the parent is a * transfer job that is added, the parentNew should be set only the first * time a relation is added. For subsequent compute jobs that maybe * dependant on this, it needs to be set to false. * * @param parent the jobname of the parent node of the edge. * @param child the jobname of the child node of the edge. * @param pool the execution pool where the transfer node is to be run. * @param parentNew the parent node being added, is the new transfer job * and is being called for the first time. */ public void addRelation(String parent, String child, String pool, boolean parentNew); /** * Returns a textual description of the transfer mode. * * @return a short textual description */ public String getDescription(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/refiner/0000755000175000017500000000000011757531667024560 5ustar ryngerynge././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/refiner/TransferRefinerFactoryException.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/refiner/TransferRefinerFactoryException.0000644000175000017500000000676011757531137033070 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.refiner; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Transfer Refiners. * * @author Karan Vahi * @version $Revision: 2258 $ */ public class TransferRefinerFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Transfer Refiner"; /** * Constructs a TransferRefinerFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public TransferRefinerFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a TransferRefinerFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. 
*/ public TransferRefinerFactoryException( String msg, String classname) { super( msg , classname ); } /** * Constructs a TransferRefinerFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public TransferRefinerFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a TransferRefinerFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public TransferRefinerFactoryException(String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/refiner/Cluster.java0000644000175000017500000007202211757531137027037 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.transfer.refiner; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import java.util.Collection; import java.util.List; import java.util.ArrayList; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import java.util.Set; import java.util.HashSet; import java.util.Map; import java.util.HashMap; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.transfer.Implementation; /** * A cluster refiner that builds upon the Bundle Refiner. It clusters the stage-in * jobs and stage-out jobs per level of the workflow. The difference from the * Bundle refiner beings * *

 *        - stagein is also clustered/bundled per level. In Bundle it was for the 
 *          whole workflow.
 *        - keys that control the clustering ( the old name was bundling ) are
 *          cluster.stagein and cluster.stageout
 * 
 *
 * In order to use the transfer refiner implemented by this class,
 *
 *        - the property pegasus.transfer.refiner must be set to value Cluster
 * 
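 *
 * For example ( an illustrative properties snippet, not a complete
 * configuration ):
 *
 *        pegasus.transfer.refiner = Cluster
 *
 * with the granularity tuned per site via the cluster.stagein and
 * cluster.stageout profile keys.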
* * * @version $Revision: 4720 $ * @author Karan Vahi */ public class Cluster extends Bundle { /** * A short description of the transfer refinement. */ public static final String DESCRIPTION = "Cluster Transfers: Stagein and Stageout TX jobs are clustered per level"; /** * The default clustering factor that identifies the number of transfer jobs * that are being created per execution pool for stageing in data for * the workflow. */ public static final String DEFAULT_LOCAL_STAGE_IN_CLUSTER_FACTOR = "4"; /** * The default clustering factor that identifies the number of transfer jobs * that are being created per execution pool for stageing in data for * the workflow. */ public static final String DEFAULT_REMOTE_STAGE_IN_CLUSTER_FACTOR = "4"; /** * The default bundling factor that identifies the number of transfer jobs * that are being created per execution pool for stageing out data for * the workflow. */ public static final String DEFAULT_LOCAL_STAGE_OUT_CLUSTER_FACTOR = "4"; /** * The default bundling factor that identifies the number of transfer jobs * that are being created per execution pool for stageing out data for * the workflow. */ public static final String DEFAULT_REMOTE_STAGE_OUT_CLUSTER_FACTOR = "4"; /** * A map indexed by site name, that contains the pointer to the stage in * PoolTransfer objects for that site. This is per level of the workflow. */ protected Map mStageInLocalMapPerLevel; /** * A map indexed by site name, that contains the pointer to the symlink stage * in PoolTransfer objects for that site. This is per level of the workflow. */ protected Map mStageInRemoteMapPerLevel; /** * The current level of the jobs being traversed. */ private int mCurrentSILevel; /** * Maps the site name to the current synch job */ private Map< String, Job > mSyncJobMap; /** * The overloaded constructor. * * @param dag the workflow to which transfer nodes need to be added. * @param bag the bag of initialization objects * */ public Cluster( ADag dag, PegasusBag bag ){ super( dag, bag ); mCurrentSILevel = -1; mSyncJobMap = new HashMap< String, Job >(); } /** * Initializes the bundle value variables, that are responsible determining * the bundle values. 
*/ protected void initializeBundleValues() { mStageinLocalBundleValue = new BundleValue(); mStageinLocalBundleValue.initialize( Pegasus.CLUSTER_LOCAL_STAGE_IN_KEY, Pegasus.CLUSTER_STAGE_IN_KEY, getDefaultBundleValueFromProperties( Pegasus.CLUSTER_LOCAL_STAGE_IN_KEY, Pegasus.CLUSTER_STAGE_IN_KEY, Cluster.DEFAULT_LOCAL_STAGE_IN_CLUSTER_FACTOR )); mStageInRemoteBundleValue = new BundleValue(); mStageInRemoteBundleValue.initialize( Pegasus.CLUSTER_REMOTE_STAGE_IN_KEY, Pegasus.CLUSTER_STAGE_IN_KEY, getDefaultBundleValueFromProperties( Pegasus.CLUSTER_LOCAL_STAGE_IN_KEY, Pegasus.CLUSTER_STAGE_IN_KEY, Cluster.DEFAULT_REMOTE_STAGE_IN_CLUSTER_FACTOR )); mStageOutLocalBundleValue = new BundleValue(); mStageOutLocalBundleValue.initialize( Pegasus.CLUSTER_LOCAL_STAGE_OUT_KEY, Pegasus.CLUSTER_STAGE_OUT_KEY, getDefaultBundleValueFromProperties( Pegasus.CLUSTER_LOCAL_STAGE_OUT_KEY, Pegasus.CLUSTER_STAGE_OUT_KEY, Cluster.DEFAULT_LOCAL_STAGE_OUT_CLUSTER_FACTOR )); mStageOutRemoteBundleValue = new BundleValue(); mStageOutRemoteBundleValue.initialize( Pegasus.BUNDLE_REMOTE_STAGE_OUT_KEY, Pegasus.BUNDLE_STAGE_OUT_KEY, getDefaultBundleValueFromProperties( Pegasus.BUNDLE_REMOTE_STAGE_OUT_KEY, Pegasus.BUNDLE_STAGE_OUT_KEY, Cluster.DEFAULT_REMOTE_STAGE_OUT_CLUSTER_FACTOR )); } /** * Adds the stage in transfer nodes which transfer the input files for a job, * from the location returned from the replica catalog to the job's execution * pool. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param symlinkFiles Collection of FileTransfer objects containing * source and destination file url's for symbolic linking * on compute site. */ public void addStageInXFERNodes( Job job, Collection files, Collection symlinkFiles ){ addStageInXFERNodes( job, true, files, Job.STAGE_IN_JOB, this.mStageInLocalMapPerLevel, this.mStageinLocalBundleValue, this.mTXStageInImplementation ); addStageInXFERNodes( job, false, symlinkFiles, Job.STAGE_IN_JOB, this.mStageInRemoteMapPerLevel, this.mStageInRemoteBundleValue, this.mTXStageInImplementation ); } /** * Adds the stage in transfer nodes which transfer the input files for a job, * from the location returned from the replica catalog to the job's execution * pool. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param jobType the type of transfer job being created * @param stageInMap Map indexed by site name that gives all the transfers for * that site. * @param cValue used to determine the bundling factor to employ for a job. * @param implementation the transfer implementation to use. 
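 *
 * Illustrative effect: with a cluster value of 4 for a site, at most four
 * stage-in transfer jobs are created for that site per workflow level, and
 * the files scheduled at that level are distributed amongst them.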
*/ public void addStageInXFERNodes( Job job, boolean localTransfer, Collection files, int jobType, Map stageInMap, BundleValue cValue, Implementation implementation ){ //sanity check if( files.isEmpty() ){ return; } String jobName = job.getName(); mLogMsg = "Adding stagin transfer nodes for job " + jobName; //separate the files for transfer //and for registration List txFiles = new ArrayList(); List stagedExecFiles = new ArrayList(); //to prevent duplicate dependencies Set tempSet = new HashSet(); //iterate through all the files for ( Iterator it = files.iterator(); it.hasNext(); ) { FileTransfer ft = ( FileTransfer ) it.next(); String lfn = ft.getLFN(); mLogger.log( "File being looked at is " + lfn, LogManager.DEBUG_MESSAGE_LEVEL ); //check for transfer flag to see if we need to transfer the file. if ( !ft.getTransientTransferFlag() ) { String key = this.constructFileKey( ft.getLFN(), job.getStagingSiteHandle() ); //check to see if the file is already being transferred by //some other stage in job to that site String existingSiTX = (String) mFileTable.get( key ); if ( existingSiTX == null) { //schedule the file for transfer txFiles.add( ft ); if( ft.isTransferringExecutableFile() ){ stagedExecFiles.add( ft ); } } else{ //there is an existing tx job that is transferring the file. //check if tempSet does not contain the parent //fix for sonal's bug if ( tempSet.contains( existingSiTX )) { StringBuffer msg = new StringBuffer(); msg.append( "IGNORING TO ADD rc pull relation from rc tx node: " ). append( existingSiTX ).append( " -> " ).append( jobName ). append( " for transferring file " ).append( lfn ).append( " to site " ). append( job.getStagingSiteHandle() ); mLogger.log( msg.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); } else { mLogger.log( " For transferring file " + lfn, LogManager.DEBUG_MESSAGE_LEVEL ); addRelation( existingSiTX, jobName, job.getStagingSiteHandle(), false ); tempSet.add( existingSiTX ); } } } } boolean makeTNode = !txFiles.isEmpty(); int level = job.getLevel(); String site = job.getStagingSiteHandle(); int clusterValue = cValue.determine( implementation, job ); /* int clusterValue = getSISiteBundleValue( site, job.vdsNS.getStringValue( Pegasus.CLUSTER_STAGE_IN_KEY ) ); */ mLogger.log( "The Cluster value for site " + site + " is " + clusterValue, LogManager.DEBUG_MESSAGE_LEVEL ); if ( level != mCurrentSILevel ){ mCurrentSILevel = level; //we are starting on a new level of the workflow. 
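//NOTE: this is where Cluster differs from Bundle. The per level stage-in
//maps are flushed on a level change, so transfer jobs are clustered per
//level of the workflow instead of across the whole workflow.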
//reinitialize stuff this.resetStageInMaps(); //the stagein map needs to point to the correct reinitialized one if( localTransfer ){ stageInMap = this.mStageInLocalMapPerLevel; } else { stageInMap = this.mStageInRemoteMapPerLevel; }/* else{ //error throw new RuntimeException( "Invalide job type specified " + jobType ); }*/ } TransferContainer siTC = null; if ( makeTNode ) { //get the appropriate pool transfer object for the site PoolTransfer pt = this.getStageInPoolTransfer( stageInMap, site, clusterValue, localTransfer ); //we add all the file transfers to the pool transfer siTC = pt.addTransfer( txFiles, level, jobType ); siTC.setTransferType( jobType ); String siJob = siTC.getTXName(); //traverse through all files to be staged int staged = 0; String dataFileSiJob = null;//the si job that stages in data files for( Iterator it = txFiles.iterator(); it.hasNext(); ){ FileTransfer ft = ( FileTransfer)it.next(); String key = this.constructFileKey( ft.getLFN(), job.getStagingSiteHandle() ); if( ft.isTransferringExecutableFile() && this.mAddNodesForSettingXBit ){ //the staged execution file should be having the setup //job as parent if it does not preserve x bit if( implementation.doesPreserveXBit()){ mFileTable.put( key, siJob ); } else{ mFileTable.put( key, implementation.getSetXBitJobName( jobName,staged) ); } } else{ //make a new entry into the table mFileTable.put( key, siJob); dataFileSiJob = siJob; } } //add the newJobName to the tempSet so that even //if the job has duplicate input files only one instance //of transfer is scheduled. This came up during collapsing //June 15th, 2004 //tempSet.add( siJob ) if( dataFileSiJob != null ){ addRelation( dataFileSiJob, jobName ); } if( !stagedExecFiles.isEmpty() && mAddNodesForSettingXBit ){ //create en-mass the setXBit jobs //if there were any staged files /*implementation.addSetXBitJobs( job, siJob, stagedExecFiles, Job.STAGE_IN_JOB ); */ Job xBitJob = implementation.createSetXBitJob( job, stagedExecFiles, Job.STAGE_IN_JOB, staged ); this.addJob( xBitJob ); //add the relation txJob->XBitJob->ComputeJob this.addRelation( siJob, xBitJob.getName(), xBitJob.getSiteHandle(), true); this.addRelation( xBitJob.getName(), job.getName() ); } } } /** * Resets the stage in map and adds the stage in jobs for each site per level. * */ protected void resetStageInMaps( ){ //stores the mapping of site to synch job per call //to resetStagInMaps Map< String, Job > tempSynchJobMap = new HashMap(); //reset both the stagein and symlink stage in maps this.mStageInLocalMapPerLevel = resetStageInMap( this.mStageInLocalMapPerLevel, this.mTXStageInImplementation, tempSynchJobMap, Job.STAGE_IN_JOB, true , true ); //we dont want any synch jobs to be created while creating symlink jobs this.mStageInRemoteMapPerLevel = resetStageInMap( this.mStageInRemoteMapPerLevel, this.mTXStageInImplementation, tempSynchJobMap, Job.STAGE_IN_JOB, false, false ); //populate the contents of temp synch jobmap //into the main synch job map this.mSyncJobMap.putAll( tempSynchJobMap ); } /** * Resets the stage in map and adds the stage in jobs for each site per level. * * @param stageInMap the stagein map per level to be reset * @param implementation the transfer implementation corresponding to the map * being reset * @param transientSynchJobMap transient map to keep track of synch jobs per site. * @param jobType the type of Transfer Job created * @param createChildSyncJob boolean indicating whether to create a child synch job or not. 
* * @return the reset map */ protected Map resetStageInMap( Map stageInMap, Implementation implementation, Map transientSynchJobMap, int jobType, boolean createChildSyncJob, boolean localTransfer ){ if ( stageInMap != null ){ Job job = new Job(); //before flushing add the stage in nodes to the workflow for( Iterator it = stageInMap.values().iterator(); it.hasNext(); ){ PoolTransfer pt = ( PoolTransfer ) it.next(); String site = pt.getPoolName() ; job.setSiteHandle( site ); job.setStagingSiteHandle( site ); Job parentSyncJob = this.getSyncJob( site ); //add a child synch job for this level if required Job childSyncJob = null; if( createChildSyncJob ){ childSyncJob = createSyncJobBetweenLevels( getSyncJobBetweenLevelsName( site, mCurrentSILevel - 1 ) ); addJob( childSyncJob ); mLogger.log( "Added synch job " + childSyncJob.getName(), LogManager.DEBUG_MESSAGE_LEVEL ); } mLogger.log( "Adding jobs for staging in data to site " + pt.getPoolName(), LogManager.DEBUG_MESSAGE_LEVEL ); //traverse through all the TransferContainers for( Iterator tcIt = pt.getTransferContainerIterator(); tcIt.hasNext(); ){ TransferContainer tc = ( TransferContainer ) tcIt.next(); if(tc == null){ //break out break; } //add the stagein job if required Job siJob = null; if( !tc.getFileTransfers().isEmpty() ){ mLogger.log( "Adding stage-in job " + tc.getTXName(), LogManager.DEBUG_MESSAGE_LEVEL); String tSite = localTransfer ? "local" : job.getSiteHandle(); siJob = implementation.createTransferJob( job, tSite, tc.getFileTransfers(), null, tc.getTXName(), jobType ); //always set job type to stage in even for symlink after creation siJob.setJobType( Job.STAGE_IN_JOB ); addJob( siJob ); } //add the dependency to parent synch if( parentSyncJob != null ){ addRelation( parentSyncJob.getName(), siJob.getName() ); } //stagein job is parent to child synch i.e if it was created if( createChildSyncJob ){ addRelation( siJob.getName(), childSyncJob.getName() ); } }//end of traversal thru all transfer containers //update the synch job map if child synch job is created if( createChildSyncJob ){ //mSyncJobMap.put( site, childSyncJob ); //populate to the transient job map not the main one transientSynchJobMap.put( site, childSyncJob ); } }//end of traversal thru all pool transfers } stageInMap = new HashMap< String, PoolTransfer >(); return stageInMap; } /** * Returns the bundle value associated with a compute job as a String. * * @param job * * @return value as String or NULL */ protected String getComputeJobBundleValue( Job job ){ return job.vdsNS.getStringValue( Pegasus.CLUSTER_STAGE_OUT_KEY ); } /** * Returns the appropriate stagein pool transfer for a particular site. * * * @param stageInMap map that indexes site to PoolTransfer objects * @param site the site for which the PT is reqd. * @param num the number of stage in jobs required for that Pool. * @param localTransfer whether the transfer needs to run on local site or not. * * @return the PoolTransfer */ protected PoolTransfer getStageInPoolTransfer( Map stageInMap, String site, int num, boolean localTransfer ){ if ( stageInMap.containsKey( site ) ){ return ( PoolTransfer ) stageInMap.get( site ); } else{ PoolTransfer pt = new PoolTransfer( site, localTransfer, num ); stageInMap.put( site, pt ); return pt; } } /** * Signals that the traversal of the workflow is done. At this point the * transfer nodes are actually constructed traversing through the transfer * containers and the stdin of the transfer jobs written. 
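 * The maps are flushed one final time here so that the transfer jobs for
 * the last traversed level are also added to the workflow.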
*/ public void done(){ //increment the level counter this.mCurrentSILevel++; //reset the stageout stagein map too this.resetStageInMaps(); this.resetStageOutMaps(); } /** * Returns a textual description of the transfer mode. * * @return a short textual description */ public String getDescription(){ return Cluster.DESCRIPTION; } /** * Determines the bundle factor for a particular site on the basis of the * stage in bundle value associcated with the underlying transfer * transformation in the transformation catalog. If the key is not found, * then the default value is returned. In case of the default value being * null the global default is returned. * * @param site the site at which the value is desired. * @param deflt the default value. * * @return the bundle factor. * * @see #DEFAULT_LOCAL_STAGE_IN_CLUSTER_FACTOR */ protected int getSISiteBundleValue(String site, String deflt){ //this should be parameterised Karan Dec 20,2005 TransformationCatalogEntry entry = mTXStageInImplementation.getTransformationCatalogEntry(site, Job.STAGE_IN_JOB ); Job sub = new Job(); String value = (deflt == null)? this.DEFAULT_LOCAL_STAGE_IN_CLUSTER_FACTOR: deflt; if(entry != null){ sub.updateProfiles(entry); value = (sub.vdsNS.containsKey( Pegasus.CLUSTER_STAGE_IN_KEY ))? sub.vdsNS.getStringValue( Pegasus.CLUSTER_STAGE_IN_KEY ): value; } return Integer.parseInt(value); } /** * Returns the name of the job that acts as a synchronization node in * between stage in jobs of different levels. * * @param site the site of the transfer job. * * @param level the level of the job * * @return name of synce job */ protected String getSyncJobBetweenLevelsName( String site, int level ){ StringBuffer sb = new StringBuffer(); sb.append( "sync_tx_noop_" ); //append the job prefix if specified in options at runtime if ( mJobPrefix != null ) { sb.append( mJobPrefix ); } if( site != null ){ sb.append( site ).append( "_" ); } sb.append( level ); return sb.toString(); } /** * It creates a NoOP synch job that runs on the submit host. * * @param name of the job * * @return the noop job. */ private Job createSyncJobBetweenLevels( String name ) { Job newJob = new Job(); List entries = null; String execPath = null; //jobname has the dagname and index to indicate different //jobs for deferred planning newJob.setName( name ); newJob.setTransformation( "pegasus", "noop", "1.0" ); newJob.setDerivation( "pegasus", "noop", "1.0" ); // newJob.setUniverse( "vanilla" ); newJob.setUniverse( GridGateway.JOB_TYPE.auxillary.toString()); //the noop job does not get run by condor //even if it does, giving it the maximum //possible chance newJob.executable = "/bin/true"; //construct noop keys newJob.setSiteHandle( "local" ); newJob.setJobType( Job.CREATE_DIR_JOB ); constructCondorKey( newJob, "noop_job", "true" ); constructCondorKey( newJob, "noop_job_exit_code", "0" ); //we do not want the job to be launched //by kickstart, as the job is not run actually newJob.vdsNS.checkKeyInNS( Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[GridStartFactory.NO_GRIDSTART_INDEX] ); return newJob; } /** * Constructs a condor variable in the condor profile namespace * associated with the job. Overrides any preexisting key values. * * @param job contains the job description. * @param key the key of the profile. * @param value the associated value. */ protected void constructCondorKey(Job job, String key, String value){ job.condorVariables.checkKeyInNS(key,value); } /** * Returns the current synch job for a site. 
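 *
 * Synch jobs are per site, per level NoOP jobs; an illustrative name would
 * be sync_tx_noop_isi_2 for level 2 on a site named isi.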
* * @param site * * @return synch job if exists else null */ public Job getSyncJob( String site ){ return (Job)mSyncJobMap.get( site ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/refiner/Default.java0000644000175000017500000010011311757531137026773 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.refiner; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.refiner.ReplicaCatalogBridge; import edu.isi.pegasus.planner.transfer.MultipleFTPerXFERJobRefiner; import edu.isi.pegasus.planner.provenance.pasoa.PPS; import edu.isi.pegasus.planner.provenance.pasoa.pps.PPSFactory; import java.util.ArrayList; import java.util.Iterator; import java.util.Collection; import java.util.TreeMap; import java.util.Map; import java.util.List; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.transfer.Implementation; import edu.isi.pegasus.planner.transfer.Refiner; /** * The default transfer refiner, that implements the multiple refiner. * For each compute job if required it creates the following * - a single stagein transfer job * - a single stageout transfer job * - a single interpool transfer job * * In addition this implementation prevents file clobbering while staging in data * to a remote site, that is shared amongst jobs. * * @author Karan Vahi * @version $Revision: 2764 $ */ public class Default extends MultipleFTPerXFERJobRefiner { /** * A short description of the transfer refinement. */ public static final String DESCRIPTION = "Default Multiple Refinement "; /** * The string holding the logging messages */ protected String mLogMsg; /** * A Map containing information about which logical file has been * transferred to which site and the name of the stagein transfer node * that is transferring the file from the location returned from * the replica catalog. * The key for the hashmap is logicalfilename:sitehandle and the value would be * the name of the transfer node. * */ protected Map mFileTable; /** * The handle to the provenance store implementation. */ protected PPS mPPS; /** * Boolean indicating whether to create registration jobs or not. */ protected Boolean mCreateRegistrationJobs; /** * The overloaded constructor. * * @param dag the workflow to which transfer nodes need to be added. * @param bag the bag of initialization objects. 
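 *
 * Note: registration jobs are created only when a replica catalog has been
 * configured ( mProps.getReplicaMode() returns a non-null value ); otherwise
 * files are staged out but not registered.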
* */ public Default( ADag dag, PegasusBag bag ){ super( dag, bag ); mLogMsg = null; mFileTable = new TreeMap(); mCreateRegistrationJobs = ( mProps.getReplicaMode() != null ); if( !mCreateRegistrationJobs ){ mLogger.log( "No Replica Registration Jobs will be created as Replica Catalog not configured.", LogManager.CONFIG_MESSAGE_LEVEL ); } //load the PPS implementation mPPS = PPSFactory.loadPPS( this.mProps ); mXMLStore.add( "" ); //call the begin workflow method try{ mPPS.beginWorkflowRefinementStep( this, PPS.REFINEMENT_STAGE, false ); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception", e ); } //clear the XML store mXMLStore.clear(); } /** * Adds the stage in transfer nodes which transfer the input files for a job, * from the location returned from the replica catalog to the job's execution * pool. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param symlinkFiles Collection of FileTransfer objects containing * source and destination file url's for symbolic linking * on compute site. */ public void addStageInXFERNodes( Job job, Collection files, Collection symlinkFiles ){ addStageInXFERNodes( job, files, Refiner.STAGE_IN_PREFIX + Refiner.LOCAL_PREFIX, this.mTXStageInImplementation); addStageInXFERNodes( job, symlinkFiles, Refiner.STAGE_IN_PREFIX + Refiner.REMOTE_PREFIX, this.mTXSymbolicLinkImplementation ); } /** * Adds the stage in transfer nodes which transfer the input files for a job, * from the location returned from the replica catalog to the job's execution * pool. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param prefix the prefix to be used while constructing the transfer jobname. * @param implementation the transfer implementation to use * */ public void addStageInXFERNodes( Job job, Collection files, String prefix, Implementation implementation ){ String site = prefix.endsWith( Refiner.LOCAL_PREFIX ) ? 
"local": job.getSiteHandle(); String jobName = job.getName(); String pool = job.getSiteHandle(); int counter = 0; String newJobName = prefix + jobName + "_" + counter; String key = null; String msg = "Adding stagein transfer nodes for job " + jobName; String par = null; Collection stagedFiles = new ArrayList(1); //the job class is always stage in , as we dont want //later modules to treat symlink jobs different from stagein jobs int jobClass = Job.STAGE_IN_JOB; //to prevent duplicate dependencies java.util.HashSet tempSet = new java.util.HashSet(); int staged = 0; for (Iterator it = files.iterator();it.hasNext();) { FileTransfer ft = (FileTransfer) it.next(); String lfn = ft.getLFN(); //get the key for this lfn and pool //if the key already in the table //then remove the entry from //the Vector and add a dependency //in the graph key = this.constructFileKey(lfn, pool); par = (String) mFileTable.get(key); //System.out.println("lfn " + lfn + " par " + par); if (par != null) { it.remove(); //check if tempSet does not contain the parent //fix for sonal's bug if (tempSet.contains(par)) { mLogMsg = "IGNORING TO ADD rc pull relation from rc tx node: " + par + " -> " + jobName + " for transferring file " + lfn + " to pool " + pool; mLogger.log(mLogMsg,LogManager.DEBUG_MESSAGE_LEVEL); } else { mLogMsg = /*"Adding relation " + par + " -> " + jobName +*/ " For transferring file " + lfn; mLogger.log(mLogMsg, LogManager.DEBUG_MESSAGE_LEVEL); addRelation(par,jobName,pool,false); tempSet.add(par); } } else { if(ft.isTransferringExecutableFile()){ //add to staged files for adding of //set up job. stagedFiles.add(ft); //the staged execution file should be having the setup //job as parent if it does not preserve x bit if( implementation.doesPreserveXBit() ){ mFileTable.put(key,newJobName); } else{ mFileTable.put(key, implementation.getSetXBitJobName(jobName,staged++)); } } else{ //make a new entry into the table mFileTable.put(key, newJobName); } //add the newJobName to the tempSet so that even //if the job has duplicate input files only one instance //of transfer is scheduled. This came up during collapsing //June 15th, 2004 tempSet.add(newJobName); } } if (!files.isEmpty()) { mLogger.log(msg,LogManager.DEBUG_MESSAGE_LEVEL); msg = "Adding new stagein transfer node named " + newJobName; mLogger.log(msg,LogManager.DEBUG_MESSAGE_LEVEL); //add a direct dependency between compute job //and stagein job only if there is no //executables being staged if(stagedFiles.isEmpty()){ //add the direct relation addRelation(newJobName, jobName, pool, true); Job siJob = implementation.createTransferJob( job, site, files, null, newJobName, jobClass ); addJob( siJob ); //record the action in the provenance store. logRefinerAction( job, siJob, files , "stage-in" ); } else{ //the dependency to stage in job is added via the //the setup job that does the chmod Job siJob = implementation.createTransferJob( job, site, files, stagedFiles, newJobName, jobClass ); addJob( siJob ); //record the action in the provenance store. logRefinerAction( job, siJob, files , "stage-in" ); } } } /** * Adds the inter pool transfer nodes that are required for transferring * the output files of the parents to the jobs execution site. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * * @param localTransfer boolean indicating that associated transfer job will run * on local site. 
*/ public void addInterSiteTXNodes(Job job, Collection files, boolean localTransfer ){ String jobName = job.getName(); int counter = 0; StringBuffer name = new StringBuffer(); name.append( Refiner.INTER_POOL_PREFIX ).append( localTransfer ? Refiner.LOCAL_PREFIX : Refiner.REMOTE_PREFIX ). append( jobName ).append( "_" ).append( counter ); String newJobName = name.toString(); String msg = "Adding inter pool nodes for job " + jobName; String prevParent = null; String lfn = null; String key = null; String par = null; String pool = job.getSiteHandle(); boolean toAdd = true; //to prevent duplicate dependencies java.util.HashSet tempSet = new java.util.HashSet(); String site = localTransfer ? "local" : pool; //node construction only if there is //a file to transfer if (!files.isEmpty()) { mLogger.log(msg,LogManager.DEBUG_MESSAGE_LEVEL); for(Iterator it = files.iterator();it.hasNext();) { FileTransfer ft = (FileTransfer) it.next(); lfn = ft.getLFN(); //System.out.println("Trying to figure out for lfn " + lfn); //to ensure that duplicate edges //are not added in the graph //between the parent of a node and the //inter tx node that transfers the file //to the node site. //get the key for this lfn and pool //if the key already in the table //then remove the entry from //the Vector and add a dependency //in the graph key = this.constructFileKey(lfn, pool); par = (String) mFileTable.get(key); //System.out.println("\nGot Key :" + key + " Value :" + par ); if (par != null) { //transfer of this file //has already been scheduled //onto the pool it.remove(); //check if tempSet does not contain the parent if (tempSet.contains(par)) { mLogMsg = "IGNORING TO ADD interpool relation 1 from inter tx node: " + par + " -> " + jobName + " for transferring file " + lfn + " to pool " + pool; mLogger.log(mLogMsg,LogManager.DEBUG_MESSAGE_LEVEL); } else { mLogMsg = "Adding interpool relation 1 from inter tx node: " + par + " -> " + jobName + " for transferring file " + lfn + " to pool " + pool; mLogger.log(mLogMsg, LogManager.DEBUG_MESSAGE_LEVEL); addRelation(par, jobName); tempSet.add(par); } } else { //make a new entry into the table mFileTable.put(key, newJobName); //System.out.println("\nPut Key :" + key + " Value :" + newJobName ); //to ensure that duplicate edges //are not added in the graph //between the parent of a node and the //inter tx node that transfers the file //to the node site. if (prevParent == null || !prevParent.equalsIgnoreCase(ft.getJobName())) { mLogMsg = "Adding interpool relation 2" + ft.getJobName() + " -> " + newJobName + " for transferring file " + lfn + " to pool " + pool; mLogger.log(mLogMsg,LogManager.DEBUG_MESSAGE_LEVEL); addRelation(ft.getJobName(), newJobName); } else{ mLogger.log( "NOT ADDED relation "+ ft.getJobName() + " -> " + newJobName, LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Previous parent " + prevParent + " " + ft.getLFN(), LogManager.DEBUG_MESSAGE_LEVEL ); } //we only need to add the relation between a //inter tx node and a node once. 
if (toAdd) { mLogMsg = "Adding interpool relation 3" + newJobName + " -> " + jobName + " for transferring file " + lfn + " to pool " + pool; mLogger.log(mLogMsg,LogManager.DEBUG_MESSAGE_LEVEL); addRelation(newJobName, jobName); tempSet.add(newJobName); toAdd = false; } //moved to the inner loop Karan Aug 26, 2009 //else in some cases relations between compute job //and inter pool job are not added even though they shoud be prevParent = ft.getJobName(); } } //add the new job and construct it's //subinfo only if the vector is not //empty if (!files.isEmpty()) { msg = "Adding new inter pool node named " + newJobName; mLogger.log(msg,LogManager.DEBUG_MESSAGE_LEVEL); //added in make transfer node Job interJob = mTXInterImplementation.createTransferJob( job, site, files, null, newJobName, Job.INTER_POOL_JOB ); addJob( interJob ); this.logRefinerAction( job, interJob, files, "inter-site" ); } } tempSet = null; } /** * Adds the stageout transfer nodes, that stage data to an output site * specified by the user. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param rcb bridge to the Replica Catalog. Used for creating registration * nodes in the workflow. * @param localTransfer boolean indicating that associated transfer job will run * on local site. */ public void addStageOutXFERNodes(Job job, Collection files, ReplicaCatalogBridge rcb, boolean localTransfer ) { this.addStageOutXFERNodes( job, files, rcb, localTransfer, false); } /** * Adds the stageout transfer nodes, that stage data to an output site * specified by the user. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param rcb bridge to the Replica Catalog. Used for creating registration * nodes in the workflow. * @param localTransfer boolean indicating that associated transfer job will run * on local site. * @param deletedLeaf to specify whether the node is being added for * a deleted node by the reduction engine or not. * default: false */ public void addStageOutXFERNodes( Job job, Collection files, ReplicaCatalogBridge rcb, boolean localTransfer, boolean deletedLeaf ){ String jobName = job.getName(); int counter = 0; StringBuffer name = new StringBuffer(); name.append( Refiner.STAGE_OUT_PREFIX ).append( localTransfer ? Refiner.LOCAL_PREFIX : Refiner.REMOTE_PREFIX ). append( jobName ).append( "_" ).append( counter ); String newJobName = name.toString(); String regJob = Refiner.REGISTER_PREFIX + jobName; mLogMsg = "Adding output pool nodes for job " + jobName; //separate the files for transfer //and for registration List txFiles = new ArrayList(); List regFiles = new ArrayList(); for(Iterator it = files.iterator();it.hasNext();){ FileTransfer ft = (FileTransfer) it.next(); if (!ft.getTransientTransferFlag()) { txFiles.add(ft); } if ( mCreateRegistrationJobs && ft.getRegisterFlag() ) { regFiles.add(ft); } } boolean makeTNode = !txFiles.isEmpty(); boolean makeRNode = !regFiles.isEmpty(); String site = localTransfer ? 
"local" : job.getSiteHandle(); if (!files.isEmpty()) { mLogger.log(mLogMsg,LogManager.DEBUG_MESSAGE_LEVEL); mLogMsg = "Adding new output pool node named " + newJobName; mLogger.log(mLogMsg,LogManager.DEBUG_MESSAGE_LEVEL); if (makeTNode) { //added in make transfer node //mDag.addNewJob(newJobName); Job soJob = mTXStageOutImplementation.createTransferJob( job, site, txFiles, null, newJobName, Job.STAGE_OUT_JOB ); addJob( soJob ); if (!deletedLeaf) { addRelation(jobName, newJobName); } if (makeRNode) { addRelation(newJobName, regJob); } //log the refiner action this.logRefinerAction( job, soJob, txFiles, "stage-out" ); } else if (!makeTNode && makeRNode) { addRelation(jobName, regJob); } if (makeRNode) { //call to make the reg subinfo //added in make registration node addJob(createRegistrationJob( regJob, job, regFiles, rcb )); } } } /** * Creates the registration jobs, which registers the materialized files on * the output site in the Replica Catalog. * * @param regJobName The name of the job which registers the files in the * Replica Mechanism. * @param job The job whose output files are to be registered in the * Replica Mechanism. * @param files Collection of FileTransfer objects containing * the information about source and destURL's. * @param rcb bridge to the Replica Catalog. Used for creating registration * nodes in the workflow. * * * @return the registration job. */ protected Job createRegistrationJob(String regJobName, Job job, Collection files, ReplicaCatalogBridge rcb ) { Job regJob = rcb.makeRCRegNode( regJobName, job, files ); //log the registration action for provenance purposes StringBuffer sb = new StringBuffer(); String indent = "\t"; sb.append( indent ); sb.append( " "); sb.append( "\n" ); //traverse through all the files NameValue dest; String newIndent = indent + "\t"; for( Iterator it = files.iterator(); it.hasNext(); ){ FileTransfer ft = (FileTransfer)it.next(); dest = ft.getDestURL(); sb.append( newIndent ); sb.append( "" ); sb.append( "\n" ); sb.append( newIndent ).append( indent ); sb.append( dest.getValue() ); sb.append( "\n" ); sb.append( newIndent ); sb.append( "" ).append( "\n" ); } sb.append( indent ); sb.append( "" ).append( "\n" ); //log the graph relationship String parent = job.getName (); String child = regJob.getName(); sb.append( indent ); sb.append( "" ).append( "\n" ); sb.append( newIndent ); sb.append( "" ).append( "\n" ); sb.append( indent ); sb.append( "" ).append( "\n" ); mXMLStore.add( sb.toString() ); //log the action for creating the relationship assertions try{ mPPS.registrationIntroducedFor( regJob.getName(),job.getName() ); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception while logging relationship assertion for registration", e ); } return regJob; } /** * Signals that the traversal of the workflow is done. It signals to the * Provenace Store, that refinement is complete. */ public void done(){ try{ mPPS.endWorkflowRefinementStep( this ); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception", e ); } } /** * Add a new job to the workflow being refined. * * @param job the job to be added. */ public void addJob(Job job){ mDAG.add(job); } /** * Adds a new relation to the workflow being refiner. * * @param parent the jobname of the parent node of the edge. * @param child the jobname of the child node of the edge. 
*/ public void addRelation(String parent, String child){ mLogger.log("Adding relation " + parent + " -> " + child, LogManager.DEBUG_MESSAGE_LEVEL); mDAG.addNewRelation(parent,child); } /** * Adds a new relation to the workflow. In the case when the parent is a * transfer job that is added, the parentNew should be set only the first * time a relation is added. For subsequent compute jobs that may be * dependent on this, it needs to be set to false. * * @param parent the jobname of the parent node of the edge. * @param child the jobname of the child node of the edge. * @param site the execution pool where the transfer node is to be run. * @param parentNew set to true when the parent node being added is the new * transfer job and the relation is being added for the first time. */ public void addRelation(String parent, String child, String site, boolean parentNew){ mLogger.log("Adding relation " + parent + " -> " + child, LogManager.DEBUG_MESSAGE_LEVEL); mDAG.addNewRelation(parent,child); } /** * Returns a textual description of the transfer mode. * * @return a short textual description */ public String getDescription(){ return this.DESCRIPTION; } /** * Records the refiner action into the Provenance Store as an XML fragment. * * @param computeJob the compute job. * @param txJob the associated transfer job. * @param files list of FileTransfer objects containing file transfers. * @param type the type of transfer job */ protected void logRefinerAction( Job computeJob, Job txJob, Collection files , String type ){ StringBuffer sb = new StringBuffer(); String indent = "\t"; sb.append( indent ); sb.append( "" ); sb.append( "\n" ); //traverse through all the files NameValue source; NameValue dest; String newIndent = indent + "\t"; for( Iterator it = files.iterator(); it.hasNext(); ){ FileTransfer ft = (FileTransfer)it.next(); source = ft.getSourceURL(); dest = ft.getDestURL(); sb.append( newIndent ); sb.append( "" ); sb.append( "\n" ); sb.append( newIndent ); sb.append( "" ); sb.append( "\n" ); } sb.append( indent ); sb.append( "" ); sb.append( "\n" ); //log the graph relationship String parent = ( txJob.getJobType() == Job.STAGE_IN_JOB )? txJob.getName(): computeJob.getName(); String child = ( txJob.getJobType() == Job.STAGE_IN_JOB )? computeJob.getName(): txJob.getName(); sb.append( indent ); sb.append( "" ).append( "\n" ); sb.append( newIndent ); sb.append( "" ).append( "\n" ); sb.append( indent ); sb.append( "" ).append( "\n" ); //log the action for creating the relationship assertions try{ List stagingNodes = new java.util.ArrayList(1); stagingNodes.add(txJob.getName()); mPPS.stagingIntroducedFor(stagingNodes, computeJob.getName()); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception while logging relationship assertion for staging ", e ); } mXMLStore.add( sb.toString() ); } /** * Appends an xml attribute to the xml feed. * * @param xmlFeed the xmlFeed to which xml is being written * @param key the attribute key * @param value the attribute value */ protected void appendAttribute( StringBuffer xmlFeed, String key, String value ){ xmlFeed.append( key ).append( "=" ).append( "\"" ).append( value ). append( "\" " ); } /** * Constructs the key for an entry to the file table. The key returned * is lfn:siteHandle * * @param lfn the logical filename of the file that has to be * transferred. * @param siteHandle the name of the site to which the file is being * transferred. * * @return the key for the entry to be made in the filetable.
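* <p>
* For example (illustrative values), constructFileKey( "f.b2", "isi_viz" )
* returns the key "f.b2:isi_viz".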
*/ protected String constructFileKey(String lfn, String siteHandle) { StringBuffer sb = new StringBuffer(); sb.append(lfn).append(":").append(siteHandle); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/refiner/Chain.java0000644000175000017500000003351111757531137026440 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.refiner; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import java.util.Map; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.ArrayList; import java.util.LinkedList; import edu.isi.pegasus.planner.classes.PegasusBag; /** * This transfer refiner builds upon the Default Refiner. * The default Refiner allows the transfer of multiple files in a single * condor job. However, it adds the stage in transfer nodes in parallel, leading * to multiple invocations of globus-url-copy at remote execution pools * while running huge workflows. * This refiner tries to circumvent this problem by chaining up the stagein jobs * instead of scheduling them in parallel. This works best only when the top level * of the workflow requires stage in jobs. The correct way is that the traversal * needs to be done breadth first in the TransferEngine.java. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 4720 $ */ public class Chain extends Default { /** * The default bundling factor that identifies the number of transfer jobs * that are being created per execution pool for the workflow. */ public static final String DEFAULT_BUNDLE_FACTOR = "1"; /** * The handle to the Site Catalog. It is instantiated in this class. */ //protected PoolInfoProvider mSCHandle; protected SiteStore mSiteStore; /** * The map containing the stage in bundle values indexed by the name of the * site. If the bundle value is not specified, then null is stored. */ private Map mSIBundleMap; /** * A map indexed by execution sites. Each value is a SiteTransfer object, * that contains the bundles of stagein transfer jobs. * * @see TransferChain */ private Map mStageInMap; /** * A short description of the transfer refinement. */ public static final String DESCRIPTION = "Chain Mode (the stage in jobs being chained together in bundles)"; /** * The overloaded constructor. * * @param dag the workflow to which transfer nodes need to be added. * @param bag the bag of initialization objects * */ public Chain( ADag dag, PegasusBag bag ){ super( dag, bag ); //specifying initial capacity.
//adding one to account for local pool mStageInMap = new HashMap( mPOptions.getExecutionSites().size() + 1); mSIBundleMap= new HashMap(); //load the site catalog mSiteStore = bag.getHandleToSiteStore(); } /** * Adds a new relation to the workflow. In the case when the parent is a * transfer job that is added, the parentNew should be set only the first * time a relation is added. For subsequent compute jobs that may be * dependent on this, it needs to be set to false. * * @param parent the jobname of the parent node of the edge. * @param child the jobname of the child node of the edge. * @param site the execution site where the transfer node is to be run. * @param parentNew set to true when the parent node being added is the new * transfer job and the relation is being added for the first time. */ public void addRelation(String parent, String child, String site, boolean parentNew){ addRelation(parent,child); // mDAG.addNewRelation(parent,child); if(parentNew){ //a new transfer job is being added //figure out the correct bundle to //put in List l = null; if(mStageInMap.containsKey(site)){ //get the SiteTransfer for the site SiteTransfer old = (SiteTransfer)mStageInMap.get(site); //put the parent in the appropriate bundle //and get the pointer to the last element in //the chain before the parent is added. String last = old.addTransfer(parent); if(last != null){ //the parent is now the last element in the chain //continue the chain forward //adding the last link in the chain this.addRelation(last, parent,site, false); } } else{ //create a new SiteTransfer for the job //determine the bundle for the site int bundle; if (mSIBundleMap.containsKey(site)){ bundle = ((Integer)mSIBundleMap.get(site)).intValue(); } else{ bundle = getSiteBundleValue(site,Pegasus.CHAIN_STAGE_IN_KEY); //put the value into the map mSIBundleMap.put(site,new Integer(bundle)); } SiteTransfer siteTX = new SiteTransfer(site,bundle); siteTX.addTransfer(parent); mStageInMap.put(site,siteTX); } } } /** * Determines the bundle factor for a particular site on the basis of the * key associated with the underlying transfer transformation in the * transformation catalog. If none is specified in the transformation catalog, then * one is picked up from the site catalog. If the key is not found in the * site catalog either, then the global default is returned. * * @param site the site at which the transfer job is being run. * @param key the bundle key whose value needs to be searched. * * @return the bundle factor. * * @see #DEFAULT_BUNDLE_FACTOR */ public int getSiteBundleValue(String site, String key){ String value = this.DEFAULT_BUNDLE_FACTOR; //construct a pseudo transfer job object //and populate the profiles in it. Job sub = new Job(); //assimilate the profile information from the //site catalog into the job. sub.updateProfiles( mSiteStore.lookup(site).getProfiles() ); //this should be parameterised Karan Dec 20,2005 TransformationCatalogEntry entry = mTXStageInImplementation.getTransformationCatalogEntry(site, Job.STAGE_IN_JOB ); //assimilate the profile information from transformation catalog if(entry!= null){ sub.updateProfiles(entry); } value = (sub.vdsNS.containsKey(key))? sub.vdsNS.getStringValue(key): value; return Integer.parseInt(value); } /** * Prints out the bundles and chains that have been constructed.
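* <p>
* For a hypothetical site "isi" with a chain bundle value of 2 and four
* stagein jobs, the logged chains would look something like:
* <pre>
*   Site isi
*   1 :si_j1->si_j3
*   2 :si_j2->si_j4
* </pre>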
*/ public void done(){ //print out all the Site transfers that you have mLogger.log("Chains of stagein jobs per sites are " , LogManager.DEBUG_MESSAGE_LEVEL); for(Iterator it = mStageInMap.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); mLogger.log(entry.getKey() + " " + entry.getValue(), LogManager.DEBUG_MESSAGE_LEVEL); } } /** * Returns a textual description of the transfer mode. * * @return a short textual description */ public String getDescription(){ return this.DESCRIPTION; } /** * A container to manage the transfer jobs that are needed to be done on a * single site. The container maintains the bundles and controls the * distribution of a transfer job amongst the bundles in a round robin manner. * Each bundle itself is actually a chain of transfer jobs. */ private class SiteTransfer{ /** * The maximum number of transfer jobs that are allowed for this * particular site. This should correspond to the bundle factor. */ private int mCapacity; /** * The index of the bundle to which the next transfer for the site would * be added to. */ private int mNext; /** * The site for which these transfers are grouped. */ private String mSite; /** * The list of Chain object. Each bundle is actually a chain * of transfer nodes. */ private List mBundles; /** * The default constructor. */ public SiteTransfer(){ mCapacity = 1; mNext = -1; mSite = null; mBundles = null; } /** * Convenience constructor. * * @param pool the pool name for which transfers are being grouped. * @param bundle the number of logical bundles that are to be created * per site. it directly translates to the number of transfer * jobs that can be running at a particular site */ public SiteTransfer(String pool, int bundle){ mCapacity = bundle; mNext = 0; mSite = pool; mBundles = new ArrayList(bundle); //intialize to null for(int i = 0; i < bundle; i++){ mBundles.add(null); } } /** * Adds a file transfer to the appropriate TransferChain. * The file transfers are added in a round robin manner underneath. * * @param txJobName the name of the transfer job. * * @return the last transfer job in the chain before the current job * was added, null in case the job is the first in the chain */ public String addTransfer(String txJobName){ //hmmm i could alternatively add using the //iterator and move iterator around. //we add the transfer to the chain pointed //by next Object obj = mBundles.get(mNext); TransferChain chain = null; String last = null; if(obj == null){ //on demand add a new chain to the end //is there a scope for gaps?? chain = new TransferChain(); mBundles.set(mNext,chain); } else{ chain = (TransferChain)obj; } //we have the chain to which we want //to add the transfer job. Get the //current last job in the chain before //adding the transfer job to the chain last = chain.getLast(); chain.add(txJobName); //update the next pointer to maintain //round robin status mNext = (mNext < (mCapacity -1))? mNext + 1 : 0; return last; } /** * Returns the textual description of the object. * * @return the textual description. */ public String toString(){ StringBuffer sb = new StringBuffer(32); boolean first = true; sb.append("Site ").append(mSite); int num = 1; for(Iterator it = mBundles.iterator();it.hasNext();num++){ sb.append("\n").append(num).append(" :").append(it.next()); } return sb.toString(); } } /** * A shallow container class, that contains the list of the names of the * transfer jobs and can return the last job in the list. 
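* <p>
* A short sketch of the intended use (job names illustrative):
* <pre>
*   TransferChain chain = new TransferChain();
*   chain.add( "si_j1" );
*   chain.getLast();    // returns "si_j1"
*   chain.add( "si_j2" );
*   chain.toString();   // "si_j1->si_j2"
* </pre>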
*/ private class TransferChain{ /** * The linked list that maintains the chain of names of the transfer * jobs. */ private LinkedList mChain; /** * The default constructor. */ public TransferChain(){ mChain = new LinkedList(); } /** * Adds to the end of the chain. Allows null to be added. * * @param name the name of the transfer job. */ public void add(String name){ mChain.addLast(name); } /** * Returns the last element in the chain. * * @return the last element in the chain, null if the chain is empty */ public String getLast(){ String last = null; try{ last = (String) mChain.getLast(); } catch(java.util.NoSuchElementException e){ } return last; } /** * Returns the textual description of the object. * * @return the textual description. */ public String toString(){ StringBuffer sb = new StringBuffer(32); boolean first = true; for(Iterator it = mChain.iterator();it.hasNext();){ if(first){ first = false; } else{ sb.append("->"); } sb.append(it.next()); } return sb.toString(); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/refiner/Bundle.java0000644000175000017500000013604311757531137026633 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.refiner; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.catalog.classes.Profiles.NAMESPACES; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.transfer.Refiner; import java.util.Collection; import java.util.List; import java.util.ArrayList; import java.util.Vector; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import java.util.Set; import java.util.HashSet; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.refiner.ReplicaCatalogBridge; import edu.isi.pegasus.planner.transfer.Implementation; import java.util.LinkedList; /** * An extension of the default refiner, that allows the user to specify * the number of transfer nodes per execution site for stagein and stageout. * * @author Karan Vahi * @version $Revision: 4642 $ */ public class Bundle extends Default { /** * A short description of the transfer refinement. */ public static final String DESCRIPTION = "Bundle Mode (stagein files distributed amongst bundles)"; /** * The default bundling factor that identifies the number of transfer jobs * that are being created per execution pool for staging in data for * the workflow.
*/ public static final String DEFAULT_LOCAL_STAGE_IN_BUNDLE_FACTOR = "4"; /** * The default bundling factor that identifies the number of transfer jobs * that are being created per execution pool for staging in data for * the workflow. */ public static final String DEFAULT_REMOTE_STAGE_IN_BUNDLE_FACTOR = "4"; /** * The default bundling factor that identifies the number of transfer jobs * that are being created per execution pool for staging out data for * the workflow. */ public static final String DEFAULT_LOCAL_STAGE_OUT_BUNDLE_FACTOR = "4"; /** * The default bundling factor that identifies the number of transfer jobs * that are being created per execution pool for staging out data for * the workflow. */ public static final String DEFAULT_REMOTE_STAGE_OUT_BUNDLE_FACTOR = "4"; /** * The map containing the list of stage in transfer jobs that are being * created for the workflow indexed by the execution poolname. */ private Map mStageInLocalMap; /** * The map containing the list of stage in transfer jobs that are being * created for the workflow indexed by the execution poolname. */ private Map mStageInRemoteMap; /** * The map indexed by compute jobnames that contains the list of stage-in job * names that are being added during the traversal of the workflow. This is * used to construct the relations that need to be added to the workflow, once * the traversal is done. */ private Map mRelationsMap; /** * The BundleValue that evaluates for local stage in jobs. */ protected BundleValue mStageinLocalBundleValue; /** * The BundleValue that evaluates for remote stage-in jobs. */ protected BundleValue mStageInRemoteBundleValue; /** * The BundleValue that evaluates for local stage out jobs. */ protected BundleValue mStageOutLocalBundleValue; /** * The BundleValue that evaluates for remote stage out jobs. */ protected BundleValue mStageOutRemoteBundleValue; /** * The map indexed by staged executable logical name. Each entry is the * name of the corresponding setup job, that changes the XBit on the staged * file. */ protected Map mSetupMap; /** * A map indexed by site name, that contains the pointer to the local stage out * PoolTransfer objects for that site. This is per level of the workflow. */ private Map mStageOutLocalMapPerLevel; /** * A map indexed by site name, that contains the pointer to the remote stage out * PoolTransfer objects for that site. This is per level of the workflow. */ private Map mStageOutRemoteMapPerLevel; /** * The current level of the jobs being traversed. */ private int mCurrentSOLevel; /** * The handle to the replica catalog bridge. */ private ReplicaCatalogBridge mRCB; /** * The job prefix that needs to be applied to the job file basenames. */ protected String mJobPrefix; /** * Pegasus Profiles specified in the properties. */ protected Pegasus mPegasusProfilesInProperties; /** * Handle to the SiteStore. */ protected SiteStore mSiteStore; /** * A boolean indicating whether chmod jobs should be created that set the * xbit in case of executable staging. */ protected boolean mAddNodesForSettingXBit; /** * The overloaded constructor. * * @param dag the workflow to which transfer nodes need to be added.
* @param bag the bag of initialization objects * */ public Bundle( ADag dag, PegasusBag bag ){ super( dag, bag ); //from pegasus release 3.2 onwards xbit jobs are not added //for worker node execution/Pegasus Lite mAddNodesForSettingXBit = !mProps.executeOnWorkerNode(); mStageInLocalMap = new HashMap( mPOptions.getExecutionSites().size()); mStageInRemoteMap = new HashMap( mPOptions.getExecutionSites().size()); mRelationsMap = new HashMap(); mSetupMap = new HashMap(); mCurrentSOLevel = -1; mJobPrefix = mPOptions.getJobnamePrefix(); mSiteStore = bag.getHandleToSiteStore(); mPegasusProfilesInProperties = (Pegasus) mProps.getProfiles( NAMESPACES.pegasus ); initializeBundleValues(); } /** * Initializes the bundle value variables, that are responsible determining * the bundle values. */ protected void initializeBundleValues() { mStageinLocalBundleValue = new BundleValue(); mStageinLocalBundleValue.initialize( Pegasus.BUNDLE_LOCAL_STAGE_IN_KEY, Pegasus.BUNDLE_STAGE_IN_KEY, getDefaultBundleValueFromProperties( Pegasus.BUNDLE_LOCAL_STAGE_IN_KEY, Pegasus.BUNDLE_STAGE_IN_KEY, Bundle.DEFAULT_LOCAL_STAGE_IN_BUNDLE_FACTOR ) ); mStageInRemoteBundleValue = new BundleValue(); mStageInRemoteBundleValue.initialize( Pegasus.BUNDLE_REMOTE_STAGE_IN_KEY, Pegasus.BUNDLE_STAGE_IN_KEY, getDefaultBundleValueFromProperties( Pegasus.BUNDLE_LOCAL_STAGE_IN_KEY, Pegasus.BUNDLE_STAGE_IN_KEY, Bundle.DEFAULT_REMOTE_STAGE_IN_BUNDLE_FACTOR ) ); mStageOutLocalBundleValue = new BundleValue(); mStageOutLocalBundleValue.initialize( Pegasus.BUNDLE_LOCAL_STAGE_OUT_KEY, Pegasus.BUNDLE_STAGE_OUT_KEY, getDefaultBundleValueFromProperties( Pegasus.BUNDLE_LOCAL_STAGE_OUT_KEY, Pegasus.BUNDLE_STAGE_OUT_KEY, Bundle.DEFAULT_LOCAL_STAGE_OUT_BUNDLE_FACTOR )); mStageOutRemoteBundleValue = new BundleValue(); mStageOutRemoteBundleValue.initialize( Pegasus.BUNDLE_REMOTE_STAGE_OUT_KEY, Pegasus.BUNDLE_STAGE_OUT_KEY, getDefaultBundleValueFromProperties( Pegasus.BUNDLE_REMOTE_STAGE_OUT_KEY, Pegasus.BUNDLE_STAGE_OUT_KEY, Bundle.DEFAULT_REMOTE_STAGE_OUT_BUNDLE_FACTOR )); } /** * Returns the default value for the clustering/bundling of jobs to be used. * * The factor is computed by looking up the pegasus profiles in the properties. *
     *    return value of pegasus profile key if it exists,
     *    else return value of pegasus profile defaultKey if it exists, 
     *    else the defaultValue
     * 
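* For example (hypothetical values): if the profile named by key is not set
* in the properties but the one named by defaultKey is set to "2", the method
* returns "2"; if neither is set, the shipped defaultValue (e.g. "4") is returned.
*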
* * @param key the pegasus profile key * @param defaultKey the default pegasus profile key * @param defaultValue the default value. * * @return the value as string. */ protected String getDefaultBundleValueFromProperties( String key, String defaultKey, String defaultValue ){ String result = mPegasusProfilesInProperties.getStringValue( key ); if( result == null ){ //rely on defaultKey value result = mPegasusProfilesInProperties.getStringValue( defaultKey ); if( result == null ){ //none of the keys are mentioned in properties //use the default value result = defaultValue; } } return result; } /** * Adds the stage in transfer nodes which transfer the input files for a job, * from the location returned from the replica catalog to the job's execution * pool. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param symlinkFiles Collection of FileTransfer objects containing * source and destination file url's for symbolic linking * on compute site. */ public void addStageInXFERNodes( Job job, Collection files, Collection symlinkFiles ){ addStageInXFERNodes( job, true, files, Job.STAGE_IN_JOB , this.mStageInLocalMap, this.mStageinLocalBundleValue, this.mTXStageInImplementation ); addStageInXFERNodes( job, false, symlinkFiles, Job.STAGE_IN_JOB, this.mStageInRemoteMap, this.mStageInRemoteBundleValue, this.mTXStageInImplementation ); } /** * Adds the stage in transfer nodes which transfer the input files for a job, * from the location returned from the replica catalog to the job's execution * pool. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param localTransfer boolean indicating whether transfer has to happen on local site. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param type the type of transfer job being created * @param stageInMap Map indexed by site name that gives all the transfers for * that site. * @param bundleValue used to determine the bundling factor to employ for a job. * @param implementation the transfer implementation to use. 
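* <p>
* A sketch of the resulting bookkeeping (hypothetical values): with a bundle
* value of 2 for staging site "isi", the first two files go to stage-in jobs
* 0 and 1, the third wraps around to job 0 again, and mFileTable maps each
* "lfn:isi" key to the transfer job that stages that file.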
*/ public void addStageInXFERNodes( Job job, boolean localTransfer, Collection files, int type, Map stageInMap, BundleValue bundleValue, Implementation implementation ){ String jobName = job.getName(); // instead of site handle now we refer to the staging site handle // String siteHandle = job.getSiteHandle(); String siteHandle = job.getStagingSiteHandle(); String key = null; String par = null; int bundle = -1; //to prevent duplicate dependencies Set tempSet = new HashSet(); int staged = 0; Collection stagedExecutableFiles = new LinkedList(); Collection stageInExecJobs = new LinkedList();//store list of jobs that are transferring the stage file for(Iterator it = files.iterator();it.hasNext();) { FileTransfer ft = (FileTransfer) it.next(); String lfn = ft.getLFN(); //get the key for this lfn and pool //if the key already in the table //then remove the entry from //the Vector and add a dependency //in the graph key = this.constructFileKey(lfn, siteHandle); par = (String) mFileTable.get(key); //System.out.println("lfn " + lfn + " par " + par); if (par != null) { it.remove(); //check if tempSet does not contain the parent //fix for sonal's bug tempSet.add(par); if(ft.isTransferringExecutableFile() && this.mAddNodesForSettingXBit ){ //currently we have only one file to be staged per //compute job . Taking a short cut in determining //the name of setXBit job String xBitJobName = (String)mSetupMap.get(key); if(key == null){ throw new RuntimeException("Internal Pegasus Error while " + "constructing bundled stagein jobs"); } //add relation xbitjob->computejob this.addRelation(xBitJobName,jobName); } } else { //get the name of the transfer job boolean contains = stageInMap.containsKey(siteHandle); //following pieces need rearragnement! if(!contains){ bundle = bundleValue.determine( implementation, job ); } PoolTransfer pt = (contains)? (PoolTransfer)stageInMap.get(siteHandle): new PoolTransfer( siteHandle, localTransfer, bundle); if(!contains){ stageInMap.put(siteHandle,pt); } //add the FT to the appropriate transfer job. String newJobName = pt.addTransfer( ft, type ); if(ft.isTransferringExecutableFile()){ //add both the name of the stagein job and the executable file stageInExecJobs.add( newJobName ); stagedExecutableFiles.add( ft ); mLogger.log("Entered " + key + "->" + implementation.getSetXBitJobName(job.getName(),staged), LogManager.DEBUG_MESSAGE_LEVEL); mSetupMap.put(key, implementation.getSetXBitJobName(job.getName(),staged)); //all executables for a job are chmod with a single node //staged++; } //make a new entry into the table mFileTable.put(key, newJobName); //add the newJobName to the tempSet so that even //if the job has duplicate input files only one instance //of transfer is scheduled. 
This came up during collapsing //June 15th, 2004 tempSet.add(newJobName); } } //if there were any staged files //add the setXBitJobs for them int index = 0; //stageInExecJobs has corresponding list of transfer //jobs that transfer the files if( !stagedExecutableFiles.isEmpty() && mAddNodesForSettingXBit ){ Job xBitJob = implementation.createSetXBitJob( job, stagedExecutableFiles, Job.STAGE_IN_JOB, index); this.addJob( xBitJob ); //add the relation txJob->XBitJob->ComputeJob Set edgesAdded = new HashSet(); for( String txJobName : stageInExecJobs ){ //adding relation txJob->XBitJob if( edgesAdded.contains( txJobName ) ){ //do nothing mLogger.log( "Not adding edge " + txJobName + " -> " + xBitJob.getName(), LogManager.DEBUG_MESSAGE_LEVEL ); } else{ this.addRelation( txJobName, xBitJob.getName(), xBitJob.getSiteHandle(), true); edgesAdded.add( txJobName ); } } this.addRelation( xBitJob.getName(), job.getName() ); } //add the temp set to the relations //relations are added to the workflow in the end. if( mRelationsMap.containsKey( jobName )){ //the map already has some relations for the job //add those to temp set to tempSet.addAll( (Set) mRelationsMap.get( jobName ) ); } mRelationsMap.put(jobName,tempSet); } /** * Adds the stageout transfer nodes, that stage data to an output site * specified by the user. * * @param job Job object corresponding to the node to * which the files are to be transferred to. * @param files Collection of FileTransfer objects containing the * information about source and destURL's. * @param rcb bridge to the Replica Catalog. Used for creating registration * nodes in the workflow. * @param localTransfer whether the transfer should be on local site or not. * @param deletedLeaf to specify whether the node is being added for * a deleted node by the reduction engine or not. * default: false */ public void addStageOutXFERNodes(Job job, Collection files, ReplicaCatalogBridge rcb, boolean localTransfer, boolean deletedLeaf){ //initializing rcb till the change in function signature happens //needs to be passed during refiner initialization mRCB = rcb; //sanity check if( files.isEmpty() ){ return; } String jobName = job.getName(); BundleValue bundleValue = (localTransfer) ? this.mStageOutLocalBundleValue : this.mStageOutRemoteBundleValue; mLogMsg = "Adding stageout nodes for job " + jobName; //separate the files for transfer //and for registration List txFiles = new ArrayList(); List regFiles = new ArrayList(); for (Iterator it = files.iterator(); it.hasNext(); ) { FileTransfer ft = (FileTransfer) it.next(); if (!ft.getTransientTransferFlag()) { txFiles.add(ft); } if ( mCreateRegistrationJobs && ft.getRegisterFlag() ) { regFiles.add(ft); } } boolean makeTNode = !txFiles.isEmpty(); boolean makeRNode = !regFiles.isEmpty(); int level = job.getLevel(); // instead of site handle now we refer to the staging site handle // String site = job.getSiteHandle(); String site = job.getStagingSiteHandle(); int bundle = bundleValue.determine( this.mTXStageOutImplementation, job ); if ( level != mCurrentSOLevel ){ mCurrentSOLevel = level; //we are starting on a new level of the workflow. 
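//illustrative example: while jobs at level 2 are traversed, their stageout
//transfers accumulate in the per-level maps; on seeing the first level 3 job,
//the level 2 PoolTransfers are flushed into actual stageout (and registration)
//jobs by the resetStageOutMaps() call below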
//reinitialize stuff this.resetStageOutMaps(); } TransferContainer soTC = null; if (makeTNode) { //get the appropriate pool transfer object for the site PoolTransfer pt = this.getStageOutPoolTransfer( site, localTransfer, bundle ); //we add all the file transfers to the pool transfer soTC = pt.addTransfer( txFiles, level, Job.STAGE_OUT_JOB ); String soJob = soTC.getTXName(); if (!deletedLeaf) { //need to add a relation between a compute and stage-out //job only if the compute job was not reduced. addRelation( jobName, soJob ); } //moved to the resetStageOut method // if (makeRNode) { // addRelation( soJob, soTC.getRegName() ); // } } else if ( makeRNode ) { //add an empty file transfer //get the appropriate pool transfer object for the site PoolTransfer pt = this.getStageOutPoolTransfer( site, localTransfer, bundle ); //we add all the file transfers to the pool transfer soTC = pt.addTransfer( new Vector(), level, Job.STAGE_OUT_JOB ); //direct link between compute job and registration job addRelation( jobName, soTC.getRegName() ); } if ( makeRNode ) { soTC.addRegistrationFiles( regFiles ); //call to make the reg subinfo //added in make registration node // addJob(createRegistrationJob(regJob, job, regFiles, rcb)); } } /** * Returns the bundle value associated with a compute job as a String. * * @param job * * @return value as String or NULL */ protected String getComputeJobBundleValue( Job job ){ return job.vdsNS.getStringValue( Pegasus.BUNDLE_STAGE_OUT_KEY ); } /** * Signals that the traversal of the workflow is done. At this point the * transfer nodes are actually constructed traversing through the transfer * containers and the stdin of the transfer jobs written. */ public void done( ){ doneStageIn( this.mStageInLocalMap, this.mTXStageInImplementation , Job.STAGE_IN_JOB, true ); doneStageIn( this.mStageInRemoteMap, this.mTXStageInImplementation, Job.STAGE_IN_JOB, false ); //adding relations that tie in the stagin //jobs to the compute jobs. for(Iterator it = mRelationsMap.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); String key = (String)entry.getKey(); mLogger.log("Adding relations for job " + key, LogManager.DEBUG_MESSAGE_LEVEL); for(Iterator pIt = ((Collection)entry.getValue()).iterator(); pIt.hasNext();){ String value = (String)pIt.next(); addRelation( value, key ); } } //reset the stageout map too this.resetStageOutMaps(); } /** * * Signals that the traversal of the workflow is done. At this point the * transfer nodes are actually constructed traversing through the transfer * containers and the stdin of the transfer jobs written. * * @param stageInMap maps site names to PoolTransfer * @param implementation the transfer implementation to use * @param stageInJobType whether a stagein or symlink stagein job * @param localTransfer indicates whether transfer job needs to run on * local site or not. 
*/ public void doneStageIn( Map stageInMap, Implementation implementation, int stageInJobType, boolean localTransfer ){ //traverse through the stagein map and //add transfer nodes per pool String key; String value; PoolTransfer pt; TransferContainer tc; Map.Entry entry; Job job = new Job(); for(Iterator it = stageInMap.entrySet().iterator();it.hasNext();){ entry = (Map.Entry)it.next(); key = (String)entry.getKey(); pt = (PoolTransfer)entry.getValue(); mLogger.log("Adding stage in transfer nodes for pool " + key, LogManager.DEBUG_MESSAGE_LEVEL); for(Iterator pIt = pt.getTransferContainerIterator();pIt.hasNext();){ tc = (TransferContainer)pIt.next(); if(tc == null){ //break out break; } mLogger.log("Adding stagein transfer node " + tc.getTXName(), LogManager.DEBUG_MESSAGE_LEVEL); //added in make transfer node //mDag.addNewJob(tc.getName()); //we just need the execution pool in the job object job.executionPool = key; job.setStagingSiteHandle( key ); String site = localTransfer ? "local" : job.getSiteHandle(); Job tJob = implementation.createTransferJob( job, site, tc.getFileTransfers(), null, tc.getTXName(), stageInJobType ); //always set the type to stagein after it is created tJob.setJobType( stageInJobType ); addJob( tJob ); } } } /** * Returns a textual description of the transfer mode. * * @return a short textual description */ public String getDescription(){ return Bundle.DESCRIPTION; } /** * Returns the appropriate pool transfer for a particular site. * * @param site the site for which the PT is reqd. * @param localTransfer whethe the associated transfer job runs on local site or remote. * @param num the number of Stageout jobs required for that Pool. * * @return the PoolTransfer */ public PoolTransfer getStageOutPoolTransfer( String site, boolean localTransfer, int num ){ //pick up appropriate map Map map = localTransfer ? this.mStageOutLocalMapPerLevel : this.mStageOutRemoteMapPerLevel; if ( map.containsKey( site ) ){ return ( PoolTransfer ) map.get( site ); } else{ PoolTransfer pt = new PoolTransfer( site, localTransfer, num ); map.put( site, pt ); return pt; } } /** * Resets the local and remote stage out maps. */ protected void resetStageOutMaps( ){ mStageOutLocalMapPerLevel = this.resetStageOutMap( mStageOutLocalMapPerLevel, true ); mStageOutRemoteMapPerLevel = this.resetStageOutMap( mStageOutRemoteMapPerLevel, false ); } /** * Resets a single map * * @param map the map to be reset * @param localTransfer whether the transfer jobs need to run on local site or not * * @return the reset map */ protected Map resetStageOutMap( Map map, boolean localTransfer ){ if ( map != null ){ //before flushing add the stageout nodes to the workflow Job job = new Job(); for( Iterator it = map.values().iterator(); it.hasNext(); ){ PoolTransfer pt = ( PoolTransfer ) it.next(); job.setSiteHandle( pt.mPool ); job.setStagingSiteHandle( pt.mPool ); //site is where transfer job runs String site = localTransfer ? 
"local" : job.getSiteHandle(); mLogger.log( "Adding jobs for staging out data from site " + pt.mPool, LogManager.DEBUG_MESSAGE_LEVEL ); //traverse through all the TransferContainers for( Iterator tcIt = pt.getTransferContainerIterator(); tcIt.hasNext(); ){ TransferContainer tc = ( TransferContainer ) tcIt.next(); if(tc == null){ //break out break; } //add the stageout job if required Job soJob = null; if( !tc.getFileTransfers().isEmpty() ){ mLogger.log( "Adding stage-out job " + tc.getTXName(), LogManager.DEBUG_MESSAGE_LEVEL); soJob = mTXStageOutImplementation.createTransferJob( job, site, tc.getFileTransfers(), null, tc.getTXName(), Job.STAGE_OUT_JOB ); addJob( soJob ); } //add registration job if required if( !tc.getRegistrationFiles().isEmpty() ){ //add relation to stage out if the stageout job was created if( soJob != null ){ //make the stageout job the super node for the registration job job.setName( soJob.getName() ); addRelation( tc.getTXName(), tc.getRegName() ); } mLogger.log( "Adding registration job " + tc.getRegName(), LogManager.DEBUG_MESSAGE_LEVEL ); addJob(createRegistrationJob( tc.getRegName(), job, tc.getRegistrationFiles(), mRCB)); } } } } map = new HashMap(); return map; } /** * A container class for storing the name of the transfer job, the list of * file transfers that the job is responsible for. */ protected class TransferContainer{ /** * The name of the transfer job. */ private String mTXName; /** * The name of the registration job. */ private String mRegName; /** * The collection of FileTransfer objects containing the * transfers the job is responsible for. */ private Collection mFileTXList; /** * The collection of FileTransfer objects containing the * files that need to be registered. */ private Collection mRegFiles; /** * The type of the transfers the job is responsible for. */ private int mTransferType; /** * The default constructor. */ public TransferContainer(){ mTXName = null; mRegName = null; mFileTXList = new Vector(); mRegFiles = new Vector(); mTransferType = Job.STAGE_IN_JOB; } /** * Sets the name of the transfer job. * * @param name the name of the transfer job. */ public void setTXName(String name){ mTXName = name; } /** * Sets the name of the registration job. * * @param name the name of the transfer job. */ public void setRegName(String name){ mRegName = name; } /** * Adds a file transfer to the underlying collection. * * @param transfer the FileTransfer containing the * information about a single transfer. */ public void addTransfer(FileTransfer transfer){ mFileTXList.add(transfer); } /** * Adds a file transfer to the underlying collection. * * @param files collection of FileTransfer. */ public void addTransfer( Collection files ){ mFileTXList.addAll( files ); } /** * Adds a Collection of File transfer to the underlying collection of * files to be registered. * * @param files collection of FileTransfer. */ public void addRegistrationFiles( Collection files ){ mRegFiles.addAll( files ); } /** * Sets the transfer type for the transfers associated. * * @param type type of transfer. */ public void setTransferType(int type){ mTransferType = type; } /** * Returns the name of the transfer job. * * @return name of the transfer job. */ public String getTXName(){ return mTXName; } /** * Returns the name of the registration job. * * @return name of the registration job. */ public String getRegName(){ return mRegName; } /** * Returns the collection of transfers associated with this transfer * container. * * @return a collection of FileTransfer objects. 
*/ public Collection getFileTransfers(){ return mFileTXList; } /** * Returns the collection of registration files associated with this transfer * container. * * @return a collection of FileTransfer objects. */ public Collection getRegistrationFiles(){ return mRegFiles; } } /** * A container to store the transfers that need to be done on a single pool. * The transfers are stored over a collection of Transfer Containers with * each transfer container responsible for one transfer job. */ protected class PoolTransfer{ /** * The maximum number of transfer jobs that are allowed for this * particular pool. */ private int mCapacity; /** * The index of the job to which the next transfer for the pool would * be scheduled. */ private int mNext; /** * The remote pool for which these transfers are grouped. */ private String mPool; /** * The list of TransferContainer that correspond to * each transfer job. */ private List mTXContainers; /** * boolean indicating whether the transfer job needs to run on local site */ private boolean mLocalTransfer; /** * The default constructor. */ public PoolTransfer(){ mCapacity = 0; mNext = -1; mPool = null; mTXContainers = null; mLocalTransfer = true; } /** * Convenience constructor. * * @param pool the pool name for which transfers are being grouped. * @param localTransfer whether the transfers need to be run on local site * @param number the number of transfer jobs that are going to be created * for the pool. */ public PoolTransfer(String pool, boolean localTransfer, int number){ mLocalTransfer = localTransfer; mCapacity = number; mNext = 0; mPool = pool; mTXContainers = new ArrayList(number); //intialize to null for(int i = 0; i < number; i++){ mTXContainers.add(null); } } /** * Adds a a collection of FileTransfer objects to the * appropriate TransferContainer. The collection is added to a single * TransferContainer, and the pointer is then updated to the next container. * * @param files the collection FileTransfer to be added. * @param level the level of the workflow * @param type the type of transfer job * * @return the Transfer Container to which the job file transfers were added. */ public TransferContainer addTransfer( Collection files, int level, int type ){ //we add the transfer to the container pointed //by next Object obj = mTXContainers.get(mNext); TransferContainer tc = null; if(obj == null){ //on demand add a new transfer container to the end //is there a scope for gaps?? tc = new TransferContainer(); tc.setTXName( getTXJobName( mNext, type, level ) ); //add the name for the registration job that maybe associated tc.setRegName( getRegJobName( mNext, level) ); mTXContainers.set(mNext,tc); } else{ tc = (TransferContainer)obj; } tc.addTransfer( files ); //update the next pointer to maintain //round robin status mNext = (mNext < (mCapacity -1))? mNext + 1 : 0; return tc; } /** * Adds a file transfer to the appropriate TransferContainer. * The file transfers are added in a round robin manner underneath. * * @param transfer the FileTransfer containing the * information about a single transfer. * @param type the type of transfer job * * @return the name of the transfer job to which the transfer is added. */ public String addTransfer(FileTransfer transfer, int type ){ //we add the transfer to the container pointed //by next Object obj = mTXContainers.get(mNext); TransferContainer tc = null; if(obj == null){ //on demand add a new transfer container to the end //is there a scope for gaps?? 
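//illustrative example: with a hypothetical capacity of 4, successive transfers
//land in containers 0,1,2,3,0,1,... so at most four transfer jobs are created
//for this pool (for stageout, per level of the workflow)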
tc = new TransferContainer(); tc.setTXName( getTXJobName( mNext, type ) ); mTXContainers.set(mNext,tc); } else{ tc = (TransferContainer)obj; } tc.addTransfer(transfer); //update the next pointer to maintain //round robin status mNext = (mNext < (mCapacity -1))? mNext + 1 : 0; return tc.getTXName(); } /** * Returns the iterator to the list of transfer containers. * * @return the iterator. */ public Iterator getTransferContainerIterator(){ return mTXContainers.iterator(); } /** * Generates the name of the transfer job, that is unique for the given * workflow. * * @param counter the index for the registration job. * @param level the level of the workflow. * * @return the name of the transfer job. */ private String getRegJobName( int counter, int level ){ StringBuffer sb = new StringBuffer(); sb.append( Refiner.REGISTER_PREFIX ); //append the job prefix if specified in options at runtime if ( mJobPrefix != null ) { sb.append( mJobPrefix ); } sb.append( mPool ).append( "_" ).append( level ). append( "_" ).append( counter ); return sb.toString(); } /** * Return the pool for which the transfers are grouped * * @return name of pool. */ public String getPoolName(){ return this.mPool; } /** * Generates the name of the transfer job, that is unique for the given * workflow. * * @param counter the index for the transfer job. * @param type the type of transfer job. * @param level the level of the workflow. * * @return the name of the transfer job. */ private String getTXJobName( int counter, int type, int level ){ StringBuffer sb = new StringBuffer(); switch ( type ){ case Job.STAGE_IN_JOB: sb.append( Refiner.STAGE_IN_PREFIX ); break; case Job.STAGE_OUT_JOB: sb.append( Refiner.STAGE_OUT_PREFIX ); break; default: throw new RuntimeException( "Wrong type specified " + type ); } if( mLocalTransfer ) { sb.append( Refiner.LOCAL_PREFIX ); } else{ sb.append( Refiner.REMOTE_PREFIX ); } //append the job prefix if specified in options at runtime if ( mJobPrefix != null ) { sb.append( mJobPrefix ); } sb.append( mPool ).append( "_" ).append( level ). append( "_" ).append( counter ); return sb.toString(); } /** * Generates the name of the transfer job, that is unique for the given * workflow. * * @param counter the index for the transfer job. * @param type the type of transfer job. * * @return the name of the transfer job. */ private String getTXJobName( int counter, int type ){ StringBuffer sb = new StringBuffer(); switch ( type ){ case Job.STAGE_IN_JOB: sb.append( Refiner.STAGE_IN_PREFIX ); break; case Job.STAGE_OUT_JOB: sb.append( Refiner.STAGE_OUT_PREFIX ); break; default: throw new RuntimeException( "Wrong type specified " + type ); } if( mLocalTransfer ) { sb.append( Refiner.LOCAL_PREFIX ); } else{ sb.append( Refiner.REMOTE_PREFIX ); } //append the job prefix if specified in options at runtime if ( mJobPrefix != null ) { sb.append( mJobPrefix ); } sb.append(mPool).append("_").append(counter); return sb.toString(); } } protected class BundleValue { /** * The pegasus profile key to use for lookup */ private String mProfileKey; /** * The default bundle value to use. */ private String mDefaultBundleValue; /** * The Default Pegasus profile key to use for lookup */ private String mDefaultProfileKey; /** * The default constructor. */ public BundleValue(){ } /** * Initializes the implementation * * @param key the Pegasus Profile key to be used for lookup of bundle values. * @param defaultKey the default Profile Key to be used if key is not found. 
* @param defaultValue the default value to be associated if no key is found. */ public void initialize( String key, String defaultKey, String defaultValue ){ mProfileKey = key; mDefaultProfileKey = defaultKey; mDefaultBundleValue = defaultValue; } /** * Determines the bundle factor for a particular site on the basis of the * stage in bundle value associated with the underlying transfer * transformation in the transformation catalog. If the key is not found, * then the default value is returned. In case of the default value being * null the global default is returned. * * The value is stored internally to ensure that a subsequent * call to get(String site) returns the value determined. * * @param implementation the transfer implementation being used * @param job the compute job for which the bundle factor needs to * be determined. * * @return the bundle factor. */ public int determine( Implementation implementation, Job job ){ String site = job.getStagingSiteHandle(); //look up the value in SiteCatalogEntry for the store SiteCatalogEntry entry = Bundle.this.mSiteStore.lookup( site ); //sanity check if( entry == null ){ return Integer.parseInt( mDefaultBundleValue ); } //check for Pegasus Profile mProfileKey in the site entry Pegasus profiles = (Pegasus) entry.getProfiles().get( NAMESPACES.pegasus ); String value = profiles.getStringValue( mProfileKey ); if( value == null ){ //try to look up value of default key value = profiles.getStringValue( mDefaultProfileKey ); } //if value is still null, rely on the default bundle value value = ( value == null )? this.mDefaultBundleValue: value; return Integer.parseInt(value); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/transfer/refiner/RefinerFactory.java0000644000175000017500000001203011757531137030331 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.transfer.refiner; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.transfer.Refiner; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The factory class that loads an appropriate Transfer Refiner class, * as specified by the properties. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class RefinerFactory { /** * The default package where the implementations reside, which this factory * loads. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.transfer.refiner"; /** * The default refiner implementation that is picked up. */ public static final String DEFAULT_REFINER_IMPLEMENTATION = "Bundle"; /** * Loads the implementing class corresponding to the value specified in the * properties. If the package name is not specified with the class, then the * class is assumed to be in the DEFAULT_PACKAGE. The properties object passed * should not be null. *
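* <p>
* Typical invocation from planner code (a sketch; the dag and bag are assumed
* to have been initialized by the caller):
* <pre>
*   Refiner refiner = RefinerFactory.loadInstance( dag, bag );
* </pre>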

* In addition it ends up loading the appropriate Transfer Implementation * that is required by the refiner. * * @param dag the workflow that is being refined. * @param bag the bag of initialization objects * * @return the instance of the class implementing this interface. * * @exception TransferRefinerException that nests any error that * might occur during the instantiation. * * @see #DEFAULT_PACKAGE_NAME */ public static Refiner loadInstance( ADag dag, PegasusBag bag ) throws TransferRefinerFactoryException{ return loadInstance( bag.getPegasusProperties().getTransferRefiner(), bag, dag ); } /** * Loads the implementing class corresponding to the class. If the package * name is not specified with the class, then class is assumed to be * in the DEFAULT_PACKAGE. The properties object passed should not be null. * In addition it ends up loading the appropriate Transfer Implementation * that is required by the refiner. * * @param className the name of the class that implements the mode.It can or * cannot be with the package name. * @param bag the bag of initialization objects * @param dag the workflow that is being refined. * * @return the instance of the class implementing this interface. * * @exception TransferRefinerFactoryException that nests any error that * might occur during the instantiation. * * @see #DEFAULT_PACKAGE_NAME */ public static Refiner loadInstance( String className, PegasusBag bag, ADag dag ) throws TransferRefinerFactoryException{ Refiner refiner = null; try{ //sanity check if ( bag.getPegasusProperties() == null) { throw new RuntimeException("Invalid properties passed"); } if (dag == null) { throw new RuntimeException("Invalid workflow passed"); } //set the refiner to default if required if( className == null ){ className = RefinerFactory.DEFAULT_REFINER_IMPLEMENTATION; } //prepend the package name className = (className.indexOf('.') == -1) ? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className : //load directly className; //try loading the class dynamically DynamicLoader dl = new DynamicLoader(className); Object argList[] = new Object[2]; argList[0] = dag; argList[1] = bag; refiner = (Refiner) dl.instantiate(argList); //we got the refiner try to load the appropriate //transfer implementation also refiner.loadImplementations( bag ); } catch (Exception e){ throw new TransferRefinerFactoryException("Instantiating Transfer Refiner", className, e); } return refiner; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/0000755000175000017500000000000011757531667022056 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/Transformation.java0000644000175000017500000001636311757531137025730 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import java.util.Collections; import java.util.List; import java.util.LinkedList; import edu.isi.pegasus.common.util.XMLWriter; /** * This Object is used to create a complex Transformation. 
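* For illustration, a minimal construction sketch (hypothetical names; e and f
* are assumed to be an Executable and a File created elsewhere):
*
* Transformation t = new Transformation("pegasus", "preprocess", "1.0");
* t.uses(e).uses(f);
*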
* A complex transformation is one that uses other executables and files * @author gmehta * @version $Revision: 3675 $ */ public class Transformation { /** * Namespace of the Transformation */ protected String mNamespace; /** * Name of the transformation */ protected String mName; /** * Version of the transformation */ protected String mVersion; /** * List of executable of files used by the transformation */ protected List mUses; protected List mInvokes; /** * Create a new Transformation object * @param name */ public Transformation(String name) { this("", name, ""); } /** * Copy Constructor * @param t */ public Transformation(Transformation t) { this(t.mNamespace,t.mName,t.mVersion); this.mUses = new LinkedList(t.mUses); this.mInvokes = new LinkedList(t.mInvokes); } /** * Create a new Transformation Object * @param namespace * @param name * @param version */ public Transformation(String namespace, String name, String version) { mNamespace = (namespace == null) ? "" : namespace; mName = (name == null) ? "" : name; mVersion = (version == null) ? "" : version; mUses = new LinkedList(); mInvokes = new LinkedList(); } /** * Get the name of the transformation * @return */ public String getName() { return mName; } /** * Get the namespace of the transformation * @return */ public String getNamespace() { return mNamespace; } /** * Get the version of the transformation * @return */ public String getVersion() { return mVersion; } /** * Return the list of Notification objects * @return List */ public List getInvoke() { return Collections.unmodifiableList(mInvokes); } /** * Add a Notification for this Transformation * @param when * @param what * @return Transformation */ public Transformation addInvoke(Invoke.WHEN when, String what) { Invoke i = new Invoke(when, what); mInvokes.add(i); return this; } /** * Add a Notification for this Transformation * @param invoke * @return Transformation */ public Transformation addInvoke(Invoke invoke) { mInvokes.add(invoke.clone()); return this; } /** * Add a List of Notifications for this Transformation * @param invokes * @return Transformation */ public Transformation addInvokes(List invokes) { for (Invoke invoke: invokes){ this.addInvoke(invoke); } return this; } /** * Set the file or executable being used by the transformation * @param fileorexecutable * @return */ public Transformation uses(CatalogType fileorexecutable) { mUses.add(fileorexecutable); return this; } /** * Set the List of files and/or executables being used by the transformation * * @param filesorexecutables * @return */ public Transformation uses(List filesorexecutables) { mUses.addAll(filesorexecutables); return this; } /** * Get the List of files and/or executables being used by the transformation * @return */ public List getUses() { return Collections.unmodifiableList(mUses); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Transformation other = (Transformation) obj; if ((this.mNamespace == null) ? (other.mNamespace != null) : !this.mNamespace.equals(other.mNamespace)) { return false; } if ((this.mName == null) ? (other.mName != null) : !this.mName.equals(other.mName)) { return false; } if ((this.mVersion == null) ? (other.mVersion != null) : !this.mVersion.equals(other.mVersion)) { return false; } return true; } @Override public int hashCode() { int hash = 7; hash = 47 * hash + (this.mNamespace != null ? this.mNamespace.hashCode() : 0); hash = 47 * hash + (this.mName != null ? 
this.mName.hashCode() : 0); hash = 47 * hash + (this.mVersion != null ? this.mVersion.hashCode() : 0); return hash; } @Override public String toString(){ return mNamespace+"::"+mName+":"+mVersion; } public void toXML(XMLWriter writer) { toXML(writer, 0); } public void toXML(XMLWriter writer, int indent) { if (!mUses.isEmpty()) { writer.startElement("transformation", indent); if (mNamespace != null && !mNamespace.isEmpty()) { writer.writeAttribute("namespace", mNamespace); } writer.writeAttribute("name", mName); if (mVersion != null && !mVersion.isEmpty()) { writer.writeAttribute("version", mVersion); } for (CatalogType c : mUses) { if (c.getClass() == File.class) { File f = (File) c; writer.startElement("uses", indent + 1); writer.writeAttribute("name", f.getName()); writer.endElement(); } else if (c.getClass() == Executable.class) { Executable e = (Executable) c; writer.startElement("uses", indent + 1); if (e.mNamespace != null && !e.mNamespace.isEmpty()) { writer.writeAttribute("namespace", e.mNamespace); } writer.writeAttribute("name", e.mName); if (e.mVersion != null && !e.mVersion.isEmpty()) { writer.writeAttribute("version", e.mVersion); } writer.writeAttribute("executable", "true"); writer.endElement(); } } for (Invoke i : mInvokes) { i.toXML(writer, indent + 1); } writer.endElement(indent); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/Executable.java0000644000175000017500000003017511757531137025000 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import java.util.List; import java.util.LinkedList; import java.util.Collections; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.XMLWriter; /** * The Transformation Catalog object the represent the entries in the DAX transformation section. * @author gmehta * @version $Revision: 4301 $ */ public class Executable extends CatalogType { /** * ARCH Types */ public static enum ARCH { X86, x86, X86_64, x86_64, PPC, ppc, PPC_64, ppc_64, IA64, ia64, SPARCV7, sparcv7, SPARCV9, sparcv9 } /** * OS Types */ public static enum OS { LINUX, linux, SUNOS, sunos, AIX, aix, MACOSX, macosx, WINDOWS, windows } /** * Namespace of the executable */ protected String mNamespace; /** * Name of the executable */ protected String mName; /** * Version of the executable */ protected String mVersion; /** * Architecture the executable is compiled for */ protected ARCH mArch; /** * Os the executable is compiled for */ protected OS mOs; /** * Os release the executable is compiled for */ protected String mOsRelease; /** * OS version the executable is compiled for */ protected String mOsVersion; /** * Glibc the executable is compiled for */ protected String mGlibc; /** * Flag to mark if the executable is installed or can be staged. 
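*
* A usage sketch (hypothetical names; unsetInstalled() marks the executable as
* stageable, which toXML renders as installed="false"):
*
* Executable exe = new Executable("pegasus", "preprocess", "1.0");
* exe.setArchitecture(Executable.ARCH.X86_64).setOS(Executable.OS.LINUX);
* exe.unsetInstalled();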
*/ protected boolean mInstalled = true; /** * List of Notification objects */ protected List mInvokes; /** * Create a new executable * @param name */ public Executable(String name) { this("", name, ""); } /** * Copy Constructor * @param e */ public Executable(Executable e) { super(e); this.mNamespace = e.mNamespace; this.mName = e.mName; this.mVersion = e.mVersion; this.mArch = e.mArch; this.mOs = e.mOs; this.mOsRelease = e.mOsRelease; this.mOsVersion = e.mOsVersion; this.mGlibc = e.mGlibc; this.mInstalled=e.mInstalled; this.mInvokes = new LinkedList(e.mInvokes); } /** * Create a new Executable * @param namespace * @param name * @param version */ public Executable(String namespace, String name, String version) { super(); mNamespace = (namespace == null) ? "" : namespace; mName = (name == null) ? "" : name; mVersion = (version == null) ? "" : version; mInvokes=new LinkedList(); } /** * Get the name of the executable * @return */ public String getName() { return mName; } /** * Get the namespace of the executable * @return */ public String getNamespace() { return mNamespace; } /** * Get the version of the executablle * @return */ public String getVersion() { return mVersion; } /** * Return the list of Notification objects * @return List */ public List getInvoke() { return Collections.unmodifiableList(mInvokes); } /** * Add a Notification for this Executable * same as addNotification * @param when * @param what * @return Executable */ public Executable addInvoke(Invoke.WHEN when, String what) { Invoke i = new Invoke(when, what); mInvokes.add(i); return this; } /** * Add a Notification for this Executable * same as addInvoke * @param when * @param what * @return Executable */ public Executable addNotification(Invoke.WHEN when, String what) { return addInvoke(when,what); } /** * Add a Notification for this Executable * Same as add Notification * @param invoke * @return Executable */ public Executable addInvoke(Invoke invoke) { mInvokes.add(invoke.clone()); return this; } /** * Add a Notification for this Executable * Same as addInvoke * @param invoke * @return Executable */ public Executable addNotification(Invoke invoke) { return addInvoke(invoke); } /** * Add a List of Notifications for this Executable * Same as addNotifications * @param invokes * @return Executable */ public Executable addInvokes(List invokes) { for (Invoke invoke: invokes){ this.addInvoke(invoke); } return this; } /** * Add a List of Notifications for this Executable. * Same as addInvokes * @param invokes * @return Executable */ public Executable addNotifications(List invokes) { return addInvokes(invokes); } /** * Set the architecture the executable is compiled for * @param arch * @return Executable */ public Executable setArchitecture(ARCH arch) { mArch = arch; return this; } /** * Set the OS the executable is compiled for * @param os * @return */ public Executable setOS(OS os) { mOs = os; return this; } /** * Set the osrelease the executable is compiled for * @param osrelease * @return */ public Executable setOSRelease(String osrelease) { mOsRelease = osrelease; return this; } /** * Set the osversion the executable is compiled for * @param osversion * @return */ public Executable setOSVersion(String osversion) { mOsVersion = osversion; return this; } /** * Set the glibc this executable is compiled for * @param glibc * @return */ public Executable setGlibc(String glibc) { mGlibc = glibc; return this; } /** * set the installed flag on the executable. 
Default is installed * @return */ public Executable setInstalled() { mInstalled = true; return this; } /** * Unset the installed flag on the executable. Default is installed. * @return */ public Executable unsetInstalled() { mInstalled = false; return this; } /** * Set the installed flag on the executable. Default is installed */ public Executable setInstalled(boolean installed) { mInstalled = installed; return this; } /** * Check if the executable is of type installed. * @return */ public boolean getInstalled() { return mInstalled; } /** * Get the architecture the Executable is compiled for * @return */ public ARCH getArchitecture() { return mArch; } /** * Get the OS the Executable is compiled for * @return */ public OS getOS() { return mOs; } /** * Get the OS release set for this executable. Returns empty string if not set * @return */ public String getOsRelease() { return (mOsRelease == null) ? "" : mOsRelease; } /** * Get the OS version set for this executable. * @return */ public String getOsVersion() { return (mOsVersion == null) ? "" : mOsVersion; } /** * Get the Glibc version if any set for this file. Returns empty string if not set * @return */ public String getGlibc() { return (mGlibc == null) ? "" : mGlibc; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final Executable other = (Executable) obj; if ((this.mNamespace == null) ? (other.mNamespace != null) : !this.mNamespace.equals(other.mNamespace)) { return false; } if ((this.mName == null) ? (other.mName != null) : !this.mName.equals(other.mName)) { return false; } if ((this.mVersion == null) ? (other.mVersion != null) : !this.mVersion.equals(other.mVersion)) { return false; } if (this.mArch != other.mArch) { return false; } if (this.mOs != other.mOs) { return false; } if ((this.mOsRelease == null) ? (other.mOsRelease != null) : !this.mOsRelease.equals(other.mOsRelease)) { return false; } if ((this.mOsVersion == null) ? (other.mOsVersion != null) : !this.mOsVersion.equals(other.mOsVersion)) { return false; } if ((this.mGlibc == null) ? (other.mGlibc != null) : !this.mGlibc.equals(other.mGlibc)) { return false; } if (this.mInstalled != other.mInstalled) { return false; } return true; } @Override public int hashCode() { int hash = 7; hash = 53 * hash + (this.mNamespace != null ? this.mNamespace.hashCode() : 0); hash = 53 * hash + (this.mName != null ? this.mName.hashCode() : 0); hash = 53 * hash + (this.mVersion != null ? this.mVersion.hashCode() : 0); hash = 53 * hash + (this.mArch != null ? this.mArch.hashCode() : 0); hash = 53 * hash + (this.mOs != null ? this.mOs.hashCode() : 0); hash = 53 * hash + (this.mOsRelease != null ? this.mOsRelease.hashCode() : 0); hash = 53 * hash + (this.mOsVersion != null ? this.mOsVersion.hashCode() : 0); hash = 53 * hash + (this.mGlibc != null ? this.mGlibc.hashCode() : 0); hash = 53 * hash + (this.mInstalled ? 1 : 0); return hash; } @Override public String toString(){ return mNamespace+"::"+mName+":"+mVersion; } @Override public void toXML(XMLWriter writer) { toXML(writer, 0); } @Override public void toXML(XMLWriter writer, int indent) { if (mProfiles.isEmpty() && mPFNs.isEmpty() && mMetadata.isEmpty()) { mLogger.log("The executable element for " + mName + " must have atleast 1 profile, 1 pfn or 1 metadata entry. 
Skipping empty executable element", LogManager.WARNING_MESSAGE_LEVEL); } else { writer.startElement("executable", indent); if (mNamespace != null && !mNamespace.isEmpty()) { writer.writeAttribute("namespace", mNamespace); } writer.writeAttribute("name", mName); if (mVersion != null && !mVersion.isEmpty()) { writer.writeAttribute("version", mVersion); } if (mInstalled) { writer.writeAttribute("installed", "true"); } else { writer.writeAttribute("installed", "false"); } if (mArch != null) { writer.writeAttribute("arch", mArch.toString().toLowerCase()); } if (mOs != null) { writer.writeAttribute("os", mOs.toString().toLowerCase()); } if (mOsRelease != null && !mOsRelease.isEmpty()) { writer.writeAttribute("osrelease", mOsRelease); } if (mOsVersion != null && !mOsVersion.isEmpty()) { writer.writeAttribute("osversion", mOsVersion); } if (mGlibc != null && !mGlibc.isEmpty()) { writer.writeAttribute("glibc", mGlibc); } super.toXML(writer, indent); for (Invoke i : mInvokes) { i.toXML(writer, indent+1); } writer.endElement(indent); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/Invoke.java0000644000175000017500000000566211757531137024155 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import edu.isi.pegasus.common.util.XMLWriter; /** * The Notification invoke object for the Dax API * @author gmehta * @version $Revision: 3666 $ */ public class Invoke { /** * WHEN To INVOKE */ public static enum WHEN { never, start, on_success, on_error, at_end, all }; /** * WHen to Invoke */ protected WHEN mWhen; /** * What to invoke */ protected String mWhat; /** * Copy Constructor * @param i */ public Invoke(Invoke i) { this(WHEN.valueOf(i.getWhen()), i.getWhat()); } /** * Crete a new Invoke object * @param when */ public Invoke(WHEN when) { mWhen = when; } /** * Create a new Invoke object * @param when * @param what */ public Invoke(WHEN when, String what) { mWhen = when; mWhat = what; } /** * Get when to Invoke * @return */ public String getWhen() { return mWhen.toString(); } /** * Set when to invoke * @param when * @return */ public Invoke setWhen(WHEN when) { mWhen = when; return this; } /** * Get what to invoke * @return */ public String getWhat() { return mWhat; } /** * Set what executable to invoke and how * @param what * @return */ public Invoke setWhat(String what) { mWhat = what; return this; } /** * Create a copy of this Invoke object * @return */ public Invoke clone() { return new Invoke(this.mWhen, this.mWhat); } public void toXML(XMLWriter writer) { toXML(writer, 0); } public void toXML(XMLWriter writer, int indent) { writer.startElement("invoke", indent); writer.writeAttribute("when", mWhen.toString().toLowerCase()); writer.writeData(mWhat).endElement(); } /** * Returns the object as String * * @return the description */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append( "[invoke " ).append( "when=\"" ).append( mWhen.toString().toLowerCase() ). 
append( "\"" ).append( " what=\"" ).append( mWhat ).append( "\"]" ); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/MetaData.java0000644000175000017500000000517011757531137024374 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import edu.isi.pegasus.common.util.XMLWriter; /** * Metadata object for the DAX API * @author gmehta * @version $Revision: 3201 $ */ public class MetaData { /** * Metadata Key */ protected String mKey; /** * Metadata type */ protected String mType; /** * Metadata value */ protected String mValue; /** * Copy constructor * @param m */ public MetaData(MetaData m) { //create a copy; the three-arg constructor expects type first, then key this(m.getType(), m.getKey(), m.getValue()); } /** * Create a new Metadata object * @param type * @param key */ public MetaData(String type, String key) { mType = type; mKey = key; } /** * Create a new Metadata object * @param type * @param key * @param value */ public MetaData(String type, String key, String value) { mType = type; mKey = key; mValue = value; } /** * Create a copy of this Metadata Object * @return */ public MetaData clone() { return new MetaData(this.mType, this.mKey, this.mValue); } /** * Set the value of the metadata * @param value * @return */ public MetaData setValue(String value) { mValue = value; return this; } /** * Get the key of this metadata object * @return */ public String getKey() { return mKey; } /** * Get the type of the metadata object * @return */ public String getType() { return mType; } /** * Get the value of the metadata object * @return */ public String getValue() { return mValue; } public void toXML(XMLWriter writer) { toXML(writer, 0); } public void toXML(XMLWriter writer, int indent) { writer.startElement("metadata", indent).writeAttribute("type", mType); writer.writeAttribute("key", mKey).writeData(mValue).endElement(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/PFN.java0000644000175000017500000000473611757531137023346 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
*/ package edu.isi.pegasus.planner.dax; import java.util.List; import java.util.LinkedList; import java.util.Collections; import edu.isi.pegasus.common.util.XMLWriter; /** * * @author gmehta * @version $Revision: 3009 $ */ public class PFN { protected String mURL; protected String mSite; protected List mProfiles; public PFN(String url) { this(url, null); } public PFN(String url, String site) { mURL = url; mSite = site; // mProfiles=new Profiles(); mProfiles = new LinkedList(); } public String getURL() { return mURL; } public PFN setSite(String site) { mSite = site; return this; } public String getSite() { return (mSite == null) ? "" : mSite; } public PFN addProfile(String namespace, String key, String value) { mProfiles.add(new Profile(namespace, key, value)); return this; } public PFN addProfile(Profile.NAMESPACE namespace, String key, String value) { mProfiles.add(new Profile(namespace, key, value)); return this; } public PFN addProfiles(List profiles) { mProfiles.addAll(profiles); return this; } public PFN addProfiles(Profile profile) { mProfiles.add(profile); return this; } public List getProfiles() { return Collections.unmodifiableList(mProfiles); } public void toXML(XMLWriter writer) { toXML(writer, 0); } public void toXML(XMLWriter writer, int indent) { writer.startElement("pfn", indent); writer.writeAttribute("url", mURL); if (mSite != null) { writer.writeAttribute("site", mSite); } for (Profile p : mProfiles) { p.toXML(writer, indent + 1); } writer.endElement(indent); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/Patterns.java0000644000175000017500000000242111757531137024510 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import java.util.regex.*; /** * * @author gmehta * @version $Revision: 3009 $ */ public class Patterns { private static Pattern mVersionPattern = Pattern.compile("^\\d+(\\.\\d+(\\.\\d+)?)?$"); private static Pattern mNodeIdPattern = Pattern.compile("^[A-Za-z0-9][\\-A-Za-z0-9_]*$"); public static boolean isValid(Pattern p, String s) { return p.matcher(s).matches(); } public static boolean isVersionValid(String version) { return isValid(mVersionPattern, version); } public static boolean isNodeIdValid(String nodeid) { return isValid(mNodeIdPattern, nodeid); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/DAX.java0000644000175000017500000000424511757531137023332 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import edu.isi.pegasus.common.util.XMLWriter; /** * Creates a DAX job object * @author GAURANG MEHTA gmehta at isi dot edu * @see AbstractJob * @version $Revision: 4135 $ */ public class DAX extends AbstractJob { /** * Create a DAX job object * @param id The unique id of the DAX job object. Must be of type [A-Za-z][-A-Za-z0-9_]* * @param daxname The DAX file to plan and submit */ public DAX(String id, String daxname) { this(id, daxname, null); } /** * Copy Constructor * @param dax */ public DAX(DAX dax) { super(dax); } /** * Create a DAX job object * @param id The unique id of the DAX job object. Must be of type [A-Za-z][-A-Za-z0-9_]* * @param daxname The DAX file to plan and submit * @param label */ public DAX(String id, String daxname, String label) { super(); checkID(id); // to decide whether to exit. Currently just logging error and proceeding. mId = id; mName = daxname; mNodeLabel = label; } /** * Is this Object a DAX * @return */ public boolean isDAX() { return true; } /** * * @param writer * @param indent */ public void toXML(XMLWriter writer, int indent) { writer.startElement( "dax", indent); writer.writeAttribute( "id", mId); writer.writeAttribute( "file", mName); super.toXML(writer, indent); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/AbstractJob.java0000644000175000017500000011515311757531137025115 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
*/ package edu.isi.pegasus.planner.dax; import java.util.List; import java.util.LinkedList; import java.util.Collections; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.common.util.XMLWriter; import java.util.LinkedHashSet; import java.util.Set; /** * * @author gmehta * @version $Revision: 4301 $ */ public class AbstractJob { protected List mArguments; protected List mProfiles; protected File mStdin; protected File mStdout; protected File mStderr; protected Set mUses; protected List mInvokes; protected String mName; protected String mId; protected String mNamespace; protected String mVersion; protected String mNodeLabel; protected static LogManager mLogger; private static final String ARG_DELIMITER = " "; private static final String FILE_DELIMITER = " "; protected AbstractJob() { mLogger = LogManagerFactory.loadSingletonInstance(); mArguments = new LinkedList(); mUses = new LinkedHashSet(); mInvokes = new LinkedList(); mProfiles = new LinkedList(); } /** * Copy constructor * @param a */ protected AbstractJob(AbstractJob a) { this.mArguments = new LinkedList(a.mArguments); this.mProfiles = new LinkedList(a.mProfiles); this.mStdin = new File(a.mStdin); this.mStdout = new File(a.mStdout); this.mStderr = new File(a.mStderr); this.mUses = new LinkedHashSet(a.mUses); this.mInvokes = new LinkedList(a.mInvokes); this.mName = a.mName; this.mId = a.mId; this.mNamespace = a.mNamespace; this.mVersion = a.mVersion; this.mNodeLabel = a.mNodeLabel; } /** * Checks that the given node id matches the allowed node id pattern. Currently just logs an error and proceeds. * @param id */ protected static void checkID(String id) { if (!Patterns.isNodeIdValid(id)) { mLogger.log( "Id: " + id + " should be of the type [A-Za-z0-9][-A-Za-z0-9_]*", LogManager.ERROR_MESSAGE_LEVEL); } } /** * Return the argument List. The List contains both {@link String} as well as {@link File} objects * @return List */ public List getArguments() { return Collections.unmodifiableList(mArguments); } /** * Add a string argument to the argument List. Each call to argument adds a space in between entries * @param argument * @return AbstractJob */ public AbstractJob addArgument(String argument) { if (argument != null) { if (!mArguments.isEmpty()) { mArguments.add(ARG_DELIMITER); } mArguments.add(argument); } return this; } /** * Add a file object to the argument List. Each call to argument adds a space between entries. * @param file * @return AbstractJob * @see File */ public AbstractJob addArgument(File file) { if (file != null) { if (!mArguments.isEmpty()) { mArguments.add(ARG_DELIMITER); } mArguments.add(file); } return this; } /** * Add an Array of {@link File} objects to the argument list. The files will be separated by space when rendered on the command line * @param files File[] * @return AbstractJob * @see File */ public AbstractJob addArgument(File[] files) { this.addArgument(files, FILE_DELIMITER); return this; } /** * Add a List of {@link File} objects to the argument list. The files will be separated by space when rendered on the command line * @param files List * @return AbstractJob * @see File */ public AbstractJob addArgument(List files) { this.addArgument(files, FILE_DELIMITER); return this; } /** * Add an Array of {@link File} objects to the argument list. * The files will be separated by the filedelimiter (default is space) when rendered on the command line. * @param files File[] Array of file objects * @param filedelimiter String delimiter for the files.
Default is space * @return AbstractJob * @see File */ public AbstractJob addArgument(File[] files, String filedelimiter) { filedelimiter = (filedelimiter == null) ? FILE_DELIMITER : filedelimiter; if (files != null && files.length > 0) { if (!mArguments.isEmpty()) { mArguments.add(ARG_DELIMITER); } boolean first = true; for (File f : files) { if (!first) { mArguments.add(filedelimiter); } mArguments.add(f); first = false; } } return this; } /** * Add a List of {@link File} objects to the argument list. * The files will be separated by the filedelimiter (default is space) when rendered on the command line. * @param files List of File objects * @param filedelimiter String delimiter for the files. Default is space * @return AbstractJob * @see File */ public AbstractJob addArgument(List files, String filedelimiter) { if (files != null && !files.isEmpty()) { //pass a typed target array so the runtime array is a File[]; //a bare toArray() returns Object[] and the File[] cast would fail this.addArgument((File[]) files.toArray(new File[0]), filedelimiter); } return this; } /** * Add an argument key and value to the argument List. * The argkey and argvalue are separated by a space. * Example addArgument("-p","0") will result in the argument being added as * -p 0
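* Since each call returns the job itself, a chained call such as
* addArgument("-p","0").addArgument("-f","a") is rendered as
* -p 0 -f a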
* Multiple calls to addArgument result in the arguments being separated by space. * @param argkey String * @param argvalue String * @return AbstractJob */ public AbstractJob addArgument(String argkey, String argvalue) { this.addArgument(argkey, argvalue, ARG_DELIMITER); return this; } /** * Add an argument key and value to the argument List.
* The argkey and argvalue are separated by the argdelimiter.
* Example addArgument("-p","0","=") will result in the argument being added as * -p=0
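* Passing a null argdelimiter falls back to the default single-space
* delimiter, so addArgument("-p","0",null) is rendered as
* -p 0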
* Multiple calls to addArgument result in the arguments being separated by space. * @param argkey String Key * @param argvalue String Value * @param argdelimiter String argdelimiter * @return AbstractJob */ public AbstractJob addArgument(String argkey, String argvalue, String argdelimiter) { argdelimiter = (argdelimiter == null) ? ARG_DELIMITER : argdelimiter; if (argkey != null && argvalue != null) { this.addArgument(argkey + argdelimiter + argvalue); } return this; } /** * Add an argument key and File value to the argument List.
* The argkey and argvalue are separated by a space.
* Example addArgument("-i",new File("f.a")) will result in the argument being added as * -i <file name="f.a">
* Multiple calls to addArgument result in the arguments being separated by space. * @param argkey String * @param argvalue File * @return AbstractJob */ public AbstractJob addArgument(String argkey, File argvalue) { this.addArgument(argkey, argvalue, ARG_DELIMITER); return this; } /** * Add an argument key and File value to the argument List.
* The argkey and argvalue are separated by the argdelimiter.
* Example addArgument("-i",new File("f.a"),"=") will result in the argument being added as * -i=<file name="f.a">
* Multiple calls to addArgument result in the arguments being separated by space. * @param argkey String * @param argvalue File * @param argdelimiter * @return AbstractJob */ public AbstractJob addArgument(String argkey, File argvalue, String argdelimiter) { argdelimiter = (argdelimiter == null) ? ARG_DELIMITER : argdelimiter; if (argkey != null && argvalue != null) { this.addArgument(argkey + argdelimiter); mArguments.add(argvalue); } return this; } /** * Add an argument key and an array of Files to the argument List.
* The argkey and argvalue are separated by a space.
* The files are separated by a space
* Example:
* File[] files = {new File("f.a1"), new File("f.a2")};
* job.addArgument("-i",files)

* will result in the argument being added as * -i <file name="f.a1"> <file name="f.a2">
* Multiple calls to addArgument result in the arguments being separated by space. * @param argkey String * @param argvalue File[] * @return AbstractJob */ public AbstractJob addArgument(String argkey, File[] argvalue) { this.addArgument(argkey, argvalue, ARG_DELIMITER, FILE_DELIMITER); return this; } /** * Add an argument key and a List of Files to the argument List.
* The argkey and argvalue are separated by a space.
* The files are separated by a space
* Example:
* List files = new LinkedList();
* files.add(new File("f.a1"));
* files.add(new File("f.a2"));
* job.addArgument("-i",files)

* will result in the argument being added as * -i <file name="f.a1"> <file name="f.a2">
* Multiple calls to addArgument result in the arguments being separated by space. * @param argkey String * @param argvalue List * @return AbstractJob */ public AbstractJob addArgument(String argkey, List argvalue) { this.addArgument(argkey, argvalue, ARG_DELIMITER, FILE_DELIMITER); return this; } /** * Add an argument key and an array of Files to the argument List.
* The argkey and argvalue are separated by the argdelimiter.
* The files are separated by a filedelimiter
* Example:
* File[] files = {new File("f.a1"), new File("f.a2")};
* job.addArgument("-i",files,"=",",")

* will result in the argument being added as * -i=<file name="f.a1">,<file name="f.a2">
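* Passing null for argdelimiter or filedelimiter falls back to the default
* single-space delimiter for that position.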
* Multiple calls to addArgument result in the arguments being separated by space. * @param argkey String * @param argvalue File[] * @param argdelimiter String * @param filedelimiter String * @return AbstractJob */ public AbstractJob addArgument(String argkey, File[] argvalue, String argdelimiter, String filedelimiter) { argdelimiter = (argdelimiter == null) ? ARG_DELIMITER : argdelimiter; filedelimiter = (filedelimiter == null) ? FILE_DELIMITER : filedelimiter; if (argkey != null && argvalue != null && argvalue.length > 0) { this.addArgument(argkey + argdelimiter); boolean first = true; for (File f : argvalue) { if (!first) { mArguments.add(filedelimiter); } mArguments.add(f); first = false; } } return this; } /** * Add an argument key and a List of Files to the argument List.
* The argkey and argvalue are separated by the argdelimiter.
* The files are separated by a filedelimiter
* Example:
* List files = new LinkedList();
* files.add(new File("f.a1"));
* files.add(new File("f.a2"));
* job.addArgument("-i",files,"=",",")

* will result in the argument being added as * -i=<file name="f.a1">,<file name="f.a2">
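* This List variant delegates to the File[] variant above after converting
* the list to an array. (Note: the delegation uses a bare toArray() with a
* File[] cast, which can throw ClassCastException at runtime; the typed
* toArray(new File[0]) form would be safe.)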
* Multiple calls to addArgument results in the arguments being separated by space. * @param argkey String * @param argvalue List<File> List of File objects * @param argdelimiter String * @param filedelimiter String * @return AbstractJob */ public AbstractJob addArgument(String argkey, List argvalue, String argdelimiter, String filedelimiter) { if (argkey != null && argvalue != null && !argvalue.isEmpty()) { this.addArgument(argkey, (File[]) argvalue.toArray(), argdelimiter, filedelimiter); } return this; } /** * Add a profile to the job * @param namespace String * @param key String * @param value String * @return AbstractJob */ public AbstractJob addProfile(String namespace, String key, String value) { mProfiles.add(new Profile(namespace, key, value)); return this; } /** * Add a profile to the job * @param namespace {@link Profile.NAMESPACE} * @param key String * @param value String * @return AbstractJob */ public AbstractJob addProfile(Profile.NAMESPACE namespace, String key, String value) { mProfiles.add(new Profile(namespace, key, value)); return this; } /** * Add a Profile object * @param profile * @return AbstractJob * @see Profile */ public AbstractJob addProfile(Profile profile) { mProfiles.add(new Profile(profile)); return this; } /** * Add a list of Profile objects * @param profiles List<Profile> * @return */ public AbstractJob addProfiles(List profiles) { mProfiles.addAll(Collections.unmodifiableCollection(profiles)); return this; } /** * Get the STDIN file object * @return File */ public File getStdin() { return mStdin; } /** * * @param stdin * @return AbstractJob */ public AbstractJob setStdin(File stdin) { File f = new File(stdin, File.LINK.INPUT); mStdin = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdin * @param transfer * @return AbstractJob */ public AbstractJob setStdin(File stdin, File.TRANSFER transfer) { File f = new File(stdin, File.LINK.INPUT); f.setTransfer(transfer); mStdin = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdin * @param register * @return AbstractJob */ public AbstractJob setStdin(File stdin, boolean register) { File f = new File(stdin, File.LINK.INPUT); f.setRegister(register); mStdin = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdin * @param transfer * @param register * @return AbstractJob */ public AbstractJob setStdin(File stdin, File.TRANSFER transfer, boolean register) { File f = new File(stdin, File.LINK.INPUT); f.setTransfer(transfer); f.setRegister(register); mStdin = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdin * @param transfer * @param register * @return AbstractJob */ public AbstractJob setStdin(File stdin, File.TRANSFER transfer, boolean register, boolean optional) { File f = new File(stdin, File.LINK.INPUT); f.setTransfer(transfer); f.setRegister(register); f.setOptional(optional); mStdin = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdin * @return AbstractJob */ public AbstractJob setStdin(String stdin) { File f = new File(stdin, File.LINK.INPUT); mStdin = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdin * @param transfer * @return AbstractJob */ public AbstractJob setStdin(String stdin, File.TRANSFER transfer) { File f = new File(stdin, File.LINK.INPUT); f.setTransfer(transfer); mStdin = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdin * @param register * @return AbstractJob */ public AbstractJob 
setStdin(String stdin, boolean register) { File f = new File(stdin, File.LINK.INPUT); f.setRegister(register); mStdin = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdin * @param transfer * @param register * @return AbstractJob */ public AbstractJob setStdin(String stdin, File.TRANSFER transfer, boolean register) { File f = new File(stdin, File.LINK.INPUT); f.setTransfer(transfer); f.setRegister(register); mStdin = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdin * @param transfer * @param register * @param optional * @return AbstractJob */ public AbstractJob setStdin(String stdin, File.TRANSFER transfer, boolean register, boolean optional) { File f = new File(stdin, File.LINK.INPUT); f.setTransfer(transfer); f.setRegister(register); f.setOptional(optional); mStdin = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @return File */ public File getStdout() { return mStdout; } /** * * @param stdout * @return AbstractJob */ public AbstractJob setStdout(File stdout) { File f = new File(stdout, File.LINK.OUTPUT); mStdout = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdout * @param transfer * @return AbstractJob */ public AbstractJob setStdout(File stdout, File.TRANSFER transfer) { File f = new File(stdout, File.LINK.OUTPUT); f.setTransfer(transfer); mStdout = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdout * @param register * @return AbstractJob */ public AbstractJob setStdout(File stdout, boolean register) { File f = new File(stdout, File.LINK.OUTPUT); f.setRegister(register); mStdout = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdout * @param transfer * @param register * @return AbstractJob */ public AbstractJob setStdout(File stdout, File.TRANSFER transfer, boolean register) { File f = new File(stdout, File.LINK.OUTPUT); f.setTransfer(transfer); f.setRegister(register); mStdout = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdout * @param transfer * @param register * @param optional * @return AbstractJob */ public AbstractJob setStdout(File stdout, File.TRANSFER transfer, boolean register, boolean optional) { File f = new File(stdout, File.LINK.OUTPUT); f.setTransfer(transfer); f.setRegister(register); f.setOptional(optional); mStdout = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdout * @return AbstractJob */ public AbstractJob setStdout(String stdout) { File f = new File(stdout, File.LINK.OUTPUT); mStdout = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdout * @param transfer * @return AbstractJob */ public AbstractJob setStdout(String stdout, File.TRANSFER transfer) { File f = new File(stdout, File.LINK.OUTPUT); f.setTransfer(transfer); mStdout = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdout * @param register * @return AbstractJob */ public AbstractJob setStdout(String stdout, boolean register) { File f = new File(stdout, File.LINK.OUTPUT); f.setRegister(register); mStdout = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stdout * @param transfer * @param register * @return AbstractJob */ public AbstractJob setStdout(String stdout, File.TRANSFER transfer, boolean register) { File f = new File(stdout, File.LINK.OUTPUT); f.setTransfer(transfer); f.setRegister(register); mStdout = f; mUses.add(f); return this; } /** * * @param stdout * @param transfer * @param 
register * @param optional * @return AbstractJob */ public AbstractJob setStdout(String stdout, File.TRANSFER transfer, boolean register, boolean optional) { File f = new File(stdout, File.LINK.OUTPUT); f.setTransfer(transfer); f.setRegister(register); f.setOptional(optional); mStdout = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @return File */ public File getStderr() { return mStderr; } /** * * @param stderr * @return AbstractJob */ public AbstractJob setStderr(File stderr) { File f = new File(stderr, File.LINK.OUTPUT); mStderr = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stderr * @param transfer * @return AbstractJob */ public AbstractJob setStderr(File stderr, File.TRANSFER transfer) { File f = new File(stderr, File.LINK.OUTPUT); f.setTransfer(transfer); mStderr = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stderr * @param register * @return AbstractJob */ public AbstractJob setStderr(File stderr, boolean register) { File f = new File(stderr, File.LINK.OUTPUT); f.setRegister(register); mStderr = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stderr * @param transfer * @param register * @return AbstractJob */ public AbstractJob setStderr(File stderr, File.TRANSFER transfer, boolean register) { File f = new File(stderr, File.LINK.OUTPUT); f.setTransfer(transfer); f.setRegister(register); mStderr = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stderr * @param transfer * @param register * @param optional * @return AbstractJob */ public AbstractJob setStderr(File stderr, File.TRANSFER transfer, boolean register, boolean optional) { File f = new File(stderr, File.LINK.OUTPUT); f.setTransfer(transfer); f.setRegister(register); f.setOptional(optional); mStderr = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stderr * @return AbstractJob */ public AbstractJob setStderr(String stderr) { File f = new File(stderr, File.LINK.OUTPUT); mStderr = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stderr * @param transfer * @return AbstractJob */ public AbstractJob setStderr(String stderr, File.TRANSFER transfer) { File f = new File(stderr, File.LINK.OUTPUT); f.setTransfer(transfer); mStderr = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stderr * @param register * @return AbstractJob */ public AbstractJob setStderr(String stderr, boolean register) { File f = new File(stderr, File.LINK.OUTPUT); f.setRegister(register); mStderr = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stderr * @param transfer * @param register * @return AbstractJob */ public AbstractJob setStderr(String stderr, File.TRANSFER transfer, boolean register) { File f = new File(stderr, File.LINK.OUTPUT); f.setTransfer(transfer); f.setRegister(register); mStderr = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @param stderr * @param transfer * @param register * @param optional * @return AbstractJob */ public AbstractJob setStderr(String stderr, File.TRANSFER transfer, boolean register, boolean optional) { File f = new File(stderr, File.LINK.OUTPUT); f.setTransfer(transfer); f.setRegister(register); f.setOptional(optional); mStderr = f; if (!mUses.contains(f)) { mUses.add(f); } return this; } /** * * @return Set */ public Set getUses() { return Collections.unmodifiableSet(mUses); } /** * * * * * @param file * @param link * @return AbstractJob */ public AbstractJob 
uses(String file, File.LINK link) { File f = new File(file, link); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator. combine(f.mNamespace, f.mName, f.mVersion) + ". Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } return this; } /** * * @param file * @param link * @param register * @return AbstractJob */ public AbstractJob uses(String file, File.LINK link, boolean register) { File f = new File(file, link); f.setRegister(register); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator. combine(f.mNamespace, f.mName, f.mVersion) + ". Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } return this; } /** * * @param file * @param link * @param transfer * @return AbstractJob */ public AbstractJob uses(String file, File.LINK link, File.TRANSFER transfer) { File f = new File(file, link); f.setTransfer(transfer); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator. combine(f.mNamespace, f.mName, f.mVersion) + ". Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } return this; } /** * * @param file * @param link * @param transfer * @param register * @return AbstractJob */ public AbstractJob uses(String file, File.LINK link, File.TRANSFER transfer, boolean register) { File f = new File(file, link); f.setRegister(register); f.setTransfer(transfer); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator. combine(f.mNamespace, f.mName, f.mVersion) + ". Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } return this; } /** * * @param file * @param link * @param transfer * @param register * @param optional * @param executable * @return AbstractJob */ public AbstractJob uses(String file, File.LINK link, File.TRANSFER transfer, boolean register, boolean optional, boolean executable) { File f = new File(file, link); f.setRegister(register); f.setOptional(optional); f.setTransfer(transfer); f.setExecutable(executable); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator.combine( f.mNamespace, f.mName, f.mVersion) + ". Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } return this; } /** * * @param file * @param link * @return AbstractJob */ public AbstractJob uses(File file, File.LINK link) { File f = new File(file, link); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator.combine( f.mNamespace, f.mName, f.mVersion) + ". Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } return this; } /** * * @param file * @param link * @param transfer * @return AbstractJob */ public AbstractJob uses(File file, File.LINK link, File.TRANSFER transfer) { File f = new File(file, link); f.setTransfer(transfer); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator.combine( f.mNamespace, f.mName, f.mVersion) + ". 
Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } return this; } /** * * @param file * @param link * @param register * @return AbstractJob */ public AbstractJob uses(File file, File.LINK link, boolean register) { File f = new File(file, link); f.setRegister(register); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator.combine(f.mNamespace, f.mName, f.mVersion) + ". Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } return this; } /** * * @param file * @param link * @param transfer * @param register * @return AbstractJob */ public AbstractJob uses(File file, File.LINK link, File.TRANSFER transfer, boolean register) { File f = new File(file, link); f.setTransfer(transfer); f.setRegister(register); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator.combine( f.mNamespace, f.mName, f.mVersion) + ". Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } return this; } /** * * @param file * @param link * @param transfer * @param register * @param optional * @param executable * @return AbstractJob */ public AbstractJob uses(File file, File.LINK link, File.TRANSFER transfer, boolean register, boolean optional, boolean executable) { File f = new File(file, link); f.setTransfer(transfer); f.setRegister(register); f.setOptional(optional); f.setExecutable(executable); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator.combine( f.mNamespace, f.mName, f.mVersion) + ". Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } return this; } /** * * @param files * @param link * @return AbstractJob */ public AbstractJob uses(List files, File.LINK link) { for (File file : files) { File f = new File(file, link); if (!mUses.contains(f)) { mUses.add(f); } else { mLogger.log("Job " + Separator.combine(mNamespace, mName, mVersion) + "already contains a file " + Separator.combine( f.mNamespace, f.mName, f.mVersion) + ". 
Ignoring", LogManager.WARNING_MESSAGE_LEVEL); } } return this; } /** * * @return List */ public List getInvoke() { return Collections.unmodifiableList(mInvokes); } /** * Add Notification to the job * @param when * @param what * @return AbstractJob */ public AbstractJob addInvoke(Invoke.WHEN when, String what) { Invoke i = new Invoke(when, what); mInvokes.add(i); return this; } /** * Add Notification to the job * @param when * @param what * @return AbstractJob */ public AbstractJob addNotification(Invoke.WHEN when, String what) { return addInvoke(when,what); } /** * Add notification to the job * @param invoke * @return AbstractJob */ public AbstractJob addInvoke(Invoke invoke) { mInvokes.add(invoke.clone()); return this; } /** * Add notification to the job * @param invoke * @return AbstractJob */ public AbstractJob addNotification(Invoke invoke) { return addInvoke(invoke); } /** * Add Notifications to the job * @param invokes * @return AbstractJob */ public AbstractJob addInvokes(List invokes) { for (Invoke invoke : invokes) { this.addInvoke(invoke); } return this; } /** * Add Notifications to the job * @param invokes * @return AbstractJob */ public AbstractJob addNotifications(List invokes) { return addInvokes(invokes); } /** * Is this Object a Job * @return */ public boolean isJob() { return false; } /** * Is this Object a DAX * @return */ public boolean isDAX() { return false; } /** * Is this Object a DAG * @return */ public boolean isDAG() { return false; } /** * * @return String */ public String getName() { return mName; } /** * * @return String */ public String getId() { return mId; } /** * * @return String */ public String getNodeLabel() { return mNodeLabel; } /** * * @param label */ public void setNodeLabel(String label) { this.mNodeLabel = label; } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final AbstractJob other = (AbstractJob) obj; if ((this.mId == null) ? (other.mId != null) : !this.mId.equals( other.mId)) { return false; } return true; } @Override public int hashCode() { int hash = 7; hash = 29 * hash + (this.mId != null ? this.mId.hashCode() : 0); return hash; } /** * * @param writer */ public void toXML(XMLWriter writer) { toXML(writer, 0); } /** * * @param writer * @param indent */ public void toXML(XMLWriter writer, int indent) { //Check if its a dax, dag or job class if (mNodeLabel != null && !mNodeLabel.isEmpty()) { writer.writeAttribute("node-label", mNodeLabel); } //add argument if (!mArguments.isEmpty()) { writer.startElement("argument", indent + 1); for (Object o : mArguments) { if (o.getClass() == String.class) { //if class is string add argument string in the data section writer.writeData( (String) o); } if (o.getClass() == File.class) { //add file tags in the argument elements data section ((File) o).toXML(writer, 0, "argument"); } } writer.endElement(); } //add profiles for (Profile p : mProfiles) { p.toXML(writer, indent + 1); } //add stdin if (mStdin != null) { mStdin.toXML(writer, indent + 1, "stdin"); } //add stdout if (mStdout != null) { mStdout.toXML(writer, indent + 1, "stdout"); } //add stderr if (mStderr != null) { mStderr.toXML(writer, indent + 1, "stderr"); } //add uses for (File f : mUses) { f.toXML(writer, indent + 1, "uses"); } //add invoke for (Invoke i : mInvokes) { i.toXML(writer, indent + 1); } if (!(mUses.isEmpty() && mInvokes.isEmpty() && mStderr == null && mStdout == null && mStdin == null && mProfiles. 
isEmpty() && mArguments.isEmpty())) { writer.endElement(indent); } else { writer.endElement(); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/CatalogType.java0000644000175000017500000001501111757531137025123 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import java.util.List; import java.util.LinkedList; import java.util.Collections; import edu.isi.pegasus.common.util.XMLWriter; /** * Abstract Type for RC and TC Sections of the DAX. Extended by {@link Executable} and {@link File} * @author gmehta * @version $Revision: 3618 $ * @see Executable * @see File */ public class CatalogType { protected List<Profile> mProfiles; protected List<MetaData> mMetadata; protected List<PFN> mPFNs; protected LogManager mLogger; protected CatalogType() { mProfiles = new LinkedList<Profile>(); mMetadata = new LinkedList<MetaData>(); mPFNs = new LinkedList<PFN>(); mLogger = LogManagerFactory.loadSingletonInstance(); } /** Copy Constructor * * @param c */ protected CatalogType(CatalogType c){ this.mProfiles = new LinkedList<Profile>(c.mProfiles); this.mMetadata = new LinkedList<MetaData>(c.mMetadata); this.mPFNs = new LinkedList<PFN>(c.mPFNs); this.mLogger = c.mLogger; } /** * Add a PFN url to the Catalog * @param url * @return CatalogType */ public CatalogType addPhysicalFile(String url) { PFN p = new PFN(url); mPFNs.add(p); return this; } /** * Add a PFN url and a site id to the Catalog * @param url * @param site * @return CatalogType */ public CatalogType addPhysicalFile(String url, String site) { PFN p = new PFN(url, site); mPFNs.add(p); return this; } /** * Add a PFN object to the Catalog * @param pfn * @return CatalogType * @see PFN */ public CatalogType addPhysicalFile(PFN pfn) { mPFNs.add(pfn); return this; } /** * Add a list of PFN objects to the Catalog * @param pfns * @return CatalogType * @see PFN */ public CatalogType addPhysicalFiles(List<PFN> pfns) { mPFNs.addAll(pfns); return this; } /** * Returns a List of PFN objects associated with this Catalog entry * @return List * @see PFN */ public List<PFN> getPhysicalFiles() { return Collections.unmodifiableList(mPFNs); } /** * Add a Metadata entry for the Catalog object * @param type String type of metadata * @param key String key for the metadata entry * @param value String value for the metadata entry * @return CatalogType * */ public CatalogType addMetaData(String type, String key, String value) { MetaData m = new MetaData(type, key, value); mMetadata.add(m); return this; } /** * Add a {@link MetaData} object for the Catalog object * @param metadata * @return CatalogType * @see MetaData */ public CatalogType addMetaData(MetaData metadata) { mMetadata.add(metadata); return this; } /** * Add a List of {@link MetaData} objects to the Catalog entry object * @param metadata * @return CatalogType * @see MetaData */ public CatalogType addMetaData(List<MetaData> metadata) { mMetadata.addAll(metadata); return this; } /** * 
Returns the List of MetaData objects associated with this Catalog entry object * @return List * @see MetaData */ public List<MetaData> getMetaData() { return Collections.unmodifiableList(mMetadata); } /** * Add a profile to the catalog entry * @param namespace String Namespace of the profile. See {@link Profile.NAMESPACE} for a list of valid namespaces * @param key String Key of the profile * @param value String Value of the profile * @return CatalogType * @see Profile.NAMESPACE */ public CatalogType addProfile(String namespace, String key, String value) { mProfiles.add(new Profile(namespace, key, value)); return this; } /** * Add a profile to the catalog entry * @param namespace {@link Profile.NAMESPACE} Namespace of the profile * @param key String Key of the profile * @param value String Value of the profile * @return CatalogType * @see Profile.NAMESPACE */ public CatalogType addProfile(Profile.NAMESPACE namespace, String key, String value) { mProfiles.add(new Profile(namespace, key, value)); return this; } /** * Add a List of profile objects to this Catalog entry * @param profiles List of Profile objects * @return CatalogType * @see Profile */ public CatalogType addProfiles(List<Profile> profiles) { mProfiles.addAll(profiles); return this; } /** * Add a Profile object to this Catalog entry * @param profile * @return CatalogType * @see Profile */ public CatalogType addProfiles(Profile profile) { mProfiles.add(profile); return this; } /** * Return the List of {@link Profile} objects associated with this Catalog entry * @return List * @see Profile */ public List<Profile> getProfiles() { return Collections.unmodifiableList(mProfiles); } /** * Write the XML representation of this object * @param writer * @see XMLWriter */ public void toXML(XMLWriter writer) { toXML(writer, 0); } /** * Write the XML representation of this object * @param writer * @param indent * @see XMLWriter */ public void toXML(XMLWriter writer, int indent) { for (Profile p : mProfiles) { p.toXML(writer, indent + 1); } for (MetaData m : mMetadata) { m.toXML(writer, indent + 1); } for (PFN f : mPFNs) { f.toXML(writer, indent + 1); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/File.java0000644000175000017500000002470311757531137023576 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.XMLWriter; import edu.isi.pegasus.common.util.Separator; /** * This class is the container for any File object, either in the RC section or in a job's uses section * @author gmehta * @version $Revision: 3201 $ */ public class File extends CatalogType { /** * The linkage types that a file can have */ public static enum LINK { INPUT, input, OUTPUT, output, INOUT, inout }; /** * Three transfer modes are supported: transfer the file, do not transfer the file, or transfer it optionally, i.e. do not mark a failed transfer or the file's absence as a failure */ public static enum TRANSFER { TRUE, FALSE, OPTIONAL } /** * The namespace on a file. 
This is used for Executables only */ protected String mNamespace; /** * The logical name of the file. */ protected String mName; /** * The logical version of the file. This is used for executables only. */ protected String mVersion; /** * The Linkage of the file. (INPUT, OUTPUT, or INOUT) */ protected LINK mLink; /** * Is the file optional */ protected boolean mOptional = false; /** * Should the file be registered in the replica catalog */ protected boolean mRegister = true; /** * Should the file be transferred on generation. */ protected TRANSFER mTransfer = TRANSFER.TRUE; /** * Is the file an executable. */ protected boolean mExecutable = false; /** * Copy constructor * @param f File */ public File(File f) { this(f.getNamespace(), f.getName(), f.getVersion(), f.getLink()); this.mOptional = f.getOptional(); this.mRegister = f.getRegister(); this.mTransfer = f.getTransfer(); this.mExecutable = f.getExecutable(); } /** * Copy constructor, but change the linkage of the file. * @param f File * @param link Link */ public File(File f, LINK link) { this(f.getNamespace(), f.getName(), f.getVersion(), link); this.mOptional = f.getOptional(); this.mRegister = f.getRegister(); this.mTransfer = f.getTransfer(); this.mExecutable = f.getExecutable(); } /** * Create new File object * @param namespace * @param name * @param version */ public File(String namespace, String name, String version) { mNamespace = namespace; mName = name; mVersion = version; } /** * Create new file object * @param name The name of the file */ public File(String name) { mName = name; } /** * Create new file object * @param name The name of the file * @param link The linkage of the file */ public File(String name, LINK link) { mName = name; mLink = link; } /** * Create a new file object * @param namespace The namespace of the file * @param name The name of the file * @param version The version of the file * @param link The linkage of the file. */ public File(String namespace, String name, String version, LINK link) { mNamespace = namespace; mName = name; mVersion = version; mLink = link; } /** * Get the name of the file * @return */ public String getName() { return mName; } /** * Get the namespace of the file * @return */ public String getNamespace() { return mNamespace; } /** * Get the version of the file * @return */ public String getVersion() { return mVersion; } /** * Get the linkage of the file. * @return */ public LINK getLink() { return mLink; } /** * Set the file linkage * @param link * @return * @see LINK */ public File setLink(LINK link) { mLink = link; return this; } /** * Set the optional flag on the file. Default is false * @param optionalflag * @return */ public File setOptional(boolean optionalflag) { mOptional = optionalflag; return this; } /** * Check the optional flag of the file * @return */ public boolean getOptional() { return mOptional; } /** * Set the register flag of the file. Default is true * @param registerflag * @return */ public File setRegister(boolean registerflag) { mRegister = registerflag; return this; } /** * Get the register flag of this file. * @return */ public boolean getRegister() { return mRegister; } /** * Set the transfer type of the file * @param transferflag * @return * @see TRANSFER */ public File setTransfer(TRANSFER transferflag) { mTransfer = transferflag; return this; } /** * Get the transfer type of the file * @return */ public TRANSFER getTransfer() { return mTransfer; } /** * Mark the file as executable. 
Default is false * @param executable * @return */ public File setExecutable(boolean executable) { mExecutable = executable; return this; } /** * Mark the file as executable. Default is false * @return */ public File setExecutable() { mExecutable = true; return this; } /** * Use setExecutable instead. * @deprecated * @return */ public File SetExecutable() { mExecutable = true; return this; } /** * Check if the file is an executable * @return */ public boolean getExecutable() { return mExecutable; } /** * Check if this File is equal to Object o * @param o * @return */ public boolean equals(Object o) { if (o instanceof File) { File f = (File) o; return Separator.combine(mNamespace, mName, mVersion).equalsIgnoreCase(Separator.combine(f.mNamespace, f.mName, f.mVersion)); } return false; } /** * HashCode of this File * @return */ public int hashCode() { return Separator.combine(mNamespace, mName, mVersion).hashCode(); } /** * Return a clone of this File * @return */ public File clone() { File f = new File(mNamespace, mName, mVersion, mLink); // copy the flags onto the clone f.mOptional = this.mOptional; f.mRegister = this.mRegister; f.mTransfer = this.mTransfer; f.mExecutable = this.mExecutable; return f; } /** * Write the file object * @param writer */ public void toXML(XMLWriter writer) { toXML(writer, 0, "file"); } /** * Write the file object, with indent level N * @param writer * @param indent */ public void toXML(XMLWriter writer, int indent) { toXML(writer, indent, "file"); } /** * Write the file object as XML but render it as the elementname * @param writer * @param indent * @param elementname */ public void toXML(XMLWriter writer, int indent, String elementname) { if (elementname.equalsIgnoreCase("stdin")) { //used in job element writer.startElement("stdin", indent); writer.writeAttribute("name", mName); writer.endElement(); } else if (elementname.equalsIgnoreCase("stdout")) { //used in job element writer.startElement("stdout", indent); writer.writeAttribute("name", mName); writer.endElement(); } else if (elementname.equalsIgnoreCase("stderr")) { //used in job element writer.startElement("stderr", indent); writer.writeAttribute("name", mName); writer.endElement(); } else if (elementname.equalsIgnoreCase("argument")) { //used in job's argument element writer.startElement("file", indent); writer.writeAttribute("name", mName); writer.noLine(); writer.endElement(); } else if (elementname.equalsIgnoreCase("uses")) { // used by job, dax, dag and transformation elements writer.startElement("uses", indent); if (mNamespace != null && !mNamespace.isEmpty()) { writer.writeAttribute("namespace", mNamespace); } writer.writeAttribute("name", mName); if (mVersion != null && !mVersion.isEmpty()) { writer.writeAttribute("version", mVersion); } if (mLink != null) { writer.writeAttribute("link", mLink.toString().toLowerCase()); } if (mOptional) { writer.writeAttribute("optional", "true"); } writer.writeAttribute("transfer", mTransfer.toString().toLowerCase()); writer.writeAttribute("register", Boolean.toString(mRegister)); if (mExecutable) { writer.writeAttribute("executable", "true"); } writer.endElement(); } else if (elementname.equalsIgnoreCase("file")) { //Used by the file element at the top of the dax if (mPFNs.isEmpty() && mMetadata.isEmpty()) { mLogger.log("The file element for " + mName + " must have at least 1 pfn or 1 metadata entry. 
Skipping empty file element", LogManager.WARNING_MESSAGE_LEVEL); } else { writer.startElement("file", indent); writer.writeAttribute("name", mName); //call CatalogType's writer method to generate the profile, metadata and pfn elements super.toXML(writer, indent); writer.endElement(indent); } } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/Parent.java0000644000175000017500000000554511757531137024153 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import edu.isi.pegasus.common.util.XMLWriter; /** * * @author gmehta * @version $Revision: 3009 $ */ public class Parent { /** * The name of the parent */ private String mName; /** * The edge label for the parent child relationship. Optional. */ private String mLabel; /** * * @param name */ public Parent(String name) { mName = name; } public Parent(Parent p) { this(p.getName(), p.getLabel()); } /** * * @param name * @param label */ public Parent(String name, String label) { mName = name; mLabel = label; } /** * @return the name of the parent */ public String getName() { return mName; } /** * @param name the name of the parent to set */ public void setName(String name) { mName = name; } /** * @return the label */ public String getLabel() { return mLabel; } /** * @param label the label to set */ public void setLabel(String label) { mLabel = label; } public Parent clone() { return new Parent(this.mName, this.mLabel); } @Override public int hashCode() { int hashcode; if (mLabel == null) { hashcode = 0; } else { hashcode = mLabel.hashCode(); } return 31 * mName.hashCode() + hashcode; } @Override public boolean equals(Object o) { if (!(o instanceof Parent)) { return false; } if (this == o) { return true; } Parent p = (Parent) o; // compare the labels null-safely return mName.equals(p.getName()) && (mLabel == null ? p.getLabel() == null : mLabel.equals(p.getLabel())); } @Override public String toString() { return "(" + mName + ", " + (mLabel == null ? "" : mLabel) + ")"; } public void toXML(XMLWriter writer) { toXML(writer, 0); } public void toXML(XMLWriter writer, int indent) { writer.startElement("parent", indent); writer.writeAttribute("ref", mName); if (mLabel != null && !mLabel.isEmpty()) { writer.writeAttribute("edge-label", mLabel); } writer.endElement(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/Profile.java0000644000175000017500000000621311757531137024313 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.dax; import edu.isi.pegasus.common.util.XMLWriter; /** * Profile Object for the DAX API * @author gmehta * @version $Revision: 3201 $ */ public class Profile { /** * Supported NAMESPACES. */ public static enum NAMESPACE { CONDOR, condor, PEGASUS, pegasus, DAGMAN, dagman, GLOBUS, globus, HINTS, hints, SELECTOR, selector, STAT, stat, ENV, env } /** * Namespace of the profile */ protected String mNamespace; /** * Key of the profile */ protected String mKey; /** * Value of the profile */ protected String mValue; /** * Create a new Profile object * @param namespace * @param key */ public Profile(String namespace, String key) { mNamespace = namespace; mKey = key; } /** * Create a new Profile object * @param namespace * @param key * @param value */ public Profile(String namespace, String key, String value) { mNamespace = namespace; mKey = key; mValue = value; } /** * * @param namespace * @param key * @param value */ public Profile(NAMESPACE namespace, String key, String value) { mNamespace = namespace.toString(); mKey = key; mValue = value; } /** * Copy constructor * @param p */ public Profile(Profile p) { this(p.getNameSpace(), p.getKey(), p.getValue()); } /** * Get the key of this Profile * @return */ public String getKey() { return mKey; } /** * Get the namespace of this profile * @return */ public String getNameSpace() { return mNamespace; } /** * Get the value of this profile * @return */ public String getValue() { return mValue; } /** * Set the value of this Profile * @param value * @return */ public Profile setValue(String value) { mValue = value; return this; } /** * Create a copy of this Profile * @return */ @Override public Profile clone() { return new Profile(this.mNamespace, this.mKey, this.mValue); } public void toXML(XMLWriter writer) { toXML(writer, 0); } public void toXML(XMLWriter writer, int indent) { writer.startElement("profile", indent).writeAttribute("namespace", mNamespace.toLowerCase()); writer.writeAttribute("key", mKey).writeData(mValue).endElement(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/DAG.java0000644000175000017500000000416311757531137023310 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import edu.isi.pegasus.common.util.XMLWriter; /** * DAG Class to hold the DAG job object. * * @see AbstractJob * @author Gaurang Mehta gmehta at isi dot edu * @version $Revision: 4135 $ */ public class DAG extends AbstractJob { /** * Create a DAG object * @param id The unique id of the DAG job object. Must be of type [A-Za-z][-A-Za-z0-9_]* * @param dagname The dag file to submit */ public DAG(String id, String dagname) { this(id, dagname, null); } /** * Copy Constructor * @param dag */ public DAG(DAG dag) { super(dag); } /** * Create a DAG object * @param id The unique id of the DAG job object. Must be of type [A-Za-z][-A-Za-z0-9_]* * @param dagname The dag file to submit * @param label The label for this job. 
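* For example (taken from the Diamond example in ADAG): * DAG j2 = new DAG("j2", "findrange.dag", "j2"); 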
*/ public DAG(String id, String dagname, String label) { super(); checkID(id); // to decide whether to exit. Currently just logging error and proceeding. mId = id; mName = dagname; mNodeLabel = label; } /** * Is this Object a DAG * @return */ public boolean isDAG() { return true; } public void toXML(XMLWriter writer, int indent) { writer.startElement( "dag", indent); writer.writeAttribute( "id", mId); writer.writeAttribute( "file", mName); super.toXML(writer, indent); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/ADAG.java0000644000175000017500000010275511757531137023417 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import java.util.List; import java.util.Map; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Set; import java.util.LinkedHashSet; import java.io.Writer; import java.io.FileWriter; import java.io.IOException; import java.io.BufferedWriter; import java.io.OutputStreamWriter; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.common.util.XMLWriter; import edu.isi.pegasus.planner.dax.Invoke.WHEN; /** *

 * This class provides the Java API to create DAX files.
 *
 * The DAX XML SCHEMA is available at http://pegasus.isi.edu/schema/dax-3.3.xsd
 * and documentation available at http://pegasus.isi.edu/wms/docs/schemas/dax-3.3/dax-3.3.html
 *
 * The DAX consists of 6 sections; only the job|dax|dag section must be non-empty:
 * <ol>
 * <li>invoke: Adds notifications for the workflow. (Optional)</li>
 * <li>file: Used as an "In DAX" Replica Catalog. (Optional)</li>
 * <li>executable: Used as an "In DAX" Transformation Catalog. (Optional)</li>
 * <li>transformation: Used to describe compound executables, i.e. executables depending on other executables or files. (Optional)</li>
 * <li>job|dax|dag: Used to describe a single job, sub dax or sub dag. At least 1 required.</li>
 * <li>child: The dependency section to describe dependencies between job|dax|dag elements. (Optional)</li>
 * </ol>
 *
 * To generate an example DIAMOND DAX run the ADAG class as shown below:
 * java ADAG filename
 * NOTE: This is an illustrative example only. Please see the examples directory for a working example.
 *
 * Shown below are some of the steps in creating a DIAMOND DAX.
 * <ol>
 * <li>Create a new {@link ADAG} object:
 *     ADAG dax = new ADAG("test");</li>
 * <li>Add notifications to the workflow:
 *     dax.addNotification(WHEN.start, "/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com");
 *     dax.addNotification(WHEN.at_end, "/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com");</li>
 * <li>Create a {@link File} object. You only need to add entries to this section if you want to use an "In DAX" Replica Catalog:
 *     File fa = new File("f.a");
 *     <ol>
 *     <li>Add {@link MetaData} entries to the file object:
 *         fa.addMetaData("string", "foo", "bar");
 *         fa.addMetaData("int", "num", "1");</li>
 *     <li>Add {@link Profile} entries to the file object:
 *         fa.addProfile("env", "FOO", "/usr/bar");
 *         fa.addProfile("globus", "walltime", "40");</li>
 *     <li>Add a {@link PFN} to the File object:
 *         fa.addPhysicalFile("file:///scratch/f.a", "local");</li>
 *     <li>Add the File object to the Replica Catalog section of the DAX:
 *         dax.addFile(fa);</li>
 *     </ol></li>
 * <li>Create an {@link Executable} object. You only need to add entries to this section if you want to use an "In DAX" Transformation Catalog:
 *     Executable preprocess = new Executable("pegasus", "preprocess", "1.0");
 *     <ol>
 *     <li>Set the {@link Executable.ARCH} and {@link Executable.OS} for the executable. Default is x86 and LINUX:
 *         preprocess.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX);</li>
 *     <li>Mark the executable as available to be staged. Default is an installed executable:
 *         preprocess.unsetInstalled();</li>
 *     <li>Add the physical location {@link PFN} of the executable. In case of stageable executables the path should be a url:
 *         preprocess.addPhysicalFile(new PFN("file:///opt/pegasus/default/bin/keg"));</li>
 *     <li>Add {@link Profile} and {@link MetaData} objects to the executable:
 *         preprocess.addProfile(Profile.NAMESPACE.globus, "walltime", "120");
 *         preprocess.addMetaData("string", "project", "pegasus");</li>
 *     <li>Add the {@link Executable} object to the {@link ADAG} object:
 *         dax.addExecutable(preprocess);</li>
 *     </ol></li>
 * <li>Create a {@link Transformation} object: a compound Executable, i.e. an Executable depending on other executables and files:
 *     Transformation diamond = new Transformation("pegasus", "diamond", "1.0");
 *     <ol>
 *     <li>Add the sub executables for this transformation:
 *         diamond.uses(preprocess).uses(findrange).uses(analyze);</li>
 *     <li>Add the sub files (e.g. config files) for this transformation:
 *         diamond.uses(new File("config", File.LINK.INPUT));</li>
 *     <li>Finally add the Transformation to the {@link ADAG} object:
 *         dax.addTransformation(diamond);</li>
 *     </ol></li>
 * <li>Create a {@link Job} object:
 *     Job j1 = new Job("j1", "pegasus", "preprocess", "1.0", "j1");
 *     <ol>
 *     <li>Add arguments to the job object:
 *         j1.addArgument("-a", "preprocess");
 *         j1.addArgument("-T", "60").addArgument("-i", fa);
 *         j1.addArgument("-o").addArgument(fb1).addArgument(fb2);</li>
 *     <li>Add the Files that are used by this job:
 *         j1.uses(fa, File.LINK.INPUT);
 *         j1.uses(fb1, File.LINK.OUTPUT);
 *         j1.uses(new File("f.b2"), File.LINK.OUTPUT);</li>
 *     <li>Add the notifications for this job:
 *         j1.addNotification(WHEN.start, "/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com");
 *         j1.addNotification(WHEN.at_end, "/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com");</li>
 *     <li>Add {@link Profile}s to the job:
 *         j1.addProfile(Profile.NAMESPACE.dagman, "pre", "20");</li>
 *     <li>Add the Job object to the {@link ADAG}:
 *         dax.addJob(j1);</li>
 *     </ol></li>
 * <li>Add a {@link DAG} job object:
 *     DAG j2 = new DAG("j2", "findrange.dag", "j2");
 *     j2.uses(new File("f.b1"), File.LINK.INPUT);
 *     j2.uses(new File("f.c1"), File.LINK.OUTPUT);
 *     j2.addProfile(Profile.NAMESPACE.dagman, "pre", "20");
 *     j2.addProfile("condor", "universe", "vanilla");
 *     dax.addDAG(j2);</li>
 * <li>Add a {@link DAX} job object:
 *     DAX j3 = new DAX("j3", "findrange.dax", "j3");
 *     j3.addArgument("--site").addArgument("local");
 *     j3.uses(new File("f.b2"), File.LINK.INPUT);
 *     j3.uses(new File("f.c2"), File.LINK.OUTPUT);
 *     j3.addProfile("ENV", "HAHA", "YADAYADAYADA");
 *     dax.addDAX(j3);</li>
 * <li>Add the job dependencies. Dependencies can be added by specifying the job ids:
 *     dax.addDependency("j1", "j2", "1-2").addDependency("j1", "j3", "1-3");
 *     or by specifying the job|dax|dag objects directly:
 *     dax.addDependency(j1, j3);</li>
 * <li>Finally write the dax to a file:
 *     dax.writeToFile("diamond.dax");</li>
 * </ol>
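 *
 * Besides writeToFile, the generated DAX can also be streamed to standard
 * output with writeToSTDOUT(), or handed to an arbitrary Writer via
 * writeToWriter(writer, close), both defined further below.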
* * @author Gaurang Mehta gmehta at isi dot edu * @version $Revision: 4507 $ */ public class ADAG { /** * The "official" namespace URI of the DAX schema. */ public static final String SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/DAX"; /** * XSI SCHEMA NAMESPACE */ public static final String SCHEMA_NAMESPACE_XSI = "http://www.w3.org/2001/XMLSchema-instance"; /** * The "not-so-official" location URL of the DAX schema definition. */ public static final String SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/dax-3.3.xsd"; /** * The version to report. */ public static final String SCHEMA_VERSION = "3.3"; /** * The Name / Label of the DAX */ private String mName; /** * The Index of the dax object: I out of N */ private int mIndex; /** * The Count of the number of dax objects : N */ private int mCount; /** * The Map of Job, DAX and DAG objects, keyed by id * @see DAG * @see DAX * @see Job * @see AbstractJob */ private Map<String, AbstractJob> mJobs; /** * The Set of Transformation objects * @see Transformation */ private Set<Transformation> mTransformations; /** * The set of Executable objects * @see Executable */ private Set<Executable> mExecutables; /** * The list of edu.isi.pegasus.planner.dax.File objects * @see File */ private List<File> mFiles; /** * Map of Dependencies between Job,DAX,DAG objects. * Map key is a string that holds the child element reference, the value is a List of Parent objects * @see Parent */ private Map<String, List<Parent>> mDependencies; /** * List of Notification objects */ private List<Invoke> mInvokes; /** * Handle to the XML writer */ private XMLWriter mWriter; private LogManager mLogger; /** * The Simple constructor for the DAX object * @param name DAX LABEL */ public ADAG(String name) { this(name, 0, 1); } /** * DAX Constructor * @param name DAX Label * @param index Index of DAX out of N DAX's * @param count Number of DAXs in a group */ public ADAG(String name, int index, int count) { //initialize everything mName = name; mIndex = index; mCount = count; mJobs = new LinkedHashMap<String, AbstractJob>(); mTransformations = new LinkedHashSet<Transformation>(); mExecutables = new LinkedHashSet<Executable>(); mFiles = new LinkedList<File>(); mInvokes = new LinkedList<Invoke>(); mDependencies = new LinkedHashMap<String, List<Parent>>(); // PM-435 - commented this out for FHS work - do we need references to the bin/schema/...? // System.setProperty("pegasus.home", System.getProperty("user.dir")); mLogger = LogManagerFactory.loadSingletonInstance(); mLogger.logEventStart("event.dax.generate", "pegasus.version", Version. 
instance().toString()); } /** * Add a Notification for this Workflow * @param when * @param what * @return ADAG */ public ADAG addInvoke(Invoke.WHEN when, String what) { Invoke i = new Invoke(when, what); mInvokes.add(i); return this; } /** * Add a Notification for this Workflow * @param when * @param what * @return ADAG */ public ADAG addNotification(Invoke.WHEN when, String what) { return addInvoke(when, what); } /** * Add a Notification for this Workflow * @param invoke * @return ADAG */ public ADAG addInvoke(Invoke invoke) { mInvokes.add(invoke.clone()); return this; } /** * Add a Notification for this Workflow * @param invoke * @return ADAG */ public ADAG addNotification(Invoke invoke) { return addInvoke(invoke); } /** * Add a List of Notifications for this Workflow * @param invokes * @return ADAG */ public ADAG addInvokes(List<Invoke> invokes) { for (Invoke invoke : invokes) { this.addInvoke(invoke); } return this; } /** * Add a List of Notifications for this Workflow * @param invokes * @return ADAG */ public ADAG addNotifications(List<Invoke> invokes) { return addInvokes(invokes); } /** * Add a RC File object to the top of the DAX. * @param file File object to be added to the RC section * @return ADAG * @see File */ public ADAG addFile(File file) { mFiles.add(file); return this; } /** * Add Files to the RC Section on top of the DAX * @param files List of file objects to be added to the RC Section * @return ADAG * @see File * */ public ADAG addFiles(List<File> files) { mFiles.addAll(files); return this; } /** * Add Executable to the DAX * @param executable Executable to be added * @return ADAG * @see Executable */ public ADAG addExecutable(Executable executable) { if (!mExecutables.contains(executable)){ mExecutables.add(new Executable(executable)); } else { throw new RuntimeException("Error: Executable "+executable.toString()+" already exists in the DAX.\n"); } return this; } /** * Add Multiple Executable objects to the DAX * @param executables List of Executable objects to be added * @return ADAG * @see Executable */ public ADAG addExecutables(List<Executable> executables) { for (Executable executable : executables) { addExecutable(executable); } return this; } /** * Checks if a given executable exists in the DAX-based Transformation Catalog * @param executable * @return boolean */ public boolean containsExecutable(Executable executable){ return mExecutables.contains(executable); } /** * Add Transformation to the DAX * @param transformation Transformation object to be added * @return ADAG * @see Transformation */ public ADAG addTransformation(Transformation transformation) { if (!mTransformations.contains(transformation)){ mTransformations.add(new Transformation(transformation)); } else { throw new RuntimeException("Error: Transformation "+transformation.toString()+" already exists in the DAX.\n"); } return this; } /** * Add Multiple Transformations to the DAX * @param transformations List of Transformation objects * @return ADAG * @see Transformation */ public ADAG addTransformations(List<Transformation> transformations) { for (Transformation transformation: transformations){ addTransformation(transformation); } return this; } /** * Checks if a given Transformation exists in the DAX-based Transformation Catalog * @param transformation Transformation * @return boolean */ public boolean containsTransformation(Transformation transformation){ return mTransformations.contains(transformation); } /** * Add AbstractJob to the DAX * @param ajob AbstractJob * @return ADAG * @see Job * @see DAG * @see DAX * @see AbstractJob */ private 
ADAG addAbstractJob(AbstractJob ajob) { if (!mJobs.containsKey(ajob.mId)) { mJobs.put(ajob.mId,ajob); } else { throw new RuntimeException("Job of type "+ajob.getClass().getSimpleName()+" with jobid "+ajob.mId+" already exists in the DAX"); } return this; } /** * Add AbstractJobs to the DAX * @param ajobs AbstractJob * @return ADAG * @see Job * @see DAG * @see DAX * @see AbstractJob */ private ADAG addAbstractJobs(List<AbstractJob> ajobs) { for (AbstractJob ajob: ajobs) { addAbstractJob(ajob); } return this; } /** * Returns an abstract Job with id ajobid if present otherwise null. * @param ajobid * @return */ private AbstractJob getAbstractJob(String ajobid) { if (ajobid != null) { AbstractJob j = mJobs.get(ajobid); if (j != null) { return j; } else { mLogger.log("No Job/DAX/DAG found with id " + ajobid, LogManager.ERROR_MESSAGE_LEVEL); } } return null; } /** * Check if an abstractjob exists in the DAX * @param ajob * @return */ private boolean containsAbstractJob(AbstractJob ajob){ return mJobs.containsKey(ajob.mId); } /** * Check if a jobid exists in the DAX * @param ajobid * @return */ private boolean containsAbstractJobId(String ajobid){ return mJobs.containsKey(ajobid); } /** * Add Job to the DAX * @param job * @return ADAG * @see Job * @see AbstractJob */ public ADAG addJob(Job job) { return addAbstractJob(job); } /** * Add multiple Jobs to the DAX * @param jobs * @return ADAG * @see Job * @see AbstractJob */ public ADAG addJobs(List<Job> jobs) { for(Job job: jobs){ addJob(job); } return this; } /** * Check if a job exists in the DAX * @param job * @return */ public boolean containsJob(Job job){ return containsAbstractJob(job); } /** * Check if a jobid exists in the DAX * @param jobid * @return */ public boolean containsJobId(String jobid){ return containsAbstractJobId(jobid); } /** * Returns a Job object with id jobid if present otherwise null. * @param jobid * @return */ public Job getJob(String jobid){ AbstractJob j = getAbstractJob(jobid); if (j!=null){ if (j.isJob()) { return (Job)j; } else { mLogger.log("Returned object is not of type Job, but "+j.getClass().getSimpleName(),LogManager.ERROR_MESSAGE_LEVEL); } } return null; } /** * Returns a DAX object with id daxid if present otherwise null. * @param daxid * @return */ public DAX getDAX(String daxid){ AbstractJob j = getAbstractJob(daxid); if (j!=null){ if (j.isDAX()) { return (DAX)j; } else { mLogger.log("Returned object is not of type DAX, but "+j.getClass().getSimpleName(),LogManager.ERROR_MESSAGE_LEVEL); } } return null; } /** * Returns a DAG object with id dagid if present otherwise null. 
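* For example (illustrative), getDAG("j2") would return the DAG job added with id "j2". 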
* @param dagid * @return */ public DAG getDAG(String dagid){ AbstractJob j = getAbstractJob(dagid); if (j!=null){ if (j.isDAG()) { return (DAG)j; } else { mLogger.log("Returned object is not of type DAG, but "+j.getClass().getSimpleName(),LogManager.ERROR_MESSAGE_LEVEL); } } return null; } /** * Add a DAG job to the DAX * @param dag the DAG to be added * @return ADAG * @see DAG * @see AbstractJob */ public ADAG addDAG(DAG dag) { return addAbstractJob(dag); } /** * Add multiple DAG jobs to the DAX * @param dags List of DAG jobs to be added * @return ADAG * @see DAG * @see AbstractJob */ public ADAG addDAGs(List<DAG> dags) { for(DAG dag: dags){ addDAG(dag); } return this; } /** * Check if a DAG job exists in the DAX * @param dag * @return */ public boolean containsDAG(DAG dag){ return containsAbstractJob(dag); } /** * Check if a DAG job id exists in the DAX * @param dagid * @return */ public boolean containsDAGId(String dagid){ return containsAbstractJobId(dagid); } /** * Add a DAX job to the DAX * @param dax DAX to be added * @return ADAG * @see DAX * @see AbstractJob */ public ADAG addDAX(DAX dax) { return addAbstractJob(dax); } /** * Add multiple DAX jobs to the DAX * @param daxs List of DAX jobs to be added * @return ADAG * @see DAX * @see AbstractJob */ public ADAG addDAXs(List<DAX> daxs) { for(DAX dax: daxs){ addDAX(dax); } return this; } /** * Check if a DAX job exists in the DAX * @param dax * @return */ public boolean containsDAX(DAX dax){ return containsAbstractJob(dax); } /** * Check if a DAX job id exists in the DAX * @param daxid * @return */ public boolean containsDAXId(String daxid){ return containsAbstractJobId(daxid); } /** * Add a parent child dependency between two jobs,dax,dag * @param parent String job,dax,dag id * @param child String job,dax,dag id * @return ADAG * */ public ADAG addDependency(String parent, String child) { addDependency(parent, child, null); return this; } /** * Add a parent child dependency between two jobs,dax,dag * @param parent Job|DAX|DAG object * @param child Job|DAX|DAG object * @return ADAG */ public ADAG addDependency(AbstractJob parent, AbstractJob child) { addDependency(parent.getId(), child.getId(), null); return this; } /** * Add a parent child dependency with a dependency label * @param parent String job,dax,dag id * @param child String job,dax,dag id * @param label String dependency label * @return ADAG */ public ADAG addDependency(String parent, String child, String label) { if(containsAbstractJobId(parent) && containsAbstractJobId(child)){ List<Parent> parents = mDependencies.get(child); if (parents == null) { parents = new LinkedList<Parent>(); } Parent p = new Parent(parent, label); parents.add(p); mDependencies.put(child, parents); } else { throw new RuntimeException("Either Job with id " + parent + " or " + child + " is not added to the DAX.\n" + "Please add the jobs first to the dax and then add the dependencies between them\n"); } return this; } /** * Add a parent child dependency with a dependency label * @param parent Job|DAX|DAG object * @param child Job|DAX|DAG object * @param label String label for annotation * @return ADAG */ public ADAG addDependency(AbstractJob parent, AbstractJob child, String label) { addDependency(parent.getId(), child.getId(), label); return this; } /** * Generate a DAX File out of this object. * @param daxfile The file to write the DAX to */ public void writeToFile(String daxfile) { try { mWriter = new XMLWriter(new FileWriter(daxfile)); toXML(mWriter); mWriter.close(); } catch (IOException ioe) { System.err.println(ioe.getMessage()); } } /** 
* Generate a DAX representation on STDOUT. */ public void writeToSTDOUT() { mWriter = new XMLWriter(new BufferedWriter(new OutputStreamWriter( System.out))); toXML(mWriter); mWriter.close(); } /** * Generate a DAX representation and pipe it into the Writer * @param writer A Writer object * @param close Whether writer should be closed on return. */ public void writeToWriter(Writer writer, boolean close) { mWriter = new XMLWriter(writer); toXML(mWriter); if (close) { mWriter.close(); } } /** * Generates a DAX representation. * @param writer */ public void toXML(XMLWriter writer) { int indent = 0; writer.startElement("adag"); writer.writeAttribute("xmlns", SCHEMA_NAMESPACE); writer.writeAttribute("xmlns:xsi", SCHEMA_NAMESPACE_XSI); writer.writeAttribute("xsi:schemaLocation", SCHEMA_NAMESPACE + " " + SCHEMA_LOCATION); writer.writeAttribute("version", SCHEMA_VERSION); writer.writeAttribute("name", mName); writer.writeAttribute("index", Integer.toString(mIndex)); writer.writeAttribute("count", Integer.toString(mCount)); //print notification invokes writer.writeXMLComment( "Section 1: Invokes - Adds notifications for a workflow (can be empty)", true); for (Invoke i : mInvokes) { i.toXML(writer, indent + 1); } //print file writer.writeXMLComment( "Section 2: Files - Acts as a Replica Catalog (can be empty)", true); for (File f : mFiles) { f.toXML(writer, indent + 1); } //print executable writer.writeXMLComment( "Section 3: Executables - Acts as a Transformation Catalog (can be empty)", true); for (Executable e : mExecutables) { e.toXML(writer, indent + 1); } //print transformation writer.writeXMLComment( "Section 4: Transformations - Aggregates executables and Files (can be empty)", true); for (Transformation t : mTransformations) { t.toXML(writer, indent + 1); } //print jobs, daxes and dags writer.writeXMLComment( "Section 5: Jobs, DAXs or DAGs - Defines a JOB or DAX or DAG (At least 1 required)", true); for (AbstractJob j : mJobs.values()) { j.toXML(writer, indent + 1); } //print dependencies writer.writeXMLComment( "Section 6: Dependencies - Parent Child relationships (can be empty)", true); for (String child : mDependencies.keySet()) { writer.startElement("child", indent + 1).writeAttribute("ref", child); for (Parent p : mDependencies.get(child)) { p.toXML(writer, indent + 2); } writer.endElement(indent + 1); } //end adag writer.endElement(); } /** * Create an example DIAMOND DAX * @param args */ public static void main(String[] args) { if (args.length == 0) { System.out.println("Usage: java ADAG <filename>"); System.exit(1); } Diamond().writeToFile(args[0]); } private static ADAG Diamond() { ADAG dax = new ADAG("test"); File fa = new File("f.a"); fa.addMetaData("string", "foo", "bar"); fa.addMetaData("int", "num", "1"); fa.addProfile("env", "FOO", "/usr/bar"); fa.addProfile("globus", "walltime", "40"); fa.addPhysicalFile("file:///scratch/f.a", "local"); dax.addFile(fa); File fb1 = new File("f.b1"); fb1.addMetaData("string", "foo", "bar"); fb1.addMetaData("int", "num", "2"); fb1.addProfile("env", "GOO", "/usr/foo"); fb1.addProfile("globus", "walltime", "40"); dax.addFile(fb1); File fb2 = new File("f.b2"); fb2.addMetaData("string", "foo", "bar"); fb2.addMetaData("int", "num", "3"); fb2.addProfile("env", "BAR", "/usr/goo"); fb2.addProfile("globus", "walltime", "40"); dax.addFile(fb2); File fc1 = new File("f.c1"); fc1.addProfile("env", "TEST", "/usr/bin/true"); fc1.addProfile("globus", "walltime", "40"); dax.addFile(fc1); File fc2 = new File("f.c2"); fc2.addMetaData("string", "foo", "bar"); 
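// (illustrative note: in this example f.c2 is later produced as an output of the sub-DAX job j3 and consumed as an input by the analyze job j4) 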
fc2.addMetaData("int", "num", "5"); dax.addFile(fc2); File fd = new File("f.d"); dax.addFile(fd); Executable preprocess = new Executable("pegasus", "preproces", "1.0"); preprocess.setArchitecture(Executable.ARCH.X86).setOS( Executable.OS.LINUX); preprocess.setInstalled(false); preprocess.addPhysicalFile( new PFN("file:///opt/pegasus/default/bin/keg")); preprocess.addProfile(Profile.NAMESPACE.globus, "walltime", "120"); preprocess.addMetaData("string", "project", "pegasus"); Executable findrange = new Executable("pegasus", "findrange", "1.0"); findrange.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); findrange.unsetInstalled(); findrange.addPhysicalFile(new PFN("http://pegasus.isi.edu/code/bin/keg")); findrange.addProfile(Profile.NAMESPACE.globus, "walltime", "120"); findrange.addMetaData("string", "project", "pegasus"); Executable analyze = new Executable("pegasus", "analyze", "1.0"); analyze.setArchitecture(Executable.ARCH.X86).setOS(Executable.OS.LINUX); analyze.unsetInstalled(); analyze.addPhysicalFile(new PFN( "gsiftp://localhost/opt/pegasus/default/bin/keg")); analyze.addProfile(Profile.NAMESPACE.globus, "walltime", "120"); analyze.addMetaData("string", "project", "pegasus"); dax.addExecutable(preprocess).addExecutable(findrange).addExecutable( analyze); Transformation diamond = new Transformation("pegasus", "diamond", "1.0"); diamond.uses(preprocess).uses(findrange).uses(analyze); diamond.uses(new File("config", File.LINK.INPUT)); dax.addTransformation(diamond); Job j1 = new Job("j1", "pegasus", "preprocess", "1.0", "j1"); j1.addArgument("-a preprocess -T 60 -i ").addArgument(fa); j1.addArgument("-o ").addArgument(fb1).addArgument(fb2); j1.uses(fa, File.LINK.INPUT); j1.uses(fb1, File.LINK.OUTPUT); j1.uses("f.b2", File.LINK.OUTPUT); j1.addProfile(Profile.NAMESPACE.dagman, "pre", "20"); j1.addInvoke(WHEN.start,"/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com"); j1.addInvoke(WHEN.at_end,"/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com"); dax.addJob(j1); DAG j2 = new DAG("j2", "findrange.dag", "j2"); j2.uses(new File("f.b1"), File.LINK.INPUT); j2.uses("f.c1", File.LINK.OUTPUT, File.TRANSFER.FALSE, false); j2.addProfile(Profile.NAMESPACE.dagman, "pre", "20"); j2.addProfile("condor", "universe", "vanilla"); j2.addInvoke(WHEN.start,"/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com"); j2.addInvoke(WHEN.at_end,"/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com"); dax.addDAG(j2); DAX j3 = new DAX("j3", "findrange.dax", "j3"); j3.addArgument("--site ").addArgument("local"); j3.uses(new File("f.b2"), File.LINK.INPUT); j3.uses(new File("f.c2"), File.LINK.OUTPUT, File.TRANSFER.FALSE, false); j3.addInvoke(Invoke.WHEN.start, "/bin/notify -m START gmehta@isi.edu"); j3.addInvoke(Invoke.WHEN.at_end, "/bin/notify -m END gmehta@isi.edu"); j3.addInvoke(WHEN.start,"/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com"); j3.addInvoke(WHEN.at_end,"/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com"); j3.addProfile("ENV", "HAHA", "YADAYADAYADA"); dax.addDAX(j3); Job j4 = new Job("j4", "pegasus", "analyze", ""); File[] infiles = {fc1, fc2}; j4.addArgument("-a", "analyze").addArgument("-T").addArgument("60"). 
addArgument("-i", infiles, " ", ","); j4.addArgument("-o", fd); j4.uses(fc1, File.LINK.INPUT); j4.uses(fc2, File.LINK.INPUT); j4.uses(fd, File.LINK.OUTPUT); j4.addInvoke(WHEN.start,"/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com"); j4.addInvoke(WHEN.at_end,"/usr/local/pegasus/libexec/notification/email -t notify@example.com -f workflow@example.com"); dax.addJob(j4); dax.addDependency("j1", "j2", "1-2"); dax.addDependency("j1", "j3", "1-3"); dax.addDependency("j2", "j4"); dax.addDependency("j3", "j4"); return dax; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/dax/Job.java0000644000175000017500000000455311757531137023432 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.dax; import edu.isi.pegasus.common.util.XMLWriter; /** * * @author gmehta * @version $Revision: 4135 $ */ public class Job extends AbstractJob { public Job(String id, String name) { this(id, "", name, "", ""); } public Job(String id, String name, String label) { this(id, "", name, "", label); } public Job(String id, String namespace, String name, String version) { this(id, namespace, name, version, ""); } public Job(Job j) { super(j); } public Job(String id, String namespace, String name, String version, String label) { super(); checkID(id); // to decide whether to exit. Currently just logging error and proceeding. mId = id; mName = name; mNamespace = namespace; mVersion = version; mNodeLabel = label; } public String getNamespace() { return mNamespace; } public String getVersion() { return mVersion; } /** * Is this Object a Job * @return */ public boolean isJob() { return true; } /** * Overrides Base TOXML method. * @param writer * @param indent */ @Override public void toXML(XMLWriter writer, int indent) { writer.startElement( "job", indent); writer.writeAttribute( "id", mId); if (mNamespace != null && !mNamespace.isEmpty()) { writer.writeAttribute("namespace", mNamespace); } writer.writeAttribute("name", mName); if (mVersion != null && !mVersion.isEmpty()) { writer.writeAttribute("version", mVersion); } super.toXML(writer, indent); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/ranking/0000755000175000017500000000000011757531667022733 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/ranking/Rank.java0000644000175000017500000000775711757531137024501 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. 
All rights reserved. */ package edu.isi.pegasus.planner.ranking; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.selector.site.heft.Algorithm; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.parser.dax.DAXParser2; import edu.isi.pegasus.planner.parser.DAXParserFactory; import edu.isi.pegasus.planner.parser.Parser; import edu.isi.pegasus.planner.parser.dax.Callback; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.LinkedList; /** * The Rank class that ranks the DAX'es * * @author Karan Vahi * @version $Revision: 3778 $ */ public class Rank { /** * The handle to the ranking algorithm. */ private Algorithm mHeft; /** * The pegasus bag. */ private PegasusBag mBag; /** * The list of candidate grid sites. */ private List mSites; /** * The optional request id. */ private String mRequestID; /** * The handle to the logging object. */ private LogManager mLogger; /** * The default constructor. */ public Rank() { } /** * Initializes the rank client. * * @param bag the PegasusBag. * @param sites the sites where the workflow can potentially run. * @param id the request id */ public void initialize( PegasusBag bag , List sites , String id ){ mBag = bag; //set the wings request property mBag.getPegasusProperties().setProperty( "pegasus.wings.request.id", id); mLogger = bag.getLogger(); mHeft = new Algorithm( bag ); mRequestID = id; mSites = sites; } /** * Ranks the daxes, and returns a sorted collection of Ranking objects. * * @param daxes Collection * * @return a sorted collection according to the ranks. */ public Collection rank( Collection daxes ){ Collection result = new LinkedList(); long max = 0; //traverse through the DAX'es long runtime; for( Iterator it = daxes.iterator(); it.hasNext(); ){ String dax = ( String ) it.next(); Callback cb = DAXParserFactory.loadDAXParserCallback( mBag.getPegasusProperties(), dax, "DAX2CDAG" ); mLogger.log( "Ranking dax " + dax, LogManager.DEBUG_MESSAGE_LEVEL ); // DAXParser2 daxParser = new DAXParser2( dax, mBag, cb ); Parser p = (Parser)DAXParserFactory.loadDAXParser( mBag, cb, dax ); p.startParser( dax ); ADag dag = (ADag)cb.getConstructedObject(); //dag.setRequestID( mRequestID ); mHeft.schedule( dag, mSites ); runtime = mHeft.getMakespan(); max = ( runtime > max ) ? runtime : max; result.add( new Ranking( dax, runtime ) ); } //update the ranks for all the daxes ( inverse them ) for( Iterator it = result.iterator(); it.hasNext(); ){ Ranking r = ( Ranking )it.next(); //inverse the ranking r.setRank( max - r.getRuntime() ); } Collections.sort( (List)result, Collections.reverseOrder() ); return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/ranking/GetDAX.java0000644000175000017500000003407711757531137024655 0ustar ryngerynge/** * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found at $PEGASUS_HOME/GTPL or * http://www.globus.org/toolkit/download/license.html. * This notice must appear in redistributions of this file * with or without modification. * * Redistributions of this Software, with or without modification, must reproduce * the GTPL in: * (1) the Software, or * (2) the Documentation or * some other similar material which is provided with the Software (if any). * * Copyright 1999-2004 * University of Chicago and The University of Southern California. * All rights reserved. 
*/ package edu.isi.pegasus.planner.ranking; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.CommonProperties; import edu.isi.pegasus.planner.catalog.Catalog; import java.util.Properties; import java.util.Collection; import java.util.Enumeration; import java.util.LinkedList; import java.io.PrintWriter; import java.io.FileWriter; import java.io.IOException; import java.io.File; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.SQLException; import java.sql.DriverManager; import java.sql.ResultSet; /** * This class is responsible for fetching the DAX'es on the basis of the * request IDs from the Windward Provenance Tracking Catalog. If there is * more than one way to get the DAX'es, then this should be an interface. * * @author Karan Vahi * @version $Revision: 2585 $ */ public class GetDAX { /** * Prefix for the property subset to use with this catalog. */ public static final String c_prefix = "pegasus.catalog.provenance.windward"; /** * The DB Driver properties prefix. */ public static final String DB_PREFIX = "pegasus.catalog.provenance.windward.db"; /** * The statement to prepare to slurp attributes. */ private static final String mCStatements[] = { // 0: "SELECT dax FROM instances_and_daxes WHERE seed_id=?", }; /** * Maintains the connection to the database over the lifetime of * this instance. */ private Connection mConnection = null; /** * Maintains an essential set of prepared statements, ready to use. */ private PreparedStatement mStatements[] = null; /** * The properties passed to the client. */ private Properties mProps; /** * The instance to the Logging manager. */ private LogManager mLogger; /** * The default constructor. */ public GetDAX() { mLogger = LogManagerFactory.loadSingletonInstance(); // make connection defunct mConnection = null; mStatements = null; } /** * A convenience method to connect on the basis of PegasusProperties. * Eventually this logic should go in the invoking code or factory. * * @param properties PegasusProperties * * @return boolean */ public boolean connect( PegasusProperties properties ){ CommonProperties props = properties.getVDSProperties(); Properties connect = props.matchingSubset( GetDAX.c_prefix, false ); //get the default db driver properties in first pegasus.catalog.*.db.driver.* Properties db = props.matchingSubset( Catalog.DB_ALL_PREFIX, false ); //now overload with the provenance catalog specific db properties. //pegasus.catalog.provenance.windward.db.* db.putAll( props.matchingSubset( GetDAX.DB_PREFIX , false ) ); //to make sure that no confusion happens. //add the db prefix to all the db properties for( Enumeration e = db.propertyNames(); e.hasMoreElements(); ){ String key = (String)e.nextElement(); connect.put( "db." + key, db.getProperty( key )); } return connect( connect ); } /** * Establishes a connection to the database from the properties. You * can specify a driver property to contain the class name of * the JDBC driver for your database. This property will be removed * before attempting to connect. You must specify a url * property to describe the connection. It will be removed before * attempting to connect. * * @param props is the property table with sufficient settings to * establish a link with the database. The minimum required key is * "url", and possibly "driver". Any other keys depend on the database * driver. 
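* For example (illustrative values only): db.driver=MySQL, * db.url=jdbc:mysql://localhost/ptc; alternatively db.hostname and * db.database may be given, and the JDBC URL is derived from them. 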
*
* @return true if connected, false if failed to connect.
*
* @see java.sql.DriverManager#getConnection( String, Properties )
*/
public boolean connect( Properties props ) {
    boolean result = false;
    // class loader: Will propagate any runtime errors!!!
    String driver = (String) props.remove("db.driver");
    Properties localProps = CommonProperties.matchingSubset( (Properties)props.clone(), "db", false );

    String url = (String) localProps.remove("url");
    if( url == null ){
        //try to construct the jdbc string from the properties
        url = getJDBCURL( driver, localProps );
    }
    if (url == null || url.length() == 0) {
        return result;
    }

    try {
        if (driver != null) {
            //only support mysql and postgres for time being
            if( driver.equalsIgnoreCase( "MySQL") ){
                driver = "com.mysql.jdbc.Driver";
            }
            else if ( driver.equalsIgnoreCase( "Postgres" )){
                driver = "org.postgresql.Driver";
            }
            mLogger.log( "Driver being used to connect to Windward Provenance Catalog is " + driver,
                         LogManager.DEBUG_MESSAGE_LEVEL );
            Class.forName(driver);
        }
    }
    catch (Exception e) {
        mLogger.log( "While connecting to Windward Provenance Catalog", e,
                     LogManager.DEBUG_MESSAGE_LEVEL );
        return result;
    }

    try {
        mLogger.log( "Connecting with jdbc url " + url , LogManager.DEBUG_MESSAGE_LEVEL );
        mConnection = DriverManager.getConnection( url, localProps );
        // m_autoinc = mConnection.getMetaData().supportsGetGeneratedKeys();

        // prepared statements are Singletons -- prepared on demand
        mStatements = new PreparedStatement[ mCStatements.length ];
        for (int i = 0; i < mCStatements.length; ++i) {
            mStatements[i] = null;
        }
        result = true;
    }
    catch (SQLException e) {
        mLogger.log( "While connecting to Windward Provenance Catalog", e ,
                     LogManager.DEBUG_MESSAGE_LEVEL );
        result = false;
    }
    return result;
}

/**
 * Constructs the jdbc url on the basis of the driver and db properties.
 *
 * @param driver     the driver being used.
 * @param properties the db properties.
 *
 * @return the jdbc url, else null if unable to construct it.
 */
protected String getJDBCURL( String driver, Properties properties ){
    if( driver == null ) {
        return null;
    }
    StringBuffer result = new StringBuffer();
    result.append( "jdbc:" ).append( driver.toLowerCase() ).append( "://" );

    String hostname = (String)properties.remove( "hostname" );
    if( hostname == null || hostname.length() == 0 ){
        return null;
    }
    result.append( hostname );

    String database = (String)properties.remove( "database" );
    if( database == null || database.length() == 0 ){
        return null;
    }
    result.append( "/" ).append( database );

    return result.toString();
}

/**
 * Given a request ID it fetches the DAX'es from the DB and writes them out
 * to the directory passed.
 *
 * @param id  the request id.
 * @param dir the directory where the DAX'es need to be placed.
 *
 * @return a Collection of absolute paths to the DAX'es placed in the directory.
 */
public Collection get( String id, String dir ){
    if( isClosed() ){
        throw new RuntimeException( "The connection to backend database is closed" );
    }//if

    if( dir == null ){
        throw new RuntimeException( "Unable to write out to null directory" );
    }

    Collection result = new LinkedList();

    //get the prepared statement
    int which = 0;
    try{
        //do sanity check on dir
        sanityCheck( new File( dir ) );

        PreparedStatement ps = getStatement(which);
        ps.setString( 1, id );
        mLogger.log( "Executing query " + ps.toString(), LogManager.DEBUG_MESSAGE_LEVEL );

        ResultSet rs = ps.executeQuery();
        int index = 0;
        while ( rs.next() ) {
            index++;
            String xml = rs.getString( 1 );
            //construct the name of the file on the basis of the request id
            //and index only.
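//e.g. a request id of "seed-42" at index 2 yields the file
//"seed-42_2.dax" (illustrative values)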
            StringBuffer name = new StringBuffer();
            name.append( id ).append( "_" ).append( index );
            name.append( ".dax" );

            //pipe the dax to the directory.
            File dax = new File( dir, name.toString() );
            PrintWriter pw = new PrintWriter( new FileWriter( dax ) );
            pw.println( xml );
            pw.close();

            //add to the result
            result.add( dax.getAbsolutePath() );
        }
        rs.close();
    }
    catch (SQLException e) {
        throw new RuntimeException( "Unable to query from Windward Provenance Catalog " , e);
    }
    catch ( IOException ioe ) {
        throw new RuntimeException( "IOException while trying to write to dir " + dir , ioe );
    }

    return result;
}

/**
 * Predicate to check if the connection to the catalog's implementation
 * is still active. This helps determine whether it makes sense to call
 * close().
 *
 * @return true if the implementation is disassociated, false otherwise.
 * @see #close()
 */
public boolean isClosed() {
    return ( mConnection == null );
}

/**
 * Explicitly frees resources before the garbage collection hits.
 */
public void close() {
    if (mConnection != null) {
        try {
            if (!mConnection.getAutoCommit()) {
                mConnection.commit();
            }
        } catch (SQLException e) {
            // ignore
        }
    }
    if (mStatements != null) {
        try {
            for (int i = 0; i < mCStatements.length; ++i) {
                if (mStatements[i] != null) {
                    mStatements[i].close();
                    mStatements[i] = null;
                }
            }
        } catch (SQLException e) {
            // ignore
        } finally {
            mStatements = null;
        }
    }
    if (mConnection != null) {
        try {
            mConnection.close();
        } catch (SQLException e) {
            // ignore
        } finally {
            mConnection = null;
        }
    }
}

/**
 * Singleton manager for prepared statements. This method checks that a
 * prepared statement is ready to use, and will create an instance of the
 * prepared statement, if it was unused previously.
 *
 * @param i is the index of the prepared statement to check.
 * @return a handle to the prepared statement.
 *
 * @throws SQLException in case the statement could not be prepared.
 */
protected PreparedStatement getStatement(int i) throws SQLException {
    if (mStatements[i] == null) {
        mStatements[i] = mConnection.prepareStatement(mCStatements[i]);
    } else {
        mStatements[i].clearParameters();
    }
    return mStatements[i];
}

/**
 * Checks the destination location for existence, whether it can
 * be created, whether it is writable etc.
 *
 * @param dir is the new base directory to optionally create.
 *
 * @throws IOException in case of error while writing out files.
 */
protected static void sanityCheck( File dir ) throws IOException{
    if ( dir.exists() ) {
        // location exists
        if ( dir.isDirectory() ) {
            // OK, is a directory
            if ( dir.canWrite() ) {
                // can write, all is well
                return;
            } else {
                // all is there, but I cannot write to dir
                throw new IOException( "Cannot write to existing directory " +
                                       dir.getPath() );
            }
        } else {
            // exists but not a directory
            throw new IOException( "Destination " + dir.getPath() + " already " +
                                   "exists, but is not a directory." );
        }
    } else {
        // does not exist, try to make it
        if ( ! dir.mkdirs() ) {
            throw new IOException( "Unable to create directory " +
                                   dir.getPath() );
        }
    }
}

/**
 * For testing purposes only.
 *
 * @param args the arguments passed.
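*
* <p>Note: this is only a smoke test. It assumes that the
* {@code pegasus.catalog.provenance.windward.*} connection properties are
* resolvable through {@code PegasusProperties.getInstance()}, and that the
* request id and output directory hardcoded below exist.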
*/ public static void main( String[] args ){ GetDAX d = new GetDAX(); LogManagerFactory.loadSingletonInstance().setLevel( LogManager.DEBUG_MESSAGE_LEVEL ); System.out.println( "Connecting to database " + d.connect( PegasusProperties.getInstance( ) ) ); //d.get( "RPaper-ModelerThenClassifier-d3206cf5-b3ad-4c9d-9f08-5d25653d5ccf", null ); Collection daxes = d.get( "RPaper-ModelerThenClassifier-a93169ee-ed72-4d4b-be99-f6d69ae29e04" , "/tmp/wings" ); System.out.println( "DAX'es written out are " + daxes ); d.close(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/ranking/Ranking.java0000644000175000017500000000753311757531137025167 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.ranking; /** * A Data class that associates a DAX with the rank. * * @author Karan Vahi * @version $Revision: 793 $ */ public class Ranking implements Comparable { /** * The name of the DAX. */ private String mName; /** * Rank of the dax. */ private long mRank; /** * The runtime of the dax in seconds. */ private long mRuntime; /** * The overloaded constructor. * * @param name the name of the dax * @param runtime the runtime of the dax */ public Ranking( String name, long runtime ) { mRuntime = runtime; mName = name; } /** * Sets the runtime. * * @param runtime the runtime */ public void setRuntime( long runtime ){ mRuntime = runtime; } /** * Sets the rank. * * @param rank the rank. */ public void setRank( long rank ){ mRank = rank; } /** * Sets the name. * * @param name the name of the dax */ public void setName( String name ){ mName = name; } /** * Returns the rank. * * @return the rank. */ public long getRank(){ return mRank; } /** * Returns the runtime. * * @return the runtime */ public long getRuntime( ){ return mRuntime; } /** * Returns the name of the dax. * * @return the name */ public String getName(){ return mName; } /** * Returns a textual description. * * @return String */ public String toString(){ StringBuffer sb = new StringBuffer(); // sb.append( mName ).append( ":" ).append( "DAX" ).append( ":" ).append( mRank ); //adding new format for Ensemble just pass daxname and rank sb.append( mName ).append( "\t" ).append( mRank ).append( "\t" ).append( mRuntime ); return sb.toString(); } /** * Implementation of the {@link java.lang.Comparable} interface. * Compares this object with the specified object for order. Returns a * negative integer, zero, or a positive integer as this object is * less than, equal to, or greater than the specified object. The * definitions are compared by their type, and by their short ids. * * @param o is the object to be compared * @return a negative number, zero, or a positive number, if the * object compared against is less than, equals or greater than * this object. * @exception ClassCastException if the specified object's type * prevents it from being compared to this Object. 
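*
* <p>This ordering, combined with {@code Collections.reverseOrder()}, is
* what lets a collection of rankings be sorted from the highest to the
* lowest rank, as done in {@code Rank#rank(Collection)}:
* <pre>
*   Collections.sort( (List)result, Collections.reverseOrder() );
* </pre>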
*/
public int compareTo( Object o ){
    if ( o instanceof Ranking ) {
        Ranking r = ( Ranking ) o;
        //compare explicitly instead of returning the (int) cast of the
        //difference of two longs, which could overflow the int range
        return ( this.getRank() < r.getRank() ) ? -1 :
               ( this.getRank() > r.getRank() ) ?  1 : 0;
    } else {
        throw new ClassCastException( "object is not a Ranking" );
    }
}

public boolean equals( Object o ){
    boolean result = false;
    if( o instanceof Ranking ){
        Ranking r = ( Ranking ) o;
        result = ( r.getName().equals( this.getName() ) ) &&
                 ( r.getRank() == this.getRank() );
    }
    return result;
}
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/0000755000175000017500000000000011757531667022576 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/Parser.java0000644000175000017500000003451611757531137024676 0ustar ryngerynge/**
 * Copyright 2007-2008 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.isi.pegasus.planner.parser;

import edu.isi.pegasus.common.logging.LogManagerFactory;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.common.logging.LogManager;
import edu.isi.pegasus.planner.common.PegasusProperties;

import org.xml.sax.Attributes;
import org.xml.sax.Locator;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;

import java.io.File;
import java.io.FileNotFoundException;

/**
 * This is the base class which all the XML parsing classes extend.
 * It initializes the XML parser (Xerces at present), sets its various
 * features such as validation against a schema, and handles namespace
 * resolution.
 *
 * @author Karan Vahi
 * @author Gaurang Mehta
 *
 * @version $Revision: 2603 $
 */
public abstract class Parser extends DefaultHandler{

    /**
     * Default parser name. Using Xerces at present.
     */
    protected final String DEFAULT_PARSER_NAME = "org.apache.xerces.parsers.SAXParser";

    /**
     * Locator object to determine on which line in the XML the error
     * occurred.
     */
    protected Locator mLocator;

    /**
     * Holds the text in an element (text between start and end tags, if any).
     * Used in case of elements of mixed type.
     */
    protected StringBuffer mTextContent ;

    /**
     * The LogManager object which logs the Pegasus messages.
     */
    protected LogManager mLogger;

    /**
     * The String which contains the messages to be logged.
     */
    protected String mLogMsg;

    /**
     * The object which is used to parse the dax. This reads the XML document
     * and sends it to the event handlers.
     */
    protected XMLReader mParser = null;

    /**
     * The object holding all the properties pertaining to Pegasus.
     */
    protected PegasusProperties mProps;

    /**
     * A String that holds the contents of data passed as text. The string
     * should only be trimmed when the appropriate end tag of the element is
     * invoked. At this point, a whitespace is added if there were whitespaces
     * at the ends.
     */
    protected String mTextString;

    /**
     * Boolean flag to ensure that two adjacent filenames are separated by a
     * whitespace.
     */
    protected boolean mAdjFName;

    /**
     * Initialises the parser. Sets the various features.
* However the parsing is done in the implementing class, by calling
* mParser.parse(filename).
*
* @param bag the bag of objects that is useful for initialization.
*/
public Parser( PegasusBag bag ) {
    mTextContent = new StringBuffer();
    mLogMsg      = new String();
    mLogger      = bag.getLogger();
    mProps       = bag.getPegasusProperties();
    mTextString  = new String();
    mAdjFName    = false;
    mTextContent.setLength(0);
    createParserInstance();
}

/**
 * Initialises the parser. Sets the various features. However the parsing is
 * done in the implementing class, by calling mParser.parse(filename).
 *
 * @param properties the properties passed at runtime.
 */
public Parser( PegasusProperties properties ) {
    mTextContent = new StringBuffer();
    mLogMsg      = new String();
    mLogger      = LogManagerFactory.loadSingletonInstance( properties );
    mProps       = properties;
    mTextString  = new String();
    mAdjFName    = false;
    mTextContent.setLength(0);
    createParserInstance();
}

/**
 * An empty implementation is provided by DefaultHandler of ContentHandler.
 * This method receives the notification from the SAX parser when the start
 * tag of an element comes. Any parser class must implement this method.
 */
public abstract void startElement(String uri, String local, String raw,
                                  Attributes attrs) throws SAXException ;

/**
 * An empty implementation is provided by the DefaultHandler class. This
 * method is called automatically by the SAX parser when the end tag of an
 * element comes in the XML file. Any parser class should implement this
 * method.
 */
public abstract void endElement(String uri,String localName,String qName)
    throws SAXException;

/**
 * This is called automatically when the end of the XML file is reached.
 */
public abstract void endDocument();

/**
 * Starts the parser. This starts the parsing of the file by the parser.
 *
 * @param file the path to the XML file you want to parse.
 */
public abstract void startParser(String file);

/**
 * Helps the load database to locate the XML schema, if available.
 * Please note that the schema location URL in the instance document
 * is only a hint, and may be overridden by the findings of this method.
 *
 * @return a location pointing to a definition document of the XML
 * schema that can read VDLx. Result may be null, if such a document
 * is unknown or unspecified.
 */
public abstract String getSchemaLocation();

/**
 * Returns the XML schema namespace that a document being parsed conforms
 * to.
 *
 * @return the schema namespace
 */
public abstract String getSchemaNamespace( );

/**
 * Sets the list of external real locations where the XML schema may be found.
 * Since this list can be determined at run-time through properties etc., we
 * expect this function to be called between instantiating the parser and
 * using the parser.
 *
 * @param list is a list of strings representing schema locations. The content
 * exists in pairs, one of the namespace URI, one of the location URL.
 */
public void setSchemaLocations( String list ){
    /*
    // default place to add
    list += "http://www.griphyn.org/working_groups/VDS/vdl-1.19.xsd " +
            "http://www.griphyn.org/working_groups/VDS/vdl-1.19.xsd";
    */
    // schema location handling
    try {
        mParser.setProperty(
            "http://apache.org/xml/properties/schema/external-schemaLocation",
            list);
    }
    catch (SAXException se) {
        mLogger.log("The SAXParser reported an error: " + se.getMessage(),
                    LogManager.ERROR_MESSAGE_LEVEL);
    }
}

/**
 * This is used to store the character data that is in the XML. An
 * implementation of the interface for the SAX parser.
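*
* <p>Note that a SAX parser is free to deliver the text content of a
* single element through several consecutive calls to this method; that
* is why the characters are accumulated in {@code mTextContent} and only
* consumed when the matching end tag is seen.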
*/ public void characters(char[] chars,int start,int length){ //appending the buffer with chars. We use this way bec sacks parser can //parse internally the data any way they like //Very IMPORTANT String temp = new String(chars,start,length); /*if(temp.trim().length() > 0){ mTextContent.append(temp); }*/ temp = this.ignoreWhitespace(temp); mTextContent.append(temp); //set the adjacent flag to false mAdjFName = false; } /** * Our own implementation for ignorable whitespace. A String that holds the * contents of data passed as text by the underlying parser. The whitespaces * at the end are replaced by one whitespace. * * @param str The string that contains whitespaces. * * @return String corresponding to the trimmed version. * */ public String ignoreWhitespace(String str){ return ignoreWhitespace( str, false ); } /** * Our own implementation for ignorable whitespace. A String that holds the * contents of data passed as text by the underlying parser. The whitespaces * at the end are replaced by one whitespace. * * @param str The string that contains whitespaces. * * @return String corresponding to the trimmed version. * */ /* public String ignoreWhitespace(String str){ boolean st = false; boolean end = false; int length = str.length(); if(length > 0){ //check for whitespace in the //starting if(str.charAt(0) == ' ' || str.charAt(0) == '\t' || str.charAt(0) == '\n'){ st = true; } //check for whitespace in the end if(str.length() > 1 && (str.charAt(length -1) == ' ' || str.charAt(length -1) == '\t' || str.charAt(length -1) == '\n')){ end = true; } //trim the string and add a single whitespace accordingly str = str.trim(); str = st == true ? ' ' + str:str; str = end == true ? str + ' ':str; } return str; } */ /** * Our own implementation for ignorable whitespace. A String that holds the * contents of data passed as text by the underlying parser. The whitespaces * at the end are replaced by one whitespace. * * @param str The string that contains whitespaces. * * @return String corresponding to the trimmed version. * */ public String ignoreWhitespace(String str, boolean preserveLineBreak ){ boolean st = false; boolean end = false; int length = str.length(); boolean sN = false;//start with \n ; boolean eN = false;//end with \n if(length > 0){ sN = str.charAt(0) == '\n'; eN = str.charAt(length -1) == '\n'; //check for whitespace in the //starting if(str.charAt(0) == ' ' || str.charAt(0) == '\t' || str.charAt(0) == '\n'){ st = true; } //check for whitespace in the end if(str.length() > 1 && (str.charAt(length -1) == ' ' || str.charAt(length -1) == '\t' || str.charAt(length -1) == '\n')){ end = true; } //trim the string and add a single whitespace accordingly str = str.trim(); str = st == true ? ' ' + str:str; str = end == true ? str + ' ':str; if( preserveLineBreak ){ str = sN ? '\n' + str:str; str = eN ? str + '\n':str; } } return str; } /** * Overrides the empty implementation provided by Default Handler and sets * the locator variable for the locator. * * @param loc the Locator object which keeps the track as to the line * numbers of the line being parsed. */ public void setDocumentLocator(Locator loc) { this.mLocator = loc; } /** * Tests whether the file exists or not. */ public void testForFile(String file) throws FileNotFoundException{ File f = new File(file); if (!f.exists()){ mLogMsg = "The file (" + file +" ) specified does not exist"; throw new FileNotFoundException( mLogMsg ); } } /** * Creates an instance of the parser, and sets the various options to it. 
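*
* <p>The feature URIs toggled in this method are Xerces-specific SAX
* features; the schema that validation actually runs against is supplied
* separately through {@link #setSchemaLocations(String)}.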
*/
private void createParserInstance(){
    //creating a parser
    try{
        mParser = XMLReaderFactory.createXMLReader(DEFAULT_PARSER_NAME);
    }
    catch(Exception e){
        throw new RuntimeException( "Unable to create XMLReader: " + e.getMessage() , e );
    }

    //setting the handlers. The class extends DefaultHandler, which provides
    //an empty implementation of the four handlers
    mParser.setContentHandler(this);
    mParser.setErrorHandler(new XMLErrorHandler());

    try{
        //setting the feature that the xml should be validated against the
        //xml schema specified in it
        setParserFeature("http://xml.org/sax/features/validation", true);
        setParserFeature("http://apache.org/xml/features/validation/schema", true);
        //should be set only for debugging purposes
        //setParserFeature("http://apache.org/xml/features/validation/schema-full-checking", true);
        setParserFeature("http://apache.org/xml/features/validation/dynamic", true);
        setParserFeature("http://apache.org/xml/features/validation/warn-on-duplicate-attdef", true);
        //fails with the new xerces
        //setParserFeature("http://apache.org/xml/features/validation/warn-on-undeclared-elemdef", true);
        setParserFeature("http://apache.org/xml/features/warn-on-duplicate-entitydef", true);
        setParserFeature("http://apache.org/xml/features/validation/schema/element-default", true);
    }
    catch (Exception e){
        //if a locator error then
        if(mLocator != null){
            String message = "Error in " + mLocator.getSystemId() +
                " at line " + mLocator.getLineNumber() +
                " at column " + mLocator.getColumnNumber() + " " + e.getMessage();
            mLogger.log(message,LogManager.ERROR_MESSAGE_LEVEL );
        }
        mLogger.log(e.getMessage(),LogManager.ERROR_MESSAGE_LEVEL);
    }
}

/**
 * Sets a parser feature. Failures are logged rather than propagated, which
 * allows the remaining features to still be set.
 *
 * @param uri  is the feature's URI to modify.
 * @param flag is the new value to set.
 *
 * @return true if the feature could be set, else false if an
 * exception occurred.
 */
public boolean setParserFeature( String uri, boolean flag){
    boolean result = false;
    try{
        this.mParser.setFeature(uri,flag);
        result = true;
    }
    catch (SAXException se){
        mLogger.log("Unable to set parser feature " + uri + " :" + se.getMessage(),
                    LogManager.ERROR_MESSAGE_LEVEL);
    }
    return result;
}
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/PDAXParser.java0000644000175000017500000003314211757531137025345 0ustar ryngerynge/**
 * Copyright 2007-2008 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.parser.pdax.Callback; import edu.isi.pegasus.planner.partitioner.Partition; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.common.util.FactoryException; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import java.io.File; import java.util.HashMap; import java.util.List; /** * This is a parser class for the parsing the pdax that contain the jobs in the * various partitions and the relations between the partitions. * * @author Karan Vahi * @version $Revision: 2611 $ */ public class PDAXParser extends Parser { /** * The "not-so-official" location URL of the DAX schema definition. */ public static final String SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/pdax-2.0.xsd"; /** * URI namespace */ public static final String SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/PDAX"; /** * The object holding the contents of one partition as indicated in the * pdax. */ private Partition mPartition; /** * The current depth of parsing through the xml structure. */ private int mCurrentDepth; /** * The current child. */ private String mChild; /** * List of parents for a particular child. */ private List mParents; /** * The callback handler to which the callbacks are sent during designated * points of parsing the pdax. */ private Callback mCallback; /** * The default constructor. * * @param properties the PegasusProperties to be used. */ public PDAXParser( PegasusProperties properties ) { super( properties ); //intialize to null every member variable mPartition = null; mCurrentDepth = 0; mCallback = null; } /** * The constructor initialises the parser, and turns on the validation feature * in Xerces. * * @param fileName the file which one has to parse using the parser. * @param properties the PegasusProperties to be used. */ public PDAXParser( String fileName, PegasusProperties properties ) { super( properties ); mCurrentDepth = 0; try{ this.testForFile(fileName); } catch( Exception e){ throw new RuntimeException( e ); } mCallback = null; //set the schema location against which //to validate. String schemaLoc = getSchemaLocation(); mLogger.log("Picking schema " + schemaLoc,LogManager.CONFIG_MESSAGE_LEVEL); String list = PDAXParser.SCHEMA_NAMESPACE + " " + schemaLoc; setSchemaLocations(list); } /** * Returns the XML schema namespace that a document being parsed conforms * to. * * @return the schema namespace */ public String getSchemaNamespace( ){ return PDAXParser.SCHEMA_NAMESPACE; } /** * Sets the callback handler for this parsing instance. */ public void setCallback(Callback callback){ mCallback = callback; } /** * Ends up starting the parsing of the file , by the underlying parser. * * @param file the path/url to the file that needs to be parsed. */ public void startParser(String file) { try { mParser.parse(file); } catch( FactoryException fe){ //throw it as it is for time being throw fe; } catch (Exception e) { String message; //if a locator error then if(mLocator != null){ message = "Parsing Error in " + mLocator.getSystemId() + " at line " + mLocator.getLineNumber() + " at column " + mLocator.getColumnNumber() + " : " ; } else{ message = "Parsing the PDAX file " ; mLogger.log(message, LogManager.ERROR_MESSAGE_LEVEL); } throw new RuntimeException(message, e); } } /** * An empty implementation is provided by DefaultHandler of ContentHandler. 
* This method receives the notification from the sacks parser when start * tag of an element comes. Any parser class must implement this method. */ public void startElement(String uri, String local, String raw, Attributes attrs) throws SAXException{ String key; String value; int i = 0; //new element increment the depth mCurrentDepth++; if(local.equals("pdag")){ HashMap mp = new HashMap(); for(i = 0; i < attrs.getLength(); i++){ key = attrs.getLocalName(i); value = attrs.getValue(i); //should probably check for valid attributes before setting mp.put(key,value); //System.out.println(key + " --> " + value); } //call the callback interface mCallback.cbDocument(mp); return; } else if(local.equals("partition")){ mPartition = new Partition(); for(i = 0; i < attrs.getLength(); i++){ key = attrs.getLocalName(i); value = attrs.getValue(i); //check for valid attributes before setting if(key.equals("name")){ mPartition.setName(value); } else if(key.equals("id")){ mPartition.setID(value); } else if(key.equals("index")){ int index = -1; //try convert the String to int try{ index = Integer.parseInt(value); } catch(Exception e){ invalidValue(local,key,value); } mPartition.setIndex(index); } else{ invalidAttribute(local,key,value); } //System.out.println(key + " --> " + value); } return; } else if(local.equals("job")){ String name = null; String id = null; GraphNode job; for(i = 0; i < attrs.getLength(); i++){ key = attrs.getLocalName(i); value = attrs.getValue(i); //check for valid attributes before setting if (key.equals("name")) { name = value; } else if(key.equals("id")){ id = value; } else{ //complain about invalid key invalidAttribute(local,key,value); } } job = new GraphNode(id,name); //add it to the partition mPartition.addNode(job); return; } else if(local.equals("child")){ //we do not know how many parents it has mParents = new java.util.LinkedList(); for(i = 0; i < attrs.getLength(); i++){ key = attrs.getLocalName(i); value = attrs.getValue(i); if(key.equals("ref")){ mChild = value; } else{ invalidAttribute(local,key,value); } } return; } else if(local.equals("parent")){ for( i = 0; i < attrs.getLength(); i++){ key = attrs.getLocalName(i); value = attrs.getValue(i); if(key.equals("ref")){ mParents.add(value); } else{ invalidAttribute(local,key,value); } } return; } else { mLogger.log("No implementation for element " + local, LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( "No implementation for element " + local ); } } /** * An empty implementation is provided by DefaultHandler class. This method * is called automatically by the Sax parser when the end tag of an element * comes in the xml file. Any parser class should implement this method */ public void endElement(String uri, String local, String qName){ //decrement the depth of parsing mCurrentDepth--; if(local.equals("pdag")){ //call the callback interface return; } else if(local.equals("partition")){ //call the callback interface mCallback.cbPartition(mPartition); //cleanup the object mPartition = null; } else if(local.equals("child")){ //check if it was nested in partition element //or the pdag element if(mCurrentDepth == 2){ //means the put the child and parents in partition mPartition.addParents(mChild,mParents); } else if(mCurrentDepth == 1){ //need to call the callback interface mCallback.cbParents(mChild,mParents); } else{ throw new RuntimeException( "Wrongly formed xml" ); } } else if(local.equals("parent") || local.equals("job")){ //do nothing return; } else{ //end of invalid element. //non reachable line??? 
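//(kept as a safety net: reaching this branch would mean the document
//contained an element this parser knows nothing about)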
mLogMsg = "End of invalid element reached " + local; mLogMsg = (mLocator == null) ? mLogMsg: //append the locator information mLogMsg + " at line " + mLocator.getLineNumber() + " at column " + mLocator.getColumnNumber(); throw new RuntimeException( mLogMsg ); } } /** * This is called automatically when the end of the XML file is reached. */ public void endDocument(){ //do a sanity check if(mCurrentDepth != 0){ mLogger.log("It seems that the xml was not well formed!!", LogManager.ERROR_MESSAGE_LEVEL); } //call the callback interface mCallback.cbDone(); } /** * Helps the load database to locate the PDAX XML schema, if available. * Please note that the schema location URL in the instance document * is only a hint, and may be overriden by the findings of this method. * * @return a location pointing to a definition document of the XML * schema that can read PDAX. Result may be null, if such a document * is unknown or unspecified. */ public String getSchemaLocation(){ String child = new File(this.SCHEMA_LOCATION).getName(); File pdax = // create a pointer to the default local position new File(this.mProps.getSysConfDir(), child); //System.out.println("\nDefault Location of PDAX is " + pdax.getAbsolutePath()); // Nota bene: vds.schema.dax may be a networked URI... return this.mProps.getPDAXSchemaLocation(pdax.getAbsolutePath()); } /** * Logs a message if an unknown key is come across, while parsing the * xml document. * * @param element the xml element in which the invalid key was come across. * @param key the key that is construed to be invalid. * @param value the value associated with the key. */ private void invalidAttribute(String element, String key, String value){ String message = "Invalid attribute " + key + "found in " + element + " with value " + value; message = (mLocator == null) ? message: //append the locator information message + " at line " + mLocator.getLineNumber() + " at column " + mLocator.getColumnNumber(); mLogger.log(message, LogManager.WARNING_MESSAGE_LEVEL); } /** * Logs a message if an unknown value is come across, while parsing the * xml document. * * @param element the xml element in which the invalid key was come across. * @param key the key that is construed to be invalid. * @param value the value associated with the key. */ private void invalidValue(String element, String key, String value){ String message = "Invalid value " + value + "found in " + element + " for attribute " + value; message = (mLocator == null) ? message: //append the locator information message + " at line " + mLocator.getLineNumber() + " at column " + mLocator.getColumnNumber(); mLogger.log(message,LogManager.WARNING_MESSAGE_LEVEL); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/pdax/0000755000175000017500000000000011757531667023532 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/pdax/PDAXCallbackFactoryException.java0000644000175000017500000000653711757531137031760 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.pdax; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating PDAXCallback implementations. * * @author Karan Vahi * @version $Revision: 2575 $ */ public class PDAXCallbackFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "PDAX Callback"; /** * Constructs a PDAXCallbackFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public PDAXCallbackFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a PDAXCallbackFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public PDAXCallbackFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a PDAXCallbackFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public PDAXCallbackFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a PDAXCallbackFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public PDAXCallbackFactoryException( String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/pdax/PDAX2MDAG.java0000644000175000017500000017053711757531137025651 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.parser.pdax; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.code.CodeGeneratorFactory; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.CondorVersion; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.StreamGobbler; import edu.isi.pegasus.common.util.DefaultStreamGobblerCallback; import edu.isi.pegasus.common.util.StreamGobblerCallback; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.partitioner.Partition; import edu.isi.pegasus.planner.partitioner.DAXWriter; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.TransformationFactory; import org.griphyn.vdl.euryale.FileFactory; import org.griphyn.vdl.euryale.HashedFileFactory; import org.griphyn.vdl.euryale.FlatFileFactory; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.io.FileOutputStream; import java.io.FileWriter; import java.io.PrintWriter; import java.io.BufferedWriter; import java.io.FilenameFilter; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.regex.Pattern; import java.text.NumberFormat; import java.text.DecimalFormat; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.namespace.ENV; /** * This callback ends up creating the megadag that contains the smaller dags * each corresponding to the one level as identified in the pdax file * generated by the partitioner. * * @author Karan Vahi * @version $Revision: 4507 $ */ public class PDAX2MDAG implements Callback { /** * The SubmitWriter that has to be loaded for now. */ public static final String CODE_GENERATOR_CLASS = CodeGeneratorFactory.CONDOR_CODE_GENERATOR_CLASS; /** * The prefix for the submit directory. */ public static final String SUBMIT_DIRECTORY_PREFIX = "run"; /** * The number of jobs into which each job in the partition graph is * expanded to. */ public static final int NUM_OF_EXPANDED_JOBS = 2; /** * The index of the head job. */ public static final int HEAD_INDEX = 0; /** * The index of the tail job. */ public static final int TAIL_INDEX = 1; /** * The logical name with which to query the transformation catalog for * cPlanner executable. */ public static final String CPLANNER_LOGICAL_NAME = "pegasus-plan"; /** * The namespace to use for condor dagman. */ public static final String CONDOR_DAGMAN_NAMESPACE = "condor"; /** * The logical name with which to query the transformation catalog for the * condor_dagman executable, that ends up running the mini dag as one * job. */ public static final String CONDOR_DAGMAN_LOGICAL_NAME = "dagman"; /** * The namespace to which the job in the MEGA DAG being created refer to. */ public static final String NAMESPACE = "pegasus"; /** * The planner utility that needs to be called as a prescript. */ public static final String RETRY_LOGICAL_NAME = "pegasus-plan"; /** * The dagman knobs controlled through property. 
They map the property name to * the corresponding dagman option. */ public static final String DAGMAN_KNOBS[][]={ { "pegasus.dagman.maxpre", " -MaxPre " }, { "pegasus.dagman.maxpost", " -MaxPost " }, { "pegasus.dagman.maxjobs", " -MaxJobs " }, { "pegasus.dagman.maxidle", " -MaxIdle " }, }; /** * The file Separator to be used on the submit host. */ protected static char mSeparator = File.separatorChar; /** * The directory in which the daxes corresponding to the partitions are * kept. This should be the same directory where the pdax containing the * partition graph resides. */ private String mPDAXDirectory; /** * The root of the submit directory where all the submit directories for * the various partitions reside. */ private String mSubmitDirectory; /** * The abstract dag object that ends up holding the megadag. */ private ADag mMegaDAG; /** * The internal map that maps the partition id to the job responsible * for executing the partition.. */ private Map mJobMap; /** * The internal map that contains maps the job id of the partition to the * head and tail jobs in the linear sequence of jobs to which the partion * job is expanded to. */ //private Map mSequenceMap; /** * The handle to the properties file. */ private PegasusProperties mProps; /** * The handle to the transformation catalog. */ private TransformationCatalog mTCHandle; /** * The handle to the logging object. */ private LogManager mLogger; /** * The object containing the options that were given to the concrete * planner at runtime. */ private PlannerOptions mPOptions; /** * The path to the properties file that is written out and shared by * all partitions in the mega DAG. */ private String mMDAGPropertiesFile; /** * The handle to the file factory, that is used to create the top level * directories for each of the partitions. */ private FileFactory mFactory; /** * An instance of the default stream gobbler callback implementation that * is used for creating symbolic links. */ private StreamGobblerCallback mDefaultCallback; /** * The number formatter to format the run submit dir entries. */ private NumberFormat mNumFormatter; /** * The user name of the user running Pegasus. */ private String mUser; /** * A flag to store whether the parsing is complete or not. */ private boolean mDone; /** * Any extra arguments that need to be passed to dagman, as determined * from the properties file. */ private String mDAGManKnobs; /** * The long value of condor version. */ private long mCondorVersion; /** * The cleanup scope for the workflows. */ private PegasusProperties.CLEANUP_SCOPE mCleanupScope; /** * Bag of initialization objects. */ //private PegasusBag mBag; /** * The overloaded constructor. * * @param directory the directory where the pdax and all the daxes * corresponding to the partitions reside. * @param properties the PegasusProperties to be used. * @param options the options passed to the planner. 
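*
* <p>Among other things, the constructor assembles the extra dagman
* arguments from the {@code pegasus.dagman.*} properties (see
* {@code DAGMAN_KNOBS}) via {@code constructDAGManKnobs}. For example
* (an illustrative value, not a default), setting
* {@code pegasus.dagman.maxjobs=50} is expected to contribute
* {@code -MaxJobs 50} to the dagman argument string.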
*/ public PDAX2MDAG( String directory, PegasusProperties properties, PlannerOptions options) { mPDAXDirectory = directory; mProps = properties; mLogger = LogManagerFactory.loadSingletonInstance( properties ); mPOptions = options; mTCHandle = TransformationFactory.loadInstance( mProps, mLogger ); mMDAGPropertiesFile = null; mNumFormatter = new DecimalFormat( "0000" ); mDone = false; mUser = mProps.getProperty( "user.name" ) ; if ( mUser == null ){ mUser = "user"; } //the default gobbler callback always log to debug level mDefaultCallback = new DefaultStreamGobblerCallback(LogManager.DEBUG_MESSAGE_LEVEL); mDAGManKnobs = constructDAGManKnobs( properties ); mCleanupScope = mProps.getCleanupScope(); mCondorVersion = CondorVersion.getInstance( mLogger ).numericValue(); if( mCondorVersion == -1 ){ mLogger.log( "Unable to determine the version of condor " , LogManager.WARNING_MESSAGE_LEVEL ); } else{ mLogger.log( "Condor Version detected is " + mCondorVersion , LogManager.DEBUG_MESSAGE_LEVEL ); } } /** * Checks the destination location for existence, if it can * be created, if it is writable etc. * * @param dir is the new base directory to optionally create. * * @throws IOException in case of error while writing out files. */ protected static void sanityCheck( File dir ) throws IOException{ if ( dir.exists() ) { // location exists if ( dir.isDirectory() ) { // ok, isa directory if ( dir.canWrite() ) { // can write, all is well return; } else { // all is there, but I cannot write to dir throw new IOException( "Cannot write to existing directory " + dir.getPath() ); } } else { // exists but not a directory throw new IOException( "Destination " + dir.getPath() + " already " + "exists, but is not a directory." ); } } else { // does not exist, try to make it if ( ! dir.mkdirs() ) { throw new IOException( "Unable to create directory destination " + dir.getPath() ); } } } /** * Callback when the opening tag was parsed. This contains all * attributes and their raw values within a map. This callback can * also be used to initialize callback-specific resources. * * @param attributes is a map of attribute key to attribute value */ public void cbDocument(Map attributes) { mMegaDAG = new ADag(); mJobMap = new HashMap(); //mSequenceMap = new HashMap(); //the name of the mega dag is set to the name //attribute in the pdax mMegaDAG.dagInfo.nameOfADag = (String)attributes.get("name"); mMegaDAG.dagInfo.count = (String)attributes.get("count"); mMegaDAG.dagInfo.index = (String)attributes.get("index"); // create files in the directory, unless anything else is known. try { //create a submit directory structure if required String relativeDir = ( mPOptions.getRelativeDirectory() == null ) ? this.createSubmitDirectory( mMegaDAG.getLabel(), mPOptions.getSubmitDirectory(), mUser, mPOptions.getVOGroup(), mProps.useTimestampForDirectoryStructure() ): mPOptions.getRelativeDirectory(); //set the directory structure mPOptions.setSubmitDirectory( mPOptions.getBaseSubmitDirectory(), relativeDir); mSubmitDirectory = mPOptions.getSubmitDirectory(); //we want to set the relative directory as the base working //directory for all the partition on the remote sites. 
mPOptions.setRandomDir( relativeDir ); mFactory = new FlatFileFactory(mSubmitDirectory); // minimum default } catch ( IOException ioe ) { throw new RuntimeException( "Unable to generate files in the submit directory " , ioe ); } // not in the PDAX format currently String s = (String) attributes.get("partitionCount"); // create hashed, and levelled directories try { HashedFileFactory temp = null; int partCount = ( s == null ) ? //determine at runtime the number of partitions getPartitionCount(mPOptions.getPDAX()) : Integer.parseInt(s) ; //if ( m_minlevel > 0 && m_minlevel > jobCount ) jobCount = m_minlevel; if ( partCount > 0 )temp = new HashedFileFactory( mSubmitDirectory, partCount ); else temp = new HashedFileFactory( mPDAXDirectory ); //each job creates at creates the following files // - submit file // - out file // - error file // - prescript log // - the partition directory temp.setMultiplicator(5); //we want a minimum of one level always for clarity temp.setLevels(1); //for the time being and test set files per directory to 50 /* temp.setFilesPerDirectory( 40 ); temp.setLevelsFromTotals(partCount); */ mFactory = temp; //write out all the properties into a temp file //in the root submit directory //mMDAGPropertiesFile = writeOutProperties( mSubmitDirectory ); mMDAGPropertiesFile = mProps.writeOutProperties( mSubmitDirectory ); } catch ( NumberFormatException nfe ) { String error = ( s == null ) ? "Unspecified number for jobCount": "Illegal number \"" + s + "\" for partition count"; throw new RuntimeException( error ); } catch ( IOException e ) { //figure out where error happened String message = (mMDAGPropertiesFile == null)? "Unable to write out properties file in base submit directory": "Base directory creation"; //wrap into runtime and throw throw new RuntimeException( message, e ); } } /** * Callback for the partition . These partitions are completely * assembled, but each is passed separately. * * @param partition is the PDAX-style partition. */ public void cbPartition(Partition partition) { String name = partition.getName(); int index = partition.getIndex(); ArrayList sequenceList = new ArrayList(NUM_OF_EXPANDED_JOBS); String tailJob; Job job; //get the filename of the dax file containing the partition String dax = DAXWriter.getPDAXFilename(name, index); //construct the path to the file dax = mPDAXDirectory + File.separator + dax; File partitionDirectory; try{ partitionDirectory = mFactory.createFile( getBaseName(partition) ); partitionDirectory.mkdirs(); //construct a symlink to the dax file in the partition directory if (!createSymlink( dax, partitionDirectory)){ mLogger.log("Unable to create symlinks of the dax file to submit dir", LogManager.WARNING_MESSAGE_LEVEL); } } catch(IOException e){ //wrap and throw throw new RuntimeException( "Unable to create partition submit directory ", e ); } //construct the appropriate vds-submit-dag job with the //prescript set as an invocation to gencdag etc. job = constructDAGJob( partition , partitionDirectory, dax); //add to the workflow mMegaDAG.add(job); //map the partition id to the the job that is constructed. 
mJobMap.put(partition.getID(),job); /** String jobName = getPegasusJobName(name,index); //populate the internal job map with jobname and id mJobMap.put(partition.getID(),getPegasusJobName(name,index)); //add the sub info for it job = constructPegasusJob(jobName, file); mMegaDAG.add(job); //generate the dagman job that ends up submitting //the mini dag corresponding to the partition //mMegaDAG.addNewJob(getJobName(name,index)); tailJob = "condor_submit_" + jobName ; job = constructCondorSubmitJob(tailJob,name,index); mMegaDAG.add(job); //put the sequence list sequenceList.add(HEAD_INDEX,jobName); sequenceList.add(TAIL_INDEX,tailJob); mSequenceMap.put(jobName,sequenceList); //add the relation between jobname and tail job mMegaDAG.addNewRelation(jobName,tailJob); */ } /** * Callback for child and parent relationships from section 3. This ties * in the relations between the partitions to the relations between the jobs * that are responsible for partitions. In addition, appropriate cache * file arguments are generated. * * @param child is the IDREF of the child element. * @param parents is a list of IDREFs of the included parents. */ public void cbParents(String child, List parents) { String cacheName; String cacheArgs = null; //get hold of the constructed job for the child. //the name of the jobs are treated as ID's Job cJob = getJob(child); String cID = cJob.getName(); //glue in the sequences for the expanded things together if(!parents.isEmpty()){ //the megadag should not be invoked with cache option for time being cacheArgs = " --cache "; } //traverse through the parents to put in the relations //and the cache file arguments. String pID; Job pJob; for(Iterator it = parents.iterator();it.hasNext();){ //get the parent job and name pJob = (Job)mJobMap.get(it.next()); pID = pJob.getName(); mLogger.log("Adding Relation " + pID + "->" + cID, LogManager.DEBUG_MESSAGE_LEVEL); mMegaDAG.addNewRelation(pID,cID); //we need to specify the cache files for those partitions //even if they are not constructed. there is a disconnect //as to how the names are being generated. There should be //a call to one function only. cacheName = getCacheFilePath(pJob); cacheArgs += cacheName + ","; } //stuff the arguments back into replanner prescript. //should be a callout to a different function for portability String args = cJob.getPreScriptArguments(); //System.out.println("Arguments are " + args); cJob.setPreScript( cJob.getPreScriptPath(), (cacheArgs == null)? //remains the same args: //remove the last instance of , from cache args args + cacheArgs.substring(0,cacheArgs.lastIndexOf(',')) ); } /** * Callback when the parsing of the document is done. This ends up * triggering the writing of the condor submit files corresponding to the * mega dag. */ public void cbDone() { mDone = true; //generate the classad's options //for the Mega DAG mMegaDAG.dagInfo.generateFlowName(); mMegaDAG.dagInfo.setFlowTimestamp( mPOptions.getDateTime( mProps.useExtendedTimeStamp() )); mMegaDAG.dagInfo.setDAXMTime( new File( mPOptions.getPDAX() ) ); mMegaDAG.dagInfo.generateFlowID(); mMegaDAG.dagInfo.setReleaseVersion(); /* Moved to CPlanner Class Karan Apr 1 2008 CodeGenerator codeGenerator = null; int state = 0; try{ //load the Condor Writer that understands HashedFile Factories. codeGenerator = CodeGeneratorFactory.loadInstance( mBag, CODE_GENERATOR_CLASS ); state = 1; codeGenerator.generateCode( mMegaDAG ); //generate only the braindump file that is required. 
//no spawning off the tailstatd for time being codeGenerator.startMonitoring(); } catch( FactoryException fe ){ throw new FactoryException("PDAX2MDAG",fe); } catch( Exception e){ throw new RuntimeException("Error while generating code for the workflow",e); } */ } /** * Returns the MEGADAG that is generated * * @return ADag object containing the mega daga */ public Object getConstructedObject(){ if(!mDone) throw new RuntimeException("Method called before the megadag " + " was fully generated"); return mMegaDAG; } /** * Constructs a job that plans and submits the partitioned workflow, * referred to by a Partition. The main job itself is a condor dagman job * that submits the concrete workflow. The concrete workflow is generated by * running the planner in the prescript for the job. * * @param partition the partition corresponding to which the job has to be * constructed. * @param directory the submit directory where the submit files for the * partition should reside. * @param dax the absolute path to the partitioned dax file that * corresponds to this partition. * * @return the constructed DAG job. */ protected Job constructDAGJob( Partition partition , File directory, String dax){ //for time being use the old functions. Job job = new Job(); //the parent directory where the submit file for condor dagman has to //reside. the submit files for the corresponding partition are one level //deeper. String parentDir = directory.getParent(); //set the logical transformation job.setTransformation(CONDOR_DAGMAN_NAMESPACE, CONDOR_DAGMAN_LOGICAL_NAME, null); //set the logical derivation attributes of the job. job.setDerivation(CONDOR_DAGMAN_NAMESPACE, CONDOR_DAGMAN_LOGICAL_NAME, null); //always runs on the submit host job.setSiteHandle("local"); //set the partition id only as the unique id //for the time being. // job.setName(partition.getID()); //set the logical id for the job same as the partition id. job.setLogicalID(partition.getID()); //figure out the relative submit directory where the dagman job should //reside. It should be one level up from the partition directory. String dir = ""; dir += (parentDir.equals(mSubmitDirectory))? //the directory is same as the root dir : //get the relative from root parentDir.substring(mSubmitDirectory.length() ); // job.setSubmitDirectory(dir); //construct the name of the job as a deep lfn with a directory path StringBuffer name = new StringBuffer(); //get the part from the first file separator onwards name.append( (dir.indexOf(File.separatorChar) == 0) ? dir.substring(1) : dir.substring(0)); //append a file separator in the end if dir was some name if( dir.length() > 1) {name.append(File.separatorChar);} //set the basename for the deep lfn name.append(partition.getID()); //System.out.println (" The name is " + name.toString()); job.setName(name.toString()); List entries; TransformationCatalogEntry entry = null; //get the path to condor dagman try{ //try to construct the path from the environment entry = constructTCEntryFromEnvironment( ); //try to construct from the TC if( entry == null ){ entries = mTCHandle.lookup(job.namespace, job.logicalName, job.version, job.getSiteHandle(), TCType.INSTALLED); entry = (entries == null) ? 
defaultTCEntry( "local") ://construct from site catalog //Gaurang assures that if no record is found then //TC Mechanism returns null (TransformationCatalogEntry) entries.get(0); } } catch(Exception e){ throw new RuntimeException( "ERROR: While accessing the Transformation Catalog",e); } if(entry == null){ //throw appropriate error throw new RuntimeException("ERROR: Entry not found in tc for job " + job.getCompleteTCName() + " on site " + job.getSiteHandle()); } //set the path to the executable and environment string job.executable = entry.getPhysicalTransformation(); //the environment variable are set later automatically from the tc //job.envVariables = entry.envString; //the job itself is the main job of the super node //construct the classad specific information job.jobID = job.getName(); job.jobClass = Job.COMPUTE_JOB; //directory where all the dagman related files for the nested dagman //reside. Same as the directory passed as an input parameter dir = directory.getAbsolutePath(); //make the initial dir point to the submit file dir for the partition //we can do this as we are running this job both on local host, and scheduler //universe. Hence, no issues of shared filesystem or anything. job.condorVariables.construct("initialdir", dir); //construct the argument string, with all the dagman files //being generated in the partition directory. Using basenames as //initialdir has been specified for the job. StringBuffer sb = new StringBuffer(); sb.append(" -f -l . -Debug 3"). append(" -Lockfile ").append( getBasename( partition, ".dag.lock") ). append(" -Dag ").append( getBasename( partition, ".dag")); //specify condor log for condor version less than 7.1.2 if( mCondorVersion < CondorVersion.v_7_1_2 ){ sb.append(" -Condorlog ").append(getBasename( partition, ".log")); } //allow for version mismatch as after 7.1.3 condor does tight //checking on dag.condor.sub file and the condor version used if( mCondorVersion >= CondorVersion.v_7_1_3 ){ sb.append( " -AllowVersionMismatch " ); } //we append the Rescue DAG option only if old version //of Condor is used < 7.1.0. To detect we check for a non //zero value of --rescue option to pegasus-plan //Karan June 27, 2007 mLogger.log( "Number of Resuce retries " + mPOptions.getNumberOfRescueTries() , LogManager.DEBUG_MESSAGE_LEVEL ); if( mCondorVersion >= CondorVersion.v_7_1_0 || mPOptions.getNumberOfRescueTries() > 0 ){ mLogger.log( "Constructing arguments to dagman in 7.1.0 and later style", LogManager.DEBUG_MESSAGE_LEVEL ); sb.append( " -AutoRescue 1 -DoRescueFrom 0 "); } else{ mLogger.log( "Constructing arguments to dagman in pre 7.1.0 style", LogManager.DEBUG_MESSAGE_LEVEL ); sb.append(" -Rescue ").append(getBasename( partition, ".dag.rescue")); } //pass any dagman knobs that were specified in properties file sb.append( this.mDAGManKnobs ); //put in the environment variables that are required job.envVariables.construct("_CONDOR_DAGMAN_LOG", getAbsolutePath( partition, dir,".dag.dagman.out")); job.envVariables.construct("_CONDOR_MAX_DAGMAN_LOG","0"); //set the arguments for the job job.setArguments(sb.toString()); //the environment need to be propogated for exitcode to be picked up job.condorVariables.construct("getenv","TRUE"); job.condorVariables.construct("remove_kill_sig","SIGUSR1"); //the log file for condor dagman for the dagman also needs to be created //it is different from the log file that is shared by jobs of //the partition. 
That is referred to by Condorlog // keep the log file common for all jobs and dagman albeit without // dag.dagman.log suffix // job.condorVariables.construct("log", getAbsolutePath( partition, dir,".dag.dagman.log")); // String dagName = mMegaDAG.dagInfo.nameOfADag; // String dagIndex= mMegaDAG.dagInfo.index; // job.condorVariables.construct("log", dir + mSeparator + // dagName + "_" + dagIndex + ".log"); //the job needs to be explicitly launched in //scheduler universe instead of local universe job.condorVariables.construct( Condor.UNIVERSE_KEY, Condor.SCHEDULER_UNIVERSE ); //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 job.addNotifications( entry ); //incorporate profiles from the transformation catalog //and properties for the time being. Not from the site catalog. //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. job.updateProfiles( entry ); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. job.updateProfiles(mProps); //constructed the main job. now construct the prescript //the log file resides in the directory where the condor_dagman //job resides i.e the parent directory. StringBuffer log = new StringBuffer(); log.append(parentDir).append(mSeparator).append(partition.getID()). append(".pre.log"); //set the prescript for the job in the dagman namespace setPrescript( job, dax, log.toString()); //construct the braindump file for tailstatd invocations //the dag should be same as the one passed in the arguments string! StringBuffer dag = new StringBuffer(); dag.append(dir).append(mSeparator).append( getBasename( partition, ".dag")); //we do not want the job to be launched via kickstart //Fix for Pegasus bug number 143 //http://bugzilla.globus.org/vds/show_bug.cgi?id=143 job.vdsNS.construct( Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[GridStartFactory.NO_GRIDSTART_INDEX] ); return job; } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. * * @param site the site for which the default entry is required. * * * @return the default entry. */ private TransformationCatalogEntry defaultTCEntry( String site ){ //not implemented as we dont have handle to site catalog in this class return null; } /** * Returns a tranformation catalog entry object constructed from the environment * * An entry is constructed if either of the following environment variables * are defined * 1) CONDOR_HOME * 2) CONDOR_LOCATION * * CONDOR_HOME takes precedence over CONDOR_LOCATION * * * @return the constructed entry else null. */ private TransformationCatalogEntry constructTCEntryFromEnvironment( ){ //construct environment profiles Map m = System.getenv(); ENV env = new ENV(); String key = "CONDOR_HOME"; if( m.containsKey( key ) ){ env.construct( key, m.get( key ) ); } key = "CONDOR_LOCATION"; if( m.containsKey( key ) ){ env.construct( key, m.get( key ) ); } return constructTCEntryFromEnvProfiles( env ); } /** * Returns a tranformation catalog entry object constructed from the environment * * An entry is constructed if either of the following environment variables * are defined * 1) CONDOR_HOME * 2) CONDOR_LOCATION * * CONDOR_HOME takes precedence over CONDOR_LOCATION * * @param env the environment profiles. * * * @return the entry constructed else null if environment variables not defined. 
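* <p> * For illustration (the path is hypothetical, not from the source): with * CONDOR_HOME=/opt/condor set in the environment, the entry returned points to * /opt/condor/bin/condor_dagman and is marked as INSTALLED on site "local".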
*/ private TransformationCatalogEntry constructTCEntryFromEnvProfiles( ENV env ) { TransformationCatalogEntry entry = null; //check if either CONDOR_HOME or CONDOR_LOCATION is defined String key = null; if( env.containsKey( "CONDOR_HOME") ){ key = "CONDOR_HOME"; } else if( env.containsKey( "CONDOR_LOCATION") ){ key = "CONDOR_LOCATION"; } if( key == null ){ //environment variables are not defined. return entry; } mLogger.log( "Constructing path to dagman on basis of env variable " + key, LogManager.DEBUG_MESSAGE_LEVEL ); entry = new TransformationCatalogEntry(); entry.setLogicalTransformation( CONDOR_DAGMAN_NAMESPACE, CONDOR_DAGMAN_LOGICAL_NAME, null ); entry.setType( TCType.INSTALLED ); entry.setResourceId( "local" ); //construct path to condor dagman StringBuffer path = new StringBuffer(); path.append( env.get( key ) ).append( File.separator ). append( "bin" ).append( File.separator). append( "condor_dagman" ); entry.setPhysicalTransformation( path.toString() ); return entry; } /** * Writes out the braindump.txt file for a partition in the partition submit * directory. The braindump.txt file is used for passing to the tailstatd * daemon that monitors the state of execution of the workflow. * * @param directory the directory in which the braindump file needs to * be written to. * @param partition the partition for which the braindump is to be written out. * @param dax the dax file * @param dag the dag file * * @return the absolute path to the braindump file.txt written in the directory. * * @throws IOException in case of error while writing out file. */ protected String writeOutBraindump( File directory, Partition partition, String dax, String dag) throws IOException{ //sanity check on the directory sanityCheck( directory ); //create a writer to the braindump.txt in the directory. File f = new File( directory , "braindump.txt"); PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(f))); //store absolute path to dir just once String absPath = directory.getAbsolutePath(); //assemble all the contents in a buffer before writing out StringBuffer contents = new StringBuffer(); contents.append("dax ").append(dax).append("\n"). append("dag ").append(dag).append("\n"). append("run ").append(absPath).append("\n"). append("jsd ").append(absPath).append(mSeparator).append("jobstate.log").append("\n"). append("rundir ").append(directory.getName()).append("\n"). append("bindir ").append(mProps.getBinDir()).append("\n"). append("vogroup pegasus").append("\n").//for time being append("label " + partition.getName()); writer.write( contents.toString()); writer.close(); return f.getAbsolutePath(); } /** * Writes out the properties to a temporary file in the directory passed. * * @param directory the directory in which the properties file needs to * be written to. * * @return the absolute path to the properties file written in the directory. * * @throws IOException in case of error while writing out file. 
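* <p> * For illustration (the property name is illustrative only): a property such as * pegasus.dir.storage is retained by the "pegasus" matching subset and written to * a temporary file named along the lines of pegasus.XXXX.properties in the given directory.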
*/ protected String writeOutProperties( String directory ) throws IOException{ File dir = new File(directory); //sanity check on the directory sanityCheck( dir ); //we only want to write out the Pegasus properties for time being Properties properties = mProps.matchingSubset( "pegasus", true ); //create a temporary file in directory File f = File.createTempFile( "pegasus.", ".properties", dir ); //the header of the file StringBuffer header = new StringBuffer(64); header.append("PEGASUS USER PROPERTIES AT RUNTIME \n") .append("#ESCAPES IN VALUES ARE INTRODUCED"); //create an output stream to this file and write out the properties OutputStream os = new FileOutputStream(f); properties.store( os, header.toString() ); os.close(); return f.getAbsolutePath(); } /** * Sets the prescript that ends up calling to the default wrapper that * introduces retry into Pegasus for a particular job. * * @param job the job whose prescript needs to be set. * @param daxURL the path to the dax file on the filesystem. * @param log the file where the output of the prescript needs to be * redirected to. * * @see #RETRY_LOGICAL_NAME */ protected void setPrescript(Job job, String daxURL, String log){ setPrescript( job, daxURL, log, this.NAMESPACE, RETRY_LOGICAL_NAME, null); } /** * Sets the prescript that ends up calling to the default wrapper that * introduces retry into Pegasus for a particular job. * * @param job the job whose prescript needs to be set. * @param daxURL the path to the dax file on the filesystem. * @param log the file where the output of the prescript needs to be * redirected to. * @param namespace the namespace of the replanner utility. * @param name the logical name of the replanner. * @param version the version of the replanner to be picked up. * */ protected void setPrescript(Job job, String daxURL, String log, String namespace, String name, String version){ String site = job.getSiteHandle(); TransformationCatalogEntry entry = null; //get the path to script wrapper from the try{ List entries = mTCHandle.lookup(namespace, name, version, site, TCType.INSTALLED); //get the first entry from the list returned entry = ( entries == null ) ? null : //Gaurang assures that if no record is found then //TC Mechanism returns null ((TransformationCatalogEntry) entries.get(0)); } catch(Exception e){ throw new RuntimeException( "ERROR: While accessing the Transformation Catalog",e); } PlannerOptions options = ( mPOptions == null)? null : (PlannerOptions)mPOptions.clone(); if( options == null ){ throw new RuntimeException( "ERROR: Planner Options passed to setPrescript are null" ); } //construct the prescript path StringBuffer script = new StringBuffer(); if(entry == null){ //log to debug mLogger.log("Constructing the default path to the replanner for prescript", LogManager.DEBUG_MESSAGE_LEVEL); //construct the default path to the executable script.append( mProps.getBinDir() ).append( mSeparator ). append( RETRY_LOGICAL_NAME ); } else{ script.append(entry.getPhysicalTransformation()); } //the output of the prescript i.e submit files should be created //in the directory where the job is being run. 
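//A sketch of the net effect (all paths and values hypothetical; the replanner //executable is whatever RETRY_LOGICAL_NAME resolves to via the TC or the bin dir): //the prescript set below amounts to an invocation along the lines of // <replanner> -Dpegasus.log.*=<log file> <jvm options> --conf <properties file> --dax <dax file> <other planner options> //with the submit directory pointed at the job's initialdir.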
options.setSubmitDirectory( (String)job.condorVariables.get("initialdir")); //generate the remote working directory for the partition String submit = options.getSubmitDirectory(); // like /tmp/vahi/pegasus/blackdiamond/run0001/00/PID1 String remoteBase = mPOptions.getRandomDir(); // like vahi/pegasus/blackdiamond/run0001 String remoteWorkDir = submit.substring( submit.indexOf( remoteBase) ); //gets us vahi/pegasus/blackdiamond/run0001/00/PID1 //trying to use the relative dir option now, Karan April 10, 2008 // options.setRandomDir( remoteWorkDir ); // mLogger.log( "Remote working directory set to " + remoteWorkDir + // " for partition " + job.getID() , // LogManager.DEBUG_MESSAGE_LEVEL ); //set the base and relative submit dir options.setBaseSubmitDirectory( mPOptions.getBaseSubmitDirectory() ); options.setRelativeDirectory( remoteWorkDir ); //set the basename for the nested dag as the ID of the job. //which is actually the basename of the deep lfn job name!! options.setBasenamePrefix( getBasenamePrefix(job)); //set the flag designating that the planning invocation is part //of a deferred planning run options.setPartOfDeferredRun( true ); //in case of deferred planning cleanup won't work //explicitly turn it off if the file cleanup scope is fullahead if( mCleanupScope.equals( PegasusProperties.CLEANUP_SCOPE.fullahead ) ){ options.setCleanup( false ); } //we want monitoring to happen options.setMonitoring( true ); //construct the argument string. //add the jvm options and the pegasus options if any StringBuffer arguments = new StringBuffer(); arguments./*append( mPOptions.toJVMOptions())*/ append( " -Dpegasus.log.*=").append(log). //add other jvm options that user may have specified append( options.toJVMOptions() ). append(" --conf ").append( mMDAGPropertiesFile ). //the dax argument is different for each partition append(" --dax ").append( daxURL ). //put in all the other options. append( options.toOptions()); //set the path and the arguments to prescript job.setPreScript( script.toString(), arguments.toString()); } /** * Returns the base name of the submit directory in which the submit files * for a particular partition reside. * * @param partition the partition for which the base directory is to be * constructed. * * @return the base name of the partition. */ protected String getBaseName( Partition partition ){ String id = partition.getID(); StringBuffer sb = new StringBuffer( id.length() + 1 ); sb.append('P').append(id); return sb.toString(); } /** * Returns the absolute path to a dagman (usually) related file for a * particular partition in the submit directory that is passed as an input * parameter. This does not create the file, just returns an absolute path * to it. Useful for constructing argument string for condor_dagman. * * @param partition the partition for which the dagman is responsible for * execution. * @param directory the directory where the file should reside. * @param suffix the suffix for the file basename. * * @return the absolute path to a file in the submit directory. */ protected String getAbsolutePath( Partition partition, String directory, String suffix){ StringBuffer sb = new StringBuffer(); //add a prefix P to partition id sb.append( directory ).append(mSeparator). append( getBasename( partition, suffix) ); return sb.toString(); } /** * Returns the basename of a dagman (usually) related file for a particular * partition. * * @param partition the partition for which the dagman is responsible for * execution. * @param suffix the suffix for the file basename.
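* (for example, with an illustrative partition ID "ID1" and suffix ".dag", * the basename constructed is "PID1.dag")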
* * @return the basename. */ protected String getBasename( Partition partition, String suffix ){ StringBuffer sb = new StringBuffer( 16 ); //add a prefix P sb.append('P').append(partition.getID()).append( suffix ); return sb.toString(); } /** * Returns the basename prefix of a dagman (usually) related file for * a job that submits a nested dagman. * * @param job the job that submits a nested dagman. * * @return the basename. */ protected String getBasenamePrefix( Job job ){ StringBuffer sb = new StringBuffer( 8 ); //add a prefix P sb.append('P').append(job.getLogicalID()); return sb.toString(); } /** * Returns the full path to a cache file that corresponds to one partition. * The cache file resides in the submit directory for the partition that * the job is responsible for. * * @param job the job running on the submit host that submits the partition. * * @return the full path to the file. */ protected String getCacheFilePath(Job job){ StringBuffer sb = new StringBuffer(); //cache file is being generated in the initialdir set for the job. //initialdir is set correctly to the submit directory for nested dag. sb.append(job.condorVariables.get("initialdir")). append(File.separatorChar).append(getBasenamePrefix(job)). append(".cache"); return sb.toString(); } /** * Creates a symbolic link to the source file in the destination directory. * * @param source the source file that has to be symlinked. * @param destDir the destination directory where the symlink has to be * placed. * * @return true if the symlink was created successfully, false otherwise. */ protected boolean createSymlink( String source, File destDir ){ boolean result = false; //do some sanity checks on the source and the destination File f = new File( source ); if( !f.exists() || !f.canRead()){ mLogger.log("The source for symlink does not exist " + source, LogManager.ERROR_MESSAGE_LEVEL); return result; } if( !destDir.exists() || !destDir.isDirectory() || !destDir.canWrite()){ mLogger.log("The destination directory cannot be written to " + destDir, LogManager.ERROR_MESSAGE_LEVEL); return result; } try{ //run the ln command to create the symlink Runtime r = Runtime.getRuntime(); String command = "ln -s " + source + " " + destDir.getAbsolutePath(); mLogger.log("Creating symlink " + command, LogManager.DEBUG_MESSAGE_LEVEL); Process p = r.exec(command); //spawn off the gobblers with the already initialized default callback StreamGobbler ips = new StreamGobbler(p.getInputStream(), mDefaultCallback); StreamGobbler eps = new StreamGobbler(p.getErrorStream(), mDefaultCallback); ips.start(); eps.start(); //wait for the threads to finish off ips.join(); eps.join(); //get the status int status = p.waitFor(); if( status != 0){ mLogger.log("Command " + command + " exited with status " + status, LogManager.DEBUG_MESSAGE_LEVEL); return result; } result = true; } catch(IOException ioe){ mLogger.log("IOException while creating symbolic links ", ioe, LogManager.ERROR_MESSAGE_LEVEL); } catch( InterruptedException ie){ //ignore } return result; } /** * Returns the number of partitions referred to in the PDAX file. * * @param pdax the path to the pdax file. * * @return the number of partitions in the pdax file.
*/ protected int getPartitionCount( String pdax ){ int result = 0; File f = new File( pdax ); if( !f.exists() || !f.canRead()){ throw new RuntimeException( "PDAX File is unreadable " + pdax); } try{ //set the callback and run the grep command String word = " max ){ max = num + 1; } } //create the directory name leaf.append( SUBMIT_DIRECTORY_PREFIX ).append( mNumFormatter.format( max ) ); } result.append( leaf.toString() ); base = new File( base, leaf.toString() ); mLogger.log( "Directory to be created is " + base.getAbsolutePath(), LogManager.DEBUG_MESSAGE_LEVEL ); sanityCheck( base ); return result.toString(); } /** * Constructs any extra arguments that need to be passed to dagman, as determined * from the properties file. * * @param properties the PegasusProperties * * @return any arguments to be added, else empty string */ public static String constructDAGManKnobs( PegasusProperties properties ){ StringBuffer sb = new StringBuffer(); //get all the values for the dagman knobs int value; for( int i = 0; i < PDAX2MDAG.DAGMAN_KNOBS.length; i++ ){ value = parseInt( properties.getProperty( PDAX2MDAG.DAGMAN_KNOBS[i][0] ) ); if ( value > 0 ){ //add the option sb.append( PDAX2MDAG.DAGMAN_KNOBS[i][1] ); sb.append( value ); } } return sb.toString(); } /** * Parses a string into an integer. Invalid values are returned as -1 * * @param s the String to be parsed as integer * * @return the int value if valid, else -1 */ protected static int parseInt( String s ){ int value = -1; try{ value = Integer.parseInt( s ); } catch( Exception e ){ //ignore } return value; } /** * A small utility method that constructs the name of the Condor files * that are generated when a dag is submitted. The default separator _ is * used. * * @param name the name attribute in the partition element of the pdax. * @param index the partition number of the partition. * @param suffix the suffix that needs to be added to the filename. * * @return the name of the condor file. */ private String getCondorFileName(String name, int index, String suffix){ return getCondorFileName(name,index,suffix,"_"); } /** * A small utility method that constructs the name of the Condor files * that are generated when a dag is submitted. * * @param name the name attribute in the partition element of the pdax. * @param index the partition number of the partition. * @param suffix the suffix that needs to be added to the filename * @param separator the separator that is to be used while constructing * the filename. * * @return the name of the condor file */ private String getCondorFileName(String name, int index, String suffix, String separator){ StringBuffer sb = new StringBuffer(); //all the files reside in the submit file //directory specified by the user. //sb.append(mPOptions.submitFileDir).append(File.separator); sb.append(name).append(separator).append(index).append(suffix); return sb.toString(); } /** * An inner class that implements the StreamGobblerCallback to count * the occurrences of a word in a document. * */ private class GrepCallback implements StreamGobblerCallback{ /** * The word that is to be searched for. */ private String mWord; /** * The length of the word to be searched for. */ private int mWordLength; /** * The number of times the word appears. */ private int mCount; /** * Overloaded Constructor. * * @param word the word to be searched for. */ public GrepCallback( String word ){ mWord = word; mWordLength = (word == null) ? 0 : word.length(); mCount = 0; } /** * Callback whenever a line is read from the stream by the StreamGobbler.
* Counts the occurrences of the word in the line, and * increments the global counter. * * @param line the line that is read. */ public void work( String line ){ //sanity check to prevent infinite iterations if( mWordLength == 0 ) return; int start = 0; int index; while ( ( index = line.indexOf( mWord, start)) != -1){ mCount++; start = index + mWordLength; } } /** * Returns the number of words counted so far. * * @return the number of words */ public int getCount(){ return mCount; } /** * Resets the internal counters. */ public void reset(){ mCount = 0; } } } /** * A filename filter for identifying the run directory * * @author Karan Vahi vahi@isi.edu */ class RunDirectoryFilenameFilter implements FilenameFilter { /** * Stores the regular expression used to recognize the run (submit) directory names */ private static final String mRegexExpression = "(" + PDAX2MDAG.SUBMIT_DIRECTORY_PREFIX + ")([0-9][0-9][0-9][0-9])"; /** * Stores compiled patterns at first use, quasi-Singleton. */ private static Pattern mPattern = null; /** * Tests if a specified file should be included in a file list. * * @param dir the directory in which the file was found. * @param name the name of the file. * * @return true if and only if the name should be included in the file list * false otherwise. * * */ public boolean accept( File dir, String name) { //compile the pattern only once. if( mPattern == null ){ mPattern = Pattern.compile( mRegexExpression ); } return mPattern.matcher( name ).matches(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/pdax/PDAXCallbackFactory.java0000644000175000017500000001236311757531137030073 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.pdax; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; /** * A factory class to load the appropriate DAX callback implementations that need * to be passed to the DAX Parser. * * @author Karan Vahi * @version $Revision: 2575 $ */ public class PDAXCallbackFactory { /** * Package to prefix "just" class names with. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.parser.pdax"; /** * Loads the implementing class corresponding to the type specified by the user. * The properties object passed should not be null. The callback that is * loaded is the one referred to in the properties by the user. * * @param directory the base level directory in which the output files are * to be generated. * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * * @return the instance of the class implementing this interface.
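* For instance, with megadag mode "dag", or when no mode is specified, the * callback loaded is PDAX2MDAG from the default package; modes "noop" and * "daglite" map to PDAX2NOOP and PDAX2DAGLite respectively.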
* * @exception PDAXCallbackFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME * @see org.griphyn.cPlanner.common.PegasusProperties#getPartitionerDAXCallback() */ public static Callback loadInstance(PegasusProperties properties, PlannerOptions options, String directory) throws PDAXCallbackFactoryException{ String mode = options.getMegaDAGMode(); if(mode == null || mode.equalsIgnoreCase("dag")){ //load the default one mode = "PDAX2MDAG"; } else if(mode.equalsIgnoreCase("noop")){ mode = "PDAX2NOOP"; } else if(mode.equalsIgnoreCase("daglite")){ mode = "PDAX2DAGLite"; } //load the class stored in the mode return loadInstance(properties,options,directory,mode); } /** * Loads the implementing class corresponding to the type specified by the user. * The properties object passed should not be null. The callback that is * loaded is the one referred to by the className parameter passed. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param options the PlannerOptions object containing the * options passed to gencdag. * @param directory the directory where the pdax file and partitioned daxes * reside. * @param className the name of the implementing class. * * @return the instance of the class implementing this interface. * * @exception PDAXCallbackFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static Callback loadInstance(PegasusProperties properties, PlannerOptions options, String directory, String className) throws PDAXCallbackFactoryException{ Callback callback = null; try{ //sanity check if(properties == null){ throw new RuntimeException("Invalid properties passed"); } if(className == null){ return loadInstance(properties,options,directory); } //prepend the package name className = (className.indexOf('.') == -1)? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className: //load directly className; //try loading the class dynamically DynamicLoader dl = new DynamicLoader(className); Object argList[] = new Object[3]; argList[0] = directory; argList[1] = properties; argList[2] = options; callback = (Callback) dl.instantiate(argList); } catch( Exception e ){ throw new PDAXCallbackFactoryException("Instantiating PDAXCallback ", className, e); } return callback; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/pdax/Callback.java0000644000175000017500000000550311757531137026064 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.pdax; import edu.isi.pegasus.planner.partitioner.Partition; /** * This interface defines the callback calls from PDAX parsing. A slim * and memory-efficient parser of PDAX is expected to call these callbacks. * * @author Karan Vahi * @version $Revision: 2576 $ */ public interface Callback { /** * Callback when the opening tag was parsed.
This contains all * attributes and their raw values within a map. This callback can * also be used to initialize callback-specific resources. * * @param attributes is a map of attribute key to attribute value */ public void cbDocument(java.util.Map attributes); /** * Callback for the partition . These partitions are completely * assembled, but each is passed separately. * * @param partition is the PDAX-style partition. */ public void cbPartition(Partition partition); /** * Callback for child and parent relationships from section 3. * * @param child is the IDREF of the child element. * @param parents is a list of IDREFs of the included parents. */ public void cbParents(String child, java.util.List parents); /** * Callback when the parsing of the document is done. While this state * could also be determined from the return of the invocation of the * parser, that return may be hidden in another place of the code. * This callback can be used to free callback-specific resources. */ public void cbDone(); /** * Return a object that is constructed during the parsing of the object. * The type of the object that is constructed is determined by the * implementing callback handler. For example, it could be an Adag object * used by Pegasus or a map containing the graph structure of the dax. * The implementing classes should keep a boolean flag that signifies whether * the corresponding object has been created by the implementing class or * not. The variable should be set when the implementing callback handler * deems that it has enough data to construct that object. * * @return the constructed object */ public Object getConstructedObject(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/IVSElement.java0000644000175000017500000000232211757531137025403 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.planner.invocation.Invocation; import java.util.*; /** * This class keeps the name of an element and its corresponding * java object reference. The structure is used by the stack in * InvocationParser. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * * @see InvocationParser */ public class IVSElement { public String m_name; public Invocation m_obj; public IVSElement( String name, Invocation invocation ) { m_name = new String(name); m_obj = invocation; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/SiteCatalogTextParser.java0000644000175000017500000003654311757531137027665 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPServer; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.WorkDir; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.LRC; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.JobManager; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.PoolConfig; import edu.isi.pegasus.planner.parser.tokens.OpenBrace; import edu.isi.pegasus.planner.parser.tokens.SiteCatalogReservedWord; import edu.isi.pegasus.planner.parser.tokens.Token; import edu.isi.pegasus.planner.parser.tokens.QuotedString; import edu.isi.pegasus.planner.parser.tokens.Identifier; import edu.isi.pegasus.planner.parser.tokens.CloseBrace; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import java.io.IOException; import java.io.Reader; import java.util.StringTokenizer; /** * Parses the input stream and generates site configuration map as * output. * * @author Jens Vöckler * @author Gaurang Mehta * @author Karan Vahi * @version $Revision: 2572 $ * * @see org.griphyn.cPlanner.classes.SiteCatalogTextScanner * @see org.griphyn.cPlanner.classes.Token */ public class SiteCatalogTextParser { /** * The access to the lexical scanner is stored here. */ private SiteCatalogTextScanner m_scanner = null; /** * Stores the look-ahead symbol. */ private Token m_lookAhead = null; /** * The handle to the logger used to log messages. */ private LogManager m_logger; /** * Initializes the parser with an input stream to read from. * @param r is the stream opened for reading. * * @throws IOException * @throws ScannerException */ public SiteCatalogTextParser(Reader r) throws IOException, ScannerException { m_logger = LogManagerFactory.loadSingletonInstance(); m_scanner = new SiteCatalogTextScanner(r); m_lookAhead = m_scanner.nextToken(); } /** * Parses the complete input stream, into the PoolConfig data object that * holds the contents of all the sites referred to in the stream. * * @return a map indexed by the site handle strings. * @throws IOException * @throws ScannerException * @throws Exception * @see org.griphyn.cPlanner.classes.PoolConfig */ public PoolConfig parse() throws IOException, ScannerException, Exception { //to check more PoolConfig sites = new PoolConfig(); String handle = null; do { if (m_lookAhead != null) { //get the site handle/id, that is parsed differently //compared to the rest of the attributes of the site. handle = getSiteHandle(); SiteInfo site = new SiteInfo(); site.setInfo(SiteInfo.HANDLE, handle); while (! (m_lookAhead instanceof CloseBrace)) { //populate all the rest of the attributes //associated with the site populate(site); } if (! (m_lookAhead instanceof CloseBrace)) { throw new ScannerException(m_scanner.getLineNumber(), "expecting a closing brace"); } //we have information about one complete site! 
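//An illustrative (hypothetical) example of the textual site entry that this loop parses: // site isi { // workdir "/scratch" // gridftp "gsiftp://host.isi.edu/scratch" "4.0.4" // profile env "PEGASUS_HOME" "/opt/pegasus" // }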
m_logger.log("Site parsed is - " + site.toMultiLine(), LogManager.DEBUG_MESSAGE_LEVEL); m_lookAhead = m_scanner.nextToken(); // enter the site information. if (sites.contains(handle)) { //Karan October 13, 2005 //NEEDS CLARIFICATION FROM GAURANG //PROBABLY IS A MDS ARTIFACT. ALSO NEEDS //TO BE MOVED TO PoolConfig.add(PoolConfig,boolean) java.util.Date date = new java.util.Date(); sites.add(handle + "-" + date.getTime(), site); } else { sites.add(handle, site); } } } while (m_scanner.hasMoreTokens()); return sites; } /** * Removes potential leading and trailing quotes from a string. * * @param input is a string which may have leading and trailing quotes * @return a string that is either identical to the input, or a * substring thereof. */ public String niceString(String input) { // sanity if (input == null) { return input; } int l = input.length(); if (l < 2) { return input; } // check for leading/trailing quotes if (input.charAt(0) == '"' && input.charAt(l - 1) == '"') { return input.substring(1, l - 1); } else { return input; } } /** * Populates all the attributes, except the handle, associated with the site * in the SiteInfo object. * * @param site the SiteInfo object that is to be populated. * @throws IOException if reading from the stream fails. * @throws ScannerException if a site attribute is not described correctly. * @throws Exception on other errors. */ private void populate(SiteInfo site) throws IOException, ScannerException, Exception { if (! (m_lookAhead instanceof SiteCatalogReservedWord)) { throw new ScannerException(m_scanner.getLineNumber(), "expecting a reserved word describing a site attribute instead of "+ m_lookAhead); } int word = ( (SiteCatalogReservedWord) m_lookAhead).getValue(); m_lookAhead = m_scanner.nextToken(); switch (word) { case SiteCatalogReservedWord.UNIVERSE: if (! (m_lookAhead instanceof Identifier)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"universe\" requires an identifier as first argument"); } JobManager jbminfo = new JobManager(); String universe = ( (Identifier) m_lookAhead). getValue(); m_lookAhead = m_scanner.nextToken(); jbminfo.setInfo(JobManager.UNIVERSE, universe); // System.out.println("universe="+universe ); if (! (m_lookAhead instanceof QuotedString)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"universe\" requires a quoted string as second argument"); } // System.out.println("url="+((QuotedString) m_lookAhead).getValue() ); jbminfo.setInfo(JobManager.URL, niceString( ( (QuotedString) m_lookAhead).getValue())); m_lookAhead = m_scanner.nextToken(); if (! (m_lookAhead instanceof QuotedString)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"universe\" requires a quoted string for version as third argument"); } jbminfo.setInfo(JobManager.GLOBUS_VERSION, niceString( ( (QuotedString) m_lookAhead).getValue())); m_lookAhead = m_scanner.nextToken(); site.setInfo(SiteInfo.JOBMANAGER, jbminfo); break; case SiteCatalogReservedWord.LRC: if (! (m_lookAhead instanceof QuotedString)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"lrc\" requires a quoted string argument"); } LRC lrc = new LRC(niceString( ( (QuotedString) m_lookAhead).getValue())); site.setInfo(SiteInfo.LRC, lrc); m_lookAhead = m_scanner.nextToken(); break; case SiteCatalogReservedWord.GRIDLAUNCH: if (! (m_lookAhead instanceof QuotedString)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"gridlaunch\" requires a quoted string argument"); } site.setInfo(SiteInfo.GRIDLAUNCH, niceString( ( (QuotedString) m_lookAhead).getValue())); m_lookAhead = m_scanner.nextToken(); break; case SiteCatalogReservedWord.WORKDIR: if (!
(m_lookAhead instanceof QuotedString)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"workdir\" requires a quoted string argument"); } WorkDir gdw = new WorkDir(); gdw.setInfo(WorkDir.WORKDIR, niceString( ( (QuotedString) m_lookAhead). getValue())); site.setInfo(SiteInfo.WORKDIR, gdw); //System.out.println("workdir ="+((QuotedString) m_lookAhead).getValue() ); m_lookAhead = m_scanner.nextToken(); break; case SiteCatalogReservedWord.GRIDFTP: if (! (m_lookAhead instanceof QuotedString)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"gridftp\" requires a quoted string argument for url"); } GridFTPServer gftp = new GridFTPServer(); String gftp_url = new String(niceString( ( ( QuotedString) m_lookAhead).getValue())); StringTokenizer stt = new StringTokenizer(gftp_url, "/"); String gridftpurl = stt.nextToken() + "//" + stt.nextToken(); String storagedir = ""; while (stt.hasMoreTokens()) { storagedir += "/" + stt.nextToken(); } gftp.setInfo(GridFTPServer.GRIDFTP_URL, gridftpurl); gftp.setInfo(GridFTPServer.STORAGE_DIR, storagedir); // System.out.println(" gridftp url="+((QuotedString) m_lookAhead).getValue() ); m_lookAhead = m_scanner.nextToken(); if (! (m_lookAhead instanceof QuotedString)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"gridftp\" requires a quoted string argument for globus version"); } gftp.setInfo(GridFTPServer.GLOBUS_VERSION, niceString( ( (QuotedString) m_lookAhead). getValue())); // System.out.println("version="+((QuotedString) m_lookAhead).getValue() ); site.setInfo(SiteInfo.GRIDFTP, gftp); m_lookAhead = m_scanner.nextToken(); break; case SiteCatalogReservedWord.PROFILE: if (! (m_lookAhead instanceof Identifier)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"profile\" requires a namespace identifier as first argument"); } String namespace = ( (Identifier) m_lookAhead). getValue(); m_lookAhead = m_scanner.nextToken(); // System.out.println("profile namespace="+namespace ); if (! (m_lookAhead instanceof QuotedString)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"profile\" requires a quoted string argument"); } String key = ( (QuotedString) m_lookAhead).getValue(); // System.out.println("key="+((QuotedString) m_lookAhead).getValue() ); m_lookAhead = m_scanner.nextToken(); if (! (m_lookAhead instanceof QuotedString)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"profile\" requires a quoted string argument"); } String value = ( (QuotedString) m_lookAhead).getValue(); // System.out.println("value="+((QuotedString) m_lookAhead).getValue() ); m_lookAhead = m_scanner.nextToken(); Profile profile = new Profile(namespace, niceString(key), niceString(value)); site.setInfo(SiteInfo.PROFILE, profile); break; case SiteCatalogReservedWord.SYSINFO: if (! (m_lookAhead instanceof QuotedString)) { throw new ScannerException(m_scanner.getLineNumber(), "the \"sysinfo\" requires a quoted string argument"); } String sysinfo = ( (QuotedString) m_lookAhead). getValue(); // System.out.println("key="+((QuotedString) m_lookAhead).getValue() ); m_lookAhead = m_scanner.nextToken(); site.setInfo(SiteInfo.SYSINFO, niceString(sysinfo)); break; default: throw new ScannerException(m_scanner.getLineNumber(), "invalid reserved word used to configure a site entry"); } } /** * Returns the site handle for a site, and moves the scanner to hold the next * SiteCatalogReservedWord. * * @return the site handle for a site, usually the name of the site. 
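* (for example, for an entry that begins with the text: site isi { , * the handle returned is "isi"; the site name is illustrative)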
* * @throws IOException if reading from the stream fails. * @throws ScannerException if the tokens do not appear in the expected order. */ private String getSiteHandle() throws IOException, ScannerException { String handle = null; if (! (m_lookAhead instanceof SiteCatalogReservedWord) || ( (SiteCatalogReservedWord) m_lookAhead).getValue() != SiteCatalogReservedWord.SITE) { throw new ScannerException(m_scanner.getLineNumber(), "expecting reserved word \"site\""); } m_lookAhead = m_scanner.nextToken(); // proceed with next token if (! (m_lookAhead instanceof Identifier)) { throw new ScannerException(m_scanner.getLineNumber(), "expecting the pool handle identifier"); } handle = ( (Identifier) m_lookAhead).getValue(); m_lookAhead = m_scanner.nextToken(); // proceed with next token if (! (m_lookAhead instanceof OpenBrace)) { throw new ScannerException(m_scanner.getLineNumber(), "expecting an opening brace"); } m_lookAhead = m_scanner.nextToken(); return handle; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/XMLErrorHandler.java0000644000175000017500000000546711757531137026405 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import org.xml.sax.ErrorHandler; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; /** * This class handles the errors which occur while enforcing validation against * the XML Schema. Same as the VDLErrorHandler. * * @author Karan Vahi * @author Gaurang Mehta * * $Revision: 2575 $ */ public class XMLErrorHandler implements ErrorHandler{ /** * The handle to the logging object that is used to log the messages. */ private LogManager mLogger ; /** * The default constructor. * Initializes the logger object. */ public XMLErrorHandler(){ mLogger = LogManagerFactory.loadSingletonInstance(); } /** * Logs the warning messages that the SAX parser generates while * validating the XML file against an XML Schema. * * @param e the exception that is being caught. */ public void warning (SAXParseException e) throws SAXException{ mLogger.log("**Parsing ** " + " Line: " + e.getLineNumber() + "\n" + "[" + e + "]\n",LogManager.WARNING_MESSAGE_LEVEL); } /** * Logs the error messages which the SAX parser generates while * validating the XML file against an XML Schema * * @param e the exception that is being caught.
*/ public void error(SAXParseException e) throws SAXException { mLogger.log("**Parsing ** " + " Line: " + e.getLineNumber() + "\n" + "[" + e + "]\n",LogManager.ERROR_MESSAGE_LEVEL); } /** * Logs the fatal messages which the SAX parser generates while * validating the XML file against an XML Schema * * @param e the exception that is being caught */ public void fatalError(SAXParseException e) throws SAXException{ mLogger.log("\n** Parsing ** " + " Line: " + e.getLineNumber() + "\n" + "[" + e + "]\n",LogManager.FATAL_MESSAGE_LEVEL); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/TransformationCatalogTextScanner.java0000644000175000017500000002044611757531137032117 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.planner.parser.tokens.OpenBrace; import edu.isi.pegasus.planner.parser.tokens.TransformationCatalogReservedWord; import edu.isi.pegasus.planner.parser.tokens.Token; import edu.isi.pegasus.planner.parser.tokens.QuotedString; import edu.isi.pegasus.planner.parser.tokens.Identifier; import edu.isi.pegasus.planner.parser.tokens.CloseBrace; import java.io.IOException; import java.io.LineNumberReader; import java.io.Reader; /** * Implements the scanner for reserved words and other tokens that are * generated from the input stream for the Transformation Catalog. * * @author Jens Vöckler * @author Karan Vahi */ public class TransformationCatalogTextScanner { /** * Stores the stream from which we are currently scanning. */ private LineNumberReader mInputReader; /** * Captures the look-ahead character. */ private int mLookAhead; /** * Captures the previous token. Required to parse transformation value correctly. */ private Token mPreviousToken; /** * Starts to scan the given stream. * * @param reader the reader stream from which we are reading the transformation catalog. */ public TransformationCatalogTextScanner( Reader reader ) throws IOException { this.mInputReader = new LineNumberReader( reader ); this.mLookAhead = mInputReader.read(); // skipWhitespace(); } /** * Obtains the current line number in the input stream from the outside. * * @return the current line number. */ public int getLineNumber() { return mInputReader.getLineNumber(); } /** * Skips any white space and comments in the input. This method stops either * at the end of file, or at any non-whitespace input character. */ private void skipWhitespace() throws IOException { // end of file? if (mLookAhead == -1) { return; } // skip over whitespace while (mLookAhead != -1 && Character.isWhitespace((char) mLookAhead)) { mLookAhead = mInputReader.read(); } // skip over comments until eoln if (mLookAhead == '#') { mInputReader.readLine(); mLookAhead = mInputReader.read(); skipWhitespace(); // FIXME: reformulate end-recursion into loop } } /** * Checks for the availability of more input. * * @return true, if there is more to read, false for EOF.
*/ public boolean hasMoreTokens() throws IOException { skipWhitespace(); return ( this.mLookAhead != -1 ); } /** * Obtains the next token from the input stream. * * @return an instance conforming to the token interface, or null for eof. * * @throws IOException if something went wrong while reading * @throws Exception if a lexical error was encountered. */ public Token nextToken() throws IOException, ScannerException { // sanity check skipWhitespace(); if (mLookAhead == -1) { mPreviousToken = null; return null; } //for identifier after tr we allow for . - : and / \ boolean previousTokenIsTR = false; boolean previousTokenIsSite = false; if( ( mPreviousToken instanceof TransformationCatalogReservedWord && ((TransformationCatalogReservedWord)mPreviousToken).getValue() == TransformationCatalogReservedWord.TRANSFORMATION ) ){ previousTokenIsTR = true; } else if( ( mPreviousToken instanceof TransformationCatalogReservedWord && ((TransformationCatalogReservedWord)mPreviousToken).getValue() == TransformationCatalogReservedWord.SITE ) ){ previousTokenIsSite = true; } // are we parsing a reserved word or identifier if ( Character.isJavaIdentifierStart( (char) mLookAhead) ) { StringBuffer identifier = new StringBuffer(8); identifier.append( (char) mLookAhead ); mLookAhead = mInputReader.read(); if( previousTokenIsTR ){ //allow : - / \ and . for transformation names while ( mLookAhead != -1 && ( Character.isJavaIdentifierPart((char) mLookAhead) || mLookAhead == ':' || mLookAhead == '.' || mLookAhead == '-' || mLookAhead == '/' || mLookAhead == '\\') ) { identifier.append( (char) mLookAhead ); mLookAhead = mInputReader.read(); } } else if( previousTokenIsSite ){ //allow - . @ in site names while ( mLookAhead != -1 && ( Character.isJavaIdentifierPart((char) mLookAhead) || mLookAhead == '-' || mLookAhead == '.' 
|| mLookAhead == '@' ) ){ identifier.append( (char) mLookAhead ); mLookAhead = mInputReader.read(); } } else{ //be more restrictive while parsing while ( mLookAhead != -1 && Character.isJavaIdentifierPart((char) mLookAhead)) { identifier.append( (char) mLookAhead ); mLookAhead = mInputReader.read(); } } // done parsing identifier or reserved word skipWhitespace(); String s = identifier.toString().toLowerCase(); if ( TransformationCatalogReservedWord.symbolTable().containsKey(s) ) { // isa reserved word mPreviousToken = ( TransformationCatalogReservedWord ) TransformationCatalogReservedWord.symbolTable().get(s); } else { // non-reserved identifier mPreviousToken = new Identifier( identifier.toString() ); } } else if ( mLookAhead == '{' ) { mLookAhead = mInputReader.read(); skipWhitespace(); mPreviousToken = new OpenBrace(); } else if ( mLookAhead == '}' ) { mLookAhead = mInputReader.read(); skipWhitespace(); mPreviousToken = new CloseBrace(); } else if ( mLookAhead == '"' ) { // parser quoted string StringBuffer result = new StringBuffer(16); do { mLookAhead = mInputReader.read(); if (mLookAhead == -1 || mLookAhead == '\r' || mLookAhead == '\n') { // eof is an unterminated string throw new ScannerException( mInputReader, "unterminated quoted string" ); } else if ( mLookAhead == '\\' ) { int temp = mInputReader.read(); if (temp == -1) { throw new ScannerException(mInputReader, "unterminated escape in quoted string"); } else { // always add whatever is after the backslash // FIXME: We could to fancy C-string style \012 \n \r escapes here ;-P result.append((char) temp); } } else if ( mLookAhead != '"' ) { result.append( (char) mLookAhead ); } } while ( mLookAhead != '"' ); // skip over final quote mLookAhead = mInputReader.read(); skipWhitespace(); mPreviousToken = new QuotedString(result.toString()); } else { // unknown material throw new ScannerException( mInputReader, "unknown character " + ((char)mLookAhead) ); } return mPreviousToken; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/SiteCatalogParser.java0000644000175000017500000010167611757531137027020 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.site.classes.SiteData; import edu.isi.pegasus.planner.catalog.site.classes.Connection; import edu.isi.pegasus.planner.catalog.site.classes.FileServer; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.HeadNodeFS; import edu.isi.pegasus.planner.catalog.site.classes.HeadNodeScratch; import edu.isi.pegasus.planner.catalog.site.classes.HeadNodeStorage; import edu.isi.pegasus.planner.catalog.site.classes.InternalMountPoint; import edu.isi.pegasus.planner.catalog.site.classes.LocalDirectory; import edu.isi.pegasus.planner.catalog.site.classes.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.site.classes.StorageType; import edu.isi.pegasus.planner.catalog.site.classes.SharedDirectory; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.catalog.site.classes.WorkerNodeFS; import edu.isi.pegasus.planner.catalog.site.classes.WorkerSharedDirectory; import edu.isi.pegasus.planner.catalog.site.classes.WorkerNodeStorage; import edu.isi.pegasus.planner.catalog.site.classes.WorkerNodeScratch; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Stack; import java.util.Set; import java.util.HashSet; import java.util.Iterator; import org.xml.sax.Attributes; import org.xml.sax.SAXException; /** * This class uses the Xerces SAX2 parser to validate and parse an XML * document conforming to the Site Catalog schema v3.0 * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4507 $ */ public class SiteCatalogParser extends StackBasedXMLParser { /** * The "not-so-official" location URL of the Site Catalog Schema. */ public static final String SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/sc-3.0.xsd"; /** * uri namespace */ public static final String SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/sitecatalog"; /** * The final result constructed. */ private SiteStore mResult; /** * The set of sites that need to be parsed. */ private Set mSites; /** * A boolean indicating whether to load all sites. */ private boolean mLoadAll; /** * The default Constructor. * * @param sites the list of sites to be parsed. * means all. * */ /* public SiteCatalogParser( List sites ) { this( PegasusProperties.nonSingletonInstance(), sites ); }*/ /** * The overloaded constructor. * * @param bag the bag of initialization objects. * @param sites the list of sites that need to be parsed. * means all. */ public SiteCatalogParser( PegasusBag bag, List sites ) { super( bag ); mStack = new Stack(); mDepth = 0; mSites = new HashSet(); for( Iterator it = sites.iterator(); it.hasNext(); ){ mSites.add( it.next() ); } mLoadAll = mSites.contains( "*" ); } /** * Returns the constructed site store object * * @return SiteStore if parsing completed */ public SiteStore getSiteStore() { if( mParsingDone ){ return mResult; } else{ throw new RuntimeException( "Parsing of file needs to complete before function can be called" ); } } /** * The main method that starts the parsing. 
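* <p> * A minimal usage sketch (the file name is hypothetical): * <pre> * SiteCatalogParser parser = new SiteCatalogParser( bag, sites ); * parser.startParser( "sites.xml" ); * SiteStore store = parser.getSiteStore(); * </pre>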
* * @param file the XML file to be parsed. */ public void startParser( String file ) { try { this.testForFile( file ); mParser.parse( file ); //sanity check if ( mDepth != 0 ){ throw new RuntimeException( "Invalid stack depth at end of parsing " + mDepth ); } mLogger.log( "Object constructed is " + mResult.toXML(), LogManager.DEBUG_MESSAGE_LEVEL ); } catch ( IOException ioe ) { mLogger.log( "IO Error :" + ioe.getMessage(), LogManager.ERROR_MESSAGE_LEVEL ); } catch ( SAXException se ) { if ( mLocator != null ) { mLogger.log( "Error in " + mLocator.getSystemId() + " at line " + mLocator.getLineNumber() + "at column " + mLocator.getColumnNumber() + " :" + se.getMessage() , LogManager.ERROR_MESSAGE_LEVEL); } } } /** * Composes the SiteData object corresponding to the element * name in the XML document. * * @param element the element name encountered while parsing. * @param names is a list of attribute names, as strings. * @param values is a list of attribute values, to match the key list. * * @return the relevant SiteData object, else null if unable to construct. * * @exception IllegalArgumentException if the element name is too short. */ public Object createObject(String element, List names, List values) { if ( element == null || element.length() < 1 ){ throw new IllegalArgumentException("illegal element length"); } SiteData object = null; switch ( element.charAt(0) ) { // a alias case 'a': if ( element.equals( "alias" ) ) { String alias = null; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "name" ) ) { alias = value; this.log( element, name, value ); } else { this.complain( element, name, value ); } } return alias; } else{ return null; } //c connection case 'c': if ( element.equals( "connection" ) ) { Connection c = new Connection(); for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "key" ) ) { c.setKey( value ); this.log( element, name, value ); } else { this.complain( element, name, value ); } } return c; } else{ return null; } //f case 'f': if( element.equals( "file-server" ) ){ FileServer fs = new FileServer(); for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "protocol" ) ) { fs.setProtocol( value ); this.log( element, name, value ); } else if ( name.equals( "url" ) ) { fs.setURLPrefix( value ); this.log( element, name, value ); } else if ( name.equals( "mount-point" ) ) { fs.setMountPoint( value ); this.log( element, name, value ); } else { this.complain( element, name, value ); } } return fs; } else{ return null; } //g grid case 'g': if( element.equals( "grid" ) ){ GridGateway gw = new GridGateway(); for ( int i=0; iSiteStore * * @param site the SiteCatalogEntry object. * * @return boolean */ private boolean loadSite(SiteCatalogEntry site) { return ( mLoadAll || mSites.contains( site.getSiteHandle() )); } /** * This method sets the relations between the currently finished XML * element and its containing element in terms of Java objects. * Usually it involves adding the object to the parent's child object * list. * * @param childElement name is the the child element name * @param parent is a reference to the parent's Java object * @param child is the completed child object to connect to the parent * * @return true if the element was added successfully, false, if the * child does not match into the parent. 
*/ public boolean setElementRelation( String childElement, Object parent, Object child ) { switch ( childElement.charAt( 0 ) ) { // a alias case 'a': //alias only appears in replica-catalog if ( child instanceof String && parent instanceof ReplicaCatalog ) { ReplicaCatalog replica = ( ReplicaCatalog )parent; replica.addAlias( (String)child ); return true; } else{ return false; } //c connection case 'c': //connection only appears in replica-catalog if ( child instanceof Connection && parent instanceof ReplicaCatalog ) { ReplicaCatalog replica = ( ReplicaCatalog )parent; Connection c = ( Connection )child; c.setValue( mTextContent.toString().trim() ); replica.addConnection( c ); return true; } else{ return false; } //f case 'f': //file-server appears in local , shared, wshared if ( child instanceof FileServer && parent instanceof LocalDirectory ) { LocalDirectory directory = ( LocalDirectory )parent; directory.addFileServer( (FileServer)child ); return true; } else if ( child instanceof FileServer && parent instanceof SharedDirectory ) { SharedDirectory directory = ( SharedDirectory )parent; directory.addFileServer( (FileServer)child ); return true; } else if ( child instanceof FileServer && parent instanceof WorkerSharedDirectory ) { WorkerSharedDirectory directory = ( WorkerSharedDirectory )parent; directory.addFileServer( (FileServer)child ); return true; } else{ return false; } //g grid case 'g': //grid only appears in the site element if ( child instanceof GridGateway && parent instanceof SiteCatalogEntry ) { SiteCatalogEntry site = ( SiteCatalogEntry )parent; site.addGridGateway( (GridGateway)child ); return true; } else{ return false; } //h head-fs case 'h': //head-fs only appears in the site element if ( child instanceof HeadNodeFS && parent instanceof SiteCatalogEntry ) { SiteCatalogEntry site = ( SiteCatalogEntry )parent; site.setHeadNodeFS( (HeadNodeFS)child ); return true; } else{ return false; } //i internal-mount-point case 'i': //internal-mount-point appears in local , shared, wshared if ( child instanceof InternalMountPoint && parent instanceof LocalDirectory ) { LocalDirectory directory = ( LocalDirectory )parent; directory.setInternalMountPoint( (InternalMountPoint)child ); return true; } else if ( child instanceof InternalMountPoint && parent instanceof SharedDirectory ) { SharedDirectory directory = ( SharedDirectory )parent; directory.setInternalMountPoint( (InternalMountPoint)child ); return true; } else if ( child instanceof InternalMountPoint && parent instanceof WorkerSharedDirectory ) { WorkerSharedDirectory directory = ( WorkerSharedDirectory )parent; directory.setInternalMountPoint( (InternalMountPoint)child ); return true; } else{ return false; } //l local case 'l': //local appears in scratch and storage if ( child instanceof LocalDirectory && parent instanceof StorageType ) { StorageType st = ( StorageType )parent; st.setLocalDirectory( (LocalDirectory)child ); return true; } else{ return false; } //p profile case 'p': //profile appear in file-server site head-fs worker-fs if ( child instanceof Profile ){ Profile p = ( Profile ) child; p.setProfileValue( mTextContent.toString().trim() ); mLogger.log( "Set Profile Value to " + p.getProfileValue(), LogManager.TRACE_MESSAGE_LEVEL ); if ( parent instanceof FileServer ) { FileServer server = ( FileServer )parent; server.addProfile( p ); return true; } else if ( parent instanceof HeadNodeFS ) { HeadNodeFS fs = ( HeadNodeFS )parent; fs.addProfile( p ); return true; } else if ( parent instanceof WorkerNodeFS ) { 
WorkerNodeFS fs = ( WorkerNodeFS )parent; fs.addProfile( p ); return true; } else if ( parent instanceof SiteCatalogEntry ){ SiteCatalogEntry s = ( SiteCatalogEntry )parent; s.addProfile( p ); return true; } } else{ return false; } //r replica-catalog case 'r': //replica-catalog appear in site if ( child instanceof ReplicaCatalog && parent instanceof SiteCatalogEntry ){ SiteCatalogEntry s = ( SiteCatalogEntry )parent; s.addReplicaCatalog( (ReplicaCatalog)child ); return true; } else{ return false; } //s shared scratch storage site site-catalog case 's': if ( child instanceof SharedDirectory ){ //shared appears in scratch and storage if ( parent instanceof StorageType ) { StorageType st = ( StorageType )parent; st.setSharedDirectory( (SharedDirectory)child ); return true; } } else if ( child instanceof StorageType && childElement.equals( "scratch" ) ){ //scratch appears in HeadNodeFS and WorkerNodeFS StorageType scratch = ( StorageType )child; if ( parent instanceof HeadNodeFS ) { HeadNodeFS fs = ( HeadNodeFS )parent; fs.setScratch( new HeadNodeScratch(scratch) ); return true; } else if ( parent instanceof WorkerNodeFS ) { WorkerNodeFS fs = ( WorkerNodeFS )parent; fs.setScratch( new WorkerNodeScratch(scratch) ); return true; } } else if ( child instanceof StorageType && childElement.equals( "storage" ) ){ //storage appears in HeadNodeFS and WorkerNodeFS StorageType storage = ( StorageType )child; if ( parent instanceof HeadNodeFS ) { HeadNodeFS fs = ( HeadNodeFS )parent; fs.setStorage( new HeadNodeStorage( storage ) ); return true; } else if ( parent instanceof WorkerNodeFS ) { WorkerNodeFS fs = ( WorkerNodeFS )parent; fs.setStorage( new WorkerNodeStorage( storage ) ); return true; } } else if( child instanceof SiteCatalogEntry && parent instanceof SiteStore ){ SiteStore c = ( SiteStore )parent; //add only to store if required. SiteCatalogEntry site = (SiteCatalogEntry)child ; if( loadSite( site ) ){ mLogger.log( "Loading site in SiteStore " + site.getSiteHandle(), LogManager.DEBUG_MESSAGE_LEVEL ); c.addEntry( site ); } return true; } else if ( child instanceof SiteStore && parent == null){ //end of parsing reached mLogger.log( "End of last element reached ", LogManager.DEBUG_MESSAGE_LEVEL ); return true; } else{ return false; } //w worker-fs wshared case 'w': //worker-fs appears in site if ( child instanceof WorkerNodeFS && parent instanceof SiteCatalogEntry ) { SiteCatalogEntry site = ( SiteCatalogEntry )parent; site.setWorkerNodeFS((WorkerNodeFS)child ); return true; } //wshared appears in shared scratch of worker node else if ( child instanceof WorkerSharedDirectory && parent instanceof WorkerNodeScratch ){ WorkerNodeScratch scratch = ( WorkerNodeScratch )parent; scratch.setWorkerSharedDirectory( (WorkerSharedDirectory)child ); } else if ( child instanceof WorkerSharedDirectory && parent instanceof WorkerNodeStorage ){ WorkerNodeStorage storage = ( WorkerNodeStorage )parent; storage.setWorkerSharedDirectory( (WorkerSharedDirectory)child ); } else{ return false; } default: return false; } } /** * Returns the XML schema namespace that a document being parsed conforms * to. * * @return the schema namespace */ public String getSchemaNamespace( ){ return SiteCatalogParser.SCHEMA_NAMESPACE; } /** * Returns the local path to the XML schema against which to validate. 
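 * <p>
 * The basename of {@link #SCHEMA_LOCATION}, i.e. <code>sc-3.0.xsd</code>,
 * is resolved against the schema directory configured in the properties,
 * and the result is only a default that
 * <code>mProps.getPoolSchemaLocation()</code> may override.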
* * @return path to the schema */ public String getSchemaLocation() { // treat URI as File, yes, I know - I need the basename File uri = new File( SiteCatalogParser.SCHEMA_LOCATION ); // create a pointer to the default local position File poolconfig = new File( this.mProps.getSchemaDir(), uri.getName() ); return this.mProps.getPoolSchemaLocation( poolconfig.getAbsolutePath() ); } /** * * @param element * @param attribute * @param value */ public void complain(String element, String attribute, String value) { mLogger.log( "For element " + element + " invalid attribute found " + attribute + " -> " + value, LogManager.ERROR_MESSAGE_LEVEL ); } /** * * @param args */ public static void main( String[] args ){ /* LogManagerFactory.loadSingletonInstance().setLevel( 5 ); List s = new ArrayList(1); s.add( "*" ); SiteCatalogParser parser = new SiteCatalogParser( s ); if (args.length == 1) { parser.startParser( args[0] ); } else { System.out.println("Usage: SiteCatalogParser "); } */ } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/DAXParserFactory.java0000644000175000017500000003361111757531137026556 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.planner.parser.dax.*; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.CondorVersion; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.classes.PegasusBag; import java.io.File; import java.util.HashMap; import java.util.Map; /** * A factory class to load the appropriate DAX Parser and Callback implementations that need * to be passed to the DAX Parser. * * @author Karan Vahi * @version $Revision: 3820 $ */ public class DAXParserFactory { /** * The default callback for label partitioning. */ public static String LABEL_CALLBACK_CLASS = "DAX2LabelGraph"; /** * Package to prefix "just" class names with. */ public static final String DEFAULT_PARSER_PACKAGE_NAME = "edu.isi.pegasus.planner.parser.dax"; /** * Package to prefix "just" class names with. */ public static final String DEFAULT_CALLBACK_PACKAGE_NAME = "edu.isi.pegasus.planner.parser.dax"; /* * Predefined Constant for DAX version 3.2.0 */ public static final long DAX_VERSION_3_2_0 = CondorVersion.numericValue( "3.2.0" ); /** * The default DAXParser classname */ public static final String DEFAULT_DAX_PARSER_CLASS = "DAXParser3"; /** * The DAXParser3 classname */ public static final String DAX_PARSER2_CLASS = "DAXParser2"; /** * The DAXParser3 classname */ public static final String DAX_PARSER3_CLASS = "DAXParser3"; /** * Loads the appropriate DAXParser looking at the dax schema that is specified by * the user. * * @param bag bag of Pegasus intialization objects * @param callbackClass the dax callback class * @param dax file the dax file * * @return the DAXParser loaded. 
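 * <p>
 * A sketch of the intended call sequence, where <code>workflow.dax</code>
 * is an illustrative file name and the callback class is the bundled
 * <code>DAX2LabelGraph</code>:
 * <pre>
 *  Callback cb = DAXParserFactory.loadDAXParserCallback( properties, "workflow.dax", "DAX2LabelGraph" );
 *  DAXParser parser = DAXParserFactory.loadDAXParser( bag, cb, "workflow.dax" );
 * </pre>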
* * @exception DAXParserFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_CALLBACK_PACKAGE_NAME */ public static DAXParser loadDAXParser( PegasusBag bag, String callbackClass, String daxFile ) throws DAXParserFactoryException{ PegasusProperties properties = bag.getPegasusProperties(); //sanity check if( properties == null){ throw new RuntimeException("Invalid properties passed"); } //load the callback Callback c = DAXParserFactory.loadDAXParserCallback( properties, daxFile, callbackClass ); return DAXParserFactory.loadDAXParser( bag, c, daxFile ); } /** * Loads the appropriate DAXParser looking at the dax schema that is specified * in the DAX file. * * @param bag bag of Pegasus intialization objects * @param c the dax callback. * @param daxFile the dax file to parser * * @return the DAXParser loaded. * * @exception DAXParserFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_CALLBACK_PACKAGE_NAME */ public static DAXParser loadDAXParser( PegasusBag bag, Callback c , String daxFile ) throws DAXParserFactoryException{ String daxClass = DAXParserFactory.DEFAULT_DAX_PARSER_CLASS; LogManager logger = bag.getLogger(); PegasusProperties properties = bag.getPegasusProperties(); String daxSchema = properties.getDAXSchemaLocation(); //sanity check if( properties == null){ throw new RuntimeException("Invalid properties passed"); } if( logger == null){ throw new RuntimeException("Invalid logger passed"); } try{ //try to figure out the schema version by parsing the dax file String schemaVersion = null; if( daxFile != null && !daxFile.isEmpty() ){ Map m = getDAXMetadata( bag, daxFile ); if( m.containsKey( "version" ) && (schemaVersion = (String)m.get( "version" )) != null ){ logger.log( "DAX Version as determined from DAX file " + schemaVersion, LogManager.DEBUG_MESSAGE_LEVEL ); //append .0 to the version number //to be able to convert to numberic value schemaVersion = schemaVersion + ".0"; } } //try to figure out the schema from the schema in properties //in case unable to determine from the dax file if( schemaVersion == null && daxSchema != null ){ //try to determin the version of dax schema daxSchema = new File( daxSchema ).getName(); if( daxSchema.startsWith( "dax-" ) && daxSchema.endsWith( ".xsd" ) ){ schemaVersion = daxSchema.substring( daxSchema.indexOf( "dax-" ) + 4, daxSchema.lastIndexOf(".xsd") ); logger.log( "DAX Version as determined from schema property " + schemaVersion, LogManager.DEBUG_MESSAGE_LEVEL ); //append .0 to the version number //to be able to convert to numberic value schemaVersion = schemaVersion + ".0"; } } if( schemaVersion != null ){ if( CondorVersion.numericValue(schemaVersion) < DAXParserFactory.DAX_VERSION_3_2_0 ){ daxClass = DAXParserFactory.DAX_PARSER2_CLASS; } else{ daxClass = DAXParserFactory.DAX_PARSER3_CLASS; } } } catch( Exception e ){ logger.log( "Problem while determining the version of dax" , e, LogManager.ERROR_MESSAGE_LEVEL ); } logger.log( "DAX Parser Class to be loaded is " + daxClass, LogManager.CONFIG_MESSAGE_LEVEL ); return loadDAXParser( daxClass, bag, c ); } /** * Loads the appropriate DAXParser looking at the dax schema that is specified by * the user. * * @param classname the classname of the parser class that needs to be loaded * @param bag bag of Pegasus intialization objects * @param c the DAX Callback to use * * @return the DAXParser loaded. 
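 * <p>
 * Unlike the preceding overloads, no schema version detection happens
 * here: the named class is instantiated directly, after being prefixed
 * with the default parser package if it is not fully qualified.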
* * @exception DAXParserFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_CALLBACK_PACKAGE_NAME */ public static final DAXParser loadDAXParser( String classname, PegasusBag bag, Callback c ){ DAXParser daxParser = null; try{ //load the DAX Parser class //prepend the package name String daxClass = ( classname.indexOf('.') == -1)? //pick up from the default package DEFAULT_PARSER_PACKAGE_NAME + "." + classname: //load directly classname; DynamicLoader dl = new DynamicLoader( daxClass ); Object argList[] = new Object[1]; argList[0] = bag; daxParser = (DAXParser)dl.instantiate(argList); //set the callback for the DAX Parser ((DAXParser)daxParser).setDAXCallback( c ); } catch(Exception e){ throw new DAXParserFactoryException( "Instantiating DAXParser ", classname, e); } return daxParser; } /** * Loads the implementing class corresponding to the type specified by the user. * The properties object passed should not be null. The callback that is * loaded, is the one referred to in the properties by the user, unless the * type of partitioning is label. In that case DAX2LabelGraph is loaded always. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param type the type of partitioning the user specified. * @param dax the path to the DAX file that has to be parsed. * * @return the instance of the class implementing this interface. * * @exception DAXParserFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_CALLBACK_PACKAGE_NAME * @see edu.isi.pegasus.planner.common.PegasusProperties#getPartitionerDAXCallback() */ public static Callback loadDAXParserCallback( String type, PegasusProperties properties, String dax ) throws DAXParserFactoryException{ String callbackClass = null; //for type label always load DAX2LabelGraph if ( type.equalsIgnoreCase("label") ){ callbackClass = LABEL_CALLBACK_CLASS; //graph with labels populated }else{ //pick up the value passed in properties callbackClass = properties.getPartitionerDAXCallback(); } return loadDAXParserCallback( properties, dax, callbackClass ); } /** * Returns the metadata stored in the root adag element in the DAX * * @param bag the bag of initialization objects * @param dax the dax file. * * @return Map containing the metadata, else an empty map */ public static Map getDAXMetadata( PegasusBag bag, String dax ){ Callback cb = DAXParserFactory.loadDAXParserCallback( bag.getPegasusProperties(), dax, "DAX2Metadata" ); LogManager logger = bag.getLogger(); if( logger != null ){ logger.log( "Retrieving Metadata from the DAX file " + dax , LogManager.DEBUG_MESSAGE_LEVEL ); } try{ Parser p = (Parser)DAXParserFactory.loadDAXParser( DAXParserFactory.DAX_PARSER2_CLASS, bag, cb ); //while determining the metadata we are just parsing adag element //we want the parser validation to be turned off. p.setParserFeature("http://xml.org/sax/features/validation", false); p.setParserFeature("http://apache.org/xml/features/validation/schema", false); p.startParser( dax ); } catch( RuntimeException e ){ //check explicity for file not found exception if( e.getCause() != null && e.getCause() instanceof java.io.IOException){ //rethrow throw e; } } Map result = ( Map ) cb.getConstructedObject(); return ( result == null ) ? new HashMap() : result; } /** * Loads the implementing class corresponding to the type specified by the user. * The properties object passed should not be null. 
The callback that is * loaded, is the one referred to by the className parameter passed. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param dax the path to the DAX file that has to be parsed. * @param className the name of the implementing class. * * @return the instance of the class implementing this interface. * * @exception DAXParserFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_CALLBACK_PACKAGE_NAME */ public static Callback loadDAXParserCallback( PegasusProperties properties, String dax, String className) throws DAXParserFactoryException{ //try loading the class dynamically Callback callback = null; try{ //sanity check if(properties == null){ throw new RuntimeException("Invalid properties passed"); } if(className == null){ throw new RuntimeException("Invalid class specified to load"); } //prepend the package name className = (className.indexOf('.') == -1)? //pick up from the default package DEFAULT_CALLBACK_PACKAGE_NAME + "." + className: //load directly className; DynamicLoader dl = new DynamicLoader( className); Object argList[] = new Object[2]; argList[0] = properties; argList[1] = dax; callback = (Callback)dl.instantiate(argList); } catch(Exception e){ throw new DAXParserFactoryException("Instantiating DAXCallback ", className, e); } return callback; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/0000755000175000017500000000000011757531667024101 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/QuotedString.java0000644000175000017500000000257011757531137027370 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.tokens; /** * Class to capture the content within a quoted string. * * @author Jens Voeckler * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2151 $ */ public class QuotedString implements Token{ /** * This instance variable captures the token value for the quoted string. */ private String m_value; /** * Initializes an instance of a quoted string. * @param tokenValue is the string content to remember. */ public QuotedString(String tokenValue) { m_value = tokenValue; } /** * Obtains the token value of a given string token. * @return the token value. */ public String getValue() { return this.m_value; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/CloseBrace.java0000644000175000017500000000167311757531137026745 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.tokens; /** * Class to convey a closed brace, no token value necessary. * * @author Jens Voeckler * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2151 $ */ public class CloseBrace implements Token{ public CloseBrace(){ // empty } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/Identifier.java0000644000175000017500000000304011757531137027013 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.tokens; /** * Class to capture identifiers. * * @author Jens Voeckler * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2151 $ */ public class Identifier implements Token{ /** * This instance variable captures the token value for the identifier. */ private String m_value; /** * Initializes an instance of an identifier. * * @param tokenValue is the identifier to remember. */ public Identifier( String tokenValue ){ m_value = tokenValue; } /** * Obtains the token value of a given identifier token. * @return the token value. */ public String getValue(){ return this.m_value; } /** * Returns the textual description of the object. * * @return the description as String. */ public String toString(){ return getValue(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/Token.java0000644000175000017500000000160611757531137026017 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.tokens; /** * Base interface for the tokens passed from the Text Scanner to the parser. 
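 * <p>
 * Implementations are plain value carriers; the scanner produces them and
 * the parser branches on their concrete type, e.g. (illustrative):
 * <pre>
 *  Token t = new Identifier( "site" );
 *  if ( t instanceof Identifier ) {
 *      String name = ((Identifier) t).getValue();
 *  }
 * </pre>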
* * @author Jens Voeckler * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2151 $ */ public interface Token { // empty } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/CloseParanthesis.java0000644000175000017500000000174311757531137030210 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.tokens; /** * Class to convey a closing parenthesis, no token value necessary. * * @author Jens Voeckler * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2151 $ */ public class CloseParanthesis implements Token{ /** * Default Constructor */ public CloseParanthesis() { } } ././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/TransformationCatalogReservedWord.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/TransformationCatalogReservedWord.j0000644000175000017500000001035111757531137033101 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.tokens; /** * Class to capture reserved words for the textual format of the Transformation * Catalog. * * @version $Revision: 2175 $ * @author Jens Vöckler * @author Karan Vahi */ public class TransformationCatalogReservedWord implements Token { /** * token value for the reserved word "tr". */ public static final int TRANSFORMATION = 0; /** * token value for the reserved word "site". */ public static final int SITE = 1; /** * token value for the reserved word "profile". */ public static final int PROFILE = 2; /** * token value for the reserved word "pfn". */ public static final int PFN = 3; /** * token value for the reserved word "arch". */ public static final int ARCH = 4; /** * token value for the reserved word "os". */ public static final int OS = 5; /** * token value for the reserved word "osrelease". */ public static final int OSRELEASE = 6; /** * token value for the reserved word "osversion". */ public static final int OSVERSION = 7; /** * token value for the reserved word "type". */ public static final int TYPE = 8; /** * Singleton implementation of a symbol table for reserved words. */ private static java.util.Map mSymbolTable = null; /** * Singleton access to the symbol table as a whole. 
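 * <p>
 * A lookup sketch (illustrative):
 * <pre>
 *  TransformationCatalogReservedWord word = (TransformationCatalogReservedWord)
 *      TransformationCatalogReservedWord.symbolTable().get( "pfn" );
 *  int token = word.getValue();   // == TransformationCatalogReservedWord.PFN
 * </pre>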
* @return Map */ public static java.util.Map symbolTable() { if (mSymbolTable == null) { // only initialize once and only once, as needed. mSymbolTable = new java.util.TreeMap(); mSymbolTable.put( "tr", new TransformationCatalogReservedWord(TransformationCatalogReservedWord.TRANSFORMATION)); mSymbolTable.put( "site", new TransformationCatalogReservedWord(TransformationCatalogReservedWord.SITE )); mSymbolTable.put( "profile", new TransformationCatalogReservedWord(TransformationCatalogReservedWord.PROFILE )); mSymbolTable.put( "pfn", new TransformationCatalogReservedWord(TransformationCatalogReservedWord.PFN )); mSymbolTable.put( "arch", new TransformationCatalogReservedWord(TransformationCatalogReservedWord.ARCH)); mSymbolTable.put( "os", new TransformationCatalogReservedWord(TransformationCatalogReservedWord.OS)); mSymbolTable.put( "osrelease", new TransformationCatalogReservedWord(TransformationCatalogReservedWord.OSRELEASE )); mSymbolTable.put( "osversion", new TransformationCatalogReservedWord(TransformationCatalogReservedWord.OSVERSION )); mSymbolTable.put( "type", new TransformationCatalogReservedWord(TransformationCatalogReservedWord.TYPE)); } return mSymbolTable; } /** * This instance variable captures the token value for the reserved word. */ private int mValue; /** * Initializes an instance of a reserved word token. The constructor * is unreachable from the outside. Use symbol table lookups to obtain * reserved word tokens. * * @param tokenValue is the token value to memorize. * @see #symbolTable() */ protected TransformationCatalogReservedWord(int tokenValue) { mValue = tokenValue; } /** * Obtains the token value of a given reserved word token. * @return the token value. */ public int getValue() { return this.mValue; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/OpenParanthesis.java0000644000175000017500000000167111757531137030044 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.tokens; /** * Class to convey an open parenthesis, no token value necessary. * * * @author Jens Voeckler * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2151 $ */ public class OpenParanthesis implements Token{ public OpenParanthesis() { } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/SiteCatalogReservedWord.java0000644000175000017500000000734111757531137031474 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.tokens; /** * Class to capture reserved words. * $Revision: 2151 $ * @author Jens Vöckler * @author Gaurang Mehta */ public class SiteCatalogReservedWord implements Token { /** * token value for the reserved word "site". */ public static final int SITE = 0; /** * token value for the reserved word "version". */ public static final int VERSION = 1; /** * token value for the reserved word "lrc". */ public static final int LRC = 2; /** * token value for the reserved word "universe". */ public static final int UNIVERSE = 3; /** * token value for the reserved word "gridlaunch". */ public static final int GRIDLAUNCH = 4; /** * token value for the reserved word "workdir". */ public static final int WORKDIR = 5; /** * token value for the reserved word "gridftp". */ public static final int GRIDFTP = 6; /** * token value for the reserved word "profile". */ public static final int PROFILE = 7; /** * token value for the reserved word "sysinfo". */ public static final int SYSINFO = 8; /** * Singleton implementation of a symbol table for reserved words. */ private static java.util.Map m_symbolTable = null; /** * Singleton access to the symbol table as a whole. * @return Map */ public static java.util.Map symbolTable() { if ( m_symbolTable == null ) { // only initialize once and only once, as needed. m_symbolTable = new java.util.TreeMap(); m_symbolTable.put( "site", new SiteCatalogReservedWord(SiteCatalogReservedWord.SITE) ); m_symbolTable.put( "version", new SiteCatalogReservedWord(SiteCatalogReservedWord.VERSION) ); m_symbolTable.put( "lrc", new SiteCatalogReservedWord(SiteCatalogReservedWord.LRC) ); m_symbolTable.put( "universe", new SiteCatalogReservedWord(SiteCatalogReservedWord.UNIVERSE) ); m_symbolTable.put( "gridlaunch", new SiteCatalogReservedWord(SiteCatalogReservedWord.GRIDLAUNCH) ); m_symbolTable.put( "workdir", new SiteCatalogReservedWord(SiteCatalogReservedWord.WORKDIR) ); m_symbolTable.put( "gridftp", new SiteCatalogReservedWord(SiteCatalogReservedWord.GRIDFTP) ); m_symbolTable.put( "profile", new SiteCatalogReservedWord(SiteCatalogReservedWord.PROFILE) ); m_symbolTable.put("sysinfo", new SiteCatalogReservedWord(SiteCatalogReservedWord.SYSINFO)); } return m_symbolTable; } /** * This instance variable captures the token value for the reserved word. */ private int m_value; /** * Initializes an instance of a reserved word token. The constructor * is unreachable from the outside. Use symbol table lookups to obtain * reserved word tokens. * @param tokenValue is the token value to memorize. * @see #symbolTable() */ protected SiteCatalogReservedWord( int tokenValue ) { m_value = tokenValue; } /** * Obtains the token value of a given reserved word token. * @return the token value. */ public int getValue() { return this.m_value; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/tokens/OpenBrace.java0000644000175000017500000000167411757531137026602 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.tokens; /** * Class to convey a opened brace, no token value necessary. * * @author Jens Voeckler * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2151 $ */ public class OpenBrace implements Token { public OpenBrace() { // empty } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/InvocationParser.java0000644000175000017500000013702111757531137026723 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.parser; import org.griphyn.vdl.parser.*; import edu.isi.pegasus.planner.invocation.CPU; import edu.isi.pegasus.planner.invocation.HasText; import edu.isi.pegasus.planner.invocation.JobStatus; import edu.isi.pegasus.planner.invocation.StatCall; import edu.isi.pegasus.planner.invocation.Architecture; import edu.isi.pegasus.planner.invocation.Machine; import edu.isi.pegasus.planner.invocation.StatInfo; import edu.isi.pegasus.planner.invocation.JobStatusSignal; import edu.isi.pegasus.planner.invocation.Regular; import edu.isi.pegasus.planner.invocation.ArgEntry; import edu.isi.pegasus.planner.invocation.ArgVector; import edu.isi.pegasus.planner.invocation.Proc; import edu.isi.pegasus.planner.invocation.Fifo; import edu.isi.pegasus.planner.invocation.Temporary; import edu.isi.pegasus.planner.invocation.EnvEntry; import edu.isi.pegasus.planner.invocation.MachineSpecific; import edu.isi.pegasus.planner.invocation.JobStatusSuspend; import edu.isi.pegasus.planner.invocation.Descriptor; import edu.isi.pegasus.planner.invocation.Task; import edu.isi.pegasus.planner.invocation.Load; import edu.isi.pegasus.planner.invocation.Environment; import edu.isi.pegasus.planner.invocation.Usage; import edu.isi.pegasus.planner.invocation.Boot; import edu.isi.pegasus.planner.invocation.MachineInfo; import edu.isi.pegasus.planner.invocation.Invocation; import edu.isi.pegasus.planner.invocation.InvocationRecord; import edu.isi.pegasus.planner.invocation.RAM; import edu.isi.pegasus.planner.invocation.WorkingDir; import edu.isi.pegasus.planner.invocation.Arguments; import edu.isi.pegasus.planner.invocation.JobStatusRegular; import edu.isi.pegasus.planner.invocation.ArgString; import edu.isi.pegasus.planner.invocation.Data; import edu.isi.pegasus.planner.invocation.Stamp; import edu.isi.pegasus.planner.invocation.Uname; import edu.isi.pegasus.planner.invocation.Swap; import edu.isi.pegasus.planner.invocation.Job; import edu.isi.pegasus.planner.invocation.Status; import edu.isi.pegasus.planner.invocation.Ignore; import 
edu.isi.pegasus.planner.invocation.JobStatusFailure; import org.griphyn.vdl.util.Logging; // Xerces import org.xml.sax.*; import org.xml.sax.helpers.DefaultHandler; import java.io.*; import java.util.*; import java.text.*; import java.net.InetAddress; import java.net.UnknownHostException; /** * This class uses the Xerces SAX2 parser to validate and parse an XML * document which contains information from kickstart generated * invocation record. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 3732 $ * */ public class InvocationParser extends DefaultHandler { /** * Default parser is the Xerces parser. */ protected static final String vendorParserClass = "org.apache.xerces.parsers.SAXParser"; /** * Holds the instance of a {@link org.xml.sax.XMLReader} class. */ private XMLReader m_parser; /** * Holds the result, will be overwritten by each invocation of parse(). */ private InvocationRecord m_result; /** * Keep the location within the document */ private Locator m_location; /** * A Hashmap to forward resolve namespaces that were encountered * during parsing. */ private Map m_forward; /** * A Hashmap to reverse resolve namespaces that were encountered * during parsing. */ private Map m_reverse; /** * Parsing for ISO dates without milliseconds */ private SimpleDateFormat m_coarse; /** * Parsing for ISO dates with millisecond extension. */ private SimpleDateFormat m_fine; /** * Obtain our logger once for multiple uses. */ private Logging m_log; /** * Count the depths of elements in the document */ private int m_depth = 0; /** * A stack of namespaces? */ private Stack m_stack; /** * Sets a feature while capturing failed features right here. * * @param uri is the feature's URI to modify * @param flag is the new value to set. * @return true, if the feature could be set, false for an exception */ private boolean set( String uri, boolean flag ) { boolean result = false; try { this.m_parser.setFeature( uri, flag ); result = true; } catch ( SAXException se ) { Logging.instance().log( "default", 0, "Could not set parser feature " + se.getMessage() ); } return result; } /** * The class constructor. This function initializes the Xerces parser * and the features that enable schema validation. * * @param schemaLocation is the default location of the XML Schema * which this parser is capable of parsing. It may be null to use * the defaults provided in the document. */ public InvocationParser( String schemaLocation ) { this.m_forward = new HashMap(); this.m_reverse = new HashMap(); this.m_coarse = new SimpleDateFormat( "yyyy-MM-dd'T'HH:mm:ssZ" ); this.m_fine = new SimpleDateFormat( "yyyy-MM-dd'T'HH:mm:ss.SSSZ" ); this.m_log = Logging.instance(); try { m_parser = (XMLReader) Class.forName(vendorParserClass).newInstance(); m_parser.setContentHandler(this); // m_parser.setErrorHandler(this); m_parser.setErrorHandler( new VDLErrorHandler() ); set( "http://xml.org/sax/features/validation", true ); set( "http://apache.org/xml/features/validation/dynamic", true ); set( "http://apache.org/xml/features/validation/schema", true ); // time+memory consuming, see http://xml.apache.org/xerces2-j/features.html // set( "http://apache.org/xml/features/validation/schema-full-checking", true ); // Send XML Schema element default values via characters(). 
set( "http://apache.org/xml/features/validation/schema/element-default", true ); set( "http://apache.org/xml/features/validation/warn-on-duplicate-attdef", true ); // mysteriously, this one fails with recent Xerces // set( "http://apache.org/xml/features/validation/warn-on-undeclared-elemdef", true ); set( "http://apache.org/xml/features/warn-on-duplicate-entitydef", true ); set( "http://apache.org/xml/features/honour-all-schemaLocations", true ); // set the schema default location. if ( schemaLocation != null ) { setSchemaLocations( InvocationRecord.SCHEMA_NAMESPACE + ' ' + schemaLocation ); m_log.log("app", 2, "will use " + schemaLocation ); } else { m_log.log("app", 2, "will use document schema hint" ); } } catch (ClassNotFoundException e) { m_log.log( "defaut", 0, "The SAXParser class was not found: " + e); } catch (InstantiationException e) { m_log.log( "default", 0, "The SAXParser class could not be instantiated: " + e); } catch (IllegalAccessException e) { m_log.log( "default", 0, "The SAXParser class could not be accessed: " + e); } } /** * Sets the list of external real locations where the XML schema may * be found. Since this list can be determined at run-time through * properties etc., we expect this function to be called between * instantiating the parser, and using the parser * * @param list is a list of strings representing schema locations. The * content exists in pairs, one of the namespace URI, one of the * location URL. */ public void setSchemaLocations( String list ) { // schema location handling try { m_parser.setProperty( "http://apache.org/xml/properties/schema/external-schemaLocation", list ); } catch ( SAXException se ) { m_log.log( "default", 0, "The SAXParser reported an error: " + se ); } } /** * This function parses a XML source from an InputStream source, and * creates java class instances that correspond to different elements * in the XML source. * * @param reader is a bytestream opened for reading. * @return the records with the invocation information, or null on failure. */ public InvocationRecord parse( java.io.InputStream reader ) { try { // will change m_result m_parser.parse( new InputSource(reader) ); return m_result; } catch (SAXException e) { // e.printStackTrace( System.err ); m_log.log( "default", 0, "SAX Error: " + e.getMessage() ); } catch (IOException e) { m_log.log( "default", 0, "IO Error: " + e.getMessage() ); } return null; } /** * This function parses a XML source from the new Reader source, and * creates java class instances that correspond to different elements * in the XML source. * * @param reader is a character stream opened for reading. * @return the records with the invocation information, or null on failure. */ public InvocationRecord parse( java.io.Reader reader ) { try { // will change m_result m_parser.parse( new InputSource(reader) ); return m_result; } catch (SAXException e) { // e.printStackTrace( System.err ); m_log.log( "default", 0, "SAX Error: " + e.getMessage() ); } catch (IOException e) { m_log.log( "default", 0, "IO Error: " + e.getMessage() ); } return null; } // // here starts the implementation to the Interface // /** * Obtains the document locator from the parser. The document location * can be used to print debug information, i.e the current location * (line, column) in the document. 
* * @param locator is the externally set current position */ public void setDocumentLocator( Locator locator ) { this.m_location = locator; } private String full_where() { return ( "line " + m_location.getLineNumber() + ", col " + m_location.getColumnNumber() ); } private String where() { return ( m_location.getLineNumber() + ":" + m_location.getColumnNumber() ); } /** * This method specifies what to do when the parser is at the beginning * of the document. In this case, we simply print a message for debugging. */ public void startDocument() { this.m_depth = 0; this.m_stack = new Stack(); this.m_log.log( "parser", 1, "*** start of document ***" ); } /** * The parser comes to the end of the document. */ public void endDocument() { this.m_log.log( "parser", 1, "*** end of document ***" ); } /** * There is a prefix or namespace defined, put the prefix and its URI * in the HashMap. We can get the URI when the prefix is used here after. * * @param prefix the Namespace prefix being declared. * @param uri the Namespace URI the prefix is mapped to. */ public void startPrefixMapping( java.lang.String prefix, java.lang.String uri ) throws SAXException { String p = prefix == null ? null : new String(prefix); String u = uri == null ? null : new String(uri); m_log.log( "parser", 2, "adding \"" + p + "\" <=> " + u ); if ( ! this.m_forward.containsKey(p) ) this.m_forward.put(p, new Stack()); ((Stack) this.m_forward.get(p)).push(u); if ( ! this.m_reverse.containsKey(u) ) this.m_reverse.put(u, new Stack()); ((Stack) this.m_reverse.get(u)).push(p); } /** * Out of the reach of the prefix, remove it from the HashMap. * * @param prefix is the prefix that was being mapped previously. */ public void endPrefixMapping( java.lang.String prefix ) throws SAXException { String u = (String) ((Stack) this.m_forward.get(prefix)).pop(); String p = (String) ((Stack) this.m_reverse.get(u)).pop(); m_log.log( "parser", 2, "removed \"" + p + "\" <=> " + u ); } /** * Helper function to map prefixes correctly onto the elements. * * @param uri is the parser-returned URI that needs translation. * @return the correct prefix for the URI */ private String map( String uri ) { if ( uri == null || uri.length() == 0 ) return ""; Stack stack = (Stack) this.m_reverse.get(uri); String result = stack == null ? null : (String) stack.peek(); if ( result == null || result.length() == 0 ) return ""; else return result + ':'; } /** * This method defines the action to take when the parser begins to parse * an element. * * @param namespaceURI is the URI of the namespace for the element * @param localName is the element name without namespace * @param qName is the element name as it appears in the docment * @param atts has the names and values of all the attributes */ public void startElement( java.lang.String namespaceURI, java.lang.String localName, java.lang.String qName, Attributes atts ) throws SAXException { m_log.log( "parser", 3, "<" + map(namespaceURI) + localName + "> at " + where() ); // yup, one more element level m_depth++; java.util.List names = new java.util.ArrayList(); java.util.List values = new java.util.ArrayList(); for ( int i=0; i < atts.getLength(); ++i ) { String name = new String( atts.getLocalName(i) ); String value = new String( atts.getValue(i) ); m_log.log( "parser", 2, "attribute " + map(atts.getURI(i)) + name + "=\"" + value + "\"" ); names.add(name); values.add(value); } //System.out.println( "QNAME " + qName + " NAME " + names + "\t Values" + values ); Invocation parent = null; if ( ! 
m_stack.empty() ) { IVSElement peek = (IVSElement) m_stack.peek(); parent = (Invocation)peek.m_obj; } Invocation object = createObject( parent, qName, names, values ); if ( object != null ) m_stack.push( new IVSElement( qName, object ) ); else throw new SAXException( "empty element while parsing" ); } /** * The parser is at the end of an element. Each successfully and * completely parsed Definition will trigger a callback to the * registered DefinitionHandler. * * @param namespaceURI is the URI of the namespace for the element * @param localName is the element name without namespace * @param qName is the element name as it appears in the docment */ public void endElement( java.lang.String namespaceURI, java.lang.String localName, java.lang.String qName ) throws SAXException { // that's it for this level m_depth--; m_log.log( "parser", 3, " at " + where() ); IVSElement tos = (IVSElement) m_stack.pop(); if ( ! qName.equals(tos.m_name) ) { m_log.log( "default", 0, "assertion failure" ); System.exit(1); } if ( ! m_stack.empty() ) { // add pieces to lower levels IVSElement peek = (IVSElement) m_stack.peek(); if ( !setElementRelation( peek.m_name.charAt(0), peek.m_obj, tos.m_obj )){ m_log.log( "parser", 0, "Element " + tos.m_name + " does not fit into element " + peek.m_name ); //System.out.println( "Element " + tos.m_name + // " does not fit into element " + peek.m_name ); } } else { // run finalizer, if available // m_log.log( "default", 0, "How did I get here?" ); } } /** * This method is the callback function for characters in an element. * The element is expected to be of mixed content. * * @param ch are the characters from the XML document * @param start is the start position into the array * @param length is the amount of valid data in the array */ public void characters( char[] ch, int start, int length ) throws SAXException { String message = new String( ch, start, length ); if ( message.length() > 0 ) { if ( message.trim().length() == 0 ) m_log.log( "parser", 3, "Characters: \' \' x " + message.length() ); else m_log.log( "parser", 3, "Characters: \"" + message + "\"" ); // Insert text into the text carrying elements. These elements // must be capable to have text added repeatedly. if ( ! m_stack.empty() ) { IVSElement tos = (IVSElement) m_stack.peek(); if ( tos.m_obj instanceof HasText ) { HasText obj = (HasText) tos.m_obj; obj.appendValue(message); } } else { // run finalizer, if available m_log.log( "default", 0, "How did I get here II?" ); } } } /** * Currently, ignorable whitespace will be ignored. * * @param ch are the characters from the XML document * @param start is the start position into the array * @param length is the amount of valid data in the array */ public void ignorableWhitespace( char[] ch, int start, int length ) throws SAXException { // not implemented } /** * Receive a processing instruction. Currently, we are just printing * a debug message that we received a PI. * * @param target the processing instruction target * @param data the processing instruction data, or null if none was supplied. * The data does not include any whitespace separating it from the target. */ public void processingInstruction( java.lang.String target, java.lang.String data ) throws SAXException { m_log.log( "parser", 2, "processing instruction " + target + "=\"" + data + "\" was skipped!"); } /** * Receive a notification that an entity was skipped. Currently, we * are just printing a debug message to this fact. * * @param name The name of the skipped entity. 
If it is a parameter * entity, the name will begin with '%', and if it is the external DTD * subset, it will be the string "[dtd]". */ public void skippedEntity(java.lang.String name) throws SAXException { m_log.log( "parser", 2, "entity " + name + " was skipped!"); } // // =================================================== our own stuff === // /** * Small helper method to bundle repetitive parameters in a template * for reporting progress. * * @param subject is the name of the XML element that is being scrutinized. * @param name is then name of the element we are working with. * @param value is the attribute value. */ private void log( String subject, String name, String value ) { if ( value == null ) value = new String(); m_log.log( "filler", 3, subject + "." + name + "=\"" + value + "\"" ); } /** * Small helper method to bundle repetitive complaints in a template * for reporting progress. * * @param subject is the name of the XML element that is being scrutinized. * @param name is then name of the element we are working with. * @param value is the attribute value. */ private void complain( String subject, String name, String value ) { if ( value == null ) value = new String(); m_log.log( "default", 0, "ignoring " + subject + '@' + name + "=\"" + value + '"', true ); } /** * Small helper to parse the different date varieties and deal with * Java obnoxeity. * * @param date is an ISO 8601 timestamp * @return a date field * @exception ParseException thrown if the date cannot be parsed */ private Date parseDate( String date ) throws ParseException { // SimpleDataFormat stumbles over colon in time zone int size = date.length(); if ( date.charAt(size-3) == ':' ) { StringBuffer temp = new StringBuffer(date); temp.deleteCharAt(size-3); date = temp.toString(); } Date result; if ( date.indexOf('.') == -1 ) { // coarse grained timestamp result = m_coarse.parse(date); } else { // fine grained timestamp result = m_fine.parse(date); } SimpleDateFormat sdf = new SimpleDateFormat("yyyyMMdd'T'HHmmssZ"); m_log.log( "filler", 3, "found date " + sdf.format(result) ); return result; } /** * Small helper method to set up the attributes for the job elements. * * @param job is the job to set up. 
* @param names is the list of attribute names * @param values is the list of attribute values */ private void setupJob( Job job, java.util.List names, java.util.List values ) throws NumberFormatException, ParseException { for ( int i=0; i 0 switch ( e.charAt(0) ) { // // A // case 'a': if ( e.equals("arg") ) { ArgEntry entry = new ArgEntry(); for ( int i=0; i " + value ); if ( name.equals("mode") ) { this.log( e, name, value ); statinfo.setMode( Integer.parseInt(value,8) ); } else if ( name.equals("size") ) { this.log( e, name, value ); statinfo.setSize( Long.parseLong(value) ); } else if ( name.equals("inode") ) { this.log( e, name, value ); statinfo.setINode( (long)Double.parseDouble(value) ); } else if ( name.equals("nlink") ) { this.log( e, name, value ); statinfo.setLinkCount( Long.parseLong(value) ); } else if ( name.equals("blksize") ) { this.log( e, name, value ); statinfo.setBlockSize( Long.parseLong(value) ); } else if ( name.equals("blocks") ) { this.log( e, name, value ); statinfo.setBlocks( Long.parseLong(value) ); } else if ( name.equals("atime") ) { this.log( e, name, value ); statinfo.setAccessTime( parseDate(value) ); } else if ( name.equals("ctime") ) { this.log( e, name, value ); statinfo.setCreationTime( parseDate(value) ); } else if ( name.equals("mtime") ) { this.log( e, name, value ); statinfo.setModificationTime( parseDate(value) ); } else if ( name.equals("uid") ) { this.log( e, name, value ); statinfo.setUID( (int) (Long.parseLong(value) & 0xFFFFFFFF) ); } else if ( name.equals("user") ) { this.log( e, name, value ); statinfo.setUser( value ); } else if ( name.equals("gid") ) { this.log( e, name, value ); statinfo.setGID( (int) (Long.parseLong(value) & 0xFFFFFFFF) ); } else if ( name.equals("group") ) { this.log( e, name, value ); statinfo.setGroup( value ); } else { this.complain( e, name, value ); } } return statinfo; } else if ( e.equals("status") ) { Status status = new Status(); for ( int i=0; iInvocationParser class. * It parses an invocation record, creates the corresponding java * objects, and generates an XML document from these objects. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * * @see InvocationParser * @see org.griphyn.vdl.invocation.Invocation */ public class IVPTest { static public void main(String[] args) throws IOException { if (args.length == 0) { System.err.println( "Usage: java IVPTest [invocationfile] ..." ); return; } // connect debug stream Logging.instance().register( "parser", System.err ); Logging.instance().register( "app", System.err ); // Logging.instance().register( "app", System.err ); InvocationParser ip = new InvocationParser( InvocationRecord.SCHEMA_LOCATION ); Writer stdout = new BufferedWriter(new OutputStreamWriter(System.out)); for (int i = 0; i < args.length; i++) { InvocationRecord invocation = ip.parse( new FileInputStream(args[i]) ); System.err.println("\nNow convert back to XML\n"); invocation.toXML( stdout, "", null ); Logging.instance().log( "app", 0, "done writing XML" ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/TransformationCatalogTextParser.java0000644000175000017500000004437411757531137031770 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.planner.parser.tokens.OpenBrace; import edu.isi.pegasus.planner.parser.tokens.TransformationCatalogReservedWord; import edu.isi.pegasus.planner.parser.tokens.Token; import edu.isi.pegasus.planner.parser.tokens.QuotedString; import edu.isi.pegasus.planner.parser.tokens.Identifier; import edu.isi.pegasus.planner.parser.tokens.CloseBrace; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.TransformationStore; import edu.isi.pegasus.planner.catalog.transformation.impl.Abstract; import java.io.File; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.io.Reader; import java.util.logging.Level; import java.util.logging.Logger; import edu.isi.pegasus.planner.classes.Profile; /** * Parses the input stream and generates the TransformationStore as output. * * This parser is able to parse the Transformation Catalog specification in the * following format * *
 * tr example::keg:1.0 {
 * 
 *  #specify profiles that apply for all the sites for the transformation
 *  #in each site entry the profile can be overridden
 *  profile env "APP_HOME" "/tmp/karan"
 *  profile env "JAVA_HOME" "/bin/java.1.5"
 * 
 *  site isi {
 *   profile env "me" "with"
 *   profile condor "more" "test"
 *   profile env "JAVA_HOME" "/bin/java.1.6"
 *   pfn "/path/to/keg"
 *   arch  "x86"
 *   os    "linux"
 *   osrelease "fc"
 *   osversion "4"
 *   type "installed"            
 *  }
 * 
 *  site wind {
 *   profile env "me" "with"
 *   profile condor "more" "test"
 *   pfn "/path/to/keg"
 *   arch  "x86"
 *   os    "linux"
 *   osrelease "fc"
 *   osversion "4"
 *   type "STAGEABLE"
 *  }
 * }

 * </pre>
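 *
 * As a usage sketch (modeled on this class's own test driver in
 * <code>main</code>; the input path is hypothetical and exception
 * handling is elided), a catalog in the above format can be parsed as:
 * <pre>
 *  Reader r = new FileReader( new File( "sample_tc.data" ) );
 *  LogManager logger = LogManagerFactory.loadSingletonInstance();
 *  TransformationCatalogTextParser parser = new TransformationCatalogTextParser( r, logger );
 *  TransformationStore store = parser.parse( true );
 * </pre>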
* * @author Karan Vahi * @author Jens Vöckler * @version $Revision: 4092 $ * * @see edu.isi.pegasus.planner.parser.TransformationCatalogTextScanner */ public class TransformationCatalogTextParser { /** * The access to the lexical scanner is stored here. */ private TransformationCatalogTextScanner mScanner = null; /** * Stores the look-ahead symbol. */ private Token mLookAhead = null; /** * The transformation to the logger used to log messages. */ private LogManager mLogger; /** * Initializes the parser with an input stream to read from. * * @param r is the stream opened for reading. * @param logger the transformation to the logger. * * @throws IOException * @throws ScannerException */ public TransformationCatalogTextParser(Reader r, LogManager logger ) throws IOException, ScannerException { mLogger = logger; mScanner = new TransformationCatalogTextScanner(r); mLookAhead = mScanner.nextToken(); } /** * Parses the complete input stream, into the PoolConfig data object that * holds the contents of all the sites referred to in the stream. * *@param modifyFileURL Boolean indicating whether to modify the file URL or not * * @return TransformationStore * * @throws IOException * @throws ScannerException * @throws Exception * @see org.griphyn.cPlanner.classes.PoolConfig */ public TransformationStore parse(boolean modifyFileURL) throws IOException, ScannerException { //to check more TransformationStore store = new TransformationStore(); try{ String transformation = null; do { if ( mLookAhead != null ) { //get the transformation/id, that is parsed differently //compared to the rest of the attributes of the site. transformation = getTransformation(); //check for any profiles that maybe specified and need to //applied for all entries related to the transformation Profiles profiles = getProfilesForTransformation(); while( !( mLookAhead instanceof CloseBrace ) ){ TransformationCatalogEntry entry = getTransformationCatalogEntry( transformation, profiles , modifyFileURL); store.addEntry( entry ); //we have information about one transformation catalog entry mLogger.log( "Transformation Catalog Entry parsed is - " + entry, LogManager.DEBUG_MESSAGE_LEVEL); } //again check for any profiles that may be associated //makes profiles overloading slightly more complicated //no need to do it //profiles.addAll( getProfilesForTransformation() ); if (! (mLookAhead instanceof CloseBrace)) { throw new ScannerException( mScanner.getLineNumber(), "expecting a closing brace"); } mLookAhead = mScanner.nextToken(); } } while ( mScanner.hasMoreTokens() ); } //we wrap all non scanner and ioexceptions as scanner exceptions catch( ScannerException e ){ throw e; } catch( IOException e ){ throw e; } catch( Exception e ){ //wrap as a scanner exception and throw throw new ScannerException( mScanner.getLineNumber(), e.getMessage() ); } return store; } /** * Remove potential leading and trainling quotes from a string. * * @param input is a string which may have leading and trailing quotes * @return a string that is either identical to the input, or a * substring thereof. */ public String niceString(String input) { // sanity if (input == null) { return input; } int l = input.length(); if (l < 2) { return input; } // check for leading/trailing quotes if (input.charAt(0) == '"' && input.charAt(l - 1) == '"') { return input.substring(1, l - 1); } else { return input; } } /** * Constructs a single transformation catalog entry and returns it. * * @param entry the TransformationCatalogEntry object that is to be populated. 
* @param profiles the profiles that apply to all the entries * @param modifyFileURL Boolean indicating whether to modify the file URL or not * @return the transformation catalog entry object. * * @throws even more mystery */ private TransformationCatalogEntry getTransformationCatalogEntry( String transformation, Profiles profiles , boolean modifyFileURL ) throws IOException, ScannerException { TransformationCatalogEntry entry = new TransformationCatalogEntry(); String site = getSite(); entry.setLogicalTransformation( transformation ); entry.setResourceId( site ); SysInfo sysinfo = new SysInfo(); Profiles p = (Profiles) profiles.clone(); while ( mLookAhead != null && ! (mLookAhead instanceof CloseBrace) ) { //populate all the rest of the attributes //associated with the transformation if (! (mLookAhead instanceof TransformationCatalogReservedWord)) { throw new ScannerException(mScanner.getLineNumber(), "expecting a reserved word describing a transformation attribute instead of "+ mLookAhead); } int word = ( (TransformationCatalogReservedWord) mLookAhead).getValue(); mLookAhead = mScanner.nextToken(); String value ; switch ( word ) { case TransformationCatalogReservedWord.ARCH: value = getQuotedValue( "arch" ); sysinfo.setArchitecture( SysInfo.Architecture.valueOf( value ) ); break; case TransformationCatalogReservedWord.OS: value = getQuotedValue( "os" ); sysinfo.setOS( SysInfo.OS.valueOf( value.toUpperCase() ) ); break; case TransformationCatalogReservedWord.OSRELEASE: value = getQuotedValue( "osrelease" ); sysinfo.setOSRelease(value); break; case TransformationCatalogReservedWord.OSVERSION: value = getQuotedValue( "osversion" ); sysinfo.setOSVersion( value ); break; case TransformationCatalogReservedWord.PFN: value = getQuotedValue( "pfn" ); entry.setPhysicalTransformation( value ); break; case TransformationCatalogReservedWord.PROFILE: p.addProfileDirectly( this.getProfile() ); break; case TransformationCatalogReservedWord.TYPE: value = getQuotedValue( "type" ); entry.setType( TCType.valueOf(value.toUpperCase()) ); break; default: throw new ScannerException(mScanner.getLineNumber(), "invalid reserved word used to configure a transformation catalog entry"); } } //System.out.println( "*** Profiles are " + p ); entry.setSysInfo( sysinfo ); //add all the profiles for the entry only if they are empty if( !p.isEmpty() ){ entry.addProfiles( p ); } if (! (mLookAhead instanceof CloseBrace)) { throw new ScannerException(mScanner.getLineNumber(), "expecting a closing brace"); } mLookAhead = mScanner.nextToken(); //modify the entry to handle for file URL's //specified for the PFN's if(modifyFileURL){ return Abstract.modifyForFileURLS( entry ); }else{ return entry; } } /** * Returns the transformation name, and moves the scanner to hold the next * TransformationCatalogReservedWord. * * @return the transformation name * * @throws plenty */ private String getTransformation() throws IOException, ScannerException { String transformation = null; if (! ( mLookAhead instanceof TransformationCatalogReservedWord ) || ( (TransformationCatalogReservedWord) mLookAhead ).getValue() != TransformationCatalogReservedWord.TRANSFORMATION ) { throw new ScannerException( mScanner.getLineNumber(), "expecting reserved word \"tr\""); } mLookAhead = mScanner.nextToken(); // proceed with next token if (! 
(mLookAhead instanceof Identifier)) { throw new ScannerException(mScanner.getLineNumber(), "expecting the transformation identifier"); } transformation = ( (Identifier) mLookAhead).getValue(); mLookAhead = mScanner.nextToken(); // proceed with next token if (! (mLookAhead instanceof OpenBrace)) { throw new ScannerException(mScanner.getLineNumber(), "expecting an opening brace"); } mLookAhead = mScanner.nextToken(); return transformation; } /** * Returns the site transformation for a site, and moves the scanner to hold the next * TransformationCatalogReservedWord. * * @return the transformation name * * @throws plenty */ private String getSite() throws IOException, ScannerException { String site = null; if (! ( mLookAhead instanceof TransformationCatalogReservedWord ) || ( (TransformationCatalogReservedWord) mLookAhead ).getValue() != TransformationCatalogReservedWord.SITE ) { throw new ScannerException( mScanner.getLineNumber(), "expecting reserved word \"site\" or closing brace"); } mLookAhead = mScanner.nextToken(); // proceed with next token if (! (mLookAhead instanceof Identifier)) { throw new ScannerException(mScanner.getLineNumber(), "expecting the site identifier"); } site = ( (Identifier) mLookAhead).getValue(); mLookAhead = mScanner.nextToken(); // proceed with next token if (! (mLookAhead instanceof OpenBrace)) { throw new ScannerException(mScanner.getLineNumber(), "expecting an opening brace"); } mLookAhead = mScanner.nextToken(); return site; } /** * Returns a list of profiles that have to be applied to the entries for * all the sites corresponding to a transformation. * * @return Profiles specified * * @throws IOException * @throws ScannerException */ private Profiles getProfilesForTransformation() throws IOException, ScannerException { Profiles profiles = new Profiles(); while( true ){ if (( mLookAhead instanceof TransformationCatalogReservedWord ) && ( (TransformationCatalogReservedWord) mLookAhead ).getValue() == TransformationCatalogReservedWord.PROFILE ) { //move cursor to next token mLookAhead = mScanner.nextToken(); profiles.addProfile( this.getProfile() ); } else{ break; } } return profiles; } /** * Parses a single line and returns a profile. * * @return Profile * @throws ScannerException */ private Profile getProfile() throws ScannerException, IOException{ Profile p = new Profile(); if( !(mLookAhead instanceof Identifier) ){ throw new ScannerException(mScanner.getLineNumber(), "the \"profile\" requires a namespace identifier as first argument"); } String namespace = ( (Identifier) mLookAhead).getValue(); mLookAhead = mScanner.nextToken(); if( !p.namespaceValid(namespace) ){ throw new ScannerException( mScanner.getLineNumber(), "Invalid namespace specified for profile " + namespace ); } // System.out.println("profile namespace="+namespace ); if (! (mLookAhead instanceof QuotedString)) { throw new ScannerException( mScanner.getLineNumber(), "the \"profile\" key needs to be quoted"); } String key = ( (QuotedString) mLookAhead).getValue(); // System.out.println("key="+((QuotedString) mLookAhead).getValue() ); mLookAhead = mScanner.nextToken(); if (! (mLookAhead instanceof QuotedString)) { throw new ScannerException(mScanner.getLineNumber(), "the \"profile\" value requires a quoted string argument"); } String value = ( (QuotedString) mLookAhead).getValue(); mLookAhead = mScanner.nextToken(); p = new Profile(namespace, niceString(key), niceString(value)); return p; } /** * Parses a quoted value and strips out the enclosing quotes. 
* * @param key the key for which we need to associated the quoted value * * @return quoted value. */ private String getQuotedValue( String key ) throws IOException { //mLookAhead = mScanner.nextToken(); //System.out.println( mLookAhead ); // System.out.println("universe="+universe ); if (! (mLookAhead instanceof QuotedString) ) { StringBuffer error = new StringBuffer(); error.append( "The " ).append( key ).append( " requires a quoted string as second argument " ); throw new ScannerException( mScanner.getLineNumber(), error.toString() ); } String value = niceString( ( (QuotedString)mLookAhead ).getValue()); mLookAhead = mScanner.nextToken(); return value; } /** * Test function. * * @param args */ public static void main( String[] args ) throws ScannerException{ try { Reader r = new FileReader(new File("/lfs1/work/pegasus-features/text-tc/sample_tc.data")); LogManager logger = LogManagerFactory.loadSingletonInstance(); logger.setLevel( LogManager.DEBUG_MESSAGE_LEVEL ); logger.logEventStart( "event.pegasus.catalog.transformation.test", "planner.version", Version.instance().toString() ); TransformationCatalogTextParser p = new TransformationCatalogTextParser( r, logger ); p.parse(true); } catch (FileNotFoundException ex) { Logger.getLogger(TransformationCatalogTextParser.class.getName()).log(Level.SEVERE, null, ex); } catch( ScannerException se ){ se.printStackTrace(); } catch( IOException ioe ){ ioe.printStackTrace(); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/ScannerException.java0000644000175000017500000000275411757531137026711 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser; import java.io.LineNumberReader; /** * This class is used to signal errors while scanning or parsing. * * * @see org.griphyn.cPlanner.classes.PoolConfigScanner * @see org.griphyn.cPlanner.classes.PoolConfigParser2 * * @author Jens Voeckler * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2175 $ */ public class ScannerException extends java.lang.RuntimeException { private int m_lineno; public ScannerException(LineNumberReader stream, String message) { super("line " + stream.getLineNumber() + ": " + message); this.m_lineno = stream.getLineNumber(); } public ScannerException(int lineno, String message) { super("line " + lineno + ": " + message); this.m_lineno = lineno; } public int getLineNumber() { return this.m_lineno; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/0000755000175000017500000000000011757531667023352 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/DAX2Metadata.java0000644000175000017500000001351311757531137026347 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.dax; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.classes.CompoundTransformation; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.dax.Invoke; import java.util.HashMap; import java.util.List; import java.util.Map; /** * A callback that causes the parser to exit after the metadata about the DAX * has been parsed. This is achieved by stopping the parsing after the * cbDocument method. * * @author Karan Vahi * @version $Revision: 314 $ */ public class DAX2Metadata implements Callback { /** * The parsing completed message. */ public static final String PARSING_DONE_ERROR_MESSAGE = "Parsing done"; /** * Default attribute value for the count attribute */ public static final String DEFAULT_ADAG_COUNT_ATTRIBUTE = "1"; /** * Default index value for the count attribute */ public static final String DEFAULT_ADAG_INDEX_ATTRIBUTE = "0"; /** * The handle to the properties object. */ private PegasusProperties mProps; /** * A flag to specify whether the graph has been generated for the partition * or not. */ private boolean mDone; /** * The metadata of the workflow. */ private Map mMetadata; /** * The overloaded constructor. * * @param properties the properties passed to the planner. * @param dax the path to the DAX file. */ public DAX2Metadata( PegasusProperties properties, String dax ) { mProps = properties; mDone = false; } /** * Callback when the opening tag was parsed. This contains all * attributes and their raw values within a map. It ends up storing * the attributes with the adag element in the internal memory structure. * * @param attributes is a map of attribute key to attribute value */ public void cbDocument(Map attributes) { mMetadata = new HashMap(); mMetadata.put( "count", attributes.containsKey( "count" ) ? (String)attributes.get( "count" ) : DEFAULT_ADAG_COUNT_ATTRIBUTE ) ; mMetadata.put( "index", attributes.containsKey( "index" ) ? (String)attributes.get( "index" ) : DEFAULT_ADAG_INDEX_ATTRIBUTE ) ; mMetadata.put( "name", (String)attributes.get( "name" ) ); mMetadata.put( "version", (String)attributes.get( "version" ) ); //call the cbDone() cbDone(); } /** * Callback when a invoke entry is encountered in the top level inside the adag element in the DAX. * * @param invoke the invoke object */ public void cbWfInvoke(Invoke invoke){ throw new UnsupportedOperationException("Not supported yet."); } /** * Callback for the job from section 2 jobs. These jobs are completely * assembled, but each is passed separately. * * @param job the Job object storing the job information * gotten from parser. */ public void cbJob( Job job ) { } /** * Callback for child and parent relationships from section 3. * * @param child is the IDREF of the child element. * @param parents is a list of IDREFs of the included parents. */ public void cbParents(String child, List parents) { } /** * Callback when the parsing of the document is done. 
It sets the flag * that the parsing has been done, that is used to determine whether the * ADag object has been fully generated or not. */ public void cbDone() { mDone = true; throw new RuntimeException( PARSING_DONE_ERROR_MESSAGE ); } /** * Returns an ADag object corresponding to the abstract plan it has generated. * It throws a runtime exception if the method is called before the object * has been created fully. * * @return ADag object containing the abstract plan referred in the dax. */ public Object getConstructedObject(){ if(!mDone) throw new RuntimeException( "Method called before the metadata was parsed" ); return mMetadata; } /** * Callback when a compound transformation is encountered in the DAX * * @param compoundTransformation the compound transforamtion */ public void cbCompoundTransformation( CompoundTransformation compoundTransformation ){ throw new UnsupportedOperationException("Not supported yet."); } /** * Callback when a replica catalog entry is encountered in the DAX * * @param rl the ReplicaLocation object */ public void cbFile( ReplicaLocation rl ){ throw new UnsupportedOperationException("Not supported yet."); } /** * Callback when a transformation catalog entry is encountered in the DAX * * @param tce the transformationc catalog entry object. */ public void cbExecutable( TransformationCatalogEntry tce ){ throw new UnsupportedOperationException("Not supported yet."); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/DAX2NewGraph.java0000644000175000017500000001374211757531137026346 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.dax; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.classes.CompoundTransformation; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.MapGraph; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.dax.Invoke; import java.util.List; import java.util.Map; /** * An exploratory implementation that builds on the DAX2Graph. * There is a graph object created that is returned. * * @author Karan Vahi * @version $Revision: 3669 $ */ public class DAX2NewGraph implements Callback { /** * The Graph instance that stores the abstract workflow as a Graph. */ protected Graph mWorkflow; /** * A flag to specify whether the graph has been generated for the partition * or not. */ protected boolean mDone; /** * The label of the abstract dax. */ protected String mLabel; /** * The handle to the properties object. */ protected PegasusProperties mProps; /** * The overloaded constructor. * * @param properties the properties passed to the planner. * @param dax the path to the DAX file. 
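 *
 * A minimal wiring sketch (a DAX parser is assumed to drive the
 * callbacks; all names used here come from this package):
 * <pre>
 *  Callback cb = new DAX2NewGraph( properties, dax );
 *  //a DAX parser then invokes cbDocument(), cbJob() and cbParents()
 *  cb.cbDone();
 *  Graph workflow = (Graph) cb.getConstructedObject();
 * </pre>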
*/ public DAX2NewGraph( PegasusProperties properties, String dax ){ mProps = properties; mWorkflow = new MapGraph(); mDone = false; mLabel = null; } /** * Returns the workflow represented in the Graph form. * * * @return Graph containing the abstract workflow referred * in the dax. */ public Object getConstructedObject() { if(!mDone) throw new RuntimeException("Method called before the abstract dag " + " for the partition was fully generated"); return mWorkflow; } /** * Callback when the opening tag was parsed. This contains all * attributes and their raw values within a map. It ends up storing * the attributes with the adag element in the internal memory structure. * * @param attributes is a map of attribute key to attribute value */ public void cbDocument(Map attributes) { /**@todo Implement this org.griphyn.cPlanner.parser.Callback method*/ if( attributes == null || (mLabel = (String)attributes.get("name")) == null){ mLabel = "test"; } } /** * Callback when a invoke entry is encountered in the top level inside the adag element in the DAX. * * @param invoke the invoke object */ public void cbWfInvoke(Invoke invoke){ throw new UnsupportedOperationException("Not supported yet."); } /** * This constructs a graph node for the job and ends up storing it in the * internal map. * * @param job the job that was parsed. */ public void cbJob( Job job ) { //populate the job as a node in the graph GraphNode node = new GraphNode( job.getID(), job ); mWorkflow.addNode( node ); } /** * This updates the internal graph nodes of child with references to it's * parents referred to by the list of parents passed. It gets the handle * to the parents graph nodes from it's internal map. * * @param child the logical id of the child node. * @param parents list containing the logical id's of the parents of the * child nodes. */ public void cbParents( String child, List parents ) { mWorkflow.addEdges( child, parents ); } /** * Returns the name of the dax. */ public String getNameOfDAX(){ return mLabel; } /** * Callback to signal that traversal of the DAX is complete. At this point a * dummy root node is added to the graph, that is the parents to all the root * nodes in the existing DAX. */ public void cbDone() { //the abstract graph is fully generated mDone = true; } /** * Returns the GraphNode of the corresponding id. * * @param key the id of the node. * * @return GraphNode. */ public GraphNode get( String key ){ return mWorkflow.getNode( key ); } /** * Callback when a compound transformation is encountered in the DAX * * @param compoundTransformation the compound transforamtion */ public void cbCompoundTransformation( CompoundTransformation compoundTransformation ){ throw new UnsupportedOperationException("Not supported yet."); } /** * Callback when a replica catalog entry is encountered in the DAX * * @param rl the ReplicaLocation object */ public void cbFile( ReplicaLocation rl ){ throw new UnsupportedOperationException("Not supported yet."); } /** * Callback when a transformation catalog entry is encountered in the DAX * * @param tce the transformationc catalog entry object. 
*/ public void cbExecutable( TransformationCatalogEntry tce ){ throw new UnsupportedOperationException("Not supported yet."); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/DAXParser.java0000644000175000017500000000220711757531137025777 0ustar ryngerynge/** * Copyright 2007-2011 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.dax; import edu.isi.pegasus.planner.parser.dax.Callback; /** * An interface for all the DAX Parsers * * @author Karan Vahi * @version $Revision: 2647 $ */ public interface DAXParser { /** * Set the DAXCallback for the parser to call out to. * * @param c the callback */ public void setDAXCallback( Callback c ); /** * Retuns the DAXCallback for the parser * * @return the callback */ public Callback getDAXCallback( ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/DAXParser2.java0000644000175000017500000014416011757531137026066 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.parser.dax; import edu.isi.pegasus.planner.parser.*; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.PCRelation; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.namespace.Hints; import edu.isi.pegasus.planner.namespace.Namespace; import edu.isi.pegasus.planner.parser.dax.Callback; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.Arch; import edu.isi.pegasus.planner.catalog.transformation.classes.Os; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.StringTokenizer; import java.util.Set; import java.util.HashSet; import java.util.Iterator; import java.util.Map; import edu.isi.pegasus.planner.classes.DAGJob; import edu.isi.pegasus.planner.classes.DAXJob; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.namespace.Pegasus; /** * This class parses the XML file whichis generated by Abstract Planner and ends * up making an ADag object which contains theinformation to make the Condor * submit files. The parser used to parse the file is Xerces. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4559 $ * * @see org.griphyn.cPlanner.classes.Job * @see org.griphyn.cPlanner.classes.DagInfo * @see org.griphyn.cPlanner.classes.ADag * @see org.griphyn.cPlanner.classes.PCRelation */ public class DAXParser2 extends Parser implements DAXParser { /** * The "not-so-official" location URL of the DAX schema definition. */ public static final String SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/dax-3.0.xsd"; /** * URI namespace */ public static final String SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/DAX"; /** * The constant designating the version when the double negative transfer * and registration flags were removed. */ private static final String DAX_VERSION_WITHOUT_DOUBLE_NEGATIVE = "2.1"; /** * The map that maps keys in execution tag to hints namespace. */ private static Map mExecutionToHintsNamespace ; /** * Maps the execution key to corresponding key in hints namespace. * * @param key the key in execution tag * * @return corresponding key in hints namespace else null */ private static String executionToHintsNamespace( String key ){ if ( mExecutionToHintsNamespace == null ){ mExecutionToHintsNamespace = new HashMap(); mExecutionToHintsNamespace.put( "site", Hints.EXECUTION_POOL_KEY ); mExecutionToHintsNamespace.put( "executable", Hints.PFN_HINT_KEY ); } return mExecutionToHintsNamespace.get( key ); } public String mDaxSchemaVersion; /** * A boolean variable set to true when we have got all the logical filenames. 
* After this all the filename tags are not added in Vector mLogicalFilesInADag * This is because the DAX file specifies all the input and output files * in the starting, and then in the job tags also the filename tags are nested. */ private boolean infoAboutAllFilesRecv = false; /** * The handle to the class implementing the callback interface. */ private Callback mCallback; /** * For holding the key attribute in profile tag. */ private String mProfileKey = new String(); /** * For holding the namespace if specified in the Profile Element. */ private String mNamespace = new String(); /** * Set as and when Profile and Argument tags are started and ended. * Need to in order to determine the nested filename tags which may appear in * these elements. */ private boolean mProfileTag = false; private boolean mArgumentTag = false; /** * These store the current child element for the child parent relationship. * We get nested parent elements in a child element. Hence the child remains * the same while the parent id for the relationship varies. */ private String mCurrentChildId = new String(); /** * The list of parents of a node referred to by mCurrentChildId. */ private List mParents; /** * Holds information regarding the current job being parsed. It's scope can * be seen as the job element. */ private Job mCurrentJobSubInfo = new Job(); /** * All the arguments to a particular job. */ private String mWholeCommandString = new String(); /** * Holds the input files for a particular job making the aDag. They are Vector * of PegasusFile Objects which store the transiency information of each * logical file. * * @see org.griphyn.cPlanner.classes.PegasusFile */ private Set mVJobInpFiles = new HashSet(); /** * Holds the output files for a particular job making the aDag. * They are vector of PegasusFile Objects which store the transiency * information of each logical file. * * @see org.griphyn.cPlanner.classes.PegasusFile */ private Set mVJobOutFiles = new HashSet(); /** * A boolean indicating whether to use the double negative flags for * transfer and register or not. */ private boolean mUseDoubleNegative; /** * The job prefix that needs to be applied to the job file basenames. */ protected String mJobPrefix; /** * The uses link type for a file when uses tag is parsed. */ private String mUsesLinkType; /** * The PegasusFile object that a uses tag corresponds to. */ private PegasusFile mUsesPegasusFile; /** * The file attribute encountered in the dax element. */ private String mDAXLFN; /** * The file attribute encountered in the dag element. */ private String mDAGLFN; /** * The default constructor * * @param bag the bag of objects that is useful for initialization. */ public DAXParser2( PegasusBag bag ) { //default constructor super( bag ); mUseDoubleNegative = false; mJobPrefix = ( bag.getPlannerOptions() == null ) ? null: bag.getPlannerOptions().getJobnamePrefix(); } /** * Set the DAXCallback for the parser to call out to. * * @param c the callback */ public void setDAXCallback( Callback c ){ this.mCallback = c; } /** * Retuns the DAXCallback for the parser * * @return the callback */ public Callback getDAXCallback( ){ return this.mCallback; } /** * This starts the parsing of the file by the parser. * * @param daxFileName the path/uri to the XML file you want to parse. 
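 *
 * An invocation sketch (here <code>bag</code> and <code>properties</code>
 * stand for a populated PegasusBag and PegasusProperties; the choice of
 * callback is illustrative only):
 * <pre>
 *  DAXParser2 parser = new DAXParser2( bag );
 *  parser.setDAXCallback( new DAX2NewGraph( properties, daxFileName ) );
 *  parser.startParser( daxFileName );
 *  Object result = parser.getDAXCallback().getConstructedObject();
 * </pre>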
*/ public void startParser(String daxFileName) { try{ this.testForFile(daxFileName); } catch( Exception e){ throw new RuntimeException( e ); } //try to get the version number //of the dax mDaxSchemaVersion = getVersionOfDAX( daxFileName ); mLogger.log( "DAXParser2 Version of DAX as picked up from the DAX " + mDaxSchemaVersion, LogManager.DEBUG_MESSAGE_LEVEL ); String schemaLoc = getSchemaLocation(); mLogger.log( "DAXParser2 Picking schema for DAX " + schemaLoc, LogManager.DEBUG_MESSAGE_LEVEL ); String list = DAXParser2.SCHEMA_NAMESPACE + " " + schemaLoc; setSchemaLocations(list); //figure out whether to pick up the double negative flags or not mUseDoubleNegative = useDoubleNegative( mDaxSchemaVersion ); mLogger.log( "DAXParser2 Picking up the dontTransfer and dontRegister flags " + mUseDoubleNegative, LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_PARSE_DAX, LoggingKeys.DAX_ID, daxFileName ); mCurrentJobSubInfo.condorUniverse = GridGateway.JOB_TYPE.compute.toString(); //default value try { mParser.parse(daxFileName); } catch (Exception e) { //if a locator error then String message = (mLocator == null) ? "DAXParser2 While parsing the file " + daxFileName: "DAXParser2 While parsing file " + mLocator.getSystemId() + " at line " + mLocator.getLineNumber() + " at column " + mLocator.getColumnNumber(); throw new RuntimeException(message, e); } mLogger.logEventCompletion(); } /** * Overriding the empty implementation provided by * DefaultHandler of ContentHandler. This receives the notification * from the sacks parser when start tag of an element comes */ public void startElement(String uri, String local, String raw, Attributes attrs) throws SAXException { //setting the command line option only if textContent > 0 if (mTextContent.length() > 0) { mWholeCommandString = mWholeCommandString.concat(new String( mTextContent)); //System.out.println("\n Text Content is:" + new String(mTextContent)); //resetting the buffer mTextContent.setLength(0); } local = local.trim(); //dealing with ADag tag if (local.equalsIgnoreCase("adag")) { handleAdagTagStart(local, attrs); } //deal with execution tag dax 3.0 else if ( local.equalsIgnoreCase( "execution" ) ){ handleExecutionTagStart( local, attrs ); } //dealing with filename tags else if (local.equalsIgnoreCase("filename")) { handleFilenameTagStart(local, attrs); } //dealing with the uses tag July 18 else if (local.equalsIgnoreCase("uses")) { handleUsesTagStart(local, attrs); } //dealing with pfn tag in the uses tag // for dax 3.0 schema else if ( local.equalsIgnoreCase( "pfn" ) ){ handleUsesPFNTagStart( local, attrs ); } //dealing with the job tags else if (local.equalsIgnoreCase("job")) { handleJobTagStart(local, attrs); } //dealing with the dax tags for DAX 3.0 else if (local.equalsIgnoreCase("dax")) { handleDAXTagStart(local, attrs); } //dealing with the dag tags for DAX 3.0 else if (local.equalsIgnoreCase("dag")) { handleDAGTagStart(local, attrs); } //dealing with metadata tags for DAX 3.0 else if (local.equalsIgnoreCase("metadata")) { handleMetadataTagStart(local, attrs); } //dealing with profile tag else if (local.equalsIgnoreCase("profile")) { handleProfileTagStart(local, attrs); } //dealing with the making of parent child relationship pairs else if (local.equalsIgnoreCase("child")) { handleChildTagStart(local, attrs); } else if (local.equalsIgnoreCase("parent")) { handleParentTagStart(local, attrs); } //dealing with the start of argument tag else if (local.equalsIgnoreCase("argument")) { handleArgumentTagStart(local, 
attrs); } //dealing with stdout for current job else if (local.equalsIgnoreCase("stdout")) { handleStdoutTagStart(local, attrs); } //dealing with stdin for current job else if (local.equalsIgnoreCase("stdin")) { handleStdinTagStart(local, attrs); } //dealing with stderr for current job else if (local.equalsIgnoreCase("stderr")) { handleStderrTagStart(local, attrs); } } /** * A convenience method that tries to determine the version of the dax * schema by reading ahead in the DAX file, and searching for * the version attribute in the file. * * @param file the name of the dax file. */ public String getVersionOfDAX(String file){ String schema = getSchemaOfDocument(file); return extractVersionFromSchema(schema); } /** * Determines the version of the DAX as specified in a schema string. * * @param schema the schema string as specified in the root element of * the DAX. * * @return the version. */ public String extractVersionFromSchema(String schema){ String token = null; String version = null; if(schema == null) return null; StringTokenizer st = new StringTokenizer(schema); while(st.hasMoreTokens()){ token = st.nextToken(); if(token.endsWith(".xsd")){ //we got our match String name = new File(token).getName(); int p1 = name.indexOf("dax-"); int p2 = name.lastIndexOf(".xsd"); //extract the version number version = (( p1 > -1) && (p2 > -1))?name.substring(p1+4,p2):null; return version; } } mLogger.log("Could not find the version number in DAX schema name", LogManager.WARNING_MESSAGE_LEVEL); return version; } /** * A convenience method that tries to get the name of the schema the document * refers to. It returns the value of the xsi:schemaLocation. * * @param file the name of the dax file. */ public String getSchemaOfDocument(String file){ String value = null; try{ BufferedReader in = new BufferedReader(new FileReader(file)); String line = null; int p1 , p2 , c = 0; while ( (line = in.readLine()) != null) { line = line.trim(); if(c == 0){ //try to check if it is an xml file if ( ( (p1 = line.indexOf("<?xml")) > -1) && ( (p2 = line.indexOf("?>", p1)) > -1) ) { //xml file is valid. c++; } else{ //throw an exception throw new java.lang.RuntimeException("Dax File is not xml " + file); } } else{ //search for the xsi:schemaLocation attribute //and pick up the first quoted value that follows it if( (p1 = line.indexOf("xsi:schemaLocation")) > -1){ p1 = line.indexOf('"', p1); p2 = (p1 > -1)? line.indexOf('"', p1 + 1): -1; if( p2 > -1 ){ value = line.substring(p1 + 1, p2).trim(); in.close(); return value; } } } } in.close(); } catch(java.io.IOException ioe){ mLogger.log("Unable to read the dax file " + file + " " + ioe.getMessage(), LogManager.WARNING_MESSAGE_LEVEL); } return value; } /** * Invoked when the starting of the adag element is got. All the attributes * and their values in the element are copied into a map, that is passed * to the callback. * * @param local the local name of the element * @param attrs the attributes */ private void handleAdagTagStart(String local, Attributes attrs) { //store the attributes and their values in a map Map mp = new HashMap(); for ( int i=0; i < attrs.getLength(); i++ ){ String name = attrs.getLocalName(i); String value = attrs.getValue(i); mp.put( name, value ); mLogger.log( name + " --> " + value, LogManager.DEBUG_MESSAGE_LEVEL ); } //call the callback interface mCallback.cbDocument(mp); } /** * Replaces the keys associated with the execution tag, with the corresponding * keys in the hints profile namespace * * @param local the local name of the element * @param attrs the attributes */ private void handleExecutionTagStart( String local, Attributes attrs ) { String key = attrs.getValue("key"); mProfileKey = executionToHintsNamespace( key ); if( mProfileKey == null ){ throw new RuntimeException( "Invalid key associated with execution tag " + key ); } mNamespace = Hints.NAMESPACE_NAME; mProfileTag = true; } /** * Invoked when the starting of the filename element is got. */ private void handleFilenameTagStart(String local, Attributes attrs) { String linkType = new String(); //holds the link info about a logical file corr to a job String fileName = new String(); String isTemp = new String(); fileName = attrs.getValue("", "file").trim(); PegasusFile pf = new PegasusFile(fileName); if (!infoAboutAllFilesRecv) { //this means we are dealing with filename tags in //the starting of the dax.
These tags //contain the linkage information //logicalFilesInADag.addElement(fileName); } else if (mArgumentTag) { //means that the filename tag is nested in //an argument tag. Since dax 1.6 //no linkage information comes //in this. mWholeCommandString = mAdjFName? //as per the default behaviour adding //a whitespace between two adjacent //filename tags mWholeCommandString + " " + fileName: //else doing a simple concatenation mWholeCommandString + fileName; mAdjFName = true; } //dealing with profile tags else if (mProfileTag) { //means that filename tag is nested in a profile tag fileName = attrs.getValue("", "file"); //an extra check if (mNamespace.equalsIgnoreCase("env")) { //namespace class member variables removed //mEnvNS.checkKeyInNS(mProfileKey,fileName); mCurrentJobSubInfo.envVariables.checkKeyInNS( mProfileKey,fileName ); } } } //end of dealing with fileName tags in argument tag /** * Metadata parsing is ignored for time being. * * @param local * @param attrs */ private void handleMetadataTagStart(String local, Attributes attrs) { mLogger.log( "metadata element parsing is ignored for job " + mCurrentJobSubInfo.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); } /** * Resets the text content buffer */ private void handleMetadataTagEnd() { //reset buffer mTextContent.setLength( 0 ); } /** * Invoked when the starting of the uses element is got. Uses tag is used to * denote all the files a particular job uses, be it as input , output or * silent. */ private void handleUsesTagStart(String local, Attributes attrs) { String fileName = attrs.getValue("", "file"); String linkType = attrs.getValue("", "link"); String isTemp = attrs.getValue("", "isTemporary"); String type = attrs.getValue("", "type"); String size = attrs.getValue("", "size" ); mUsesLinkType = linkType; //since dax 1.6, the isTemporary //is broken into two transient //attributes dontTransfer and dontRegister //pick up the registration flag boolean register = ( mUseDoubleNegative )? //pick up the dR flag !new Boolean( attrs.getValue( "", "dontRegister" ) ).booleanValue(): //pick up the register flag new Boolean( attrs.getValue( "", "register" ) ).booleanValue(); //boolean dontRegister = new Boolean(attrs.getValue("","dontRegister")).booleanValue(); //notion of optional file since dax 1.8 boolean optionalFile = new Boolean( attrs.getValue( "", "optional" ) ).booleanValue(); //value of dontTransfer is tri state (true,false,optional) since dax 1.7 String transfer = ( mUseDoubleNegative )? attrs.getValue( "", "dontTransfer" ): attrs.getValue( "" , "transfer" ); //String dontTransfer = attrs.getValue("","dontTransfer"); PegasusFile pf = new PegasusFile(fileName); //handling the transient file feature if (isTemp != null) { //this for dax 1.5 handling boolean temp = new Boolean(isTemp.trim()).booleanValue(); if (temp) { //set the transient flags pf.setTransferFlag(PegasusFile.TRANSFER_NOT); register = false; } } else{ //set the transfer mode for the file //for dax 1.5 onwards pf.setTransferFlag( transfer, mUseDoubleNegative ); } //handling the dR flag // if( !register ){ // pf.setTransientRegFlag(); // } pf.setRegisterFlag( register ); //handling the optional attribute if(optionalFile) pf.setFileOptional(); //handle type of file if( type != null ) pf.setType( type ); //handle the size of file pf.setSize( size ); //store for later reference in endUses method mUsesPegasusFile = pf; } /** * Handles the end of a uses tag. 
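 *
 * For example, a uses element of the form
 * <pre>
 *  &lt;uses file="f.b" link="output" register="false" transfer="true"/&gt;
 * </pre>
 * ends up in the job's set of output files with its register flag cleared
 * (for schema versions that carry the positive register/transfer flags).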
*/ private void handleUsesTagEnd(){ if ( mUsesLinkType.trim().equalsIgnoreCase("input")) { mVJobInpFiles.add( mUsesPegasusFile ); } else if ( mUsesLinkType.trim().equalsIgnoreCase("output")) { mVJobOutFiles.add( mUsesPegasusFile ); //the notion of an optional file as an output would mean it //has the optional transfer flag set. if( mUsesPegasusFile.fileOptional() && mUsesPegasusFile.getTransferFlag() == PegasusFile.TRANSFER_MANDATORY){ //update the transfer flag to optional mUsesPegasusFile.setTransferFlag(PegasusFile.TRANSFER_OPTIONAL); } } else if ( mUsesLinkType.trim().equalsIgnoreCase("inout")) { mVJobInpFiles.add( mUsesPegasusFile ); mVJobOutFiles.add( mUsesPegasusFile ); } //reset the tracking variables mUsesPegasusFile = null; mUsesLinkType = null; } /** * Invoked when start of the pfn element nested in uses element is encountered * * @param local the local name of the element * @param attrs the map of attributes and values in the element tag */ private void handleUsesPFNTagStart( String local, Attributes attrs ){ String url = attrs.getValue( "", "url" ); String site = attrs.getValue( "", "site" ); //convert the existing PegasusFile object to it's physical //mapping , FileTransfer object /* FileTransfer ft = new FileTransfer( mUsesPegasusFile ); //the linkage type determines whether it is input or source if ( mUsesLinkType.trim().equalsIgnoreCase("input")) { ft.addSource( site, url ); } else{ ft.addDestination( site, url ); } mUsesPegasusFile = ft; */ if ( mUsesLinkType.trim().equalsIgnoreCase("input")) { //create a new replica catalog entry //only for input files. we dont care about output file pfn's ReplicaLocation rl = new ReplicaLocation( ); rl.setLFN( this.mUsesPegasusFile.getLFN() ); ReplicaCatalogEntry rce = new ReplicaCatalogEntry( ); //site = ( site == null || site.length() == 0 )? "unknown" : site; rce.setResourceHandle( site ); rce.setPFN( url ); rl.addPFN( rce ); this.mCallback.cbFile(rl); } } /** * Invoked when the starting of the dax element is retrieved. The * DAG element extends on the job element. * * * @param local the local name of the element * @param attrs the attributes */ private void handleDAGTagStart( String local, Attributes attrs ) { mCurrentJobSubInfo = new DAGJob(); //the job should be tagged type pegasus //the job should always execute on local site //for time being mCurrentJobSubInfo.hints.construct( Hints.EXECUTION_POOL_KEY, "local" ); //also set the executable to be used mCurrentJobSubInfo.hints.construct( Hints.PFN_HINT_KEY, "/opt/condor/bin/condor-dagman" ); //retrieve the extra attribute about the DAX mDAGLFN = attrs.getValue("", "file"); ((DAGJob)mCurrentJobSubInfo).setDAGLFN( mDAGLFN ); //add default name and namespace information mCurrentJobSubInfo.setTransformation( "condor", "dagman", null ); mCurrentJobSubInfo.setDerivation( "condor", "dagman", null ); mCurrentJobSubInfo.level = (attrs.getValue("","level") == null) ? -1: Integer.parseInt(attrs.getValue("","level")); mCurrentJobSubInfo.setLogicalID( attrs.getValue("", "id") ); mCurrentJobSubInfo.vdsNS.construct( Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[GridStartFactory.NO_GRIDSTART_INDEX] ); handleJobTagStart( mCurrentJobSubInfo ); mCurrentJobSubInfo.setName( ((DAGJob)mCurrentJobSubInfo).generateName( this.mJobPrefix) ); } /** * Invoked when the starting of the dax element is retrieved. The * DAX element is a extends on the job element. 
* * * @param local the local name of the element * @param attrs the attributes */ private void handleDAXTagStart( String local, Attributes attrs ) { mCurrentJobSubInfo = new DAXJob(); //the job should be tagged type pegasus mCurrentJobSubInfo.setTypeRecursive(); //the job should always execute on local site //for time being mCurrentJobSubInfo.hints.construct( Hints.EXECUTION_POOL_KEY, "local" ); //also set the executable to be used mCurrentJobSubInfo.hints.construct( Hints.PFN_HINT_KEY, "/tmp/pegasus-plan" ); //retrieve the extra attribute about the DAX mDAXLFN = attrs.getValue("", "file"); ((DAXJob)mCurrentJobSubInfo).setDAXLFN( mDAXLFN ); //add default name and namespace information mCurrentJobSubInfo.setTransformation( "pegasus", "pegasus-plan", Version.instance().toString() ); mCurrentJobSubInfo.setDerivation( "pegasus", "pegasus-plan", Version.instance().toString() ); mCurrentJobSubInfo.level = (attrs.getValue("","level") == null) ? -1: Integer.parseInt(attrs.getValue("","level")); mCurrentJobSubInfo.setLogicalID( attrs.getValue("", "id") ); handleJobTagStart( mCurrentJobSubInfo ); mCurrentJobSubInfo.setName( ((DAXJob)mCurrentJobSubInfo).generateName( this.mJobPrefix) ); } /** * Invoked when the starting of the job element is got. The following * information is retrieved from the tag * * name : name of the job, which is the logical name of the job. * namespace : the namespace with which the transformation corresponding to * the job is associated. * version : the version of the transformation. * * @param local the local name of the element * @param attrs the attributes */ private void handleJobTagStart( String local, Attributes attrs ) { mCurrentJobSubInfo = new Job(); mCurrentJobSubInfo.namespace = attrs.getValue("", "namespace"); mCurrentJobSubInfo.logicalName = attrs.getValue("", "name"); mCurrentJobSubInfo.version = attrs.getValue("", "version"); mCurrentJobSubInfo.dvName = attrs.getValue("", "dv-name"); mCurrentJobSubInfo.dvNamespace = attrs.getValue("","dv-namespace"); mCurrentJobSubInfo.dvVersion = attrs.getValue("","dv-version"); mCurrentJobSubInfo.level = (attrs.getValue("","level") == null) ? -1: Integer.parseInt(attrs.getValue("","level")); mCurrentJobSubInfo.setLogicalID( attrs.getValue("", "id") ); mCurrentJobSubInfo.setRuntime( attrs.getValue("","runtime") ); handleJobTagStart( mCurrentJobSubInfo ); } /** * Invoked when the starting of the job element is got. The following * information is retrieved from the tag * * name : name of the job, which is the logical name of the job. * namespace : the namespace with which the transformation corresponding to * the job is associated. * version : the version of the transformation. * * @param job the Job object */ private void handleJobTagStart( Job job ) { String jobId = job.getLogicalID(); job.condorUniverse = GridGateway.JOB_TYPE.compute.toString(); infoAboutAllFilesRecv = true; mLogger.log( "Parsing job with logical id " + job.getLogicalID(), LogManager.DEBUG_MESSAGE_LEVEL ); //mvJobIds.addElement(jobId); String jobName = job.logicalName; //construct the jobname/primary key for job StringBuffer name = new StringBuffer(); //prepend a job prefix to job if required if( mJobPrefix != null ){ name.append( mJobPrefix ); } //append the name and id recevied from dax name.append( jobName ); name.append( "_" ); name.append( jobId ); job.setName( name.toString() ); } /** * Invoked when the end of the dag tag is reached. * * It removes the dag file referred in the element. 
*/ private void handleDAGTagEnd() { /* //Moved to Transfer Engine String dag = null; //go through all the job input files //and set transfer flag to false for( Iterator it = mVJobInpFiles.iterator(); it.hasNext(); ){ PegasusFile pf = it.next(); if( pf.getLFN().equals( mDAGLFN )){ //retrieve the source url if ( pf instanceof FileTransfer ){ dag = ((FileTransfer)pf).getSourceURL().getValue(); } //at the moment dax files are not staged in. //remove from input set of files it.remove(); } } if( dag == null ){ throw new RuntimeException( "Path to DAG file not specified in DAX for job " + mCurrentJobSubInfo.getLogicalID() ); } ((DAGJob)mCurrentJobSubInfo).setDAGFile( dag ); //set the directory if specified ((DAGJob)mCurrentJobSubInfo).setDirectory( (String)mCurrentJobSubInfo.dagmanVariables.removeKey( Dagman.DIRECTORY_EXTERNAL_KEY )); */ handleJobTagEnd(); } /** * Invoked when the end of the job tag is reached. */ private void handleDAXTagEnd() { /* //Moved to Transfer Engine //determine the dax input file and specify //the path in the argument string for now. String dax = mDAXLFN; for( Iterator it = mVJobInpFiles.iterator(); it.hasNext(); ){ PegasusFile pf = it.next(); if( pf.getLFN().equals( mDAXLFN )){ //retrieve the source url if ( pf instanceof FileTransfer ){ dax = ((FileTransfer)pf).getSourceURL().getValue(); } //at the moment dax files are not staged in. //remove from input set of files it.remove(); } } //add the dax to the argument StringBuffer arguments = new StringBuffer(); arguments.append( mCurrentJobSubInfo.getArguments() ). append( " --dax ").append( dax ); mCurrentJobSubInfo.setArguments( arguments.toString() ); */ handleJobTagEnd(); } /** * Invoked when the end of the job tag is reached. */ private void handleJobTagEnd() { //adding the information about the job to mCurrentJobSubInfo mCurrentJobSubInfo.setInputFiles( mVJobInpFiles ); mCurrentJobSubInfo.setOutputFiles( mVJobOutFiles ); //The job id for the compute jobs //is the name of the job itself. //All the jobs in the DAX are //compute jobs mCurrentJobSubInfo.jobClass = Job.COMPUTE_JOB; mCurrentJobSubInfo.jobID = mCurrentJobSubInfo.jobName; //send the job to the appropriate callback implementing class mCallback.cbJob(mCurrentJobSubInfo); mVJobInpFiles = new HashSet(); mVJobOutFiles = new HashSet(); } /** * Invoked when the starting of the profile element is got. */ private void handleProfileTagStart(String local, Attributes attrs) { mProfileKey = attrs.getValue("key"); mNamespace = attrs.getValue("namespace"); mProfileTag = true; } /** * Invoked when the end of the execution element is reached. */ private void handleExecutionTagEnd() { handleProfileTagEnd(); //check if we an executable path is specified if( this.mCurrentJobSubInfo.hints.containsKey( Hints.PFN_HINT_KEY ) ){ TransformationCatalogEntry entry = this.constructTCEntryFromJobHints(mCurrentJobSubInfo); this.mCallback.cbExecutable( entry ); } } /** * Constructs a TC entry object from the contents of a job. * The architecture assigned to this entry is default ( INTEL32::LINUX ) * and resource id is set to unknown. 
* * @param job the job object * * @return constructed TransformationCatalogEntry */ private TransformationCatalogEntry constructTCEntryFromJobHints( Job job ){ String executable = (String) job.hints.get( Hints.PFN_HINT_KEY ); TransformationCatalogEntry entry = new TransformationCatalogEntry(); entry.setLogicalTransformation(job.getTXNamespace(), job.getTXName(), job.getTXVersion()); entry.setResourceId( "unknown" ); entry.setVDSSysInfo( new VDSSysInfo( Arch.INTEL64, Os.LINUX, "", "" ) ); entry.setPhysicalTransformation( executable ); //hack to determine whether an executable is //installed or static binary entry.setType( executable.startsWith("/") ? TCType.INSTALLED : TCType.STAGEABLE ); return entry; } /** * Invoked when the end of the profile element is got. * * Here we handle all the namespaces supported by Chimera at present. */ private void handleProfileTagEnd() { mProfileTag = false; //setting the command line option only if textContent > 0 if (mTextContent.length() > 0) { //check if namespace is valid mNamespace = mNamespace.toLowerCase(); if(!Namespace.isNamespaceValid(mNamespace)){ //reset buffer mTextContent.setLength( 0 ); mLogger.log("Namespace specified in the DAX not supported. ignoring "+ mNamespace, LogManager.WARNING_MESSAGE_LEVEL); return; } String value = mTextContent.toString().trim(); switch(mNamespace.charAt(0)){ case 'c'://condor mCurrentJobSubInfo.condorVariables.checkKeyInNS( mProfileKey, value ); break; case 'd'://dagman mCurrentJobSubInfo.dagmanVariables.checkKeyInNS(mProfileKey, value ); break; case 'e'://env mCurrentJobSubInfo.envVariables.checkKeyInNS(mProfileKey, value ); break; case 'g'://globus mCurrentJobSubInfo.globusRSL.checkKeyInNS(mProfileKey, value ); break; case 'h'://hint mCurrentJobSubInfo.hints.checkKeyInNS(mProfileKey, value ); break; case 'p'://pegasus mCurrentJobSubInfo.vdsNS.checkKeyInNS(mProfileKey, value ); break; default: //ignore should not come here ever. mLogger.log("Namespace not supported. ignoring "+ mNamespace, LogManager.WARNING_MESSAGE_LEVEL); break; } //resetting the buffer mTextContent.setLength(0); mProfileKey = ""; mNamespace = ""; } } /** * Invoked when the starting of the child element is got. The child element * gives us the child of an edge of the dag. The edge being parent->child. */ private void handleChildTagStart(String local, Attributes attrs) { mCurrentChildId = ""; mCurrentChildId = attrs.getValue("", "ref"); mParents = new LinkedList(); } /** * This passes the child and it's parents list to the callback object. */ private void handleChildTagEnd(){ //String childName = lookupName(mCurrentChildId); mCallback.cbParents(mCurrentChildId,mParents); } /** * Invoked when the starting of the parent element is got. The child element * gives us the child of an edge of the dag. The edge being parent->child. */ private void handleParentTagStart(String local, Attributes attrs) { //stores the child parent Relation String parentId = attrs.getValue("", "ref"); //looking up the parent name //parentName = lookupName(parentId); // mParents.add(parentId); mParents.add( new PCRelation( parentId, this.mCurrentChildId )); } /** * Invoked when the starting of the Argument Tag is reached. Just set a * boolean variable */ private void handleArgumentTagStart(String local, Attributes attrs) { //setting the boolean variable. mArgumentTag = true; //set the adjacency flag for //adjacent filename to false mAdjFName = false; } /** * Invoked when the end of the Argument Tag is reached. 
* * The buffers are reset. */ private void handleArgumentTagEnd() { mArgumentTag = false; mWholeCommandString = mWholeCommandString.concat(new String( mTextContent)); mWholeCommandString = this.ignoreWhitespace(mWholeCommandString); //adding the command string mCurrentJobSubInfo.strargs = new String(mWholeCommandString); //resetting mWholeCommandString mWholeCommandString = ""; //resetting the buffer mTextContent.setLength(0); //System.out.println( "Argument is " + mCurrentJobSubInfo.getArguments() ); } /** * Our own implementation for ignorable whitespace. A String that holds the * contents of data passed as text by the underlying parser. The whitespaces * at the end are replaced by one whitespace. * * @param str The string that contains whitespaces. * * @return String corresponding to the trimmed version. * */ public String ignoreWhitespace(String str){ return ignoreWhitespace( str, mProps.preserveParserLineBreaks() ); } /** * Invoked when the start of the stdout tag is reached. * Used to specify the stdout of the application by the user. It can also * be a file. */ private void handleStdoutTagStart(String local, Attributes attrs) { mCurrentJobSubInfo.stdOut = attrs.getValue("", "file"); } /** * Invoked when the start of the stdin tag is reached. * Used to specify the stdin of the application by the user. It can also * be a file. */ private void handleStdinTagStart(String local, Attributes attrs) { mCurrentJobSubInfo.stdIn = attrs.getValue("", "file"); } /** * Invoked when the start of the stderr tag is reached. * Used to specify the stderr of the application by the user. It can also * be a file. */ private void handleStderrTagStart(String local, Attributes attrs) { mCurrentJobSubInfo.stdErr = attrs.getValue("", "file"); } /** * Overrides the default implementation for when an element's end tag is * reached. This method is called automatically by the SAX parser when the * end tag of an element is encountered in the XML file. */ public void endElement(String uri, String localName, String qName) { /*System.out.println("element end tag ---------"); System.out.println("line number "+ locator.getLineNumber()); System.out.println("URI: "+ uri); System.out.println("local name " + localName); System.out.println("qname: "+qName);*/ boolean temp = true; String universe = GridGateway.JOB_TYPE.compute.toString(); //by default jobs are vanilla //when we get the end tag of argument, we reset the current command options if (localName.equals("argument")) { // || localName.trim().equalsIgnoreCase("job")){ handleArgumentTagEnd(); } else if (localName.equals( "execution" )) { handleExecutionTagEnd(); } else if (localName.equals("job")) { handleJobTagEnd(); } else if (localName.equals("dax")) { handleDAXTagEnd(); } else if (localName.equals("dag")) { handleDAGTagEnd(); } else if (localName.equals("metadata")) { handleMetadataTagEnd(); } else if (localName.equals("profile")) { handleProfileTagEnd(); } else if (localName.equals("child")) { handleChildTagEnd(); } else if(localName.equals("adag")){ //call the callback interface mCallback.cbDone(); return; } else if( localName.equals( "uses" ) ){ handleUsesTagEnd(); } } /** * Here we have all the elements in our data structure. This is called * automatically when the end of the XML file is reached. */ public void endDocument() { } /** * The main program. DAXParser2 can be run standalone, in which case it just * parses the file and populates the required data objects.
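* <p>A hypothetical standalone invocation, mirroring the commented-out
* examples in the body below:
* <pre>
*   DAXParser2 d = new DAXParser2( "blackdiamond.dax", new DAX2CDAG( "blackdiamond.dax" ) );
* </pre>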
* */ public static void main(String args[]) { //System.setProperty("vds.home","/nfs/asd2/vahi/test/chimera/"); //DAXParser2 d = new DAXParser2("sdss.xml","isi",null); //DAXParser2 d = new DAXParser2("sonal.xml",new DAX2CDAG("./sonal.xml")); //DAXParser2 d = new DAXParser2("./testcases/black-diamond/blackdiamond_dax_1.7.xml"); //DAXParser2 d = new DAXParser2("/nfs/asd2/vahi/gurmeet_dax.xml"); /*DagInfo dagInfo = d.getDagInfo(); Vector vSubInfo = d.getSubInfo(); ADag adag = new ADag(dagInfo, vSubInfo); System.out.println(adag); */ } /** * Returns the XML schema namespace that a document being parsed conforms * to. * * @return the schema namespace */ public String getSchemaNamespace( ){ return DAXParser2.SCHEMA_NAMESPACE; } /** * Helps the load database to locate the DAX XML schema, if available. * Please note that the schema location URL in the instance document * is only a hint, and may be overriden by the findings of this method. * * @return a location pointing to a definition document of the XML * schema that can read VDLx. Result may be null, if such a document * is unknown or unspecified. */ public String getSchemaLocation() { // treat URI as File, yes, I know - I need the basename File uri = new File(DAXParser2.SCHEMA_LOCATION); //get the default version with decimal point shifted right float defaultVersion = shiftRight( extractVersionFromSchema( uri.getName() ) ); float schemaVersion = shiftRight( mDaxSchemaVersion ); String child = ( schemaVersion == -1 || schemaVersion > defaultVersion)? //use the default uri.getName(): //use the schema version specified in the dax "dax-" + mDaxSchemaVersion + ".xsd"; // create a pointer to the default local position File dax = new File(this.mProps.getSchemaDir(), child); //System.out.println("\nDefault Location of Dax is " + dax.getAbsolutePath()); // Nota bene: vds.schema.dax may be a networked URI... return this.mProps.getDAXSchemaLocation(dax.getAbsolutePath()); } /** * Determines whether to use a doubleNegative or not. * * @param daxVersion the version of the dax as determined. * * @return boolean */ protected boolean useDoubleNegative( String daxVersion ){ float current = shiftRight( daxVersion ); boolean result = false; //sanity check if( current == -1 ){ //we were unable to parse the dax version //means we assume double negative is turned off return result; } float base = shiftRight( this.DAX_VERSION_WITHOUT_DOUBLE_NEGATIVE ); //we turned off double negative after >= base return base > current; } /** * Returns a float with the decimal point shifted right till the end. * Is necessary for comparing a String "1.10" with a String "1.9". * * @param value the value that has to be shifted right. * * @return the float value, with the decimal point shifted or -1 in case * of error. */ public float shiftRight(String value){ float result = -1; //sanity check in case of null value if( value == null ) return result; value = value.trim(); int i = 0; for( i = 0; i < value.length(); i++){ char c = value.charAt(i); //parse till the first '.' if ( c >= '0' && c <= '9' ) { continue; } else if ( c == '.' ) { i++; break; } else{ //invalid string return result; } } //determine the multiplicative factor int factor = 1; for ( i = i ; i < value.length(); i++, factor *= 10 ){ char c = value.charAt(i); //exit if any of the trailing characters are non digits if ( ! 
( c >= '0' && c <= '9') ) return result; } result = Float.parseFloat(value) * factor; return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/DAX2LabelGraph.java0000644000175000017500000001021411757531137026623 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.dax; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.partitioner.graph.Bag; import edu.isi.pegasus.planner.partitioner.graph.LabelBag; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; /** * The callback, that ends up building a label graph. With each GraphNode a label * is associated. The label can be associated via a profile in the VDS namespace * with the jobs in the DAX. The key for the profile can either be set via the * constructor, or a default key is used. * * @author Karan Vahi * @version $Revision: 2590 $ */ public class DAX2LabelGraph extends DAX2Graph { /** * The default key for the VDS namespace profile, that is used in case none * is specified by the user. */ public static final String DEFAULT_LABEL_KEY = "label"; /** * The default label value that is to be applied, in case the profile is * not associated with the job. */ //public static String DEFAULT_LABEL_VALUE = "default"; /** * The profile key that is used for the labelling. */ private String mLabelKey; /** * Sets the default label value that is to be used if the profile is not * associated with the job. * * @param value the value to be associated. */ // public static void setLabelValue(String value){ // DEFAULT_LABEL_VALUE = value; // } /** * The overloaded constructor. * * @param properties the properties passed to the planner. * @param dax the path to the DAX file. */ public DAX2LabelGraph( PegasusProperties properties, String dax ){ super( properties, dax ); mProps = properties; this.setLabelKey( DEFAULT_LABEL_KEY ); } /** * Set the profile key that is to be used to pick up the labels. * Sets the profile key to the value specified. If value passed is * null, then is set to the default label key. * * @param key the VDS profile key that is to be used. * * @see #DEFAULT_LABEL_KEY */ public void setLabelKey( String key ){ mLabelKey = ( key == null )? this.DEFAULT_LABEL_KEY : key; LabelBag.setLabelKey(mLabelKey); } /** * This constructs a graph node for the job and ends up storing it in the * internal map. In addition assigns a label with the node. The label is * is the value of a profile in the VDS namespace. The name of the profile * can * * @param job the job that was parsed. */ public void cbJob(Job job) { mLogger.log( "Adding job to graph " + job.getName() , LogManager.DEBUG_MESSAGE_LEVEL ); GraphNode gn = new GraphNode(job.logicalId,job.logicalName); String label = (String)job.vdsNS.get(mLabelKey); // label = (label == null)? 
DEFAULT_LABEL_VALUE : label; Bag bag = new LabelBag(); bag.add(mLabelKey,label); gn.setBag(bag); put(job.logicalId,gn); } /** * Callback to signal that traversal of the DAX is complete. At this point a * dummy root node is added to the graph, that is the parents to all the root * nodes in the existing DAX. This method in additions adds the default label * to the root. */ public void cbDone() { super.cbDone(); Bag bag = new LabelBag(); // bag.add(mLabelKey,DEFAULT_LABEL_VALUE); mRoot.setBag(bag); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/DAX2CDAG.java0000644000175000017500000002360711757531137025332 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.dax; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.CompoundTransformation; import edu.isi.pegasus.planner.classes.DagInfo; import edu.isi.pegasus.planner.classes.Notifications; import edu.isi.pegasus.planner.classes.PCRelation; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.catalog.transformation.classes.TransformationStore; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.classes.ReplicaStore; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.dax.Invoke; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Vector; /** * This creates a dag corresponding to one particular partition of the whole * abstract plan. The partition can be as big as the whole abstract graph or can * be as small as a single job. The partitions are determined by the Partitioner. * * @author Karan Vahi * @version $Revision: 4282 $ */ public class DAX2CDAG implements Callback { /** * The DAGInfo object which contains information corresponding to the ADag in * the XML file. */ private DagInfo mDagInfo; /** * Contains Job objects. One per submit file. */ private Vector mVSubInfo; /** * The mapping of the idrefs of a job to the job name. */ private Map mJobMap; /** * The handle to the properties object. */ private PegasusProperties mProps; /** * A flag to specify whether the graph has been generated for the partition * or not. */ private boolean mDone; /** * Handle to the replica store that stores the replica catalog * user specifies in the DAX */ protected ReplicaStore mReplicaStore; /** * Handle to the transformation store that stores the transformation catalog * user specifies in the DAX */ protected TransformationStore mTransformationStore; /** * Map of Compound Transfomations indexed by complete name of the compound * transformation. */ protected Map mCompoundTransformations; /** * All the notifications associated with the adag */ private Notifications mNotifications; /** * The overloaded constructor. 
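* Initializes the internal replica, transformation, and notification stores
* to empty instances; they are populated later via the parser callbacks.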
* * @param properties the properties passed to the planner. * @param dax the path to the DAX file. */ public DAX2CDAG( PegasusProperties properties, String dax ) { // mDAXPath = dax; mDagInfo = new DagInfo(); mVSubInfo = new Vector(); mJobMap = new HashMap(); mProps = properties; mDone = false; this.mReplicaStore = new ReplicaStore(); this.mTransformationStore = new TransformationStore(); this.mCompoundTransformations = new HashMap(); this.mNotifications = new Notifications(); } /** * Callback when the opening tag was parsed. This contains all * attributes and their raw values within a map. It ends up storing * the attributes with the adag element in the internal memory structure. * * @param attributes is a map of attribute key to attribute value */ public void cbDocument(Map attributes) { mDagInfo.setDAXVersion( (String)attributes.get( "version" )); mDagInfo.count = (String)attributes.get("count"); mDagInfo.index = (String)attributes.get("index"); mDagInfo.setLabel( (String)attributes.get("name") ); } /** * Callback when a invoke entry is encountered in the top level inside the adag element in DAX. * * @param invoke the invoke object */ public void cbWfInvoke(Invoke invoke){ //System.out.println( "[DEBUG] WF Invoke " + invoke ); this.mNotifications.add(invoke); } /** * Callback for the job from section 2 jobs. These jobs are completely * assembled, but each is passed separately. * * @param job the Job object storing the job information * gotten from parser. */ public void cbJob(Job job) { mJobMap.put(job.logicalId,job.jobName); mVSubInfo.add(job); mDagInfo.addNewJob( job ); //check for compound executables if( this.mCompoundTransformations.containsKey( job.getCompleteTCName() ) ){ CompoundTransformation ct = this.mCompoundTransformations.get( job.getCompleteTCName() ); //add all the dependant executables and data files for( PegasusFile pf : ct.getDependantFiles() ){ job.addInputFile( pf ); String lfn = pf.getLFN(); mDagInfo.updateLFNMap(lfn,"i"); } job.addNotifications( ct.getNotifications()); } //put the input files in the map for ( Iterator it = job.inputFiles.iterator(); it.hasNext(); ){ PegasusFile pf = (PegasusFile)it.next(); String lfn = pf.getLFN(); mDagInfo.updateLFNMap(lfn,"i"); } for ( Iterator it = job.outputFiles.iterator(); it.hasNext(); ){ PegasusFile pf = (PegasusFile)it.next(); String lfn = ( pf ).getLFN(); //if the output LFN is also an input LFN of the same //job then it is a pass through LFN. Should be tagged //as i only, as we want it staged in if( job.inputFiles.contains( pf ) ){ //dont add to lfn map in DagInfo continue; } mDagInfo.updateLFNMap(lfn,"o"); } } /** * Callback for child and parentID relationships from section 3. * * @param child is the IDREF of the child element. * @param parents is a list of IDREFs of the included parents. */ public void cbParents(String child, List parents) { String childID = (String)mJobMap.get(child); String parentID; //System.out.println( child + " -> " + parents ); for ( PCRelation pc : parents ){ parentID = (String)mJobMap.get( pc.getParent() ); if(parentID == null){ //this actually means dax is generated wrong. //probably some one tinkered with it by hand. throw new RuntimeException( "Cannot find parent for job " + childID ); } PCRelation relation = new PCRelation( parentID, childID ); relation.setAbstractChildID( child ); relation.setAbstractParentID( pc.getParent() ); mDagInfo.addNewRelation( relation ); } } /** * Callback when the parsing of the document is done. 
It sets the flag * that the parsing has been done, that is used to determine whether the * ADag object has been fully generated or not. */ public void cbDone() { mDone = true; } /** * Returns an ADag object corresponding to the abstract plan it has generated. * It throws a runtime exception if the method is called before the object * has been created fully. * * @return ADag object containing the abstract plan referred in the dax. */ public Object getConstructedObject(){ if(!mDone) throw new RuntimeException("Method called before the abstract dag " + " for the partition was fully generated"); ADag dag = new ADag(mDagInfo,mVSubInfo); dag.setReplicaStore(mReplicaStore); dag.setTransformationStore(mTransformationStore); dag.addNotifications(mNotifications); return dag; } /** * Callback when a compound transformation is encountered in the DAX * * @param compoundTransformation the compound transforamtion */ public void cbCompoundTransformation( CompoundTransformation compoundTransformation ){ this.mCompoundTransformations.put( compoundTransformation.getCompleteName(), compoundTransformation ); if(!compoundTransformation.getNotifications().isEmpty()){ System.out.println( "[DEBUG] Compound Transformation Invoke " + compoundTransformation.getCompleteName() + " " +compoundTransformation.getNotifications() ); } } /** * Callback when a replica catalog entry is encountered in the DAX * * @param rl the ReplicaLocation object */ public void cbFile( ReplicaLocation rl ){ //System.out.println( "File Locations passed are " + rl ); //we only add to replica store if there is a PFN specified if( rl.getPFNCount() > 0 ){ this.mReplicaStore.add( rl ); } } /** * Callback when a transformation catalog entry is encountered in the DAX * * @param tce the transformationc catalog entry object. */ public void cbExecutable( TransformationCatalogEntry tce ){ this.mTransformationStore.addEntry( tce ); if( !tce.getNotifications().isEmpty() ){ System.out.println( "[DEBUG] Executable Invoke " + tce.getLogicalTransformation() + " " + tce.getNotifications() ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/DAX2Graph.java0000644000175000017500000002076211757531137025674 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.parser.dax; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.classes.CompoundTransformation; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.dax.Invoke; import edu.isi.pegasus.common.logging.LogManager; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; /** * This callback implementation ends up building a detailed structure of the * graph referred to by the abstract plan in dax, that should make the graph * traversals easier. Later on this graph representation would be used * uniformly in the Pegasus code base. * * @author Karan Vahi * @version $Revision: 3669 $ */ public class DAX2Graph implements Callback { /** * The id of the dummy root node added on the top of the graph. Makes * easier the starting of the traversal. */ public static final String DUMMY_NODE_ID = "dummy"; /** * The map containing the a graph node for each of the jobs referred to in * the dax. The key is the logical id of the job. */ protected Map mAbstractGraph; /** * A flag to specify whether the graph has been generated for the partition * or not. */ protected boolean mDone; /** * The label of the abstract dax as set by Chimera. */ protected String mLabel; /** * The root node for the graph that is constructed. */ protected GraphNode mRoot; /** * The handle to the properties object. */ protected PegasusProperties mProps; /** * The logging object. */ protected LogManager mLogger; /** * The overloaded constructor. * * @param properties the properties passed to the planner. * @param dax the path to the DAX file. */ public DAX2Graph(PegasusProperties properties, String dax){ mProps = properties; mAbstractGraph = new java.util.HashMap(); mLogger = LogManagerFactory.loadSingletonInstance( properties ); mDone = false; mLabel = null; mRoot = null; } /** * Returns a Map indexed by the logical ID of the jobs, and each value being * a GraphNode object. * * @return ADag object containing the abstract plan referred in the dax. */ public Object getConstructedObject() { if(!mDone) throw new RuntimeException("Method called before the abstract dag " + " for the partition was fully generated"); return mAbstractGraph; } /** * Callback when the opening tag was parsed. This contains all * attributes and their raw values within a map. It ends up storing * the attributes with the adag element in the internal memory structure. * * @param attributes is a map of attribute key to attribute value */ public void cbDocument(Map attributes) { /**@todo Implement this org.griphyn.cPlanner.parser.Callback method*/ if( attributes == null || (mLabel = (String)attributes.get("name")) == null){ mLabel = "test"; } } /** * Callback when a invoke entry is encountered in the top level inside the adag element in the DAX. * * @param invoke the invoke object */ public void cbWfInvoke(Invoke invoke){ throw new UnsupportedOperationException("Not supported yet."); } /** * This constructs a graph node for the job and ends up storing it in the * internal map. * * @param job the job that was parsed. 
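* The node is keyed by the job's logical ID in the internal map.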
*/ public void cbJob( Job job ) { GraphNode gn = new GraphNode( job.getLogicalID(), job.getTXName() ); mLogger.log( "Adding job to graph " + job.getName() , LogManager.DEBUG_MESSAGE_LEVEL ); put( job.logicalId, gn ); } /** * This updates the internal graph nodes of child with references to it's * parents referred to by the list of parents passed. It gets the handle * to the parents graph nodes from it's internal map. * * @param child the logical id of the child node. * @param parents list containing the logical id's of the parents of the * child nodes. */ public void cbParents(String child, List parents) { GraphNode childNode = (GraphNode)get(child); Iterator it = parents.iterator(); String parentId; ArrayList parentList = new ArrayList(parents.size()); mLogger.log( "Adding parents for child " + child, LogManager.DEBUG_MESSAGE_LEVEL ); //construct the references to the parent nodes while(it.hasNext()){ parentId = (String)it.next(); GraphNode parentNode = (GraphNode)get(parentId); parentList.add(parentNode); //add the child to the parent's child list parentNode.addChild(childNode); } childNode.setParents(parentList); } /** * Returns the name of the dax. * * @return name of dax */ public String getNameOfDAX(){ return mLabel; } /** * Callback to signal that traversal of the DAX is complete. At this point a * dummy root node is added to the graph, that is the parents to all the root * nodes in the existing DAX. */ public void cbDone() { //the abstract graph is fully generated mDone = true; //just print out the graph that is generated internally. //find the root nodes from where to start the breadth first //search Iterator it = mAbstractGraph.entrySet().iterator(); List rootNodes = new LinkedList(); while(it.hasNext()){ GraphNode gn = (GraphNode)((java.util.Map.Entry)it.next()).getValue(); if(gn.getParents() == null || gn.getParents().isEmpty()){ rootNodes.add(gn); } //System.out.println(gn); } //add a dummy node that is a root to all these nodes. String rootId = this.DUMMY_NODE_ID; mRoot = new GraphNode(rootId,rootId); mRoot.setChildren(rootNodes); put(rootId,mRoot); //System.out.println(dummyNode); } /** * It puts the key and the value in the internal map. * * @param key the key to the entry in the map. * @param value the entry in the map. */ protected void put(Object key, Object value){ mAbstractGraph.put(key,value); } /** * It returns the value associated with the key in the map. * * @param key the key to the entry in the map. * * @return the object */ public Object get(Object key){ return mAbstractGraph.get(key); } /** * Callback when a compound transformation is encountered in the DAX * * @param compoundTransformation the compound transforamtion */ public void cbCompoundTransformation( CompoundTransformation compoundTransformation ){ throw new UnsupportedOperationException("Not supported yet."); } /** * Callback when a replica catalog entry is encountered in the DAX * * @param rl the ReplicaLocation object */ public void cbFile( ReplicaLocation rl ){ throw new UnsupportedOperationException("Not supported yet."); } /** * Callback when a transformation catalog entry is encountered in the DAX * * @param tce the transformation catalog entry object. 
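*
* @throws UnsupportedOperationException always, as executables are not
* consumed when building the graph.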
*/ public void cbExecutable( TransformationCatalogEntry tce ){ throw new UnsupportedOperationException("Not supported yet."); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/Callback.java0000644000175000017500000001077111757531137025707 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.dax; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.classes.CompoundTransformation; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PCRelation; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.dax.Invoke; /** * This interface defines the callback calls from DAX parsing. A slim * and memory-efficient parser of DAX is expected to implement these * callbacks, and generate its own information on the fly. * * @author Karan Vahi * @author Jens-S. Vöckler * @version $Revision: 3668 $ */ public interface Callback { /** * The version of the Callback API. */ public static final String VERSION = "1.3"; /** * Returns the object that is constructed during the parsing of the document. * The type of the object that is constructed is determined by the * implementing callback handler. For example, it could be an ADag object * used by Pegasus or a map containing the graph structure of the dax. * The implementing classes should keep a boolean flag that signifies whether * the corresponding object has been created by the implementing class or * not. The variable should be set when the implementing callback handler * deems that it has enough data to construct that object. * * @return constructed object */ public Object getConstructedObject(); /** * Callback when the opening adag tag was parsed. This contains all * attributes and their raw values within a map. This callback can * also be used to initialize callback-specific resources. * * @param attributes is a map of attribute key to attribute value */ public void cbDocument(java.util.Map attributes); /** * Callback when an invoke is encountered in the DAX at the top level inside * the adag tag. * * @param invoke the invoke object */ public void cbWfInvoke(Invoke invoke); /** * Callback when a replica catalog entry is encountered in the DAX from * Section 1: Files that lists entries in a Replica Catalog * * @param rl the ReplicaLocation object */ public void cbFile( ReplicaLocation rl ); /** * Callback when a transformation catalog entry is encountered in the DAX * from Section 2: Executables that lists entries in a Transformation Catalog * * @param tce the transformation catalog entry object.
*/ public void cbExecutable( TransformationCatalogEntry tce ); /** * Callback when a compound transformation is encountered in the DAX from * Section 3, which lists transformations that aggregate executables and files. * * @param compoundTransformation the compound transformation */ public void cbCompoundTransformation( CompoundTransformation compoundTransformation ); /** * Callback for a job from section 4: jobs, DAXes or DAGs that list * a JOB, DAX or DAG element. These jobs are completely * assembled, but each is passed separately. * * @param job is the DAX-style job. */ public void cbJob(Job job); /** * Callback for child and parent relationships from Section 5: Dependencies * that lists parent-child relationships (can be empty). * * @param child is the IDREF of the child element. * @param parents is a list of edges denoted by PCRelation objects. */ public void cbParents(String child, java.util.List parents); /** * Callback when the parsing of the document is done. While this state * could also be determined from the return of the invocation of the * parser, that return may be hidden in another place of the code. * This callback can be used to free callback-specific resources. */ public void cbDone(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/DAXParser3.java0000644000175000017500000014564611757531137026075 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
* */ package edu.isi.pegasus.planner.parser.dax; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import org.xml.sax.SAXException; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.CondorVersion; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.impl.Abstract; import edu.isi.pegasus.planner.classes.CompoundTransformation; import edu.isi.pegasus.planner.classes.DAGJob; import edu.isi.pegasus.planner.classes.DAXJob; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.Notifications; import edu.isi.pegasus.planner.classes.PCRelation; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.classes.PegasusFile.LINKAGE; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.dax.Executable; import edu.isi.pegasus.planner.dax.Invoke; import edu.isi.pegasus.planner.dax.MetaData; import edu.isi.pegasus.planner.dax.PFN; import edu.isi.pegasus.planner.dax.Executable.ARCH; import edu.isi.pegasus.planner.dax.Executable.OS; import edu.isi.pegasus.planner.dax.Invoke.WHEN; import edu.isi.pegasus.planner.namespace.Hints; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.parser.StackBasedXMLParser; /** * This class uses the Xerces SAX2 parser to validate and parse an XML * document conforming to the DAX Schema 3.2 * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4507 $ */ public class DAXParser3 extends StackBasedXMLParser implements DAXParser { /** * The "not-so-official" location URL of the Site Catalog Schema. */ public static final String SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/dax-3.3.xsd"; /** * uri namespace */ public static final String SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/DAX"; /** * Constant denoting an undefined site */ public static final String UNDEFINED_SITE = "undefined"; /* * Predefined Constant for dax version 3.2.0 */ public static final long DAX_VERSION_3_2_0 = CondorVersion.numericValue( "3.2.0" ); /* * Predefined Constant for dax version 3.2.0 */ public static final long DAX_VERSION_3_3_0 = CondorVersion.numericValue( "3.3.0" ); /** * Constant denoting default metadata type */ private String DEFAULT_METADATA_TYPE = "String"; /** * List of parents for a child node in the graph */ protected List mParents; /** * Handle to the callback */ protected Callback mCallback; /** * A job prefix specifed at command line. */ protected String mJobPrefix; /** * The overloaded constructor. * * @param properties the PegasusProperties to be used. */ public DAXParser3( PegasusBag bag ) { super( bag ); mJobPrefix = ( bag.getPlannerOptions() == null ) ? null: bag.getPlannerOptions().getJobnamePrefix(); } /** * Set the DAXCallback for the parser to call out to. 
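* <p>A minimal wiring sketch (variable names are hypothetical; assumes a
* populated PegasusBag and a DAX file path):
* <pre>
*   DAXParser3 parser = new DAXParser3( bag );
*   Callback cb = new DAX2CDAG( properties, daxFile );
*   parser.setDAXCallback( cb );
*   parser.startParser( daxFile );
*   ADag dag = (ADag) cb.getConstructedObject();
* </pre>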
* * @param c the callback */ public void setDAXCallback( Callback c ){ this.mCallback = c; } /** * Returns the DAXCallback for the parser. * * @return the callback */ public Callback getDAXCallback( ){ return this.mCallback; } /** * The main method that starts the parsing. * * @param file the XML file to be parsed. */ public void startParser( String file ) { try { this.testForFile( file ); mParser.parse( file ); //sanity check if ( mDepth != 0 ){ throw new RuntimeException( "Invalid stack depth at end of parsing " + mDepth ); } } catch ( IOException ioe ) { mLogger.log( "IO Error :" + ioe.getMessage(), LogManager.ERROR_MESSAGE_LEVEL ); } catch ( SAXException se ) { if ( mLocator != null ) { mLogger.log( "Error in " + mLocator.getSystemId() + " at line " + mLocator.getLineNumber() + " at column " + mLocator.getColumnNumber() + " :" + se.getMessage() , LogManager.ERROR_MESSAGE_LEVEL); } } } /** * Returns the XML schema namespace that a document being parsed conforms * to. * * @return the schema namespace */ public String getSchemaNamespace( ){ return DAXParser3.SCHEMA_NAMESPACE; } /** * Returns the local path to the XML schema against which to validate. * * @return path to the schema */ public String getSchemaLocation() { // treat URI as File, yes, I know - I need the basename File uri = new File( DAXParser3.SCHEMA_LOCATION ); // create a pointer to the default local position File dax = new File( this.mProps.getSchemaDir(), uri.getName() ); return this.mProps.getDAXSchemaLocation( dax.getAbsolutePath() ); } /** * Composes the Java object corresponding to the element * name in the DAX document. * * @param element the element name encountered while parsing. * @param names is a list of attribute names, as strings. * @param values is a list of attribute values, to match the key list. * * @return the relevant object, else null if unable to construct. * * @exception IllegalArgumentException if the element name is too short.
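* <p>Dispatch below is on the first character of the element name; for
* example "parent", "profile" and "pfn" all fall under case 'p'.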
*/ public Object createObject( String element, List names, List values ){ if ( element == null || element.length() < 1 ){ throw new IllegalArgumentException("illegal element length"); } switch ( element.charAt(0) ) { // a adag argument case 'a': if ( element.equals( "adag" ) ) { //for now the adag element is just a map of //key value pair Map m = new HashMap(); for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); m.put( name, value ); } sanityCheckOnVersion( m.get( "version" ) ); //put the call to the callback this.mCallback.cbDocument( m ); return m; }//end of element adag else if( element.equals( "argument" ) ){ return new Arguments(); } return null; //c child compound case 'c': if( element.equals( "child") ){ this.mParents = new LinkedList(); PCRelation pc = new PCRelation(); String child = null; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "ref" ) ) { child = value; } } if( child == null ){ this.complain( element, "child", child ); return null; } pc.setChild( child ); return pc; } else if ( element.equals( "compound") ){ } return null; //d dag dax case 'd': if( element.equals( "dag" ) || element.equals( "dax" ) ){ Job j = new Job( ); //all jobs in the DAX are of type compute j.setUniverse( GridGateway.JOB_TYPE.compute.toString() ); j.setJobType( Job.COMPUTE_JOB ); String file = null; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "namespace" ) ) { j.setTXNamespace( value ); } else if( name.equals( "name" ) ){ j.setTXName( value ); } else if( name.equals( "version" ) ){ j.setTXVersion( value ); } else if( name.equals( "id" ) ){ j.setLogicalID( value ); } else if( name.equals( "file" ) ){ file = value; } else if( name.equals( "node-label" ) ){ this.attributeNotSupported( element, name, value ); } else { this.complain( element, name, value ); } } if( file == null ){ this.complain( element, "file", file ); return null; } PegasusFile pf = new PegasusFile( file ); pf.setLinkage( LINKAGE.INPUT ); if( element.equals( "dag" ) ){ DAGJob dagJob = new DAGJob( j ); //we dont want notifications to be inherited dagJob.resetNotifications(); dagJob.setDAGLFN( file ); dagJob.addInputFile( pf ); //the job should always execute on local site //for time being dagJob.hints.construct(Hints.EXECUTION_POOL_KEY, "local"); //also set the executable to be used dagJob.hints.construct(Hints.PFN_HINT_KEY, "/opt/condor/bin/condor-dagman"); //add default name and namespace information dagJob.setTransformation("condor", "dagman", null); dagJob.setDerivation("condor", "dagman", null); dagJob.level = -1; //dagman jobs are always launched without a gridstart dagJob.vdsNS.construct(Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[GridStartFactory.NO_GRIDSTART_INDEX]); //set the internal primary id for job //dagJob.setName( constructJobID( dagJob ) ); dagJob.setName( dagJob.generateName( this.mJobPrefix) ); return dagJob; } else if (element.equals( "dax" ) ){ DAXJob daxJob = new DAXJob( j ); //we dont want notifications to be inherited daxJob.resetNotifications(); //the job should be tagged type pegasus daxJob.setTypeRecursive(); //the job should always execute on local site //for time being daxJob.hints.construct( Hints.EXECUTION_POOL_KEY, "local" ); //also set a fake executable to be used daxJob.hints.construct( Hints.PFN_HINT_KEY, "/tmp/pegasus-plan" ); 
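//the remaining setup mirrors the dag branch above: record the DAX LFN as //an input file, set default transformation/derivation names, and generate //the job name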
//retrieve the extra attribute about the DAX daxJob.setDAXLFN( file ); daxJob.addInputFile( pf ); //add default name and namespace information daxJob.setTransformation( "pegasus", "pegasus-plan", Version.instance().toString() ); daxJob.setDerivation( "pegasus", "pegasus-plan", Version.instance().toString() ); daxJob.level = -1; //set the internal primary id for job //daxJob.setName( constructJobID( daxJob ) ); daxJob.setName( daxJob.generateName( this.mJobPrefix) ); return daxJob; } }//end of element job return null;//end of j //e executable case 'e': if( element.equals( "executable" ) ){ String namespace = null; String execName = null; String version = null; ARCH arch = null; OS os = null; String os_release = null; String os_version = null; String os_glibc = null; Boolean os_installed = true; // Default is installed for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "namespace" ) ) { namespace = value; } else if( name.equals( "name" ) ){ execName = value; } else if( name.equals( "version" ) ){ version = value; } else if( name.equals( "arch" ) ){ arch = Executable.ARCH.valueOf( value.toLowerCase() ); } else if( name.equals( "os" ) ){ os = Executable.OS.valueOf( value.toLowerCase() ); } else if( name.equals( "osrelease" ) ){ os_release = value; } else if( name.equals( "osversion" ) ){ os_version =value; } else if( name.equals( "glibc" ) ){ os_glibc = value; } else if( name.equals( "installed" ) ){ os_installed = Boolean.parseBoolean( value ); } } Executable executable = new Executable( namespace , execName ,version); executable.setArchitecture(arch); executable.setOS(os); executable.setOSRelease(os_release); executable.setOSVersion(os_version); executable.setGlibc(os_glibc); executable.setInstalled(os_installed); return executable; }//end of element executable return null; //end of e //f file case 'f': if( element.equals( "file" ) ){ //create a FileTransfer Object or shd it be ReplicaLocations? //FileTransfer ft = new FileTransfer(); ReplicaLocation rl = new ReplicaLocation(); for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "name" ) ) { //ft.setLFN( value ); rl.setLFN( value ); } else if( name.equals( "link" ) ){ //ignore dont need to do anything } else if( name.equals( "optional" ) ){ Boolean optional = Boolean.parseBoolean( value ); if( optional ){ //replica location object does not handle //optional attribute right now. 
// ft.setFileOptional(); } } else { this.complain( element, name, value ); } } return rl; }//end of element file return null; //end of f //i invoke case 'i': if( element.equals( "invoke" ) ){ String when = null; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "when" ) ) { when = value; this.log( element, name, value ); } else { this.complain( element, name, value ); } } if( when == null ){ this.complain( element, "when", when ); return null; } return new Invoke( WHEN.valueOf( when ) ); }//end of element invoke return null; //j job case 'j': if( element.equals( "job" ) ){ Job j = new Job( ); //all jobs in the DAX are of type compute j.setUniverse( GridGateway.JOB_TYPE.compute.toString() ); j.setJobType( Job.COMPUTE_JOB ); for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "namespace" ) ) { j.setTXNamespace( value ); } else if( name.equals( "name" ) ){ j.setTXName( value ); } else if( name.equals( "version" ) ){ j.setTXVersion( value ); } else if( name.equals( "id" ) ){ j.setLogicalID( value ); } else if( name.equals( "node-label" ) ){ this.attributeNotSupported( element, name, value ); } else { this.complain( element, name, value ); } } //set the internal primary id for job j.setName( constructJobID( j ) ); return j; }//end of element job return null;//end of j //m metadata case 'm': if( element.equals( "metadata" ) ){ String key = null; String type = DEFAULT_METADATA_TYPE; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "key" ) ) { key = value; this.log( element, name, value ); } else if ( name.equals( "type" ) ) { type = value; this.log( element, name, value ); } else { this.complain( element, name, value ); } } if( key == null ){ this.complain( element, "key", key ); } MetaData md = new MetaData( key, type ); return md; }//end of element metadata return null;//end of case m //p parent profile pfn case 'p': if( element.equals( "parent" ) ){ String parent = null; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "ref" ) ) { parent = value; } else if( name.equals( "edge-label" ) ){ this.attributeNotSupported( "parent", "edge-label", value); } else { this.complain( element, name, value ); } } if( parent == null ){ this.complain( element, "parent", parent ); return null; } return parent; } else if( element.equals( "profile" ) ){ Profile p = new Profile(); for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "namespace" ) ) { p.setProfileNamespace( value.toLowerCase() ); this.log( element, name, value ); } else if ( name.equals( "key" ) ) { p.setProfileKey( value ); this.log( element, name, value ); } else { this.complain( element, name, value ); } } return p; }//end of element profile else if( element.equals( "pfn" ) ){ String url = null; String site = UNDEFINED_SITE; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "url" ) ) { url = value; this.log( element, name, value ); } else if ( name.equals( "site" ) ) { site = value; this.log( element, name, value ); } else { this.complain( element, name, value ); } } if( url == null ){ this.complain( element, "url", url ); 
return null; } PFN pfn = new PFN( url, site ); return pfn; }//end of element pfn return null;//end of case p //s stdin stdout stderr case 's': if( element.equals( "stdin" ) || element.equals( "stdout" ) || element.equals( "stderr") ){ //we use DAX API File object for this String fileName = null; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "name" ) ) { fileName = value; this.log( element, name, value ); } else if ( name.equals( "link" ) ) { //we ignore as linkage is fixed for stdout|stderr|stdin this.log( element, name, value ); } else { this.complain( element, name, value ); } } if( fileName == null ){ this.complain( element, "name", fileName ); return null; } return new edu.isi.pegasus.planner.dax.File( fileName ); }//end of stdin|stdout|stderr return null;//end of case s //t transformation case 't': if( element.equals( "transformation" ) ){ String namespace = null,lname = null, version = null; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); if ( name.equals( "namespace" ) ) { namespace = value; } else if( name.equals( "name" ) ){ lname = value; } else if( name.equals( "version" ) ){ version = value; } } return new CompoundTransformation( namespace, lname, version ); } return null; //u uses case 'u': if( element.equals( "uses" ) ){ PegasusFile pf = new PegasusFile( ); String fName = null; String fNamespace = null; String fVersion = null; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get( i ); String value = (String) values.get( i ); /* * Name Type Use Default Fixed name xs:string required link LinkageType optional optional xs:boolean optional false register xs:boolean optional true transfer TransferType optional true namespace xs:string optional version VersionPattern optional exectuable xs:boolean optional false */ if ( name.equals( "name" ) ) { pf.setLFN( value ); fName = value; this.log( element, name, value ); } else if ( name.equals( "link" ) ) { pf.setLinkage( PegasusFile.LINKAGE.valueOf( value.toUpperCase() ) ); this.log( element, name, value ); } else if ( name.equals( "optional" ) ) { Boolean bValue = Boolean.parseBoolean( value ); if( bValue ){ pf.setFileOptional(); } this.log( element, name, value ); } else if( name.equals( "register") ){ Boolean bValue = Boolean.parseBoolean( value ); pf.setRegisterFlag( bValue ); } else if ( name.equals( "transfer" ) ) { pf.setTransferFlag( value ); this.log( element, name, value ); } else if ( name.equals( "namespace" ) ) { fNamespace = value; this.log( element, name, value ); } else if ( name.equals( "version" ) ) { fVersion = value; this.log( element, name, value ); } else if ( name.equals( "executable" ) ) { Boolean bValue = Boolean.parseBoolean( value ); if( bValue ){ pf.setType( PegasusFile.EXECUTABLE_FILE ); } this.log( element, name, value ); } else if ( name.equals( "size" ) ) { pf.setSize( value ); this.log( element, name, value ); } else { this.complain( element, name, value ); } } //if executable then update lfn to combo of namespace,name,version if( pf.getType() == PegasusFile.EXECUTABLE_FILE ){ pf.setLFN( Separator.combine(fNamespace, fName, fVersion) ); } return pf; }//end of uses return null;//end of case u default: return null; }//end of switch statement } /** * This method sets the relations between the currently finished XML * element(child) and its containing element in terms of Java objects. 
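* (For instance, a parsed Profile is attached to the enclosing Job, and a
* PFN to the enclosing file or executable element.)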
* Usually it involves adding the object to the parent's child object * list. * * @param childElement name is the the child element name * @param parent is a reference to the parent's Java object * @param child is the completed child object to connect to the parent * * @return true if the element was added successfully, false, if the * child does not match into the parent. */ public boolean setElementRelation( String childElement, Object parent, Object child ){ switch ( childElement.charAt( 0 ) ) { //a argument adag case 'a': if( child instanceof Arguments ){ Arguments a = (Arguments)child; a.addArgument( mTextContent.toString() ); if( parent instanceof Job ){ //argument appears in job element Job j = (Job)parent; j.setArguments( a.toString() ); return true; } } else if( child instanceof Map && parent == null){ //end of parsing reached mLogger.log( "End of last element reached ", LogManager.DEBUG_MESSAGE_LEVEL ); this.mCallback.cbDone(); return true; } return false; //c child case 'c': if( parent instanceof Map ){ if( child instanceof PCRelation ){ PCRelation pc = (PCRelation)child; //call the callback this.mCallback.cbParents( pc.getChild(), mParents); return true; } } return false; //d dax dag case 'd': if( parent instanceof Map ){ if( child instanceof DAGJob ){ //dag appears in adag element DAGJob dagJob = ( DAGJob )child; //call the callback function this.mCallback.cbJob(dagJob); return true; } else if( child instanceof DAXJob ){ //dag appears in adag element DAXJob daxJob = ( DAXJob )child; //call the callback function this.mCallback.cbJob( daxJob ); return true; } } return false; //f file case 'f': if( child instanceof ReplicaLocation ){ ReplicaLocation rl = ( ReplicaLocation )child; if( parent instanceof Map ){ //file appears in adag element // this.mReplicaStore.add( rl ); this.mCallback.cbFile( rl ); return true; } else if( parent instanceof Arguments ){ //file appears in the argument element Arguments a = (Arguments)parent; a.addArgument( mTextContent.toString() ); a.addArgument( rl ); return true; } } return false; //e executable case 'e': if( child instanceof Executable ){ if( parent instanceof Map ){ //executable appears in adag element Executable exec = (Executable)child; List tceList = convertExecutableToTCE(exec); for(TransformationCatalogEntry tce : tceList){ this.mCallback.cbExecutable(Abstract.modifyForFileURLS(tce)); } //moved the callback call to end of pfn //each new pfn is a new transformation //catalog entry //this.mCallback.cbExecutable( tce ); return true; } } return false; //i invoke case 'i': if( child instanceof Invoke ){ Invoke i = (Invoke)child; i.setWhat( mTextContent.toString().trim() ); if( parent instanceof Map ){ this.mCallback.cbWfInvoke(i); return true; } else if(parent instanceof DAXJob ){ //invoke appears in dax element DAXJob daxJob = (DAXJob)parent; daxJob.addNotification(i); return true; }else if(parent instanceof DAGJob ){ //invoke appears in dag element DAGJob dagJob = (DAGJob)parent; dagJob.addNotification(i); return true; }else if( parent instanceof Job ){ //invoke appears in job element Job job = (Job)parent; job.addNotification(i); return true; }else if(parent instanceof Executable ){ //invoke appears in executable element Executable exec = (Executable)parent; exec.addInvoke(i); return true; }else if(parent instanceof CompoundTransformation ){ //invoke appears in transformation element CompoundTransformation ct = (CompoundTransformation)parent; ct.addNotification(i); return true; } } return false; //j job case 'j': if( child instanceof Job && 
parent instanceof Map ){ //callback for Job this.mCallback.cbJob( (Job)child ); return true; } return false; //m metadata case 'm': if ( child instanceof MetaData ) { MetaData md = ( MetaData )child; md.setValue( mTextContent.toString().trim() ); //metadata appears in file element if( parent instanceof ReplicaLocation ){ unSupportedNestingOfElements( "file", "metadata" ); return true; } //metadata appears in executable element if( parent instanceof Executable ){ unSupportedNestingOfElements( "executable", "metadata" ); return true; } } return false; //p parent profile pfn case 'p': if( parent instanceof PCRelation ){ if( child instanceof String ){ //parent appears in child element String parentNode = ( String )child; PCRelation pc = (PCRelation) (( PCRelation )parent).clone(); pc.setParent( parentNode ); mParents.add( pc ); return true; } } else if ( child instanceof Profile ){ Profile p = ( Profile ) child; p.setProfileValue( mTextContent.toString().trim() ); mLogger.log( "Set Profile Value to " + p.getProfileValue(), LogManager.TRACE_MESSAGE_LEVEL ); if ( parent instanceof ReplicaLocation ) { //profile appears in file element unSupportedNestingOfElements( "file", "profile" ); return true; } else if ( parent instanceof Executable ) { //profile appears in executable element Executable exec = ( Executable)parent; exec.addProfiles(new edu.isi.pegasus.planner.dax.Profile(p.getProfileNamespace(),p.getProfileKey(),p.getProfileValue())); return true; } else if ( parent instanceof Job ){ //profile appears in the job element Job j = (Job)parent; j.addProfile( p ); return true; } } else if( child instanceof PFN ){ if ( parent instanceof ReplicaLocation ) { //pfn appears in file element ReplicaLocation rl = ( ReplicaLocation )parent; PFN pfn = ( PFN )child; rl.addPFN( pfn ); return true; } else if ( parent instanceof Executable){ //pfn appears in executable element Executable executable = (Executable)parent; PFN pfn = ( PFN )child; //tce.setResourceId( pfn.getSite() ); //tce.setPhysicalTransformation( pfn.getURL() ); executable.addPhysicalFile(pfn); //convert file url appropriately for installed executables //before returning //this.mCallback.cbExecutable( Abstract.modifyForFileURLS(tce) ); return true; } } return false; //s stdin stdout stderr case 's': if( parent instanceof Job ){ Job j = ( Job )parent; if( child instanceof edu.isi.pegasus.planner.dax.File ){ //stdin stdout stderr appear in job element edu.isi.pegasus.planner.dax.File f = ( edu.isi.pegasus.planner.dax.File )child; if( childElement.equals( "stdin" ) ){ j.setStdIn( f.getName() ); return true; } else if( childElement.equals( "stdout" ) ){ j.setStdOut( f.getName() ); return true; } if( childElement.equals( "stderr" ) ){ j.setStdErr( f.getName() ); return true; } } } return false; //t transformation case 't': if( parent instanceof Map ){ if( child instanceof CompoundTransformation ){ this.mCallback.cbCompoundTransformation( (CompoundTransformation)child ); return true; } return true; } return false; //u uses case 'u': if( child instanceof PegasusFile ){ PegasusFile pf = ( PegasusFile )child; if( parent instanceof Job ){ //uses appears in job Job j = ( Job )parent; if( pf.getLinkage().equals( LINKAGE.INPUT ) ){ j.addInputFile(pf); } else if( pf.getLinkage().equals( LINKAGE.OUTPUT ) ){ j.addOutputFile(pf); } else if( pf.getLinkage().equals( LINKAGE.INOUT ) ){ j.addInputFile(pf); j.addOutputFile(pf); } return true; } else if( parent instanceof CompoundTransformation ){ CompoundTransformation compound = (CompoundTransformation)parent; 
compound.addDependantFile( pf ); return true; } } return false; //default case default: return false; } } /** * Converts the executable into transformation catalog entries * @param executable executable object * @return transformation catalog entries */ public List convertExecutableToTCE(Executable executable){ List tceList = new ArrayList (); TransformationCatalogEntry tce = null; for(PFN pfn : executable.getPhysicalFiles()){ tce = new TransformationCatalogEntry(executable.getNamespace(), executable.getName(), executable.getVersion()); SysInfo sysinfo = new SysInfo(); sysinfo.setArchitecture( SysInfo.Architecture.valueOf( executable.getArchitecture().toString().toLowerCase() ) ); sysinfo.setOS( SysInfo.OS.valueOf( executable.getOS().toString().toUpperCase() ) ); sysinfo.setOSRelease( executable.getOsRelease() ); sysinfo.setOSVersion( executable.getOsVersion() ); sysinfo.setGlibc( executable.getGlibc() ); tce.setSysInfo(sysinfo); tce.setType( executable.getInstalled() ? TCType.INSTALLED : TCType.STAGEABLE ); tce.setResourceId( pfn.getSite() ); tce.setPhysicalTransformation( pfn.getURL() ); Notifications notifications = new Notifications(); for(Invoke invoke : executable.getInvoke()){ notifications.add( new Invoke(invoke) ); } tce.addNotifications(notifications); for(edu.isi.pegasus.planner.dax.Profile profile : executable.getProfiles()){ tce.addProfile(new edu.isi.pegasus.planner.classes.Profile(profile.getNameSpace(),profile.getKey() , profile.getValue())); } tceList.add(tce); } return tceList; } /** * Returns the id for a job * * @param j the job * * @return the id. */ protected String constructJobID( Job j ){ //construct the jobname/primary key for job StringBuffer name = new StringBuffer(); //prepend a job prefix to job if required if (mJobPrefix != null) { name.append(mJobPrefix); } //append the name and id recevied from dax name.append(j.getTXName()); name.append("_"); name.append(j.getLogicalID()); return name.toString(); } /** * Sanity check on the version that this parser works on. * * @param version the version as specified in the DAX */ protected void sanityCheckOnVersion( String version ) { if( version == null ){ mLogger.log( "Version not specified in the adag element " , LogManager.WARNING_MESSAGE_LEVEL ); return ; } //add a 0 suffix String nversion = version + ".0"; if( CondorVersion.numericValue( nversion) < DAXParser3.DAX_VERSION_3_2_0 ){ StringBuffer sb = new StringBuffer(); sb.append( "DAXParser3 Unsupported DAX Version " ).append( version ). append( ". Set pegasus.schema.dax property to load the old DAXParser" ); throw new RuntimeException( sb.toString() ); } return; } /** * Private class to handle mix data content for arguments tags. * */ private class Arguments{ /** * Handle to a job arguments to handle mixed content. */ protected StringBuffer mBuffer; /** * The default constructor */ public Arguments(){ reset(); } /** * Resets the internal buffer */ public void reset() { mBuffer = new StringBuffer(); } /** * Adds text to the arguments string * * @param text the text to be added. */ public void addArgument( String text ){ mBuffer.append( text ); } /** * Adds filename to the arguments * * @param rl the ReplicaLocation object */ public void addArgument(ReplicaLocation rl) { mBuffer.append( " " ).append( rl.getLFN() ).append( " "); } /** * Adds a file name to the argument string * * @param file the file object. 
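*
* <p>Illustrative note (not part of the original javadoc): for mixed
* argument content such as {@code -i f.a -o f.b}, the parser alternates
* between {@code addArgument(String)} for raw text and this method (or the
* {@code ReplicaLocation} variant) for nested file elements; each file
* contributes its logical name padded with a single space on either side,
* and {@code toString()} later collapses the surrounding whitespace.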
*/ private void addArgument( edu.isi.pegasus.planner.dax.File file ){ mBuffer.append( " " ).append( file.getName() ).append( " " ); } /** * Our own implementation for ignorable whitespace. A String that holds the * contents of data passed as text by the underlying parser. The whitespaces * at the end are replaced by one whitespace. * * @param str The string that contains whitespaces. * * @return String corresponding to the trimmed version. * */ public String ignoreWhitespace(String str) { return ignoreWhitespace(str, mProps.preserveParserLineBreaks()); } /** * Our own implementation for ignorable whitespace. A String that holds the * contents of data passed as text by the underlying parser. The whitespaces * at the end are replaced by one whitespace. * * @param str The string that contains whitespaces. * * @return String corresponding to the trimmed version. * */ public String ignoreWhitespace(String str, boolean preserveLineBreak ){ boolean st = false; boolean end = false; int length = str.length(); boolean sN = false;//start with \n ; boolean eN = false;//end with \n if(length > 0){ sN = str.charAt(0) == '\n'; eN = str.charAt(length -1) == '\n'; //check for whitespace in the //starting if(str.charAt(0) == ' ' || str.charAt(0) == '\t' || str.charAt(0) == '\n'){ st = true; } //check for whitespace in the end if(str.length() > 1 && (str.charAt(length -1) == ' ' || str.charAt(length -1) == '\t' || str.charAt(length -1) == '\n')){ end = true; } //trim the string and add a single whitespace accordingly str = str.trim(); str = st == true ? ' ' + str:str; str = end == true ? str + ' ':str; if( preserveLineBreak ){ str = sN ? '\n' + str:str; str = eN ? str + '\n':str; } } return str; } /** * Returns the arguments as string * * @return the arguments */ public String toString(){ return this.ignoreWhitespace( mBuffer.toString() ); } } /** * * @param args */ public static void main( String[] args ){ LogManagerFactory.loadSingletonInstance().setLevel( 5 ); /*DAXParser3 parser = new DAXParser3( ); if (args.length == 1) { parser.startParser( args[0] ); } else { System.out.println("Usage: SiteCatalogParser "); }*/ } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/dax/ExampleDAXCallback.java0000644000175000017500000001176011757531137027557 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser.dax; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.classes.CompoundTransformation; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.dax.Invoke; import java.util.Map; import java.util.List; import java.util.Iterator; /** * An example callback that prints out the various elements in the DAX. 
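*
* <p>Illustrative note (not part of the original javadoc): the parser is
* expected to drive the callback roughly in document order -- {@code
* cbDocument} for the adag attributes, {@code cbJob}, {@code cbFile},
* {@code cbExecutable} and {@code cbCompoundTransformation} as those
* elements complete, {@code cbParents} once per child element, and finally
* {@code cbDone}.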
* * @author Karan Vahi * @version $Revision: 3670 $ */ public class ExampleDAXCallback implements Callback { private boolean mDone; /** * The overloaded constructor. * * @param properties the properties passed to the planner. * @param dax the path to the DAX file. */ public ExampleDAXCallback( PegasusProperties properties, String dax ) { } /** * Callback when the opening tag was parsed. This contains all * attributes and their raw values within a map. It ends up storing * the attributes with the adag element in the internal memory structure. * * @param attributes is a map of attribute key to attribute value */ public void cbDocument(Map attributes) { System.out.println( "The attributes in DAX header retrieved "); System.out.println( attributes ); } /** * Callback when a invoke entry is encountered in the top level inside the adag element in the DAX. * * @param invoke the invoke object */ public void cbWfInvoke(Invoke invoke){ System.out.println(); System.out.println( "Invoke Entry "); System.out.println( invoke ); } /** * Callback for the job from section 2 jobs. These jobs are completely * assembled, but each is passed separately. * * @param job the Job object storing the job information * gotten from parser. */ public void cbJob(Job job) { System.out.println(); System.out.println( "Job parsed "); System.out.println( job ); } /** * Callback for child and parent relationships from section 3. * * @param child is the IDREF of the child element. * @param parents is a list of IDREFs of the included parents. */ public void cbParents(String child, List parents) { System.out.println(); System.out.println( "Edges in the DAX " ); for( Iterator it = parents.iterator() ; it.hasNext(); ){ System.out.println( it.next() + " -> " + child ); } } /** * Callback when the parsing of the document is done. It sets the flag * that the parsing has been done, that is used to determine whether the * ADag object has been fully generated or not. */ public void cbDone() { mDone = true; } /** * Returns an ADag object corresponding to the abstract plan it has generated. * It throws a runtime exception if the method is called before the object * has been created fully. * * @return ADag object containing the abstract plan referred in the dax. */ public Object getConstructedObject(){ //RETURN YOUR CONVERTED OBJECT HERE return new String( "Shallow Object" ); } /** * Callback when a compound transformation is encountered in the DAX * * @param compoundTransformation the compound transforamtion */ public void cbCompoundTransformation( CompoundTransformation compoundTransformation ){ System.out.println(); System.out.println( "CompoundTransformation "); System.out.println( compoundTransformation ); } /** * Callback when a replica catalog entry is encountered in the DAX * * @param rl the ReplicaLocation object */ public void cbFile( ReplicaLocation rl ){ System.out.println(); System.out.println( "Replica Catalog Entry "); System.out.println( rl ); } /** * Callback when a transformation catalog entry is encountered in the DAX * * @param tce the transformationc catalog entry object. 
*/ public void cbExecutable( TransformationCatalogEntry tce ){ System.out.println(); System.out.println( "TransformationCatalogEntry Entry "); System.out.println( tce ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/StackBasedXMLParser.java0000644000175000017500000002174211757531137027201 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; import java.util.List; import java.util.Stack; import org.xml.sax.Attributes; import org.xml.sax.SAXException; /** * An abstract base class that XML Parsers can use if they use stack internally * to store the elements encountered while parsing XML documents using SAX * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2832 $ */ public abstract class StackBasedXMLParser extends Parser { /** * Count the depths of elements in the document */ protected int mDepth; /** * The stack of objects kept around. */ protected Stack mStack; /** * A boolean indicating that parsing is done. */ protected boolean mParsingDone; /** * The default Constructor. * * */ /* public StackBasedXMLParser( ) { this( PegasusProperties.nonSingletonInstance() ); }*/ /** * The overloaded constructor. * * @param bag the PegasusBag to be used. */ public StackBasedXMLParser( PegasusBag bag ) { super( bag ); mStack = new Stack(); mDepth = 0; //setting the schema Locations String schemaLoc = getSchemaLocation(); mLogger.log( "Picking schema " + schemaLoc, LogManager.CONFIG_MESSAGE_LEVEL); String list = getSchemaNamespace( ) + " " + schemaLoc; setSchemaLocations( list ); } /** * Composes the SiteData object corresponding to the element * name in the XML document. * * @param element the element name encountered while parsing. * @param names is a list of attribute names, as strings. * @param values is a list of attribute values, to match the key list. * * @return the relevant SiteData object, else null if unable to construct. * * @exception IllegalArgumentException if the element name is too short. */ public abstract Object createObject( String element, List names, List values ); /** * This method sets the relations between the currently finished XML * element(child) and its containing element in terms of Java objects. * Usually it involves adding the object to the parent's child object * list. * * @param childElement name is the the child element name * @param parent is a reference to the parent's Java object * @param child is the completed child object to connect to the parent * * @return true if the element was added successfully, false, if the * child does not match into the parent. 
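*
* <p>Illustrative trace (not part of the original javadoc), for a
* hypothetical nesting {@code adag > job > argument}: each startElement
* pushes a (name, object) pair, and each endElement pops the finished child
* and hands it here together with the new top of stack:
* <pre>
*   setElementRelation( "argument", jobObject,  argumentObject ); // into job
*   setElementRelation( "job",      adagObject, jobObject );      // into adag
*   setElementRelation( "adag",     null,       adagObject );     // root: no parent
* </pre>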
*/ public abstract boolean setElementRelation( String childElement, Object parent, Object child ); /** * */ public void endDocument() { mParsingDone = true; } /** * This method defines the action to take when the parser begins to parse * an element. * * @param namespaceURI is the URI of the namespace for the element * @param localName is the element name without namespace * @param qName is the element name as it appears in the docment * @param atts has the names and values of all the attributes */ public void startElement( String namespaceURI, String localName, String qName, Attributes atts ) throws SAXException{ //one more element level mDepth++; List names = new java.util.ArrayList(); List values = new java.util.ArrayList(); for ( int i=0; i < atts.getLength(); ++i ) { String name = new String( atts.getLocalName(i) ); String value = new String( atts.getValue(i) ); names.add(name); values.add(value); } //System.out.println( "QNAME " + qName + " NAME " + names + "\t Values" + values ); Object object = createObject( qName, names, values ); if ( object != null ){ mStack.push( new ParserStackElement( qName, object ) ); } else{ mLogger.log( "Unknown element in xml :" + namespaceURI + ":" + localName + ":" + qName, LogManager.ERROR_MESSAGE_LEVEL ); throw new SAXException( "Unknown or Empty element while parsing " ); } } /** * The parser is at the end of an element. Triggers the association of * the child elements with the appropriate parent elements. * * @param namespaceURI is the URI of the namespace for the element * @param localName is the element name without namespace * @param qName is the element name as it appears in the docment */ public void endElement( String namespaceURI, String localName, String qName ) throws SAXException{ // that's it for this level mDepth--; mLogger.log( " at " + this.mLocator.getLineNumber() + ":" + mLocator.getColumnNumber() , LogManager.TRACE_MESSAGE_LEVEL ); ParserStackElement tos = ( ParserStackElement ) mStack.pop(); if ( ! qName.equals( tos.getElementName() ) ) { String error = "Top of Stack " + tos.getElementName() + " does not mactch " + qName; mLogger.log( error, LogManager.FATAL_MESSAGE_LEVEL ); throw new SAXException( error ); } // add pieces to lower levels ParserStackElement peek = mStack.empty() ? null : (ParserStackElement) mStack.peek(); if (!setElementRelation( tos.getElementName(), peek == null ? null : peek.getElementObject(), tos.getElementObject())) { String element = peek == null ? "root-element" : peek.getElementName(); mLogger.log( "Element " + tos.getElementName() + " does not fit into element " + element, LogManager.ERROR_MESSAGE_LEVEL ); } //reinitialize our cdata handler at end of each element mTextContent.setLength( 0 ); } /** * * @param element * @param attribute * @param value */ public void log( String element, String attribute, String value) { //to be enabled when logging per queue. mLogger.log( "For element " + element + " found " + attribute + " -> " + value, LogManager.TRACE_MESSAGE_LEVEL ); } /** * This is called when an attribute is encountered for an element that is invalid * from the schema context and is not supported. 
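*
* <p>Illustrative note (not part of the original javadoc): this method and
* {@code attributeNotSupported} differ mainly in severity -- schema-invalid
* attributes are logged as errors here, while schema-valid but unimplemented
* attributes are logged as warnings there.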
* * @param element the element name * @param attribute the attribute name * @param value the attribute value */ public void complain(String element, String attribute, String value) { mLogger.log( "For element " + element + " invalid attribute found " + attribute + " -> " + value, LogManager.ERROR_MESSAGE_LEVEL ); } /** * This is called when an attribute is encountered for an element that is valid * in the schema context but not supported right now. * * @param element the element name * @param attribute the attribute name * @param value the attribute value */ public void attributeNotSupported(String element, String attribute, String value) { mLogger.log( "For element " + element + " attribute currently not supported " + attribute + " -> " + value, LogManager.WARNING_MESSAGE_LEVEL ); } /** * Called when certain element nesting is allowed in the XML schema * but is not supported in the code as yet. * * @param parent parent element * @param child child element */ public void unSupportedNestingOfElements(String parent, String child ) { StringBuffer sb = new StringBuffer(); sb.append( "Unsupported nesting for element " ).append( child ). append( " in parent element " ).append( parent ); mLogger.log( sb.toString(), LogManager.WARNING_MESSAGE_LEVEL ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/ConfigXmlParser.java0000644000175000017500000005105311757531137026500 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.PoolConfig; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GlobusVersion; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPServer; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPBandwidth; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.JobManager; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.LRC; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.WorkDir; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import java.io.File; import java.io.IOException; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import edu.isi.pegasus.planner.namespace.Namespace; /** * This is the parsing class, used to parse the pool config file in xml format. * * * @author Gaurang Mehta gmehta@isi.edu * @author Karan Vahi vahi@isi.edu * @version $Revision: 2611 $ */ public class ConfigXmlParser extends Parser { /** * The "not-so-official" location URL of the VDLx schema definition. 
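* (Editorial note: despite the reference to VDLx, the URL below points at
* the {@code sc-2.0.xsd} site catalog schema.)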
*/ public static final String SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/sc-2.0.xsd"; /** * uri namespace */ public static final String SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/sitecatalog"; public PoolConfig m_pconfig = null; private SiteInfo m_pool_info = null; private String m_namespace = null; private String m_key = null; private GridFTPServer gftp = null; /** * Default Class Constructor. * * @param properties the PegasusProperties to be used. */ public ConfigXmlParser(PegasusProperties properties ) { super( properties ); } /** * Class Constructor intializes the parser and turns on validation. * * @param configFileName The file which you want to parse * @param properties the PegasusProperties to be used. */ public ConfigXmlParser( String configFileName, PegasusProperties properties ) { super( properties ); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_PARSE_SITE_CATALOG, "site-catalog.id", configFileName, LogManager.DEBUG_MESSAGE_LEVEL ); //setting the schema Locations String schemaLoc = getSchemaLocation(); mLogger.log( "Picking schema for site catalog" + schemaLoc, LogManager.CONFIG_MESSAGE_LEVEL); String list = ConfigXmlParser.SCHEMA_NAMESPACE + " " + schemaLoc; setSchemaLocations( list ); startParser( configFileName ); mLogger.logEventCompletion(); } public void startParser( String configxml ) { try { this.testForFile( configxml ); mParser.parse( configxml ); } catch ( IOException ioe ) { mLogger.log( "IO Error :" + ioe.getMessage(), LogManager.ERROR_MESSAGE_LEVEL ); } catch ( SAXException se ) { if ( mLocator != null ) { mLogger.log( "Error in " + mLocator.getSystemId() + " at line " + mLocator.getLineNumber() + "at column " + mLocator.getColumnNumber() + " :" + se.getMessage() , LogManager.ERROR_MESSAGE_LEVEL); } } } public void endDocument() { } public void endElement( String uri, String localName, String qName ) { if ( localName.trim().equalsIgnoreCase( "sitecatalog" ) ) { handleConfigTagEnd(); } else if ( localName.trim().equalsIgnoreCase( "site" ) ) { handlePoolTagEnd(); } else if ( localName.trim().equalsIgnoreCase( "lrc" ) ) { handleLRCTagEnd(); } else if ( localName.trim().equalsIgnoreCase( "jobmanager" ) ) { handleJobManagerTagEnd(); } else if ( localName.trim().equalsIgnoreCase( "profile" ) ) { handleProfileTagEnd( m_pool_info ); } else if ( localName.trim().equalsIgnoreCase( "gridftp" ) ) { handleGridFtpTagEnd(); } else if ( localName.trim().equalsIgnoreCase( "workdirectory" ) ) { handleWorkDirectoryTagEnd( m_pool_info ); } else if ( localName.trim().equalsIgnoreCase( "bandwidth" ) ) { handleGridFtpBandwidthTagEnd(); } else { mLogger.log( "Unkown element end reached :" + uri + ":" + localName + ":" + qName + "-******" + mTextContent + "***********", LogManager.ERROR_MESSAGE_LEVEL ); mTextContent.setLength( 0 ); } } public void startElement( String uri, String localName, String qName, Attributes attrs ) { try { if ( localName.trim().equalsIgnoreCase( "sitecatalog" ) ) { handleConfigTagStart(); } else if ( localName.trim().equalsIgnoreCase( "site" ) ) { m_pool_info = handlePoolTagStart( m_pconfig, attrs ); } else if ( localName.trim().equalsIgnoreCase( "lrc" ) ) { handleLRCTagStart( m_pool_info, attrs ); } else if ( localName.trim().equalsIgnoreCase( "jobmanager" ) ) { handleJobManagerTagStart( m_pool_info, attrs ); } else if ( localName.trim().equalsIgnoreCase( "profile" ) ) { handleProfileTagStart( m_pool_info, attrs ); } else if ( localName.trim().equalsIgnoreCase( "gridftp" ) ) { handleGridFtpTagStart( m_pool_info, attrs ); } else if ( 
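/* Editorial note (not original code): the start/end handlers in this class
 * dispatch on the sc-2.0 element names. A hypothetical fragment of the site
 * catalog XML they walk (all attribute values illustrative only):
 *
 *   <sitecatalog>
 *     <site handle="local" gridlaunch="/opt/kickstart">
 *       <profile namespace="env" key="PEGASUS_HOME">/opt/pegasus</profile>
 *       <lrc url="rls://example.org" />
 *       <gridftp url="gsiftp://example.org" major="4" minor="0" patch="3" />
 *       <jobmanager universe="vanilla" url="example.org/jobmanager-fork"
 *                   major="4" minor="0" patch="3" />
 *       <workdirectory>/scratch</workdirectory>
 *     </site>
 *   </sitecatalog>
 */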
localName.trim().equalsIgnoreCase( "workdirectory" ) ) { handleWorkDirectoryTagStart( m_pool_info, attrs ); } else if ( localName.trim().equalsIgnoreCase( "bandwidth" ) ) { handleGridFtpBandwidthTagStart( m_pool_info, attrs ); } else { mLogger.log( "Unknown element in xml :" + uri + ":" + localName + ":" + qName, LogManager.ERROR_MESSAGE_LEVEL ); } } catch ( Exception e ) { e.printStackTrace(); } } /** * Returns the XML schema namespace that a document being parsed conforms * to. * * @return the schema namespace */ public String getSchemaNamespace( ){ return ConfigXmlParser.SCHEMA_NAMESPACE; } public String getSchemaLocation() { // treat URI as File, yes, I know - I need the basename File uri = new File( ConfigXmlParser.SCHEMA_LOCATION ); // create a pointer to the default local position File poolconfig = new File( this.mProps.getSysConfDir(), uri.getName() ); return this.mProps.getPoolSchemaLocation( poolconfig.getAbsolutePath() ); } /** * * @return PoolConfig Returns a new PoolConfig object when * it encounters start of XML. * * @see org.griphyn.cPlanner.classes.PoolConfig */ private PoolConfig handleConfigTagStart() { m_pconfig = new PoolConfig(); return m_pconfig; } /** * * @param pcfg Takes the PoolConfig class. * @param attrs Takes the atrributes returned in XML. * * @return SiteInfo returns the reference to the PooInfo ojject * * @throws Exception * @see org.griphyn.cPlanner.classes.SiteInfo * @see org.griphyn.cPlanner.classes.PoolConfig */ private SiteInfo handlePoolTagStart( PoolConfig pcfg, Attributes attrs ) throws Exception { m_pool_info = new SiteInfo(); String handle = new String( attrs.getValue( "", "handle" ) ); //set the id of object m_pool_info.setInfo(SiteInfo.HANDLE,handle); if ( attrs.getValue( "", "gridlaunch" ) != null ) { String gridlaunch = new String( attrs.getValue( "", "gridlaunch" ) ); gridlaunch = (gridlaunch == null || gridlaunch.length() == 0 || gridlaunch.equalsIgnoreCase("null"))? 
null: gridlaunch; m_pool_info.setInfo( SiteInfo.GRIDLAUNCH, gridlaunch ); } if ( attrs.getValue( "", "sysinfo" ) != null ) { String sysinfo = new String( attrs.getValue( "", "sysinfo" ) ); m_pool_info.setInfo( SiteInfo.SYSINFO, sysinfo ); } pcfg.add( handle, m_pool_info ); return m_pool_info; } /** * * @param pinfo Poolinfo object that is to be populated * @param attrs Attributes for the element * @throws Exception */ private void handleProfileTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { m_namespace = new String( attrs.getValue( "", "namespace" ) ); m_key = new String( attrs.getValue( "", "key" ) ); } /** * * @param pinfo Poolinfo object that is to be populated * @param attrs Attributes for the element * @throws Exception */ private static void handleLRCTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { LRC lrc = new LRC( attrs.getValue( "", "url" ) ); pinfo.setInfo( SiteInfo.LRC, lrc ); } /** * @param pinfo Poolinfo object that is to be populated * @param attrs Attributes for the element * @throws Exception */ private void handleGridFtpTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { gftp = new GridFTPServer(); String gftp_url = new String( attrs.getValue( "", "url" ) ); gftp.setInfo( GridFTPServer.GRIDFTP_URL, gftp_url ); GlobusVersion globusver = new GlobusVersion( new Integer(attrs.getValue( "", "major" ) ).intValue(), new Integer( attrs.getValue( "", "minor" ) ).intValue(), new Integer( attrs.getValue( "", "patch" ) ).intValue() ); gftp.setInfo( GridFTPServer.GLOBUS_VERSION, globusver.getGlobusVersion() ); if ( attrs.getValue( "", "storage" ) != null ) { gftp.setInfo( GridFTPServer.STORAGE_DIR, new String( attrs.getValue( "", "storage" ) ) ); } if ( attrs.getValue( "", "total-size" ) != null ) { gftp.setInfo( GridFTPServer.TOTAL_SIZE, new String( attrs.getValue( "", "total-size" ) ) ); } if ( attrs.getValue( "", "free-size" ) != null ) { gftp.setInfo( GridFTPServer.FREE_SIZE, new String( attrs.getValue( "", "free-size" ) ) ); } //following line commented by sk setppolinfo is now called in handleGridFtpTagstop() //pinfo.setPoolInfo(GvdsPoolInfo.GRIDFTP, gftp); } /** * sk added function to handle gridftpbandwidth tag * @param pinfo Poolinfo object that is to be populated * @param attrs Attributes for the element * @throws Exception */ private void handleGridFtpBandwidthTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { GridFTPBandwidth gridftp_bandwidth = new GridFTPBandwidth(); String dest_id = new String( attrs.getValue( "", "dest-subnet" ) ); gridftp_bandwidth.setInfo( GridFTPBandwidth. DEST_ID, dest_id ); String avg_bw_range1 = new String( attrs.getValue( "", "avg-bandwidth-range1" ) ); if ( avg_bw_range1.length() != 0 ) { gridftp_bandwidth.setInfo( GridFTPBandwidth. AVG_BW_RANGE1, avg_bw_range1 ); } String avg_bw_range2 = attrs.getValue( "", "avg-bandwidth-range2" ); if ( avg_bw_range2 != null ) { gridftp_bandwidth.setInfo( GridFTPBandwidth. AVG_BW_RANGE1, avg_bw_range2 ); } String avg_bw_range3 = attrs.getValue( "", "avg-bandwidth-range3" ); if ( avg_bw_range3 != null ) { gridftp_bandwidth.setInfo( GridFTPBandwidth. AVG_BW_RANGE1, avg_bw_range3 ); } String avg_bw_range4 = attrs.getValue( "", "avg-bandwidth-range4" ); if ( avg_bw_range4 != null ) { gridftp_bandwidth.setInfo( GridFTPBandwidth. AVG_BW_RANGE1, avg_bw_range4 ); } gridftp_bandwidth.setInfo( GridFTPBandwidth. AVG_BW, new String( attrs.getValue( "", "avg-bandwidth" ) ) ); gridftp_bandwidth.setInfo( GridFTPBandwidth. 
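/* Editorial note (not original code): in the range handling above,
 * avg-bandwidth-range2, -range3 and -range4 are all stored under
 * AVG_BW_RANGE1, which looks like a copy-paste slip -- presumably
 * AVG_BW_RANGE2/3/4 were intended, so each later value silently
 * overwrites the earlier one. */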
MAX_BW, new String( attrs.getValue( "", "max-bandwidth" ) ) ); gridftp_bandwidth.setInfo( GridFTPBandwidth. MIN_BW, new String( attrs.getValue( "", "min-bandwidth" ) ) ); gftp.setGridFTPBandwidthInfo( gridftp_bandwidth ); } /** * This method handles the start of a jobmanager tag. * * @param pinfo The PoolInfo object which will hold the jobmanager information * @param attrs The attributes about the jobmanager tag returned from the XML. * * @throws Exception * @see org.griphyn.cPlanner.classes.SiteInfo */ private static void handleJobManagerTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { JobManager jbinfo = new JobManager(); jbinfo.setInfo( JobManager.UNIVERSE, new String( attrs.getValue( "", "universe" ) ) ); jbinfo.setInfo( JobManager.URL, new String( attrs.getValue( "", "url" ) ) ); GlobusVersion globusver = new GlobusVersion( new Integer( attrs.getValue( "", "major" ) ).intValue(), new Integer( attrs.getValue( "", "minor" ) ).intValue(), new Integer( attrs.getValue( "", "patch" ) ).intValue() ); jbinfo.setInfo( JobManager.GLOBUS_VERSION, globusver.getGlobusVersion() ); if ( attrs.getValue( "", "free-mem" ) != null ) { jbinfo.setInfo( JobManager.FREE_MEM, new String( attrs.getValue( "", "free-mem" ) ) ); } if ( attrs.getValue( "", "total-mem" ) != null ) { jbinfo.setInfo( JobManager.TOTAL_MEM, new String( attrs.getValue( "", "total-mem" ) ) ); } if ( attrs.getValue( "", "max-count" ) != null ) { jbinfo.setInfo( JobManager.MAX_COUNT, new String( attrs.getValue( "", "max-count" ) ) ); } if ( attrs.getValue( "", "max-cpu-time" ) != null ) { jbinfo.setInfo( JobManager.MAX_CPU_TIME, new String( attrs.getValue( "", "max-cpu-time" ) ) ); } if ( attrs.getValue( "", "running-jobs" ) != null ) { jbinfo.setInfo( JobManager.RUNNING_JOBS, new String( attrs.getValue( "", "running-jobs" ) ) ); } if ( attrs.getValue( "", "jobs-in-queue" ) != null ) { jbinfo.setInfo( JobManager.JOBS_IN_QUEUE, new String( attrs.getValue( "", "jobs-in-queue" ) ) ); } if ( attrs.getValue( "", "max-cpu-time" ) != null ) { jbinfo.setInfo( JobManager.MAX_CPU_TIME, new String( attrs.getValue( "", "max-cpu-time" ) ) ); } if ( attrs.getValue( "", "idle-nodes" ) != null ) { jbinfo.setInfo( JobManager.IDLE_NODES, new String( attrs.getValue( "", "idle-nodes" ) ) ); } if ( attrs.getValue( "", "total-nodes" ) != null ) { jbinfo.setInfo( JobManager.TOTAL_NODES, new String( attrs.getValue( "", "total-nodes" ) ) ); } if ( attrs.getValue( "", "os" ) != null ) { jbinfo.setInfo( JobManager.OS_TYPE, new String( attrs.getValue( "", "os" ) ) ); } if ( attrs.getValue( "", "arch" ) != null ) { jbinfo.setInfo( JobManager.ARCH_TYPE, new String( attrs.getValue( "", "arch" ) ) ); } if ( attrs.getValue( "", "type" ) != null ) { jbinfo.setInfo( JobManager.JOBMANAGER_TYPE, new String( attrs.getValue( "", "type" ) ) ); } pinfo.setInfo( SiteInfo.JOBMANAGER, jbinfo ); } /** * Handles the WorkDirectory Tag Start. * @param pinfo Takes a SiteInfo object for which the work directory is. * @param attrs Takes the attributes returned from the XML by the parser. 
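* (illustrative note, not original javadoc: for this element only the
* optional {@code total-size} and {@code free-size} attributes are examined)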
* @throws Exception * @see org.griphyn.cPlanner.classes.SiteInfo */ private static void handleWorkDirectoryTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { WorkDir gwd = new WorkDir(); if ( attrs.getValue( "", "total-size" ) != null ) { gwd.setInfo( WorkDir.TOTAL_SIZE, new String( attrs.getValue( "", "total-size" ) ) ); } if ( attrs.getValue( "", "free-size" ) != null ) { gwd.setInfo( WorkDir.FREE_SIZE, new String( attrs.getValue( "", "free-size" ) ) ); } //pinfo.setInfo( WorkDir.WORKDIR, gwd ); pinfo.setInfo(SiteInfo.WORKDIR,gwd); } /** * Handles the end of the Xml files. * */ private static void handleConfigTagEnd() { // System.out.println(m_pconfig.toXml()); } /** * Handles the end of the pool tag. */ private static void handlePoolTagEnd() { } /** * Handles the end of the Profile tag. * @param pinfo PoolInfo object for which the * profiles are collected. * * @throws java.lang.Exception * @see org.griphyn.cPlanner.classes.SiteInfo */ private void handleProfileTagEnd( SiteInfo pinfo ) throws RuntimeException { if ( mTextContent != null && m_namespace != null && m_key != null ) { //check if namespace is valid m_namespace = m_namespace.toLowerCase(); if( !Namespace.isNamespaceValid( m_namespace ) ){ mTextContent.setLength( 0 ); mLogger.log("Namespace specified in Site Catalog not supported. ignoring "+ m_namespace, LogManager.WARNING_MESSAGE_LEVEL); return; } Profile profile = new Profile( m_namespace, m_key, mTextContent.toString().trim() ); pinfo.setInfo( SiteInfo.PROFILE, profile ); mTextContent.setLength( 0 ); } } /** * Handles the end of the LRC Tag */ private static void handleLRCTagEnd() { } /** * sk made changes to the following function to set GRIDFTPServer instead of * setting it in fn handleGridFtpTagStart() * @throws java.lang.RuntimeException */ private void handleGridFtpTagEnd() throws RuntimeException { m_pool_info.setInfo( SiteInfo.GRIDFTP, gftp ); } private static void handleGridFtpBandwidthTagEnd() { } /** * Handles the end of the JobManager Tag */ private static void handleJobManagerTagEnd() { } /** * This method handles the Workdirectory tg end. * @param pinfo Takes the PoolInfo object. * @throws java.lang.Exception * @see org.griphyn.cPlanner.classes.SiteInfo */ private void handleWorkDirectoryTagEnd( SiteInfo pinfo ) throws RuntimeException { if ( mTextContent != null ) { WorkDir gdw = ( WorkDir ) pinfo.getInfo( SiteInfo.WORKDIR ); gdw.setInfo( WorkDir.WORKDIR, mTextContent.toString().trim() ); } mTextContent.setLength( 0 ); } /** * This class returns the reference to the PooConfig object * containing information about all the pools. * * @return returns a reference to the PoolConfig object which * contains all the pools. * * @see org.griphyn.cPlanner.classes.PoolConfig */ public PoolConfig getPoolConfig() { return m_pconfig; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/DAXParserFactoryException.java0000644000175000017500000000647511757531137030445 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.parser; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating DAXCallback implementations. * * @author Karan Vahi * @version $Revision: 2646 $ */ public class DAXParserFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "DAX Callback"; /** * Constructs a DAXParserFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public DAXParserFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a DAXParserFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public DAXParserFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a DAXParserFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public DAXParserFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a DAXParserFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public DAXParserFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/parser/ParserStackElement.java0000644000175000017500000000301311757531137027162 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.planner.parser; /** * This class keeps the name of an element and its corresponding * java object reference. The structure is used by the stack in * SiteCatalogParser. 
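*
* <p>Illustrative usage (not part of the original javadoc): a parser pushes
* {@code new ParserStackElement( qName, object )} in startElement and pops
* in endElement, comparing {@code getElementName()} against the closing tag
* name before linking {@code getElementObject()} into its parent object.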
* * @author Karan Vahi * @version $Revision: 2003 $ * */ public class ParserStackElement{ private String mElement; private Object mObject; /** * The overloaded constructor * * @param name * @param obj */ public ParserStackElement( String name, Object obj ) { mElement = name; mObject = obj; } /** * Returns the element name. * * @return name */ public String getElementName(){ return mElement; } /** * Returns the object referred to by the element. * * @return the object */ public Object getElementObject(){ return mObject; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/0000755000175000017500000000000011757531667022714 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/0000755000175000017500000000000011757531667023660 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/SiteFactoryException.java0000644000175000017500000000651611757531137030636 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Site Catalog * implementations. * * @author Karan Vahi * @version $Revision: 2079 $ */ public class SiteFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Site Catalog"; /** * Constructs a SiteFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public SiteFactoryException( String msg ) { super( msg ); mClassname = SiteFactoryException.DEFAULT_NAME; } /** * Constructs a SiteFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public SiteFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a SiteFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public SiteFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = SiteFactoryException.DEFAULT_NAME; } /** * Constructs a SiteFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). 
A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public SiteFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/0000755000175000017500000000000011757531667025315 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/SiteStore.java0000644000175000017500000006102611757531137030076 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.File; import java.io.IOException; import java.io.Writer; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; import edu.isi.pegasus.common.util.Currently; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.Pegasus; /** * The site store contains the collection of sites backed by a HashMap. * * @author Karan Vahi * @author Mats Rynge * @version $Revision: 4915 $ */ public class SiteStore extends AbstractSiteData{ /** * The "official" namespace URI of the site catalog schema. */ public static final String SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/sitecatalog"; /** * The "not-so-official" location URL of the DAX schema definition. */ public static final String SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/sc-3.0.xsd"; /** * The version to report. */ public static final String SCHEMA_VERSION = "3.0"; /** * The internal map that maps a site catalog entry to the site handle. */ private Map mStore; /** * The work dir path from the properties. */ private String mWorkDir; private PlannerOptions mPlannerOptions; /** * This contains the storage directory relative to the se mount point of the * pool. It is populated from the pegasus.dir.storage property from the properties * file. If not specified then the storage directory is the se mount point * from the pool.config file. */ protected String mStorageDir; /** * A boolean indicating whether to have a deep directory structure for * the storage directory or not. */ protected boolean mDeepStorageStructure; /** * The default constructor. */ public SiteStore(){ initialize(); } /** * The intialize method. */ public void initialize() { mStore = new HashMap( ); } /** * A setter method that is to be set to use getWorkDirectory functions, correctly. 
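*
* <p>Minimal sketch (not part of the original javadoc; the "local" handle is
* illustrative):
* <pre>
*   SiteStore store = new SiteStore();
*   store.addEntry( entry );                       // some SiteCatalogEntry
*   store.setForPlannerUse( properties, options ); // before any *WorkDirectory call
*   String dir = store.getInternalWorkDirectory( "local" );
* </pre>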
* * @param properties the PegasusProperties * @param options the PlannerOptions */ public void setForPlannerUse( PegasusProperties properties, PlannerOptions options ){ mPlannerOptions = options; mWorkDir = properties.getExecDirectory(); mStorageDir = properties.getStorageDirectory(); mDeepStorageStructure = properties.useDeepStorageDirectoryStructure(); } /** * Adds a site catalog entry to the store. * * @param entry the site catalog entry. * * @return previous value associated with specified key, or null * if there was no mapping for key */ public SiteCatalogEntry addEntry( SiteCatalogEntry entry ){ return this.mStore.put( entry.getSiteHandle() , entry ); } /** * Returns an iterator to SiteCatalogEntry objects in the store. * * @return Iterator */ public Iterator entryIterator(){ return this.mStore.values().iterator(); } /** * Returns the list of sites, in the store. * * @return list of sites */ public Set list() { return mStore.keySet(); } /** * Returns SiteCatalogEntry matching a site handle. * * @param handle the handle of the site to be looked up. * * @return SiteCatalogEntry if exists else null. */ public SiteCatalogEntry lookup ( String handle ){ return this.mStore.get( handle ); } /** * Returns boolean indicating whether the store has a SiteCatalogEntry * matching a handle. * * @param handle the site handle / identifier. * * @return boolean */ public boolean contains( String handle ){ return this.mStore.containsKey( handle ); } /** * * @param sites the list of site identifiers for which sysinfo is required. * * * @return the sysinfo map */ public Map getSysInfos( List sites ) { HashMap result = new HashMap(); for ( Iterator i = sites.iterator(); i.hasNext(); ) { SiteCatalogEntry site = this.lookup (( String ) i.next()); if( site != null ){ result.put( site.getSiteHandle(), site.getSysInfo() ); } } return result; } /** * * @param sites the list of site identifiers for which sysinfo is required. * * * @return the sysinfo map */ /*private Map getVDSSysInfos( List sites ) { HashMap result = new HashMap(); for ( Iterator i = sites.iterator(); i.hasNext(); ) { SiteCatalogEntry site = this.lookup (( String ) i.next()); if( site != null ){ result.put( site.getSiteHandle(), site.getVDSSysInfo() ); } } return result; } */ /** * Returns the VDSSysInfo for the site * * @param handle the site handle / identifier. * @return the VDSSysInfo else null */ public VDSSysInfo getVDSSysInfo( String handle ){ //sanity check if( !this.contains( handle ) ) { return null; } else{ return this.lookup( handle ).getVDSSysInfo(); } } /** * Returns the SysInfo for the site * * @param handle the site handle / identifier. * @return the SysInfo else null */ public SysInfo getSysInfo( String handle ){ //sanity check if( !this.contains( handle ) ) { return null; } else{ return this.lookup( handle ).getSysInfo(); } } /** * Returns the value of VDS_HOME for a site. * * @param handle the site handle / identifier. * * @return value if set else null. */ @Deprecated public String getVDSHome( String handle ){ //sanity check if( !this.contains( handle ) ) { return null; } else{ return this.lookup( handle ).getVDSHome(); } } /** * Returns the value of PEGASUS_HOME for a site. * * @param handle the site handle / identifier. * * @return value if set else null. */ @Deprecated public String getPegasusHome( String handle ){ if( !this.contains( handle ) ) { return null; } else{ return this.lookup( handle ).getPegasusHome(); } } /** * Returns an environment variable associated with the site. 
If a env value * is not specified in the site catalog, then only for local site * it falls back on the value retrieved from the environment. * * @param handle the site handle / identifier. * @param variable the name of the environment variable. * * @return value of the environment variable if found, else null */ public String getEnvironmentVariable( String handle, String variable ){ String value = null; //sanity check if( !this.contains( handle ) ) { value = null; } else{ value = this.lookup( handle ).getEnvironmentVariable( variable ); } /* Moved to SiteCatalogEntry Karan Dec 15, 2011 //change the preference order because of JIRA PM-471 if( value == null ){ //fall back only for local site the value in the env if( handle != null && handle.equals( "local" ) ){ //try to retrieve value from environment //for local site. value = System.getenv( variable ); } } */ return value; } /** * This is a soft state remove, that removes a GridGateway from a particular * site. The cause of this removal could be the inability to * authenticate against it at runtime. The successful removal lead Pegasus * not to schedule job on that particular grid gateway. * * @param handle the site handle with which it is associated. * @param contact the contact string for the grid gateway. * * @return true if was able to remove the jobmanager from the cache * false if unable to remove, or the matching entry is not found * or if the implementing class does not maintain a soft state. */ public boolean removeGridGateway( String handle, String contact ) { //sanity check if( !this.contains( handle ) ) { return false; } else{ return this.lookup( handle ).removeGridGateway( contact ); } } /** * This is a soft state remove, that removes a file server from a particular * pool entry. The cause of this removal could be the inability to * authenticate against it at runtime. The successful removal lead Pegasus * not to schedule any transfers on that particular gridftp server. * * @param handle the site handle with which it is associated. * @param url the contact string for the file server. * * @return true if was able to remove the gridftp from the cache * false if unable to remove, or the matching entry is not found * or if the implementing class does not maintain a soft state. * or the information about site is not in the site catalog. */ public boolean removeFileServer( String handle, String url ){ throw new UnsupportedOperationException( "Method remove( String , String ) not yet implmeneted" ); } /** * Returns a URL to the work directory as seen externally ( including external * mount point ). * * @param siteHandle the site handle. 
* * @return the url */ public String getExternalWorkDirectoryURL( String siteHandle ){ String url = null; SiteCatalogEntry site = this.lookup( siteHandle ); if( site == null ){ return url; } //select a file server FileServer fs = site.getHeadNodeFS().selectScratchSharedFileServer(); url = fs.getURLPrefix() + this.getExternalWorkDirectory( fs, siteHandle ); return url; } /** * Return the work directory as seen externally (including external mount point) * * @param fs the FileServer with the file system * @param siteHanlde the site for which you want the directory * * @return String corresponding to the mount point */ public String getExternalWorkDirectory( FileServer fs, String siteHandle) { StringBuffer path = new StringBuffer(); if ( mWorkDir.length() == 0 ) { // special case - no pegasus.dir.exec path.append( fs.getMountPoint() ); } else if ( mWorkDir.charAt( 0 ) != '/' ) { String mountPoint = fs.getMountPoint(); // path = fs.getMountPoint() + File.separator + mWorkDir; // not a absolute path given - append path.append( mountPoint ); if( mountPoint.charAt( mountPoint.length() - 1 ) == File.separatorChar ){ //no need to add path separator } else{ path.append( File.separator ); } } //always add the mWorkDir, whatever it is StringBuffer addon = new StringBuffer(); addon.append( mWorkDir ); String randDir = mPlannerOptions.getRandomDirName(); if ( randDir != null) { //append the random dir name to the //work dir constructed till now addon.append( File.separator ); //append withtout any modifications addon.append( randDir ); } path.append( addon.toString() ); return path.toString(); } /** * Return the storage mount point for a particular pool. * * @param site the site for which you want the storage-mount-point. * * @return String corresponding to the mount point if the pool is found. * null if pool entry is not found. */ public String getExternalStorageDirectory( String site ) { String mount_point = mStorageDir; SiteCatalogEntry entry = this.lookup( site ); //sanity check if( entry == null ){ return null; } FileServer server = null; if ( mStorageDir.length() == 0 || mStorageDir.charAt( 0 ) != '/' ) { server = entry.selectStorageFileServerForStageout(); mount_point = server.getMountPoint(); //removing the trailing slash if there int length = mount_point.length(); if ( length > 1 && mount_point.charAt( length - 1 ) == '/' ) { mount_point = mount_point.substring( 0, length - 1 ); } //append the Storage Dir File f = new File( mount_point, mStorageDir ); mount_point = f.getAbsolutePath(); } //check if we need to replicate the submit directory //structure on the storage directory if( mDeepStorageStructure ){ String leaf = ( this.mPlannerOptions.partOfDeferredRun() )? //if a deferred run then pick up the relative random directory //this.mUserOpts.getOptions().getRandomDir(): this.mPlannerOptions.getRelativeDirectory(): //for a normal run add the relative submit directory this.mPlannerOptions.getRelativeDirectory(); File f = new File( mount_point, leaf ); mount_point = f.getAbsolutePath(); } return mount_point; } /** * This determines the working directory on remote execution pool on the * basis of whether an absolute path is specified in the pegasus.dir.exec directory * or a relative path. * * @param handle the site handle of the site where a job has to be executed. * * @return the path to the pool work dir. * @throws RuntimeException in case of site not found in the site catalog. 
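* (worked example, not original javadoc: with a scratch mount point of
* {@code /scratch}, a relative {@code pegasus.dir.exec} value of
* {@code work}, and a random directory {@code run0001}, the resulting
* path is {@code /scratch/work/run0001})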
*/ public String getInternalWorkDirectory( String handle ) { return this.getInternalWorkDirectory( handle, null, -1 ); } /** * This determines the working directory on remote execution pool for a * particular job. The job should have it's execution pool set. * * @param job Job object for the job. * * @return the path to the pool work dir. * @throws RuntimeException in case of site not found in the site catalog. */ public String getInternalWorkDirectory( Job job ) { return this.getInternalWorkDirectory( job, false ); } /** * This determines the working directory on remote execution pool or a staging * site for a particular job. The job should have it's execution pool set. * * @param job Job object for the job. * @param onStagingSite boolean indicating whether the work directory required * is the one on staging site. * * @return the path to the pool work dir. * @throws RuntimeException in case of site not found in the site catalog. */ public String getInternalWorkDirectory( Job job , boolean onStagingSite ) { return this.getInternalWorkDirectory( onStagingSite ? job.getStagingSiteHandle() : job.getSiteHandle(), job.vdsNS.getStringValue( Pegasus.REMOTE_INITIALDIR_KEY ), job.jobClass ); } /** * This determines the working directory on remote execution pool on the * basis of whether an absolute path is specified in the pegasus.dir.exec * directory or a relative path. * * @param handle the site handle of the site where a job has to be executed. * @param path the relative path that needs to be appended to the * workdir from the execution pool. * * @return the path to the pool work dir. * @throws RuntimeException in case of site not found in the site catalog. */ public String getInternalWorkDirectory( String handle, String path ) { return this.getInternalWorkDirectory( handle, path, -1 ); } /** * This determines the working directory on remote execution pool on the * basis of whether an absolute path is specified in the pegasus.dir.exec directory * or a relative path. If the job class happens to be a create directory job * it does not append the name of the random directory since the job is * trying to create that random directory. * * @param handle the site handle of the site where a job has to be executed. * @param path the relative path that needs to be appended to the * workdir from the execution pool. * @param jobClass the class of the job. * * @return the path to the pool work dir. * @throws RuntimeException in case of site not found in the site catalog. */ public String getInternalWorkDirectory( String handle, String path, int jobClass ) { //get the random directory name //sanitary check if( mPlannerOptions == null ){ throw new RuntimeException( "The initializeUseForPlanner() was not called before calling getWorkDirectory"); } if(jobClass == Job.CREATE_DIR_JOB ){ //the create dir jobs always run in the //workdir specified in the site catalog //return execPool.getHeadNodeFS().getScratch().getSharedDirectory().getInternalMountPoint().getMountPoint(); //Related to JIRA PM-67 http://pegasus.isi.edu/jira/browse/PM-67 //pegasus-get-sites generates site catalog with VO specific // storage mount points and work directories. These dont exist //by default. 
Hence the job needs to be launched in /tmp return File.separator + "tmp"; } SiteCatalogEntry execPool = this.lookup( handle ); if(execPool == null){ throw new RuntimeException("Entry for " + handle + " does not exist in the Site Catalog"); } String execPoolDir = mWorkDir; if ( mWorkDir.length() == 0 || mWorkDir.charAt( 0 ) != '/' ) { //means you have to append the //value specfied by pegasus.dir.exec File f = new File( execPool.getInternalMountPointOfWorkDirectory(), mWorkDir ); execPoolDir = f.getAbsolutePath(); } String randDir = mPlannerOptions.getRandomDirName(); if ( randDir != null) { //append the random dir name to the //work dir constructed till now File f = new File( execPoolDir, randDir ); execPoolDir = f.getAbsolutePath(); } //path takes precedence over random dir if ( path != null ) { //well i can do nesting conditional return but wont return ( path.length() == 0 || path.charAt( 0 ) != '/' ) ? //append the path new File( execPoolDir, path ).getAbsolutePath() : //else absolute path specified path; } return execPoolDir; } /** * Writes out the contents of the replica store as XML document * * @param writer * @param indent * @throws java.io.IOException */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); indent = (indent != null && indent.length() > 0 ) ? indent: ""; String newIndent = indent + "\t"; //write out the xml header first. this.writeXMLHeader( writer, indent ); //iterate through all the entries and spit them out. for( Iterator it = this.entryIterator(); it.hasNext(); ){ it.next().toXML( writer, newIndent ); } //write out the footer writer.write( indent ); writer.write( "" ); writer.write( newLine ); } /** * Writes the header of the XML output. The output contains the special * strings to start an XML document, some comments, and the root element. * The latter points to the XML schema via XML Instances. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty * string. The parameter is used internally for the recursive * traversal. * * @exception IOException if something fishy happens to the stream. */ public void writeXMLHeader( Writer stream, String indent ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); indent = (indent != null && indent.length() > 0 ) ? indent: ""; stream.write( indent ); stream.write( "" ); stream.write( newline ); stream.write( indent ); stream.write( "" ); stream.write( newline ); // who generated this document stream.write( indent ); stream.write( "" ); stream.write( newline ); // root element with elementary attributes stream.write( indent ); stream.write( '<' ); stream.write( "sitecatalog xmlns" ); stream.write( "=\""); stream.write( SCHEMA_NAMESPACE ); stream.write( "\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"" ); stream.write( SCHEMA_NAMESPACE ); stream.write( ' ' ); stream.write( SCHEMA_LOCATION ); stream.write( '"' ); writeAttribute( stream, "version", SCHEMA_VERSION ); stream.write( '>' ); stream.write( newline ); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ SiteStore obj; try{ obj = ( SiteStore ) super.clone(); obj.initialize(); //iterate through all the entries and spit them out. 
for( Iterator it = this.entryIterator(); it.hasNext(); ){ obj.addEntry( (SiteCatalogEntry)it.next().clone( )); } } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } return obj; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/VORSSiteCatalogUtil.java0000644000175000017500000005100711757531137031722 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway.SCHEDULER_TYPE; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.io.File; public class VORSSiteCatalogUtil { private static LogManager mLogger = LogManagerFactory.loadSingletonInstance(); public static List get_sites_in_grid(String host, String port, String vo, String grid){ URL vors = null; URLConnection vc = null; BufferedReader in = null; String inputLine = null; ArrayList ret = new ArrayList(); try { vors = new URL("http://" + host + ":" + port + "/cgi-bin/tindex.cgi?VO=" + vo + "&grid=" + grid); //"http://vors.grid.iu.edu/cgi-bin/tindex.cgi?VO=ligo&grid=osg"); vc = vors.openConnection(); in = new BufferedReader(new InputStreamReader(vc.getInputStream())); while ((inputLine = in.readLine()) != null){ inputLine = inputLine.trim(); //ignore commented or empty lines if(inputLine.startsWith("#") || inputLine.equals("") ){ continue; } //#columns=ID,Name,Gatekeeper,Type,Grid,Status,Last Test Date String[] col = inputLine.split(","); VORSVOInfo vinfo = new VORSVOInfo(); vinfo.setID(col[0]); vinfo.setName(col[1]); vinfo.setGatekeeper(col[2]); vinfo.setType(col[3]); vinfo.setGrid(col[4]); vinfo.setStatus(col[5]); vinfo.setLast_Test_Date(col[6]); ret.add(vinfo); //System.out.println(inputLine); } } catch (MalformedURLException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } finally{ try { in.close(); } catch (IOException e) { e.printStackTrace(); } } return ret; } public static VORSSiteInfo get_sites_info(String host, String port, String vo, String grid, String id){ URL vors = null; URLConnection vc = null; BufferedReader in = null; String inputLine = null; VORSSiteInfo ret = new VORSSiteInfo(); Map values = new HashMap(); try { vors = new URL("http://" + host + ":" + port + "/cgi-bin/tindex.cgi?VO=" + vo + "&grid=" + grid + "&res=" + id); //"http://vors.grid.iu.edu/cgi-bin/tindex.cgi?VO=ligo&grid=osg&res=" + id); vc = 
vors.openConnection(); in = new BufferedReader(new InputStreamReader(vc.getInputStream())); while ((inputLine = in.readLine()) != null) { //ignore commented or empty lines inputLine = inputLine.trim(); if (inputLine.startsWith("#") || inputLine.equals("")) { continue; } String[] col = inputLine.split("="); if (col.length > 1) { values.put(col[0], col[1]); } } } catch (MalformedURLException e) { e.printStackTrace(); } catch (IOException e) { e.printStackTrace(); } finally{ try { in.close(); } catch (IOException e) { e.printStackTrace(); } } ret.setApp_loc((String)values.get("app_loc")); ret.setGatekeeper((String)values.get("gatekeeper")); ret.setGk_port((String)values.get("gk_port")); ret.setGsiftp_port((String)values.get("gsiftp_port")); ret.setData_loc((String)values.get("data_loc")); ret.setOsg_grid((String)values.get("osg_grid")); ret.setShortname((String)values.get("shortname")); ret.setTmp_loc((String)values.get("tmp_loc")); ret.setVdt_version((String)values.get("vdt_version")); ret.setSponsor_vo((String)values.get("sponsor_vo")); ret.setWntmp_loc((String)values.get("wntmp_loc")); ret.setApp_space((String)values.get("app_space")); ret.setData_space((String)values.get("data_space")); ret.setExec_jm((String)values.get("exec_jm")); ret.setGlobus_loc((String)values.get("globus_loc")); ret.setGrid_services((String)values.get("grid_services")); ret.setTmp_space((String)values.get("tmp_space")); ret.setUtil_jm((String)values.get("util_jm")); return ret; } public static VORSSiteInfo getLocalSiteInfo( String vo ){ VORSSiteInfo localSite = new VORSSiteInfo(); localSite.setShortname( "local" ); //System.out.println( System.getenv() ); //set some values on the basis of environment variables String pHome = System.getenv( "PEGASUS_HOME" ); if( pHome != null ){ //set osg_grid to the parent directory localSite.setOsg_grid( new File( pHome).getParent() ); } String gLocation = System.getenv( "GLOBUS_LOCATION" ); if( gLocation != null ){ localSite.setGlobus_loc( gLocation ); } String home = System.getenv( "HOME" ); if( home != null ){ String dir = new File( home, "pegasus" ).getAbsolutePath(); localSite.setData_loc( dir ); localSite.setTmp_loc( dir ); } localSite.setWntmp_loc( "/tmp" ); String localHost = "localhost"; try { localHost = java.net.InetAddress.getLocalHost().getHostName(); } catch (UnknownHostException ex) { mLogger.log( "Unable to determine hostname of local site" , ex, LogManager.WARNING_MESSAGE_LEVEL ); } //associate default ports for gridftp and gatekeeper localSite.setGatekeeper( localHost ); localSite.setGsiftp_port( "2811" ); localSite.setUtil_jm( localHost + "/jobmanager-fork" ); localSite.setExec_jm( localHost + "/jobmanager-condor" ); localSite.setSponsor_vo( vo ); return localSite; } /*public static String getGatekeeper(VORSSiteInfo sitInfo) { String host = sitInfo.getGatekeeper(); String port = sitInfo.getGk_port(); if(host == null || host.equals("")){ mLogger.log( "Gatekeeper hostname missing in "+ sitInfo.getVoInfo().getName() +" using gatekeeper entry.", LogManager.CONFIG_MESSAGE_LEVEL); host = (sitInfo.getVoInfo().getGatekeeper().split(":"))[0]; } if(port == null || port.equals("")){ mLogger.log( "Gatekeeper hostname missing in " + sitInfo.getVoInfo().getName() + " using default 2119.", LogManager.CONFIG_MESSAGE_LEVEL); port = "2119"; } if(port.equals("2119")){ return host; } else{ return host + ":" + port; } }*/ public static String getGsiftp(VORSSiteInfo sitInfo) { String host = sitInfo.getGatekeeper(); String port = sitInfo.getGsiftp_port(); if(host == null || 
host.equals("")){ mLogger.log( "Gridftp hostname missing in "+ sitInfo.getVoInfo().getName() +" using gatekeeper entry.", LogManager.CONFIG_MESSAGE_LEVEL); host = (sitInfo.getVoInfo().getGatekeeper().split(":"))[0]; } if(port == null || port.equals("")){ mLogger.log( "Gridftp hostname missing in " + sitInfo.getVoInfo().getName() + " using default 2811.", LogManager.CONFIG_MESSAGE_LEVEL); port = "2811"; } if(port.equals("2811")){ return "gsiftp://" + host; } else{ return "gsiftp://" + host + ":" + port; } } /** * Creates a Pegasus SiteCatalogEntry object from the information * in VORS. * * The following coventions are followed for determining the worker node * and storage node directories. * *
             * head node shared  -> data_loc
             * head node local   -> tmp_loc
             * worker node shared -> data_loc
             * worker node local  -> wntmp_loc
             *
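             *
             * A minimal usage sketch (hypothetical VORS host/port and VO values,
             * taken from the defaults mentioned in this class; setVoInfo() is an
             * assumed setter paired with the getVoInfo() this method relies on):
             *
             *     List sites = VORSSiteCatalogUtil.get_sites_in_grid(
             *         "vors.grid.iu.edu", "80", "ligo", "osg" );
             *     VORSVOInfo vo = (VORSVOInfo) sites.get( 0 );
             *     VORSSiteInfo info = VORSSiteCatalogUtil.get_sites_info(
             *         "vors.grid.iu.edu", "80", "ligo", "osg", vo.getID() );
             *     info.setVoInfo( vo );   // assumed setter
             *     SiteCatalogEntry entry = VORSSiteCatalogUtil.createSiteCatalogEntry( info );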
* * @param sitInfo * * @return SiteCatalogEntry object. */ public static SiteCatalogEntry createSiteCatalogEntry(VORSSiteInfo sitInfo){ SiteCatalogEntry entry = new SiteCatalogEntry( sitInfo.getShortname()); entry.setHeadNodeFS( createHeadNodeFS(sitInfo) ); entry.setWorkerNodeFS( createWorkerNodeFS(sitInfo) ); //associate a replica catalog with the site. ReplicaCatalog rc = new ReplicaCatalog( "rls://replica.isi.edu", "RLS" ); rc.addAlias( sitInfo.getShortname()); //rc.addConnection( new Connection("ignore.lrc", "rls://replica.caltech.edu" )); entry.addReplicaCatalog( rc ); //associate some profiles entry.addProfile( new Profile( Profile.ENV, "PEGASUS_HOME", ((sitInfo.getOsg_grid() != null)?sitInfo.getOsg_grid():"") +"/pegasus")) ; entry.addProfile( new Profile( Profile.ENV, "app_loc",((sitInfo.getApp_loc() != null)?sitInfo.getApp_loc():"/")) ); entry.addProfile( new Profile( Profile.ENV, "data_loc", ((sitInfo.getData_loc() != null)?sitInfo.getData_loc():"/")) ); entry.addProfile( new Profile( Profile.ENV, "osg_grid", ((sitInfo.getOsg_grid() != null)?sitInfo.getOsg_grid():"/")) ); entry.addProfile( new Profile( Profile.ENV, "tmp_loc", ((sitInfo.getTmp_loc() != null)?sitInfo.getTmp_loc():"/")) ); entry.addProfile( new Profile( Profile.ENV, "wntmp_loc", ((sitInfo.getWntmp_loc() != null)?sitInfo.getWntmp_loc():"/")) ); entry.addProfile( new Profile( Profile.VDS, "grid", ((sitInfo.getVoInfo().getGrid() != null)?sitInfo.getVoInfo().getGrid():"")) ); //associate grid gateway for auxillary and compute jobs GridGateway gw = new GridGateway( GridGateway.TYPE.gt2, ((sitInfo.getUtil_jm() != null)? sitInfo.getUtil_jm(): (sitInfo.getVoInfo().getGatekeeper().split(":"))[0] + "/jobmanager-fork"), getSchedulerType(sitInfo.getUtil_jm()) ); gw.setJobType( GridGateway.JOB_TYPE.auxillary ); entry.addGridGateway( gw ); if( gw.getScheduler() == GridGateway.SCHEDULER_TYPE.Fork ){ //add the headnode globus location entry.addProfile( new Profile( Profile.ENV, "GLOBUS_LOCATION", ((sitInfo.getGlobus_loc() != null)?sitInfo.getGlobus_loc():"/") ) ); entry.addProfile( new Profile( Profile.ENV, "LD_LIBRARY_PATH", ((sitInfo.getGlobus_loc() != null)?sitInfo.getGlobus_loc():"") + "/lib") ); } else{ mLogger.log( "Creating globus location on basis of OSG_GRID for site " + entry.getSiteHandle() , LogManager.DEBUG_MESSAGE_LEVEL ); String wn = sitInfo.getOsg_grid(); String globus = ( wn == null )? "/globus" : wn + "/globus"; entry.addProfile( new Profile( Profile.ENV, "GLOBUS_LOCATION", globus ) ); entry.addProfile( new Profile( Profile.ENV, "LD_LIBRARY_PATH", globus + "/lib" )); } gw = new GridGateway( GridGateway.TYPE.gt2, ((sitInfo.getExec_jm() != null)? sitInfo.getExec_jm(): (sitInfo.getVoInfo().getGatekeeper().split(":"))[0] + "/jobmanager-fork"), getSchedulerType(sitInfo.getExec_jm()) ); gw.setJobType( GridGateway.JOB_TYPE.compute ); entry.addGridGateway( gw ); return entry; } private static SCHEDULER_TYPE getSchedulerType(String url) { if(url == null){ return GridGateway.SCHEDULER_TYPE.Fork ; } if( url.endsWith( "condor" ) ){ return GridGateway.SCHEDULER_TYPE.Condor ; } else if( url.endsWith( "fork" ) ){ return GridGateway.SCHEDULER_TYPE.Fork ; } else if( url.endsWith( "pbs" ) ){ return GridGateway.SCHEDULER_TYPE.PBS ; } else if( url.endsWith( "lsf" ) ){ return GridGateway.SCHEDULER_TYPE.LSF ; } else if( url.endsWith( "sge" ) ){ return GridGateway.SCHEDULER_TYPE.SGE; } //if nothing is there than return fork return GridGateway.SCHEDULER_TYPE.Fork ; } /** * Creates an object describing the head node filesystem. 
* * The following conventions are followed. *
             * shared:
             *     scratch  data_loc
             *     storage  data_loc
             * local:
             *     scratch  tmp_loc
             *     storage  tmp_loc
             *
             *
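             *
             * For example (a sketch; the mount points simply echo what VORS
             * reported in data_loc, or "/" when unset):
             *
             *     HeadNodeFS fs = VORSSiteCatalogUtil.createHeadNodeFS( info );
             *     String sharedScratch = fs.getScratch().getSharedDirectory()
             *                              .getInternalMountPoint().getMountPoint();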
* * @return the HeadNodeFS */ public static HeadNodeFS createHeadNodeFS(VORSSiteInfo sitInfo){ // describe the head node filesystem HeadNodeFS hfs = new HeadNodeFS(); //head node scratch description start HeadNodeScratch hscratch = new HeadNodeScratch(); //head node local scratch description LocalDirectory hscratchLocal = new LocalDirectory(); String directory = (sitInfo.getTmp_loc() != null)?sitInfo.getTmp_loc():"/"; FileServer f = new FileServer( "gsiftp", getGsiftp(sitInfo), directory ); hscratchLocal.addFileServer( f ); //no distinction between internal and external view hscratchLocal.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); //head node shared scratch description SharedDirectory hscratchShared = new SharedDirectory(); directory = (sitInfo.getData_loc() != null)?sitInfo.getData_loc():"/"; f = new FileServer( "gsiftp", getGsiftp(sitInfo), directory ); hscratchShared.addFileServer( f ); //no distinction between internal and external view hscratchShared.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); hscratch.setLocalDirectory( hscratchLocal ); hscratch.setSharedDirectory( hscratchShared ); //head node scratch description ends //head node storage description start HeadNodeStorage hstorage = new HeadNodeStorage(); //head node local storage description LocalDirectory hstorageLocal = new LocalDirectory(); directory = (sitInfo.getTmp_loc() != null)?sitInfo.getTmp_loc():"/" ; f = new FileServer( "gsiftp", getGsiftp(sitInfo), directory ); hstorageLocal.addFileServer( f ); //internal and external view is same hstorageLocal.setInternalMountPoint( new InternalMountPoint( directory, "30G", "100G") ); //head node shared storage description SharedDirectory hstorageShared = new SharedDirectory(); directory = (sitInfo.getData_loc() != null)?sitInfo.getData_loc():"/"; f = new FileServer( "gsiftp", getGsiftp(sitInfo), directory ); hstorageShared.addFileServer( f ); //no distinction between internal and external view hstorageShared.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); hstorage.setLocalDirectory( hstorageLocal ); hstorage.setSharedDirectory( hstorageShared ); //head node storage description ends hfs.setScratch( hscratch ); hfs.setStorage( hstorage ); return hfs; } /** * Creates an object describing the worker node filesystem. * * The following conventions are followed. *
             * shared:
             *     scratch  data_loc
             *     storage  data_loc
             * local:
             *     scratch  wntmp_loc
             *     storage  wntmp_loc
             *
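             *
             * Usage mirrors createHeadNodeFS() (a sketch; assumes WorkerNodeFS
             * exposes getScratch()/getStorage() accessors matching the setters
             * used below). All worker node directories are served via file:// URLs:
             *
             *     WorkerNodeFS wfs = VORSSiteCatalogUtil.createWorkerNodeFS( info );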
* * @return the WorkerNodeFS * */ public static WorkerNodeFS createWorkerNodeFS(VORSSiteInfo sitInfo){ // describe the head node filesystem WorkerNodeFS wfs = new WorkerNodeFS(); //worker node scratch description start WorkerNodeScratch wscratch = new WorkerNodeScratch(); //worker node local scratch description LocalDirectory wscratchLocal = new LocalDirectory(); String directory = (sitInfo.getWntmp_loc() != null)?sitInfo.getWntmp_loc():"/"; FileServer f = new FileServer( "file", "file:///", directory ); wscratchLocal.addFileServer( f ); //no distinction between internal and external view wscratchLocal.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); //worker node shared scratch description SharedDirectory wscratchShared = new SharedDirectory(); directory = (sitInfo.getData_loc() != null)?sitInfo.getData_loc():"/"; f = new FileServer( "file", "file:///", directory ); //no distinction between internal and external view wscratchShared.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); wscratch.setLocalDirectory( wscratchLocal ); wscratch.setSharedDirectory( wscratchShared ); //head node scratch description ends //worker node storage description start WorkerNodeStorage wstorage = new WorkerNodeStorage(); //worker node local scratch description LocalDirectory wstorageLocal = new LocalDirectory(); directory = (sitInfo.getWntmp_loc() != null)?sitInfo.getWntmp_loc():"/"; f = new FileServer( "file", "file:///", directory ); wstorageLocal.addFileServer( f ); //no distinction between internal and external view wstorageLocal.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); //worker node shared scratch description SharedDirectory wstorageShared = new SharedDirectory(); directory = (sitInfo.getData_loc() != null)?sitInfo.getData_loc():"/" ; f = new FileServer( "file", "file:///", directory ); //no distinction between internal and external view wstorageShared.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); wstorage.setLocalDirectory( wstorageLocal ); wstorage.setSharedDirectory( wstorageShared ); //worker node scratch description ends //worker node storage description ends wfs.setScratch( wscratch ); wfs.setStorage( wstorage ); return wfs; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/InternalMountPoint.java0000644000175000017500000000624211757531137031765 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.Writer; import java.io.StringWriter; import java.io.IOException; /** * A data class to signify the Internal Mount Point for a filesystem. * * @author Karan Vahi */ public class InternalMountPoint extends FileSystemType { /** * The default constructor. */ public InternalMountPoint() { super(); } /** * The overloaded constructor. * * @param mountPoint the mount point of the system. 
*/ public InternalMountPoint( String mountPoint ) { this( mountPoint, null, null ); } /** * The overloaded constructor. * * @param mountPoint the mount point of the system. * @param totalSize the total size of the system. * @param freeSize the free size */ public InternalMountPoint( String mountPoint, String totalSize, String freeSize ){ super( mountPoint, totalSize, freeSize ); } /*** * A convenience method that returns true if all the attributes values are * uninitialized or empty strings. Useful for serializing the object as * XML. * * @return boolean */ public boolean isEmpty(){ return ( this.getFreeSize() == null || this.getFreeSize().length() == 0 ) && ( this.getMountPoint() == null || this.getMountPoint().length() == 0 ) && ( this.getTotalSize() == null || this.getTotalSize().length() == 0 ) ; } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to use. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); //sanity check? if( this.isEmpty() ){ return; } //write out the xml element writer.write( indent ); writer.write( "" ); writer.write( newLine ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/LocalDirectory.java0000644000175000017500000000511011757531137031064 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.Writer; import java.io.IOException; import java.util.Iterator; import java.util.List; /** * This data class represents a local directory on a site. * Can be local scratch or local storage. * * @version $Revision: 600 $ * @author Karan Vahi * */ public class LocalDirectory extends DirectoryType{ /** * The default constructor. */ public LocalDirectory(){ super(); } /** * The overloaded constructor * * @param fs list of file servers * @param imt the internal mount point. */ public LocalDirectory( List fs, InternalMountPoint imt ){ super( fs, imt ); } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //sanity check? 
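        // an empty directory description carries no information, so write nothing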
if( this.isEmpty() ){ return; } //write out the xml element writer.write( indent ); writer.write( "" ); writer.write( newLine ); //iterate through all the file servers for( Iterator it = this.getFileServersIterator(); it.hasNext(); ){ FileServer fs = it.next(); fs.toXML( writer, newIndent ); } //write out the internal mount point this.getInternalMountPoint().toXML( writer, newIndent ); writer.write( indent ); writer.write( "" ); writer.write( newLine ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/VORSVOInfo.java0000644000175000017500000000375011757531137030027 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; public class VORSVOInfo { private String ID; private String Name; private String Gatekeeper; private String Type; private String Grid; private String Status; private String Last_Test_Date; public void print(){ System.out.println("ID " + ID); System.out.println("Name " + Name); System.out.println("Gatekeeper " + Gatekeeper); System.out.println("Type " + Type); System.out.println("Grid " + Grid); System.out.println("Status " + Status); System.out.println("Last_Test_Date " + Last_Test_Date); } public String getID() { return ID; } public void setID(String id) { ID = id; } public String getName() { return Name; } public void setName(String name) { Name = name; } public String getGatekeeper() { return Gatekeeper; } public void setGatekeeper(String gatekeeper) { Gatekeeper = gatekeeper; } public String getType() { return Type; } public void setType(String type) { Type = type; } public String getGrid() { return Grid; } public void setGrid(String grid) { Grid = grid; } public String getStatus() { return Status; } public void setStatus(String status) { Status = status; } public String getLast_Test_Date() { return Last_Test_Date; } public void setLast_Test_Date(String last_Test_Date) { Last_Test_Date = last_Test_Date; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/AbstractSiteData.java0000644000175000017500000000571211757531137031337 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.Writer; import java.io.StringWriter; import java.io.IOException; /** * The abstract data class for Site classes. 
* * @author Karan Vahi */ public abstract class AbstractSiteData extends SiteData { /** * Returns the xml description of the object. This is used for generating * the partition graph. That is no longer done. * * @return String containing the object in XML. * * @exception IOException if something fishy happens to the stream. */ public String toXML() throws IOException{ Writer writer = new StringWriter(32); toXML( writer, "" ); return writer.toString(); } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public abstract void toXML( Writer writer, String indent ) throws IOException ; /** * Writes out the object as a string. Internally calls out the toXML method. * * @return string description * * */ public String toString(){ StringWriter writer = new StringWriter( ); try{ this.toXML( writer, "" ); } catch( IOException ioe ){ } return writer.toString(); } /** * Writes an attribute to the stream. Wraps the value in quotes as required * by XML. * * @param writer * @param key * @param value * * @exception IOException if something fishy happens to the stream. */ public void writeAttribute( Writer writer, String key, String value ) throws IOException{ writer.write( " " ); writer.write( key ); writer.write( "=\""); writer.write( value ); writer.write( "\"" ); } /** * Returns the clone of the object. * * @return the clone */ public Object clone() throws CloneNotSupportedException{ AbstractSiteData d; d = ( AbstractSiteData ) super.clone(); return d; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/SiteCatalogEntry.java0000644000175000017500000004577411757531137031412 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.classes; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.catalog.classes.VDSSysInfo2NMI; import edu.isi.pegasus.planner.catalog.classes.Profiles.NAMESPACES; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway.JOB_TYPE; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import edu.isi.pegasus.planner.catalog.transformation.classes.NMI2VDSSysInfo; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.common.PegRandom; import edu.isi.pegasus.planner.namespace.Namespace; import edu.isi.pegasus.planner.namespace.Pegasus; import java.util.List; import java.util.LinkedList; import java.util.Map; import java.util.HashMap; import java.util.Iterator; import java.util.Map.Entry; import java.io.File; import java.io.Writer; import java.io.IOException; /** * This data class describes a site in the site catalog. 
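 *
 * A construction sketch (the handle, profile, and gateway values below are
 * illustrative only):
 *
 *     SiteCatalogEntry site = new SiteCatalogEntry( "local" );
 *     site.addProfile( new Profile( Profile.ENV, "PEGASUS_HOME", "/opt/pegasus" ) );
 *     GridGateway gw = new GridGateway( );  // gt2 fork gateway on localhost
 *     gw.setJobType( GridGateway.JOB_TYPE.compute );
 *     site.addGridGateway( gw );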
* * @author Karan Vahi * @version $Revision: 5078 $ */ public class SiteCatalogEntry extends AbstractSiteData{ /** * The name of the environment variable PEGASUS_BIN_DIR. */ public static final String PEGASUS_BIN_DIR = "PEGASUS_BIN_DIR"; /** * The name of the environment variable PEGASUS_HOME. */ public static final String PEGASUS_HOME = "PEGASUS_HOME"; /** * The name of the environment variable VDS_HOME. */ public static final String VDS_HOME = "VDS_HOME"; /** * The site identifier. */ private String mID; /** * The System Information for the Site. */ private SysInfo mSysInfo; /** * The profiles asscociated with the site. */ private Profiles mProfiles; /** * The handle to the head node filesystem. */ private HeadNodeFS mHeadFS; /** * The handle to the worker node filesystem. */ private WorkerNodeFS mWorkerFS; /** * Map of grid gateways at the site for submitting different job types. */ private Map mGridGateways; /** * The list of replica catalog associated with the site. */ private List mReplicaCatalogs; /** * The default constructor. */ public SiteCatalogEntry() { this( "" ); } /** * The overloaded constructor. * * @param id the site identifier. */ public SiteCatalogEntry( String id ) { initialize( id ); } /** * Not implmented as yet. * * @return UnsupportedOperationException */ public Iterator getFileServerIterator() { throw new UnsupportedOperationException("Not yet implemented"); } /** * Not implemented as yet. * * @return UnsupportedOperationException */ public List getFileServers() { throw new UnsupportedOperationException("Not yet implemented"); } /** * Not implemented as yet * * @return UnsupportedOperationException */ public List getGridGateways() { throw new UnsupportedOperationException("Not yet implemented"); } /** * Initializes the object. * * @param id the site identifier. */ public void initialize( String id ){ mID = id; mSysInfo = new SysInfo(); mProfiles = new Profiles(); mGridGateways = new HashMap(); mReplicaCatalogs = new LinkedList(); } /** * Sets the site handle for the site * * @param id the site identifier. */ public void setSiteHandle( String id ){ mID = id; } /** * Returns the site handle for the site * * @return the site identifier. */ public String getSiteHandle( ){ return mID; } /** * Sets the System Information associated with the Site. * * * @param sysinfo the system information of the site. */ public void setSysInfo( SysInfo sysinfo ) { mSysInfo = sysinfo; } /** * Returns the System Information associated with the Site. * * * @return SysInfo the system information. */ public SysInfo getSysInfo( ) { return mSysInfo; } /** * Sets the architecture of the site. * * @param arch the architecture. */ public void setArchitecture( SysInfo.Architecture arch ){ mSysInfo.setArchitecture(arch); } /** * Returns the architecture of the site. * * @return the architecture. */ public SysInfo.Architecture getArchitecture( ){ return mSysInfo.getArchitecture(); } /** * Sets the OS of the site. * * @param os the os of the site. */ public void setOS( SysInfo.OS os ){ mSysInfo.setOS(os); } /** * Returns the OS of the site. * * @return the OS */ public SysInfo.OS getOS( ){ return mSysInfo.getOS(); } /** * Sets the sysinfo for the site. * * @param sysinfo */ public void setVDSSysInfo( VDSSysInfo sysinfo ){ this.setSysInfo( VDSSysInfo2NMI.vdsSysInfo2NMI(sysinfo)); } /** * Returns the sysinfo for the site. * * @return getVDSSysInfo */ public VDSSysInfo getVDSSysInfo(){ return NMI2VDSSysInfo.nmiToVDSSysInfo(mSysInfo); } /** * Sets the OS release of the site. 
* * @param release the os releaseof the site. */ public void setOSRelease( String release ){ mSysInfo.setOSRelease(release); } /** * Returns the OS release of the site. * * @return the OS */ public String getOSRelease( ){ return mSysInfo.getOSRelease(); } /** * Sets the OS version of the site. * * @param version the os versionof the site. */ public void setOSVersion( String version ){ mSysInfo.setOSVersion(version); } /** * Returns the OS version of the site. * * @return the OS */ public String getOSVersion( ){ return mSysInfo.getOSVersion(); } /** * Sets the glibc version on the site. * * @param version the glibc version of the site. */ public void setGlibc( String version ){ mSysInfo.setGlibc(version); } /** * Returns the glibc version of the site. * * @return the OS */ public String getGlibc( ){ return mSysInfo.getGlibc(); } /** * Sets the headnode filesystem. * * @param system the head node filesystem. */ public void setHeadNodeFS( HeadNodeFS system ){ mHeadFS = system; } /** * Returns the headnode filesystem. * * @return the head node filesystem. */ public HeadNodeFS getHeadNodeFS( ){ return mHeadFS; } /** * Sets the worker node filesystem. * * @param system the head node filesystem. */ public void setWorkerNodeFS( WorkerNodeFS system ){ mWorkerFS = system; } /** * Returns the worker node filesystem. * * @return the worker node filesystem. */ public WorkerNodeFS getWorkerNodeFS( ){ return mWorkerFS; } /** * Returns the work directory for the compute jobs on a site. * * Currently, the work directory is picked up from the head node shared filesystem. * * @return the internal mount point. */ public String getInternalMountPointOfWorkDirectory() { return this.getHeadNodeFS().getScratch().getSharedDirectory().getInternalMountPoint().getMountPoint(); } /** * Adds a profile. * * @param p the profile to be added */ public void addProfile( Profile p ){ //retrieve the appropriate namespace and then add mProfiles.addProfile( p ); } /** * Sets the profiles associated with the file server. * * @param profiles the profiles. */ public void setProfiles( Profiles profiles ){ mProfiles = profiles; } /** * Returns the profiles associated with the site. * * @return profiles. */ public Profiles getProfiles( ){ return mProfiles; } /** * Returns the value of VDS_HOME for a site. * * * @return value if set else null. */ @Deprecated public String getVDSHome( ){ String s = this.getEnvironmentVariable( VDS_HOME ); if (s != null && s.length() > 0) { return s; } // fall back on bin dir - this is to ensure a smooth transition to FHS s = this.getEnvironmentVariable( PEGASUS_BIN_DIR ); if (s != null && s.length() > 0) { File f = new File(s + "/.."); return f.getAbsolutePath(); } return null; } /** * Returns the value of PEGASUS_HOME for a site. * * * @return value if set else null. */ @Deprecated public String getPegasusHome( ){ String s = this.getEnvironmentVariable( PEGASUS_HOME ); if (s == null || s.length() == 0) { // fall back on bin dir - this is to ensure a smooth transition to FHS s = this.getEnvironmentVariable( PEGASUS_BIN_DIR ); if (s != null && s.length() > 0) { s += "/.."; } } // normalize the path if (s != null && s.length() > 0) { File f = new File(s); try { s = f.getAbsolutePath(); } catch (Exception e) { // ignore - just leave s alone } } else { s = null; } return s; } /** * Returns an environment variable associated with the site. * * @param variable the environment variable whose value is required. 
* * @return value of the environment variable if found, else null */ public String getEnvironmentVariable( String variable ){ Namespace n = this.mProfiles.get( Profiles.NAMESPACES.env ); String value = ( n == null ) ? null : (String)n.get( variable ); //change the preference order because of JIRA PM-471 if( value == null ){ //fall back only for local site the value in the env String handle = this.getSiteHandle(); if( handle != null && handle.equals( "local" ) ){ //try to retrieve value from environment //for local site. value = System.getenv( variable ); } } return value; } /** * Returns a grid gateway object corresponding to a job type. * * @param type the job type * * @return GridGateway */ public GridGateway getGridGateway( GridGateway.JOB_TYPE type ){ return mGridGateways.get( type ); } /** * Selects a grid gateway object corresponding to a job type. * It also defaults to other GridGateways if grid gateway not found for * that job type. * * @param type the job type * * @return GridGateway */ public GridGateway selectGridGateway( GridGateway.JOB_TYPE type ){ GridGateway g = this.getGridGateway( type ); if( g == null ){ if( type == JOB_TYPE.transfer || type == JOB_TYPE.cleanup || type == JOB_TYPE.register ){ return this.selectGridGateway( JOB_TYPE.auxillary ); } else if ( type == JOB_TYPE.auxillary ){ return this.selectGridGateway( JOB_TYPE.compute ); } } return g; } /** * A convenience method that selects a file server for staging the data out to * a site. It returns the file server to which the generated data is staged * out / published. * * The FileServer selected is associated with the HeadNode Filesystem. * * @return the FileServer else null. */ public FileServer selectStorageFileServerForStageout(){ return ( this.getHeadNodeFS() == null )? null: this.getHeadNodeFS().selectStorageFileServerForStageout(); } /** * Return an iterator to value set of the Map. * * @return Iterator */ public Iterator getGridGatewayIterator(){ return mGridGateways.values().iterator(); } /** * Add a GridGateway to the site. * * @param g the grid gateway to be added. */ public void addGridGateway( GridGateway g ){ mGridGateways.put( g.getJobType(), g ); } /** * This is a soft state remove, that removes a GridGateway from a particular * site. * * @param contact the contact string for the grid gateway. * * @return true if was able to remove the jobmanager from the cache * false if unable to remove, or the matching entry is not found * or if the implementing class does not maintain a soft state. */ public boolean removeGridGateway( String contact ) { //iterate through the entry set for( Iterator it = this.mGridGateways.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Entry) it.next(); GridGateway g = ( GridGateway )entry.getValue(); if( g.getContact().equals( contact ) ) { it.remove(); return true; } } return false; } /** * Return an iterator to the replica catalog associated with the site. * * @return Iterator */ public Iterator getReplicaCatalogIterator(){ return mReplicaCatalogs.iterator(); } /** * Add a Replica Catalog to the site. * * @param catalog the replica catalog to be added. */ public void addReplicaCatalog( ReplicaCatalog catalog ){ mReplicaCatalogs.add( catalog ); } /** * Selects a Random ReplicaCatalog. * * @return ReplicaCatalog
if more than one associates else * returns null. */ public ReplicaCatalog selectReplicaCatalog( ) { return ( this.mReplicaCatalogs == null || this.mReplicaCatalogs.size() == 0 )? null : this.mReplicaCatalogs.get( PegRandom.getInteger( this.mReplicaCatalogs.size() - 1) ); } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //write out the xml element writer.write( indent ); writer.write( ""); writer.write( newLine ); //list all the gridgateways for( Iterator it = this.getGridGatewayIterator(); it.hasNext(); ){ it.next().toXML( writer, newIndent ); } HeadNodeFS fs = null; if( (fs = this.getHeadNodeFS()) != null ){ fs.toXML( writer, newIndent ); } WorkerNodeFS wfs = null; if( ( wfs = this.getWorkerNodeFS() ) != null ){ wfs.toXML( writer, newIndent ); } //list all the replica catalogs associate for( Iterator it = this.getReplicaCatalogIterator(); it.hasNext(); ){ it.next().toXML( writer, newIndent ); } this.getProfiles().toXML( writer, newIndent ); writer.write( indent ); writer.write( "" ); writer.write( newLine ); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ SiteCatalogEntry obj; try{ obj = ( SiteCatalogEntry ) super.clone(); obj.initialize( this.getSiteHandle() ); obj.setSysInfo( (SysInfo)this.getSysInfo().clone()); //list all the gridgateways for( Iterator it = this.getGridGatewayIterator(); it.hasNext(); ){ obj.addGridGateway( (GridGateway)it.next().clone() ); } HeadNodeFS fs = null; if( (fs = this.getHeadNodeFS()) != null ){ obj.setHeadNodeFS( (HeadNodeFS)fs.clone() ); } WorkerNodeFS wfs = null; if( ( wfs = this.getWorkerNodeFS() ) != null ){ obj.setWorkerNodeFS( (WorkerNodeFS)wfs.clone() ); } //list all the replica catalogs associate for( Iterator it = this.getReplicaCatalogIterator(); it.hasNext(); ){ obj.addReplicaCatalog( (ReplicaCatalog)it.next().clone( ) ); } obj.setProfiles( (Profiles)this.mProfiles.clone() ); } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } return obj; } }pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/HeadNodeScratch.java0000644000175000017500000000467411757531137031142 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.Writer; import java.io.IOException; /** * This data class describes the scratch area on a head node. 
* * @version $Revision: 2003 $ * @author Karan Vahi */ public class HeadNodeScratch extends StorageType { /** * The default constructor */ public HeadNodeScratch() { super(); } /** * The overloaded constructor * * @param type StorageType */ public HeadNodeScratch( StorageType type ) { this( type.getLocalDirectory(), type.getSharedDirectory() ); } /** * The overloaded constructor. * * @param local the local directory on the node. * @param shared the shared directory on the node. */ public HeadNodeScratch( LocalDirectory local, SharedDirectory shared ){ super( local, shared ); } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //write out the xml element writer.write( indent ); writer.write( "" ); writer.write( newLine ); this.getLocalDirectory().toXML( writer, newIndent ); this.getSharedDirectory().toXML( writer, newIndent ); writer.write( indent ); writer.write( "" ); writer.write( newLine ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/StorageType.java0000644000175000017500000000770111757531137030423 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.IOException; import java.io.Writer; /** * An Abstract Data class to describe the filesystem layout on a site, both * shared and local on a site/node * * @version $Revision: 2003 $ * @author Karan Vahi */ public class StorageType extends AbstractSiteData{ /** * The local directory on the node. */ protected LocalDirectory mLocalDirectory; /** * The shared directory on the node. */ protected SharedDirectory mSharedDirectory; /** * The default constructor */ public StorageType() { mLocalDirectory = new LocalDirectory(); mSharedDirectory = new SharedDirectory(); } /** * The overloaded constructor. * * @param local the local directory on the node. * @param shared the shared directory on the node. */ public StorageType( LocalDirectory local, SharedDirectory shared ){ mLocalDirectory = local; mSharedDirectory = shared; } /** * Sets the local directory. * * @param local the local directory. */ public void setLocalDirectory( LocalDirectory local ){ mLocalDirectory = local; } /** * Returns the local directory. * * @return the local directory. */ public LocalDirectory getLocalDirectory( ){ return mLocalDirectory; } /** * Selects a FileServer associated with the Local Directory. * * @return if specified, else null */ public FileServer selectLocalFileServer(){ return this.getLocalDirectory().selectFileServer(); } /** * Selects a FileServer associated with the Shared Directory. 
* * @return if specified, else null */ public FileServer selectSharedFileServer(){ return this.getSharedDirectory().selectFileServer(); } /** * Sets the shared directory. * * @param shared the shared directory. */ public void setSharedDirectory( SharedDirectory shared ){ mSharedDirectory = shared; } /** * Returns the shared directory. * * @return the shared directory. */ public SharedDirectory getSharedDirectory( ){ return mSharedDirectory; } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ StorageType obj; try{ obj = ( StorageType ) super.clone(); obj.setLocalDirectory( ( LocalDirectory )this.getLocalDirectory().clone() ); obj.setSharedDirectory( ( SharedDirectory )this.getSharedDirectory().clone() ); } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } return obj; } /** * * @param writer * @param indent * @throws java.io.IOException */ public void toXML(Writer writer, String indent) throws IOException { throw new UnsupportedOperationException("Not supported yet."); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/WorkerNodeScratch.java0000644000175000017500000000762411757531137031550 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.Writer; import java.io.IOException; /** * This data class describes the scratch area on a head node. The difference * from the worker node scratch is that it additionally has a worker shared directory * that designates the shared directory amongst the worker nodes. * * * @version $Revision: 2003 $ * @author Karan Vahi */ public class WorkerNodeScratch extends StorageType { /** * The directory shared only amongst the worker nodes. */ protected WorkerSharedDirectory mWorkerShared; /** * The default constructor */ public WorkerNodeScratch() { super(); mWorkerShared = null; } /** * The overloaded constructor * * @param type StorageType */ public WorkerNodeScratch( StorageType type ) { this( type.getLocalDirectory(), type.getSharedDirectory() ); mWorkerShared = null; } /** * The overloaded constructor. * * @param local the local directory on the node. * @param shared the shared directory on the node. */ public WorkerNodeScratch( LocalDirectory local, SharedDirectory shared ){ super( local, shared ); mWorkerShared = null; } /** * Sets the directory shared amongst the worker nodes only. * * @param directory the worker node shared directory. */ public void setWorkerSharedDirectory( WorkerSharedDirectory directory ){ mWorkerShared = directory; } /** * Returns the directory shared amongst the worker nodes only. * * @return the worker shared directory. */ public WorkerSharedDirectory getWorkerSharedDirectory( ){ return mWorkerShared; } /** * Returns the clone of the object. 
* * @return the clone */ public Object clone(){ WorkerNodeScratch obj; obj = ( WorkerNodeScratch ) super.clone(); if( this.getWorkerSharedDirectory() != null ){ obj.setWorkerSharedDirectory( (WorkerSharedDirectory)this.getWorkerSharedDirectory().clone() ); } return obj; } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //write out the xml element writer.write( indent ); writer.write( "" ); writer.write( newLine ); this.getLocalDirectory().toXML( writer, newIndent ); this.getSharedDirectory().toXML( writer, newIndent ); if( this.getWorkerSharedDirectory() != null ){ this.getWorkerSharedDirectory().toXML( writer, newIndent ); } writer.write( indent ); writer.write( "" ); writer.write( newLine ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/GridGateway.java0000644000175000017500000002744311757531137030371 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import java.io.Writer; import java.io.IOException; /** * This class describes the Grid Gateway into a site. * * @version $Revision: 2511 $ * @author Karan Vahi */ public class GridGateway extends AbstractSiteData{ /** * An enumeration of valid types of grid gateway. */ public static enum TYPE { gt2, gt4, gt5, condor, condorc, pbs, lsf }; /** * An enumeration of types of jobs handled by an instance of a grid gateway. */ public static enum JOB_TYPE{ compute, auxillary, transfer, register, cleanup }; /** * An enumeration of valid schedulers on the grid gateway. */ public static enum SCHEDULER_TYPE{ Fork, LSF, PBS, Condor, SGE, Unknown }; /** * The grid type associated with this instance. */ private TYPE mType; /** * The contact string for the grid gateway. */ private String mContact; /** * The scheduler type with which it interfaces. */ private SCHEDULER_TYPE mScheduler; /** * The type of jobs that this grid gateway can accept. */ private JOB_TYPE mJobType; /** * An optional os type for the grid gateway. */ private SysInfo.OS mOS; /** * An optional architecture type for the grid gateway. */ private SysInfo.Architecture mArch; /** * Optional information about the os release. */ private String mOSRelease; /** * Optional information about the version. */ private String mOSVersion; /** * Optional information about the glibc. */ private String mGlibc; /** * The number of idle nodes. */ private int mIdleNodes; /** * The number of total nodes */ private int mTotalNodes; /** * The default constructor. 
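     * Defaults to a GT2 gateway at localhost/jobmanager-fork talking to a
     * Fork scheduler.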
*/ public GridGateway( ){ this( TYPE.gt2, "localhost/jobmanager-fork", SCHEDULER_TYPE.Fork ); } /** * The overloaded constructor. * * @param type the type of grid gateway. * @param contact the contact string for it. * @param scheduler the underlying scheduler that it talks to. */ public GridGateway( TYPE type, String contact, SCHEDULER_TYPE scheduler ){ mType = type; mContact = contact; mScheduler = scheduler; mIdleNodes = -1; mTotalNodes = -1; } /** * Sets the type of jobs that run via this grid gateway. * * @param type the job type. */ public void setJobType( JOB_TYPE type ){ mJobType = type; } /** * Returns the type of jobs that run via this grid gateway. * * @return the job type. */ public JOB_TYPE getJobType( ){ return mJobType; } /** * Sets the type of grid gateway. * * @param type the gateway type. */ public void setType( TYPE type ){ mType = type; } /** * Returns the type of jobs that run via this grid gateway. * * @return the type of grid gateway */ public TYPE getType( ){ return mType; } /** * Sets the contact string for the Grid gateway * * @param contact the contact string */ public void setContact( String contact ){ mContact = contact; } /** * Returns the contact string for the Grid gateway * * @return the contact string */ public String getContact( ){ return mContact; } /** * Sets the number of total nodes that this grid gateway talks to. * * @param num the number of nodes. */ public void setTotalNodes( String num ){ try{ mTotalNodes = Integer.parseInt( num ); } catch( Exception e ){ //empty catch } } /** * Sets the total number of nodes that this grid gateway talks to. * * @param num the number of nodes. */ public void setTotalNodes( int num ){ mTotalNodes = num; } /** * Returns the total number of nodes that this grid gateway talks to. * If they are not set then -1 is returned. * * @return the number of nodes, or -1 if not set. */ public int getTotalNodes( ){ return mTotalNodes; } /** * Sets the number of idle nodes that this grid gateway talks to. * * @param num the number of nodes. */ public void setIdleNodes( String num ){ try{ mIdleNodes = Integer.parseInt( num ); } catch( Exception e ){ //empty catch } } /** * Sets the number of idle nodes that this grid gateway talks to. * * @param num the number of nodes. */ public void setIdleNodes( int num ){ mIdleNodes = num; } /** * Returns the total number of nodes that this grid gateway talks to. * If they are not set then -1 is returned. * * @return the number of nodes, or -1 if not set. */ public int getIdleNodes( ){ return mIdleNodes; } /** * Sets the the underlying scheduler that gateway talks to. * In case the value does not match any of the predefined enumeration, * the SCHEDULER_TYPE is set to Unknown. * * @param value the string value * * @see SCHEDULER_TYPE */ public void setScheduler(String value) { try{ mScheduler = GridGateway.SCHEDULER_TYPE.valueOf( value ); } catch( IllegalArgumentException iae ){ //set the scheduler to unknown mScheduler = GridGateway.SCHEDULER_TYPE.Unknown; } } /** * Sets the the underlying scheduler that gateway talks to. * * @param scheduler the scheduler. */ public void setScheduler( SCHEDULER_TYPE scheduler ){ mScheduler = scheduler; } /** * Returns the the underlying scheduler that gateway talks to. * * @return the scheduler. */ public SCHEDULER_TYPE getScheduler( ){ return mScheduler; } /** * Sets the OS of the nodes that this grid gateway talks to. * * @param os the os */ public void setOS( SysInfo.OS os ){ mOS = os; } /** * Returns the OS of the nodes that this grid gateway talks to. 
* * @return the os if set else null */ public SysInfo.OS getOS( ){ return mOS; } /** * Sets the OS release of the site. * * @param release the os releaseof the site. */ public void setOSRelease( String release ){ mOSRelease = release; } /** * Returns the OS release of the site. * * @return the OS */ public String getOSRelease( ){ return mOSRelease; } /** * Sets the OS version of the site. * * @param version the os versionof the site. */ public void setOSVersion( String version ){ mOSVersion = version; } /** * Returns the OS version of the site. * * @return the OS */ public String getOSVersion( ){ return mOSVersion; } /** * Sets the glibc version on the site. * * @param version the glibc version of the site. */ public void setGlibc( String version ){ mGlibc = version; } /** * Returns the glibc version of the site. * * @return the OS */ public String getGlibc( ){ return mGlibc; } /** * Sets the architecture of the nodes that this grid gateway talks to. * * @param arch the architecture of the nodes */ public void setArchitecture( SysInfo.Architecture arch ){ mArch = arch; } /** * Returns the architecture of the nodes that this grid gateway talks to. * * @return the architecture if set else null */ public SysInfo.Architecture getArchitecture( ){ return mArch; } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //write out the xml element writer.write( indent ); writer.write( ""); writer.write( newLine ); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ GridGateway obj; try{ obj = ( GridGateway ) super.clone(); obj.setType( this.getType() ); obj.setContact( this.getContact() ); obj.setScheduler( this.getScheduler() ); obj.setJobType( this.getJobType() ); obj.setOS( this.getOS() ); obj.setArchitecture( this.getArchitecture() ); obj.setOSRelease( this.getOSRelease() ); obj.setOSVersion( this.getOSVersion() ); obj.setGlibc( this.getGlibc() ); obj.setTotalNodes( this.getTotalNodes()); obj.setIdleNodes( this.getIdleNodes() ); } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } return obj; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/SiteData.java0000644000175000017500000000156111757531137027651 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; /** * The abstract base class for all site catalog classes. 
 *
 * @author Karan Vahi
 * @version $Revision: 579 $
 */
public abstract class SiteData implements Cloneable{
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/Connection.java0000644000175000017500000000724611757531137030260 0ustar ryngerynge/*
 *
 *  Copyright 2007-2008 University Of Southern California
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */
package edu.isi.pegasus.planner.catalog.site.classes;

import java.io.Writer;
import java.io.IOException;

/**
 * This data class describes a connection property for the replica catalog.
 *
 * @version $Revision: 579 $
 * @author Karan Vahi
 */
public class Connection extends AbstractSiteData {

    /**
     * The connection key.
     */
    private String mKey;

    /**
     * The value of the connection key.
     */
    private String mValue;

    /**
     * The default constructor.
     */
    public Connection() {
        this( "", "" );
    }

    /**
     * The overloaded constructor.
     *
     * @param key   the key
     * @param value the key value
     */
    public Connection( String key, String value ){
        initialize( key, value );
    }

    /**
     * Initializes the object.
     *
     * @param key   the key
     * @param value the key value
     */
    public void initialize( String key, String value ){
        mKey   = key;
        mValue = value;
    }

    /**
     * Returns the connection key.
     *
     * @return key
     */
    public String getKey(){
        return this.mKey;
    }

    /**
     * Returns the key value.
     *
     * @return value.
     */
    public String getValue(){
        return this.mValue;
    }

    /**
     * Sets the connection key.
     *
     * @param key the key
     */
    public void setKey( String key ){
        this.mKey = key;
    }

    /**
     * Sets the key value.
     *
     * @param value the value.
     */
    public void setValue( String value ){
        this.mValue = value ;
    }

    /**
     * Writes out the xml description of the object.
     *
     * @param writer is a Writer opened and ready for writing. This can also
     *               be a StringWriter for efficient output.
     * @param indent the indent to be used.
     *
     * @exception IOException if something fishy happens to the stream.
     */
    public void toXML( Writer writer, String indent ) throws IOException {
        String newLine = System.getProperty( "line.separator", "\r\n" );

        //write out the xml element
        writer.write( indent );
        writer.write( "<connection key=\"" + getKey() + "\">" );
        writer.write( getValue() );
        writer.write( "</connection>" );
        writer.write( newLine );
    }

    /**
     * Returns the clone of the object.
     *
     * @return the clone
     */
    public Object clone(){
        Connection obj;
        try{
            obj = ( Connection ) super.clone();
            obj.initialize( this.getKey(), this.getValue() );
        }
        catch( CloneNotSupportedException e ){
            //somewhere in the hierarchy chain clone is not implemented
            throw new RuntimeException( "Clone not implemented in the base class of " + this.getClass().getName(),
                                        e );
        }
        return obj;
    }
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/VORSSiteInfo.java0000644000175000017500000001232211757531137030402 0ustar ryngerynge/*
 *
 *  Copyright 2007-2008 University Of Southern California
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; public class VORSSiteInfo { private String shortname; //shortname private String app_loc; //app_loc private String data_loc; //data_loc private String osg_grid; //osg_grid private String wntmp_loc; //wntmp_loc private String tmp_loc; //tmp_loc private String gatekeeper; //gatekeeper private String gk_port; //gk_port private String gsiftp_port; //gsiftp_port private String sponsor_vo; //sponsor_vo private String vdt_version; //vdt_version private String globus_loc; //globus_loc --if globus_loc is empty then use osg_grid private String exec_jm; //exec_jm -- compute private String util_jm; //util_jm -- transfer private String grid_services; //grid_services private String app_space; //app_space -- saved for later use private String data_space; //data_space -- saved for later use private String tmp_space; //tmp_space -- saved for later use private VORSVOInfo voInfo; public void print(){ System.out.println("***********************************************"); System.out.println("shortname " + shortname); System.out.println("app_loc " + app_loc); System.out.println("data_loc " + data_loc); System.out.println("osg_grid " + osg_grid); System.out.println("tmp_loc " + tmp_loc); System.out.println("wntmp_loc " + wntmp_loc); System.out.println("gatekeeper " + gatekeeper); System.out.println("gk_port " + gk_port); System.out.println("gsiftp_port " + gsiftp_port); System.out.println("sponsor_vo " + sponsor_vo); System.out.println("vdt_version " + vdt_version); System.out.println("globus_loc " + globus_loc); System.out.println("exec_jm " + exec_jm); System.out.println("util_jm " + util_jm); System.out.println("grid_services " + grid_services); System.out.println("app_space " + app_space); System.out.println("data_space " + data_space); System.out.println("tmp_space " + tmp_space); voInfo.print(); System.out.println("***********************************************"); } public String getShortname() { return shortname; } public void setShortname(String shortname) { this.shortname = shortname; } public String getApp_loc() { return app_loc; } public void setApp_loc(String app_loc) { this.app_loc = app_loc; } public String getData_loc() { return data_loc; } public void setData_loc(String data_loc) { this.data_loc = data_loc; } public String getOsg_grid() { return osg_grid; } public void setOsg_grid(String osg_grid) { this.osg_grid = osg_grid; } public String getWntmp_loc() { return wntmp_loc; } public void setWntmp_loc(String wntmp_loc) { this.wntmp_loc = wntmp_loc; } public String getTmp_loc() { return tmp_loc; } public void setTmp_loc(String tmp_loc) { this.tmp_loc = tmp_loc; } public String getGatekeeper() { return gatekeeper; } public void setGatekeeper(String gatekeeper) { this.gatekeeper = gatekeeper; } public String getGk_port() { return gk_port; } public void setGk_port(String gk_port) { this.gk_port = gk_port; } public String getGsiftp_port() { return gsiftp_port; } public void setGsiftp_port(String gsiftp_port) { this.gsiftp_port = gsiftp_port; } public String getSponsor_vo() { return sponsor_vo; } public void setSponsor_vo(String sponsor_vo) { 
this.sponsor_vo = sponsor_vo; } public String getVdt_version() { return vdt_version; } public void setVdt_version(String vdt_version) { this.vdt_version = vdt_version; } public String getGlobus_loc() { return globus_loc; } public void setGlobus_loc(String globus_loc) { this.globus_loc = globus_loc; } public String getExec_jm() { return exec_jm; } public void setExec_jm(String exec_jm) { this.exec_jm = exec_jm; } public String getUtil_jm() { return util_jm; } public void setUtil_jm(String util_jm) { this.util_jm = util_jm; } public String getGrid_services() { return grid_services; } public void setGrid_services(String grid_services) { this.grid_services = grid_services; } public String getApp_space() { return app_space; } public void setApp_space(String app_space) { this.app_space = app_space; } public String getData_space() { return data_space; } public void setData_space(String data_space) { this.data_space = data_space; } public String getTmp_space() { return tmp_space; } public void setTmp_space(String tmp_space) { this.tmp_space = tmp_space; } public VORSVOInfo getVoInfo() { return voInfo; } public void setVoInfo(VORSVOInfo voInfo) { this.voInfo = voInfo; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/DirectoryType.java0000644000175000017500000001137411757531137030764 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.util.List; import java.util.LinkedList; import java.util.Iterator; import edu.isi.pegasus.planner.common.PegRandom; /** * An abstract base class that creates a directory type. It associates multiple * file servers and an internal mount point. * * @author Karan Vahi */ public abstract class DirectoryType extends AbstractSiteData{ /** * The list of file servers that can be used to write to access this directory. */ protected List mFileServers; /** * The internal mount point for the directory. */ protected InternalMountPoint mInternalMount; /** * The default constructor. */ public DirectoryType(){ initialize( ); } /** * The overloaded constructor * * @param fs list of file servers * @param imt the internal mount point. */ public DirectoryType( List fs, InternalMountPoint imt ){ initialize( fs, imt ); } /** * Initializes the object * */ public void initialize( ){ this.mFileServers = new LinkedList (); this.mInternalMount = new InternalMountPoint(); } /** * Initializes the object * * @param fs list of file servers * @param imt the internal mount point. */ public void initialize( List fs, InternalMountPoint imt ){ this.mFileServers = fs; this.mInternalMount = imt ; } /** * Adds a FileServer that can access this directory. * * @param server the file server. */ public void addFileServer( FileServer server ){ mFileServers.add( server ); } /** * Sets the list of FileServers that can access this directory. 
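 * <p>
 * Illustrative sketch of populating a concrete directory (server and mount
 * point values are borrowed from the sample catalog code in this package):
 * <pre>{@code
 * SharedDirectory dir = new SharedDirectory();
 * dir.addFileServer( new FileServer( "gsiftp",
 *                                    "gsiftp://hserver1.isi.edu",
 *                                    "/external/shared-scratch" ) );
 * dir.setInternalMountPoint( new InternalMountPoint( "/shared-scratch", "50G", "100G" ) );
 * FileServer pick = dir.selectFileServer();   // random pick, null if no servers
 * }</pre>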
* * @param servers the list of servers */ public void setFileServers( List servers ){ mFileServers = servers; } /** * Selects a random file server and returns it. * * @return FileServer */ public FileServer selectFileServer(){ return ( this.mFileServers == null || this.mFileServers.size() == 0 )? null : this.mFileServers.get( PegRandom.getInteger( this.mFileServers.size() - 1) ); } /** * Returns at iterator to the file servers. * * @return Iterator */ public Iterator getFileServersIterator(){ return mFileServers.iterator(); } /** * Sets the internal mount point for the directory. * * @param mountPoint the internal mount point. */ public void setInternalMountPoint( InternalMountPoint mountPoint ){ mInternalMount = mountPoint; } /** * Returns the internal mount point for the directory. * * @return the internal mount point. */ public InternalMountPoint getInternalMountPoint(){ return this.mInternalMount; } /*** * A convenience method that returns true if all the attributes values are * uninitialized or empty strings. Useful for serializing the object as * XML. * * @return boolean */ public boolean isEmpty(){ return this.mFileServers.isEmpty() && this.getInternalMountPoint().isEmpty(); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ DirectoryType obj; try{ obj = ( DirectoryType ) super.clone(); } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } obj.initialize( ); obj.setInternalMountPoint( (InternalMountPoint)mInternalMount.clone() ); for( FileServer server : mFileServers ){ obj.addFileServer( (FileServer)server.clone() ); } return obj; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/FileSystemType.java0000644000175000017500000000747111757531137031107 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.classes; /** * An abstract class describing a filesystem type. * *

 * The various attributes that can be associated with the server are
 * displayed in the following table.
 *
 * <table>
 *   <tr><th>Attribute Name</th><th>Attribute Description</th></tr>
 *   <tr><td>mount-point</td><td>the mount point for the filesystem</td></tr>
 * </table>
* * @author Karan Vahi * @version $Revision: 600 $ */ public abstract class FileSystemType extends AbstractSiteData { /** * The mount point for the file system. */ protected String mMountPoint; /** * The total size of file system. */ protected String mTotalSize; /** * The free space on the file system. */ protected String mFreeSize; /** * The default constructor. */ public FileSystemType() { mMountPoint = ""; mTotalSize = ""; mFreeSize = ""; } /** * The overloaded constructor. * * @param mountPoint the mount point of the system. * @param totalSize the total size of the system. * @param freeSize the free size */ public FileSystemType( String mountPoint, String totalSize, String freeSize ){ mMountPoint = mountPoint; mTotalSize = totalSize; mFreeSize = freeSize; } /** * Sets the mount point for the file system type. * * @param point the mount point. */ public void setMountPoint( String point ){ this.mMountPoint = point; } /** * Returns the mount point for the file system type. * * @return the mount point. */ public String getMountPoint( ){ return this.mMountPoint; } /** * Sets the directory size * * @param size the directory size. */ public void setTotalSize( String size ){ this.mTotalSize = size; } /** * Returns the total directory size. * * @return the directory size. */ public String getTotalSize( ){ return this.mTotalSize ; } /** * Sets the free directory size * * @param size the directory size. */ public void setFreeSize( String size ){ this.mFreeSize = size; } /** * Sets the free directory size * * @return the directory size. */ public String getFreeSize( ){ return this.mFreeSize; } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ FileSystemType obj; try{ obj = ( FileSystemType ) super.clone(); obj.setMountPoint( this.getMountPoint() ); obj.setTotalSize( this.getTotalSize() ); obj.setFreeSize( this.getFreeSize() ); } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } return obj; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/ReplicaCatalog.java0000644000175000017500000001621111757531137031023 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.util.Collection; import java.util.Set; import java.util.HashSet; import java.util.Iterator; import java.io.Writer; import java.io.IOException; import java.util.LinkedList; /** * This data class describes the Replica Catalog associated with the site. * * @version $Revision: 2982 $ * @author Karan Vahi */ public class ReplicaCatalog extends AbstractSiteData { /** * The url for the catalog. */ protected String mURL; /** * The type of the replica catalog implementation to use. */ protected String mType; /** * Collection of connection parameters to use for connecting to that replica * catalog. 
 */
    protected Collection<Connection> mConnectionParams;

    /**
     * Set of alias names to be used for lookup.
     */
    protected Set<String> mAliases;

    /**
     * The default constructor.
     */
    public ReplicaCatalog( ){
        this( "", "" );
    }

    /**
     * The overloaded constructor.
     *
     * @param url  the url for the replica catalog.
     * @param type the type of replica catalog.
     */
    public ReplicaCatalog( String url, String type ){
        initialize( url, type );
    }

    /**
     * Initialize the class.
     *
     * @param url  the url for the replica catalog.
     * @param type the type of replica catalog.
     */
    public void initialize( String url, String type ){
        mURL  = url;
        mType = type;
        mAliases = new HashSet<String>();
        mConnectionParams = new LinkedList<Connection>();
    }

    /**
     * Sets the url for the replica catalog.
     *
     * @param url the url
     */
    public void setURL( String url ){
        mURL = url;
    }

    /**
     * Returns the url for the replica catalog.
     *
     * @return url
     */
    public String getURL(){
        return mURL;
    }

    /**
     * Sets the type of replica catalog.
     *
     * @param type the type of replica catalog.
     */
    public void setType( String type ){
        mType = type;
    }

    /**
     * Returns the type of replica catalog.
     *
     * @return type.
     */
    public String getType(){
        return mType;
    }

    /**
     * Adds an alias site handle.
     *
     * @param name the site handle to alias to.
     */
    public void addAlias( String name ){
        mAliases.add( name );
    }

    /**
     * Adds a connection parameter.
     *
     * @param connection the connection parameter.
     */
    public void addConnection( Connection connection ){
        mConnectionParams.add( connection );
    }

    /**
     * Clears the aliases associated with the replica catalog.
     */
    public void clearAliases( ){
        this.mAliases.clear();
    }

    /**
     * Returns an iterator to aliases associated with the site.
     *
     * @return Iterator
     */
    public Iterator<String> getAliasIterator(){
        return this.mAliases.iterator();
    }

    /**
     * Returns an iterator to connection params associated with the replica
     * catalog.
     *
     * @return Iterator
     */
    public Iterator<Connection> getConnectionIterator(){
        return this.mConnectionParams.iterator();
    }

    /**
     * Writes out the xml description of the object.
     *
     * @param writer is a Writer opened and ready for writing. This can also
     *               be a StringWriter for efficient output.
     * @param indent the indent to be used.
     *
     * @exception IOException if something fishy happens to the stream.
     */
    public void toXML( Writer writer, String indent ) throws IOException {
        String newLine   = System.getProperty( "line.separator", "\r\n" );
        String newIndent = indent + "\t";

        //write out the xml element
        writer.write( indent );
        writer.write( "<replica-catalog type=\"" + getType() + "\" url=\"" + getURL() + "\"" );
        if( this.mAliases.isEmpty() && this.mConnectionParams.isEmpty() ){
            //no children, close the element right away
            writer.write( "/>" );
        }
        else{
            writer.write( ">");
            writer.write( newLine );

            //list all the aliases first
            for( Iterator<String> it = this.getAliasIterator(); it.hasNext(); ){
                writeAlias( writer, newIndent, it.next() );
            }

            //list all the connection params
            for( Iterator<Connection> it = this.getConnectionIterator(); it.hasNext(); ){
                it.next().toXML( writer, newIndent );
            }

            writer.write( indent );
            writer.write( "</replica-catalog>" );
        }
        writer.write( newLine );
    }

    /**
     * Returns the clone of the object.
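     * <p>
     * Illustrative usage of this class, mirroring the wiring in
     * CreateSampleSiteCatalog later in this package:
     * <pre>{@code
     * ReplicaCatalog rc = new ReplicaCatalog( "rls://replica.isi.edu", "RLS" );
     * rc.addAlias( "isi_skynet" );
     * rc.addConnection( new Connection( "ignore.lrc", "rls://replica.caltech.edu" ) );
     * }</pre>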
     *
     * @return the clone
     */
    public Object clone(){
        ReplicaCatalog obj;
        try{
            obj = ( ReplicaCatalog ) super.clone();
            //initialize expects ( url, type )
            obj.initialize( this.getURL(), this.getType() );
            for( Iterator<String> it = this.getAliasIterator(); it.hasNext() ; ){
                obj.addAlias( it.next() );
            }
            for( Iterator<Connection> it = this.getConnectionIterator(); it.hasNext() ; ){
                obj.addConnection( (Connection)it.next().clone() );
            }
        }
        catch( CloneNotSupportedException e ){
            //somewhere in the hierarchy chain clone is not implemented
            throw new RuntimeException( "Clone not implemented in the base class of " + this.getClass().getName(),
                                        e );
        }
        return obj;
    }

    /**
     * Renders an alias as xml.
     *
     * @param writer is a Writer opened and ready for writing. This can also
     *               be a StringWriter for efficient output.
     * @param indent the indent to be used.
     * @param value  the value to use.
     *
     * @exception IOException if something fishy happens to the stream.
     */
    protected void writeAlias( Writer writer, String indent, String value ) throws IOException {
        String newLine = System.getProperty( "line.separator", "\r\n" );

        //write out the xml element
        writer.write( indent );
        writer.write( "<alias name=\"" + value + "\"/>" );
        writer.write( newLine );
    }
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/WorkerNodeStorage.java0000644000175000017500000000672111757531137031562 0ustar ryngerynge/*
 *
 *  Copyright 2007-2008 University Of Southern California
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *  http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing,
 *  software distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 *
 */
package edu.isi.pegasus.planner.catalog.site.classes;

import java.io.Writer;
import java.io.IOException;

/**
 * This data class describes the storage area on worker nodes. The difference
 * from the headnode storage is that it additionally has a worker shared directory
 * that designates the shared directory amongst the worker nodes.
 *
 * @version $Revision: 2003 $
 * @author Karan Vahi
 */
public class WorkerNodeStorage extends StorageType {

    /**
     * The directory shared only amongst the worker nodes.
     */
    protected WorkerSharedDirectory mWorkerShared;

    /**
     * The default constructor.
     */
    public WorkerNodeStorage() {
        super();
        mWorkerShared = null;
    }

    /**
     * The overloaded constructor.
     *
     * @param type StorageType
     */
    public WorkerNodeStorage( StorageType type ) {
        this( type.getLocalDirectory(), type.getSharedDirectory() );
    }

    /**
     * The overloaded constructor.
     *
     * @param local  the local directory on the node.
     * @param shared the shared directory on the node.
     */
    public WorkerNodeStorage( LocalDirectory local, SharedDirectory shared ){
        super( local, shared );
        mWorkerShared = null;
    }

    /**
     * Sets the directory shared amongst the worker nodes only.
     *
     * @param directory the worker node shared directory.
     */
    public void setWorkerSharedDirectory( WorkerSharedDirectory directory ){
        mWorkerShared = directory;
    }

    /**
     * Returns the directory shared amongst the worker nodes only.
     *
     * @return the worker shared directory.
     */
    public WorkerSharedDirectory getWorkerSharedDirectory( ){
        return mWorkerShared;
    }

    /**
     * Writes out the xml description of the object.
     *
     * @param writer is a Writer opened and ready for writing.
This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //write out the xml element writer.write( indent ); writer.write( "" ); writer.write( newLine ); this.getLocalDirectory().toXML( writer, newIndent ); this.getSharedDirectory().toXML( writer, newIndent ); if( this.getWorkerSharedDirectory() != null ){ this.getWorkerSharedDirectory().toXML( writer, newIndent ); } writer.write( indent ); writer.write( "" ); writer.write( newLine ); } }pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/HeadNodeStorage.java0000644000175000017500000000467711757531137031162 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.Writer; import java.io.IOException; /** * This data class describes the storage area on a node. * * @version $Revision: 2003 $ * @author Karan Vahi */ public class HeadNodeStorage extends StorageType { /** * The default constructor */ public HeadNodeStorage() { super(); } /** * The overloaded constructor * * @param type StorageType */ public HeadNodeStorage( StorageType type ) { this( type.getLocalDirectory(), type.getSharedDirectory() ); } /** * The overloaded constructor. * * @param local the local directory on the node. * @param shared the shared directory on the node. */ public HeadNodeStorage( LocalDirectory local, SharedDirectory shared ){ super( local, shared ); } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //write out the xml element writer.write( indent ); writer.write( "" ); writer.write( newLine ); this.getLocalDirectory().toXML( writer, newIndent ); this.getSharedDirectory().toXML( writer, newIndent ); writer.write( indent ); writer.write( "" ); writer.write( newLine ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/HeadNodeFS.java0000644000175000017500000001526011757531137030054 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.classes.Profile; import java.io.Writer; import java.io.IOException; /** * This data class describes the HeadNode Filesystem layout. * * @version $Revision: 2572 $ * @author Karan Vahi */ public class HeadNodeFS extends AbstractSiteData { /** * The scratch area on the head node. */ private HeadNodeScratch mScratch; /** * The storage area on the head node. */ private HeadNodeStorage mStorage; /** * The profiles associated with the headnode filesystem. */ private Profiles mProfiles; /** * The default constructor. */ public HeadNodeFS(){ mScratch = new HeadNodeScratch(); mStorage = new HeadNodeStorage(); mProfiles= new Profiles(); } /** * The overloaded constructor. * * @param scratch the scratch area. * @param storage the storage area. */ public HeadNodeFS( HeadNodeScratch scratch, HeadNodeStorage storage ){ setScratch( scratch ); setStorage( storage ); mProfiles = new Profiles(); } /** * Sets the scratch area on the head node. * * @param scratch the scratch area. */ public void setScratch( HeadNodeScratch scratch ){ mScratch = scratch; } /** * Selects a FileServer associated with the Local Directory on * the Scratch system. * * @return if specified, else null */ public FileServer selectScratchLocalFileServer(){ return this.getScratch().getLocalDirectory().selectFileServer(); } /** * Selects a FileServer associated with the Shared Directory on * the Scratch system. * * @return if specified, else null */ public FileServer selectScratchSharedFileServer(){ return this.getScratch().getSharedDirectory().selectFileServer(); } /** * Returns the scratch area on the head node. * * @return the scratch area. */ public HeadNodeScratch getScratch( ){ return this.mScratch; } /** * Sets the storage area on the head node. * * @param storage the storage area. */ public void setStorage( HeadNodeStorage storage ){ mStorage = storage; } /** * A convenience method that returns a file server to be used for stageout. * * It selects a FileServer associated with the Local Directory. * If that is null, it then selects a FileServer associated with the * Shared Directory. * * @return storage FileServer for stageout. */ public FileServer selectStorageFileServerForStageout() { HeadNodeStorage s = this.getStorage(); if( s == null ) { return null ; } FileServer fs = null; return ( (fs = this.selectStorageLocalFileServer()) == null)? this.selectStorageSharedFileServer(): fs; } /** * Selects a FileServer associated with the Local Directory on * the Storage system. * * @return if specified, else null */ public FileServer selectStorageLocalFileServer(){ return this.getStorage().getLocalDirectory().selectFileServer(); } /** * Selects a FileServer associated with the Shared Directory on * the Storage system. * * @return if specified, else null */ public FileServer selectStorageSharedFileServer(){ return this.getStorage().getSharedDirectory().selectFileServer(); } /** * Returns the storage area on the head node. * * @return the storage area. 
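 * <p>
 * Illustrative sketch of stage-out selection against this storage area
 * ({@code hfs} is assumed to be a populated HeadNodeFS):
 * <pre>{@code
 * FileServer stageout = hfs.selectStorageFileServerForStageout();
 * // a local-directory server is preferred, else the shared-directory one
 * }</pre>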
*/ public HeadNodeStorage getStorage( ){ return this.mStorage; } /** * Adds a profile. * * @param p the profile to be added */ public void addProfile( Profile p ){ //retrieve the appropriate namespace and then add mProfiles.addProfile( p ); } /** * Sets the profiles associated with the file server. * * @param profiles the profiles. */ public void setProfiles( Profiles profiles ){ mProfiles = profiles; } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //write out the xml element writer.write( indent ); writer.write( "" ); writer.write( newLine ); this.getScratch().toXML( writer, newIndent ); this.getStorage().toXML( writer, newIndent ); this.mProfiles.toXML( writer, newIndent ); writer.write( indent ); writer.write( "" ); writer.write( newLine ); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ HeadNodeFS obj; try{ obj = ( HeadNodeFS ) super.clone(); obj.setScratch( (HeadNodeScratch)this.getScratch().clone() ); obj.setStorage( (HeadNodeStorage)this.getStorage().clone() ); obj.setProfiles( (Profiles)this.mProfiles.clone() ); } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } return obj; } } ././@LongLink0000000000000000000000000000014700000000000011567 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/SiteInfo2SiteCatalogEntry.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/SiteInfo2SiteCatalogEntry.ja0000644000175000017500000001535011757531137032571 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPServer; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.JobManager; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.LRC; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import java.util.Iterator; import java.util.List; /** * * An adapter class that converts SiteInfo object to SiteCatalogEntry object. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class SiteInfo2SiteCatalogEntry { /** * An adapter method that converts the SiteInfo object to * SiteCatalogEntry object. 
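 * <p>
 * Illustrative call ({@code s} is assumed to be a populated SiteInfo from
 * the old site catalog classes):
 * <pre>{@code
 * SiteCatalogEntry entry = SiteInfo2SiteCatalogEntry.convert( s );
 * }</pre>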
* * @param s SiteInfo to be converted. * * @return the converted SiteCatalogEntry object. */ public static SiteCatalogEntry convert( SiteInfo s ) { return SiteInfo2SiteCatalogEntry.convert( s, LogManagerFactory.loadSingletonInstance() ); } /** * An adapter method that converts the SiteInfo object to * SiteCatalogEntry object. * * @param s SiteInfo to be converted. * @param logger the hande to the LogManager * * @return the converted SiteCatalogEntry object. */ public static SiteCatalogEntry convert( SiteInfo s, LogManager logger ) { SiteCatalogEntry site = new SiteCatalogEntry(); /* set the handle */ site.setSiteHandle( (String)s.getInfo( SiteInfo.HANDLE ) ); VDSSysInfo sysinfo = ( VDSSysInfo )s.getInfo( SiteInfo.SYSINFO ) ; if( sysinfo !=null) { site.setVDSSysInfo( sysinfo ); } // describe the head node filesystem HeadNodeFS hfs = new HeadNodeFS(); /* set the work directory as shared scratch */ HeadNodeScratch hscratch = new HeadNodeScratch(); SharedDirectory hscratchShared = new SharedDirectory(); String workDir = s.getExecMountPoint(); for ( Iterator it = ((List)s.getInfo( SiteInfo.GRIDFTP )).iterator(); it.hasNext(); ) { GridFTPServer g = (GridFTPServer) it.next(); hscratchShared.addFileServer( new FileServer( "gsiftp" , (String)g.getInfo( GridFTPServer.GRIDFTP_URL ), workDir ) ); } hscratchShared.setInternalMountPoint( new InternalMountPoint( workDir )); hscratch.setSharedDirectory( hscratchShared ); hfs.setScratch( hscratch ); /* set the storage directory as shared storage */ HeadNodeStorage hstorage = new HeadNodeStorage(); SharedDirectory hstorageShared = new SharedDirectory(); String storageDir = null; for ( Iterator it = ((List)s.getInfo( SiteInfo.GRIDFTP )).iterator(); it.hasNext(); ) { GridFTPServer g = (GridFTPServer) it.next(); storageDir = ( String )g.getInfo( GridFTPServer.STORAGE_DIR ) ; hstorageShared.addFileServer( new FileServer( "gsiftp" , (String)g.getInfo( GridFTPServer.GRIDFTP_URL ), storageDir ) ); } hstorageShared.setInternalMountPoint( new InternalMountPoint( storageDir ) ); hstorage.setSharedDirectory( hstorageShared ); hfs.setStorage( hstorage ); site.setHeadNodeFS( hfs ); /* set the storage directory as GridGateways */ for ( Iterator it = ((List)s.getInfo( SiteInfo.JOBMANAGER )).iterator(); it.hasNext(); ) { JobManager jm = (JobManager) it.next(); GridGateway gw = new GridGateway(); String universe = (String)jm.getInfo( JobManager.UNIVERSE ); if( universe.equals( "vanilla" ) ){ gw.setJobType( GridGateway.JOB_TYPE.compute ); } else if( universe.equals( "transfer" ) ){ gw.setJobType( GridGateway.JOB_TYPE.auxillary ); } else{ throw new RuntimeException( "Unknown universe type " + universe + " for site " + site.getSiteHandle() ); } String url = (String)jm.getInfo( JobManager.URL ); gw.setContact( url ); if( url.endsWith( "condor" ) ){ gw.setScheduler( GridGateway.SCHEDULER_TYPE.Condor ); } else if( url.endsWith( "fork" ) ){ gw.setScheduler( GridGateway.SCHEDULER_TYPE.Fork ); } else if( url.endsWith( "pbs" ) ){ gw.setScheduler( GridGateway.SCHEDULER_TYPE.PBS ); } else if( url.endsWith( "lsf" ) ){ gw.setScheduler( GridGateway.SCHEDULER_TYPE.LSF ); } else if( url.endsWith( "sge" ) ){ gw.setScheduler( GridGateway.SCHEDULER_TYPE.SGE); } gw.setIdleNodes( (String)jm.getInfo( JobManager.IDLE_NODES ) ); gw.setTotalNodes( (String)jm.getInfo( JobManager.TOTAL_NODES ) ); site.addGridGateway( gw ); } /* set the LRC as Replica Catalog */ for( Iterator it = ((List)s.getInfo( SiteInfo.LRC )).iterator(); it.hasNext(); ) { LRC lrc = (LRC) it.next(); ReplicaCatalog rc = new 
ReplicaCatalog( lrc.getURL() , "LRC" ); site.addReplicaCatalog( rc ); } /* add Profiles */ for( Iterator it = ((List)s.getInfo( SiteInfo.PROFILE )).iterator(); it.hasNext(); ) { site.addProfile( (Profile) it.next() ); } logger.log( "SiteCatalogEntry object created is " + site, LogManager.DEBUG_MESSAGE_LEVEL ); return site; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/WorkerNodeFS.java0000644000175000017500000001406611757531137030467 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.classes.Profile; import java.io.Writer; import java.io.IOException; /** * This data class describes the WorkerNode Filesystem layout. * * @version $Revision: 2572 $ * @author Karan Vahi */ public class WorkerNodeFS extends AbstractSiteData { /** * The scratch area on the head node. */ private WorkerNodeScratch mScratch; /** * The storage area on the head node. */ private WorkerNodeStorage mStorage; /** * The profiles associated with the headnode filesystem. */ private Profiles mProfiles; /** * The default constructor. */ public WorkerNodeFS(){ mScratch = new WorkerNodeScratch(); mStorage = new WorkerNodeStorage(); mProfiles = new Profiles(); } /** * The overloaded constructor. * * @param scratch the scratch area. * @param storage the storage area. */ public WorkerNodeFS( WorkerNodeScratch scratch, WorkerNodeStorage storage ){ setScratch( scratch ); setStorage( storage ); mProfiles = new Profiles(); } /** * Sets the scratch area on the head node. * * @param scratch the scratch area. */ public void setScratch( WorkerNodeScratch scratch ){ mScratch = scratch; } /** * Selects a FileServer associated with the Local Directory on * the Scratch system. * * @return if specified, else null */ public FileServer selectScratchLocalFileServer(){ return this.getScratch().getLocalDirectory().selectFileServer(); } /** * Selects a FileServer associated with the Shared Directory on * the Scratch system. * * @return if specified, else null */ public FileServer selectScratchSharedFileServer(){ return this.getScratch().getSharedDirectory().selectFileServer(); } /** * Returns the scratch area on the head node. * * @return the scratch area. */ public WorkerNodeScratch getScratch( ){ return this.mScratch; } /** * Sets the storage area on the head node. * * @param storage the storage area. */ public void setStorage( WorkerNodeStorage storage ){ mStorage = storage; } /** * Selects a FileServer associated with the Local Directory on * the Storage system. * * @return if specified, else null */ public FileServer selectStorageLocalFileServer(){ return this.getStorage().getLocalDirectory().selectFileServer(); } /** * Selects a FileServer associated with the Shared Directory on * the Storage system. 
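 * <p>
 * For example ({@code wfs} is assumed to be a populated WorkerNodeFS):
 * <pre>{@code
 * FileServer fs = wfs.selectScratchSharedFileServer();
 * }</pre>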
* * @return if specified, else null */ public FileServer selectStorageSharedFileServer(){ return this.getStorage().getSharedDirectory().selectFileServer(); } /** * Returns the storage area on the head node. * * @return the storage area. */ public WorkerNodeStorage getStorage( ){ return this.mStorage; } /** * Sets the profiles associated with the file server. * * @param profiles the profiles. */ public void setProfiles( Profiles profiles ){ mProfiles = profiles; } /** * Adds a profile. * * @param p the profile to be added */ public void addProfile( Profile p ){ //retrieve the appropriate namespace and then add mProfiles.addProfile( p ); } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //write out the xml element writer.write( indent ); writer.write( "" ); writer.write( newLine ); this.getScratch().toXML( writer, newIndent ); this.getStorage().toXML( writer, newIndent ); this.mProfiles.toXML( writer, newIndent ); writer.write( indent ); writer.write( "" ); writer.write( newLine ); } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ WorkerNodeFS obj; try{ obj = ( WorkerNodeFS ) super.clone(); obj.setScratch( ( WorkerNodeScratch )this.getScratch().clone() ); obj.setStorage( ( WorkerNodeStorage )this.getStorage().clone() ); obj.setProfiles( (Profiles)this.mProfiles.clone() ); } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } return obj; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/CreateSampleSiteCatalog.java0000644000175000017500000002106111757531137032635 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import java.io.FileWriter; import edu.isi.pegasus.planner.classes.Profile; import java.io.IOException; /** * Generates a sample site catalog in XML. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class CreateSampleSiteCatalog { /** * Generates a sample site catalog. * * @param file the path to the file to be written out. */ public void constructSiteCatalog( String file ) { try{ SiteStore store = new SiteStore(); SiteCatalogEntry entry = new SiteCatalogEntry( "isi_viz" ); entry.setHeadNodeFS( createHeadNodeFS() ); entry.setWorkerNodeFS( createWorkerNodeFS() ); //associate a replica catalog with the site. 
ReplicaCatalog rc = new ReplicaCatalog( "rls://replica.isi.edu", "RLS" ); rc.addAlias( "isi_skynet" ); rc.addConnection( new Connection("ignore.lrc", "rls://replica.caltech.edu" )); entry.addReplicaCatalog( rc ); //associate some profiles entry.addProfile( new Profile( Profile.ENV, "JAVA_HOME", "/java/") ); entry.addProfile( new Profile( Profile.VDS, "style", "gt2")); //associate grid gateway for auxillary and compute jobs GridGateway gw = new GridGateway( GridGateway.TYPE.gt2, "cluster.isi.edu/jobmanager-fork", GridGateway.SCHEDULER_TYPE.Fork ); gw.setJobType( GridGateway.JOB_TYPE.auxillary ); entry.addGridGateway( gw ); gw = new GridGateway( GridGateway.TYPE.gt2, "cluster.isi.edu/jobmanager-pbs", GridGateway.SCHEDULER_TYPE.PBS ); gw.setJobType( GridGateway.JOB_TYPE.compute ); entry.addGridGateway( gw ); //add entry to site store store.addEntry( entry ); //write DAX to file FileWriter scFw = new FileWriter( file ); System.out.println( "Writing out sample site catalog to " + file ); store.toXML( scFw, "" ); scFw.close(); //test the clone method also System.out.println( store.clone() ); } catch( IOException ioe ){ ioe.printStackTrace(); } } /** * Creates an object describing the head node filesystem. * * @return the HeadNodeFS */ public HeadNodeFS createHeadNodeFS(){ // describe the head node filesystem HeadNodeFS hfs = new HeadNodeFS(); //head node scratch description start HeadNodeScratch hscratch = new HeadNodeScratch(); //head node local scratch description LocalDirectory hscratchLocal = new LocalDirectory(); FileServer f = new FileServer( "gsiftp", "gsiftp://hserver1.isi.edu", "/external/local" ); hscratchLocal.addFileServer( f ); f = new FileServer( "gsiftp", "gsiftp://hserver2.isi.edu", "/external/h2-local" ); hscratchLocal.addFileServer( f ); hscratchLocal.setInternalMountPoint( new InternalMountPoint( "/local", "50G", "100G") ); //head node shared scratch description SharedDirectory hscratchShared = new SharedDirectory(); f = new FileServer( "gsiftp", "gsiftp://hserver1.isi.edu", "/external/shared-scratch" ); f.addProfile( new Profile( Profile.VDS, "transfer.arguments", "-s -a")); hscratchShared.addFileServer( f ); hscratchShared.setInternalMountPoint( new InternalMountPoint( "/shared-scratch", "50G", "100G") ); hscratch.setLocalDirectory( hscratchLocal ); hscratch.setSharedDirectory( hscratchShared ); //head node scratch description ends //head node storage description start HeadNodeStorage hstorage = new HeadNodeStorage(); //head node local storage description LocalDirectory hstorageLocal = new LocalDirectory(); f = new FileServer( "gsiftp", "gsiftp://hserver1.isi.edu", "/external/local-storage" ); hstorageLocal.addFileServer( f ); hstorageLocal.setInternalMountPoint( new InternalMountPoint( "/local-storage", "30G", "100G") ); //head node shared storage description SharedDirectory hstorageShared = new SharedDirectory(); f = new FileServer( "gsiftp", "gsiftp://hserver1.isi.edu", "/external/shared-storage" ); f.addProfile( new Profile( Profile.VDS, "transfer.arguments", "-s -a")); hstorageShared.addFileServer( f ); hstorageShared.setInternalMountPoint( new InternalMountPoint( "/shared-storage", "50G", "100G") ); hstorage.setLocalDirectory( hstorageLocal ); hstorage.setSharedDirectory( hstorageShared ); //head node storage description ends hfs.setScratch( hscratch ); hfs.setStorage( hstorage ); return hfs; } /** * Creates an object describing the worker node filesystem. 
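 * <p>
 * The full sample catalog, including this worker node description, can be
 * generated from the command line via the main method below (the output
 * file name here is illustrative):
 * <pre>{@code
 * java edu.isi.pegasus.planner.catalog.site.classes.CreateSampleSiteCatalog sample-sites.xml
 * }</pre>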
* * @return the HeadNodeFS */ public WorkerNodeFS createWorkerNodeFS(){ // describe the head node filesystem WorkerNodeFS wfs = new WorkerNodeFS(); //worker node scratch description start WorkerNodeScratch wscratch = new WorkerNodeScratch(); //worker node local scratch description LocalDirectory wscratchLocal = new LocalDirectory(); FileServer f = new FileServer( "file", "file:///", "/tmp" ); wscratchLocal.addFileServer( f ); wscratchLocal.setInternalMountPoint( new InternalMountPoint( "/tmp", "50G", "100G") ); //worker node shared scratch description SharedDirectory wscratchShared = new SharedDirectory(); f = new FileServer( "file", "file:///", "/external/shared-scratch" ); wscratchShared.setInternalMountPoint( new InternalMountPoint( "/shared-scratch", "50G", "100G") ); wscratch.setLocalDirectory( wscratchLocal ); wscratch.setSharedDirectory( wscratchShared ); //head node scratch description ends //worker node storage description start WorkerNodeStorage wstorage = new WorkerNodeStorage(); //worker node local scratch description LocalDirectory wstorageLocal = new LocalDirectory(); f = new FileServer( "file", "file:///", "/tmp" ); wstorageLocal.addFileServer( f ); wstorageLocal.setInternalMountPoint( new InternalMountPoint( "/tmp", "50G", "100G") ); //worker node shared scratch description SharedDirectory wstorageShared = new SharedDirectory(); f = new FileServer( "file", "file:///", "/external/shared-storage" ); wstorageShared.setInternalMountPoint( new InternalMountPoint( "/shared-storage", "50G", "100G") ); wstorage.setLocalDirectory( wstorageLocal ); wstorage.setSharedDirectory( wstorageShared ); //worker node scratch description ends //worker node storage description ends wfs.setScratch( wscratch ); wfs.setStorage( wstorage ); return wfs; } /** * The main program * * @param args */ public static void main( String[] args ){ CreateSampleSiteCatalog csc = new CreateSampleSiteCatalog(); if (args.length == 1) { csc.constructSiteCatalog(args[0]); } else { System.out.println("Usage: CreateSampleSiteCatalog "); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/WorkerSharedDirectory.java0000644000175000017500000000470411757531137032442 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.Writer; import java.io.IOException; import java.util.Iterator; import java.util.List; /** * This data class describes the directory shared only amongst worker nodes . * * @version $Revision: 561 $ * @author Karan Vahi */ public class WorkerSharedDirectory extends DirectoryType{ /** * The default constructor. */ public WorkerSharedDirectory(){ super(); } /** * The overloaded constructor * * @param fs list of file servers * @param imt the internal mount point. */ public WorkerSharedDirectory( List fs, InternalMountPoint imt ){ super( fs, imt ); } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. 
This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //write out the xml element writer.write( indent ); writer.write( "" ); //iterate through all the file servers for( Iterator it = this.getFileServersIterator(); it.hasNext(); ){ FileServer fs = it.next(); fs.toXML( writer, newIndent ); } //write out the internal mount point this.getInternalMountPoint().toXML( writer, newIndent ); writer.write( indent ); writer.write( "" ); writer.write( newLine ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/LocalSiteCatalogEntry.java0000644000175000017500000000343311757531137032347 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; /** * A convenience class that creates a default local site catalog entry * * @author Karan Vahi * @version $Revision: 2003 $ */ public class LocalSiteCatalogEntry{ /** * Creates a default site catalog entry for site local with the VO * set to pegasus. * * @return SiteCatalogEntry for the local site. */ public static SiteCatalogEntry create(){ return create( "pegasus" , null ); } /** * Creates a default site catalog entry for site local with the VO * set to pegasus. * * @param vo the VO to create the entry for. * @param grid the grid to create entry for * * @return SiteCatalogEntry for the local site. */ public static SiteCatalogEntry create( String vo, String grid ){ //always add local site. VORSSiteInfo siteInfo = VORSSiteCatalogUtil.getLocalSiteInfo( vo ); VORSVOInfo local = new VORSVOInfo(); local.setGrid( grid ); siteInfo.setVoInfo( local ); return VORSSiteCatalogUtil.createSiteCatalogEntry( siteInfo ); } }pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/SharedDirectory.java0000644000175000017500000000512011757531137031241 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.Writer; import java.io.IOException; import java.util.Iterator; import java.util.List; /** * This data class represents a shared directory on a site. * Can be local scratch or local storage. 
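 * <p>
 * Illustrative wiring into a head node scratch area ({@code shared} is
 * assumed to be a populated SharedDirectory):
 * <pre>{@code
 * HeadNodeScratch scratch = new HeadNodeScratch();
 * scratch.setSharedDirectory( shared );
 * }</pre>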
* * @version $Revision: 600 $ * @author Karan Vahi * */ public class SharedDirectory extends DirectoryType{ /** * The default constructor. */ public SharedDirectory(){ super(); } /** * The overloaded constructor * * @param fs list of file servers * @param imt the internal mount point. */ public SharedDirectory( List fs, InternalMountPoint imt ){ super( fs, imt ); } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //sanity check: nothing to write out for an empty directory if( this.isEmpty() ){ return; } //write out the xml element writer.write( indent ); writer.write( "<shared>" ); writer.write( newLine ); //iterate through all the file servers for( Iterator<FileServer> it = this.getFileServersIterator(); it.hasNext(); ){ FileServer fs = it.next(); fs.toXML( writer, newIndent ); } //write out the internal mount point this.getInternalMountPoint().toXML( writer, newIndent ); writer.write( indent ); writer.write( "</shared>" ); writer.write( newLine ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/FileServer.java0000644000175000017500000000507311757531137030223 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.classes; import java.io.Writer; import java.io.IOException; /** * This class describes a file server that can be used to stage data * to and from a site. * * @author Karan Vahi */ public class FileServer extends FileServerType { /** * The default constructor. */ public FileServer() { super(); } /** * Overloaded constructor. * * @param protocol protocol employed by the File Server. * @param urlPrefix the url prefix * @param mountPoint the mount point for the server. */ public FileServer( String protocol, String urlPrefix, String mountPoint ) { super( protocol, urlPrefix, mountPoint ); } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream.
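 *
 * The element written resembles the following sketch; the attribute values
 * are illustrative, in the style of the sc-3.0 schema:
 * <pre>
 *   &lt;file-server protocol="file" url="file:///" mount-point="/tmp"/&gt;
 * </pre>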
*/ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); String newIndent = indent + "\t"; //write out the xml element with its attributes writer.write( indent ); writer.write( "<file-server" ); writer.write( " protocol=\"" + getProtocol() + "\"" ); writer.write( " url=\"" + getURLPrefix() + "\"" ); writer.write( " mount-point=\"" + getMountPoint() + "\"" ); //profiles, if present, are written out as nested elements if( mProfiles.isEmpty() ){ writer.write( "/>" ); } else{ writer.write( ">" ); writer.write( newLine ); mProfiles.toXML( writer , newIndent ); writer.write( indent ); writer.write( "</file-server>" ); } writer.write( newLine ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/classes/FileServerType.java0000644000175000017500000001045011757531137031060 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.classes; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.catalog.classes.Profiles; /** * An abstract class that describes a file server that can be used to stage data * to and from a site. * * @author Karan Vahi * @version $Revision: 2572 $ */ public abstract class FileServerType extends AbstractSiteData { /** * The protocol used by the file server. */ protected String mProtocol; /** * The URL prefix for the server. */ protected String mURLPrefix; /** * The mount point for the server. */ protected String mMountPoint; /** * The profiles associated with the FileSystem. */ protected Profiles mProfiles; /** * The default constructor. */ public FileServerType() { mProtocol = new String(); mURLPrefix = new String(); mMountPoint = new String(); mProfiles = new Profiles(); } /** * Overloaded constructor. * * @param protocol protocol employed by the File Server. * @param urlPrefix the url prefix * @param mountPoint the mount point for the server. */ public FileServerType( String protocol, String urlPrefix, String mountPoint ) { mProtocol = protocol; mURLPrefix = urlPrefix; mMountPoint = mountPoint; mProfiles = new Profiles(); } /** * Sets the protocol implemented by the file server. * * @param protocol the protocol */ public void setProtocol( String protocol ){ mProtocol = protocol; } /** * Returns protocol implemented by the file server. * * @return protocol */ public String getProtocol(){ return mProtocol; } /** * Sets the url prefix. * * @param prefix the url prefix */ public void setURLPrefix( String prefix ){ mURLPrefix = prefix; } /** * Returns the url prefix. * * @return the url prefix */ public String getURLPrefix(){ return mURLPrefix; } /** * Sets the mount point. * * @param point the mount point. */ public void setMountPoint( String point ){ mMountPoint = point; } /** * Returns the mount point * * @return the mount point. */ public String getMountPoint(){ return mMountPoint; } /** * Adds a profile. * * @param p the profile to be added */ public void addProfile( Profile p ){ //retrieve the appropriate namespace and then add mProfiles.addProfile( p ); } /** * Sets the profiles associated with the file server. * * @param profiles the profiles. */ public void setProfiles( Profiles profiles ){ mProfiles = profiles; } /** * Returns the clone of the object.
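 *
 * A usage sketch of the copy this method produces (values illustrative):
 * <pre>
 *   FileServer original = new FileServer( "file", "file:///", "/tmp" );
 *   FileServer copy = (FileServer) original.clone();
 * </pre>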
* * @return the clone */ public Object clone(){ FileServerType obj; try{ obj = ( FileServerType ) super.clone(); obj.setMountPoint( this.getMountPoint() ); obj.setProtocol( this.getProtocol() ); obj.setURLPrefix( this.getURLPrefix() ); obj.setProfiles( (Profiles)this.mProfiles.clone() ); } catch( CloneNotSupportedException e ){ //somewhere in the hierarchy chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } return obj; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/SiteFactory.java0000644000175000017500000001653711757531137026763 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import java.util.Properties; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; /** * A factory class to load the appropriate implementation of Site Catalog * as specified by properties. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class SiteFactory { /** * The default package where all the implementations reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.catalog.site.impl"; /** * Loads the sites from the Site Catalog into a SiteStore, always including * the local site. * * @param sites the collection of site handles to load. * @param bag the bag of pegasus objects * * @return SiteStore object containing the information about the sites. */ public static SiteStore loadSiteStore( Collection sites , PegasusBag bag ) { LogManager logger = bag.getLogger(); SiteStore result = new SiteStore(); if( sites.isEmpty() ) { logger.log( "No sites given by user. Will use sites from the site catalog", LogManager.DEBUG_MESSAGE_LEVEL); sites.add( "*" ); } SiteCatalog catalog = null; /* load the catalog using the factory */ catalog = SiteFactory.loadInstance( bag.getPegasusProperties() ); /* always load local site */ List toLoad = new ArrayList( sites ); toLoad.add( "local" ); /* load the sites in site catalog */ try{ catalog.load( toLoad ); /* query for the sites, and print them out */ logger.log( "Sites loaded are " + catalog.list( ) , LogManager.DEBUG_MESSAGE_LEVEL ); //load into SiteStore from the catalog. for( Iterator<String> it = toLoad.iterator(); it.hasNext(); ){ SiteCatalogEntry s = catalog.lookup( it.next() ); if( s != null ){ result.addEntry( s ); } } } catch ( SiteCatalogException e ){ throw new RuntimeException( "Unable to load from site catalog " , e ); } finally{ /* close the connection */ try{ catalog.close(); }catch( Exception e ){} } return result; } /** * Connects the interface with the site catalog implementation.
The * choice of backend is configured through properties. This method uses default * properties from the property singleton. * * @return handle to the Site Catalog. * * @throws SiteFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static SiteCatalog loadInstance() throws SiteFactoryException { return loadInstance( PegasusProperties.getInstance() ); } /** * Connects the interface with the site catalog implementation, e.g. * edu.isi.pegasus.planner.catalog.site.impl.XML3. The * choice of backend is configured through properties. * * @param properties is an instance of properties to use. * * @return handle to the Site Catalog. * * @throws SiteFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static SiteCatalog loadInstance( PegasusProperties properties ) throws SiteFactoryException { if( properties == null ){ throw new SiteFactoryException( "Invalid NULL properties passed" ); } /* get the implementor from properties */ String catalogImplementor = properties.getPoolMode().trim(); /* prepend the package name if required */ catalogImplementor = (catalogImplementor.indexOf('.') == -1) ? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + catalogImplementor : //load directly catalogImplementor; Properties connect = properties.matchingSubset( SiteCatalog.c_prefix, false ); // determine the class that implements the site catalog return loadInstance( properties.getProperty( SiteCatalog.c_prefix ), connect ); } /** * Connects the interface with the site catalog implementation. The * choice of backend is configured through properties. * * * @param catalogImplementor the name of the class implementing catalog * @param properties the connection properties. * * @return handle to the Site Catalog. * * @throws SiteFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static SiteCatalog loadInstance( String catalogImplementor, Properties properties ) { if( properties == null ){ throw new SiteFactoryException( "Invalid NULL properties passed" ); } SiteCatalog catalog = null; try{ if ( catalogImplementor == null ){ throw new RuntimeException( "You need to specify the " + SiteCatalog.c_prefix + " property" ); } /* prepend the package name if required */ catalogImplementor = (catalogImplementor.indexOf('.') == -1) ? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + catalogImplementor : //load directly catalogImplementor; DynamicLoader dl = new DynamicLoader( catalogImplementor ); catalog = ( SiteCatalog ) dl.instantiate( new Object[0] ); if ( catalog == null ){ throw new RuntimeException( "Unable to load " + catalogImplementor ); } if ( ! catalog.connect( properties ) ) throw new RuntimeException( "Unable to connect to site catalog implementation" ); } catch( Exception e ){ throw new SiteFactoryException( "Unable to instantiate Site Catalog ", catalogImplementor, e ); } return catalog; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/SiteCatalogException.java0000644000175000017500000000450711757531137030577 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site; import edu.isi.pegasus.planner.catalog.CatalogException; /** * Class to notify of failures. Exceptions are chained like the * {@link java.sql.SQLException} interface.

* * @author Jens-S. Vöckler, Karan Vahi * * @see edu.isi.pegasus.planner.catalog.SiteCatalog */ public class SiteCatalogException extends CatalogException{ /** * Constructs a SiteCatalogException with no detail * message. */ public SiteCatalogException() { super(); } /** * Constructs a SiteCatalogException with the * specified detailed message. * * @param s is the detailed message. */ public SiteCatalogException(String s) { super(s); } /** * Constructs a SiteCatalogException with the * specified detailed message and a cause. * * @param s is the detailed message. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public SiteCatalogException(String s, Throwable cause) { super(s, cause); } /** * Constructs a SiteCatalogException with * just the specified cause. * * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public SiteCatalogException(Throwable cause) { super(cause); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/TestVORSSiteCatalog.java0000644000175000017500000000637211757531137030274 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.Version; import java.util.ArrayList; import java.util.List; /** * A Test program that shows how to load a Site Catalog, and query for all sites. * The configuration is picked from the Properties. The following properties * need to be set

 * <pre>
 *      pegasus.catalog.site       Text|XML|XML3
 *      pegasus.catalog.site.file  path to the site catalog.
 * </pre>
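 *
 * For example, the catalog backend and logging can be set up as below; this
 * mirrors the main program, and the VORS backend name is illustrative:
 * <pre>
 *   PegasusProperties p = PegasusProperties.nonSingletonInstance();
 *   p.setProperty( "pegasus.catalog.site", "VORS" );
 *   SiteCatalog catalog = SiteFactory.loadInstance( p );
 * </pre>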
* * @author Karan Vahi * @version $Revision: 595 $ */ public class TestVORSSiteCatalog { /** * The main program. * * @param args passed at runtime */ public static void main( String[] args ) { SiteCatalog catalog = null; /* Properties props = new Properties(); props.put("host", "vors.grid.iu.edu"); props.put("port", "80"); props.put("vo","all"); props.put("grid","all");*/ /* load the catalog using the factory */ try{ //catalog = SiteFactory.loadInstance("VORS", props); PegasusProperties p = PegasusProperties.nonSingletonInstance(); p.setProperty( "pegasus.catalog.site", "VORS" ); LogManager logger = LogManagerFactory.loadInstance( p ); logger.setLevel( LogManager.DEBUG_MESSAGE_LEVEL ); logger.logEventStart( "event.pegasus.planner", "planner.version", Version.instance().toString() ); catalog = SiteFactory.loadInstance( p); } catch ( SiteFactoryException e ){ System.out.println( e.convertException() ); System.exit( 2 ); } /* load all sites in site catalog */ try{ //catalog.connect(props); List s = new ArrayList(1); String handle = "CIT_CMS_T2"; s.add( handle );//"*" ); System.out.println( "Loaded " + catalog.load( s ) + " number of sites " ); /* query for the sites, and print them out */ System.out.println( "Sites loaded are " + catalog.list( ) ); System.out.println( catalog.lookup( handle )); } catch ( SiteCatalogException e ){ e.printStackTrace(); } finally{ /* close the connection */ try{ catalog.close(); }catch( Exception e ){} } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/0000755000175000017500000000000011757531667024621 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/XML3.java0000644000175000017500000001577211757531137026213 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.impl; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.planner.catalog.site.SiteCatalogException; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.parser.SiteCatalogParser; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; import java.io.File; import java.io.IOException; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; /** * An implementation of the Site Catalog interface that is backed up by * an XML file conforming to site catalog xml schema version 3. * * The schema can be found online at * *
 * <pre>
 *  http://pegasus.isi.edu/schema/sc-3.0.xsd
 * </pre>
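 *
 * A connection sketch for this backend; the file path is illustrative:
 * <pre>
 *   Properties props = new Properties();
 *   props.setProperty( "file", "/path/to/sites.xml" );
 *   SiteCatalog catalog = new XML3();
 *   catalog.connect( props );
 * </pre>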
* * @author Karan Vahi * @version $Revision: 2622 $ */ public class XML3 implements SiteCatalog { /** * The handle to parser instance that will parse the site catalog. */ private SiteCatalogParser mParser; /** * Stores sites in memory */ //private Map<String, SiteCatalogEntry> mSiteMap; private SiteStore mSiteStore; /** * The Site Catalog file to be parsed. */ private String mFilename; /** * The handle to the log manager. */ private LogManager mLogger; /** * The bag of Pegasus Initialization objects */ private PegasusBag mBag; /** * The default constructor. */ public XML3(){ mLogger = LogManagerFactory.loadSingletonInstance(); mBag = new PegasusBag(); mBag.add( PegasusBag.PEGASUS_LOGMANAGER, mLogger ); mBag.add( PegasusBag.PEGASUS_PROPERTIES, PegasusProperties.nonSingletonInstance() ); //mSiteMap = new HashMap(); } /** * Establishes a connection to the file from the properties. * You will need to specify a "file" property to point to the * location of the on-disk instance. * * @param props is the property table with sufficient settings * to connect to the implementation. * * @return true if connected, false if failed to connect. * * @throws SiteCatalogException */ public boolean connect( Properties props ) throws SiteCatalogException{ if ( props.containsKey("file") ) return connect( props.getProperty("file") ); return false; } /** * Initializes the Site Catalog Parser instance for the file. * * @param filename is the name of the file to read. * * @return true, if the file exists and is readable. */ public boolean connect( String filename ){ mFilename = filename; File f = new File( filename ); if ( f.exists() && f.canRead() ){ return true; } else{ throw new RuntimeException( "Cannot read or access file " + filename ); } } /** * Close the connection to the backend file. */ public void close() { mFilename = null; } /** * Returns if the connection is closed or not. * * @return boolean */ public boolean isClosed() { return mFilename == null; } /** * Loads up the Site Catalog implementation with the sites whose * site handles are specified. This is a convenience method, that can * allow the backend implementations to maintain soft state if required. * * If the implementation chooses not to implement this, just do an empty * implementation. * * The site handle "*" is a special handle designating that all sites * are to be loaded. * * @param sites the list of sites to be loaded. * * @return the number of sites loaded. * * @throws SiteCatalogException in case of error. */ public int load( List sites ) throws SiteCatalogException { if( this.isClosed() ){ throw new SiteCatalogException( "Need to connect to site catalog before loading" ); } mParser = new SiteCatalogParser( this.mBag, sites ); //mLogger.log( "Parsing file " + mFilename, LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_PARSE_SITE_CATALOG , "site-catalog.id", mFilename, LogManager.DEBUG_MESSAGE_LEVEL ); mParser.startParser( mFilename ); mLogger.logEventCompletion( LogManager.DEBUG_MESSAGE_LEVEL ); mSiteStore = mParser.getSiteStore(); return mSiteStore.list().size(); } /** * Not implemented as yet. * * @param entry the entry to insert. * @return number of entries inserted. * @throws edu.isi.pegasus.planner.catalog.site.SiteCatalogException */ public int insert(SiteCatalogEntry entry) throws SiteCatalogException { throw new UnsupportedOperationException("Not supported yet."); } /** * Lists the site handles for all the sites in the Site Catalog. * * @return A set of site handles. * * @throws SiteCatalogException in case of error.
*/ public Set list() throws SiteCatalogException { return ( mSiteStore == null )? new HashSet(): mSiteStore.list(); } /** * Retrieves the SiteCatalogEntry for a site. * * @param handle the site handle / identifier. * * @return SiteCatalogEntry in case an entry is found , or null * if no match is found. * * * @throws SiteCatalogException in case of error. */ public SiteCatalogEntry lookup(String handle) throws SiteCatalogException { return ( mSiteStore == null )? null: mSiteStore.lookup( handle ); } /** * Not yet implemented as yet. * * @param handle * @return number of entries removed. * @throws edu.isi.pegasus.planner.catalog.site.SiteCatalogException */ public int remove( String handle ) throws SiteCatalogException { throw new UnsupportedOperationException("Not supported yet."); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/MYOSG.java0000644000175000017500000003451611757531137026363 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.impl; import edu.isi.pegasus.common.util.Boolean; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.planner.catalog.site.SiteCatalogException; import edu.isi.pegasus.planner.catalog.site.classes.LocalSiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.catalog.site.impl.myosg.classes.MYOSGSiteInfo; import edu.isi.pegasus.planner.catalog.site.impl.myosg.classes.MYOSGSiteInfoFacade; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.DateUtils; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.MYOSGSiteCatalogParser; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.MYOSGSiteCatalogUtil; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.MYOSGSiteConstants; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.MYOSGURLGenerator; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.SiteScrapper; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.URLParamConstants; import java.io.File; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URLEncoder; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; /** * This class implements the SiteCatalog interface * * * @author Prasanth Thomas * @author Karan Vahi * * @version $Revision: 2079 $ */ public class MYOSG implements SiteCatalog { /** * The date format to be used while passing dates to the URL construction. 
*/ public static final String DATE_FORMAT = "MM/dd/yyyy"; /** * The default VO to use if none is specified */ public static final String DEFAULT_VO = "LIGO"; /** * The default grid to use if none is specified */ public static final String DEFAULT_GRID = "OSG"; /** * The name of the key that determines whether we keep the tmp xml file * around or not. */ public static final String KEEP_TMP_FILE_KEY = "myosg.keep.tmp.file"; /** * The name of the key that determines what VO to query for. */ public static final String VO_KEY = "myosg.vo"; /** * The name of the key that determines what grid to query for. */ public static final String GRID_KEY = "myosg.grid"; /** * The map that indexes the MYOSG site information by site name. */ private Map<String, MYOSGSiteInfo> mMYOSGInfo = null; /** * The Logging instance. */ private final LogManager mLogger; /** * A boolean variable tracking whether catalog is connected or not */ private boolean mConnected; /** * A boolean variable that tracks whether to keep the temp xml file or not. */ private boolean mKeepTmpFile; /** * The Site Catalog file to be parsed. */ // private String mFilename; /** * The SiteStore object where information about the sites is stored. */ private final SiteStore mSiteStore; /** * The vo for which to query MYOSG */ private String mVO; /** * The grid for which to query MYOSG */ private String mGrid; /** * The default constructor. */ public MYOSG() { mLogger = LogManagerFactory.loadSingletonInstance(); mSiteStore = new SiteStore(); mConnected = false; mKeepTmpFile = false; mVO = MYOSG.DEFAULT_VO; } /** * Inserts a site catalog entry into the in-memory site store. * * @param entry the site catalog entry to insert. * * @return number of entries inserted * * @throws edu.isi.pegasus.planner.catalog.site.SiteCatalogException */ public int insert(SiteCatalogEntry entry) throws SiteCatalogException { mSiteStore.addEntry(entry); return 1; } /** * Lists the site handles for all the sites in the Site Catalog. * * @return A set of site handles. * * @throws SiteCatalogException * in case of error. */ public Set list() throws SiteCatalogException { return mSiteStore.list(); } /** * Loads up the Site Catalog implementation with the sites whose site * handles are specified. This is a convenience method, that can allow the * backend implementations to maintain soft state if required. * * If the implementation chooses not to implement this, just do an empty * implementation. * * The site handle "*" is a special handle designating that all sites are * to be loaded. * * @param sites the list of sites to be loaded. * * @return the number of sites loaded. * * @throws SiteCatalogException * in case of error. */ public int load(List sites) throws SiteCatalogException { if (this.isClosed()) { throw new SiteCatalogException( "Need to connect to site catalog before loading"); } int ret = 0; Iterator<String> siteItr; if (sites.get(0).trim().equals("*")) { siteItr = mMYOSGInfo.keySet().iterator(); } else { siteItr = sites.iterator(); } while (siteItr.hasNext()) { String sitename = siteItr.next(); MYOSGSiteInfo temp = mMYOSGInfo.get(sitename); if (temp == null) { mLogger.log(sitename + " site not found.", LogManager.ERROR_MESSAGE_LEVEL); continue; } MYOSGSiteInfoFacade myOSiteInfoFacade = new MYOSGSiteInfoFacade( temp); if (myOSiteInfoFacade.isValidSite()) { mSiteStore.addEntry(MYOSGSiteCatalogUtil .createSiteCatalogEntry(new MYOSGSiteInfoFacade(temp))); ret++; } else { mLogger.log( "Not constructing entry for site " + sitename + " "+ myOSiteInfoFacade.getSitesMissingInformation(sitename), LogManager.INFO_MESSAGE_LEVEL); } } //always add local site. mLogger.log( "Site LOCAL. Creating default entry" , LogManager.INFO_MESSAGE_LEVEL ); mSiteStore.addEntry( LocalSiteCatalogEntry.create( mVO, mGrid ) ); ret++; return ret; } /** * Retrieves the SiteCatalogEntry for a site. * * @param handle * the site handle / identifier. * * @return SiteCatalogEntry in case an entry is found, or null * if no match is found. * * * @throws SiteCatalogException * in case of error. */ public SiteCatalogEntry lookup(String handle) throws SiteCatalogException { return mSiteStore.lookup(handle); } /** * Not implemented yet. * * @param handle the site to be removed. * * @return number of entries removed. * * @throws edu.isi.pegasus.planner.catalog.site.SiteCatalogException */ public int remove(String handle) throws SiteCatalogException { throw new UnsupportedOperationException("Not supported yet."); } /** * Close the connection to the backend. */ public void close() { mConnected = false; } /** * Establishes a connection to the MYOSG website using the properties. * Optional properties are myosg.keep.tmp.file, myosg.vo and myosg.grid. * * @param props * is the property table with sufficient settings to connect * to the implementation. * * @return true if connected, false if failed to connect. * * @throws SiteCatalogException */ public boolean connect(Properties props) throws SiteCatalogException { /* * for( Iterator it = props.keySet().iterator(); it.hasNext(); ){ String * key = (String)it.next(); System.out.println( key + " -> " + * props.getProperty( key )); } */ if (props.containsKey(MYOSG.KEEP_TMP_FILE_KEY)) { String value = props.getProperty(MYOSG.KEEP_TMP_FILE_KEY); mKeepTmpFile = Boolean.parse(value, false); } /* determine the VO and grid if specified */ if (props.containsKey( MYOSG.VO_KEY )) { mVO = props.getProperty( MYOSG.VO_KEY ); } if (props.containsKey( MYOSG.GRID_KEY )) { mGrid = props.getProperty( MYOSG.GRID_KEY ); } mLogger.log( "MYOSG queried for VO " + mVO + " for grid " + mGrid , LogManager.DEBUG_MESSAGE_LEVEL ); /* generate the HTTP URL to the MYOSG website. */ String urlString = new MYOSGURLGenerator().getURL(this .createConnectionURLProperties()); mLogger.log("HTTP URL constructed to the MYOSG website " + urlString, LogManager.DEBUG_MESSAGE_LEVEL); /* grab the XML on the web url and populate to a temp file */ File temp; try { temp = File.createTempFile("myosg-", ".xml"); } catch (IOException ioe) { throw new SiteCatalogException("Unable to create a temp file ", ioe); } String tempPath = temp.getAbsolutePath(); SiteScrapper.scrapeSite(urlString, tempPath); mLogger.log("Webpage retrieved to " + tempPath, LogManager.DEBUG_MESSAGE_LEVEL); MYOSGSiteCatalogParser myOSGCatalogCreator = new MYOSGSiteCatalogParser(); mLogger.logEventStart(LoggingKeys.EVENT_PEGASUS_PARSE_SITE_CATALOG, "site-catalog.id", tempPath, LogManager.DEBUG_MESSAGE_LEVEL); myOSGCatalogCreator.startParser(tempPath); mLogger.logEventCompletion(LogManager.DEBUG_MESSAGE_LEVEL); for (Iterator<MYOSGSiteInfo> itr = myOSGCatalogCreator.getSites() .iterator(); itr.hasNext();) { if (mMYOSGInfo == null) { mMYOSGInfo = new HashMap<String, MYOSGSiteInfo>(); } MYOSGSiteInfo myOSGSiteInfo = itr.next(); mMYOSGInfo.put((String) myOSGSiteInfo .getProperty(MYOSGSiteConstants.SITE_NAME_ID), myOSGSiteInfo); } /* delete the temp file if required */ if (!mKeepTmpFile){ mLogger.log( "Deleting temp file " + tempPath, LogManager.DEBUG_MESSAGE_LEVEL ); temp.delete(); } mConnected = true; return true; } /** * Creates the properties that are required to compose the HTTP URL to the * MYOSG website.
* * @return Properties containing the connection URL Properties */ private Properties createConnectionURLProperties() { Properties properties = new Properties(); properties.setProperty("" + URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWSERVICE, "on"); properties.setProperty("" + URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWRSVSTATUS, "on"); properties.setProperty("" + URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWFQDN, "on"); properties.setProperty("" + URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWVOMEMBERSHIP, "on"); properties.setProperty("" + URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWVOOWNERSHIP, "on"); properties.setProperty("" + URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWENVIRONMNENT, "on"); properties.setProperty("" + URLParamConstants.PARAM_GIP_STATUS_ATTRS_SHOWTESTRESULTS, "on"); properties.setProperty("" + URLParamConstants.PARAM_GIP_STATUS_ATTRS_SHOWFQDN, "on"); properties.setProperty("" + URLParamConstants.PARAM_ACCOUNT_TYPE, "cumulative_hours"); properties.setProperty("" + URLParamConstants.PARAM_CE_ACCOUNT_TYPE, "gip_vo"); properties.setProperty("" + URLParamConstants.PARAM_SE_ACCOUNT_TYPE, "vo_transfer_volume"); properties.setProperty("" + URLParamConstants.PARAM_START_TYPE, "7daysago"); properties.setProperty("" + URLParamConstants.PARAM_START_DATE, getStartDate()); properties.setProperty("" + URLParamConstants.PARAM_END_TYPE, "now"); properties.setProperty("" + URLParamConstants.PARAM_END_DATE, getDateAfter(7)); properties.setProperty("" + URLParamConstants.PARAM_RESOURCE_TO_DISPLAY_ALL_RESOURCES, "on"); properties.setProperty("" + URLParamConstants.PARAM_FILTER_GRID_TYPE, "on"); int gridTypeID = URLParamConstants.getGridTypeID(mGrid); if( gridTypeID == -1 ){ throw new SiteCatalogException("Unable to determine integer ID for grid " + mGrid); } properties.setProperty("" + URLParamConstants.PARAM_FILTER_GRID_TYPE_OPTION, ""+ gridTypeID); properties.setProperty("" + URLParamConstants.PARAM_FILTER_CURRENT_RSV_STATUS, "on"); properties .setProperty( "" + URLParamConstants.PARAM_FILTER_CURRENT_RSV_STATUS_OPTION, "1"); properties.setProperty("" + URLParamConstants.PARAM_FILTER_VO_SUPPORT, "on"); int voID = URLParamConstants.getVOID( mVO ); if( voID == -1 ){ throw new SiteCatalogException("Unable to determine integer ID for VO " + mVO); } properties.setProperty(""+URLParamConstants.PARAM_FILTER_VO_SUPPORT_OPTION,"" + voID ); properties.setProperty("" + URLParamConstants.PARAM_FILTER_ACTIVE_STATUS_OPTION, "1"); properties.setProperty("" + URLParamConstants.PARAM_FILTER_DISABLE_STATUS_OPTION, "1"); return properties; } /** * Returns the start date formatted as MM/dd/yyyy. * * @return the start date. */ private String getStartDate() { String now = null; try { now = URLEncoder.encode(DateUtils.now(DATE_FORMAT), "UTF-8"); } catch (UnsupportedEncodingException e) { throw new SiteCatalogException("Problem encoding the date after", e); } return now; } /** * Returns the date after n days formatted as MM/dd/yyyy. * * @param days the days after. * * @return the added date */ private static String getDateAfter(int days) { String now = null; try { now = URLEncoder .encode(DateUtils.after(days, DATE_FORMAT), "UTF-8"); } catch (UnsupportedEncodingException e) { throw new SiteCatalogException("Problem encoding the date after", e); } return now; } /** * Returns if the connection is closed or not. 
* * @return boolean */ public boolean isClosed() { // TODO Auto-generated method stub return !mConnected; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/XML.java0000644000175000017500000006226711757531137026131 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.impl; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.planner.catalog.site.SiteCatalogException; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.PoolConfig; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GlobusVersion; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPServer; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPBandwidth; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.JobManager; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.LRC; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.WorkDir; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.site.classes.SiteInfo2SiteCatalogEntry; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.parser.Parser; import edu.isi.pegasus.planner.namespace.Namespace; import java.io.File; import java.io.IOException; import java.util.Set; import java.util.HashSet; import java.util.List; import java.util.Properties; import org.xml.sax.Attributes; import org.xml.sax.SAXException; /** * A back port to old site catalog schema for the current Site Catalog API * * This class parses XML documents that conform to site catalog schema version 2. * *
 * <pre>
 *  http://pegasus.isi.edu/schema/sc-2.0.xsd
 * </pre>
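 *
 * A fragment of the sc-2.0 style input this parser handles (attribute
 * values are illustrative):
 * <pre>
 *   &lt;site handle="local" gridlaunch="/path/to/kickstart" sysinfo="INTEL32::LINUX"&gt;
 *     ...
 *   &lt;/site&gt;
 * </pre>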
* * @author Karan Vahi * @version $Revision: 4559 $ */ public class XML extends Parser implements SiteCatalog{ /** * The "not-so-official" location URL of the site catalog schema definition. */ public static final String SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/sc-2.0.xsd"; /** * The URI namespace for the site catalog schema. */ public static final String SCHEMA_NAMESPACE = "http://pegasus.isi.edu/schema/sitecatalog"; public PoolConfig m_pconfig = null; private SiteInfo m_pool_info = null; private String m_namespace = null; private String m_key = null; private GridFTPServer gftp = null; /** * The Logging instance. */ private LogManager mLogger; /** * Stores sites in memory */ //private Map<String, SiteCatalogEntry> mSiteMap; private SiteStore mSiteStore; /** * The Site Catalog file to be parsed. */ private String mFilename; /** * The default constructor. */ public XML(){ super( PegasusProperties.nonSingletonInstance() ); mLogger = LogManagerFactory.loadSingletonInstance(); mSiteStore = new SiteStore(); } /** * Establishes a connection to the file from the properties. * You will need to specify a "file" property to point to the * location of the on-disk instance. * * @param props is the property table with sufficient settings * to connect to the implementation. * * @return true if connected, false if failed to connect. * * @throws SiteCatalogException */ public boolean connect( Properties props ) throws SiteCatalogException{ if ( props.containsKey("file") ) return connect( props.getProperty("file") ); return false; } /** * Initializes the Site Catalog Parser instance for the file. * * @param filename is the name of the file to read. * * @return true, if the file exists and is readable. */ public boolean connect( String filename ){ mFilename = filename; File f = new File( filename ); if ( f.exists() && f.canRead() ){ return true; } else{ throw new RuntimeException( "Cannot read or access file " + filename ); } } /** * Close the connection to the backend file. */ public void close() { mFilename = null; } /** * Returns if the connection is closed or not. * * @return boolean */ public boolean isClosed() { return mFilename == null; } /** * Loads up the Site Catalog implementation with the sites whose * site handles are specified. This is a convenience method, that can * allow the backend implementations to maintain soft state if required. * * If the implementation chooses not to implement this, just do an empty * implementation. * * The site handle "*" is a special handle designating that all sites * are to be loaded. * * @param sites the list of sites to be loaded. * * @return the number of sites loaded. * * @throws SiteCatalogException in case of error. */ public int load( List sites ) throws SiteCatalogException { if( this.isClosed() ){ throw new SiteCatalogException( "Need to connect to site catalog before loading" ); } mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_PARSE_SITE_CATALOG , "site-catalog.id", mFilename, LogManager.DEBUG_MESSAGE_LEVEL ); //mLogger.log( "Parsing file " + mFilename, LogManager.DEBUG_MESSAGE_LEVEL ); //setting the schema Locations String schemaLoc = getSchemaLocation(); mLogger.log( "Picking schema for site catalog " + schemaLoc, LogManager.CONFIG_MESSAGE_LEVEL); String list = XML.SCHEMA_NAMESPACE + " " + schemaLoc; setSchemaLocations( list ); startParser( mFilename ); mLogger.logEventCompletion( LogManager.DEBUG_MESSAGE_LEVEL ); return mSiteStore.list().size(); } /** * Not implemented as yet.
* * @param entry the entry * @return number of entries inserted * @throws edu.isi.pegasus.planner.catalog.site.SiteCatalogException */ public int insert(SiteCatalogEntry entry) throws SiteCatalogException { throw new UnsupportedOperationException("Not supported yet."); } /** * Lists the site handles for all the sites in the Site Catalog. * * @return A set of site handles. * * @throws SiteCatalogException in case of error. */ public Set list() throws SiteCatalogException { return ( mSiteStore == null )? new HashSet(): mSiteStore.list(); } /** * Retrieves the SiteCatalogEntry for a site. * * @param handle the site handle / identifier. * * @return SiteCatalogEntry in case an entry is found, or null * if no match is found. * * * @throws SiteCatalogException in case of error. */ public SiteCatalogEntry lookup( String handle ) throws SiteCatalogException { return ( mSiteStore == null )? null: mSiteStore.lookup( handle ); } /** * Not implemented yet. * * @param handle the site to be removed * * @return number of entries removed. * @throws edu.isi.pegasus.planner.catalog.site.SiteCatalogException */ public int remove( String handle ) throws SiteCatalogException { throw new UnsupportedOperationException("Not supported yet."); } public void startParser( String configxml ) { try { this.testForFile( configxml ); mParser.parse( configxml ); } catch ( IOException ioe ) { mLogger.log( "IO Error :" + ioe.getMessage(), LogManager.ERROR_MESSAGE_LEVEL ); } catch ( SAXException se ) { if ( mLocator != null ) { mLogger.log( "Error in " + mLocator.getSystemId() + " at line " + mLocator.getLineNumber() + " at column " + mLocator.getColumnNumber() + " :" + se.getMessage() , LogManager.ERROR_MESSAGE_LEVEL); } } } public void endDocument() { } public void endElement( String uri, String localName, String qName ) { if ( localName.trim().equalsIgnoreCase( "sitecatalog" ) ) { handleConfigTagEnd(); } else if ( localName.trim().equalsIgnoreCase( "site" ) ) { handlePoolTagEnd(); } else if ( localName.trim().equalsIgnoreCase( "lrc" ) ) { handleLRCTagEnd(); } else if ( localName.trim().equalsIgnoreCase( "jobmanager" ) ) { handleJobManagerTagEnd(); } else if ( localName.trim().equalsIgnoreCase( "profile" ) ) { handleProfileTagEnd( m_pool_info ); } else if ( localName.trim().equalsIgnoreCase( "gridftp" ) ) { handleGridFtpTagEnd(); } else if ( localName.trim().equalsIgnoreCase( "workdirectory" ) ) { handleWorkDirectoryTagEnd( m_pool_info ); } else if ( localName.trim().equalsIgnoreCase( "bandwidth" ) ) { handleGridFtpBandwidthTagEnd(); } else { mLogger.log( "Unknown element end reached :" + uri + ":" + localName + ":" + qName + "-******" + mTextContent + "***********", LogManager.ERROR_MESSAGE_LEVEL ); mTextContent.setLength( 0 ); } } public void startElement( String uri, String localName, String qName, Attributes attrs ) { try { if ( localName.trim().equalsIgnoreCase( "sitecatalog" ) ) { handleConfigTagStart(); } else if ( localName.trim().equalsIgnoreCase( "site" ) ) { m_pool_info = handlePoolTagStart( m_pconfig, attrs ); } else if ( localName.trim().equalsIgnoreCase( "lrc" ) ) { handleLRCTagStart( m_pool_info, attrs ); } else if ( localName.trim().equalsIgnoreCase( "jobmanager" ) ) { handleJobManagerTagStart( m_pool_info, attrs ); } else if ( localName.trim().equalsIgnoreCase( "profile" ) ) { handleProfileTagStart( m_pool_info, attrs ); } else if ( localName.trim().equalsIgnoreCase( "gridftp" ) ) { handleGridFtpTagStart( m_pool_info, attrs ); } else if ( localName.trim().equalsIgnoreCase( "workdirectory" ) ) {
handleWorkDirectoryTagStart( m_pool_info, attrs ); } else if ( localName.trim().equalsIgnoreCase( "bandwidth" ) ) { handleGridFtpBandwidthTagStart( m_pool_info, attrs ); } else { mLogger.log( "Unknown element in xml :" + uri + ":" + localName + ":" + qName, LogManager.ERROR_MESSAGE_LEVEL ); } } catch ( Exception e ) { e.printStackTrace(); } } /** * Returns the XML schema namespace that a document being parsed conforms * to. * * @return the schema namespace */ public String getSchemaNamespace( ){ return this.SCHEMA_NAMESPACE; } public String getSchemaLocation() { // treat URI as File, yes, I know - I need the basename File uri = new File( XML.SCHEMA_LOCATION ); // create a pointer to the default local position File poolconfig = new File( this.mProps.getSchemaDir(), uri.getName() ); return this.mProps.getPoolSchemaLocation( poolconfig.getAbsolutePath() ); } /** * * @return PoolConfig Returns a new PoolConfig object when * it encounters start of XML. * * @see org.griphyn.cPlanner.classes.PoolConfig */ private PoolConfig handleConfigTagStart() { m_pconfig = new PoolConfig(); return m_pconfig; } /** * * @param pcfg Takes the PoolConfig class. * @param attrs Takes the atrributes returned in XML. * * @return SiteInfo returns the reference to the PooInfo ojject * * @throws Exception * @see org.griphyn.cPlanner.classes.SiteInfo * @see org.griphyn.cPlanner.classes.PoolConfig */ private SiteInfo handlePoolTagStart( PoolConfig pcfg, Attributes attrs ) throws Exception { m_pool_info = new SiteInfo(); String handle = new String( attrs.getValue( "", "handle" ) ); //set the id of object m_pool_info.setInfo(SiteInfo.HANDLE,handle); if ( attrs.getValue( "", "gridlaunch" ) != null ) { String gridlaunch = new String( attrs.getValue( "", "gridlaunch" ) ); gridlaunch = (gridlaunch == null || gridlaunch.length() == 0 || gridlaunch.equalsIgnoreCase("null"))? 
null: gridlaunch; m_pool_info.setInfo( SiteInfo.GRIDLAUNCH, gridlaunch ); } if ( attrs.getValue( "", "sysinfo" ) != null ) { String sysinfo = new String( attrs.getValue( "", "sysinfo" ) ); m_pool_info.setInfo( SiteInfo.SYSINFO, sysinfo ); } //pcfg.add( handle, m_pool_info ); return m_pool_info; } /** * * @param pinfo Poolinfo object that is to be populated * @param attrs Attributes for the element * @throws Exception */ private void handleProfileTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { m_namespace = new String( attrs.getValue( "", "namespace" ) ); m_key = new String( attrs.getValue( "", "key" ) ); } /** * * @param pinfo Poolinfo object that is to be populated * @param attrs Attributes for the element * @throws Exception */ private void handleLRCTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { LRC lrc = new LRC( attrs.getValue( "", "url" ) ); pinfo.setInfo( SiteInfo.LRC, lrc ); } /** * @param pinfo Poolinfo object that is to be populated * @param attrs Attributes for the element * @throws Exception */ private void handleGridFtpTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { gftp = new GridFTPServer(); String gftp_url = new String( attrs.getValue( "", "url" ) ); gftp.setInfo( GridFTPServer.GRIDFTP_URL, gftp_url ); GlobusVersion globusver = new GlobusVersion( new Integer(attrs.getValue( "", "major" ) ).intValue(), new Integer( attrs.getValue( "", "minor" ) ).intValue(), new Integer( attrs.getValue( "", "patch" ) ).intValue() ); gftp.setInfo( GridFTPServer.GLOBUS_VERSION, globusver.getGlobusVersion() ); if ( attrs.getValue( "", "storage" ) != null ) { gftp.setInfo( GridFTPServer.STORAGE_DIR, new String( attrs.getValue( "", "storage" ) ) ); } if ( attrs.getValue( "", "total-size" ) != null ) { gftp.setInfo( GridFTPServer.TOTAL_SIZE, new String( attrs.getValue( "", "total-size" ) ) ); } if ( attrs.getValue( "", "free-size" ) != null ) { gftp.setInfo( GridFTPServer.FREE_SIZE, new String( attrs.getValue( "", "free-size" ) ) ); } //following line commented by sk setppolinfo is now called in handleGridFtpTagstop() //pinfo.setPoolInfo(GvdsPoolInfo.GRIDFTP, gftp); } /** * sk added function to handle gridftpbandwidth tag * @param pinfo Poolinfo object that is to be populated * @param attrs Attributes for the element * @throws Exception */ private void handleGridFtpBandwidthTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { GridFTPBandwidth gridftp_bandwidth = new GridFTPBandwidth(); String dest_id = new String( attrs.getValue( "", "dest-subnet" ) ); gridftp_bandwidth.setInfo( GridFTPBandwidth.DEST_ID, dest_id ); String avg_bw_range1 = new String( attrs.getValue( "", "avg-bandwidth-range1" ) ); if ( avg_bw_range1.length() != 0 ) { gridftp_bandwidth.setInfo( GridFTPBandwidth.AVG_BW_RANGE1, avg_bw_range1 ); } String avg_bw_range2 = attrs.getValue( "", "avg-bandwidth-range2" ); if ( avg_bw_range2 != null ) { gridftp_bandwidth.setInfo( GridFTPBandwidth.AVG_BW_RANGE2, avg_bw_range2 ); } String avg_bw_range3 = attrs.getValue( "", "avg-bandwidth-range3" ); if ( avg_bw_range3 != null ) { gridftp_bandwidth.setInfo( GridFTPBandwidth.AVG_BW_RANGE3, avg_bw_range3 ); } String avg_bw_range4 = attrs.getValue( "", "avg-bandwidth-range4" ); if ( avg_bw_range4 != null ) { gridftp_bandwidth.setInfo( GridFTPBandwidth.AVG_BW_RANGE4, avg_bw_range4 ); } gridftp_bandwidth.setInfo( GridFTPBandwidth.AVG_BW, new String( attrs.getValue( "", "avg-bandwidth" ) ) ); gridftp_bandwidth.setInfo( GridFTPBandwidth.
MAX_BW, new String( attrs.getValue( "", "max-bandwidth" ) ) ); gridftp_bandwidth.setInfo( GridFTPBandwidth.MIN_BW, new String( attrs.getValue( "", "min-bandwidth" ) ) ); gftp.setGridFTPBandwidthInfo( gridftp_bandwidth ); } /** * This method handles the start of a jobmanager tag. * * @param pinfo The PoolInfo object which will hold the jobmanager information * @param attrs The attributes about the jobmanager tag returned from the XML. * * @throws Exception * @see org.griphyn.cPlanner.classes.SiteInfo */ private void handleJobManagerTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { JobManager jbinfo = new JobManager(); jbinfo.setInfo( JobManager.UNIVERSE, new String( attrs.getValue( "", "universe" ) ) ); jbinfo.setInfo( JobManager.URL, new String( attrs.getValue( "", "url" ) ) ); GlobusVersion globusver = new GlobusVersion( new Integer( attrs.getValue( "", "major" ) ).intValue(), new Integer( attrs.getValue( "", "minor" ) ).intValue(), new Integer( attrs.getValue( "", "patch" ) ).intValue() ); jbinfo.setInfo( JobManager.GLOBUS_VERSION, globusver.getGlobusVersion() ); if ( attrs.getValue( "", "free-mem" ) != null ) { jbinfo.setInfo( JobManager.FREE_MEM, new String( attrs.getValue( "", "free-mem" ) ) ); } if ( attrs.getValue( "", "total-mem" ) != null ) { jbinfo.setInfo( JobManager.TOTAL_MEM, new String( attrs.getValue( "", "total-mem" ) ) ); } if ( attrs.getValue( "", "max-count" ) != null ) { jbinfo.setInfo( JobManager.MAX_COUNT, new String( attrs.getValue( "", "max-count" ) ) ); } if ( attrs.getValue( "", "max-cpu-time" ) != null ) { jbinfo.setInfo( JobManager.MAX_CPU_TIME, new String( attrs.getValue( "", "max-cpu-time" ) ) ); } if ( attrs.getValue( "", "running-jobs" ) != null ) { jbinfo.setInfo( JobManager.RUNNING_JOBS, new String( attrs.getValue( "", "running-jobs" ) ) ); } if ( attrs.getValue( "", "jobs-in-queue" ) != null ) { jbinfo.setInfo( JobManager.JOBS_IN_QUEUE, new String( attrs.getValue( "", "jobs-in-queue" ) ) ); } if ( attrs.getValue( "", "idle-nodes" ) != null ) { jbinfo.setInfo( JobManager.IDLE_NODES, new String( attrs.getValue( "", "idle-nodes" ) ) ); } if ( attrs.getValue( "", "total-nodes" ) != null ) { jbinfo.setInfo( JobManager.TOTAL_NODES, new String( attrs.getValue( "", "total-nodes" ) ) ); } if ( attrs.getValue( "", "os" ) != null ) { jbinfo.setInfo( JobManager.OS_TYPE, new String( attrs.getValue( "", "os" ) ) ); } if ( attrs.getValue( "", "arch" ) != null ) { jbinfo.setInfo( JobManager.ARCH_TYPE, new String( attrs.getValue( "", "arch" ) ) ); } if ( attrs.getValue( "", "type" ) != null ) { jbinfo.setInfo( JobManager.JOBMANAGER_TYPE, new String( attrs.getValue( "", "type" ) ) ); } pinfo.setInfo( SiteInfo.JOBMANAGER, jbinfo ); } /** * Handles the WorkDirectory Tag Start. * @param pinfo Takes a SiteInfo object for which the work directory is described. * @param attrs Takes the attributes returned from the XML by the parser.
* @throws Exception * @see org.griphyn.cPlanner.classes.SiteInfo */ private void handleWorkDirectoryTagStart( SiteInfo pinfo, Attributes attrs ) throws Exception { WorkDir gwd = new WorkDir(); if ( attrs.getValue( "", "total-size" ) != null ) { gwd.setInfo( WorkDir.TOTAL_SIZE, new String( attrs.getValue( "", "total-size" ) ) ); } if ( attrs.getValue( "", "free-size" ) != null ) { gwd.setInfo( WorkDir.FREE_SIZE, new String( attrs.getValue( "", "free-size" ) ) ); } //pinfo.setInfo( WorkDir.WORKDIR, gwd ); pinfo.setInfo(SiteInfo.WORKDIR,gwd); } /** * Handles the end of the Xml files. * */ private void handleConfigTagEnd() { // System.out.println(m_pconfig.toXml()); } /** * Handles the end of the pool tag. */ private void handlePoolTagEnd() { mSiteStore.addEntry( SiteInfo2SiteCatalogEntry.convert( this.m_pool_info ) ); } /** * Handles the end of the Profile tag. * @param pinfo PoolInfo object for which the * profiles are collected. * * @throws java.lang.Exception * @see org.griphyn.cPlanner.classes.SiteInfo */ private void handleProfileTagEnd( SiteInfo pinfo ) throws RuntimeException { if ( mTextContent != null && m_namespace != null && m_key != null ) { //check if namespace is valid m_namespace = m_namespace.toLowerCase(); if( !Namespace.isNamespaceValid( m_namespace ) ){ mTextContent.setLength( 0 ); mLogger.log("Namespace specified in Site Catalog not supported. ignoring "+ m_namespace, LogManager.WARNING_MESSAGE_LEVEL); return; } Profile profile = new Profile( m_namespace, m_key, mTextContent.toString().trim() ); pinfo.setInfo( SiteInfo.PROFILE, profile ); mTextContent.setLength( 0 ); } } /** * Handles the end of the LRC Tag */ private static void handleLRCTagEnd() { } /** * sk made changes to the following function to set GRIDFTPServer instead of * setting it in fn handleGridFtpTagStart() * @throws java.lang.RuntimeException */ private void handleGridFtpTagEnd() throws RuntimeException { m_pool_info.setInfo( SiteInfo.GRIDFTP, gftp ); } private static void handleGridFtpBandwidthTagEnd() { } /** * Handles the end of the JobManager Tag */ private static void handleJobManagerTagEnd() { } /** * This method handles the Workdirectory tg end. * @param pinfo Takes the PoolInfo object. * @throws java.lang.Exception * @see org.griphyn.cPlanner.classes.SiteInfo */ private void handleWorkDirectoryTagEnd( SiteInfo pinfo ) throws RuntimeException { if ( mTextContent != null ) { WorkDir gdw = ( WorkDir ) pinfo.getInfo( SiteInfo.WORKDIR ); gdw.setInfo( WorkDir.WORKDIR, mTextContent.toString().trim() ); } mTextContent.setLength( 0 ); } /** * This class returns the reference to the PooConfig object * containing information about all the pools. * * @return returns a reference to the PoolConfig object which * contains all the pools. * * @see org.griphyn.cPlanner.classes.PoolConfig */ public PoolConfig getPoolConfig() { return m_pconfig; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/0000755000175000017500000000000011757531667025377 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/Abstract.java0000644000175000017500000002411311757531137027776 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.impl.old; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.PoolConfig; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import java.util.Iterator; import java.util.List; import java.util.ArrayList; import java.util.Map; import java.util.HashMap; import java.util.Set; /** * An abstract implementation of the PoolInfoProvider. Implementations should * extend it, only if they are statically loading information into a * PoolConfig object. The object once populated contains all * the contents of the catalog. * * @author Karan Vahi * @version $Revision: 2571 $ * * @see #mPoolConfig */ public abstract class Abstract extends PoolInfoProvider { /** * Handle to the PoolConfig object */ protected PoolConfig mPoolConfig = null; /** * Returns the System information for a bunch of sites. * * @param siteids List The siteid whose system information is required * * @return Map The key is the siteid and the value is a VDSSysInfo object * * @see org.griphyn.common.classes.VDSSysInfo */ public Map getSysinfos( List siteids ) { logMessage("Map getSysinfos(List siteIDS)"); logMessage("\t getSysinfos(" + siteids + ")"); HashMap sysinfomap = null; for ( Iterator i = siteids.iterator(); i.hasNext(); ) { String site = ( String ) i.next(); SiteInfo siteinfo = mPoolConfig.get( site ); if ( siteinfo != null ) { if ( sysinfomap == null ) { sysinfomap = new HashMap( 5 ); } sysinfomap.put( site, siteinfo.getInfo( SiteInfo.SYSINFO ) ); } } return sysinfomap; } /** * Returns the System information for a single site. * * @param siteID String The site whose system information is requested * * @return VDSSysInfo The system information as a VDSSysInfo object * * @see org.griphyn.common.classes.VDSSysInfo */ public VDSSysInfo getSysinfo( String siteID ) { logMessage("SysInfo getSysinfo(String siteID)"); logMessage("\t getSysinfo(" + siteID + ")"); SiteInfo siteinfo = mPoolConfig.get( siteID ); if ( siteinfo != null ) { return ( VDSSysInfo ) siteinfo.getInfo( SiteInfo.SYSINFO ); } return null; } /** * Gets the pool information from the pool.config file on the basis * of the name of the pool, and the universe. * * @param siteID the name of the site * @param universe the execution universe for the job * * @return the corresponding pool object for the entry if found * else null */ public SiteInfo getPoolEntry( String siteID, String universe ) { logMessage("SiteInfo getPoolEntry(String siteID,String universe)"); logMessage("\tSiteInfo getPoolEntry(" + siteID + "," + universe +")"); SiteInfo site = mPoolConfig.get( siteID); return site; } /** * It returns the profile information associated with a particular pool. If * the pool provider has no such information it should return null. * The name of the object may purport that it is specific to GVDS format, but * in fact it a tuple consisting of namespace, key and value that can be used * by other Pool providers too. 
* * @param siteID the name of the site, whose profile information you want. * * @return List of Profile objects * null if the information about the site is not with the pool provider. * * @see org.griphyn.cPlanner.classes.Profile */ public List getPoolProfile( String siteID ) { logMessage("List getPoolProfile(String siteID)"); logMessage("\tList getPoolProfile(" + siteID + ")"); SiteInfo poolInfo = mPoolConfig.get( siteID ); ArrayList profileList = null; try { profileList = ( poolInfo == null ) ? null : ( ArrayList ) poolInfo.getInfo( SiteInfo.PROFILE ); if ( profileList == null ) { return null; } } catch ( Exception e ) { throw new RuntimeException( "While getting profiles for site " + siteID , e ); } return profileList; } /** * It returns all the jobmanagers corresponding to a specified site. * * @param siteID the name of the site at which the jobmanager runs. * * @return list of JobManager, each referring to * one jobmanager contact string. An empty list if no jobmanagers * found. */ public List getJobmanagers( String siteID ) { logMessage("List getJobmanagers(String siteID)"); logMessage("\tList getJobamager(" + siteID + ")"); SiteInfo poolInfo = mPoolConfig.get( siteID ); return ( poolInfo == null ) ? new java.util.ArrayList( 0 ) : poolInfo.getJobmanagers(); } /** * It returns all the jobmanagers corresponding to a specified pool and * universe. * * @param siteID the name of the site at which the jobmanager runs. * @param universe the gvds universe with which it is associated. * * @return list of JobManager, each referring to * one jobmanager contact string. An empty list if no jobmanagers * found. */ public List getJobmanagers( String siteID, String universe ) { logMessage("List getJobmanagers(String siteID,String universe"); logMessage("\tList getJobmanagers( " + siteID + "," + universe + ")"); SiteInfo poolInfo = mPoolConfig.get( siteID ); return ( poolInfo == null ) ? new java.util.ArrayList( 0 ) : poolInfo.getJobmanagers( universe ); } /** * It returns all the gridftp servers corresponding to a specified pool. * * @param siteID the name of the site at which the jobmanager runs. * * @return List of GridFTPServer, each referring to one * GridFtp Server. */ public List getGridFTPServers( String siteID ) { logMessage("List getGridFTPServers(String siteID)"); logMessage("\tList getGridFTPServers(" + siteID + ")" ); SiteInfo poolInfo = mPoolConfig.get( siteID ); if ( poolInfo == null ) { return new java.util.ArrayList(); } ArrayList gridftp = ( ArrayList ) poolInfo.getInfo( SiteInfo. GRIDFTP ); return gridftp; } /** * It returns all the pools available in the site catalog * * @return List of names of the pools available as String */ public List getPools() { logMessage("List getPools()"); Set s = mPoolConfig.getSites().keySet(); return new ArrayList( s ); } /** * This is a soft state remove, that removes a jobmanager from a particular * pool entry. The cause of this removal could be the inability to * authenticate against it at runtime. The successful removal lead Pegasus * not to schedule job on that particular jobmanager. * * @param siteID the name of the site at which the jobmanager runs. * @param universe the gvds universe with which it is associated. * @param jobManagerContact the contact string to the jobmanager. * * @return true if was able to remove the jobmanager from the cache * false if unable to remove, or the matching entry is not found * or if the implementing class does not maintain a soft state. 
*/ public boolean removeJobManager( String siteID, String universe, String jobManagerContact ) { logMessage("boolean removeJobManager(String siteID, String universe," + "String jobManagerContact)"); logMessage("\tboolean removeJobManager(" + siteID + "," + universe + "," + jobManagerContact + ")"); SiteInfo poolinfo = mPoolConfig.get( siteID ); return ( poolinfo == null ) ? false : poolinfo.removeJobmanager( universe, jobManagerContact ); } /** * This is a soft state remove, that removes a gridftp server from a particular * pool entry. The cause of this removal could be the inability to * authenticate against it at runtime. The successful removal lead Pegasus * not to schedule any transfers on that particular gridftp server. * * @param siteID the name of the site at which the gridftp runs. * @param urlPrefix the url prefix containing the protocol,hostname and port. * * @return true if was able to remove the gridftp from the cache * false if unable to remove, or the matching entry is not found * or if the implementing class does not maintain a soft state. * or the information about site is not in the site catalog. */ public boolean removeGridFtp( String siteID, String urlPrefix ) { logMessage("boolean removeGrid(String siteID, String urlPrefix)"); logMessage("\t boolean removeGrid(" + siteID + "," + urlPrefix + ")"); SiteInfo poolinfo = mPoolConfig.get( siteID ); return ( poolinfo == null ) ? false : poolinfo.removeGridFtp( urlPrefix ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/SiteFactoryException.java0000644000175000017500000000643211757531137032352 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.impl.old; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Site Catalog * implementations. * * @author Karan Vahi * @version $Revision: 2570 $ */ public class SiteFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Site Catalog"; /** * Constructs a SiteFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public SiteFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a SiteFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public SiteFactoryException( String msg, String classname ) { super( msg , classname ); } /** * Constructs a SiteFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. 
* @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public SiteFactoryException( String msg, Throwable cause ) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a SiteFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public SiteFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/classes/0000755000175000017500000000000011757531667027034 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/classes/GridFTPServer.java0000644000175000017500000002442411757531137032323 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.impl.old.classes; import java.util.ArrayList; import java.util.List; /** * This is a data class that is used to store information about a grid ftp server. *
 * The various attributes that can be associated with the server are
 * displayed in the following table.
 *
 * <table>
 * <tr><th>Attribute Name</th><th>Attribute Description</th></tr>
 * <tr><th>url</th><td>the url string pointing to the gridftp server, consisting of the host
 * and the port.</td></tr>
 * <tr><th>globus version</th><td>the version of the Globus Toolkit that was used to install
 * the server.</td></tr>
 * <tr><th>storage mount point</th><td>the storage mount point for the server.</td></tr>
 * <tr><th>total size</th><td>the total storage space at the grid ftp server.</td></tr>
 * <tr><th>free size</th><td>the free space at the grid ftp server.</td></tr>
 * </table>
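 * <p>
 * As a usage sketch only (the URL and directory below are made-up example
 * values, and exception handling is elided), an entry is populated and read
 * back through the integer-keyed accessors:
 * <pre>
 *   GridFTPServer server = new GridFTPServer();
 *   // setInfo is declared to throw Exception for unknown keys
 *   server.setInfo( GridFTPServer.GRIDFTP_URL, "gsiftp://example.isi.edu:2811" );
 *   server.setInfo( GridFTPServer.STORAGE_DIR, "/scratch" );
 *   String url = server.getInfo( GridFTPServer.GRIDFTP_URL );
 * </pre>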
* * @author Gaurang Mehta gmehta@isi.edu * @author Karan Vahi vahi@isi.edu * * @version $Revision: 2571 $ */ public class GridFTPServer{ /** * Array storing the names of the attributes that are stored with the * grid ftp server. */ public static final String GRIDFTPINFO[] = { "url", "storage", "globus-version", "total-size", "free-size"}; /** * The constant to be passed to the accessor functions to get or set the url. */ public static final int GRIDFTP_URL = 0; /** * The constant to be passed to the accessor functions to get or set the * storage directory of the grid ftp server. */ public static final int STORAGE_DIR = 1; /** * The constant to be passed to the accessor functions to get or set the * globus version of the grid ftp server. */ public static final int GLOBUS_VERSION = 2; /** * The constant to be passed to the accessor functions to get or set the * total size. */ public static final int TOTAL_SIZE = 3; /** * The constant to be passed to the accessor functions to get or set the * free size. */ public static final int FREE_SIZE = 4; /** * The url string of the gridftp that contains the host and the port. */ private String mURL; /** * The storage mount point for the grid ftp server. This is the absolute * path on the file system being accessed through the grid ftp server. */ private String mStorageDir; /** * The version of Globus Toolkit that was used to install the grid ftp server. */ private String mGlobusVersion; /** * The total storage space at the grid ftp server. * In what units?? */ private String mTotalSize; /** * The free space at the grid ftp server. * In what units?? */ private String mFreeSize; /** * */ private List mBandWidths; /** * The default constructor. */ public GridFTPServer() { mGlobusVersion = null; mFreeSize = null; mStorageDir = null; mTotalSize = null; mURL = null; // sk initialised gridftp_bandwidths HashMap mBandWidths = new ArrayList(); } /** * Checks if an object is similar to the one referred to by this class. * We compare the primary key to determine if it is the same or not. * * @param o Object * @return true if the primary key (universe,jobmanager-type,pool) match. * else false. */ public boolean equals( Object o ) { GridFTPServer server = ( GridFTPServer ) o; if ( this.mURL.equals( server.mURL ) ) { return true; } return false; } /** * Sets an attribute associated with the grid ftp server. * * @param key the attribute key, which is one of the predefined keys. * @param value value of the attribute. * * @throws Exception if illegal key defined. */ public void setInfo( int key, String value ) throws Exception { switch ( key ) { case 0: mURL = value == null ? null : new String( value ); break; case 1: mStorageDir = value == null ? null : new String( value ); break; case 2: mGlobusVersion = value == null ? null : new String( ( new GlobusVersion( value ) ). getGlobusVersion() ); break; case 3: mTotalSize = value == null ? null : new String( value ); break; case 4: mFreeSize = value == null ? null : new String( value ); break; default: throw new Exception( "Wrong key = " + key +" specified. key must be one of the predefined types" ); } } /** * It fills information in the mBandWidths ArrayList. * * @param bandwidth the object that is stored in the hash, containing the * information about the gridftp bandwidth between the host * and the destination. 
* * @throws Exception */ public void setGridFTPBandwidthInfo( GridFTPBandwidth bandwidth) throws Exception { mBandWidths.add( bandwidth ); } /** * Returns a list of GridFTPBandwidth objects that contain the * bandwidths by which a site is connected to other sites. * * @return list of GridFTPBandwidth objects. * * @throws Exception */ public List getGridFTPBandwidthInfo() throws Exception { return mBandWidths; } /** * Returns the attribute value of a particular attribute of the server. * * @param key the key/attribute name. * * @return the attribute value * @throws RuntimeException if illegal key defined. */ public String getInfo( int key ) { switch ( key ) { case 0: return mURL; case 1: return mStorageDir; case 2: return mGlobusVersion; case 3: return mTotalSize; case 4: return mFreeSize; default: throw new RuntimeException( "Wrong key = " + key + " specified. key must be one of the predefined types" ); } } /** * Returns the textual description of the contents of GridFTPServer * object in the multiline format. * * @return the textual description in multiline format. */ public String toMultiLine() { String output = "gridftp"; if ( mURL != null ) { output += " \"" + mURL +mStorageDir+"\""; } if (mGlobusVersion != null) { output += " \"" + mGlobusVersion + "\""; } return output; } /** * Returns the textual description of the contents of LRC * object. * * @return the textual description. */ public String toString() { String output = "gridftp"; if ( mURL != null ) { output += " \"" + mURL +mStorageDir+"\""; } if (mURL != null) { output += " " + GRIDFTPINFO[GRIDFTP_URL] + "=" + mURL; } if (mStorageDir != null) { output += " " + GRIDFTPINFO[STORAGE_DIR] + "=" + mStorageDir; } if (mGlobusVersion != null) { output += " " + GRIDFTPINFO[GLOBUS_VERSION] + "=" + mGlobusVersion; } if (mTotalSize != null) { output += " " + GRIDFTPINFO[TOTAL_SIZE] + "=" + mTotalSize; } if (mFreeSize != null) { output += " " + GRIDFTPINFO[FREE_SIZE] + "=" + mFreeSize; } output += " )"; // System.out.println(output); return output; } /** * Returns the XML description of the contents of LRC * object. * * @return the xml description. */ public String toXML() { String output = " 0) ? new Integer(st.nextToken()).intValue():0; mMinorVersion = (count > 1) ? new Integer(st.nextToken()).intValue():0; mPatchVersion = (count > 2) ? new Integer(st.nextToken()).intValue():0; } /** * Constructor to set the version information * * @param major Specifies the Major version number. * @param minor Specifies the minor version number. * @param patch Specifies the patch version number. */ public GlobusVersion(int major, int minor, int patch) { mMajorVersion = major; mMinorVersion = minor; mPatchVersion = patch; } /** * Returns the version corresponding to a particular version type. * If an invalid version type is specified then 0 is returned. * * @param version the String type corresponding to the version that * you want. * * @return int value corresponding to the version, * 0 in case of incorrect version type. * * @see #MAJOR * @see #MINOR * @see #PATCH */ public int getGlobusVersion(String version) { int value = 0; if (version.equalsIgnoreCase(MAJOR)) { value = mMajorVersion; } else if (version.equalsIgnoreCase(MINOR)) { value = mMinorVersion; } else if (version.equalsIgnoreCase(PATCH)) { value = mPatchVersion; } return value; } /** * Returns the Globus version as a dot separated String. * It is of type major.minor.patch where major, minor and patch are the * various version numbers stored in the class. 
* * @return the version a dot separated String. */ public String getGlobusVersion(){ StringBuffer version = new StringBuffer(5); version.append(mMajorVersion).append(".") .append(mMinorVersion).append("."). append(mPatchVersion); return version.toString(); } /** * Returns the textual description of the contents of GlobusVersion * object in the multiline format. * * @return the textual description in multiline format. */ public String toMultiLine() { return getGlobusVersion(); } /** * Returns the textual description of the contents of GlobusVersion * object. * * @return the textual description. */ public String toString() { return getGlobusVersion(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/classes/LRC.java0000644000175000017500000000532711757531137030316 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.impl.old.classes; /** * This is a data class that is used to store information about a * local replica catalog, that is associated with a site in the pool configuration * catalog. *
 * The various attributes that can be associated with the catalog are
 * displayed in the following table.
 *
 * <table>
 * <tr><th>Attribute Name</th><th>Attribute Description</th></tr>
 * <tr><th>url</th><td>the url string pointing to the local replica catalog.</td></tr>
 * </table>
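 * <p>
 * A minimal usage sketch; the RLS contact string below is the example value
 * mentioned for this class and not a real deployment:
 * <pre>
 *   LRC lrc = new LRC( "rls://sukhna.isi.edu" );
 *   String url = lrc.getURL();   // returns "rls://sukhna.isi.edu"
 * </pre>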
* * @author Gaurang Mehta gmehta@isi.edu * @author Karan Vahi vahi@isi.edu * * @version $Revision: 2571 $ */ public class LRC { /** * The contact string to the lrc e.g rls://sukhna.isi.edu . */ private String mLRCURL; /** * Constructor for the class. * * @param url the url for the local replica catalog. */ public LRC(String url){ mLRCURL = url==null ? null : new String(url); } /** * Returns the LRC url associated with a pool. * * @return the lrc url. */ public String getURL(){ return mLRCURL; } /** * Sets the url of the LRC associated with the object. * * @param url the url string. */ public void setURL(String url){ mLRCURL = url; } /** * Returns the textual description of the contents of LRC * object in the multiline format. * * @return the textual description in multiline format. */ public String toMultiLine() { return this.toString(); } /** * Returns the textual description of the contents of LRC object. * * @return the textual description. */ public String toString(){ String output="lrc \""+mLRCURL+"\""; return output; } /** * Returns the XML description of the contents of LRC * object. * * @return the xml description. */ public String toXML(){ String output=""; return output; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/classes/LRCResult.java0000644000175000017500000000660311757531137031513 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.impl.old.classes; import edu.isi.pegasus.planner.classes.Data; import edu.isi.pegasus.common.logging.LogManager; import org.globus.replica.rls.RLSClient; import org.globus.replica.rls.RLSString2Bulk; /** * A class that stores the results * of querying an LRC. It includes * whether the operation was a success * or not and in addition the value * of the pool attribute. * * @author Gaurang Mehta * @author Karan Vahi * @version $Revision: 2572 $ * * @see org.globus.replica.rls.RLSString2Bulk */ public class LRCResult extends Data { /** * The lfn for which the LRC * was queried. */ public String lfn; /** * The pfn associated * with the lfn, if an * entry found in the LRC. * Else it can be null. */ public String pfn; /** * The pool attribute associated * with the pfn returned. * This should be set to null * if pfn is not found. */ public String pool; /** * The status of the operation. * Whether it was a sucess or not. * The status are defined in * RLSClient.java * * @see org.globus.replica.rls.RLSClient */ public int LRCExitStatus; /** * The default constructor */ public LRCResult() { lfn = new String(); pfn = new String(); pool= new String(); LRCExitStatus = 0; } /** * The overloaded constructor. * Takes in RLSString2Bulk * object. */ public LRCResult(RLSString2Bulk s2, String poolAtt){ lfn = s2.s1; pfn = s2.s2; LRCExitStatus = s2.rc; pool = poolAtt; } /** * Returns a clone of the * existing object. 
*/ public Object clone(){ LRCResult res = new LRCResult(); res.lfn = this.lfn; res.pfn = this.pfn; res.pool = this.pool; res.LRCExitStatus = this.LRCExitStatus; return res; } /** * Returns a textual description * of the object. */ public String toString(){ String str = "\n lfn: " + lfn + " exit status: " + getErrorMessage()+ " pfn: " + pfn + " pool: " + pool; return str; } /** * Returns the error/status * message according to * the LRCExitStatus. */ public String getErrorMessage(){ RLSClient rls = null; try{ rls = new RLSClient(); } catch(Exception e){ mLogger.log("Exception while initialising to RLS" + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); } String err = rls.getErrorMessage(this.LRCExitStatus); return err; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/classes/JobManager.java0000644000175000017500000004213011757531137031674 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.impl.old.classes; /** * This is a data class that is used to store information about a jobmanager and * the information that it reports about a remote pool. * *
 * The various attributes that can be associated with the jobmanager are
 * displayed in the following table.
 *
 * <table>
 * <tr><th>Attribute Name</th><th>Attribute Description</th></tr>
 * <tr><th>url</th><td>the url string pointing to the jobmanager.</td></tr>
 * <tr><th>universe</th><td>the VDS universe that is associated with this job. Can be transfer
 * or vanilla or any other user defined type.</td></tr>
 * <tr><th>jobmanager type</th><td>the type of remote scheduler that the jobmanager talks to.</td></tr>
 * <tr><th>idle nodes</th><td>the number of idle nodes on the remote resource.</td></tr>
 * <tr><th>total nodes</th><td>the total number of nodes on the remote resource.</td></tr>
 * <tr><th>free memory</th><td>the free memory.</td></tr>
 * <tr><th>total memory</th><td>the total memory.</td></tr>
 * <tr><th>jobs in queue</th><td>the number of jobs in the queue on the remote scheduler.</td></tr>
 * <tr><th>running jobs</th><td>the number of jobs currently running on the remote site.</td></tr>
 * <tr><th>max count</th><td>the maximum number of jobs that can be run.</td></tr>
 * <tr><th>max cpu time</th><td>the max walltime for the jobs on the remote resource.</td></tr>
 * <tr><th>os type</th><td>the operating system type of the remote machines that the jobmanager
 * talks to.</td></tr>
 * <tr><th>architecture type</th><td>the architecture type of the remote machines that the
 * jobmanager talks to.</td></tr>
 * </table>
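 * <p>
 * For illustration only (the contact string is a placeholder), attributes are
 * set and read through the integer keys defined below:
 * <pre>
 *   JobManager jm = new JobManager();
 *   jm.setInfo( JobManager.UNIVERSE, "vanilla" );
 *   jm.setInfo( JobManager.URL, "example.isi.edu/jobmanager-pbs" );
 *   String universe = jm.getInfo( JobManager.UNIVERSE );   // "vanilla"
 * </pre>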
* * @author Gaurang Mehta gmehta@isi.edu * @author Karan Vahi vahi@isi.edu * * @version $Revision: 2571 $ */ public class JobManager { /** * Array storing the names of the attributes that are stored with the * jobmanager. */ public static final String JOBMANAGERINFO[] = { "url", "universe", "globus-version", "type", "idle-nodes", "total-nodes", "free-mem", "total-mem", "jobs-in-queue", "running-jobs", "max-count", "max-cpu-time", "os", "arch"}; /** * The jobmanager type associated with the compute jobs. */ public static final String VANILLA_JOBMANAGER_TYPE = "vanilla"; /** * The jobmanager type associated with the transfer jobs. */ public static final String FORK_JOBMANAGER_TYPE = "transfer"; /** * The constant to be passed to the accessor functions to get or set the url. */ public static final int URL = 0; /** * The constant to be passed to the accessor functions to get or set the * universe. */ public static final int UNIVERSE = 1; /** * The constant to be passed to the accessor functions to get or set the * globus version. */ public static final int GLOBUS_VERSION = 2; /** * The constant to be passed to the accessor functions to get or set the * jobmanager type. */ public static final int JOBMANAGER_TYPE = 3; /** * The constant to be passed to the accessor functions to get or set the * idle nodes. */ public static final int IDLE_NODES = 4; /** * The constant to be passed to the accessor functions to get or set the total * number of nodes. */ public static final int TOTAL_NODES = 5; /** * The constant to be passed to the accessor functions to get or set the free * memory . */ public static final int FREE_MEM = 6; /** * The constant to be passed to the accessor functions to get or set the * total memory. */ public static final int TOTAL_MEM = 7; /** * The constant to be passed to the accessor functions to get or set the * number of the jobs in the queue attribute. */ public static final int JOBS_IN_QUEUE = 8; /** * The constant to be passed to the accessor functions to get or set the * number of running jobs attribute. */ public static final int RUNNING_JOBS = 9; /** * The constant to be passed to the accessor functions to get or set the * maximum number of jobs that can be in the queue. */ public static final int MAX_COUNT = 10; /** * The constant to be passed to the accessor functions to get or set the * maxwalltime for the jobs. */ public static final int MAX_CPU_TIME = 11; /** * The constant to be passed to the accessor functions to get or set the * operating system type of the remote machines to which the jobmanager talks * to. */ public static final int OS_TYPE = 12; /** * The constant to be passed to the accessor functions to get or set the * architecture type of the remote machines to which the jobmanager talks to. */ public static final int ARCH_TYPE = 13; /** * The total memory that the jobmanager reports. */ private String mTotalMem; /** * The free memory that the jobmanager reports. */ private String mFreeMem; /** * The total number of nodes that the jobmanager reports are on the remote * site. */ private String mTotalNodes; /** * The total number of idle nodes that the jobmanager reports are on the * remote site. */ private String mIdleNodes; /** * The maximum number of jobs that can be running on the remote site. */ private String mMaxCount; /** * The max walltime of the jobs that are run on the remote site. */ private String mMaxCPUTime; /** * The number of jobs in the remote queue at the remote site. 
*/ private String mJobsInQueue; /** * The number of jobs in the remote queue that are running at the remote site. */ private String mRunningJobs; /** * The operating system type type of the remote machines to which the * jobmanager talks to. */ private String mOSType; /** * The architecture type of the remote machines to which the jobmanager * talks to. */ private String mArchType; /** * The type of remote scheduler to which the jobmanager talks to. */ private String mJobManagerType; /** * The url to the jobmanager on the remote site. */ private String mURL; /** * The VDS universe with which the jobmanager is associated with. */ private String mUniverse; /** * The globus version that is installed on the remote site. */ private String mGlobusVersion; /** * Default constructor for the class. */ public JobManager() { // m_jobmanager_info = new HashMap(14); mTotalMem = null; mFreeMem = null; mTotalNodes = null; mIdleNodes = null; mMaxCount = null; mMaxCPUTime = null; mJobsInQueue = null; mRunningJobs = null; mOSType = null; mArchType = null; mJobManagerType = null; mURL = null; mUniverse = null; mGlobusVersion = null; } /** * Sets an attribute associated with the jobmanager. * * @param key the attribute key, which is one of the predefined keys. * @param value value of the attribute. * */ public void setInfo(int key, String value) { switch (key) { case 0: mURL = value == null ? null : new String(value); break; case 1: mUniverse = value == null ? null : new String(value); break; case 2: mGlobusVersion = value == null ? null : new String(new GlobusVersion(value). getGlobusVersion()); break; case 3: mJobManagerType = value == null ? null : new String(value); break; case 4: mIdleNodes = value == null ? null : new String(value); break; case 5: mTotalNodes = value == null ? null : new String(value); break; case 6: mFreeMem = value == null ? null : new String(value); break; case 7: mTotalMem = value == null ? null : new String(value); break; case 8: mJobsInQueue = value == null ? null : new String(value); break; case 9: mRunningJobs = value == null ? null : new String(value); break; case 10: mMaxCount = value == null ? null : new String(value); break; case 11: mMaxCPUTime = value == null ? null : new String(value); break; case 12: mOSType = value == null ? null : new String(value); break; case 13: mArchType = value == null ? null : new String(value); break; default: throw new RuntimeException("Wrong key =" + key + ". Please have one of the prefedefined jobmanager keys"); } } /** * Returns the attribute value of a particular attribute of the jobmanager. * * @param key the key/attribute name. * * @return the attribute value */ public String getInfo(int key) { switch (key) { case 0: return mURL; case 1: return mUniverse; case 2: return mGlobusVersion; case 3: return mJobManagerType; case 4: return mIdleNodes; case 5: return mTotalNodes; case 6: return mFreeMem; case 7: return mTotalMem; case 8: return mJobsInQueue; case 9: return mRunningJobs; case 10: return mMaxCount; case 11: return mMaxCPUTime; case 12: return mOSType; case 13: return mArchType; default: throw new RuntimeException("Wrong key=" + key + ". Please have one of the prefedefined jobmanager keys"); } } /** * Checks if an object is similar to the one referred to by this class. * We compare the primary key to determine if it is the same or not. * * @param o the object to be compared for equality. * * @return true if the primary key (url) match. * else false. 
*/ public boolean equals(Object o){ JobManager jm = (JobManager)o; //for the time being only match on url. if(/*this.mUniverse == jm.mUniverse && */ this.mURL.equals(jm.mURL)) { return true; } return false; } /** * Returns the textual description of the contents of JobManager * object in the multiline format. * * @return the textual description in multiline format. */ public String toMultiLine() { String output = "universe"; if (mUniverse != null) { output += " " + mUniverse; } if (mURL != null) { output += " \"" + mURL + "\""; } if (mGlobusVersion != null) { output += " \"" + mGlobusVersion + "\""; } return output; } /** * Returns the textual description of the contents of JobManager * object. * * @return the textual description. */ public String toString() { String output = "jobmanager"; if (mUniverse != null) { output += " " + mUniverse; } if (mURL != null) { output += " \""+mURL+"\""; } if (mGlobusVersion != null) { output += " \""+mGlobusVersion+"\""; } if (mUniverse != null) { output += " " + JOBMANAGERINFO[UNIVERSE] + "=" + mUniverse; } if (mURL != null) { output += " " + JOBMANAGERINFO[URL] + "=" + mURL; } if (mGlobusVersion != null) { output += " " + JOBMANAGERINFO[GLOBUS_VERSION] + "=" + mGlobusVersion; } if (mJobManagerType != null) { output += " " + JOBMANAGERINFO[JOBMANAGER_TYPE] + "=" + mJobManagerType; } if (mOSType != null) { output += " " + JOBMANAGERINFO[OS_TYPE] + "=" + mOSType; } if (mArchType != null) { output += " " + JOBMANAGERINFO[ARCH_TYPE] + "=" + mArchType; } if (mRunningJobs != null) { output += " " + JOBMANAGERINFO[RUNNING_JOBS] + "=" + mRunningJobs; } if (mJobsInQueue != null) { output += " " + JOBMANAGERINFO[JOBS_IN_QUEUE] + "=" + mJobsInQueue; } if (mMaxCPUTime != null) { output += " " + JOBMANAGERINFO[MAX_CPU_TIME] + "=" + mMaxCPUTime; } if (mMaxCount != null) { output += " " + JOBMANAGERINFO[MAX_COUNT] + "=" + mMaxCount; } if (mTotalNodes != null) { output += " " + JOBMANAGERINFO[TOTAL_NODES] + "=" + mTotalNodes; } if (mIdleNodes != null) { output += " " + JOBMANAGERINFO[IDLE_NODES] + "=" + mIdleNodes; } if (mTotalMem != null) { output += " " + JOBMANAGERINFO[TOTAL_MEM] + "=" + mTotalMem; } if (mFreeMem != null) { output += " " + JOBMANAGERINFO[FREE_MEM] + "=" + mFreeMem; } output += " )"; // System.out.println(output); return output; } /** * Returns the XML description of the contents of JobManager * object. * * @return the xml description. */ public String toXML() { String output = " * The various types of information that can be associated with the the remote * site are displayed in the following table. * *
 * <table>
 * <tr><th>Name</th><th>Description</th></tr>
 * <tr><th>grid launch</th><td>the path to kickstart on the remote site.</td></tr>
 * <tr><th>work directory</th><td>the WorkDir object containing the information about the
 * scratch space on the remote site.</td></tr>
 * <tr><th>grid ftp servers</th><td>the list of GridFTPServer objects, each containing
 * information about one grid ftp server.</td></tr>
 * <tr><th>job managers</th><td>the list of JobManager objects, each containing information
 * about one jobmanager.</td></tr>
 * <tr><th>profiles</th><td>the list of Profile objects, each containing one profile.</td></tr>
 * <tr><th>system info</th><td>the VDSSysInfo object containing the remote site's system
 * information.</td></tr>
 * </table>
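 * <p>
 * A sketch of how a site entry is assembled (the handle and path are
 * hypothetical values; setInfo validates the object type for each key):
 * <pre>
 *   SiteInfo site = new SiteInfo();
 *   site.setInfo( SiteInfo.HANDLE, "isi_viz" );
 *   site.setInfo( SiteInfo.GRIDLAUNCH, "/usr/bin/kickstart" );
 *   List jobmanagers = site.getJobmanagers( "vanilla" );   // empty list if none set
 * </pre>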
* * * @author Gaurang Mehta gmehta@isi.edu * @author Karan Vahi vahi@isi.edu * * @version $Revision: 2572 $ * * @see GlobusVersion * @see GridFTPServer * @see GridFTPBandwidth * @see JobManager * @see LRC * @see Profile * @see SiteInfo * @see org.griphyn.common.classes.VDSSysInfo * @see WorkDir */ public class SiteInfo { /** * Array storing the names of the attributes that are stored with the * site. */ public static final String SITEINFO[] = { "grid-ftp-server", "jobmanager", "profile", "lrc", "workdir", "gridlaunch", "sysinfo", "handle"}; /** * The constant to be passed to the accessor functions to get or set the * list of GridFTP objects for the remote site. */ public static final int GRIDFTP = 0; /** * The constant to be passed to the accessor functions to get or set the * list of JobManager objects for the remote site. */ public static final int JOBMANAGER = 1; /** * The constant to be passed to the accessor functions to get or set the * list of Profile objects for the remote site. */ public static final int PROFILE = 2; /** * The constant to be passed to the accessor functions to get or set the list * of LRC objects for the remote site. */ public static final int LRC = 3; /** * The constant to be passed to the accessor functions to get or set the * List of WorkDir objects. */ public static final int WORKDIR = 4; /** * The constant to be passed to the accessor functions to get or set the * path to kickstart. */ public static final int GRIDLAUNCH = 5; /** * The constant to be passed to the accessor functions to get or set the * VDSSysInfo site. */ public static final int SYSINFO = 6; /** * The name of the remote site. This is acts as the key by which to query * a site catalog for information regarding a particular remote site. */ public static final int HANDLE = 7; /** * The path to the kickstart on the remote site. */ private String mGridLaunch ; /** * The list of LRC objects that contain the information about * the various LRCs associated with the remote site. */ private List mLRCList; /** * The list of Profile objects that contain the profile * information associated with the remote site. */ private List mProfileList ; /** * The list of GridFTPServer objects that contain the information * about the gridftp servers on the remote site. */ private List mGridFTPList ; /** * The list of JobManager objects that contain the information * about the jobmanagers associated with the remote site. */ private List mJobManagerList ; /** * Contains the information about the work directory on the remote site. */ private WorkDir mWorkDir; /** * The system information of the remote site. */ private VDSSysInfo mSysInfo; /** * The handle to the site, usually name of the site. */ private String mHandle; /** * Default Constructor. */ public SiteInfo() { mHandle = null; mLRCList = new ArrayList(3); mProfileList = new ArrayList(3); mGridFTPList = new ArrayList(3); mJobManagerList = new ArrayList(5); mSysInfo = new VDSSysInfo(); mWorkDir = new WorkDir(); } /** * Returns an Object containing the attribute value * corresponding to the key specified. * * @param key the key. * * @return Object corresponding to the key value. * @throws RuntimeException if illegal key defined. 
* * * @see #HANDLE * @see #GRIDFTP * @see #GRIDLAUNCH * @see #JOBMANAGER * @see #LRC * @see #PROFILE * @see #SYSINFO * @see #WORKDIR */ public Object getInfo(int key) { switch (key) { case 0: return mGridFTPList; case 1: return mJobManagerList; case 2: return mProfileList; case 3: return mLRCList; case 4: return mWorkDir; case 5: return mGridLaunch; case 6: return mSysInfo; case 7: return mHandle; default: throw new RuntimeException( " Wrong site key. Please use one of the predefined key types"); } } /** * A helper method that returns the execution mount point. * * @return the execution mount point, else * null if no mount point associated with the pool. */ public String getExecMountPoint(){ Object workdir = getInfo(this.WORKDIR); return (workdir == null)? null: ((WorkDir)workdir).getInfo(WorkDir.WORKDIR); } /** * A helper method that returns the path to gridlaunch on the site. * * @return the path to the kickstart. */ public String getKickstartPath(){ Object path = getInfo(this.GRIDLAUNCH); return (path == null)? null: ((String)path); } /** * A helper method that returns the url prefix for one of the gridftp server * associated with the pool. If more than one gridftp servers is associated * with the pool, then the function returns url prefix for the first * gridftp server in the list, unless the parameter random is set to true. * * @param random boolean denoting whether to select a random gridftp server. * * @return the url prefix for the grid ftp server, * else null if no gridftp server mentioned. */ public String getURLPrefix(boolean random){ String url = null; GridFTPServer server = selectGridFTP(random); url = server.getInfo(GridFTPServer.GRIDFTP_URL); //on the safe side should prune also.. return Utility.pruneURLPrefix(url); } /** * It returns all the jobmanagers corresponding to a specified pool. * * @return list of JobManager, each referring to * one jobmanager contact string. An empty list if no jobmanagers * found. */ public List getJobmanagers() { Object obj; return ((obj = getInfo(this.JOBMANAGER)) == null)? new java.util.ArrayList(0): (List)obj; } /** * It returns all the jobmanagers corresponding to a specified pool and * universe. * * @param universe the gvds universe with which it is associated. * * @return list of JobManager, each referring to * one jobmanager contact string. An empty list if no jobmanagers * found. */ public List getJobmanagers(String universe) { Object obj; return ((obj = getInfo(this.JOBMANAGER)) == null)? new java.util.ArrayList(0): this.getMatchingJMList((List)obj,universe); } /** * Sets an attribute associated with the remote site. It actually * adds to the list where there is a list maintained like for grid ftp servers, * jobmanagers, profiles, and LRCs. * * @param key the attribute key, which is one of the predefined keys. * @param object the object containing the attribute value. * * @throws RuntimeException if the object passed for the key is not of * valid type. * * @throws Exception if illegal key defined. * * * @see #HANDLE * @see #GRIDFTP * @see #GRIDLAUNCH * @see #JOBMANAGER * @see #LRC * @see #PROFILE * @see #SYSINFO * @see #WORKDIR */ public void setInfo(int key, Object object) throws RuntimeException { //to denote if object is of valid type or not. 
boolean valid = true; switch (key) { case GRIDFTP: if (object != null && object instanceof GridFTPServer) mGridFTPList.add(object); else valid = false; break; case JOBMANAGER: if (object != null && object instanceof JobManager) mJobManagerList.add(object); else valid = false; break; case PROFILE: if (object != null && object instanceof Profile) mProfileList.add(object); else valid = false; break; case LRC: if (object != null && object instanceof LRC) mLRCList.add(object); else valid = false; break; case WORKDIR: if(object != null && object instanceof WorkDir) mWorkDir = (WorkDir) object; else{ valid = false; mWorkDir = null; } break; case GRIDLAUNCH: if(object != null && object instanceof String) mGridLaunch = (String) object; else{ valid = false; mGridLaunch = null; } break; case SYSINFO: if(object != null && object instanceof String) mSysInfo = new VDSSysInfo((String) object); else if(object != null && object instanceof VDSSysInfo){ mSysInfo = (VDSSysInfo)object; } else{ valid = false; mSysInfo = null; } break; case HANDLE: if(object != null && object instanceof String) mHandle = (String) object; else{ valid = false; mHandle = null; } break; default: throw new RuntimeException( " Wrong site key. Please use one of the predefined key types"); } //if object is not null , and valid == false //throw exception if(!valid && object != null){ throw new RuntimeException("Invalid object passed for key " + SITEINFO[key]); } } /** * It removes a jobmanager from the pool. It calls the underlying equals * method of the associated jobmanager object to remove it. * * @param universe the gvds universe with which it is associated. * @param jobManagerContact the contact string to the jobmanager. * * @return true if was able to remove successfully * else false. */ public boolean removeJobmanager(String universe, String jobManagerContact) { if (mJobManagerList == null) { return false; } JobManager jm = new JobManager(); boolean val = false; try { jm.setInfo(JobManager.UNIVERSE, universe); jm.setInfo(JobManager.URL, jobManagerContact); } catch (Exception e) { //wonder why gaurang throws it LogManagerFactory.loadSingletonInstance(). log("Exception while removing jobmanager:" + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); return false; } synchronized(mJobManagerList){ val = mJobManagerList.remove(jm); } jm = null; return val; } /** * Removes a grid ftp server from the soft state associated with the pool. * * @param urlPrefix the urlprefix associated with the server. * * @return boolean */ public boolean removeGridFtp(String urlPrefix){ if(mGridFTPList == null) return false; GridFTPServer server = new GridFTPServer(); boolean val = false; try{ server.setInfo(GridFTPServer.GRIDFTP_URL, urlPrefix); } catch(Exception e){ //wonder why gaurang throws it LogManagerFactory.loadSingletonInstance().log( "Exception while removing jobmanager:" + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); return false; } synchronized(mGridFTPList){ val = mGridFTPList.remove(server); server = null; } return val; } /** * Returns a gridftp server from the list of gridftp servers associated with * the site. If more than one candidate GridFTPServer is found , then the * function returns the first matching GridFTPServer * unless parameter random is set to true. * * @param random boolean denoting whether to select a random gridftp server. * * @return the selected GridFTPServer corresponding to the * grid ftp server, * else null if list is null. 
* * @see org.griphyn.cPlanner.classes.GridFTPServer */ public GridFTPServer selectGridFTP(boolean random) { List l = (List) this.getInfo(SiteInfo.GRIDFTP); //sanity check if(l == null || l.isEmpty()) return null; int sel = (random == true)? PegRandom.getInteger(l.size() - 1): 0; return (GridFTPServer) (l.get(sel)); } /** * Returns an LRC from the list of LRCs associated with the site. * If more than one candidate LRC is found , then the function * the first matching LRCLRC corresponding to the selected LRC. * else null if list is null. * * @see org.griphyn.cPlanner.classes.LRC */ public LRC selectLRC(boolean random) { List l = (List) this.getInfo(SiteInfo.LRC); //sanity check if(l == null || l.isEmpty()) return null; int sel = (random == true)? PegRandom.getInteger(l.size() - 1): 0; return (LRC) (l.get(sel)); } /** * Returns a selected jobmanager corresponding to a particular VDS * universe. * If more than one candidate jobmanager is found , then the function * the first matching jobmanager unless parameter random is set to true. * * @param universe the VDS universe with which the jobmanager is associated. * @param random boolean denoting whether to select a random gridftp server. * * @return the selected jobmanager, * else null if list is null. * * @see org.griphyn.cPlanner.classes.JobManager */ public JobManager selectJobManager(String universe, boolean random) { List l = (List) this.getInfo(SiteInfo.JOBMANAGER); //sanity check if(l == null || l.isEmpty()) return null; //match on the universe l = this.getMatchingJMList(l,universe); //do a sanity check again if(l == null || l.isEmpty()) return null; int sel = (random == true)? PegRandom.getInteger(l.size() - 1): 0; return (JobManager) (l.get(sel)); } /** * Returns the textual description of the contents of SiteInfo * object in the multiline format. * * @return the textual description in multiline format. */ public String toMultiLine() { String output = "site " + mHandle + "{\n"; if(mSysInfo !=null) { output+="sysinfo \""+mSysInfo+"\"\n"; } if (mGridLaunch != null) { output += "gridlaunch \"" + mGridLaunch + "\"\n"; } if (mWorkDir != null) { output += mWorkDir.toMultiLine()+"\n"; } if (!mGridFTPList.isEmpty()) { for (Iterator i = mGridFTPList.iterator(); i.hasNext(); ) { output += ( (GridFTPServer) i.next()).toMultiLine() + "\n"; } } if (!mJobManagerList.isEmpty()) { for (Iterator i = mJobManagerList.iterator(); i.hasNext(); ) { output += ( (JobManager) i.next()).toMultiLine() + "\n"; } } if (!mLRCList.isEmpty()) { for (Iterator i = mLRCList.iterator(); i.hasNext(); ) { output += ( (LRC) i.next()).toMultiLine() + "\n"; } } if (!mProfileList.isEmpty()) { for (Iterator i = mProfileList.iterator(); i.hasNext(); ) { output += ( (Profile) i.next()).toMultiLine() + "\n"; } } output += "}\n"; // System.out.println(output); return output; } /** * Returns the textual description of the contents of SiteInfo * object. * * @return the textual description. 
*/ public String toString() { String output = "{\n"; if(mSysInfo !=null) { output+="sysinfo \""+mSysInfo+"\"\n"; } if (mGridLaunch != null) { output += "gridlaunch \"" + mGridLaunch + "\"\n"; } if (mWorkDir != null) { output += mWorkDir.toString()+"\n"; } if (!mGridFTPList.isEmpty()) { for (Iterator i = mGridFTPList.iterator(); i.hasNext(); ) { output += ( (GridFTPServer) i.next()).toString() + "\n"; } } if (!mJobManagerList.isEmpty()) { for (Iterator i = mJobManagerList.iterator(); i.hasNext(); ) { output += ( (JobManager) i.next()).toString() + "\n"; } } if (!mLRCList.isEmpty()) { for (Iterator i = mLRCList.iterator(); i.hasNext(); ) { output += ( (LRC) i.next()).toString() + "\n"; } } if (!mProfileList.isEmpty()) { for (Iterator i = mProfileList.iterator(); i.hasNext(); ) { output += ( (Profile) i.next()).toString() + "\n"; } } output += "}\n"; // System.out.println(output); return output; } /** * Returns the XML description of the contents of SiteInfo * object. * * @return the xml description. */ public String toXML() { String output = ""; if (mGridLaunch != null) { output += " gridlaunch=\"" + mGridLaunch + "\""; } if(mSysInfo!=null) { output+=" sysinfo=\""+mSysInfo+"\""; } output += ">\n"; if (!mProfileList.isEmpty()) { for (Iterator i = mProfileList.iterator(); i.hasNext(); ) { output += " " + ( (Profile) i.next()).toXML() + "\n"; } } if (!mLRCList.isEmpty()) { for (Iterator i = mLRCList.iterator(); i.hasNext(); ) { output += " " + ( (LRC) i.next()).toXML() + "\n"; } } if (!mGridFTPList.isEmpty()) { for (Iterator i = mGridFTPList.iterator(); i.hasNext(); ) { output += " " + ( (GridFTPServer) i.next()).toXML() + "\n"; } } if (!mJobManagerList.isEmpty()) { for (Iterator i = mJobManagerList.iterator(); i.hasNext(); ) { output += " " + ( (JobManager) i.next()).toXML() + "\n"; } } if (mWorkDir != null) { output += " " + mWorkDir.toXML() + "\n"; } output += " \n"; return output; } /** * Returns a list containing only those jobmanager entries that match a * particular universe. * * @param superList the list containing all the entries of type * JobManager. * @param universe the universe against which you want to match the * entries. * * @return List which is a subset of the elements in the superList */ private List getMatchingJMList(List superList, String universe) { ArrayList subList = new ArrayList(0); for (Iterator i = superList.iterator(); i.hasNext(); ) { JobManager jbinfo = (JobManager) i.next(); if (jbinfo.getInfo(JobManager.UNIVERSE). equalsIgnoreCase(universe)) { subList.add(jbinfo); } } return subList; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/classes/WorkDir.java0000644000175000017500000001354711757531137031262 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.impl.old.classes; /** * This is a data class that is used to store information about the scratch * work directory or the execution mount point on the remote pool. *
 * The various attributes that can be associated with the work directory are
 * displayed in the following table.
 *
 * <table>
 * <tr><th>Attribute Name</th><th>Attribute Description</th></tr>
 * <tr><th>path</th><td>the absolute path on the remote site to the work directory.</td></tr>
 * <tr><th>total size</th><td>the total scratch space available under the work directory.</td></tr>
 * <tr><th>free size</th><td>the free space available under the work directory.</td></tr>
 * </table>
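 * <p>
 * For example (the path and size are placeholder values; units for the sizes
 * are not specified by this class):
 * <pre>
 *   WorkDir wd = new WorkDir();
 *   wd.setInfo( WorkDir.WORKDIR, "/scratch/pegasus" );
 *   wd.setInfo( WorkDir.TOTAL_SIZE, "100" );
 *   String path = wd.getInfo( WorkDir.WORKDIR );
 * </pre>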
* * @author Gaurang Mehta gmehta@isi.edu * @author Karan Vahi vahi@isi.edu * * @version $Revision: 2571 $ */ public class WorkDir { /** * Array storing the names of the attributes that are stored with the * work directory. */ public static final String[] WORKDIRINFO = {"path", "total-size", "free-size"}; /** * The constant to be passed to the accessor functions to get or set the * path to the work directory. */ public static final int WORKDIR = 0; /** * The constant to be passed to the accessor functions to get or set the * total space available. */ public static final int TOTAL_SIZE = 1; /** * The constant to be passed to the accessor functions to get or set the * free space available. */ public static final int FREE_SIZE = 2; /** * The path to the work directory. */ private String mWorkDir; /** * The total space available at the file system under this directory. */ private String mTotalSize; /** * The free space available at the file system under this directory. */ private String mFreeSize; /** * The default constructor. Sets all the variables to null. */ public WorkDir() { mWorkDir = null; mTotalSize = null; mFreeSize = null; } /** * Returns the attribute value of a particular attribute of the work * directory. * * @param key the key/attribute name. * * @return the attribute value * @throws RuntimeException if illegal key defined. */ public String getInfo( int key ) { switch ( key ) { case 0: return mWorkDir; case 1: return mTotalSize; case 2: return mFreeSize; default: throw new RuntimeException( "Illegal workdir key type=" + key + ". Use on of the predefined types" ); } } /** * Sets an attribute associated with the work directory. * * @param key the attribute key, which is one of the predefined keys. * @param value value of the attribute. * * @throws Exception if illegal key defined. */ public void setInfo( int key, String value ) throws RuntimeException { switch ( key ) { case 0: mWorkDir = value == null ? null : new String( value ); break; case 1: mTotalSize = value == null ? null : new String( value ); break; case 2: mFreeSize = value == null ? null : new String( value ); break; default: throw new RuntimeException( "Illegal workdir key type=" + key + ". Use on of the predefined types" ); } } /** * Returns the textual description of the contents of WorkDir * object in the multiline format. * * @return the textual description in multiline format. */ public String toMultiLine() { String output = "workdir \"" + mWorkDir + "\""; return output; } /** * Returns the textual description of the contents of WorkDir * object. * * @return the textual description. */ public String toString() { String output = "workdir \"" + mWorkDir + "\""; if ( mWorkDir != null ) { output += " " + WORKDIRINFO[ 0 ] + "=" + mWorkDir; } if(mTotalSize!=null){ output+=" " + WORKDIRINFO[1] + "=" + mTotalSize; } if(mFreeSize!=null){ output+=" " + WORKDIRINFO[2] + "=" + mFreeSize; } output+=" )"; // System.out.println(output); return output; } /** * Returns the XML description of the contents of WorkDir * object. * * @return the xml description. */ public String toXML() { String output = ""; if ( mWorkDir != null ) { output += "SiteInfo
object containing the information * about the site. */ public void add(String id, SiteInfo site){ mSiteCatalog.put(id,site); } /** * Adds all the sites in a controlled fashion, to the existing map containing * information about the sites. If an information about a site already * exists, it is overwritten. * * @param sites a map indexed by siteid. Each value is a SiteInfo object. */ public void add( PoolConfig sites){ add(sites,true); } /** * Adds all the sites in a controlled fashion, to the existing map containing * information about the sites. * * @param sites a map indexed by siteid. Each value is a SiteInfo object. * @param overwrite resolves intersections, in case of a site already exists. * If true, the orginal site information is overwritten with * the new one. If false original site information remains. * */ public void add( PoolConfig sites, boolean overwrite ){ String id; SiteInfo site; boolean contains = false; for(Iterator it = sites.getSites().entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); id = (String)entry.getKey(); site = (SiteInfo)entry.getValue(); contains = contains(id); if( overwrite || !contains) { add(id,site); } } } /** * Returns a boolean indicating if an entry for a Site with a particular id * exists or not. * * @param id the id of the site, usually the name of the site. * * @return true if entry for the site exists, else false. */ public boolean contains(String id){ return mSiteCatalog.containsKey(id); } /** * Retrives the information about a site. * * @param siteID the id of the site, usually the name of the site. * @return SiteInfo containing the site layout, * else null in case of site not existing. */ public SiteInfo get(String siteID){ if(mSiteCatalog.containsKey(siteID)) { return ( SiteInfo ) mSiteCatalog.get( siteID ); } else { mLogger.log("Site '" + siteID + "' does not exist in the Site Catalog.", LogManager.ERROR_MESSAGE_LEVEL); return null; } } /** * Returns information about all the sites. * * @return a Map indexed by the site id (name of the site). Each value is a * SiteInfo object. */ public Map getSites(){ return mSiteCatalog; } /** * Returns the textual description of the contents of PoolConfig * object in the multiline format. * * @return the textual description in multiline format. */ public String toMultiLine() { String output=""; for (Iterator i = mSiteCatalog.keySet().iterator();i.hasNext();){ String poolid = (String)i.next(); //Karan Oct 13,2005 //This is moved to SiteInfo.toMultiLine() //output += "pool " + poolid + output += ((SiteInfo)mSiteCatalog.get(poolid)).toMultiLine()+"\n"; } return output; } /** * Returns the XML description of the contents of PoolConfig * object. * * @return the xml description. */ public String toXML(){ String output=""; for (Iterator i = mSiteCatalog.keySet().iterator();i.hasNext();){ String poolid=(String)i.next(); output+=" Profile
objects * null if the information about the site is not with the pool provider. * * @see org.griphyn.cPlanner.classes.Profile */ public abstract List getPoolProfile( String siteID ); /** * It returns all the jobmanagers corresponding to a specified site. * * @param siteID the name of the site at which the jobmanager runs. * * @return list of JobManager, each referring to * one jobmanager contact string. An empty list if no jobmanagers * found. */ public abstract List getJobmanagers( String siteID ); /** * It returns all the jobmanagers corresponding to a specified pool and * universe. * * @param siteID the name of the site at which the jobmanager runs. * @param universe the gvds universe with which it is associated. * * @return list of JobManager, each referring to * one jobmanager contact string. An empty list if no jobmanagers * found. */ public abstract List getJobmanagers( String siteID, String universe ); /** * It returns all the gridftp servers corresponding to a specified pool. * * @param siteID the name of the site at which the jobmanager runs. * * @return List of GridFTPServer, each referring to one * GridFtp Server. */ public abstract List getGridFTPServers( String siteID ); /** * It returns all the pools available in the site catalog * * @return List of names of the pools available as String */ public abstract List getPools(); /** * This is a soft state remove, that removes a jobmanager from a particular * pool entry. The cause of this removal could be the inability to * authenticate against it at runtime. The successful removal lead Pegasus * not to schedule job on that particular jobmanager. * * @param siteID the name of the site at which the jobmanager runs. * @param universe the gvds universe with which it is associated. * @param jobManagerContact the contact string to the jobmanager. * * @return true if was able to remove the jobmanager from the cache * false if unable to remove, or the matching entry is not found * or if the implementing class does not maintain a soft state. */ public abstract boolean removeJobManager( String siteID, String universe, String jobManagerContact ) ; /** * This is a soft state remove, that removes a gridftp server from a particular * pool entry. The cause of this removal could be the inability to * authenticate against it at runtime. The successful removal lead Pegasus * not to schedule any transfers on that particular gridftp server. * * @param siteID the name of the site at which the gridftp runs. * @param urlPrefix the url prefix containing the protocol,hostname and port. * * @return true if was able to remove the gridftp from the cache * false if unable to remove, or the matching entry is not found * or if the implementing class does not maintain a soft state. * or the information about site is not in the site catalog. */ public abstract boolean removeGridFtp( String siteID, String urlPrefix ); /** * Returns a textual description of the pool mode being used. * @return String */ public abstract String getPoolMode(); /** * Return a random lrc url from the list of lrc url's. * @param lrcs Arraylist of LRC objects. * * @return String Returns one of lrc url's * @see org.griphyn.cPlanner.classes.LRC */ public String selectLRC( ArrayList lrcs ) { String lrcurl = null; if ( lrcs.size() == 1 ) { lrcurl = ( ( LRC ) ( lrcs.get( 0 ) ) ).getURL(); } else { lrcurl = ( ( LRC ) ( lrcs.get( ( int ) Math.random() * lrcs.size() ) ) ). getURL(); } return lrcurl; } /** * Returns the path to the execution mount point (The Workdir). 
* * @param workdir the WorkDir object containing the workdir * information. * * @return String The exec-mount point (aka workdir) * * @throws Exception */ public String selectWorkdir( WorkDir workdir ) throws Exception { return workdir.getInfo( WorkDir.WORKDIR ); } /** * Return a random gridftp url from the list of gridftp url's. * * @param ftp Takes an ArrayList of GridFTPServer Objects. * * @return String Returns a single gridftp url from among many * * @see org.griphyn.cPlanner.classes.GridFTPServer */ public GridFTPServer selectGridFtp( ArrayList ftp ) { int sel = PegRandom.getInteger( ftp.size() - 1 ); return ( GridFTPServer ) ( ftp.get( sel ) ); } /** * Returns the value of VDS_HOME for a site. * * @param siteID the name of the site. * @return value if set else null. */ public String getVDS_HOME( String siteID ){ return this.getEnvironmentVariable( siteID, VDS_HOME ); } /** * Returns the value of PEGASUS_HOME for a site. * * @param siteID the name of the site. * @return value if set else null. */ public String getPegasusHome( String siteID ){ return this.getEnvironmentVariable( siteID, PEGASUS_HOME ); } /** * Returns an environment variable for a particular site set in the * Site Catalog. * * @param siteID the name of the site. * @param envVariable the environment variable whose value is required. * * @return value of the environment variable if found, else null */ public String getEnvironmentVariable( String siteID, String envVariable ){ String result = null; //get all environment variables List envs = this.getPoolProfile( siteID, Profile.ENV ); if ( envs == null ) { return result; } //traverse through all the environment variables for( Iterator it = envs.iterator(); it.hasNext(); ){ Profile p = ( Profile ) it.next(); if( p.getProfileKey().equals( envVariable ) ){ result = p.getProfileValue(); break; } } return result; } /** * It returns profile information associated with a particular namespace and * pool. * * @param siteID the name of the site, whose profile information you want. * @param namespace the namespace correspoinding to which the profile * information of a particular site is desired. * * @return List of Profile objects * NULL when the information about the site is not there or no * profile information associated with the site. * * @see org.griphyn.cPlanner.classes.Profile */ public List getPoolProfile( String siteID, String namespace ) { logMessage("List getPoolProfile(String siteID, String namespace"); logMessage("\tList getPoolProfile(" + siteID + "," + namespace +")"); List profileList = null; ArrayList namespList = null; //sanity checks if ( siteID == null || namespace == null || namespace.length() < 2 ) { return null; } //check if the namespace asked for //is a valid namespace or not if ( !Namespace.isNamespaceValid( namespace ) ) { mLogger.log( "Namespace " + namespace + " not suppored. 
Ignoring", LogManager.WARNING_MESSAGE_LEVEL); return null; } //get information about all the profiles profileList = this.getPoolProfile( siteID ); if ( profileList == null ) { return profileList; } //iterate through the list and add to the namespace list Iterator it = profileList.iterator(); namespList = new ArrayList( 3 ); Profile poolPf = null; while ( it.hasNext() ) { poolPf = ( Profile ) it.next(); if ( poolPf.getProfileNamespace().equalsIgnoreCase( namespace ) ) { namespList.add( poolPf ); } } if ( namespList.isEmpty() ) { namespList = null; } return namespList; } /** * This determines the working directory on remote execution pool on the * basis of whether an absolute path is specified in the pegasus.dir.exec directory * or a relative path. * * @param executionPool the pool where a job has to be executed. * * @return the path to the pool work dir. * @throws RuntimeException in case of site not found in the site catalog. */ public String getExecPoolWorkDir( String executionPool ) { return this.getExecPoolWorkDir( executionPool, null, -1 ); } /** * This determines the working directory on remote execution pool for a * particular job. The job should have it's execution pool set. * * @param job Job object for the job. * * @return the path to the pool work dir. * @throws RuntimeException in case of site not found in the site catalog. */ public String getExecPoolWorkDir( Job job ) { return this.getExecPoolWorkDir( job.executionPool, job.vdsNS.getStringValue( Pegasus.REMOTE_INITIALDIR_KEY ), job.jobClass ); } /** * This determines the working directory on remote execution pool on the * basis of whether an absolute path is specified in the pegasus.dir.exec * directory or a relative path. * * @param siteID the name of the site where a job has to be executed. * @param path the relative path that needs to be appended to the * workdir from the execution pool. * * @return the path to the pool work dir. * @throws RuntimeException in case of site not found in the site catalog. */ public String getExecPoolWorkDir( String siteID, String path ) { logMessage("String getExecPoolWorkDir(String siteID, String path)"); logMessage("\t String getExecPoolWorkDir(" + siteID + "," + path + ")"); return this.getExecPoolWorkDir( siteID, path, -1 ); } /** * This determines the working directory on remote execution pool on the * basis of whether an absolute path is specified in the pegasus.dir.exec directory * or a relative path. If the job class happens to be a create directory job * it does not append the name of the random directory since the job is * trying to create that random directory. * * @param siteID the name of the site where the job has to be executed. * @param path the relative path that needs to be appended to the * workdir from the execution pool. * @param jobClass the class of the job. * * @return the path to the pool work dir. * @throws RuntimeException in case of site not found in the site catalog. 
*/ public String getExecPoolWorkDir( String siteID, String path,int jobClass ) { SiteInfo execPool = this.getPoolEntry( siteID, "vanilla" ); if(execPool == null){ throw new RuntimeException("Entry for " + siteID + " does not exist in the Site Catalog"); } String execPoolDir = mWorkDir; if(jobClass == Job.CREATE_DIR_JOB){ //the create dir jobs always run in the //workdir specified in the site catalog return execPool.getExecMountPoint(); } if ( mWorkDir.length() == 0 || mWorkDir.charAt( 0 ) != '/' ) { //means you have to append the //value specfied by pegasus.dir.exec File f = new File( execPool.getExecMountPoint(), mWorkDir ); execPoolDir = f.getAbsolutePath(); } //get the random directory name String randDir = mUserOpts.getRandomDirName(); if ( randDir != null) { //append the random dir name to the //work dir constructed till now File f = new File( execPoolDir, randDir ); execPoolDir = f.getAbsolutePath(); } //path takes precedence over random dir if ( path != null ) { //well i can do nesting conditional return but wont return ( path.length() == 0 || path.charAt( 0 ) != '/' ) ? //append the path new File( execPoolDir, path ).getAbsolutePath() : //else absolute path specified path; } return execPoolDir; } /** * Returns the url prefix of a gridftp server on the pool. * gsiftp://dataserver.phys.uwm.edu/~/griphyn_test/ligodemo_output/ * gives a URL prefix of gsiftp://dataserver.phys.uwm.edu * * @param poolName the name of the pool. * * @return String corresponding to the url prefix if the pool is found. * null if pool entry is not found. */ public String getURLPrefix( String poolName ) { SiteInfo pool = getPoolEntry( poolName, "vanilla" ); String urlPrefix = pool.getURLPrefix( true ); if ( urlPrefix == null || urlPrefix.trim().length() == 0 ) { throw new RuntimeException( " URL prefix not specified for site " + poolName ); } return urlPrefix; } /** * Return the storage mount point for a particular pool. * * @param site SiteInfo object of the site for which you want the * storage-mount-point. * * @return String corresponding to the mount point if the pool is found. * null if pool entry is not found. */ public String getSeMountPoint( SiteInfo site ) { logMessage("String getSeMountPoint(SiteInfo site)"); String mount_point = mStorageDir; GridFTPServer server = null; if ( mStorageDir.length() == 0 || mStorageDir.charAt( 0 ) != '/' ) { server = site.selectGridFTP( false ); mount_point = server.getInfo( GridFTPServer.STORAGE_DIR ); //removing the trailing slash if there int length = mount_point.length(); if ( length > 1 && mount_point.charAt( length - 1 ) == '/' ) { mount_point = mount_point.substring( 0, length - 1 ); } //append the Storage Dir File f = new File( mount_point, mStorageDir ); mount_point = f.getAbsolutePath(); } //check if we need to replicate the submit directory //structure on the storage directory if( mDeepStorageStructure ){ String leaf = ( this.mUserOpts.getOptions().partOfDeferredRun() )? //if a deferred run then pick up the relative random directory //this.mUserOpts.getOptions().getRandomDir(): this.mUserOpts.getOptions().getRelativeDirectory(): //for a normal run add the relative submit directory this.mUserOpts.getOptions().getRelativeDirectory(); File f = new File( mount_point, leaf ); mount_point = f.getAbsolutePath(); } return mount_point; } /** * Gets the pool object to be used for the transfer universe. If we * do not get that then defaults back to globus universe for the same pool. 
* * @param poolName the name of the pool * @return Pool */ public SiteInfo getTXPoolEntry( String poolName ) { SiteInfo p = this.getPoolEntry( poolName, Engine.TRANSFER_UNIVERSE ); return p; } /** * Logs the message to a logging stream. Currently does not log to any stream. * * @param msg the message to be logged. */ protected void logMessage(String msg){ //mLogger.logMessage("[Shishir] Site Catalog : " + msg); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/MdsQuery.java0000644000175000017500000010560511757531137030012 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.impl.old; import java.util.ArrayList; import java.util.Enumeration; import java.util.Hashtable; import java.util.Iterator; import java.util.StringTokenizer; import javax.naming.Context; import javax.naming.NamingEnumeration; import javax.naming.NamingException; import javax.naming.directory.Attribute; import javax.naming.directory.Attributes; import javax.naming.directory.SearchResult; import javax.naming.ldap.InitialLdapContext; import javax.naming.ldap.LdapContext; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.PoolConfig; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPServer; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPBandwidth; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.JobManager; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.LRC; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.WorkDir; /** * This Class queries the GT2 based Monitoring and Discovery Service (MDS) * and stores the remote sites information into a single data class. 
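 * <p>
 * A minimal usage sketch (illustrative; the host, port and base DN are
 * assumed values):
 * <pre>
 *   MdsQuery query = new MdsQuery( "giis.example.org", 2135 );
 *   LdapContext ctx = query.connectMds();
 *   String baseDN = "mds-vo-name=local, o=grid";
 *   NamingEnumeration results =
 *       ctx.search( baseDN, "(objectclass=*)", new SearchControls() );
 *   query.displayResults( results, baseDN );
 * </pre>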
* * @author Gaurang Mehta gmehta@isi.edu * @author Karan Vahi vahi@isi.edu * * @version $Revision: 2572 $ */ public class MdsQuery { private String mdshost; //holds the hostname for MDS private int mdsport; //holds the port number for MDS private String mdsbaseDN; //holds the baseDN for the GRIS/GIIS private String DEFAULT_CTX = "com.sun.jndi.ldap.LdapCtxFactory"; private Hashtable env; //Hashtable holding connection setting to the MDS // private GvdsPoolConfig poolconfig = new GvdsPoolConfig(); private static final int Gvds_Pool_Id = 0; private static final int Gvds_Pool_Universe = 1; private static final int Gvds_Pool_WorkDir = 2; private static final int Gvds_Pool_Lrc = 3; private static final int Gvds_Pool_Gridlaunch = 4; private static final int Gvds_Pool_Storage = 5; private static final int Gvds_Pool_Profile = 6; private static final int Mds_Computer_Total_Free_NodeCount = 7; private static final int Mds_Computer_Total_NodeCount = 8; private static final int Mds_Gram_Job_Queue_MaxCount = 9; private static final int Mds_Gram_Job_Queue_MaxCpuTime = 10; private static final int Mds_Gram_Job_Queue_MaxRunningJobs = 11; private static final int Mds_Gram_Job_Queue_MaxJobsInQueue = 12; private static final int Mds_Memory_Ram_Total_SizeMB = 13; private static final int Mds_Memory_Ram_FreeSizeMB = 14; private static final int Mds_Service_Gram_SchedulerType = 15; private static final int Mds_Computer_Isa = 16; private static final int Mds_Os_Name = 17; private static final int Mds_Subnetid = 18; /* * TODO:sk to add constants which represent the attributes in the * MDS objectclass=gridftp-pair-bandwidth-info */ private static final int Host_Subnet_Id = 19; private static final int Dest_Subnet_Id = 20; private static final int Avg_Bandwidth_range1 = 21; private static final int Avg_Bandwidth_range2 = 22; private static final int Avg_Bandwidth_range3 = 23; private static final int Avg_Bandwidth_range4 = 24; private static final int Avg_Bandwidth = 25; private static final int Max_Bandwidth = 26; private static final int Min_Bandwidth = 27; private ArrayList m_identifiers = new ArrayList(28); // private SearchControls constraints; /** * C'tor for the class. */ public MdsQuery() { } /** * Valid C'tor for the class to create a MdsQuery object. * Sets the SECURITY_ATHENTICATION with simple authentication. * Sets the PROVIDER_URL to the MDS host and port. * Sets the INTITIAL CONTEXT FACTORY. * * @param host the hostname of the machine on which a GRIS or GIIS is running. * @param port the Port number on which a GRIS or GIIS is running. 
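 * <p>
 * Further JNDI settings can be layered on before connecting, e.g. (the
 * property and timeout value below are an illustration, not a requirement
 * of this class):
 * <pre>
 *   MdsQuery query = new MdsQuery( "giis.example.org", 2135 );
 *   query.setLdapConnectionEnv( "com.sun.jndi.ldap.read.timeout", "30000" );
 *   LdapContext ctx = query.connectMds();
 * </pre>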
*/ public MdsQuery(String host, int port) { mdshost = host; mdsport = port; mdsbaseDN = new String(); env = new Hashtable(); env.put(Context.PROVIDER_URL, "ldap://" + mdshost + ":" + mdsport); env.put(Context.INITIAL_CONTEXT_FACTORY, DEFAULT_CTX); env.put(Context.SECURITY_AUTHENTICATION, "simple"); m_identifiers.add("Gvds-Pool-Id"); m_identifiers.add("Gvds-Pool-Universe"); m_identifiers.add("Gvds-Pool-WorkDir"); m_identifiers.add("Gvds-Pool-Lrc"); m_identifiers.add("Gvds-Pool-GridLaunch"); m_identifiers.add("Gvds-Pool-Storage"); m_identifiers.add("Gvds-Pool-Profile"); m_identifiers.add("Mds-Computer-Total-Free-nodeCount"); m_identifiers.add("Mds-Computer-Total-nodeCount"); m_identifiers.add("Mds-Gram-Job-Queue-maxcount"); m_identifiers.add("Mds-Gram-Job-Queue-maxcputime"); m_identifiers.add("Mds-Gram-Job-Queue-maxrunningjobs"); m_identifiers.add("Mds-Gram-Job-Queue-maxjobsinqueue"); m_identifiers.add("Mds-Memory-Ram-Total-sizeMB"); m_identifiers.add("Mds-Memory-Ram-sizeMB"); m_identifiers.add("Mds-Service-Gram-schedulertype"); m_identifiers.add("Mds-Computer-isa"); m_identifiers.add("Mds-Os-name"); m_identifiers.add("Mds-Net-netaddr"); /** * sk added the attributes here as well in the same order as they are defined * before as this is like a hash */ m_identifiers.add("Host-Subnet-Id"); m_identifiers.add("Dest-Subnet-Id"); m_identifiers.add("Avg-Bandwidth-range1"); m_identifiers.add("Avg-Bandwidth-range2"); m_identifiers.add("Avg-Bandwidth-range3"); m_identifiers.add("Avg-Bandwidth-range4"); m_identifiers.add("Avg-Bandwidth"); m_identifiers.add("Max-Bandwidth"); m_identifiers.add("Min-Bandwidth"); // constraints = new SearchControls(); // constraints.setSearchScope(SearchControls.SUBTREE_SCOPE); } /** * Enables a user to set new or overide existing connection settings to the MDS. * * @param index Holds the index string for the connection environment. * @param value Holds the value corresponding to the index fro the connnection environment. */ public void setLdapConnectionEnv(String index, String value) { env.put(index, value); } public void setLdapSearchConstraints() { } /** * Create and return a MDS LdapContext. * * @return LdapContext */ public LdapContext connectMds() throws NamingException { LdapContext mdsctx = new InitialLdapContext(env, null); return mdsctx; } /** * Still Thinking how to eable this option. */ public NamingEnumeration searchMDS(LdapContext mdsctx, String baseDN, String filter) { NamingEnumeration results = null; return results; } public PoolConfig StoreGvdsMdsInfo(NamingEnumeration results, String baseDN, PoolConfig poolconfig) throws NamingException, Exception { SiteInfo poolinfo = null; String jburl = null; mdsbaseDN = baseDN; if (results == null) { return null; } String dn; String attribute; Attributes attrs; Attribute at; SearchResult si; while (results.hasMoreElements()) { si = (SearchResult) results.next(); attrs = si.getAttributes(); if (si.getName().trim().length() == 0) { dn = baseDN; } else { dn = si.getName() + ", " + baseDN; } // System.out.println("dn: " + dn); if (dn.startsWith("Gvds-Vo-name") || dn.startsWith("Gvds-Software-deployment")) { poolinfo = new SiteInfo(); for (NamingEnumeration ae = attrs.getAll(); ae.hasMoreElements(); ) { at = (Attribute) ae.next(); attribute = at.getID(); Enumeration vals = at.getAll(); while (vals.hasMoreElements()) { int switchkey = m_identifiers.indexOf(attribute); switch (switchkey) { //Setup pool id case 0: String poolHandle = new String( (String) vals. 
nextElement()); if (poolconfig.getSites().containsKey( poolHandle)) { java.util.Date date = new java.util.Date(); poolconfig.add(poolHandle + "-" + date.getTime(), poolinfo); } else { poolconfig.add(poolHandle, poolinfo); } // poolconfig.setPoolConfig((String) vals.nextElement(),poolinfo); break; //Setup pool universe's info case 1: StringTokenizer st = new StringTokenizer( ( String) vals.nextElement(), "@"); String universe = st.nextToken(); String url = st.nextToken(); String globus_version = st.nextToken(); JobManager jobmanagerinfo = new JobManager(); //setting the universe,globus version and the url mappings jobmanagerinfo.setInfo( JobManager.URL, url); jobmanagerinfo.setInfo( JobManager.UNIVERSE, universe); jobmanagerinfo.setInfo( JobManager.GLOBUS_VERSION, globus_version); poolinfo.setInfo(SiteInfo.JOBMANAGER, jobmanagerinfo); break; //Setup the pool workdir info case 2: WorkDir workdir = new WorkDir(); st = new StringTokenizer( (String) vals. nextElement(), "@"); String path = null; String totalsize = null; String freesize = null; if (st.countTokens() == 1) { path = st.nextToken(); } else { path = st.nextToken(); totalsize = st.nextToken(); freesize = st.nextToken(); } workdir.setInfo(WorkDir. WORKDIR, path); workdir.setInfo(WorkDir. TOTAL_SIZE, totalsize); workdir.setInfo(WorkDir. FREE_SIZE, freesize); poolinfo.setInfo(SiteInfo.WORKDIR, workdir); break; //Setup the pool LRC info case 3: LRC lrc = new LRC( (String) vals.nextElement()); poolinfo.setInfo(SiteInfo.LRC, lrc); break; //Setup the pool GridLaunch Info case 4: poolinfo.setInfo(SiteInfo.GRIDLAUNCH, (String) vals.nextElement()); break; //Setup the pool Storage info case 5: GridFTPServer gftp = new GridFTPServer(); st = new StringTokenizer( (String) vals. nextElement(), "@"); String gftp_url = null; String gftp_globus_version = null; String storage_totalsize = null; String storage_freesize = null; if (st.countTokens() == 2) { gftp_url = st.nextToken(); gftp_globus_version = st.nextToken(); } else { gftp_url = st.nextToken(); gftp_globus_version = st.nextToken(); storage_totalsize = st.nextToken(); storage_freesize = st.nextToken(); } StringTokenizer stt = new StringTokenizer( gftp_url, "/"); String gridftpurl = stt.nextToken() + "//" + stt.nextToken(); String storagedir = ""; while (stt.hasMoreTokens()) { storagedir += "/" + stt.nextToken(); } gftp.setInfo(GridFTPServer.GRIDFTP_URL, gridftpurl); gftp.setInfo(GridFTPServer.STORAGE_DIR, storagedir); gftp.setInfo(GridFTPServer.TOTAL_SIZE, storage_totalsize); gftp.setInfo(GridFTPServer.FREE_SIZE, storage_freesize); gftp.setInfo(GridFTPServer.GLOBUS_VERSION, gftp_globus_version); poolinfo.setInfo(SiteInfo.GRIDFTP, gftp); break; //Setup the pool Profile Info case 6: st = new StringTokenizer( (String) vals. 
nextElement(), "@"); String namespace = st.nextToken(); String key = st.nextToken(); String value = st.nextToken(); Profile profile = new Profile( namespace, key, value); poolinfo.setInfo(SiteInfo.PROFILE, profile); break; default: vals.nextElement(); } } } } else if (dn.startsWith("Mds-Job-Queue-name") || dn.startsWith("Mds-Software-deployment=jobmanager")) { StringTokenizer dnst = new StringTokenizer(dn, ","); if (dn.startsWith("Mds-Job-Queue-name")) { dnst.nextToken(); } String jbmanager = dnst.nextToken(); String jbhost = dnst.nextToken(); jburl = jbhost.substring(jbhost.indexOf("=") + 1) + "/" + jbmanager.substring(jbmanager.indexOf("=") + 1); ArrayList jobmanagers = null; JobManager jobmanager = null; for (Iterator i = poolconfig.getSites().values().iterator(); i.hasNext(); ) { poolinfo = (SiteInfo) i.next(); if ( (jobmanagers = (ArrayList) poolinfo.getInfo( SiteInfo.JOBMANAGER)) != null) { if (!jobmanagers.isEmpty()) { for (Iterator j = jobmanagers.iterator(); j.hasNext(); ) { jobmanager = (JobManager) j.next(); if (jobmanager.getInfo( JobManager.URL). equalsIgnoreCase(jburl)) { for (NamingEnumeration ae = attrs.getAll(); ae.hasMoreElements(); ) { at = (Attribute) ae.next(); attribute = at.getID(); Enumeration vals = at.getAll(); while (vals.hasMoreElements()) { int switchkey = m_identifiers. indexOf(attribute); switch (switchkey) { //Setup other jobmanager Related Information. case 7: jobmanager. setInfo( JobManager.IDLE_NODES, (String) vals. nextElement()); break; case 8: jobmanager. setInfo( JobManager.TOTAL_NODES, (String) vals. nextElement()); break; case 9: jobmanager. setInfo( JobManager.MAX_COUNT, (String) vals. nextElement()); break; case 10: jobmanager. setInfo( JobManager.MAX_CPU_TIME, (String) vals. nextElement()); break; case 11: jobmanager. setInfo( JobManager.RUNNING_JOBS, (String) vals. nextElement()); break; case 12: jobmanager. setInfo( JobManager. JOBS_IN_QUEUE, (String) vals. nextElement()); break; case 13: jobmanager. setInfo( JobManager.TOTAL_MEM, (String) vals. nextElement()); break; case 14: jobmanager. setInfo( JobManager.FREE_MEM, (String) vals. nextElement()); break; case 15: jobmanager. setInfo( JobManager. JOBMANAGER_TYPE, (String) vals. nextElement()); break; case 16: jobmanager. setInfo( JobManager.ARCH_TYPE, (String) vals. nextElement()); break; case 17: jobmanager. setInfo( JobManager.OS_TYPE, (String) vals. 
nextElement()); break; default: vals.nextElement(); } } } } } //for loop } } } } /* * sk added a case where the dn starts with 'Dest-Subnet-Id' * to gather destination bandwidth information for each gridftp server */ else if (dn.startsWith("Dest-Subnet-Id")) { GridFTPBandwidth gridftp_bandwidth = new GridFTPBandwidth(); String dest_subnet_id = null; boolean flag = false; //flag to check if any elements occur StringTokenizer dnst = new StringTokenizer(dn, ","); if (dn.startsWith("Dest-Subnet-Id")) { dnst.nextToken(); } String gridhost = dnst.nextToken(); String hosturl = gridhost.substring(gridhost.indexOf("=") + 1); ArrayList gridftpservers = null; GridFTPServer gridftpserver = null; for (Iterator i = poolconfig.getSites().values().iterator(); i.hasNext(); ) { poolinfo = (SiteInfo) i.next(); if ( (gridftpservers = (ArrayList) poolinfo.getInfo( SiteInfo.GRIDFTP)) != null) { if (!gridftpservers.isEmpty()) { for (Iterator j = gridftpservers.iterator(); j.hasNext(); ) { gridftpserver = (GridFTPServer) j.next(); /** * calculate the gridftpserver url in the form smarty.isi.edu */ String url = gridftpserver.getInfo(0); String halfurl = url.substring(url.indexOf("/") + 2); String finalurl = halfurl.substring(0, halfurl.indexOf("/")); //System.out.println("In url="+hosturl +" grid url="+finalurl); //if (finalurl.equalsIgnoreCase(hosturl)) //means that the particular gridftpserver object has been found among the elements of the arraylist maintained in //the poolinfo class { flag = true; //System.out.println("Url has matched "+hosturl); for (NamingEnumeration ae = attrs.getAll(); ae.hasMoreElements(); ) { //ae iterates over the attributes at = (Attribute) ae.next(); //get each attribute attribute = at.getID(); Enumeration vals = at.getAll(); //get all the values of that attribute ! while (vals.hasMoreElements()) { //form a GridFTPBandwidth object and then // call gridftpserver.setGridFtpBandwidthInfo(dest_subnet_id, object); boolean intflag = false; int switchkey = m_identifiers. indexOf(attribute); switch (switchkey) { /** * populate the gridftp_bandwidth object with the attributes * and then store this object in the hashmap maintained in the gridftpserver object */ //Host-Subnet-Id case 19: //neednt store this information vals.nextElement(); break; //Dest-Subnet-Id case 20: dest_subnet_id = (String) vals.nextElement(); gridftp_bandwidth. setInfo( GridFTPBandwidth. DEST_ID, dest_subnet_id); break; //Avg-Bandwidth-range1 case 21: gridftp_bandwidth. setInfo( GridFTPBandwidth. AVG_BW_RANGE1, (String) vals. nextElement()); break; //Avg-Bandwidth-range2 case 22: gridftp_bandwidth. setInfo( GridFTPBandwidth. AVG_BW_RANGE2, (String) vals. nextElement()); break; //Avg-Bandwidth-range3 case 23: gridftp_bandwidth. setInfo( GridFTPBandwidth. AVG_BW_RANGE3, (String) vals. nextElement()); break; //Avg-Bandwidth-range4 case 24: gridftp_bandwidth. setInfo( GridFTPBandwidth. AVG_BW_RANGE4, (String) vals. nextElement()); break; //Avg-Bandwidth case 25: gridftp_bandwidth. setInfo( GridFTPBandwidth. AVG_BW, (String) vals.nextElement()); break; //Max-Bandwidth case 26: gridftp_bandwidth. setInfo( GridFTPBandwidth. MAX_BW, (String) vals.nextElement()); break; //Min-Bandwidth case 27: gridftp_bandwidth. setInfo( GridFTPBandwidth. 
MIN_BW, (String) vals.nextElement()); break; default: intflag = true; break; } if (intflag) { break; } } //end of While //now add the gridftp_bandwidth object in the hash maintained in the GvdsPoolGridFtp object /* if(flag) { gridftpserver.setInfo(dest_subnet_id,gridftp_bandwidth); }*/ } } //end of if matching the appropriate gridftpserver } //for loop } } } if (flag) { gridftpserver.setGridFTPBandwidthInfo(gridftp_bandwidth); //set the gridftp_bandwidth object in GvdsPoolGridFtp class } } } return poolconfig; } /** * Displays the result on stdout instead of putting it in data classes. * * @param results Takes a NamingEnumeration returned by the MDS search * @param baseDN Takes the baseDN provided to the MDS search. * */ public void displayResults(NamingEnumeration results, String baseDN) throws NamingException { mdsbaseDN = baseDN; if (results == null) { return; } String dn; String attribute; Attributes attrs; Attribute at; SearchResult si; while (results.hasMoreElements()) { si = (SearchResult) results.next(); attrs = si.getAttributes(); if (si.getName().trim().length() == 0) { dn = baseDN; } else { dn = si.getName() + ", " + baseDN; } System.out.println("dn: " + dn); for (NamingEnumeration ae = attrs.getAll(); ae.hasMoreElements(); ) { at = (Attribute) ae.next(); attribute = at.getID(); Enumeration vals = at.getAll(); while (vals.hasMoreElements()) { System.out.println(attribute + ": " + vals.nextElement()); } } System.out.println(); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/PoolMode.java0000644000175000017500000002602511757531137027755 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.impl.old; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.common.util.FactoryException; import java.lang.reflect.Method; /** * This class determines at runtime which * implementing class to use as a Pool Handle. * It uses the reflection package of java * to dynamically load a class. The class * to be loaded is specified by the * vds.pool.mode property. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2585 $ */ public class PoolMode { /** * Constants to specify how to load the * class, as singleton or non singleton. */ public static final int SINGLETON_LOAD = 0; public static final int NON_SINGLETON_LOAD = 1; /** * Constants for single read. * */ public static final String SINGLE_READ = "single"; // public static final int SINGLE_READ_VALUE = 0; // public static final String SINGLE_READ_CLASS = "SingleReadPool"; /** * Constants for multiple read. */ public static final String MULTIPLE_READ = "multiple"; // public static final int MULTIPLE_READ_VALUE = 1; // public static final String MULTIPLE_READ_CLASS = "XML"; /** * Constants for xml pool read. 
*/ // public static final String XML_READ = "xml"; public static final int XML_READ_VALUE = 2; public static final String XML_READ_CLASS = "XML"; /** * Constants for multiline text pool read. */ // public static final String TEXT_READ = "text"; public static final int TEXT_READ_VALUE = 3; public static final String TEXT_READ_CLASS = "Text"; /** * Constants for mode not defined. */ public static final int UNDEFINED_READ_VALUE = -1; public static final String PACKAGE_NAME = "edu.isi.pegasus.planner.catalog.site.impl.oldimpl."; private static LogManager mLogger = LogManagerFactory.loadSingletonInstance(); //add your constants here. /** * Given a string readMode returns the * name of the class that implements * that read mode. If the readMode * doesnt equal any of the predefined * constants then the value of readMode * is taken as the name of the implementing class. * * @param readMode The String form of the * read mode, got from the property * vds.pool.mode. * * @return the corresponding int value * of the mode. If not found * then null * */ public static String getImplementingClass( String readMode ) { if ( readMode.trim().equalsIgnoreCase( SINGLE_READ ) || readMode.trim().equalsIgnoreCase( MULTIPLE_READ )) { throw new RuntimeException("The pool mode " + readMode + " is no " + "longer supported. Please use the " + XML_READ_CLASS + " mode or the "+TEXT_READ_CLASS+" mode."); } else if ( readMode.trim().equalsIgnoreCase( XML_READ_CLASS ) ) { return XML_READ_CLASS; } else if ( readMode.trim().equalsIgnoreCase(TEXT_READ_CLASS)){ return TEXT_READ_CLASS; } else { //no match to any predefined constant //assume that the value of readMode is the //name of the implementing class return readMode; } } /** * Loads the pool info provider class using the reflection package * in java at runtime. The class is loaded as a singleton or * a non-singleton dependant on the parameter passed. The properties * file that is picked up is the default properties file from * $PEGASUS_HOME/etc directory. * * @param poolClass the name of the class that resides in the * package named PoolMode.PACKAGE_NAME or the * complete name of the class including the * package name. * * @param poolProvider the path to the file, that contains the * pool configuration in the appropriate format * that the implementing poolClass understands. * * * @param lMode the loading mode of the class. It specifies whether * the singleton object of the class needs to be * loaded or the non singleton instance. * * @return the object corresponding to the pool info provider class. */ public static PoolInfoProvider loadPoolInstance( String poolClass, String poolProvider, int lMode ) { Object argList[] = new Object[2 ]; argList[ 0 ] = poolProvider; argList[ 1 ] = edu.isi.pegasus.common.util.CommonProperties.PROPERTY_FILENAME; return loadPoolInstance( poolClass, lMode, argList ); } /** * Loads the pool info provider class using the reflection package * in java at runtime. The class is loaded as a singleton or * a non-singleton dependant on the parameter passed. * * @param poolClass the name of the class that resides in the * package named PoolMode.PACKAGE_NAME or the * complete name of the class including the * package name. * * @param poolProvider the path to the file, that contains the * pool configuration in the appropriate format * that the implementing poolClass understands. * * @param propFileName name of the properties file to picked from * $PEGASUS_HOME/etc/ directory. For the singleton * loading only the default file is picked up. 
* * @param lMode the loading mode of the class. It specifies whether * the singleton object of the class needs to be * loaded or the non singleton instance. * * @return the object corresponding to the pool info provider class. */ public static PoolInfoProvider loadPoolInstance( String poolClass, String poolProvider, String propFileName, int lMode ) { Object argList[] = new Object[2 ]; argList[ 0 ] = poolProvider; argList[ 1 ] = propFileName; return loadPoolInstance( poolClass, lMode, argList ); } /** * Its returns the name of the method that needs to be invoked * to get the object of the implementing pool class. It determines * the method name on the basis of the value of the loading mode * specified. * * @param lMode the loading mode of the class. It specifies whether * the singleton object of the class needs to be * loaded or the non singleton instance. * * @return the name of the method that needs to be invoked. */ public static String getMethodName( int lMode ) { String name = null; if ( lMode == SINGLETON_LOAD ) { name = "singletonInstance"; } else { if ( lMode == NON_SINGLETON_LOAD ) { name = "nonSingletonInstance"; } } return name; } /** * Loads the appropriate class that implements a particular * pool mode using the reflection package in java at runtime. * * @param poolClass String * * @param lMode the loading mode of the class. It specifies whether * the singleton object of the class needs to be * loaded or the non singleton instance. * @param argList Object[] * @return PoolInfoProvider * * @throws FactoryException that nests any error that * might occur during the instantiation of the implementation. */ private static PoolInfoProvider loadPoolInstance( String poolClass, int lMode, Object[] argList ) throws FactoryException { PoolInfoProvider pi = null; String mLogMsg = null; String methodName = getMethodName( lMode ); //get the complete name including //the package if the package name not //specified if ( poolClass.indexOf( "." ) == -1 ) { poolClass = PACKAGE_NAME + poolClass; } DynamicLoader d = new DynamicLoader( poolClass ); try { //instantiate the class //with no constructor Class cls = Class.forName( poolClass ); //creating a new string to get //it's class object that needs to //be passed. Could be wrong potentially?? //This identifies the signature for //the method Class partypes[] = new Class[argList.length ]; for ( int i = 0; i < argList.length; i++ ) { partypes[ i ] = ( argList[ i ] == null ) ? //try to put in a string //actually the calling class should never pass //null new String().getClass() : argList[ i ].getClass(); } //get the handle to the method Method meth = cls.getMethod( methodName, partypes ); //invoke the method that returns //us the singleton instance Object retobj = meth.invoke( null, argList ); pi = ( PoolInfoProvider ) retobj; } catch ( Exception e ) { throw new FactoryException( "Instantiating Create Directory", poolClass, e ); } return pi; } /** * given a string Mode returns the * corresponding int value * * @param readMode The String form of the * read mode * * @return the corresponding int value * of the mode. 
If not found * then null * @deprecated */ public static int getValue( String readMode ) { if ( readMode.trim().equalsIgnoreCase( SINGLE_READ ) ) { return -1; } else if ( readMode.trim().equalsIgnoreCase( MULTIPLE_READ ) ) { return -1; } else if ( readMode.trim().equalsIgnoreCase( XML_READ_CLASS ) ) { return XML_READ_VALUE; } else if ( readMode.trim().equalsIgnoreCase( TEXT_READ_CLASS ) ) { return TEXT_READ_VALUE; } else { return UNDEFINED_READ_VALUE; } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/old/TestSiteCatalog.java0000644000175000017500000000466011757531137031277 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.site.impl.old; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Condor; import java.util.List; import java.util.Iterator; /** * A Test program that shows how to load a Site Catalog, and query for all sites. * The configuration is picked from the Properties. The following properties * need to be set *

 *      pegasus.catalog.site       Text|XML
 *      pegasus.catalog.site.file  path to the site catalog.
 *  
* * @author Karan Vahi * @version $Revision: 2571 $ */ public class TestSiteCatalog { /** * The main program. */ public static void main( String[] args ) { PoolInfoProvider catalog = null; LogManager logger = LogManagerFactory.loadSingletonInstance(); /* load the catalog using the factory */ try{ catalog = SiteFactory.loadInstance( PegasusProperties.nonSingletonInstance(), false ); } catch ( SiteFactoryException e ){ logger.log( e.convertException() , LogManager.FATAL_MESSAGE_LEVEL); System.exit( 2 ); } /* query for the sites, and print them out */ List siteIDs = catalog.getPools(); for( Iterator it = catalog.getPools().iterator(); it.hasNext(); ){ String siteID = (String)it.next(); SiteInfo site = catalog.getPoolEntry( siteID, Condor.VANILLA_UNIVERSE ); //System.out.println( site.toXML() ); //for XML output System.out.println( site.toMultiLine() ); //for multiline text output } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/VORS.java0000644000175000017500000001577111757531137026260 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.impl; import edu.isi.pegasus.common.logging.LogManagerFactory; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.planner.catalog.site.SiteCatalogException; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.catalog.site.classes.VORSSiteCatalogUtil; import edu.isi.pegasus.planner.catalog.site.classes.VORSSiteInfo; import edu.isi.pegasus.planner.catalog.site.classes.VORSVOInfo; /** * The VORS implementation of the Site Catalog interface. * * @author Atul Kumara * @author Karan Vahi */ public class VORS implements SiteCatalog { /** * The default VORS mVORSHost. */ public static final String DEFAULT_VORS_HOST = "vors.grid.iu.edu"; /** * The default VORS mVORSPort. */ public static final String DEFAULT_VORS_PORT = "80"; /** * The SiteStore object where information about the sites is stored. */ private SiteStore mSiteStore; private Map mVOInfo = null; /** * The mVORSHost where VORS is running. */ private String mVORSHost; /** * The VORS mVORSPort. */ private String mVORSPort; /** * The VO to which the user belongs to. */ private String mVO; /** * The Grid for which information is required. */ private String mGRID; /** * The handle to the log manager. */ private LogManager mLogger; /** * The default constructor. 
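 * <p>
 * A typical lifecycle sketch (illustrative; "vors.grid.iu.edu" and "all"
 * are the defaults this class falls back to when the properties are not
 * set):
 * <pre>
 *   VORS catalog = new VORS();
 *   Properties props = new Properties();
 *   props.setProperty( "vors.host", "vors.grid.iu.edu" );
 *   props.setProperty( "vors.vo", "all" );
 *   catalog.connect( props );
 *   catalog.load( java.util.Arrays.asList( "*" ) );  // "*" loads all known sites
 *   catalog.close();
 * </pre>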
*/ public VORS() { mLogger = LogManagerFactory.loadSingletonInstance(); mSiteStore = new SiteStore(); } /* (non-Javadoc) * @see edu.isi.pegasus.planner.catalog.SiteCatalog#insert(edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry) */ public int insert(SiteCatalogEntry entry) throws SiteCatalogException { mSiteStore.addEntry(entry); return 1; } /* (non-Javadoc) * @see edu.isi.pegasus.planner.catalog.SiteCatalog#list() */ public Set list() throws SiteCatalogException { return mSiteStore.list(); } /* (non-Javadoc) * @see edu.isi.pegasus.planner.catalog.SiteCatalog#load(java.util.List) */ public int load(List sites) throws SiteCatalogException { int ret = 0; Iterator siteItr; if (sites.get(0).trim().equals("*")) { siteItr = mVOInfo.keySet().iterator(); } else { siteItr = sites.iterator(); } while (siteItr.hasNext()) { String sitename = siteItr.next(); VORSVOInfo temp = mVOInfo.get(sitename); if (temp == null) { mLogger.log( sitename + " site not found.", LogManager.ERROR_MESSAGE_LEVEL ); continue; } if (temp.getStatus().equals("PASS")) { VORSSiteInfo siteInfo = VORSSiteCatalogUtil.get_sites_info(mVORSHost, mVORSPort, mVO, mGRID, temp.getID()); siteInfo.setVoInfo(temp); if (siteInfo.getOsg_grid() == null || siteInfo.getTmp_loc() == null) { mLogger.log("Paths are null." + sitename + " is invalid.", LogManager.CONFIG_MESSAGE_LEVEL); } else { mLogger.log("Site " + sitename + " is ACCESSIBLE", LogManager.INFO_MESSAGE_LEVEL); mSiteStore.addEntry(VORSSiteCatalogUtil.createSiteCatalogEntry(siteInfo)); ret++; } } else { mLogger.log("Site " + sitename + " is INACCESSIBLE", LogManager.INFO_MESSAGE_LEVEL); } } //always add local site. VORSSiteInfo siteInfo = VORSSiteCatalogUtil.getLocalSiteInfo( mVO ); VORSVOInfo local = new VORSVOInfo(); local.setGrid( mGRID ); siteInfo.setVoInfo( local ); mLogger.log( "Site LOCAL . 
Creating default entry" , LogManager.INFO_MESSAGE_LEVEL ); mSiteStore.addEntry(VORSSiteCatalogUtil.createSiteCatalogEntry(siteInfo)); ret++; /*////////////////////////FOR TESTING///////////// try { System.out.println(mSiteStore.toXML()); } catch (IOException ex) { Logger.getLogger(VORS.class.getName()).log(Level.SEVERE, null, ex); } //////////////////////////////////////*/ return ret; } /* (non-Javadoc) * @see edu.isi.pegasus.planner.catalog.SiteCatalog#lookup(java.lang.String) */ public SiteCatalogEntry lookup(String handle) throws SiteCatalogException { return mSiteStore.lookup(handle); } /* (non-Javadoc) * @see edu.isi.pegasus.planner.catalog.SiteCatalog#remove(java.lang.String) */ public int remove(String handle) throws SiteCatalogException { throw new UnsupportedOperationException( "Method remove( String , String ) not yet implmeneted" ); } /* (non-Javadoc) * @see org.griphyn.common.catalog.Catalog#close() */ public void close() { if(mVOInfo != null){ mVOInfo.clear(); mVOInfo = null; } } /* (non-Javadoc) * @see org.griphyn.common.catalog.Catalog#connect(java.util.Properties) */ public boolean connect(Properties props) { mVORSHost = props.getProperty( "vors.host", DEFAULT_VORS_HOST ); mVORSPort = props.getProperty("vors.port", DEFAULT_VORS_PORT ); mVO = props.getProperty("vors.vo", "all"); mGRID = props.getProperty("vors.grid", "all"); Iterator itr = VORSSiteCatalogUtil.get_sites_in_grid(mVORSHost, mVORSPort, mVO, mGRID).iterator(); while(itr.hasNext()){ if(mVOInfo == null){ mVOInfo = new HashMap(); } VORSVOInfo temp = itr.next(); mVOInfo.put(temp.getName(), temp); } return true; } /* (non-Javadoc) * @see org.griphyn.common.catalog.Catalog#isClosed() */ public boolean isClosed() { return mVOInfo == null; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/OSGMM.java0000644000175000017500000006676411757531137026361 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ package edu.isi.pegasus.planner.catalog.site.impl; import edu.clemson.SiteCatalogGenerator; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.planner.catalog.site.SiteCatalogException; import edu.isi.pegasus.planner.catalog.site.classes.LocalSiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteInfo2SiteCatalogEntry; import edu.isi.pegasus.common.util.StreamGobbler; import edu.isi.pegasus.common.util.StreamGobblerCallback; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPServer; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.JobManager; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.LRC; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.WorkDir; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import edu.isi.pegasus.common.util.Boolean; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Properties; import java.util.Set; /** * The OSGMM implementation of the Site Catalog interface. * This implementation also has a method to generate the SRM property mappings * to be used by Pegasus. * * The following pegasus properties are created for the sites that have the SRM * information available. * *
 * <pre>
 * pegasus.transfer.srm.[sitename].service.url
 * pegasus.transfer.srm.[sitename].service.mountpoint
 * </pre>
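 * <p>
 * A usage sketch (illustrative; the property keys are the ones this class
 * reads, the values are assumptions):
 * <pre>
 *   SiteCatalog catalog = new OSGMM();
 *   Properties props = new Properties();
 *   props.setProperty( "osgmm.collector.host", "engage-central.renci.org" );
 *   props.setProperty( "osgmm.vo", "engage" );
 *   if ( catalog.connect( props ) ) {
 *       int loaded = catalog.load( java.util.Arrays.asList( "*" ) );
 *   }
 * </pre>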
* * @author Karan Vahi * @version $Revision: 2932 $ */ public class OSGMM implements SiteCatalog { /** * The property key without the pegasus prefix'es to get the condor collector host. */ public static final String DEFAULT_CONDOR_COLLECTOR_PROPERTY_KEY = "osgmm.collector.host"; /** * The pegasus property prefix. */ public static final String PEGASUS_PROPERTY_PREFIX = "pegasus.transfer.srm"; /** * The name of the ENGAGE VO */ public static final String ENGAGE_VO = "engage"; /** * The default condor collector to query to for non LIGO VO's */ public static final String DEFAULT_CONDOR_COLLECTOR = "engage-central.renci.org"; /** * The name of the LIGO VO */ public static final String LIGO_VO = "ligo"; /** * The default condor collector to query to for LIGO VO */ public static final String DEFAULT_LIGO_CONDOR_COLLECTOR = "osg-itb.ligo.caltech.edu"; /** * The property key without the pegasus prefix'es to get the VO. */ public static final String DEFAULT_VO_PROPERTY_KEY = "osgmm.vo"; /** * The default VO to use to query the condor collector. */ public static final String DEFAULT_VO = "ligo"; /** * The property key without the pegasus prefix'es to get the grid. */ public static final String DEFAULT_GRID_PROPERTY_KEY = "osgmm.grid"; /** * The default Grid to retreive the sites for. */ public static final String DEFAULT_GRID = "osg"; /** * The property key without the pegasus prefix'es to get the VO. */ public static final String DEFAULT_RETRIEVE_VALIDATED_SITES_PROPERTY_KEY = "osgmm.retrieve.validated.sites"; /** * The default VO to use to query the condor collector. */ public static final boolean DEFAULT_RETRIEVE_VALIDATED_SITES = true; /** * An adapter method that converts the Site object to the SiteInfo object * corresponding to the site catalog schema version 2. * * @param s the Site object to convert. 
* * @return the coverted SiteInfo object */ private static SiteInfo convertToSiteInfo( SiteCatalogGenerator.Site s ) throws Exception{ SiteInfo site = new SiteInfo(); site.setInfo( SiteInfo.HANDLE, s.siteName ); site.setInfo( SiteInfo.GRIDLAUNCH, s.gridlaunch ); site.setInfo( SiteInfo.SYSINFO, new VDSSysInfo( s.sysinfo )); site.setInfo( SiteInfo.LRC, new LRC(s.lrcUrl) ); //fork jobmanager JobManager forkJM = new JobManager( ); forkJM.setInfo( JobManager.UNIVERSE, JobManager.FORK_JOBMANAGER_TYPE ); forkJM.setInfo( JobManager.URL, s.transferUniverseJobManager ); site.setInfo( SiteInfo.JOBMANAGER, forkJM ); //compute jobmanager JobManager computeJM = new JobManager( ); computeJM.setInfo( JobManager.UNIVERSE, JobManager.VANILLA_JOBMANAGER_TYPE ); computeJM.setInfo( JobManager.URL, s.VanillaUniverseJobManager ); site.setInfo( SiteInfo.JOBMANAGER, computeJM ); //set the gridftp server GridFTPServer server = new GridFTPServer(); server.setInfo( GridFTPServer.GRIDFTP_URL, s.gridFtpUrl ); server.setInfo( GridFTPServer.STORAGE_DIR, s.gridFtpStorage ); site.setInfo( SiteInfo.GRIDFTP, server ); //set the environment profiles if( s.app != null ){ site.setInfo( SiteInfo.PROFILE, new Profile( Profile.ENV, "app", s.app ) ); } if( s.data != null ){ site.setInfo( SiteInfo.PROFILE, new Profile( Profile.ENV, "data", s.data ) ); } if( s.tmp != null ){ site.setInfo( SiteInfo.PROFILE, new Profile( Profile.ENV, "tmp", s.tmp ) ); } if( s.wntmp != null ){ site.setInfo( SiteInfo.PROFILE, new Profile( Profile.ENV, "wntmp", s.wntmp ) ); } if( s.globusLocation != null ){ site.setInfo( SiteInfo.PROFILE, new Profile( Profile.ENV, "GLOBUS_LOCATION", s.globusLocation ) ); site.setInfo( SiteInfo.PROFILE, new Profile( Profile.ENV, "LD_LIBRARY_PATH", s.globusLocation + File.separator + "lib" ) ); } //set the working directory WorkDir dir = new WorkDir(); dir.setInfo( WorkDir.WORKDIR, s.workingDirectory ); site.setInfo( SiteInfo.WORKDIR, dir ); return site; } /** * The List storing the output of condor-status. */ List mCondorStatusOutput; /** * The List storing the stderr of condor-status. */ List mCondorStatusError; /** * The SiteStore object where information about the sites is stored. */ private SiteStore mSiteStore; /** * The handle to the log manager. */ private LogManager mLogger; /** * The VO to which the user belongs to. */ private String mVO; /** * The collector host to query to. */ private String mCollectorHost; /** * The grid to which the user belongs to. */ private String mGrid; /** * The default constructor. */ public OSGMM() { mLogger = LogManagerFactory.loadSingletonInstance(); mSiteStore = new SiteStore(); mVO = OSGMM.DEFAULT_VO; mGrid = OSGMM.DEFAULT_GRID; } /* (non-Javadoc) * @see edu.isi.pegasus.planner.catalog.SiteCatalog#insert(edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry) */ public int insert(SiteCatalogEntry entry) throws SiteCatalogException { mSiteStore.addEntry(entry); return 1; } /* (non-Javadoc) * @see edu.isi.pegasus.planner.catalog.SiteCatalog#list() */ public Set list() throws SiteCatalogException { return mSiteStore.list(); } /** * Loads up the Site Catalog implementation with the sites whose * site handles are specified. This is a convenience method, that can * allow the backend implementations to maintain soft state if required. * * If the implementation chooses not to implement this, just do an empty * implementation. * * The site handle * is a special handle designating all sites are to be * loaded. * * @param sites the list of sites to be loaded. 
     *
     * @return the number of sites loaded.
     *
     * @throws SiteCatalogException in case of error.
     */
    public int load( List sites ) throws SiteCatalogException {
        if( this.isClosed() ){
            throw new SiteCatalogException( "Need to connect to site catalog before loading" );
        }

        // TODO: these should come from either the command line or a config file
        SiteCatalogGenerator sg = new SiteCatalogGenerator( (ArrayList)mCondorStatusOutput );
        List<SiteCatalogGenerator.Site> sgSites = sg.loadSites( sites, mVO );

        int result = 0;
        for( SiteCatalogGenerator.Site s : sgSites ){
            SiteInfo site;
            try {
                //some sanity check before attempting to convert
                if ( s.globusLocation == null || s.VanillaUniverseJobManager == null) {
                    mLogger.log( "Skipping site " + s.siteName, LogManager.INFO_MESSAGE_LEVEL );
                    continue;
                }
                mLogger.log( "Adding site " + s.siteName, LogManager.INFO_MESSAGE_LEVEL );
                site = OSGMM.convertToSiteInfo(s);
                mSiteStore.addEntry( SiteInfo2SiteCatalogEntry.convert( site, mLogger ) );
                result++;
            } catch (Exception ex) {
                mLogger.log( " While converting Site object for site " + s.siteName, ex,
                             LogManager.ERROR_MESSAGE_LEVEL );
            }
        }

        //always add the local site.
        mLogger.log( "Site LOCAL. Creating default entry", LogManager.INFO_MESSAGE_LEVEL );
        mSiteStore.addEntry( LocalSiteCatalogEntry.create( mVO, mGrid ) );
        result++;

        return result;
    }

    /* (non-Javadoc)
     * @see edu.isi.pegasus.planner.catalog.SiteCatalog#lookup(java.lang.String)
     */
    public SiteCatalogEntry lookup(String handle) throws SiteCatalogException {
        return mSiteStore.lookup(handle);
    }

    /* (non-Javadoc)
     * @see edu.isi.pegasus.planner.catalog.SiteCatalog#remove(java.lang.String)
     */
    public int remove(String handle) throws SiteCatalogException {
        throw new UnsupportedOperationException("Method remove( String ) not yet implemented");
    }

    /**
     * Closes the connection. It resets the internal buffers that contain output
     * of the condor_status command.
     */
    public void close() {
        mCondorStatusOutput = null;
        mCondorStatusError = null;
    }

    /**
     * Issues the condor status command, and stores the results retrieved back
     * into a List.
     *
     * @param props is the property table with sufficient settings to connect
     *              to the implementation.
     *
     * @return true if connected, false if failed to connect.
     *
     * @throws SiteCatalogException
     */
    public boolean connect(Properties props) throws SiteCatalogException {
        Runtime r = Runtime.getRuntime();
        ListCallback ic = new ListCallback();
        ListCallback ec = new ListCallback();

        mLogger.log( "Properties passed at connection " + props , LogManager.DEBUG_MESSAGE_LEVEL );

        // TODO: these should come from either the command line or a config file
        //String collectorHost = "engage-central.renci.org";
        mCollectorHost = props.getProperty( OSGMM.DEFAULT_CONDOR_COLLECTOR_PROPERTY_KEY );
        mVO = props.getProperty( OSGMM.DEFAULT_VO_PROPERTY_KEY, OSGMM.DEFAULT_VO ).toLowerCase();

        if( mCollectorHost == null){
            //user did not specify in the properties.
            //assign a collector host on basis of VO.
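            // A sketch of the behavior (hedged): the VO alone drives this
            // fallback -- "ligo" selects the LIGO collector and any other VO
            // the Engage central collector; no further lookup is attempted.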
            if( mVO.equals( OSGMM.LIGO_VO ) ){
                mCollectorHost = OSGMM.DEFAULT_LIGO_CONDOR_COLLECTOR;
            }
            else{
                mCollectorHost = OSGMM.DEFAULT_CONDOR_COLLECTOR;
            }
        }
        mGrid = props.getProperty( OSGMM.DEFAULT_GRID_PROPERTY_KEY, OSGMM.DEFAULT_GRID );

        //Boolean.parse( String, boolean ) does not exist in the Java API.
        //Parse the property explicitly, falling back to the default value.
        String validatedSites = props.getProperty( OSGMM.DEFAULT_RETRIEVE_VALIDATED_SITES_PROPERTY_KEY );
        boolean onlyOSGMMValidatedSites = ( validatedSites == null ) ?
                                          OSGMM.DEFAULT_RETRIEVE_VALIDATED_SITES :
                                          Boolean.parseBoolean( validatedSites );

        mLogger.log( "The Condor Collector Host is " + mCollectorHost, LogManager.DEBUG_MESSAGE_LEVEL );
        mLogger.log( "The User specified VO is " + mVO, LogManager.DEBUG_MESSAGE_LEVEL );
        mLogger.log( "Retrieve only validated sites " + onlyOSGMMValidatedSites, LogManager.DEBUG_MESSAGE_LEVEL );

        String voToQueryFor = mVO;
        //if the collector is the default collector
        //then vo to query for is always engage
        if( mCollectorHost.equals( OSGMM.DEFAULT_CONDOR_COLLECTOR ) ){
            voToQueryFor = OSGMM.ENGAGE_VO;
        }
        mLogger.log( "The condor collector will be queried for VO " + voToQueryFor,
                     LogManager.DEBUG_MESSAGE_LEVEL );

        String constraint = "StringlistIMember(\"VO:" + voToQueryFor + "\";GlueCEAccessControlBaseRule)";
        if (onlyOSGMMValidatedSites) {
            constraint += " && SiteVerified==True";
        }

        String condorStatusCmd[] = {"condor_status", "-any", "-pool", mCollectorHost,
                                    "-constraint", constraint,
                                    "-format", "%s", "GlueSiteName",
                                    "-format", ";", "1", // to force a semicolon, even if the attribute was not found
                                    "-format", "%s", "GlueClusterUniqueID",
                                    "-format", ";", "1",
                                    "-format", "%s", "OSGMM_Globus_Location_Fork",
                                    "-format", ";", "1",
                                    "-format", "%s", "GlueCEInfoContactString",
                                    "-format", ";", "1",
                                    "-format", "%s", "GlueClusterTmpDir",
                                    "-format", ";", "1",
                                    "-format", "%s", "GlueCEInfoHostName",
                                    "-format", ";", "1",
                                    "-format", "%s", "GlueCEInfoApplicationDir",
                                    "-format", ";", "1",
                                    "-format", "%s", "GlueCEInfoDataDir",
                                    "-format", ";", "1",
                                    "-format", "%s", "GlueClusterTmpDir", //queried a second time here
                                    "-format", ";", "1",
                                    "-format", "%s", "GlueClusterWNTmpDir",
                                    "-format", ";\\n", "1"};

        String cmdPretty = "";
        for(int i=0; i < condorStatusCmd.length; i++) {
            cmdPretty += condorStatusCmd[i] + " ";
        }

        try{
            mLogger.log( "condor_status command is \n " + cmdPretty, LogManager.DEBUG_MESSAGE_LEVEL );
            Process p = r.exec( condorStatusCmd );

            //spawn off the gobblers
            StreamGobbler ips = new StreamGobbler( p.getInputStream(), ic );
            StreamGobbler eps = new StreamGobbler( p.getErrorStream(), ec );
            ips.start();
            eps.start();

            //wait for the threads to finish off
            ips.join();
            mCondorStatusOutput = ic.getContents();
            eps.join();
            mCondorStatusError = ec.getContents();

            //get the status
            int status = p.waitFor();
            if( status != 0){
                mLogger.log("condor_status command exited with status " + status,
                            LogManager.WARNING_MESSAGE_LEVEL);
                //also dump the stderr
                mLogger.log( "stderr for command invocation " + mCondorStatusError,
                             LogManager.ERROR_MESSAGE_LEVEL );
            }
        }
        catch(IOException ioe){
            mLogger.log( "IOException while calling out to condor_status. Probably" +
                         " condor_status is not in the PATH.", ioe,
                         LogManager.ERROR_MESSAGE_LEVEL);
            //also dump the stderr
            mLogger.log( "stderr for command invocation " + mCondorStatusError,
                         LogManager.ERROR_MESSAGE_LEVEL );
            return false;
        }
        catch( InterruptedException ie){
            //ignore
        }
        return true;
    }

    /**
     * Generates SRM properties that can be used by Pegasus to do SRM URL
     * substitution for the case where all the data is accessible on the
     * worker nodes locally.
     *
     * @return Properties object containing the relevant Properties.
*/ public Properties generateSRMProperties( ){ Properties result = new Properties( ); if( this.isClosed() ){ throw new SiteCatalogException( "Need to connect to site catalog before properties can be generated" ); } String constraint = "regexp(\"file://\",GlueSEAccessProtocolEndpoint) && GlueSAPath=!=UNDEFINED && GlueSEControlProtocolEndpoint=!=UNDEFINED"; //condor_status -l -pool ligo-osgmm.renci.org -constraint 'regexp("file://", GlueSEAccessProtocolEndpoint) && GlueSAPath =!= UNDEFINED && GlueSEControlProtocolEndpoint =!= UNDEFINED' -format %s GlueSiteName -format ";" 1 -format "srm://%s?SFN=" 'substr(GlueSEControlProtocolEndpoint, 8)' -format "%s" 'ifThenElse(GlueVOInfoPath =!= UNDEFINED, GlueVOInfoPath, GlueSAPath)' -format ";" 1 -format "%s" GlueSAPath -format ";" 1 -format "%s" GlueVOInfoPath -format ";" 1 -format "%s" GlueCESEBindMountInfo -format ";\n" 1 String condorStatusCmd[] = { "condor_status", "-pool", mCollectorHost, "-constraint", constraint, "-format", "%s", "GlueSiteName", //retrieve the site name "-format", ";", "1", // to force a semicolon, even if the attribute was not found "-format", "srm://%s?SFN=", "substr(GlueSEControlProtocolEndpoint, 8)", //"-format", "\";\"", "1", this is incorrect. java trips badly on this style "-format", "%s" , "ifThenElse(GlueVOInfoPath =!= UNDEFINED, GlueVOInfoPath, GlueSAPath)", "-format", ";", "1", "-format", "%s", "GlueSAPath", "-format", ";", "1", "-format", "%s", "GlueVOInfoPath", "-format", ";", "1", "-format", "%s", "GlueCESEBindMountInfo", "-format", ";\\n", "1" }; String cmdPretty = ""; for(int i=0; i < condorStatusCmd.length; i++) { cmdPretty += condorStatusCmd[i] + " "; } Runtime r = Runtime.getRuntime(); ListCallback ic = new ListCallback(); ListCallback ec = new ListCallback(); try{ mLogger.log( "condor_status command issued to retrieve SRM mappings is \n " + cmdPretty, LogManager.DEBUG_MESSAGE_LEVEL ); Process p = r.exec( condorStatusCmd ); //spawn off the gobblers StreamGobbler ips = new StreamGobbler( p.getInputStream(), ic ); StreamGobbler eps = new StreamGobbler( p.getErrorStream(), ec ); ips.start(); eps.start(); //wait for the threads to finish off ips.join(); List stdout = ic.getContents(); eps.join(); List stderr = ec.getContents(); //get the status int status = p.waitFor(); if( status != 0){ mLogger.log("condor_status command exited with status " + status, LogManager.WARNING_MESSAGE_LEVEL); //also dump the stderr mLogger.log( "stderr for command invocation " + ec.getContents(), LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( "condor-status command exited with non zero status " + status ); } //System.out.println( "The Stdout is " + stdout ); for( Iterator it = stdout.iterator(); it.hasNext(); ){ //create properties for each site result.putAll( generateSRMProperties( (String)it.next() ) ); } } catch(IOException ioe){ mLogger.log( "IOException while calling out to condor_status. Probably" + " condor-status not in path.", ioe, LogManager.ERROR_MESSAGE_LEVEL); //also dump the stderr mLogger.log( "stderr for command invocation " + ec.getContents(), LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( "IOException while invoking condor-status command", ioe ); } catch( InterruptedException ie){ //ignore } return result; } /** * Generates SRM properties that can be used by Pegasus to do SRM URL * substitution for the case where all the data is accessible on the * worker nodes locally for a particular site. * The condor status output for a single site site is passed as input. 
* * Example condor_status output for a site *
     * CIT_CMS_T2;srm://cit-se.ultralight.org:8443/srm/v2/server?SFN=/mnt/hadoop/osg;/mnt/hadoop/osg;/mnt/hadoop/osg;/mnt/hadoop,/mnt/hadoop;
     * 
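     * An illustrative walk-through (derived from the sample line above, not
     * additional data): splitting on ";" yields the site name CIT_CMS_T2,
     * the SRM URL prefix srm://cit-se.ultralight.org:8443/srm/v2/server?SFN=/mnt/hadoop/osg,
     * GlueSAPath and GlueVOInfoPath /mnt/hadoop/osg, and the bind mount info
     * /mnt/hadoop,/mnt/hadoop -- so the mount point stays /mnt/hadoop/osg
     * after the (in this case no-op) bind mount replacement.
     *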
* * The properties created have the following keys *
     * pegasus.transfer.srm.[sitename].service.url
     * pegasus.transfer.srm.[sitename].service.mountpoint
     * 
* where [sitename] is replaced by the name of the site. * * @param line the line from condor_status output for a site. * * @return Properties object containing the relevant Properties. */ public Properties generateSRMProperties( String line ){ Properties result = new Properties(); mLogger.log( "Line being worked on is " + line, LogManager.DEBUG_MESSAGE_LEVEL ); //split the line first String contents[] = line.split(";"); // do we have a valid site name? if (contents[0] == null || contents[0].equals("")) { return result; } String site = contents[0]; //do another sanity check if( contents.length < 4 ){ //ignore the length mLogger.log( "Ignoring line " + line, LogManager.WARNING_MESSAGE_LEVEL ); return result; } String srmURLPrefix = contents[ 1 ]; //the srm url prefix String glueSAPath = contents[ 2 ]; //the storage access path String glueVOPath = contents[ 3 ]; //the vo specific path . it is a subset of sa path //figure out the mount point String mountPoint = glueVOPath; if( contents.length == 5 ){ String bindMountInfo = contents[ 4 ]; //tells how to get to the path on file system. //check if any replacement needs to be done contents = bindMountInfo.split( "," ); if( contents.length == 2 ){ //we have to do replacement //However we dont do any replacement for time being as it is incorrect. StringBuffer message = new StringBuffer(); message.append( "Replacing " ).append( contents[0] ).append( " with "). append( contents[1] ).append( " for site " ).append( site ). append( " to get to the local filesystem path "); mLogger.log( message.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); mountPoint = mountPoint.replace( contents[0], contents[1]); } } //some sanity check on the srmURLPrefix contents = srmURLPrefix.split( "," ); if( contents.length > 1 ){ //handling the following case //UFlorida-PG;srm://srmb.ihepa.ufl.edu:8443/srm/v2/server,httpg://srmb.ihepa.ufl.edu:8443/srm/v2/server?SFN=/lustre/raidl/user/ligo;/lustre/raidl/user/;/lustre/raidl/user/ligo;; mLogger.log( "Ignoring line " + line, LogManager.WARNING_MESSAGE_LEVEL ); return result; } //create the properties String key = createPropertyKey( site, "service.url"); result.setProperty( key , srmURLPrefix ); mLogger.log( "Created property " + key + " -> " + srmURLPrefix, LogManager.DEBUG_MESSAGE_LEVEL ); key = createPropertyKey( site, "service.mountpoint"); result.setProperty( key , mountPoint ); mLogger.log( "Created property " + key + " -> " + mountPoint, LogManager.DEBUG_MESSAGE_LEVEL ); return result; } /** * Creates the property key * * @param site the name of site * @param suffix the suffix to be added to site. * * @return the property key. */ private String createPropertyKey( String site, String suffix ){ StringBuffer key = new StringBuffer(); key.append( OSGMM.PEGASUS_PROPERTY_PREFIX ).append( "." ).append( site ). append( "." ).append( suffix ); return key.toString(); } /** * Returns if the connection is closed or not. * * @return boolean indicating connection is closed. */ public boolean isClosed() { return ( mCondorStatusOutput == null ); } /** * An inner class, that implements the StreamGobblerCallback to store all * the lines in a List * */ private class ListCallback implements StreamGobblerCallback{ /** * The ArrayList where the lines are stored. */ List mList; /** * Default Constructor. * */ public ListCallback( ){ mList = new ArrayList(); } /** * Callback whenever a line is read from the stream by the StreamGobbler. * Adds the line to the list. * * @param line the line that is read. 
*/ public void work( String line ){ mList.add( line ); } /** * Returns the contents captured. * * @return List */ public List getContents(){ return mList; } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/0000755000175000017500000000000011757531667025757 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/0000755000175000017500000000000011757531667027414 5ustar ryngerynge././@LongLink0000000000000000000000000000015500000000000011566 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteServiceInfo.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteServiceI0000644000175000017500000000537711757531137032520 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.impl.myosg.classes; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.MYOSGSiteConstants; /** * This class extends AbstractSiteCatalogResource and stores MYOSG Site service Information * @author prasanth * */ public class MYOSGSiteServiceInfo extends AbstractSiteCatalogResource{ private static final String SERVICE_ID_TAG ="ID"; private static final String SERVICE_NAME_TAG ="Name"; private static final String SERVICE_DESCRIPTION_TAG ="Description"; private static final String SERVICE_URI_TAG ="ServiceUri"; private String serviceID =""; private String serviceName=""; private String serviceDescription=""; private String serviceURI=""; public MYOSGSiteServiceInfo(int depth){ setDepth(depth); } /** * Returns the property value * @param ID * @return property value */ public Object getProperty(int ID) { switch(ID){ case MYOSGSiteConstants.SERVICE_ID_ID: return serviceID ; case MYOSGSiteConstants.SERVICE_NAME_ID: return serviceName; case MYOSGSiteConstants.SERVICE_DESCRIPTION_ID: return serviceDescription; case MYOSGSiteConstants.SERVICE_URI_ID: return serviceURI; } return super.getProperty(ID); } /** * Sets the property of Site Catalog resource * @param ID property ID * @param value property value */ public void setProperty(String ID, Object value) { if(ID.equals(SERVICE_ID_TAG)){ serviceID = (String)value; }else if(ID.equals(SERVICE_NAME_TAG)){ serviceName = (String)value; }else if(ID.equals(SERVICE_DESCRIPTION_TAG)){ serviceDescription = (String)value; }else if(ID.equals(SERVICE_URI_TAG)){ serviceURI = (String)value; } } /** * Add child resources to a site catalog resource * @param childResource child resource */ public void addChildResource(AbstractSiteCatalogResource childResource) { } public String toString(){ String info ="Service :- "+ serviceID +" , "+ serviceName +" , " + serviceDescription +" , " + serviceURI; return info; } } ././@LongLink0000000000000000000000000000014600000000000011566 Lustar 
rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteInfo.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteInfo.jav0000644000175000017500000000452611757531137032454 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.impl.myosg.classes; import java.util.ArrayList; import java.util.List; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.MYOSGSiteConstants; /** * This class extends AbstractSiteCatalogResource and stores MYOSG Site Information * @author prasanth * */ public class MYOSGSiteInfo extends AbstractSiteCatalogResource { private String siteName =""; private List resourceList; public MYOSGSiteInfo(int depth) { resourceList = new ArrayList(); setDepth(depth); } private static final String SITE_NAME_TAG ="GroupName"; /** * Returns the property value * @param ID * @return property value */ public Object getProperty(int ID) { switch (ID) { case MYOSGSiteConstants.SITE_NAME_ID: return siteName; case MYOSGSiteConstants.RESOURCE_LIST_ID: return resourceList; } return super.getProperty(ID); } /** * Sets the property of Site Catalog resource * @param ID property ID * @param value property value */ public void setProperty(String ID, Object value) { if(ID.equals(SITE_NAME_TAG)){ siteName = (String)value; } } /** * Add child resources to a site catalog resource * @param childResource child resource */ public void addChildResource(AbstractSiteCatalogResource childResource) { if(childResource instanceof MYOSGSiteResourceInfo) resourceList.add((MYOSGSiteResourceInfo)childResource); } public String toString(){ String info = "Site :- " + siteName; for(int i =0 ;i < resourceList.size();i++){ info += "\n"+resourceList.get(i); } return info ; } } ././@LongLink0000000000000000000000000000015600000000000011567 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteResourceInfo.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteResource0000644000175000017500000000700711757531137032566 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ package edu.isi.pegasus.planner.catalog.site.impl.myosg.classes; import java.util.ArrayList; import java.util.List; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.MYOSGSiteConstants; /** * This class extends AbstractSiteCatalogResource and stores MYOSG Site resource Information * @author prasanth * */ public class MYOSGSiteResourceInfo extends AbstractSiteCatalogResource{ private static final String RESOURCE_ID_TAG ="ID"; private static final String RESOURCE_NAME_TAG ="Name"; private static final String RESOURCE_DESCRIPTION_TAG ="Description"; private String resourceID =""; private String resourceName =""; private String resourceDescription=""; private List serviceList; private MYOSGSiteEnvironmentInfo environmentInfo; private MYOSGSiteVOOwnershipInfo VOOwnershipInfo; public MYOSGSiteResourceInfo(int depth){ serviceList = new ArrayList(); setDepth(depth); } /** * Returns the property value * @param ID * @return property value */ public Object getProperty(int ID) { switch(ID){ case MYOSGSiteConstants.RESOURCE_ID_ID: return resourceID ; case MYOSGSiteConstants.RESOURCE_NAME_ID: return resourceName; case MYOSGSiteConstants.RESOURCE_DESCRIPTION_ID: return resourceDescription ; case MYOSGSiteConstants.SERVICE_LIST_ID: return serviceList ; case MYOSGSiteConstants.ENVIRONMENT_INFO_ID: return environmentInfo ; case MYOSGSiteConstants.VO_OWNERSHIP_INFO_ID: return VOOwnershipInfo ; } return super.getProperty(ID); } /** * Sets the property of Site Catalog resource * @param ID property ID * @param value property value */ public void setProperty(String ID, Object value) { if(ID.equals(RESOURCE_ID_TAG)){ resourceID = (String)value; }else if(ID.equals(RESOURCE_NAME_TAG)){ resourceName = (String)value; }else if(ID.equals(RESOURCE_DESCRIPTION_TAG)){ resourceDescription = (String)value; } } /** * Add child resources to a site catalog resource * @param childResource child resource */ public void addChildResource(AbstractSiteCatalogResource childResource) { if(childResource instanceof MYOSGSiteServiceInfo){ serviceList.add((MYOSGSiteServiceInfo)childResource); } else if(childResource instanceof MYOSGSiteEnvironmentInfo){ environmentInfo = (MYOSGSiteEnvironmentInfo)childResource; } else if(childResource instanceof MYOSGSiteVOOwnershipInfo){ VOOwnershipInfo = (MYOSGSiteVOOwnershipInfo)childResource; } } public String toString(){ String info ="Resource :- "+ resourceID +" , "+ resourceName +" , " + resourceDescription; for(int i =0 ;i < serviceList.size();i++){ info += "\n"+serviceList.get(i); } info +="\n"+environmentInfo; info +="\n"+VOOwnershipInfo; return info; } } ././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteEnvironmentInfo.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteEnvironm0000644000175000017500000001277311757531137032602 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.impl.myosg.classes; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.MYOSGSiteConstants; /** * This class extends AbstractSiteCatalogResource and stores MYOSG Site * Environment Information * * @author prasanth * */ public class MYOSGSiteEnvironmentInfo extends AbstractSiteCatalogResource { private String envGlobusLocation = ""; private String envApp = ""; private String envData = ""; private String envDefaultSe = ""; private String envGlexecLocation = ""; private String envGrid = ""; private String envHostName = ""; private String envJobContact = ""; private String envLocation = ""; private String envSiteName = ""; private String envSiteRead = ""; private String envSiteWrite = ""; private String envSquidLocation = ""; private String envStorageElement = ""; private String envWnTmp = ""; private static final String ENV_GLOBUS_LOCATION_TAG = "GLOBUS_LOCATION"; private static final String ENV_APP_TAG = "OSG_APP"; private static final String ENV_DATA_TAG = "OSG_DATA"; private static final String ENV_DEFAULT_SE_TAG = "OSG_DEFAULT_SE"; private static final String ENV_GLEXEC_LOCATION_TAG = "OSG_GLEXEC_LOCATION"; private static final String ENV_GRID_TAG = "OSG_GRID"; private static final String ENV_HOSTNAME_TAG = "OSG_HOSTNAME"; private static final String ENV_JOB_CONTACT_TAG = "OSG_JOB_CONTACT"; private static final String ENV_LOCATION_TAG = "OSG_LOCATION"; private static final String ENV_SITE_NAME_TAG = "OSG_SITE_NAME"; private static final String ENV_SITE_READ_TAG = "OSG_SITE_READ"; private static final String ENV_SITE_WRITE_TAG = "OSG_SITE_WRITE"; private static final String ENV_SQUID_LOCATION_TAG = "OSG_SQUID_LOCATION"; private static final String ENV_STORAGE_ELEMENT_TAG = "OSG_STORAGE_ELEMENT"; private static final String ENV_WN_TMP_TAG = "OSG_WN_TMP"; public MYOSGSiteEnvironmentInfo(int depth) { setDepth(depth); } /** * Returns the property value * * @param ID * @return propety value */ public Object getProperty(int ID) { switch (ID) { case MYOSGSiteConstants.ENV_GLOBUS_LOCATION_ID: return envGlobusLocation; case MYOSGSiteConstants.ENV_APP_ID: return envApp; case MYOSGSiteConstants.ENV_DATA_ID: return envData; case MYOSGSiteConstants.ENV_DEFAULT_SE_ID: return envDefaultSe; case MYOSGSiteConstants.ENV_GLEXEC_LOCATION_ID: return envGlexecLocation; case MYOSGSiteConstants.ENV_GRID_ID: return envGrid; case MYOSGSiteConstants.ENV_HOSTNAME_ID: return envHostName; case MYOSGSiteConstants.ENV_JOB_CONTACT_ID: return envJobContact; case MYOSGSiteConstants.ENV_LOCATION_ID: return envLocation; case MYOSGSiteConstants.ENV_SITE_NAME_ID: return envSiteName; case MYOSGSiteConstants.ENV_SITE_READ_ID: return envSiteRead; case MYOSGSiteConstants.ENV_SITE_WRITE_ID: return envSiteWrite; case MYOSGSiteConstants.ENV_SQUID_LOCATION_ID: return envSquidLocation; case MYOSGSiteConstants.ENV_STORAGE_ELEMENT_ID: return envStorageElement; case MYOSGSiteConstants.ENV_WN_TMP_ID: return envWnTmp; } return super.getProperty(ID); } /** * Sets the property of Site Catalog resource * * @param ID * property ID * @param value * property value */ public void setProperty(String ID, Object value) { if (ID.equals(ENV_GLOBUS_LOCATION_TAG)) { envGlobusLocation = (String) value; } else if (ID.equals(ENV_APP_TAG)) { envApp = (String) value; } else if (ID.equals(ENV_DATA_TAG)) { envData = (String) value; } else if (ID.equals(ENV_DEFAULT_SE_TAG)) { envDefaultSe = (String) 
value; } else if (ID.equals(ENV_GLEXEC_LOCATION_TAG)) { envGlexecLocation = (String) value; } else if (ID.equals(ENV_GRID_TAG)) { envGrid = (String) value; } else if (ID.equals(ENV_HOSTNAME_TAG)) { envHostName = (String) value; } else if (ID.equals(ENV_JOB_CONTACT_TAG)) { envJobContact = (String) value; } else if (ID.equals(ENV_LOCATION_TAG)) { envLocation = (String) value; } else if (ID.equals(ENV_SITE_NAME_TAG)) { envSiteName = (String) value; } else if (ID.equals(ENV_SITE_READ_TAG)) { envSiteRead = (String) value; } else if (ID.equals(ENV_SITE_WRITE_TAG)) { envSiteWrite = (String) value; } else if (ID.equals(ENV_SQUID_LOCATION_TAG)) { envSquidLocation = (String) value; } else if (ID.equals(ENV_STORAGE_ELEMENT_TAG)) { envStorageElement = (String) value; } else if (ID.equals(ENV_WN_TMP_TAG)) { envWnTmp = (String) value; } } /** * Add child resources to a site catalog resource * * @param ID child resource */ public void addChildResource(AbstractSiteCatalogResource ID) { } public String toString() { return "Environment Info :- " + envApp; } } ././@LongLink0000000000000000000000000000015400000000000011565 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteInfoFacade.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteInfoFaca0000644000175000017500000002575511757531137032457 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 *
 */
package edu.isi.pegasus.planner.catalog.site.impl.myosg.classes;

import java.util.Iterator;
import java.util.List;

import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.MYOSGSiteConstants;

public class MYOSGSiteInfoFacade {

    MYOSGSiteInfo myOsgSiteInfo;

    private String shortname;      //shortname
    private String app_loc;        //app_loc
    private String data_loc;       //data_loc
    private String osg_grid;       //osg_grid
    private String wntmp_loc;      //wntmp_loc
    private String tmp_loc;        //tmp_loc
    private String gatekeeper;     //gatekeeper
    private String gk_port;        //gk_port
    private String gsiftp_server;  //gsiftp_server
    private String gsiftp_port;    //gsiftp_port
    private String sponsor_vo;     //sponsor_vo
    private String vdt_version;    //vdt_version
    private String globus_loc;     //globus_loc --if globus_loc is empty then use osg_grid
    private String exec_jm;        //exec_jm -- compute
    private String util_jm;        //util_jm -- transfer
    private String grid_services;  //grid_services
    private String app_space;      //app_space -- saved for later use
    private String data_space;     //data_space -- saved for later use
    private String tmp_space;      //tmp_space -- saved for later use

    public MYOSGSiteInfoFacade(MYOSGSiteInfo myOsgSiteInfo){
        this.myOsgSiteInfo = myOsgSiteInfo;
        init(this.myOsgSiteInfo);
    }

    public String getSitesMissingInformation(String siteName){
        String missingInformation = "Following environment variables are not specified for the site " + siteName;
        if(data_loc.equals("")){
            missingInformation += " $OSG_DATA ";
        }
        if(globus_loc.equals("")){
            missingInformation += " $GLOBUS_LOCATION ";
        }
        if(exec_jm.equals("")){
            missingInformation += " $OSG_JOB_CONTACT ";
        }
        return missingInformation;
    }

    /*
     * Checks if the site has all the required information.
     */
    public boolean isValidSite(){
        if( data_loc.equals("") || globus_loc.equals("") || exec_jm.equals("")){
            return false;
        }
        return true;
    }

    private void init(MYOSGSiteInfo myOsgSiteInfo){
        List<MYOSGSiteResourceInfo> myOSGSiteResourceInfoList =
            (List<MYOSGSiteResourceInfo>)myOsgSiteInfo.getProperty(MYOSGSiteConstants.RESOURCE_LIST_ID);
        MYOSGSiteResourceInfo myOSGSiteResourceInfo = myOSGSiteResourceInfoList.get(0);
        List<MYOSGSiteServiceInfo> myOSGSiteServiceInfoList =
            (List<MYOSGSiteServiceInfo>)myOSGSiteResourceInfo.getProperty(MYOSGSiteConstants.SERVICE_LIST_ID);
        MYOSGSiteServiceInfo myOSGSiteServiceInfoCE = getCE(myOSGSiteServiceInfoList);
        MYOSGSiteServiceInfo myOSGSiteServiceInfoGridFtp = getGridFtp(myOSGSiteServiceInfoList);
        MYOSGSiteEnvironmentInfo myOSGSiteEnvironmentInfo =
            (MYOSGSiteEnvironmentInfo)myOSGSiteResourceInfo.getProperty(MYOSGSiteConstants.ENVIRONMENT_INFO_ID);
        MYOSGSiteVOOwnershipInfo myOSiteVOOwnershipInfo =
            (MYOSGSiteVOOwnershipInfo)myOSGSiteResourceInfo.getProperty(MYOSGSiteConstants.VO_OWNERSHIP_INFO_ID);

        shortname = (String)myOsgSiteInfo.getProperty(MYOSGSiteConstants.SITE_NAME_ID);                    //shortname
        app_loc = (String)myOSGSiteEnvironmentInfo.getProperty(MYOSGSiteConstants.ENV_APP_ID);             //app_loc
        data_loc = (String)myOSGSiteEnvironmentInfo.getProperty(MYOSGSiteConstants.ENV_DATA_ID);           //data_loc
        osg_grid = (String)myOSGSiteEnvironmentInfo.getProperty(MYOSGSiteConstants.ENV_GRID_ID);           //osg_grid
        wntmp_loc = (String)myOSGSiteEnvironmentInfo.getProperty(MYOSGSiteConstants.ENV_WN_TMP_ID);        //wntmp_loc
        tmp_loc = "/tmp";                                                                                  //tmp_loc
        gatekeeper = (String)myOSGSiteEnvironmentInfo.getProperty(MYOSGSiteConstants.ENV_JOB_CONTACT_ID);  //gatekeeper

        //parse gk_port from the CE service URI only when a CE service with a
        //non-empty URI is present; otherwise default to 2119 below
        if(myOSGSiteServiceInfoCE != null &&
           !((String)myOSGSiteServiceInfoCE.getProperty(MYOSGSiteConstants.SERVICE_URI_ID)).equals("")){
            gk_port = ((String)myOSGSiteServiceInfoCE.getProperty(MYOSGSiteConstants.SERVICE_URI_ID)).split(":")[1]; //gk_port
        }
        else{
gk_port ="2119"; } if(myOSGSiteServiceInfoGridFtp == null || ((String)myOSGSiteServiceInfoGridFtp.getProperty(MYOSGSiteConstants.SERVICE_URI_ID)).equals("")){ gsiftp_server =""; }else{ gsiftp_server = ((String)myOSGSiteServiceInfoGridFtp.getProperty(MYOSGSiteConstants.SERVICE_URI_ID)); } if(myOSGSiteServiceInfoGridFtp == null || ((String)myOSGSiteServiceInfoGridFtp.getProperty(MYOSGSiteConstants.SERVICE_URI_ID)).equals("")){ gsiftp_port = "2811"; } else{ gsiftp_port = ((String)myOSGSiteServiceInfoGridFtp.getProperty(MYOSGSiteConstants.SERVICE_URI_ID)).split(":")[1]; // DEFAULT to 2811 //gsiftp_port } sponsor_vo = (myOSiteVOOwnershipInfo == null)?"":(String)myOSiteVOOwnershipInfo.getProperty(MYOSGSiteConstants.VO_OWN_VO_ID); //sponsor_vo // GET from VO ownership vdt_version = ""; //vdt_version globus_loc = (String)myOSGSiteEnvironmentInfo.getProperty(MYOSGSiteConstants.ENV_GLOBUS_LOCATION_ID); //globus_loc --if globus_loc is empty then use osg_grid exec_jm = (String)myOSGSiteEnvironmentInfo.getProperty(MYOSGSiteConstants.ENV_JOB_CONTACT_ID); //exec_jm -- compute util_jm = getUtilJobManager(exec_jm); //util_jm -- transfer grid_services = ""; //grid_services app_space = ""; //app_space -- saved for later use data_space = ""; //data_space -- saved for later use tmp_space = ""; //tmp_space -- saved for later use } /** * Returns the utility job manager path * @param exec_jm execute job manager path * @return utility job manager path */ private String getUtilJobManager(String exec_jm){ String jobManager =""; if(exec_jm.equals("")) { return jobManager; } int index = exec_jm.lastIndexOf("/"); if(index != -1){ jobManager = exec_jm.substring(0, index) +"/jobmanager-fork"; } return jobManager; } /** * Returns the compute element(CE) * @param myOSGSiteServiceInfoList service list * @return compute element */ private MYOSGSiteServiceInfo getCE( List myOSGSiteServiceInfoList) { Iterator iterator = myOSGSiteServiceInfoList .iterator(); while (iterator.hasNext()) { MYOSGSiteServiceInfo serviceInfo = iterator.next(); if (serviceInfo.getProperty(MYOSGSiteConstants.SERVICE_NAME_ID) .equals("CE")) { return serviceInfo; } } return null; } /** * Returns the GridFtp * @param myOSGSiteServiceInfoList service list * @return Grid Ftp */ private MYOSGSiteServiceInfo getGridFtp( List myOSGSiteServiceInfoList) { Iterator iterator = myOSGSiteServiceInfoList .iterator(); while (iterator.hasNext()) { MYOSGSiteServiceInfo serviceInfo = iterator.next(); if (serviceInfo.getProperty(MYOSGSiteConstants.SERVICE_NAME_ID) .equals("GridFtp")) { return serviceInfo; } } return null; } public void print(){ System.out.println("***********************************************"); System.out.println("shortname " + shortname); System.out.println("app_loc " + app_loc); System.out.println("data_loc " + data_loc); System.out.println("osg_grid " + osg_grid); System.out.println("tmp_loc " + tmp_loc); System.out.println("wntmp_loc " + wntmp_loc); System.out.println("gatekeeper " + gatekeeper); System.out.println("gk_port " + gk_port); System.out.println("gsiftp_port " + gsiftp_port); System.out.println("sponsor_vo " + sponsor_vo); System.out.println("vdt_version " + vdt_version); System.out.println("globus_loc " + globus_loc); System.out.println("exec_jm " + exec_jm); System.out.println("util_jm " + util_jm); System.out.println("grid_services " + grid_services); System.out.println("app_space " + app_space); System.out.println("data_space " + data_space); System.out.println("tmp_space " + tmp_space); 
System.out.println("***********************************************"); } public String getShortname() { return shortname; } public void setShortname(String shortname) { this.shortname = shortname; } public String getApp_loc() { return app_loc; } public void setApp_loc(String app_loc) { this.app_loc = app_loc; } public String getData_loc() { return data_loc; } public void setData_loc(String data_loc) { this.data_loc = data_loc; } public String getOsg_grid() { return osg_grid; } public void setOsg_grid(String osg_grid) { this.osg_grid = osg_grid; } public String getWntmp_loc() { return wntmp_loc; } public void setWntmp_loc(String wntmp_loc) { this.wntmp_loc = wntmp_loc; } public String getTmp_loc() { return tmp_loc; } public void setTmp_loc(String tmp_loc) { this.tmp_loc = tmp_loc; } public String getGatekeeper() { return gatekeeper; } public void setGatekeeper(String gatekeeper) { this.gatekeeper = gatekeeper; } public String getGk_port() { return gk_port; } public void setGk_port(String gk_port) { this.gk_port = gk_port; } public String getGsiftp_port() { return gsiftp_port; } public void setGsiftp_port(String gsiftp_port) { this.gsiftp_port = gsiftp_port; } public String getSponsor_vo() { return sponsor_vo; } public void setSponsor_vo(String sponsor_vo) { this.sponsor_vo = sponsor_vo; } public String getVdt_version() { return vdt_version; } public void setVdt_version(String vdt_version) { this.vdt_version = vdt_version; } public String getGlobus_loc() { return globus_loc; } public void setGlobus_loc(String globus_loc) { this.globus_loc = globus_loc; } public String getExec_jm() { return exec_jm; } public void setExec_jm(String exec_jm) { this.exec_jm = exec_jm; } public String getUtil_jm() { return util_jm; } public void setUtil_jm(String util_jm) { this.util_jm = util_jm; } public String getGrid_services() { return grid_services; } public void setGrid_services(String grid_services) { this.grid_services = grid_services; } public String getApp_space() { return app_space; } public void setApp_space(String app_space) { this.app_space = app_space; } public String getData_space() { return data_space; } public void setData_space(String data_space) { this.data_space = data_space; } public String getTmp_space() { return tmp_space; } public void setTmp_space(String tmp_space) { this.tmp_space = tmp_space; } public String getGsiftp_server() { return gsiftp_server; } public void setGsiftp_server(String gsiftp_server) { this.gsiftp_server = gsiftp_server; } } ././@LongLink0000000000000000000000000000016100000000000011563 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteVOOwnershipInfo.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/MYOSGSiteVOOwners0000644000175000017500000000425211757531137032520 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ package edu.isi.pegasus.planner.catalog.site.impl.myosg.classes; import edu.isi.pegasus.planner.catalog.site.impl.myosg.util.MYOSGSiteConstants; /** * This class extends AbstractSiteCatalogResource and stores MYOSG Site service Information * @author prasanth * */ public class MYOSGSiteVOOwnershipInfo extends AbstractSiteCatalogResource{ private static final String VO_OWN_PERCENT_TAG ="Percent"; private static final String VO_OWN_VO_TAG ="VO"; private String percent =""; private String vo=""; public MYOSGSiteVOOwnershipInfo(int depth){ setDepth(depth); } /** * Returns the property value * @param ID * @return property value */ public Object getProperty(int ID) { switch(ID){ case MYOSGSiteConstants.VO_OWN_PERCENT_ID: return percent ; case MYOSGSiteConstants.VO_OWN_VO_ID: return vo; } return super.getProperty(ID); } /** * Sets the property of Site Catalog resource * @param ID property ID * @param value property value */ public void setProperty(String ID, Object value) { if(ID.equals(VO_OWN_PERCENT_TAG)){ percent = (String)value; }else if(ID.equals(VO_OWN_VO_TAG)){ vo = (String)value; } } /** * Add child resources to a site catalog resource * @param childResource child resource */ public void addChildResource(AbstractSiteCatalogResource childResource) { } public String toString(){ String info ="VO OWNERSHIP :- "+ percent +" , "+ vo ; return info; } } ././@LongLink0000000000000000000000000000016400000000000011566 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/AbstractSiteCatalogResource.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/classes/AbstractSiteCatal0000644000175000017500000000330211757531137032662 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.impl.myosg.classes; /** * This class defines a AbstractSiteCatalogResoure type. * @author prasanth * */ public abstract class AbstractSiteCatalogResource { /** * */ protected int mDepth; /** * Add child resources to a site catalog resource * @param childResource child resource */ abstract public void addChildResource(AbstractSiteCatalogResource childResource); /** * Set the depth * * @param depth */ public void setDepth(int depth){ mDepth = depth; } /** * * @return the depth */ public int getDepth(){ return mDepth; } /** * Sets the property of Site Catalog resource * @param ID property ID * @param value property value */ public void setProperty(String ID , Object value){ } /** * Returns the property value * @param ID * @return object. 
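 *         The base implementation simply returns null; subclasses switch on
 *         the integer IDs defined in MYOSGSiteConstants (for example
 *         SITE_NAME_ID = 100) and fall back to super.getProperty(ID) for
 *         IDs they do not handle.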
*/ public Object getProperty(int ID){ return null; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/0000755000175000017500000000000011757531667026734 5ustar ryngerynge././@LongLink0000000000000000000000000000015000000000000011561 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/MYOSGSiteConstants.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/MYOSGSiteConstants.j0000644000175000017500000000303011757531137032513 0ustar ryngeryngepackage edu.isi.pegasus.planner.catalog.site.impl.myosg.util; public interface MYOSGSiteConstants { public static final int SITE_NAME_ID = 100; public static final int RESOURCE_LIST_ID = 101; public static final int RESOURCE_ID_ID =201; public static final int RESOURCE_NAME_ID =202; public static final int RESOURCE_DESCRIPTION_ID =203; public static final int SERVICE_LIST_ID =204; public static final int ENVIRONMENT_INFO_ID =205; public static final int VO_OWNERSHIP_INFO_ID =206; public static final int SERVICE_URI_ID =301; public static final int SERVICE_ID_ID = 302; public static final int SERVICE_NAME_ID =303; public static final int SERVICE_DESCRIPTION_ID =304; public static final int ENV_GLOBUS_LOCATION_ID = 401; public static final int ENV_APP_ID =402; public static final int ENV_DATA_ID =403; public static final int ENV_DEFAULT_SE_ID =404; public static final int ENV_GLEXEC_LOCATION_ID =405; public static final int ENV_GRID_ID =406; public static final int ENV_HOSTNAME_ID =407; public static final int ENV_JOB_CONTACT_ID = 408; public static final int ENV_LOCATION_ID =409; public static final int ENV_SITE_NAME_ID = 410; public static final int ENV_SITE_READ_ID =411; public static final int ENV_SITE_WRITE_ID =412; public static final int ENV_SQUID_LOCATION_ID =413; public static final int ENV_STORAGE_ELEMENT_ID =414; public static final int ENV_WN_TMP_ID =415; public static final int VO_OWN_PERCENT_ID =501; public static final int VO_OWN_VO_ID =502; } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/SiteScrapper.java0000644000175000017500000000254111757531137032175 0ustar ryngeryngepackage edu.isi.pegasus.planner.catalog.site.impl.myosg.util; import java.io.BufferedReader; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.PrintWriter; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; /** * Utility class that scrapes content from a website * @author prasanth * */ public class SiteScrapper { /** * Scarps a web site and stores it in a file * @param urlString URL * @param outputFilePath file name to store the content. 
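     *
     * A minimal, hypothetical invocation (the URL and output path below are
     * made-up placeholders, not values used anywhere in Pegasus):
     *
     *   SiteScrapper.scrapeSite("http://example.org/sites.xml", "/tmp/sites.xml");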
     */
    public static void scrapeSite(String urlString, String outputFilePath) {
        URL url = null;
        URLConnection urlConnection = null;
        BufferedReader in = null;
        PrintWriter out = null;
        String line = null;
        try {
            url = new URL(urlString);
            urlConnection = url.openConnection();
            in = new BufferedReader(new InputStreamReader(urlConnection.getInputStream()));
            out = new PrintWriter(new FileOutputStream(outputFilePath));
            //note: readLine() strips line terminators, so the content is
            //written out as a single long line
            while ((line = in.readLine()) != null) {
                out.print(line);
            }
        } catch (MalformedURLException e) {
            e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            //guard against the streams never having been opened
            try {
                if (in != null) {
                    in.close();
                }
            } catch (IOException e) {
                e.printStackTrace();
            }
            if (out != null) {
                out.close();
            }
        }
        return;
    }
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/DateUtils.java0000644000175000017500000000246711757531137031476 0ustar ryngerynge/*
 *
 *   Copyright 2007-2008 University Of Southern California
 *
 *   Licensed under the Apache License, Version 2.0 (the "License");
 *   you may not use this file except in compliance with the License.
 *   You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 *   Unless required by applicable law or agreed to in writing,
 *   software distributed under the License is distributed on an "AS IS" BASIS,
 *   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *   See the License for the specific language governing permissions and
 *   limitations under the License.
 *
 */
package edu.isi.pegasus.planner.catalog.site.impl.myosg.util;

import java.text.SimpleDateFormat;
import java.util.Calendar;

public class DateUtils {

    //24 x 60 x 60 -- note: 86400 is the number of seconds in a day,
    //despite the MILLS in the constant's name
    private static final int ONE_DAY_IN_MILLS = 86400;

    public static String now(String dateFormat) {
        Calendar cal = Calendar.getInstance();
        SimpleDateFormat sdf = new SimpleDateFormat(dateFormat);
        return sdf.format(cal.getTime());
    }

    public static String after(int days,String dateFormat) {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DATE, days);
        SimpleDateFormat sdf = new SimpleDateFormat(dateFormat);
        return sdf.format(cal.getTime());
    }
}
././@LongLink0000000000000000000000000000014700000000000011567 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/URLParamConstants.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/URLParamConstants.ja0000644000175000017500000001035111757531137032560 0ustar ryngeryngepackage edu.isi.pegasus.planner.catalog.site.impl.myosg.util;

/**
 * This class contains the URL parameter IDs and utility methods for converting user input to param values.
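 *
 * For example (illustrative lookups against the tables below):
 * getVOID("ligo") returns 23, getGridTypeID("osg") returns 1, and both
 * return -1 for names that are not listed.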
* @author prasanth * */ public abstract class URLParamConstants { public static final int PARAM_SUMMARY_ATTRS_SHOWSERVICE=101; // &summary_attrs_showservice=on public static final int PARAM_SUMMARY_ATTRS_SHOWRSVSTATUS=102; // &summary_attrs_showrsvstatus=on public static final int PARAM_SUMMARY_ATTRS_SHOWFQDN=103; // &summary_attrs_showfqdn=on public static final int PARAM_SUMMARY_ATTRS_SHOWVOMEMBERSHIP=104; // &summary_attrs_showvomembership=on public static final int PARAM_SUMMARY_ATTRS_SHOWVOOWNERSHIP=105; // &summary_attrs_showvoownership=on public static final int PARAM_SUMMARY_ATTRS_SHOWENVIRONMNENT=106; // summary_attrs_showenv=on& public static final int PARAM_GIP_STATUS_ATTRS_SHOWTESTRESULTS=201; // &gip_status_attrs_showtestresults=on public static final int PARAM_GIP_STATUS_ATTRS_SHOWFQDN=202; // &gip_status_attrs_showfqdn=on public static final int PARAM_ACCOUNT_TYPE=203; // &account_type=cumulative_hours public static final int PARAM_CE_ACCOUNT_TYPE=204; // &ce_account_type=gip_vo public static final int PARAM_SE_ACCOUNT_TYPE=205; // &se_account_type=vo_transfer_volume public static final int PARAM_START_TYPE=206; // &start_type=7daysago public static final int PARAM_START_DATE=207; // &start_date=05%2F12%2F2009 public static final int PARAM_END_TYPE=208; // &end_type=now public static final int PARAM_END_DATE=209; // &end_date=05%2F19%2F2009 public static final int PARAM_RESOURCE_TO_DISPLAY_ALL_RESOURCES = 301; // &all_resources=on public static final int PARAM_FILTER_GRID_TYPE = 401; // &gridtype=on public static final int PARAM_FILTER_GRID_TYPE_OPTION = 402; // &gridtype_1=on public static final int PARAM_FILTER_CURRENT_RSV_STATUS = 403; // &status=on public static final int PARAM_FILTER_CURRENT_RSV_STATUS_OPTION = 404; // &status_1=on public static final int PARAM_FILTER_VO_SUPPORT = 405; // &vosup=on public static final int PARAM_FILTER_VO_SUPPORT_OPTION = 406; // &vosup_23=on public static final int PARAM_FILTER_ACTIVE_STATUS = 407; // &active=on public static final int PARAM_FILTER_ACTIVE_STATUS_OPTION = 408; // &active_value=1 public static final int PARAM_FILTER_DISABLE_STATUS = 409; // &disable=on public static final int PARAM_FILTER_DISABLE_STATUS_OPTION = 410; // &disable_value=1 public static final String[][] voNameID = { { "ALICE", "58" }, { "ATLAS", "35" }, { "CDF", "1" }, { "CIGI", "2" }, { "CMS", "3" }, { "COMPBIOGRID", "4" }, { "DES", "5" }, { "DOSAR", "6" }, { "DZERO", "7" }, { "ENGAGE", "8" }, { "FERMILAB", "9" }, { "FERMILABACCELERATOR", "51" }, { "FERMILABASTRO", "52" }, { "FERMILABCDMS", "49" }, { "FERMILABGRID", "37" }, { "FERMILABHYPERCP", "47" }, { "FERMILABKTEV", "50" }, { "FERMILABMINERVA", "48" }, { "FERMILABMINIBOONE", "45" }, { "FERMILABMINOS", "44" }, { "FERMILABMIPP", "46" }, { "FERMILABMU2E", "59" }, { "FERMILABNOVA", "53" }, { "FERMILABNUMI", "54" }, { "FERMILABPATRIOT", "55" }, { "FERMILABTEST", "43" }, { "FERMILABTHEORY", "56" }, { "GEANT4", "12" }, { "GLOW", "13" }, { "GPN", "14" }, { "GRASE", "15" }, { "GROW", "17" }, { "I2U2", "19" }, { "ICECUBE", "38" }, { "ILC", "20" }, { "JDEM", "57" }, { "LIGO", "23" }, { "MARIACHI", "24" }, { "MIS", "25" }, { "NANOHUB", "26" }, { "NEBIOGRID", "60" }, { "NWICG", "27" }, { "NYSGRID", "28" }, { "OPS", "29" }, { "OSG", "30" }, { "OSGEDU", "31" }, { "SBGRID", "32" }, { "SDSS", "33" }, { "STAR", "34" } }; // Store as all upper case public static int getVOID(String name){ String voCAPS = name.toUpperCase(); for(int i =0 ;i < voNameID.length;i++){ if(voNameID[i][0].equals(voCAPS)) { return 
Integer.parseInt(voNameID[i][1]); } } return -1; } public static final String[][] gridTypeID = { { "OSG", "1" }, {"OSG-ITB","2"}}; public static int getGridTypeID(String type){ String gridType = type.toUpperCase(); for(int i =0 ;i < gridTypeID.length;i++){ if(gridTypeID[i][0].equals(gridType)) { return Integer.parseInt(gridTypeID[i][1]); } } return -1; } } ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/MYOSGSiteCatalogUtil.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/MYOSGSiteCatalogUtil0000644000175000017500000003633011757531137032530 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.impl.myosg.util; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.catalog.site.classes.FileServer; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.HeadNodeFS; import edu.isi.pegasus.planner.catalog.site.classes.HeadNodeScratch; import edu.isi.pegasus.planner.catalog.site.classes.HeadNodeStorage; import edu.isi.pegasus.planner.catalog.site.classes.InternalMountPoint; import edu.isi.pegasus.planner.catalog.site.classes.LocalDirectory; import edu.isi.pegasus.planner.catalog.site.classes.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.site.classes.SharedDirectory; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.WorkerNodeFS; import edu.isi.pegasus.planner.catalog.site.classes.WorkerNodeScratch; import edu.isi.pegasus.planner.catalog.site.classes.WorkerNodeStorage; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway.SCHEDULER_TYPE; import edu.isi.pegasus.planner.catalog.site.impl.myosg.classes.MYOSGSiteInfoFacade; /** * Utility class for converting the Facade object to SiteCatalogEntry object * @author Pt * */ public class MYOSGSiteCatalogUtil { private static LogManager mLogger = LogManagerFactory.loadSingletonInstance(); public static String getGsiftp(MYOSGSiteInfoFacade sitInfo) { String host = sitInfo.getGsiftp_server(); String port = sitInfo.getGsiftp_port(); if(host == null || host.equals("")){ mLogger.log( "Gridftp hostname missing in site "+ sitInfo.getShortname() +" .Using gatekeeper entry.", LogManager.CONFIG_MESSAGE_LEVEL); host = (sitInfo.getGatekeeper().split("/"))[0]; } if(port == null || port.equals("")){ mLogger.log( "Gridftp hostname missing in site " + sitInfo.getShortname() + " .Using default 2811.", LogManager.CONFIG_MESSAGE_LEVEL); port = "2811"; } if(port.equals("2811")){ return "gsiftp://" + host; } else{ return "gsiftp://" + host + ":" + port; } } /** * Creates a Pegasus SiteCatalogEntry 
object from the information
 * in MyOSG.
 *
 * The following conventions are followed for determining the worker node
 * and storage node directories.
 *
 *
             * head node shared -> data_loc
             * head node local -> tmp_loc
             * worker node shared ->data_loc
             * worker node local -> wntmp_loc
             * 
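              * For example (an assumed, purely illustrative site definition):
              * with data_loc=/osg/data, tmp_loc=/tmp and wntmp_loc=/scratch,
              * the head node shared and worker node shared directories point
              * at /osg/data, the head node local directory at /tmp, and the
              * worker node local directory at /scratch.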
* * @param sitInfo * @return SiteCatalogEntry object. */ public static SiteCatalogEntry createSiteCatalogEntry(MYOSGSiteInfoFacade sitInfo){ mLogger.logEventStart(LoggingKeys.EVENT_PEGASUS_PARSE_SITE_CATALOG, "site-catalog.id", sitInfo.getShortname(), LogManager.DEBUG_MESSAGE_LEVEL); SiteCatalogEntry entry = new SiteCatalogEntry( sitInfo.getShortname()); entry.setHeadNodeFS( createHeadNodeFS(sitInfo) ); entry.setWorkerNodeFS( createWorkerNodeFS(sitInfo) ); //associate a replica catalog with the site. ReplicaCatalog rc = new ReplicaCatalog( "rls://replica.isi.edu", "RLS" ); rc.addAlias( sitInfo.getShortname()); //rc.addConnection( new Connection("ignore.lrc", "rls://replica.caltech.edu" )); entry.addReplicaCatalog( rc ); //associate some profiles entry.addProfile( new Profile( Profile.ENV, "PEGASUS_HOME", ((sitInfo.getOsg_grid() != null)?sitInfo.getOsg_grid():"") +"/pegasus")) ; entry.addProfile( new Profile( Profile.ENV, "app_loc",((sitInfo.getApp_loc() != null)?sitInfo.getApp_loc():"/")) ); entry.addProfile( new Profile( Profile.ENV, "data_loc", ((sitInfo.getData_loc() != null)?sitInfo.getData_loc():"/")) ); entry.addProfile( new Profile( Profile.ENV, "osg_grid", ((sitInfo.getOsg_grid() != null)?sitInfo.getOsg_grid():"/")) ); entry.addProfile( new Profile( Profile.ENV, "tmp_loc", ((sitInfo.getTmp_loc() != null)?sitInfo.getTmp_loc():"/")) ); entry.addProfile( new Profile( Profile.ENV, "wntmp_loc", ((sitInfo.getWntmp_loc() != null)?sitInfo.getWntmp_loc():"/")) ); //entry.addProfile( new Profile( Profile.VDS, "grid", ((sitInfo.getVoInfo().getGrid() != null)?sitInfo.getVoInfo().getGrid():"")) ); // TODO Check Pt // entry.addProfile( new Profile( Profile.VDS, "grid", ((sitInfo.getGrid() != null)?sitInfo.getGrid():"")) ); //associate grid gateway for auxillary and compute jobs /* GridGateway gw = new GridGateway( GridGateway.TYPE.gt2, ((sitInfo.getUtil_jm() != null)? sitInfo.getUtil_jm(): (sitInfo.getVoInfo().getGatekeeper().split(":"))[0] + "/jobmanager-fork"), getSchedulerType(sitInfo.getUtil_jm()) );*/ GridGateway gw = new GridGateway( GridGateway.TYPE.gt2, sitInfo.getUtil_jm(), getSchedulerType(sitInfo.getUtil_jm()) ); gw.setJobType( GridGateway.JOB_TYPE.auxillary ); entry.addGridGateway( gw ); if( gw.getScheduler() == GridGateway.SCHEDULER_TYPE.Fork ){ //add the headnode globus location entry.addProfile( new Profile( Profile.ENV, "GLOBUS_LOCATION", ((sitInfo.getGlobus_loc() != null)?sitInfo.getGlobus_loc():"/") ) ); entry.addProfile( new Profile( Profile.ENV, "LD_LIBRARY_PATH", ((sitInfo.getGlobus_loc() != null)?sitInfo.getGlobus_loc():"") + "/lib") ); } else{ mLogger.log( "Creating globus location on basis of OSG_GRID for site " + entry.getSiteHandle() , LogManager.DEBUG_MESSAGE_LEVEL ); String wn = sitInfo.getOsg_grid(); String globus = ( wn == null )? "/globus" : wn + "/globus"; entry.addProfile( new Profile( Profile.ENV, "GLOBUS_LOCATION", globus ) ); entry.addProfile( new Profile( Profile.ENV, "LD_LIBRARY_PATH", globus + "/lib" )); } /* gw = new GridGateway( GridGateway.TYPE.gt2, ((sitInfo.getExec_jm() != null)? 
sitInfo.getExec_jm(): (sitInfo.getVoInfo().getGatekeeper().split(":"))[0] + "/jobmanager-fork"), getSchedulerType(sitInfo.getExec_jm()) );*/ gw = new GridGateway( GridGateway.TYPE.gt2, sitInfo.getExec_jm(), getSchedulerType(sitInfo.getExec_jm()) ); gw.setJobType( GridGateway.JOB_TYPE.compute ); entry.addGridGateway( gw ); mLogger.logEventCompletion(LogManager.DEBUG_MESSAGE_LEVEL); return entry; } private static SCHEDULER_TYPE getSchedulerType(String url) { if(url == null){ return GridGateway.SCHEDULER_TYPE.Fork ; } if( url.endsWith( "condor" ) ){ return GridGateway.SCHEDULER_TYPE.Condor ; } else if( url.endsWith( "fork" ) ){ return GridGateway.SCHEDULER_TYPE.Fork ; } else if( url.endsWith( "pbs" ) ){ return GridGateway.SCHEDULER_TYPE.PBS ; } else if( url.endsWith( "lsf" ) ){ return GridGateway.SCHEDULER_TYPE.LSF ; } else if( url.endsWith( "sge" ) ){ return GridGateway.SCHEDULER_TYPE.SGE; } //if nothing is there than return fork return GridGateway.SCHEDULER_TYPE.Fork ; } /** * Creates an object describing the head node filesystem. * * The following conventions are followed. *
	     *	shared:
	     *	    scratch data_loc
	     *	    storage data_loc
	     *	local:   
	     *	    scratch tmp_loc
	     *	    storage tmp_loc
             * 
             * 
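             * A minimal usage sketch, mirroring the call made in
             * createSiteCatalogEntry:
             *
             *   HeadNodeFS hfs = MYOSGSiteCatalogUtil.createHeadNodeFS( sitInfo );
             *   entry.setHeadNodeFS( hfs );
             * 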
* * @return the HeadNodeFS */ public static HeadNodeFS createHeadNodeFS(MYOSGSiteInfoFacade sitInfo){ // describe the head node filesystem HeadNodeFS hfs = new HeadNodeFS(); //head node scratch description start HeadNodeScratch hscratch = new HeadNodeScratch(); //head node local scratch description LocalDirectory hscratchLocal = new LocalDirectory(); String directory = (sitInfo.getTmp_loc() != null)?sitInfo.getTmp_loc():"/"; FileServer f = new FileServer( "gsiftp", getGsiftp(sitInfo), directory ); hscratchLocal.addFileServer( f ); //no distinction between internal and external view hscratchLocal.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); //head node shared scratch description SharedDirectory hscratchShared = new SharedDirectory(); directory = (sitInfo.getData_loc() != null)?sitInfo.getData_loc():"/"; f = new FileServer( "gsiftp", getGsiftp(sitInfo), directory ); hscratchShared.addFileServer( f ); //no distinction between internal and external view hscratchShared.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); hscratch.setLocalDirectory( hscratchLocal ); hscratch.setSharedDirectory( hscratchShared ); //head node scratch description ends //head node storage description start HeadNodeStorage hstorage = new HeadNodeStorage(); //head node local storage description LocalDirectory hstorageLocal = new LocalDirectory(); directory = (sitInfo.getTmp_loc() != null)?sitInfo.getTmp_loc():"/" ; f = new FileServer( "gsiftp", getGsiftp(sitInfo), directory ); hstorageLocal.addFileServer( f ); //internal and external view is same hstorageLocal.setInternalMountPoint( new InternalMountPoint( directory, "30G", "100G") ); //head node shared storage description SharedDirectory hstorageShared = new SharedDirectory(); directory = (sitInfo.getData_loc() != null)?sitInfo.getData_loc():"/"; f = new FileServer( "gsiftp", getGsiftp(sitInfo), directory ); hstorageShared.addFileServer( f ); //no distinction between internal and external view hstorageShared.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); hstorage.setLocalDirectory( hstorageLocal ); hstorage.setSharedDirectory( hstorageShared ); //head node storage description ends hfs.setScratch( hscratch ); hfs.setStorage( hstorage ); return hfs; } /** * Creates an object describing the worker node filesystem. * * The following conventions are followed. *
             *  shared:
	     *	    scratch data_loc
	     *	    storage data_loc
	     *	local:
	     *	    scratch wntmp_loc
	     *	    storage wntmp_loc
             * 
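             * A minimal usage sketch, mirroring the call made in
             * createSiteCatalogEntry:
             *
             *   WorkerNodeFS wfs = MYOSGSiteCatalogUtil.createWorkerNodeFS( sitInfo );
             *   entry.setWorkerNodeFS( wfs );
             * 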
* * @return the WorkerNodeFS * */ public static WorkerNodeFS createWorkerNodeFS(MYOSGSiteInfoFacade sitInfo){ // describe the head node filesystem WorkerNodeFS wfs = new WorkerNodeFS(); //worker node scratch description start WorkerNodeScratch wscratch = new WorkerNodeScratch(); //worker node local scratch description LocalDirectory wscratchLocal = new LocalDirectory(); String directory = (sitInfo.getWntmp_loc() != null)?sitInfo.getWntmp_loc():"/"; FileServer f = new FileServer( "file", "file:///", directory ); wscratchLocal.addFileServer( f ); //no distinction between internal and external view wscratchLocal.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); //worker node shared scratch description SharedDirectory wscratchShared = new SharedDirectory(); directory = (sitInfo.getData_loc() != null)?sitInfo.getData_loc():"/"; f = new FileServer( "file", "file:///", directory ); //no distinction between internal and external view wscratchShared.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); wscratch.setLocalDirectory( wscratchLocal ); wscratch.setSharedDirectory( wscratchShared ); //head node scratch description ends //worker node storage description start WorkerNodeStorage wstorage = new WorkerNodeStorage(); //worker node local scratch description LocalDirectory wstorageLocal = new LocalDirectory(); directory = (sitInfo.getWntmp_loc() != null)?sitInfo.getWntmp_loc():"/"; f = new FileServer( "file", "file:///", directory ); wstorageLocal.addFileServer( f ); //no distinction between internal and external view wstorageLocal.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); //worker node shared scratch description SharedDirectory wstorageShared = new SharedDirectory(); directory = (sitInfo.getData_loc() != null)?sitInfo.getData_loc():"/" ; f = new FileServer( "file", "file:///", directory ); //no distinction between internal and external view wstorageShared.setInternalMountPoint( new InternalMountPoint( directory, "50G", "100G") ); wstorage.setLocalDirectory( wstorageLocal ); wstorage.setSharedDirectory( wstorageShared ); //worker node scratch description ends //worker node storage description ends wfs.setScratch( wscratch ); wfs.setStorage( wstorage ); return wfs; } } ././@LongLink0000000000000000000000000000015400000000000011565 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/MYOSGSiteCatalogParser.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/MYOSGSiteCatalogPars0000644000175000017500000001556511757531137032527 0ustar ryngeryngepackage edu.isi.pegasus.planner.catalog.site.impl.myosg.util; import java.io.IOException; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.Stack; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.parser.Parser; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.catalog.site.impl.myosg.classes.AbstractSiteCatalogResource; import edu.isi.pegasus.planner.catalog.site.impl.myosg.classes.MYOSGSiteEnvironmentInfo; import edu.isi.pegasus.planner.catalog.site.impl.myosg.classes.MYOSGSiteInfo; import edu.isi.pegasus.planner.catalog.site.impl.myosg.classes.MYOSGSiteResourceInfo; import 
edu.isi.pegasus.planner.catalog.site.impl.myosg.classes.MYOSGSiteServiceInfo; import edu.isi.pegasus.planner.catalog.site.impl.myosg.classes.MYOSGSiteVOOwnershipInfo; /** * This class uses the Xerces SAX2 parser to validate and parse an XML. * * @author prasanth * */ public class MYOSGSiteCatalogParser extends Parser { public static final String RESOURCE_GROUP_TAG = "ResourceGroup"; public static final String SERVICE_TAG = "Service"; public static final String RESOURCE_TAG = "Resource"; public static final String ENV_TAG = "ENV"; public static final String VO_OWNERSHIP_TAG = "Ownership"; private int mDepth = 0; Stack elementStack = new Stack(); List siteList = new ArrayList(); /** * The handle to the log manager. */ private LogManager mLogger; /** * The set of sites that need to be parsed. */ private Set mSites; /** * A boolean indicating whether to load all sites. */ private boolean mLoadAll; /** * The default Constructor. * * * */ public MYOSGSiteCatalogParser() { this(PegasusProperties.nonSingletonInstance(),null); } public MYOSGSiteCatalogParser(List sites) { this(PegasusProperties.nonSingletonInstance(), sites); } public MYOSGSiteCatalogParser(PegasusProperties properties, List sites) { super(properties); mLogger = LogManagerFactory.loadSingletonInstance(); if (sites != null) { mSites = new HashSet(); for (Iterator it = sites.iterator(); it.hasNext();) { mSites.add(it.next()); } mLoadAll = mSites.contains("*"); } else { mLoadAll = true; } } public void endDocument() { } /** * The parser is at the end of an element. Triggers the association of * the child elements with the appropriate parent elements. * * @param uri is the URI of the namespace for the element * @param localName is the element name without namespace * @param name is the element name as it appears in the docment */ public void endElement(String uri, String localName, String name) throws SAXException { AbstractSiteCatalogResource resource = null; mDepth--; if (isStackedElement(name)) { resource = (AbstractSiteCatalogResource) elementStack.pop(); if (elementStack.isEmpty()) { if (loadSite(resource)) siteList.add(resource); } else { AbstractSiteCatalogResource parentResource = (AbstractSiteCatalogResource) elementStack .peek(); parentResource.addChildResource(resource); } } else { if (!elementStack.isEmpty()) { resource = (AbstractSiteCatalogResource) elementStack.peek(); if (resource.getDepth() == mDepth) resource.setProperty(name, mTextContent.toString().trim()); } } // reinitialize our cdata handler at end of each element mTextContent.setLength(0); } private boolean isStackedElement(String name) { if (name.equals(RESOURCE_GROUP_TAG) || name.equals(SERVICE_TAG) || name.equals(RESOURCE_TAG) || name.equals(ENV_TAG) || name.equals(VO_OWNERSHIP_TAG)) { return true; } return false; } /** * Whether to laod a site or not in the SiteStore * * @param site * the SiteCatalogEntry object. * * @return boolean */ private boolean loadSite(AbstractSiteCatalogResource site) { return (mLoadAll || mSites.contains(site .getProperty(MYOSGSiteConstants.SITE_NAME_ID))); } /** * Returns the XML schema namespace that a document being parsed conforms * to. * * @return the schema namespace */ public String getSchemaNamespace( ){ return null; } public String getSchemaLocation() { // No Schema supported return null; } /** * This method defines the action to take when the parser begins to parse * an element. 
* * @param uri is the URI of the namespace for the element * @param local is the element name without namespace * @param name is the element name as it appears in the docment * @param attrs has the names and values of all the attributes */ public void startElement(String uri, String local, String name, Attributes attrs) throws SAXException { mDepth++; AbstractSiteCatalogResource resource = null; if (name.equals(RESOURCE_GROUP_TAG)) { elementStack.push(new MYOSGSiteInfo(mDepth)); } else if (name.equals(SERVICE_TAG)) { elementStack.push(new MYOSGSiteServiceInfo(mDepth)); } else if (name.equals(RESOURCE_TAG)) { elementStack.push(new MYOSGSiteResourceInfo(mDepth)); } else if (name.equals(VO_OWNERSHIP_TAG)) { elementStack.push(new MYOSGSiteVOOwnershipInfo(mDepth)); } else if (name.equals(ENV_TAG)) { elementStack.push(new MYOSGSiteEnvironmentInfo(mDepth)); } } /** * The main method that starts the parsing. * * @param file the XML file to be parsed. */ public void startParser(String file) { try { mParser.parse(file); // sanity check if (mDepth != 0) { throw new RuntimeException( "Invalid stack depth at end of parsing " + mDepth); } } catch (IOException ioe) { mLogger.log("IO Error :" + ioe.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); } catch (SAXException se) { if (mLocator != null) { mLogger.log("Error in " + mLocator.getSystemId() + " at line " + mLocator.getLineNumber() + "at column " + mLocator.getColumnNumber() + " :" + se.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); } } } /** * Returns the site's list * * @return site's list */ public List getSites() { return this.siteList; } /** * Returns the number of sites parsed * * @return number of sites */ public int getNumberOfSites() { return this.siteList.size(); } } ././@LongLink0000000000000000000000000000014700000000000011567 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/MYOSGURLGenerator.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/util/MYOSGURLGenerator.ja0000644000175000017500000001114411757531137032371 0ustar ryngeryngepackage edu.isi.pegasus.planner.catalog.site.impl.myosg.util; import java.util.Iterator; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.Map.Entry; /** * This is an utility class which generates the MYOSG url. * * @author prasanth * */ public class MYOSGURLGenerator { private static String BASE_URL = "http://myosg.grid.iu.edu/wizardsummary/xml?"; /** * Returns the url * * @param paramProperties * list of parameterID-value pair * @return URL */ public String getURL(Properties paramProperties) { return BASE_URL + defaultParameters() + getConfigurableParameters(paramProperties); } /** * Returns the default parameters * * @return default parameters. 
*/ private String defaultParameters() { return "datasource=summary"; } /** * Parsers through the list of parameterID-value pair and returns the * parameters name value pair * * @param paramProperties * URL's paramID-value parameter pair * @return parameters name value pair */ private String getConfigurableParameters(Properties paramProperties) { Set entrySet = paramProperties.entrySet(); int paramID = 0; String configParam = ""; String paramValue = ""; for (Iterator it = entrySet.iterator(); it.hasNext();) { Map.Entry entry = (Entry) it.next(); paramID = Integer.parseInt(((String) entry.getKey())); paramValue = (String) entry.getValue(); configParam += "&" + getParam(paramID, paramValue); } return configParam; } /** * Returns the parameter name-value pair * * @param ID * parameter ID * @param value * parameter value * @return parameter name-value pair */ String getParam(int ID, String value) { String param = null; switch (ID) { case URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWSERVICE: param = "summary_attrs_showservice=" + value; break; case URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWRSVSTATUS: param = "summary_attrs_showrsvstatus=" + value; break; case URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWFQDN: param = "summary_attrs_showfqdn=" + value; break; case URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWVOMEMBERSHIP: param = "summary_attrs_showvomembership=" + value; break; case URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWVOOWNERSHIP: param = "summary_attrs_showvoownership=" + value; break; case URLParamConstants.PARAM_SUMMARY_ATTRS_SHOWENVIRONMNENT: param = "summary_attrs_showenv=" + value; break; case URLParamConstants.PARAM_GIP_STATUS_ATTRS_SHOWTESTRESULTS: param = "gip_status_attrs_showtestresults=" + value; break; case URLParamConstants.PARAM_GIP_STATUS_ATTRS_SHOWFQDN: param = "gip_status_attrs_showfqdn=" + value; break; case URLParamConstants.PARAM_ACCOUNT_TYPE: param = "account_type=" + value; break; case URLParamConstants.PARAM_CE_ACCOUNT_TYPE: param = "ce_account_type=" + value; break; case URLParamConstants.PARAM_SE_ACCOUNT_TYPE: param = "se_account_type=" + value; break; case URLParamConstants.PARAM_START_TYPE: param = "start_type=" + value; break; case URLParamConstants.PARAM_START_DATE: param = "start_date=" + value; break; case URLParamConstants.PARAM_END_TYPE: param = "end_type=" + value; break; case URLParamConstants.PARAM_END_DATE: param = "end_date=" + value; break; case URLParamConstants.PARAM_RESOURCE_TO_DISPLAY_ALL_RESOURCES: param = "all_resources=" + value; break; case URLParamConstants.PARAM_FILTER_GRID_TYPE: param = "gridtype=" + value; break; case URLParamConstants.PARAM_FILTER_GRID_TYPE_OPTION: param = "gridtype_" + value + "=" + "on"; break; case URLParamConstants.PARAM_FILTER_CURRENT_RSV_STATUS: param = "status=" + value; break; case URLParamConstants.PARAM_FILTER_CURRENT_RSV_STATUS_OPTION: param = "status_" + value + "=" + "on"; break; case URLParamConstants.PARAM_FILTER_VO_SUPPORT: param = "vosup=" + value; break; case URLParamConstants.PARAM_FILTER_VO_SUPPORT_OPTION: param = "vosup_" + value + "=" + "on"; break; case URLParamConstants.PARAM_FILTER_ACTIVE_STATUS: param = "active=" + value; break; case URLParamConstants.PARAM_FILTER_ACTIVE_STATUS_OPTION: param = "active_value=" + value; break; case URLParamConstants.PARAM_FILTER_DISABLE_STATUS: param = "disable=" + value; break; case URLParamConstants.PARAM_FILTER_DISABLE_STATUS_OPTION: param = "disable_value=" + value; break; } return param; } } 
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/test/0000755000175000017500000000000011757531667026736 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/impl/myosg/test/TestMyOSG.java0000644000175000017500000000423311757531137031371 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site.impl.myosg.test; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.catalog.site.impl.MYOSG; import edu.isi.pegasus.planner.common.PegasusProperties; import java.util.ArrayList; import java.util.List; import java.util.Properties; /** * Testing class to test the MyOSG implementation. * * @author vahi */ public class TestMyOSG { private static final String TMP_FILE_NAME ="MYOSG_SC.xml"; private static final String DATE_FORMAT ="MM/dd/yyyy"; /** * @param args */ public static void main(String[] args) { LogManager logger = LogManagerFactory.loadSingletonInstance( PegasusProperties.nonSingletonInstance() ); logger.logEventStart( "event.pegasus.test.myosg", "planner.version", "test" ); MYOSG myOSG = new MYOSG(); Properties properties = new Properties(); /* dont delete the tmp file created to store the xml * contents of the MyOSG website */ properties.setProperty( "myosg.keep.tmp.file", "true" ); myOSG.connect(properties); List sitesList = new ArrayList(); sitesList.add("*"); myOSG.load(sitesList); System.out.println(myOSG.list().size() +" " + myOSG.list()); logger.logEventCompletion(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/site/TestSiteCatalog.java0000644000175000017500000001004511757531137027552 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.site; import edu.isi.pegasus.planner.catalog.SiteCatalog; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.common.PegasusProperties; import java.util.Set; import java.util.ArrayList; import java.util.List; import java.util.Properties; /** * A Test program that shows how to load a Site Catalog, and query for all sites. * The configuration is picked from the Properties. 
The following properties * need to be set *
 *      pegasus.catalog.site       Text|XML|XML3
 *      pegasus.catalog.site.file  path to the site catalog.
 *  
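 *  For example (sample values), to load an XML3 site catalog one might set:
 *
 *      pegasus.catalog.site       XML3
 *      pegasus.catalog.site.file  /path/to/sites.xml3
 *  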
* * The Pegasus Properties can be picked from property files at various locations. * The priorities are explained below. *
 *   - The default path for the properties file is $PEGASUS_HOME/etc/properties.
 *   - A properties file, if found at ${user.home}/.pegasusrc, has higher priority.
 *   - Finally, a user can specify the path to the properties file via the JVM
 *     property pegasus.user.properties. This has the highest priority.
 * 
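 *  For example, to point the test program at a specific properties file
 *  (hypothetical path):
 *
 *      java -Dpegasus.user.properties=/home/user/pegasus.properties ...
 * 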
* * @author Karan Vahi * @version $Revision: 2567 $ */ public class TestSiteCatalog { /** * The main program. * * @param args arguments passed at runtime */ public static void main( String[] args ) { SiteCatalog catalog = null; PegasusProperties properties = PegasusProperties.nonSingletonInstance(); //setup the logger for the default streams. LogManager logger = LogManagerFactory.loadSingletonInstance( properties ); logger.logEventStart( "event.pegasus.catalog.site.test", "planner.version", Version.instance().toString() ); //set debug level to maximum //set if something is going wrong //logger.setLevel( LogManager.DEBUG_MESSAGE_LEVEL ); /* print out all the relevant site catalog properties that were specified*/ Properties siteProperties = properties.matchingSubset( "pegasus.catalog.site", true ); System.out.println( "Site Catalog Properties specified are " + siteProperties ); /* load the catalog using the factory */ try{ catalog = SiteFactory.loadInstance( PegasusProperties.nonSingletonInstance()); } catch ( SiteFactoryException e ){ System.out.println( e.convertException() ); System.exit( 2 ); } /* load all sites in site catalog */ try{ List s = new ArrayList(1); s.add( "*" ); System.out.println( "Loaded " + catalog.load( s ) + " number of sites " ); /* query for the sites, and print them out */ Set sites = catalog.list(); System.out.println( "Sites loaded are " + sites ); /* get detailed information about all the sites */ for( String site: sites ){ SiteCatalogEntry entry = catalog.lookup( site ); System.out.println( entry ); } } catch ( SiteCatalogException e ){ e.printStackTrace(); } finally{ /* close the connection */ try{ catalog.close(); }catch( Exception e ){} } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/classes/0000755000175000017500000000000011757531667024351 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/classes/Profiles.java0000644000175000017500000003475411757531137027004 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.classes; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.namespace.Namespace; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.namespace.ENV; import edu.isi.pegasus.planner.namespace.Globus; import edu.isi.pegasus.planner.namespace.Hints; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.namespace.Stat; import edu.isi.pegasus.planner.namespace.Selector; import java.util.List; import java.util.EnumMap; import java.util.Iterator; import java.io.IOException; import java.io.StringWriter; import java.io.Writer; import java.util.LinkedList; import java.util.logging.Level; import java.util.logging.Logger; /** * Maintains profiles for different namespaces. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class Profiles { /** * The enumeration of valid namespaces. 
It should be */ public static enum NAMESPACES { env, globus, condor, dagman, pegasus, hints,selector,stat }; /** * An enum map that associates the enum keys with the corresponding * namespace objects. */ private EnumMap mProfileMap; /** * The default constructor. */ public Profiles() { mProfileMap = new EnumMap( NAMESPACES.class ); mProfileMap.put( NAMESPACES.condor, new Condor() ); mProfileMap.put( NAMESPACES.dagman, new Dagman() ); mProfileMap.put( NAMESPACES.env, new ENV() ); mProfileMap.put( NAMESPACES.globus, new Globus() ); mProfileMap.put( NAMESPACES.hints, new Hints() ); mProfileMap.put( NAMESPACES.pegasus, new Pegasus() ); mProfileMap.put( NAMESPACES.selector, new Selector() ); mProfileMap.put( NAMESPACES.stat, new Stat() ); } /** * Adds multiple profiles. * * @param profiles the profiles object */ public void addProfiles( Profiles profiles ) { //traverse through all the enum keys for ( NAMESPACES n : NAMESPACES.values() ){ Namespace nm = profiles.get( n ); for( Iterator it = nm.getProfileKeyIterator(); it.hasNext(); ){ String key = (String) it.next(); this.addProfile( new Profile( n.toString(), key, (String)nm.get( key ) )); } } } /** * Adds multiple profiles. * * @param profiles List of Profile objects. */ public void addProfiles( List profiles ) { for( Profile profile: profiles ){ this.addProfile( profile ); } } /** * Adds multiple profiles . to namespace bypassing any underlying namespace * specific checks. The catalog parsers should use this function * * @param profiles the profiles object */ public void addProfilesDirectly( Profiles profiles ) { //traverse through all the enum keys for ( NAMESPACES n : NAMESPACES.values() ){ Namespace nm = profiles.get( n ); for( Iterator it = nm.getProfileKeyIterator(); it.hasNext(); ){ String key = (String) it.next(); this.addProfileDirectly( new Profile( n.toString(), key, (String)nm.get( key ) )); } } } /** * Adds multiple profiles to namespace bypassing any underlying namespace * specific checks. The catalog parsers should use this function. * * * @param profiles List of Profile objects. */ public void addProfilesDirectly( List profiles ) { for ( Profile profile : profiles ){ this.addProfileDirectly( profile); } } /** * Adds a profile directly to namespace bypassing any underlying namespace * specific checks. The catalog parsers should use this function. * * @param p the profile to be added */ public void addProfileDirectly( Profile p ){ //retrieve the appropriate namespace and then add Namespace n = ( Namespace )mProfileMap.get( NAMESPACES.valueOf( p.getProfileNamespace() ) ); n.construct( p.getProfileKey(), p.getProfileValue() ); } /** * Adds a profile. * * @param p the profile to be added */ public void addProfile( Profile p ){ //retrieve the appropriate namespace and then add Namespace n = ( Namespace )mProfileMap.get( NAMESPACES.valueOf( p.getProfileNamespace() ) ); n.checkKeyInNS( p.getProfileKey(), p.getProfileValue() ); } /** * Add a profile. Convenience method * @param namespace * @param key * @param value */ public void addProfileDirectly( NAMESPACES namespace, String key, String value ){ //retrieve the appropriate namespace and then add Namespace n = ( Namespace )mProfileMap.get(namespace); n.construct(key,value); } /** * Add a profile. 
Convenience method * @param namespace * @param key * @param value */ public void addProfileDirectly( String namespace, String key, String value ){ //retrieve the appropriate namespace and then add Namespace n = ( Namespace )mProfileMap.get(namespace); n.construct(key,value); } /** * Add a profile. Convenience method * @param namespace * @param key * @param value */ public void addProfile( NAMESPACES namespace, String key, String value ){ //retrieve the appropriate namespace and then add Namespace n = ( Namespace )mProfileMap.get(namespace); n.checkKeyInNS(key,value); } /** * Add a profile. Convenience method * @param namespace * @param key * @param value */ public void addProfile( String namespace, String key, String value ){ //retrieve the appropriate namespace and then add Namespace n = ( Namespace )mProfileMap.get(namespace); n.checkKeyInNS(key,value); } /** * Returns the list of profiles for all namespaces. * * @return List of Profiles */ public List getProfiles( ){ List result = new LinkedList(); //traverse through all the enum keys for ( NAMESPACES n : NAMESPACES.values() ){ Namespace nm = this.get( n ); for( Iterator it = nm.getProfileKeyIterator(); it.hasNext(); ){ String key = ( String )it.next(); result.add( new Profile( n.toString(), key, (String)nm.get( key ) )); } } return result; } /** * Returns the list of profiles corresponding to a single namespace * * @param namespace the namespace * * @return List of Profiles */ public List getProfiles( String namespace ){ return this.getProfiles( NAMESPACES.valueOf( namespace.toLowerCase() )); } /** * Returns the list of profiles corresponding to a single namespace * * @param namespace the namespace * * @return List of Profiles */ public List getProfiles( NAMESPACES namespace ){ return this.getProfiles( this.get(namespace) ); } /** * Returns the list of profiles corresponding to a single namespace * * @param namespace the namespace * * @return List of Profiles */ public List getProfiles( Namespace namespace ){ List result = new LinkedList(); for( Iterator it = namespace.getProfileKeyIterator(); it.hasNext(); ){ String key = ( String )it.next(); result.add( new Profile( namespace.namespaceName(), key, (String)namespace.get( key ) )); } return result; } /** * Returns a iterator over the profile keys corresponding to a particular namespace. * * @param n the namespace * * @return iterator */ public Iterator getProfileKeyIterator( NAMESPACES n ){ return (( Namespace )mProfileMap.get( n )).getProfileKeyIterator(); } /** * Returns the namespace object corresponding to a namespace * * @param n the namespace * * @return Namespace */ public Namespace get( NAMESPACES n ){ return ( Namespace )mProfileMap.get( n ); } /** * Returns a boolean indicating if the object is empty. * * The object is empty if all the underlying profile maps are empty. * * @return */ public boolean isEmpty(){ boolean result = true; for ( NAMESPACES n : NAMESPACES.values() ){ if( !this.get( n ).isEmpty() ){ result = false; break; } } return result; } /** * Writes out the xml description of the object. * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. 
*/ public void toXML( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); //traverse through all the enum keys for ( NAMESPACES n : NAMESPACES.values() ){ Namespace nm = this.get( n ); for( Iterator it = nm.getProfileKeyIterator(); it.hasNext(); ){ String key = ( String )it.next(); //write out the xml element writer.write( indent ); writer.write( "" ); writer.write( (String)nm.get( key ) ); writer.write( "" ); writer.write( newLine ); } } } /** * Returns the string description of the object. * * @return String containing the object in XML. * * @throws RuntimeException if something fishy happens to the stream. */ public String toString() { try { Writer writer = new StringWriter(32); toString(writer, ""); return writer.toString(); } catch (IOException ex) { throw new RuntimeException( "Exception while converting to String", ex ); } } /** * Writes out the contents of the object as a String * * @param writer is a Writer opened and ready for writing. This can also * be a StringWriter for efficient output. * @param indent the indent to be used. * * @exception IOException if something fishy happens to the stream. */ public void toString( Writer writer, String indent ) throws IOException { String newLine = System.getProperty( "line.separator", "\r\n" ); //traverse through all the enum keys for ( NAMESPACES n : NAMESPACES.values() ){ Namespace nm = this.get( n ); for( Iterator it = nm.getProfileKeyIterator(); it.hasNext(); ){ String key = ( String )it.next(); //write out the xml element writer.write( indent ); writer.write( "profile" ); writer.write( " " );writer.write( n.toString() ); writer.write( " " );writer.write ( key ); writer.write( " " );writer.write( (String)nm.get( key ) ); writer.write( newLine ); } } } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ Profiles obj; obj = new Profiles(); //traverse through all the enum keys for ( NAMESPACES n : NAMESPACES.values() ){ Namespace nm = this.get( n ); nm = ( Namespace )this.get( n ).clone(); obj.mProfileMap.put( n, nm ); } return obj; } /** * Returns the xml description of the object. This is used for generating * the partition graph. That is no longer done. * * @return String containing the object in XML. * * @exception IOException if something fishy happens to the stream. */ public String toXML() throws IOException{ Writer writer = new StringWriter(32); toXML( writer, "" ); return writer.toString(); } /** * Writes an attribute to the stream. Wraps the value in quotes as required * by XML. * * @param writer * @param key * @param value * * @exception IOException if something fishy happens to the stream. */ public void writeAttribute( Writer writer, String key, String value ) throws IOException{ writer.write( " " ); writer.write( key ); writer.write( "=\""); writer.write( value ); writer.write( "\"" ); } /** * Returns the index for the namespace. * * @param u the unit * @return the index. 
*/ private int getIndex( NAMESPACES u ){ return u.ordinal(); } /** * * @param args */ public static void main ( String[] args ){ try { Profiles p = new Profiles(); p.addProfile(new Profile("pegasus", "gridstart", "none")); p.addProfile(new Profile("env", "PEGASUS_HOME", "/pegasus")); p.addProfile(new Profile("env", "GLOBUS_LOCATION", "GLOBUS_LOCATION")); System.out.println("Profiles are " + p.toXML()); } catch (IOException ex) { Logger.getLogger(Profiles.class.getName()).log(Level.SEVERE, null, ex); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/classes/VDSSysInfo2NMI.java0000644000175000017500000001333311757531137027604 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.classes; import edu.isi.pegasus.planner.catalog.transformation.classes.Arch; import edu.isi.pegasus.planner.catalog.transformation.classes.Os; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import java.util.HashMap; import java.util.Map; /** * An Adapter class that translates the old ( VDS era ) Arch and Os objects * to the new NMI based Architecture and OS objects. * * @author Karan Vahi * @version $Revision: 2511 $ */ public class VDSSysInfo2NMI { /** * The separator used to combine OS version and release. */ public static final String OS_COMBINE_SEPARATOR = "_"; /** * The map storing architecture to corresponding NMI architecture platforms. */ private static Map mVDSArchToNMIArch = null; /** * Singleton access to the architecture to NMI arch map. * * @return Map mapping VDS Arch to NMI architecture */ private static Map vdsArchToNMIArchMap(){ //singleton access if( mVDSArchToNMIArch == null ){ mVDSArchToNMIArch = new HashMap(); mVDSArchToNMIArch.put( Arch.INTEL32, SysInfo.Architecture.x86 ); mVDSArchToNMIArch.put( Arch.INTEL64, SysInfo.Architecture.x86_64 ); mVDSArchToNMIArch.put( Arch.AMD64, SysInfo.Architecture.amd64 ); mVDSArchToNMIArch.put( Arch.SPARCV7, SysInfo.Architecture.sparcv7 ); mVDSArchToNMIArch.put( Arch.SPARCV9, SysInfo.Architecture.sparcv9 ); } return mVDSArchToNMIArch; } /** * The map storing OS to corresponding NMI OS platforms. */ private static Map mVDSOSToNMIOS = null; /** * Singleton access to the os to NMI os map. * * @return Map mapping VDS Os to NMI OS */ private static Map vdsOsToNMIOSMap(){ //singleton access if( mVDSOSToNMIOS == null ){ mVDSOSToNMIOS = new HashMap(); mVDSOSToNMIOS.put( Os.LINUX, SysInfo.OS.LINUX ); mVDSOSToNMIOS.put( Os.AIX, SysInfo.OS.AIX ); mVDSOSToNMIOS.put( Os.SUNOS, SysInfo.OS.SUNOS ); mVDSOSToNMIOS.put( Os.WINDOWS, SysInfo.OS.WINDOWS ); } return mVDSOSToNMIOS; } /** * Converts VDS SysInfo to NMI based SysInfo object * * @param sysinfo VDS based SysInfo object * * @return NMI SysInfo object. 
*/ public static SysInfo vdsSysInfo2NMI(VDSSysInfo sysinfo) { SysInfo result = new SysInfo(); result.setArchitecture( vdsArchToNMIArch( sysinfo.getArch() ) ); result.setOS( vdsOsToNMIOS( sysinfo.getOs() ) ); String glibc = sysinfo.getGlibc(); if( glibc != null ){ result.setGlibc( glibc ); } //what we call os release and version now was called os version! String osVersion = sysinfo.getOsversion(); if( osVersion != null && osVersion.length() != 0){ if( osVersion.contains( OS_COMBINE_SEPARATOR ) ){ //split on _ int last = osVersion.lastIndexOf( OS_COMBINE_SEPARATOR ); result.setOSRelease( osVersion.substring( 0, last )); result.setOSVersion( osVersion.substring( last + 1 )); } else{ result.setOSRelease( osVersion ); } } return result; } /** * Returns the NMI Architecture object corresponding to the VDS Arch * object * * @param arch architecture in the VDS format. * * @return NMI Architecture */ public static SysInfo.Architecture vdsArchToNMIArch( Arch arch ){ return vdsArchToNMIArchMap().get( arch ); } /** * Returns the NMI Architecture object corresponding to the VDS Arch * object * * @param arch architecture in the VDS format. * * @return NMI Architecture */ public static SysInfo.Architecture vdsArchToNMIArch( String arch ){ return vdsArchToNMIArchMap().get( Arch.fromString(arch) ); } /** * Returns the NMI OS object corresponding to the VDS Os * object * * @param os os in the VDS format. * * @return NMI OS */ public static SysInfo.OS vdsOsToNMIOS( Os os ){ return vdsOsToNMIOSMap().get( os ); } /** * Returns the NMI OS object corresponding to the VDS Os * object * * @param os os in the VDS format. * * @return NMI OS */ public static SysInfo.OS vdsOsToNMIOS( String os ){ return vdsOsToNMIOSMap().get( Os.fromValue(os) ); } public static void main( String[] args ){ VDSSysInfo v = new VDSSysInfo(); v.setArch(Arch.AMD64); v.setOs(Os.LINUX); v.setOsversion( "rhel_4" ); SysInfo s = VDSSysInfo2NMI.vdsSysInfo2NMI(v); System.out.println( s.getOSRelease() ); System.out.println( s.getOSVersion() ); v.setOsversion( "rhel_" ); s = VDSSysInfo2NMI.vdsSysInfo2NMI(v); System.out.println( s.getOSRelease() ); System.out.println( s.getOSVersion() ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/classes/SysInfo.java0000644000175000017500000002114411757531137026600 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.classes; /** * A container class to keep system information associated with a Site entry in * the Site Catalog or a Transformation in the Transformation Catalog. * * The class follows the NMI conventions for specifying Architecture/ OS and OS release. * * * @author Karan Vahi * @version $Revision: 3283 $ */ public class SysInfo implements Cloneable { /** * Enumerates the new OS types supported in Pegasus. */ public enum OS { LINUX, SUNOS, AIX, MACOSX, WINDOWS } /** * Enumerates the new architecture types supported in Pegasus. 
*/ public enum Architecture { x86, x86_64, ppc, ppc_64, ia64, sparcv7, sparcv9, amd64 } /** * The default OS the entry is associated with if none is specified */ public static final OS DEFAULT_OS = OS.LINUX; /** * The default Architecture the entry is associated with if none is specified */ public static final Architecture DEFAULT_ARCHITECTURE = Architecture.x86; /** * The architecture. */ protected Architecture mArchitecture; /** * The Operating System. */ protected OS mOS; /** * The Operating System Release. Optional. */ protected String mOSRelease; /** * The Operating System Version. Optional. */ protected String mOSVersion; /** * The Glibc version. Optional. */ protected String mGlibc; /** * The default constructor. */ public SysInfo(){ mArchitecture = SysInfo.DEFAULT_ARCHITECTURE; mOS = SysInfo.DEFAULT_OS; mOSRelease = ""; mOSVersion = ""; mGlibc = ""; } /** * This constructor takes the system information in the format arch::os:osversion:glibc * @param system the system information string */ public SysInfo(String system){ if (system != null) { String s1[] = system.split("::", 2); if (s1.length == 2) { if(isValidArchitecture(s1[0].trim())){ mArchitecture =Architecture.valueOf(s1[0].trim()); }else { throw new IllegalStateException( "Error: Illegal Architecture defined. Please specify one of the predefined types \n [x86, x86_64, ppc, ppc_64, ia64, sparcv7, sparcv9, amd64]"); } String s2[] = s1[1].split(":", 3); if(isValidOS(s2[0].trim())){ mOS = OS.valueOf(s2[0].trim()); }else { throw new IllegalStateException( "Error: Illegal Operating System defined. Please specify one of the predefined types \n [LINUX, SUNOS, AIX, MACOSX, WINDOWS]"); } for (int i = 1; i < s2.length; i++) { if (i == 1) { mOSVersion = s2[i]; } if (i == 2) { mGlibc = s2[i]; } } } else { throw new IllegalStateException( "Error : Please check your system info string"); } } else { mArchitecture = SysInfo.DEFAULT_ARCHITECTURE; mOS = SysInfo.DEFAULT_OS; mOSRelease = ""; mOSVersion = ""; mGlibc = ""; } } /** * Checks if the architecture is a valid supported architecture * @param arch architecture * @return true if it is a valid supported architecture, false otherwise */ private static boolean isValidArchitecture(String arch){ for(Architecture architecture : Architecture.values()){ if(architecture.toString().equals(arch)) return true; } return false; } /** * Checks if the operating system is a valid supported operating system * @param os operating system * @return true if it is a valid supported operating system, false otherwise */ private static boolean isValidOS(String os){ for(OS osystem : OS.values()){ if(osystem.toString().equals(os)) return true; } return false; } /** * Sets the architecture of the site. * * @param arch the architecture. */ public void setArchitecture( Architecture arch ){ mArchitecture = arch; } /** * Returns the architecture of the site. * * @return the architecture. */ public Architecture getArchitecture( ){ return mArchitecture; } /** * Sets the OS of the site. * * @param os the os of the site. */ public void setOS( OS os ){ mOS = os; } /** * Returns the OS of the site. * * @return the OS */ public OS getOS( ){ return mOS; } /** * Sets the OS release of the site. * * @param release the os releaseof the site. */ public void setOSRelease( String release ){ mOSRelease = release; } /** * Returns the OS release of the site. * * @return the OS */ public String getOSRelease( ){ return mOSRelease; } /** * Sets the OS version of the site. * * @param version the os versionof the site. 
*/ public void setOSVersion( String version ){ mOSVersion = version; } /** * Returns the OS version of the site. * * @return the OS */ public String getOSVersion( ){ return mOSVersion; } /** * Sets the glibc version on the site. * * @param version the glibc version of the site. */ public void setGlibc( String version ){ mGlibc = version; } /** * Returns the glibc version of the site. * * @return the OS */ public String getGlibc( ){ return mGlibc; } /** * Check if the system information matches. * * @param obj to be compared. * * @return boolean */ public boolean equals(Object obj) { boolean result = false; if( obj instanceof SysInfo ){ SysInfo sysinfo = (SysInfo)obj; result = this.getArchitecture().equals( sysinfo.getArchitecture() ) && this.getOS().equals( sysinfo.getOS() ) && this.getOSRelease().equals( sysinfo.getOSRelease() ) && this.getOSVersion().equals( sysinfo.getOSVersion() ) && this.getGlibc().equals( sysinfo.getGlibc() ); } return result; } /** * Returns the clone of the object. * * @return the clone */ public Object clone(){ SysInfo obj = null; try{ obj = ( SysInfo ) super.clone(); obj.setArchitecture( this.getArchitecture() ); obj.setOS( this.getOS() ); obj.setOSRelease( this.getOSRelease() ); obj.setOSVersion( this.getOSVersion() ); obj.setGlibc( this.getGlibc() ); } catch( CloneNotSupportedException e ){ //somewhere in the hierarch chain clone is not implemented throw new RuntimeException("Clone not implemented in the base class of " + this.getClass().getName(), e ); } return obj; } /** * Returns the output of the data class as string. * @return String */ public String toString() { StringBuffer s = new StringBuffer(); s.append( "{" ); s.append( "arch=" + this.getArchitecture() ); s.append( " os=" + this.getOS() ); String release = this.getOSRelease(); if ( release != null && release.length() > 0 ) { s.append( " osrelease=" + release ); } String version = this.getOSVersion(); if ( version != null && version.length() > 0 ) { s.append( " osversion=" + version ); } s.append( "}" ); return s.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/classes/CatalogEntry.java0000644000175000017500000000156511757531137027607 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.classes; /** * This interface create a common ancestor for all catalog entries. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2076 $ */ public interface CatalogEntry { // empty } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/work/0000755000175000017500000000000011757531667023676 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/work/Database.java0000644000175000017500000004602511757531137026244 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.work; import edu.isi.pegasus.common.logging.LogManagerFactory; import java.util.*; import java.sql.*; import edu.isi.pegasus.planner.catalog.WorkCatalog; import edu.isi.pegasus.common.util.CommonProperties; import edu.isi.pegasus.common.logging.LogManager; /** * This class implements a work catalog on top of a simple table in a * JDBC database. This enables a variety of replica catalog * implementations in a transactionally safe, concurrent environment. * The table must be defined using the statements appropriate for your * database - they are part of the setup in $PEGASUS_HOME/sql/create-my-wf.sql * for MYSQL database and in $PEGASUS_HOME/sql/create-pg-wf.sql. * * If you chose to use an unsupported database, please check, if your * database either supports sequence number, or if it supports auto * increment columns. If your database supports sequences (e.g. * PostGreSQL), you can use a setup similar to the following (for * Oracle, the autoinc can be implemented via a trigger). * *
 * CREATE TABLE wf_work (
 *       id         BIGSERIAL PRIMARY KEY,
 *       basedir    TEXT,
 *       vogroup    VARCHAR(255),
 *       workflow   VARCHAR(255),
 *       run        VARCHAR(255),
 *       creator    VARCHAR(32),
 *       ctime      TIMESTAMP WITH TIME ZONE NOT NULL,
 *       state      INTEGER NOT NULL,
 *       mtime      TIMESTAMP WITH TIME ZONE NOT NULL,
 *
 *        CONSTRAINT sk_wf_work UNIQUE(basedir,vogroup,workflow,run)
 * );
 *
 * CREATE TABLE wf_jobstate (
 *       wfid       BIGINT REFERENCES wf_work(id) ON DELETE CASCADE,
 *       jobid      VARCHAR(64),
 *       state      VARCHAR(24) NOT NULL,
 *       mtime      TIMESTAMP WITH TIME ZONE NOT NULL,
 *       site       VARCHAR(64),
 *
 *        CONSTRAINT pk_wf_jobstate PRIMARY KEY (wfid,jobid)
 * );
 * CREATE INDEX ix_wf_jobstate ON wf_jobstate(jobid);
 *
 * CREATE TABLE wf_siteinfo (
 *       id         BIGSERIAL PRIMARY KEY,
 *       handle     VARCHAR(48) NOT NULL,
 *       mtime      TIMESTAMP WITH TIME ZONE,
 *       -- gauges
 *       other      INTEGER DEFAULT 0,
 *       pending    INTEGER DEFAULT 0,
 *       running    INTEGER DEFAULT 0,
 *       -- counters
 *       success    INTEGER DEFAULT 0,
 *       smtime     TIMESTAMP WITH TIME ZONE,
 *       failure    INTEGER DEFAULT 0,
 *       fmtime     TIMESTAMP WITH TIME ZONE,
 *
 *        CONSTRAINT sk_wf_siteinfo UNIQUE(handle)
 * );
 *
 * 
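 * As an illustration (sample values only), the insert this class issues
 * against the wf_work table corresponds to:
 *
 * INSERT INTO wf_work(basedir, vogroup, workflow, run, creator, ctime, state, mtime)
 * VALUES ('/home/user/runs/run0001', 'ivdgl', 'blackdiamond', 'run0001',
 *         'user', now(), 0, now());
 *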
* * In case of databases that do not support sequences (e.g. MySQL), do * not specify the create sequence, and use an * auto-increment column for the primary key instead, e.g.: * *
 * CREATE TABLE wf_work (
 *       id         BIGINT AUTO_INCREMENT PRIMARY KEY,
 *       basedir    TEXT,
 *       vogroup    VARCHAR(255),
 *       workflow   VARCHAR(255),
 *       run        VARCHAR(255),
 *       creator    VARCHAR(32),
 *       ctime      DATETIME NOT NULL,
 *       state      INTEGER NOT NULL,
 *       mtime      DATETIME NOT NULL,
 *
 *        CONSTRAINT sk_wf_work UNIQUE(basedir(255),vogroup,workflow,run)
 * ) ENGINE=InnoDB;
 *
 * CREATE TABLE wf_jobstate (
 *       wfid       BIGINT REFERENCES wf_work(id) ON DELETE CASCADE,
 *       jobid      VARCHAR(64),
 *       state      VARCHAR(24) NOT NULL,
 *       mtime      DATETIME NOT NULL,
 *       site       VARCHAR(64),
 *
 *        CONSTRAINT pk_wf_jobstate PRIMARY KEY (wfid,jobid)
 * ) ENGINE=InnoDB;
 * CREATE INDEX ix_wf_jobstate ON wf_jobstate(jobid);
 *
 * CREATE TABLE wf_siteinfo (
 *       id         BIGINT AUTO_INCREMENT PRIMARY KEY,
 *       handle     VARCHAR(48) NOT NULL,
 *       mtime      DATETIME,
 *       -- gauges
 *       other      INTEGER DEFAULT 0,
 *       pending    INTEGER DEFAULT 0,
 *       running    INTEGER DEFAULT 0,
 *       -- counters
 *       success    INTEGER DEFAULT 0,
 *       smtime     DATETIME,
 *       failure    INTEGER DEFAULT 0,
 *       fmtime     DATETIME,
 *
 *        CONSTRAINT sk_wf_siteinfo UNIQUE(handle)
 * ) ENGINE=InnoDB;
 * 
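 * A minimal usage sketch of this class (hypothetical connection values);
 * note that the constructor may throw ClassNotFoundException and
 * SQLException, which callers must catch:
 *
 *   Database db = new Database( "com.mysql.jdbc.Driver",
 *                               "jdbc:mysql://localhost/pegasus_wf",
 *                               "user", "secret" );
 *   db.insert( "/home/user/runs", "ivdgl", "blackdiamond", "run0001",
 *              "user", new java.util.Date(), new java.util.Date(), 0 );
 *   db.close();
 *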
* * The site attribute should be specified whenever possible. For the * shell planner, it will always be of value "local". * * @author Karan Vahi * @version $Revision$ */ public class Database implements WorkCatalog { /** * This message is sent whenever one of the member function is executed * which relies on an established database context. */ private static final String mConnectionError = "The database connection is not established"; /** * Maintains the connection to the database over the lifetime of * this instance. */ protected Connection mConnection = null; /** * Maintains an essential set of prepared statement, ready to use. */ protected PreparedStatement mStatements[] = null; /** * The handle to the logging object. */ protected LogManager mLogger; /** * The statement to prepare to slurp attributes. */ private static final String mCStatements[] = { // 0: "INSERT INTO wf_work(basedir, vogroup, workflow, run, creator, ctime, state, mtime) " + "VALUES( ? , ? , ? , ? , ? , ? , ? , ? )", //1: "DELETE FROM wf_work WHERE basedir=? AND vogroup=? AND workflow=? AND run=? " }; /** * Remembers if obtaining generated keys will work or not. */ private boolean m_autoinc = false; /** * Convenience c'tor: Establishes the connection to the work * catalog database. The usual suspects for the class name include: * *
     * org.postgresql.Driver
     * com.mysql.jdbc.Driver
     * com.microsoft.jdbc.sqlserver.SQLServerDriver
     * SQLite.JDBCDriver
     * sun.jdbc.odbc.JdbcOdbcDriver
     * 
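     * For example (sample values), the MySQL driver above pairs with a URL
     * of the form jdbc:mysql://host:port/database, e.g.
     * jdbc:mysql://localhost:3306/pegasus_wf.
     * 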
* * @param jdbc is a string containing the full name of the java class * that must be dynamically loaded. This is usually an external jar * file which contains the Java database driver. * @param url is the database driving URL. This string is database * specific, and tell the JDBC driver, at which host and port the * database listens, permits additional arguments, and selects the * database inside the rDBMS to connect to. Please refer to your * JDBC driver documentation for the format and permitted values. * @param username is the database user account name to connect with. * @param password is the database account password to use. * * @throws LinkageError if linking the dynamically loaded driver fails. * This is a run-time error, and does not need to be caught. * @throws ExceptionInInitializerError if the initialization function * of the driver's instantiation threw an exception itself. This is a * run-time error, and does not need to be caught. * @throws ClassNotFoundException if the class in your jdbc parameter * cannot be found in your given CLASSPATH environment. Callers must * catch this exception. * @throws SQLException if something goes awry with the database. * Callers must catch this exception. */ public Database(String jdbc, String url, String username, String password) throws LinkageError, ExceptionInInitializerError, ClassNotFoundException, SQLException { this( ); // load database driver jar Class.forName(jdbc); // may throw LinkageError, // may throw ExceptionInInitializerError, // may throw ClassNotFoundException // establish connection to database generically connect(url, username, password); // may throws SQLException } /** * Default empty constructor creates an object that is not yet connected * to any database. You must use support methods to connect before this * instance becomes usable. * * @see #connect( String, String, String ) */ public Database() { // make connection defunct mConnection = null; mStatements = null; mLogger = LogManagerFactory.loadSingletonInstance(); } /** * Connects to the database. This is effectively an accessor to * initialize the internal connection instance variable. Warning! * You must call {@link java.lang.Class#forName( String )} yourself * to load the database JDBC driver jar! * * @param url is the database driving URL. This string is database * specific, and tell the JDBC driver, at which host and port the * database listens, permits additional arguments, and selects the * database inside the rDBMS to connect to. Please refer to your * JDBC driver documentation for the format and permitted values. * @param username is the database user account name to connect with. * @param password is the database account password to use. * @throws SQLException if something goes awry with the database. * Callers must catch this exception. * @see java.sql.DriverManager#getConnection( String, String, String ) */ public void connect(String url, String username, String password) throws SQLException { // establish connection to database generically mConnection = DriverManager.getConnection(url, username, password); // may throws SQLException m_autoinc = mConnection.getMetaData().supportsGetGeneratedKeys(); // prepared statements are Singletons -- prepared on demand mStatements = new PreparedStatement[mCStatements.length]; for (int i = 0; i < mCStatements.length; ++i) { mStatements[i] = null; } } /** * Establishes a connection to the database from the properties. 
You * can specify a driver property to contain the class name of * the JDBC driver for your database. This property will be removed * before attempting to connect. You must speficy a url * property to describe the connection. It will be removed before * attempting to connect. * * @param props is the property table with sufficient settings to * establish a link with the database. The minimum key required key is * "url", and possibly "driver". Any other keys depend on the database * driver. * @return true if connected, false if failed to connect. * @see java.sql.DriverManager#getConnection( String, Properties ) * * @throws Error subclasses for runtime errors in the class loader. */ public boolean connect(Properties props) { boolean result = false; // class loader: Will propagate any runtime errors!!! String driver = (String) props.remove("db.driver"); Properties localProps = CommonProperties.matchingSubset( (Properties)props.clone(), "db", false ); String url = (String) localProps.remove("url"); if (url == null || url.length() == 0) { return result; } try { if (driver != null) { //only support mysql and postgres for time being if( driver.equalsIgnoreCase( "MySQL") ){ driver = "com.mysql.jdbc.Driver"; } else if ( driver.equalsIgnoreCase( "Postgres" )){ driver = "org.postgresql.Driver"; } mLogger.log( "Driver being used to connect to Work Catalog is " + driver, LogManager.DEBUG_MESSAGE_LEVEL ); Class.forName(driver); } } catch (Exception e) { mLogger.log( "While connecting to Work Catalog", e, LogManager.DEBUG_MESSAGE_LEVEL ); return result; } try { mConnection = DriverManager.getConnection( url, localProps ); m_autoinc = mConnection.getMetaData().supportsGetGeneratedKeys(); // prepared statements are Singletons -- prepared on demand mStatements = new PreparedStatement[mCStatements.length]; for (int i = 0; i < mCStatements.length; ++i) { mStatements[i] = null; } result = true; } catch (SQLException e) { mLogger.log( "While connecting to Work Catalog", e , LogManager.DEBUG_MESSAGE_LEVEL ); result = false; } return result; } /** * Explicitely free resources before the garbage collection hits. */ public void close() { if (mConnection != null) { try { if (!mConnection.getAutoCommit()) { mConnection.commit(); } } catch (SQLException e) { // ignore } } if (mStatements != null) { try { for (int i = 0; i < mCStatements.length; ++i) { if (mStatements[i] != null) { mStatements[i].close(); mStatements[i] = null; } } } catch (SQLException e) { // ignore } finally { mStatements = null; } } if (mConnection != null) { try { mConnection.close(); } catch (SQLException e) { // ignore } finally { mConnection = null; } } } /** * Predicate to check, if the connection with the catalog's * implementation is still active. This helps determining, if it makes * sense to call close(). * * @return true, if the implementation is disassociated, false otherwise. * @see #close() */ public boolean isClosed() { return (mConnection == null); } /** * Singleton manager for prepared statements. This instruction * checks that a prepared statement is ready to use, and will * create an instance of the prepared statement, if it was unused * previously. * * @param i is the index which prepared statement to check. * @return a handle to the prepared statement. * * * @throws SQLException in case of unable to delete entry. 
*/ protected PreparedStatement getStatement(int i) throws SQLException { if (mStatements[i] == null) { mStatements[i] = mConnection.prepareStatement(mCStatements[i]); } else { mStatements[i].clearParameters(); } return mStatements[i]; } /** * Inserts a new mapping into the work catalog. * * @param basedir the base directory * @param vogroup the vo to which the user belongs to. * @param label the label in the DAX * @param run the run number. * @param creator the user who is running. * @param cTime the creation time of the DAX * @param mTime the modification time. * @param state the state of the workflow * * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * * @throws WorkCatalogException in case of unable to delete entry. */ public int insert(String basedir, String vogroup, String label, String run, String creator, java.util.Date cTime, java.util.Date mTime, int state) throws WorkCatalogException { String query = "[no query]"; int result = 0; boolean autoCommitWasOn = false; // sanity checks if (mConnection == null) { throw new RuntimeException(mConnectionError); } try { // delete-before-insert as one transaction if ( (autoCommitWasOn = mConnection.getAutoCommit())) { mConnection.setAutoCommit(false); } // state == 1 // // delete before insert... this.delete(basedir, vogroup, label, run); // state == 2 int which = 0; query = mCStatements[which]; // sanity checks if (mConnection == null) throw new RuntimeException( mConnectionError ); PreparedStatement ps = getStatement( which ); ps.setString( 1, basedir ); ps.setString( 2, vogroup ); ps.setString( 3, label ); ps.setString( 4, run ); ps.setString( 5, creator ); ps.setTimestamp( 6, new Timestamp( cTime.getTime() ) ); ps.setInt( 7, state ); ps.setTimestamp( 8, new Timestamp( mTime.getTime() ) ); mLogger.log( "Executing query " + ps.toString() , LogManager.DEBUG_MESSAGE_LEVEL ); result = ps.executeUpdate(); } catch (SQLException e) { throw new WorkCatalogException( "Unable to insert into work database using " + query, e); } finally { // restore original auto-commit state try { if (autoCommitWasOn) { mConnection.setAutoCommit(true); } } catch (SQLException e) { // ignore } } return result; } /** * Deletes a mapping from the work catalog. * * @param basedir the base directory * @param vogroup the vo to which the user belongs to. * @param label the label in the DAX * @param run the run number. * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * @throws WorkCatalogException in case of unable to delete entry. */ public int delete(String basedir, String vogroup, String label, String run ) throws WorkCatalogException{ int result = 0; int which = 1; String query = mCStatements[which]; // sanity checks if ( mConnection == null ) throw new RuntimeException( mConnectionError ); try { PreparedStatement ps = getStatement( which ); ps.setString( 1, basedir ); ps.setString( 2, vogroup ); ps.setString( 3, label ); ps.setString( 4, run ); result = ps.executeUpdate(); } catch ( SQLException e ) { throw new WorkCatalogException( "Unable to delete from database using " + query , e ); } // done return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/work/WorkFactory.java0000644000175000017500000001621611757531137027011 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.work; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.common.util.CommonProperties; import edu.isi.pegasus.planner.catalog.WorkCatalog; import java.util.Properties; import java.util.Enumeration; import edu.isi.pegasus.planner.common.PegasusProperties; /** * This factory loads a work catalog, as specified by the properties. * Each invocation of the factory will result in a new instance of a * connection to the replica catalog. * * @author Karan Vahi * @author Jens-S. Vöckler * @version $Revision: 50 $ * * @see org.griphyn.common.catalog.WorkCatalog */ public class WorkFactory{ /** * Package to prefix "just" class names with. */ public static final String DEFAULT_PACKAGE = "edu.isi.pegasus.planner.catalog.work"; /** * Connects the interface with the work catalog implementation. The * choice of backend is configured through properties. This class is * useful for non-singleton instances that may require changing * properties. * * @param props is an instance of properties to use. * * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. * * @see org.griphyn.common.util.CommonProperties */ static public WorkCatalog loadInstance( PegasusProperties props ) throws WorkFactoryException { return loadInstance( props.getVDSProperties() ); } /** * Connects the interface with the work catalog implementation. The * choice of backend is configured through properties. This class is * useful for non-singleton instances that may require changing * properties. * * @param props is an instance of properties to use. * * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. 
* * @see org.griphyn.common.util.CommonProperties */ static public WorkCatalog loadInstance( CommonProperties props ) throws WorkFactoryException { // sanity check if ( props == null ) throw new NullPointerException("invalid properties"); Properties connect = props.matchingSubset( WorkCatalog.c_prefix, false ); //get the default db driver properties in first pegasus.catalog.*.db.driver.* Properties db = props.matchingSubset( WorkCatalog.DB_ALL_PREFIX, false ); //now overload with the work catalog specific db properties. //pegasus.catalog.work.db.driver.* db.putAll( props.matchingSubset( WorkCatalog.DB_PREFIX , false ) ); //to make sure that no confusion happens. //add the db prefix to all the db properties for( Enumeration e = db.propertyNames(); e.hasMoreElements(); ){ String key = (String)e.nextElement(); connect.put( "db." + key, db.getProperty( key )); } //put the driver property back into the DB property // String driver = props.getProperty( WorkCatalog.DBDRIVER_PREFIX ); // if( driver == null ){ driver = props.getProperty( WorkCatalog.DBDRIVER_ALL_PREFIX ); } // connect.put( "db.driver", driver ); // determine the class that implements the work catalog return loadInstance( props.getProperty( WorkCatalog.c_prefix ), connect ); } /** * Connects the interface with the work catalog implementation. The * choice of backend is configured through properties. This class is * useful for non-singleton instances that may require changing * properties. * * @param props is an instance of properties to use. * * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. * * @see org.griphyn.common.util.CommonProperties */ static private WorkCatalog loadInstance( String catalogImplementor, Properties props ) throws WorkFactoryException { WorkCatalog result = null; try{ if ( catalogImplementor == null ) throw new RuntimeException( "You need to specify the " + WorkCatalog.c_prefix + " property" ); // syntactic sugar adds absolute class prefix if ( catalogImplementor.indexOf('.') == -1 ) catalogImplementor = DEFAULT_PACKAGE + "." + catalogImplementor; // POSTCONDITION: we have now a fully-qualified classname DynamicLoader dl = new DynamicLoader( catalogImplementor ); result = (WorkCatalog) dl.instantiate( new Object[0] ); if ( ! result.connect( props ) ) throw new RuntimeException( "Unable to connect to work catalog implementation" ); } catch( Exception e ) { throw new WorkFactoryException( " Unable to instantiate Work Catalog ", catalogImplementor, e ); } // done return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/work/WorkCatalogException.java0000644000175000017500000000434411757531137030632 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.work; import edu.isi.pegasus.planner.catalog.CatalogException; /** * Class to notify of failures. Exceptions are chained like the * {@link java.sql.SQLException} interface.
*
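* <p>Editorial sketch, not part of the original source: because causes are
* chained, a caller can walk them via {@link java.lang.Throwable#getCause()}.
* The {@code catalog} variable and the insert arguments are hypothetical:</p>
* <pre>
*   try {
*     catalog.insert( basedir, vogroup, label, run, creator, cTime, mTime, state );
*   } catch ( WorkCatalogException wce ) {
*     for ( Throwable t = wce; t != null; t = t.getCause() )
*       System.err.println( t.getMessage() );
*   }
* </pre>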
* * @author Jens-S. Vöckler, Karan Vahi * @see org.griphyn.common.catalog.ReplicaCatalog */ public class WorkCatalogException extends CatalogException { /* * Constructs a WorkCatalogException with no detail * message. */ public WorkCatalogException() { super(); } /** * Constructs a WorkCatalogException with the * specified detailed message. * * @param s is the detailled message. */ public WorkCatalogException( String s ) { super(s); } /** * Constructs a WorkCatalogException with the * specified detailed message and a cause. * * @param s is the detailled message. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public WorkCatalogException( String s, Throwable cause ) { super( s, cause ); } /** * Constructs a WorkCatalogException with the * specified just a cause. * * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public WorkCatalogException( Throwable cause ) { super(cause); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/work/WorkFactoryException.java0000644000175000017500000000644311757531137030671 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.work; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Transformation Catalog * implementations. * * @author Karan Vahi * @version $Revision: 50 $ */ public class WorkFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Work Catalog"; /** * Constructs a WorkFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public WorkFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a WorkFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public WorkFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a WorkFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. 
* * @see #DEFAULT_NAME */ public WorkFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a WorkFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public WorkFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/SiteCatalog.java0000644000175000017500000000654111757531137025754 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog; import edu.isi.pegasus.planner.catalog.site.*; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.SiteCatalogException; import java.util.List; import java.util.Set; /** * * @author Karan Vahi * @version $Revision: 2076 $ */ public interface SiteCatalog extends Catalog { /** * The version of the API */ public static final String VERSION = "1.0"; /** * Prefix for the property subset to use with this catalog. */ public static final String c_prefix = "pegasus.catalog.site"; /** * Loads up the Site Catalog implementation with the sites whose * site handles are specified. This is a convenience method, that can * allow the backend implementations to maintain soft state if required. * * If the implementation chooses not to implement this, just do an empty * implementation. * * The site handle * is a special handle designating all sites are to be * loaded. * * @param sites the list of sites to be loaded. * * @return the number of sites loaded. * * @throws SiteCatalogException in case of error. */ public int load( List sites ) throws SiteCatalogException; /** * Inserts a new mapping into the Site catalog. * * @param entry the SiteCatalogEntry object that describes * a site. * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * @throws SiteCatalogException in case of error. */ public int insert( SiteCatalogEntry entry ) throws SiteCatalogException; /** * Lists the site handles for all the sites in the Site Catalog. * * @return A set of site handles. * * @throws SiteCatalogException in case of error. */ public Set list() throws SiteCatalogException; /** * Retrieves the SiteCatalogEntry for a site. * * @param handle the site handle / identifier. * * @return SiteCatalogEntry in case an entry is found , or null * if no match is found. * * * @throws SiteCatalogException in case of error. */ public SiteCatalogEntry lookup( String handle ) throws SiteCatalogException; /** * Removes a site catalog entry matching the the handle. 
* * @param handle the site handle / identifier. * * @return the number of removed entries. * * @throws SiteCatalogException in case of error. */ public int remove( String handle ) throws SiteCatalogException; }pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/TransformationCatalog.java0000644000175000017500000004707111757531137030061 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.classes.PegasusBag; import java.util.List; import edu.isi.pegasus.planner.classes.Profile; /** * This class is an interface to the various TxCatalog implementations that Pegasus will use. * It defines the basic functionality for interfacing with various transformation Catalogs * It defines api's for the querying, adding and deleting transformation and associated mappings from the implementing Tx Catalog * By implementing this inteface a user can easily use his own TX Catalog with Pegasus. * * * * @author Gaurang Mehta * @author Karan Vahi * * @version $Revision: 4089 $ */ public interface TransformationCatalog extends edu.isi.pegasus.planner.catalog.Catalog { /** * The version of the API */ public static final String VERSION = "1.3"; /** * The scheme name for file url. */ public static final String FILE_URL_SCHEME = "file:"; /** * Property specify whether to modify file url or not. */ public static final String MODIFY_FOR_FILE_URLS_KEY = "pegasus.catalog.transformation.modify.file.urls"; /** * Initialize the implementation, and return an instance of the implementation. * * @param bag the bag of Pegasus initialization objects. * */ public void initialize ( PegasusBag bag ); /** * Returns TC entries for a particular logical transformation and/or on a * particular resource and/or of a particular type. * * @param namespace String The namespace of the logical transformation. * @param name String the name of the logical transformation. * @param version String The version of the logical transformation. * @param resourceid String The resourceid where the transformation is located. * If NULL it returns all resources. * @param type TCType The type of the transformation to search for. * If NULL it returns all types. * * @return List Returns a list of TransformationCatalogEntry objects * containing the corresponding entries from the TC. * Returns null if no entry found. * @throws Exception * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public List lookup( String namespace, String name, String version, String resourceid, TCType type ) throws Exception; /** * Returns TC entries for a particular logical transformation and/or on a * number of resources and/or of a particular type. 
* * @param namespace String The namespace of the logical transformation. * @param name String the name of the logical transformation. * @param version String The version of the logical transformation. * @param resourceids List The List resourceid where the transformation is located. * If NULL it returns all resources. * @param type TCType The type of the transformation to search for. * If NULL it returns all types. * * @return List Returns a list of TransformationCatalogEntry objects containing * the corresponding entries from the TC. Returns null if no entry found. * @throws Exception * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public List lookup( String namespace, String name, String version, List resourceids, TCType type ) throws Exception; /** * Get the list of Resource ID's where a particular transformation may reside. * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * @param type TCType The type of the transformation to search for.
* (Enumerated type includes SOURCE, STATIC-BINARY, DYNAMIC-BINARY, PACMAN, INSTALLED, SCRIPT)
* If NULL it returns all types. * * @return List Returns a list of Resource Id's as strings. Returns NULL if no results found. * * @throws Exception NotImplementedException if not implemented * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public List lookupSites( String namespace, String name, String version, TCType type ) throws Exception; /** * Get the list of PhysicalNames for a particular transformation on a site/sites * for a particular type/types; * * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * @param resourceid String The id of the resource on which you want to search.
* If NULL then returns entries on all resources. * @param type TCType The type of the transformation to search for.
* (Enumerated type includes source, binary, dynamic-binary, pacman, installed)
* If NULL then returns entries of all types. * * @return List Returns a List of objects * with the profiles not populated. * * @throws Exception NotImplementedException if not implemented. * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public List lookupNoProfiles( String namespace, String name, String version, String resourceid, TCType type ) throws Exception; /** * Get the list of LogicalNames available on a particular resource. * @param resourceid String The id of the resource on which you want to search * @param type TCType The type of the transformation to search for.
* (Enumerated type includes source, binary, dynamic-binary, pacman, installed)
* If NULL then return logical name for all types. * * @return List Returns a list of String Arrays. * Each array contains the resourceid, logical transformation * in the format namespace::name:version and type. * Returns NULL if no results found. * * @throws Exception NotImplementedException if not implemented. */ public List getTCLogicalNames( String resourceid, TCType type ) throws Exception; /** * Get the list of Profiles associated with a particular logical transformation. * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * * @return List Returns a list of Profile Objects containing profiles * assocaited with the transformation. * Returns NULL if no profiles found. * * @throws Exception NotImplementedException if not implemented. * @see org.griphyn.cPlanner.classes.Profile */ public List lookupLFNProfiles( String namespace, String name, String version ) throws Exception; /** * Get the list of Profiles associated with a particular physical transformation. * @param pfn The physical file name to search the transformation by. * @param resourceid String The id of the resource on which you want to search. * @param type TCType The type of the transformation to search for.
* (Enumerated type includes source, binary, dynamic-binary, pacman, installed)
* * @throws Exception NotImplementedException if not implemented. * @return List Returns a list of Profile Objects containing profiles * assocaited with the transformation. * Returns NULL if no profiless found. * * @see org.griphyn.cPlanner.classes.Profile */ public List lookupPFNProfiles( String pfn, String resourceid, TCType type ) throws Exception; /** * List all the contents of the TC * * @return List Returns a List of TransformationCatalogEntry objects. * @throws Exception */ public List getContents() throws Exception; /** * ADDITIONS */ /** * Add multiple TCEntries to the Catalog. * * @param tcentry List Takes a list of TransformationCatalogEntry objects as input * * @throws Exception * @return number of insertions On failure,throw an exception, don't use zero. * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert( List tcentry ) throws Exception; /** * Add single TCEntry to the Catalog. * @param tcentry Takes a single TransformationCatalogEntry object as input * @throws Exception * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert( TransformationCatalogEntry tcentry ) throws Exception; /** * Add single TCEntry object temporarily to the in memory Catalog. * This is a hack to get around for adding soft state entries to the TC * @param tcentry Takes a single TransformationCatalogEntry object as input * @param write boolean enable write commits to backed catalog or not. * @throws Exception * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert( TransformationCatalogEntry tcentry,boolean write) throws Exception; /** * Add an single entry into the transformation catalog. * * @param namespace String The namespace of the transformation to be added (Can be null) * @param name String The name of the transformation to be added. * @param version String The version of the transformation to be added. (Can be null) * @param physicalname String The physical name/location of the transformation to be added. * @param type TCType The type of the physical transformation. * @param resourceid String The resource location id where the transformation is located. * @param lfnprofiles List The List of Profile objects associated with a Logical Transformation. (can be null) * @param pfnprofiles List The List of Profile objects associated with a Physical Transformation. (can be null) * @param sysinfo SysInfo The System information associated with a physical transformation. * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * * @throws Exception * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @see org.griphyn.cPlanner.classes.Profile */ public int insert( String namespace, String name, String version, String physicalname, TCType type, String resourceid, List lfnprofiles, List pfnprofiles, SysInfo sysinfo ) throws Exception; /** * Add additional profile to a logical transformation . * * @param namespace String The namespace of the transformation to be added. (can be null) * @param name String The name of the transformation to be added. * @param version String The version of the transformation to be added. 
(can be null) * @param profiles List The List of Profile objects that are to be added * to the transformation. * * @return number of insertions. On failure, throw an exception, don't use zero. * * @throws Exception * @see org.griphyn.cPlanner.classes.Profile */ public int addLFNProfile( String namespace, String name, String version, List profiles ) throws Exception; /** * Add additional profile to a physical transformation. * @param pfn String The physical name of the transformation * @param type TCType The type of transformation that the profile is * associated with. * @param resourcename String The resource on which the physical transformation exists * @param profiles The List of Profile objects that are to be added * to the transformation. * @return number of insertions. On failure, throw an exception, don't use zero. * * @throws Exception * @see org.griphyn.cPlanner.classes.Profile */ public int addPFNProfile( String pfn, TCType type, String resourcename, List profiles ) throws Exception; /** * DELETIONS */ /** * Delete all entries in the transformation catalog for a give logical transformation and/or on a resource and/or of * a particular type * @param namespace String The namespace of the transformation to be deleted. (can be null) * @param name String The name of the transformation to be deleted. * @param version String The version of the transformation to be deleted. ( can be null) * @param resourceid String The resource id for which the transformation is to be deleted. * If NULL then transformation on all resource are deleted * @param type TCType The type of the transformation. If NULL then all types are deleted for the transformation. * @throws Exception * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByLFN( String namespace, String name, String version, String resourceid, TCType type ) throws Exception; /** * Delete all entries in the transformation catalog for pair of logical and physical transformation. * @param physicalname String The physical name of the transformation * @param namespace String The namespace associated in the logical name of the transformation. * @param name String The name of the logical transformation. * @param version String The version number of the logical transformation. * @param resourceid String The resource on which the transformation is to be deleted. * If NULL then it searches all the resource id. * @param type TCType The type of transformation. If NULL then it search and deletes entries for all types. * @throws Exception * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByPFN( String physicalname, String namespace, String name, String version, String resourceid, TCType type ) throws Exception; /** * Delete a particular type of transformation, and/or on a particular resource * @param type TCType The type of the transformation * @param resourceid String The resource on which the transformation exists. * If NULL then that type of transformation is deleted from all the resources. * @throws Exception * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByType( TCType type, String resourceid ) throws Exception; /** * Delete all entries on a particular resource from the transformation catalog. * @param resourceid String The resource which you want to remove. 
* @throws Exception * * @return the number of removed entries. */ public int removeBySiteID( String resourceid ) throws Exception; /** * Deletes entries from the catalog which have a particular system information. * @param sysinfo SysInfo The System Information by which you want to delete * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @throws Exception */ public int removeBySysInfo( SysInfo sysinfo ) throws Exception; /** * Deletes the entire transformation catalog. CLEAN............. USE WITH CAUTION. * * @return the number of removed entries. * * @throws Exception */ public int clear() throws Exception; /** * Delete a list of profiles or all the profiles associated with a pfn on a * resource and of a type. * * @param physicalname String The physical name of the transformation. * @param type TCType The type of the transformation. * @param resourceid String The resource of the transformation. * @param profiles List The list of profiles to be deleted. If NULL then all profiles for that pfn+resource+type are deleted. * * @return the number of removed entries. * * @see org.griphyn.cPlanner.classes.Profile * @throws Exception */ public int deletePFNProfiles( String physicalname, TCType type, String resourceid, List profiles ) throws Exception; /** * Delete a list of profiles or all the profiles associated with a logical * transformation. * * @param namespace String The namespace of the logical transformation. * @param name String The name of the logical transformation. * @param version String The version of the logical transformation. * @param profiles List The List of profiles to be deleted. If NULL * then all profiles for the logical transformation are deleted. * * @return the number of removed entries. * * @see org.griphyn.cPlanner.classes.Profile * @throws Exception */ public int deleteLFNProfiles( String namespace, String name, String version, List profiles ) throws Exception; /** * Returns the TC implementation being used * * @return String */ public String getDescription(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/ReplicaCatalog.java0000644000175000017500000002756211757531137026435 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import java.util.*; /** * This interface describes a minimum set of essential tasks required * from a replica catalog. The method verbs have a steering meaning * associated with them: * *
* <dl>
* <dt>lookup</dt>
* <dd>retrieves physical filenames or replica entries</dd>
* <dt>list</dt>
* <dd>retrieves only lists of logical filenames</dd>
* <dt>delete</dt>
* <dd>removes an entry specified by LFN and PFN</dd>
* <dt>remove</dt>
* <dd>removes en-bulk by LFN</dd>
* </dl>
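* <p>Editorial sketch, not part of the original interface docs: the four
* verbs in use, assuming {@code rc} is a connected instance and the
* LFN/PFN values are placeholders:</p>
* <pre>
*   rc.lookup( "f.a" );                               // entries for one LFN
*   rc.list();                                        // all LFNs in the catalog
*   rc.delete( "f.a", "gsiftp://example.org/f.a" );   // one LFN,PFN pair
*   rc.remove( "f.a" );                               // en-bulk by LFN
* </pre>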
* * @author Jens-S. Vöckler * @author Karan Vahi * @version $Revision: 2076 $ */ public interface ReplicaCatalog extends Catalog { /** * Prefix for the property subset to use with this catalog. */ public static final String c_prefix = "pegasus.catalog.replica"; /** * The DB Driver properties prefix. */ public static final String DB_PREFIX = "pegasus.catalog.replica.db"; /** * The suffix for the property that if set, specifies the size of the chunk * in which the implementations handle multiple queries. The property that * needs to be specified is vds.rc.chunk.size. */ public static final String BATCH_KEY = "chunk.size"; /** * Retrieves the entry for a given filename and resource handle from * the replica catalog. * * @param lfn is the logical filename to obtain information for. * @param handle is the resource handle to obtain entries for. * @return the (first) matching physical filename, or * null if no match was found. */ public String lookup( String lfn, String handle ); /** * Retrieves all entries for a given LFN from the replica catalog. * Each entry in the result set is a tuple of a PFN and all its * attributes. * * @param lfn is the logical filename to obtain information for. * @return a collection of replica catalog entries * @see ReplicaCatalogEntry */ public Collection lookup( String lfn ); /** * Retrieves all entries for a given LFN from the replica catalog. * Each entry in the result set is just a PFN string. Duplicates * are reduced through the set paradigm. * * @param lfn is the logical filename to obtain information for. * @return a set of PFN strings */ public Set lookupNoAttributes( String lfn ); /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in an online display or portal. * * @param lfns is a set of logical filename strings to look up. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries for the LFN. * @see ReplicaCatalogEntry */ public Map lookup( Set lfns ); /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in an online display or portal. * * @param lfns is a set of logical filename strings to look up. * @return a map indexed by the LFN. Each value is a set * of PFN strings. */ public Map lookupNoAttributes( Set lfns ); /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.
*
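* <p>Editorial sketch with placeholder values, assuming a connected
* instance {@code rc}:</p>
* <pre>
*   Set lfns = new HashSet();
*   lfns.add( "f.a" );
*   lfns.add( "f.b" );
*   Map m = rc.lookup( lfns, "local" );   // LFN -> collection of entries
* </pre>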
* * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries (all attributes). * @see ReplicaCatalogEntry */ public Map lookup( Set lfns, String handle ); /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.
*
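* <p>Editorial sketch (same placeholders as above), contrasting with the
* attribute-bearing variant:</p>
* <pre>
*   Map m = rc.lookupNoAttributes( lfns, "local" );   // LFN -> set of PFN strings
* </pre>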
* * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * @return a map indexed by the LFN. Each value is a set of * physical filenames. */ public Map lookupNoAttributes( Set lfns, String handle ); /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal. * * @param constraints is mapping of keys 'lfn', 'pfn', or any * attribute name, e.g. the resource handle 'site', to a string that * has some meaning to the implementing system. This can be a SQL * wildcard for queries, or a regular expression for Java-based memory * collections. Unknown keys are ignored. Using an empty map requests * the complete catalog. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries. * @see ReplicaCatalogEntry */ public Map lookup( Map constraints ); /** * Lists all logical filenames in the catalog. * * @return A set of all logical filenames known to the catalog. */ public Set list(); /** * Lists a subset of all logical filenames in the catalog. * * @param constraint is a constraint for the logical filename only. It * is a string that has some meaning to the implementing system. This * can be a SQL wildcard for queries, or a regular expression for * Java-based memory collections. * @return A set of logical filenames that match. The set may be empty */ public Set list( String constraint ); /** * Inserts a new mapping into the replica catalog. * * @param lfn is the logical filename under which to book the entry. * @param tuple is the physical filename and associated PFN attributes. * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. */ public int insert( String lfn, ReplicaCatalogEntry tuple ); /** * Inserts a new mapping into the replica catalog. This is a * convenience function exposing the resource handle. Internally, * the ReplicaCatalogEntry element will be contructed, and passed to * the appropriate insert function. * * @param lfn is the logical filename under which to book the entry. * @param pfn is the physical filename associated with it. * @param handle is a resource handle where the PFN resides. * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * @see #insert( String, ReplicaCatalogEntry ) * @see ReplicaCatalogEntry */ public int insert( String lfn, String pfn, String handle ); // ^^ MARKER ^^ /** * Inserts multiple mappings into the replica catalog. The input is a * map indexed by the LFN. The value for each LFN key is a collection * of replica catalog entries. * * @param x is a map from logical filename string to list of replica * catalog entries. * @return the number of insertions. * @see ReplicaCatalogEntry */ public int insert( Map x ); /** * Deletes multiple mappings into the replica catalog. The input is a * map indexed by the LFN. The value for each LFN key is a collection * of replica catalog entries. On setting matchAttributes to false, all entries * having matching lfn pfn mapping to an entry in the Map are deleted. * However, upon removal of an entry, all attributes associated with the pfn * also evaporate (cascaded deletion). * * @param x is a map from logical filename string to list of * replica catalog entries. * @param matchAttributes whether mapping should be deleted only if all * attributes match. * * @return the number of deletions. 
* @see ReplicaCatalogEntry */ public int delete( Map x , boolean matchAttributes); /** * Deletes a specific mapping from the replica catalog. We don't care * about the resource handle. More than one entry could theoretically * be removed. Upon removal of an entry, all attributes associated * with the PFN also evaporate (cascading deletion). * * @param lfn is the logical filename in the tuple. * @param pfn is the physical filename in the tuple. * @return the number of removed entries. */ public int delete( String lfn, String pfn ); /** * Deletes a very specific mapping from the replica catalog. The LFN * must be matches, the PFN, and all PFN attributes specified in the * replica catalog entry. More than one entry could theoretically be * removed. Upon removal of an entry, all attributes associated with * the PFN also evaporate (cascading deletion). * * @param lfn is the logical filename in the tuple. * @param tuple is a description of the PFN and its attributes. * @return the number of removed entries, either 0 or 1. */ public int delete( String lfn, ReplicaCatalogEntry tuple ); /** * Deletes all PFN entries for a given LFN from the replica catalog * where the PFN attribute is found, and matches exactly the object * value. This method may be useful to remove all replica entries that * have a certain MD5 sum associated with them. It may also be harmful * overkill. * * @param lfn is the logical filename to look for. * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * @return the number of removed entries. */ public int delete( String lfn, String name, Object value ); // ^^ MARKER ^^ /** * Deletes all PFN entries for a given LFN from the replica catalog * where the resource handle is found. Karan requested this * convenience method, which can be coded like *
* <pre>
*  delete( lfn, RESOURCE_HANDLE, handle )
* </pre>
* * @param lfn is the logical filename to look for. * @param handle is the resource handle * @return the number of entries removed. */ public int deleteByResource( String lfn, String handle ); /** * Removes all mappings for an LFN from the replica catalog. * * @param lfn is the logical filename to remove all mappings for. * @return the number of removed entries. */ public int remove( String lfn ); /** * Removes all mappings for a set of LFNs. * * @param lfns is a set of logical filename to remove all mappings for. * @return the number of removed entries. */ public int remove( Set lfns ); /** * Removes all entries from the replica catalog where the PFN attribute * is found, and matches exactly the object value. * * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * @return the number of removed entries. */ public int removeByAttribute( String name, Object value ); /** * Removes all entries associated with a particular resource handle. * This is useful, if a site goes offline. It is a convenience method, * which calls the generic removeByAttribute method. * * @param handle is the site handle to remove all entries for. * @return the number of removed entries. * @see #removeByAttribute( String, Object ) */ public int removeByAttribute( String handle ); // ^^ MARKER ^^ /** * Removes everything. Use with caution! * * @return the number of removed entries. */ public int clear(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/Catalog.java0000644000175000017500000000344611757531137025130 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog; import java.util.Properties; /** * This interface create a common ancestor for all cataloging * interfaces. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2076 $ */ public interface Catalog{ /** * The default DB Driver properties prefix. */ public static final String DB_ALL_PREFIX = "pegasus.catalog.*.db"; /** * Establishes a link between the implementation and the thing the * implementation is build upon.
*
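* <p>Editorial sketch, not part of the original interface docs: the exact
* keys depend on the implementation; the JDBC-backed catalogs, for
* instance, expect at least a "url" property (the value below is a
* placeholder):</p>
* <pre>
*   Properties p = new Properties();
*   p.setProperty( "url", "jdbc:mysql://localhost/pegasus" );
*   boolean ok = catalog.connect( p );
* </pre>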
* FIXME: The cause for failure is lost without exceptions. * * @param props contains all necessary data to establish the link. * @return true if connected now, or false to indicate a failure. */ public boolean connect( Properties props ); /** * Explicitely free resources before the garbage collection hits. */ public void close(); /** * Predicate to check, if the connection with the catalog's * implementation is still active. This helps determining, if it makes * sense to call close(). * * @return true, if the implementation is disassociated, false otherwise. * @see #close() */ public boolean isClosed(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/0000755000175000017500000000000011757531667024333 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/ReplicaCatalogEntry.java0000644000175000017500000002637411757531137031076 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.replica; import edu.isi.pegasus.planner.catalog.classes.CatalogEntry; import java.util.*; /** * The entry is a high-level logical structure representing the physical * filename, the site handle, and optional attributes related to the PFN * as one entity.
*
* * The resource handle is the most frequently used attribute. In * reality, the resource handle may be a relational attribute of the * mapping relation between an LFN and a PFN - there is disagreement * among the developers on this issue. For simplicity purposes, it * appears to be sufficient to make the resource handle a regular PFN * attribute.
*
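* <p>Editorial sketch of typical construction (PFN, handle and attribute
* values are placeholders):</p>
* <pre>
*   ReplicaCatalogEntry rce =
*       new ReplicaCatalogEntry( "gsiftp://example.org/f.a", "local" );
*   rce.addAttribute( "md5", "0123abcd" );    // arbitrary PFN attribute
*   String site = rce.getResourceHandle();    // "local", stored under "pool"
* </pre>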
* * @author Jens-S. Vöckler * @author Karan Vahi * @version $Revision: 2076 $ */ public class ReplicaCatalogEntry implements CatalogEntry { /** * The (reserved) attribute name used for the resource handle. */ public static final String RESOURCE_HANDLE = "pool"; /** * The physical filename. */ private String m_pfn; /** * Any optional attributes associated with the PFN. */ private Map m_attributeMap; /** * Default constructor for arrays. The PFN is initialized to * null, and thus must be explicitly set later. The map * of attributes associated with the PFN is initialized to be empty. * Thus, no resource handle is available. */ public ReplicaCatalogEntry() { m_pfn = null; m_attributeMap = new TreeMap(); } /** * Convenience constructor initializes the PFN. The map of attributes * is initialized to be empty. Thus, no resource handle is avaiable. * * @param pfn is the PFN to remember. */ public ReplicaCatalogEntry( String pfn ) { m_pfn = pfn; m_attributeMap = new TreeMap(); } /** * Convenience constructor initializes the PFN and the resource * handle. The resource handle is stored as regular PFN attribute. * * @param pfn is the PFN to remember. * @param handle is the resource handle to remember. */ public ReplicaCatalogEntry( String pfn, String handle ) { m_pfn = pfn; m_attributeMap = new TreeMap(); m_attributeMap.put( RESOURCE_HANDLE, handle ); } /** * Standard constructor initializes the PFN and arbitrary attributes. * * @param pfn is the PFN to remember. * @param attributes is a map of arbitrary attributes related to the * PFN. */ public ReplicaCatalogEntry( String pfn, Map attributes ) { m_pfn = pfn; m_attributeMap = new TreeMap(attributes); } /** * Adds an attribute to the set of attributes. Note, this is identical * to the {@link #setAttribute( String, Object )} method of the same * signature. * * @param key is the key denoting an attribute. * @param value is a value object to store. */ public void addAttribute( String key, Object value ) { this.m_attributeMap.put( key, value ); } /** * Adds attributes to the existing attributes. * * @param attributes is a map of attributes to add. * @see #setAttribute(Map) * @see java.util.Map#putAll( Map ) */ public void addAttribute( Map attributes ) { this.m_attributeMap.putAll(attributes); } /** * Obtains the attribute value for a given key. * * @param key is the key to look up * @return the object stored as value, may be null. * @see java.util.Map#get( Object ) */ public Object getAttribute( String key ) { return this.m_attributeMap.get(key); } /** * Checks for the existence of an attribute key. * * @param key is the key to look up * @return true if the key is known, false otherwise. */ public boolean hasAttribute( String key ) { return this.m_attributeMap.containsKey(key); } /** * Counts the number of attributes known for the PFN. * * @return number of attributes, may be zero. * @see java.util.Map#size() */ public int getAttributeCount() { return this.m_attributeMap.size(); } /** * Provides an iterator to traverse the attributes by their keys. * * @return an iterator over the keys to walk the attribute list. */ public Iterator getAttributeIterator() { return this.m_attributeMap.keySet().iterator(); } /** * Merges the attribute maps of two entries in a controlled fashion. * Entries are only merged with another entry, if the physical * filenames match. * * @param a is one replica catalog entry to merge. * @param b is the other replica catalog entry to merge. * @param overwrite resolves intersections. 
If true, uses rce's * attribute to remain, if false, the original attribute remains. * @return the merged entry, if the PFNs matched, or null * if the PFN mismatched. */ public static ReplicaCatalogEntry merge( ReplicaCatalogEntry a, ReplicaCatalogEntry b, boolean overwrite ) { ReplicaCatalogEntry result = null; String pfn1 = a.getPFN(); String pfn2 = b.getPFN(); if ( pfn1 == null && pfn2 == null || pfn1 != null && pfn2 != null && pfn1.equals(pfn2) ) { result = new ReplicaCatalogEntry( pfn1, a.m_attributeMap ); result.merge( b, overwrite ); // result cannot be false } // will return null on PFN mismatch return result; } /** * Merges the attribute maps in a controlled fashion. An entry is only * merged with another entry, if the physical filenames match. * * @param rce is another replica catalog entry to merge with. * @param overwrite resolves intersections. If true, uses rce's * attribute to remain, if false, the original attribute remains. * @return true if a merge was attempted, false if the PFNs did not * match. */ public boolean merge( ReplicaCatalogEntry rce, boolean overwrite ) { String pfn1 = this.m_pfn; String pfn2 = rce.getPFN(); boolean result = ( pfn1 == null && pfn2 == null || pfn1 != null && pfn2 != null && pfn1.equals(pfn2) ); // only merge if PFN match if ( result ) { String key; Object val; for ( Iterator i=rce.getAttributeIterator(); i.hasNext(); ) { key = (String) i.next(); val = rce.getAttribute(key); if ( hasAttribute(key) ) { if ( overwrite ) setAttribute( key, val ); } else { setAttribute( key, val ); } } } return result; } /** * Removes all attributes associated with a PFN. * @see #removeAttribute( String ) */ public void removeAllAttribute() { this.m_attributeMap.clear(); } /** * Removes a specific attribute. * * @param name is the name of the attribute to remove. * @return the value object that was removed, or null, * if the key was not in the map. * @see #removeAllAttribute() */ public Object removeAttribute( String name ) { return this.m_attributeMap.remove(name); } /** * Adds a new or overwrites an existing attribute. Note, this is * identical to the {@link #addAttribute( String, Object)} method of * the same signature. * * @param key is the name of the attribute * @param value is the value object associated with the attribute. */ public void setAttribute( String key, Object value ) { this.m_attributeMap.put( key, value ); } /** * Replaces all existing attributes with new attributes. Existing * attributes are removed before attempting a shallow copy of the new * attributes. * * @param attributes is the map of new attributes to remember. * @see #addAttribute(Map) */ public void setAttribute( Map attributes ) { this.m_attributeMap.clear(); this.m_attributeMap.putAll(attributes); } /** * Obtains the resource handle from the attributes map. This is a * convenience method. Internally, the PFN attribute map is queried * for the value of the resource handle. * * @return the resource handle, or null if unset. * @see #setResourceHandle( String ) */ public String getResourceHandle() { return (String) this.m_attributeMap.get( RESOURCE_HANDLE ); } /** * Sets a new resource handle to remember as PFN attribute. This is a * convenience method. Internally, the PFN attribute map is changed * to remember the new resource handle. * * @param handle is the new resource handle. * @see #getResourceHandle() */ public void setResourceHandle( String handle ) { this.m_attributeMap.put( RESOURCE_HANDLE, handle ); } /** * Accessor: Obtains the PFN portion from this entry. 
* * @return the physical filename, or null if unset. * @see #setPFN( String ) */ public String getPFN() { return m_pfn; } /** * Accessor: Sets a new PFN to remember. * * @param pfn is a new physical filename. * @see #getPFN() */ public void setPFN( String pfn ) { m_pfn = pfn; } /** * Converts the contents into a string. * * @return a textual representation of the item content. */ public String toString() { // return "(" + m_pfn + "," + m_attributeMap.toString() + ")"; StringBuffer result = null; // save the formatted map content String save = m_attributeMap.toString(); if ( m_pfn == null ) { result = new StringBuffer( 10 + save.length() ); result.append( "((null)," ); } else { result = new StringBuffer( 4 + m_pfn.length() + save.length() ); result.append( '(' ).append( m_pfn ).append( ',' ); } result.append( save ); result.append( ')' ); return result.toString(); } /** * Matches two ReplicaCatalogEntry objects. The primary key in this case is * the pfn and all the attributes. * * @return true if the pfn and all the attributes match, false otherwise. */ public boolean equals(Object obj){ // null check if (obj == null) return false; // see if type of objects match if (! (obj instanceof ReplicaCatalogEntry)) return false; ReplicaCatalogEntry rce = (ReplicaCatalogEntry)obj; String pfn1 = this.m_pfn; String pfn2 = rce.getPFN(); //rce with null pfns are assumed to match boolean result = ( pfn1 == null && pfn2 == null || pfn1 != null && pfn2 != null && pfn1.equals(pfn2) && this.getAttributeCount() == rce.getAttributeCount()); if(result){ String key; Object val; //do the matching on attributes now for (Iterator it = rce.getAttributeIterator(); it.hasNext(); ) { key = (String) it.next(); val = rce.getAttribute(key); if (hasAttribute(key)) { if(!(getAttribute(key).equals(val))){ result = false; break; } } else { //an attribute of rce is missing in this entry result = false; break; } } } return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/ReplicaCatalogException.java0000644000175000017500000000440111757531137031716 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.replica; import edu.isi.pegasus.planner.catalog.CatalogException; /** * Class to notify of failures. Exceptions are chained like the * {@link java.sql.SQLException} interface.

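 * An illustrative sketch (not part of the shipped sources) of catching a
 * backend failure and rethrowing it through the chained-cause constructor
 * of this class; catalog, lfn and rce are assumed to be in scope:
 *
 *     try {
 *         catalog.insert( lfn, rce );
 *     }
 *     catch ( Exception e ) {
 *         throw new ReplicaCatalogException( "insert failed for " + lfn, e );
 *     }
 *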
* * @author Jens-S. Vöckler, Karan Vahi * @see org.griphyn.common.catalog.ReplicaCatalog */ public class ReplicaCatalogException extends CatalogException { /** * Constructs a ReplicaCatalogException with no detail * message. */ public ReplicaCatalogException() { super(); } /** * Constructs a ReplicaCatalogException with the * specified detailed message. * * @param s is the detailed message. */ public ReplicaCatalogException( String s ) { super(s); } /** * Constructs a ReplicaCatalogException with the * specified detailed message and a cause. * * @param s is the detailed message. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public ReplicaCatalogException( String s, Throwable cause ) { super( s, cause ); } /** * Constructs a ReplicaCatalogException with just the * specified cause. * * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public ReplicaCatalogException( Throwable cause ) { super(cause); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/TestReplicaCatalog.java0000644000175000017500000001343211757531137030703 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.replica; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import java.util.Set; import java.util.Collection; import java.util.Iterator; import java.util.Map; import java.util.Properties; /** * A Test program that shows how to load a Replica Catalog, and query it for entries. * The configuration is picked from the Properties. The following properties * need to be set *

 *      pegasus.catalog.replica       SimpleFile|JDBCRC|RLS|LRC
 *      pegasus.catalog.replica.file  path to the Simple File Replica catalog if SimpleFile is being used.
 *      pegasus.catalog.replica.url   The RLS url if the replica catalog being used is RLS or LRC
 *  
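 * For example, a minimal file-based configuration (the catalog path below
 * is illustrative only) would be
 *
 *      pegasus.catalog.replica       SimpleFile
 *      pegasus.catalog.replica.file  /tmp/rc.data
 *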
* * To use the JDBCRC, the database connection parameters are configured by * setting the following properties *
 *   pegasus.catalog.replica.db.url
 *   pegasus.catalog.replica.db.user
 *   pegasus.catalog.replica.db.password
 * 
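 * For example (all connection values below are illustrative only)
 *
 *   pegasus.catalog.replica.db.url      jdbc:mysql://localhost/rc
 *   pegasus.catalog.replica.db.user     pegasus
 *   pegasus.catalog.replica.db.password secret
 *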
* The SQL schemas for this catalog can be found in the $PEGASUS_HOME/sql directory. * * The Pegasus Properties can be picked up from property files at various locations. * The priorities are explained below. *
 *   - The default path for the properties file is $PEGASUS_HOME/etc/properties.
 *   - A properties file, if found at ${user.home}/.pegasusrc, has a higher priority.
 *   - Finally, a user can specify the path to the properties file via
 *     the JVM property pegasus.user.properties. This has the highest priority.
 * 
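 * For instance, to run this test program against a specific properties file
 * (the path below is hypothetical), one would invoke
 *
 *   java -Dpegasus.user.properties=/tmp/my.properties \
 *        edu.isi.pegasus.planner.catalog.replica.TestReplicaCatalog
 *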
* * @author Karan Vahi * @version $Revision: 2567 $ */ public class TestReplicaCatalog { /** * The main program. */ public static void main( String[] args ) { ReplicaCatalog catalog = null; PegasusProperties properties = PegasusProperties.nonSingletonInstance(); //setup the logger for the default streams. LogManager logger = LogManagerFactory.loadSingletonInstance( properties ); logger.logEventStart( "event.pegasus.catalog.replica.test", "planner.version", Version.instance().toString() ); //set debug level to maximum //set if something is going wrong //logger.setLevel( LogManager.DEBUG_MESSAGE_LEVEL ); /* print out all the relevant site catalog properties that were specified*/ Properties replicaProperties = properties.matchingSubset( "pegasus.catalog.replica", true ); System.out.println( "Replica Catalog Properties specified are " + replicaProperties ); /* load the catalog using the factory */ try{ catalog = ReplicaFactory.loadInstance( PegasusProperties.nonSingletonInstance()); } catch ( Exception e ){ System.out.println( e ); System.exit( 2 ); } /* lets insert an entry into ReplicaCatalog */ try{ String lfn = "test.replica"; String pfn = "gsiftp://test.isi.edu/examples/test.replica"; String handle = "isi";// the site handle of the site where the data is ReplicaCatalogEntry rce = new ReplicaCatalogEntry( pfn, handle ); /* insert the RCE into Replica Catalog * multiple RCE for the same LFN can be inserted */ catalog.insert( lfn, rce ); /* insert another */ catalog.insert( lfn, "gsiftp://replica.isi.edu/examples/test.replica", "local" ); /* query for the entry we just entered */ Collection<ReplicaCatalogEntry> results = catalog.lookup( lfn ); System.out.println( "Results for LFN " + lfn ); for( ReplicaCatalogEntry entry: results ){ System.out.println( entry ); } /* remove the first entry */ catalog.delete( lfn, rce ); /* can remove all the entries associated with a lfn */ //catalog.remove( lfn ); /* list all the entries remaining in the Replica Catalog */ Set<String> lfns = catalog.list(); System.out.println( "LFN's in replica catalog " + lfns ); /* we can do a bulk lookup of lfns */ Map<String, Collection<ReplicaCatalogEntry>> entries = catalog.lookup( lfns ); for( Iterator<Map.Entry<String, Collection<ReplicaCatalogEntry>>> it = entries.entrySet().iterator(); it.hasNext(); ){ Map.Entry<String, Collection<ReplicaCatalogEntry>> entry = it.next(); String logicalFilename = entry.getKey(); results = catalog.lookup( logicalFilename ); System.out.println( "Results for LFN " + logicalFilename ); for( ReplicaCatalogEntry result: results ){ System.out.println( result ); } } } catch ( ReplicaCatalogException e ){ e.printStackTrace(); } finally{ /* close the connection */ try{ catalog.close(); }catch( Exception e ){} } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/ReplicaFactory.java0000644000175000017500000002254411757531137030104 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.catalog.replica; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.common.util.CommonProperties; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import java.lang.reflect.*; import java.io.IOException; import java.util.Properties; import java.util.MissingResourceException; import edu.isi.pegasus.planner.common.PegasusProperties; import java.util.Enumeration; /** * This factory loads a replica catalog, as specified by the properties. * Each invocation of the factory will result in a new instance of a * connection to the replica catalog. * * @author Karan Vahi * @author Jens-S. Vöckler * @version $Revision: 2585 $ * * @see edu.isi.pegasus.planner.catalog.replica.ReplicaCatalog * @see edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry * @see edu.isi.pegasus.planner.catalog.replica.impl.JDBCRC */ public class ReplicaFactory{ /** * Package to prefix "just" class names with. */ public static final String DEFAULT_PACKAGE = "edu.isi.pegasus.planner.catalog.replica.impl"; /** * Connects the interface with the replica catalog implementation. The * choice of backend is configured through properties. This class is * useful for non-singleton instances that may require changing * properties. * * @param props is an instance of properties to use. * * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. * * @see org.griphyn.common.util.CommonProperties * @see #loadInstance() */ static public ReplicaCatalog loadInstance( PegasusProperties props ) throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { return loadInstance( props.getVDSProperties() ); } /** * Connects the interface with the replica catalog implementation. The * choice of backend is configured through properties. This class is * useful for non-singleton instances that may require changing * properties. * * @param props is an instance of properties to use. * * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. 
* * @see org.griphyn.common.util.CommonProperties * @see #loadInstance() */ static public ReplicaCatalog loadInstance( CommonProperties props ) throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { // sanity check if ( props == null ) throw new NullPointerException("invalid properties"); Properties connect = props.matchingSubset( ReplicaCatalog.c_prefix, false ); //get the default db driver properties in first pegasus.catalog.*.db.driver.* Properties db = props.matchingSubset( ReplicaCatalog.DB_ALL_PREFIX, false ); //now overload with the work catalog specific db properties. //pegasus.catalog.work.db.driver.* db.putAll( props.matchingSubset( ReplicaCatalog.DB_PREFIX , false ) ); //to make sure that no confusion happens. //add the db prefix to all the db properties for( Enumeration e = db.propertyNames(); e.hasMoreElements(); ){ String key = (String)e.nextElement(); connect.put( "db." + key, db.getProperty( key )); } //put the driver property back into the DB property // String driver = props.getProperty( ReplicaCatalog.DBDRIVER_PREFIX ); // driver = ( driver == null )? driver = props.getProperty( ReplicaCatalog.DBDRIVER_ALL_PREFIX ): driver; // connect.put( "db.driver", driver ); // determine the class that implements the work catalog return loadInstance( props.getProperty( ReplicaCatalog.c_prefix ), connect ); } /** * Connects the interface with the replica catalog implementation. The * choice of backend is configured through properties. This class is * useful for non-singleton instances that may require changing * properties. * * @param props is an instance of properties to use. * * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. * * @see org.griphyn.common.util.CommonProperties * @see #loadInstance() */ static public ReplicaCatalog loadInstance( String catalogImplementor, Properties props ) throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { ReplicaCatalog result = null; if ( catalogImplementor == null ) throw new RuntimeException( "You need to specify the " + ReplicaCatalog.c_prefix + " property" ); // for Karan: 2005-10-27 if ( catalogImplementor.equalsIgnoreCase("rls") ){ catalogImplementor = "RLI"; } //File also means SimpleFile if( catalogImplementor.equalsIgnoreCase( "File" ) ){ catalogImplementor = "SimpleFile"; } // syntactic sugar adds absolute class prefix if ( catalogImplementor.indexOf('.') == -1 ) catalogImplementor = DEFAULT_PACKAGE + "." + catalogImplementor; // POSTCONDITION: we have now a fully-qualified classname DynamicLoader dl = new DynamicLoader( catalogImplementor ); result = (ReplicaCatalog) dl.instantiate( new Object[0] ); if ( result == null ) throw new RuntimeException( "Unable to load " + catalogImplementor ); if ( ! 
result.connect( props ) ) throw new RuntimeException( "Unable to connect to replica catalog implementation" ); // done return result; } /** * Connects the interface with the replica catalog implementation. The * choice of backend is configured through properties. This method uses * default properties from the property singleton. * * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. * @exception MissingResourceException if the properties could not * be loaded properly. * * @see org.griphyn.common.util.CommonProperties * @see #loadInstance( org.griphyn.common.util.CommonProperties ) */ static public ReplicaCatalog loadInstance() throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, MissingResourceException { return loadInstance( CommonProperties.instance() ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/impl/0000755000175000017500000000000011757531667025274 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/impl/MRC.java0000644000175000017500000006307611757531137026564 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.replica.impl; import edu.isi.pegasus.planner.catalog.replica.*; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import edu.isi.pegasus.common.util.CommonProperties; import java.util.List; import java.util.LinkedList; import java.util.Iterator; import java.util.Map; import java.util.HashMap; import java.util.Set; import java.util.HashSet; import java.util.Collection; import java.util.Properties; /** * A multiple replica catalog implementation that allows users to query * different multiple catalogs at the same time.

* * To use it, set *

 * pegasus.catalog.replica MRC
 * 
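 * With that property set, client code obtains the MRC transparently through
 * the factory; a minimal sketch, assuming a planned properties setup and an
 * illustrative LFN (exception handling elided):
 *
 *    ReplicaCatalog rc = ReplicaFactory.loadInstance(
 *        PegasusProperties.nonSingletonInstance() );
 *    Collection c = rc.lookup( "f.a" );
 *    rc.close();
 *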
* * Each associated replica catalog can be configured via properties as follows. *

* The user associates a variable name referred to as [value] * for each of the catalogs, where [value] is any legal identifier * (concretely [A-Za-z][_A-Za-z0-9]*) * * For each associated replica catalog the user specifies the following * properties. *

 * pegasus.catalog.replica.mrc.[value]      to specify the type of replica catalog.
 * pegasus.catalog.replica.mrc.[value].key  to specify a property name key for a
 *                                          particular catalog
 * 
* *

* For example, if a user wants to query two LRCs at the same time, they * can specify the following * *

 *    pegasus.catalog.replica.mrc.lrc1 LRC
 *    pegasus.catalog.replica.mrc.lrc1.url rls://sukhna
 *
 *    pegasus.catalog.replica.mrc.lrc2 LRC
 *    pegasus.catalog.replica.mrc.lrc2.url rls://smarty
 *
 * 
* *

* In the above example, lrc1, lrc2 are any valid identifier names and url is * the property key that needs to be specified. * * @author Karan Vahi * @version $Revision: 2585 $ */ public class MRC implements ReplicaCatalog { /** * The prefix for the property subset for connecting to the individual * catalogs. */ public static final String PROPERTY_PREFIX = "mrc"; /** * The property key that designates the type of replica catalog to connect * to. */ public static final String TYPE_KEY = "type"; /** * The list of replica catalogs that need to be queried for. */ protected List mRCList; /** * The handle to the logging manager. */ protected LogManager mLogger; /** * The default constructor. */ public MRC() { mRCList = new LinkedList(); mLogger = LogManagerFactory.loadSingletonInstance(); } /** * Removes everything from the catalogs. * Use with care!!! * * @return the number of removed entries. */ public int clear() { int result = 0; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result += catalog.clear(); } return result; } /** * Explicitly free resources before garbage collection hits. * * */ public void close() { for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = ( ReplicaCatalog )it.next(); catalog.close(); } } /** * Establishes a link between the implementation and the thing the * implementation is built upon. * * @param props contains all necessary data to establish the link. * * @return true if connected now, or false to indicate a failure. */ public boolean connect( Properties props ) { //get the subset for the properties Properties subset = CommonProperties.matchingSubset( props, PROPERTY_PREFIX, false ); mLogger.log( "MRC Properties are " + subset , LogManager.DEBUG_MESSAGE_LEVEL ); //container for properties for each of the different catalogs Map propertiesMap = new HashMap(); //put each of the keys in the correct bin for( Iterator it = subset.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = ( Map.Entry )it.next(); String key = ( String ) entry.getKey(); String value = ( String ) entry.getValue(); String name = getName( key ); //bin stores the user defined name specified //now determine the key key = getKey( key, name ); //store the key, value in the correct properties object Properties p; if( propertiesMap.containsKey( name ) ){ p = ( Properties )propertiesMap.get( name ); } else{ p = new Properties( ); propertiesMap.put( name, p ); } p.setProperty( key, value ); } //now that we have all the properties sorted according to individual catalogs //try connecting to them one by one boolean result = true; for( Iterator it = propertiesMap.entrySet().iterator(); it.hasNext() ; ){ Map.Entry entry = ( Map.Entry )it.next(); result &= connect( (String)entry.getKey(), ( Properties )entry.getValue() ); //if unable to connect to any single //break out and exit if( !result ){ break; } } //if the result is false, then disconnect from //already connected replica catalogs if( !result ){ close(); } return result; } /** * Connects to an individual replica catalog. Also adds the handle to the * connected replica catalog in the internal list. * * @param name the name given by the user in the properties file. * @param properties the properties to use for connecting. 
* @return boolean */ protected boolean connect( String name, Properties properties ){ //get the type first String type = properties.getProperty( this.TYPE_KEY ); if( type == null ){ StringBuffer message = new StringBuffer(); message.append( "No type associated with replica catalog of name " ). append( name ); message.append( ". Set the property " ).append( ReplicaCatalog.c_prefix ). append( "." ).append( name ); mLogger.log( message.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); return false; } //try and connect ReplicaCatalog catalog = null; try{ catalog = ReplicaFactory.loadInstance(type, properties); }catch( Exception e ){ //log the connection error mLogger.log( "Unable to connect to replica catalog of name " + name, e, LogManager.ERROR_MESSAGE_LEVEL ); return false; } mRCList.add( catalog ); return true; } /** * Returns an iterator to iterate through the list of ReplicaCatalogs that * MRC is associated with. * * @return Iterator */ protected Iterator rcIterator(){ return this.mRCList.iterator(); } /** * Returns the name from the key. The name is the first component of the key before * the first dot (.). * * @param key String * @return String */ protected String getName( String key ){ return ( key.indexOf( '.' ) == -1 )? //if there is no instance of . then the key is the name key: //else get the substring to first dot key.substring( 0, key.indexOf( '.' )); } /** * Returns the key with the prefix stripped off. In the case where the key * is the prefix, TYPE_KEY is returned. If the key does not start with the * prefix, then null is returned. * * @param key the key * @param prefix String * * @return key stripped off of the prefix * * @see #TYPE_KEY */ protected String getKey( String key, String prefix ){ //sanity check if( !key.startsWith( prefix ) ) return null; //if the key and prefix are same length if( key.length() == prefix.length() ){ return this.TYPE_KEY; } //if prefix does not end in a dot add a dot if ( prefix.charAt(prefix.length()-1) != '.' ) { prefix = prefix + '.'; } //for a valid subsetting operation there should be . at prefix.length() - 1 //allows us to distinguish between lrc1.url and lrc1a.url for prefix //lrc1 return ( key.charAt( prefix.length() - 1) != '.' )? null: key.substring( prefix.length() ); } /** * Deletes all PFN entries for a given LFN from the replica catalog where * the PFN attribute is found, and matches exactly the object value. * * @param lfn is the logical filename to look for. * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * @return the number of removed entries. */ public int delete(String lfn, String name, Object value) { int result = 0; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result += catalog.delete( lfn, name, value ); } return result; } /** * Deletes a very specific mapping from the replica catalog. * * @param lfn is the logical filename in the tuple. * @param tuple is a description of the PFN and its attributes. * @return the number of removed entries, either 0 or 1. */ public int delete(String lfn, ReplicaCatalogEntry tuple) { int result = 0; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result += catalog.delete( lfn, tuple ); } return result; } /** * Deletes multiple mappings from the replica catalog. * * @param x is a map from logical filename string to list of replica * catalog entries. 
* @param matchAttributes whether a mapping should be deleted only if all * attributes match. * @return the number of deletions. */ public int delete(Map x, boolean matchAttributes) { int result = 0; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result += catalog.delete( x, matchAttributes ); } return result; } /** * Deletes a specific mapping from the replica catalog. * * @param lfn is the logical filename in the tuple. * @param pfn is the physical filename in the tuple. * @return the number of removed entries. */ public int delete( String lfn, String pfn ) { int result = 0; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result += catalog.delete( lfn, pfn ); } return result; } /** * Deletes all PFN entries for a given LFN from the replica catalog where * the resource handle is found. * * @param lfn is the logical filename to look for. * @param handle is the resource handle * * @return the number of entries removed. */ public int deleteByResource(String lfn, String handle) { int result = 0; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result += catalog.deleteByResource( lfn, handle ); } return result; } /** * Inserts a new mapping into the replica catalog. * * @param lfn is the logical filename under which to book the entry. * @param pfn is the physical filename associated with it. * @param handle is a resource handle where the PFN resides. * * @return number of insertions, should always be 1. On failure, throw * an exception, don't use zero. * * @throws UnsupportedOperationException */ public int insert(String lfn, String pfn, String handle) { throw new UnsupportedOperationException( "Method insert( String, String, String ) not supported in MRC" ); } /** * Inserts a new mapping into the replica catalog. * * @param lfn is the logical filename under which to book the entry. * @param tuple is the physical filename and associated PFN attributes. * * @return number of insertions, should always be 1. On failure, throw exception * @throws UnsupportedOperationException */ public int insert(String lfn, ReplicaCatalogEntry tuple) { throw new UnsupportedOperationException( "Method insert( String, ReplicaCatalogEntry ) not supported in MRC" ); } /** * Inserts multiple mappings into the replica catalog. * * @param x is a map from logical filename string to list of replica * catalog entries. * * @return the number of insertions. * @throws UnsupportedOperationException */ public int insert( Map x ) { throw new UnsupportedOperationException( "Method insert( Map ) not supported in MRC" ); } /** * Predicate to check if the connection with the catalog's * implementation is still active. Returns true only if the connections to * all the associated replica catalogs are closed. * * @return true, if the implementation is disassociated, false otherwise. */ public boolean isClosed() { boolean result = true; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result &= catalog.isClosed( ); } return result; } /** * Lists a subset of all logical filenames in the catalog. * * @param constraint is a constraint for the logical filename only. It * is a string that has some meaning to the implementing system. This * can be a SQL wildcard for queries, or a regular expression for * Java-based memory collections. * * @return A set of logical filenames that match. 
The set may be empty * */ public Set list( String constraint ) { Set result = new HashSet(); for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result.addAll( catalog.list( constraint ) ); } return result; } /** * Lists all logical filenames in the catalog. * * @return A set of all logical filenames known to the catalog. */ public Set list() { Set result = new HashSet(); for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result.addAll( catalog.list() ); } return result; } /** * Retrieves the entry for a given filename and resource handle from the * replica catalog. * * @param lfn is the logical filename to obtain information for. * @param handle is the resource handle to obtain entries for. * @return the (first) matching physical filename, or null * if no match was found. */ public String lookup( String lfn, String handle ) { String result = null; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); if( (result = catalog.lookup( lfn, handle )) != null ){ return result; } } return result; } /** * Retrieves all entries for a given LFN from the replica catalog. * * @param lfn is the logical filename to obtain information for. * * @return a collection of replica catalog entries * */ public Collection lookup( String lfn ) { Collection result = new LinkedList(); for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); Collection l = catalog.lookup( lfn ); if ( l != null ){ result.addAll( l ); } } return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. * * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * * @return a map indexed by the LFN. Each value is a collection of * replica catalog entries (all attributes). */ public Map lookup( Set lfns, String handle ) { Map result = new HashMap(); for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); Map m = catalog.lookup(lfns, handle); //merge all the entries in the map into the result for (Iterator mit = m.entrySet().iterator(); mit.hasNext(); ) { Map.Entry entry = (Map.Entry) mit.next(); //merge the entries into the main result String lfn = (String) entry.getKey(); //the lfn if ( result.containsKey( lfn ) ) { //right now no merging of RCE being done on basis //on them having same pfns. duplicate might occur. ( (Set) result.get( lfn )).addAll( (Set) entry.getValue()); } else { result.put( lfn, entry.getValue() ); } } } return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. * * @param constraints is mapping of keys 'lfn', 'pfn', or any attribute * name, e.g. the resource handle 'site', to a string that has some * meaning to the implementing system. This can be a SQL wildcard for * queries, or a regular expression for Java-based memory collections. * Unknown keys are ignored. Using an empty map requests the complete * catalog. * * @return a map indexed by the LFN. Each value is a collection of * replica catalog entries. 
*/ public Map lookup( Map constraints ) { Map result = new HashMap(); for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); Map m = catalog.lookup( constraints ); //merge all the entries in the map into the result for (Iterator mit = m.entrySet().iterator(); mit.hasNext(); ) { Map.Entry entry = (Map.Entry) mit.next(); //merge the entries into the main result String lfn = (String) entry.getKey(); //the lfn if ( result.containsKey( lfn ) ) { //right now no merging of RCE being done on basis //on them having same pfns. duplicate might occur. ( (Set) result.get( lfn )).addAll( (Set) entry.getValue()); } else { result.put( lfn, entry.getValue() ); } } } return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. * * @param lfns is a set of logical filename strings to look up. * * @return a map indexed by the LFN. Each value is a collection of * replica catalog entries for the LFN. */ public Map lookup( Set lfns ) { Map result = new HashMap(); for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); Map m = catalog.lookup( lfns ); //merge all the entries in the map into the result for (Iterator mit = m.entrySet().iterator(); mit.hasNext(); ) { Map.Entry entry = (Map.Entry) mit.next(); //merge the entries into the main result String lfn = (String) entry.getKey(); //the lfn if ( result.containsKey( lfn ) ) { //right now no merging of RCE being done on basis //on them having same pfns. duplicate might occur. ( (Collection)result.get( lfn )).addAll( (Collection) entry.getValue()); } else { result.put( lfn, entry.getValue() ); } } } return result; } /** * Retrieves all entries for a given LFN from the replica catalog. * * @param lfn is the logical filename to obtain information for. * @return a set of PFN strings */ public Set lookupNoAttributes( String lfn ) { Set result = new HashSet(); for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result.addAll( catalog.lookupNoAttributes( lfn ) ); } return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. * * @param lfns is a set of logical filename strings to look up. * @return a map indexed by the LFN. Each value is a set of PFN strings. */ public Map lookupNoAttributes( Set lfns ) { Map result = new HashMap(); for( Iterator it = lfns.iterator(); it.hasNext() ; ){ String lfn = ( String )it.next(); result.put( lfn, this.lookupNoAttributes( lfn ) ); } return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. * * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * @return a map indexed by the LFN. Each value is a set of physical * filenames. 
*/ public Map lookupNoAttributes( Set lfns, String handle ) { Map result = new HashMap(); for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); Map m = catalog.lookupNoAttributes( lfns, handle ); //merge the map into the result for( Iterator mit = m.entrySet().iterator(); mit.hasNext(); ){ Map.Entry entry = (Map.Entry)mit.next(); //merge the entries into the main result String key = (String)entry.getKey(); //the lfn if( result.containsKey(key) ){ //merge the results ( (Set)result.get( key ) ).addAll( ( Set )entry.getValue() ); } else{ result.put(key,entry.getValue()); } } } return result; } /** * Removes all mappings for an LFN from the replica catalog. * * @param lfn is the logical filename to remove all mappings for. * @return the number of removed entries. */ public int remove( String lfn ) { int result = 0; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result += catalog.remove( lfn ) ; } return result; } /** * Removes all mappings for a set of LFNs. * * @param lfns is a set of logical filenames to remove all mappings for. * * @return the number of removed entries. */ public int remove( Set lfns ) { int result = 0; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result += catalog.remove( lfns ) ; } return result; } /** * Removes all entries from the replica catalog where the PFN attribute * is found, and matches exactly the object value. * * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * * @return the number of removed entries. */ public int removeByAttribute(String name, Object value) { int result = 0; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result += catalog.removeByAttribute( name, value ) ; } return result; } /** * Removes all entries associated with a particular resource handle. * * @param handle is the site handle to remove all entries for. * * @return the number of removed entries. */ public int removeByAttribute(String handle) { int result = 0; for( Iterator it = this.rcIterator(); it.hasNext() ; ){ ReplicaCatalog catalog = (ReplicaCatalog) it.next(); result += catalog.removeByAttribute( handle ) ; } return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/impl/LRC.java0000644000175000017500000032211511757531137026553 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.catalog.replica.impl; import edu.isi.pegasus.planner.catalog.replica.*; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import edu.isi.pegasus.planner.catalog.CatalogException; import edu.isi.pegasus.common.util.CommonProperties; import org.globus.replica.rls.RLSClient; import org.globus.replica.rls.RLSException; import org.globus.replica.rls.RLSAttribute; import org.globus.replica.rls.RLSAttributeObject; import org.globus.replica.rls.RLSString2Bulk; import org.globus.replica.rls.RLSString2; import org.globus.replica.rls.RLSOffsetLimit; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Map; import java.util.HashMap; import java.util.Set; import java.util.HashSet; import java.util.Properties; import java.util.List; import java.util.ArrayList; import java.util.Iterator; /** * This class implements the VDS replica catalog interface on top of the * LRC. This class implementation ends up talking to a single LRC. * It is accessed internally from the RLI implementation. * RLS Exceptions are being caught here. They probably should be thrown * and caught at the calling class (i.e the RLI implementation). * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2585 $ */ public class LRC implements ReplicaCatalog { /** * The number of entries searched in each bulk query to RLS. */ public static final int RLS_BULK_QUERY_SIZE = 1000; /** * The default timeout in seconds to be used while querying the LRC. */ public static final String DEFAULT_LRC_TIMEOUT = "30"; /** * The key that is used to get hold of the timeout value from the properties * object. */ public static final String RLS_TIMEOUT_KEY = "rls.timeout"; /** * The key that is used to get hold of the timeout value from the properties * object. */ public static final String LRC_TIMEOUT_KEY = "lrc.timeout"; /** * The properties key that allow us to associate a site with a LRC URL, * and hence providing a value for the SITE_ATTRIBUTE. * User will specify lrc.site.isi_viz rls://isi.edu to associate * site isi_viz with rls://isi.edu */ public static final String LRC_SITE_TO_LRC_URL_KEY = "lrc.site."; /** * The attribute in RLS that maps to a site handle. */ public static final String SITE_ATTRIBUTE = ReplicaCatalogEntry.RESOURCE_HANDLE; /** * The undefined pool attribute value. The pool attribute is assigned this * value if the pfn queried does not have a pool associated with it. */ public static final String UNDEFINED_SITE = "UNDEFINED_POOL"; /** * The key that is used to get hold of the url from the properties object. */ public static final String URL_KEY = "url"; /** * The key that if set, specifies the proxy to be picked up while connecting * to the RLS. */ public static final String PROXY_KEY = "proxy"; /** * The error message for not connected to LRC. */ public static final String LRC_NOT_CONNECTED_MSG = "Not connected to LRC "; /** * The handle to the logging object. Should be log4j soon. */ private LogManager mLogger; /** * The string holding the message that is logged in the logger. */ private String mLogMsg; /** * The URL pointing to the LRC to which this instance of class talks to. */ private String mLRCURL; /** * The handle to the client that allows access to the RLS running at the * url specified while connecting. 
*/ private RLSClient mRLS; /** * The handle to the client that allows access to the LRC running at the * url specified while connecting. */ private RLSClient.LRC mLRC; /** * The batch size while querying the LRC in the bulk mode. */ private int mBatchSize; /** * The timeout in seconds while querying to the LRC. */ private int mTimeout; /** * The default site attribute to be associated with the results. */ private String mDefaultSiteAttribute; /** * The default constructor, that creates an object which is not linked with * any RLS. Use the connect method to connect to the LRC and use it. * * @see #connect(Properties). */ public LRC() { mRLS = null; mLRC = null; mLogger = LogManagerFactory.loadSingletonInstance(); mBatchSize = LRC.RLS_BULK_QUERY_SIZE; mTimeout = Integer.parseInt(LRC.DEFAULT_LRC_TIMEOUT); } /** * Establishes a connection to the LRC. * * @param props contains all necessary data to establish the link. * @return true if connected now, or false to indicate a failure. */ public boolean connect(Properties props) { boolean con = false; Object obj = props.get(URL_KEY); mLRCURL = (obj == null) ? null : (String) obj; if (mLRCURL == null) { //nothing to connect to. log("The LRC url is not specified", LogManager.ERROR_MESSAGE_LEVEL); return con; } //try to see if a proxy cert has been specified or not String proxy = props.getProperty(PROXY_KEY); //determine timeout mTimeout = getTimeout(props); //set the batch size for querie setBatchSize(props); //stripe out the properties that assoicate site handle to lrc url Properties site2LRC = CommonProperties.matchingSubset( props, LRC.LRC_SITE_TO_LRC_URL_KEY, false); //traverse through the properties to figure out //the default site attribute for the URL for( Iterator it = site2LRC.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry)it.next(); if( entry.getValue().equalsIgnoreCase( mLRCURL ) ){ mDefaultSiteAttribute = entry.getKey(); } } if( mDefaultSiteAttribute != null ){ mLogger.log( "Default Site attribute is " + mDefaultSiteAttribute, LogManager.DEBUG_MESSAGE_LEVEL ); } return connect(mLRCURL, proxy); } /** * Establishes a connection to the LRC, picking up the proxy from the default * location usually /tmp/ directory. * * @param url the url to lrc to connect to. * * @return true if connected now, or false to indicate a failure. */ public boolean connect(String url) { return connect(url,null); } /** * Establishes a connection to the LRC. * * @param url the url to lrc to connect to. * @param proxy the path to the proxy file to be picked up. null denotes * default location. * * @return true if connected now, or false to indicate a failure. * * @throws ReplicaCatalogException in case of */ public boolean connect(String url, String proxy) { mLRCURL = url; try { mRLS = (proxy == null) ? new RLSClient(url) : //proxy is picked up from default loc /tmp new RLSClient(url, proxy); //set the timeout mRLS.SetTimeout(mTimeout); //connect is only successful if we have //successfully connected to the LRC mLRC = mRLS.getLRC(); } catch (RLSException e) { log("RLS Exception", e,LogManager.ERROR_MESSAGE_LEVEL); return false; } return true; } /** * Gets a handle to the LRC that is associated with the RLS running at * url. * * @return RLSClient.LRC that points to the RLI that is * running , or null in case connect method not being called. * @see #mLRCURL */ public RLSClient.LRC getLRC() { return (isClosed()) ? null : mLRC; } /** * Retrieves the entry for a given filename and resource handle from * the LRC. 
* * @param lfn is the logical filename to obtain information for. * @param handle is the resource handle to obtain entries for. * @return the (first) matching physical filename, or * null if no match was found. * * @throws ReplicaCatalogException in case of any error that is throw by LRC * that can't be handled. */ public String lookup(String lfn, String handle) { //sanity check if (this.isClosed()) { throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); } String pfn = null; String site = null; //query the lrc try { List l = mLRC.getPFN(lfn); for (Iterator it = l.iterator(); it.hasNext(); ) { //query for the pool attribute pfn = ( (RLSString2) it.next()).s2; site = getSiteHandle(pfn); if (site.equalsIgnoreCase(handle)) { //ok we have the first pfn with for the site and lfn break; } } } catch (RLSException ex) { if(ex.GetRC() == RLSClient.RLS_LFN_NEXIST || ex.GetRC() == RLSClient.RLS_MAPPING_NEXIST){ pfn = null; } else{ throw exception("lookup(String,String)", ex); } } return pfn; } /** * Retrieves all entries for a given LFN from the LRC. * Each entry in the result set is a tuple of a PFN and all its * attributes. * * @param lfn is the logical filename to obtain information for. * @return a collection of replica catalog entries, or null in case of * unable to connect to RLS or error. * * @throws ReplicaCatalogException in case of any error that is throw by LRC * that can't be handled. * @see ReplicaCatalogEntry */ public Collection lookup(String lfn) throws ReplicaCatalogException { //sanity check if (this.isClosed()) { throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); } List res = new ArrayList(3); //query the lrc try { List l = mLRC.getPFN(lfn); for (Iterator it = l.iterator(); it.hasNext(); ) { String pfn = ( (RLSString2) it.next()).s2; //get hold of all attributes ReplicaCatalogEntry entry = new ReplicaCatalogEntry(pfn, getAttributes(pfn)); res.add(entry); } } catch (RLSException ex) { if(ex.GetRC() == RLSClient.RLS_LFN_NEXIST || ex.GetRC() == RLSClient.RLS_MAPPING_NEXIST){ log("Mapping for lfn " + lfn + " does not exist", LogManager.DEBUG_MESSAGE_LEVEL); } else{ throw exception("lookup(String)", ex); } } return res; } /** * Retrieves all entries for a given LFN from the replica catalog. * Each entry in the result set is just a PFN string. Duplicates * are reduced through the set paradigm. * * @param lfn is the logical filename to obtain information for. * @return a set of PFN strings, or null in case of unable to connect * to RLS. * */ public Set lookupNoAttributes(String lfn) { //sanity check if (this.isClosed()) { throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); } Set res = new HashSet(3); //query the lrc try { List l = mLRC.getPFN(lfn); for (Iterator it = l.iterator(); it.hasNext(); ) { String pfn = ( (RLSString2) it.next()).s2; res.add(pfn); } } catch (RLSException ex) { //am not clear whether to throw the exception or what log("lookup(String,String):", ex,LogManager.ERROR_MESSAGE_LEVEL); return null; } return res; } /** * Retrieves multiple entries for a given logical filename, up to the * complete LRC. It uses the bulk query api to the LRC to query for stuff. * Bulk query has been in RLS since version 2.0.8. Internally, the bulk * queries are done is sizes specified by variable mBatchSize. * * @param lfns is a set of logical filename strings to look up. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries for the LFN. 
* @see ReplicaCatalogEntry * @see #getBatchSize() */ public Map lookup(Set lfns) throws ReplicaCatalogException { //one has to do a bulk query in batches Set s = null; int size = mBatchSize; Map map = new HashMap(lfns.size()); log("Number of files to query LRC " + lfns.size() + " in batch sizes of " + size, LogManager.DEBUG_MESSAGE_LEVEL); for (Iterator it = lfns.iterator(); it.hasNext(); ) { s = new HashSet(size); for (int j = 0; (j < size) && (it.hasNext()); j++) { s.add(it.next()); } if (!s.isEmpty()) { //there is no conflict, as the keys are unique //via the set paradigm. Passing null as we want //to get hold of all attributes. map.putAll(bulkLookup(s, null)); } } return map; } /** * Retrieves multiple entries for a given logical filename, up to the * complete LRC. * * The noAttributes flag is missing on purpose, because * due to the resource handle, attribute lookups are already required. * * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries. * * @see ReplicaCatalogEntry */ public Map lookup(Set lfns, String handle) { return lookup(lfns,SITE_ATTRIBUTE,handle); } /** * Retrieves multiple entries for a given logical filename, up to the * complete LRC. It returns the complete RCE for each entry i.e all the * attributes a pfn is associated with in addition to the one that is * the key for matching. * * @param lfns is a set of logical filename strings to look up. * @param name is the name of the attribute. * @param value is the value of the attribute. * * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries. * * @see ReplicaCatalogEntry */ public Map lookup(Set lfns, String name, Object value) { //one has to do a bulk query in batches Set s = null; int size = mBatchSize; Map map = new HashMap(lfns.size()); log("Number of files to query LRC " + lfns.size() + " in batch sizes of " + size, LogManager.DEBUG_MESSAGE_LEVEL); for (Iterator it = lfns.iterator(); it.hasNext(); ) { s = new HashSet(size); for (int j = 0; (j < size) && (it.hasNext()); j++) { s.add(it.next()); } if (!s.isEmpty()) { //there is no conflict, as the keys are unique //via the set paradigm. //temp contains results indexed by lfn but each value //is a collection of ReplicaCatalogEntry objects //we query for all attributes as we are to return //complete RCE as stipulated by the interface. Map temp = bulkLookup(s, null); //iterate thru it for (Iterator it1 = temp.entrySet().iterator(); it1.hasNext(); ) { Map.Entry entry = (Map.Entry) it1.next(); Set pfns = subset( (Collection) entry.getValue(), name, value); if (!pfns.isEmpty()) { map.put(entry.getKey(), pfns); } } } } return map; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in an online display or portal. * * @param lfns is a set of logical filename strings to look up. * @return a map indexed by the LFN. Each value is a set * of PFN strings. 
*/ public Map lookupNoAttributes(Set lfns) { //one has to do a bulk query in batches Set s = null; int size = mBatchSize; size = (size > lfns.size())?lfns.size():size; Map map = new HashMap(lfns.size()); log("Number of files to query LRC " + lfns.size() + " in batch sizes of " + size,LogManager.DEBUG_MESSAGE_LEVEL); for (Iterator it = lfns.iterator(); it.hasNext(); ) { s = new HashSet(size); for (int j = 0; (j < size) && (it.hasNext()); j++) { s.add(it.next()); } if (!s.isEmpty()) { //there is no conflict, as the keys are unique //via the set paradigm. map.putAll(bulkLookupNoAttributes(s)); } } return map; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.

* * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * * @return a map indexed by the LFN. Each value is a set of * physical filenames. */ public Map lookupNoAttributes(Set lfns, String handle) { return lookupNoAttributes(lfns,SITE_ATTRIBUTE,handle); } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.

* * @param lfns is a set of logical filename strings to look up. * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * * @return a map indexed by the LFN. Each value is a set of * physical filenames. */ public Map lookupNoAttributes(Set lfns, String name, Object value) { //one has to do a bulk query in batches Set s = null; Collection c ; int size = mBatchSize; Map map = new HashMap(lfns.size()); log("Number of files to query LRC " + lfns.size() + " in batch sizes of " + size,LogManager.DEBUG_MESSAGE_LEVEL); for (Iterator it = lfns.iterator(); it.hasNext(); ) { s = new HashSet(size); for (int j = 0; (j < size) && (it.hasNext()); j++) { s.add(it.next()); } if (!s.isEmpty()) { //there is no conflict, as the keys are unique //via the set paradigm. //temp contains results indexed by lfn but each value //is a collection of ReplicaCatalogEntry objects Map temp = bulkLookup(s, name,value); //iterate thru it for (Iterator it1 = temp.entrySet().iterator(); it1.hasNext(); ) { Map.Entry entry = (Map.Entry) it1.next(); c = (Collection) entry.getValue(); //System.out.println("Entry is " + entry); Set pfns = new HashSet(c.size()); for(Iterator cit = c.iterator();cit.hasNext();){ pfns.add( ((ReplicaCatalogEntry)cit.next()).getPFN()); } if (!pfns.isEmpty()) { map.put(entry.getKey(), pfns); } } } } return map; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.

* * At present it DOES NOT SUPPORT ATTRIBUTE MATCHING. * * @param constraints is mapping of keys 'lfn', 'pfn', to a string that * has some meaning to the implementing system. This can be a SQL * wildcard for queries, or a regular expression for Java-based memory * collections. Unknown keys are ignored. Using an empty map requests * the complete catalog. * * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries. * * @see ReplicaCatalogEntry */ public Map lookup(Map constraints) throws ReplicaCatalogException{ return (constraints.isEmpty())? lookup(list()): getAttributes(lookupLFNPFN(constraints),null,null); } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal. At present it does not * support attribute matching. * * @param constraints is mapping of keys 'lfn', 'pfn', or any * attribute name, e.g. the resource handle 'site', to a string that * has some meaning to the implementing system. This can be a SQL * wildcard for queries, or a regular expression for Java-based memory * collections. Unknown keys are ignored. Using an empty map requests * the complete catalog. * * @return A list of MyRLSString2Bulk objects containing * the lfn in s1 field, and pfn in s2 field. The list is * grouped by lfns. The set may be empty. */ public List lookupLFNPFN(Map constraints) { if(isClosed()){ //not connected to LRC //throw an exception?? throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); } /* if(constraints == null || constraints.isEmpty()){ //return the set of all LFNs in the catalog return list(); }*/ List result = new ArrayList(); boolean notFirst = false; for (Iterator i = constraints.keySet().iterator(); i.hasNext(); ) { String key = (String) i.next(); if ( key.equals("lfn") ) { if(notFirst){ //do the AND(intersect)operation result.retainAll(listLFNPFN((String)constraints.get(key),true)); } else{ result = listLFNPFN( (String) constraints.get(key), true); } } else if ( key.equals("pfn") ) { if(notFirst){ //do the AND(intersect)operation result.retainAll(listLFNPFN((String)constraints.get(key),false)); } else{ result = listLFNPFN( (String) constraints.get(key), false); } } else{ //just a warning log("Implementation does not support constraint " + "matching of type " + key, LogManager.WARNING_MESSAGE_LEVEL); } if(result.isEmpty()){ //the intersection is already empty. No use matching further break; } notFirst = true; } //sort according to lfn Collections.sort(result,new RLSString2BulkComparator()); return result; } /** * Lists all logical filenames in the catalog. * * @return a set of all logical filenames known to the catalog or null in * case of not connected to the LRC or error. */ public Set list() { return list("*"); } /** * Lists a subset of all logical filenames in the catalog. * * @param constraint is a constraint for the logical filename only. It * is a string that has some meaning to the implementing system. This * can be a SQL wildcard for queries, or a regular expression for * Java-based memory collections. * * @return A set of logical filenames that match. 
The set may be empty */ public Set list(String constraint) { List l = listLFNPFN(constraint,true); Set result = new HashSet(l.size()); for(Iterator it = l.iterator();it.hasNext();){ RLSString2Bulk rs = (RLSString2Bulk)it.next(); result.add(rs.s1); } return result; } /** * Lists a subset of all LFN,PFN pairs in the catalog matching to * a pfn or a lfn constraint. * * @param constraint is a constraint for the logical filename only. It * is a string that has some meaning to the implementing system. This * can be a SQL wildcard for queries, or a regular expression for * Java-based memory collections. * * @return A set a list of MyRLSString2Bulk objects containing * the lfn in s1 field, and pfn in s2 field. The list is * grouped by lfns. The set may be empty. * * @see #getAttributes(List,String,Object) */ public List listLFNPFN( String constraint, boolean lfnConstraint ) throws ReplicaCatalogException{ if(isClosed()){ //not connected to LRC //throw an exception?? throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); } int size = getBatchSize(); List l = new ArrayList(); ArrayList result = new ArrayList(); int capacity = size; //do a wildcard query in batch sizes RLSOffsetLimit offset = new RLSOffsetLimit(0,size); while(true){ try{ l = (lfnConstraint)? //do lfn matching mLRC.getPFNWC(constraint, offset): //do pfn matching mLRC.getLFNWC(constraint, offset); //we need to group pfns by lfn Collections.sort(l, new RLSString2Comparator()); } catch(RLSException e){ if(e.GetRC() == RLSClient.RLS_PFN_NEXIST || e.GetRC() == RLSClient.RLS_LFN_NEXIST || e.GetRC() == RLSClient.RLS_MAPPING_NEXIST){ log("listLFNPFN(String, boolean) :Mapping matching constraint " + constraint + " does not exist",LogManager.ERROR_MESSAGE_LEVEL); } else{ //am not clear whether to throw the exception or what log("list()", e,LogManager.ERROR_MESSAGE_LEVEL); } //return empty list return new ArrayList(0); } //result = new ArrayList(l.size()); //increment the size of the list //but first the capacity capacity += l.size(); result.ensureCapacity(capacity); for(Iterator it = l.iterator();it.hasNext();){ RLSString2 res = (RLSString2)it.next(); result.add(convert(res)); } if(offset.offset == -1) break;//offset is set to -1 when no more results } return result; } /** * Inserts multiple mappings into the replica catalog. The input is a * map indexed by the LFN. The value for each LFN key is a collection * of replica catalog entries. Ends up doing a sequential insert for all * the entries instead of doing a bulk insert. Easier to track failure this * way. * * @param x is a map from logical filename string to list of replica * catalog entries. * @return the number of insertions. 
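 *
 * A minimal usage sketch, following the pattern exercised in main():
 * <pre>
 *   Map inserts = new HashMap();
 *   Collection c = new ArrayList();
 *   c.add( new ReplicaCatalogEntry( "gsiftp://test/f.a", "isi" ) );
 *   inserts.put( "f.a", c );
 *   int n = insert( inserts );   // bulk inserted in batches of getBatchSize()
 * </pre>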
* @see ReplicaCatalogEntry */ public int insert(Map x) { int result = 0; String lfn; ReplicaCatalogEntry rce = null; // Not doing sequential inserts any longer // Karan April 9, 2006 // Collection c; // for(Iterator it = x.entrySet().iterator();it.hasNext();){ // Map.Entry entry = (Map.Entry)it.next(); // lfn = (String)entry.getKey(); // c = (Collection)entry.getValue(); // log("Inserting entries for lfn " + lfn, // LogManager.DEBUG_MESSAGE_LEVEL); // for(Iterator pfnIt = c.iterator();pfnIt.hasNext();){ // try{ // rce = (ReplicaCatalogEntry)pfnIt.next(); // insert(lfn,rce); // res += 1; // } // catch(ReplicaCatalogException e){ // log("Inserting lfn->pfn " + // lfn + "->" + rce.getPFN(),e, // LogManager.ERROR_MESSAGE_LEVEL); // } // } // } // return res; int size = this.getBatchSize(); int current = 0; String pfn; List lfnPfns = new ArrayList(size); List attrs = new ArrayList(size); CatalogException exception = new ReplicaCatalogException(); //indexed by pfn and values as RLSAttributeObject objects Map attrMap = new HashMap(size); for (Iterator it = x.entrySet().iterator(); it.hasNext(); ) { Map.Entry entry = (Map.Entry)it.next(); lfn = (String)entry.getKey(); Collection c = (Collection)entry.getValue(); //traverse through the rce's for the pfn's for(Iterator pfnIt = c.iterator();pfnIt.hasNext();){ rce = (ReplicaCatalogEntry)pfnIt.next(); pfn = rce.getPFN(); lfnPfns.add(new RLSString2(lfn,pfn)); //increment current only once per pfn if(rce.getAttributeCount() == 0)current++; //build the attributes list for(Iterator attrIt = rce.getAttributeIterator(); attrIt.hasNext();current++){ String key = (String)attrIt.next(); RLSAttribute attr = new RLSAttribute(key,RLSAttribute.LRC_PFN, (String)rce.getAttribute(key)); attrs.add(new RLSAttributeObject(attr,pfn)); attrMap.put(pfn,new RLSAttributeObject(attr,pfn)); } } //check if diff is more than batch size if( current >= size){ //we have the subset of RCE's on which we //want to do bulk inserts, and the value till //we want to do bulk inserts try{ result += bulkInsert(lfnPfns, attrMap); } catch(ReplicaCatalogException e){exception.setNextException(e);} //reset data structures current = 0; lfnPfns.clear(); attrs.clear(); attrMap.clear(); } } //check for the last bunch if(!lfnPfns.isEmpty()){ //we have the subset of RCE's on which we //want to do bulk inserts, and the value till //we want to do bulk inserts try{ result += bulkInsert(lfnPfns, attrMap); }catch(ReplicaCatalogException e){exception.setNextException(e);} current = 0; } //throw an exception only if a nested exception if( (exception = exception.getNextException()) != null) throw exception; return result; } /** * Calls the bulk delete on the mappings. This function can timeout if the * size of the list passed is too large. * * @param lfnPfns list of RLSString2 objects containing the * lfn pfn mappings to be deleted. * * @return the number of items deleted * * @throws ReplicaCatalogException in case of error */ private int bulkDelete(List lfnPfns) throws ReplicaCatalogException{ if(isClosed()){ //not connected to LRC //throw an exception?? throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); } //result only tracks successful lfn->pfn mappings int result = lfnPfns.size(); Collection failedDeletes; CatalogException exception = new ReplicaCatalogException(); //do a bulk delete //FIX ME: The deletes should have been done in batches. 
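        //A possible batching sketch only (not wired in), chunking the list by
        //the configured batch size; placement and loop variable are illustrative:
        //
        //  int batch = getBatchSize();
        //  for( int i = 0; i < lfnPfns.size(); i += batch ){
        //      List chunk = lfnPfns.subList( i,
        //                       Math.min( i + batch, lfnPfns.size() ) );
        //      mLRC.deleteBulk( new ArrayList( chunk ) );
        //  }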
try{ failedDeletes = mLRC.deleteBulk( (ArrayList)lfnPfns); } catch(RLSException e){ mLogger.log("RLS: Bulk Delete " ,e , LogManager.ERROR_MESSAGE_LEVEL); throw new ReplicaCatalogException("RLS: Bulk Delete " + e.getMessage()); } if(!failedDeletes.isEmpty()){ result -= failedDeletes.size(); //FIXME: Do we really care about failed deletes //and reporting why deletes failed. // i think we do. RLSString2Bulk rs; int error; for(Iterator it = failedDeletes.iterator();it.hasNext();){ rs = (RLSString2Bulk)it.next(); error = rs.rc; if(error == RLSClient.RLS_PFN_NEXIST || error == RLSClient.RLS_LFN_NEXIST || error == RLSClient.RLS_MAPPING_NEXIST){ log("Mapping " + rs.s1 + "->" + rs.s2 + " does not exist",LogManager.DEBUG_MESSAGE_LEVEL); } else{ exception.setNextException(exception(rs)); } } } //throw an exception only if a nested exception if( (exception = exception.getNextException()) != null) throw exception; return result; } /** * Calls the bulk insert on the mappings. This function can timeout if the * size of the list passed is too large. * * @param lfnPfns list of RLSString2 objects containing the * lfn pfn mappings to be inserted. * @param attrMap a map indexed by pfn and values as RLSAttributeObject objects. * * @return the number of items inserted * * @throws ReplicaCatalogException in case of error */ private int bulkInsert(List lfnPfns, Map attrMap){ if(isClosed()){ //not connected to LRC //throw an exception?? throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); } //result only tracks successful lfn->pfn mappings int result = lfnPfns.size(); List failedCreates; List failedAdds; CatalogException exception = new ReplicaCatalogException(); try{ /* bulk insert on mappings starts*/ failedCreates = mLRC.createBulk( (ArrayList) lfnPfns); //try to do a bulkAdd on the failed creates List bulkAdd = new ArrayList(failedCreates.size()); for (Iterator it = failedCreates.iterator(); it.hasNext(); ) { RLSString2Bulk rs = (RLSString2Bulk) it.next(); if (rs.rc == RLSClient.RLS_LFN_EXIST) { //s1 is lfn and s2 is pfn bulkAdd.add(new RLSString2(rs.s1, rs.s2)); } else { exception.setNextException(exception(rs)); result--; } } //do a bulk add if list non empty if (!bulkAdd.isEmpty()) { failedAdds = mLRC.addBulk( (ArrayList) bulkAdd); //pipe all the failed adds to the exception for (Iterator it = failedAdds.iterator(); it.hasNext(); ) { RLSString2Bulk rs = (RLSString2Bulk) it.next(); //just log that mapping already exists if (rs.rc == RLSClient.RLS_MAPPING_EXIST) { //we want to log instead of throwning an exception log("LFN-PFN Mapping alreadys exists in LRC " + mLRCURL + " for " + rs.s1 + "->" + rs.s2, LogManager.DEBUG_MESSAGE_LEVEL); result --; } else { exception.setNextException(exception(rs)); result--; } } } /*bulk insert on mappings ends */ /*bulk insert on attributes starts */ ArrayList failedAttrs;//the failed attributes //build the attribute list ArrayList attrs = new ArrayList(attrMap.size()); int num = 0; for(Iterator it = attrMap.values().iterator();it.hasNext();num++){ attrs.add(it.next()); } //try a bulk add on attributes assuming attrs already exist failedAttrs = mLRC.attributeAddBulk((ArrayList)attrs); //go through the failed attributes and create them for(Iterator it = failedAttrs.iterator();it.hasNext();){ RLSString2Bulk s2b = (RLSString2Bulk)it.next(); /* RLSAttribute attributeToAdd = new RLSAttribute(s2b.s2,RLSAttribute.LRC_PFN, (String)tuple.getAttribute(s2b.s2)); */ //s1 is the pfn //s2 is the attribute name String pfn = s2b.s1; RLSAttributeObject attrObject = 
(RLSAttributeObject)attrMap.get(pfn); RLSAttribute attributeToAdd = attrObject.attr; if(s2b.rc == RLSClient.RLS_ATTR_NEXIST){ //we need to create the attribute log("Creating an attribute name " + s2b.s2 + " for pfn " + pfn, LogManager.DEBUG_MESSAGE_LEVEL); try{ //FIXME : should have done a bulkAttributeCreate that doesnt exist mLRC.attributeCreate(s2b.s2, RLSAttribute.LRC_PFN, RLSAttribute.STR); //add the attribute in sequentially instead of bulk mLRC.attributeAdd(pfn,attributeToAdd); } catch(RLSException e){ //ignore any attribute already exist error //case of multiple creates of same attribute if(e.GetRC() != RLSClient.RLS_ATTR_EXIST){ exception.setNextException( new ReplicaCatalogException("Adding attrib to pfn " + pfn + " " + e.getMessage())); } } } else if(s2b.rc == RLSClient.RLS_ATTR_EXIST){ log("Attribute " + s2b.s2 + " for pfn " + pfn + " already exists", LogManager.DEBUG_MESSAGE_LEVEL); //get the existing value of attribute List l = null; try{ l = mLRC.attributeValueGet(pfn, s2b.s2, RLSAttribute.LRC_PFN); } catch(RLSException e){ exception.setNextException( new ReplicaCatalogException("Getting value of existing attrib "+ e.getMessage())); } if(l == null || l.isEmpty() || l.size() > 1){ log("Contents of list are " + l,LogManager.DEBUG_MESSAGE_LEVEL); //should never happen log("Unreachable case.",LogManager.FATAL_MESSAGE_LEVEL); throw new ReplicaCatalogException( "Whammy while trying to get value of an exisiting attribute " + s2b.s2 + " associated with PFN " + pfn); } //try to see if it matches with the existing value RLSAttribute attribute = (RLSAttribute)l.get(0); if(!attribute.GetStrVal().equalsIgnoreCase( attributeToAdd.GetStrVal())){ //log a warning saying updating value mLogMsg = "Existing value for attribute " + s2b.s2 + " associated with PFN " + pfn + " updated with new value " + attributeToAdd.GetStrVal(); //update the value try{ mLRC.attributeModify(pfn, attributeToAdd); log(mLogMsg,LogManager.WARNING_MESSAGE_LEVEL); } catch(RLSException e){ exception.setNextException( new ReplicaCatalogException("RLS Exception "+ e.getMessage())); } } } else { exception.setNextException(exception(s2b)); } } /*bulk insert on attributes ends */ } catch(RLSException e){ exception.setNextException( new ReplicaCatalogException("RLS Exception "+ e.getMessage())); } //throw an exception only if a nested exception if( (exception = exception.getNextException()) != null) throw exception; return result; } /** * Inserts a new mapping into the replica catalog. The attributes are added * in bulk assuming the attribute definitions already exist. If an attribute * definition does not exist, it is created and inserted. Note there is no * notion of transactions in LRC. It assumes all the attributes are of type * String. * * @param lfn is the logical filename under which to book the entry. * @param tuple is the physical filename and associated PFN attributes. * * @return number of insertions, should always be 1. On failure, * throws an exception instead of returning zero. */ public int insert(String lfn, ReplicaCatalogEntry tuple) { Map m = new HashMap(1); List l = new ArrayList(1); l.add(tuple); m.put(lfn,l); return insert(m); // Just composing the call to insert(Map method) // Only one code handles inserts. Karan April 12, 2006 // if(isClosed()){ // //not connected to LRC // //throw an exception?? // throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); // } // int res = 0; // //we have no notion of transaction at this point. 
// String pfn = tuple.getPFN(); // try{ // //insert the pfn // mLRC.add(lfn, pfn); // } // catch(RLSException e){ // if(e.GetRC() == RLSClient.RLS_LFN_NEXIST){ // //the first instance of the lfn, so we add // //instead of creating the mapping // try{ // mLRC.create(lfn, pfn); // } // catch(RLSException ex){ // throw new ReplicaCatalogException("RLS Exception "+ ex.getMessage()); // } // } // else if(e.GetRC() == RLSClient.RLS_MAPPING_EXIST){ // log("LFN-PFN Mapping alreadys exists in LRC " // + mLRCURL + " for " + lfn + "->" + pfn, // LogManager.ERROR_MESSAGE_LEVEL); // return res; // } // else // throw new ReplicaCatalogException("RLS Exception "+ e.getMessage()); // } // // //we need to add attributes in bulk // String key; // ArrayList failedAttrs;//the failed attributes // ArrayList attrs = new ArrayList(tuple.getAttributeCount()); // for(Iterator it = tuple.getAttributeIterator(); it.hasNext();){ // key = (String)it.next(); // RLSAttribute attr = new RLSAttribute(key,RLSAttribute.LRC_PFN, // (String)tuple.getAttribute(key)); // attrs.add(new RLSAttributeObject(attr,pfn)); // // } // // try{ // failedAttrs = mLRC.attributeAddBulk(attrs); // } // catch(RLSException e){ // throw new ReplicaCatalogException("RLS Exception "+ e.getMessage()); // } // // //go through the failed attributes and create them // for(Iterator it = failedAttrs.iterator();it.hasNext();){ // RLSString2Bulk s2b = (RLSString2Bulk)it.next(); // RLSAttribute attributeToAdd = new RLSAttribute(s2b.s2,RLSAttribute.LRC_PFN, // (String)tuple.getAttribute(s2b.s2)); // //s1 is the pfn // //s2 is the attribute name // if(s2b.rc == RLSClient.RLS_ATTR_NEXIST){ // //we need to create the attribute // log("Creating an attribute name " + s2b.s2 + // " for pfn " + pfn, LogManager.DEBUG_MESSAGE_LEVEL); // try{ // mLRC.attributeCreate(s2b.s2, RLSAttribute.LRC_PFN, // RLSAttribute.STR); // //add the attribute in sequentially instead of bulk // mLRC.attributeAdd(pfn,attributeToAdd); // // } // catch(RLSException e){ // throw new ReplicaCatalogException("RLS Exception "+ e.getMessage()); // } // } // else if(s2b.rc == RLSClient.RLS_ATTR_EXIST){ // log("Attribute " + s2b.s2 + " for pfn " + pfn + // " already exists", LogManager.DEBUG_MESSAGE_LEVEL); // //get the existing value of attribute // List l = null; // try{ // l = mLRC.attributeValueGet(pfn, s2b.s2, RLSAttribute.LRC_PFN); // } // catch(RLSException e){ // throw new ReplicaCatalogException("RLS Exception "+ e.getMessage()); // } // if(l == null || l.isEmpty() || l.size() > 1){ // log("Contents of list are " + l,LogManager.DEBUG_MESSAGE_LEVEL); // //should never happen // log("Unreachable case.",LogManager.FATAL_MESSAGE_LEVEL); // throw new ReplicaCatalogException( // "Whammy while trying to get value of an exisiting attribute " + // s2b.s2 + " associated with PFN " + pfn); // } // //try to see if it matches with the existing value // RLSAttribute attribute = (RLSAttribute)l.get(0); // if(!attribute.GetStrVal().equalsIgnoreCase( // attributeToAdd.GetStrVal())){ // // //log a warning saying updating value // mLogMsg = "Existing value for attribute " + s2b.s2 + // " associated with PFN " + pfn + // " updated with new value " + attributeToAdd.GetStrVal(); // // //update the value // try{ // mLRC.attributeModify(pfn, attributeToAdd); // log(mLogMsg,LogManager.WARNING_MESSAGE_LEVEL); // } // catch(RLSException e){ // throw new ReplicaCatalogException("RLS Exception" + // e.getMessage()); // } // } // } // else{ // throw new ReplicaCatalogException( // "Unknown Error while adding 
//                      attributes. RLS Error Code " +
//                      s2b.rc);
//            }
//        }
//
//        return 1;
    }

    /**
     * Inserts a new mapping into the replica catalog. This is a
     * convenience function exposing the resource handle. Internally,
     * the ReplicaCatalogEntry element will be constructed, and passed to
     * the appropriate insert function.
     *
     * @param lfn is the logical filename under which to book the entry.
     * @param pfn is the physical filename associated with it.
     * @param handle is a resource handle where the PFN resides.
     *
     * @return number of insertions, should always be 1. On failure,
     * throw an exception, don't use zero.
     *
     * @see #insert( String, ReplicaCatalogEntry )
     * @see ReplicaCatalogEntry
     */
    public int insert(String lfn, String pfn, String handle) {
        //prepare the appropriate ReplicaCatalogEntry object
        ReplicaCatalogEntry rce = new ReplicaCatalogEntry(pfn,handle);
        return insert(lfn,rce);
    }

    /**
     * Deletes multiple mappings from the replica catalog. The input is a
     * map indexed by the LFN. The value for each LFN key is a collection
     * of replica catalog entries. On setting matchAttributes to false, all
     * entries having a matching lfn,pfn mapping to an entry in the Map are
     * deleted. However, upon removal of an entry, all attributes associated
     * with the pfn also evaporate (cascaded deletion).
     * The deletes are done in batches.
     *
     * @param x is a map from logical filename string to list of
     * replica catalog entries.
     * @param matchAttributes whether mapping should be deleted only if all
     * attributes match.
     *
     * @return the number of deletions.
     * @see ReplicaCatalogEntry
     */
    public int delete( Map x , boolean matchAttributes){
        int result = 0;
        if(isClosed()){
            //not connected to LRC
            //throw an exception??
            throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL);
        }
        String lfn,pfn;
        ReplicaCatalogEntry rce;
        Collection c;
        CatalogException exception = new ReplicaCatalogException();

        if(matchAttributes){
            //do a sequential delete for the time being
            for(Iterator it = x.entrySet().iterator();it.hasNext();){
                Map.Entry entry = (Map.Entry)it.next();
                lfn = (String)entry.getKey();
                c   = (Collection)entry.getValue();

                //iterate through all RCE's for this lfn and delete.
                //advance the inner iterator here; calling it.next() on the
                //outer entry iterator at this point was a bug
                for(Iterator rceIt = c.iterator();rceIt.hasNext();){
                    rce = (ReplicaCatalogEntry)rceIt.next();
                    result += delete(lfn,rce);
                }
            }
        }
        else{
            //we can use bulk delete
            int size = this.getBatchSize();
            int current = 0;
            List lfnPfns = new ArrayList(size);
            for (Iterator it = x.entrySet().iterator(); it.hasNext(); ) {
                Map.Entry entry = (Map.Entry)it.next();
                lfn = (String)entry.getKey();
                c = (Collection)entry.getValue();

                //traverse through the rce's for the pfn's
                for(Iterator pfnIt = c.iterator();pfnIt.hasNext();){
                    rce = (ReplicaCatalogEntry) pfnIt.next();
                    pfn = rce.getPFN();
                    lfnPfns.add(new RLSString2(lfn, pfn));
                    current++;

                    //check if diff is more than batch size
                    if( current >= size){
                        //we have the subset of RCE's on which we
                        //want to do bulk deletes
                        try{
                            result += bulkDelete(lfnPfns);
                        }
                        catch(ReplicaCatalogException e){exception.setNextException(e);}
                        current = 0;
                        lfnPfns.clear();
                    }
                }
            }
            //check for the last bunch
            if(!lfnPfns.isEmpty()){
                //we have the subset of RCE's on which
                //we want to do bulk deletes
                try{
                    result += bulkDelete(lfnPfns);
                }
                catch(ReplicaCatalogException e){exception.setNextException(e);}
                current = 0;
            }
        }
        //throw an exception only if a nested exception
        if( (exception = exception.getNextException()) != null)
            throw exception;

        return result;
    }

    /**
     * Deletes a specific mapping from the replica catalog. We don't care
     * about the resource handle.
More than one entry could theoretically * be removed. Upon removal of an entry, all attributes associated * with the PFN also evaporate (cascading deletion) automatically at the * RLS server end. * * @param lfn is the logical filename in the tuple. * @param pfn is the physical filename in the tuple. * * @return the number of removed entries. */ public int delete(String lfn, String pfn) { int res = 0; if(isClosed()){ //not connected to LRC //throw an exception?? throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); } try{ mLRC.delete(lfn,pfn); res++; } catch(RLSException e){ if(e.GetRC() == RLSClient.RLS_PFN_NEXIST || e.GetRC() == RLSClient.RLS_LFN_NEXIST || e.GetRC() == RLSClient.RLS_MAPPING_NEXIST){ log("Mapping " + lfn + "->" + pfn + " does not exist",LogManager.DEBUG_MESSAGE_LEVEL); } else{ throw new ReplicaCatalogException("Error while deleting mapping " + e.getMessage()); } } return res; } /** * Deletes a very specific mapping from the replica catalog. The LFN * must be matches, the PFN, and all PFN attributes specified in the * replica catalog entry. More than one entry could theoretically be * removed. Upon removal of an entry, all attributes associated with * the PFN also evaporate (cascading deletion). * * @param lfn is the logical filename in the tuple. * @param tuple is a description of the PFN and its attributes. * @return the number of removed entries, either 0 or 1. */ public int delete(String lfn, ReplicaCatalogEntry tuple) { int res = 0; if(isClosed()){ //not connected to LRC //throw an exception?? throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); } //get hold of all the RCE in this LRC matching to lfn Collection c = lookup(lfn); ReplicaCatalogEntry rce; for(Iterator it = c.iterator();it.hasNext();){ rce = (ReplicaCatalogEntry)it.next(); if(rce.equals(tuple)){ //we need to delete the rce //cascaded deletes take care of the attribute deletes delete(lfn,tuple.getPFN()); res++; } } return res; } /** * Deletes all PFN entries for a given LFN from the replica catalog * where the PFN attribute is found, and matches exactly the object * value. This method may be useful to remove all replica entries that * have a certain MD5 sum associated with them. It may also be harmful * overkill. * * @param lfn is the logical filename to look for. * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * * @return the number of removed entries. */ public int delete(String lfn, String name, Object value) { int result = 0; Collection c = null; if(isClosed()){ //not connected to LRC //throw an exception?? throw new ReplicaCatalogException(LRC_NOT_CONNECTED_MSG + this.mLRCURL); } //query lookup for that lfn and delete accordingly. 
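        //e.g. a (hypothetical) call delete( "f.a", "md5", "0f3e..." ) resolves
        //the PFNs whose md5 attribute matches, via lookupNoAttributes(Set,String,Object)
        //below, and then hands the lfn->pfn pairs to mLRC.deleteBulk in one shot.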
Set s = new HashSet(1); s.add(lfn); Map map = this.lookupNoAttributes(s,name,value); if(map == null || map.isEmpty()){ return 0; } //we need to pipe this into a list of RLSString2 objects ArrayList lfnPfns = new ArrayList(3); for(Iterator it = map.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry) it.next(); lfn = (String)entry.getKey(); for (Iterator it1 = ( (Set) entry.getValue()).iterator(); it1.hasNext(); ) { RLSString2 lfnPfn = new RLSString2(lfn, (String) it1.next()); lfnPfns.add(lfnPfn); result++; } } try{ c = mLRC.deleteBulk(lfnPfns); } catch(RLSException e){ log("remove(Set)" ,e,LogManager.ERROR_MESSAGE_LEVEL); } //c should be empty ideally if(!c.isEmpty()){ result -= c.size(); log("Removing lfns remove(Set)" + c, LogManager.ERROR_MESSAGE_LEVEL); } return result; } /** * Deletes all PFN entries for a given LFN from the replica catalog * where the resource handle is found. Karan requested this * convenience method, which can be coded like *

     * <pre>
     *  delete( lfn, SITE_ATTRIBUTE, handle )
     * </pre>
* * @param lfn is the logical filename to look for. * @param handle is the resource handle * * @return the number of entries removed. * * @see #SITE_ATTRIBUTE */ public int deleteByResource(String lfn, String handle) { return delete(lfn,SITE_ATTRIBUTE,handle); } /** * Removes all mappings for a set of LFNs. * * @param lfn is a set of logical filename to remove all mappings for. * * @return the number of removed entries. */ public int remove(String lfn) { //first get hold of all the pfn mappings for the lfn Collection c = this.lookupNoAttributes(lfn); int result = 0; if(c == null || c.isEmpty()){ return 0; } //we need to pipe this into a list of RLSString2Bulk objects result = c.size(); ArrayList lfnPfns = new ArrayList(result); for(Iterator it = c.iterator();it.hasNext();){ RLSString2 lfnPfn = new RLSString2(lfn,(String)it.next()); lfnPfns.add(lfnPfn); } //do a bulk delete try{ c = mLRC.deleteBulk(lfnPfns); } catch(RLSException e){ log("remove(String)",e,LogManager.ERROR_MESSAGE_LEVEL); } //c should be empty ideally if(!c.isEmpty()){ result -= c.size(); log("remove(String)" + c,LogManager.ERROR_MESSAGE_LEVEL); } return result; } /** * Removes all mappings for a set of LFNs. * * @param lfns is a set of logical filename to remove all mappings for. * * @return the number of removed entries. */ public int remove(Set lfns) { String lfn = null; Collection c = null; int result = 0; //first get hold of all the pfn mappings for the lfn Map map = this.lookupNoAttributes(lfns); if(map == null || map.isEmpty()){ return 0; } //we need to pipe this into a list of RLSString2 objects ArrayList lfnPfns = new ArrayList(map.keySet().size()); for(Iterator it = map.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry) it.next(); lfn = (String)entry.getKey(); for (Iterator it1 = ( (Set) entry.getValue()).iterator(); it1.hasNext(); ) { RLSString2 lfnPfn = new RLSString2(lfn, (String) it1.next()); lfnPfns.add(lfnPfn); result++; } } //do a bulk delete //FIX ME: The deletes should have been done in batches. try{ c = mLRC.deleteBulk(lfnPfns); } catch(RLSException e){ log("remove(Set)" + e,LogManager.ERROR_MESSAGE_LEVEL); } //c should be empty ideally if(!c.isEmpty()){ result -= c.size(); log("remove(Set)" + c,LogManager.ERROR_MESSAGE_LEVEL); for(Iterator it = c.iterator();it.hasNext();){ RLSString2Bulk rs2 = (RLSString2Bulk)it.next(); System.out.println("(" + rs2.s1 + "->" + rs2.s2 +"," + rs2.rc + ")"); } } return result; } /** * Removes all entries associated with a particular resource handle. * This is useful, if a site goes offline. It is a convenience method, * which calls the generic removeByAttribute method. * * @param handle is the site handle to remove all entries for. * * @return the number of removed entries. * * @see #removeByAttribute( String, Object ) */ public int removeByAttribute(String handle) { return removeByAttribute(SITE_ATTRIBUTE,handle); } /** * Removes all entries from the replica catalog where the PFN attribute * is found, and matches exactly the object value. * * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * * @return the number of removed entries. 
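 *
 * For example, the site handle convenience method above reduces to the
 * (illustrative) call
 * <pre>
 *   removeByAttribute( SITE_ATTRIBUTE, "isi" );
 * </pre>
 * which scans the complete catalog via list() before issuing the bulk delete.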
*/ public int removeByAttribute(String name, Object value) { String lfn = null; String pfn = null; Collection c = null; int result = 0; //get hold of all the lfns in the lrc Set s = list(); //first get hold of all the pfn mappings for the lfn Map map = this.lookup(s,name,value); if(map == null || map.isEmpty()){ return 0; } //we need to pipe this into a list of RLSString2 objects ArrayList lfnPfns = new ArrayList(result); for(Iterator it = map.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry) it.next(); lfn = (String)entry.getKey(); //System.out.println(lfn + " ->"); for (Iterator it1 = ( (Set) entry.getValue()).iterator(); it1.hasNext(); ) { pfn = ((ReplicaCatalogEntry)it1.next()).getPFN(); RLSString2 lfnPfn = new RLSString2(lfn, pfn); lfnPfns.add(lfnPfn); result++; //System.out.print(lfnPfn.s2 + ","); } } //do a bulk delete //FIX ME: The deletes should have been done in batches. try{ c = mLRC.deleteBulk(lfnPfns); } catch(RLSException e){ throw new ReplicaCatalogException("Bulk Delete: " + e.getMessage()); } //c should be empty ideally if(!c.isEmpty()){ result -= c.size(); log("removeByAttribute(String,Object)" + c, LogManager.ERROR_MESSAGE_LEVEL); } return result; } /** * Removes everything. Use with caution! * * @return the number of removed entries. */ public int clear() { //do a bulk delete //FIX ME: The deletes should have been done in batches. try{ mLRC.clear(); } catch(RLSException e){ log("clear()",e,LogManager.ERROR_MESSAGE_LEVEL); } return 0; /* String lfn = null; String pfn = null; Collection c = null; int result = 0; //get hold of all the lfns in the lrc Set s = list(); //first get hold of all the pfn mappings for the lfn Map map = this.lookupNoAttributes(s); if(map == null || map.isEmpty()){ return 0; } //we need to pipe this into a list of RLSString2 objects ArrayList lfnPfns = new ArrayList(result); for(Iterator it = map.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry) it.next(); lfn = (String)entry.getKey(); //System.out.println(lfn + " ->"); for (Iterator it1 = ( (Set) entry.getValue()).iterator(); it1.hasNext(); ) { pfn = ((String)it1.next()); RLSString2 lfnPfn = new RLSString2(lfn, pfn); lfnPfns.add(lfnPfn); result++; //System.out.print(lfnPfn.s2 + ","); } } //do a bulk delete //FIX ME: The deletes should have been done in batches. try{ c = mLRC.deleteBulk(lfnPfns); } catch(RLSException e){ log("clear()",e,LogManager.ERROR_MESSAGE_LEVEL); } //c should be empty ideally if(!c.isEmpty()){ result -= c.size(); log("clear()" + c,LogManager.ERROR_MESSAGE_LEVEL); } return result; */ } /** * Explicitely free resources before the garbage collection hits. */ public void close() { try { if (mRLS != null) { mRLS.Close(); } } catch (RLSException e) { //ignore } finally { mRLS = null; } } /** * Returns whether the connection to the RLS with which this instance is * associated is closed or not. * * @return true, if the implementation is disassociated, false otherwise. * @see #close() */ public boolean isClosed() { return (mRLS == null); } /** * It returns the timeout value in seconds after which to timeout in case of * no activity from the LRC. * * Referred to by the "lrc.timeout" property. * * @return the timeout value if specified else, * the value specified by "rls.timeout" property, else * DEFAULT_LRC_TIMEOUT. * * @see #DEFAULT_LRC_TIMEOUT */ protected int getTimeout(Properties properties) { String prop = properties.getProperty( this.LRC_TIMEOUT_KEY); //if prop is null get rls timeout, prop = (prop == null)? 
properties.getProperty(this.RLS_TIMEOUT_KEY):prop; int val = 0; try { val = Integer.parseInt( prop ); } catch ( Exception e ) { val = Integer.parseInt( DEFAULT_LRC_TIMEOUT ); } return val; } /** * Returns the site handle associated with the pfn at the lrc to which * the instance of this application binds. It returns UNDEFINED_SITE * even when the pfn is not in the lrc. * * @param pfn The pfn with which the attribute is associated. * * @return value of the attribute if found * else UNDEFINED_POOL */ private String getSiteHandle(String pfn) { return getSiteHandle(mLRC, pfn); } /** * Returns the site handle associated with a pfn at the lrc associated * with the RLSClient passed. It returns UNDEFINED_SITE * even when the pfn is not in the lrc. * * @param lrc the handle to the lrc , where the attributes are stored. * @param pfn the pfn with which the attribute is associated. * * @return value of the attribute if found * else UNDEFINED_POOL * */ private String getSiteHandle(RLSClient.LRC lrc, String pfn) { String poolAttr = getAttribute(lrc, pfn, SITE_ATTRIBUTE); return (poolAttr == null) ? defaultResourceHandle() : poolAttr; } /** * Returns the default value that is to be assigned to site handle * for a replica catalog entry. * * @return default site handle */ private String defaultResourceHandle(){ return ( this.mDefaultSiteAttribute == null ) ? LRC.UNDEFINED_SITE: this.mDefaultSiteAttribute; } /** * Sets the resource handle in an attribute map. * The resource handle is set to the default site handle if the map * does not contain the site attribute key. * * @param m the attribute map. * * @see #defaultResourceHandle() */ private void setResourceHandle( Map m ){ String dflt = defaultResourceHandle(); //update the site attribute only if the default //attribute is other than undefined site if( m.containsKey( LRC.SITE_ATTRIBUTE) && !dflt.equals(LRC.UNDEFINED_SITE ) ){ //populate the default site handle m.put( LRC.SITE_ATTRIBUTE, dflt ); } else if( !m.containsKey( LRC.SITE_ATTRIBUTE ) ){ //populate the default site handle m.put( LRC.SITE_ATTRIBUTE, dflt ); } } /** * Sets the resource handle in an attribute map. * The resource handle is set to the default site handle if the map * does not contain the site attribute key. * * @param rce the ReplicaCatalogEntry * * @see #defaultResourceHandle() */ private void setResourceHandle( ReplicaCatalogEntry rce ){ String dflt = defaultResourceHandle(); //update the site attribute only if the default //attribute is other than undefined site if( rce.hasAttribute( LRC.SITE_ATTRIBUTE) && !dflt.equals(LRC.UNDEFINED_SITE ) ){ //populate the default site handle rce.setResourceHandle( dflt ); } else if( ! rce.hasAttribute( LRC.SITE_ATTRIBUTE ) ){ //populate the default site handle rce.setResourceHandle( dflt ); } } /** * Retrieves from the lrc, associated with this instance all the * attributes associated with the pfn in a map. All the * attribute values are stored as String. * * @param pfn the pfn with which the attribute is associated. * * @return Mapcontaining the attribute keys and values, * else an empty Map. */ private Map getAttributes(String pfn) { return getAttributes(mLRC, pfn); } /** * Retrieves from the lrc associated with this instance, all the * attributes associated with the lfn-pfns in a map indexed by the lfn. * The value for each entry is a collection of * ReplicaCatalogEntry objects. * All the attribute values are stored as String. * * If the attribute value passed is not null, then explicit matching occurs * on attribute values in addition. 
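 *
 * Shape of the transformation, with illustrative values:
 * <pre>
 *   [ (f.a, gsiftp://test/f.a), (f.a, file:///tmp/f.a) ]
 *       ->  { f.a -> [ RCE( gsiftp://test/f.a, {pool=isi} ),
 *                      RCE( file:///tmp/f.a,   {pool=isi} ) ] }
 * </pre>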
* * @param lfnPfns a list of RLSString2Bulk objects containing * the lfn in s1 field, and pfn in s2 field. The list is * assumed to be grouped by lfns. * @param attrKey the name of attribute that needs to be queried for each * pfn. a value of null denotes all attributes. * @param attrVal the value of the attribute that should be matching. * * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries for the LFN. */ private Map getAttributes(List lfnPfns, String attrKey, Object attrVal) { Map result = new HashMap(); String curr = null; String prev = null; //loss of information. i should have known the size at this pt! List l = new ArrayList(); ArrayList pfns = new ArrayList(lfnPfns.size()); int size = mBatchSize; ReplicaCatalogEntry entry = null; Map temp = new HashMap(); Map pfnMap = new HashMap(); //contains pfn and their ReplicaCatalogEntry objects //sanity check if(lfnPfns == null || lfnPfns.isEmpty()){ return result; } //put just the pfns in a list that needs //to be sent to the RLS API for (Iterator it = lfnPfns.iterator(); it.hasNext(); ) { pfns.add( ( (RLSString2Bulk) it.next()).s2); } //now query for the attributes in bulk List attributes = null; try { attributes = mLRC.attributeValueGetBulk(pfns, attrKey, RLSAttribute.LRC_PFN); } catch (RLSException e) { //some other error, but we can live with it. //just flag as warning mLogMsg = "getAttributes(List,String,Object)"; log(mLogMsg, e,LogManager.ERROR_MESSAGE_LEVEL); return result; } //we need to sort them on the basis of the pfns //which is the populate the key field Collections.sort(attributes, new RLSAttributeComparator()); /* System.out.println("Sorted attributes are "); for(Iterator it = attributes.iterator(); it.hasNext();){ RLSAttributeObject obj = (RLSAttributeObject) it.next(); if(obj.rc == RLSClient.RLS_ATTR_NEXIST){ System.out.print("\tAttribute does not exist"); } System.out.println("\t" + obj.key + "->rc" + obj.rc); } */ for (Iterator it = attributes.iterator(); it.hasNext(); ) { RLSAttributeObject attr = (RLSAttributeObject) it.next(); Object value = (attr.rc == RLSClient.RLS_ATTR_NEXIST)? 
null: //assign an empty value getAttributeValue(attr.attr);//retrieve the value curr = attr.key; //push in the attribute into the temp map only //if prev is null or the prev and current pfn's match if((prev == null || curr.equalsIgnoreCase(prev)) && (value != null)//value being null means no attribute associated && ((attrVal == null) || (attrVal.equals(value)) )){ temp.put(attr.attr.name,value); } else{ //push it into the map all attributes for a single pfn //only if the map is not empty or there was no matching //being done attrVal (i.e it is null) if(attrVal == null || !temp.isEmpty()){ entry = new ReplicaCatalogEntry(prev, temp); //System.out.println("0:Entry being made is " + entry); //the entry has to be put in a map keyed by the pfn name pfnMap.put(prev, entry); temp = new HashMap(); } //added June 15,2005 if(value != null && ( attrVal == null || attrVal.equals(value))){ temp.put(attr.attr.name,value); } } //push in the last attribute entry if(!it.hasNext()){ //push it into the map all attributes for a single pfn //only if the map is not empty or there was no matching //being done attrVal (i.e it is null) if(attrVal == null || !temp.isEmpty()){ entry = new ReplicaCatalogEntry(curr, temp); //System.out.println("1:Entry being made is " + entry); //the entry has to be put in a map keyed by the pfn name pfnMap.put(curr, entry); } } prev = curr; } //the final iteration that groups the pfn and their //attributes according to the lfn prev = null; for (Iterator it = lfnPfns.iterator(); it.hasNext(); ) { RLSString2Bulk lfnPfn = (RLSString2Bulk) it.next(); curr = lfnPfn.s1; entry = (ReplicaCatalogEntry) pfnMap.get(lfnPfn.s2); if(entry == null){ //means no match on attribute or attribute value was found continue; } if (!curr.equalsIgnoreCase(prev) && (prev != null)) { //push it into the map //we have entry for one lfn and all pfns constructed //System.out.println("Putting in entry for " + prev + " " + l); result.put(prev, l); l = new ArrayList(); } //set a site handle if not already set setResourceHandle( entry ); l.add(entry); //if this was the last one push it in result if(!it.hasNext()){ //System.out.println("Putting in entry for " + curr + " " + l); result.put(curr, l); } prev = curr; } return result; } /** * Retrieves from the lrc, all the attributes associated with the pfn * in a map. All the attribute values are stored as String. * * @param lrc the handle to the lrc , where the attributes are stored. * @param pfn the pfn with which the attribute is associated. * @return Mapcontaining the attribute keys and values, * else an empty Map. */ private Map getAttributes(RLSClient.LRC lrc, String pfn) { String val = null; List attrList = null; Map m = new HashMap(); RLSAttribute att = null; try { //passing null denotes to get //hold of all attributes attrList = lrc.attributeValueGet(pfn, null, RLSAttribute.LRC_PFN); } catch (RLSException e) { //attribute does not exist error means no attributes //associated, return empty map else just denote a warning if(e.GetRC() != RLSClient.RLS_ATTR_NEXIST){ //some other error, but we can live with it. //just flag as warning mLogMsg = "getAttributes(RLSClient.LRC,String)"; log(mLogMsg, e,LogManager.ERROR_MESSAGE_LEVEL); } //associate a default value if required. setResourceHandle( m ); return m; } //iterate throught the list and push all //the attributes in the map for (Iterator it = attrList.iterator(); it.hasNext(); ) { att = (RLSAttribute) it.next(); //the list can contain a null attribute key //we dont want that. 
if( att.name != null ){ m.put(att.name, att.GetStrVal()); } } //populate default site handle if //site attribute is not specified setResourceHandle( m ); return m; } /** * Retrieves from the lrc associated with this instance all, the attribute * value associated with the pfn for a given attribute name. * * @param pfn the pfn with which the attribute is associated. * @param name the name of the attribute for which we want to search. * * @return value of the attribute if found * else null */ private String getAttribute(String pfn, String name) { return getAttribute(mLRC, pfn, name); } /** * Retrieves from the lrc, the attribute value associated with the pfn * for a given attribute name. * * @param lrc the handle to the lrc , where the attributes are stored. * @param pfn the pfn with which the attribute is associated. * @param name the name of the attribute for which we want to search. * * @return value of the attribute if found * else null */ private String getAttribute(RLSClient.LRC lrc, String pfn, String name) { String val = null; List attrList = null; try { attrList = lrc.attributeValueGet(pfn, name, RLSAttribute.LRC_PFN); } catch (RLSException e) { if (e.GetRC() == RLSClient.RLS_ATTR_NEXIST) { //attribute does not exist we return null } else { //some other error, but we can live with it. //just flag as warning mLogMsg = "getAttribute(String,String,String):"; log(mLogMsg, e,LogManager.ERROR_MESSAGE_LEVEL); } return null; } return (attrList.isEmpty()) ? null : //we return the first attribute value //Does not make much sense for //more than one attribute value //for the same key and pfn attrList.get(0).toString(); } /** * Retrieves the attribute value as an object from the RLSAttribute * object. Does automatic boxing (i.e converts int to Integer) etc. * The value is returned of the type as determined from the internal value * type. * * @param attr the RLSAttribute from which to extract the value. * * @return Object containing the value. * * @throws ReplicaCatalogException if illegal value associated. */ private Object getAttributeValue(RLSAttribute attr){ Object obj = null; int type = attr.GetValType(); switch(type){ case RLSAttribute.STR: obj = attr.GetStrVal(); break; case RLSAttribute.DATE: obj = attr.GetDateVal(); break; case RLSAttribute.DOUBLE: obj = new Double(attr.GetDoubleVal()); break; case RLSAttribute.INT: obj = new Integer(attr.GetIntVal()); break; default: throw new ReplicaCatalogException("Invalid value type associated " + type); } return obj; } /** * Sets the number of lfns in each batch while querying the lrc in the * bulk mode. * * @param properties the properties passed while connecting. * */ private void setBatchSize(Properties properties) { String s = properties.getProperty(this.BATCH_KEY); int size = this.RLS_BULK_QUERY_SIZE; try{ size = Integer.parseInt(s); } catch(Exception e){} mBatchSize = size; } /** * Returns the number of lfns in each batch while querying the lrc in the * bulk mode. * * @return the batch size. */ private int getBatchSize() { return mBatchSize; } /** * Retrieves multiple entries for a given logical filename, up to the * complete LRC. It uses the bulk query api to the LRC to query for stuff. * Bulk query has been in RLS since version 2.0.8. All the lfns in set * are put in one single bulk query to the LRC. There is a risk of seeing * a timeout error in case of large set of lfns. User should use the * lookup function that internally does the bulk query in batches. 
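 *
 * In other words, callers should prefer the public batched entry point,
 * for instance
 * <pre>
 *   Map m = lookup( lfns );   // batches internally by the configured batch size
 * </pre>
 * over invoking this method directly with a very large set.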
* Passing a null value for the attribute key results in the querying for all * attributes. The function returns ReplicaCatalogEntry objects * that have the attribute identified by attribute key passed. * * @param lfns set of logical filename strings to look up. * @param attrKey the name of attribute that needs to be queried for each * pfn. a value of null denotes all attributes. * * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries for the LFN. * @see ReplicaCatalogEntry * @see #lookup(Set) */ private Map bulkLookup(Set lfns, String attrKey) { return bulkLookup(lfns,attrKey,null); } /** * Retrieves multiple entries for a given logical filename, up to the * complete LRC. It uses the bulk query api to the LRC to query for stuff. * Bulk query has been in RLS since version 2.0.8. All the lfns in set * are put in one single bulk query to the LRC. There is a risk of seeing * a timeout error in case of large set of lfns. User should use the * lookup function that internally does the bulk query in batches. * Passing a null value for the attribute key results in the querying for all * attributes. A null value for the attribute value, disables attribute matching * and results in the ReplicaCatalogEntry objects that have * the attribute identified by attribute key passed. * * @param lfns set of logical filename strings to look up. * @param attrKey the name of attribute that needs to be queried for each * pfn. a value of null denotes all attributes. * @param attrVal the value of the attribute that should be matching. * * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries for the LFN. * @see ReplicaCatalogEntry * @see #lookup(Set) */ private Map bulkLookup(Set lfns, String attrKey, Object attrVal) { List list = null; List lfnsFound = null; RLSString2Bulk curr = null; int size = mBatchSize; Map result = new HashMap(lfns.size()); try { list = mLRC.getPFNBulk( new ArrayList(lfns)); //we need to group pfns by lfn Collections.sort(list, new RLSString2BulkComparator()); /* System.out.println("Sorted list is "); for(Iterator it = list.iterator(); it.hasNext();){ RLSString2Bulk s2b = (RLSString2Bulk) it.next(); System.out.println("\t" + s2b.s1 + "->" + s2b.s2); } */ size = list.size() <= size ? list.size() :size; for (Iterator it = list.iterator(); it.hasNext(); ) { //the pfn themseleves need to be queried //in batches to avoid timeout errors but the batch size //should have all the pfns for a lfn!! List l = new ArrayList(size); String prev = ""; if (curr != null) { //this is the case where the current //item is not in any of the sublists l.add(curr); } for (int j = 0; (it.hasNext()); ) { RLSString2Bulk s2b = (RLSString2Bulk) it.next(); //s1 is the lfn //s2 denotes the pfn //rc is the exit status returned by the RLI if (s2b.rc == RLSClient.RLS_SUCCESS) { curr = s2b; if (s2b.s2 != null) { //query for the pool attribute //for that pfn to the lrc //if none is found or you do not //query for the attribute //pool is set to UNDEFINED_POOL if (!curr.s1.equalsIgnoreCase(prev)) { //do nothing //check if j > size if (j >= size) { //break out of the loop. 
//current needs to go into the next list break; } } l.add(s2b); j++; } else { mLogMsg = "bulkLookup(Set): Unexpected Mapping with no pfn for lfn: " + s2b.s1; log(mLogMsg,LogManager.ERROR_MESSAGE_LEVEL); } prev = curr.s1; } else if (s2b.rc != RLSClient.RLS_LFN_NEXIST) { mLogMsg = "bulkLookup(Set): " + mRLS.getErrorMessage(s2b.rc); log(mLogMsg,LogManager.ERROR_MESSAGE_LEVEL); } //prev = curr.s1; } //get hold of all attributes for the pfn's result.putAll(getAttributes(l, attrKey,attrVal)); } } catch (Exception e) { log("bulkLookup(Set)", e,LogManager.FATAL_MESSAGE_LEVEL); System.exit(1); } return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete LRC. It uses the bulk query api to the LRC to query for stuff. * Bulk query has been in RLS since version 2.0.8. All the lfns in set * are put in one single bulk query to the LRC. There is a risk of seeing * a timeout error in case of large set of lfns. User should use the * lookup function that internally does the bulk query in batches. * * @param lfns is a set of logical filename strings to look up. * * @return a map indexed by the LFN. Each value is a set * of PFN strings. * * @see #lookupNoAttributes(Set) */ private Map bulkLookupNoAttributes(Set lfns) { List list = null; List lfnsFound = null; Map result = new HashMap(lfns.size()); String prev = null; String curr = null; Set s = new HashSet(); try { list = mLRC.getPFNBulk( new ArrayList(lfns)); //we need to group pfns by lfn Collections.sort(list, new RLSString2BulkComparator()); for (Iterator it = list.iterator(); it.hasNext(); ) { RLSString2Bulk s2b = (RLSString2Bulk) it.next(); //s1 is the lfn //s2 denotes the pfn //rc is the exit status returned by the RLI if (s2b.rc == RLSClient.RLS_SUCCESS) { curr = s2b.s1; if (s2b.s2 != null) { if (!curr.equalsIgnoreCase(prev) && (prev != null)) { //push it into the map //we have entry for one lfn and all pfns constructed result.put(prev, s); s = new HashSet(); } s.add(s2b.s2); //if this was the last one push it in result if(!it.hasNext()){ result.put(curr,s); } } else { mLogMsg = "bulkLookupNoAttributes(Set): Unexpected Mapping with no pfn for lfn: " + s2b.s1; log(mLogMsg,LogManager.ERROR_MESSAGE_LEVEL); } prev = curr; } else if (s2b.rc != RLSClient.RLS_LFN_NEXIST) { mLogMsg = "bulkLookupNoAttributes(Set): " + mRLS.getErrorMessage(s2b.rc); log(mLogMsg,LogManager.ERROR_MESSAGE_LEVEL); } //prev = curr; } } catch (Exception e) { log("bulkLookupNoAttributes(Set):", e, LogManager.FATAL_MESSAGE_LEVEL); System.exit(1); } return result; } /** * Constructs replica catalog exception out the RLSException that is * thrown by the API underneath. * * @param prefix the prefix that is to be applied to the message * passed while creating the exception. * @param e the RLSException that is caught underneath. * * @return a ReplicaCatalogException */ private ReplicaCatalogException exception(String prefix,RLSException e){ StringBuffer message = new StringBuffer(32); message.append("{LRC ").append(mLRCURL).append("} ") .append(prefix).append(": ").append(e.getMessage()); return new ReplicaCatalogException(message.toString(),e); } /** * Constructs an exception from the RLSString2Bulk object. 
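 * @param rs the RLSString2Bulk object whose lfn, pfn and return code
 *           are folded into the exception message.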
* * @return a ReplicaCatalogException */ private ReplicaCatalogException exception(RLSString2Bulk rs){ StringBuffer sb = new StringBuffer(32); sb.append("Error (lfn,pfn,ec)").append(" (") .append(rs.s1).append(',') .append(rs.s2).append(',') .append(rs.rc).append(',').append(mRLS.getErrorMessage(rs.rc)) .append(')'); return new ReplicaCatalogException(sb.toString()); } /** * Returns a subset of a collection of ReplicaCatalogEntry * objects that have attributes matchin to the attribute identified by * the parameters passed. * * @param collection the collection of ReplicaCatalogEntry * objects. * @param name the attribute name to match. * @param value the attribute value. * * @return Set of matching ReplicaCatalogEntry objects. */ private Set subset(Collection collection, String name, Object value) { return subset(collection,name,value,false); } /** * Returns a subset of a collection of ReplicaCatalogEntry * objects that have attributes matchin to the attribute identified by * the parameters passed. * * @param collection the collection of ReplicaCatalogEntry * objects. * @param name the attribute name to match. * @param value the attribute value. * @param onlyPFN boolean to denote if we only want the PFN's * * @return Set of ReplicaCatalogEntry objects if onlyPfn * parameter is set to false, else a Set of pfns. */ private Set subset(Collection collection, String name, Object value, boolean onlyPFN) { Set s = new HashSet(); ReplicaCatalogEntry rce; Object attrVal; for (Iterator it = collection.iterator(); it.hasNext(); ) { rce = (ReplicaCatalogEntry) it.next(); //System.out.println("RCE is " + rce); attrVal = rce.getAttribute(name); if ( attrVal != null && attrVal.equals(value)) { //adding to the set only if //the attribute existed in the rce s.add(onlyPFN? (Object)rce.getPFN(): rce); } } return s; } /** * A helper method that converts RLSString2 to MyRLSString2Bulk object. * * @param obj the RLSString2 to convert. * * @return the converted MyRLSString2 object. */ private RLSString2Bulk convert(RLSString2 obj){ return new MyRLSString2Bulk(0,obj.s1,obj.s2); } /** * Logs to the logger object. * * @param message the message to be logged. * @param level the logger level at which the message is to be logged. */ private void log(String message,int level){ message = "{LRC " + mLRCURL + "} " + message; mLogger.log(message,level); } /** * Logs to the logger object. * * @param message the message to be logged. * @param e the exception that occured. * @param level the logger level at which the message is to be logged. */ private void log(String message,Exception e,int level){ message = "{LRC " + mLRCURL + "} " + message; mLogger.log(message,e,level); } /** * The comparator that is used to group the RLSString2 objects by the * value in the s1 field. This comparator should only be used for grouping * purposes not in Sets or Maps etc. */ private class RLSString2Comparator implements Comparator { /** * Compares this object with the specified object for order. Returns a * negative integer, zero, or a positive integer if the first argument is * less than, equal to, or greater than the specified object. The * RLSString2 are compared by their s1 field. * * @param o1 is the first object to be compared. * @param o2 is the second object to be compared. * * @return a negative number, zero, or a positive number, if the * object compared against is less than, equals or greater than * this object. * @exception ClassCastException if the specified object's type * prevents it from being compared to this Object. 
*/ public int compare(Object o1, Object o2) { if (o1 instanceof RLSString2 && o2 instanceof RLSString2) { return ( (RLSString2) o1).s1.compareTo( ( (RLSString2) o2).s1); } else { throw new ClassCastException("object is not RLSString2"); } } } /** * The comparator that is used to group the RLSString2Bulk objects by the * value in the s1 field. This comparator should only be used for grouping * purposes not in Sets or Maps etc. */ private class RLSString2BulkComparator implements Comparator { /** * Compares this object with the specified object for order. Returns a * negative integer, zero, or a positive integer if the first argument is * less than, equal to, or greater than the specified object. The * RLSString2Bulk are compared by their s1 field. * * @param o1 is the first object to be compared. * @param o2 is the second object to be compared. * * @return a negative number, zero, or a positive number, if the * object compared against is less than, equals or greater than * this object. * @exception ClassCastException if the specified object's type * prevents it from being compared to this Object. */ public int compare(Object o1, Object o2) { if (o1 instanceof RLSString2Bulk && o2 instanceof RLSString2Bulk) { return ( (RLSString2Bulk) o1).s1.compareTo( ( (RLSString2Bulk) o2).s1); } else { throw new ClassCastException("object is not RLSString2Bulk"); } } } /** * The comparator that is used to group the RLSAttributeObject objects by the * value in the key field. This comparator should only be used for grouping * purposes not in Sets or Maps etc. */ private class RLSAttributeComparator implements Comparator { /** * Compares this object with the specified object for order. Returns a * negative integer, zero, or a positive integer if the first argument is * less than, equal to, or greater than the specified object. The * RLSAttributeObject are compared by their s1 field. * * @param o1 is the first object to be compared. * @param o2 is the second object to be compared. * * @return a negative number, zero, or a positive number, if the * object compared against is less than, equals or greater than * this object. * @exception ClassCastException if the specified object's type * prevents it from being compared to this Object. */ public int compare(Object o1, Object o2) { if (o1 instanceof RLSAttributeObject && o2 instanceof RLSAttributeObject) { return ( (RLSAttributeObject) o1).key.compareTo( ( ( RLSAttributeObject) o2).key); } else { throw new ClassCastException("object is not RLSAttributeObject"); } } } /** * The class that extends RLSString2Bulk and adds on the equals method, * that allows me to do the set operations */ private class MyRLSString2Bulk extends RLSString2Bulk{ /** * The overloaded constructor. * * @param rca the rls exitcode * @param s1a the String object usually containing the lfn */ public MyRLSString2Bulk(int rca, java.lang.String s1a){ super(rca,s1a); } /** * The overloaded constructor. * * @param rca the rls exitcode. * @param s1a the String object usually containing the lfn. * @param s2a the String object usually containing the pfn. */ public MyRLSString2Bulk(int rca, java.lang.String s1a, java.lang.String s2a){ super(rca,s1a,s2a); } /** * Indicates whether some other object is "equal to" this one. * * An object is considered equal if it is of the same type and * all the fields s1 and s2 match. * * @return boolean whether the object is equal or not. 
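 *
 * Note that hashCode() is not overridden alongside this method, so
 * instances are only suited to the linear, equals-based operations used
 * here (e.g. List.retainAll in lookupLFNPFN), not to hash-based collections.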
*/ public boolean equals(Object obj){ if(obj instanceof MyRLSString2Bulk){ MyRLSString2Bulk sec = (MyRLSString2Bulk)obj; return this.s1.equals(sec.s1) && this.s2.equals(sec.s2); } return false; } /** * Returns a string representation of the object. * * @return the String representation. */ public String toString(){ StringBuffer sb = new StringBuffer(10); sb.append("(").append(s1).append("->").append(s2). append(",").append(rc).append(")"); return sb.toString(); } } /** * Testing function. */ public static void main(String[] args){ LRC lrc = new LRC(); lrc.connect("rls://sukhna.isi.edu"); String lfn = "test"; LogManagerFactory.loadSingletonInstance().setLevel(LogManager.DEBUG_MESSAGE_LEVEL); /* ReplicaCatalogEntry rce = new ReplicaCatalogEntry("gsiftp://sukhna.isi.edu/tmp/test"); rce.addAttribute("name","karan"); lrc.insert("test",rce); lrc.insert("test","gsiftp://sukhna.isi.edu/tmp/test1","isi"); lrc.insert("test","gsiftp://sukhna.isi.edu/constraint/testvahi","isi"); lrc.insert("vahi.f.a","file:///tmp/vahi.f.a","isi"); lrc.insert("testvahi.f.a","file:///tmp/testvahi.f.a","isi"); rce = new ReplicaCatalogEntry("gsiftp://sukhna.isi.edu/tmp/testX"); rce.addAttribute("name","karan"); rce.addAttribute("pool","isi"); lrc.insert("testX",rce); */ /* System.out.println("Getting list of lfns"); System.out.println("\t" + lrc.list()); Set s = new HashSet(); s.add("test");s.add("vahi.f.a");s.add("testX"); s.add("unknown"); System.out.println("\nQuerying for complete RCE for site " + s ); System.out.println(lrc.lookup(s)); */ /* System.out.println("\n Deleting " + lfn); System.out.println(lrc.deleteByResource(lfn,"isi")); System.out.println("\nQuerying for PFN for site" + s ); System.out.println(lrc.lookupNoAttributes(s,"isi")); System.out.println("\nRemoving lfns " + s); //System.out.println(lrc.remove(s)); System.out.println(lrc.removeByAttribute("isi")); System.out.println("\n\nClearing catalog " + lrc.clear()); */ //System.out.println("Getting list of lfns"); //System.out.println("\t" + lrc.listLFNPFN("*vahi*",false)); /* System.out.println("Removing lfns in set " + s + " "); System.out.println(lrc.removeByAttribute("isi")); Map m = new HashMap(); //m.put("pfn","*vahi*"); m.put("lfn","test*"); System.out.println("Getting lfns matching constraint"); System.out.println("\t" + lrc.lookup(m)); */ //test bulk insert System.out.println("Clearing the database"); //lrc.clear(); Map inserts = new HashMap(); Collection c1 = new ArrayList(); c1.add(new ReplicaCatalogEntry("gsiftp://test/f.a","isi")); Collection c2 = new ArrayList(); c2.add(new ReplicaCatalogEntry("gsiftp://test/f.b","isi")); Collection c3 = new ArrayList(); c3.add(new ReplicaCatalogEntry("gsiftp://test/f.c","isi1")); inserts.put("f.a",c1); inserts.put("f.b",c2); inserts.put("f.c",c3); System.out.println("Doing bulk inserts"); try{ System.out.println("Inserted " + lrc.insert(inserts) + " entries"); } catch(ReplicaCatalogException rce){ do { System.out.println(rce.getMessage()); rce = (ReplicaCatalogException) rce.getNextException(); } while ( rce != null ); } lrc.close(); } } //end of class LRC pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/impl/RLI.java0000644000175000017500000021267211757531137026567 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.replica.impl; import edu.isi.pegasus.planner.catalog.replica.*; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import edu.isi.pegasus.planner.catalog.CatalogException; import edu.isi.pegasus.common.util.Version; import org.globus.replica.rls.RLSClient; import org.globus.replica.rls.RLSException; import org.globus.replica.rls.RLSAttribute; import org.globus.replica.rls.RLSAttributeObject; import org.globus.replica.rls.RLSLRCInfo; import org.globus.replica.rls.RLSString2Bulk; import org.globus.replica.rls.RLSString2; import java.util.Collection; import java.util.Collections; import java.util.Comparator; import java.util.Map; import java.util.HashMap; import java.util.Set; import java.util.HashSet; import java.util.Properties; import java.util.List; import java.util.ArrayList; import java.util.LinkedHashSet; import java.util.Iterator; /** * This class implements the VDS replica catalog interface on top of RLI API. * A thing to take care of is that all delete and remove operations are * propoagated to all the Local Replica Catalogs (LRCs) reporting to the RLI. * Hence, * you should be careful while deleting LFNs, as deletions can cascade to * multiple LRCs. If you want to delete or remove an LFN from a particular LRC, * use the LRC implementation to connect to that LRC and call the corresponding * delete functions on that. * There is no transaction support in the implementation. The implementation * is best effort. Inconsistencies can occur if one of the LRCs goes offline, * or an operation fails for whatsoever reason. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2079 $ */ public class RLI implements ReplicaCatalog { /** * The number of entries searched in each bulk query to RLS. */ public static final int RLS_BULK_QUERY_SIZE = 1000; /** * The default timeout in seconds to be used while querying the RLI. */ public static final String DEFAULT_RLI_TIMEOUT = "30"; /** * The key that is used to get hold of the timeout value from the properties * object. */ public static final String RLS_TIMEOUT_KEY = "rls.timeout"; /** * The key that is used to get hold of the timeout value from the properties * object. */ public static final String RLI_TIMEOUT_KEY = "rli.timeout"; /** * The key that is used to designate the LRC whose results are to be * ignored. */ public static final String LRC_IGNORE_KEY = "lrc.ignore"; /** * The key that is used to designate the LRC whose results are to be * restricted. */ public static final String LRC_RESTRICT_KEY = "lrc.restrict"; /** * The attribute in RLS that maps to a site handle. */ public static final String SITE_ATTRIBUTE = "pool"; /** * The key that is used to get hold of the url from the properties object. */ public static final String URL_KEY = "url"; /** * The key that if set, specifies the proxy to be picked up while connecting * to the RLS. 
*/ public static final String PROXY_KEY = "proxy"; /** * The error message for not connected to RLI. */ public static final String RLI_NOT_CONNECTED_MSG = "Not connected to RLI "; /** * The error message for not connected to LRC. */ public static final String LRC_NOT_CONNECTED_MSG = "Unable to connect to LRC "; /** * The LRC query state indicating that LRC needs to queried fully. The LRC * returns all PFNs irrespective of whether they have a site attribute or * not. */ public static final int LRC_QUERY_NORMAL = 0; /** * The LRC query state indicating that LRC has to be restricted query. * LRC should return only PFNs with site attributes tagged. */ public static final int LRC_QUERY_RESTRICT = 1; /** * The LRC query state indicating that LRC has to be ignored. */ public static final int LRC_QUERY_IGNORE = 2; /** * The handle to the client that allows access to both the RLI and the LRC * running at the url specified while connecting. */ private RLSClient mRLS; /** * The handle to the client that allows access to the LRC running at the * url specified while connecting. */ private RLSClient.RLI mRLI; /** * The url to the RLI to which this instance implementation talks to. */ private String mRLIURL; /** * A String array contains the LRC URLs that have to be ignored for querying. */ private String[] mLRCIgnoreList; /** * A String array contains the LRC URLs that have to be restricted for querying. * Only those entries are returned that have a site attribute associated * with them. */ private String[] mLRCRestrictList; /** * The handle to the logging object. Should be log4j soon. */ private LogManager mLogger; /** * The string holding the message that is logged in the logger. */ private String mLogMsg; /** * The properties object containing all the properties, that are required * to connect to a RLS. */ private Properties mConnectProps; /** * The batch size while querying the RLI in the bulk mode. */ private int mBatchSize; /** * The timeout in seconds to be applied while querying the RLI. */ private int mTimeout; /** * The default constructor, that creates an object which is not linked with * any RLS. Use the connect method to connect to the RLS. * * @see #connect(Properties). */ public RLI() { mRLS = null; mLogger = LogManagerFactory.loadSingletonInstance(); mConnectProps = new Properties(); mBatchSize = this.RLS_BULK_QUERY_SIZE; mTimeout = Integer.parseInt(DEFAULT_RLI_TIMEOUT); } /** * Establishes a connection to the RLI, picking up the proxy from the default * location usually /tmp/ directory. * * @param url the url to lrc to connect to. * * @return true if connected now, or false to indicate a failure. */ public boolean connect(String url) { return connect(url,null); } /** * Establishes a connection to the RLI. * * @param props contains all necessary data to establish the link. * * @return true if connected now, or false to indicate a failure. */ public boolean connect(Properties props) { boolean con = false; Object obj = props.remove(URL_KEY); mRLIURL = (obj == null) ? null : (String) obj; if (mRLIURL == null) { //nothing to connect to. mLogger.log("The RLI url is not specified", LogManager.ERROR_MESSAGE_LEVEL); return con; } //try to see if a proxy cert has been specified or not String proxy = props.getProperty(PROXY_KEY); mConnectProps = props;//?? 
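        //note: the properties object is kept by reference, not copied;
        //the bulk lookup/delete methods below re-use it, overwriting
        //URL_KEY with each reporting LRC url before connecting to it.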
mLogger.log( "[RLI-RC] Connection properties passed are " + props, LogManager.DEBUG_MESSAGE_LEVEL ); mLRCIgnoreList = this.getRLSLRCIgnoreURLs( props ); mLRCRestrictList = this.getRLSLRCRestrictURLs( props ); //determine timeout mTimeout = getTimeout(props); //set the batch size for queries setBatchSize(props); return connect(mRLIURL, proxy); } /** * Establishes a connection to the RLI. * * @param url the url to lrc to connect to. * @param proxy the path to the proxy file to be picked up. null denotes * default location. * * @return true if connected now, or false to indicate a failure. */ public boolean connect(String url, String proxy) { mRLIURL = url; //push it into the internal properties object mConnectProps.setProperty(URL_KEY,url); if(proxy != null){ mConnectProps.setProperty(PROXY_KEY, proxy); } try { mRLS = (proxy == null) ? new RLSClient(url) : //proxy is picked up from default loc /tmp new RLSClient(url, proxy); //set RLI timeout mRLS.SetTimeout(mTimeout); //connect is only successful if we have //successfully connected to the LRC mRLI = mRLS.getRLI(); } catch (RLSException e) { mLogger.log("RLS Exception", e,LogManager.ERROR_MESSAGE_LEVEL); return false; } return true; } /** * Gets a handle to the RLI that is associated with the RLS running at * url. * * @return RLSClient.RLI that points to the RLI that is * running , or null in case connect method not being called. * @see #mRLIURL */ public RLSClient.RLI getRLI() { return (this.isClosed()) ? null: mRLS.getRLI() ; } /** * Gets a handle to the LRC that is associated with the RLS running at * url. * * @return RLSClient.LRC that points to the RLI that is * running , or null in case connect method not being called. * @see #mRLIURL */ public RLSClient.LRC getLRC() { return (this.isClosed()) ? null : mRLS.getLRC(); } /** * Retrieves the entry for a given filename and resource handle from * the RLS. * * @param lfn is the logical filename to obtain information for. * @param handle is the resource handle to obtain entries for. * * @return the (first) matching physical filename, or * null if no match was found. */ public String lookup(String lfn, String handle) { //sanity check if (this.isClosed()) { //probably an exception should be thrown here!! throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL); } String pfn = null; ArrayList lrcList = null; try { lrcList = mRLI.getLRC(lfn); for (Iterator it = lrcList.iterator(); it.hasNext(); ) { //connect to an lrc String lrcURL = ( (RLSString2) it.next()).s2; //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,lrcURL); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + lrcURL, LogManager.ERROR_MESSAGE_LEVEL); continue; } //query the lrc try{ pfn = lrc.lookup(lfn,handle); if(pfn != null) return pfn; } catch(Exception ex){ mLogger.log("lookup(String,String)",ex, LogManager.ERROR_MESSAGE_LEVEL); } finally{ //disconnect lrc.close(); } } } catch (RLSException ex) { mLogger.log("lookup(String,String)",ex, LogManager.ERROR_MESSAGE_LEVEL); } return null; } /** * Retrieves all entries for a given LFN from the replica catalog. * Each entry in the result set is a tuple of a PFN and all its * attributes. * * @param lfn is the logical filename to obtain information for. * * @return a collection of replica catalog entries, or null in case of * unable to connect to RLS. 
* * @see ReplicaCatalogEntry */ public Collection lookup(String lfn) { Set lfns = new HashSet(); lfns.add( lfn ); Map> result = this.lookup( lfns ); if( result == null ){ return null; } else{ Collection values = result.get( lfn ); if( values == null ){ //JIRA PM-74 values = new ArrayList(); } return values; } } /** * Retrieves all entries for a given LFN from the replica catalog. * Each entry in the result set is just a PFN string. Duplicates * are reduced through the set paradigm. * * @param lfn is the logical filename to obtain information for. * @return a set of PFN strings, or null in case of unable to connect * to RLS. * */ public Set lookupNoAttributes(String lfn) { Set lfns = new HashSet(); lfns.add( lfn ); Map> result = this.lookupNoAttributes( lfns ); if( result == null ){ return null; } else{ Set values = result.get( lfn ); if( values == null ){ //JIRA PM-74 values = new HashSet(); } return values; } } /** * Retrieves multiple entries for a given logical filename, up to the * complete LRC. It uses the bulk query api to the LRC to query for stuff. * Bulk query has been in RLS since version 2.0.8. Internally, the bulk * queries are done is sizes specified by variable mBatchSize. * * @param lfns is a set of logical filename strings to look up. * * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries for the LFN. * * @see ReplicaCatalogEntry * @see #getBatchSize() */ public Map lookup(Set lfns) { //Map indexed by lrc url and each value a collection //of lfns that the RLI says are present in it. Map lrc2lfn = this.getLRC2LFNS(lfns); if(lrc2lfn == null){ //probably RLI is not connected!! return null; } // now query the LRCs with the LFNs that they are responsible for // and aggregate stuff. String key = null; Map result = new HashMap(lfns.size()); String message; for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = (String)entry.getKey(); message = "Querying LRC " + key; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,key); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + key, LogManager.ERROR_MESSAGE_LEVEL); continue; } //query the lrc try{ Map m = lrc.lookup((Set)entry.getValue()); //figure out if we need to restrict our queries or not. //restrict means only include results if they have a site //handle associated boolean restrict = ( this.determineQueryType(key) == this.LRC_QUERY_RESTRICT ); for(Iterator mit = m.entrySet().iterator();mit.hasNext();){ entry = (Map.Entry)mit.next(); List pfns = (( List )entry.getValue()); if ( restrict ){ //traverse through all the PFN's and check for resource handle for ( Iterator pfnIterator = pfns.iterator(); pfnIterator.hasNext(); ){ ReplicaCatalogEntry pfn = (ReplicaCatalogEntry) pfnIterator.next(); if ( pfn.getResourceHandle() == null ){ //do not include in the results if the entry does not have //a pool attribute associated with it. mLogger.log("Ignoring entry " + entry.getValue() + " from LRC " + key, LogManager.DEBUG_MESSAGE_LEVEL); pfnIterator.remove(); } } } //if pfns are empty which could be due to //restriction case taking away all pfns //do not merge in result if( pfns.isEmpty() ){ continue; } //merge the entries into the main result key = (String)entry.getKey(); //the lfn if( result.containsKey(key) ){ //right now no merging of RCE being done on basis //on them having same pfns. 
duplicate might occur. ((List)result.get(key)).addAll( pfns ); } else{ result.put( key, pfns ); } } } catch(Exception ex){ mLogger.log("lookup(Set)",ex,LogManager.ERROR_MESSAGE_LEVEL); } finally{ //disconnect lrc.close(); } mLogger.log( message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Retrieves all entries for a given LFN from the replica catalog. * Each entry in the result set is just a PFN string. Duplicates * are reduced through the set paradigm. * * @param lfns is a set of logical filename strings to look up. * @return a map indexed by the LFN. Each value is a collection * of PFN's for the LFN. */ public Map lookupNoAttributes(Set lfns) { //Map indexed by lrc url and each value a collection //of lfns that the RLI says are present in it. Map lrc2lfn = this.getLRC2LFNS(lfns); if(lrc2lfn == null){ //probably RLI is not connected!! return null; } // now query the LRCs with the LFNs that they are responsible for // and aggregate stuff. String key = null; String message; Map result = new HashMap(lfns.size()); for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = (String)entry.getKey(); message = "Querying LRC " + key; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,key); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + key, LogManager.ERROR_MESSAGE_LEVEL); continue; } //query the lrc try{ Map m = lrc.lookupNoAttributes((Set)entry.getValue()); for(Iterator mit = m.entrySet().iterator();mit.hasNext();){ entry = (Map.Entry)mit.next(); //merge the entries into the main result key = (String)entry.getKey(); //the lfn if(result.containsKey(key)){ //right now no merging of RCE being done on basis //on them having same pfns. duplicate might occur. ((Set)result.get(key)).addAll((Set)entry.getValue()); } else{ result.put(key,entry.getValue()); } } } catch(Exception ex){ mLogger.log("lookup(Set)",ex, LogManager.ERROR_MESSAGE_LEVEL); } finally{ //disconnect lrc.close(); } mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Retrieves multiple entries for a given logical filenames, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.

* * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries (all attributes). * * @see ReplicaCatalogEntry */ public Map lookup(Set lfns, String handle) { //Map indexed by lrc url and each value a collection //of lfns that the RLI says are present in it. Map lrc2lfn = this.getLRC2LFNS(lfns); if(lrc2lfn == null){ //probably RLI is not connected!! return null; } // now query the LRCs with the LFNs they are responsible for // and aggregate stuff. String key = null,message = null; Map result = new HashMap(lfns.size()); for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = (String)entry.getKey(); message = "Querying LRC " + key; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,key); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + key, LogManager.ERROR_MESSAGE_LEVEL); continue; } //query the lrc try{ Map m = lrc.lookup((Set)entry.getValue(),handle); for(Iterator mit = m.entrySet().iterator();mit.hasNext();){ entry = (Map.Entry)mit.next(); //merge the entries into the main result key = (String)entry.getKey(); //the lfn if(result.containsKey(key)){ //right now no merging of RCE being done on basis //on them having same pfns. duplicate might occur. ((Set)result.get(key)).addAll((Set)entry.getValue()); } else{ result.put(key,entry.getValue()); } } } catch(Exception ex){ mLogger.log("lookup(Set,String)",ex, LogManager.ERROR_MESSAGE_LEVEL); } finally{ //disconnect lrc.close(); } mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.

* * The noAttributes flag is missing on purpose, because * due to the resource handle, attribute lookups are already required. * * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * * @return a map indexed by the LFN. Each value is a set of * physical filenames. */ public Map lookupNoAttributes( Set lfns, String handle ){ //Map indexed by lrc url and each value a collection //of lfns that the RLI says are present in it. Map lrc2lfn = this.getLRC2LFNS(lfns); if(lrc2lfn == null){ //probably RLI is not connected!! return null; } // now query the LRCs with the LFNs that they are responsible for // and aggregate stuff. String key = null,message = null; Map result = new HashMap(lfns.size()); for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = (String)entry.getKey(); message = "Querying LRC " + key; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,key); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + key, LogManager.ERROR_MESSAGE_LEVEL); continue; } //query the lrc try{ Map m = lrc.lookupNoAttributes((Set)entry.getValue(),handle); for(Iterator mit = m.entrySet().iterator();mit.hasNext();){ entry = (Map.Entry)mit.next(); //merge the entries into the main result key = (String)entry.getKey(); //the lfn if(result.containsKey(key)){ //right now no merging of RCE being done on basis //on them having same pfns. duplicate might occur. ((Set)result.get(key)).addAll((Set)entry.getValue()); } else{ result.put(key,entry.getValue()); } } } catch(Exception ex){ mLogger.log("lookup(Set,String):",ex, LogManager.ERROR_MESSAGE_LEVEL); } finally{ //disconnect lrc.close(); } mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.

* * * At present it DOES NOT SUPPORT ATTRIBUTE MATCHING. * * @param constraints is mapping of keys 'lfn', 'pfn' to a string that * has some meaning to the implementing system. This can be a SQL * wildcard for queries, or a regular expression for Java-based memory * collections. Unknown keys are ignored. Using an empty map requests * the complete catalog. * * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries. * * @see ReplicaCatalogEntry */ public Map lookup(Map constraints) { //sanity check if (this.isClosed()) { //probably an exception should be thrown here!! throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL); } Map result = new HashMap(); String url = null,message = null; //we need to get hold of all the LRC //that report to the RLI and call the //list() method on each of them for(Iterator it = this.getReportingLRC().iterator();it.hasNext();){ url = (String)it.next(); message = "Querying LRC " + url; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,url); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + url, LogManager.ERROR_MESSAGE_LEVEL); continue; } try{ Map m = lrc.lookup(constraints); for(Iterator mit = m.entrySet().iterator();mit.hasNext();){ Map.Entry entry = (Map.Entry)mit.next(); //merge the entries into the main result String key = (String)entry.getKey(); //the lfn if(result.containsKey(key)){ //right now no merging of RCE being done on basis //on them having same pfns. duplicate might occur. ((List)result.get(key)).addAll((List)entry.getValue()); } else{ result.put(key,entry.getValue()); } } } catch(Exception e){ mLogger.log("list(String)",e,LogManager.ERROR_MESSAGE_LEVEL); } finally{ lrc.close(); } } return result; } /** * Lists all logical filenames in the catalog. * * @return A set of all logical filenames known to the catalog. */ public Set list() { //sanity check if (this.isClosed()) { //probably an exception should be thrown here!! throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL); } Set result = new HashSet(); String url = null,message = null; //we need to get hold of all the LRC //that report to the RLI and call the //list() method on each of them for(Iterator it = this.getReportingLRC().iterator();it.hasNext();){ url = (String)it.next(); message = "Querying LRC " + url; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,url); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + url, LogManager.ERROR_MESSAGE_LEVEL); continue; } try{ result.addAll(lrc.list()); } catch(Exception e){ mLogger.log("list()",e,LogManager.ERROR_MESSAGE_LEVEL); } finally{ lrc.close(); } mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Lists a subset of all logical filenames in the catalog. * * @param constraint is a constraint for the logical filename only. It * is a string that has some meaning to the implementing system. This * can be a SQL wildcard for queries, or a regular expression for * Java-based memory collections. * * @return A set of logical filenames that match. The set may be empty */ public Set list(String constraint) { //sanity check if (this.isClosed()) { //probably an exception should be thrown here!! 
throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL); } Set result = new HashSet(); String url = null,message = null; //we need to get hold of all the LRC //that report to the RLI and call the //list() method on each of them for(Iterator it = this.getReportingLRC().iterator();it.hasNext();){ url = (String)it.next(); message = "Querying LRC " + url; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,url); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + url, LogManager.ERROR_MESSAGE_LEVEL); continue; } try{ result.addAll(lrc.list(constraint)); } catch(Exception e){ mLogger.log("list(String)",e,LogManager.ERROR_MESSAGE_LEVEL); } finally{ lrc.close(); } mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Inserts a new mapping into the LRC running at the URL, where the RLI * is running. * * @param lfn is the logical filename under which to book the entry. * @param tuple is the physical filename and associated PFN attributes. * * @return number of insertions, should always be 1. On failure, * throws a RuntimeException. */ public int insert(String lfn, ReplicaCatalogEntry tuple) { //get hold of the LRC if that is running LRC lrc = new LRC(); int result = 1; if(!lrc.connect(mConnectProps)){ //log an error/warning message throw new RuntimeException(LRC_NOT_CONNECTED_MSG + mConnectProps.getProperty(URL_KEY)); } result = lrc.insert(lfn,tuple); //better to keep a handle to the running LRC //as a member variable, and close it in //RLI.close() lrc.close(); return result; } /** * Inserts a new mapping into the LRC running at the URL, where the RLI * is running. * This is a convenience function exposing the resource handle. Internally, * the ReplicaCatalogEntry element will be contructed, and passed to * the appropriate insert function. * * @param lfn is the logical filename under which to book the entry. * @param pfn is the physical filename associated with it. * @param handle is a resource handle where the PFN resides. * * @return number of insertions, should always be 1. On failure, * throws a RuntimeException. * * @see #insert( String, ReplicaCatalogEntry ) * @see ReplicaCatalogEntry */ public int insert(String lfn, String pfn, String handle) { //get hold of the LRC if that is running LRC lrc = new LRC(); int result = 1; if(!lrc.connect(mConnectProps)){ //log an error/warning message throw new RuntimeException(LRC_NOT_CONNECTED_MSG + mConnectProps.getProperty(URL_KEY)); } result = lrc.insert(lfn,pfn,handle); //better to keep a handle to the running LRC //as a member variable, and close it in //RLI.close() lrc.close(); return result; } /** * Inserts multiple mappings into the replica catalog. The input is a * map indexed by the LFN. The value for each LFN key is a collection * of replica catalog entries. * * @param x is a map from logical filename string to list of replica * catalog entries. * * @return the number of insertions. 
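 *
 * A sketch of the expected map layout (lfn, pfn and site values are
 * hypothetical), assuming a connected instance rli:
 * <pre>
 *   Map x = new HashMap();
 *   Collection c = new ArrayList();
 *   c.add( new ReplicaCatalogEntry( "gsiftp://host.example.org/f.a", "isi" ) );
 *   x.put( "f.a", c );
 *   int inserted = rli.insert( x );
 * </pre>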
* @see ReplicaCatalogEntry */ public int insert(Map x) { //get hold of the LRC if that is running LRC lrc = new LRC(); int result = 1; if(!lrc.connect(mConnectProps)){ //log an error/warning message throw new RuntimeException(LRC_NOT_CONNECTED_MSG + mConnectProps.getProperty(URL_KEY)); } result = lrc.insert(x); //better to keep a handle to the running LRC //as a member variable, and close it in //RLI.close() lrc.close(); return result; } /** * Deletes a specific mapping from the replica catalog. We don't care * about the resource handle. More than one entry could theoretically * be removed. Upon removal of an entry, all attributes associated * with the PFN also evaporate (cascading deletion). * * It can result in a deletion of more than one entry, and from more * than one local replica catalog that might be reporting to the RLI. * * @param lfn is the logical filename in the tuple. * @param pfn is the physical filename in the tuple. * * @return the number of removed entries. */ public int delete(String lfn, String pfn) { ReplicaCatalogEntry rce = new ReplicaCatalogEntry(pfn); return delete(lfn,rce); } /** * Deletes a very specific mapping from the replica catalog. The LFN * must be matches, the PFN, and all PFN attributes specified in the * replica catalog entry. More than one entry could theoretically be * removed. Upon removal of an entry, all attributes associated with * the PFN also evaporate (cascading deletion). * It can result in a deletion of more than one entry, and from more * than one local replica catalog that might be reporting to the RLI. * * @param lfn is the logical filename in the tuple. * @param tuple is a description of the PFN and its attributes. * * @return the number of removed entries, either 0 or 1. */ public int delete(String lfn, ReplicaCatalogEntry tuple) { //Map indexed by lrc url and each value a collection //of lfns that the RLI says are present in it. Set lfns = new HashSet(1); lfns.add(lfn); Map lrc2lfn = this.getLRC2LFNS(lfns); int result = 0; if(lrc2lfn == null){ //probably RLI is not connected!! return 0; } // call the delete function on the individual // LRCs where the mapping resides String key = null,message = null; for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = (String)entry.getKey(); message = "Querying LRC " + key; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,key); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + key, LogManager.ERROR_MESSAGE_LEVEL); continue; } //delete from the LRC try{ result += lrc.delete(lfn,tuple); } catch(Exception ex){ mLogger.log("delete(String, ReplicaCatalogEntry)",ex, LogManager.ERROR_MESSAGE_LEVEL); } finally{ //disconnect lrc.close(); } mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Deletes all PFN entries for a given LFN from the replica catalog * where the PFN attribute is found, and matches exactly the object * value. This method may be useful to remove all replica entries that * have a certain MD5 sum associated with them. It may also be harmful * overkill. * It can result in a deletion of more than one entry, and from more * than one local replica catalog that might be reporting to the RLI. * * @param lfn is the logical filename to look for. * @param name is the PFN attribute name to look for. 
* @param value is an exact match of the attribute value to match. * * @return the number of removed entries. */ public int delete(String lfn, String name, Object value) { //Map indexed by lrc url and each value a collection //of lfns that the RLI says are present in it. Set lfns = new HashSet(1); lfns.add(lfn); Map lrc2lfn = this.getLRC2LFNS(lfns); int result = 0; if(lrc2lfn == null){ //probably RLI is not connected!! return 0; } // call the delete function on the individual // LRCs where the mapping resides String key = null,message = null; for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = (String)entry.getKey(); message = "Deleting from LRC " + key; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,key); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + key, LogManager.ERROR_MESSAGE_LEVEL); continue; } //delete from the LRC try{ result += lrc.delete(lfn,name,value); } catch(Exception ex){ mLogger.log("delete(String, String, Object)", ex,LogManager.ERROR_MESSAGE_LEVEL); } finally{ //disconnect lrc.close(); } mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Deletes all PFN entries for a given LFN from the replica catalog * where the resource handle is found. Karan requested this * convenience method, which can be coded like *

      *  delete( lfn, SITE_ATTRIBUTE, handle )
      * 
* * It can result in a deletion of more than one entry, and from more * than one local replica catalog that might be reporting to the RLI. * * @param lfn is the logical filename to look for. * @param handle is the resource handle * * @return the number of entries removed. */ public int deleteByResource(String lfn, String handle) { return delete(lfn,SITE_ATTRIBUTE,handle); } /** * Deletes multiple mappings into the replica catalog. The input is a * map indexed by the LFN. The value for each LFN key is a collection * of replica catalog entries. On setting matchAttributes to false, all entries * having matching lfn pfn mapping to an entry in the Map are deleted. * However, upon removal of an entry, all attributes associated with the pfn * also evaporate (cascaded deletion). * The deletes are done in batches. * * @param x is a map from logical filename string to list of * replica catalog entries. * @param matchAttributes whether mapping should be deleted only if all * attributes match. * * @return the number of deletions. * @see ReplicaCatalogEntry */ public int delete( Map x , boolean matchAttributes){ //Map indexed by lrc url and each value a collection //of lfns that the RLI says are present in it. Set lfns = new HashSet(x.size()); for(Iterator it = x.keySet().iterator();it.hasNext();){ lfns.add( (String)it.next()); } Map lrc2lfn = this.getLRC2LFNS(lfns); int result = 0; if(lrc2lfn == null){ //probably RLI is not connected!! return 0; } //compose an exception that might need to be thrown CatalogException exception = new ReplicaCatalogException(); // call the delete function on the individual // LRCs where the mapping resides String key = null,message = null; String lfn; for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = ( String )entry.getKey(); lfns = ( Set )entry.getValue(); message = "Deleting from LRC " + key; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,key); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + key, LogManager.ERROR_MESSAGE_LEVEL); continue; } //compose the map to delete for a particular LRC Map lmap = new HashMap(lfns.size()); for(Iterator lfnIt = lfns.iterator();lfnIt.hasNext();){ lfn = (String)lfnIt.next(); lmap.put(lfn,x.get(lfn)); } //delete from the LRC try{ result += lrc.delete(x,matchAttributes); } catch(ReplicaCatalogException e){ exception.setNextException(e); } catch(Exception ex){ mLogger.log("delete(Map,boolean)", ex,LogManager.ERROR_MESSAGE_LEVEL); } finally{ //disconnect lrc.close(); } mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } //throw an exception only if a nested exception if( (exception = exception.getNextException()) != null) throw exception; return result; } /** * Removes all mappings for an LFN from the replica catalog. * It can result in a deletion of more than one entry, and from more * than one local replica catalog that might be reporting to the RLI. * * @param lfn is the logical filename to remove all mappings for. * * @return the number of removed entries. */ public int remove(String lfn) { //Map indexed by lrc url and each value a collection //of lfns that the RLI says are present in it. Set lfns = new HashSet(1); lfns.add(lfn); Map lrc2lfn = this.getLRC2LFNS(lfns); int result = 0; if(lrc2lfn == null){ //probably RLI is not connected!! 
return 0; } // call the delete function on the individual // LRCs where the mapping resides String key = null,message = null; for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = (String)entry.getKey(); message = "Deleting from LRC " + key; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,key); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + key, LogManager.ERROR_MESSAGE_LEVEL); continue; } //delete from the LRC try{ result += lrc.remove(lfn); } catch(Exception ex){ mLogger.log("remove(String):",ex, LogManager.ERROR_MESSAGE_LEVEL); } finally{ //disconnect lrc.close(); } mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Removes all mappings for a set of LFNs. * It can result in a deletion of more than one entry, and from more * than one local replica catalog that might be reporting to the RLI. * * @param lfns is a set of logical filename to remove all mappings for. * * @return the number of removed entries. */ public int remove(Set lfns) { //Map indexed by lrc url and each value a collection //of lfns that the RLI says are present in it. Map lrc2lfn = this.getLRC2LFNS(lfns); int result = 0; Set s = null; if(lrc2lfn == null){ //probably RLI is not connected!! return 0; } // call the delete function on the individual // LRCs where the mapping resides String key = null,message = null; for(Iterator it = lrc2lfn.entrySet().iterator();it.hasNext();){ Map.Entry entry = (Map.Entry)it.next(); key = (String)entry.getKey(); message = "Deleting from LRC " + key; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,key); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + key, LogManager.ERROR_MESSAGE_LEVEL); continue; } //delete from the LRC try{ s = (Set)entry.getValue(); mLogger.log("\tDeleting the following lfns " + s, LogManager.DEBUG_MESSAGE_LEVEL); result += lrc.remove((Set)entry.getValue()); } catch(Exception ex){ mLogger.log("remove(Set)",ex,LogManager.ERROR_MESSAGE_LEVEL); } finally{ //disconnect lrc.close(); } mLogger.log(message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Removes all entries from the replica catalog where the PFN attribute * is found, and matches exactly the object value. * It can result in a deletion of more than one entry, and from more * than one local replica catalog that might be reporting to the RLI. * * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * * @return the number of removed entries. */ public int removeByAttribute(String name, Object value) { //sanity check if (this.isClosed()) { //probably an exception should be thrown here!! 
throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL); } int result = 0; String url = null; //we need to get hold of all the LRC //that report to the RLI and call the //list() method on each of them for(Iterator it = this.getReportingLRC().iterator();it.hasNext();){ url = (String)it.next(); mLogger.log("Removing from LRC " + url,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,url); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + url, LogManager.ERROR_MESSAGE_LEVEL); continue; } try{ result += lrc.removeByAttribute(name,value); } catch(Exception e){ mLogger.log("removeByAttribute(String,Object)",e, LogManager.ERROR_MESSAGE_LEVEL); } finally{ lrc.close(); } mLogger.log( "Removing from LRC " + url + LogManager.MESSAGE_DONE_PREFIX, LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Removes all entries associated with a particular resource handle. * This is useful, if a site goes offline. It is a convenience method, * which calls the generic removeByAttribute method. * It can result in a deletion of more than one entry, and from more * than one local replica catalog that might be reporting to the RLI. * * @param handle is the site handle to remove all entries for. * * @return the number of removed entries. * @see #removeByAttribute( String, Object ) */ public int removeByAttribute(String handle) { return removeByAttribute(SITE_ATTRIBUTE,handle); } /** * Removes everything from all the LRCs that report to this RLI. * Use with caution! * * @return the number of removed entries. */ public int clear() { //sanity check if (this.isClosed()) { //probably an exception should be thrown here!! throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL); } int result = 0; String url = null,message = null; //we need to get hold of all the LRC //that report to the RLI and call the //list() method on each of them for(Iterator it = this.getReportingLRC().iterator();it.hasNext();){ url = (String)it.next(); message = "Querying LRC " + url; mLogger.log(message,LogManager.DEBUG_MESSAGE_LEVEL); //push the lrcURL to the properties object mConnectProps.setProperty(this.URL_KEY,url); LRC lrc = new LRC(); if(!lrc.connect(mConnectProps)){ //log an error/warning message mLogger.log("Unable to connect to LRC " + url, LogManager.ERROR_MESSAGE_LEVEL); continue; } try{ result += lrc.clear(); } catch(Exception e){ mLogger.log("list(String)",e, LogManager.ERROR_MESSAGE_LEVEL); } finally{ lrc.close(); } mLogger.log( message + LogManager.MESSAGE_DONE_PREFIX,LogManager.DEBUG_MESSAGE_LEVEL); } return result; } /** * Explicitely free resources before the garbage collection hits. */ public void close() { try{ if (mRLS != null) mRLS.Close(); } catch(RLSException e){ //ignore } finally{ mRLS = null; } } /** * Returns whether the connection to the RLS with which this instance is * associated is closed or not. * * @return true, if the implementation is disassociated, false otherwise. * @see #close() */ public boolean isClosed() { return (mRLS == null); } /** * It returns the timeout value in seconds after which to timeout in case of * no activity from the RLI. * * Referred to by the "rli.timeout" property. * * @param properties the properties passed in the connect method. * * @return the timeout value if specified else, * the value specified by "rls.timeout" property, else * DEFAULT_RLI_TIMEOUT. 
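 *
 * For example (values illustrative): a Properties object that sets
 * only the fallback key yields that value, since "rli.timeout" is
 * unset:
 * <pre>
 *   Properties p = new Properties();
 *   p.setProperty( "rls.timeout", "60" );
 *   int t = rli.getTimeout( p );   // 60
 * </pre>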
* * @see #DEFAULT_RLI_TIMEOUT */ public int getTimeout(Properties properties) { String prop = properties.getProperty( this.RLI_TIMEOUT_KEY); //if prop is null get rls timeout, prop = (prop == null)? properties.getProperty(this.RLS_TIMEOUT_KEY):prop; int val = 0; try { val = Integer.parseInt( prop ); } catch ( Exception e ) { val = Integer.parseInt( DEFAULT_RLI_TIMEOUT ); } return val; } /** * Sets the number of lfns in each batch while querying the lrc in the * bulk mode. * * @param properties the properties passed while connecting. * */ protected void setBatchSize(Properties properties) { String s = properties.getProperty(this.BATCH_KEY); int size = this.RLS_BULK_QUERY_SIZE; try{ size = Integer.parseInt(s); } catch(Exception e){} mBatchSize = size; } /** * Returns the number of lfns in each batch while querying the lrc in the * bulk mode. * * @return the batch size. */ protected int getBatchSize() { return mBatchSize; } /** * Returns a map indexed by lrc urls. Each value is a set of * String objects referring to the logical filenames whose mappings reside * at a particular lrc amongst the set of logical filenames passed. * * @param lfns the set of lfns queried to the RLI. * * @return Map indexed by lrc urls. Each value is a set of lfn strings. * null in case the connection to RLI is closed or error. */ private Map getLRC2LFNS(Set lfns){ int batch = lfns.size() > mBatchSize ? mBatchSize:lfns.size(); //sanity check if (this.isClosed()) { //probably an exception should be thrown here!! throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL); } Map lrc2lfn = new HashMap();//indexed by lrc url and each value a collection //of lfns that the RLI says are present init. //get a handle to the rli //we need to query the RLI in batches for (Iterator it = lfns.iterator(); it.hasNext(); ) { ArrayList l = new ArrayList(batch); for (int j = 0; (j < batch) && (it.hasNext()); j++) { l.add(it.next()); } //query the RLI for one batch List res = null; try{ res = mRLI.getLRCBulk(l); } catch(RLSException ex){ mLogger.log("getLRC2LFNS(Set)",ex, LogManager.ERROR_MESSAGE_LEVEL); //or throw a runtime exception return null; } //iterate through the results and put them in the map String lrc = null; String lfn = null; for(Iterator lit = res.iterator();lit.hasNext();){ RLSString2Bulk s2b = (RLSString2Bulk) lit.next(); lfn = s2b.s1;//s1 is the lfn lrc = s2b.s2;//s2 denotes the lrc which contains the mapping //rc is the exit status returned by the RLI if (s2b.rc == RLSClient.RLS_SUCCESS) { //we are really only concerned with success //and do not care about other exit codes Object val = null; Set s = null; s = ( (val = lrc2lfn.get(lrc)) == null) ? new LinkedHashSet(): (LinkedHashSet)val; s.add(lfn); if(val == null) lrc2lfn.put(lrc,s); } } } //match LRC's just once against ingore and restrict lists for( Iterator it = lrc2lfn.keySet().iterator(); it.hasNext(); ){ String lrc = ( String ) it.next(); int state = this.determineQueryType(lrc); //do the query on the basis of the state if (state == LRC_QUERY_IGNORE) { mLogger.log("Skipping LRC " + lrc, LogManager.DEBUG_MESSAGE_LEVEL); it.remove(); } } return lrc2lfn; } /** * Returns a tri state indicating what type of query needs to be done to * a particular LRC. * * @param url the LRC url. 
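 * A url that matches both the restrict list and the ignore list is
 * effectively ignored, as the ignore check is applied last.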
* * @return tristate */ private int determineQueryType(String url){ int type = RLI.LRC_QUERY_NORMAL; if(mLRCRestrictList != null){ for ( int j = 0; j < mLRCRestrictList.length; j++ ) { if ( url.indexOf( mLRCRestrictList[ j ] ) != -1 ) { type = RLI.LRC_QUERY_RESTRICT; break; } } } if(mLRCIgnoreList != null){ for ( int j = 0; j < mLRCIgnoreList.length; j++ ) { if ( url.indexOf( mLRCIgnoreList[ j ] ) != -1 ) { type = RLI.LRC_QUERY_IGNORE; break; } } } return type; } /** * Returns the rls LRC urls to ignore for querying (requested by LIGO). * * Referred to by the "pegasus.catalog.replica.lrc.ignore" property. * * @param properties the properties passed in the connect method. * * @return String[] if a comma separated list supplied as the property value, * else null */ protected String[] getRLSLRCIgnoreURLs( Properties properties ) { String urls = properties.getProperty( this.LRC_IGNORE_KEY, null ); if ( urls != null ) { String[] urllist = urls.split( "," ); return urllist; } else { return null; } } /** * Returns the rls LRC urls to restrict for querying (requested by LIGO). * * Referred to by the "pegasus.catalog.replica.lrc.restrict" property. * * @param properties the properties passed in the connect method. * * @return String[] if a comma separated list supplied as the property value, * else null */ protected String[] getRLSLRCRestrictURLs( Properties properties ) { String urls = properties.getProperty( this.LRC_RESTRICT_KEY, null ); if ( urls != null ) { String[] urllist = urls.split( "," ); return urllist; } else { return null; } } /** * Retrieves the URLs of all the LRCs that report to the RLI. * * @return a Set containing the URLs to all the LRCs that report to the * RLI. */ private Set getReportingLRC(){ //sanity check if (this.isClosed()) { //probably an exception should be thrown here!! throw new RuntimeException(RLI_NOT_CONNECTED_MSG + this.mRLIURL); } Set result = new HashSet(); Collection c = null; try{ c = mRLI.lrcList(); } catch(RLSException e){ mLogger.log("getReportingLRC(Set)",e,LogManager.ERROR_MESSAGE_LEVEL); } for(Iterator it = c.iterator(); it.hasNext();){ RLSLRCInfo lrc = (RLSLRCInfo)it.next(); result.add(lrc.url); } return result; } /** * Populates the mapping table by querying the LRC in the mLRCList. At * present it searches for all the files in the original DAG. At this point * it should be all the files in the Reduced Dag but not doing so in order to * conserve memory. * * @param allInCache indicates whether all input file entries were found in * cache or not. 
* * @return List */ /* private List populateMapTable( boolean allInCache ) { String lrcURL = null; List list = null; RLSQuery client = null; ReplicaLocation rl = null; List pfnList = null; mTable = new HashMap( mSearchFiles.size() ); int size = mLRCMap.size(); mLogger.log("Number of LRCs that will be queried: "+size, LogManager.DEBUG_MESSAGE_LEVEL); for ( Iterator iter = mLRCMap.keySet().iterator(); iter.hasNext(); ) { lrcURL = ( String ) iter.next(); int state = this.determineQueryType(lrcURL); //do the query on the basis of the state if ( state == LRC_QUERY_IGNORE ) { mLogger.log( "Skipping LRC " + lrcURL, LogManager.DEBUG_MESSAGE_LEVEL); } else{ mLogger.log( "Querying LRC " + lrcURL, LogManager.DEBUG_MESSAGE_LEVEL); list = ( ArrayList ) mLRCMap.get( lrcURL ); try { client = new RLSQuery( lrcURL ); boolean restrict = (state == LRC_QUERY_RESTRICT); client.bulkQueryLRC( list, RLSQuery.RLS_BULK_QUERY_SIZE, mTable,restrict); client.close(); } catch ( Exception e ) { mLogMsg = "RLSEngine.java: While getting connection to LRC " + lrcURL + " " + e; mLogger.log( mLogMsg, LogManager.ERROR_MESSAGE_LEVEL ); size--; //do a hard fail only if the RLS exitmode is set to error //or we could not query to all the LRCs // and we could not find all the entries in the cache mLogger.log("RLS exit mode is " + mProps.getRLSExitMode(), LogManager.DEBUG_MESSAGE_LEVEL); boolean exitOnError = mProps.getRLSExitMode().equalsIgnoreCase( "error" ); if ( exitOnError || ( size == 0 && !allInCache )) { mLogMsg = ( exitOnError ) ? "Unable to access LRC " + lrcURL : "Unable to query any LRC and not all input files are in cache!"; throw new RuntimeException( mLogMsg ); } } mLogger.logCompletion("Querying LRC " + lrcURL, LogManager.DEBUG_MESSAGE_LEVEL); } } return new ArrayList( mTable.keySet() ); } */ /** * The main program, for some unit testing. * * @param args String[] */ public static void main(String[] args) { //setup the logger for the default streams. 
LogManager logger = LogManagerFactory.loadSingletonInstance( ); logger.logEventStart( "event.pegasus.catalog.replica.RLI", "planner.version", Version.instance().toString() ); RLI rli = new RLI(); Properties props = new Properties(); props.setProperty( RLI.URL_KEY, "rls://dataserver.phy.syr.edu" ); props.setProperty( RLI.LRC_IGNORE_KEY, "rls://ldas-cit.ligo.caltech.edu:39281" ); rli.connect(props); System.out.println( "Complete Lookup " + rli.lookup("H-H1_RDS_C03_L2-847608132-128.gwf" ) ); System.out.println( "Lookup without attributes " + rli.lookupNoAttributes("H-H1_RDS_C03_L2-847608132-128.gwf" ) ); rli.close(); //RLI rli = new RLI(); String lfn = "test"; Set s = new HashSet(); s.add(lfn);s.add("testX");s.add("vahi.f.a"); System.out.println("Connecting " + rli.connect("rls://sukhna")); boolean insert = false; if(insert){ ReplicaCatalogEntry rce = new ReplicaCatalogEntry( "gsiftp://sukhna.isi.edu/tmp/test"); rce.addAttribute("name", "karan"); LRC sukhna = new LRC(); sukhna.connect("rls://sukhna"); sukhna.insert("test", rce); sukhna.insert("test", "gsiftp://sukhna.isi.edu/tmp/test1", "isi"); sukhna.insert("vahi.f.a", "file:///tmp/vahi.f.a", "isi"); sukhna.insert("testX", "gsiftp://sukhna.isi.edu/tmp/testX", "isi"); sukhna.insert("testX", "gsiftp://sukhna.isi.edu/tmp/testXvahi", "isi"); sukhna.close(); LRC smarty = new LRC(); ReplicaCatalogEntry rce1 = new ReplicaCatalogEntry( "gsiftp://smarty.isi.edu/tmp/test"); rce1.addAttribute("name", "gaurang"); smarty.connect("rlsn://smarty"); smarty.insert("test", rce1); smarty.insert("test", "gsiftp://smarty.isi.edu/tmp/test1", "isi"); smarty.insert("vahi.f.a", "file:///tmp-smarty/vahi.f.a", "isi"); smarty.insert("testX", "gsiftp://smarty.isi.edu/tmp/testX", "isi"); smarty.close(); } System.out.println("\n Searching for lfn " + lfn); System.out.println(rli.lookup(lfn)); System.out.println("\n Searching for lfn w/o attributes " + lfn); System.out.println(rli.lookupNoAttributes(lfn)); System.out.println("\nSearching for a set of lfn " + s); System.out.println(rli.lookup(s)); System.out.println("\nSearching for a set of lfn with handle matching" + s); System.out.println(rli.lookup(s,"isi")); System.out.println("\nSearching for a set of lfn with handle matching "+ " returning only pfns" + s); System.out.println(rli.lookupNoAttributes(s,"isi")); System.out.println("\nListing all the lfns tracked in RLI"); System.out.println(rli.list("*").size()); //System.out.println("\n Removing entry for lfn " + lfn); //System.out.println(rli.remove(lfn)); //System.out.println("\n Removing entry for lfns " + s); //System.out.println(rli.remove(s)); //System.out.println("\n Removing entry for lfn by handle matching " + lfn); //System.out.println(rli.deleteByResource(lfn,"isi")); //System.out.println("\nSearching for a set of lfn " + s); //System.out.println(rli.lookup(s)); Map m = new HashMap(); m.put("lfn","test*"); m.put("pfn","*vahi*"); System.out.println("\nDoing a constraint lookup " + rli.lookup(m)); rli.close(); } }//end of main class pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/impl/JDBCRC.java0000644000175000017500000012421211757531137027060 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.replica.impl; import edu.isi.pegasus.common.logging.LogManagerFactory; import java.util.*; import java.sql.*; import edu.isi.pegasus.planner.catalog.Catalog; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import edu.isi.pegasus.common.util.CommonProperties; import edu.isi.pegasus.common.logging.LogManager; /** * This class implements a replica catalog on top of a simple table in a * JDBC database. This enables a variety of replica catalog * implementations in a transactionally safe, concurrent environment. * The table must be defined using the statements appropriate for your * database - they are part of the setup in $PEGASUS_HOME/sql/. * * If you chose to use an unsupported database, please check, if your * database either supports sequence number, or if it supports auto * increment columns. If your database supports sequences (e.g. * PostGreSQL), you can use a setup similar to the following (for * Oracle, the autoinc can be implemented via a trigger). * *
 * create sequence rc_lfn_id;
 *
 * create table rc_lfn (
 *   id      bigint default nextval('rc_lfn_id'::text),
 *   lfn     varchar(255) not null,
 *   pfn     varchar(255) not null,
 *
 *   constraint pk_rc_lfn primary key(id),
 *   constraint sk_rc_lfn unique(lfn,pfn)
 * );
 *
 * create index idx_rc_lfn on rc_lfn(lfn);
 *
 * create table rc_attr (
 *   id      bigint,
 *   name    varchar(64) not null,
 *   value   varchar(255) not null,
 *
 *   constraint pk_rc_attr primary key(id,name),
 *   constraint fk_rc_attr foreign key(id) references rc_lfn(id) on delete cascade
 * );
 *
 * create index idx_rc_attr on rc_attr(name);
 * 
 *
 * In case of databases that do not support sequences (e.g. MySQL), do
 * not specify the create sequence, and use an
 * auto-increment column for the primary key instead, e.g.:
 *
 * <pre>
 * create table rc_lfn (
 *   id      bigint not null auto_increment,
 *   lfn     varchar(255) not null,
 *   pfn     varchar(255) not null,
 *
 *   constraint pk_rc_lfn primary key(id),
 *   constraint sk_rc_lfn unique(lfn,pfn)
 * );
 *
 * create index idx_rc_lfn on rc_lfn(lfn);
 *
 * create table rc_attr (
 *   id      bigint,
 *   name    varchar(64) not null,
 *   value   varchar(255) not null,
 *
 *   constraint pk_rc_attr primary key(id,name),
 *   constraint fk_rc_attr foreign key(id) references rc_lfn(id) on delete cascade
 * );
 *
 * create index idx_rc_attr on rc_attr(name);
 * 
* * The site attribute should be specified whenever possible. For the * shell planner, it will always be of value "local". * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2585 $ */ public class JDBCRC implements ReplicaCatalog { /** * This message is sent whenever one of the member function is executed * which relies on an established database context. */ private static final String c_error = "The database connection is not established"; /** * Maintains the connection to the database over the lifetime of * this instance. */ protected Connection mConnection = null; /** * Maintains an essential set of prepared statement, ready to use. */ protected PreparedStatement mStatements[] = null; /** * The handle to the logging object. */ protected LogManager mLogger; /** * The statement to prepare to slurp attributes. */ private static final String mCStatements[] = { // 0: "SELECT name,value FROM rc_attr WHERE id=?", // 1: "SELECT id,pfn FROM rc_lfn WHERE lfn=?", // 2: "SELECT r.id,r.pfn FROM rc_lfn r, rc_attr a WHERE r.id=a.id" + " AND r.lfn=? AND a.name=? AND a.value=?", // 3: "SELECT r.id,r.pfn FROM rc_lfn r, rc_attr a WHERE r.id=a.id" + " AND r.lfn=? AND a.name=? AND a.value IS NULL", // 4: "INSERT INTO rc_attr(id,name,value) VALUES(?,?,?)", // 5: "DELETE FROM rc_lfn WHERE lfn=?", // 6: "DELETE FROM rc_lfn WHERE id IN" + " ( SELECT id FROM rc_attr WHERE name=? AND value=? )", // 7: "DELETE FROM rc_lfn WHERE id IN" + " ( SELECT id FROM rc_attr WHERE name=? AND value IS NULL )", // 8: "DELETE FROM rc_lfn WHERE lfn=? AND id IN" + " ( SELECT id FROM rc_attr WHERE name=? AND value=? )", // 9: "DELETE FROM rc_lfn WHERE lfn=? AND id IN" + " ( SELECT id FROM rc_attr WHERE name=? AND value IS NULL )", }; /** * Remembers if obtaining generated keys will work or not. */ private boolean m_autoinc = false; /** * Convenience c'tor: Establishes the connection to the replica * catalog database. The usual suspects for the class name include: * *
   * org.postgresql.Driver
   * com.mysql.jdbc.Driver
   * com.microsoft.jdbc.sqlserver.SQLServerDriver
   * SQLite.JDBCDriver
   * sun.jdbc.odbc.JdbcOdbcDriver
   * 
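 *
 * A minimal usage sketch of this constructor (the driver, URL and
 * credentials are placeholders, not defaults of this class):
 *
 *   ReplicaCatalog rc =
 *     new JDBCRC( "org.postgresql.Driver",
 *                 "jdbc:postgresql://localhost/pegasus",
 *                 "pegasus", "secret" );
 *   rc.insert( "f.a", "file:///data/f.a", "local" );
 *   String pfn = rc.lookup( "f.a", "local" );
 *   rc.close();
 *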
* * @param jdbc is a string containing the full name of the java class * that must be dynamically loaded. This is usually an external jar * file which contains the Java database driver. * @param url is the database driving URL. This string is database * specific, and tell the JDBC driver, at which host and port the * database listens, permits additional arguments, and selects the * database inside the rDBMS to connect to. Please refer to your * JDBC driver documentation for the format and permitted values. * @param username is the database user account name to connect with. * @param password is the database account password to use. * * @throws LinkageError if linking the dynamically loaded driver fails. * This is a run-time error, and does not need to be caught. * @throws ExceptionInInitializerError if the initialization function * of the driver's instantiation threw an exception itself. This is a * run-time error, and does not need to be caught. * @throws ClassNotFoundException if the class in your jdbc parameter * cannot be found in your given CLASSPATH environment. Callers must * catch this exception. * @throws SQLException if something goes awry with the database. * Callers must catch this exception. */ public JDBCRC( String jdbc, String url, String username, String password ) throws LinkageError, ExceptionInInitializerError, ClassNotFoundException, SQLException { this(); // load database driver jar Class.forName( jdbc ); // may throw LinkageError, // may throw ExceptionInInitializerError, // may throw ClassNotFoundException // establish connection to database generically connect( url, username, password ); // may throws SQLException } /** * Default empty constructor creates an object that is not yet connected * to any database. You must use support methods to connect before this * instance becomes usable. * * @see #connect( String, String, String ) */ public JDBCRC() { // make connection defunc mConnection = null; mStatements = null; mLogger = LogManagerFactory.loadSingletonInstance(); } /** * Connects to the database. This is effectively an accessor to * initialize the internal connection instance variable. Warning! * You must call {@link java.lang.Class#forName( String )} yourself * to load the database JDBC driver jar! * * @param url is the database driving URL. This string is database * specific, and tell the JDBC driver, at which host and port the * database listens, permits additional arguments, and selects the * database inside the rDBMS to connect to. Please refer to your * JDBC driver documentation for the format and permitted values. * @param username is the database user account name to connect with. * @param password is the database account password to use. * @throws SQLException if something goes awry with the database. * Callers must catch this exception. * @see #JDBCRC( String, String, String, String ) * @see java.sql.DriverManager#getConnection( String, String, String ) */ public void connect( String url, String username, String password ) throws SQLException { // establish connection to database generically mConnection = DriverManager.getConnection( url, username, password ); // may throws SQLException m_autoinc = mConnection.getMetaData().supportsGetGeneratedKeys(); // prepared statements are Singletons -- prepared on demand mStatements = new PreparedStatement[ mCStatements.length ]; for ( int i=0; i < mCStatements.length; ++i ) mStatements[i] = null; } /** * Establishes a connection to the database from the properties. 
You * can specify a driver property to contain the class name of * the JDBC driver for your database. This property will be removed * before attempting to connect. You must speficy a url * property to describe the connection. It will be removed before * attempting to connect. * * @param props is the property table with sufficient settings to * establish a link with the database. The minimum key required key is * "url", and possibly "driver". Any other keys depend on the database * driver. * @return true if connected, false if failed to connect. * @see java.sql.DriverManager#getConnection( String, Properties ) * * @throws Error subclasses for runtime errors in the class loader. */ public boolean connect( Properties props ){ boolean result = false; // class loader: Will propagate any runtime errors!!! String driver = (String) props.remove("db.driver"); Properties localProps = CommonProperties.matchingSubset( (Properties)props.clone(), "db", false ); String url = (String) localProps.remove("url"); if (url == null || url.length() == 0) { return result; } try { if (driver != null) { //only support mysql and postgres for time being if( driver.equalsIgnoreCase( "MySQL") ){ driver = "com.mysql.jdbc.Driver"; } else if ( driver.equalsIgnoreCase( "Postgres" )){ driver = "org.postgresql.Driver"; } Class.forName(driver); } } catch (Exception e) { mLogger.log( "While connecting to JDBCRC Replica Catalog", e, LogManager.DEBUG_MESSAGE_LEVEL ); return result; } try { mConnection = DriverManager.getConnection( url, localProps ); m_autoinc = mConnection.getMetaData().supportsGetGeneratedKeys(); // prepared statements are Singletons -- prepared on demand mStatements = new PreparedStatement[mCStatements.length]; for (int i = 0; i < mCStatements.length; ++i) { mStatements[i] = null; } result = true; } catch (SQLException e) { mLogger.log( "While connecting to JDBCRC Replica Catalog", e , LogManager.DEBUG_MESSAGE_LEVEL ); result = false; } return result; } /** * Explicitely free resources before the garbage collection hits. */ public void close() { if ( mConnection != null ) { try { if ( ! mConnection.getAutoCommit() ) mConnection.commit(); } catch ( SQLException e ) { // ignore } } if ( mStatements != null ) { try { for ( int i=0; i < mCStatements.length; ++i ) { if ( mStatements[i] != null ) { mStatements[i].close(); mStatements[i] = null; } } } catch ( SQLException e ) { // ignore } finally { mStatements = null; } } if ( mConnection != null ) { try { mConnection.close(); } catch ( SQLException e ) { // ignore } finally { mConnection = null; } } } /** * Predicate to check, if the connection with the catalog's * implementation is still active. This helps determining, if it makes * sense to call close(). * * @return true, if the implementation is disassociated, false otherwise. * @see #close() */ public boolean isClosed() { return ( mConnection == null ); } /** * Quotes a string that may contain special SQL characters. * @param s is the raw string. * @return the quoted string, which may be just the input string. */ protected String quote( String s ) { if ( s.indexOf('\'') != -1 ) { StringBuffer result = new StringBuffer(); for ( int i=0; i < s.length(); ++i ) { char ch = s.charAt(i); result.append(ch); if ( ch == '\'' ) result.append(ch); } return result.toString(); } else { return s; } } /** * Singleton manager for prepared statements. This instruction * checks that a prepared statement is ready to use, and will * create an instance of the prepared statement, if it was unused * previously. 
* * @param i is the index which prepared statement to check. * @return a handle to the prepared statement. */ protected PreparedStatement getStatement( int i ) throws SQLException { if ( mStatements[i] == null ) mStatements[i] = mConnection.prepareStatement( mCStatements[i] ); else mStatements[i].clearParameters(); return mStatements[i]; } /** * Retrieves the entry for a given filename and site handle from the * replica catalog. * * @param lfn is the logical filename to obtain information for. * @param handle is the resource handle to obtain entries for. * @return the (first) matching physical filename, or * null if no match was found. */ public String lookup( String lfn, String handle ) { String result = null; int which = ( handle == null ? 3 : 2 ); String query = mCStatements[which]; // sanity check if ( lfn == null ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); try { PreparedStatement ps = getStatement(which); ps.setString( 1, quote(lfn) ); ps.setString( 2, quote(ReplicaCatalogEntry.RESOURCE_HANDLE) ); if ( handle != null ) ps.setString( 3, quote(handle) ); // there should only be one result ResultSet rs = ps.executeQuery(); if ( rs.next() ) result = rs.getString("pfn"); rs.close(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to query database about " + query + ": " + e.getMessage() ); } // done return result; } /** * Slurps all attributes from related to a mapping into a map. * * @param id is the reference id to slurp from as string. Especially * Postgres's indexing mechanism goes from tables scans to btrees, if * the numeric key is represented as a string. Strings should be safe * for other databases, too. * @return a Map with the attributes, which may be empty. */ private Map attributes( String id ) throws SQLException { Map result = new TreeMap(); // sanity checks if ( id == null ) return result; // parametrize PreparedStatement ps = getStatement(0); ps.setString( 1, id ); // slurp results ResultSet rs = ps.executeQuery(); while ( rs.next() ) result.put( rs.getString(1), rs.getString(2) ); // done rs.close(); return result; } /** * Retrieves all entries for a given LFN from the replica catalog. * Each entry in the result set is a tuple of a PFN and all its * attributes. * * @param lfn is the logical filename to obtain information for. * @return a collection of replica catalog entries * @see ReplicaCatalogEntry */ public Collection lookup( String lfn ) { List result = new ArrayList(); String query = mCStatements[1]; // sanity check if ( lfn == null ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); // start to ask try { PreparedStatement ps = getStatement(1); ps.setString( 1, quote(lfn) ); ResultSet rs = ps.executeQuery(); while ( rs.next() ) { result.add( new ReplicaCatalogEntry( rs.getString("pfn"), attributes(rs.getString("id")) ) ); } rs.close(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to query database about " + query + ": " + e.getMessage() ); } // done return result; } /** * Retrieves all entries for a given LFN from the replica catalog. * Each entry in the result set is just a PFN string. Duplicates * are reduced through the set paradigm. * * @param lfn is the logical filename to obtain information for. 
* @return a set of PFN strings */ public Set lookupNoAttributes( String lfn ) { Set result = new TreeSet(); String query = mCStatements[1]; // sanity check if ( lfn == null ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); // start to ask try { PreparedStatement ps = getStatement(1); ps.setString( 1, quote(lfn) ); // a PreparedStatement executes its pre-compiled query without arguments ResultSet rs = ps.executeQuery(); while ( rs.next() ) result.add( rs.getString("pfn") ); rs.close(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to query database about " + query + ": " + e.getMessage() ); } // done return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in an online display or portal. * * @param lfns is a set of logical filename strings to look up. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries for the LFN. * @see org.griphyn.common.catalog.ReplicaCatalogEntry */ public Map lookup( Set lfns ) { Map result = new HashMap(); String query = mCStatements[1]; // sanity check if ( lfns == null || lfns.size() == 0 ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); try { ResultSet rs = null; PreparedStatement ps = getStatement(1); for ( Iterator i = lfns.iterator(); i.hasNext(); ) { List value = new ArrayList(); String lfn = (String) i.next(); ps.setString( 1, quote(lfn) ); rs = ps.executeQuery(); while ( rs.next() ) { value.add( new ReplicaCatalogEntry( rs.getString("pfn"), attributes(rs.getString("id")) ) ); } rs.close(); result.put( lfn, value ); } } catch ( SQLException e ) { throw new RuntimeException( "Unable to query database with " + query + ": " + e.getMessage() ); } // done return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in an online display or portal. * * @param lfns is a set of logical filename strings to look up. * @return a map indexed by the LFN. Each value is a set * of PFN strings. */ public Map lookupNoAttributes( Set lfns ) { Map result = new HashMap(); String query = mCStatements[1]; // sanity check if ( lfns == null || lfns.size() == 0 ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); try { ResultSet rs = null; PreparedStatement ps = getStatement(1); for ( Iterator i = lfns.iterator(); i.hasNext(); ) { Set value = new TreeSet(); String lfn = (String) i.next(); ps.setString( 1, quote(lfn) ); rs = ps.executeQuery(); while ( rs.next() ) value.add(rs.getString("pfn")); rs.close(); result.put( lfn, value ); } } catch ( SQLException e ) { throw new RuntimeException( "Unable to query database with " + query + ": " + e.getMessage() ); } // done return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in an online display or portal.

* * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries (all attributes). * @see ReplicaCatalogEntry */ public Map lookup( Set lfns, String handle ) { Map result = new HashMap(); int which = ( handle == null ? 3 : 2 ); String query = mCStatements[which]; // sanity check if ( lfns == null || lfns.size() == 0 ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); try { ResultSet rs = null; PreparedStatement ps = getStatement(which); ps.setString( 2, quote(ReplicaCatalogEntry.RESOURCE_HANDLE) ); if ( handle != null ) ps.setString( 3, quote(handle) ); for ( Iterator i = lfns.iterator(); i.hasNext(); ) { List value = new ArrayList(); String lfn = (String) i.next(); ps.setString( 1, quote(lfn) ); rs = ps.executeQuery(); while ( rs.next() ) { value.add( new ReplicaCatalogEntry( rs.getString("pfn"), attributes(rs.getString("id")) ) ); } rs.close(); result.put( lfn, value ); } } catch ( SQLException e ) { throw new RuntimeException( "Unable to query database with " + query + ": " + e.getMessage() ); } // done return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.

* * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * @return a map indexed by the LFN. Each value is a set of * physical filenames. */ public Map lookupNoAttributes( Set lfns, String handle ) { Map result = new HashMap(); int which = ( handle == null ? 3 : 2 ); String query = mCStatements[which]; // sanity check if ( lfns == null || lfns.size() == 0 ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); try { ResultSet rs = null; PreparedStatement ps = getStatement(which); ps.setString( 2, quote(ReplicaCatalogEntry.RESOURCE_HANDLE) ); if ( handle != null ) ps.setString( 3, quote(handle) ); for ( Iterator i = lfns.iterator(); i.hasNext(); ) { Set value = new TreeSet(); String lfn = (String) i.next(); ps.setString( 1, quote(lfn) ); rs = ps.executeQuery(); while ( rs.next() ) value.add( rs.getString("pfn") ); rs.close(); result.put( lfn, value ); } } catch ( SQLException e ) { throw new RuntimeException( "Unable to query database with " + query + ": " + e.getMessage() ); } // done return result; } /** * Helper function to assemble various pieces. * * @param value is the value of the object from the map. * @param obj is the name of the table column * @param where is the decision, if we had a previous WHERE clause or not. * @see #lookup( Map ) */ private String addItem( Object value, String obj, boolean where ) { // sanity check, no column can be NULL if ( value == null ) return new String(); String v = ( value instanceof String ) ? (String) value : value.toString(); StringBuffer q = new StringBuffer(80); q.append( where ? " AND " : " WHERE " ); q.append(obj).append(" LIKE '").append(quote(v)).append('\''); return q.toString(); } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal. * * @param constraints is mapping of keys 'lfn', 'pfn', or any * attribute name, e.g. the resource handle 'site', to a string that * has some meaning to the implementing system. This can be a SQL * wildcard for queries, or a regular expression for Java-based memory * collections. Unknown keys are ignored. Using an empty map requests * the complete catalog. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries. * @see ReplicaCatalogEntry */ public Map lookup( Map constraints ) { Map result = new TreeMap(); // more sanity if ( mConnection == null ) throw new RuntimeException( c_error ); // prepare statement boolean flag = false; boolean where = false; StringBuffer q = new StringBuffer(256); q.append("SELECT DISTINCT r.id,r.lfn,r.pfn FROM rc_lfn r, rc_attr a"); for ( Iterator i=constraints.keySet().iterator(); i.hasNext(); ) { String s, key = (String) i.next(); if ( key.equals("lfn") ) { s = addItem( constraints.get("lfn"), "r.lfn", where ); } else if ( key.equals("pfn") ) { s = addItem( constraints.get("pfn"), "r.pfn", where ); } else { if ( ! flag ) { q.append( where ? " AND " : " WHERE " ).append( "r.id=a.id" ); flag = true; } s = addItem( constraints.get(key), "a." 
+ key, where ); } if ( s.length() > 0 ) { where = true; q.append(s); } } // start to ask String lfn = null; ReplicaCatalogEntry pair = null; String query = q.toString(); try { Statement st = mConnection.createStatement(); ResultSet rs = st.executeQuery(query); while ( rs.next() ) { lfn = rs.getString("lfn"); pair = new ReplicaCatalogEntry( rs.getString("pfn"), attributes(rs.getString("id")) ); // add a new list, if the LFN does not already exist if ( ! result.containsKey(lfn) ) result.put( lfn, new ArrayList() ); // now add to the list ((List) result.get(lfn)).add( pair ); } rs.close(); st.close(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to query database about " + query + ": " + e.getMessage() ); } // done return result; } /** * Lists all logical filenames in the catalog. * * @return A set of all logical filenames known to the catalog. */ public Set list() { // short-cut return list( null ); } /** * Lists a subset of all logical filenames in the catalog. * * @param constraint is a constraint for the logical filename only. It * is a string that has some meaning to the implementing system. This * can be a SQL wildcard for queries, or a regular expression for * Java-based memory collections. * @return A set of logical filenames that match. The set may be empty. */ public Set list( String constraint ) { Set result = new TreeSet(); // more sanity if ( mConnection == null ) throw new RuntimeException( c_error ); // prepare statement // FIXME: work with pre-prepared statements String query = "SELECT lfn FROM rc_lfn"; if ( constraint != null && constraint.length() > 0 ) query += " WHERE lfn LIKE '" + quote(constraint) + "'"; // start to ask try { Statement st = mConnection.createStatement(); ResultSet rs = st.executeQuery(query); // JDBC result set columns are 1-based while ( rs.next() ) { result.add( rs.getString(1) ); } rs.close(); st.close(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to query database about " + query + ": " + e.getMessage() ); } // done return result; } /** * Inserts a new mapping into the replica catalog. * * @param lfn is the logical filename under which to book the entry. * @param tuple is the physical filename and associated PFN attributes. * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. */ public int insert( String lfn, ReplicaCatalogEntry tuple ) { String query = "[no query]"; int result = 0; boolean autoCommitWasOn = false; int state = 0; // sanity checks if ( lfn == null || tuple == null ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); try { // delete-before-insert as one transaction if ( (autoCommitWasOn = mConnection.getAutoCommit()) ) mConnection.setAutoCommit(false); state++; // state == 1 // // delete before insert... // delete( lfn, tuple.getPFN() ); state++; // state == 2 ResultSet rs = null; Statement st = null; StringBuffer m = new StringBuffer(256); String id = null; if ( ! 
m_autoinc ) { // // use sequences, no auto-generated keys possible // query = "SELECT nextval('rc_lfn_id')"; st = mConnection.createStatement(); rs = st.executeQuery(query); if ( rs.next() ) id = rs.getString(1); else throw new SQLException( "Unable to access sequence generator" ); rs.close(); st.close(); state++; // state == 3 m.append( "INSERT INTO rc_lfn(id,lfn,pfn) VALUES('" ); m.append(id).append("','"); m.append(quote(lfn)).append("','"); m.append(quote(tuple.getPFN())).append("')"); query = m.toString(); st = mConnection.createStatement(); result = st.executeUpdate(query); // ,Statement.RETURN_GENERATED_KEYS); st.close(); state++; // state == 4 } else { // // use autoinc columns, obtain autogenerated keys afterwards // m.append( "INSERT INTO rc_lfn(lfn,pfn) VALUES('" ); m.append(quote(lfn)).append("','"); m.append(quote(tuple.getPFN())).append("')"); query = m.toString(); st = mConnection.createStatement(); result = st.executeUpdate(query, Statement.RETURN_GENERATED_KEYS); state++; // state == 3 rs = st.getGeneratedKeys(); if ( rs.next() ) id = rs.getString(1); else throw new SQLException( "Unable to access autogenerated key" ); rs.close(); st.close(); state++; // state == 4 } query = mCStatements[4]; PreparedStatement ps = getStatement(4); // ps.setString( 1, id ); // GRRR, Pg8!!! ps.setLong( 1, Long.parseLong(id) ); for ( Iterator i=tuple.getAttributeIterator(); i.hasNext(); ) { String name = (String) i.next(); Object value = tuple.getAttribute(name); ps.setString( 2, name ); if ( value == null ) ps.setNull( 3, Types.VARCHAR ); else ps.setString( 3, value instanceof String ? (String) value : value.toString() ); ps.executeUpdate(); } state++; // state == 5 mConnection.commit(); } catch ( SQLException e ) { try { if ( state > 0 && state < 4 ) mConnection.rollback(); } catch ( SQLException e2 ) { // ignore rollback problems } throw new RuntimeException( "Unable to tell database " + query + " (state=" + state + "): " + e.getMessage() ); } finally { // restore original auto-commit state try { if ( autoCommitWasOn ) mConnection.setAutoCommit(true); } catch ( SQLException e ) { // ignore } } return result; } /** * Inserts a new mapping into the replica catalog. This is a * convenience function exposing the resource handle. Internally, the * ReplicaCatalogEntry element will be contructed, and * passed to the appropriate insert function. * * @param lfn is the logical filename under which to book the entry. * @param pfn is the physical filename associated with it. * @param handle is a resource handle where the PFN resides. * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * @see #insert( String, ReplicaCatalogEntry ) * @see ReplicaCatalogEntry */ public int insert( String lfn, String pfn, String handle ) { return insert( lfn, new ReplicaCatalogEntry( pfn, handle ) ); } /** * Inserts multiple mappings into the replica catalog. The input is a * map indexed by the LFN. The value for each LFN key is a collection * of replica catalog entries. * * @param x is a map from logical filename string to list of replica * catalog entries. * @return the number of insertions. * @see org.griphyn.common.catalog.ReplicaCatalogEntry */ public int insert( Map x ) { int result = 0; // sanity checks if ( x == null || x.size() == 0 ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); // FIXME: Create a true bulk mode. This is inefficient, but will // get the job done (for now). 
Set lfns = x.keySet(); for ( Iterator i=lfns.iterator(); i.hasNext(); ) { String lfn = (String) i.next(); List value = (List) x.get(lfn); if ( value != null && value.size() > 0 ) { for ( Iterator j=value.iterator(); j.hasNext(); ) { result += insert( lfn, (ReplicaCatalogEntry) j.next() ); } } } // done return result; } /** * Deletes multiple mappings into the replica catalog. The input is a * map indexed by the LFN. The value for each LFN key is a collection * of replica catalog entries. On setting matchAttributes to false, all entries * having matching lfn pfn mapping to an entry in the Map are deleted. * However, upon removal of an entry, all attributes associated with the pfn * also evaporate (cascaded deletion). * * @param x is a map from logical filename string to list of * replica catalog entries. * @param matchAttributes whether mapping should be deleted only if all * attributes match. * * @return the number of deletions. * @see ReplicaCatalogEntry */ public int delete( Map x , boolean matchAttributes){ throw new java.lang.UnsupportedOperationException ("delete(Map,boolean) not implemented as yet"); } /** * Deletes a specific mapping from the replica catalog. We don't care * about the resource handle. More than one entry could theoretically * be removed. Upon removal of an entry, all attributes associated * with the PFN also evaporate (cascading deletion). * * @param lfn is the logical filename in the tuple. * @param pfn is the physical filename in the tuple. * @return the number of removed entries. */ public int delete( String lfn, String pfn ) { int result = 0; // sanity checks if ( lfn == null || pfn == null ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); // prepare statement // FIXME: work with pre-prepared statements StringBuffer m = new StringBuffer(256); m.append( "DELETE FROM rc_lfn WHERE lfn='"); m.append( quote(lfn) ).append('\''); m.append( " AND pfn='").append( quote(pfn) ).append('\''); String query = m.toString(); try { Statement st = mConnection.createStatement(); st.execute(query); result = st.getUpdateCount(); st.close(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to tell database " + query + ": " + e.getMessage() ); } // done return result; } /** * Deletes a very specific mapping from the replica catalog. The LFN * must be matches, the PFN, and all PFN attributes specified in the * replica catalog entry. More than one entry could theoretically be * removed. Upon removal of an entry, all attributes associated with * the PFN also evaporate (cascading deletion). * * @param lfn is the logical filename in the tuple. * @param tuple is a description of the PFN and its attributes. * @return the number of removed entries, either 0 or 1. 
*/ public int delete( String lfn, ReplicaCatalogEntry tuple ) { int result = 0; String query = "[no query]"; // sanity checks if ( lfn == null || tuple == null ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); try { StringBuffer m = new StringBuffer(256); for ( Iterator i=tuple.getAttributeIterator(); i.hasNext(); ) { String name = (String) i.next(); Object value = tuple.getAttribute(name); m.append( "SELECT id FROM rc_attr WHERE name='"); m.append(quote(name)).append( "' AND value" ); if ( value == null ) m.append( " IS NULL" ); else m.append("='").append(quote(value.toString())).append('\''); if ( i.hasNext() ) m.append( " INTERSECT " ); } query = m.toString(); m = new StringBuffer(256); m.append( "DELETE FROM rc_lfn WHERE lfn='" ).append(quote(lfn)); m.append("' AND pfn='" ).append(quote(tuple.getPFN())); m.append("' AND id=?"); Statement st = mConnection.createStatement(); ResultSet rs = st.executeQuery(query); query = m.toString(); PreparedStatement ps = mConnection.prepareStatement(query); while ( rs.next() ) { ps.setString( 1, rs.getString(1) ); result += ps.executeUpdate(); } ps.close(); rs.close(); st.close(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to tell database " + query + ": " + e.getMessage() ); } // done return result; } /** * Deletes all PFN entries for a given LFN from the replica catalog * where the PFN attribute is found, and matches exactly the object * value. This method may be useful to remove all replica entries that * have a certain MD5 sum associated with them. It may also be harmful * overkill. * * @param lfn is the logical filename to look for. * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * @return the number of removed entries. */ public int delete( String lfn, String name, Object value ) { int result = 0; int which = value == null ? 9 : 8; String query = mCStatements[which]; // sanity checks if ( lfn == null || name == null ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); try { PreparedStatement ps = getStatement(which); ps.setString( 1, quote(lfn) ); ps.setString( 2, quote(name) ); if ( value != null ) ps.setString( 3, quote(value.toString()) ); result = ps.executeUpdate(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to tell database " + query + ": " + e.getMessage() ); } // done return result; } /** * Deletes all PFN entries for a given LFN from the replica catalog * where the resource handle is found. Karan requested this * convenience method, which can be coded like *

   *  delete( lfn, RESOURCE_HANDLE, handle )
   * 
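 *
 * For example, deleteByResource( "f.a", "local" ) removes every
 * replica of "f.a" that carries the site handle "local".
 *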
* * @param lfn is the logical filename to look for. * @param handle is the resource handle * @return the number of entries removed. */ public int deleteByResource( String lfn, String handle ) { return delete( lfn, ReplicaCatalogEntry.RESOURCE_HANDLE, handle ); } /** * Removes all mappings for an LFN from the replica catalog. * * @param lfn is the logical filename to remove all mappings for. * @return the number of removed entries. */ public int remove( String lfn ) { int result = 0; String query = mCStatements[5]; // sanity checks if ( lfn == null ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); try { PreparedStatement ps = getStatement(5); ps.setString( 1, quote(lfn) ); result = ps.executeUpdate(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to tell database " + query + ": " + e.getMessage() ); } // done return result; } /** * Removes all mappings for a set of LFNs. * * @param lfns is a set of logical filename to remove all mappings for. * @return the number of removed entries. */ public int remove( Set lfns ) { int result = 0; String query = mCStatements[5]; // sanity checks if ( lfns == null || lfns.size() == 0 ) return result; if ( mConnection == null ) throw new RuntimeException( c_error ); try { PreparedStatement ps = getStatement(5); for ( Iterator i = lfns.iterator(); i.hasNext(); ) { ps.setString( 1, quote((String) i.next()) ); result += ps.executeUpdate(); } } catch ( SQLException e ) { throw new RuntimeException( "Unable to tell database " + query + ": " + e.getMessage() ); } // done return result; } /** * Removes all entries from the replica catalog where the PFN attribute * is found, and matches exactly the object value. * * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * @return the number of removed entries. */ public int removeByAttribute( String name, Object value ) { int result = 0; int which = value == null ? 7 : 6; String query = mCStatements[which]; // sanity checks if ( mConnection == null ) throw new RuntimeException( c_error ); try { PreparedStatement ps = getStatement(which); ps.setString( 1, quote(name) ); if ( value != null ) ps.setString( 2, value.toString() ); result = ps.executeUpdate(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to tell database " + query + ": " + e.getMessage() ); } // done return result; } /** * Removes all entries associated with a particular resource handle. * This is useful, if a site goes offline. It is a convenience method, * which calls the generic removeByAttribute method. * * @param handle is the site handle to remove all entries for. * @return the number of removed entries. * @see #removeByAttribute( String, Object ) */ public int removeByAttribute( String handle ) { return removeByAttribute( ReplicaCatalogEntry.RESOURCE_HANDLE, handle ); } /** * Removes everything. Use with caution! * * @return the number of removed entries. 
*/ public int clear() { int result = 0; // sanity checks if ( mConnection == null ) throw new RuntimeException( c_error ); // prepare statement String query = "DELETE FROM rc_lfn"; try { Statement st = mConnection.createStatement(); st.execute(query); result = st.getUpdateCount(); st.close(); } catch ( SQLException e ) { throw new RuntimeException( "Unable to tell database " + query + ": " + e.getMessage() ); } // done return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/replica/impl/SimpleFile.java0000644000175000017500000010251511757531137030164 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.replica.impl; import java.io.*; import java.util.*; import java.util.regex.*; import edu.isi.pegasus.common.util.Boolean; import edu.isi.pegasus.common.util.Escape; import edu.isi.pegasus.common.util.Currently; import edu.isi.pegasus.planner.catalog.Catalog; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; /** * This class implements a replica catalog on top of a simple file which * contains two or more columns. It is neither transactionally safe, nor * advisable to use for production purposes in any way. Multiple * concurrent instances will clobber each other!

* * The site attribute should be specified whenever possible. The * attribute key for the site attribute is "pool". For the shell * planner, its value will always be "local".

* * The class is permissive in what inputs it accepts. The LFN may or may * not be quoted. If it contains linear whitespace, quotes, backslash or * an equality sign, it must be quoted and escaped. Ditto for the PFN. * The attribute key-value pairs are separated by an equality sign * without any whitespace. The value may be quoted; the same quoting * rules as for the LFN apply.

* *

 * LFN PFN
 * LFN PFN a=b [..]
 * LFN PFN a="b" [..]
 * "LFN w/LWS" "PFN w/LWS" [..]
 * 
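 *
 * A concrete (made-up) record following these rules could read:
 *
 *  f.a file:///shared/data/f.a pool="local"
 *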
* * The class is strict when producing (storing) results. The LFN and PFN * are only quoted and escaped, if necessary. The attribute values are * always quoted and escaped. * * @author Jens-S. Vöckler * @version $Revision: 2079 $ */ public class SimpleFile implements ReplicaCatalog { /** * The name of the key that disables writing back to the cache file. * Designates a static file. i.e. read only */ public static final String READ_ONLY_KEY = "read.only"; /** * Records the quoting mode for LFNs and PFNs. If false, only quote as * necessary. If true, always quote all LFNs and PFNs. */ protected boolean m_quote = false; /** * Records the name of the on-disk representation. */ protected String m_filename = null; /** * Maintains a memory slurp of the file representation. */ protected Map m_lfn = null; /** * A boolean indicating whether the catalog is read only or not. */ boolean m_readonly; /** * Default empty constructor creates an object that is not yet connected * to any database. You must use support methods to connect before this * instance becomes usable. * * @see #connect( Properties ) */ public SimpleFile() { // make connection defunc m_lfn = null; m_filename = null; m_readonly = false; } /** * Provides the final states and associated messages. * *
   * st | id | meaning
   * ---+----+--------------------
   * F1 | 17 | final state, no record
   * F2 | 16 | final state, valid record
   * E1 | 18 | premature end
   * E2 | 19 | illegal character
   * E3 | 20 | incomplete record
   * E4 | 21 | unterminated string
   * 
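 *
 * For instance, a record whose closing quote is missing, such as
 * f.a "broken, drives the parser into final state 21 and is reported
 * as a missing closing quote.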
*/ private static final String c_final[] = { "OK", "noop", "premature end of record", "illegal character @", "incomplete record", "missing closing quote" }; /** * Contains the state transition tables. The notes a through c mark * similar states: *
   *      | EOS | lws |  =  | ""  | \\  | else|
   * -----+-----+-----+-----+-----+-----+-----+--------------
   *    0 | F1,-|  0,-|  E2 |  3,-|  E2 | 1,Sl| skip initial ws
   * a  1 |  E3 | 2,Fl|  E2 |  E2 |  E2 | 1,Sl| LFN w/o quotes
   *    2 |  E3 |  2,-|  E2 |  6,-|  E2 | 5,Sp| skip ws between LFN and PFN
   * b  3 |  E4 | 3,Sl| 3,Sl| 2,Fl|  4,-| 3,Sl| LFN in quotes
   * c  4 |  E4 | 3,Sl| 3,Sl| 3,Sl| 3,Sl| 3,Sl| LFN backslash escape
   * -----+-----+-----+-----+-----+-----+-----+--------------
   * a  5 |F2,Fp| 8,Fp|  E2 |  E2 |  E2 | 5,Sp| PFN w/o quotes
   * b  6 |  E4 | 6,Sp| 6,Sp| 8,Fp|  7,-| 6,Sp| PFN in quotes
   * c  7 |  E4 | 6,Sp| 6,Sp| 6,Sp| 6,Sp| 6,Sp| PFN backslash escape
   *    8 | F2,-|  8,-|  E2 |  E2 |  E2 | 9,Sk| skip ws before attributes
   *    9 |  E1 |  E2 |10,Fk|  E2 |  E2 | 9,Sk| attribute key
   *   10 |  E1 |  E2 |  E2 | 12,-|  E2 |11,Sv| equals sign
   * -----+-----+-----+-----+-----+-----+-----+--------------
   * a 11 |F2,Fv| 8,Fv|  E2 |  E2 |  E2 |11,Sv| value w/o quotes
   * b 12 |  E4 |12,Sv|12,Sv| 8,Fv| 13,-|12,Sv| value in quotes
   * c 13 |  E4 |12,Sv|12,Sv|12,Sv|12,Sv|12,Sv| value backslash escape
   * 
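 *
 * As a worked example (hand-traced from the table above, so treat it
 * as illustrative): the record f.a file:///tmp/f.a pool="local" moves
 * through states 0 (skip), 1 (LFN), 2 (gap), 5 (PFN), 8 (gap),
 * 9 (key), 10 (equals) and 12 (quoted value), returns to 8 on the
 * closing quote, and the end of string then yields final state
 * F2 (16), a valid record.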
*/ private static final short c_state[][] = { { 17, 0, 19, 3, 19, 1 }, // 0 { 20, 2, 19, 19, 19, 1 }, // 1 { 20, 2, 19, 6, 19, 5 }, // 2 { 21, 3, 3, 2, 4, 3 }, // 3 { 21, 3, 3, 3, 3, 3 }, // 4 { 16, 8, 19, 19, 19, 5 }, // 5 { 21, 6, 6, 8, 7, 6 }, // 6 { 21, 6, 6, 6, 6, 6 }, // 7 { 16, 8, 19, 19, 19, 9 }, // 8 { 18, 19, 10, 19, 19, 9 }, // 9 { 18, 19, 19, 12, 19, 11 }, // 10 { 16, 8, 19, 19, 19, 11 }, // 11 { 21, 12, 12, 8, 13, 12 }, // 12 { 21, 12, 12, 12, 12, 12 } }; // 13 /** * Contains the actions to perform upon each state transition including * transition into self state. * *
   *  op| # | action
   * ---+---+-------------------------------------------
   *  - | 0 | no op
   *  S*| 1 | append to sb
   *  Fl| 2 | lfn := sb
   *  Fp| 3 | pfn := sb
   *  Fk| 4 | key := sb
   *  Fv| 5 | value := sb
   * 
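 *
 * In the trace above, the transitions fire Fl after the LFN, Fp after
 * the PFN, Fk after the attribute key, and Fv once the quoted value
 * closes; this is the same order in which parse() below fills its
 * local variables.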
*/ private static final short c_action[][] = { { 0, 0, 0, 0, 0, 1 }, // 0 { 0, 2, 0, 0, 0, 1 }, // 1 a { 0, 0, 0, 0, 0, 1 }, // 2 { 0, 1, 1, 2, 0, 1 }, // 3 b { 0, 1, 1, 1, 1, 1 }, // 4 c { 3, 3, 0, 0, 0, 1 }, // 5 a { 0, 1, 1, 3, 0, 1 }, // 6 b { 0, 1, 1, 1, 1, 1 }, // 7 c { 0, 0, 0, 0, 0, 1 }, // 8 { 0, 0, 4, 0, 0, 1 }, // 9 { 0, 0, 0, 0, 0, 1 }, // 10 { 5, 5, 0, 0, 0, 1 }, // 11 a { 0, 1, 1, 5, 0, 1 }, // 12 b { 0, 1, 1, 1, 1, 1 } }; // 13 c /** * Parses a line from the file replica catalog * * @param line is the line to parse * @param lineno is the line number of this line * @return true if a valid element was generated */ public boolean parse( String line, int lineno ) { char ch = ' '; String lfn = null; String pfn = null; String key = null; Map attr = new TreeMap(); short input, state = 0; int i = 0; StringBuffer sb = new StringBuffer(); while ( state < 16 ) { if ( line.length() <= i ) { ch=' '; input=0; } // EOS else switch ( (ch=line.charAt(i)) ) { case ' ': input=1; break; case '\t': input=1; break; case '=': input=2; break; case '"': input=3; break; case '\\': input=4; break; default: input=5; break; } i++; // perform action switch ( c_action[state][input] ) { case 0: // noop break; case 1: // append to sb sb.append(ch); break; case 2: // sb to lfn lfn = sb.toString(); sb = new StringBuffer(); break; case 3: // sb to pfn pfn = sb.toString(); sb = new StringBuffer(); break; case 4: // sb to key key = sb.toString(); sb = new StringBuffer(); break; case 5: // sb to value attr.put( key, sb.toString() ); sb = new StringBuffer(); break; } // goto new state state = c_state[state][input]; } if ( state > 17 ) { // error report sb = new StringBuffer(i+1); for ( int j=1; j * However, if the property "quote" had a true value * when connecting to the database, output will always be quoted. * * @param e is the Escape instance used to escape strings. * @param s is the string that may require quoting * @return either the original string, or a newly allocated instance * to an escaped string. */ public String quote( Escape e, String s ) { String result = null; if ( s == null || s.length() == 0 ) { // empty string short-cut result = ( m_quote ? "\"\"" : s ); } else { // string has content boolean flag = m_quote; for ( int i=0; ialways
* quoted and thus quote-escaped. */ public void close() { String newline = System.getProperty("line.separator", "\r\n"); Escape e = new Escape( "\"\\", '\\' ); // sanity check if ( m_lfn == null ) return; //check if the file is writeable or not if( m_readonly ){ m_lfn.clear(); m_lfn = null; m_filename = null; return; } try { // open Writer out = new BufferedWriter(new FileWriter(m_filename)); // write header out.write( "# file-based replica catalog: " + Currently.iso8601(false,true,true,new Date()) ); out.write( newline ); // write data for ( Iterator i=m_lfn.keySet().iterator(); i.hasNext(); ) { String lfn = (String) i.next(); Collection c = (Collection) m_lfn.get(lfn); if ( c != null ) { for ( Iterator j=c.iterator(); j.hasNext(); ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) j.next(); out.write( quote(e,lfn) ); out.write( ' ' ); out.write( quote(e,rce.getPFN()) ); for ( Iterator k=rce.getAttributeIterator(); k.hasNext(); ) { String key = (String) k.next(); String value = (String) rce.getAttribute(key); out.write( ' ' ); out.write( key ); out.write( "=\"" ); out.write( e.escape(value) ); out.write( '"' ); } // finalize record/line out.write( newline ); } } } // close out.close(); } catch ( IOException ioe ) { // FIXME: blurt message somewhere sane System.err.println( ioe.getMessage() ); } finally { m_lfn.clear(); m_lfn = null; m_filename = null; } } /** * Predicate to check, if the connection with the catalog's * implementation is still active. This helps determining, if it makes * sense to call close(). * * @return true, if the implementation is disassociated, false otherwise. * @see #close() */ public boolean isClosed() { return ( m_lfn == null ); } /** * Retrieves the entry for a given filename and site handle from the * replica catalog. * * @param lfn is the logical filename to obtain information for. * @param handle is the resource handle to obtain entries for. * @return the (first) matching physical filename, or * null if no match was found. */ public String lookup( String lfn, String handle ) { Collection c = (Collection) m_lfn.get(lfn); if ( c == null ) return null; for ( Iterator i=c.iterator(); i.hasNext(); ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) i.next(); String pool = rce.getResourceHandle(); if ( pool == null && handle == null || pool != null && handle != null && pool.equals(handle) ) return rce.getPFN(); } return null; } /** * Retrieves all entries for a given LFN from the replica catalog. * Each entry in the result set is a tuple of a PFN and all its * attributes. * * @param lfn is the logical filename to obtain information for. * @return a collection of replica catalog entries * @see ReplicaCatalogEntry */ public Collection lookup( String lfn ) { Collection c = (Collection) m_lfn.get(lfn); if ( c == null ) return new ArrayList(); else return new ArrayList(c); } /** * Retrieves all entries for a given LFN from the replica catalog. * Each entry in the result set is just a PFN string. Duplicates * are reduced through the set paradigm. * * @param lfn is the logical filename to obtain information for. * @return a set of PFN strings */ public Set lookupNoAttributes( String lfn ) { Set result = new TreeSet(); Collection c = (Collection) m_lfn.get(lfn); if ( c != null ) { for ( Iterator i=c.iterator(); i.hasNext(); ) { result.add( ((ReplicaCatalogEntry) i.next()).getPFN() ); } } // done return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. 
Retrieving full catalogs should be harmful, but * may be helpful in an online display or portal. * * @param lfns is a set of logical filename strings to look up. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries for the LFN. * @see org.griphyn.common.catalog.ReplicaCatalogEntry */ public Map lookup( Set lfns ) { Map result = new HashMap(); if ( lfns == null || lfns.size() == 0 ) return result; for ( Iterator i = lfns.iterator(); i.hasNext(); ) { String lfn = (String) i.next(); Collection c = (Collection) m_lfn.get(lfn); if ( c == null ) result.put( lfn, new ArrayList() ); else result.put( lfn, new ArrayList(c) ); } // done return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in an online display or portal. * * @param lfns is a set of logical filename strings to look up. * @return a map indexed by the LFN. Each value is a set * of PFN strings. */ public Map lookupNoAttributes( Set lfns ) { Map result = new HashMap(); if ( lfns == null || lfns.size() == 0 ) return result; for ( Iterator i = lfns.iterator(); i.hasNext(); ) { Set value = new TreeSet(); String lfn = (String) i.next(); Collection c = (Collection) m_lfn.get(lfn); if ( c != null ) { for ( Iterator j=c.iterator(); j.hasNext(); ) { value.add( ((ReplicaCatalogEntry) j.next()).getPFN() ); } } result.put( lfn, value ); } // done return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.

* * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries (all attributes). * @see ReplicaCatalogEntry */ public Map lookup( Set lfns, String handle ) { Map result = new HashMap(); if ( lfns == null || lfns.size() == 0 ) return result; for ( Iterator i = lfns.iterator(); i.hasNext(); ) { String lfn = (String) i.next(); Collection c = (Collection) m_lfn.get(lfn); if ( c != null ) { List value = new ArrayList(); for ( Iterator j=c.iterator(); j.hasNext(); ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) j.next(); String pool = rce.getResourceHandle(); if ( pool == null && handle == null || pool != null && handle != null && pool.equals(handle) ) value.add( rce ); } // only put found LFNs into result result.put( lfn, value ); } } // done return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal.

* * @param lfns is a set of logical filename strings to look up. * @param handle is the resource handle, restricting the LFNs. * @return a map indexed by the LFN. Each value is a set of * physical filenames. */ public Map lookupNoAttributes( Set lfns, String handle ) { Map result = new HashMap(); if ( lfns == null || lfns.size() == 0 ) return result; for ( Iterator i = lfns.iterator(); i.hasNext(); ) { String lfn = (String) i.next(); Collection c = (Collection) m_lfn.get(lfn); if ( c != null ) { List value = new ArrayList(); for ( Iterator j=c.iterator(); j.hasNext(); ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) j.next(); String pool = rce.getResourceHandle(); if ( pool == null && handle == null || pool != null && handle != null && pool.equals(handle) ) value.add( rce.getPFN() ); } // only put found LFNs into result result.put( lfn, value ); } } // done return result; } /** * Retrieves multiple entries for a given logical filename, up to the * complete catalog. Retrieving full catalogs should be harmful, but * may be helpful in online display or portal. * * @param constraints is mapping of keys 'lfn', 'pfn', or any * attribute name, e.g. the resource handle 'pool', to a string that * has some meaning to the implementing system. This can be a SQL * wildcard for queries, or a regular expression for Java-based memory * collections. Unknown keys are ignored. Using an empty map requests * the complete catalog. * @return a map indexed by the LFN. Each value is a collection * of replica catalog entries. * @see ReplicaCatalogEntry */ public Map lookup( Map constraints ) { if ( constraints == null || constraints.size() == 0 ) { // return everything return Collections.unmodifiableMap(m_lfn); } else if ( constraints.size() == 1 && constraints.containsKey("lfn") ) { // return matching LFNs Pattern p = Pattern.compile( (String) constraints.get("lfn") ); Map result = new HashMap(); for ( Iterator i=m_lfn.entrySet().iterator(); i.hasNext(); ) { Map.Entry e = (Map.Entry) i.next(); String lfn = (String) e.getKey(); if ( p.matcher(lfn).matches() ) result.put( lfn, e.getValue() ); } return result; } else { // FIXME: Implement! throw new RuntimeException( "method not implemented" ); } } /** * Lists all logical filenames in the catalog. * * @return A set of all logical filenames known to the catalog. */ public Set list() { return new TreeSet( m_lfn.keySet() ); } /** * Lists a subset of all logical filenames in the catalog. * * @param constraint is a constraint for the logical filename only. It * is a string that has some meaning to the implementing system. This * can be a SQL wildcard for queries, or a regular expression for * Java-based memory collections. * @return A set of logical filenames that match. The set may be empty */ public Set list( String constraint ) { Set result = new TreeSet(); Pattern p = Pattern.compile(constraint); for ( Iterator i=m_lfn.keySet().iterator(); i.hasNext(); ) { String lfn = (String) i.next(); if ( p.matcher(lfn).matches() ) result.add(lfn); } // done return result; } /** * Inserts a new mapping into the replica catalog. Any existing * mapping of the same LFN and PFN will be replaced, including all its * attributes. * * @param lfn is the logical filename under which to book the entry. * @param tuple is the physical filename and associated PFN attributes. * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. 
*/ public int insert( String lfn, ReplicaCatalogEntry tuple ) { if ( lfn == null || tuple == null ) throw new NullPointerException(); Collection c = null; if ( m_lfn.containsKey(lfn) ) { boolean seen = false; String pfn = tuple.getPFN(); c = (Collection) m_lfn.get(lfn); for ( Iterator i=c.iterator(); i.hasNext() && ! seen; ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) i.next(); if ( (seen = pfn.equals(rce.getPFN())) ) { try { i.remove(); } catch ( UnsupportedOperationException uoe ) { return 0; } } } } else { c = new ArrayList(); m_lfn.put( lfn, c ); } c.add(tuple); return 1; } /** * Inserts a new mapping into the replica catalog. This is a * convenience function exposing the resource handle. Internally, the * ReplicaCatalogEntry element will be contructed, and * passed to the appropriate insert function. * * @param lfn is the logical filename under which to book the entry. * @param pfn is the physical filename associated with it. * @param handle is a resource handle where the PFN resides. * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * @see #insert( String, ReplicaCatalogEntry ) * @see ReplicaCatalogEntry */ public int insert( String lfn, String pfn, String handle ) { if ( lfn == null || pfn == null || handle == null ) throw new NullPointerException(); return insert( lfn, new ReplicaCatalogEntry(pfn,handle) ); } /** * Inserts multiple mappings into the replica catalog. The input is a * map indexed by the LFN. The value for each LFN key is a collection * of replica catalog entries. Note that this operation will replace * existing entries. * * @param x is a map from logical filename string to list of replica * catalog entries. * @return the number of insertions. * @see org.griphyn.common.catalog.ReplicaCatalogEntry */ public int insert( Map x ) { int result = 0; // shortcut sanity if ( x == null || x.size() == 0 ) return result; for ( Iterator i=x.keySet().iterator(); i.hasNext(); ) { String lfn = (String) i.next(); Object val = x.get(lfn); if ( val instanceof ReplicaCatalogEntry ) { // permit misconfigured clients result += insert( lfn, (ReplicaCatalogEntry) val ); } else { // this is how it should have been for ( Iterator j=((Collection) val).iterator(); j.hasNext(); ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) j.next(); result += insert( lfn, rce ); } } } return result; } /** * Deletes a specific mapping from the replica catalog. We don't care * about the resource handle. More than one entry could theoretically * be removed. Upon removal of an entry, all attributes associated * with the PFN also evaporate (cascading deletion). * * @param lfn is the logical filename in the tuple. * @param pfn is the physical filename in the tuple. * @return the number of removed entries. */ public int delete( String lfn, String pfn ) { int result = 0; if ( lfn == null || pfn == null ) return result; Collection c = (Collection) m_lfn.get(lfn); if ( c == null ) return result; List l = new ArrayList(); for ( Iterator i=c.iterator(); i.hasNext(); ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) i.next(); if ( ! rce.getPFN().equals(pfn) ) l.add(rce); } // anything removed? if ( l.size() != c.size() ) { result = c.size() - l.size(); m_lfn.put( lfn, l ); } // done return result; } /** * Deletes multiple mappings into the replica catalog. The input is a * map indexed by the LFN. The value for each LFN key is a collection * of replica catalog entries. 
On setting matchAttributes to false, all entries * having matching lfn pfn mapping to an entry in the Map are deleted. * However, upon removal of an entry, all attributes associated with the pfn * also evaporate (cascaded deletion). * * @param x is a map from logical filename string to list of * replica catalog entries. * @param matchAttributes whether mapping should be deleted only if all * attributes match. * * @return the number of deletions. * @see ReplicaCatalogEntry */ public int delete( Map x , boolean matchAttributes){ throw new java.lang.UnsupportedOperationException ("delete(Map,boolean) not implemented as yet"); } /** * Attempts to see, if all keys in the partial replica catalog entry are * contained in the full replica catalog entry. * * @param full is the full entry to check against. * @param part is the partial entry to check with. * @return true, if contained, false if not contained. */ private boolean matchMe( ReplicaCatalogEntry full, ReplicaCatalogEntry part ) { if ( full.getPFN().equals( part.getPFN() ) ) { for ( Iterator i=part.getAttributeIterator(); i.hasNext(); ) { if ( ! full.hasAttribute((String) i.next()) ) return false; } return true; } else { return false; } } /** * Deletes a very specific mapping from the replica catalog. The LFN * must be matches, the PFN, and all PFN attributes specified in the * replica catalog entry. More than one entry could theoretically be * removed. Upon removal of an entry, all attributes associated with * the PFN also evaporate (cascading deletion). * * @param lfn is the logical filename in the tuple. * @param tuple is a description of the PFN and its attributes. * @return the number of removed entries, either 0 or 1. */ public int delete( String lfn, ReplicaCatalogEntry tuple ) { int result = 0; if ( lfn == null || tuple == null ) return result; Collection c = (Collection) m_lfn.get(lfn); if ( c == null ) return result; List l = new ArrayList(); for ( Iterator i=c.iterator(); i.hasNext(); ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) i.next(); if ( ! matchMe( rce, tuple ) ) l.add(rce); } // anything removed? if ( l.size() != c.size() ) { result = c.size() - l.size(); m_lfn.put( lfn, l ); } // done return result; } /** * Looks for a match of an attribute value in a replica catalog * entry. * * @param rce is the replica catalog entry * @param name is the attribute key to match * @param value is the value to match against * @return true, if a match was found. */ private boolean hasMatchingAttr( ReplicaCatalogEntry rce, String name, Object value ) { if ( rce.hasAttribute(name) ) return rce.getAttribute(name).equals(value); else return value==null; } /** * Deletes all PFN entries for a given LFN from the replica catalog * where the PFN attribute is found, and matches exactly the object * value. This method may be useful to remove all replica entries that * have a certain MD5 sum associated with them. It may also be harmful * overkill. * * @param lfn is the logical filename to look for. * @param name is the PFN attribute name to look for. * @param value is an exact match of the attribute value to match. * @return the number of removed entries. */ public int delete( String lfn, String name, Object value ) { int result = 0; if ( lfn == null || name == null ) return result; Collection c = (Collection) m_lfn.get(lfn); if ( c == null ) return result; List l = new ArrayList(); for ( Iterator i=c.iterator(); i.hasNext(); ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) i.next(); if ( ! 
hasMatchingAttr(rce,name,value) ) l.add(rce); } // anything removed? if ( l.size() != c.size() ) { result = c.size() - l.size(); m_lfn.put( lfn, l ); } // done return result; } /** * Deletes all PFN entries for a given LFN from the replica catalog * where the resource handle is found. Karan requested this * convenience method, which can be coded like *
 * <pre>
 *  delete( lfn, RESOURCE_HANDLE, handle )
 * </pre>
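 * <p>
 * Here, RESOURCE_HANDLE is the well-known PFN attribute key under which
 * the site handle of a replica is stored.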
* * @param lfn is the logical filename to look for. * @param handle is the resource handle * @return the number of entries removed. */ public int deleteByResource( String lfn, String handle ) { return delete( lfn, ReplicaCatalogEntry.RESOURCE_HANDLE, handle ); } /** * Removes all mappings for an LFN from the replica catalog. * * @param lfn is the logical filename to remove all mappings for. * @return the number of removed entries. */ public int remove( String lfn ) { Collection c = (Collection) m_lfn.remove(lfn); if ( c == null ) return 0; else return c.size(); } /** * Removes all mappings for a set of LFNs. * * @param lfns is a set of logical filename to remove all mappings for. * @return the number of removed entries. * @see #remove( String ) */ public int remove( Set lfns ) { int result = 0; // sanity checks if ( lfns == null || lfns.size() == 0 ) return result; for ( Iterator i = lfns.iterator(); i.hasNext(); ) { String lfn = (String) i.next(); result += remove(lfn); } // done return result; } /** * Removes all entries from the replica catalog where the PFN attribute * is found, and matches exactly the object value. * * @param name is the PFN attribute key to look for. * @param value is an exact match of the attribute value to match. * @return the number of removed entries. */ public int removeByAttribute( String name, Object value ) { int result = 0; for ( Iterator i=m_lfn.keySet().iterator(); i.hasNext(); ) { String lfn = (String) i.next(); Collection c = (Collection) m_lfn.get(lfn); if ( c != null ) { List l = new ArrayList(); for ( Iterator j=c.iterator(); j.hasNext(); ) { ReplicaCatalogEntry rce = (ReplicaCatalogEntry) j.next(); if ( ! hasMatchingAttr(rce,name,value) ) l.add(rce); } if ( l.size() != c.size() ) { result += ( c.size() - l.size() ); m_lfn.put( lfn, l ); } } } // done return result; } /** * Removes all entries associated with a particular resource handle. * This is useful, if a site goes offline. It is a convenience method, * which calls the generic removeByAttribute method. * * @param handle is the site handle to remove all entries for. * @return the number of removed entries. * @see #removeByAttribute( String, Object ) */ public int removeByAttribute( String handle ) { return removeByAttribute( ReplicaCatalogEntry.RESOURCE_HANDLE, handle ); } /** * Removes everything. Use with caution! * * @return the number of removed entries. */ public int clear() { int result = m_lfn.size(); m_lfn.clear(); return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/CatalogException.java0000644000175000017500000001010111757531137026771 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog; import java.util.List; import java.util.ArrayList; /** * Class to notify of failures. Exceptions are chained like the * {@link java.sql.SQLException} interface.
 * <p>
 * Here is a fragment of code to chain exceptions for later throwing:
 * <p>
 * <pre>
 * CatalogException rce = null;
 * ... some loop code ... {
 *   ...
 *   if ( exception triggered ) {
 *     if ( rce == null ) rce = new CatalogException( reason );
 *     else rce.setNextException( new CatalogException(reason) );
 *   }
 *   ...
 * } ... loop end ...
 * if ( rce != null ) throw rce;
 * </pre>
 * <p>
 * Here is a fragment of code to unchain exceptions in the client:
 * <p>
 * <pre>
 * try {
 *   ... operation ...
 * } catch ( CatalogException rce ) {
 *   for ( ; rce != null; rce = rce.getNextException() ) {
 *      ... do something with the payload ...
 *   }
 * }
 * </pre>
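 * <p>
 * Note that setNextException appends in insertion order: it walks to the
 * current tail of the chain before linking in the new exception.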
* * @author Karan Vahi * @author Jens-S. Vöckler */ public class CatalogException // method A: no need to change interface, obsfuscated use, though extends java.lang.RuntimeException // method B: needs API small change, but makes things clear. // extends java.lang.Exception { /** * chains the next exception into line. */ private CatalogException m_next_exception = null; /* * Constructs a CatalogException with no detail * message. */ public CatalogException() { super(); m_next_exception = null; } /** * Constructs a CatalogException with the * specified detailed message. * * @param s is the detailled message. */ public CatalogException( String s ) { super(s); m_next_exception = null; } /** * Constructs a CatalogException with the * specified detailed message and a cause. * * @param s is the detailled message. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public CatalogException( String s, Throwable cause ) { super(s,cause); m_next_exception = null; } /** * Constructs a CatalogException with the * specified just a cause. * * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public CatalogException( Throwable cause ) { super(cause); m_next_exception = null; } /** * Retrieves the exception chained to this * CatalogException object. * * @return the next CatalogException object in the chain; * null if there are none. * @see #setNextException( CatalogException ) */ public CatalogException getNextException() { return m_next_exception; } /** * Adds an CatalogException object to the end of * the chain. * * @param ex the new exception that will be added to the end of the * CatalogException chain. * @see #getNextException() */ public void setNextException( CatalogException ex ) { if ( m_next_exception == null ) { m_next_exception = ex; } else { CatalogException temp, rce = m_next_exception; while ( (temp = rce.getNextException()) != null ) { rce = temp; } rce.setNextException(ex); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/WorkCatalog.java0000644000175000017500000000563511757531137025775 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog; import edu.isi.pegasus.planner.catalog.work.WorkCatalogException; /** * * The catalog interface to the Work Catalog, the erstwhile Work DB, that is * populated by tailstatd and associates. * * @author Karan Vahi * @version $Revision$ */ public interface WorkCatalog extends Catalog { /** * Prefix for the property subset to use with this catalog. */ public static final String c_prefix = "pegasus.catalog.work"; /** * The DB Driver properties prefix. 
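 * Keys under this prefix configure the database backend; for example, a
 * JDBC driver might be selected via a key like pegasus.catalog.work.db.driver
 * (key shown for illustration only).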
*/ public static final String DB_PREFIX = "pegasus.catalog.work.db"; /** * The version of the API */ public static final String VERSION = "1.0"; /** * Inserts a new mapping into the work catalog. * * @param basedir the base directory * @param vogroup the vo to which the user belongs to. * @param label the label in the DAX * @param run the run number. * @param creator the user who is running. * @param cTime the creation time of the DAX * @param mTime the modification time. * @param state the state of the workflow * * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * * @throws WorkCatalogException in case of unable to delete entry. */ public int insert(String basedir, String vogroup, String label, String run, String creator, java.util.Date cTime, java.util.Date mTime, int state) throws WorkCatalogException ; /** * Deletes a mapping from the work catalog. * * @param basedir the base directory * @param vogroup the vo to which the user belongs to. * @param label the label in the DAX * @param run the run number. * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * @throws WorkCatalogException in case of unable to delete entry. */ public int delete(String basedir, String vogroup, String label, String run ) throws WorkCatalogException; } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/0000755000175000017500000000000011757531667025762 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/classes/0000755000175000017500000000000011757531667027417 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/classes/TCType.java0000644000175000017500000000163111757531137031423 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.classes; /** * This is an enumerated data class for the different types of transformation. * * @author Gaurang Mehta gmehta@isi.edu * @version $Revision: 2636 $ */ public enum TCType { STATIC_BINARY, INSTALLED, STAGEABLE } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/classes/Arch.java0000644000175000017500000000631111757531137031130 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.catalog.transformation.classes; /** * This is an enumerated data class for the different types of architecture. * * @author Gaurang Mehta gmehta@isi.edu * @version $Revision: 2076 $ */ import java.io.Serializable; import java.util.HashMap; public class Arch implements Serializable { private String _value_; private static HashMap _table_ = new HashMap(5); protected Arch(String value) { _value_ = value; _table_.put(_value_, this); } private static final String _INTEL32 = "INTEL32"; private static final String _INTEL64 = "INTEL64"; private static final String _SPARCV7 = "SPARCV7"; private static final String _SPARCV9 = "SPARCV9"; private static final String _AMD64 = "AMD64"; public static final Arch INTEL32 = new Arch(_INTEL32); public static final Arch INTEL64 = new Arch(_INTEL64); public static final Arch SPARCV7 = new Arch(_SPARCV7); public static final Arch SPARCV9 = new Arch(_SPARCV9); public static final Arch AMD64 = new Arch(_AMD64); public static final String err = "Error: Illegal Architecture defined. Please specify one of the predefined types \n [INTEL32, INTEL64, AMD64, SPARCV7, SPARCV9]"; /** * Returns the value of the architecture as string. * @return String */ public String getValue() { return _value_; } /** * Creates a new Arch Object givan a arch string. * @param value String * @throws IllegalStateException Throws Exception if the architecure is not defined in this class. * @return Arch */ public static Arch fromValue(String value) throws IllegalStateException { Arch m_enum = (Arch) _table_.get(value.toUpperCase()); if (m_enum == null) { throw new IllegalStateException(err); } return m_enum; } /** * Creates a new Arch object given a arch string. * @param value String * @throws IllegalStateException Throws Exception if the architecure is not defined in this class. * @return Arch */ public static Arch fromString(String value) throws IllegalStateException { return fromValue(value); } /** * Compares if a given Arch object is equal to this. * @param obj Object * @return boolean */ public boolean equals(Object obj) { return (obj == this); } public int hashCode() { return toString().hashCode(); } /** * Returns the string value of the architecture. * @return String */ public String toString() { return _value_; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/classes/Os.java0000644000175000017500000000604311757531137030636 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.classes; /** * This is an enumerated data class for the different types of operating systems. 
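 * <p>
 * A minimal usage sketch (illustrative only; the Arch class behaves
 * analogously):
 * <pre>
 *   Os os = Os.fromValue( "linux" );    // lookup is case-insensitive
 *   boolean same = ( os == Os.LINUX );  // true: the constants are canonical,
 *                                       // and equals() compares identity
 * </pre>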
* * @author Gaurang Mehta gmehta@isi.edu * @version $Revision: 2076 $ */ import java.io.Serializable; import java.util.HashMap; public class Os implements Serializable { private String _value_; private static HashMap _table_ = new HashMap(5); protected Os(String value) { _value_ = value; _table_.put(_value_, this); } private static final String _LINUX = "LINUX"; private static final String _SUNOS = "SUNOS"; private static final String _AIX = "AIX"; private static final String _WINDOWS = "WINDOWS"; public static final Os LINUX = new Os(_LINUX); public static final Os SUNOS = new Os(_SUNOS); public static final Os AIX = new Os(_AIX); public static final Os WINDOWS = new Os(_WINDOWS); public static final String err = "Error: Illegal Operating System defined. Please specify one of the predefined types \n [LINUX, SUNOS, AIX, WINDOWS]"; /** * Returns the value of the operating system as string. * @return String */ public String getValue() { return _value_; } /** * Creates a new Os object given an os string. * @param value String * @throws IllegalStateException Throws Exception if the operating system is not defined in this class. * @return Os */ public static Os fromValue(String value) throws IllegalStateException { Os m_enum = (Os) _table_.get(value.toUpperCase()); if (m_enum == null) { throw new IllegalStateException(err); } return m_enum; } /** * Creates a new Os object given an os string. * @param value String * @throws IllegalStateException Throws Exception if the operating system is not defined in this class. * @return Os */ public static Os fromString(String value) throws IllegalStateException { return fromValue(value); } /** * Compares if a given Os object is equal to this. * @param obj Object * @return boolean */ public boolean equals(Object obj) { return (obj == this); } public int hashCode() { return toString().hashCode(); } /** * Returns the string value of the operating system. * @return String */ public String toString() { return _value_; } } ././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/classes/NMI2VDSSysInfo.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/classes/NMI2VDSSysInfo.jav0000644000175000017500000001400311757531137032504 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.classes; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import java.util.HashMap; import java.util.Map; /** * An Adapter class that translates the new NMI based Architecture and OS * specifications to VDS ( VDS era ) Arch and Os objects * * @author Karan Vahi * @version $Revision: 2511 $ */ public class NMI2VDSSysInfo { /** * The map storing architecture to corresponding NMI architecture platforms. */ private static Map< SysInfo.Architecture,Arch > mNMIArchToVDSArchMap = null; /** * The separator used to combine OS version and release. 
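 * For example, nmiToVDSSysInfo combines an OS release of "5" and an OS
 * version of "5.5" into the single VDS os version string "5_5" (release
 * first, then version; the values are illustrative).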
*/ public static final String OS_COMBINE_SEPARATOR = "_"; /** * Singleton access to the NMI arch to VDS arch map. * * @return Map mapping NMI Architecture to VDS Arch object. */ public static Map NMIArchToVDSArchMap(){ //singleton access if( mNMIArchToVDSArchMap == null ){ mNMIArchToVDSArchMap = new HashMap< SysInfo.Architecture,Arch >(); mNMIArchToVDSArchMap.put( SysInfo.Architecture.x86, Arch.INTEL32 ); mNMIArchToVDSArchMap.put( SysInfo.Architecture.x86_64, Arch.INTEL64 ); mNMIArchToVDSArchMap.put( SysInfo.Architecture.amd64, Arch.AMD64 ); //mNMIArchToVDSArch.put( Architecture.x86_64, Arch.AMD64 ); //VDS arch INTEL64 actually meant IA64 mNMIArchToVDSArchMap.put( SysInfo.Architecture.ia64, Arch.INTEL64 ); mNMIArchToVDSArchMap.put( SysInfo.Architecture.sparcv7, Arch.SPARCV7 ); mNMIArchToVDSArchMap.put( SysInfo.Architecture.sparcv9, Arch.SPARCV9 ); } return mNMIArchToVDSArchMap; } /** * The map storing OS to corresponding NMI OS platforms. */ private static Map mNMIOSToVDSOSMap = null; /** * Singleton access to the os to NMI os map. * * * @return Map mapping NMI OS to VDS Os object. */ public static Map NMIOSToVDSOSMap(){ //singleton access if( mNMIOSToVDSOSMap == null ){ mNMIOSToVDSOSMap = new HashMap(); //mNMIOSToVDSOS.put( "rhas_3", Os.LINUX ); mNMIOSToVDSOSMap.put( SysInfo.OS.LINUX, Os.LINUX ); mNMIOSToVDSOSMap.put( SysInfo.OS.WINDOWS, Os.WINDOWS ); mNMIOSToVDSOSMap.put( SysInfo.OS.AIX, Os.AIX ); mNMIOSToVDSOSMap.put( SysInfo.OS.SUNOS, Os.SUNOS ); } return mNMIOSToVDSOSMap; } /** * Returns the VDSSysInfo object. * * @param sysinfo the sysinfo object * * @return VDSSysInfo object */ public static VDSSysInfo nmiToVDSSysInfo(SysInfo sysinfo) { VDSSysInfo result = new VDSSysInfo(); result.setArch( nmiArchToVDSArch( sysinfo.getArchitecture() ) ); result.setOs( nmiOSToVDSOS( sysinfo.getOS() ) ); result.setGlibc( sysinfo.getGlibc() ); //in VDS days os version was release and version. StringBuffer osVersion = new StringBuffer(); String rel = sysinfo.getOSRelease(); if( rel != null && rel.length() != 0 ){ osVersion.append( rel ); String ver = sysinfo.getOSVersion(); if( ver != null && ver.length() != 0 ){ //combine version and release osVersion.append( NMI2VDSSysInfo.OS_COMBINE_SEPARATOR ); osVersion.append( ver ); } } result.setOsversion( osVersion.toString() ); return result; } /** * Returns the VDS VDSSysInfo object corresponding to the NMI arch and OS * * @param arch architecture in the new NMI format * @param os the os in NMI format * @param glibc the glibc version * * @return the VDSSysInfo object */ public static VDSSysInfo nmiToVDSSysInfo( SysInfo.Architecture arch, SysInfo.OS os, String glibc ){ VDSSysInfo result = new VDSSysInfo(); result.setArch( nmiArchToVDSArch(arch) ); result.setOs( nmiOSToVDSOS( os ) ); result.setGlibc(glibc); return result; } /** * Returns the the VDS Arch object corresponding to the new * NMI Architecture object . * * @param arch architecture in the new NMI format. * * @return Arch */ public static Arch nmiArchToVDSArch( SysInfo.Architecture arch ){ return NMIArchToVDSArchMap().get( arch ); } /** * Returns the VDS Arch object corresponding to the new * NMI Architecture object . * * @param arch architecture in the new NMI format. * * @return Arch */ public static Arch nmiArchToVDSArch( String arch ){ return NMIArchToVDSArchMap().get( SysInfo.Architecture.valueOf(arch) ); } /** * Returns the VDS Os object corresponding to the new * NMI OS object . * * @param os the os in the new NMI format. 
* * @return the VDS description of OS */ public static Os nmiOSToVDSOS( SysInfo.OS os ){ return NMIOSToVDSOSMap().get( os ); } /** * Returns the VDS Os object corresponding to the new * NMI OS object . * * @param os the os in the new NMI format. * * @return the VDS description of OS */ public static Os nmiOSToVDSOS( String os ){ return NMIOSToVDSOSMap().get( SysInfo.OS.valueOf(os) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/classes/VDSSysInfo.java0000644000175000017500000001416311757531137032226 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.classes; /** * This class keeps the system information associated with a * resource or transformation. * * @author Gaurang Mehta gmehta@isi.edu * @version $Revision: 3065 $ */ public class VDSSysInfo{ /** * Architecture of the system. */ private Arch arch; /** * Os of the system. */ private Os os; /** * Os version of the system. */ private String osversion; /** * Glibc version of the system */ private String glibc; /** * The secondary convenience constructor. * @param arch Arch The architecture of the system. * @param os Os The os of the system. * @param osversion String The os version of the system. * @param glibc String The glibc version of the system. * @see Arch * @see Os */ public VDSSysInfo(Arch arch, Os os, String osversion, String glibc) { this.arch = (arch == null) ? Arch.INTEL32 : arch; this.os = (os == null) ? Os.LINUX : os; this.osversion = (osversion == null || osversion.equals("") ) ? null: osversion; this.glibc = (glibc == null || glibc.equals(""))? null: glibc; } /** * Another convenience constructor that uses all entries as strings. * @param arch String * @param os String * @param glibc String */ public VDSSysInfo(String arch, String os, String glibc) { this( arch, os, null, glibc ); } /** * Another convenience constructor that uses all entries as strings. * @param arch String * @param os String * @param osversion String * @param glibc String */ public VDSSysInfo(String arch, String os, String osversion, String glibc) { this.arch = (arch == null) ? Arch.INTEL32 : Arch.fromString(arch); this.os = (os == null) ? Os.LINUX : Os.fromString(os); this.osversion = (osversion == null || osversion.equals("") ) ? null: osversion; this.glibc = (glibc == null || glibc.equals(""))? null: glibc; } public VDSSysInfo(String system) { if (system != null) { String s1[] = system.split("::", 2); if (s1.length == 2) { arch = Arch.fromString(s1[0]); String s2[] = s1[1].split(":", 3); os = Os.fromString(s2[0]); for (int i = 1; i < s2.length; i++) { if (i == 1) { osversion = s2[i]; } if (i == 2) { glibc = s2[i]; } } } else { throw new IllegalStateException( "Error : Please check your system info string"); } } else { this.arch = Arch.INTEL32; this.os = Os.LINUX; } } /** * The default constructor. 
* Sets the sysinfo to INTEL32::LINUX */ public VDSSysInfo() { this.arch=Arch.INTEL32; this.os=Os.LINUX; } /** * Sets the architecture of the system. * @param arch Arch * @see Arch */ public void setArch(Arch arch) { this.arch = (arch == null) ? Arch.INTEL32 : arch; } /** * Sets the Os of the sytem. * @param os Os * @see Os */ public void setOs(Os os) { this.os = (os == null) ? Os.LINUX : os; } /** * Sets the Os version of the system. * @param osversion String */ public void setOsversion(String osversion) { this.osversion = osversion; } /** * Sets the glibc version of the system * @param glibc String */ public void setGlibc(String glibc) { this.glibc = glibc; } /** * Returns the architecture of the sytem. * @return Arch * @see Arch */ public Arch getArch() { return arch; } /** * Returns the os type of the system. * @return Os * @see Os */ public Os getOs() { return os; } /** * Returns the os version of the system. * @return String */ public String getOsversion() { return osversion; } /** * Retuns the glibc version of the system. * @return String */ public String getGlibc() { return glibc; } /** * Return a copy of this Sysinfo object * @return Object */ public Object clone() { return new VDSSysInfo(arch, os, osversion, glibc); } /** * Check if the system information matches. * @param obj to be compared. * @return boolean */ public boolean equals(Object obj) { boolean result = false; if(obj instanceof VDSSysInfo){ VDSSysInfo sysinfo = (VDSSysInfo)obj; result = (arch.equals(sysinfo.getArch()) && os.equals(sysinfo.getOs())); } return result; } /** * Returns the output of the data class as string. * @return String */ public String toString() { StringBuffer s = new StringBuffer(); s.append(arch + "::" + os); if (osversion != null && !osversion.isEmpty()) { s.append(":" + osversion); } if (glibc != null && !glibc.isEmpty()) { s.append(":" + glibc); } return s.toString(); } } ././@LongLink0000000000000000000000000000015300000000000011564 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/classes/TransformationStore.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/classes/TransformationStor0000644000175000017500000002530311757531137033213 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.classes; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.TreeMap; /** * * A container data class that is used to store transformations. The transformation * are stored internally indexed by transformation name. * * @author Karan Vahi * @version $Revision: 2672 $ */ public class TransformationStore { /** * The internal store map. The Map is indexed by transformation names. 
* The corresponding value is a Map that contains entries for all sites for * a particular transformation . This map is indexed by site name and * corresponding values are Lists of TransformationCatalogEntry objects. * */ private Map>> mTCStore; /** * The default constructor. */ public TransformationStore(){ initialize(); } /** * Intializes the store. */ private void initialize(){ mTCStore = new TreeMap>>(); } /** * Clears all the entries in the store. */ public void clear(){ //more efficient to create a new object rather than relying on //the underlying map clear method initialize(); } /** * Adds an entry into the store. If the entry already exists i.e entry * for a site and corresponding PFN exists it's overriden. * * @param entry the transformation catalog object. */ public void addEntry( TransformationCatalogEntry entry ){ String completeName = entry.getLogicalTransformation(); if( this.containsTransformation( completeName )){ //retrieve the associated map Map> m = mTCStore.get( completeName ); //check if the transformation is defined for a particular site if( m.containsKey( entry.getResourceId() ) ){ //add an existing entry List l = m.get( entry.getResourceId() ); boolean existing = false; for( TransformationCatalogEntry e : l ){ if( e.getPhysicalTransformation().equals( entry.getPhysicalTransformation() ) ){ //lets overwrite the entry and break out l.remove( e ); l.add( entry ); existing = true; break; } } if ( !existing ){ //an entry with the same pfn does not exist for the site l.add( entry ); } } else{ //no entries for the transformation at the site entry.getResourceId() List l = new LinkedList(); l.add( entry ); m.put( entry.getResourceId(), l ); } } else{ Map> m = new HashMap(); List l = new LinkedList(); l.add( entry ); m.put( entry.getResourceId(), l ); mTCStore.put(completeName, m ); } } /** * Returns List of TransformationCatalogEntry objects for a transformation * on a particular site and a type. If the site parameter passed is null, then all * entries are returned corresponding to a tranformation. If type is null, * then all entries associated with a site are returned. * * @param completeName the complete name of the transformation * @param site the site on which to search for entries. null means all * @param type the type to match on . null means all types. * * @return List if entries are found , else empty list. */ public List getEntries( String completeName, String site, TCType type ){ //retrieve all entries for a site List result = null; //check whether we need to filter on type ? if( type == null ){ result = this.getEntries( completeName, site ); } else{ result = new LinkedList(); for( TransformationCatalogEntry entry : this.getEntries(completeName, site) ){ if( entry.getType().equals( type ) ){ result.add( entry ); } } } return result; } /** * Returns List of TransformationCatalogEntry objects for a transformation * on a particular site. If the site parameter passed is null, then all * entries are returned corresponding to a tranformation. * * @param completeName the complete name of the transformation * @param site the site on which to search for entries. null means all sites * * @return List if entries are found , else empty list. 
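 * <p>
 * A minimal usage sketch (transformation and site names are illustrative):
 * <pre>
 *   // all entries for the transformation, across every site
 *   List all = store.getEntries( "pegasus::keg:1.0", (String)null );
 *   // only the entries registered for site "isi"
 *   List isi = store.getEntries( "pegasus::keg:1.0", "isi" );
 * </pre>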
*/ public List getEntries( String completeName, String site){ List result = new LinkedList(); if( this.containsTransformation( completeName )){ Map> m = mTCStore.get( completeName ); if( site == null ){ //return all entries for( Map.Entry> entry :m.entrySet() ){ result.addAll( entry.getValue() ); } } else if ( m.containsKey( site ) ){ //retrieve all the entries for the site. result.addAll( m.get( site ) ); } } return result; } /** * Returns all the entries in the Transformation Store * * @return all entries. */ public List getAllEntries( ){ return this.getEntries( (String)null, (TCType)null ); } /** * Returns a list of TransformationCatalogEntry objects matching on a site and * transformation type. * * * @param site the site on which to search for entries. null means all * @param type the type to match on . null means all types. * * @return List if transformations exist */ public List getEntries( String site, TCType type ){ List result = new LinkedList(); //retrieve list of all transformation names for( String name: mTCStore.keySet() ){ result.addAll( this.getEntries( name, site, type )); } return result; } /** * Returns a list of transformation names matching on a site and * transformation type. * * * @param site the site on which to search for entries. null means all * @param type the type to match on . null means all types. * * @return List if transformations exist */ public List getTransformations( String site, TCType type ){ List result = new LinkedList(); if( site == null && type == null ){ //retrieve list of all transformation names for( String name: mTCStore.keySet() ){ result.add(name); } return result; } else if ( type == null ){ //no matching on type required only on site for( String name: mTCStore.keySet() ){ Map> m = mTCStore.get( name ); for( String s: m.keySet() ){ if( s.equals( site ) ){ result.add( name ); break; } } } } else{ //(site == null || site is not null ) and match on type for( String name: mTCStore.keySet() ){ Map> m = mTCStore.get( name ); for( String s: m.keySet() ){ boolean matchFound = false; //either site name matches or we searching for all sites if( site == null || site.equals( s )){ //traverse through all entries and match on type List l = m.get( s ); for( TransformationCatalogEntry entry : l ){ if( entry.getType().equals( type )){ result.add( name ); matchFound = true; break; } } } if( matchFound ){ break; } }//end of iterating entries for sites }//end of iteration over all transformation names } return result; } /** * Returns a boolean indicating whether the store contains an entry * corresponding to a particular transformation or not. * * @param namespace the namespace associated with the transformation * @param name the logical name * @param version the version of the transformation * * @return boolean */ public boolean containsTransformation( String namespace, String name, String version ){ return this.mTCStore.containsKey( Separator.combine(namespace, name, version) ); } /** * Returns a boolean indicating whether the store contains an entry * corresponding to a particular transformation or not. 
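 * The name is expected in the combined namespace::name:version form
 * produced by Separator.combine, e.g. "pegasus::keg:1.0" (illustrative).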
* * @param completeName the complete name of the transformation as constructed from * namespace, name and version * * @return boolean */ public boolean containsTransformation( String completeName ){ return this.mTCStore.containsKey( completeName ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/TransformationFactory.java0000644000175000017500000001446611757531137033166 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.classes.PegasusBag; /** * A factory class to load the appropriate implementation of Transformation * Catalog as specified by properties. * * @author Karan Vahi * @version $Revision: 2572 $ */ public class TransformationFactory { /** * Some Constants for backward compatibility. */ public static final String DEFAULT_TC_CLASS = "File"; /** * The default package where all the implementations reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.catalog.transformation.impl"; /** * Connects the interface with the transformation catalog implementation. The * choice of backend is configured through properties. This method uses default * properties from the property singleton. * * @return handle to the Transformation Catalog. * * @throws TransformationFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static TransformationCatalog loadInstance() throws TransformationFactoryException { PegasusBag bag = new PegasusBag(); bag.add( PegasusBag.PEGASUS_LOGMANAGER, LogManagerFactory.loadSingletonInstance() ); bag.add( PegasusBag.PEGASUS_PROPERTIES, PegasusProperties.nonSingletonInstance() ); return loadInstance( bag ); } /** * Connects the interface with the transformation catalog implementation. The * choice of backend is configured through properties. This class is * useful for non-singleton instances that may require changing * properties. * * @param properties is an instance of properties to use. * * @return handle to the Transformation Catalog. * * @throws TransformationFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static TransformationCatalog loadInstance( PegasusProperties properties) throws TransformationFactoryException { PegasusBag bag = new PegasusBag(); bag.add( PegasusBag.PEGASUS_LOGMANAGER, LogManagerFactory.loadSingletonInstance( properties ) ); bag.add( PegasusBag.PEGASUS_PROPERTIES, properties ); return loadInstance( bag ); } /** * Connects the interface with the transformation catalog implementation. 
The * choice of backend is configured through properties. This class is * useful for non-singleton instances that may require changing * properties. * * @param properties is an instance of properties to use. * @param logger handle to the logging. * * @return handle to the Transformation Catalog. * * @throws TransformationFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static TransformationCatalog loadInstance( PegasusProperties properties, LogManager logger ) throws TransformationFactoryException { PegasusBag bag = new PegasusBag(); bag.add( PegasusBag.PEGASUS_LOGMANAGER, logger ); bag.add( PegasusBag.PEGASUS_PROPERTIES, properties ); return loadInstance( bag ); } /** * Connects the interface with the transformation catalog implementation. The * choice of backend is configured through properties. This class is * useful for non-singleton instances that may require changing * properties. * * @param bag is bag of initialization objects * * @return handle to the Transformation Catalog. * * @throws TransformationFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static TransformationCatalog loadInstance( PegasusBag bag ) throws TransformationFactoryException { TransformationCatalog tc = null; /* get the implementor from properties */ String catalogImplementor = bag.getPegasusProperties().getTCMode().trim(); /* prepend the package name if required */ catalogImplementor = (catalogImplementor.indexOf('.') == -1) ? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + catalogImplementor : //load directly catalogImplementor; TransformationCatalog catalog; /* try loading the catalog implementation dynamically */ try { DynamicLoader dl = new DynamicLoader(catalogImplementor); catalog = ( TransformationCatalog ) dl.instantiate( new Object[0] ); if ( catalog == null ){ throw new RuntimeException( "Unable to load " + catalogImplementor ); } catalog.initialize( bag ); } catch (Exception e) { throw new TransformationFactoryException( " Unable to instantiate Transformation Catalog ", catalogImplementor, e); } if (catalog == null) { throw new TransformationFactoryException( " Unable to instantiate Transformation Catalog ", catalogImplementor); } return catalog; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/TCMode.java0000644000175000017500000001016411757531137027732 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.catalog.transformation; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.common.util.DynamicLoader; /** * This class defines all the constants * referring to the various interfaces * to the transformation catalog, and * used by the Concrete Planner. * * @author Gaurang Mehta * @version $Revision: 2567 $ */ public class TCMode { /** * Constants for backward compatibility. */ public static final String SINGLE_READ = "single"; public static final String MULTIPLE_READ = "multiple"; public static final String OLDFILE_TC_CLASS = "OldFile"; public static final String DEFAULT_TC_CLASS = "File"; /** * Default PACKAGE PATH for the TC implementing classes */ public static final String PACKAGE_NAME = "org.griphyn.common.catalog.transformation."; private static LogManager mLogger = LogManagerFactory.loadSingletonInstance(); //add your constants here. /** * This method just checks and gives the correct classname if a user provides the classname in a different case. * @param tcmode String * @return String */ private static String getImplementingClass( String tcmode ) { if ( tcmode.trim().equalsIgnoreCase( SINGLE_READ ) || tcmode.trim().equalsIgnoreCase( MULTIPLE_READ ) ) { return OLDFILE_TC_CLASS; } else { //no match to any predefined constant //assume that the value of readMode is the //name of the implementing class return tcmode; } } /** * The overloaded method which is to be used internally in Pegasus. * * @return TCMechanism */ public static TransformationCatalog loadInstance() { PegasusProperties mProps = PegasusProperties.getInstance(); TransformationCatalog tc = null; String tcClass = getImplementingClass( mProps.getTCMode() ); //if (tcClass.equals(FILE_TC_CLASS)) { // String[] args = {mProps.getTCPath()}; // return loadInstance(tcClass, args); // } else { String[] args = new String[0 ]; tc=loadInstance( tcClass, args ); if(tc==null) { mLogger.log( "Unable to load TC",LogManager.FATAL_MESSAGE_LEVEL); System.exit(1); } return tc; // } } /** * Loads the appropriate TC implementing Class with the given arguments. * @param tcClass String * @param args String[] * @return TCMechanism */ public static TransformationCatalog loadInstance( String tcClass, Object[] args ) { TransformationCatalog tc = null; String methodName = "getInstance"; //get the complete name including //the package if the package name not //specified if ( tcClass.indexOf( "." 
) == -1 ) { tcClass = PACKAGE_NAME + tcClass; } DynamicLoader d = new DynamicLoader( tcClass ); try { tc = ( TransformationCatalog ) d.static_method( methodName, args ); //This identifies the signature for //the method } catch ( Exception e ) { mLogger.log( d.convertException( e ), LogManager.FATAL_MESSAGE_LEVEL); System.exit( 1 ); } return tc; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/client/0000755000175000017500000000000011757531667027240 5ustar ryngerynge././@LongLink0000000000000000000000000000014600000000000011566 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/client/TCFormatUtility.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/client/TCFormatUtility.jav0000644000175000017500000001665211757531137033007 0ustar ryngeryngepackage edu.isi.pegasus.planner.catalog.transformation.client; import java.io.BufferedWriter; import java.io.OutputStreamWriter; import java.util.Date; import java.util.Iterator; import java.util.List; import edu.isi.pegasus.common.util.Currently; import edu.isi.pegasus.common.util.XMLWriter; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.TransformationStore; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.dax.Executable; import edu.isi.pegasus.planner.dax.PFN; import edu.isi.pegasus.planner.dax.Executable.ARCH; import edu.isi.pegasus.planner.dax.Executable.OS; /** * This is a utility class for converting transformation catalog into different formats. * * @author prasanth@isi.edu * @version $Revision $ */ public class TCFormatUtility { /** * Converts the transformations into multi line text format * @param mTCStore the transformation store * @return the text format */ public static String toTextFormat(TransformationStore mTCStore){ String newline = System.getProperty("line.separator", "\r\n"); String indent = ""; StringBuffer buf = new StringBuffer(); String newIndent = indent + "\t"; // write header buf.append( "# multiple line text-based transformation catalog: " + Currently.iso8601(false,true,true,new Date()) ); buf.append( newline ); // write out data //traverse through all the logical transformations in the //catalog for ( Iterator i= mTCStore.getTransformations(null, null).iterator(); i.hasNext(); ) { //transformation is the complete name comprised of namespace,name,version String transformation = (String) i.next(); buf.append( indent ); buf.append( "tr " ); buf.append( transformation ); buf.append( " {" ); buf.append( newline ); //get all the entries for that transformations on all sites for( TransformationCatalogEntry entry: mTCStore.getEntries( transformation, (String)null ) ){ //write out all the entries for the transformation buf.append( toText( entry, newline, newIndent ) ); } buf.append( indent ); buf.append( "}" ); buf.append( newline ); buf.append( newline ); } return buf.toString(); } /** * Converts the transformation catalog entry object to the multi line * textual representation. e.g. 
* * site wind { * profile env "me" "with" * profile condor "more" "test" * pfn "/path/to/keg" * arch "x86" * os "linux" * osrelease "fc" * osversion "4" * type "STAGEABLE" * } * * @param entry the transformation catalog entry * @param newline the newline characters * @param indent the indentation to use * * @return the textual description */ private static String toText( TransformationCatalogEntry entry, String newline , String indent ){ StringBuffer sb = new StringBuffer(); indent = (indent != null && indent.length() > 0 ) ? indent: ""; String newIndent = indent + "\t"; sb.append( indent ); sb.append( "site" ).append( " " ).append( entry.getResourceId() ).append( " {" ).append( newline ); //list out all the profiles List profiles = entry.getProfiles(); if( profiles != null ){ for( Profile p : profiles ){ sb.append( newIndent ).append( "profile" ).append( " " ). append( p.getProfileNamespace() ).append( " " ). append( quote( p.getProfileKey() ) ).append( " "). append( quote( p.getProfileValue())).append( " "). append( newline ); } } //write out the pfn addLineToText( sb, newIndent, newline, "pfn", entry.getPhysicalTransformation() ); //write out sysinfo SysInfo s = entry.getSysInfo(); SysInfo.Architecture arch = s.getArchitecture(); if( arch != null ){ addLineToText( sb, newIndent, newline, "arch", arch.toString() ); } SysInfo.OS os = s.getOS(); if( os != null ){ addLineToText( sb, newIndent, newline, "os", os.toString() ); } String osrelease = s.getOSRelease(); if( osrelease != null && osrelease.length() > 0 ){ addLineToText( sb, newIndent, newline, "osrelease", osrelease ); } String osversion = s.getOSVersion(); if( osversion != null && osversion.length() > 0 ){ addLineToText( sb, newIndent, newline, "osversion", osversion ); } String glibc = s.getGlibc(); if( glibc != null && glibc.length() > 0 ){ addLineToText( sb, newIndent, newline, "glibc", glibc ); } //write out the type addLineToText( sb, newIndent, newline, "type", entry.getType().toString() ); sb.append( indent ).append( "}" ).append( newline ); return sb.toString(); } /** * Convenience method to add a line to the internal textual representation. * * @param sb the StringBuffer to which contents are to be added. * @param newIndent the indentation * @paran newline the newline character * @param key the key * @param value the value */ private static void addLineToText( StringBuffer sb, String newIndent, String newline, String key, String value) { sb.append( newIndent ).append( key ).append( " " ). append( quote( value ) ).append( newline ); } /** * Quotes a String. * * @param str the String to be quoted. * * @return quoted version */ private static String quote( String str ){ //maybe should use the escape class also? StringBuffer sb = new StringBuffer(); sb.append( "\"" ).append( str ).append( "\"" ); return sb.toString(); } /** * Prints the transformations in XML format * @param tStore the transformation store */ // Note : xml format ignores logical profiles associated with a transformation. 
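    // A minimal usage sketch (illustrative only; the entry would normally
    // come from a TransformationCatalog implementation):
    //
    //   TransformationStore store = new TransformationStore();
    //   store.addEntry( entry );  // a TransformationCatalogEntry
    //   System.out.println( TCFormatUtility.toTextFormat( store ) );
    //   TCFormatUtility.printXMLFormat( store );  // XML goes to System.out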
public static void printXMLFormat(TransformationStore tStore){ BufferedWriter pw = new BufferedWriter(new OutputStreamWriter(System.out)); XMLWriter writer = new XMLWriter(pw); for(TransformationCatalogEntry entry :tStore.getEntries(null, (TCType)null) ){ Executable exec = new Executable(entry.getLogicalNamespace(), entry.getLogicalName(), entry.getLogicalVersion()); exec.setArchitecture(ARCH.valueOf(entry.getSysInfo().getArchitecture().toString())); exec.setOS(OS.valueOf(entry.getSysInfo().getOS().toString())); exec.setOSVersion(entry.getSysInfo().getOSVersion()); exec.setGlibc(entry.getSysInfo().getGlibc()); if(entry.getType().equals(TCType.INSTALLED)){ exec.setInstalled(true); }else{ exec.setInstalled(false); } PFN pfn = new PFN(entry.getPhysicalTransformation() , entry.getResourceId()); if(entry.getProfiles() != null){ for (Profile profile:((List)entry.getProfiles()) ){ pfn.addProfile(profile.getProfileNamespace(), profile.getProfileKey(), profile.getProfileValue()); } } exec.addPhysicalFile(pfn); exec.toXML(writer); } writer.close(); return; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/client/TCQuery.java0000644000175000017500000004531611757531137031440 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.client; /** * This is a TCClient class which handles the Query Operations. * * @author Gaurang Mehta gmehta@isi.edu * @version $Revision: 3366 $ */ import java.util.Comparator; import java.util.Iterator; import java.util.List; import java.util.Map; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.ProfileParser; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.TransformationStore; import edu.isi.pegasus.planner.classes.Profile; public class TCQuery extends Client { private final static int TABSPACE = 4; private final static String XML_NAMESPACE="http://pegasus.isi.edu/schema"; private final static String XML_VERSION="2.0"; public TCQuery( TransformationCatalog tc, LogManager mLogger, Map argsmap ) { this.fillArgs( argsmap ); this.tc = tc; this.mLogger = mLogger; } public void doQuery() { //SWitch for what triggers are defined. switch ( trigger ) { case 1: //query and return entire tc if ( !isxml ) { getTC(); } else { getTCXML(); } break; case 2: //query lfns getLfn( resource, type ); break; case 4: //query for PFN getPfn( namespace, name, version, resource, type ); break; case 8: //query for Resource getResource( type ); break; case 18: //query for LFN profiles getLfnProfile( namespace, name, version ); break; case 20: //query for PFN profiles getPfnProfile( pfn, resource, type ); break; default: mLogger.log( "Wrong trigger invoked in TC Query. 
Try pegasus-tc-client --help for a detailed help.", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } } /** * Get logical transformations on a given resource and/or of a particular type. * @param resource The resource on which the transformations exist * @param type the type of the transformation. */ private void getLfn( String resource, String type ) { List l = null; TCType t = ( type == null ) ? null : TCType.valueOf( type ); try { mLogger.log( "Querying the TC for logical names on resource " + resource + " and type " + t, LogManager.DEBUG_MESSAGE_LEVEL ); l = tc.getTCLogicalNames( resource, t ); } catch ( Exception e ) { mLogger.log( "Unable to query for logicalnames", e, LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } if ( l != null ) { //int[] count = ( int[] ) l.get( l.size() - 1 ); int[] count = { 0, 0}; //l.remove( l.size() - 1 ); for ( Iterator itr = l.iterator(); itr.hasNext(); ) { String[] s = ( String[] ) itr.next(); columnLength(s, count); } System.out.println( "#RESID" + getSpace( count[ 0 ], "#RESID".length() ) + " LTX" + getSpace( count[ 1 ], " LTX".length() ) + "TYPE" ); System.out.println( "" ); for ( Iterator i = l.iterator(); i.hasNext(); ) { String[] s = ( String[] ) i.next(); System.out.println( " " + s[ 0 ] + getSpace( count[ 0 ], s[ 0 ].length() ) + s[ 1 ] + getSpace( count[ 1 ], s[ 1 ].length() ) + s[ 2 ] ); } } else { mLogger.log( "No Logical Transformations found.", LogManager.CONSOLE_MESSAGE_LEVEL ); System.exit( 1 ); } } /** * Get physical transformation for a particular logical transformation and/or on a resource and/or of a particular type * @param namespace String Namespace for the transformation. * @param name String Logical name for the transformation. * @param version String Version for the transformation. * @param resource String The resource for the transformation * @param type String The type of the transformation. */ private void getPfn( String namespace, String name, String version, String resource, String type ) { if ( name != null ) { List l = null; TCType t = ( type == null ) ? 
null : TCType.valueOf( type ); try { mLogger.log( "Querying the TC for physical names for lfn " + lfn + " resource " + resource + " type " + type, LogManager.DEBUG_MESSAGE_LEVEL ); l = tc.lookupNoProfiles( namespace, name, version, resource, t ); } catch ( Exception e ) { mLogger.log( "Unable to query for physical names", e, LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } if ( l != null && !l.isEmpty() ) { if(isoldformat){ int count[] = { 0, 0, 0}; for ( TransformationCatalogEntry entry : l ) { String[] s = { entry.getResourceId(), entry.getPhysicalTransformation(), entry.getType().toString(), entry.getSysInfo().toString()}; columnLength(s, count); } System.out.println( "#RESID" + getSpace( count[ 0 ], "#RESID".length() ) + " LTX" + getSpace( lfn.length(), " LTX".length() ) + " PFN" + getSpace( count[ 1 ], "PFN".length() ) + " TYPE" + getSpace( count[ 2 ], "TYPE".length() ) + " SYSINFO" ); System.out.println( "" ); for ( TransformationCatalogEntry entry : l ) { String[] s = { entry.getResourceId(), entry.getPhysicalTransformation(), entry.getType().toString(), entry.getSysInfo().toString()}; System.out.println( s[ 0 ] + getSpace( count[ 0 ], s[ 0 ].length() ) + lfn + getSpace( lfn.length(), lfn.length() ) + s[ 1 ] + getSpace( count[ 1 ], s[ 1 ].length() ) + s[ 2 ] + getSpace( count[ 2 ], s[ 2 ].length() ) + s[ 3 ] ); } }else{ TransformationStore tcStore = new TransformationStore(); for ( TransformationCatalogEntry entry : l ) { tcStore.addEntry(entry); } if(isxml){ TCFormatUtility.printXMLFormat(tcStore); }else{ String textFormat = TCFormatUtility.toTextFormat(tcStore); System.out.println(textFormat); } } } else { mLogger.log( "No Physical Transformations found.", LogManager.CONSOLE_MESSAGE_LEVEL ); System.exit( 1 ); } } else { mLogger.log( "Provide an lfn to list the pfns", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } } /** * Get the LFn profiles associated with a logical transformation * @param namespace String * @param name String * @param version String */ private void getLfnProfile( String namespace, String name, String version ) { if ( name != null ) { List l = null; try { mLogger.log( "Querying the TC for Profiles for lfn " + lfn, LogManager.DEBUG_MESSAGE_LEVEL ); l = tc.lookupLFNProfiles( namespace, name, version ); } catch ( Exception e ) { mLogger.log( "Unable to query the lfn profiles", e, LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } if ( l != null ) { System.out.println( "LFN Profiles :" ); for ( Iterator i = l.iterator(); i.hasNext(); ) { System.out.println( " " + ( Profile ) i.next() ); } } else { mLogger.log( "No LFN Profiles found.", LogManager.CONSOLE_MESSAGE_LEVEL ); System.exit( 1 ); } } else { mLogger.log( "Provide an lfn to list the lfn profiles", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } } /** * Get the profiles associated with a physical transformation. 
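 * All three arguments must be supplied; the method logs an error and exits
 * if any of them is null.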
* @param pfn String * @param resource String * @param type String */ private void getPfnProfile( String pfn, String resource, String type ) { if ( pfn != null && resource != null && type != null ) { List l = null; try { mLogger.log( "Query the TC for profiles with pfn=" + pfn + " type=" + type + " resource=" + resource, LogManager.DEBUG_MESSAGE_LEVEL ); l = tc.lookupPFNProfiles( pfn, resource, TCType.valueOf( type ) ); } catch ( Exception e ) { mLogger.log( "Unable to query the pfn profiles", e, LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } if ( l != null ) { System.out.println( "PFN Profiles :" ); for ( Iterator i = l.iterator(); i.hasNext(); ) { System.out.println( " " + ( Profile ) i.next() ); } } else { mLogger.log( "No PFN Profiles found.", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } } else { mLogger.log( "Please provide an pfn, resource and type to list the pfn profiles", LogManager.CONSOLE_MESSAGE_LEVEL ); System.exit( 1 ); } } /** * Get and print the Resource entries for a given logical transformation and transformation type * @param type the type of the transformation * @throws Exception Throws all kinds of exception */ private void getResource( String type ) { List l = null; TCType t = ( type == null ) ? null : TCType.valueOf( type ); try { l = tc.lookupSites( namespace, name, version, t ); } catch ( Exception e ) { mLogger.log( "Unable to query TC for resources", e, LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } if ( l != null ) { System.out.println( "Resources :" ); for ( Iterator i = l.iterator(); i.hasNext(); ) { System.out.println( " " + ( String ) i.next() ); } } else { mLogger.log( "No resources found.", LogManager.CONSOLE_MESSAGE_LEVEL ); System.exit( 1 ); } } /** * Print all the contents of the TC in pretty print */ private void getTC() { try { List l = tc.getContents(); if (l!=null && !l.isEmpty() ) { if(isoldformat){ //this means entries are there. //get the pretty print column size information. int[] count = {0, 0, 0, 0, 0}; for ( Iterator i = l.iterator(); i.hasNext(); ) { TransformationCatalogEntry tcentry = ( TransformationCatalogEntry ) i.next(); String[] s = {tcentry.getResourceId(), tcentry.getLogicalTransformation(), tcentry.getPhysicalTransformation(), tcentry.getType().toString(), tcentry.getSysInfo().toString(), ( ( tcentry.getProfiles() != null ) ? ProfileParser.combine( tcentry.getProfiles() ) : "NULL" )}; columnLength( s, count ); } System.out.println( "#RESID" + getSpace( count[ 0 ], "#RESID".length() ) + " LTX" + getSpace( count[ 1 ], " LTX".length() ) + " PFN" + getSpace( count[ 2 ], " PFN".length() ) + " TYPE" + getSpace( count[ 3 ], "TYPE".length() ) + " SYSINFO" + getSpace( count[ 4 ], "SYSINFO".length() ) + " PROFILES" ); System.out.println( "" ); //start printing the results. 
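// Hedged illustration of the two-pass pretty print used here: the loop
// above widened count[] via columnLength(), so every cell printed below
// is padded by getSpace() to its column maximum plus TABSPACE. For a
// hypothetical column maximum count[0] = 8 and the cell "local"
// (length 5), getSpace( 8, 5 ) appends 8 + TABSPACE - 5 blanks.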
for ( Iterator i = l.iterator(); i.hasNext(); ) { TransformationCatalogEntry tcentry = ( TransformationCatalogEntry ) i.next(); StringBuffer sb=new StringBuffer(); sb.append(tcentry.getResourceId()); sb.append(getSpace(count[0],tcentry.getResourceId().length())); sb.append(tcentry.getLogicalTransformation()); sb.append(getSpace( count[ 1 ],tcentry.getLogicalTransformation().length())); sb.append(tcentry.getPhysicalTransformation()); sb.append(getSpace(count[2],tcentry.getPhysicalTransformation().length())); sb.append(tcentry.getType()); sb.append(getSpace(count[3],tcentry.getType().toString().length())); sb.append(tcentry.getSysInfo()); sb.append(getSpace(count[4],tcentry.getSysInfo().toString().length())); if( tcentry.getProfiles() != null ) { sb.append(ProfileParser.combine( tcentry.getProfiles())); } else { sb.append("NULL"); } System.out.println( sb ); } }else{ TransformationStore tcStore = new TransformationStore(); for ( TransformationCatalogEntry entry : l ) { tcStore.addEntry(entry); } String textFormat = TCFormatUtility.toTextFormat(tcStore); System.out.println(textFormat); } } else { mLogger.log( "No Entries found in the TC.", LogManager.CONSOLE_MESSAGE_LEVEL ); System.exit( 1 ); } } catch ( Exception e ) { mLogger.log( "Unable to query entire TC", LogManager.FATAL_MESSAGE_LEVEL ); mLogger.log(convertException(e,mLogger.getLevel()),LogManager.FATAL_MESSAGE_LEVEL); System.exit( 1 ); } } private void getTCXML() { try{ List l=tc.getContents(); if (l!=null && !l.isEmpty() ) { TransformationStore tcStore = new TransformationStore(); for ( TransformationCatalogEntry entry : l ) { tcStore.addEntry(entry); } TCFormatUtility.printXMLFormat(tcStore); } }catch ( Exception e ) { mLogger.log( "Unable to query entire TC", LogManager.FATAL_MESSAGE_LEVEL ); mLogger.log(convertException(e,mLogger.getLevel()),LogManager.FATAL_MESSAGE_LEVEL); System.exit( 1 ); } } /** * Gets the required space for pretty printing. * @param maxlength int * @param currlength int * @return String */ private static String getSpace( int maxlength, int currlength ) { int length = maxlength + TABSPACE - currlength; StringBuffer sb = new StringBuffer( length ); for ( int i = 0; i < length; i++ ) { sb.append( " " ); } return sb.toString(); } /** * Computes the maximum column length for pretty printing. * * @param s String[] * @param length int[] */ private static void columnLength( String[] s, int[] length ) { for ( int i = 0; i < length.length; i++ ) { if ( s[ i ].length() > length[ i ] ) { length[ i ] = s[ i ].length(); } } } /** * The comparator that is used to group the TransformationCatalogEntry objects by the * value in the key field. This comparator should only be used for grouping * purposes, not in Sets or Maps etc. */ private class LFNComparator implements Comparator { /** * Compares this object with the specified object for order. Returns a * negative integer, zero, or a positive integer if the first argument is * less than, equal to, or greater than the specified object. The * TransformationCatalogEntry objects are compared by their lfn field. * * @param o1 is the first object to be compared. * @param o2 is the second object to be compared. * * @return a negative number, zero, or a positive number, if the * object compared against is less than, equals or greater than * this object. * @exception ClassCastException if the specified object's type * prevents it from being compared to this Object. 
*/ public int compare(Object o1, Object o2) { if (o1 instanceof TransformationCatalogEntry && o2 instanceof TransformationCatalogEntry) { return ((TransformationCatalogEntry) o1).getLogicalTransformation().compareTo(((TransformationCatalogEntry) o2).getLogicalTransformation()); } else { throw new ClassCastException("object is not TransformationCatalogEntry"); } } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/client/Client.java0000644000175000017500000001172411757531137031316 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.client; /** * This is a helper class which all TC client components (like tcAdd, tcDelete and tcQuery) must extend. * * @author Gaurang Mehta gmehta@isi.edu * @version $Revision: 4795 $ */ import java.io.PrintWriter; import java.io.StringWriter; import java.util.List; import java.util.Map; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.common.util.ProfileParser; import edu.isi.pegasus.common.util.ProfileParserException; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.classes.SysInfo; public class Client { protected int trigger = 0; protected String lfn = null; protected String pfn = null; protected String profile = null; protected String type = null; protected String resource = null; protected String systemstring = null; protected String namespace = null; protected String name = null; protected String version = null; protected List profiles = null; protected SysInfo system = null; protected String file = null; protected LogManager mLogger = null; protected TransformationCatalog tc = null; protected boolean isxml = false; protected boolean isoldformat = false; public Client() { } /** * Takes the arguments from the TCClient and stores them for access by the other TC Client modules. 
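*
* A hypothetical argument map, naming the keys consumed below (the
* "trigger", "isxml" and "isoldformat" values are unboxed, so they must
* be present as Integer/Boolean; the lfn follows the NS::NAME:VER
* convention split by Separator):
*
*   Map argsmap = new HashMap();
*   argsmap.put( "lfn", "pegasus::keg:1.0" );
*   argsmap.put( "pfn", "/usr/bin/keg" );
*   argsmap.put( "resource", "local" );
*   argsmap.put( "type", "INSTALLED" );
*   argsmap.put( "trigger", new Integer( 0 ) );
*   argsmap.put( "isxml", Boolean.FALSE );
*   argsmap.put( "isoldformat", Boolean.FALSE );
*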
* @param argsmap Map */ public void fillArgs( Map argsmap ) { lfn = ( String ) argsmap.get( "lfn" ); pfn = ( String ) argsmap.get( "pfn" ); resource = ( String ) argsmap.get( "resource" ); type = ( String ) argsmap.get( "type" ); profile = ( String ) argsmap.get( "profile" ); systemstring = ( String ) argsmap.get( "system" ); trigger = ( ( Integer ) argsmap.get( "trigger" ) ).intValue(); file = ( String ) argsmap.get( "file" ); isxml = ( ( Boolean ) argsmap.get( "isxml" ) ).booleanValue(); isoldformat = ( ( Boolean ) argsmap.get( "isoldformat" ) ).booleanValue(); if ( lfn != null ) { String[] logicalname = Separator.split( lfn ); namespace = logicalname[ 0 ]; name = logicalname[ 1 ]; version = logicalname[ 2 ]; } if ( profile != null ) { try { profiles = ProfileParser.parse( profile ); } catch ( ProfileParserException ppe ) { mLogger.log( "Parsing profiles " + ppe.getMessage() + " at position " + ppe.getPosition(), ppe, LogManager.ERROR_MESSAGE_LEVEL ); } } if ( systemstring != null ) { system = new SysInfo( systemstring ); } } /** * Returns an error message that chains all the lower order error messages * that might have been thrown. * * @param e the Exception for which the error message has to be composed. * @param logLevel the user specified level for the logger * * @return the error message. */ public static String convertException( Exception e , int logLevel ){ StringBuffer message = new StringBuffer(); int i = 0; //check if we want to throw the whole stack trace if( logLevel >= LogManager.TRACE_MESSAGE_LEVEL ){ //we want the stack trace to a String Writer. StringWriter sw = new StringWriter(); e.printStackTrace( new PrintWriter( sw ) ); return sw.toString(); } //append all the causes for(Throwable cause = e; cause != null ; cause = cause.getCause()){ if( cause instanceof FactoryException ){ //do the specialized convert for Factory Exceptions message.append(((FactoryException)cause).convertException(i)); break; } message.append("\n [").append( Integer.toString(++i)).append("] "). append(cause.getClass().getName()).append(": "). append(cause.getMessage()); //append just one elment of stack trace for each exception message.append( " at " ).append( cause.getStackTrace()[0] ); } return message.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/client/TCDelete.java0000644000175000017500000004147311757531137031535 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.client; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import java.util.Map; /** * This is a TCClient class which handles the Delete Operations. 
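*
* The trigger values dispatched by doDelete() below: 2 deletes by
* logical name, 4 by physical name, 8 by resource, 18 removes LFN
* profiles, 20 removes PFN profiles, 32 deletes by type, 64 by sysinfo,
* and 127 clears the entire catalog. A hypothetical invocation sketch
* (argsmap keys as consumed by Client.fillArgs; tc and logger are a
* connected catalog and a LogManager):
*
*   Map args = new HashMap();
*   args.put( "lfn", "pegasus::keg:1.0" );
*   args.put( "trigger", new Integer( 2 ) );   // delete by logical name
*   args.put( "isxml", Boolean.FALSE );
*   args.put( "isoldformat", Boolean.FALSE );
*   new TCDelete( tc, logger, args ).doDelete();
*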
* * @author Gaurang Mehta gmehta@isi.edu * @version $Revision: 3286 $ */ public class TCDelete extends Client { public TCDelete( TransformationCatalog tc, LogManager mLogger, Map argsmap ) { this.fillArgs( argsmap ); this.tc = tc; this.mLogger = mLogger; } public void doDelete() { //SWitch for what triggers are defined. int status = -1; try { switch ( trigger ) { case 2: //delete TC by logical name if ( name == null ) { mLogger.log( "You need to provide the logical name by which you want to delete", LogManager.ERROR_MESSAGE_LEVEL ); mLogger.log( "See pegasus-tc-client --help or man pegasus-tc-client for more details ", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } else { mLogger.log( "Trying to delete the TC by logical name " + lfn + " on resource " + ( ( resource == null ) ? "ALL" : resource ) + " and type " + ( ( type == null ) ? "ALL" : type ), LogManager.DEBUG_MESSAGE_LEVEL); status = tc.removeByLFN( namespace, name, version, resource, ( ( type == null ) ? null : TCType.valueOf( type ) ) ); if(status ==0) { mLogger.log( "No match found for TC entries by logical name " + lfn + " on resource " + ( ( resource == null ) ? "ALL" : resource ) + " and type " + ( ( type == null ) ? null : type ), LogManager.CONSOLE_MESSAGE_LEVEL ); } else if ( status >= 1) { mLogger.log( "Deleted the TC entries by logical name " + lfn + " on resource " + ( ( resource == null ) ? "ALL" : resource ) + " and type " + ( ( type == null ) ? null : type ), LogManager.CONSOLE_MESSAGE_LEVEL ); } else { mLogger.log( "Unable to detele TC by logical name " + lfn + " on resource " + ( ( resource == null ) ? null : resource ) + " and type " + ( ( type == null ) ? "ALL" : type ) ,LogManager.FATAL_MESSAGE_LEVEL); System.exit( 1 ); } } break; case 4: //delete TC by physical name if ( pfn == null || name == null ) { mLogger.log( "You need to provide the pfn and logical " + "name by which you want to delete" , LogManager.ERROR_MESSAGE_LEVEL); mLogger.log( "See pegasus-tc-client --help or man pegasus-tc-client for more details ", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } else { mLogger.log( "Trying to delete the TC by physicalname " + pfn + " and logical name " + lfn + " on resource " + ( ( resource == null ) ? "ALL" : resource ) + " and type " + ( ( type == null ) ? "ALL" : type ), LogManager.DEBUG_MESSAGE_LEVEL); status = tc.removeByPFN( pfn, namespace, name, version, resource, ( ( type == null ) ? null : TCType.valueOf( type ) ) ); if ( status == 0) { mLogger.log( "No match found for TC entries by physicalname " + pfn + " and logical name " + lfn + " on resource " + ( ( resource == null ) ? "ALL" : resource ) + " and type " + ( ( type == null ) ? "ALL" : type ) ,LogManager.CONSOLE_MESSAGE_LEVEL); } else if ( status >= 1) { mLogger.log( "Deleted the TC entries by physicalname " + pfn + " and logical name " + lfn + " on resource " + ( ( resource == null ) ? "ALL" : resource ) + " and type " + ( ( type == null ) ? "ALL" : type ) ,LogManager.CONSOLE_MESSAGE_LEVEL); } else { mLogger.log( "Unable to delete TC by physicalname " + pfn + " and logical name " + lfn + " on resource " + ( ( resource == null ) ? "ALL" : resource ) + " and type " + ( ( type == null ) ? 
"ALL" : type ),LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } } break; case 8: //delete TC by resource if ( resource == null ) { mLogger.log( "You need to provide the resourceid by which you want to delete", LogManager.ERROR_MESSAGE_LEVEL ); mLogger.log( "See pegasus-tc-client --help or man pegasus-tc-client for more details ", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } else { mLogger.log( "Trying to delete the TC by resourceid " + resource, LogManager.DEBUG_MESSAGE_LEVEL); status = tc.removeBySiteID( resource ); if ( status == 0 ) { mLogger.log( "No match found for TC entries by resourceid " + resource,LogManager.CONSOLE_MESSAGE_LEVEL ); } else if ( status >= 1 ) { mLogger.log( "Deleted the TC entries by resourceid " + resource,LogManager.CONSOLE_MESSAGE_LEVEL ); } else { mLogger.log( "Unable to delete TC by resourceid" , LogManager.FATAL_MESSAGE_LEVEL); System.exit( 1 ); } } break; case 18: //delete TC lfnprofiles if ( name == null ) { mLogger.log( "You need to provide the logical transformation by " + "which you want to delete the profiles", LogManager.ERROR_MESSAGE_LEVEL ); mLogger.log( "See pegasus-tc-client --help or man pegasus-tc-client for more details " , LogManager.FATAL_MESSAGE_LEVEL); System.exit( 1 ); } else { mLogger.log( "Trying to delete the TC LFN profiles for LFN " + lfn,LogManager.DEBUG_MESSAGE_LEVEL); status = tc.deleteLFNProfiles( namespace, name, version, profiles ); if ( status == 0 ) { mLogger.log( "No match found for TC LFN profile entries for LFN " + lfn , LogManager.CONSOLE_MESSAGE_LEVEL); } else if ( status >= 1 ) { mLogger.log( "Deleted the TC LFN profile entries for LFN " + lfn , LogManager.CONSOLE_MESSAGE_LEVEL); } else { mLogger.log( "Unable to delete the TC LFN profiles", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } } break; case 20: //delete TC pfnprofiles if ( pfn == null || resource == null || type == null ) { mLogger.log( " You need to provide the physical transformation, " + " resource and type by which you want to delete the profiles", LogManager.ERROR_MESSAGE_LEVEL ); mLogger.log( "See pegasus-tc-client --help or man pegasus-tc-client for more details ", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } else { mLogger.log( "Trying to delete the TC PFN profiles for PFN " + pfn + " type " + type + " resource " + resource, LogManager.DEBUG_MESSAGE_LEVEL ); status = tc.deletePFNProfiles( pfn, TCType.valueOf( type ), resource, profiles ) ; if ( status == 0) { mLogger.log( "No match found for TC PFN profile entries for PFN " + pfn + " type " + type + " resource " + resource, LogManager.CONSOLE_MESSAGE_LEVEL ); } else if ( status >= 1) { mLogger.log( "Deleted the TC PFN profile entries for PFN " + pfn + " type " + type + " resource " + resource, LogManager.CONSOLE_MESSAGE_LEVEL ); } else { mLogger.log( "Unable to delete the TC PFN profiles", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } } break; case 32: //delete by TC type if ( type == null ) { mLogger.log( "You need to provide the transformation type by " + "which you want to delete the TC." , LogManager.ERROR_MESSAGE_LEVEL); mLogger.log( "See pegasus-tc-client --help or man pegasus-tc-client for more details ", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } else { mLogger.log ("Trying to delete the TC by Type " + type + "and resource " + ( ( resource == null ) ? 
"ALL" : resource ), LogManager.DEBUG_MESSAGE_LEVEL); status = tc.removeByType( TCType.valueOf( type ), resource ); if ( status == 0) { mLogger.log( "No match found for TC entries for Type " + type + " resource " + ( ( resource == null ) ? "ALL" : resource ), LogManager.CONSOLE_MESSAGE_LEVEL ); } else if ( status >= 1) { mLogger.log( "Deleted the TC entries for Type " + type + " resource " + ( ( resource == null ) ? "ALL" : resource ), LogManager.CONSOLE_MESSAGE_LEVEL ); } else { mLogger.log( "Unable to delete the TC by type" , LogManager.FATAL_MESSAGE_LEVEL); System.exit( 1 ); } } break; case 64: //delete the TC by sysinfo. if ( system == null ) { mLogger.log( "You need to provide the transformation sysinfo "+ "by which you want to delete the TC.", LogManager.ERROR_MESSAGE_LEVEL ); mLogger.log( "See pegasus-tc-client --help or man pegasus-tc-client for more details ", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } else { mLogger.log( "Trying to delete the TC by VDSSysInfo " + systemstring, LogManager.DEBUG_MESSAGE_LEVEL); status = tc.removeBySysInfo( system ); if ( status == 0) { mLogger.log( "No match found for TC entries for VDSSysInfo " + systemstring ,LogManager.CONSOLE_MESSAGE_LEVEL); } else if ( status >= 1) { mLogger.log( "Deleted the TC entries for VDSSysInfo " + systemstring ,LogManager.CONSOLE_MESSAGE_LEVEL); } else { mLogger.log( "Unable to delete the TC by VDSSysInfo", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } } break; case 127: //delete entire TC. whoopa. mLogger.log( "Trying to delete the entire TC ", LogManager.DEBUG_MESSAGE_LEVEL); if ( tc.clear() >= 0 ) { mLogger.log( "Deleted the entire tc succesfully", LogManager.CONSOLE_MESSAGE_LEVEL ); } else { mLogger.log( "Error while deleting entire TC", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } break; default: mLogger.log( "Wrong trigger invoked in TC Delete. Try pegasus-tc-client --help for a detailed help." + trigger, LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } } catch ( Exception e ) { mLogger.log( "Unable to do delete operation", LogManager.FATAL_MESSAGE_LEVEL ); mLogger.log(convertException(e,mLogger.getLevel()),LogManager.FATAL_MESSAGE_LEVEL); System.exit( 1 ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/client/TCAdd.java0000644000175000017500000002052511757531137031016 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.client; /** * This is a TCClient class which handles the Add Operations. 
* * @author Gaurang Mehta gmehta@isi.edu * @version $Revision: 4037 $ */ import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.IOException; import java.util.List; import java.util.Map; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.ProfileParser; import edu.isi.pegasus.common.util.ProfileParserException; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.TransformationFactory; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.common.PegasusProperties; public class TCAdd extends Client { public TCAdd( TransformationCatalog tc, LogManager mLogger, Map argsmap ) { this.fillArgs( argsmap ); this.tc = tc; this.mLogger = mLogger; } public void doAdds() { int count =0; try { //SWitch for what triggers are defined. switch ( trigger ) { case 0: //normal tc entry if ( this.addEntry() ) { mLogger.log( "Added tc entry sucessfully", LogManager.CONSOLE_MESSAGE_LEVEL ); } else { mLogger.log( "Unable to add tc entry", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } break; case 1: //bulk mode tc entry if ( this.addBulk() ) { mLogger.log( "Added bulk tc entries sucessfully", LogManager.CONSOLE_MESSAGE_LEVEL ); } else { mLogger.log( "Unable to add bulk tc entries", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } break; case 18: //lfn profile additions if ( lfn == null ) { mLogger.log( "The logical transformation cannot be null. " , LogManager.ERROR_MESSAGE_LEVEL); mLogger.log( "Please try pegasus-tc-client --help or man pegasus-tc-client for more information." , LogManager.ERROR_MESSAGE_LEVEL); System.exit( 1 ); } mLogger.log( "Trying to add profiles for lfn " + lfn + " " + profiles, LogManager.DEBUG_MESSAGE_LEVEL); if ( (count = tc.addLFNProfile( namespace, name, version, profiles )) >= 1 ) { mLogger.log( "Added " + count + " lfn profiles sucessfully", LogManager.CONSOLE_MESSAGE_LEVEL ); } else { mLogger.log( "Unable to add LFN profiles", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } break; case 19: //bulk lfn profile additions break; case 20: //pfn profile additions if ( resource == null || type == null || pfn == null ) { mLogger.log( "The resourceid or physical name or type cannot be null.", LogManager.ERROR_MESSAGE_LEVEL ); mLogger.log( "Please try pegasus-tc-client --help or man pegasus-tc-client for more information.", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } mLogger.log( "Trying to add profiles for pfn " + pfn + " " + resource + " " + type + " " + profiles,LogManager.DEBUG_MESSAGE_LEVEL); if ( (count = tc.addPFNProfile( pfn, TCType.valueOf( type ), resource, profiles )) >= 1 ) { mLogger.log( "Added " +count + "pfn profiles sucessfully", LogManager.CONSOLE_MESSAGE_LEVEL ); } else { mLogger.log( "Unable to add PFN profiles", LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } break; case 21: //bulk pfn profile additions break; default: mLogger.log( "Wrong trigger invoked in TC ADD" , LogManager.ERROR_MESSAGE_LEVEL); mLogger.log( "Check pegasus-tc-client --help or man pegasus-tc-client for correct usage." 
, LogManager.FATAL_MESSAGE_LEVEL); System.exit( 1 ); } } catch ( Exception e ) { mLogger.log( "Unable to add entry to TC", LogManager.FATAL_MESSAGE_LEVEL); mLogger.log(convertException(e,mLogger.getLevel()),LogManager.FATAL_MESSAGE_LEVEL); System.exit( 1 ); } } /** * Adds a single entry into the TC. * @throws Exception * @return boolean */ private boolean addEntry() throws Exception { if ( lfn == null || pfn == null || resource == null ) { System.out.println( "Error : Please enter atleast the lfn, pfn and resource you want to add" ); System.out.println( "See pegasus-tc-client --help for more information." ); System.exit( 1 ); return false; } TCType t = ( type == null ) ? TCType.INSTALLED : TCType.valueOf( type ); mLogger.log( "Trying to add entry in TC with " + namespace + "::" + name + ":" + version + " " + pfn + " " + t + " " + resource + " " + system + " " + profiles, LogManager.DEBUG_MESSAGE_LEVEL); SysInfo s = ( system == null ) ? new SysInfo() : system; return ( tc.insert( namespace, name, version, pfn, t, resource, null, profiles, s ) == 1 ) ? true : false; } /** * Adds multiple entries into the TC. Calls the above api multiple times. * @return boolean */ private boolean addBulk() { List entries = null; PegasusProperties mBag = PegasusProperties.getInstance (); mBag.setProperty( "pegasus.catalog.transformation", "File" ); mBag.setProperty( "pegasus.catalog.transformation.file", file ); TransformationCatalog catalog = TransformationFactory.loadInstance( mBag ); try { entries = catalog.getContents(); mLogger.log( "Loaded " + entries.size( ) + " number of transformations ", LogManager.DEBUG_MESSAGE_LEVEL ); int i = tc.insert( entries ); mLogger.log( "Added " + i + " number of transformations ", LogManager.DEBUG_MESSAGE_LEVEL ); return true; } catch ( Exception e ) { mLogger.log( "Unable to add bulk entries into tc", e, LogManager.ERROR_MESSAGE_LEVEL); return false; } finally { if (catalog != null) { catalog.close (); } } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/Mapper.java0000644000175000017500000001615411757531137030050 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.TCMap; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.mapper.Staged; import edu.isi.pegasus.planner.catalog.transformation.mapper.Submit; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.common.util.Separator; import java.util.ArrayList; import java.util.List; import java.util.Map; /** * This is an interface for generating valid TC maps which will be used for * executable staging. 
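*
* A hypothetical loading-and-query sketch (the mode names correspond to
* the classes of the mapper subpackage: "All", "Installed", "Staged",
* "Submit"; bag is a populated PegasusBag):
*
*   Mapper mapper = Mapper.loadTCMapper( "All", bag );
*   Map sitemap = mapper.getSiteMap( "pegasus", "keg", "1.0", sites );
*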
The interface sort of access as an accessor for Pegasus * to the transformation catalog. The map ends up being built as and when the * query for a particular lfn is made to it. * * @author Gaurang Mehta * @version $Revision: 2572 $ */ public abstract class Mapper { /** * The name of the package where the implementing classes reside. */ public static final String PACKAGE_NAME = "edu.isi.pegasus.planner.catalog.transformation.mapper"; /** * The handle to the tranformation catalog. */ protected TransformationCatalog mTCHandle; /** * The handle to the RIC. */ // protected PoolInfoProvider mPoolHandle; protected SiteStore mSiteStore; /** * Handle to Pegasus Properties */ protected PegasusProperties mProps; /** * Handle to the TCMap */ protected TCMap mTCMap = null; /** * Handle to the logger. */ protected LogManager mLogger; /** * Loads the implementing class corresponding to the mode specified by the user * at runtime in the properties file. * * @param className The name of the class that implements the mode. It is the * name of the class, not the complete name with package. That * is added by itself. * @param bag the bag of initialization objects * * @return Mapper */ public static Mapper loadTCMapper( String className, PegasusBag bag ) { //prepend the package name className = PACKAGE_NAME + "." + className; //try loading the class dynamically Mapper ss = null; DynamicLoader dl = new DynamicLoader( className ); try { Object argList[] = new Object[ 1 ]; argList[ 0 ] = bag; ss = ( Mapper ) dl.instantiate( argList ); } catch ( Exception e ) { System.err.println( dl.convertException( e ) ); System.exit( 1 ); } return ss; } /** * The private constructor. * * @param bag the bag of initialization objects */ protected Mapper( PegasusBag bag ) { mLogger = bag.getLogger(); mTCHandle = bag.getHandleToTransformationCatalog(); mProps = bag.getPegasusProperties(); mSiteStore= bag.getHandleToSiteStore(); mTCMap = new TCMap(); } /** * Returns whether this instance of mapper is an instance of a Stageable * mapper or not. * * @return boolean */ public boolean isStageableMapper(){ return ( ( this instanceof Staged ) || ( this instanceof Submit ) ); } /** * This method returns a Map of compute sites to List of * TransformationCatalogEntry objects that are valid for that site. * * @param namespace the namespace of the transformation. * @param name the name of the transformation. * @param version the version of the transformation. * @param siteids the sites for which you want the map. * * @return Map Key=String SiteId , Values = List of TransformationCatalogEntry * object. Returns null if no entries are found. */ public abstract Map getSiteMap( String namespace, String name, String version, List siteids ); /** * Returns the TCMapper Mode. * * @return String */ public abstract String getMode(); /** * This method returns a List of TransformationCatalog Objects valid for a * particular transformation and for a particular compute site * * @param namespace the namespace of the transformation. * @param name the name of the transformation. * @param version the version of the transformation. * @param siteid the compute site for which you want the List. * @return List Returns null if no entries are found. 
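*
* A hypothetical call, returning the entries valid for a single site
* (mapper as loaded via loadTCMapper above):
*
*   List entries = mapper.getTCList( "pegasus", "keg", "1.0", "local" );
*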
*/ public List getTCList( String namespace, String name, String version, String siteid ) { List siteids = new ArrayList( 1 ); List tcentries = null; String lfn = Separator.combine( namespace, name, version ); siteids.add( siteid ); if ( getSiteMap( namespace, name, version, siteids ) != null ) { tcentries = mTCMap.getSiteTCEntries( lfn, siteid ); } return tcentries; } /** * Returns a list of sites that are valid sites for a given lfn and a list of sites. * * @param namespace the namespace of the transformation. * @param name the name of the transformation. * @param version the version of the transformation. * @param siteids the list of sites on which the transformation is to be checked. * * @return List */ public List getSiteList( String namespace, String name, String version, List siteids ) { List sites = null; String lfn = Separator.combine( namespace, name, version ); if ( getSiteMap( namespace, name, version, siteids ) != null ) { sites = mTCMap.getSiteList( lfn, siteids ); } return sites; } /** * Checks if a give site is valid for a given transformation. * * @param namespace the namespace of the transformation. * @param name the name of the transformation. * @param version the version of the transformation. * @param siteid the site that needs to be checked. * * @return boolean */ public boolean isSiteValid( String namespace, String name, String version, String siteid ) { List siteids = new ArrayList( 1 ); siteids.add( siteid ); Map m = getSiteMap( namespace, name, version, siteids ); return ( m == null || m.isEmpty() ) ? false : true; } } ././@LongLink0000000000000000000000000000015600000000000011567 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/TransformationFactoryException.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/TransformationFactoryExcep0000644000175000017500000000662311757531137033227 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Transformation Catalog * implementations. * * @author Karan Vahi * @version $Revision: 2079 $ */ public class TransformationFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Transformation Catalog"; /** * Constructs a TransformationFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public TransformationFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a TransformationFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. 
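*
* A hypothetical throw site (the classname is only a label carried into
* the logged factory error):
*
*   throw new TransformationFactoryException(
*       "Unable to connect to the backend implementation", "File" );
*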
*/ public TransformationFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a TransformationFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public TransformationFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a TransformationFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public TransformationFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/TransformationCatalogEntry.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/TransformationCatalogEntry0000644000175000017500000004046111757531137033225 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation; import edu.isi.pegasus.planner.catalog.classes.CatalogEntry; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.catalog.classes.VDSSysInfo2NMI; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.NMI2VDSSysInfo; import edu.isi.pegasus.common.util.ProfileParser; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.classes.Notifications; import java.io.IOException; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.dax.Invoke; import java.util.Collection; import java.util.List; /** * An object of this class corresponds to a * tuple in the Transformation Catalog. * @author Gaurang Mehta * @$Revision: 3831 $ * * */ public class TransformationCatalogEntry implements CatalogEntry { /** * The logical mNamespace of the transformation */ private String mNamespace; /** * The mVersion of the transformation. */ private String mVersion; /** * The logical mName of the transformation. 
*/ private String mName; /** * The Id of the resource on which the transformation * is installed. */ private String mResourceID; /** * The physical path on the resource for a particular arch, os and type. */ private String mPFN; /** * The profiles associated with the site. */ private Profiles mProfiles; /** * The System Info for the transformation. */ private SysInfo mSysInfo; /** * The type of transformation. Takes one of the predefined enumerated type TCType. */ private TCType type = TCType.INSTALLED; /** * All the notifications associated with the job */ private Notifications mNotifications; /** * The basic constructor */ public TransformationCatalogEntry() { mNamespace = null; mName = null; mVersion = null; mResourceID = null; mPFN = null; mProfiles = null; // sysinfo = null; mSysInfo = null; mNotifications = new Notifications(); } /** * Optimized Constructor * * @param namespace String * @param name String * @param version String */ public TransformationCatalogEntry( String namespace, String name, String version){ this(); this.mNamespace = namespace; this.mVersion = version; this.mName = name; } /** * Optimized Constructor * * @param namespace String * @param name String * @param version String * @param resourceID String * @param physicalname String * @param type TCType * @param profiles List * @param sysinfo VDSSysInfo */ public TransformationCatalogEntry( String namespace, String name, String version, String resourceid, String physicalname, TCType type, List profiles, VDSSysInfo sysinfo ) { this( namespace, name, version ); this.mResourceID = resourceid; this.mPFN = physicalname; if(profiles != null) { this.mProfiles = new Profiles (); this.mProfiles.addProfiles( profiles ); } // this.sysinfo = sysinfo; mSysInfo = VDSSysInfo2NMI.vdsSysInfo2NMI( sysinfo ); this.type = type; } /** * Overloaded constructor. * * @param namespace the namespace * @param name the name * @param version the version * @param resourceID the site with which entry is associated * @param physicalname the pfn * @param type the type * @param profiles the profiles passed * @param sysinfo the SystemInformation */ private TransformationCatalogEntry( String namespace, String name, String version, String resourceID, String physicalname, TCType type, Profiles profiles, SysInfo sysinfo) { this.mNamespace = namespace; this.mVersion = version; this.mName = name; this.mResourceID = resourceID; this.mPFN = physicalname; this.mProfiles = profiles; this.mNotifications = new Notifications(); // this.sysinfo = sysinfo; mSysInfo = sysinfo; this.type = type; } /** * creates a new instance of this object and returns * you it. A shallow clone. * TO DO : Gaurang correct the clone method. * * @return Object */ public Object clone() { TransformationCatalogEntry entry = new TransformationCatalogEntry( mNamespace, mName, mVersion, mResourceID, mPFN, type, mProfiles, this.getSysInfo() ); entry.addNotifications( this.getNotifications() ); return entry; } /** * gets the String mVersion of the * data class * @return String */ public String toString() { StringBuffer sb = new StringBuffer( ); sb.append( "\n " ). append( "\n Logical Namespace : " ).append( this.mNamespace ). append( "\n Logical Name : " ).append( this.mName ). append( "\n Version : " ).append( this.mVersion ). append( "\n Resource Id : " ).append( this.mResourceID ). append( "\n Physical Name : " ).append( this.mPFN ). append( "\n SysInfo : " ).append( this.getSysInfo() ). append( "\n TYPE : " ).append( ((this.type == null) ? 
"" : type.toString()) ); if( mProfiles != null){ sb.append( "\n Profiles : " ); sb.append( mProfiles ); } sb.append( "\n Notifications: " ).append( this.mNotifications ); return sb.toString(); } /** * Prints out a TC file format String. * @return String */ public String toTCString() { String st = this.getResourceId() + "\t" + this.getLogicalTransformation() + "\t" + this.getPhysicalTransformation() + "\t" + this.getType() + "\t" + this.getVDSSysInfo() + "\t"; if ( mProfiles != null ) { st += ProfileParser.combine( mProfiles ); } else { st += "NULL"; } return st; } /** * Returns an xml output of the contents of the data class. * @return String */ public String toXML() { String xml = "\t\t getNotifications( Invoke.WHEN when ){ return this.mNotifications.getNotifications(when); } /** * Returns all the notifications associated with the job. * * @return the notifications */ public Notifications getNotifications( ){ return this.mNotifications; } /** * Allows you to add one profile at a time to the transformation. * @param profile Profile A single profile consisting of mNamespace, key and value */ public void addProfiles( Profiles profiles ) { if(profiles != null) { if ( this.mProfiles == null ) { this.mProfiles = new Profiles(); } this.mProfiles.addProfilesDirectly( profiles ); } } /** * Allows you to add one profile at a time to the transformation. * @param profile Profile A single profile consisting of mNamespace, key and value */ public void addProfile( Profile profile ) { if ( profile != null ) { if ( this.mProfiles == null ) { this.mProfiles = new Profiles(); } this.mProfiles.addProfileDirectly( profile ); } } /** * Allows you to add multiple profiles to the transformation. * @param profiles List of Profile objects containing the profile information. */ public void addProfiles( List profiles ) { if ( profiles != null ) { if ( this.mProfiles == null ) { this.mProfiles = new Profiles(); } this.mProfiles.addProfilesDirectly( profiles ); } } /** * Gets the Fully Qualified Transformation mName in the format NS::Name:Ver. * @return String */ public String getLogicalTransformation() { return joinLFN( mNamespace, mName, mVersion ); } /** * Returns the Namespace associated with the logical transformation. * @return String Returns null if no mNamespace associated with the transformation. */ public String getLogicalNamespace() { return this.mNamespace; } /** * Returns the Name of the logical transformation. * @return String */ public String getLogicalName() { return this.mName; } /** * Returns the mVersion of the logical transformation. * @return String Returns null if no mVersion assocaited with the transformation. */ public String getLogicalVersion() { return this.mVersion; } /** * Returns the resource where the transformation is located. * @return String */ public String getResourceId() { return this.mResourceID; } /** * Returns the type of the transformation. * @return TCType */ public TCType getType() { return this.type; } /** * Returns the physical location of the transformation. * @return String */ public String getPhysicalTransformation() { return this.mPFN; } /** * Returns the System Information associated with the transformation. * * * @return SysInfo */ public SysInfo getSysInfo( ) { return mSysInfo; } /** * Returns the System Information in the old VDS format associated with the * transformation. * * * @return VDSSysInfo */ public VDSSysInfo getVDSSysInfo( ) { return NMI2VDSSysInfo.nmiToVDSSysInfo( mSysInfo ); } /** * Returns the list of profiles associated with the transformation. 
* @return List Returns null if no profiles associated. */ public List getProfiles() { return ( this.mProfiles == null ) ? null : this.mProfiles.getProfiles(); } /** * Returns the profiles for a particular Namespace. * @param mNamespace String The mNamespace of the profile * @return List List of Profile objects. returns null if none are found. */ public List getProfiles( String namespace ) { return ( this.mProfiles == null ) ? null : mProfiles.getProfiles(namespace); } /** * Joins the 3 components into a fully qualified logical mName of the format NS::NAME:VER * @param mNamespace String * @param mName String * @param mVersion String * @return String */ private static String joinLFN( String namespace, String name, String version ) { return Separator.combine( namespace, name, version ); } /** * Splits the full qualified logical transformation into its components. * @param logicaltransformation String * @return String[] */ private static String[] splitLFN( String logicaltransformation ) { return Separator.split( logicaltransformation ); } /** * Compares two catalog entries for equality. * * @param entry is the entry to compare with * @return true if the entries match, false otherwise */ public boolean equals( TransformationCatalogEntry entry ) { return this.toTCString().equalsIgnoreCase( entry.toTCString() ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/mapper/0000755000175000017500000000000011757531667027246 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/mapper/All.java0000644000175000017500000001363111757531137030615 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.mapper; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.Mapper; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import java.util.Iterator; import java.util.List; import java.util.Map; import edu.isi.pegasus.planner.classes.PegasusBag; /** * This implementation of the mapper generates maps for sites with installed as * well as stageable transformations. * *@author Gaurang Mehta *@version $Revision: 4283 $ */ public class All extends Mapper { /** * The private constructor. * * @param bag the bag of initialization objects */ public All( PegasusBag bag ) { super( bag ); } /** * This method returns a Map of compute sites to List of TransformationCatalogEntry * objects that are valid for that site. * * @param namespace String The namespace of the logical transformation * @param name String The name of the logical transformation * @param version String The version of the logical transformation * @param siteids List The sites for which you want the map. 
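*
* A hypothetical result shape for siteids = [ "local", "isi" ]; the
* values are the entries whose sysinfo matches the site and that are
* either INSTALLED on it or STAGEABLE anywhere:
*
*   { "local" -> [ entry1, entry2 ], "isi" -> [ entry3 ] }
*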
* * @return Map Key=String SiteId , Values = List of TransformationCatalogEntry object. * Returns null if no entries are found. */ public Map getSiteMap( String namespace, String name, String version, List siteids ) { //stores the entries got from the TC List tcentries = null; //stores the string arrays mapping a site to an entry. Map sitemap = null; //stores the system information obtained from RIC Map sysinfomap = null; //the fully qualified lfn String lfn = Separator.combine( namespace, name, version ); boolean hassite = false; //check if the sitemap already exists in the TCMap if ( ( sitemap = mTCMap.getSiteMap( lfn ) ) != null ) { if ( !sitemap.isEmpty() ) { hassite = true; for ( Iterator i = siteids.iterator(); i.hasNext(); ) { //check if the site exists in the sitemap if not then generate sitemap again if ( !sitemap.containsKey( ( String ) i.next() ) ) { hassite = false; } } } if ( hassite ) { return mTCMap.getSitesTCEntries( lfn, siteids ); } } //since sitemap does not exist we need to generate and populate it. //get the TransformationCatalog entries from the TC. try { tcentries = mTCHandle.lookup( namespace, name, version, ( List )null, null ); } catch ( Exception e ) { mLogger.log( "Getting physical names from TC in the TC Mapper\n", e, LogManager.FATAL_MESSAGE_LEVEL ); } //get the system info for the sites from the SC if ( tcentries != null ) { sysinfomap = mSiteStore.getSysInfos( siteids ); } else { throw new RuntimeException( "There are no entries for the transformation \"" + lfn + "\"in the TC" ); } if ( sysinfomap != null ) { for ( Iterator i = siteids.iterator(); i.hasNext(); ) { String site = ( String ) i.next(); SysInfo sitesysinfo = ( SysInfo ) sysinfomap.get( site ); for ( Iterator j = tcentries.iterator(); j.hasNext(); ) { TransformationCatalogEntry entry = ( TransformationCatalogEntry ) j.next(); //get the required stuff from the TCentry. String txsiteid = entry.getResourceId(); TCType txtype = entry.getType(); SysInfo txsysinfo = entry.getSysInfo(); //check for installed and static binary executables at each site. if ( txsysinfo.equals( sitesysinfo ) ) { if ( ( ( txsiteid.equalsIgnoreCase( site ) ) && ( txtype.equals( TCType.INSTALLED ) ) ) || ( txtype.equals( TCType.STAGEABLE ) ) ) { //add the TC entries in the map. mTCMap.setSiteTCEntries( lfn, site, entry ); } } } //outside inner for loop } //outside outer for loop } else { mLogger.log( "There are no entries in the site catalog for site" + siteids.toString(), LogManager.FATAL_MESSAGE_LEVEL ); System.exit( 1 ); } return mTCMap.getSitesTCEntries( lfn, siteids ); } /** * Returns the mode description. */ public String getMode() { return "All Mode - Handle both Installed and Stageable Executables on all sites"; } /** * Returns a Map of entries */ } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/mapper/Installed.java0000644000175000017500000001567611757531137032037 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.catalog.transformation.mapper; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.Mapper; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import java.util.Iterator; import java.util.List; import java.util.ArrayList; import java.util.Map; import edu.isi.pegasus.planner.classes.PegasusBag; /** *This class only generates maps for sites with installed transformations. * *@author Gaurang Mehta *@version $Revision: 2636 $ */ public class Installed extends Mapper { /** * A map that maps a transformation name to the set of sites where it is not * installed. It is used to prevent duplicate queries in case of a * transformation not installed on a particular site. */ // protected Map mNullMap; /** * The protected constructor. * * @param bag the bag of initialization objects */ public Installed( PegasusBag bag ) { super( bag ); } /** * This method returns a Map of compute sites to List of * TransformationCatalogEntry objects that are valid for that site. * * @param namespace String The namespace of the logical transformation * @param name String The name of the logical transformation * @param version String The version of the logical transformation * @param siteids List The sites for which you want the map. * * @return Map Key=String SiteId , Values = List of TransformationCatalogEntry object. * null if no entries are found. */ public Map getSiteMap( String namespace, String name, String version, List siteids ) { //stores the entries got from the TC List tcentries = null; //stores the string arrays mapping a site to an entry. Map sitemap = null; //stores the system information obtained from RIC Map sysinfomap = null; //the fully qualified lfn String lfn = Separator.combine( namespace, name, version ); //optimization. query only for sites where transformation is not //already in case of hassite=false Karan May 13, 2008 Pegasus Bug 33 List falseSites = new ArrayList(); List cacheSites = new ArrayList();//list of sites for which entries already exist boolean hassite = true; //check if the sitemap already exists in the TCMap if ( ( sitemap = mTCMap.getSiteMap( lfn ) ) != null ) { for ( Iterator i = siteids.iterator(); i.hasNext(); ) { //check if the site exists in the sitemap if not then generate sitemap again String site = ( String ) i.next(); if ( !sitemap.containsKey( site ) ) { hassite = false; falseSites.add( site ); } else{ cacheSites.add( site ); } } if ( hassite ) { // CANNOT RETURN THIS. YOU NEED ONLY RETURN THE RELEVANT // ENTRIES MATCHING THE SITES . KARAN SEPT 21, 2005 //return sitemap; return mTCMap.getSitesTCEntries(lfn,siteids); } } //since sitemap does not exist we need to generate and populate it. //get the TransformationCatalog entries from the TC. //Only query for falseSites not the whole sites. Karan May 13, 2008 //Pegasus Bug 33 try { tcentries = mTCHandle.lookup( namespace, name, version, hassite? siteids : falseSites, TCType.INSTALLED ); } catch ( Exception e ) { mLogger.log( "Unable to get physical names from TC in the TC Mapper\n", e ,LogManager.ERROR_MESSAGE_LEVEL); } //get the system info for the sites from the RIC if ( tcentries != null ) { sysinfomap = mSiteStore.getSysInfos( hassite? 
siteids : falseSites ); } else { //throw an execption only if cacheSites is empty if( cacheSites.isEmpty() ){ throw new RuntimeException( "There are no entries for the transformation \"" + lfn + "\" of type \"" + TCType.INSTALLED + "\" the TC" + " for sites " + siteids); } else{ //return entries for cached sites only return mTCMap.getSitesTCEntries( lfn, cacheSites ); } } if ( sysinfomap != null ) { for ( Iterator i = siteids.iterator(); i.hasNext(); ) { String site = ( String ) i.next(); SysInfo sitesysinfo = ( SysInfo ) sysinfomap.get( site ); for ( Iterator j = tcentries.iterator(); j.hasNext(); ) { TransformationCatalogEntry entry = ( TransformationCatalogEntry ) j.next(); //get the required stuff from the TCentry. String txsiteid = entry.getResourceId(); TCType txtype = entry.getType(); SysInfo txsysinfo = entry.getSysInfo(); //check for installed executables at each site. if ( txsysinfo.equals( sitesysinfo ) && txsiteid.equalsIgnoreCase( site ) ) { //add the installed executables in the map. mTCMap.setSiteTCEntries( lfn, site, entry ); } } //outside inner for loop } //outside outer for loop } else { throw new RuntimeException( "There are no entries for the sites \n" + siteids.toString() + "\n" ); } // CANNOT RETURN THIS. YOU NEED ONLY RETURN THE RELEVANT // ENTRIES MATCHING THE SITES . KARAN SEPT 21, 2005 // return mTCMap.getSiteMap( lfn ); return mTCMap.getSitesTCEntries(lfn,siteids); } public String getMode() { return "Installed Mode : Only use Installed executables at the site"; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/mapper/Submit.java0000644000175000017500000001366411757531137031356 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.mapper; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.Mapper; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.classes.PegasusBag; import java.util.Iterator; import java.util.List; import java.util.Map; /** * This implementation of the TCMapper returns a TCMap which only contains * Stageable executables from the Local site. * * @author Gaurang Mehta * @version $Revision: 2638 $ */ public class Submit extends Mapper { /** * The protected constructor. 
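*
* A hypothetical use of this mapper, loaded through the parent class
* (only STAGEABLE entries from the "local" site are ever mapped):
*
*   Mapper submit = Mapper.loadTCMapper( "Submit", bag );
*   List sites = submit.getSiteList( "pegasus", "keg", "1.0", siteids );
*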
* * @param bag the bag of initialization objects */ public Submit( PegasusBag bag ) { super( bag ); } /** * This method returns a Map of compute sites to List of TransformationCatalogEntry * objects that are valid for that site. * * @param namespace String The namespace of the logical transformation * @param name String The name of the logical transformation * @param version String The version of the logical transformation * @param siteids List The sites for which you want the map. * * @return Map Key=String SiteId , Values = List of TransformationCatalogEntry object. * null if no entries are found. */ public Map getSiteMap( String namespace, String name, String version, List siteids ) { //stores the entries got from the TC List tcentries = null; //stores the string arrays mapping a site to an entry. Map sitemap = null; //stores the system information obtained from the RIC Map sysinfomap = null; //the fully qualified lfn String lfn = Separator.combine( namespace, name, version ); //check if the sitemap already exists in the TCMap if ( ( sitemap = mTCMap.getSiteMap( lfn ) ) != null ) { boolean hassite = true; for ( Iterator i = siteids.iterator(); i.hasNext(); ) { //check if the site exists in the sitemap; if not, then generate the sitemap again // Need to check if this can be avoided by making sure Karan always sends me a list of sites rather than individual sites. if ( !sitemap.containsKey( ( String ) i.next() ) ) { hassite = false; } } if ( hassite ) { // CANNOT RETURN THIS. YOU NEED ONLY RETURN THE RELEVANT // ENTRIES MATCHING THE SITES . KARAN SEPT 21, 2005 //return sitemap; return mTCMap.getSitesTCEntries(lfn,siteids); } } //since the sitemap does not exist we need to generate and populate it. //get the TransformationCatalog entries from the TC. try { tcentries = mTCHandle.lookup( namespace, name, version, "local", TCType.STAGEABLE ); } catch ( Exception e ) { mLogger.log( "Unable to get physical names from TC in the TC Mapper\n", e ,LogManager.ERROR_MESSAGE_LEVEL); } //get the system info for the sites from the RIC if ( tcentries != null ) { sysinfomap = mSiteStore.getSysInfos( siteids ); } else { throw new RuntimeException( "There are no entries for the transformation \"" + lfn + "\" on the \"local\" site in the TC for type \"" + TCType.STAGEABLE + "\"" ); } if ( sysinfomap != null ) { for ( Iterator i = siteids.iterator(); i.hasNext(); ) { String site = ( String ) i.next(); SysInfo sitesysinfo = ( SysInfo ) sysinfomap.get( site ); for ( Iterator j = tcentries.iterator(); j.hasNext(); ) { TransformationCatalogEntry entry = ( TransformationCatalogEntry ) j.next(); //get the required attributes from the TC entry. SysInfo txsysinfo = entry.getSysInfo(); //check for stageable executables at the local site. if ( txsysinfo.equals( sitesysinfo ) ) { //add the stageable executables to the map. mTCMap.setSiteTCEntries( lfn, site, entry ); } } //outside inner for loop } //outside outer for loop } else { throw new RuntimeException( "There are no entries for the sites :\n" + siteids.toString() + "\n" ); } // CANNOT RETURN THIS. YOU NEED ONLY RETURN THE RELEVANT // ENTRIES MATCHING THE SITES .
KARAN SEPT 21, 2005 // return mTCMap.getSiteMap( lfn ); return mTCMap.getSitesTCEntries(lfn,siteids); } public String getMode() { return "Local Mode : Only Stageable Executables from the local site"; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/mapper/Staged.java0000644000175000017500000001343511757531137031316 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.mapper; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.Mapper; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import java.util.Iterator; import java.util.List; import java.util.Map; import edu.isi.pegasus.planner.classes.PegasusBag; /** * This implementation only generates maps for sites where the transformation can be staged. * * @author Gaurang Mehta * @version $Revision: 2638 $ */ public class Staged extends Mapper { /** * The protected constructor. * * @param bag the bag of initialization objects */ public Staged( PegasusBag bag ) { super( bag ); } /** * This method returns a Map of compute sites to List of TransformationCatalogEntry * objects that are valid for that site. * @param namespace String The namespace of the logical transformation * @param name String The name of the logical transformation * @param version String The version of the logical transformation * @param siteids List The sites for which you want the map. * @return Map Key=String SiteId , Values = List of TransformationCatalogEntry * object. Returns null if no entries are found. */ public Map getSiteMap( String namespace, String name, String version, List siteids ) { //stores the entries got from the TC List tcentries = null; //stores the string arrays mapping a site to an entry. Map sitemap = null; //stores the system information obtained from the RIC Map sysinfomap = null; //the fully qualified lfn String lfn = Separator.combine( namespace, name, version ); //check if the sitemap already exists in the TCMap if ( ( sitemap = mTCMap.getSiteMap( lfn ) ) != null ) { boolean hassite = true; for ( Iterator i = siteids.iterator(); i.hasNext(); ) { //check if the site exists in the sitemap; if not, then generate the sitemap again // Need to check if this can be avoided by making sure Karan always sends me a list of sites rather than individual sites. if ( !sitemap.containsKey( ( String ) i.next() ) ) { hassite = false; } } if ( hassite ) { // CANNOT RETURN THIS. YOU NEED ONLY RETURN THE RELEVANT // ENTRIES MATCHING THE SITES . KARAN SEPT 21, 2005 //return sitemap; return mTCMap.getSitesTCEntries(lfn,siteids); } } //since the sitemap does not exist we need to generate and populate it. //get the TransformationCatalog entries from the TC.
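/*
 * A condensed sketch of the lookup flow that all three mappers in this
 * package (Installed, Submit, Staged) share; the identifiers are the ones
 * already used in these classes, and the exact lookup arguments (site list,
 * TCType) vary per mapper:
 *
 *   Map cached = mTCMap.getSiteMap( lfn );              // 1. serve from the TCMap cache when possible
 *   List entries = mTCHandle.lookup( namespace, name,
 *                                    version, sites, type ); // 2. otherwise query the TC
 *   Map sysinfos = mSiteStore.getSysInfos( siteids );   // 3. per-site system information from the RIC
 *   // 4. for each (site, entry) pair whose SysInfo matches, cache it:
 *   mTCMap.setSiteTCEntries( lfn, site, entry );
 *   return mTCMap.getSitesTCEntries( lfn, siteids );    // 5. return only the requested sites
 */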
try { tcentries = mTCHandle.lookup( namespace, name, version, ( List )null, TCType.STAGEABLE ); } catch ( Exception e ) { mLogger.log( "Unable to get physical names from TC in the TC Mapper\n", e ,LogManager.ERROR_MESSAGE_LEVEL); } //get the system info for the sites from the RIC if ( tcentries != null ) { sysinfomap = mSiteStore.getSysInfos( siteids ); } else { throw new RuntimeException( "There are no entries for the transformation \"" + lfn + "\" in the TC" ); } if ( sysinfomap != null ) { for ( Iterator i = siteids.iterator(); i.hasNext(); ) { String site = ( String ) i.next(); SysInfo sitesysinfo = ( SysInfo ) sysinfomap.get( site ); for ( Iterator j = tcentries.iterator(); j.hasNext(); ) { TransformationCatalogEntry entry = ( TransformationCatalogEntry ) j.next(); //get the required attributes from the TC entry. String txsiteid = entry.getResourceId(); SysInfo txsysinfo = entry.getSysInfo(); //check for stageable executables at each site. if ( txsysinfo.equals( sitesysinfo ) ) { //add the stageable executables to the map. mTCMap.setSiteTCEntries( lfn, site, entry ); } } //outside inner for loop } //outside outer for loop } else { throw new RuntimeException( "There are no entries for the sites " + siteids.toString() ); } // CANNOT RETURN THIS. YOU NEED ONLY RETURN THE RELEVANT // ENTRIES MATCHING THE SITES . KARAN SEPT 21, 2005 // return mTCMap.getSiteMap( lfn ); return mTCMap.getSitesTCEntries(lfn,siteids); } public String getMode() { return "Stage Mode : Stageable Executables only from all sites"; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/impl/0000755000175000017500000000000011757531667026723 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/impl/Abstract.java0000644000175000017500000000777511757531137031331 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.planner.catalog.transformation.impl; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import java.net.MalformedURLException; import java.net.URL; /** * An abstract base class that provides useful methods for all the * TransformationCatalog implementations to use. * * @author Karan Vahi * @version $Revision: 2728 $ */ public abstract class Abstract implements TransformationCatalog{ /** * Modifies a Transformation Catalog Entry to handle file URLs. * A file URL specified for the physical path is converted to an * absolute path if the type of entry is set to INSTALLED. * * Conversely, an absolute file path is converted to a file URL if the * type of entry is set to STAGEABLE. * * @param entry the transformation catalog entry object. * * @return the TransformationCatalogEntry object.
*/ public static TransformationCatalogEntry modifyForFileURLS( TransformationCatalogEntry entry ){ //sanity checks if( entry == null || entry.getPhysicalTransformation() == null ){ //return without modifying return entry; } String url = entry.getPhysicalTransformation(); //convert file url appropriately for installed executables if ( entry.getType().equals( TCType.INSTALLED) && url.startsWith(TransformationCatalog.FILE_URL_SCHEME)) { try { url = new URL(url).getFile(); entry.setPhysicalTransformation(url); } catch (MalformedURLException ex) { throw new RuntimeException("Error while converting file url ", ex); } } else if ( entry.getType().equals( TCType.STAGEABLE) && url.startsWith("/")) { url = TransformationCatalog.FILE_URL_SCHEME + "//" + url; entry.setPhysicalTransformation(url); } return entry; } /** * Modifies a Transformation Catalog Entry to handle file URL's. * A file URL if specified for the physical path is converted to an * absolute path if the type of entry is set to INSTALLED. * * Alternately it modifies the TC to handle absolute file paths by converting * them to file URL if the type of entry is set to STAGEABLE. * * @param pfn The PFN to modify * @param type The type of PFN * * @return the Transformed PFN. */ public static String modifyForFileURLS( String pfn, String type){ //sanity checks if( pfn== null ){ //return without modifying return pfn; } if ( TCType.valueOf(type)== TCType.INSTALLED && pfn.startsWith(TransformationCatalog.FILE_URL_SCHEME)) { try { pfn = new URL(pfn).getFile(); } catch (MalformedURLException ex) { throw new RuntimeException("Error while converting file url ", ex); } } else if ( TCType.valueOf(type) == TCType.STAGEABLE && pfn.startsWith("/")) { pfn = TransformationCatalog.FILE_URL_SCHEME + "//" + pfn; } return pfn; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/impl/Database.java0000644000175000017500000027230311757531137031271 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.impl; /** * This is the database implementation for the TC. 
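 * The catalog lives in six relational tables (tc_logicaltx, tc_physicaltx,
 * tc_sysinfo, tc_lfnpfnmap, tc_lfnprofile and tc_pfnprofile), and all access
 * goes through the named prepared statements registered in initialize().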
* * @author Gaurang Mehta gmehta@isi.edu * @version $Revision: 4090 $ */ import java.io.IOException; import java.lang.reflect.InvocationTargetException; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import org.griphyn.vdl.dbschema.DatabaseSchema; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.Boolean; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.NMI2VDSSysInfo; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import edu.isi.pegasus.planner.classes.Notifications; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.common.PegasusProperties; public class Database extends DatabaseSchema implements TransformationCatalog { /** * The LogManager object which is * used to log all the messages. * It's values are set in the CPlanner class. */ protected static LogManager mLogger = LogManagerFactory.loadSingletonInstance(); // private static final String TC_MODE = "Database TC Mode"; // private PegasusProperties mProps; private static Database mDatabaseTC = null; /** * Boolean indicating whether to modify the file URL or not * */ private boolean modifyURL = true; /** * Used for a singleton access to the implementation * * @return instance to TransformationCatalog. * * @deprecated * */ public static TransformationCatalog getInstance() { try { if( mDatabaseTC == null ){ PegasusBag bag = new PegasusBag(); bag.add( PegasusBag.PEGASUS_LOGMANAGER, LogManagerFactory.loadSingletonInstance() ); mDatabaseTC = new Database(); mDatabaseTC.initialize( bag ); } return mDatabaseTC; } catch (Exception e) { mLogger.log( "Unable to create Database TC Instance", e, LogManager.ERROR_MESSAGE_LEVEL); return null; } } /** * Initialize the implementation, and return an instance of the implementation. * * @param bag the bag of Pegasus initialization objects. 
* */ public void initialize ( PegasusBag bag ){ mLogger = bag.getLogger(); modifyURL = Boolean.parse(bag.getPegasusProperties().getProperty( MODIFY_FOR_FILE_URLS_KEY), true ); try{ /** * ADD SECTION */ this.m_dbdriver.insertPreparedStatement("stmt.add.lfn", "INSERT INTO tc_logicaltx VALUES (?,?,?,?)"); this.m_dbdriver.insertPreparedStatement("stmt.add.pfn", "INSERT INTO tc_physicaltx VALUES (?,?,?,?,?)"); this.m_dbdriver.insertPreparedStatement("stmt.add.sysinfo", "INSERT INTO tc_sysinfo VALUES (?,?,?,?,?)"); this.m_dbdriver.insertPreparedStatement("stmt.add.pfnprofile", "INSERT INTO tc_pfnprofile(namespace,name,value,pfnid) VALUES (?,?,?,?)"); this.m_dbdriver.insertPreparedStatement("stmt.add.lfnprofile", "INSERT INTO tc_lfnprofile(namespace,name,value,lfnid) VALUES (?,?,?,?)"); this.m_dbdriver.insertPreparedStatement("stmt.add.lfnpfnmap", "INSERT INTO tc_lfnpfnmap(lfnid,pfnid) VALUES (?,?)"); /** * QUERY SECTION */ this.m_dbdriver.insertPreparedStatement("stmt.query.lfns", "SELECT DISTINCT p.resourceid, l.namespace,l.name,l.version, p.type FROM " + "tc_logicaltx l, tc_physicaltx p, tc_lfnpfnmap m " + "WHERE l.id=m.lfnid and p.id=m.pfnid and p.resourceid like ? and p.type like ? " + "ORDER BY resourceid,name,namespace,version"); this.m_dbdriver.insertPreparedStatement("stmt.query.pfns", "SELECT p.resourceid, p.pfn, p.type, s.architecture, s.os, " + "s.osversion, s.glibc " + "FROM tc_lfnpfnmap m, tc_logicaltx l, tc_physicaltx p, tc_sysinfo s " + "WHERE l.id=m.lfnid and p.id=m.pfnid and p.archid=s.id and " + "l.namespace=? and l.name=? and l.version=? and p.resourceid like ? and p.type like ? " + "ORDER by p.resourceid"); this.m_dbdriver.insertPreparedStatement("stmt.query.resource", "SELECT DISTINCT p.resourceid FROM tc_logicaltx l, tc_physicaltx p, tc_lfnpfnmap m " + "WHERE l.id=m.lfnid and p.id=m.pfnid and l.namespace like ? and " + "l.name like ? and l.version like ? and p.type like ? ORDER by resourceid"); this.m_dbdriver.insertPreparedStatement("stmt.query.lfnprofiles", "SELECT pr.namespace, pr.name, pr.value FROM " + "tc_logicaltx l, tc_lfnprofile pr WHERE " + "l.id=pr.lfnid and l.namespace=? and l.name=? and l.version=? ORDER BY namespace"); this.m_dbdriver.insertPreparedStatement("stmt.query.pfnprofiles", "SELECT pr.namespace, pr.name, pr.value FROM tc_physicaltx p, tc_pfnprofile pr " + "WHERE p.id=pr.pfnid and p.pfn=? and p.resourceid like ? and " + "p.type like ? ORDER BY namespace"); this.m_dbdriver.insertPreparedStatement("stmt.query.lfnid", "SELECT id FROM tc_logicaltx WHERE namespace=? and name=? and version=?"); this.m_dbdriver.insertPreparedStatement("stmt.query.sysid", "SELECT id FROM tc_sysinfo WHERE " + "architecture=? and os=? and osversion=? and glibc=?"); this.m_dbdriver.insertPreparedStatement("stmt.query.pfnid", "SELECT id FROM tc_physicaltx p WHERE p.pfn = ? and p.type like ? and p.resourceid like ?"); this.m_dbdriver.insertPreparedStatement("stmt.query.pfnid2", "SELECT p.id FROM tc_physicaltx p,tc_lfnpfnmap m " + "WHERE m.pfnid=p.id and p.resourceid like ? and p.type like ? and m.lfnid=?"); this.m_dbdriver.insertPreparedStatement("stmt.query.lfnpfnmap", "SELECT * FROM tc_lfnpfnmap WHERE lfnid like ? 
and pfnid like ?"); this.m_dbdriver.insertPreparedStatement("stmt.query.tc", "SELECT DISTINCT p.resourceid,l.namespace,l.name, l.version, p.pfn, p.type, " + "s.architecture, s.os, s.osversion, s.glibc " + "FROM tc_logicaltx l, tc_physicaltx p, tc_lfnpfnmap m, tc_sysinfo s " + "WHERE l.id=m.lfnid and p.id=m.pfnid and p.archid=s.id " + "ORDER by p.resourceid, l.name, l.namespace, l.version"); this.m_dbdriver.insertPreparedStatement("stmt.query.lfnprofileid", "SELECT * FROM tc_lfnprofile WHERE " + "namespace=? and name = ? and value =? and lfnid=?"); this.m_dbdriver.insertPreparedStatement("stmt.query.pfnprofileid", "SELECT * FROM tc_pfnprofile WHERE " + "namespace=? and name = ? and value =? and pfnid=?"); /** * DELETE SECTION */ this.m_dbdriver.insertPreparedStatement("stmt.delete.alllfns", "DELETE FROM tc_logicaltx"); this.m_dbdriver.insertPreparedStatement("stmt.delete.alllfnpfnmap", "DELETE FROM tc_lfnpfnmap"); this.m_dbdriver.insertPreparedStatement("stmt.delete.allpfns", "DELETE FROM tc_physicaltx"); this.m_dbdriver.insertPreparedStatement("stmt.delete.allsysinfo", "DELETE FROM tc_sysinfo"); this.m_dbdriver.insertPreparedStatement("stmt.delete.allpfnprofile", "DELETE FROM tc_pfnprofile"); this.m_dbdriver.insertPreparedStatement("stmt.delete.alllfnprofile", "DELETE FROM tc_lfnprofile"); this.m_dbdriver.insertPreparedStatement("stmt.delete.byresourceid", "DELETE FROM tc_physicaltx WHERE resourceid=?"); this.m_dbdriver.insertPreparedStatement("stmt.delete.lfnprofiles", "DELETE FROM tc_lfnprofile WHERE lfnid=?"); this.m_dbdriver.insertPreparedStatement("stmt.delete.pfnprofiles", "DELETE FROM tc_pfnprofile WHERE pfnid=?"); this.m_dbdriver.insertPreparedStatement("stmt.delete.lfnprofile", "DELETE FROM tc_lfnprofile WHERE namespace=? and name=? and value=? and lfnid=?"); this.m_dbdriver.insertPreparedStatement("stmt.delete.pfnprofile", "DELETE FROM tc_pfnprofile WHERE namespace=? and name=? and value=? and pfnid=?"); this.m_dbdriver.insertPreparedStatement("stmt.delete.bytype", "DELETE FROM tc_physicaltx WHERE type=? and resourceid like ?"); this.m_dbdriver.insertPreparedStatement("stmt.delete.lfnpfnmap", "DELETE FROM tc_lfnpfnmap where lfnid like ? and pfnid like ?"); this.m_dbdriver.insertPreparedStatement("stmt.delete.logicaltx", "DELETE FROM tc_logicaltx WHERE id=?"); this.m_dbdriver.insertPreparedStatement("stmt.delete.physicaltx", "DELETE FROM tc_physicaltx WHERE id=?"); this.m_dbdriver.insertPreparedStatement("stmt.delete.sysinfo", "DELETE FROM tc_sysinfo WHERE architecture=? and os=? and osversion=? and glibc=?"); } catch ( SQLException sqe ){ throw new RuntimeException( "SQL exception during initialization" + sqe ); } } public Database() throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, SQLException, IOException { super( (String)null, "pegasus.catalog.transformation.db.schema"); //mLogger = LogManager.getInstance(); mLogger.log("TC Mode being used is " + this.getDescription(), LogManager.CONFIG_MESSAGE_LEVEL); } /** * Returns TC entries for a particular logical transformation and/or on a * particular resource and/or of a particular type. * * @param namespace String The namespace of the logical transformation. * @param name String the name of the logical transformation. * @param version String The version of the logical transformation. * @param resourceid String The resourceid where the transformation is located. * If NULL it returns all resources. 
* @param type TCType The type of the transformation to search for. * If NULL it returns all types. * * @return List Returns a list of TransformationCatalogEntry objects * containing the corresponding entries from the TC. * Returns null if no entry found. * @throws Exception * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public List lookup( String namespace, String name, String version, String resourceid, TCType type ) throws Exception { List resultEntries = null; mLogger.log("Trying to get TCEntries for " + Separator.combine(namespace, name, version) + " on resource " + ( (resourceid == null) ? "ALL" : resourceid) + " of type " + ( (type == null) ? "ALL" : type.toString()), LogManager.DEBUG_MESSAGE_LEVEL); List pfnentries = this.lookupNoProfiles(namespace, name, version, resourceid, type); if (pfnentries != null) { resultEntries = new LinkedList(); List lfnprofiles = this.lookupLFNProfiles(namespace, name, version); for (int i = 0; i < pfnentries.size() - 1; i++) { String[] pfnresult = (String[]) pfnentries.get(i); String qresourceid = pfnresult[0]; String qpfn = pfnresult[1]; String qtype = pfnresult[2]; VDSSysInfo qsysinfo = new VDSSysInfo(pfnresult[3]); List pfnprofiles = this.lookupPFNProfiles(qpfn, qresourceid, TCType.valueOf(qtype)); TransformationCatalogEntry tc = new TransformationCatalogEntry( namespace, name, version, qresourceid, qpfn, TCType.valueOf(qtype), null, qsysinfo); try { if (lfnprofiles != null) { tc.addProfiles(lfnprofiles); } if (pfnprofiles != null) { tc.addProfiles(pfnprofiles); } } catch (RuntimeException e) { mLogger.log( "Ignoring errors while parsing profile in Transformation Catalog DB" + " for " + Separator.combine(namespace, name, version), e, LogManager.WARNING_MESSAGE_LEVEL); } if(modifyURL){ resultEntries.add(Abstract.modifyForFileURLS(tc)); }else{ resultEntries.add(tc); } } } return resultEntries; } /** * Returns TC entries for a particular logical transformation and/or on a * number of resources and/or of a particular type. * * @param namespace String The namespace of the logical transformation. * @param name String the name of the logical transformation. * @param version String The version of the logical transformation. * @param resourceids List The List resourceid where the transformation is located. * If NULL it returns all resources. * @param type TCType The type of the transformation to search for. * If NULL it returns all types. * * @return List Returns a list of TransformationCatalogEntry objects containing * the corresponding entries from the TC. Returns null if no entry found. * @throws Exception * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public List lookup( String namespace, String name, String version, List resourceids, TCType type ) throws Exception { List results = null; List siteids=resourceids; if (siteids != null) { for (String site : siteids) { List tempresults = lookup(namespace, name, version, site, type); if (tempresults != null) { if (results == null) { results = new LinkedList(); } results.addAll(tempresults); } } } else { results = lookup(namespace, name, version, (String)null, type); } return results; } /** * List all the contents of the TC * * @return List Returns a List of TransformationCatalogEntry objects. * @throws Exception */ public List getContents() throws Exception { //get the statement. 
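/*
 * Every query method in this class follows the same pattern: fetch a named
 * PreparedStatement registered in initialize(), bind a literal "%" for any
 * unconstrained column (those columns are matched with LIKE in the SQL),
 * execute, and walk the ResultSet into a List. A condensed sketch of that
 * pattern, using the bindings from lookupNoProfiles() further below (the
 * "stmt.query.tc" statement used in this method is the one that takes no
 * parameters):
 *
 *   PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.query.pfns" );
 *   ps.setString( 1, makeNotNull( namespace ) );              // exact match
 *   ps.setString( 4, resourceid == null ? "%" : resourceid ); // LIKE wildcard
 *   ResultSet rs = ps.executeQuery();
 *   while ( rs.next() ) { ... }                               // build entries
 *   rs.close();
 */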
PreparedStatement ps = m_dbdriver.getPreparedStatement("stmt.query.tc"); //execute the query ResultSet rs = ps.executeQuery(); List result = new ArrayList(); // int[] count = {0, 0, 0, 0, 0}; while (rs.next()) { // if ( result == null ) { // result = new ArrayList(); // } //get the entries. String resourceid = rs.getString(1); String namespace = rs.getString(2); String name = rs.getString(3); String version = rs.getString(4); // String lfn = Separator.combine( namespace, name, version ); String pfn = rs.getString(5); String type = rs.getString(6); String sysinfo = new VDSSysInfo(rs.getString(7), rs.getString(8), rs.getString(9), rs.getString(10)). toString(); List pfnprofiles = this.lookupPFNProfiles(pfn, resourceid, TCType.valueOf(type)); List lfnprofiles = this.lookupLFNProfiles(namespace, name, version); // String profiles = null; List allprofiles = null; if (lfnprofiles != null) { allprofiles = new ArrayList(lfnprofiles); } if (pfnprofiles != null) { if (allprofiles == null) { allprofiles = new ArrayList(pfnprofiles); } else { allprofiles.addAll(pfnprofiles); } } //get the profiles and construct a string out of them. // if ( allprofiles != null ) { // profiles = ProfileParser.combine( allprofiles ); // } //add them to the array. TransformationCatalogEntry tcentry = new TransformationCatalogEntry( namespace, name, version, resourceid, pfn, TCType.valueOf(type), allprofiles, new VDSSysInfo(sysinfo)); //caculate the max length of each column // columnLength( s, count ); //add the array to the list to be returned. result.add(tcentry); } rs.close(); // if ( result != null ) { //set the column length info as the last element // result.add( count ); // } return result; } /** * Get the list of Resource ID's where a particular transformation may reside. * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * @param type TCType The type of the transformation to search for.
* (Enumerated type includes STAGEABLE and INSTALLED; the legacy STATIC-BINARY value is treated as STAGEABLE.)
* If NULL it returns all types. * * @return List Returns a list of Resource Id's as strings. Returns NULL if no results found. * * @throws Exception NotImplementedException if not implemented * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public List lookupSites( String namespace, String name, String version, TCType type ) throws Exception { //get the statement PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.query.resource"); //set the parameters //if name is null then search for all lfn if (name == null) { ps.setString(1, "%"); ps.setString(2, "%"); ps.setString(3, "%"); } else { ps.setString(1, makeNotNull(namespace)); ps.setString(2, makeNotNull(name)); ps.setString(3, makeNotNull(version)); } //if type is null then search for all type String temp; if( type==null){ temp="%"; } else if (type==TCType.STAGEABLE || type==TCType.STATIC_BINARY) { temp="STA%"; } else { temp=type.toString(); } ps.setString(4, temp); //execute the query. ResultSet rs = ps.executeQuery(); List result = null; while (rs.next()) { if (result == null) { result = new ArrayList(); } //add the results to the list. result.add(rs.getString(1)); } rs.close(); return result; } /** * Get the list of PhysicalNames for a particular transformation on a site/sites * for a particular type/types; * * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * @param resourceid String The id of the resource on which you want to search.
* If NULL then returns entries on all resources. * @param type TCType The type of the transformation to search for.
* (Enumerated type includes STAGEABLE and INSTALLED.)
* If NULL then returns entries of all types. * * @return List Returns a List of objects * with the profiles not populated. * * @throws Exception NotImplementedException if not implemented. * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public List lookupNoProfiles( String namespace, String name,String version,String resourceid, TCType type ) throws Exception { /* int[] count = { 0, 0, 0}; */ PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.query.pfns"); ps.setString(1, makeNotNull(namespace)); ps.setString(2, makeNotNull(name)); ps.setString(3, makeNotNull(version)); String temp = (resourceid != null) ? resourceid : "%"; ps.setString(4, temp); if( type==null){ temp="%"; } else if (type==TCType.STAGEABLE || type==TCType.STATIC_BINARY) { temp="STA%"; } else { temp=type.toString(); } ps.setString(5, temp); ResultSet rs = ps.executeQuery(); List result = null; while (rs.next()) { if (result == null) { result = new ArrayList(); } String ttype=rs.getString(3); if(TCType.valueOf(ttype)==TCType.STATIC_BINARY){ ttype=TCType.STAGEABLE.toString(); } String pfn=rs.getString(2); if(modifyURL){ pfn=Abstract.modifyForFileURLS(pfn, ttype); } TransformationCatalogEntry entry = new TransformationCatalogEntry(namespace,name,version); entry.setPhysicalTransformation(pfn); entry.setType(TCType.valueOf(ttype)); entry.setVDSSysInfo(new VDSSysInfo(rs.getString(4), rs.getString(5), rs.getString(6), rs.getString(7))); entry.setResourceId(rs.getString(1)); result.add(entry); //columnLength(s, count); } rs.close(); /* if (result != null) { result.add(count); } */ return result; } /** * Get the list of LogicalNames available on a particular resource. * @param resourceid String The id of the resource on which you want to search * @param type TCType The type of the transformation to search for.
* (Enumerated type includes stageable and installed)
* If NULL then return logical name for all types. * * @return List Returns a list of String Arrays. * Each array contains the resourceid, logical transformation * in the format namespace::name:version and type. * Returns NULL if no results found. * * @throws Exception NotImplementedException if not implemented. */ public List getTCLogicalNames( String resourceid, TCType type ) throws Exception { /* int[] count = { 0, 0}; */ PreparedStatement ps = null; ps = this.m_dbdriver.getPreparedStatement("stmt.query.lfns"); String temp; temp = (resourceid == null) ? "%" : resourceid; ps.setString(1, temp); if( type==null){ temp="%"; } else if (type==TCType.STAGEABLE || type==TCType.STATIC_BINARY) { temp="STA%"; } else { temp=type.toString(); } ps.setString(2, temp); ResultSet rs = ps.executeQuery(); List result = null; while (rs.next()) { if (result == null) { result = new ArrayList(); } String ttype= rs.getString(5); if(TCType.valueOf(ttype)==TCType.STATIC_BINARY){ ttype=TCType.STAGEABLE.toString(); } String[] st = { rs.getString(1), Separator.combine(rs.getString(2), rs.getString(3), rs.getString(4)), ttype}; //columnLength(st, count); result.add(st); } rs.close(); /* if (result != null) { result.add(count); } */ return result; } /** * Get the list of Profiles associated with a particular logical transformation. * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * * @return List Returns a list of Profile Objects containing profiles * associated with the transformation. * Returns NULL if no profiles found. * * @throws Exception NotImplementedException if not implemented. * @see org.griphyn.cPlanner.classes.Profile */ public List lookupLFNProfiles( String namespace, String name, String version ) throws Exception { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.query.lfnprofiles"); ps.setString(1, makeNotNull(namespace)); ps.setString(2, makeNotNull(name)); ps.setString(3, makeNotNull(version)); ResultSet rs = ps.executeQuery(); List result = null; while (rs.next()) { if (result == null) { result = new LinkedList(); } Profile p = new Profile(rs.getString(1), rs.getString(2), rs.getString(3)); result.add(p); } rs.close(); return result; } /** * Get the list of Profiles associated with a particular physical transformation. * @param pfn The physical file name to search the transformation by. * @param resourceid String The id of the resource on which you want to search. * @param type TCType The type of the transformation to search for.
* (Enumerated type includes STAGEABLE and INSTALLED.)
* * @throws Exception NotImplementedException if not implemented. * @return List Returns a list of Profile Objects containing profiles * associated with the transformation. * Returns NULL if no profiles found. * * @see org.griphyn.cPlanner.classes.Profile */ public List lookupPFNProfiles( String pfn, String resourceid, TCType type ) throws Exception { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.query.pfnprofiles"); ps.setString(1, pfn); String temp = (resourceid != null) ? resourceid : "%"; ps.setString(2, temp); if( type==null){ temp="%"; } else if (type==TCType.STAGEABLE || type==TCType.STATIC_BINARY) { temp="STA%"; } else { temp=type.toString(); } ps.setString(3, temp); ResultSet rs = ps.executeQuery(); List result = null; while (rs.next()) { if (result == null) { result = new LinkedList(); } Profile p = new Profile(rs.getString(1), rs.getString(2), rs.getString(3)); result.add(p); } rs.close(); return result; } /** * ADDITIONS */ /** * Add multiple TCEntries to the Catalog. * * @param tcentry List Takes a list of TransformationCatalogEntry objects as input * * @throws Exception * @return number of insertions On failure,throw an exception, don't use zero. * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert( List tcentry ) throws Exception { for (int i = 0; i < tcentry.size(); i++) { TransformationCatalogEntry entry = ( (TransformationCatalogEntry) tcentry.get(i)); this.insert(entry); } return tcentry.size(); } /** * Add single TCEntry to the Catalog. * @param tcentry Takes a single TransformationCatalogEntry object as input * @throws Exception * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert(TransformationCatalogEntry entry) throws Exception { return this.insert(entry.getLogicalNamespace(), entry.getLogicalName(), entry.getLogicalVersion(), entry.getPhysicalTransformation(), entry.getType(), entry.getResourceId(), null, entry.getProfiles(), entry.getSysInfo()); } /** * Add single TCEntry object temporarily to the in memory Catalog. * This is a hack to get around for adding soft state entries to the TC * @param tcentry Takes a single TransformationCatalogEntry object as input * @param write boolean enable write commits to backed catalog or not. * @throws Exception * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert(TransformationCatalogEntry entry, boolean write) throws Exception { if(this.addTCEntry(entry.getLogicalNamespace(), entry.getLogicalName(), entry.getLogicalVersion(), entry.getPhysicalTransformation(), entry.getType(), entry.getResourceId(), null, entry.getProfiles(), entry.getSysInfo(), entry.getNotifications(), write)){ return 1; }else{ throw new RuntimeException("Failed to add TransformationCatalogEntry " + entry.getLogicalName()); } } /** * Add an single entry into the transformation catalog. * * @param namespace String The namespace of the transformation to be added (Can be null) * @param name String The name of the transformation to be added. * @param version String The version of the transformation to be added. (Can be null) * @param physicalname String The physical name/location of the transformation to be added. * @param type TCType The type of the physical transformation. 
* @param resourceid String The resource location id where the transformation is located. * @param lfnprofiles List The List of Profile objects associated with a Logical Transformation. (can be null) * @param pfnprofiles List The List of Profile objects associated with a Physical Transformation. (can be null) * @param sysinfo SysInfo The System information associated with a physical transformation. * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * * @throws Exception * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @see org.griphyn.cPlanner.classes.Profile */ public int insert(String namespace, String name, String version, String physicalname, TCType type, String resourceid, List lfnprofiles, List pfnprofiles, SysInfo system) throws Exception { if(this.addTCEntry(namespace, name, version, physicalname, type, resourceid, lfnprofiles, pfnprofiles, system, null, true)){ return 1; }else{ throw new RuntimeException("Failed to add TransformationCatalogEntry " + name); } } /** * Add an single entry into the transformation catalog. * * @param namespace String The namespace of the transformation to be * added (Can be null) * @param name String The name of the transformation to be added. * * @param version String The version of the transformation to be added. * (Can be null) * @param physicalname String The physical name/location of the transformation * to be added. * @param type TCType The type of the physical transformation. * @param resourceid String The resource location id where the * transformation is located. * @param lfnprofiles List The List of Profile objects associated with * a Logical Transformation. (can be null) * @param pfnprofiles List The List of Profile objects associated with * a Physical Transformation. (can be null) * @param system SysInfo The System information associated with a * physical transformation. * @param invokes the Notifications associated with the * transformation. * @param write boolean to commit changes to the backend catalog * @throws Exception * @return boolean Returns true if succesfully added, returns false if * error and throws exception. * @see org.griphyn.common.catalog.TransformationCatalogEntry * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @see org.griphyn.cPlanner.classes.Profile */ protected boolean addTCEntry(String namespace, String name, String version, String physicalname, TCType type, String resourceid, List lfnprofiles, List pfnprofiles, SysInfo system, Notifications invokes, boolean write) throws Exception { if(!write) return false; //ADD LFN //try to add the logical name of the transformation long lfnid = -1; if ( (lfnid = this.getLogicalId(namespace, name, version)) == -1) { //the lfn does not exist so we can go ahead if ( (lfnid = this.addLogicalTr(namespace, name, version)) == -1) { return false; } } else { mLogger.log("Logical tr entry already exists", LogManager.DEBUG_MESSAGE_LEVEL); } //ADD SYSINFO //now since the lfn is in lets check if the architecture info is there. long archid = -1; VDSSysInfo vdsSystem = NMI2VDSSysInfo.nmiToVDSSysInfo(system); if ( (archid = this.getSysInfoId(vdsSystem)) == -1) { //this means sytem information does not exist and we have to add it. 
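/*
 * (Every step of addTCEntry uses the same get-or-insert idiom: a getXXXId
 * helper returns -1 when the row is absent, and only then is the matching
 * addXXX insert issued, e.g.
 *
 *   long archid = this.getSysInfoId( vdsSystem );              // -1 == missing
 *   if ( archid == -1 ) archid = this.addSysInfo( vdsSystem );
 *
 * The same pairing is applied to tc_logicaltx, tc_physicaltx and the
 * tc_lfnpfnmap row, with a single commit once all rows are in place.)
 */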
if ( (archid = this.addSysInfo(vdsSystem)) == -1) { return false; } // archid = this.getSysInfoId( system ); } else { mLogger.log("Sysinfo entry already exists", LogManager.DEBUG_MESSAGE_LEVEL); } //ADD PFN //now since the sysinfo is in the map lets add the pfn to the table. long pfnid = -1; if ( (pfnid = this.getPhysicalId(physicalname, type, resourceid)) == -1) { //since pfn is not in the database if ( (pfnid = this.addPhysicalTr(physicalname, resourceid, type, archid)) == -1) { return false; } } else { mLogger.log("PFN entry already exists", LogManager.DEBUG_MESSAGE_LEVEL); } //ADD LFN-PFN MAP //now since the pfn is in lets check the lfnpfn map if it is correct if (this.checkLfnPfnMap(lfnid, pfnid) == -1) { //entry does not exist and we need to add it PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.add.lfnpfnmap"); ps.setLong(1, lfnid); ps.setLong(2, pfnid); try { ps.executeUpdate(); } catch (SQLException e) { mLogger.log( "Unable to add lfn-pfn mapping entry to the TC :" + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); m_dbdriver.cancelPreparedStatement("stmt.add.lfnpfnmap"); m_dbdriver.rollback(); return false; } } else { mLogger.log("LFN - PFN mapping entry already exists", LogManager.DEBUG_MESSAGE_LEVEL); } //lfn pfn mapping is done now lets move to profiles. //ADD PFN PROFILES if (pfnprofiles != null) { for (Iterator i = pfnprofiles.iterator(); i.hasNext(); ) { Profile p = (Profile) i.next(); this.addProfile(p, pfnid, true); } } //ADD LFN PROFILES if (lfnprofiles != null) { for (Iterator i = lfnprofiles.iterator(); i.hasNext(); ) { Profile p = (Profile) i.next(); this.addProfile(p, lfnid, false); } } //everything seems to have gone ok. //so lets commit m_dbdriver.commit(); mLogger.log("Added TC entry", LogManager.DEBUG_MESSAGE_LEVEL); return true; } /** * Add additional profile to a logical transformation . * * @param namespace String The namespace of the transformation to be added. (can be null) * @param name String The name of the transformation to be added. * @param version String The version of the transformation to be added. (can be null) * @param profiles List The List of Profile objects that are to be added * to the transformation. * * @return number of insertions. On failure, throw an exception, don't use zero. * * @throws Exception * @see org.griphyn.cPlanner.classes.Profile */ public int addLFNProfile(String namespace, String name, String version, List profiles) throws Exception { long lfnid = -1; int profileCount = 0; if ( (lfnid = getLogicalId(namespace, name, version)) != -1) { if (profiles != null) { for (Iterator i = profiles.iterator(); i.hasNext(); ) { Profile p = (Profile) i.next(); if(this.addProfile(p, lfnid, false)) profileCount++; } } } else { mLogger.log( "The lfn does not exist. Cannot add profiles to non existent lfn", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException("The lfn does not exist. Cannot add profiles to non existent lfn " ); } m_dbdriver.commit(); return profileCount; } /** * Add additional profile to a physical transformation. * @param pfn String The physical name of the transformation * @param type TCType The type of transformation that the profile is * associated with. * @param resourcename String The resource on which the physical transformation exists * @param profiles The List of Profile objects that are to be added * to the transformation. * @return number of insertions. On failure, throw an exception, don't use zero. 
* * @throws Exception * @see org.griphyn.cPlanner.classes.Profile */ public int addPFNProfile(String pfn, TCType type, String resourceid, List profiles) throws Exception { long pfnid = -1; int profileCount =0; if ( (pfnid = getPhysicalId(pfn, type, resourceid)) != -1) { long profileid = -1; if (profiles != null) { for (Iterator i = profiles.iterator(); i.hasNext(); ) { Profile p = (Profile) i.next(); if(this.addProfile(p, pfnid, true)) profileCount++; } } m_dbdriver.commit(); return profileCount; } else { mLogger.log( "The pfn does not exist. Cannot add profiles to a non existent pfn", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( "The pfn does not exist. Cannot add profiles to a non existent pfn"); } } /** * DELETIONS */ /** * Delete all entries in the transformation catalog for a give logical transformation and/or on a resource and/or of * a particular type * @param namespace String The namespace of the transformation to be deleted. (can be null) * @param name String The name of the transformation to be deleted. * @param version String The version of the transformation to be deleted. ( can be null) * @param resourceid String The resource id for which the transformation is to be deleted. * If NULL then transformation on all resource are deleted * @param type TCType The type of the transformation. If NULL then all types are deleted for the transformation. * @throws Exception * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByLFN(String namespace, String name, String version, String resourceid, TCType type) throws Exception { long lfnid; long[] pfnids; if (name != null) { //humm this is where we start. //get the logical transformation id first. if ( (lfnid = this.getLogicalId(namespace, name, version)) != -1) { //this means lfn is there now we try to get the list of pfnids. PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.query.pfnid2"); String temp = (resourceid == null) ? "%" : resourceid; ps.setString(1, temp); temp = (type == null) ? "%" : type.toString(); ps.setString(2, temp); ps.setLong(3, lfnid); ResultSet rst = ps.executeQuery(); List results = null; while (rst.next()) { if (results == null) { results = new ArrayList(); } results.add(new Long(rst.getLong(1))); } if (results != null) { pfnids = new long[results.size()]; for (int i = 0; i < results.size(); i++) { pfnids[i] = ( (Long) results.get(i)).longValue(); } } else { long[] t = { -1}; pfnids = t; } if (pfnids[0] != -1) { //this means there are pfns. ps = this.m_dbdriver.getPreparedStatement( "stmt.delete.lfnpfnmap"); ps.setLong(1, lfnid); for (int i = 0; i < pfnids.length; i++) { ps.setLong(2, pfnids[i]); int rs = ps.executeUpdate(); if (rs < 1) { mLogger.log( "No entries found in the lfnpfnmap that could be deleted.", LogManager.ERROR_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Invalid state!. No entries found in the lfnpfnmap that could be deleted."); } else { mLogger.log("Deleted " + rs + " mappings from the lfnpfnmap", LogManager.DEBUG_MESSAGE_LEVEL); } } //now since the map is deleted we check if the any lfn pfn mappings exist for that lfn ps = this.m_dbdriver.getPreparedStatement( "stmt.query.lfnpfnmap"); ps.setLong(1, lfnid); ps.setString(2, "%"); rst = ps.executeQuery(); int count = 0; while (rst.next()) { count++; } int tcEntriesRmvdCount = pfnids.length; if (count == 0) { //no more pfns are mapped to the same lfn. 
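/*
 * (The deletion cascade applied here, and again in removeByPFN below, reads
 * in pseudo-SQL terms of the prepared statements registered in initialize():
 *
 *   DELETE FROM tc_lfnpfnmap  WHERE lfnid = ? AND pfnid = ?  -- the map rows first
 *   -- only if no tc_lfnpfnmap row still references the lfn:
 *   DELETE FROM tc_logicaltx  WHERE id = ?
 *   -- only if no tc_lfnpfnmap row still references the pfn:
 *   DELETE FROM tc_physicaltx WHERE id = ?
 * )
 */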
//It is safe to delete the lfn; PreparedStatement pst = this.m_dbdriver. getPreparedStatement("stmt.delete.logicaltx"); pst.setLong(1, lfnid); int rs = pst.executeUpdate(); if (rs < 1) { //this should not happen cause we checked that it existed before we started the work here. mLogger.log( "No entry for the lfn exists", LogManager.ERROR_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Invalid state!. No entry for the lfn exists"); } else { mLogger.log( "Deleted the logical transformation " + Separator.combine(namespace, name, version), LogManager.DEBUG_MESSAGE_LEVEL); } } else { mLogger.log("Logical transformation " + Separator.combine(namespace, name, version) + " is mapped to " + count + " other pfns.", LogManager.DEBUG_MESSAGE_LEVEL); mLogger.log( "***Wont delete logical transformation***", LogManager.DEBUG_MESSAGE_LEVEL); } //now that we have handled the lfns lets handle the pfns. for (int i = 0; i < pfnids.length; i++) { ps.clearParameters(); ps.setString(1, "%"); ps.setLong(2, pfnids[i]); rst = ps.executeQuery(); count = 0; while (rst.next()) { count++; } if (count == 0) { //this means this pfn is not mapped to any other lfn and is safe to be deleted PreparedStatement pst = this.m_dbdriver. getPreparedStatement( "stmt.delete.physicaltx"); pst.setLong(1, pfnids[i]); int rs = pst.executeUpdate(); if (rs < 1) { //this should not happen cause we checked that pfn existed before we started the work here. mLogger.log( "No entry for the pfn exists", LogManager.ERROR_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Invalid state!. No entry for the pfn exists"); } else { mLogger.log( "Deleted the physical transformation with id " + pfnids[i], LogManager.DEBUG_MESSAGE_LEVEL); } } else { mLogger.log("Pfn with id " + pfnids[i] + " is mapped with " + count + " other lfns.", LogManager.DEBUG_MESSAGE_LEVEL); mLogger.log( "***Wont delete physical transformation***", LogManager.DEBUG_MESSAGE_LEVEL); } } //hopefully everything went ok so lets commit this.m_dbdriver.commit(); return tcEntriesRmvdCount; } else { mLogger.log( "No pfns associated with lfn " + Separator.combine(namespace, name, version) + " of type " + ( (type == null) ? "ALL" : type.toString()) + " on resource " + ( (resourceid == null) ? "ALL" : resourceid) + " exist", LogManager.ERROR_MESSAGE_LEVEL); return 0; } } else { //logical transformation does not exist mLogger.log( "The logical transformation " + Separator.combine(namespace, name, version) + " does not exist.", LogManager.ERROR_MESSAGE_LEVEL); return 0; } } else { mLogger.log( "The logical transformation name cannot be null.", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException("The logical transformation name cannot be null."); } } /** * Delete all entries in the transformation catalog for pair of logical and physical transformation. * @param physicalname String The physical name of the transformation * @param namespace String The namespace associated in the logical name of the transformation. * @param name String The name of the logical transformation. * @param version String The version number of the logical transformation. * @param resourceid String The resource on which the transformation is to be deleted. * If NULL then it searches all the resource id. * @param type TCType The type of transformation. If NULL then it search and deletes entries for all types. * @throws Exception * * @return the number of removed entries. 
* * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByPFN(String physicalname, String namespace,String name, String version,String resourceid, TCType type) throws Exception { long lfnid; long[] pfnids; if (name != null) { if (physicalname != null) { //humm this is where we start. //get the logical transformation id first. if ( (lfnid = this.getLogicalId(namespace, name, version)) != -1) { //this means lfn is there now we try to get the list of pfnids. pfnids = this.getPhysicalIds(physicalname, type, resourceid); if (pfnids[0] != -1) { //this means there are pfns. PreparedStatement ps = this.m_dbdriver. getPreparedStatement("stmt.delete.lfnpfnmap"); ps.setLong(1, lfnid); for (int i = 0; i < pfnids.length; i++) { ps.setLong(2, pfnids[i]); int rs = ps.executeUpdate(); if (rs < 1) { mLogger.log( "No entries found in the lfnpfnmap that could be deleted.", LogManager.ERROR_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Invalid state!. No entries found in the lfnpfnmap that could be deleted."); } else { mLogger.log("Deleted " + rs + " mappings from the lfnpfnmap", LogManager.DEBUG_MESSAGE_LEVEL); } } //now since the map is deleted we check if the any lfn pfn mappings exist for that lfn ps = this.m_dbdriver.getPreparedStatement( "stmt.query.lfnpfnmap"); ps.setLong(1, lfnid); ps.setString(2, "%"); ResultSet rst = ps.executeQuery(); int count = 0; while (rst.next()) { count++; } int tcEntriesRmvdCount = pfnids.length; if (count == 0) { //no more pfns are mapped to the same lfn. //It is safe to delete the lfn; PreparedStatement pst = this.m_dbdriver. getPreparedStatement("stmt.delete.logicaltx"); pst.setLong(1, lfnid); int rs = pst.executeUpdate(); if (rs < 1) { //this should not happen cause we checked that it existed before we started the work here. mLogger.log( "No entry for the lfn exists", LogManager.DEBUG_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Invalid state!. No entry for the lfn exists"); } else { mLogger.log( "Deleted the logical transformation " + Separator.combine(namespace, name, version), LogManager.DEBUG_MESSAGE_LEVEL); } } else { mLogger.log("Logical transformation " + Separator.combine(namespace, name, version) + " is mapped to " + count + " other pfns.", LogManager.DEBUG_MESSAGE_LEVEL); mLogger.log( "***Wont delete logical transformation***", LogManager.DEBUG_MESSAGE_LEVEL); } //now that we have handled the lfns lets handle the pfns. for (int i = 0; i < pfnids.length; i++) { ps.clearParameters(); ps.setString(1, "%"); ps.setLong(2, pfnids[i]); rst = ps.executeQuery(); count = 0; while (rst.next()) { count++; } if (count == 0) { //this means this pfn is not mapped to any other lfn and is safe to be deleted PreparedStatement pst = this.m_dbdriver. getPreparedStatement( "stmt.delete.physicaltx"); pst.setLong(1, pfnids[i]); int rs = pst.executeUpdate(); if (rs < 1) { //this should not happen cause we checked that pfn existed before we started the work here. mLogger.log( "No entry for the pfn exists", LogManager.DEBUG_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Invalid state!. 
No entry for the pfn exists"); } else { mLogger.log( "Deleted the physical transformation " + physicalname, LogManager.DEBUG_MESSAGE_LEVEL); } } else { mLogger.log("Pfn " + physicalname + " with id " + pfnids[i] + " is mapped with " + count + " other lfns.", LogManager.DEBUG_MESSAGE_LEVEL); mLogger.log( "***Wont delete physical transformation***", LogManager.DEBUG_MESSAGE_LEVEL); } } //hopefully everything went ok so lets commit this.m_dbdriver.commit(); return tcEntriesRmvdCount; } else { mLogger.log( "The physical transformation " + physicalname + " of type " + ( (type == null) ? "ALL" : type.toString()) + " on resource " + ( (resourceid == null) ? "ALL" : resourceid), LogManager.ERROR_MESSAGE_LEVEL); mLogger.log("does not exist", LogManager.ERROR_MESSAGE_LEVEL); return 0; } } else { //logical transformation does not exist mLogger.log( "The logical transformation " + Separator.combine(namespace, name, version) + " does not exist.", LogManager.ERROR_MESSAGE_LEVEL); return 0; } } else { mLogger.log( "The physical transformation cannot be null.", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException("The physical transformation cannot be null."); } } else { mLogger.log( "The logical transformation name cannot be null.", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException("The logical transformation name cannot be null."); } } /** * Deletes entries from the catalog which have a particular system information. * @param sysinfo SysInfo The System Information by which you want to delete * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @throws Exception */ public int removeBySysInfo( SysInfo sysinfo) throws Exception { if (sysinfo == null) { mLogger.log( "The system information cannot be null", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException("The system information cannot be null"); } PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.delete.sysinfo"); VDSSysInfo vdsSysInfo = NMI2VDSSysInfo.nmiToVDSSysInfo(sysinfo); ps.setString(1, this.makeNotNull(vdsSysInfo.getArch().toString())); ps.setString(2, this.makeNotNull(vdsSysInfo.getOs().toString())); ps.setString(3, this.makeNotNull(vdsSysInfo.getOsversion())); ps.setString(4, this.makeNotNull(vdsSysInfo.getGlibc())); try { int i = ps.executeUpdate(); if (i < 1) { mLogger.log( "No entries found that could be deleted", LogManager.ERROR_MESSAGE_LEVEL); return 0; } else { mLogger.log("Deleted " + i + "entry with system info " + sysinfo.toString(), LogManager.DEBUG_MESSAGE_LEVEL); this.m_dbdriver.commit(); return i; } } catch (SQLException e) { mLogger.log( "Unable to delete entries by SysInfo from TC :" + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Unable to delete entries by SysInfo from TC :" + e.getMessage()); } } /** * Delete a particular type of transformation, and/or on a particular resource * @param type TCType The type of the transformation * @param resourceid String The resource on which the transformation exists. * If NULL then that type of transformation is deleted from all the resources. * @throws Exception * * @return the number of removed entries. 
* * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByType(TCType type, String resourceid) throws Exception { if (type == null) { mLogger.log( "The type of transformation cannot be null", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException("The type of transformation cannot be null"); } PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.delete.bytype"); ps.setString(1, this.makeNotNull(type.toString())); String temp = (resourceid != null) ? resourceid : "%"; ps.setString(2, temp); try { int i = ps.executeUpdate(); if (i < 1) { mLogger.log( "No entries found that could be deleted", LogManager.ERROR_MESSAGE_LEVEL); return 0; } else { mLogger.log("Deleted " + i + " transformations with type " + type + " and on resource " + ( (resourceid == null) ? "ALL" : resourceid), LogManager.DEBUG_MESSAGE_LEVEL); this.m_dbdriver.commit(); return i; } } catch (SQLException e) { mLogger.log( "Unable to delete entries by type from TC :" + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException ("Unable to delete entries by type from TC :" + e.getMessage()); } } /** * Delete all entries on a particular resource from the transformation catalog. * @param resourceid String The resource which you want to remove. * @throws Exception * * @return the number of removed entries. */ public int removeBySiteID(String resourceid) throws Exception { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.delete.byresourceid"); ps.setString(1, resourceid); try { int i = ps.executeUpdate(); if (i < 1) { mLogger.log( "No entries found that could be deleted", LogManager.ERROR_MESSAGE_LEVEL); return 0; } else { mLogger.log("Deleted " + i + " entries for the resource", LogManager.INFO_MESSAGE_LEVEL); this.m_dbdriver.commit(); return i; } } catch (SQLException e) { mLogger.log( "Unable to delete Resource from TC :" + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Unable to delete Resource from TC :" + e.getMessage()); } } /** * Deletes the entire transformation catalog. CLEAN............. USE WITH CAUTION. * * @return the number of removed entries. * * @throws Exception */ public int clear() throws Exception { PreparedStatement[] ps = { this.m_dbdriver.getPreparedStatement("stmt.delete.alllfnpfnmap"), this.m_dbdriver.getPreparedStatement("stmt.delete.alllfnprofile"), this.m_dbdriver.getPreparedStatement("stmt.delete.allpfnprofile"), this.m_dbdriver.getPreparedStatement("stmt.delete.alllfns"), this.m_dbdriver.getPreparedStatement("stmt.delete.allpfns"), this.m_dbdriver.getPreparedStatement("stmt.delete.allsysinfo") }; int updateCount =0; try { for (int i = 0; i < ps.length; i++) { if(i == 3){ // capture the number of rows deleted by stmt.delete.alllfns in updateCount updateCount = ps[i].executeUpdate(); }else{ ps[i].executeUpdate(); } } //if we are here, all statements succeeded and we can commit this.m_dbdriver.commit(); return updateCount; } catch (SQLException e) { mLogger.log( "Unable to delete the entire TC ", e, LogManager.ERROR_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Unable to delete the entire TC ", e); } } /** * Delete a list of profiles or all the profiles associated with a pfn on a * resource and of a type. * * @param physicalname String The physical name of the transformation. * @param type TCType The type of the transformation. * @param resourceid String The resource of the transformation. 
* @param profiles List The list of profiles to be deleted. If NULL then all profiles for that pfn+resource+type are deleted. * * @return the number of removed entries. * * @see org.griphyn.cPlanner.classes.Profile * @throws Exception */ public int deletePFNProfiles(String physicalname, TCType type, String resourceid, List profiles) throws Exception { long pfnid; if ( (pfnid = this.getPhysicalId(physicalname, type, resourceid)) != -1) { if (profiles == null) { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.delete.pfnprofiles"); ps.setLong(1, pfnid); try { int i = ps.executeUpdate(); if (i < 1) { mLogger.log( "No entries found that could be deleted", LogManager.ERROR_MESSAGE_LEVEL); return 0; } else { mLogger.log("Deleted " + i + " pfn profiles", LogManager.INFO_MESSAGE_LEVEL); this.m_dbdriver.commit(); return i; } } catch (SQLException e) { mLogger.log( "Unable to delete pfnprofiles from TC", e, LogManager.ERROR_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Unable to delete pfnprofiles from TC" , e); } } else { int count =0; for (Iterator i = profiles.iterator(); i.hasNext(); ) { if(this.deleteProfile( (Profile) i.next(), pfnid, true)) count++; } this.m_dbdriver.commit(); return count; } } else { mLogger.log("The pfn " + physicalname + " of type " + type + " on resource " + resourceid + " does not exist", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException("The pfn " + physicalname + " of type " + type + " on resource " + resourceid + " does not exist"); } } /** * Delete a list of profiles or all the profiles associated with a logical * transformation. * * @param namespace String The namespace of the logical transformation. * @param name String The name of the logical transformation. * @param version String The version of the logical transformation. * @param profiles List The List of profiles to be deleted. If NULL * then all profiles for the logical transformation are deleted. * * @return the number of removed entries. 
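 * <p>
 * Hypothetical usage sketch (assumes a connected Database instance
 * <code>tc</code>, and that Profile exposes a (namespace, key, value)
 * constructor; both are assumptions, not confirmed by this file):
 * <pre>
 *  List profiles = new ArrayList();
 *  profiles.add( new Profile( "env", "JAVA_HOME", "/bin/java.1.5" ) );
 *  // deletes just the listed profiles; a null list deletes all of them
 *  int n = tc.deleteLFNProfiles( "example", "keg", "1.0", profiles );
 * </pre>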
* * @see org.griphyn.cPlanner.classes.Profile * @throws Exception */ public int deleteLFNProfiles(String namespace, String name, String version, List profiles) throws Exception { long lfnid; if ( (lfnid = getLogicalId(namespace, name, version)) != -1) { if (profiles == null) { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.delete.lfnprofiles"); ps.setLong(1, lfnid); try { int i = ps.executeUpdate(); if (i < 1) { mLogger.log( "No entries found that could be deleted", LogManager.ERROR_MESSAGE_LEVEL); return 0; } else { mLogger.log("Deleted " + i + " lfn profiles", LogManager.INFO_MESSAGE_LEVEL); this.m_dbdriver.commit(); return i; } } catch (SQLException e) { mLogger.log( "Unable to delete lfnprofiles from TC", e, LogManager.ERROR_MESSAGE_LEVEL); this.m_dbdriver.rollback(); throw new RuntimeException("Unable to delete lfnprofiles from TC" , e ); } } else { int count =0; for (Iterator i = profiles.iterator(); i.hasNext(); ) { if(this.deleteProfile( (Profile) i.next(), lfnid, false)) count++; } this.m_dbdriver.commit(); return count; } } else { mLogger.log("Logical transformation " + Separator.combine(namespace, name, version) + " does not exist", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( "Logical transformation " + Separator.combine(namespace, name, version) + " does not exist"); } } /** * Returns the TC implementation being used. * * @return String */ public String getDescription() { return PegasusProperties.nonSingletonInstance().getTCMode(); } public void close() { try { super.close(); } catch (SQLException e) { mLogger.log( "Unable to call Close on the Database", e, LogManager.ERROR_MESSAGE_LEVEL); } } public boolean connect(java.util.Properties props) { //not implemented return true; } public boolean isClosed() { //not implemented return true; } /** * Returns the id associated with the logical transformation. * @param namespace String * @param name String * @param version String * @throws Exception * @return long Returns -1 if entry does not exist */ private long getLogicalId(String namespace, String name, String version) throws Exception { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.query.lfnid"); ps.setString(1, this.makeNotNull(namespace)); ps.setString(2, this.makeNotNull(name)); ps.setString(3, this.makeNotNull(version)); ResultSet rs = ps.executeQuery(); if (rs.next()) { long l = rs.getLong(1); rs.close(); return l; } else { return -1; } } /** * Checks if an lfn,pfn pair exists in the map. * @param lfnid long * @param pfnid long * @throws Exception * @return long Returns 1 if it exists, -1 if it does not */ private long checkLfnPfnMap(long lfnid, long pfnid) throws Exception { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.query.lfnpfnmap"); ps.setLong(1, lfnid); ps.setLong(2, pfnid); ResultSet rs = ps.executeQuery(); if (rs.next()) { rs.close(); return 1; } else { return -1; } } /** * Checks if a given profile exists * @param namespace String * @param name String * @param value String * @param id long * @param pfn boolean * @throws Exception * @return long Returns 1 if it exists, -1 if it does not. 
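 * <p>
 * Illustrative call pattern from within this class (a sketch; the real
 * callers are addProfile and deleteProfile further below):
 * <pre>
 *  if ( checkProfile( ns, key, value, pfnid, true ) == -1 ) {
 *      // profile absent: safe to insert it
 *  } else {
 *      // profile present: safe to delete it, or skip a duplicate insert
 *  }
 * </pre>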
*/ private long checkProfile(String namespace, String name, String value, long id, boolean pfn) throws Exception { PreparedStatement ps = null; if (pfn) { ps = this.m_dbdriver.getPreparedStatement( "stmt.query.pfnprofileid"); } else { ps = this.m_dbdriver.getPreparedStatement( "stmt.query.lfnprofileid"); } ps.setString(1, makeNotNull(namespace)); ps.setString(2, makeNotNull(name)); ps.setString(3, makeNotNull(value)); ps.setLong(4, id); ResultSet rs = ps.executeQuery(); if (rs.next()) { rs.close(); return 1; } else { return -1; } } /** * Gets the id for the system information entry. * @param system VDSSysInfo * @throws Exception * @return long Returns -1 if it does not exist * @see org.griphyn.common.classes.VDSSysInfo */ private long getSysInfoId(VDSSysInfo system) throws Exception { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.query.sysid"); ps.setString(1, system.getArch().getValue()); ps.setString(2, system.getOs().getValue()); ps.setString(3, this.makeNotNull(system.getOsversion())); ps.setString(4, this.makeNotNull(system.getGlibc())); ResultSet rs = ps.executeQuery(); if (rs.next()) { long l = rs.getLong(1); rs.close(); return l; } else { return -1; } } /** * Adds a system information entry into the TC. * @param system VDSSysInfo * @throws Exception * @return long the id of the added system information entry, or -1 if an error occurs. */ private long addSysInfo(VDSSysInfo system) throws Exception { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.add.sysinfo"); long id = -1; try { id = m_dbdriver.sequence1("tc_sysinfo_id_seq"); } catch (SQLException e) { mLogger.log("Error while getting sysinfoid : ", e, LogManager.ERROR_MESSAGE_LEVEL); mLogger.log("Starting Rollback", LogManager.ERROR_MESSAGE_LEVEL); m_dbdriver.cancelPreparedStatement("stmt.add.sysinfo"); m_dbdriver.rollback(); mLogger.log("Finished Rollback", LogManager.ERROR_MESSAGE_LEVEL); return -1; } longOrNull(ps, 1, id); ps.setString(2, this.makeNotNull(system.getArch().getValue())); ps.setString(3, this.makeNotNull(system.getOs().getValue())); ps.setString(4, this.makeNotNull(system.getOsversion())); ps.setString(5, this.makeNotNull(system.getGlibc())); try { ps.executeUpdate(); mLogger.log("Added the system info " + system.toString(), LogManager.DEBUG_MESSAGE_LEVEL); if (id == -1) { id = m_dbdriver.sequence2(ps, "tc_sysinfo_id_seq", 1); } } catch (SQLException e) { mLogger.log( "Unable to add system info to the TC", e, LogManager.ERROR_MESSAGE_LEVEL); m_dbdriver.cancelPreparedStatement("stmt.add.sysinfo"); m_dbdriver.rollback(); return -1; } return id; } /** * Adds a logical entry to the logicaltx table * @param namespace String The namespace of the transformation * @param name String The name of the transformation * @param version String The version of the transformation * @return long The position in the table at which the entry is added. * If there is an error -1 is returned. 
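 * <p>
 * Sketch of the sequence-id pattern this method follows (names match the
 * code below; the surrounding transaction handling is elided):
 * <pre>
 *  long id = m_dbdriver.sequence1( "tc_logicaltx_id_seq" ); // pre-fetch, or -1
 *  // bind id (or NULL) plus namespace, name, version and executeUpdate()
 *  if ( id == -1 ) {
 *      id = m_dbdriver.sequence2( ps, "tc_logicaltx_id_seq", 1 ); // post-fetch
 *  }
 * </pre>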
* @throws Exception */ private long addLogicalTr(String namespace, String name, String version) throws Exception { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.add.lfn"); long id = -1; try { id = m_dbdriver.sequence1("tc_logicaltx_id_seq"); } catch (SQLException e) { mLogger.log("Unable to get lfnid : ", e, LogManager.ERROR_MESSAGE_LEVEL); mLogger.log("Starting Rollback", LogManager.ERROR_MESSAGE_LEVEL); m_dbdriver.cancelPreparedStatement("stmt.add.lfn"); m_dbdriver.rollback(); mLogger.log("Finished Rollback", LogManager.ERROR_MESSAGE_LEVEL); return -1; } longOrNull(ps, 1, id); //fill the data ps.setString(2, this.makeNotNull(namespace)); ps.setString(3, this.makeNotNull(name)); ps.setString(4, this.makeNotNull(version)); try { //run the command ps.executeUpdate(); mLogger.log("Added the lfn " + Separator.combine(namespace, name, version), LogManager.DEBUG_MESSAGE_LEVEL); if (id == -1) { id = m_dbdriver.sequence2(ps, "tc_logicaltx_id_seq", 1); } // lfnid = this.getLogicalId( namespace, name, version ); } catch (SQLException e) { mLogger.log( "Unable to add logical transformation into the TC", e, LogManager.ERROR_MESSAGE_LEVEL); m_dbdriver.cancelPreparedStatement("stmt.add.lfn"); m_dbdriver.rollback(); return -1; } return id; } /** * Adds a physical entry to the physicaltxtable * @param physicalname String The physical name of the transformation * @param resourceid String The resource on which the transformation exists * @param type TCType The type of the transformation * @param archid long The architecture id from the sysinfo table for the tr. * @return long The position in the physicaltx table at which the entry is * stored. If there is an error -1 is returned. * @throws Exception */ private long addPhysicalTr(String physicalname, String resourceid, TCType type, long archid) throws Exception { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.add.pfn"); long id = -1; try { id = m_dbdriver.sequence1("tc_physicaltx_id_seq"); } catch (SQLException e) { mLogger.log("Error while getting pfnid : ", e, LogManager.ERROR_MESSAGE_LEVEL); mLogger.log("Starting Rollback", LogManager.ERROR_MESSAGE_LEVEL); m_dbdriver.cancelPreparedStatement("stmt.add.pfn"); m_dbdriver.rollback(); mLogger.log("Finished Rollback", LogManager.ERROR_MESSAGE_LEVEL); return -1; } longOrNull(ps, 1, id); //this means the pfn entry does not exist and we have to add it. ps.setString(2, makeNotNull(resourceid)); ps.setString(3, makeNotNull(physicalname)); ps.setString(4, makeNotNull(type.toString())); ps.setLong(5, archid); try { ps.executeUpdate(); mLogger.log("Added the pfn " + physicalname, LogManager.DEBUG_MESSAGE_LEVEL); mLogger.log("Added the type " + type.toString(), LogManager.DEBUG_MESSAGE_LEVEL); mLogger.log("Added the resource " + resourceid, LogManager.DEBUG_MESSAGE_LEVEL); mLogger.log("Added the archid " + archid, LogManager.DEBUG_MESSAGE_LEVEL); if (id == -1) { id = m_dbdriver.sequence2(ps, "tc_physicaltx_id_seq", 1); } // pfnid = this.getPhysicalId( physicalname, type, resourceid ); } catch (SQLException e) { mLogger.log( "Unable to add pfn entry to the TC", e, LogManager.ERROR_MESSAGE_LEVEL); m_dbdriver.cancelPreparedStatement("stmt.add.pfn"); m_dbdriver.rollback(); return -1; } return id; } /** * Add a lfn or pfn profile to the TC * @param p Profile The profile to be added * @param id long The lfn or pfn id to which the profile is associated. * @param pfn boolean if true entry is added to the pfn, false to the lfn. 
* @throws Exception * @return boolean Returns true if success, false if error occurs. */ private boolean addProfile(Profile p, long id, boolean pfn) throws Exception { String namespace = p.getProfileNamespace(); String key = p.getProfileKey(); String value = p.getProfileValue(); long profileid = -1; PreparedStatement ps = null; if ( (profileid = this.checkProfile(namespace, key, value, id, pfn)) == -1) { if (pfn) { //add pfn profile //the profile doesn't exist, so add it ps = m_dbdriver.getPreparedStatement("stmt.add.pfnprofile"); } else { //add lfn profile ps = m_dbdriver.getPreparedStatement("stmt.add.lfnprofile"); } ps.setString(1, makeNotNull(namespace)); ps.setString(2, makeNotNull(key)); ps.setString(3, makeNotNull(value)); ps.setLong(4, id); try { ps.executeUpdate(); } catch (SQLException e) { mLogger.log( "Unable to add Profile to the TC", e, LogManager.ERROR_MESSAGE_LEVEL); m_dbdriver.rollback(); return false; } return true; } else { String temp = (pfn) ? "PFN" : "LFN"; mLogger.log("The " + temp + " profile exists.", LogManager.DEBUG_MESSAGE_LEVEL); return false; } } /** * Delete a given lfn or pfn profile * @param p Profile The profile to be deleted * @param id long The lfn or pfnid with which the profile is associated * @param pfn boolean If true the pfn profile is deleted, if false lfn profile is deleted * @throws Exception * @return boolean Returns true if success, false if any error occurs. */ private boolean deleteProfile(Profile p, long id, boolean pfn) throws Exception { String namespace = p.getProfileNamespace(); String key = p.getProfileKey(); String value = p.getProfileValue(); long profileid = -1; PreparedStatement ps = null; if ( (profileid = this.checkProfile(namespace, key, value, id, pfn)) != -1) { if (pfn) { //delete pfn profile //the profile exists, so delete it ps = m_dbdriver.getPreparedStatement("stmt.delete.pfnprofile"); } else { //delete lfn profile ps = m_dbdriver.getPreparedStatement("stmt.delete.lfnprofile"); } ps.setString(1, makeNotNull(namespace)); ps.setString(2, makeNotNull(key)); ps.setString(3, makeNotNull(value)); ps.setLong(4, id); try { ps.executeUpdate(); } catch (SQLException e) { mLogger.log( "Unable to delete Profile from the TC", e, LogManager.ERROR_MESSAGE_LEVEL); m_dbdriver.rollback(); return false; } return true; } else { String temp = (pfn) ? "PFN" : "LFN"; mLogger.log("The " + temp + " profile does not exist.", LogManager.DEBUG_MESSAGE_LEVEL); return false; } } /** * Returns the id of the physical transformation * @param pfn String the physical transformation * @param type TCType The type of the transformation * @param resourceid String The resource on which the transformation exists. * @throws Exception * @return long Returns -1 if entry does not exist. */ private long getPhysicalId(String pfn, TCType type, String resourceid) throws Exception { PreparedStatement ps; ps = this.m_dbdriver.getPreparedStatement( "stmt.query.pfnid"); ps.setString(1, this.makeNotNull(pfn)); String ttype; if (type==null) { ttype=null; }else if(type==TCType.STAGEABLE || type == TCType.STATIC_BINARY){ ttype="STA%"; } else { ttype=type.toString(); } ps.setString(2, this.makeNotNull(ttype)); ps.setString(3, this.makeNotNull(resourceid)); ResultSet rs = ps.executeQuery(); if (rs.next()) { long l = rs.getLong(1); rs.close(); return l; } else { return -1; } } /** * Returns a list of pfnid for a given pfn on any resource, and of any type. * @param pfn String The physical transformation to search for. * @param type TCType The type to search for. 
If NULL then all types are searched. * @param resourceid String The resource to search for. If NULL then all resources are searched. * @throws Exception * @return long[] Returns -1 is no entry exist. */ private long[] getPhysicalIds(String pfn, TCType type, String resourceid) throws Exception { PreparedStatement ps = this.m_dbdriver.getPreparedStatement( "stmt.query.pfnid"); String temp; if(type==null){ temp="%"; } else if(type==TCType.STAGEABLE || type==TCType.STATIC_BINARY){ temp="STA%"; } else { temp=type.toString(); } ps.setString(1, pfn); ps.setString(2, temp); temp = (resourceid == null) ? "%" : resourceid; ps.setString(3, temp); ResultSet rs = ps.executeQuery(); List resultlist = null; while (rs.next()) { if (resultlist == null) { resultlist = new ArrayList(); } resultlist.add(new Long(rs.getLong(1))); } if (resultlist != null) { long[] result = new long[resultlist.size()]; int count = 0; for (Iterator i = resultlist.iterator(); i.hasNext(); ) { result[count] = ( (Long) i.next()).longValue(); count++; } return result; } else { long[] result = { -1}; return result; } } /** * Computes the maximum column lenght for pretty printing. * @param s String[] * @param count int[] */ /* private static void columnLength(String[] s, int[] count) { for (int i = 0; i < count.length; i++) { if (s[i].length() > count[i]) { count[i] = s[i].length(); } } } */ } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/impl/CreateTCDatabase.java0000644000175000017500000002236611757531137032646 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.impl; import java.io.File; import java.io.IOException; import java.io.OutputStream; import java.sql.Connection; import java.sql.DriverManager; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.util.MissingResourceException; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.DefaultStreamGobblerCallback; import edu.isi.pegasus.common.util.StreamGobbler; /** * This class provides a bridge for creating and initializing transformation catalog on database . * * @author Prasanth Thomas * @version $Revision: 2649 $ */ public class CreateTCDatabase { /** * The default logger. */ private LogManager mLogger; /** * Maintains the connection to the database over the lifetime of * this instance. 
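 * <p>
 * The constructor (below) connects at the server level by stripping the
 * database name from the JDBC URL, so this handle can create and drop
 * databases, e.g. (a sketch of what the constructor actually does):
 * <pre>
 *  // "jdbc:mysql://host/tcdb" becomes "jdbc:mysql://host/"
 *  mConnection = DriverManager.getConnection(
 *      url.substring( 0, url.lastIndexOf( "/" ) + 1 ), username, password );
 * </pre>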
*/ protected Connection mConnection = null; /** * MySQL statement for checking if DB exists */ private String CHECK_DB_EXISTS_STMT = "SELECT SCHEMA_NAME FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = ?"; /** * MySQL statement for checking for dropping database */ private String DROP_DATABASE_STMT = "DROP DATABASE " ; /** * MySQL statement for checking for creating database */ private String CREATE_DATABASE_STMT = "CREATE DATABASE " ; /** * Stores the driver string */ private String mDriver ; /** * Stores the database url */ private String mUrl; /** * Stores the user name */ private String mUsername ; /** * Stores the MySQL password */ private String mPassword ; /** * Stores the database name */ private String mDatabaseName ; /** * Stores the database host name */ private String mDatabaseHost ; /** * Stores the absolute path to the mysql home directory */ private String mDatabaseAbsolutePath ; /** * Creates a JDBCTC instance . Supports only MySQL connection for the time being * @param driver the Database driver * @param url the Database url * @param username the Database user name * @param password the Database user password * @throws ClassNotFoundException if it fails to load the driver * @throws SQLException */ public CreateTCDatabase(String driver, String url, String username, String password , String host) throws ClassNotFoundException, SQLException{ mLogger = LogManagerFactory.loadSingletonInstance(); this.mUrl = url; this.mUsername = username; this.mPassword = password; this.mDatabaseName = getDatabaseName(mUrl); this.mDatabaseHost = host; if (driver != null) { //only support mysql if( driver.equalsIgnoreCase( "MySQL") ){ this.mDriver = "com.mysql.jdbc.Driver"; }else{ throw new RuntimeException("Only MySQL supported !"); } } mDatabaseAbsolutePath = System.getProperty("mysql.home"); if(mDatabaseAbsolutePath == null){ throw new MissingResourceException( "The mysql.home property was not set!", "java.util.Properties", "mysql.home" ); } try { Class.forName(mDriver); } catch (ClassNotFoundException e) { mLogger.log( "Failed to load driver " + driver, e, LogManager.DEBUG_MESSAGE_LEVEL ); throw e; } try { mConnection = DriverManager.getConnection( this.mUrl.substring(0,url.lastIndexOf("/")+1), this.mUsername, this.mPassword ); } catch (SQLException e) { mLogger.log( "Failed to get connection " + url +" with user " + username , e, LogManager.DEBUG_MESSAGE_LEVEL ); throw e; } } /** * Returns the database name from the database url string * @param dbURL the database url string * @return the database name, null if it does not contain a database name */ public String getDatabaseName(String dbURL){ String databaseName = null; int index = -1; if((index =dbURL.lastIndexOf("/")) !=-1){ if(index < dbURL.length()){ databaseName = dbURL.substring(index+1, dbURL.length()); if(databaseName.trim().isEmpty()) return null; } } return databaseName; } /** * Checks if the given database exists * @param databaseName the database name * @return true if database schema exists, false otherwise * @throws SQLException */ public boolean checkIfDatabaseExists(String databaseName) throws SQLException{ PreparedStatement ps; ResultSet rs = null; try { ps = mConnection.prepareStatement(CHECK_DB_EXISTS_STMT); ps.setString(1, databaseName); rs = ps.executeQuery(); if(rs.next()){ return true; }else{ return false; } } catch (SQLException e) { mLogger.log( "Failed to check if " + databaseName +" exists", e, LogManager.DEBUG_MESSAGE_LEVEL ); return false; }finally{ if(rs != null) rs.close(); } } /** * Deletes the database * 
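using a raw DROP DATABASE statement. A hypothetical
 * recreate-from-scratch sequence on a CreateTCDatabase instance
 * <code>db</code> (the schema file path is illustrative only):
 * <pre>
 *  if ( db.checkIfDatabaseExists( "tcdb" ) ) {
 *      db.deleteDatabase( "tcdb" );
 *  }
 *  db.createDatabase( "tcdb" );
 *  db.initializeDatabase( "tcdb", "/path/to/tc-schema.sql" );
 * </pre>
 *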
@param databaseName the database name * @return true if the database is deleted, false otherwise * @throws SQLException */ public boolean deleteDatabase(String databaseName) throws SQLException{ PreparedStatement ps; try { //DROP DATABASE is DDL: execute it directly; there are no parameters and no result set ps = mConnection.prepareStatement(DROP_DATABASE_STMT + databaseName); ps.execute(); return true; } catch (SQLException e) { mLogger.log( "Failed to drop " + databaseName , e, LogManager.DEBUG_MESSAGE_LEVEL ); return false; } } /** * Creates the database * @param databaseName the database name * @return true if the database is created, false otherwise * @throws SQLException */ public boolean createDatabase(String databaseName) throws SQLException{ PreparedStatement ps; try { ps = mConnection.prepareStatement(CREATE_DATABASE_STMT + databaseName); ps.execute(); } catch (SQLException sqlException) { mLogger.log( "Failed to create " + databaseName, sqlException, LogManager.DEBUG_MESSAGE_LEVEL ); return false; } return true; } /** * Initializes the database with the given .sql file * @param databaseName the database name * @param fileName the file name * @return true, if initialization succeeds, false otherwise. */ public boolean initializeDatabase(String databaseName, String fileName){ try{ Runtime r = Runtime.getRuntime(); //creating the command String command = mDatabaseAbsolutePath+ File.separator +"bin"+ File.separator +"mysql" +" --host " +mDatabaseHost + " --user="+mUsername +" --password="+mPassword +" "+databaseName ; mLogger.log("Executing command " + command, LogManager.DEBUG_MESSAGE_LEVEL); String sqlCommand ="source "+ fileName + ";\n"; String exitCommand ="exit \n"; Process p = r.exec( command ); // send the source command and exit command to the output stream OutputStream os = p.getOutputStream(); os.write(sqlCommand.getBytes()); os.flush(); os.write(exitCommand.getBytes()); os.flush(); //spawn off the gobblers with the already initialized default callback StreamGobbler ips = new StreamGobbler(p.getInputStream(), new DefaultStreamGobblerCallback( LogManager.DEBUG_MESSAGE_LEVEL)); StreamGobbler eps = new StreamGobbler(p.getErrorStream(), new DefaultStreamGobblerCallback( LogManager.DEBUG_MESSAGE_LEVEL)); ips.start(); eps.start(); //wait for the threads to finish off ips.join(); eps.join(); //get the status int status = p.waitFor(); if( status != 0){ mLogger.log("Command " + command + " exited with status " + status + eps.getStackTrace(), LogManager.WARNING_MESSAGE_LEVEL); return false; } } catch(IOException ioe){ mLogger.log("IOException while running command ", ioe, LogManager.ERROR_MESSAGE_LEVEL); return false; } catch( InterruptedException ie){ mLogger.log("InterruptedException while running command ", ie, LogManager.ERROR_MESSAGE_LEVEL); return false; } return true; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/impl/Text.java0000644000175000017500000011123211757531137030502 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.catalog.transformation.impl; import java.io.BufferedWriter; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.Writer; import java.util.ArrayList; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Properties; import java.util.Set; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Boolean; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.TransformationStore; import edu.isi.pegasus.planner.catalog.transformation.client.TCFormatUtility; import edu.isi.pegasus.planner.classes.Notifications; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.parser.TransformationCatalogTextParser; /** * A File based Transformation Catalog where each entry spans multiple lines. * * The implementation refers to the following same format for specifying a * transformation catalog entry. * * *
 * tr example::keg:1.0 {
 *
 *  #specify profiles that apply for all the sites for the transformation
 *  #in each site entry the profile can be overridden
 *  profile env "APP_HOME" "/tmp/karan"
 *  profile env "JAVA_HOME" "/bin/java.1.5"
 * 
 *  site isi {
 *   profile env "me" "with"
 *   profile condor "more" "test"
 *   profile env "JAVA_HOME" "/bin/java.1.6"
 *   pfn "/path/to/keg"
 *   arch  "x86"
 *   os    "linux"
 *   osrelease "fc"
 *   osversion "4"
 *   type "installed"
 *  }
 *
 *  site wind {
 *   profile env "me" "with"
 *   profile condor "more" "test"
 *   pfn "/path/to/keg"
 *   arch  "x86"
 *   os    "linux"
 *   osrelease "fc"
 *   osversion "4"
 *   type "STAGEABLE"
 *  }
 * }
 *
 * 
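 * A catalog file in this format is picked up from the path given by the
 * pegasus.catalog.transformation.file property. A properties sketch (the
 * mode property value shown here is an assumption, and the path is
 * illustrative only):
 * <pre>
 *  pegasus.catalog.transformation       Text
 *  pegasus.catalog.transformation.file  /path/to/tc.text
 * </pre>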
* * * * @author Karan Vahi * @version $Revision: 2183 $ */ public class Text extends Abstract implements TransformationCatalog { /** * Describes the transformation catalog mode. */ public static final String DESCRIPTION = "Multiline Textual TC"; /** * The LogManager object which is used to log all the messages. * It's values are set in the CPlanner (the main toolkit) class. */ protected LogManager mLogger; /** * The path to the file based TC. */ private String mTCFile; /** * The handle to the properties object. */ private PegasusProperties mProps; /** * Instance to the TextParser. */ private TransformationCatalogTextParser mTextParser; /** * The transformation store containing the transformations after parsing the * file. */ private TransformationStore mTCStore; /** * Boolean indicating whether to flush the contents back to the file on * close. */ private boolean mFlushOnClose; /** * Boolean indicating whether to modify the file URL or not * */ private boolean modifyFileURL = true; /** * Default constructor. */ public Text(){ } /** * Initialize the implementation, and return an instance of the implementation. * It should be in the connect method, to be consistent with the other * catalogs. * * @param bag the bag of Pegasus initialization objects. * */ public void initialize ( PegasusBag bag ){ mProps = bag.getPegasusProperties(); mLogger = bag.getLogger(); mFlushOnClose = false; modifyFileURL = Boolean.parse(mProps.getProperty( MODIFY_FOR_FILE_URLS_KEY), true ); mTCFile = mProps.getTCPath(); mLogger.log("TC Mode being used is " + this.getDescription(), LogManager.CONFIG_MESSAGE_LEVEL); mLogger.log("TC File being used is " + mTCFile, LogManager.CONFIG_MESSAGE_LEVEL); if (mTCFile == null) { throw new RuntimeException( "The File to be used as TC should be " + "defined with the property pegasus.catalog.transformation.file"); } try{ java.io.File f = new java.io.File( mTCFile ); if( f.exists() ){ mTextParser = new TransformationCatalogTextParser ( new FileReader( f ), mLogger ); mTCStore = mTextParser.parse(modifyFileURL); } else{ //empty TCStore mTCStore = new TransformationStore(); mLogger.log("The Transformation Catalog file " + mTCFile + " was not found ", LogManager.DEBUG_MESSAGE_LEVEL ); } } catch (FileNotFoundException ex) { throw new RuntimeException( "Unable to find file " + mTCFile ); } catch( IOException ioe ){ throw new RuntimeException( "IOException while parsing transformation catalog" , ioe ); } } /** * Empty for the time being. The factory still calls out to the initialize * method. * * @param props the connection properties. * @return */ public boolean connect( Properties props ) { //not implemented return true; } /** * Returns whether the connection is closed or not. * * @return */ public boolean isClosed() { //not implemented return this.mTCStore == null ; } /** * Closes the connection to the back end. */ public void close() { if( mFlushOnClose ){ //we flush back the contents of the internal store to the file. String newline = System.getProperty("line.separator", "\r\n"); String indent = ""; try { // open Writer out = new BufferedWriter( new FileWriter( mTCFile ) ); out.write(TCFormatUtility.toTextFormat(mTCStore)); // close out.close(); this.mFlushOnClose = false; } catch ( IOException ioe ) { throw new RuntimeException( "Unable to write contents of TC to " + mTCFile, ioe ); } finally { this.mTCStore = null; this.mTCFile = null; } } } /** * Returns a textual description of the transformation mode. * * @return String containing the description. 
*/ public String getDescription() { return Text.DESCRIPTION; } /** * Returns TC entries for a particular logical transformation and/or on a * number of resources and/or of a particular type. * * @param namespace String The namespace of the logical transformation. * @param name String the name of the logical transformation. * @param version String The version of the logical transformation. * @param resourceids List The List resourceid where the transformation is located. * If NULL it returns all resources. * @param type TCType The type of the transformation to search for. * If NULL it returns all types. * * @return List Returns a list of TransformationCatalogEntry objects containing * the corresponding entries from the TC. Returns null if no entry found. * @throws Exception * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public List lookup( String namespace, String name, String version, List resourceids, TCType type ) throws Exception { logMessage("getTCEntries(String namespace,String name,String version," + "List resourceids, TCType type"); logMessage("\tgetTCEntries(" + namespace + ", " + name + ", " + version + ", " + resourceids + ", " + type); List results = null; if (resourceids != null) { for (Iterator i = resourceids.iterator(); i.hasNext(); ) { List tempresults = lookup(namespace, name, version, (String) i.next(), type); if (tempresults != null) { if (results == null) { results = new ArrayList(); } results.addAll(tempresults); } } } else { List tempresults = lookup(namespace, name, version, (String)null, type); if (tempresults != null) { results = new ArrayList(tempresults.size()); results.addAll(tempresults); } } return results; } /** * Returns TC entries for a particular logical transformation and/or on a * particular resource and/or of a particular type. * * @param namespace String The namespace of the logical transformation. * @param name String the name of the logical transformation. * @param version String The version of the logical transformation. * @param resourceid String The resourceid where the transformation is located. * If NULL it returns all resources. * @param type TCType The type of the transformation to search for. * If NULL it returns all types. * * @return List Returns a list of TransformationCatalogEntry objects * containing the corresponding entries from the TC. * Returns null if no entry found. * @throws Exception * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public List lookup( String namespace, String name, String version, String resourceid, TCType type ) throws Exception { logMessage( "getTCEntries(String namespace, String name, String version, " + "String resourceId, TCType type)"); logMessage("\t getTCEntries(" + namespace + ", " + name + ", " + version + "," + resourceid + ", " + type); List result = null; String lfn = Separator.combine(namespace, name, version); mLogger.log("Trying to get TCEntries for " + lfn + " on resource " + ( (resourceid == null) ? "ALL" : resourceid) + " of type " + ( (type == null) ? "ALL" : type.toString()), LogManager.DEBUG_MESSAGE_LEVEL); //always returns a list , empty in case of no results result = mTCStore.getEntries( Separator.combine(namespace, name, version), resourceid, type ); //API dictates we return null in case of empty return ( result == null || result.isEmpty() ) ? 
null : result; } /** * Get the list of Resource ID's where a particular transformation may reside. * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * @param type TCType The type of the transformation to search for.
* (Enumerated type includes SOURCE, STATIC-BINARY, DYNAMIC-BINARY, PACMAN, INSTALLED, SCRIPT)
* If NULL it returns all types. * * @return List Returns a list of Resource Id's as strings. Returns NULL if no results found. * * @throws Exception NotImplementedException if not implemented * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public List lookupSites( String namespace, String name, String version, TCType type ) throws Exception { logMessage( "List getTCResourceIds(String namespace, String name, String " + "version, TCType type"); logMessage("\t getTCResourceIds(" + namespace + ", " + name + ", " + version + ", " + type); //retrieve all entries for a transformation, matching a tc type List entries = this.lookup( namespace, name, version, (String)null, type ); Set result = new HashSet(); for( TransformationCatalogEntry entry : entries ){ result.add( entry.getResourceId() ); } //API dictates we return null in case of empty return ( result == null ||result.isEmpty()) ? null : new LinkedList( result ); } /** * Get the list of PhysicalNames for a particular transformation on a site/sites * for a particular type/types; * * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * @param resourceid String The id of the resource on which you want to search.
* If NULL then returns entries on all resources * @param type TCType The type of the transformation to search for.
* (Enumerated type includes source, binary, dynamic-binary, pacman, installed)
* If NULL then returns entries of all types. * * @return List Returns a List of objects * with the profiles not populated. * * @throws Exception NotImplementedException if not implemented. * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public List lookupNoProfiles( String namespace, String name,String version,String resourceid, TCType type ) throws Exception { logMessage("List getTCPhysicalNames(String namespace, String name," + "String version, String resourceid,TCType type)"); logMessage("\t getTCPhysicalNames(" + namespace + ", " + name + ", " + version + ", " + resourceid + ", " + type + ")"); //retrieve all entries for a transformation, matching a tc type List entries = this.lookup( namespace, name, version, resourceid, type ); List result = entries; //API dictates we return null in case of empty if( result == null || result.isEmpty() ){ return null; } return result; } /** * Get the list of LogicalNames available on a particular resource. * @param resourceid String The id of the resource on which you want to search * @param type TCType The type of the transformation to search for.
* (Enumerated type includes source, binary, dynamic-binary, pacman, installed)
* If NULL then return logical name for all types. * * @return List Returns a list of String Arrays. * Each array contains the resourceid, logical transformation * in the format namespace::name:version and type. * Returns NULL if no results found. * * @throws Exception NotImplementedException if not implemented. */ public List getTCLogicalNames( String resourceid, TCType type ) throws Exception { logMessage("List getTCLogicalNames(String resourceid, TCType type)"); logMessage("\t getTCLogicalNames(" + resourceid + "," + type + ")"); List entries = mTCStore.getEntries( resourceid, type ); //convert the list into the format Gaurang wants for the API. List result = new LinkedList(); for( TransformationCatalogEntry entry: entries ){ String l = entry.getLogicalTransformation(); String r = entry.getResourceId(); String t = entry.getType().toString(); String[] s = { r, l, t}; result.add( s ); } //API dictates we return null in case of empty if( result == null || result.isEmpty() ){ return null; } return result; } /** * Get the list of Profiles associated with a particular logical transformation. * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * * @return List Returns a list of Profile Objects containing profiles * assocaited with the transformation. * Returns NULL if no profiles found. * * @throws Exception NotImplementedException if not implemented. * @see org.griphyn.cPlanner.classes.Profile */ public List lookupLFNProfiles( String namespace, String name, String version ) throws Exception { throw new UnsupportedOperationException( "Not Implemented" ); } /** * Get the list of Profiles associated with a particular physical transformation. * @param pfn The physical file name to search the transformation by. * @param resourceid String The id of the resource on which you want to search. * @param type TCType The type of the transformation to search for.
* (Enumerated type includes source, binary, dynamic-binary, pacman, installed)
* * @throws Exception NotImplementedException if not implemented. * @return List Returns a list of Profile Objects containing profiles * associated with the transformation. * Returns NULL if no profiles found. * * @see org.griphyn.cPlanner.classes.Profile */ public List lookupPFNProfiles( String pfn, String resourceid, TCType type ) throws Exception { logMessage( "getTCPfnProfiles(String pfn, String resourceid, TCType type)"); logMessage("\t getTCPfnProfiles(" + pfn + "," + resourceid + "," + type + ")"); List result = new LinkedList(); //first retrieve all the transformations corresponding to resource id and type List entries = mTCStore.getEntries( resourceid, type ); //traverse through the list for( TransformationCatalogEntry entry : entries ){ if( entry.getPhysicalTransformation().equals( pfn ) ){ result.addAll( entry.getProfiles() ); } } //API dictates we return null in case of empty if( result == null || result.isEmpty() ){ return null; } return result; } /** * List all the contents of the TC * * @return List Returns a List of TransformationCatalogEntry objects. * @throws Exception */ public List getContents() throws Exception { return mTCStore.getEntries( (String)null, (TCType)null ); } /** * ADDITIONS */ /** * Add multiple TCEntries to the Catalog. * * @param entries List Takes a list of TransformationCatalogEntry objects as input * * @throws Exception * @return number of insertions. On failure, throw an exception; don't use zero. * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert(List entries) throws Exception { for (int i = 0; i < entries.size(); i++) { TransformationCatalogEntry entry = ( (TransformationCatalogEntry) entries.get(i)); this.insert(entry); } return entries.size(); } /** * Add a single TCEntry to the Catalog. * @param entry Takes a single TransformationCatalogEntry object as input * @throws Exception * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert(TransformationCatalogEntry entry) throws Exception { return this.insert(entry.getLogicalNamespace(), entry.getLogicalName(), entry.getLogicalVersion(), entry.getPhysicalTransformation(), entry.getType(), entry.getResourceId(), null, entry.getProfiles(), entry.getSysInfo()); } /** * Add a single TCEntry object temporarily to the in memory Catalog. * This is a hack to allow adding soft state entries to the TC. * @param entry Takes a single TransformationCatalogEntry object as input * @param write boolean enable write commits to the backing catalog or not. * @throws Exception * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert(TransformationCatalogEntry entry, boolean write) throws Exception { if(this.addTCEntry(entry.getLogicalNamespace(), entry.getLogicalName(), entry.getLogicalVersion(), entry.getPhysicalTransformation(), entry.getType(), entry.getResourceId(), null, entry.getProfiles(), entry.getSysInfo(), entry.getNotifications(),write)) { return 1; }else{ throw new RuntimeException("Failed to add TransformationCatalogEntry " + entry.getLogicalName()); } } /** * Add a single entry into the transformation catalog. * * @param namespace String The namespace of the transformation to be added (Can be null) * @param name String The name of the transformation to be added. 
* @param version String The version of the transformation to be added. (Can be null) * @param physicalname String The physical name/location of the transformation to be added. * @param type TCType The type of the physical transformation. * @param resourceid String The resource location id where the transformation is located. * @param lfnprofiles List The List of Profile objects associated with a Logical Transformation. (can be null) * @param pfnprofiles List The List of Profile objects associated with a Physical Transformation. (can be null) * @param sysinfo SysInfo The System information associated with a physical transformation. * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * * @throws Exception * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @see org.griphyn.cPlanner.classes.Profile */ public int insert(String namespace, String name, String version, String physicalname, TCType type, String resourceid, List pfnprofiles, List lfnprofiles, SysInfo system) throws Exception { if(this.addTCEntry(namespace, name, version, physicalname, type, resourceid, lfnprofiles, pfnprofiles, system, null, true)){ return 1; }else{ throw new RuntimeException("Failed to add TransformationCatalogEntry " + name); } } /** * Add a single entry into the transformation catalog. * * @param namespace the namespace of the transformation to be added (Can be null) * @param name the name of the transformation to be added. * @param version the version of the transformation to be added. (Can be null) * @param physicalname the physical name/location of the transformation to be added. * @param type the type of the physical transformation. * @param resourceid the resource location id where the transformation is located. * @param lfnprofiles the List of Profile objects associated * with a Logical Transformation. (can be null) * @param pfnprofiles the list of Profile objects associated * with a Physical Transformation. (can be null) * @param system the System information associated with a physical * transformation. * @param invokes the Notifications associated with the * transformation. * @param write boolean to commit changes to the backend catalog * @return boolean true if successfully added; returns false on error and * throws exception. 
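 * <p>
 * Caller-side sketch (hypothetical; the public insert overloads in this
 * class delegate to this method):
 * <pre>
 *  // write==true marks the in-memory store dirty, so close() flushes it
 *  catalog.insert( entry, true );
 * </pre>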
* * @throws Exception * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @see org.griphyn.cPlanner.classes.Profile */ protected boolean addTCEntry(String namespace, String name, String version, String physicalname, TCType type, String resourceid, List pfnprofiles, List lfnprofiles, SysInfo system, Notifications invokes, boolean write) throws Exception { TransformationCatalogEntry entry = new TransformationCatalogEntry(); entry.setLogicalNamespace(namespace); entry.setLogicalName(name); entry.setLogicalVersion(version); entry.setPhysicalTransformation(physicalname); entry.setType(type); entry.setResourceId(resourceid); entry.addProfiles(lfnprofiles); entry.addProfiles(pfnprofiles); entry.setSysInfo( system ); //entry.setVDSSysInfo( NMI2VDSSysInfo.nmiToVDSSysInfo(system) ); entry.addNotifications(invokes); List existing = this.lookup( namespace, name, version, resourceid, type ); boolean add = true; if( existing != null ){ //check to see if entries match for( TransformationCatalogEntry e: existing ){ if ( e.equals( entry ) ){ add = false; break; } } } if( add ){ mTCStore.addEntry( entry ); } else { mLogger.log("TC Entry already exists. Skipping", LogManager.DEBUG_MESSAGE_LEVEL); } //if entry needs to be added and flushed to the backend //set to flag to true. if( write && add ) { mFlushOnClose = true; } return true; } /** * Add additional profile to a logical transformation . * * @param namespace String The namespace of the transformation to be added. (can be null) * @param name String The name of the transformation to be added. * @param version String The version of the transformation to be added. (can be null) * @param profiles List The List of Profile objects that are to be added * to the transformation. * * @return number of insertions. On failure, throw an exception, don't use zero. * * @throws Exception * @see org.griphyn.cPlanner.classes.Profile */ public int addLFNProfile(String namespace, String name, String version, List profiles) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Add additional profile to a physical transformation. * @param pfn String The physical name of the transformation * @param type TCType The type of transformation that the profile is * associated with. * @param resourcename String The resource on which the physical transformation exists * @param profiles The List of Profile objects that are to be added * to the transformation. * @return number of insertions. On failure, throw an exception, don't use zero. * * @throws Exception * @see org.griphyn.cPlanner.classes.Profile */ public int addPFNProfile(String pfn, TCType type, String resourcename, List profiles) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * DELETIONS */ /** * Delete all entries in the transformation catalog for a give logical transformation and/or on a resource and/or of * a particular type * @param namespace String The namespace of the transformation to be deleted. (can be null) * @param name String The name of the transformation to be deleted. * @param version String The version of the transformation to be deleted. ( can be null) * @param resourceid String The resource id for which the transformation is to be deleted. * If NULL then transformation on all resource are deleted * @param type TCType The type of the transformation. If NULL then all types are deleted for the transformation. * @throws Exception * * @return the number of removed entries. 
* * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByLFN(String namespace, String name, String version, String resourceid, TCType type) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Delete all entries in the transformation catalog for pair of logical and physical transformation. * @param physicalname String The physical name of the transformation * @param namespace String The namespace associated in the logical name of the transformation. * @param name String The name of the logical transformation. * @param version String The version number of the logical transformation. * @param resourceid String The resource on which the transformation is to be deleted. * If NULL then it searches all the resource id. * @param type TCType The type of transformation. If NULL then it search and deletes entries for all types. * @throws Exception * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByPFN(String physicalname, String namespace, String name, String version, String resourceid, TCType type) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Delete a particular type of transformation, and/or on a particular resource * @param type TCType The type of the transformation * @param resourceid String The resource on which the transformation exists. * If NULL then that type of transformation is deleted from all the resources. * @throws Exception * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByType(TCType type, String resourceid) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Deletes entries from the catalog which have a particular system information. * @param sysinfo SysInfo The System Information by which you want to delete * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @throws Exception */ public int removeBySysInfo( SysInfo sysinfo) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Delete all entries on a particular resource from the transformation catalog. * @param resourceid String The resource which you want to remove. * @throws Exception * * @return the number of removed entries. */ public int removeBySiteID(String resourceid) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Deletes the entire transformation catalog. CLEAN............. USE WITH CAUTION. * * @return the number of removed entries. * * @throws Exception */ public int clear() throws Exception { int length = (mTCStore.getEntries(null,(TCType)null)).size(); mTCStore.clear(); mFlushOnClose = true; return length; } /** * Delete a list of profiles or all the profiles associated with a pfn on a * resource and of a type. * * @param physicalname String The physical name of the transformation. * @param type TCType The type of the transformation. * @param resourceid String The resource of the transformation. * @param profiles List The list of profiles to be deleted. If NULL then all profiles for that pfn+resource+type are deleted. * * @return the number of removed entries. 
* * @see org.griphyn.cPlanner.classes.Profile * @throws Exception */ public int deletePFNProfiles(String physicalname, TCType type, String resourceid, List profiles) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Delete a list of profiles or all the profiles associated with a logical * transformation. * * @param namespace String The namespace of the logical transformation. * @param name String The name of the logical transformation. * @param version String The version of the logical transformation. * @param profiles List The List of profiles to be deleted. If NULL * then all profiles for the logical transformation are deleted. * * @return the number of removed entries. * * @see org.griphyn.cPlanner.classes.Profile * @throws Exception */ public int deleteLFNProfiles(String namespace, String name, String version, List profiles) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Computes the maximum column lenght for pretty printing. * * @param s String[] * @param count int[] */ private static void columnLength(String[] s, int[] count) { for (int i = 0; i < count.length; i++) { if (s[i].length() > count[i]) { count[i] = s[i].length(); } } } /** * Logs the message to a logging stream. Currently does not log to any stream. * * @param msg the message to be logged. */ protected void logMessage(String msg) { //mLogger.logMessage("[Shishir] Transformation Catalog : " + msg); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/impl/File.java0000644000175000017500000015636711757531137030457 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 */ package edu.isi.pegasus.planner.catalog.transformation.impl; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.FileNotFoundException; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.io.Reader; import java.util.ArrayList; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.TreeSet; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.Boolean; import edu.isi.pegasus.common.util.ProfileParser; import edu.isi.pegasus.common.util.ProfileParserException; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.NMI2VDSSysInfo; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import edu.isi.pegasus.planner.classes.Notifications; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.common.PegasusProperties; /** * This is the new file based TC implementation storing the contents of the file * in memory. For the old tc file implementation see OldTC.java. * * @author Gaurang Mehta * @author Karan Vahi * @version $Revision: 4800 $ */ public class File extends Abstract implements TransformationCatalog{ /** * The singleton handle to the contents of the transformation catalog. */ private static File mTCFileHandle = null; /** * The LogManager object which is used to log all the messages. * Its values are set in the CPlanner (the main toolkit) class. */ protected LogManager mLogger; /** * The List containing the user specified list of pools on which * the dag is to run. */ protected List mvExecPools; /** * The Tree Map which stores the contents of the file. * The key is the transformation name. */ private Map mTreeMap; /** * The path to the file based TC. */ private String mTCFile; /** * The handle to the properties object. */ private PegasusProperties mProps; /** * Boolean indicating whether to flush the contents back to the file on * close. */ private boolean mFlushOnClose; /** * Boolean indicating whether to modify the file URL or not. */ private boolean modifyFileURL = true; /** * Returns an instance of the File TC. * * @return TransformationCatalog * @deprecated */ public static TransformationCatalog getInstance() { if (mTCFileHandle == null) { PegasusBag bag = new PegasusBag(); bag.add( PegasusBag.PEGASUS_LOGMANAGER, LogManagerFactory.loadSingletonInstance() ); bag.add( PegasusBag.PEGASUS_PROPERTIES, PegasusProperties.nonSingletonInstance() ); mTCFileHandle = new File(); mTCFileHandle.initialize( bag ); } return mTCFileHandle; } /** * The default constructor. */ public File(){ } /** * Initializes the implementation. * * @param bag the bag of Pegasus initialization objects. 
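 * <p>
 * A minimal construction sketch, mirroring the deprecated
 * {@link #getInstance()} method above (illustrative, not an additional API):
 * <pre>
 *   PegasusBag bag = new PegasusBag();
 *   bag.add( PegasusBag.PEGASUS_LOGMANAGER, LogManagerFactory.loadSingletonInstance() );
 *   bag.add( PegasusBag.PEGASUS_PROPERTIES, PegasusProperties.nonSingletonInstance() );
 *   File catalog = new File();
 *   catalog.initialize( bag ); //expects pegasus.catalog.transformation.file to be set
 * </pre>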
* */ public void initialize ( PegasusBag bag ){ mProps = bag.getPegasusProperties(); mLogger = bag.getLogger(); mFlushOnClose = false; modifyFileURL = Boolean.parse(mProps.getProperty( MODIFY_FOR_FILE_URLS_KEY), true ); mTCFile = mProps.getTCPath(); mTreeMap = new TreeMap(); mLogger.log("TC Mode being used is " + this.getDescription(), LogManager.CONFIG_MESSAGE_LEVEL); mLogger.log("TC File being used is " + mTCFile, LogManager.CONFIG_MESSAGE_LEVEL); if (mTCFile == null) { throw new RuntimeException( "The File to be used as TC should be " + "defined with the property pegasus.catalog.transformation.file"); } else { java.io.File f = new java.io.File( mTCFile ); if( f.exists() ){ populateTC(); } else{ mLogger.log("The Transformation Catalog file " + mTCFile + " was not found", LogManager.DEBUG_MESSAGE_LEVEL); } } } /** * Returns a textual description of the transformation mode. * * @return String containing the description. */ @Override public String getDescription() { String st = "New FILE TC Mode"; return st; } /** * Returns TC entries for a particular logical transformation and/or on a * number of resources and/or of a particular type. * * @param namespace String The namespace of the logical transformation. * @param name String the name of the logical transformation. * @param version String The version of the logical transformation. * @param resourceids List The List resourceid where the transformation is located. * If NULL it returns all resources. * @param type TCType The type of the transformation to search for. * If NULL it returns all types. * * @return List Returns a list of TransformationCatalogEntry objects containing * the corresponding entries from the TC. Returns null if no entry found. * @throws Exception * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ @Override public List lookup( String namespace, String name, String version, List resourceids, TCType type ) throws Exception { logMessage("getTCEntries(String namespace,String name,String version," + "List resourceids, TCType type"); logMessage("\tgetTCEntries(" + namespace + ", " + name + ", " + version + ", " + resourceids + ", " + type); List results = null; if (resourceids != null) { for (Iterator i = resourceids.iterator(); i.hasNext(); ) { List tempresults = lookup(namespace, name, version, (String) i.next(), type); if (tempresults != null) { if (results == null) { results = new ArrayList(); } results.addAll(tempresults); } } } else { List tempresults = lookup(namespace, name, version, (String)null, type); if (tempresults != null) { results = new ArrayList(tempresults.size()); results.addAll(tempresults); } } return results; } /** * Returns TC entries for a particular logical transformation and/or on a * particular resource and/or of a particular type. * * @param namespace String The namespace of the logical transformation. * @param name String the name of the logical transformation. * @param version String The version of the logical transformation. * @param resourceid String The resourceid where the transformation is located. * If NULL it returns all resources. * @param type TCType The type of the transformation to search for. * If NULL it returns all types. * * @return List Returns a list of TransformationCatalogEntry objects * containing the corresponding entries from the TC. * Returns null if no entry found. 
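 * <p>
 * Usage sketch (values are illustrative, borrowed from the
 * TestTransformationCatalog program further below):
 * <pre>
 *   List results = catalog.lookup( "pegasus", "preprocess", null, "isi", TCType.INSTALLED );
 * </pre>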
* @throws Exception * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public List lookup( String namespace, String name, String version, String resourceid, TCType type ) throws Exception { logMessage( "getTCEntries(String namespace, String name, String version, " + "String resourceId, TCType type)"); logMessage("\t getTCEntries(" + namespace + ", " + name + ", " + version + "," + resourceid + ", " + type); List results = null; String lfn = Separator.combine(namespace, name, version); mLogger.log("Trying to get TCEntries for " + lfn + " on resource " + ( (resourceid == null) ? "ALL" : resourceid) + " of type " + ( (type == null) ? "ALL" : type.toString()), LogManager.DEBUG_MESSAGE_LEVEL); if (resourceid != null) { if (mTreeMap.containsKey(resourceid)) { Map lfnMap = (Map) mTreeMap.get(resourceid); if (lfnMap.containsKey(lfn)) { List l = (List) lfnMap.get(lfn); if (type != null && l != null) { for (Iterator i = l.iterator(); i.hasNext(); ) { TransformationCatalogEntry tc = ( TransformationCatalogEntry) i.next(); if (tc.getType().equals(type)) { if (results == null) { results = new ArrayList(); } results.add(tc); } } } else { results = l; } } } } else { //since resourceid is null return entries for all sites if (!mTreeMap.isEmpty()) { for (Iterator j = mTreeMap.values().iterator(); j.hasNext(); ) { //check all maps for the executable. Map lfnMap = (Map) j.next(); if (lfnMap.containsKey(lfn)) { List l = (List) lfnMap.get(lfn); if (type != null && l != null) { for (Iterator i = l.iterator(); i.hasNext(); ) { TransformationCatalogEntry tc = ( TransformationCatalogEntry) i.next(); if (tc.getType().equals(type)) { if (results == null) { results = new ArrayList(); } results.add(tc); } } } else { //if the list returned is not empty keep adding to the result list. if (l != null) { if (results == null) { results = new ArrayList(); } results.addAll(l); } } } } } } return results; } /** * Get the list of Resource ID's where a particular transformation may reside. * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * @param type TCType The type of the transformation to search for.
* (Enumerated type includes SOURCE, STATIC-BINARY, DYNAMIC-BINARY, PACMAN, INSTALLED, SCRIPT)
* If NULL it returns all types. * * @return List Returns a list of Resource Id's as strings. Returns NULL if no results found. * * @throws Exception NotImplementedException if not implemented * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public List lookupSites( String namespace, String name, String version, TCType type ) throws Exception { logMessage( "List getTCResourceIds(String namespace, String name, String " + "version, TCType type"); logMessage("\t getTCResourceIds(" + namespace + ", " + name + ", " + version + ", " + type); List results = null; List lfnList = new ArrayList(); if (name == null) { if (type == null) { //return all the resources only results = new ArrayList(mTreeMap.keySet()); return results; } } //return all the entries to search for type lfnList.addAll(mTreeMap.values()); List entries = null; for (Iterator i = lfnList.iterator(); i.hasNext(); ) { Map lfnMap = (Map) i.next(); if (entries == null) { entries = new ArrayList(); } if (name == null) { for (Iterator j = lfnMap.values().iterator(); j.hasNext(); ) { entries.addAll( (List) j.next()); } } else { if (lfnMap.containsKey(Separator.combine(namespace, name, version))) { entries.addAll( (List) lfnMap.get(Separator.combine( namespace, name, version))); } } } TreeSet rset = null; for (Iterator i = entries.iterator(); i.hasNext(); ) { if (rset == null) { rset = new TreeSet(); } TransformationCatalogEntry entry = (TransformationCatalogEntry) i. next(); if (type == null) { rset.add(entry.getResourceId()); } else { if (entry.getType().equals(type)) { rset.add(entry.getResourceId()); } } } if (rset != null) { results = new ArrayList(); for (Iterator i = rset.iterator(); i.hasNext(); ) { results.add( (String) i.next()); } } return results; } /** * Get the list of PhysicalNames for a particular transformation on a site/sites * for a particular type/types; * * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * @param resourceid String The id of the resource on which you want to search.
* If NULL then returns entries on all resources * @param type TCType The type of the transformation to search for.
* (Enumerated type includes source, binary, dynamic-binary, pacman, installed)
* If NULL then returns entries of all types. * * @return List Returns a List of objects * with the profiles not populated. * * @throws Exception NotImplementedException if not implemented. * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public List lookupNoProfiles( String namespace, String name,String version,String resourceid, TCType type ) throws Exception { logMessage("List getTCPhysicalNames(String namespace, String name," + "String version, String resourceid,TCType type)"); logMessage("\t getTCPhysicalNames(" + namespace + ", " + name + ", " + version + ", " + resourceid + ", " + type + ")"); List results = null; List lfnMap = new ArrayList(); /* int count[] = { 0, 0, 0}; */ if (resourceid == null) { lfnMap.addAll(mTreeMap.values()); } else { if (mTreeMap.containsKey(resourceid)) { lfnMap.add(mTreeMap.get(resourceid)); } else { return null; } } for (Iterator i = lfnMap.iterator(); i.hasNext(); ) { Map lMap = (Map) i.next(); if (lMap.containsKey(Separator.combine(namespace, name, version))) { for (Iterator j = ( (List) lMap.get(Separator.combine( namespace, name, version))).iterator(); j.hasNext(); ) { TransformationCatalogEntry entry = ( TransformationCatalogEntry) j.next(); if (type != null) { if (!entry.getType().equals(type)) { continue; } } if (results == null) { results = new ArrayList(); } results.add(entry); } } } /* if (results != null) { results.add(count); } */ return results; } /** * Get the list of LogicalNames available on a particular resource. * @param resourceid String The id of the resource on which you want to search * @param type TCType The type of the transformation to search for.
* (Enumerated type includes source, binary, dynamic-binary, pacman, installed)
* If NULL then return logical name for all types. * * @return List Returns a list of String Arrays. * Each array contains the resourceid, logical transformation * in the format namespace::name:version and type. * Returns NULL if no results found. * * @throws Exception NotImplementedException if not implemented. */ public List getTCLogicalNames( String resourceid, TCType type ) throws Exception { logMessage("List getTCLogicalNames(String resourceid, TCType type)"); logMessage("\t getTCLogicalNames(" + resourceid + "," + type + ")"); List result = null; /* int[] length = { 0, 0}; */ List lfnMap = new ArrayList(); String lfn = null, resource = null, tctype = null; if (resourceid == null) { lfnMap.addAll(mTreeMap.values()); } else { if (mTreeMap.containsKey(resourceid)) { lfnMap.add( (Map) mTreeMap.get(resourceid)); } else { lfnMap = null; } } if (lfnMap != null) { for (Iterator i = lfnMap.iterator(); i.hasNext(); ) { for (Iterator j = ( (Map) i.next()).values().iterator(); j.hasNext(); ) { for (Iterator k = ( (List) j.next()).iterator(); k.hasNext(); ) { TransformationCatalogEntry tc = ( TransformationCatalogEntry) k.next(); String l = null, r = null, t = null; if (type == null) { l = tc.getLogicalTransformation(); r = tc.getResourceId(); t = tc.getType().toString(); } else { if (tc.getType().equals(type)) { l = tc.getLogicalTransformation(); r = tc.getResourceId(); t = tc.getType().toString(); } } if (l != null && r != null && t != null) { if (lfn == null || ! (lfn.equalsIgnoreCase(l) && resource.equalsIgnoreCase(r) && tctype.equalsIgnoreCase(t))) { lfn = l; resource = r; tctype = t; String[] s = { r, l, t}; //columnLength(s, length); if (result == null) { result = new ArrayList(5); } result.add(s); } } } } } } /* if (result != null) { result.add(length); } */ return result; } /** * Get the list of Profiles associated with a particular logical transformation. * @param namespace String The namespace of the transformation to search for. * @param name String The name of the transformation to search for. * @param version String The version of the transformation to search for. * * @return List Returns a list of Profile Objects containing profiles * assocaited with the transformation. * Returns NULL if no profiles found. * * @throws Exception NotImplementedException if not implemented. * @see org.griphyn.cPlanner.classes.Profile */ public List lookupLFNProfiles( String namespace, String name, String version ) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Get the list of Profiles associated with a particular physical transformation. * @param pfn The physical file name to search the transformation by. * @param resourceid String The id of the resource on which you want to search. * @param type TCType The type of the transformation to search for.
* (Enumerated type includes source, binary, dynamic-binary, pacman, installed)
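 * <p>
 * Usage sketch (the pfn and site handle are illustrative values):
 * <pre>
 *   List profiles = catalog.lookupPFNProfiles( "/usr/pegasus/bin/keg", "isi", TCType.INSTALLED );
 * </pre>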
* * @throws Exception NotImplementedException if not implemented. * @return List Returns a list of Profile Objects containing profiles * assocaited with the transformation. * Returns NULL if no profiless found. * * @see org.griphyn.cPlanner.classes.Profile */ public List lookupPFNProfiles( String pfn, String resourceid, TCType type ) throws Exception { logMessage( "getTCPfnProfiles(String pfn, String resourceid, TCType type)"); logMessage("\t getTCPfnProfiles(" + pfn + "," + resourceid + "," + type + ")"); List result = null; List lfnMap = new ArrayList(); if (mTreeMap.containsKey(resourceid)) { lfnMap.add( (Map) mTreeMap.get(resourceid)); } for (Iterator i = lfnMap.iterator(); i.hasNext(); ) { for (Iterator j = ( (Map) i.next()).values().iterator(); j.hasNext(); ) { for (Iterator k = ( (List) j.next()).iterator(); k.hasNext(); ) { TransformationCatalogEntry tc = ( TransformationCatalogEntry) k.next(); List profiles = null; if (tc.getPhysicalTransformation().equals(pfn)) { if (type == null || tc.getType().equals(type)) { profiles = tc.getProfiles(); } if (profiles != null) { if (result == null) { result = new ArrayList(10); } result.addAll(profiles); } } } } } return result; } /** * List all the contents of the TC * * @return List Returns a List of TransformationCatalogEntry objects. * @throws Exception */ public List getContents() throws Exception { List result = new ArrayList(); for (Iterator i = mTreeMap.values().iterator(); i.hasNext(); ) { for (Iterator j = ( (Map) i.next()).values().iterator(); j.hasNext(); ) { for (Iterator k = ( (List) j.next()).iterator(); k.hasNext(); ) { TransformationCatalogEntry tc = ( TransformationCatalogEntry) k.next(); result.add(tc); } } } /* List result = null; int[] length = {0, 0, 0, 0, 0}; for ( Iterator i = mTreeMap.values().iterator(); i.hasNext(); ) { for ( Iterator j = ( ( Map ) i.next() ).values().iterator(); j.hasNext(); ) { for ( Iterator k = ( ( List ) j.next() ).iterator(); k.hasNext(); ) { TransformationCatalogEntry tc = ( TransformationCatalogEntry ) k.next(); if ( result == null ) { result = new ArrayList( 10 ); } String[] s = {tc.getResourceId(), tc.getLogicalTransformation(), tc.getPhysicalTransformation(), tc.getType().toString(), tc.getVDSSysInfo().toString(), ( ( tc.getProfiles() != null ) ? ProfileParser.combine( tc.getProfiles() ) : "NULL" )}; columnLength( s, length ); result.add( s ); } } } if ( result != null ) { result.add( length ); } */ return result; } /** * ADDITIONS */ /** * Add multiple TCEntries to the Catalog. * * @param tcentry List Takes a list of TransformationCatalogEntry objects as input * * @throws Exception * @return number of insertions On failure,throw an exception, don't use zero. * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert(List entries) throws Exception { for (int i = 0; i < entries.size(); i++) { TransformationCatalogEntry entry = ( (TransformationCatalogEntry) entries.get(i)); this.insert(entry); } return entries.size(); } /** * Add single TCEntry to the Catalog. * @param tcentry Takes a single TransformationCatalogEntry object as input * @throws Exception * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. 
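 * <p>
 * Construction sketch, adapted from the TestTransformationCatalog program
 * further below (values are illustrative):
 * <pre>
 *   TransformationCatalogEntry tce = new TransformationCatalogEntry( "pegasus", "preprocess", null );
 *   tce.setPhysicalTransformation( "/usr/pegasus/bin/keg" );
 *   tce.setResourceId( "isi" );
 *   tce.setType( TCType.INSTALLED );
 *   int added = catalog.insert( tce ); //1 on success
 * </pre>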
* * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert(TransformationCatalogEntry entry) throws Exception { return this.insert(entry.getLogicalNamespace(), entry.getLogicalName(), entry.getLogicalVersion(), entry.getPhysicalTransformation(), entry.getType(), entry.getResourceId(), null, entry.getProfiles(), entry.getSysInfo()) ; } /** * Add single TCEntry object temporarily to the in memory Catalog. * This is a hack to get around for adding soft state entries to the TC * @param tcentry Takes a single TransformationCatalogEntry object as input * @param write boolean enable write commits to backed catalog or not. * @throws Exception * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry */ public int insert(TransformationCatalogEntry entry, boolean write) throws Exception { if (this.addTCEntry(entry.getLogicalNamespace(), entry.getLogicalName(), entry.getLogicalVersion(), entry.getPhysicalTransformation(), entry.getType(), entry.getResourceId(), null, entry.getProfiles(), entry.getSysInfo(), entry.getNotifications(), write)){ return 1; }else{ throw new RuntimeException("Failed to add TransformationCatalogEntry " + entry.getLogicalName()); } } /** * Add an single entry into the transformation catalog. * * @param namespace String The namespace of the transformation to be added (Can be null) * @param name String The name of the transformation to be added. * @param version String The version of the transformation to be added. (Can be null) * @param physicalname String The physical name/location of the transformation to be added. * @param type TCType The type of the physical transformation. * @param resourceid String The resource location id where the transformation is located. * @param lfnprofiles List The List of Profile objects associated with a Logical Transformation. (can be null) * @param pfnprofiles List The List of Profile objects associated with a Physical Transformation. (can be null) * @param sysinfo SysInfo The System information associated with a physical transformation. * * @return number of insertions, should always be 1. On failure, * throw an exception, don't use zero. * * * @throws Exception * * @see edu.isi.pegasus.planner.catalog.TransformationCatalogEntry * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @see org.griphyn.cPlanner.classes.Profile */ public int insert(String namespace, String name, String version, String physicalname, TCType type, String resourceid, List pfnprofiles, List lfnprofiles, SysInfo system) throws Exception { if(this.addTCEntry(namespace, name, version, physicalname, type, resourceid, lfnprofiles, pfnprofiles, system, null, true)) { return 1; }else{ throw new RuntimeException("Failed to add TransformationCatalogEntry " + name); } } /** * Add an single entry into the transformation catalog. * * @param namespace the namespace of the transformation to be added (Can be null) * @param name the name of the transformation to be added. * @param version the version of the transformation to be added. (Can be null) * @param physicalname the physical name/location of the transformation to be added. * @param type the type of the physical transformation. * @param resourceid the resource location id where the transformation is located. * @param lfnprofiles the List of Profile objects associated * with a Logical Transformation. 
(can be null) * @param pfnprofiles the list of Profile objects associated * with a Physical Transformation. (can be null) * @param system the System information associated with a physical * transformation. * @param invokes the Notifications associated with the * transformation. * @param write boolean to commit changes to backend catalog * @return boolean true if succesfully added, returns false if error and * throws exception. * * @throws Exception * * @see org.griphyn.common.catalog.TransformationCatalogEntry * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @see org.griphyn.cPlanner.classes.Profile */ protected boolean addTCEntry(String namespace, String name, String version, String physicalname, TCType type, String resourceid, List pfnprofiles, List lfnprofiles, SysInfo system, Notifications invokes, boolean write) throws Exception { TransformationCatalogEntry entry = new TransformationCatalogEntry(); entry.setLogicalNamespace(namespace); entry.setLogicalName(name); entry.setLogicalVersion(version); entry.setPhysicalTransformation(physicalname); entry.setType(type); entry.setResourceId(resourceid); entry.addProfiles(lfnprofiles); entry.addProfiles(pfnprofiles); entry.setVDSSysInfo( NMI2VDSSysInfo.nmiToVDSSysInfo(system) ); entry.addNotifications(invokes); Map lfnMap = null; if (mTreeMap.containsKey(resourceid)) { lfnMap = (Map) mTreeMap.get(resourceid); } else { lfnMap = new TreeMap(); mTreeMap.put(resourceid, lfnMap); } List pfnList = null; if (lfnMap.containsKey(entry.getLogicalTransformation())) { pfnList = (List) lfnMap.get(entry.getLogicalTransformation()); } else { pfnList = new ArrayList(2); lfnMap.put(entry.getLogicalTransformation(), pfnList); } boolean add = true; for (Iterator i = pfnList.iterator(); i.hasNext(); ) { TransformationCatalogEntry test = (TransformationCatalogEntry) i. next(); if (test.equals(entry)) { add = false; } } if (add) { pfnList.add(entry); if (write) { mFlushOnClose = true; //writeTC(); } } else { mLogger.log("TC Entry already exists. Skipping", LogManager.DEBUG_MESSAGE_LEVEL); } return true; } /** * Add additional profile to a logical transformation . * * @param namespace String The namespace of the transformation to be added. (can be null) * @param name String The name of the transformation to be added. * @param version String The version of the transformation to be added. (can be null) * @param profiles List The List of Profile objects that are to be added * to the transformation. * * @return number of insertions. On failure, throw an exception, don't use zero. * * @throws Exception * @see org.griphyn.cPlanner.classes.Profile */ public int addLFNProfile(String namespace, String name, String version, List profiles) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Add additional profile to a physical transformation. * @param pfn String The physical name of the transformation * @param type TCType The type of transformation that the profile is * associated with. * @param resourcename String The resource on which the physical transformation exists * @param profiles The List of Profile objects that are to be added * to the transformation. * @return number of insertions. On failure, throw an exception, don't use zero. 
 * * @throws Exception * @see org.griphyn.cPlanner.classes.Profile */ public int addPFNProfile(String pfn, TCType type, String resourcename, List profiles) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * DELETIONS */ /** * Delete all entries in the transformation catalog for a given logical transformation and/or on a resource and/or of * a particular type. * @param namespace String The namespace of the transformation to be deleted. (can be null) * @param name String The name of the transformation to be deleted. * @param version String The version of the transformation to be deleted. (can be null) * @param resourceid String The resource id for which the transformation is to be deleted. * If NULL then transformations on all resources are deleted * @param type TCType The type of the transformation. If NULL then all types are deleted for the transformation. * @throws Exception * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByLFN(String namespace, String name, String version, String resourceid, TCType type) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Delete all entries in the transformation catalog for a pair of logical and physical transformations. * @param physicalname String The physical name of the transformation * @param namespace String The namespace associated in the logical name of the transformation. * @param name String The name of the logical transformation. * @param version String The version number of the logical transformation. * @param resourceid String The resource on which the transformation is to be deleted. * If NULL then it searches all the resource ids. * @param type TCType The type of transformation. If NULL then it searches and deletes entries for all types. * @throws Exception * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByPFN(String physicalname, String namespace, String name, String version, String resourceid, TCType type) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Delete a particular type of transformation, and/or on a particular resource * @param type TCType The type of the transformation * @param resourceid String The resource on which the transformation exists. * If NULL then that type of transformation is deleted from all the resources. * @throws Exception * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.transformation.classes.TCType */ public int removeByType(TCType type, String resourceid) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Deletes entries from the catalog which have a particular system information. * @param sysinfo SysInfo The System Information by which you want to delete * * @return the number of removed entries. * * @see edu.isi.pegasus.planner.catalog.classes.SysInfo * @throws Exception */ public int removeBySysInfo( SysInfo sysinfo) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Delete all entries on a particular resource from the transformation catalog. * @param resourceid String The resource which you want to remove. * @throws Exception * * @return the number of removed entries. 
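 * <p>
 * Usage sketch (the site handle is illustrative). Note that this in-memory
 * implementation returns 1 if the site key existed and 0 otherwise, rather
 * than a per-entry count:
 * <pre>
 *   int removed = catalog.removeBySiteID( "isi" );
 * </pre>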
 */ public int removeBySiteID(String resourceid) throws Exception { if (mTreeMap.containsKey(resourceid)) { mTreeMap.remove(resourceid); mFlushOnClose = true; //writeTC(); return 1; }else{ return 0; } } /** * Deletes the entire transformation catalog. CLEAN............. USE WITH CAUTION. * * @return the number of removed entries. * * @throws Exception */ public int clear() throws Exception { int length = mTreeMap.size(); mTreeMap.clear(); mFlushOnClose = true; return length; } /** * Delete a list of profiles or all the profiles associated with a pfn on a * resource and of a type. * * @param physicalname String The physical name of the transformation. * @param type TCType The type of the transformation. * @param resourceid String The resource of the transformation. * @param profiles List The list of profiles to be deleted. If NULL then all profiles for that pfn+resource+type are deleted. * * @return the number of removed entries. * * @see org.griphyn.cPlanner.classes.Profile * @throws Exception */ public int deletePFNProfiles(String physicalname, TCType type, String resourceid, List profiles) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } /** * Delete a list of profiles or all the profiles associated with a logical * transformation. * * @param namespace String The namespace of the logical transformation. * @param name String The name of the logical transformation. * @param version String The version of the logical transformation. * @param profiles List The List of profiles to be deleted. If NULL * then all profiles for the logical transformation are deleted. * * @return the number of removed entries. * * @see org.griphyn.cPlanner.classes.Profile * @throws Exception */ public int deleteLFNProfiles(String namespace, String name, String version, List profiles) throws Exception { throw new UnsupportedOperationException("Not Implemented"); } public boolean connect(java.util.Properties props) { //not implemented return true; } public boolean isClosed() { return (this.mTreeMap == null); } public void close() { try{ if(mFlushOnClose){ writeTC(); this.mFlushOnClose = false; } }finally{ this.mTreeMap = null; this.mTCFile = null; } } private void writeTC() { PrintWriter writer = null; try { mLogger.log("Starting to write the TC file", LogManager.DEBUG_MESSAGE_LEVEL); writer = new PrintWriter(new BufferedWriter(new FileWriter( mTCFile, false))); } catch (IOException e) { mLogger.log( "Unable to open TC File for writing " + mTCFile, e, LogManager.ERROR_MESSAGE_LEVEL); //bail out instead of dereferencing a null writer below return; } int count = 0; for (Iterator i = mTreeMap.values().iterator(); i.hasNext(); ) { //get all the values from the main map for (Iterator j = ( (Map) i.next()).values().iterator(); j.hasNext(); ) { //for each resource and each logical transformation get the arraylist. for (Iterator k = ( (List) j.next()).iterator(); k.hasNext(); ) { //start printing each entry writer.println( ( (TransformationCatalogEntry) k.next()). toTCString()); count++; } } } mLogger.log("Written " + count + " entries back to the TC file", LogManager.DEBUG_MESSAGE_LEVEL); writer.flush(); writer.close(); mLogger.log( "Starting to write the TC file - DONE", LogManager.DEBUG_MESSAGE_LEVEL); } /** * Computes the maximum column length for pretty printing. 
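 * <p>
 * Worked sketch: with s = { "isi", "pegasus::preprocess" } and a running
 * count = { 5, 10 }, the call widens count to { 5, 19 }, i.e. each slot
 * tracks the longest string seen so far in that column.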
* * @param s String[] * @param count int[] */ /* private static void columnLength(String[] s, int[] count) { for (int i = 0; i < count.length; i++) { if (s[i].length() > count[i]) { count[i] = s[i].length(); } } }*/ /** * Populates the internal copy of the transformation catalog from a byte * stream (input stream). Used in webservices, when clients upload their files. * It uses the default character encoding. * * @param reader the InputStrean containing the bytes to be * read. * @return boolean */ private boolean populateTC(InputStream reader) { return populateTC(new InputStreamReader(reader)); } /** * Populates the internal copy of the transformation catalog from the file * containing the transformation catalog in the 6 column format. * * @return boolean */ private boolean populateTC() { boolean result = false; try { result = populateTC(new FileReader(mTCFile)); } catch (IOException e) { mLogger.log("Unable to open the file " + mTCFile, e, LogManager.ERROR_MESSAGE_LEVEL); return false; } return result; } /** * Adds multiple entries into the TC. Calls the above api multiple times. * * @param reader the input stream from where to read the contents of the * transformation catalog. * @return boolean */ private boolean populateTC(Reader reader) { BufferedReader buf = new BufferedReader(reader); // String profilestring = null; int linecount = 0; int count = 0; try { String line = null; //buf = new BufferedReader( new FileReader( mTCFile ) ); while ( (line = buf.readLine()) != null) { boolean profile_error=false; linecount++; if (! (line.startsWith("#") || line.trim().equalsIgnoreCase(""))) { TransformationCatalogEntry tc = new TransformationCatalogEntry(); String[] tokens = line.trim().split("[ \t]+", 6); for (int i = 0; i < tokens.length; i++) { switch (i) { case 0: //poolname tc.setResourceId(tokens[i]); break; case 1: //logical transformation name if (tokens[i].indexOf("__") != -1) { mLogger.log( "Logical Transformations in the new File TC " + "are represented as NS::NAME:VER", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log("Assuming " + tokens[i] + " as just the transformation NAME.", LogManager.DEBUG_MESSAGE_LEVEL); } tc.setLogicalTransformation(tokens[i]); break; case 2: //pfn tc.setPhysicalTransformation(tokens[i]); break; case 3: //type if( tokens[i].equalsIgnoreCase( "null") ){ tc.setType( TCType.INSTALLED ); } else if ( tokens[i].equalsIgnoreCase( "STATIC_BINARY") ){ //if entry is static binary we set it to stageable tc.setType( TCType.STAGEABLE ); } else{ //set to whatever the value was tc.setType(TCType.valueOf(tokens[i])); } /* tc.setType( (tokens[i].equalsIgnoreCase( "null")) ? TCType.INSTALLED : TCType.valueOf(tokens[i])); */ break; case 4: //systeminfo tc.setVDSSysInfo( (tokens[i].equalsIgnoreCase( "null")) ? new VDSSysInfo(null) : new VDSSysInfo(tokens[i])); break; case 5: //profile string if (!tokens[i].equalsIgnoreCase("null")) { try { tc.addProfiles(ProfileParser.parse( tokens[ i])); } catch (ProfileParserException ppe) { mLogger.log( "Could not parse profile(s) for transformation \""+ tc.getLogicalTransformation()+ "\" on site \""+ tc.getResourceId()+ "\" on line " + linecount, LogManager.ERROR_MESSAGE_LEVEL); mLogger.log( ppe.getMessage() + " at position "+ ppe.getPosition() + " for the string \"" + tokens[i]+"\"", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log("Ignoring the current transformation. Please fix the profiles shown above." 
,LogManager.ERROR_MESSAGE_LEVEL); profile_error=true; continue; } catch (RuntimeException e) { mLogger.log( "Ignoring errors while parsing profile in Transformation Catalog on line " + linecount, e, LogManager.WARNING_MESSAGE_LEVEL); } } break; default: mLogger.log("Line " + linecount + " : Humm no need to be in default", LogManager.ERROR_MESSAGE_LEVEL); } //end of switch } //end of for loop if (profile_error){ //if there is an error while parsing the profile //Skip adding the entry to TC (As Per JIRA PM-164 continue; } // if (count > 0) { // mLogger.logMessage("Loading line number" + linecount + // " to the map", 1); Map lfnMap = null; if (!mTreeMap.containsKey(tc.getResourceId())) { lfnMap = new TreeMap(); } else { lfnMap = (Map) mTreeMap.get(tc.getResourceId()); } List entries = null; if (!lfnMap.containsKey(tc.getLogicalTransformation())) { entries = new ArrayList(3); } else { entries = (List) lfnMap.get(tc. getLogicalTransformation()); } if(modifyFileURL){ entries.add( Abstract.modifyForFileURLS(tc) ); }else{ entries.add(tc); } lfnMap.put(tc.getLogicalTransformation(), entries); mTreeMap.put(tc.getResourceId(), lfnMap); count++; } //end of if "#" } //end of while line mLogger.log("Loaded " + count + " entries to the TC Map", LogManager.DEBUG_MESSAGE_LEVEL); buf.close(); return true; } catch (FileNotFoundException ex) { mLogger.log("The tc text file " + mTCFile + " was not found", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log("Considering it as Empty TC", LogManager.ERROR_MESSAGE_LEVEL); return true; } catch (IOException e) { mLogger.log("Unable to open the file " + mTCFile, e, LogManager.ERROR_MESSAGE_LEVEL); return false; } catch (IllegalStateException e) { mLogger.log("On line " + linecount + "in File " + mTCFile + "\n", e, LogManager.ERROR_MESSAGE_LEVEL); return false; } catch (Exception e) { mLogger.log( "While loading entries into the map on line " + linecount + "\n", e, LogManager.ERROR_MESSAGE_LEVEL); return false; } } /** * Logs the message to a logging stream. Currently does not log to any stream. * * @param msg the message to be logged. */ protected void logMessage(String msg) { //mLogger.logMessage("[Shishir] Transformation Catalog : " + msg); } } ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/TestTransformationCatalog.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/catalog/transformation/TestTransformationCatalog.0000644000175000017500000001466411757531137033127 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
 * */ package edu.isi.pegasus.planner.catalog.transformation; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.catalog.transformation.classes.Arch; import edu.isi.pegasus.planner.catalog.transformation.classes.VDSSysInfo; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.classes.Os; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import java.util.Set; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; /** * A test program that shows how to load a Transformation Catalog, and query it for entries. * The configuration is picked up from the Properties. The following properties * need to be set *
 *      pegasus.catalog.transformation       File|Database
 *      pegasus.catalog.transformation.file  path to the File Based Transformation Catalog if File is being used.
 *  
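 *
 * For example (illustrative values only):
 * <pre>
 *      pegasus.catalog.transformation       File
 *      pegasus.catalog.transformation.file  /home/user/tc.data
 * </pre>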
* * To use the Database Transformation catalog the database connection parameters * can be specified by specifying the following properties *
 *   pegasus.catalog.transformation.db.url
 *   pegasus.catalog.transformation.db.user
 *   pegasus.catalog.transformation.db.password
 * 
 * The SQL schemas for this catalog can be found in the $PEGASUS_HOME/sql directory. * * The Pegasus Properties can be picked up from property files at various locations. * The priorities are explained below. *
 *   - The default path for the properties file is $PEGASUS_HOME/etc/properties.
 *   - A properties file if found at ${user.home}/.pegasusrc has higher priority.
 *   - Finally a user can specify the path to the properties file by specifying 
 *     the JVM property pegasus.user.properties . This has the highest priority.
 * 
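 *
 * A hedged invocation sketch (classpath setup omitted; the properties path
 * is illustrative):
 * <pre>
 *   java -Dpegasus.user.properties=/path/to/properties \
 *        edu.isi.pegasus.planner.catalog.transformation.TestTransformationCatalog
 * </pre>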
* * @author Karan Vahi * @version $Revision: 2572 $ */ public class TestTransformationCatalog { /** * The main program. */ public static void main( String[] args ) { TransformationCatalog catalog = null; PegasusProperties properties = PegasusProperties.nonSingletonInstance(); //setup the logger for the default streams. LogManager logger = LogManagerFactory.loadSingletonInstance( properties ); logger.logEventStart( "event.pegasus.catalog.transformation.test", "planner.version", Version.instance().toString() ); //set debug level to maximum //set if something is going wrong //logger.setLevel( LogManager.DEBUG_MESSAGE_LEVEL ); /* print out all the relevant site catalog properties that were specified*/ Properties replicaProperties = properties.matchingSubset( "pegasus.catalog.transformation", true ); System.out.println( "Transformation Catalog Properties specified are " + replicaProperties ); /* load the catalog using the factory */ try{ catalog = TransformationFactory.loadInstance( PegasusProperties.nonSingletonInstance()); } catch ( TransformationFactoryException e ){ System.out.println( e.convertException() ); System.exit( 2 ); } /* lets insert an entry into TransformationCatalog */ try{ /* the logical transformation is identified by a tuple * consisting of namespace , name and version * The namespace and version can be null. */ String namespace = "pegasus"; String lfn = "preprocess"; String version = null; String pfn = "/usr/pegasus/bin/keg"; String handle = "isi";// the site handle of the site where the data is TransformationCatalogEntry tce = new TransformationCatalogEntry( namespace, lfn, version ); tce.setPhysicalTransformation( pfn ); tce.setResourceId( handle ); tce.setType( TCType.INSTALLED ); //executable is installed tce.setVDSSysInfo( new VDSSysInfo( Arch.INTEL32, Os.LINUX, null, null ) ); //add an environment profile with the entry tce.addProfile( new Profile( Profile.ENV, "PEGASUS_HOME", "/usr/pegasus/bin" )); /* insert the entry into transformation catalog */ boolean added = (catalog.insert( tce ) == 1)?true : false; System.out.println( "Entry added " + added ); /* query for the entry we just entered */ List results = catalog.lookup( namespace, lfn, version, handle, TCType.INSTALLED ); if( results != null ){ System.out.println( "Results for LFN " + lfn ); for( TransformationCatalogEntry entry: results ){ System.out.println( entry ); } } /* remove the entry we added * deletes are only implemented for the database version */ //boolean deleted = catalog.removeByLFN( namespace, lfn, version, handle, TCType.INSTALLED ); //System.out.println( "Entry deleted " + added ); /* list all the entries remaining in the TC */ System.out.println( "\nListing all entries in the transformation catalog " ); List entries = catalog.getContents(); for( TransformationCatalogEntry e: entries ){ System.out.println( e ); } } catch ( Exception e ){ e.printStackTrace(); } finally{ /* close the connection */ try{ catalog.close(); }catch( Exception e ){} } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/0000755000175000017500000000000011757531667022734 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/NodeCollapser.java0000644000175000017500000002144411757531137026326 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.PCRelation; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.partitioner.Partitioner; import edu.isi.pegasus.planner.partitioner.ClustererCallback; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.cluster.ClustererFactory; import edu.isi.pegasus.planner.cluster.Clusterer; import edu.isi.pegasus.planner.cluster.ClustererException; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.parser.dax.DAX2LabelGraph; import java.util.Map; import java.util.HashMap; import java.util.Vector; import java.util.Iterator; import java.util.List; import java.util.StringTokenizer; /** * This collapses nodes of the same logical name scheduled on the same * pool into fewer fat nodes. The idea is to collapse jobs that take only a * few seconds to run into one larger job, reducing overall runtime through * fewer delays from a smaller number of Condor-Globus interactions. * Note that the merging of edges for the collapsed jobs is, at present, * not the best implementation; it will be revised once the graph structure * is corrected. * * @author Karan Vahi vahi@isi.edu * @author Mei-Hui Su mei@isi.edu * * @version $Revision: 2590 $ */ public class NodeCollapser extends Engine { /** * The handle to the logger object. */ protected LogManager mLogger; /** * The directory where the stdin files of the fat jobs are created. * It should be the submit file directory that the user specifies at * runtime. */ private String mDirectory; /** * The internal map that contains the adjacency list representation of the * graph referred to by the workflow. This is temporary till the main ADag * data structure is corrected. */ private Map mGraph; /** * The bag of initialization objects. */ private PegasusBag mBag; /** * The overloaded constructor. * * @param bag the bag of initialization objects. * */ public NodeCollapser( PegasusBag bag ) { super( bag ); mBag = bag; mLogger = bag.getLogger(); mGraph = new HashMap(); mPOptions = bag.getPlannerOptions(); setDirectory( mPOptions.getSubmitDirectory() ); } /** * Sets the directory where the stdin files are to be generated. * * @param directory the path to the directory to which it needs to be set. */ public void setDirectory(String directory){ mDirectory = (directory == null)? //user did not specify a submit file dir //use the default i.e. the current directory ".": //user specified directory picked up directory; } /** * Clusters the jobs in the workflow. It applies a series of clustering * actions on the graph, as specified by the user at runtime. * * For each clustering action, the graph is first partitioned, * and then sent to the appropriate clustering module for clustering. * * @param dag the scheduled dag that has to be clustered. 
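 * <p>
 * Illustrative note: the clustering technique option is a comma separated
 * list, so a value such as "label,horizontal" (token names are placeholders
 * for whatever ClustererFactory accepts) triggers one partition-and-cluster
 * pass per token.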
 * * @return ADag containing the collapsed scheduled workflow. * * @throws ClustererException in case of error while clustering */ public ADag cluster( ADag dag ) throws ClustererException{ //load the appropriate partitioner and clusterer String types = mPOptions.getClusteringTechnique(); //sanity check if( types == null){ //return the original DAG only mLogger.log( "No clustering actions specified. Returning original DAG", LogManager.DEBUG_MESSAGE_LEVEL); return dag; } //tokenize and get the types ADag clusteredDAG = dag; for( StringTokenizer st = new StringTokenizer( types, ","); st.hasMoreTokens(); ){ clusteredDAG = this.cluster( clusteredDAG, st.nextToken() ); } return clusteredDAG; } /** * Clusters the jobs in the workflow. The graph is first partitioned, * and then sent to the appropriate clustering module for clustering. * * @param dag the scheduled dag that has to be clustered. * @param type the type of clustering to do. * * @return ADag containing the collapsed scheduled workflow. * * @throws ClustererException in case of error while clustering */ public ADag cluster( ADag dag, String type ) throws ClustererException{ //convert the graph representation to a //more manageable and traversable data structure that is sent //to the partitioning stuff Map nameIDMap = new HashMap(); Job job; for( Iterator it = dag.vJobSubInfos.iterator(); it.hasNext(); ){ //pass the jobs to the callback job = (Job)it.next(); nameIDMap.put( job.getName(), job.getLogicalID() ); } mGraph = edgeList2Graph( dag.dagInfo.relations, nameIDMap ); //we need to build up a partitioner graph structure to do //the partitioning on the graph. Use the callback mechanism //developed for the partitioner stuff and populate it //from the existing graph structure DAX2LabelGraph d2g = new DAX2LabelGraph( mProps, mPOptions.getDAX() ); //set the appropriate key that is to be used for picking up the labels d2g.setLabelKey( mProps.getClustererLabelKey() ); //no need to pass any attributes d2g.cbDocument( null ); for( Iterator it = dag.vJobSubInfos.iterator(); it.hasNext(); ){ //pass the jobs to the callback d2g.cbJob( (Job)it.next() ); } //pass the relations for( Iterator it = mGraph.entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry)it.next(); d2g.cbParents( (String)entry.getKey(), (List)entry.getValue() ); } //finished populating d2g.cbDone(); //get the graph map mGraph = (Map)d2g.getConstructedObject(); //get the fake dummy root node GraphNode root = (GraphNode)mGraph.get( DAX2LabelGraph.DUMMY_NODE_ID ); Partitioner p = ClustererFactory.loadPartitioner( mProps, type, root, mGraph ); mLogger.log( "Partitioner loaded is " + p.description(), LogManager.CONFIG_MESSAGE_LEVEL ); Clusterer c = ClustererFactory.loadClusterer( dag, mBag, type ); mLogger.log( "Clusterer loaded is "+ c.description(), LogManager.CONFIG_MESSAGE_LEVEL ); ClustererCallback cb = new ClustererCallback(); cb.initialize( mProps, c); //start the partitioner and let the fun begin! p.determinePartitions( cb ); return c.getClusteredDAG(); } /** * Returns an adjacency list representation of the graph referred to by * the list of edges. The map contains an adjacency list with the key as a child * and the value as the list of its parents. * * @param relations vector of PCRelation objects describing the * parent-child edges. * @param nameIDMap map with the key as the jobname and value as the * logical id * * @return Map. 
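 * <p>
 * Illustrative sketch: parent->child edges { A->C, B->C } whose jobs carry
 * logical ids a, b, c come back as { c : [ a, b ] }, i.e. a child id mapped
 * to the list of its parents' ids.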
*/ protected Map edgeList2Graph(Vector relations, Map nameIDMap){ Map map = new HashMap(); List l = null; for( Iterator it = relations.iterator(); it.hasNext(); ){ PCRelation rel = (PCRelation)it.next(); if(map.containsKey(nameIDMap.get(rel.child))){ l = (List)map.get(nameIDMap.get(rel.child)); l.add(nameIDMap.get(rel.parent)); } else{ l = new java.util.LinkedList(); l.add( nameIDMap.get(rel.parent)); map.put(nameIDMap.get(rel.child),l); } } return map; } }//end of NodeCollapser pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/InterPoolEngine.java0000644000175000017500000010221211757531137026626 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.selector.SiteSelector; import edu.isi.pegasus.planner.selector.site.SiteSelectorFactory; import edu.isi.pegasus.planner.selector.TransformationSelector; import edu.isi.pegasus.planner.namespace.Hints; import edu.isi.pegasus.planner.provenance.pasoa.XMLProducer; import edu.isi.pegasus.planner.provenance.pasoa.producer.XMLProducerFactory; import edu.isi.pegasus.planner.provenance.pasoa.PPS; import edu.isi.pegasus.planner.provenance.pasoa.pps.PPSFactory; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.transformation.Mapper; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.transformation.classes.TransformationStore; import edu.isi.pegasus.planner.common.PegRandom; import edu.isi.pegasus.planner.transfer.SLS; import edu.isi.pegasus.planner.transfer.sls.SLSFactory; import java.io.File; import java.util.Iterator; import java.util.List; import java.util.ArrayList; import java.util.Set; import java.util.Vector; /** * This engine calls out to the Site Selector selected by the user and maps the * jobs in the workflow to the execution pools. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 5256 $ * */ public class InterPoolEngine extends Engine implements Refiner { /** * ADag object corresponding to the Dag whose jobs we want to schedule. * */ private ADag mDag; /** * Set of the execution pools which the user has specified. */ private Set mExecPools; /** * Handle to the site selector. */ private SiteSelector mSiteSelector; /** * The handle to the transformation selector, that ends up selecting * what transformations to pick up. 
*/ private TransformationSelector mTXSelector; /** * The handle to the transformation catalog mapper object that caches the * queries to the transformation catalog, and indexes them according to * lfn's. There is no purge policy in the TC Mapper, so per se it is not a * classic cache. */ private Mapper mTCMapper; /** * The XML Producer object that records the actions. */ private XMLProducer mXMLStore; /** * The handle to the SLS implementor */ private SLS mSLS; /** * A boolean indicating whether to have worker node execution or not. */ private boolean mWorkerNodeExecution; /** * Handle to the transformation store that stores the transformation catalog * user specifies in the DAX */ protected TransformationStore mDAXTransformationStore; /** * Default constructor. * * * @param bag the bag of initialization objects. */ public InterPoolEngine( PegasusBag bag ) { super( bag ); mDag = new ADag(); mExecPools = new java.util.HashSet(); //initialize the transformation mapper mTCMapper = Mapper.loadTCMapper( mProps.getTCMapperMode(), mBag ); mBag.add( PegasusBag.TRANSFORMATION_MAPPER, mTCMapper ); mTXSelector = null; mXMLStore = XMLProducerFactory.loadXMLProducer( mProps ); mWorkerNodeExecution = mProps.executeOnWorkerNode(); if( mWorkerNodeExecution ){ //load SLS mSLS = SLSFactory.loadInstance( mBag ); } } /** * Overloaded constructor. * * @param aDag the ADag object corresponding to the Dag * for which we want to determine on which pools to run * the nodes of the Dag. * @param bag the bag of initialization objects * */ public InterPoolEngine( ADag aDag, PegasusBag bag ) { super( bag ); mDag = aDag; mExecPools = (Set)mPOptions.getExecutionSites(); mLogger.log( "List of executions sites is " + mExecPools, LogManager.DEBUG_MESSAGE_LEVEL ); mTCMapper = Mapper.loadTCMapper( mProps.getTCMapperMode(), mBag ); mBag.add( PegasusBag.TRANSFORMATION_MAPPER, mTCMapper ); mTXSelector = null; mXMLStore = XMLProducerFactory.loadXMLProducer( mProps ); this.mDAXTransformationStore = aDag.getTransformationStore(); mWorkerNodeExecution = mProps.executeOnWorkerNode(); if( mWorkerNodeExecution ){ //load SLS mSLS = SLSFactory.loadInstance( mBag ); } } /** * Returns the bag of intialization objects. * * @return PegasusBag */ public PegasusBag getPegasusBag(){ return mBag; } /** * Returns a reference to the workflow that is being refined by the refiner. * * * @return ADAG object. */ public ADag getWorkflow(){ return this.mDag; } /** * Returns a reference to the XMLProducer, that generates the XML fragment * capturing the actions of the refiner. This is used for provenace * purposes. * * @return XMLProducer */ public XMLProducer getXMLProducer(){ return this.mXMLStore; } /** * This is where the callout to the Partitioner should take place, that * partitions the workflow into clusters and sends to the site selector only * those list of jobs that are ready to be scheduled. * */ public void determineSites() { Job job; //at present we schedule the whole workflow at once List jobs = convertToList( mDag.vJobSubInfos ); List pools = convertToList( mExecPools ); //going through all the jobs making up the Adag, to do the physical mapping scheduleJobs( mDag, pools ); } /** * It schedules a list of jobs on the execution pools by calling out to the * site selector specified. It is upto to the site selector to determine if * the job can be run on the list of sites passed. * * @param dag the abstract workflow. * @param sites the list of execution sites, specified by the user. 
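 * <p>
 * Flow sketch, as implemented below: transformations carried in the DAX are
 * first pushed into the transformation catalog, a SiteSelector is loaded via
 * SiteSelectorFactory, mapWorkflow( dag, sites ) maps the whole workflow,
 * and each mapped job is then validated and has its profiles incorporated.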
* */ public void scheduleJobs( ADag dag, List sites ) { //we iterate through the DAX Transformation Store and update //the transformation catalog with any transformation specified. for( TransformationCatalogEntry entry : this.mDAXTransformationStore.getAllEntries() ) { try { //insert an entry into the transformation catalog //for the mapper to pick up later on mLogger.log("Addding entry into transformation catalog " + entry, LogManager.DEBUG_MESSAGE_LEVEL); if (mTCHandle.insert(entry, false) != 1) { mLogger.log("Unable to add entry to transformation catalog " + entry, LogManager.WARNING_MESSAGE_LEVEL); } } catch (Exception ex) { throw new RuntimeException("Exception while inserting into TC in Interpool Engine " + ex ); } } mSiteSelector = SiteSelectorFactory.loadInstance( mBag ); mSiteSelector.mapWorkflow( dag, sites ); int i = 0; StringBuffer error; //load the PPS implementation PPS pps = PPSFactory.loadPPS( this.mProps ); mXMLStore.add( "" ); //call the begin workflow method try{ pps.beginWorkflowRefinementStep( this, PPS.REFINEMENT_SITE_SELECT, false ); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception", e ); } //clear the XML store mXMLStore.clear(); //Iterate through the jobs and hand them to //the site selector if required String site ; for( Iterator it = dag.jobIterator(); it.hasNext(); i++ ){ Job job = ( Job ) it.next(); site = job.getSiteHandle(); mLogger.log( "Mapping Job " + job.getName(), LogManager.DEBUG_MESSAGE_LEVEL ); //check if the user has specified any hints in the dax incorporateHint(job, Hints.JOBMANAGER_UNIVERSE_KEY ); if (incorporateHint(job, "executionPool")) { //i++; incorporateProfiles(job); //set the staging site for the job job.setStagingSiteHandle( getStagingSite( job ) ); continue; } //set the staging site for the job job.setStagingSiteHandle( getStagingSite( job ) ); if ( site == null ) { error = new StringBuffer(); error.append( "Site Selector could not map the job " ). append( job.getCompleteTCName() ).append( " to any of the execution sites " ). append( sites ).append( " using the Transformation Mapper (" ).append( this.mTCMapper.getMode() ). append( ")" ). append( "\n" ). append( "\nThis error is most likely due to an error in the transformation catalog." ). append( "\nMake sure that the ").append( job.getCompleteTCName() ).append(" transformation" ). append( "\nexists for the site you are trying to plan for, and that OS name/architechture match." ). append( "\n" ); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( error.toString() ); } /* JIRA PM-277 String jm = job.getJobManager(); jm = ( (jm == null) || jm.length() == 0 ) ? null : jm; */ if ( site.length() == 0 || site.equalsIgnoreCase( SiteSelector.SITE_NOT_FOUND ) ) { error = new StringBuffer(); error.append( "Site Selector (" ).append( mSiteSelector.description() ). append( ") could not map job " ).append( job.getCompleteTCName() ). append( " to any site" ); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( error.toString() ); } /* JIRA PM-277 job.setJobManager( jm == null ? getJobManager( site, job.getUniverse() ) : jm ); */ mLogger.log("Mapped job " + job.jobName + " to pool " + site, LogManager.DEBUG_MESSAGE_LEVEL); //incorporate the profiles and //do transformation selection if ( !incorporateProfiles(job) ){ error = new StringBuffer(); error.append( "Profiles incorrectly incorporated for "). 
append( job.getCompleteTCName()); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( error.toString() ); } /* //modify the jobs if required for worker node execution if( mWorkerNodeExecution ){ mSLS.modifyJobForFirstLevelStaging( job, mPOptions.getSubmitDirectory(), mSLS.getSLSInputLFN( job ), mSLS.getSLSOutputLFN( job ) ); } */ //log actions as XML fragment try{ logRefinerAction(job); pps.siteSelectionFor( job.getName(), job.getName() ); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception", e ); } }//end of mapping all jobs try{ pps.endWorkflowRefinementStep( this ); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception", e ); } } /** * Returns the staging site to be used for a job. If a staging site is not * determined from the options it is set to be the execution site for the job * * @param job the job for which to determine the staging site * * @return the staging site */ public String getStagingSite( Job job ){ String ss = this.mPOptions.getStagingSite( job.getSiteHandle() ); return (ss == null) ? job.getSiteHandle(): ss; } /** * Incorporates the profiles from the various sources into the job. * The profiles are incorporated in the order pool, transformation catalog, * and properties file, with the profiles from the properties file having * the highest priority. * It is here where the transformation selector is called to select * amongst the various transformations returned by the TC Mapper. * * @param job the job into which the profiles have been incorporated. * * @return true profiles were successfully incorporated. * false otherwise */ private boolean incorporateProfiles(Job job){ TransformationCatalogEntry tcEntry = null; List tcEntries = null; String siteHandle = job.getSiteHandle(); String stagingSiteHandle = job.getStagingSiteHandle(); mLogger.log( "For job " + job.getName() + " updating profiles from site " + job.getSiteHandle() , LogManager.TRACE_MESSAGE_LEVEL ); //the profile information from the pool catalog needs to be //assimilated into the job. job.updateProfiles( mSiteStore.lookup( siteHandle ).getProfiles() ); //we now query the TCMapper only if there is no hint available //by the user in the DAX 3.0 . if( job.getRemoteExecutable() == null || job.getRemoteExecutable().length() == 0 ){ //query the TCMapper and get hold of all the valid TC //entries for that site tcEntries = mTCMapper.getTCList(job.namespace,job.logicalName, job.version,siteHandle); StringBuffer error; if(tcEntries != null && tcEntries.size() > 0){ //select a tc entry calling out to //the transformation selector tcEntry = selectTCEntry(tcEntries,job,mProps.getTXSelectorMode()); if(tcEntry == null){ error = new StringBuffer(); error.append( "Transformation selection operation for job "). append( job.getCompleteTCName() ).append(" for site " ). append( job.getSiteHandle() ).append( " unsuccessful." ); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( error.toString() ); } } else{ //mismatch. should be unreachable code!!! 
//as error should have been thrown in the site selector mLogger.log( "Site selector mapped job " + job.getCompleteTCName() + " to pool " + job.executionPool + " for which no mapping exists in " + "transformation mapper.",LogManager.FATAL_MESSAGE_LEVEL); return false; } } else{ //create a transformation catalog entry object //corresponding to the executable set String executable = job.getRemoteExecutable(); tcEntry = new TransformationCatalogEntry(); tcEntry.setLogicalTransformation( job.getTXNamespace(), job.getTXName(), job.getTXVersion() ); tcEntry.setResourceId( job.getSiteHandle() ); tcEntry.setPhysicalTransformation( executable ); //hack to determine whether an executable is //installed or static binary tcEntry.setType( executable.startsWith( "/" ) ? TCType.INSTALLED: TCType.STAGEABLE ); } FileTransfer fTx = null; //something seriously wrong in this code line below. //Need to verify further after more runs. (Gaurang 2-7-2006). // tcEntry = (TransformationCatalogEntry) tcEntries.get(0); if(tcEntry.getType().equals( TCType.STAGEABLE )){ SiteCatalogEntry site = mSiteStore.lookup( stagingSiteHandle ); //construct a file transfer object and add it //as an input file to the job in the dag fTx = new FileTransfer( job.getStagedExecutableBaseName(), job.jobName); fTx.setType(FileTransfer.EXECUTABLE_FILE); //the physical transformation points to //guc or the user specified transfer mechanism //accessible url fTx.addSource( tcEntry.getResourceId(), tcEntry.getPhysicalTransformation()); //the destination url is the working directory for //pool where it needs to be staged to //always creating a third party transfer URL //for the destination. String stagedPath = mSiteStore.getInternalWorkDirectory( job, true ) + File.separator + job.getStagedExecutableBaseName(); fTx.addDestination( stagingSiteHandle, site.getHeadNodeFS().selectScratchSharedFileServer().getURLPrefix() + stagedPath); //added in the end now after dependant executables //have been handled Karan May 31 2007 //job.addInputFile(fTx); /* Karan April 27, 2012. No longer required if( mWorkerNodeExecution ){ //do not specify the full path as we do not know worker //node directory if( mSLS.doesCondorModifications() ){ //we need to take the basename of the source url //as condor file transfer mech does not allow to //specify destination filenames job.setRemoteExecutable( new File( tcEntry.getPhysicalTransformation() ).getName() ); } else{ //do this only when kickstart executable existance check is fixed //Karan Nov 30 2007 //job.setRemoteExecutable(job.getStagedExecutableBaseName()); job.setRemoteExecutable( stagedPath ); } } else{ //the jobs executable is the path to where //the executable is going to be staged job.executable = stagedPath; } */ job.setRemoteExecutable( stagedPath ); //setting the job type of the job to //denote the executable is being staged //job.setJobType(Job.STAGED_COMPUTE_JOB); job.setExecutableStagingForJob( true ); } else{ //the executable needs to point to the physical //path gotten from the selected transformantion //entry job.executable = tcEntry.getPhysicalTransformation(); } //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 job.addNotifications( tcEntry ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. job.updateProfiles(tcEntry); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. 
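        //(illustrative note, not from the source: if the site catalog, the
        // transformation catalog and the properties file all set the same
        // env profile, the properties file value wins, because this call is
        // the last one in the site -> transformation catalog -> properties
        // sequence of updateProfiles() calls)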
job.updateProfiles(mProps); //handle dependant executables handleDependantExecutables( job ); if( fTx != null ){ //add the main executable back as input job.addInputFile( fTx); } return true; } /** * Handles the dependant executables that need to be staged. * * @param job Job * */ private void handleDependantExecutables( Job job ){ String siteHandle = job.getSiteHandle(); String stagingSiteHandle = job.getStagingSiteHandle(); boolean installedTX = !( job.userExecutablesStagedForJob() ); List dependantExecutables = new ArrayList(); for (Iterator it = job.getInputFiles().iterator(); it.hasNext(); ) { PegasusFile input = (PegasusFile) it.next(); if (input.getType() == PegasusFile.EXECUTABLE_FILE) { //if the main executable is installed, just remove the executable //file requirement from the input files if( installedTX ){ it.remove(); continue; } //query the TCMapper and get hold of all the valid TC //entries for that site String lfn[] = Separator.split( input.getLFN() ); List tcEntries = mTCMapper.getTCList( lfn[0], lfn[1], lfn[2], siteHandle); StringBuffer error; if (tcEntries != null && tcEntries.size() > 0) { //select a tc entry calling out to //the transformation selector , we only should stage //never pick any installed one. TransformationCatalogEntry tcEntry = selectTCEntry(tcEntries, job, "Staged" ); if (tcEntry == null) { error = new StringBuffer(); error.append("Transformation selection operation for job "). append(job.getCompleteTCName()).append(" for site "). append(job.getSiteHandle()).append(" unsuccessful."); mLogger.log(error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException(error.toString()); } // tcEntry = (TransformationCatalogEntry) tcEntries.get(0); if (tcEntry.getType().equals(TCType.STAGEABLE )) { // SiteInfo site = mPoolHandle.getPoolEntry(siteHandle, // "vanilla"); SiteCatalogEntry site = mSiteStore.lookup( stagingSiteHandle ); //construct a file transfer object and add it //as an input file to the job in the dag //a disconnect between the basename and the input lfn. String basename = Job.getStagedExecutableBaseName( lfn[0], lfn[1], lfn[2] ); FileTransfer fTx = new FileTransfer( basename, job.jobName ); fTx.setType(FileTransfer.EXECUTABLE_FILE); //the physical transformation points to //guc or the user specified transfer mechanism //accessible url fTx.addSource(tcEntry.getResourceId(), tcEntry.getPhysicalTransformation()); //the destination url is the working directory for //pool where it needs to be staged to //always creating a third party transfer URL //for the destination. 
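                    //(illustrative shape of the destination URL built below,
                    // with a hypothetical host and scratch directory:
                    // gsiftp://staging.example.org/scratch/wf/run0001/<staged basename>)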
// String stagedPath = mPoolHandle.getExecPoolWorkDir(job) // + File.separator + basename; // fTx.addDestination(siteHandle, // site.getURLPrefix(false) + stagedPath); String stagedPath = mSiteStore.getInternalWorkDirectory( job, true ) + File.separator + basename; fTx.addDestination(siteHandle, site.getHeadNodeFS().selectScratchSharedFileServer().getURLPrefix() + stagedPath); dependantExecutables.add( fTx ); //the jobs executable is the path to where //the executable is going to be staged //job.executable = stagedPath; mLogger.log( "Dependant Executable " + input.getLFN() + " being staged from " + fTx.getSourceURL(), LogManager.DEBUG_MESSAGE_LEVEL ); } } it.remove(); } //end of if file is exectuable } //add all the dependant executable FileTransfers back as input files for( Iterator it = dependantExecutables.iterator(); it.hasNext(); ){ FileTransfer file = (FileTransfer)it.next(); job.addInputFile( file ); } } /** * Calls out to the transformation selector to select an entry from a list * of valid transformation catalog entries. * * @param entries list of TransformationCatalogEntry objects. * @param job the job. * @param selectors the selector to be called * * @return the selected TransformationCatalogEntry object * null when transformation selector is unable to select any * transformation */ private TransformationCatalogEntry selectTCEntry(List entries, Job job, String selector){ //load the transformation selector. different //selectors may end up being loaded for different jobs. mTXSelector = TransformationSelector.loadTXSelector(selector); entries = mTXSelector.getTCEntry(entries); return (entries == null || entries.size() == 0)? null: entries.size() > 1? //select a random entry (TransformationCatalogEntry) entries.get( PegRandom.getInteger( entries.size() - 1 )): //return the first one (TransformationCatalogEntry) entries.get(0); } /** * It returns a jobmanager for the given pool. * * @param site the name of the pool. * @param universe the universe for which you need the scheduler on that * particular pool. * * @return the jobmanager for that pool and universe. * null if not found. */ private String getJobManager( String site, String universe) { SiteCatalogEntry p = mSiteStore.lookup( site ); GridGateway jm = ( p == null )? null : p.selectGridGateway( GridGateway.JOB_TYPE.valueOf( universe ) ); String result = ( jm == null ) ? null : jm.getContact( ); if ( result == null) { StringBuffer error = new StringBuffer(); error = new StringBuffer(); error.append( "Could not find a jobmanager at site ("). append( site ).append( ") for universe " ). append( universe ); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( error.toString() ); } return result; } /** * It incorporates a hint in the namespace to the job. After the hint * is incorporated the key is deleted from the hint namespace for that * job. * * @param job the job that needs the hint to be incorporated. * @param key the key in the hint namespace. * * @return true the hint was successfully incorporated. * false the hint was not set in job or was not successfully * incorporated. 
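     *
     * <p>
     * A sketch of typical use (illustrative, mirroring the call made in
     * scheduleJobs() of this class):
     * <pre>
     *   if ( incorporateHint( job, "executionPool" ) ) {
     *       // the job was pinned to the site given in the DAX hints,
     *       // and the key was removed from the job's hints namespace
     *   }
     * </pre>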
*/ private boolean incorporateHint(Job job, String key) { //sanity check if (key.length() == 0) { return false; } switch (key.charAt(0)) { case 'e': if (key.equals("executionPool") && job.hints.containsKey(key)) { //user has overridden in the dax which execution Pool to use job.executionPool = (String) job.hints.removeKey( "executionPool"); incorporateHint( job, "globusScheduler"); incorporateHint( job, Hints.PFN_HINT_KEY ); return true; } break; /* case 'g': if (key.equals("globusScheduler")) { if( job.hints.containsKey( Hints.GLOBUS_SCHEDULER_KEY ) ){ //user specified the jobmanager on which they want //the job to execute on job.globusScheduler = (String) job.hints.removeKey("globusScheduler"); } else{ //try to lookup in the site catalog SiteCatalogEntry s = mSiteStore.lookup( job.getSiteHandle() ); if( s == null ){ throw new RuntimeException( "Unable to find entry for site in site catalog " + job.getSiteHandle() ); } GridGateway gw = s.selectGridGateway( GridGateway.JOB_TYPE.valueOf(job.condorUniverse)); if( gw == null ){ throw new RuntimeException( "No GridGateway specified for compute jobs for site " + job.getSiteHandle() ); } job.globusScheduler = gw.getContact(); } return true; } break; */ case 'p': if (key.equals( Hints.PFN_HINT_KEY )) { job.setRemoteExecutable( job.hints.containsKey( Hints.PFN_HINT_KEY ) ? (String) job.hints.removeKey( Hints.PFN_HINT_KEY ) : null ); return true; } break; case 'j': if (key.equals( Hints.JOBMANAGER_UNIVERSE_KEY )) { job.condorUniverse = job.hints.containsKey( Hints.JOBMANAGER_UNIVERSE_KEY ) ? (String) job.hints.removeKey( Hints.JOBMANAGER_UNIVERSE_KEY ) : job.condorUniverse; return true; } break; default: break; } return false; } /** * Converts a Vector to a List. It only copies by reference. * @param v Vector * @return a ArrayList */ public List convertToList(Vector v) { return new java.util.ArrayList(v); } /** * Converts a Set to a List. It only copies by reference. * @param s Set * @return a ArrayList */ public List convertToList(Set s) { return new java.util.ArrayList(s); } /** * Logs the action taken by the refiner on a job as a XML fragment in * the XML Producer. * * @param job the Job containing the job that was mapped * to a site. */ protected void logRefinerAction( Job job ){ StringBuffer sb = new StringBuffer(); sb.append( "\t" ); sb.append( "\n" ).append( "\t\t" ); sb.append( "" ).append( job.getSiteHandle() ).append( "" ); sb.append( "\n" ).append( "\t\t" ); sb.append( "" ).append( job.getJobManager() ).append( "" ); sb.append( "\n" ); sb.append( "\t" ); sb.append( "\n" ); mXMLStore.add( sb.toString() ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/ReplicaCatalogBridge.java0000644000175000017500000007560411757531137027572 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.catalog.classes.Profiles; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.classes.ReplicaStore; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.ENV; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.replica.ReplicaFactory; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.common.util.Separator; import java.io.File; import java.io.FileWriter; import java.util.Collection; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Properties; import java.util.Set; import java.util.StringTokenizer; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.namespace.Dagman; /** * This coordinates the look up to the Replica Location Service, to determine * the logical to physical mappings. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4736 $ * */ public class ReplicaCatalogBridge extends Engine //for the time being. { /** * Default category for registration jobs */ public static final String DEFAULT_REGISTRATION_CATEGORY_KEY = "registration"; /** * The transformation namespace for the regostration jobs. */ public static final String RC_TRANSFORMATION_NS = "pegasus"; /** * The logical name of the transformation used. */ public static final String RC_TRANSFORMATION_NAME = "rc-client"; /** * The logical name of the transformation used. */ public static final String RC_TRANSFORMATION_VERSION = null; /** * The derivation namespace for the transfer jobs. */ public static final String RC_DERIVATION_NS = "pegasus"; /** * The derivation name for the transfer jobs. */ public static final String RC_DERIVATION_NAME = "rc-client"; /** * The version number for the derivations for registration jobs. */ public static final String RC_DERIVATION_VERSION = "1.0"; /** * The name of the Replica Catalog Implementer that serves as the source for * cache files. */ public static final String CACHE_REPLICA_CATALOG_IMPLEMENTER = "SimpleFile"; /** * The name of the source key for Replica Catalog Implementer that serves as * cache */ public static final String CACHE_REPLICA_CATALOG_KEY = "file"; /** * The name of the key that disables writing back to the cache file. * Designates a static file. i.e. read only */ public static final String CACHE_READ_ONLY_KEY = "read.only"; /** * The name of the URL key for the replica catalog impelementer to be picked * up. */ public static final String REPLICA_CATALOG_URL_KEY = "url"; /** * The handle to the main Replica Catalog. */ private ReplicaCatalog mReplicaCatalog; /** * The Vector of String objects containing the logical * filenames of the files whose locations are to be searched in the * Replica Catalog. 
*/ protected Set mSearchFiles ; /** * A boolean variable to desingnate whether the RLI queried was down or not. * By default it is up, unless it is set to true explicitly. */ private boolean mRCDown; /** * The replica store in which we store all the results that are queried from * the main replica catalog. */ private ReplicaStore mReplicaStore; /** * The replica store in which we store all the results that are queried from * the cache replica catalogs. */ private ReplicaStore mCacheStore; /** * The DAX Replica Store. */ private ReplicaStore mDAXReplicaStore; /** * The inherited Replica Store */ private ReplicaStore mInheritedReplicaStore; /** * A boolean indicating whether the cache file needs to be treated as a * replica catalog or not. */ private boolean mTreatCacheAsRC; /** * The namespace object holding the environment variables for local * pool. */ private ENV mLocalEnv; /** * The default tc entry. */ private TransformationCatalogEntry mDefaultTCRCEntry; /** * A boolean indicating whether the attempt to create a default tc entry * has happened or not. */ private boolean mDefaultTCRCCreated; /** * The DAG being worked upon. */ private ADag mDag; /** * The overloaded constructor. * * @param dag the workflow that is being worked on. * @param bag of initialization objects. * */ public ReplicaCatalogBridge( ADag dag , // PegasusProperties properties, // PlannerOptions options PegasusBag bag ) { super( bag ); this.initialize( dag, bag ); } /** * Intialises the refiner. * * @param dag the workflow that is being worked on. * @param bag the bag of Pegasus initialization objects * */ public void initialize( ADag dag , PegasusBag bag ){ this.mDAXReplicaStore = dag.getReplicaStore(); this.initialize( dag, bag.getPegasusProperties(), bag.getPlannerOptions() ); } /** * Intialises the refiner. * * @param dag the workflow that is being worked on. * @param properties the properties passed to the planner. * @param options the options passed to the planner at runtime. * */ @SuppressWarnings("static-access") public void initialize( ADag dag , PegasusProperties properties, PlannerOptions options ){ mDag = dag; mProps = properties; mPOptions = options; mRCDown = false; mCacheStore = new ReplicaStore(); mInheritedReplicaStore = new ReplicaStore(); mTreatCacheAsRC = mProps.treatCacheAsRC(); mDefaultTCRCCreated = false; //converting the Vector into vector of //strings just containing the logical //filenames mSearchFiles = dag.dagInfo.getLFNs( options.getForce() ); //load the local environment variable //from pool config and property file mLocalEnv = loadLocalEnvVariables(); //only for windward for time being properties.setProperty( "pegasus.catalog.replica.dax.id", dag.getAbstractWorkflowName() ); properties.setProperty( "pegasus.catalog.replica.mrc.windward.dax.id", dag.getAbstractWorkflowName() ); try { //make sure that RLS can be loaded from local environment //Karan May 1 2007 mReplicaCatalog = null; if ( mSearchFiles != null && !mSearchFiles.isEmpty() ){ mReplicaCatalog = ReplicaFactory.loadInstance(properties); //load all the mappings. mReplicaStore = new ReplicaStore( mReplicaCatalog.lookup( mSearchFiles ) ); } } catch ( Exception ex ) { String msg = "Problem while connecting with the Replica Catalog: "; //set the flag to denote RLI is down mRCDown = true; //mReplicaStore = new ReplicaStore(); //exit if there is no cache overloading specified. 
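            //(descriptive note: the checks below allow planning to continue
            // without the main replica catalog, as long as file locations can
            // be supplied by cache files, by locations inherited from an
            // outer level DAX, or by the replica store of the current DAX)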
if ( options.getCacheFiles().isEmpty() && //no cache files specified options.getInheritedRCFiles().isEmpty() && //no files locations inherited from outer level DAX this.mDAXReplicaStore.isEmpty() ) { //no file locations in current DAX mLogger.log( msg + ex.getMessage(),LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( msg , ex ); } else{ mLogger.log( msg + ex.getMessage(),LogManager.DEBUG_MESSAGE_LEVEL ); } } finally{ //set replica store to an empty store if required mReplicaStore = ( mReplicaStore == null ) ?new ReplicaStore() : mReplicaStore; } if( mReplicaCatalog != null ){ //specify maxjobs to 1 for File based replica catalog //JIRA PM-377 if( mReplicaCatalog instanceof edu.isi.pegasus.planner.catalog.replica.impl.SimpleFile ){ //we set the default category value to 1 //in the properties String key = getDefaultRegistrationMaxJobsPropertyKey(); mLogger.log( "Setting property " + key + " to 1 to set max jobs for registrations jobs category", LogManager.DEBUG_MESSAGE_LEVEL ); mProps.setProperty( key, "1" ); } } //incorporate the caching if any if ( !options.getCacheFiles().isEmpty() ) { loadCacheFiles( options.getCacheFiles() ); } //load inherited replica store if ( !options.getInheritedRCFiles().isEmpty() ) { this.loadInheritedReplicaStore( options.getInheritedRCFiles() ); } } /** * To close the connection to replica services. This must be defined in the * case where one has not done a singleton implementation. In other * cases just do an empty implementation of this method. */ public void closeConnection() { if ( mReplicaCatalog != null ) { mReplicaCatalog.close(); } } /** * Closes the connection to the rli. */ public void finalize() { this.closeConnection(); } /** * This returns the files for which mappings exist in the Replica Catalog. * This should return a subset of the files which are * specified in the mSearchFiles, while getting an instance to this. * * @return a Set of logical file names as String objects, for * which logical to physical mapping exists. * * @see #mSearchFiles */ public Set getFilesInReplica() { //check if any exist in the cache Set lfnsFound = mCacheStore.getLFNs( mSearchFiles ); mLogger.log(lfnsFound.size() + " entries found in cache of total " + mSearchFiles.size(), LogManager.DEBUG_MESSAGE_LEVEL); //check in the main replica catalog if ( this.mDAXReplicaStore.isEmpty() && ( mRCDown || mReplicaCatalog == null )) { mLogger.log("Replica Catalog is either down or connection to it was never opened ", LogManager.WARNING_MESSAGE_LEVEL); return lfnsFound; } //lookup from the DAX Replica Store lfnsFound.addAll( this.mDAXReplicaStore.getLFNs() ); //lookup from the inherited Replica Store lfnsFound.addAll( this.mInheritedReplicaStore.getLFNs( mSearchFiles ) ); //look up from the the main replica catalog lfnsFound.addAll( mReplicaStore.getLFNs() ); return lfnsFound; } /** * Returns all the locations as returned from the Replica Lookup Mechanism. * * @param lfn The name of the logical file whose PFN mappings are * required. 
* * @return ReplicaLocation containing all the locations for that LFN * * @see org.griphyn.cPlanner.classes.ReplicaLocation */ public ReplicaLocation getFileLocs( String lfn ) { ReplicaLocation rl = retrieveFromCache( lfn ); //first check from cache if(rl != null && !mTreatCacheAsRC){ mLogger.log( "Location of file " + rl + " retrieved from cache" , LogManager.DEBUG_MESSAGE_LEVEL); return rl; } //we prefer location in DAX over the inherited replica store if( this.mDAXReplicaStore.containsLFN( lfn ) ){ return this.mDAXReplicaStore.getReplicaLocation(lfn); } //we prefer location in inherited replica store over replica catalog if( this.mInheritedReplicaStore.containsLFN(lfn) ){ return this.mInheritedReplicaStore.getReplicaLocation(lfn); } ReplicaLocation rcEntry = mReplicaStore.getReplicaLocation( lfn ); if (rl == null) { rl = rcEntry; } else{ //merge with the ones found in cache rl.merge(rcEntry); } return rl; } /** * Returns the property key that can be used to set the max jobs for the * default category associated with the registration jobs. * * @return the property key */ public String getDefaultRegistrationMaxJobsPropertyKey(){ StringBuffer key = new StringBuffer(); key.append( Dagman.NAMESPACE_NAME ).append( "." ). append( ReplicaCatalogBridge.DEFAULT_REGISTRATION_CATEGORY_KEY ). append( "." ).append( Dagman.MAXJOBS_KEY.toLowerCase() ); return key.toString(); } /** * It constructs the Job object for the registration node, which * registers the materialized files on the output pool in the RLS. * Note that the relations corresponding to this node should already have * been added to the concerned DagInfo object. * * @param regJobName The name of the job which registers the files in the * Replica Location Service. * @param job The job whose output files are to be registered in * the Replica Location Service. * * @param files Collection of FileTransfer objects * containing the information about source and * destination URLs. The destination * URLs would be our PFNs. * * @return Job corresponding to the new registration node. 
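     *
     * <p>
     * A hypothetical invocation (variable names invented for illustration):
     * <pre>
     *   Job regJob = rcb.makeRCRegNode( "register_preprocess_0", computeJob, fileTransfers );
     * </pre>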
*/ public Job makeRCRegNode( String regJobName, Job job, Collection files ) { //making the files string Job newJob = new Job(); newJob.setName( regJobName ); newJob.setTransformation( ReplicaCatalogBridge.RC_TRANSFORMATION_NS, ReplicaCatalogBridge.RC_TRANSFORMATION_NAME, ReplicaCatalogBridge.RC_TRANSFORMATION_VERSION ); newJob.setDerivation( ReplicaCatalogBridge.RC_DERIVATION_NS, ReplicaCatalogBridge.RC_DERIVATION_NAME, ReplicaCatalogBridge.RC_DERIVATION_VERSION ); // SiteInfo site = mPoolHandle.getPoolEntry( mOutputPool, "vanilla" ); SiteCatalogEntry site = mSiteStore.lookup( mOutputPool ); //change this function List tcentries = null; try { tcentries = mTCHandle.lookup( newJob.getTXNamespace(), newJob.getTXName(), newJob.getTXVersion(), "local", TCType.INSTALLED ); } catch ( Exception e ) { mLogger.log( "While retrieving entries from TC " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); } TransformationCatalogEntry tc; if ( tcentries == null || tcentries.isEmpty() ) { mLogger.log( "Unable to find in entry for " + newJob.getCompleteTCName() + " in transformation catalog on site local", LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Constructing a default entry for it " , LogManager.DEBUG_MESSAGE_LEVEL ); tc = defaultTCRCEntry( ); if( tc == null ){ throw new RuntimeException( "Unable to create an entry for " + newJob.getCompleteTCName() + " on site local"); } } else{ tc = (TransformationCatalogEntry) tcentries.get(0); } newJob.setRemoteExecutable( tc.getPhysicalTransformation() ); newJob.setArguments( this.generateRepJobArgumentString( site, regJobName, files ) ); // newJob.setUniverse( Engine.REGISTRATION_UNIVERSE ); newJob.setUniverse( GridGateway.JOB_TYPE.register.toString() ); newJob.setSiteHandle( tc.getResourceId() ); newJob.setJobType( Job.REPLICA_REG_JOB ); newJob.setVDSSuperNode( job.getName() ); //the profile information from the pool catalog needs to be //assimilated into the job. // newJob.updateProfiles( mPoolHandle.getPoolProfile( newJob.getSiteHandle() ) ); newJob.updateProfiles( mSiteStore.lookup( newJob.getSiteHandle() ).getProfiles() ); //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 newJob.addNotifications( tc ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. newJob.updateProfiles( tc ); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. newJob.updateProfiles( mProps ); //if no category is associated with the job, add a default //category if( !newJob.dagmanVariables.containsKey( Dagman.CATEGORY_KEY ) ){ newJob.dagmanVariables.construct( Dagman.CATEGORY_KEY, DEFAULT_REGISTRATION_CATEGORY_KEY ); } //in order to make sure that COG picks the default proxy //correctly through condor newJob.condorVariables.construct( "getenv", "true" ); return newJob; } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. * * * * @return the default entry. */ private TransformationCatalogEntry defaultTCRCEntry( ){ String site = "local"; //generate only once. if( !mDefaultTCRCCreated ){ //construct the path to it StringBuffer path = new StringBuffer(); path.append( mProps.getBinDir() ).append( File.separator ). 
append( "pegasus-rc-client" ); mDefaultTCRCEntry = new TransformationCatalogEntry( this.RC_TRANSFORMATION_NS, this.RC_TRANSFORMATION_NAME, this.RC_TRANSFORMATION_VERSION ); mDefaultTCRCEntry.setPhysicalTransformation( path.toString() ); mDefaultTCRCEntry.setResourceId( site ); //set the flag back to true mDefaultTCRCCreated = true; } return mDefaultTCRCEntry; } /** * Returns the classpath for the default rc-client entry. * * @param home the home directory where we need to check for lib directory. * * @return the classpath in an environment profile. */ private Profile getClassPath( String home ){ Profile result = null ; //create the CLASSPATH from home String classpath = mProps.getProperty( "java.class.path" ); if( classpath == null || classpath.trim().length() == 0 ){ return result; } mLogger.log( "JAVA CLASSPATH SET IS " + classpath , LogManager.DEBUG_MESSAGE_LEVEL ); StringBuffer cp = new StringBuffer(); String prefix = home + File.separator + "lib"; for( StringTokenizer st = new StringTokenizer( classpath, ":" ); st.hasMoreTokens(); ){ String token = st.nextToken(); if( token.startsWith( prefix ) ){ //this is a valid lib jar to put in cp.append( token ).append( ":" ); } } if ( cp.length() == 0 ){ //unable to create a valid classpath mLogger.log( "Unable to create a sensible classpath from " + home, LogManager.DEBUG_MESSAGE_LEVEL ); return result; } //we have everything now result = new Profile( Profile.ENV, "CLASSPATH", cp.toString() ); return result; } /** * Generates the argument string to be given to the replica registration job. * At present by default it would be picking up the file containing the * mappings. * * @param site the SiteCatalogEntry object * @param regJob The name of the registration job. * * @param files Collection of FileTransfer objects containing the * information about source and destURLs. The destination * URLs would be our PFNs. * * @return the argument string. */ private String generateRepJobArgumentString( SiteCatalogEntry site, String regJob, Collection files ) { StringBuffer arguments = new StringBuffer(); //select a LRC. disconnect here. It should be select a RC. edu.isi.pegasus.planner.catalog.site.classes.ReplicaCatalog rc = (site == null) ? null : site.selectReplicaCatalog(); //we append the url property if a user has specified a //URL in the site catalog entry, else we rely on what //was specified in properties if (!( rc == null || rc.getURL() == null || rc.getURL().length() == 0)) { //we have a lrc selected . construct vds.rc.url property arguments.append( "-D" ).append( ReplicaCatalog.c_prefix ).append( "." ). append( ReplicaCatalogBridge.REPLICA_CATALOG_URL_KEY).append( "=" ).append( rc.getURL() ). append( " " ); } //get any command line properties that may need specifying arguments.append( "--conf" ).append( " " ). append( mProps.getPropertiesInSubmitDirectory( ) ). append( " " ); //append the insert option arguments.append( "--insert" ).append( " " ). append( this.generateMappingsFile( regJob, files ) ); return arguments.toString(); } /** * Returns the properties that need to be passed to the the rc-client * invocation on the command line . It is of the form * "-Dprop1=value1 -Dprop2=value2 .." * * @param properties the properties object * * @return the properties list, else empty string. 
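     *
     * <p>
     * For example, with a hypothetical submit directory the returned string
     * would look like
     * <pre>
     *   -Dpegasus.user.properties=/submit/dir/pegasus.properties
     * </pre>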
*/ protected String getCommandLineProperties( PegasusProperties properties ){ StringBuffer sb = new StringBuffer(); appendProperty( sb, "pegasus.user.properties", properties.getPropertiesInSubmitDirectory( )); return sb.toString(); } /** * Appends a property to the StringBuffer, in the java command line format. * * @param sb the StringBuffer to append the property to. * @param key the property. * @param value the property value. */ protected void appendProperty( StringBuffer sb, String key, String value ){ sb.append("-D").append( key ).append( "=" ).append( value ).append( " "); } /** * Generates the registration mappings in a text file that is generated in the * dax directory (the directory where all the condor submit files are * generated). The pool value for the mapping is the output pool specified * by the user when running Pegasus. The name of the file is regJob+.in * * @param regJob The name of the registration job. * @param files Collection of FileTransferobjects containing the * information about source and destURLs. The destination * URLs would be our PFNs. * * @return String corresponding to the path of the the file containig the * mappings in the appropriate format. */ private String generateMappingsFile( String regJob, Collection files ) { String fileName = regJob + ".in"; File f = null; String submitFileDir = mPOptions.getSubmitDirectory(); //writing the stdin file try { f = new File( submitFileDir, fileName ); FileWriter stdIn = new FileWriter( f ); for(Iterator it = files.iterator();it.hasNext();){ FileTransfer ft = ( FileTransfer ) it.next(); //checking for transient flag if ( !ft.getTransientRegFlag() ) { stdIn.write( ftToRC( ft ) ); stdIn.flush(); } } stdIn.close(); } catch ( Exception e ) { throw new RuntimeException( "While writing out the registration file for job " + regJob, e ); } return f.getAbsolutePath(); } /** * Converts a FileTransfer to a RC compatible string representation. * * @param ft the FileTransfer object * * @return the RC version. */ private String ftToRC( FileTransfer ft ){ StringBuffer sb = new StringBuffer(); NameValue destURL = ft.getDestURL(); sb.append( ft.getLFN() ).append( " " ); sb.append( destURL.getValue() ).append( " " ); sb.append( "pool=\"" ).append( destURL.getKey() ).append( "\"" ); sb.append( "\n" ); return sb.toString(); } /** * Retrieves a location from the cache table, that contains the contents * of the cache files specified at runtime. * * @param lfn the logical name of the file. * * @return ReplicaLocation object corresponding to the entry * if found, else null. */ private ReplicaLocation retrieveFromCache( String lfn ){ return mCacheStore.getReplicaLocation( lfn ); } /** * Ends up loading the inherited replica files. * * @param files set of paths to the inherited replica files. */ private void loadInheritedReplicaStore( Set files ) { mLogger.log("Loading Inhertied ReplicaFiles files: " + files, LogManager.DEBUG_MESSAGE_LEVEL); this.mInheritedReplicaStore = this.getReplicaStoreFromFiles( files ); } /** * Ends up loading the cache files so as to enable the lookup for the transient * files created by the parent jobs. * * @param cacheFiles set of paths to the cache files. */ private void loadCacheFiles( Set cacheFiles ) { mLogger.log("Loading cache files: " + cacheFiles, LogManager.DEBUG_MESSAGE_LEVEL); mCacheStore = this.getReplicaStoreFromFiles(cacheFiles); } /** * Ends up loading a Replica Store from replica catalog files * * @param files set of paths to the cache files. 
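     *
     * @return the ReplicaStore populated from the specified files.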
*/ private ReplicaStore getReplicaStoreFromFiles( Set files ) { ReplicaStore store = new ReplicaStore(); Properties cacheProps = mProps.getVDSProperties().matchingSubset( ReplicaCatalog.c_prefix, false ); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_LOAD_TRANSIENT_CACHE, LoggingKeys.DAX_ID, mDag.getAbstractWorkflowName() ); ReplicaCatalog simpleFile; Map wildcardConstraint = null; //all cache files are loaded in readonly mode cacheProps.setProperty( ReplicaCatalogBridge.CACHE_READ_ONLY_KEY, "true" ); for ( Iterator it = files.iterator(); it.hasNext() ; ) { //read each of the cache file and load in memory String file = ( String ) it.next(); //set the appropriate property to designate path to file cacheProps.setProperty( ReplicaCatalogBridge.CACHE_REPLICA_CATALOG_KEY, file ); mLogger.log("Loading file: " + file, LogManager.DEBUG_MESSAGE_LEVEL); try{ simpleFile = ReplicaFactory.loadInstance( CACHE_REPLICA_CATALOG_IMPLEMENTER, cacheProps ); } catch( Exception e ){ mLogger.log( "Unable to load cache file " + file, e, LogManager.ERROR_MESSAGE_LEVEL ); continue; } //suck in all the entries into the cache replica store. //returns an unmodifiable collection. so merging an issue.. store.add( simpleFile.lookup( mSearchFiles ) ); //no wildcards as we only want to load mappings for files that //we require //mCacheStore.add( simpleFile.lookup( wildcardConstraint ) ); //close connection simpleFile.close(); } mLogger.logEventCompletion(); return store; } /** * Reads in the environment variables into memory from the properties file * and the pool catalog. * * @return the ENV namespace object holding the environment * variables. */ private ENV loadLocalEnvVariables() { //assumes that pool handle, and property handle are initialized. ENV env = new ENV(); //load from the pool.config // env.checkKeyInNS( mPoolHandle.getPoolProfile( "local", Profile.ENV ) ); SiteCatalogEntry local = mSiteStore.lookup( "local" ); env.checkKeyInNS( local.getProfiles().get( Profiles.NAMESPACES.env ) ); //load from property file env.checkKeyInNS( mProps.getLocalPoolEnvVar() ); // the new RC API has a different key. if that is specified use that. //mProps.getProperty( ReplicaCatalog.c_prefix ) return env; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/DataReuseEngine.java0000644000175000017500000005025611757531137026602 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.partitioner.graph.Adapter; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.provenance.pasoa.XMLProducer; import edu.isi.pegasus.planner.provenance.pasoa.producer.XMLProducerFactory; import edu.isi.pegasus.planner.provenance.pasoa.PPS; import edu.isi.pegasus.planner.provenance.pasoa.pps.PPSFactory; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.partitioner.graph.Bag; import java.util.Set; import java.util.Iterator; import java.util.LinkedList; import java.util.List; /** * The data reuse engine reduces the workflow on the basis of existing output * files of the workflow found in the Replica Catalog. The algorithm works in * two passes. * *

* In the first pass, we determine all the jobs whose output files exist in * the Replica Catalog. An output file with the transfer flag set to false is * treated as equivalent to the file existing in the Replica Catalog, if *

 *  - the output file is not an input to any of the children of the job X
 *  
* In the second pass, we remove the jobs whose output files exist in the * Replica Catalog and try to cascade the deletion upwards to the parent * jobs. We start a breadth-first traversal of the workflow bottom up. * A node is marked for deletion if - * *
 *  ( It is already marked for deletion in pass 1
 *      OR
 *      ( ALL of its children have been marked for deletion
 *        AND
 *        Node's output files have transfer flags set to false
 *      )
 *  )
 * 
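 *
 * <p>
 * A minimal usage sketch (illustrative, based only on the constructor and
 * methods defined in this class):
 * <pre>
 *   DataReuseEngine engine = new DataReuseEngine( dag, bag );
 *   ADag reduced = engine.reduceWorkflow( dag, rcb );
 *   List deleted = engine.getDeletedJobs();
 * </pre>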
* * @author Karan Vahi * @version $Revision: 3471 $ * */ public class DataReuseEngine extends Engine implements Refiner{ /** * List of all deleted jobs during workflow reduction. */ private List mAllDeletedJobs; /** * The XML Producer object that records the actions. */ private XMLProducer mXMLStore; /** * The workflow object being worked upon. */ private ADag mWorkflow; /** * The constructor * * @param orgDag The original Dag object * @param bag the bag of initialization objects. */ public DataReuseEngine( ADag orgDag, PegasusBag bag ){ super( bag) ; mAllDeletedJobs = new LinkedList(); mXMLStore = XMLProducerFactory.loadXMLProducer( mProps ); mWorkflow = orgDag; } /** * Returns a reference to the workflow that is being refined by the refiner. * * * @return ADAG object. */ public ADag getWorkflow(){ return this.mWorkflow; } /** * Returns a reference to the XMLProducer, that generates the XML fragment * capturing the actions of the refiner. This is used for provenace * purposes. * * @return XMLProducer */ public XMLProducer getXMLProducer(){ return this.mXMLStore; } /** * Reduces the workflow on the basis of the existence of lfn's in the * replica catalog. The existence of files, is determined via the bridge. * * @param workflow the workflow to be reduced. * @param rcb instance of the replica catalog bridge. * * @return the reduced dag * */ public ADag reduceWorkflow( ADag workflow, ReplicaCatalogBridge rcb ){ //clone the original workflow. it will be reduced later on ADag reducedWorkflow = (ADag) workflow.clone(); //we first need to convert internally into graph format Graph reducedGraph = this.reduceWorkflow( Adapter.convert( reducedWorkflow ), rcb ); //convert back to ADag and return //we need to reset the jobs and the relations in it reducedWorkflow.clearJobs(); //traverse through the graph and jobs and edges for( Iterator it = reducedGraph.nodeIterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); //get the job associated with node reducedWorkflow.add( ( Job )node.getContent() ); //all the children of the node are the edges of the DAG for( Iterator childrenIt = node.getChildren().iterator(); childrenIt.hasNext(); ){ GraphNode child = ( GraphNode ) childrenIt.next(); // System.out.println( node.getID() + " -> " + child.getID() ); reducedWorkflow.addNewRelation( node.getID(), child.getID() ); } } mWorkflow = reducedWorkflow; return reducedWorkflow; } /** * Reduces the workflow on the basis of the existence of lfn's in the * replica catalog. The existence of files, is determined via the bridge. * * @param workflow the workflow to be reduced. * @param rcb instance of the replica catalog bridge. * * @return the reduced dag. The input workflow object is returned reduced. * */ public Graph reduceWorkflow( Graph workflow, ReplicaCatalogBridge rcb ){ //search for the replicas of the files. The search list //is already present in Replica Catalog Bridge Set filesInRC = rcb.getFilesInReplica(); //we reduce the dag only if the //force option is not specified. 
if(mPOptions.getForce()) return workflow; //load the PPS implementation PPS pps = PPSFactory.loadPPS( this.mProps ); //mXMLStore.add( "" ); mXMLStore.add( "" ); //call the begin workflow method try{ pps.beginWorkflowRefinementStep(this, PPS.REFINEMENT_REDUCE , true); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception", e ); } //clear the XML store mXMLStore.clear(); mLogger.log("Reducing the workflow",LogManager.DEBUG_MESSAGE_LEVEL); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_REDUCE, LoggingKeys.DAX_ID, mWorkflow.getAbstractWorkflowName() ); //figure out jobs whose output files already exist in the Replica Catalog List originalJobsInRC = getJobsInRC( workflow ,filesInRC ); //mAllDeletedJobs = (Vector)mOrgJobsInRC.clone(); //firstPass( originalJobsInRC ); Graph reducedWorkflow = cascadeDeletionUpwards( workflow, originalJobsInRC ); mLogMsg = "Nodes/Jobs Deleted from the Workflow during reduction "; mLogger.log( mLogMsg,LogManager.INFO_MESSAGE_LEVEL ); for( Job job : this.mAllDeletedJobs){ mLogger.log("\t" + job.getID(), LogManager.INFO_MESSAGE_LEVEL ); mXMLStore.add( "" ); mXMLStore.add( "\n" ); } mLogger.log( mLogMsg + " - DONE", LogManager.INFO_MESSAGE_LEVEL ); //call the end workflow method for pasoa interactions try{ for( Iterator it = reducedWorkflow.nodeIterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); pps.isIdenticalTo( node.getName(), node.getName() ); } pps.endWorkflowRefinementStep( this ); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception", e ); } mLogger.logEventCompletion(); return reducedWorkflow; } /** * This returns all the jobs deleted from the workflow after the reduction * algorithm has run. * * @return List containing the Job of deleted leaf jobs. */ public List getDeletedJobs(){ return this.mAllDeletedJobs; } /** * This returns all the deleted jobs that happen to be leaf nodes. This * entails that the output files of these jobs be transferred * from the location returned by the Replica Catalog to the * pool specified. This is a subset of mAllDeletedJobs * Also to determine the deleted leaf jobs it refers the original * dag, not the reduced dag. * * @return List containing the Job of deleted leaf jobs. */ public List getDeletedLeafJobs(){ mLogger.log( "Date Reuse Engine no longer tracks deleted leaf jobs. Returning empty list ", LogManager.DEBUG_MESSAGE_LEVEL ); List delLeafJobs = new LinkedList(); return delLeafJobs; } /** * Returns all the jobs whose output files exist in the Replica Catalog. * An output file with the transfer flag set to false is treated equivalent * to the file being in the Replica Catalog , if * * - the output file is not an input to any of the children of the job X * * @param workflow the workflow object * @param filesInRC Set of String objects corresponding to the * logical filenames of files that are found to be in the * Replica Catalog. * * @return a List of GraphNodes with their Boolean bag value set to true. 
* * @see org.griphyn.cPlanner.classes.Job */ private List getJobsInRC(Graph workflow ,Set filesInRC){ List jobsInReplica = new LinkedList(); int noOfOutputFilesInJob = 0; int noOfSuccessfulMatches = 0; if( workflow.isEmpty() ){ String msg = "ReductionEngine: The set of jobs in the workflow " + "\n is empty."; mLogger.log( msg, LogManager.DEBUG_MESSAGE_LEVEL ); return jobsInReplica; } mLogger.log("Jobs whose o/p files already exist", LogManager.DEBUG_MESSAGE_LEVEL); //iterate through all the nodes in the graph for( Iterator it = workflow.nodeIterator(); it.hasNext(); ){ GraphNode node = (GraphNode)it.next(); Job job = (Job)node.getContent(); Set outputFiles = job.getOutputFiles(); String jobName = job.jobName; if( job.getOutputFiles().isEmpty() ){ //a job with no output file should not be //marked as a job in the RC //Otherwise it can result in whole workflow being reduced //if such a node is the leaf of the workflow. mLogger.log("Job " + job.getName() + " has no o/p files", LogManager.DEBUG_MESSAGE_LEVEL); continue; } /* Commented on Oct10. This ended up making the Planner doing duplicate transfers if(subInfo.stdOut.length()>0) vJobOutputFiles.addElement(subInfo.stdOut); */ noOfOutputFilesInJob = outputFiles.size(); //traversing through the output files of that particular job for( PegasusFile pf : outputFiles ){ if(filesInRC.contains(pf.getLFN()) ){ noOfSuccessfulMatches++; } else if ( pf.getTransientTransferFlag() ){ //successful match only if the output file is not an input //to any of the children of the job X boolean input = true; for( Iterator cit = node.getChildren().iterator(); cit.hasNext(); ){ GraphNode child = (GraphNode) cit.next(); Job childJob = (Job)child.getContent(); if( childJob.getInputFiles().contains( pf ) ){ input = false; break; } } if( input ){ noOfSuccessfulMatches++; } } } //we add a job to list of jobs whose output files already exist //only if noOfSuccessFulMatches is equal to the number of output //files in job if(noOfOutputFilesInJob == noOfSuccessfulMatches){ mLogger.log("\t" + jobName, LogManager.DEBUG_MESSAGE_LEVEL); //COLOR the node as while node.setColor( GraphNode.BLACK_COLOR ); jobsInReplica.add( node ); } //reinitialise the variables noOfSuccessfulMatches = 0; noOfOutputFilesInJob = 0; } mLogger.log("Jobs whose o/p files already exist - DONE", LogManager.DEBUG_MESSAGE_LEVEL); return jobsInReplica; } /** * Cascade the deletion of the jobs upwards in the workflow. We start a * breadth first traversal of the workflow bottom up. A node is marked for * deletion if - * *
     *  ( It is already marked for deletion
     *      OR
     *      ( ALL of its children have been marked for deletion
     *        AND
     *        Node's output files have transfer flags set to false
     *      )
     *  )
     * 
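     * <p>
     * (Descriptive note: the traversal starts at the leaf nodes, and the
     * BooleanBag attached to each node records whether it has already been
     * queued, so a parent shared by several children is enqueued only once.)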
* * @param workflow the worfklow to be deduced * @param originalJobsInRC list of nodes found to be in the Replica Catalog. */ protected Graph cascadeDeletionUpwards(Graph workflow, List originalJobsInRC) { LinkedList queue = new LinkedList(); int currentDepth = -1; //sanity intialization of all nodes depth //also associate a boolean bag with the nodes //that tracks whether a node has been traversed or not for( Iterator it = workflow.nodeIterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); node.setDepth( currentDepth ); BooleanBag bag = new BooleanBag(); node.setBag(bag); } //intialize all the leave nodes depth to 0 //and put them in the queue currentDepth = 0; for( Iterator it = workflow.getLeaves().iterator(); it.hasNext(); ){ GraphNode node = it.next(); node.setDepth( currentDepth ); queue.add( node ); } //A node with COLOR set to BLACK means it is marked for deletion //start the bottom up traversal while( !queue.isEmpty() ){ GraphNode node = (GraphNode)queue.removeFirst(); int depth = node.getDepth(); //System.out.println( "Traversing " + node.getID() ); //traverse through all the parents and add to the queue for( Iterator it = node.getParents().iterator(); it.hasNext(); ){ GraphNode parent = (GraphNode)it.next(); //if the parent has already made it's way to the queue //dont add again. this is when multiple nodes have same parent //if( parent.isColor( GraphNode.GRAY_COLOR ) ){ if( ((BooleanBag)parent.getBag()).getBooleanValue() ){ continue; } parent.setDepth( depth + 1 ); //parent.setColor( GraphNode.GRAY_COLOR ); ((BooleanBag)parent.getBag()).add(true); //System.out.println( "Adding parent " + parent.getID() ); queue.addLast( parent ); } if( !node.isColor( GraphNode.BLACK_COLOR ) ){ //If a node is not already marked for deletion , it can be marked //for deletion if // a) all it's children have been marked for deletion AND // b) node's output files have transfer flags set to false boolean delete = true; for( Iterator cit = node.getChildren().iterator(); cit.hasNext(); ){ GraphNode child = (GraphNode)cit.next(); if( !child.isColor( GraphNode.BLACK_COLOR ) ){ delete = false; break; } } if( delete ){ //all the children are deleted. However delete only if // all the output files have transfer flags set to false if( /*node.isColor( GraphNode.BLACK_COLOR ) ||*/ !transferOutput( node ) ){ mLogger.log( "Node can be deleted " + node.getID() , LogManager.DEBUG_MESSAGE_LEVEL ); node.setColor( GraphNode.BLACK_COLOR ); } } } //if the node is colored BLACK at this point //remove the node from the workflow if( node.isColor( GraphNode.BLACK_COLOR ) ){ mLogger.log( "Removing node from the workflow " + node.getID() , LogManager.DEBUG_MESSAGE_LEVEL ); this.mAllDeletedJobs.add( (Job)node.getContent() ); workflow.remove( node.getID() ); } } return workflow; } /** * Returns whether a user wants output transferred for a node or not. 
* If no output files are associated , true will be returned * * @param node the GraphNode * * @return boolean */ protected boolean transferOutput(GraphNode node) { boolean result = false; Job job = (Job)node.getContent(); if( job.getOutputFiles().isEmpty() ){ //no output files means we should not delete the job automatically //JIRA PM-24 return true; } for( Iterator it = job.getOutputFiles().iterator(); it.hasNext(); ){ PegasusFile pf = (PegasusFile)it.next(); if( !pf.getTransientTransferFlag() ){ result = true; break; } } return result; } /** * A bag implementation that cam be used to hold a boolean value associated with the * graph node * */ public class BooleanBag implements Bag { /** * The boolean value */ private boolean mBoolean; /** * The default constructor. */ public BooleanBag(){ mBoolean = false; } /** * Returns the boolean value * * @return */ public boolean getBooleanValue(){ return mBoolean; } /** * For all keys returns the boolean value * * @param key * @return */ public Object get(Object key) { return mBoolean; } /** * Ignores the key and only adds the value . * The value should be a boolean * * @param key * @param value * * @return */ public boolean add(Object key, Object value) { if (!(value instanceof Boolean )){ throw new IllegalArgumentException( "Boolean Bag only accepts boolean values" + value ); } mBoolean = (Boolean)value; return true; } /** * Returns false. You cannot associate a key with this bag. * * @param key * * @return false */ public boolean containsKey(Object key) { return false; } /** * Adds a boolean value to the bag * * @param b the boolean value */ public void add(boolean b) { this.add( null, b ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/RemoveDirectory.java0000644000175000017500000004513711757531137026723 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.classes.TransferJob; import java.util.Iterator; import java.util.Set; import java.util.HashSet; import java.util.List; import java.util.LinkedList; import java.io.File; import edu.isi.pegasus.planner.namespace.Pegasus; import java.io.BufferedWriter; import java.io.FileWriter; import java.io.IOException; /** * Ends up creating a cleanup dag that deletes the remote directories that * were created by the create dir jobs. The cleanup dag is generated in a * sub directory from the main directory containing the submit files of the * dag. 
The dag consists of independant jobs, with each job responsible for * deleting directory for a execution pool. The current way of generating the * dag is tied to the fact, that the directories in which a job are executed * is tied to the pool not the job itself. * * @author Karan Vahi * @version $Revision: 4716 $ * @see CreateDirectory */ public class RemoveDirectory extends Engine { /** * The scheme name for file url. */ public static final String FILE_URL_SCHEME = "file:"; /** * The prefix that is attached to the name of the dag for which the * cleanup Dag is being generated, to generate the name of the cleanup * Dag. */ public static final String CLEANUP_DAG_PREFIX = "del_"; /** * Constant suffix for the names of the remote directory nodes. */ public static final String REMOVE_DIR_SUFFIX = "_rdir"; /** * The logical name of the transformation that removes directories on the * remote execution pools. */ public static final String TRANSFORMATION_NAME = "cleanup"; /** * The basename of the pegasus dirmanager executable. */ public static final String REMOVE_DIR_EXECUTABLE_BASENAME = "pegasus-cleanup"; /** * The transformation namespace for the create dir jobs. */ public static final String TRANSFORMATION_NAMESPACE = "pegasus"; /** * The version number for the derivations for create dir jobs. */ public static final String TRANSFORMATION_VERSION = null; /** * The derivation namespace for the create dir jobs. */ public static final String DERIVATION_NAMESPACE = "pegasus"; /** * The logical name of the transformation that removes directories on the * remote execution pools. */ public static final String DERIVATION_NAME = "cleanup"; /** * The version number for the derivations for create dir jobs. */ public static final String DERIVATION_VERSION = "1.0"; /** * The concrete dag so far, for which the clean up dag needs to be generated. */ private ADag mConcDag; /** * Boolean indicating whether we need to transfer dirmanager from the submit * host. */ private boolean mTransferFromSubmitHost; /** * A convenience method to return the complete transformation name being * used to construct jobs in this class. * * @return the complete transformation name */ public static String getCompleteTranformationName(){ return Separator.combine( TRANSFORMATION_NAMESPACE, TRANSFORMATION_NAME, TRANSFORMATION_VERSION ); } /** * The submit directory for the workflow. */ private String mSubmitDirectory; /** * The overloaded constructor that sets the dag for which we have to * generated the cleanup dag for. * * @param concDag the concrete dag for which cleanup is reqd. * @param bag the bag of initialization objects * @param submitDirectory the submit directory for the cleanup workflow */ public RemoveDirectory( ADag concDag, PegasusBag bag, String submitDirectory ) { super( bag ); mConcDag = concDag; mTransferFromSubmitHost = bag.getPegasusProperties().transferWorkerPackage(); mSubmitDirectory = submitDirectory; //check to see if it exists File dir = new File( submitDirectory ); if( !dir.exists() ){ // does not exist, try to make it if ( ! dir.mkdirs() ) { //try to get around JVM bug. JIRA PM-91 if( dir.getPath().endsWith( "." ) ){ //just try to create the parent directory if( !dir.getParentFile().mkdirs() ){ throw new RuntimeException( "Unable to create directory " + dir.getPath() ); } return; } throw new RuntimeException( "Unable to create directory " + dir.getPath() ); } } } /** * Generates a cleanup DAG for the dag associated with the class. Creates a * cleanup node per remote pool. 
It looks at the ADAG, to determine the * sites at which the jobs in the dag have been scheduled. * * @return the cleanup DAG. */ public ADag generateCleanUPDAG( ){ return this.generateCleanUPDAG( mConcDag ); } /** * Generates a cleanup DAG for the dag object passed. Creates a cleanup * node per remote pool. It looks at the ADAG, to determine the sites at * which the jobs in the dag have been scheduled. * * @param dag the dag for which cleanup dag needs to be generated. * * @return the cleanup DAG. * @see org.griphyn.cPlanner.classes.ADag#getExecutionSites() */ public ADag generateCleanUPDAG(ADag dag ){ ADag cDAG = new ADag(); cDAG.dagInfo.nameOfADag = this.CLEANUP_DAG_PREFIX + dag.dagInfo.nameOfADag; cDAG.dagInfo.index = dag.dagInfo.index; cDAG.dagInfo.setDAXMTime( dag.getMTime() ); Set pools = this.getCreateDirSites(dag); String pool = null; String jobName = null; //remove the entry for the local pool //pools.remove("local"); for(Iterator it = pools.iterator();it.hasNext();){ pool = (String)it.next(); jobName = getRemoveDirJobName(dag,pool); cDAG.add(makeRemoveDirJob( pool,jobName )); } return cDAG; } /** * Retrieves the sites for which the create dir jobs need to be created. * It returns all the sites where the compute jobs have been scheduled. * * * @return a Set containing a list of siteID's of the sites where the * dag has to be run. */ protected Set getCreateDirSites( ADag dag ){ Set set = new HashSet(); for( Iterator it = dag.vJobSubInfos.iterator();it.hasNext();){ Job job = (Job)it.next(); if( job.getJobType() == Job.CHMOD_JOB ){ //skip //chmod jobs dont have a staging site associated //they are only created in the shared fs mode. continue; } //add to the set only if the job is //being run in the work directory //this takes care of local site create dir if( job.runInWorkDirectory()){ String site = job.getStagingSiteHandle(); //sanity check for remote transfer jobs if( job instanceof TransferJob ){ site = ((TransferJob)job).getNonThirdPartySite(); } //System.out.println( "Job staging site handle " + job.getID() + " " + site ); set.add( site ); } } //remove the stork pool set.remove("stork"); return set; } /** * It returns the name of the remove directory job, that is to be assigned. * The name takes into account the workflow name while constructing it, as * that is thing that can guarentee uniqueness of name in case of deferred * planning. * * @param dag the dag for which the cleanup DAG is being generated. * @param pool the execution pool for which the remove directory job * is responsible. * * @return String corresponding to the name of the job. */ private String getRemoveDirJobName(ADag dag,String pool){ StringBuffer sb = new StringBuffer(); sb.append(dag.dagInfo.nameOfADag).append("_"). append(dag.dagInfo.index).append("_"). append(pool).append(this.REMOVE_DIR_SUFFIX); return sb.toString(); } /** * It creates a remove directory job that creates a directory on the remote pool * using the perl executable that Gaurang wrote. It access mkdir underneath. * It gets the name of the random directory from the Pool handle. * * @param site the execution pool for which the create dir job is to be * created. * @param jobName the name that is to be assigned to the job. * * @return the remove dir job. 
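     *
     * A minimal usage sketch (the site handle and workflow name below are
     * invented for illustration; the job name merely follows the format
     * produced by getRemoveDirJobName):
     *
     *   Job rmJob = makeRemoveDirJob( "isi_viz", "blackdiamond_0_isi_viz_rdir" );
     *
     * This delegates to the three-argument variant, passing the externally
     * accessible work directory URL of the site as the only entry in the
     * list of files to clean up.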
*/ public Job makeRemoveDirJob( String site, String jobName ) { List l = new LinkedList(); //the externally accessible url to the directory/ workspace for the workflow l.add( mSiteStore.getExternalWorkDirectoryURL( site ) ); return makeRemoveDirJob( site, jobName, l ); } /** * It creates a remove directory job that creates a directory on the remote pool * using the perl executable that Gaurang wrote. It access mkdir underneath. * It gets the name of the random directory from the Pool handle. * * @param site the site from where the directory need to be removed. * @param jobName the name that is to be assigned to the job. * @param files the list of files to be cleaned up. * * @return the remove dir job. */ public Job makeRemoveDirJob( String site, String jobName, List files ) { Job newJob = new Job(); List entries = null; String execPath = null; TransformationCatalogEntry entry = null; //the site where the cleanup job will run String eSite = "local"; for( String file: files ){ if( file.startsWith( this.FILE_URL_SCHEME ) ){ //means the cleanup job should run on the staging site mLogger.log( "Directory URL is a file url for site " + site + " " + files, LogManager.DEBUG_MESSAGE_LEVEL ); eSite = site; } } SiteCatalogEntry ePool = mSiteStore.lookup( eSite ); try { entries = mTCHandle.lookup( RemoveDirectory.TRANSFORMATION_NAMESPACE, RemoveDirectory.TRANSFORMATION_NAME, RemoveDirectory.TRANSFORMATION_VERSION, eSite, TCType.INSTALLED); } catch (Exception e) { //non sensical catching mLogger.log("Unable to retrieve entry from TC " + e.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL ); } entry = ( entries == null ) ? this.defaultTCEntry( ePool ): //try using a default one (TransformationCatalogEntry) entries.get(0); if( entry == null ){ //NOW THROWN AN EXCEPTION //should throw a TC specific exception StringBuffer error = new StringBuffer(); error.append("Could not find entry in tc for lfn "). append( this.getCompleteTranformationName() ). append(" at site ").append( eSite ); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } if( mTransferFromSubmitHost ){ /* //we are using mkdir directly argString = " -p " + mPoolHandle.getExecPoolWorkDir( execPool ); execPath = "mkdir"; //path variable needs to be set newJob.envVariables.construct( "PATH", CreateDirectory.PATH_VALUE ); */ newJob.vdsNS.construct( Pegasus.GRIDSTART_KEY, "None" ); StringBuffer sb = new StringBuffer(); sb.append( mProps.getBinDir() ). 
append( File.separator ).append( RemoveDirectory.REMOVE_DIR_EXECUTABLE_BASENAME ); execPath = sb.toString(); newJob.condorVariables.construct( "transfer_executable", "true" ); } else{ execPath = entry.getPhysicalTransformation(); } //prepare the stdin for the cleanup job String stdIn = jobName + ".in"; try{ BufferedWriter writer; writer = new BufferedWriter( new FileWriter( new File( mSubmitDirectory, stdIn ) )); for( String file: files ){ writer.write( file ); writer.write( "\n" ); } //closing the handle to the writer writer.close(); } catch(IOException e){ mLogger.log( "While writing the stdIn file " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( "While writing the stdIn file " + stdIn, e ); } //set the stdin file for the job newJob.setStdIn( stdIn ); newJob.jobName = jobName; newJob.setTransformation( RemoveDirectory.TRANSFORMATION_NAMESPACE, RemoveDirectory.TRANSFORMATION_NAME, RemoveDirectory.TRANSFORMATION_VERSION ); newJob.setDerivation( RemoveDirectory.DERIVATION_NAMESPACE, RemoveDirectory.DERIVATION_NAME, RemoveDirectory.DERIVATION_VERSION ); newJob.executable = execPath; newJob.setSiteHandle( eSite ); newJob.jobClass = Job.CLEANUP_JOB; newJob.jobID = jobName; //the profile information from the pool catalog needs to be //assimilated into the job. newJob.updateProfiles( mSiteStore.lookup( newJob.getSiteHandle() ).getProfiles() ); //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 newJob.addNotifications( entry ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. newJob.updateProfiles(entry); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. newJob.updateProfiles(mProps); return newJob; } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. * * @param site the SiteCatalogEntry for the site for which the default entry is required. * * * @return the default entry. */ private TransformationCatalogEntry defaultTCEntry( SiteCatalogEntry site ){ TransformationCatalogEntry defaultTCEntry = null; //check if PEGASUS_HOME is set String home = site.getPegasusHome(); //if PEGASUS_HOME is not set, use VDS_HOME home = ( home == null )? site.getVDSHome( ): home; mLogger.log( "Creating a default TC entry for " + RemoveDirectory.getCompleteTranformationName() + " at site " + site.getSiteHandle(), LogManager.DEBUG_MESSAGE_LEVEL ); //if home is still null if ( home == null ){ //cannot create default TC mLogger.log( "Unable to create a default entry for " + RemoveDirectory.getCompleteTranformationName(), LogManager.DEBUG_MESSAGE_LEVEL ); //set the flag back to true return defaultTCEntry; } //remove trailing / if specified home = ( home.charAt( home.length() - 1 ) == File.separatorChar )? home.substring( 0, home.length() - 1 ): home; //construct the path to it StringBuffer path = new StringBuffer(); path.append( home ).append( File.separator ). append( "bin" ).append( File.separator ). 
append( RemoveDirectory.REMOVE_DIR_EXECUTABLE_BASENAME ); defaultTCEntry = new TransformationCatalogEntry( RemoveDirectory.TRANSFORMATION_NAMESPACE, RemoveDirectory.TRANSFORMATION_NAME, RemoveDirectory.TRANSFORMATION_VERSION ); defaultTCEntry.setPhysicalTransformation( path.toString() ); defaultTCEntry.setResourceId( site.getSiteHandle() ); defaultTCEntry.setType( TCType.INSTALLED ); defaultTCEntry.setSysInfo( site.getSysInfo() ); //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug. as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.DEBUG_MESSAGE_LEVEL ); } return defaultTCEntry; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/CreateDirectory.java0000644000175000017500000001351611757531137026665 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.refiner.createdir.Implementation; import edu.isi.pegasus.planner.refiner.createdir.Strategy; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.common.util.FactoryException; /** * This common interface that identifies the basic functions that need to be * implemented to introduce random directories in which the jobs are executed on * the remote execution pools. The implementing classes are invoked when the user * gives the --randomdir option. The implementing classes determine where in the * graph the nodes creating the random directories are placed and their * dependencies with the rest of the nodes in the graph. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2582 $ */ public class CreateDirectory extends Engine { /** * The name of the package in which all the implementing classes are. */ public static final String PACKAGE_NAME = "edu.isi.pegasus.planner.refiner.createdir"; /** * It is a reference to the Concrete Dag so far. */ protected ADag mCurrentDag; /** * Loads the implementing class corresponding to the mode specified by the * user at runtime. * * @param bag bag of initialization objects * * * @return instance of a CreateDirecctory implementation * * @throws FactoryException that nests any error that * might occur during the instantiation of the implementation. 
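     *
     * The strategy class name is read via PegasusProperties.getCreateDirClass(),
     * prefixed with PACKAGE_NAME, and instantiated reflectively through
     * DynamicLoader, so the strategies shipped in this package (for example
     * HourGlass and Tentacles) are selected purely by their simple class name.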
*/ public static Strategy loadCreateDirectoryStraegyInstance( PegasusBag bag ) throws FactoryException { PegasusProperties props = bag.getPegasusProperties(); if( props == null ){ throw new FactoryException( "Properties instance is null " ); } String className = props.getCreateDirClass(); //prepend the package name className = PACKAGE_NAME + "." + className; //try loading the class dynamically Strategy cd = null; DynamicLoader dl = new DynamicLoader(className); try { Object argList[] = new Object[ 0 ]; cd = ( Strategy ) dl.instantiate( argList ); cd.initialize( bag, CreateDirectory.loadCreateDirectoryImplementationInstance( bag ) ); } catch (Exception e) { throw new FactoryException( "Instantiating Create Directory Strategy", className, e ); } return cd; } /** * Loads the implementing class corresponding to the mode specified by the * user at runtime. * * @param bag bag of initialization objects * * * @return instance of a CreateDirecctory implementation * * @throws FactoryException that nests any error that * might occur during the instantiation of the implementation. */ public static Implementation loadCreateDirectoryImplementationInstance( PegasusBag bag ) throws FactoryException { PegasusProperties props = bag.getPegasusProperties(); if( props == null ){ throw new FactoryException( "Properties instance is null " ); } String className = props.getCreateDirImplementation(); //for now //className = "DefaultImplementation"; //prepend the package name className = PACKAGE_NAME + "." + className; //try loading the class dynamically Implementation impl = null; DynamicLoader dl = new DynamicLoader(className); try { Object argList[] = new Object[ 0 ]; impl = ( Implementation ) dl.instantiate( argList ); impl.initialize( bag ); } catch (Exception e) { throw new FactoryException( "Instantiating Create Directory", className, e ); } return impl; } /** * A pratically nothing constructor ! * * * @param bag bag of initialization objects */ protected CreateDirectory( PegasusBag bag ) { super( bag ); } /** * It modifies the concrete dag passed in the constructor and adds the create * random directory nodes to it at the root level. These directory nodes have * a common child that acts as a concatenating job and ensures that Condor * does not start staging in the data before the directories have been added. * The root nodes in the unmodified dag are now chidren of this concatenating * dummy job. * * @param dag the workflow to which nodes have to be added * * @return workflow with nodes added. */ public ADag addCreateDirectoryNodes( ADag dag ){ Strategy s = CreateDirectory.loadCreateDirectoryStraegyInstance( mBag ); return s.addCreateDirectoryNodes( dag ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/createdir/0000755000175000017500000000000011757531667024676 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/createdir/Strategy.java0000644000175000017500000000321411757531137027333 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.createdir; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The interface that defines how the cleanup job is invoked and created. * * @author Karan Vahi * @version $Revision: 2582 $ */ public interface Strategy { /** * The version number associated with this API Cleanup Strategy. */ public static final String VERSION = "1.0"; /** * Intializes the class. * * @param bag bag of initialization objects * @param impl the implementation instance that creates create dir job */ public void initialize( PegasusBag bag, Implementation impl ) ; /** * Modifies the workflow to add create directory nodes. The workflow passed * is a worklow, where the jobs have been mapped to sites. * * @param dag the workflow to which the nodes have to be added. * * @return the added workflow */ public ADag addCreateDirectoryNodes( ADag dag ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/createdir/DefaultImplementation.java0000644000175000017500000003112511757531137032025 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.createdir; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.common.util.Separator; import java.io.File; import java.util.List; /** * The default implementation for creating create dir jobs. * * @author Karan Vahi * @version $Revision: 4778 $ */ public class DefaultImplementation implements Implementation { /** * The scheme name for file url. */ public static final String FILE_URL_SCHEME = "file:"; /** * The transformation namespace for the create dir jobs. */ public static final String TRANSFORMATION_NAMESPACE = "pegasus"; /** * The logical name of the transformation that creates directories on the * remote execution pools. */ public static final String TRANSFORMATION_NAME = "dirmanager"; /** * The version number for the derivations for create dir jobs. */ public static final String TRANSFORMATION_VERSION = null; /** * The basename of the pegasus cleanup executable. */ public static final String EXECUTABLE_BASENAME = "pegasus-create-dir"; /** * The path to be set for create dir jobs. 
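     * This value appears to be retained for the direct-mkdir code path that
     * is currently commented out in makeCreateDirJob; the active code path
     * resolves the executable through the transformation catalog instead.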
*/ public static final String PATH_VALUE = ".:/bin:/usr/bin:/usr/ucb/bin"; /** * The complete TC name for kickstart. */ public static final String COMPLETE_TRANSFORMATION_NAME = Separator.combine( TRANSFORMATION_NAMESPACE, TRANSFORMATION_NAME, TRANSFORMATION_VERSION ); /** * The derivation namespace for the create dir jobs. */ public static final String DERIVATION_NAMESPACE = "pegasus"; /** * The logical name of the transformation that creates directories on the * remote execution pools. */ public static final String DERIVATION_NAME = "dirmanager"; /** * The version number for the derivations for create dir jobs. */ public static final String DERIVATION_VERSION = "1.0"; /** * The handle to the transformation catalog. */ protected TransformationCatalog mTCHandle; /** * The handle to the SiteStore. */ protected SiteStore mSiteStore; /** * The handle to the logging object. */ protected LogManager mLogger; /** * The handle to the pegasus properties. */ protected PegasusProperties mProps; /** * Whether we want to use dirmanager or mkdir directly. */ protected boolean mUseMkdir; /** * Intializes the class. * * @param bag bag of initialization objects */ public void initialize( PegasusBag bag ) { mLogger = bag.getLogger(); mSiteStore = bag.getHandleToSiteStore(); mTCHandle = bag.getHandleToTransformationCatalog(); mProps = bag.getPegasusProperties(); //in case of staging of executables/worker package //we use mkdir directly mUseMkdir = bag.getPegasusProperties().transferWorkerPackage(); } /** * It creates a make directoryURL job that creates a directoryURL on the remote pool * using the perl executable that Gaurang wrote. It access mkdir underneath. * * * @param site the site for which the create dir job is to be created. * @param name the name that is to be assigned to the job. * @param directoryURL the externally accessible URL to the directoryURL that is * created * * @return create dir job. */ public Job makeCreateDirJob( String site, String name, String directoryURL ) { Job newJob = new Job(); List entries = null; String execPath = null; TransformationCatalogEntry entry = null; //associate a credential if required newJob.addCredentialType( directoryURL ); //figure out on the basis of directory URL //where to run the job. String eSite = getCreateDirJobExecutionSite( site, directoryURL ); try { entries = mTCHandle.lookup( DefaultImplementation.TRANSFORMATION_NAMESPACE, DefaultImplementation.TRANSFORMATION_NAME, DefaultImplementation.TRANSFORMATION_VERSION, eSite, TCType.INSTALLED); } catch (Exception e) { //non sensical catching mLogger.log("Unable to retrieve entries from TC " + e.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL ); } entry = ( entries == null ) ? this.defaultTCEntry( eSite ): //try using a default one (TransformationCatalogEntry) entries.get(0); if( entry == null ){ //NOW THROWN AN EXCEPTION //should throw a TC specific exception StringBuffer error = new StringBuffer(); error.append("Could not find entry in tc for lfn "). append( COMPLETE_TRANSFORMATION_NAME ). 
append(" at site ").append( eSite ); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } SiteCatalogEntry ePool = mSiteStore.lookup( eSite ); String argString = null; if( mUseMkdir ){ /* //we are using mkdir directly argString = " -p " + mPoolHandle.getExecPoolWorkDir( execPool ); execPath = "mkdir"; //path variable needs to be set newJob.envVariables.construct( "PATH", CreateDirectory.PATH_VALUE ); */ newJob.vdsNS.construct( Pegasus.GRIDSTART_KEY, "None" ); StringBuffer sb = new StringBuffer(); sb.append( mProps.getBinDir() ). append( File.separator ).append( DefaultImplementation.EXECUTABLE_BASENAME ); execPath = sb.toString(); argString = "-u " + mSiteStore.getExternalWorkDirectoryURL( site ); newJob.condorVariables.setExecutableForTransfer(); } else{ execPath = entry.getPhysicalTransformation(); argString = "-u " + directoryURL; } newJob.jobName = name; newJob.setTransformation( DefaultImplementation.TRANSFORMATION_NAMESPACE, DefaultImplementation.TRANSFORMATION_NAME, DefaultImplementation.TRANSFORMATION_VERSION ); newJob.setDerivation( DefaultImplementation.DERIVATION_NAMESPACE, DefaultImplementation.DERIVATION_NAME, DefaultImplementation.DERIVATION_VERSION ); newJob.executable = execPath; newJob.executionPool = eSite; newJob.strargs = argString; newJob.jobClass = Job.CREATE_DIR_JOB; newJob.jobID = name; //the profile information from the pool catalog needs to be //assimilated into the job. newJob.updateProfiles( ePool.getProfiles() ); //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 if( entry != null ){ newJob.addNotifications( entry ); } //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. newJob.updateProfiles(entry); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. newJob.updateProfiles( mProps ); return newJob; } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. * * @param site the site for which the default entry is required. * * * @return the default entry. */ private TransformationCatalogEntry defaultTCEntry( String site ){ TransformationCatalogEntry defaultTCEntry = null; //check if PEGASUS_HOME is set String home = mSiteStore.getPegasusHome( site ); //if PEGASUS_HOME is not set, use VDS_HOME //home = ( home == null )? mSiteStore.getVDSHome( site ): home; mLogger.log( "Creating a default TC entry for " + COMPLETE_TRANSFORMATION_NAME + " at site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); //if home is still null if ( home == null ){ //cannot create default TC mLogger.log( "Unable to create a default entry for " + COMPLETE_TRANSFORMATION_NAME, LogManager.DEBUG_MESSAGE_LEVEL ); //set the flag back to true return defaultTCEntry; } //remove trailing / if specified home = ( home.charAt( home.length() - 1 ) == File.separatorChar )? home.substring( 0, home.length() - 1 ): home; //construct the path to it StringBuffer path = new StringBuffer(); path.append( home ).append( File.separator ). append( "bin" ).append( File.separator ). 
append( DefaultImplementation.EXECUTABLE_BASENAME ); defaultTCEntry = new TransformationCatalogEntry( DefaultImplementation.TRANSFORMATION_NAMESPACE, DefaultImplementation.TRANSFORMATION_NAME, DefaultImplementation.TRANSFORMATION_VERSION ); defaultTCEntry.setPhysicalTransformation( path.toString() ); defaultTCEntry.setResourceId( site ); defaultTCEntry.setType( TCType.INSTALLED ); defaultTCEntry.setSysInfo( this.mSiteStore.lookup( site ).getSysInfo() ); //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug. as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.DEBUG_MESSAGE_LEVEL ); } return defaultTCEntry; } /** * Determines the site where the create dir job should be run , looking at the * directory URL passed. Preference is given to local site unless the directoryURL * is a file URL. In that case, the create dir job is executed on the site * where the directory is to be created. * * @param site the site where the directory is to be created * @param directoryURL the URL to the directory. * * @return the site for create dir job */ protected String getCreateDirJobExecutionSite( String site, String directoryURL ) { String result = "local"; if( directoryURL != null && directoryURL.startsWith( this.FILE_URL_SCHEME ) ){ result = site; } return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/createdir/Tentacles.java0000644000175000017500000001255011757531137027456 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.createdir; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.DAGJob; import edu.isi.pegasus.planner.classes.DAXJob; import java.util.Iterator; import java.util.Set; /** * This Strategy instance places the create directory jobs at the top of the graph. * However instead of constricting it to an hour glass shape, this class links * it to all the relevant nodes for which the create dir job is necessary. It is * like that it spreads its tentacles all around. This potentially ends up * putting more load on the DagMan with all the dependencies but removes the * restriction of the plan progressing only when all the create directory * jobs have progressed on the remote pools, as in the HourGlass model. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 4716 $ */ public class Tentacles extends AbstractStrategy { /** * Intializes the class. 
* * @param bag bag of initialization objects * @param impl the implementation instance that creates create dir job */ public void initialize( PegasusBag bag, Implementation impl ){ super.initialize( bag , impl ); } /** * Modifies the workflow to add create directory nodes. The workflow passed * is a worklow, where the jobs have been mapped to sites. * * @param dag the workflow to which the nodes have to be added. * * @return the added workflow */ public ADag addCreateDirectoryNodes( ADag dag ){ Set set = this.getCreateDirSites( dag ); String pool = null; String jobName = null; String parent = null; //traverse through the jobs and //looking at their execution pool //and create a dependency to the //the correct create node //we add links first and jobs later //remove the entry for the local pool //set.remove("local"); Job job; int type; boolean local; for(Iterator it = dag.vJobSubInfos.iterator();it.hasNext();){ job = (Job)it.next(); jobName = job.getName(); pool = job.getSiteHandle(); if( job.getJobType() == Job.CHMOD_JOB ){ parent = getCreateDirJobName( dag, job.getSiteHandle() ); } else{ //the parent in case of a transfer job //is the non third party site String site = ( job instanceof TransferJob )? ((TransferJob)job).getNonThirdPartySite(): job.getStagingSiteHandle(); if( site == null ){ //only ok for stage worker jobs if( job instanceof TransferJob ){ mLogger.log( "Not adding edge to create dir job for job " + job.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); continue; } } parent = getCreateDirJobName( dag, site); } //put in the dependency only for transfer jobs that stage in data //or are jobs running on remote sites //or are compute jobs running on local site type = job.getJobType(); local = pool.equals("local"); if( (job instanceof TransferJob && type != Job.STAGE_OUT_JOB ) || (!local || (type == Job.COMPUTE_JOB /*|| type == Job.STAGED_COMPUTE_JOB*/ || job instanceof DAXJob || job instanceof DAGJob ))){ //sanity check if( parent == null ){ //throw an error throw new RuntimeException( "Job not associated with staging site " + job.getID() ); } mLogger.log("Adding relation " + parent + " -> " + jobName, LogManager.DEBUG_MESSAGE_LEVEL); dag.addNewRelation(parent,jobName); } } //for each execution pool add //a create directory node. Job newJob = null; for (Iterator it = set.iterator();it.hasNext();){ pool = (String)it.next(); jobName = getCreateDirJobName( dag, pool); newJob = mImpl.makeCreateDirJob( pool, jobName, mSiteStore.getExternalWorkDirectoryURL( pool ) ); dag.add(newJob); } return dag; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/createdir/AbstractStrategy.java0000644000175000017500000001245411757531137031025 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.refiner.createdir; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.partitioner.graph.Graph; import java.util.HashSet; import java.util.Set; import java.util.Iterator; /** * The interface that defines how the cleanup job is invoked and created. * * @author Karan Vahi * @version $Revision: 4716 $ */ public abstract class AbstractStrategy implements Strategy { /** * Constant suffix for the names of the create directory nodes. */ public static final String CREATE_DIR_SUFFIX = "_cdir"; /** * Constant prefix for the names of the create directory nodes. */ public static final String CREATE_DIR_PREFIX = "create_dir_"; /** * The handle to the logging object, that is used to log the messages. */ protected LogManager mLogger; /** * The job prefix that needs to be applied to the job file basenames. */ protected String mJobPrefix; /** * Whether we want to use dirmanager or mkdir directly. */ protected boolean mUseMkdir; /** * The implementation instance that is used to create a create dir job. */ protected Implementation mImpl; /** * The Site Store handle. */ protected SiteStore mSiteStore; /** * Intializes the class. * * @param bag bag of initialization objects * @param impl the implementation instance that creates create dir job */ public void initialize( PegasusBag bag, Implementation impl ){ mImpl = impl; mJobPrefix = bag.getPlannerOptions().getJobnamePrefix(); mLogger = bag.getLogger(); mSiteStore = bag.getHandleToSiteStore(); //in case of staging of executables/worker package //we use mkdir directly mUseMkdir = bag.getPegasusProperties().transferWorkerPackage(); } /** * It returns the name of the create directory job, that is to be assigned. * The name takes into account the workflow name while constructing it, as * that is thing that can guarentee uniqueness of name in case of deferred * planning. * * @param dag the workflow to which the create dir jobs are being added. * @param pool the execution pool for which the create directory job * is responsible. * * @return String corresponding to the name of the job. */ public String getCreateDirJobName( ADag dag, String pool){ //sanity check if( pool == null ){ return null; } StringBuffer sb = new StringBuffer(); sb.append( AbstractStrategy.CREATE_DIR_PREFIX ); //append the job prefix if specified in options at runtime if ( mJobPrefix != null ) { sb.append( mJobPrefix ); } sb.append( dag.dagInfo.nameOfADag).append("_"). append( dag.dagInfo.index).append("_"); //sb.append(pool).append(this.CREATE_DIR_SUFFIX); sb.append( pool ); return sb.toString(); } /** * Retrieves the sites for which the create dir jobs need to be created. * It returns all the sites where the compute jobs have been scheduled. * * * @return a Set containing a list of siteID's of the sites where the * dag has to be run. */ protected Set getCreateDirSites( ADag dag ){ Set set = new HashSet(); for( Iterator it = dag.vJobSubInfos.iterator();it.hasNext();){ Job job = (Job)it.next(); if( job.getJobType() == Job.CHMOD_JOB ){ //skip //chmod jobs dont have a staging site associated //they are only created in the shared fs mode. 
continue; } //add to the set only if the job is //being run in the work directory //this takes care of local site create dir if( job.runInWorkDirectory()){ String site = job.getStagingSiteHandle(); //sanity check for remote transfer jobs if( job instanceof TransferJob ){ site = ((TransferJob)job).getNonThirdPartySite(); } //System.out.println( "Job staging site handle " + job.getID() + " " + site ); set.add( site ); } } //remove the stork pool set.remove("stork"); return set; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/createdir/HourGlass.java0000644000175000017500000002223711757531137027446 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.createdir; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Pegasus; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.Vector; import edu.isi.pegasus.common.util.Separator; /** * This class inserts the nodes for creating the random directories on the remote * execution pools. This is done when the resources have already been selected * to execute the jobs in the Dag. It adds a make directory node at the top level * of the graph, and all these concat to a single dummy job before branching * out to the root nodes of the original/ concrete dag so far. So we end up * introducing a classic X shape at the top of the graph. Hence the name * HourGlass. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 4553 $ */ public class HourGlass extends AbstractStrategy{ /** * The name concatenating dummy job that ensures that Condor does not start * staging in before the directories are created. */ public static final String DUMMY_CONCAT_JOB = "pegasus_concat"; /** * The prefix assigned to the concatenating dummy job that ensures that Condor does not start * staging in before the directories are created. */ public static final String DUMMY_CONCAT_JOB_PREFIX = "pegasus_concat_"; /** * The transformation namespace for the create dir jobs. */ public static final String TRANSFORMATION_NAMESPACE = "pegasus"; /** * The logical name of the transformation that creates directories on the * remote execution pools. */ public static final String TRANSFORMATION_NAME = "dirmanager"; /** * The version number for the derivations for create dir jobs. */ public static final String TRANSFORMATION_VERSION = null; /** * The complete TC name for dirmanager. */ public static final String COMPLETE_TRANSFORMATION_NAME = Separator.combine( TRANSFORMATION_NAMESPACE, TRANSFORMATION_NAME, TRANSFORMATION_VERSION ); /** * The derivation namespace for the create dir jobs. 
*/ public static final String DERIVATION_NAMESPACE = "pegasus"; /** * The logical name of the transformation that creates directories on the * remote execution pools. */ public static final String DERIVATION_NAME = "dirmanager"; /** * The version number for the derivations for create dir jobs. */ public static final String DERIVATION_VERSION = "1.0"; /** * Intializes the class. * * @param bag bag of initialization objects * @param impl the implementation instance that creates create dir job */ public void initialize( PegasusBag bag, Implementation impl ){ super.initialize( bag, impl ); } /** * It modifies the concrete dag passed in the constructor and adds the create * random directory nodes to it at the root level. These directory nodes have * a common child that acts as a concatenating job and ensures that Condor * does not start staging in the data before the directories have been added. * The root nodes in the unmodified dag are now chidren of this concatenating * dummy job. * @param dag the workflow to which the nodes have to be added. * * @return the added workflow */ public ADag addCreateDirectoryNodes( ADag dag ){ Set set = this.getCreateDirSites( dag ); //remove the entry for the local pool //set.remove("local"); String pool = null; String jobName = null; Job newJob = null; Job concatJob = null; //add the concat job if (!set.isEmpty()) { concatJob = makeDummyConcatJob( dag ); introduceRootDependencies( dag, concatJob.jobName); dag.add(concatJob); } //for each execution pool add //a create directory node. for (Iterator it = set.iterator();it.hasNext();){ pool = (String) it.next(); jobName = getCreateDirJobName( dag, pool); newJob = mImpl.makeCreateDirJob( pool, jobName, mSiteStore.getExternalWorkDirectoryURL( pool ) ); dag.add(newJob); //add the relation to the concat job String msg = "Adding relation " + jobName + " -> " + concatJob.jobName; mLogger.log( msg, LogManager.DEBUG_MESSAGE_LEVEL ); dag.addNewRelation( jobName, concatJob.jobName ); } return dag; } /** * It traverses through the root jobs of the dag and introduces a new super * root node to it. * * @param dag the DAG * @param newRoot the name of the job that is the new root of the graph. */ private void introduceRootDependencies( ADag dag, String newRoot) { Vector vRootNodes = dag.getRootNodes(); Iterator it = vRootNodes.iterator(); String job = null; while (it.hasNext()) { job = (String) it.next(); dag.addNewRelation(newRoot, job); mLogger.log( "Adding relation " + newRoot + " -> " + job,LogManager.DEBUG_MESSAGE_LEVEL); } } /** * It creates a dummy concat job that is run at the local submit host. * This job should run always provided the directories were created * successfully. * * @param dag the workflow * * @return the dummy concat job. 
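     *
     * The job deliberately does nothing: it runs /bin/true on the local site,
     * is flagged as a DAGMan noop via the noop_job and noop_job_exit_code
     * condor profile keys, and is launched without kickstart. Its only
     * purpose is to gate data staging behind the create-dir jobs.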
*/ public Job makeDummyConcatJob( ADag dag ) { Job newJob = new Job(); List entries = null; String execPath = null; //jobname has the dagname and index to indicate different //jobs for deferred planning newJob.jobName = getConcatJobname( dag ); newJob.setTransformation( HourGlass.TRANSFORMATION_NAMESPACE, HourGlass.TRANSFORMATION_NAME, HourGlass.TRANSFORMATION_VERSION ); newJob.setDerivation( HourGlass.DERIVATION_NAMESPACE, HourGlass.DERIVATION_NAME, HourGlass.DERIVATION_VERSION ); // newJob.condorUniverse = Engine.REGISTRATION_UNIVERSE; newJob.setUniverse( GridGateway.JOB_TYPE.auxillary.toString()); //the noop job does not get run by condor //even if it does, giving it the maximum //possible chance newJob.executable = "/bin/true"; //construct noop keys newJob.executionPool = "local"; newJob.jobClass = Job.CREATE_DIR_JOB; construct(newJob,"noop_job","true"); construct(newJob,"noop_job_exit_code","0"); //we do not want the job to be launched //by kickstart, as the job is not run actually newJob.vdsNS.checkKeyInNS( Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[GridStartFactory.NO_GRIDSTART_INDEX] ); return newJob; } /** * Returns the name of the concat job * * @return name */ protected String getConcatJobname( ADag dag ){ StringBuffer sb = new StringBuffer(); sb.append( HourGlass.DUMMY_CONCAT_JOB_PREFIX ); //append the job prefix if specified in options at runtime if ( mJobPrefix != null ) { sb.append( mJobPrefix ) ;} sb.append( dag.dagInfo.nameOfADag ).append( "_" ). append( dag.dagInfo.index )/*.append( "_" )*/; //append the job prefix if specified in options at runtime //if ( mJobPrefix != null ) { sb.append( mJobPrefix ); } //sb.append( this.DUMMY_CONCAT_JOB ); return sb.toString(); } /** * Constructs a condor variable in the condor profile namespace * associated with the job. Overrides any preexisting key values. * * @param job contains the job description. * @param key the key of the profile. * @param value the associated value. */ private void construct(Job job, String key, String value){ job.condorVariables.checkKeyInNS(key,value); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/createdir/Implementation.java0000644000175000017500000000347511757531137030527 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.createdir; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import java.util.List; /** * The interface that defines how the create dir job is created. * * @author Karan Vahi * @version $Revision: 4553 $ */ public interface Implementation { /** * The version number associated with this API. */ public static final String VERSION = "1.1"; /** * Intializes the class. * * @param bag bag of initialization objects */ public void initialize( PegasusBag bag ) ; /** * It creates a make directory job that creates a directory on the remote pool * using the perl executable that Gaurang wrote. It access mkdir underneath. 
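     *
     * Note that where the job actually runs is left to the implementation;
     * the DefaultImplementation in this package, for instance, executes it on
     * the staging site only when the directory URL is a file: URL, and on the
     * local site otherwise.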
* * * @param site the execution site for which the create dir job is to be * created. * @param name the name that is to be assigned to the job. * @param directoryURL the externally accessible URL to the directory that is * created * * @return create dir job. */ public Job makeCreateDirJob( String site, String name, String directoryURL ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/Engine.java0000644000175000017500000002007611757531137025001 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.catalog.site.impl.old.PoolMode; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import java.util.ArrayList; import java.util.Enumeration; import java.util.Iterator; import java.util.List; import java.util.Vector; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The class which is a superclass of all the various Engine classes. It * defines common methods and member variables. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2582 $ * */ public abstract class Engine { //constants public static final String REGISTRATION_UNIVERSE = "registration"; public static final String TRANSFER_UNIVERSE = "transfer"; /** * The pool on which all the output data should be transferred. */ protected static String mOutputPool; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The path to the file containing the pool information. By default it is * $PEGASUS_HOME/etc/pool.config */ protected String mPoolFile; /** * The handle to the Transformation Catalog. It must be instantiated * in the implementing class. */ protected TransformationCatalog mTCHandle; /** * The path to the file containing the pool information. By default it is * $PEGASUS_HOME/etc/tc.data. */ protected String mTCFile; /** * The handle to the Pool Info Provider. It is instantiated in this class */ //protected PoolInfoProvider mPoolHandle; protected SiteStore mSiteStore; /** * Contains the message which is to be logged by Pegasus. */ protected String mLogMsg = new String(); /** * The Replica Location Index URL got from vds.rls.url property */ protected String mRLIUrl = new String(); /** * Defines the read mode for transformation catalog. Whether we want to read all * at once or as desired. * * @see org.griphyn.common.catalog.transformation.TCMode */ protected String mTCMode; /** * The logging object which is used to log all the messages. * */ protected LogManager mLogger ; /** * Contains the various options to the Planner as passed by the user at * runtime. 
*/ protected PlannerOptions mPOptions; /** * The bag of initialization objects */ protected PegasusBag mBag; /** * A pratically nothing constructor ! * * * @param bag bag of initialization objects */ public Engine( PegasusBag bag ) { mBag = bag; mLogger = bag.getLogger(); mProps = bag.getPegasusProperties() ; mPOptions = bag.getPlannerOptions(); mTCHandle = bag.getHandleToTransformationCatalog(); mSiteStore= bag.getHandleToSiteStore(); loadProperties(); } /** * Loads all the properties that are needed by the Engine classes. */ public void loadProperties() { //get from the properties object mPoolFile = mProps.getPoolFile(); mTCFile = mProps.getTCPath(); mRLIUrl = mProps.getRLIURL(); String rmode = mProps.getReplicaMode(); String tcmode = mProps.getTCMode(); } /** * Returns true if a particular String is in the Vector of strings. * * @param stringName the String which has to be searched for in the Vector. * @param vector the Vector of Strings in which to search for a * particular String. * * @return boolean on the basis of whether the String in Vector or not. */ public boolean stringInVector(String stringName, Vector vector) { Enumeration e = vector.elements(); while (e.hasMoreElements()) { if (stringName.equalsIgnoreCase( (String) e.nextElement())) { return true; } } return false; } public boolean stringInList(String stringName, List list) { if (list.contains(stringName)) { return true; } else { return false; } } /** * Returns true if a particular String is in the Vector of PegasusFile objects. * * @param stringName the String which has to be searched for in the Vector. * @param vector the Vector of Strings in which to search for a particular * String * * @return boolean on the basis of whether the String in Vector or not. * */ public boolean stringInPegVector(String stringName, Vector vector) { Enumeration e = vector.elements(); while (e.hasMoreElements()) { PegasusFile pf = (PegasusFile) e.nextElement(); if (stringName.equalsIgnoreCase(pf.getLFN())) { return true; } } return false; } /** * Adds elements (PegasusFile type) in a Vector to another Vector and * returns the new Vector. * * @param from_vector the source * @param to_vector the destination * * @return Vector of PegasusFile objects */ public Vector addVector(Vector from_vector, Vector to_vector) { Enumeration e = from_vector.elements(); Vector newVector = (Vector) to_vector.clone(); while (e.hasMoreElements()) { PegasusFile pf = (PegasusFile) e.nextElement(); newVector.addElement(pf); /*String elem = new String((String)e.nextElement()); if(!stringInVector(elem,to_vector)){ newVector.addElement(elem); }*/ } return newVector; } /** * It prints the contents of the Vector, with the first line being the heading. * * @param heading The heading you want to give to the text which is printed. * @param vector The Vector whose elements you want to print. */ public void printVector(String heading, Vector vector) { mLogger.log(heading, LogManager.DEBUG_MESSAGE_LEVEL); for(Iterator it = vector.iterator() ; it.hasNext() ;) { mLogger.log( it.next().toString() , LogManager.DEBUG_MESSAGE_LEVEL); } } /** * It prints the contents of the Vector, to a String with the first line being * the heading. * * @param heading The heading you want to give to the text which is printed. * @param vector The Vector whose elements you want to print. 
* * @return String */ public String vectorToString(String heading, Vector vector) { Enumeration e = vector.elements(); String st = heading; while (e.hasMoreElements()) { st += "\t" + e.nextElement(); } return st; } /** * It appends the source list at the end of the destination list. * * @param dest the destination list * @param source the source list */ public void appendArrayList(ArrayList dest, ArrayList source) { Iterator iter = source.iterator(); while (iter.hasNext()) { dest.add(iter.next()); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/CleanupEngine.java0000644000175000017500000000571011757531137026307 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.refiner.cleanup.CleanupStrategy; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.partitioner.graph.Adapter; import java.util.Iterator; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.refiner.cleanup.CleanupFactory; /** * The refiner that results in the creation of cleanup jobs within the workflow. * * @author Karan Vahi * @version $Revision: 2590 $ * */ public class CleanupEngine extends Engine { /** * The overloaded constructor. * * @param bag the bag of initialization objects * */ public CleanupEngine( PegasusBag bag ) { super( bag ); } /** * Adds the cleanup jobs in the workflow that remove the files staged to the * remote site. * * @param dag the scheduled dag to which the cleanup jobs have to be added. * * @return ADag containing the cleanup jobs for the workflow.
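*
* A usage sketch (the bag and the scheduled dag are assumed to come from the
* surrounding planner code):
* <pre>
*   CleanupEngine cEngine = new CleanupEngine( bag );
*   dag = cEngine.addCleanupJobs( dag );
* </pre>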
*/ public ADag addCleanupJobs( ADag dag ) { ADag result; //load the appropriate strategy and implementation that is to be used //CleanupStrategy strategy = new InPlace( mBag ); CleanupStrategy strategy = CleanupFactory.loadCleanupStraegyInstance( mBag ); //we first need to convert internally into graph format Graph resultGraph = strategy.addCleanupJobs( Adapter.convert(dag ) ); //convert back to ADag and return result = dag; //we need to reset the jobs and the relations in it result.clearJobs(); //traverse through the graph and jobs and edges for( Iterator it = resultGraph.nodeIterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); //get the job associated with node result.add( ( Job )node.getContent() ); //all the children of the node are the edges of the DAG for( Iterator childrenIt = node.getChildren().iterator(); childrenIt.hasNext(); ){ GraphNode child = ( GraphNode ) childrenIt.next(); result.addNewRelation( node.getID(), child.getID() ); } } return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/DeployWorkerPackage.java0000644000175000017500000016167211757531137027506 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.Adapter; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.transfer.Implementation; import edu.isi.pegasus.planner.transfer.implementation.ImplementationFactory; import edu.isi.pegasus.planner.transfer.Refiner; import edu.isi.pegasus.planner.transfer.refiner.RefinerFactory; import edu.isi.pegasus.planner.selector.TransformationSelector; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.catalog.classes.SysInfo; import edu.isi.pegasus.planner.catalog.transformation.Mapper; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.transfer.RemoteTransfer; import java.util.List; import java.util.ArrayList; import java.util.Iterator; import java.util.Set; import java.util.HashSet; import java.util.Map; import java.util.HashMap; import 
java.io.File; import java.util.LinkedList; import java.util.regex.Pattern; import java.util.regex.Matcher; /** * The refiner that is responsible for adding * - setup nodes that deploy a worker package on each deployment site at start * of workflow execution * - cleanup nodes that undeploy a worker package on each deployment site at end * of workflow execution * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 538 $ */ public class DeployWorkerPackage extends Engine { /** * Constant prefix for the names of the deployment nodes. */ public static final String DEPLOY_WORKER_PREFIX = "stage_worker_"; /** * Constant prefix for the names of the untar nodes. */ public static final String UNTAR_PREFIX = "untar_"; /** * Constant prefix for the names of the cleanup nodes. */ public static final String CLEANUP_PREFIX = "cleanup_"; /** * Array storing the names of the executables in the $PEGASUS_HOME/bin directory. * Associates the transformation name with the executable basenames */ public static final String PEGASUS_WORKER_EXECUTABLES[][] = { { "transfer", "pegasus-transfer" }, { "kickstart", "pegasus-kickstart" }, { "cleanup", "pegasus-cleanup" }, { "seqexec", "pegasus-cluster"}, { "dirmanager", "pegasus-create-dir" }, { "invoke", "pegasus-invoke" }, { "keg" , "pegasus-keg" }, }; /** * Stores the regular expression necessary to parse the basename from the worker * package url to retrieve the version of pegasus. */ private static final String mRegexExpression = // "(pegasus-)(binary|worker)-([0-9]\\.[0-9]\\.[0-9][a-zA-Z]*)-x86.*"; "(pegasus-)(binary|worker)-([0-9]\\.[0-9]\\.[0-9][a-zA-Z]*)-(x86|x86_64|ia64|ppc).*"; /** * The path to be set for create dir jobs. */ public static final String PATH_VALUE = ".:/bin:/usr/bin:/usr/ucb/bin"; /** * The default transfer refiner name. */ public static final String DEFAULT_REFINER = "Default"; /** * The transformation namespace for the worker package */ public static final String TRANSFORMATION_NAMESPACE = "pegasus"; /** * The logical name of the worker package */ public static final String TRANSFORMATION_NAME = "worker"; /** * The version number for the worker package. */ public static final String TRANSFORMATION_VERSION = null; /** * The transformation namespace for the untar job. */ public static final String UNTAR_TRANSFORMATION_NAMESPACE = null; /** * The logical name of the untar transformation. */ public static final String UNTAR_TRANSFORMATION_NAME = "tar"; /** * The version number for the untar transformation. */ public static final String UNTAR_TRANSFORMATION_VERSION = null; /** * The complete TC name for untar. */ public static final String COMPLETE_UNTAR_TRANSFORMATION_NAME = Separator.combine( UNTAR_TRANSFORMATION_NAMESPACE, UNTAR_TRANSFORMATION_NAME, UNTAR_TRANSFORMATION_VERSION ); /** * The complete TC name for pegasus worker package. */ public static final String COMPLETE_TRANSFORMATION_NAME = Separator.combine( TRANSFORMATION_NAMESPACE, TRANSFORMATION_NAME, TRANSFORMATION_VERSION ); /** * The derivation namespace for the worker package. */ public static final String DERIVATION_NAMESPACE = "pegasus"; /** * The logical name of the transformation for the worker package */ public static final String DERIVATION_NAME = "worker"; /** * The version number for the derivations for worker package. */ public static final String DERIVATION_VERSION = "2.0"; /** * The derivation namespace for the untar job. */ public static final String UNTAR_DERIVATION_NAMESPACE = null; /** * The logical name of the transformation for the untar job.
*/ public static final String UNTAR_DERIVATION_NAME = "worker"; /** * The version number for the derivations for untar job. */ public static final String UNTAR_DERIVATION_VERSION = "2.0"; /** * The name of the package in which all the implementing classes are. */ public static final String PACKAGE_NAME = "edu.isi.pegasus.planner.refiner."; /** * The base directory URL for the builds. */ public static final String BASE_BUILD_DIRECTORY_URL = "http://download.pegasus.isi.edu/wms/download/"; /** * The version of pegasus matching the planner. */ public static final String PEGASUS_VERSION = Version.instance().toString(); /** * Stores compiled patterns at first use, quasi-Singleton. */ private static Pattern mPattern = null; /** * The map storing OS to corresponding NMI OS platforms. */ private static Map mOSToNMIOSReleaseAndVersion = null; /** * Maps each OS to a specific OS release for purposes of picking up the * correct worker package for a site. The mapping is to be kept consistent * with the NMI builds for the releases. * * * @return map */ private static Map osToOSReleaseAndVersion(){ //singleton access if( mOSToNMIOSReleaseAndVersion == null ){ mOSToNMIOSReleaseAndVersion = new HashMap(); mOSToNMIOSReleaseAndVersion.put( SysInfo.OS.LINUX, "rhel_5" ); mOSToNMIOSReleaseAndVersion.put( SysInfo.OS.MACOSX, "macos_10.5" ); } return mOSToNMIOSReleaseAndVersion; } /** * It is a reference to the Concrete Dag so far. */ protected ADag mCurrentDag; /** * The job prefix that needs to be applied to the job file basenames. */ protected String mJobPrefix; /** * The transfer implementation to be used for staging in the data as part * of setup job. */ protected Implementation mSetupTransferImplementation; /** * The FileTransfer map indexed by site id. */ protected Map mFTMap; /** * Map that indicates whether we need local setup transfer jobs for a site or * not. */ protected Map mLocalTransfers; /** * Maps a site to the directory where the pegasus worker package has * been untarred during workflow execution. */ protected Map mSiteToPegasusHomeMap; /** * The user specified location from where to stage the worker packages. */ protected String mUserSpecifiedSourceLocation; /** * Boolean indicating whether to use the user specified location or not */ protected boolean mUseUserSpecifiedSourceLocation; /** * Boolean indicating whether user wants the worker package to be transferred * or not. */ protected boolean mTransferWorkerPackage; /** * The major minor version that is used to construct the URL for the * pegasus website. */ protected String mPlannerMajorMinorVersion; /** * Boolean indicating worker node execution. */ protected boolean mWorkerNodeExecution; /** * Loads the implementing class corresponding to the mode specified by the * user at runtime. * * @param bag bag of initialization objects * * @return instance of a DeployWorkerPackage implementation * * @throws FactoryException that nests any error that * might occur during the instantiation of the implementation.
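*
* A usage sketch (bag and scheduledDAG assumed to come from the calling
* planner code):
* <pre>
*   DeployWorkerPackage dwp = DeployWorkerPackage.loadDeployWorkerPackage( bag );
*   dwp.initialize( scheduledDAG );
*   scheduledDAG = dwp.addSetupNodes( scheduledDAG );
* </pre>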
*/ public static DeployWorkerPackage loadDeployWorkerPackage( PegasusBag bag ) throws FactoryException { //prepend the package name String className = PACKAGE_NAME + "DeployWorkerPackage"; //try loading the class dynamically DeployWorkerPackage dp = null; DynamicLoader dl = new DynamicLoader(className); try { Object argList[] = new Object[ 1 ]; argList[0] = bag; dp = (DeployWorkerPackage) dl.instantiate(argList); } catch (Exception e) { throw new FactoryException( "Instantiating Deploy Worker Package", className, e ); } return dp; } /** * A practically nothing constructor! * * * @param bag bag of initialization objects */ public DeployWorkerPackage( PegasusBag bag ) { super( bag ); mCurrentDag = null; mFTMap = new HashMap(); mLocalTransfers = new HashMap(); mSiteToPegasusHomeMap = new HashMap(); mJobPrefix = bag.getPlannerOptions().getJobnamePrefix(); mTransferWorkerPackage = mProps.transferWorkerPackage(); mWorkerNodeExecution = mProps.executeOnWorkerNode(); //load the transfer setup implementation //TODO: specify type for loading mSetupTransferImplementation = ImplementationFactory.loadInstance( bag, ImplementationFactory.TYPE_SETUP ); mUserSpecifiedSourceLocation = mProps.getBaseSourceURLForSetupTransfers(); mUseUserSpecifiedSourceLocation = !( mUserSpecifiedSourceLocation == null || mUserSpecifiedSourceLocation.trim().length()== 0 ); Version version = Version.instance(); StringBuffer sb = new StringBuffer(); sb.append( version.MAJOR ).append( "." ).append( version.MINOR ); mPlannerMajorMinorVersion = sb.toString(); } /** * Initialize with the scheduled graph. Results in the appropriate * population of the transformation catalog with pegasus-worker executables. * * * @param scheduledDAG the scheduled workflow. */ public void initialize( ADag scheduledDAG ) { Mapper m = mBag.getHandleToTransformationMapper(); if( !m.isStageableMapper() ){ //we want to load a stageable mapper mLogger.log( "User set mapper is not a stageable mapper. Loading a stageable mapper ", LogManager.DEBUG_MESSAGE_LEVEL ); m = Mapper.loadTCMapper( "Staged", mBag ); } SiteStore siteStore = mBag.getHandleToSiteStore(); RemoteTransfer remoteTransfers = new RemoteTransfer( mProps ); remoteTransfers.buildState(); //figure out if we need to deploy or not if( !mTransferWorkerPackage ){ mLogger.log( "No Deployment of Worker Package needed" , LogManager.DEBUG_MESSAGE_LEVEL ); return; } mLogger.log( "Deployment of Worker Package needed" , LogManager.DEBUG_MESSAGE_LEVEL ); //load the transformation selector. different //selectors may end up being loaded for different jobs.
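//Illustrative sketch (values hypothetical): the kind of STAGEABLE entry the
//lookups below expect, mirroring what addDefaultTCEntryForPegasusWebsite()
//constructs when the catalog has no matching entry:
//  TransformationCatalogEntry e = new TransformationCatalogEntry( "pegasus", "worker", null );
//  e.setPhysicalTransformation( "http://download.pegasus.isi.edu/wms/download/4.0/pegasus-worker-4.0.1-x86_rhel_5.tar.gz" );
//  e.setResourceId( "pegasus" );
//  e.setType( TCType.STAGEABLE );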
TransformationSelector txSelector = TransformationSelector.loadTXSelector( mProps.getTXSelectorMode() ); Refiner defaultRefiner = RefinerFactory.loadInstance( DeployWorkerPackage.DEFAULT_REFINER, mBag, scheduledDAG ) ; mSetupTransferImplementation.setRefiner( defaultRefiner ); //a map indexed by execution site and the corresponding worker package //location in the submit directory Map workerPackageMap = new HashMap(); //for the pegasus lite case, we insert entries into the //transformation catalog for all worker package executables //the planner requires with just the basename boolean useFullPath = !( mWorkerNodeExecution ); //for each site insert default entries in the Transformation Catalog //for each scheduled site query TCMapper Set deploymentSites = this.getDeploymentSites( scheduledDAG ); for( Iterator it = deploymentSites.iterator(); it.hasNext(); ){ String site = ( String ) it.next(); //check if there is a valid entry for worker package List entries, selectedEntries = null; try{ entries = m.getTCList( DeployWorkerPackage.TRANSFORMATION_NAMESPACE, DeployWorkerPackage.TRANSFORMATION_NAME, DeployWorkerPackage.TRANSFORMATION_VERSION, site ); selectedEntries = txSelector.getTCEntry( entries ); }catch( Exception e ){ /*ignore*/} //try and create a default entry for pegasus::worker if //not specified in transformation catalog if( selectedEntries == null || selectedEntries.size() == 0 ){ TransformationCatalogEntry entry = this.addDefaultTCEntryForPegasusWebsite( site, DeployWorkerPackage.TRANSFORMATION_NAME ); if( entry == null ){ StringBuffer error = new StringBuffer(); error.append( "Unable to construct default entry for pegasus::worker for site " ).append( site ) .append( " Add entry in TC for pegasus::worker of type STAGEABLE for sysinfo ") .append( mSiteStore.getSysInfo( site ) ); throw new RuntimeException( error.toString() ); } } } //for each scheduled site query TCMapper for( Iterator it = deploymentSites.iterator(); it.hasNext(); ){ String site = ( String ) it.next(); String stagingSite = this.getStagingSite( site ); //get the set of valid tc entries List entries = m.getTCList( DeployWorkerPackage.TRANSFORMATION_NAMESPACE, DeployWorkerPackage.TRANSFORMATION_NAME, DeployWorkerPackage.TRANSFORMATION_VERSION, site ); //get selected entries List selectedEntries = txSelector.getTCEntry( entries ); if( selectedEntries == null || selectedEntries.size() == 0 ){ throw new RuntimeException( "Unable to find a valid location to stage " + Separator.combine( DeployWorkerPackage.TRANSFORMATION_NAMESPACE, DeployWorkerPackage.TRANSFORMATION_NAME, DeployWorkerPackage.TRANSFORMATION_VERSION ) ); } //select the first entry from selector TransformationCatalogEntry selected = ( TransformationCatalogEntry )selectedEntries.get( 0 ); mLogger.log( "Selected entry " + selected, LogManager.DEBUG_MESSAGE_LEVEL ); //figure out the directory where to stage the data //data will be staged to the staging site corresponding to //the execution site String baseRemoteWorkDir = ( mWorkerNodeExecution )? //for pegasus-lite the worker package goes //to the submit directory on the local site. 
this.mPOptions.getSubmitDirectory() : siteStore.getInternalWorkDirectory( stagingSite ); if( useFullPath ){ // we insert entries into the transformation catalog for all worker // package executables the planner requires with full paths //this is the shared fs case String name = getRootDirectoryNameForPegasus( selected.getPhysicalTransformation() ); File pegasusHome = new File( baseRemoteWorkDir, name ); StringBuffer sb = new StringBuffer(); sb.append( "Directory where pegasus worker executables will reside on site ").append( stagingSite ). append( " " ).append( pegasusHome.getAbsolutePath() ); mLogger.log( sb.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); mSiteToPegasusHomeMap.put( stagingSite, pegasusHome.getAbsolutePath() ); //now create transformation catalog entry objects for each //worker package executable for( int i = 0; i < PEGASUS_WORKER_EXECUTABLES.length; i++){ TransformationCatalogEntry entry = addDefaultTCEntry( stagingSite, pegasusHome.getAbsolutePath(), selected.getSysInfo(), useFullPath, PEGASUS_WORKER_EXECUTABLES[i][0], PEGASUS_WORKER_EXECUTABLES[i][1] ); mLogger.log( "Entry constructed " + entry , LogManager.DEBUG_MESSAGE_LEVEL ); } } else{ // we insert entries into the transformation catalog for all worker // package executables the planner requires with relative paths // and for the execution sites instead of staging site //this is the PegasusLite case //now create transformation catalog entry objects for each //worker package executable for( int i = 0; i < PEGASUS_WORKER_EXECUTABLES.length; i++){ TransformationCatalogEntry entry = addDefaultTCEntry( site, null, selected.getSysInfo(), useFullPath, PEGASUS_WORKER_EXECUTABLES[i][0], PEGASUS_WORKER_EXECUTABLES[i][1] ); mLogger.log( "Entry constructed " + entry , LogManager.DEBUG_MESSAGE_LEVEL ); } } //create the File Transfer object for shipping the worker executable String sourceURL = selected.getPhysicalTransformation(); FileTransfer ft = new FileTransfer( COMPLETE_TRANSFORMATION_NAME, null ); ft.addSource( selected.getResourceId(), sourceURL ); String baseName = sourceURL.substring( sourceURL.lastIndexOf( "/" ) + 1 ); //figure out the URL prefix depending on //the TPT configuration String destURLPrefix = siteStore.lookup( stagingSite ).getHeadNodeFS().selectScratchSharedFileServer().getURLPrefix(); boolean localTransfer = this.runTransferOnLocalSite( defaultRefiner, stagingSite, destURLPrefix, Job.STAGE_IN_JOB); String urlPrefix = localTransfer ? //lookup the site catalog to get the URL prefix destURLPrefix : //push pull mode. File URL will do "file://"; ft.addDestination( stagingSite, urlPrefix + new File( baseRemoteWorkDir, baseName ).getAbsolutePath() ); if( mWorkerNodeExecution ){ //populate the map with the submit directory locations workerPackageMap.put( site, new File( baseRemoteWorkDir, baseName ).getAbsolutePath() ); } mFTMap.put( site, ft ); mLocalTransfers.put( stagingSite, localTransfer ); } //for pegasus lite and worker package execution //we add the worker package map to PegasusBag if( mWorkerNodeExecution ){ mBag.add( PegasusBag.WORKER_PACKAGE_MAP, workerPackageMap ); } } /** * Returns whether to run a transfer job on the local site or not. * * @param refiner the transfer refiner in use. * @param site the site handle associated with the destination URL. * @param destinationURL the destination URL * @param type the type of transfer job for which the URL is being constructed. * * @return true indicating if the associated transfer job should run on local * site or not.
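*
* Decision sketch (values hypothetical): a file:// destination URL forces the
* transfer onto the remote staging site, since a file URL is only valid there:
* <pre>
*   //returns false for a non-local site with a file URL destination
*   runTransferOnLocalSite( refiner, "isi", "file:///scratch/wf/pegasus-worker.tar.gz", Job.STAGE_IN_JOB );
* </pre>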
*/ public boolean runTransferOnLocalSite( Refiner refiner, String site, String destinationURL, int type) { //check if user has specified any preference in config boolean result = true; //short cut for local site if( site.equals( "local" ) ){ //transfer to run on local site return result; } if( refiner.runTransferRemotely( site, type )){ //always use user preference return !result; } //check to see if destination URL is a file url else if( destinationURL != null && destinationURL.startsWith( TransferEngine.FILE_URL_SCHEME ) ){ result = false; } return result; } /** * Does regex magic to figure out the version of pegasus from the url, and * uses it to construct the name of the pegasus directory, when the worker package * is untarred. * * @param url the url. * * @return basename for pegasus directory */ protected String getRootDirectoryNameForPegasus( String url ){ StringBuffer result = new StringBuffer(); result.append( "pegasus-" ); //compile the pattern only once. if( mPattern == null ){ mPattern = Pattern.compile( mRegexExpression ); } String base = url.substring( url.lastIndexOf( "/" ) + 1 ); mLogger.log( "Base is " + base, LogManager.DEBUG_MESSAGE_LEVEL ); Matcher matcher = mPattern.matcher( base ); String version = null; if( matcher.matches() ){ version = matcher.group(3); } else{ throw new RuntimeException( "Unable to determine pegasus version from url " + url ); } mLogger.log( "Version is " + version, LogManager.DEBUG_MESSAGE_LEVEL ); result.append( version ); return result.toString(); } /** * Adds a setup node per execution site in the workflow that will stage the * worker node executables to the work directory on the sites the workflow * has been scheduled to. * * @param dag the scheduled workflow. * * @return the workflow with setup jobs added */ public ADag addSetupNodes( ADag dag ){ Mapper m = mBag.getHandleToTransformationMapper(); //figure out if we need to deploy or not if( !mTransferWorkerPackage ){ mLogger.log( "No Deployment of Worker Package needed" , LogManager.DEBUG_MESSAGE_LEVEL ); return dag; } //we add untar nodes only if worker node execution/pegasus lite //mode is disabled boolean addUntarJobs = !mWorkerNodeExecution; Set deploymentSites = this.getDeploymentSites( dag ); Graph workflow = ( addUntarJobs )? addSetupNodesWithUntarNodes( dag , deploymentSites ): //non pegasus lite case. shared fs addSetupNodesWithoutUntarNodes( dag, deploymentSites ); //convert back to ADag and return ADag result = dag; //we need to reset the jobs and the relations in it result.clearJobs(); //traverse through the graph and jobs and edges for( Iterator it = workflow.nodeIterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); //get the job associated with node result.add( ( Job )node.getContent() ); //all the children of the node are the edges of the DAG for( Iterator childrenIt = node.getChildren().iterator(); childrenIt.hasNext(); ){ GraphNode child = ( GraphNode ) childrenIt.next(); result.addNewRelation( node.getID(), child.getID() ); } } return result; } /** * Adds untar nodes to the workflow, in addition to the stage worker nodes. * * @param dag the dag * @param deploymentSites the sites for which the worker package has to be deployed * * @return the workflow in the graph representation with the nodes added.
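*
* Resulting shape per deployment site (sketch):
* <pre>
*   stage_worker job  -->  untar job  -->  (original workflow roots)
* </pre>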
*/ private Graph addSetupNodesWithUntarNodes( ADag dag, Set deploymentSites ) { //convert the dag to a graph representation and walk it //in a top down manner Graph workflow = Adapter.convert( dag ); //get the root nodes of the workflow List roots = workflow.getRoots(); //add a setup job per execution site for( Iterator it = deploymentSites.iterator(); it.hasNext(); ){ String site = ( String ) it.next(); //for pegasus lite mode the staging site for the worker package //should be the local site's submit directory. String stagingSite = this.getStagingSite(site); mLogger.log( "Adding worker package deployment node for " + site + " and staging site as " + stagingSite, LogManager.DEBUG_MESSAGE_LEVEL ); FileTransfer ft = (FileTransfer)mFTMap.get( site ); List fts = new ArrayList(1); fts.add( ft ); //need to propagate site info with a dummy job on the fly Job dummy = new Job() ; dummy.setSiteHandle( site ); //stage worker job runs locally or on the staging site boolean localTransfer = mLocalTransfers.get( stagingSite ) ; String tsite = localTransfer? "local" : stagingSite; TransferJob setupTXJob = mSetupTransferImplementation.createTransferJob( dummy, tsite, fts, null, this.getDeployJobName( dag, site , localTransfer), Job.STAGE_IN_WORKER_PACKAGE_JOB ); //the setupTXJob non third party site, has to be the staging site setupTXJob.setNonThirdPartySite( stagingSite ); //the setup and untar jobs need to be launched without kickstart. setupTXJob.vdsNS.construct( Pegasus.GRIDSTART_KEY, "None" ); GraphNode setupNode = new GraphNode( setupTXJob.getName(), setupTXJob ); //add the untar job Job untarJob = this.makeUntarJob( stagingSite, this.getUntarJobName( dag, site ), getBasename( ((NameValue)ft.getSourceURL()).getValue() ) ); untarJob.vdsNS.construct( Pegasus.GRIDSTART_KEY, "None" ); GraphNode untarNode = new GraphNode( untarJob.getName(), untarJob ); //untar node is child of setup setupNode.addChild( untarNode ); //add the original roots as children to untar node for( Iterator rIt = roots.iterator(); rIt.hasNext(); ){ GraphNode n = ( GraphNode ) rIt.next(); mLogger.log( "Added edge " + untarNode.getID() + " -> " + n.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); untarNode.addChild( n ); } workflow.addNode( untarNode ); workflow.addNode( setupNode ); } return workflow; } /** * Adds only the stage worker nodes to the workflow. This is used when * Pegasus Lite is used to launch the jobs on the execution sites. * * @param dag the dag * @param deploymentSites the sites for which the worker package has to be deployed * * @return the workflow in the graph representation with the nodes added. */ private Graph addSetupNodesWithoutUntarNodes( ADag dag, Set deploymentSites ) { //convert the dag to a graph representation and walk it //in a top down manner Graph workflow = Adapter.convert( dag ); //get the root nodes of the workflow List roots = workflow.getRoots(); Set fts = new HashSet(); //for pegasus lite mode the staging site for the worker package //should be the local site's submit directory.
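//Resulting shape (sketch): a single local stage_worker job, holding one
//FileTransfer per deployment site, fans out to all original workflow roots:
//  stage_worker job --> (original workflow roots)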
String stagingSite = "local"; //stage worker job runs locally or on the staging site boolean localTransfer = mLocalTransfers.get( stagingSite ) ; //add a setup job per execution site for( Iterator it = deploymentSites.iterator(); it.hasNext(); ){ String site = ( String ) it.next(); mLogger.log( "Adding worker package deployment node for " + site, LogManager.DEBUG_MESSAGE_LEVEL ); FileTransfer ft = (FileTransfer)mFTMap.get( site ); fts.add( ft ); } //need to propagate site info with a dummy job on the fly Job dummy = new Job() ; dummy.setSiteHandle( stagingSite ); String tsite = "local" ; TransferJob setupTXJob = mSetupTransferImplementation.createTransferJob( dummy, tsite, fts, null, this.getDeployJobName( dag, tsite , localTransfer), Job.STAGE_IN_WORKER_PACKAGE_JOB ); //the setupTXJob non third party site is null, as the stage worker job pulls in //data to the submit host directory setupTXJob.setNonThirdPartySite( null ); //the setup and untar jobs need to be launched without kickstart. setupTXJob.vdsNS.construct( Pegasus.GRIDSTART_KEY, "None" ); GraphNode setupNode = new GraphNode( setupTXJob.getName(), setupTXJob ); //add the original roots as children to setup node for( Iterator rIt = roots.iterator(); rIt.hasNext(); ){ GraphNode n = ( GraphNode ) rIt.next(); mLogger.log( "Added edge " + setupNode.getID() + " -> " + n.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); setupNode.addChild( n ); } workflow.addNode( setupNode ); return workflow; } /** * Adds cleanup nodes in the workflow for sites specified. * * @param dag the workflow * * @return workflow with cleanup jobs added */ public ADag addCleanupNodesForWorkerPackage( ADag dag ) { Mapper m = mBag.getHandleToTransformationMapper(); //figure out if we need to deploy or not if( !mTransferWorkerPackage ){ mLogger.log( "No cleanup of Worker Package needed" , LogManager.DEBUG_MESSAGE_LEVEL ); return dag; } //convert the dag to a graph representation and walk it //in a top down manner Graph workflow = Adapter.convert( dag ); RemoveDirectory removeDirectory = new RemoveDirectory( dag, mBag, this.mPOptions.getSubmitDirectory() ); //add a cleanup job per execution site Set sites = this.getDeploymentSites( dag ); for( Iterator it = sites.iterator(); it.hasNext(); ){ String site = ( String ) it.next(); mLogger.log( "Adding worker package cleanup node for " + site, LogManager.DEBUG_MESSAGE_LEVEL ); String baseRemoteWorkDir = mSiteStore.getInternalWorkDirectory( site ); //figure out what needs to be deleted for the site FileTransfer ft = (FileTransfer)mFTMap.get( site ); List cleanupFiles = new LinkedList(); cleanupFiles.add( new File ( baseRemoteWorkDir, getBasename( ft.getSourceURL().getValue() )).getAbsolutePath() ); for( String f : cleanupFiles ){ StringBuffer sb = new StringBuffer(); sb.append( "Need to cleanup file " ).append( f ).append( " on site " ).append( site ); mLogger.log( sb.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); } //create a remove directory job per site String cleanupJobname = this.getCleanupJobname( dag, site ); Job cleanupJob = removeDirectory.makeRemoveDirJob( site, cleanupJobname, cleanupFiles); //add the original leaves as parents to cleanup node for( Iterator lIt = workflow.getLeaves().iterator(); lIt.hasNext(); ){ GraphNode gn = ( GraphNode ) lIt.next(); mLogger.log( "Added edge " + gn.getID() + " -> " + cleanupJobname, LogManager.DEBUG_MESSAGE_LEVEL ); GraphNode cleanupNode = new GraphNode( cleanupJob.getName(), cleanupJob ); cleanupNode.addParent( gn ); gn.addChild( cleanupNode ); workflow.addNode( cleanupNode ); } } //convert back to ADag and return
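//(sketch) each GraphNode carries its Job as content; re-adding every job and
//every parent -> child edge rebuilds the ADag from the Graph view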
ADag result = dag; //we need to reset the jobs and the relations in it result.clearJobs(); //traverse through the graph and jobs and edges for( Iterator it = workflow.nodeIterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); //get the job associated with node result.add( ( Job )node.getContent() ); //all the children of the node are the edges of the DAG for( Iterator childrenIt = node.getChildren().iterator(); childrenIt.hasNext(); ){ GraphNode child = ( GraphNode ) childrenIt.next(); result.addNewRelation( node.getID(), child.getID() ); } } return result; } /** * Retrieves the sites for which the deployment jobs need to be created. * * @param dag the dag on which the jobs need to execute. * * @return a Set containing a list of siteIDs of the sites where the * dag has to be run. */ protected Set getDeploymentSites( ADag dag ){ Set set = new HashSet(); for(Iterator it = dag.vJobSubInfos.iterator();it.hasNext();){ Job job = (Job)it.next(); //PM-497 //we ignore any clean up jobs that may be running if( job.getJobType() == Job.CLEANUP_JOB ){ continue; } //add to the set only if the job is //being run in the work directory //this takes care of local site create dir if(job.runInWorkDirectory()){ set.add(job.executionPool); } } //remove the stork pool set.remove("stork"); return set; } /** * It returns the name of the deployment job, that is to be assigned. * The name takes into account the workflow name while constructing it, as * that is the thing that can guarantee uniqueness of the name in case of deferred * planning. * * @param dag the workflow so far. * @param site the execution pool for which the deployment job * is responsible. * @param localTransfer whether the transfer needs to run locally or not. * * @return String corresponding to the name of the job. */ protected String getDeployJobName( ADag dag, String site , boolean localTransfer ){ StringBuffer sb = new StringBuffer(); //append setup prefix sb.append( DeployWorkerPackage.DEPLOY_WORKER_PREFIX ); if( localTransfer ){ sb.append( Refiner.LOCAL_PREFIX ); } else{ sb.append( Refiner.REMOTE_PREFIX ); } //append the job prefix if specified in options at runtime if ( mJobPrefix != null ) { sb.append( mJobPrefix ); } sb.append( dag.dagInfo.nameOfADag ).append( "_" ). append( dag.dagInfo.index ).append( "_" ); sb.append( site ); return sb.toString(); } /** * It returns the name of the untar job, that is to be assigned. * The name takes into account the workflow name while constructing it, as * that is the thing that can guarantee uniqueness of the name in case of deferred * planning. * * @param dag the workflow so far. * @param site the execution pool for which the untar job * is responsible. * * @return String corresponding to the name of the job. */ protected String getUntarJobName( ADag dag, String site ){ StringBuffer sb = new StringBuffer(); //append setup prefix sb.append( DeployWorkerPackage.UNTAR_PREFIX ); //append the job prefix if specified in options at runtime if ( mJobPrefix != null ) { sb.append( mJobPrefix ); } sb.append( dag.dagInfo.nameOfADag ).append( "_" ). append( dag.dagInfo.index ).append( "_" ); sb.append( site ); return sb.toString(); } /** * It returns the name of the cleanup job, that is to be assigned. * The name takes into account the workflow name while constructing it, as * that is the thing that can guarantee uniqueness of the name in case of deferred * planning. * * @param dag the workflow so far. * @param site the execution pool for which the cleanup job * is responsible.
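*
* Example of a generated name (hypothetical workflow "dag" with index 0,
* site "isi", and no job prefix):
* <pre>
*   cleanup_dag_0_isi
* </pre>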
* * @return String corresponding to the name of the job. */ protected String getCleanupJobname( ADag dag, String site ){ StringBuffer sb = new StringBuffer(); //append setup prefix sb.append( DeployWorkerPackage.CLEANUP_PREFIX ); //append the job prefix if specified in options at runtime if ( mJobPrefix != null ) { sb.append( mJobPrefix ); } sb.append( dag.dagInfo.nameOfADag ).append( "_" ). append( dag.dagInfo.index ).append( "_" ); sb.append( site ); return sb.toString(); } /** * It creates an untar job that untars the worker package that is staged * by the setup transfer job. * * @param site the execution pool for which the untar job is to be * created. * @param jobName the name that is to be assigned to the job. * @param wpBasename the basename of the worker package that is staged to remote site. * * @return the untar job. */ protected Job makeUntarJob( String site, String jobName, String wpBasename ) { Job newJob = new Job(); List entries = null; String execPath = null; TransformationCatalogEntry entry = null; // GridGateway jobManager = null; try { entries = mTCHandle.lookup( DeployWorkerPackage.UNTAR_TRANSFORMATION_NAMESPACE, DeployWorkerPackage.UNTAR_TRANSFORMATION_NAME, DeployWorkerPackage.UNTAR_TRANSFORMATION_VERSION, site, TCType.INSTALLED); } catch (Exception e) { //nonsensical catching mLogger.log("Unable to retrieve entries from TC " + e.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL ); } entry = ( entries == null ) ? this.defaultUntarTCEntry( mSiteStore.lookup(site) ): //try using a default one (TransformationCatalogEntry) entries.get(0); if( entry == null ){ //NOW THROW AN EXCEPTION //should throw a TC specific exception StringBuffer error = new StringBuffer(); error.append("Could not find entry in tc for lfn "). append( DeployWorkerPackage.COMPLETE_UNTAR_TRANSFORMATION_NAME ). append(" at site ").append( site ); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } execPath = entry.getPhysicalTransformation(); SiteCatalogEntry ePool = mSiteStore.lookup( site ); /* JIRA PM-277 jobManager = ePool.selectGridGateway( GridGateway.JOB_TYPE.transfer ); */ //String argString = "zxvf " + wpBasename; // tar -C /tmp/ -zxvf pegasus-worker-2.4.0cvs-x86_rhas_3.tar.gz //we want to fully specify the directory where we want the tar file //untarred StringBuffer arguments = new StringBuffer(); arguments.append( " -C " ).append( mSiteStore.getInternalWorkDirectory( site ) ). append( " -zxvf " ).append( mSiteStore.getInternalWorkDirectory( site ) ). append( File.separator ).append( wpBasename ); newJob.jobName = jobName; newJob.setTransformation( DeployWorkerPackage.UNTAR_TRANSFORMATION_NAMESPACE, DeployWorkerPackage.UNTAR_TRANSFORMATION_NAME, DeployWorkerPackage.UNTAR_TRANSFORMATION_VERSION ); newJob.setDerivation( DeployWorkerPackage.UNTAR_DERIVATION_NAMESPACE, DeployWorkerPackage.UNTAR_DERIVATION_NAME, DeployWorkerPackage.UNTAR_DERIVATION_VERSION ); // newJob.condorUniverse = "vanilla"; /* JIRA PM-277 newJob.condorUniverse = GridGateway.JOB_TYPE.auxillary.toString(); newJob.globusScheduler = jobManager.getContact(); */ newJob.executable = execPath; newJob.executionPool = site; newJob.strargs = arguments.toString(); //for JIRA PM-38 we used to run as a compute job //this creates problems with the stampede schema as the job is counted //towards a dax task. the untar job is classified as a chmod job //internally now.
JIRA PM-326 newJob.jobClass = Job.CHMOD_JOB; newJob.jobID = jobName; //the profile information from the pool catalog needs to be //assimilated into the job. newJob.updateProfiles( ePool.getProfiles() ); //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 newJob.addNotifications( entry ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. newJob.updateProfiles(entry); //the profile information from the properties file //is assimilated overriding the one from transformation //catalog. newJob.updateProfiles(mProps); return newJob; } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. It also attempts to add the transformation catalog * entry to the underlying TC store. * * @param site the site for which the default entry is required. * @param pegasusHome the path to deployed worker package * @param sysinfo the system information of that site. * @param useFullPath boolean indicating whether to use just the basename or * the full path * @param name the logical name of the transformation * @param executable the basename of the executable * * * @return the default entry. */ private TransformationCatalogEntry addDefaultTCEntry( String site, String pegasusHome, SysInfo sysinfo, boolean useFullPath, String name, String executable ){ TransformationCatalogEntry defaultTCEntry = null; mLogger.log( "Creating a default TC entry for " + Separator.combine( "pegasus", name, null ) + " at site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); //construct the path to the executable StringBuffer path = new StringBuffer(); if( useFullPath ){ path.append( pegasusHome ).append( File.separator ). append( "bin" ).append( File.separator ); } path.append( executable ); mLogger.log( "Remote Path set is " + path.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); defaultTCEntry = new TransformationCatalogEntry( "pegasus", name , null ); defaultTCEntry.setPhysicalTransformation( path.toString() ); defaultTCEntry.setResourceId( site ); defaultTCEntry.setType( TCType.INSTALLED ); defaultTCEntry.setSysInfo( sysinfo ); if( useFullPath ){ //add pegasus home as an environment variable defaultTCEntry.addProfile( new Profile( Profile.ENV, "PEGASUS_HOME", pegasusHome )); } //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug, as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.ERROR_MESSAGE_LEVEL ); //throw exception as throw new RuntimeException( e ); } return defaultTCEntry; } /** * Returns a default TC entry for the pegasus site. The entry points to * the http webserver on the pegasus website. It also attempts to add * the transformation catalog entry to the TC store. * * @param site the execution site for which we need a matching static binary. * @param name logical name of the transformation * * * @return the default entry.
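*
* The entry ends up pointing at a URL of the form constructed by
* constructDefaultURLToPegasusWorkerPackage(), e.g. (illustrative, for a 4.0
* planner on an x86 rhel_5 site):
* <pre>
*   http://download.pegasus.isi.edu/wms/download/4.0/pegasus-worker-4.0.1-x86_rhel_5.tar.gz
* </pre>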
*/ protected TransformationCatalogEntry addDefaultTCEntryForPegasusWebsite( String site, String name ){ TransformationCatalogEntry defaultTCEntry = null; //String site = "pegasus"; SysInfo sysinfo = mSiteStore.getSysInfo( site ); if( sysinfo == null ){ mLogger.log( "Unable to get System Information for site " + site, LogManager.ERROR_MESSAGE_LEVEL ); return null; } //construct the path to the executable String path = constructDefaultURLToPegasusWorkerPackage( name, sysinfo ); if( path == null ){ mLogger.log( "Unable to determine path for worker package for " + sysinfo, LogManager.DEBUG_MESSAGE_LEVEL ); return null; } mLogger.log( "Creating a default TC entry for " + Separator.combine( "pegasus", name, null ) + " at site pegasus for sysinfo " + sysinfo, LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Remote Path set is " + path.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); defaultTCEntry = new TransformationCatalogEntry( "pegasus", name , null ); defaultTCEntry.setPhysicalTransformation( path ); defaultTCEntry.setResourceId( "pegasus" ); defaultTCEntry.setType( TCType.STAGEABLE ); defaultTCEntry.setSysInfo( sysinfo ); //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug, as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.ERROR_MESSAGE_LEVEL ); //throw exception as throw new RuntimeException( e ); } return defaultTCEntry; } /** * Constructs the default URLs for the pegasus worker package. If the user * has not specified the URL to the source directory in Pegasus Properties then * the URL constructed points to the pegasus website. * * The version of Pegasus retrieved is the one against which the planner * is executing. * * @param name the logical name of the executable, usually worker|binary. * @param sysinfo the sysinfo for which the path is required. * * @return url */ protected String constructDefaultURLToPegasusWorkerPackage( String name, SysInfo sysinfo ) { //get the matching architecture //String arch = ( String )DeployWorkerPackage.archToNMIArch().get( sysinfo.getArch() ); String arch = sysinfo.getArchitecture().toString(); String os = ( String )DeployWorkerPackage.osToOSReleaseAndVersion().get( sysinfo.getOS() ); if( arch == null || os == null ){ mLogger.log( "Unable to construct url for pegasus worker for " + sysinfo , LogManager.DEBUG_MESSAGE_LEVEL ); return null; } StringBuffer url = new StringBuffer(); //construct the base path if( mUseUserSpecifiedSourceLocation ){ url.append( mUserSpecifiedSourceLocation ).append( File.separator); } else{ url.append( DeployWorkerPackage.BASE_BUILD_DIRECTORY_URL ). append( this.mPlannerMajorMinorVersion ).append( "/" ); url.append( PEGASUS_VERSION.endsWith( "cvs" ) ? "nightly/" : "" ); } url.append( "pegasus-" ).append( name ).append( "-" ). append( PEGASUS_VERSION ).append( "-" ). append( arch ).append( "_" ). append( os ).append( ".tar.gz" ); return url.toString(); } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. * * @param site the site for which the default entry is required. * * * @return the default entry.
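*
* The default entry points at /bin/tar on the site; the untar job built by
* makeUntarJob() then effectively runs (paths illustrative):
* <pre>
*   /bin/tar -C /path/to/workdir -zxvf /path/to/workdir/pegasus-worker-4.0.1-x86_rhel_5.tar.gz
* </pre>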
*/ private TransformationCatalogEntry defaultUntarTCEntry( SiteCatalogEntry site ){ TransformationCatalogEntry defaultTCEntry = null; mLogger.log( "Creating a default TC entry for " + DeployWorkerPackage.COMPLETE_UNTAR_TRANSFORMATION_NAME + " at site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); //construct the path to the executable StringBuffer path = new StringBuffer(); path.append( "/bin/tar" ); mLogger.log( "Remote Path set is " + path.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); defaultTCEntry = new TransformationCatalogEntry( DeployWorkerPackage.UNTAR_TRANSFORMATION_NAMESPACE, DeployWorkerPackage.UNTAR_TRANSFORMATION_NAME , DeployWorkerPackage.UNTAR_TRANSFORMATION_VERSION ); defaultTCEntry.setPhysicalTransformation( path.toString() ); defaultTCEntry.setResourceId( site.getSiteHandle() ); defaultTCEntry.setType( TCType.INSTALLED ); defaultTCEntry.setSysInfo( site.getSysInfo()); //add path as an environment variable //addDefaultTCEntry.addProfile( new Profile( Profile.ENV, "PATH", DeployWorkerPackage.PATH_VALUE )); //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug, as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.ERROR_MESSAGE_LEVEL ); //throw exception as throw new RuntimeException( e ); } return defaultTCEntry; } /** * Returns the basename of the URL using substring. * * @param url the url. * * @return basename */ protected String getBasename( String url ){ return ( url == null || url.length() == 0 )? null: url.substring( url.lastIndexOf( File.separator ) + 1 ); } /** * Returns the staging site for a particular execution site. If worker node * execution is enabled, then the staging site is the submit directory * for the workflow on the local site. * * @param site the execution site. * * @return the staging site */ private String getStagingSite(String site) { //for pegasus lite mode the staging site for the worker package //should be the local site's submit directory. return mWorkerNodeExecution ? "local": site; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/ThreadPool.java0000644000175000017500000003561411757531137025641 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
*/ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.common.logging.LogManagerFactory; import java.io.File; import java.io.FileInputStream; import java.util.LinkedList; import java.util.Set; import org.gridforum.jgss.ExtendedGSSCredential; import org.gridforum.jgss.ExtendedGSSManager; import edu.isi.pegasus.planner.classes.AuthenticateRequest; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.ENV; import edu.isi.pegasus.planner.catalog.site.impl.old.PoolInfoProvider; import edu.isi.pegasus.planner.catalog.site.impl.old.PoolMode; import org.ietf.jgss.GSSCredential; /** * This maintains a pool of authenticate threads that authenticate against a * particular resource. * * @author Karan Vahi * @version $Revision: 2582 $ */ public class ThreadPool { /** * The maximum number of authentication threads that are spawned. */ public static final int NUMBER_OF_THREADS = 5; /** * The request queue that holds the authenticate requests. The worker * threads access this job queue. */ private LinkedList mQueue; /** * The handle to the properties object. */ private PegasusProperties mProps; /** * The handle to the Pool Info Provider. */ private PoolInfoProvider mPoolHandle; /** * The handle to the LogManager object. */ private LogManager mLogger; /** * The Set of pools that need to be authenticated against. */ private Set mExecPools; /** * The number of pools that one has to authenticate against. */ private Integer mNumOfPools; /** * The handle to the pool of threads that this thread pool is responsible for. */ private AuthenticateThread[] mWorkers; /** * The condition variable that is used to synchronize the shutdown. */ private ConditionVariable mCurrentNum; /** * The namespace object holding the environment variables for local * pool. */ private ENV mLocalEnv; /** * The credential loaded from the non default location if specified. */ private GSSCredential mCredential; /** * The overloaded constructor. * * @param properties the PegasusProperties to be used. * @param pools the set of pools against which the user is authenticating. */ public ThreadPool( PegasusProperties properties, Set pools ) { mQueue = new LinkedList(); mCurrentNum = new ConditionVariable(); mProps = properties; mLogger = LogManagerFactory.loadSingletonInstance( properties ); String poolClass = PoolMode.getImplementingClass(mProps.getPoolMode()); mPoolHandle = PoolMode.loadPoolInstance(poolClass,mProps.getPoolFile(), PoolMode.SINGLETON_LOAD); mExecPools = pools; mNumOfPools = new Integer(pools.size()); //load the local environment variables mLocalEnv = loadLocalEnvVariables(); //load the credential if the user has set the //corresponding environment variable. mCredential = (mLocalEnv.containsKey(ENV.X509_USER_PROXY_KEY))? //load the proxy from the path specified getGSSCredential((String)mLocalEnv.get(ENV.X509_USER_PROXY_KEY)): null; if(mCredential == null){ //log message mLogger.log("Proxy will be picked up from the default location in /tmp", LogManager.DEBUG_MESSAGE_LEVEL); } //initialise the worker threads mWorkers = new AuthenticateThread[this.NUMBER_OF_THREADS]; for(int i = 0; i < NUMBER_OF_THREADS; i++){ mWorkers[i] = new AuthenticateThread(i); //start the threads mWorkers[i].start(); } } /** * This method is called to ensure the clean shutdown of threads, and * waits till all the requests have been serviced.
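*
* Typical call sequence for the pool (sketch; requests are assumed to be
* AuthenticateRequest instances):
* <pre>
*   ThreadPool pool = new ThreadPool( properties, pools );
*   pool.acceptRequest( request );  //repeat per request
*   pool.shutdown();                //blocks till all workers have exited
* </pre>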
*/ public void shutdown(){ //mCurrentNum is the CV on which you do a shutdown synchronized(mCurrentNum){ int numOfPools = mNumOfPools.intValue(); for (int i = 0; i < NUMBER_OF_THREADS; i++) { //send the shutdown signal to the worker threads mWorkers[i].shutdown(); } //wake up all the threads waiting on this synchronized(mQueue){ //mLogger.logMessage("Manager sending notify to all"); mQueue.notifyAll(); } while(mCurrentNum.getValue() < NUMBER_OF_THREADS){ try{ mCurrentNum.wait(); } catch(InterruptedException e){ mLogger.log( "Manager got interrupted during shutdown" + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); } } } } /** * Accepts an authentication request that has to be serviced. It is added * to the queue of requests. */ public void acceptRequest(Object request){ //see if any of the worker threads are available /*for(int i = 0; i < NUMBER_OF_THREADS; i++){ if(mWorkers[i].isAvailable()){ //no need to add to queue. } }*/ synchronized(mQueue){ mQueue.addLast(request); //send a notification to a worker thread mQueue.notify(); } } /** * Reads in the environment variables into memory from the properties file * and the pool catalog. * * @return the ENV namespace object holding the environment * variables. */ private ENV loadLocalEnvVariables(){ //assumes that pool handle, and property handle are initialized. ENV env = new ENV(); //load from the pool.config env.checkKeyInNS(mPoolHandle.getPoolProfile("local",Profile.ENV)); //load from property file env.checkKeyInNS(mProps.getLocalPoolEnvVar()); return env; } /** * Loads a GSSCredential from the proxy file residing at the path specified. * * @param file the path to the proxy file. * * @return GSSCredential * null in case the file format is wrong, or file does not exist. */ private GSSCredential getGSSCredential(String file){ File f = new File(file); GSSCredential gcred = null; //sanity check first if(!f.exists()){ return null; } try{ byte[] data = new byte[ (int) f.length()]; FileInputStream in = new FileInputStream(f); in.read(data); in.close(); ExtendedGSSManager manager = (ExtendedGSSManager) ExtendedGSSManager.getInstance(); gcred = manager.createCredential(data, ExtendedGSSCredential.IMPEXP_OPAQUE, GSSCredential.DEFAULT_LIFETIME, null, GSSCredential.INITIATE_AND_ACCEPT); mLogger.log("Loaded the credential from proxy file " + file, LogManager.DEBUG_MESSAGE_LEVEL); } catch(Exception e){ mLogger.log( "Unable to load proxy from file " + file + " " + e.getMessage(),LogManager.ERROR_MESSAGE_LEVEL); } return gcred; } /** * A thread as an inner class, that authenticates against one particular * pool. */ class AuthenticateThread implements Runnable{ /** * The pool against which to authenticate. */ private String mPool; /** * The thread object that is used to launch the thread. */ private Thread mThread; /** * Whether the thread is available to do some work or not. */ private boolean mAvailable; /** * Whether to shutdown or not. */ private boolean mShutdown; /** * The unique identifying id of the thread. */ private int mIndex; /** * The overloaded constructor. * * */ public AuthenticateThread(int index){ mAvailable = true; mShutdown = false; mIndex = index; } /** * The start method for the thread. It initialises the thread and calls * its start method. */ public void start(){ mThread = new Thread(this); mThread.start(); } /** * Returns whether a thread is available to do some work or not. */ public boolean isAvailable(){ return mAvailable; } /** * Sets the shutdown flag to true. This does not make the thread stop.
* The thread only stops when its current request is serviced and the * queue is empty. */ public void shutdown(){ mShutdown = true; } /** * Calls the corresponding join method of the thread associated with * this class. * * @param millis The time to wait in milliseconds. */ public void join(long millis) throws InterruptedException{ mThread.join(millis); } /** * The runnable method of the thread, that is called when the thread is * started. */ public void run(){ AuthenticateRequest ar; Authenticate a = new Authenticate( mProps, mPoolHandle ); a.setCredential(mCredential); boolean authenticated = false; for(;;){ //remain in an infinite loop and wait for a request to be released //from the queue. ar = getAuthenticateRequest(); if(ar == null){ //no more requests to service and the shutdown signal has //been received. send the notification to the manager and exit mLogger.log("Thread [" + mIndex +"] got shutdown signal", LogManager.DEBUG_MESSAGE_LEVEL); synchronized(mCurrentNum){ mCurrentNum.increment(); mCurrentNum.notify(); } break; } //means worker is busy, processing a request. mAvailable = false; //do the processing. authenticated = a.authenticate(ar); mLogger.log("Thread [" + mIndex +"] Authentication of " + ar + " successful:" + authenticated, LogManager.DEBUG_MESSAGE_LEVEL); if(!authenticated){ //we need to remove boolean removal = a.removeResource(ar); mLogger.log("Thread [" + mIndex +"] Removal of resource " + ar + " successful:" + removal,LogManager.DEBUG_MESSAGE_LEVEL); } mAvailable = true; //be nice and sleep try{ Thread.sleep(5); } catch (InterruptedException ex) { mLogger.log( "Authenticate Thread [" + mIndex +"] got interrupted while waiting", LogManager.DEBUG_MESSAGE_LEVEL); //go into sleep again continue; } } } /** * Returns an authentication request to the worker thread. * * @return the authentication request. */ public AuthenticateRequest getAuthenticateRequest(){ synchronized(mQueue){ for(;;){ if(mQueue.isEmpty() && mShutdown){ //no more requests to service and the shutdown signal has //been received. return null; } else if (mQueue.isEmpty()) { //there is nothing in the queue so wait on it. try { mLogger.log("Thread [" + mIndex +"] going to wait", LogManager.DEBUG_MESSAGE_LEVEL); mQueue.wait(); //again check for empty queue and shutdown signal if(mQueue.isEmpty() && !mShutdown) //go back to the wait state to receive a new //request or an AR request continue; } catch (InterruptedException ex) { mLogger.log( "Authenticate Thread [" + mIndex +"] got interrupted while waiting " + ex.getMessage(),LogManager.ERROR_MESSAGE_LEVEL); //go into sleep again continue; } } return (mQueue.isEmpty() && mShutdown)? //indicates shutdown null: (AuthenticateRequest)mQueue.removeFirst(); } } } } /** * A wrapper around an int that acts as a Condition Variable, and is used * as such. In behaviour it is probably closer to a semaphore. */ class ConditionVariable{ /** * The int that is associated with this object. */ private int value; /** * The default constructor. */ public ConditionVariable(){ value = 0; } /** * It increments the value by 1. */ public void increment(){ value++; } /** * Returns the value.
*/ public int getValue(){ return value; } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/cleanup/0000755000175000017500000000000011757531667024363 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/cleanup/CleanupStrategy.java0000644000175000017500000000312411757531137030330 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.cleanup; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.partitioner.graph.Graph; /** * The interface that defines how the cleanup job is invoked and created. * * @author Karan Vahi * @version $Revision: 2582 $ */ public interface CleanupStrategy { /** * The version number associated with this API Cleanup CleanupStrategy. */ public static final String VERSION = "1.1"; /** * Intializes the class. * * @param bag bag of initialization objects * @param impl the implementation instance that creates file cleanup job */ public void initialize( PegasusBag bag, CleanupImplementation impl ) ; /** * Adds cleanup jobs to the workflow. * * @param workflow the workflow to add cleanup jobs to. * * @return the workflow with cleanup jobs added to it. */ public Graph addCleanupJobs( Graph workflow ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/cleanup/InPlace.java0000644000175000017500000006040311757531137026534 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.cleanup; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.TransferJob; import java.util.Map; import java.util.Iterator; import java.util.HashMap; import java.util.List; import java.util.LinkedList; import java.util.Set; import java.util.HashSet; /** * This generates cleanup jobs in the workflow itself. * * * @author Arun ramakrishnan * @author Karan Vahi * * @version $Revision: 4551 $ */ public class InPlace implements CleanupStrategy{ /** * The prefix for CLEANUP_JOB ID i.e prefix+the parent compute_job ID becomes * ID of the cleanup job. 
*/ public static final String CLEANUP_JOB_PREFIX = "clean_up_"; /** * The mapping to siteHandle to all the jobs that are mapped to it * mapping to siteHandle(String) to Set */ private HashMap mResMap; /** * The mapping of siteHandle to all subset of the jobs mapped to it that are * leaves in the workflow mapping to siteHandle(String) to Set. */ private HashMap mResMapLeaves; /** * The mapping of siteHandle to all subset of the jobs mapped to it that are * leaves in the workflow mapping to siteHandle(String) to Set. */ private HashMap mResMapRoots; /** * The max depth of any job in the workflow useful for a priorityQueue * implementation in an array */ private int mMaxDepth; /** * HashSet of Files that should not be cleaned up */ private HashSet mDoNotClean; /** * The handle to the CleanupImplementation instance that creates the jobs for us. */ private CleanupImplementation mImpl; /** * The handle to the properties passed to Pegasus. */ private PegasusProperties mProps; /** * The handle to the logging object used for logging. */ private LogManager mLogger; /** * The default constructor. */ public InPlace(){ } /** * Intializes the class. * * @param bag bag of initialization objects * @param impl the implementation instance that creates cleanup job */ public void initialize( PegasusBag bag, CleanupImplementation impl ) { mProps = bag.getPegasusProperties(); mLogger = bag.getLogger(); mImpl = impl; //intialize the internal structures mResMap = new HashMap(); mResMapLeaves = new HashMap(); mResMapRoots = new HashMap(); mDoNotClean = new HashSet(); mMaxDepth=0; } /** * Adds cleanup jobs to the workflow. * * @param workflow the workflow to add cleanup jobs to. * * @return the workflow with cleanup jobs added to it. */ public Graph addCleanupJobs( Graph workflow ){ //reset the internal data structures reset(); //add the priorities to all the jobs //applyJobPriorities( workflow ); //determine the files that should not be removed from the resource where it is produced // i.e file A produced by job J should not be removed if J does not have a stage out job // and A has getTransientTransferFlag() set to false for( Iterator it = workflow.nodeIterator() ; it.hasNext(); ){ GraphNode _GN = ( GraphNode ) it.next(); Job _SI = ( Job ) _GN.getContent(); //only for compute jobs if( ! 
( _SI.getJobType() == _SI.COMPUTE_JOB /*|| _SI.getJobType() == _SI.STAGED_COMPUTE_JOB*/ ) ) { continue; } //if the compute job has a stage out job then all the files produced by it can be removed // so , skip such cases boolean job_has_stageout = false ; for( Iterator itjc = _GN.getChildren().iterator() ; itjc.hasNext() ; ){ Job _SIchild = ( Job ) ( ( GraphNode ) itjc.next() ).getContent() ; if( _SIchild.getJobType() == _SIchild.STAGE_OUT_JOB ){ job_has_stageout = true; break; } } if( job_has_stageout ) continue; //else add files with getTransientTransferFlag() set to false to the do_not_clean List Set _ofiles = _SI.getOutputFiles(); for( Iterator itof = _ofiles.iterator() ; itof.hasNext() ; ){ PegasusFile of = ( PegasusFile ) itof.next(); if( of.getTransientTransferFlag() == false ){ this.mDoNotClean.add( of ); } } } // mLogger.log( "The input workflow " + workflow, // LogManager.DEBUG_MESSAGE_LEVEL ); //set the depth and ResMap values iteratively setDepth_ResMap( workflow.getRoots() ); mLogger.log( "Number of sites " + mResMap.size(), LogManager.DEBUG_MESSAGE_LEVEL ); //output for debug StringBuffer message = new StringBuffer(); for( Iterator it= mResMap.entrySet().iterator(); it.hasNext() ;){ Map.Entry entry = (Map.Entry)it.next(); message.append( "Site " ).append( (String)entry.getKey()) .append(" count jobs = ").append( ( (Set)entry.getValue()).size()); mLogger.log( message.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); Set whatever= (Set)entry.getValue() ; for( Iterator weit=whatever.iterator(); weit.hasNext() ; ){ mLogger.log( "* "+ ((GraphNode)weit.next()).getID(), LogManager.DEBUG_MESSAGE_LEVEL ); } message = new StringBuffer(); } //for each site do the process of adding cleanup jobs for( Iterator it= mResMap.entrySet().iterator(); it.hasNext() ;){ Map.Entry entry = (Map.Entry)it.next(); addCleanUpJobs( (String)entry.getKey() , (Set)entry.getValue() , workflow ); } // mLogger.log( "The resultant workflow with cleanup jobs " + workflow, // LogManager.DEBUG_MESSAGE_LEVEL ); return workflow; } /** * Resets the internal data structures. * */ protected void reset(){ mResMap.clear(); mResMapLeaves.clear(); mResMapRoots.clear(); mMaxDepth = 0; } /** * A BFS implementation to set depth value (roots have depth 1) and also * to populate mResMap ,mResMapLeaves,mResMapRoots which contains all the * jobs that are assigned to a particular resource * * @param roots List of GraphNode objects that are roots */ private void setDepth_ResMap( List roots ){ LinkedList que = new LinkedList(); que.addAll( roots ); for(int i=0; i < que.size(); i++){ ( (GraphNode)que.get(i) ).setDepth( 1 ); } while( que.size() >= 1 ){ GraphNode curGN = (GraphNode)que.removeFirst(); //debug /* System.out.print(curGN.getDepth() +" "+((Job)curGN.getContent()).getSiteHandle()+" "); if( curGN.getChildren() == null ) System.out.print("0"); else System.out.print( curGN.getChildren().size() ); */ //populate mResMap ,mResMapLeaves,mResMapRoots Job si = ( Job )curGN.getContent(); //Commented out as for stage out jobs we need non third party //site. 
Karan Jan 8, 2009 // if( !mResMap.containsKey( si.getSiteHandle() ) ){ // mResMap.put( si.getSiteHandle(), new HashSet() ); // // } // ((Set)mResMap.get( si.getSiteHandle() )).add( curGN ); String site = getSiteForCleanup( si ); if( !mResMap.containsKey( site ) ){ mResMap.put( site, new HashSet() ); } ((Set)mResMap.get( site )).add( curGN ); //System.out.println( " site count="+((Set)mResMap.get( si.getSiteHandle() )).size() ); //now set the depth for( Iterator it = curGN.getChildren().iterator() ; it.hasNext() ;){ GraphNode child = (GraphNode)it.next(); if(!( child.getDepth() == -1 || child.getDepth() < curGN.getDepth()+1 ) ){ continue; } child.setDepth( curGN.getDepth() + 1 ); if( child.getDepth() > mMaxDepth ) mMaxDepth=child.getDepth(); que.addLast( child ); } } } /** * Adds cleanup jobs for the workflow scheduled to a particular site * a best first search strategy is implemented based on the depth of the job * in the workflow * * @param site the site ID * @param leaves the leaf jobs that are scheduled to site * @param workflow the Graph into which new cleanup jobs can be added */ private void addCleanUpJobs( String site, Set leaves, Graph workflow ){ mLogger.log( site + " " + leaves.size() , LogManager.DEBUG_MESSAGE_LEVEL ); //if( !site.equals(new String("k")) )return; //file(String) cleaned by GraphNode HashMap cleanedBy = new HashMap(); //the below in case we get rid of the primitive java 1.4 //PriorityQueue pQ=new PriorityQueue(resMap.get(site).size(),GraphNode_ORDER); StringBuffer message = new StringBuffer(); message.append( "Leaf jobs scheduled at site ").append( site ) .append( " are " ); for( Iterator it = leaves.iterator(); it.hasNext(); ){ message.append( ((GraphNode)it.next()).getID() ); } mLogger.log( message.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); //its a Set of GraphNode's Set[] pQA = new Set[ mMaxDepth + 1 ]; for( int i = 0 ; i < pQA.length ; i++ ){ pQA[i] = new HashSet(); } //populate the priority Array pQA with all the leaf nodes for( Iterator it = leaves.iterator() ; it.hasNext() ;){ GraphNode gN = (GraphNode)it.next(); pQA[ gN.getDepth() ].add( gN ); } //start the best first cleanup job addition for( int curP = mMaxDepth; curP >= 0 ; curP-- ){ //process all elements in the current priority while( pQA[curP].size() >= 1 ){ GraphNode curGN = (GraphNode) pQA[ curP ].iterator().next(); pQA[ curP ].remove( curGN ); Job curGN_SI = (Job) curGN.getContent(); if( !typeNeedsCleanUp( curGN_SI.getJobType() ) ) { continue; } /*if( curGN_SI.getJobType() == Job.STAGE_OUT_JOB ){ curGN_SI.getInputFiles().addAll( curGN_SI.getOutputFiles() ); curGN_SI.getOutputFiles().clear(); System.out.println( curGN_SI.getName() ); System.out.println( curGN_SI.getOutputFiles() ); System.out.println( curGN_SI.getInputFiles() ); }*/ // Leads to corruption of input files for the job. 
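            // (The live Set returned by getInputFiles() would be mutated
            //  below when the do-not-clean files are removed from fileSet,
            //  hence the defensive copy on the next line.)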
// Set fileSet = curGN_SI.getInputFiles(); Set fileSet = new HashSet( curGN_SI.getInputFiles() ); fileSet.addAll( curGN_SI.getOutputFiles() ); //remove the files in fileSet that are in this.mDoNotClean Set fileSet2 = new HashSet( fileSet ); for( Iterator itfs2 = fileSet2.iterator() ; itfs2.hasNext() ; ){ Object _dum_pf = itfs2.next() ; if( this.mDoNotClean.contains( _dum_pf ) ){ fileSet.remove( _dum_pf ); } } // create new GraphNode with MLogicalID=mLogicalName , mParents // mContent ID ,Name , jobtype //the files it cleans up are specified in mContent.inputFiles //create a dummy GraphNode .first create Job object and then add it to GraphNode GraphNode nuGN = new GraphNode( generateCleanupID( curGN_SI ), curGN_SI.getTXName() ); // InPlace.CLEANUP_JOB_PREFIX + curGN.getName() , // InPlace.CLEANUP_JOB_PREFIX + curGN.getName() ); List cleanupFiles = new LinkedList(); for( Iterator it = fileSet.iterator() ; it.hasNext() ; ){ PegasusFile file = (PegasusFile) it.next(); //check if its already set up to be cleaned up if( cleanedBy.containsKey( file.getLFN()) ){ GraphNode child = (GraphNode) cleanedBy.get( file.getLFN() ); if( !child.getParents().contains( curGN ) ){ child.addParent( curGN ); } if( !curGN.getChildren().contains( child ) ){ curGN.addChild( child ); } }else{ // nuSI.addInputFile( file ); cleanupFiles.add( file ); cleanedBy.put( file.getLFN(), nuGN ); if( !curGN.getChildren().contains( nuGN ) ){ curGN.addChild( nuGN ); } if( ! nuGN.getParents().contains( curGN ) ){ nuGN.addParent( curGN ); } } }// all the files //create a cleanup job if the cleanup node has any parents if( nuGN.getParents().size() >= 1 ){ mLogger.log( "Adding cleanup job with ID " + nuGN.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); // We have always pass the associaated compute job. Since now //a cleanup job can be associated with stageout jobs also, we //need to make sure that for the stageout job the cleanup job //is passed. Karan Jan 9, 2008 // Job cleanupJob = mImpl.createCleanupJob( nuGN.getID(), // cleanupFiles, // curGN_SI // ); Job computeJob; if( typeStageOut( curGN_SI.getJobType() ) ){ //find a compute job that is parent of this GraphNode node = (GraphNode)curGN.getParents().get( 0 ); computeJob = (Job)node.getContent(); message = new StringBuffer(); message.append( "For cleanup job " ).append( nuGN.getID() ). append( " the associated compute job is ").append( computeJob.getID() ); mLogger.log( message.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); } else{ computeJob = curGN_SI; } Job cleanupJob = mImpl.createCleanupJob( nuGN.getID(), cleanupFiles, computeJob ); //No longer required as stageout jobs are also cleaned. Karan Jan , 2008 //if the corresponding compute job has any transfer or stageout jobs as child add it //as a parent of the cleanup job for( Iterator itc=curGN.getChildren().iterator(); itc.hasNext() ;){ GraphNode curGNchild=(GraphNode) itc.next(); Job itc_si=(Job) curGNchild.getContent(); if( itc_si != null ) if( itc_si.getJobType() == Job.STAGE_OUT_JOB || itc_si.getJobType() == Job.INTER_POOL_JOB ){ nuGN.addParent( curGNchild ); curGNchild.addChild( nuGN ); } } //add the job as a content to the graphnode //and the node itself to the Graph nuGN.setContent( cleanupJob ); workflow.addNode(nuGN); } } } //output whats file is cleaned by what ? 
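        // A sketch of what cleanedBy now holds (LFNs and job IDs illustrative):
        //   "f.b2" -> clean_up_findrange_ID000002
        // meaning the cleanup job clean_up_findrange_ID000002 removes file f.b2.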
mLogger.log( "", LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "CLEANUP LIST",LogManager.DEBUG_MESSAGE_LEVEL); for( Iterator it = cleanedBy.keySet().iterator() ; it.hasNext() ;){ String lfn = (String)it.next(); GraphNode cl_GN = (GraphNode)cleanedBy.get(lfn); Job cl_si = (Job)cl_GN.getContent(); //Arun please use a StringBuffer first //Karan March 13, 2007 mLogger.log( "file:" + lfn + " site:" + cl_si.getSiteHandle() + " " + cl_GN.getID() , LogManager.DEBUG_MESSAGE_LEVEL ); } //reduce dependencies. for each cleanup job X, look at the parents of //the job. For each parent Y see if there is a path to any other parent Z of X. //If a path exists, then the edge from Z to cleanup job can //be removed. int num = 0; for( Iterator it = cleanedBy.values().iterator() ; it.hasNext() ;){ num++; mLogger.log(" cleanup job counter = " + num, mLogger.DEBUG_MESSAGE_LEVEL); GraphNode cl_GN = (GraphNode)it.next(); //Job cl_si=(Job)cl_GN.getContent(); List cl_GNparents = cl_GN.getParents(); List redundant = new LinkedList(); HashSet visit = new HashSet(); for( Iterator itp = cl_GN.getParents().iterator() ; itp.hasNext() ;){ LinkedList mque = new LinkedList(); mque.add( itp.next() ); while( mque.size() > 0 ){ GraphNode popGN = (GraphNode) mque.removeFirst(); if( visit.contains(popGN) ) { continue; } visit.add(popGN); for( Iterator itp_pop = popGN.getParents().iterator() ; itp_pop.hasNext() ;){ GraphNode pop_pGN = (GraphNode) itp_pop.next(); //check if its redundant ..if so add it to redundant list if( cl_GNparents.contains( pop_pGN ) ){ redundant.add( pop_pGN ); }else{ //mque.addAll( pop_pGN.getParents() ); for( Iterator itgp = pop_pGN.getParents().iterator() ; itgp.hasNext() ;){ GraphNode gpGN = (GraphNode) itgp.next(); if( ! visit.contains( gpGN ) ){ mque.add( gpGN ); } } } } } } //remove all redundant nodes that were found for( Iterator itr = redundant.iterator() ; itr.hasNext() ;){ GraphNode r_GN = (GraphNode) itr.next(); cl_GN.removeParent( r_GN ); r_GN.removeChild( cl_GN ); } } } /** * Adds job priorities to the jobs in the workflow on the basis of * the levels in the traversal order given by the iterator. Later on * this would be a separate refiner. * * @param workflow the workflow on which to apply job priorities. * */ protected void applyJobPriorities( Graph workflow ){ for ( Iterator it = workflow.iterator(); it.hasNext(); ){ GraphNode node = (GraphNode)it.next(); Job job = ( Job )node.getContent(); //only apply priority if job is not associated with a priority //beforehand if( !job.condorVariables.containsKey( Condor.PRIORITY_KEY ) ){ //log to debug StringBuffer sb = new StringBuffer(); sb.append( "Applying priority of " ).append( node.getDepth() ). append( " to " ).append( job.getID() ); mLogger.log( sb.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); //apply a priority to the job overwriting any preexisting priority job.condorVariables.construct( Condor.PRIORITY_KEY, new Integer( node.getDepth() ).toString()); //also for compute and staged compute jobs //forward to remote job manager also //the below hack only works for condor pools // if( job.getJobType() == Job.COMPUTE_JOB || // job.getJobType() == Job.STAGED_COMPUTE_JOB ){ // job.globusRSL.construct( "condorsubmit", // "(priority " + node.getDepth() + ")"); // } } } return; } /** * Returns the identifier that is to be assigned to cleanup job. * * @param job the job with which the cleanup job is primarily associated. * * @return the identifier for a cleanup job. 
*/
    protected String generateCleanupID( Job job ){
        StringBuffer sb = new StringBuffer();
        sb.append( InPlace.CLEANUP_JOB_PREFIX ).append( job.getID() );
        return sb.toString();
    }

    /**
     * Checks to see which job types are required to be looked at for cleanup.
     * COMPUTE_JOB, STAGE_OUT_JOB and INTER_POOL_JOB are the ones that need
     * cleanup.
     *
     * @param type the type of the job.
     *
     * @return boolean indicating whether the type needs cleanup.
     */
    protected boolean typeNeedsCleanUp( int type ){
        return (   type == Job.COMPUTE_JOB
                || type == Job.STAGE_OUT_JOB
                || type == Job.INTER_POOL_JOB
                /*|| type == Job.STAGED_COMPUTE_JOB*/ );
    }

    /**
     * Checks to see if a job type is a stageout job type.
     *
     * @param type the type of the job.
     *
     * @return boolean indicating whether the type is a stageout type.
     */
    protected boolean typeStageOut( int type ){
        return (   type == Job.STAGE_OUT_JOB
                || type == Job.INTER_POOL_JOB );
    }

    /**
     * Returns the site to be used for the cleanup algorithm.
     * For compute jobs the staging site is used, while for stageout jobs
     * the non third party site is used.
     *
     * For all other jobs the execution site is used.
     *
     * @param job the job
     *
     * @return the site to be used
     */
    protected String getSiteForCleanup( Job job ) {
        /*
        String site =  typeStageOut( job.getJobType() )?
                   ((TransferJob)job).getNonThirdPartySite():
                   job.getStagingSiteHandle();
        */
        String site = null;
        if( typeStageOut( job.getJobType() )){
            //for stage out jobs we prefer the non third party site
            site = ((TransferJob)job).getNonThirdPartySite();
        }
        else if ( job.getJobType() == Job.COMPUTE_JOB ){
            //for compute jobs we refer to the staging site
            site = job.getStagingSiteHandle();
        }
        else{
            //for all other jobs we use the execution site
            site = job.getSiteHandle();
        }
        return site;
    }
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/cleanup/Cleanup.java0000644000175000017500000003625111757531137026614 0ustar ryngerynge/**
 * Copyright 2007-2008 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.isi.pegasus.planner.refiner.cleanup;

import edu.isi.pegasus.planner.catalog.ReplicaCatalog;
import edu.isi.pegasus.planner.catalog.site.classes.SiteStore;
import edu.isi.pegasus.planner.classes.Job;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.planner.classes.PegasusFile;
import edu.isi.pegasus.planner.common.PegasusProperties;
import edu.isi.pegasus.common.logging.LogManager;
import edu.isi.pegasus.planner.catalog.TransformationCatalog;
import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry;
import edu.isi.pegasus.planner.catalog.transformation.classes.TCType;
import edu.isi.pegasus.common.util.Separator;
import edu.isi.pegasus.planner.namespace.Dagman;

import java.util.List;
import java.util.Iterator;
import java.util.HashSet;
import java.io.BufferedWriter;
import java.io.FileWriter;
import java.io.File;
import java.io.IOException;

/**
 * Uses the pegasus-cleanup executable to do removal of the files on the
 * remote sites.
 *
 * @author Karan Vahi
 * @version $Revision: 4778 $
 */
public class Cleanup implements CleanupImplementation{

    /**
     * The scheme name for a file URL.
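     * A PFN such as file:///scratch/wf/f.out (an illustrative path) matches
     * this scheme and forces the cleanup job onto the staging site; see
     * createCleanupJob().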
*/ public static final String FILE_URL_SCHEME = "file:"; /** * Default category for registration jobs */ public static final String DEFAULT_CLEANUP_CATEGORY_KEY = "cleanup"; /** * The transformation namespace for the job. */ public static final String TRANSFORMATION_NAMESPACE = "pegasus"; /** * The name of the underlying transformation that is queried for in the * Transformation Catalog. */ public static final String TRANSFORMATION_NAME = "cleanup"; /** * The version number for the job. */ public static final String TRANSFORMATION_VERSION = null; /** * The derivation namespace for the job. */ public static final String DERIVATION_NAMESPACE = "pegasus"; /** * The name of the underlying derivation. */ public static final String DERIVATION_NAME = "cleanup"; /** * The derivation version number for the job. */ public static final String DERIVATION_VERSION = null; /** * The basename of the pegasus cleanup executable. */ public static final String EXECUTABLE_BASENAME = "pegasus-cleanup"; /** * A short description of the transfer implementation. */ public static final String DESCRIPTION = "A cleanup script that reads from the stdin the list of files" + " to be cleaned, with one file per line"; /** * The handle to the transformation catalog. */ protected TransformationCatalog mTCHandle; /** * Handle to the stagingSite catalog. */ protected SiteStore mSiteStore; /** * Handle to the transient replica catalog. */ protected ReplicaCatalog mTransientRC; /** * The handle to the properties passed to Pegasus. */ private PegasusProperties mProps; /** * The submit directory where the output files have to be written. */ private String mSubmitDirectory; /** * The handle to the logger. */ private LogManager mLogger; /** * A convenience method to return the complete transformation name being * used to construct jobs in this class. * * @return the complete transformation name */ public static String getCompleteTranformationName(){ return Separator.combine( TRANSFORMATION_NAMESPACE, TRANSFORMATION_NAME, TRANSFORMATION_VERSION ); } /** * The default constructor. */ public Cleanup(){ } /** * Creates a new instance of InPlace * * @param bag the bag of initialization objects. * */ public void initialize( PegasusBag bag ) { mProps = bag.getPegasusProperties(); mSubmitDirectory = bag.getPlannerOptions().getSubmitDirectory(); mSiteStore = bag.getHandleToSiteStore(); mTCHandle = bag.getHandleToTransformationCatalog(); mLogger = bag.getLogger(); mTransientRC = bag.getHandleToTransientReplicaCatalog(); } /** * Creates a cleanup job that removes the files from remote working directory. * This will eventually make way to it's own interface. * * @param id the identifier to be assigned to the job. * @param files the list of PegasusFile that need to be * cleaned up. * @param job the primary compute job with which this cleanup job is associated. * * @return the cleanup job. */ public Job createCleanupJob( String id, List files, Job job ){ //we want to run the clnjob in the same directory //as the compute job. We cannot clone as then the //the cleanup jobs for clustered jobs appears as //a clustered job. 
PM-368 Job cJob = new Job( job ); //we dont want credentials to be inherited cJob.resetCredentialTypes(); String stagingSite = job.getStagingSiteHandle(); //by default execution site for a cleanup job is local unless //overridden because of File URL's in list of files to be cleaned String eSite = "local"; //prepare the stdin for the cleanup job String stdIn = id + ".in"; try{ BufferedWriter writer; writer = new BufferedWriter( new FileWriter( new File( mSubmitDirectory, stdIn ) )); for( Iterator it = files.iterator(); it.hasNext(); ){ PegasusFile file = (PegasusFile)it.next(); String pfn = mTransientRC.lookup( file.getLFN(), stagingSite ); if( pfn == null ){ throw new RuntimeException( "Unable to determine url for lfn " + file.getLFN() + " at site " + stagingSite ); } if( pfn.startsWith( Cleanup.FILE_URL_SCHEME ) ){ //means the cleanup job should run on the staging site mLogger.log( " PFN for file " + file.getLFN() + " on staging site is a file URL " + pfn, LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Cleanup Job " + id + " instead of running on local site , will run on site " + stagingSite, LogManager.DEBUG_MESSAGE_LEVEL ); eSite = stagingSite; } //associate a credential if required cJob.addCredentialType( pfn ); writer.write( pfn ); writer.write( "\n" ); } //closing the handle to the writer writer.close(); } catch(IOException e){ mLogger.log( "While writing the stdIn file " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( "While writing the stdIn file " + stdIn, e ); } cJob.setSiteHandle( eSite ); //we dont want notifications to be inherited cJob.resetNotifications(); //also make sure that user executables staged is set to false cJob.setExecutableStagingForJob( false ); cJob.setJobType( Job.CLEANUP_JOB ); cJob.setName( id ); cJob.setArguments( "" ); //bug fix for JIRA PM-311 //we dont want cleanup job to inherit any stdout or stderr //specified in the DAX for compute job cJob.setStdOut( "" ); cJob.setStdErr( "" ); //inconsistency between job name and logical name for now cJob.setTransformation( Cleanup.TRANSFORMATION_NAMESPACE, Cleanup.TRANSFORMATION_NAME, Cleanup.TRANSFORMATION_VERSION ); cJob.setDerivation( Cleanup.DERIVATION_NAMESPACE, Cleanup.DERIVATION_NAME, Cleanup.DERIVATION_VERSION ); //cJob.setLogicalID( id ); //set the list of files as input files //to change function signature to reflect a set only cJob.setInputFiles( new HashSet( files) ); //the compute job of the VDS supernode is this job itself cJob.setVDSSuperNode( job.getID() ); //set the path to the rm executable TransformationCatalogEntry entry = this.getTCEntry( eSite ); cJob.setRemoteExecutable( entry.getPhysicalTransformation() ); //we want to run the job on fork jobmanager //SiteInfo stagingSite = mSiteHandle.getTXPoolEntry( cJob.getSiteHandle() ); //JobManager jobmanager = stagingSite.selectJobManager( Engine.TRANSFER_UNIVERSE, true ); //cJob.globusScheduler = (jobmanager == null) ? // null : // jobmanager.getInfo(JobManager.URL); //set the stdin file for the job cJob.setStdIn( stdIn ); //the cleanup job is a clone of compute //need to reset the profiles first cJob.resetProfiles(); //the profile information from the pool catalog needs to be //assimilated into the job. cJob.updateProfiles( mSiteStore.lookup( eSite ).getProfiles() ); //add any notifications specified in the transformation //catalog for the job. 
JIRA PM-391 cJob.addNotifications( entry ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. cJob.updateProfiles( entry ); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. cJob.updateProfiles( mProps ); //if no category is associated with the job, add a default //category if( !cJob.dagmanVariables.containsKey( Dagman.CATEGORY_KEY ) ){ cJob.dagmanVariables.construct( Dagman.CATEGORY_KEY, DEFAULT_CLEANUP_CATEGORY_KEY ); } //a remote hack that only works for condor pools //cJob.globusRSL.construct( "condorsubmit", // "(priority " + DEFAULT_PRIORITY_KEY + ")"); return cJob; } /** * Returns the TCEntry object for the rm executable on a grid stagingSite. * * @param stagingSite the stagingSite corresponding to which the entry is required. * * @return the TransformationCatalogEntry corresponding to the stagingSite. */ protected TransformationCatalogEntry getTCEntry( String site ){ List tcentries = null; TransformationCatalogEntry entry = null; try { tcentries = mTCHandle.lookup( Cleanup.TRANSFORMATION_NAMESPACE, Cleanup.TRANSFORMATION_NAME, Cleanup.TRANSFORMATION_VERSION, site, TCType.INSTALLED ); } catch (Exception e) { /* empty catch */ } entry = ( tcentries == null ) ? this.defaultTCEntry( site ): //try using a default one (TransformationCatalogEntry) tcentries.get(0); if( entry == null ){ //NOW THROWN AN EXCEPTION //should throw a TC specific exception StringBuffer error = new StringBuffer(); error.append("Could not find entry in tc for lfn "). append( Cleanup.getCompleteTranformationName()). append(" at site ").append(site); mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } return entry; } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. * * @param stagingSite the stagingSite for which the default entry is required. * * * @return the default entry. */ private TransformationCatalogEntry defaultTCEntry( String site ){ TransformationCatalogEntry defaultTCEntry = null; //check if PEGASUS_HOME is set String home = mSiteStore.getPegasusHome( site ); mLogger.log( "Creating a default TC entry for " + Cleanup.getCompleteTranformationName() + " at site " + site, LogManager.DEBUG_MESSAGE_LEVEL ); //if home is still null if ( home == null ){ //cannot create default TC mLogger.log( "Unable to create a default entry for " + Separator.combine( Cleanup.TRANSFORMATION_NAMESPACE, Cleanup.TRANSFORMATION_NAME, Cleanup.TRANSFORMATION_VERSION ), LogManager.DEBUG_MESSAGE_LEVEL ); //set the flag back to true return defaultTCEntry; } //remove trailing / if specified home = ( home.charAt( home.length() - 1 ) == File.separatorChar )? home.substring( 0, home.length() - 1 ): home; //construct the path to it StringBuffer path = new StringBuffer(); path.append( home ).append( File.separator ). append( "bin" ).append( File.separator ). 
append( Cleanup.EXECUTABLE_BASENAME ); defaultTCEntry = new TransformationCatalogEntry( Cleanup.TRANSFORMATION_NAMESPACE, Cleanup.TRANSFORMATION_NAME, Cleanup.TRANSFORMATION_VERSION ); defaultTCEntry.setPhysicalTransformation( path.toString() ); defaultTCEntry.setResourceId( site ); defaultTCEntry.setType( TCType.INSTALLED ); defaultTCEntry.setSysInfo( this.mSiteStore.lookup( site ).getSysInfo() ); //register back into the transformation catalog //so that we do not need to worry about creating it again try{ mTCHandle.insert( defaultTCEntry , false ); } catch( Exception e ){ //just log as debug. as this is more of a performance improvement //than anything else mLogger.log( "Unable to register in the TC the default entry " + defaultTCEntry.getLogicalTransformation() + " for site " + site, e, LogManager.DEBUG_MESSAGE_LEVEL ); } return defaultTCEntry; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/cleanup/CleanupFactory.java0000644000175000017500000001052011757531137030133 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.cleanup; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; /** * A factory class to load the appropriate type of Code Generator. The * CodeGenerator implementation is used to write out the concrete plan. * * @author Karan Vahi * @version $Revision: 2582 $ */ public class CleanupFactory { /** * The default package where the all the implementing classes are supposed to * reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.refiner.cleanup"; /** * Loads the implementing class corresponding to the mode specified by the * user at runtime. * * @param bag bag of initialization objects * * * @return instance of a Cleanup CleanupStrategy implementation * * @throws FactoryException that nests any error that * might occur during the instantiation of the implementation. */ public static CleanupStrategy loadCleanupStraegyInstance( PegasusBag bag ) throws CleanupFactoryException { PegasusProperties props = bag.getPegasusProperties(); if( props == null ){ throw new CleanupFactoryException( "Properties instance is null " ); } String className = props.getCleanupStrategy(); //prepend the package name className = DEFAULT_PACKAGE_NAME + "." + className; //try loading the class dynamically CleanupStrategy cd = null; DynamicLoader dl = new DynamicLoader(className); try { Object argList[] = new Object[ 0 ]; cd = ( CleanupStrategy ) dl.instantiate( argList ); cd.initialize( bag, CleanupFactory.loadCleanupImplementationInstance( bag ) ); } catch (Exception e) { throw new CleanupFactoryException( "Instantiating Cleanup Strategy", className, e ); } return cd; } /** * Loads the implementing class corresponding to the mode specified by the * user at runtime. 
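     * The simple class name is read from the properties and resolved relative
     * to DEFAULT_PACKAGE_NAME; e.g. a property value of "Cleanup" (an
     * illustrative value) would load
     * edu.isi.pegasus.planner.refiner.cleanup.Cleanup.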
* * @param bag bag of initialization objects * * * @return instance of a CreateDirecctory implementation * * @throws FactoryException that nests any error that * might occur during the instantiation of the implementation. */ public static CleanupImplementation loadCleanupImplementationInstance( PegasusBag bag ) throws CleanupFactoryException { PegasusProperties props = bag.getPegasusProperties(); if( props == null ){ throw new CleanupFactoryException( "Properties instance is null " ); } String className = props.getCleanupImplementation(); //for now //className = "DefaultImplementation"; //prepend the package name className = DEFAULT_PACKAGE_NAME + "." + className; //try loading the class dynamically CleanupImplementation impl = null; DynamicLoader dl = new DynamicLoader(className); try { Object argList[] = new Object[ 0 ]; impl = ( CleanupImplementation ) dl.instantiate( argList ); impl.initialize( bag ); } catch (Exception e) { throw new CleanupFactoryException( "Instantiating Cleanup Implementation", className, e ); } return impl; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/cleanup/CleanupImplementation.java0000644000175000017500000000345711757531137031524 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.cleanup; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import java.util.List; /** * The interface that defines how the cleanup job is invoked and created. * * @author Karan Vahi * @version $Revision: 2590 $ */ public interface CleanupImplementation { /** * The version number associated with this API Cleanup CleanupImplementation. */ public static final String VERSION = "1.1"; /** * Intializes the class. * * @param bag bag of initialization objects */ public void initialize( PegasusBag bag ) ; /** * Creates a cleanup job that removes the files from remote working directory. * This will eventually make way to it's own interface. * * @param id the identifier to be assigned to the job. * @param files the list of PegasusFile that need to be * cleaned up. * @param job the primary compute job with which this cleanup job is associated. * * @return the cleanup job. */ public Job createCleanupJob( String id, List files, Job job ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/cleanup/RM.java0000644000175000017500000001630411757531137025540 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.cleanup; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import java.util.List; import java.util.Iterator; import java.util.HashSet; import edu.isi.pegasus.planner.classes.PegasusBag; /** * Use's RM to do removal of the files on the remote sites. * * @author Karan Vahi * @version $Revision: 4551 $ */ public class RM implements CleanupImplementation{ /** * The default logical name to rm executable. */ public static final String DEFAULT_RM_LOGICAL_NAME = "rm"; /** * The default path to rm executable. */ public static final String DEFAULT_RM_LOCATION = "/bin/rm"; /** * The default priority key associated with the cleanup jobs. */ public static final String DEFAULT_PRIORITY_KEY = "1000"; /** * The handle to the transformation catalog. */ protected TransformationCatalog mTCHandle; /** * Handle to the site catalog. */ // protected PoolInfoProvider mSiteHandle; protected SiteStore mSiteStore; /** * The handle to the properties passed to Pegasus. */ private PegasusProperties mProps; /** * The default constructor. */ public RM( ){ } /** * Intializes the class. * * @param bag bag of initialization objects */ public void initialize( PegasusBag bag ) { mSiteStore = bag.getHandleToSiteStore(); mTCHandle = bag.getHandleToTransformationCatalog(); mProps = bag.getPegasusProperties(); } /** * Creates a cleanup job that removes the files from remote working directory. * This will eventually make way to it's own interface. * * @param id the identifier to be assigned to the job. * @param files the list of PegasusFile that need to be * cleaned up. * @param job the primary compute job with which this cleanup job is associated. * * @return the cleanup job. */ public Job createCleanupJob( String id, List files, Job job ){ //we want to run the clnjob in the same directory //as the compute job. We cannot clone as then the //the cleanup jobs for clustered jobs appears as //a clustered job. PM-368 Job cJob = new Job( job ); //we dont want notifications to be inherited cJob.resetNotifications(); cJob.setJobType( Job.CLEANUP_JOB ); cJob.setName( id ); cJob.setSiteHandle( job.getStagingSiteHandle() ); //bug fix for JIRA PM-311 //we dont want cleanup job to inherit any stdout or stderr //specified in the DAX for compute job cJob.setStdOut( "" ); cJob.setStdErr( "" ); //inconsistency between job name and logical name for now cJob.setTXVersion( null ); cJob.setTXName( "rm" ); cJob.setTXNamespace( null ); cJob.setLogicalID( id ); //the compute job of the VDS supernode is this job itself cJob.setVDSSuperNode( job.getID() ); //set the list of files as input files //to change function signature to reflect a set only. 
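        // (Further below, the job's arguments become the space-separated
        //  LFNs, so the final invocation looks like e.g. /bin/rm f.a f.b,
        //  with f.a and f.b illustrative LFNs; the rm path itself comes from
        //  the TC entry.)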
cJob.setInputFiles( new HashSet( files) ); //set the path to the rm executable TransformationCatalogEntry entry = this.getTCEntry( job.getSiteHandle() ); cJob.setRemoteExecutable( entry.getPhysicalTransformation() ); //set the arguments for the cleanup job StringBuffer arguments = new StringBuffer(); for( Iterator it = files.iterator(); it.hasNext(); ){ PegasusFile file = (PegasusFile)it.next(); arguments.append( " " ).append( file.getLFN() ); } cJob.setArguments( arguments.toString() ); //the cleanup job is a clone of compute //need to reset the profiles first cJob.resetProfiles(); //the profile information from the pool catalog needs to be //assimilated into the job. cJob.updateProfiles( mSiteStore.lookup( job.getSiteHandle() ).getProfiles() ); //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 cJob.addNotifications( entry ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. cJob.updateProfiles( entry ); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. cJob.updateProfiles( mProps ); //let us put some priority for the cleaunup jobs cJob.condorVariables.construct( Condor.PRIORITY_KEY, DEFAULT_PRIORITY_KEY ); return cJob; } /** * Returns the TCEntry object for the rm executable on a grid site. * * @param site the site corresponding to which the entry is required. * * @return the TransformationCatalogEntry corresponding to the site. */ protected TransformationCatalogEntry getTCEntry( String site ){ List tcentries = null; TransformationCatalogEntry entry = null; try { tcentries = mTCHandle.lookup( null, DEFAULT_RM_LOGICAL_NAME, null, site, TCType.INSTALLED ); } catch (Exception e) { /* empty catch */ } //see if any record is returned or not entry = (tcentries == null)? defaultTCEntry() : (TransformationCatalogEntry) tcentries.get(0); return entry; } /** * Returns a default TransformationCatalogEntry object for the rm executable. * * @return default TransformationCatalogEntry */ private static TransformationCatalogEntry defaultTCEntry(){ TransformationCatalogEntry entry = new TransformationCatalogEntry( null, DEFAULT_RM_LOGICAL_NAME, null); entry.setPhysicalTransformation( DEFAULT_RM_LOCATION ); return entry; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/cleanup/CleanupFactoryException.java0000644000175000017500000000653311757531137032023 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner.cleanup; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Cleanup Strategy and Implementation * classes. * * @author Karan Vahi * @version $Revision: 2582 $ */ public class CleanupFactoryException extends FactoryException { /** * The default classname that is associated with the exception. 
*/ public static final String DEFAULT_NAME = "File Cleanup"; /** * Constructs a CleanupFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public CleanupFactoryException( String msg ) { super( msg ); mClassname = CleanupFactoryException.DEFAULT_NAME; } /** * Constructs a CleanupFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public CleanupFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a CleanupFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public CleanupFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a CleanupFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public CleanupFactoryException( String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/ReductionEngine.java0000644000175000017500000005210611757531137026655 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.DagInfo; import edu.isi.pegasus.planner.classes.PCRelation; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.provenance.pasoa.XMLProducer; import edu.isi.pegasus.planner.provenance.pasoa.producer.XMLProducerFactory; import edu.isi.pegasus.planner.provenance.pasoa.PPS; import edu.isi.pegasus.planner.provenance.pasoa.pps.PPSFactory; import java.util.Enumeration; import java.util.Set; import java.util.HashSet; import java.util.Vector; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import edu.isi.pegasus.planner.classes.PegasusBag; /** * * Reduction engine for Planner 2. * Given a ADAG it looks up the replica catalog and * determines which output files are in the * Replica Catalog, and on the basis of these * ends up reducing the dag. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 3471 $ * */ public class ReductionEngine extends Engine implements Refiner{ /** * the original dag object which * needs to be reduced on the basis of * the results returned from the * Replica Catalog */ private ADag mOriginalDag; /** * the dag relations of the * orginal dag */ private Vector mOrgDagRelations; /** * the reduced dag object which is * returned. */ private ADag mReducedDag; /** * the jobs which are found to be in * the Replica Catalog. These are * the jobs whose output files are at * some location in the Replica Catalog. * This does not include the jobs which * are deleted by applying the reduction * algorithm */ private Vector mOrgJobsInRC ; /** * the jobs which are deleted due * to the application of the * Reduction algorithm. These do * not include the jobs whose output * files are in the RC. These are * the ones which are deleted due * to cascade delete */ private Vector mAddJobsDeleted; /** * all deleted jobs. This * is mOrgJobsInRC + mAddJobsDeleted. */ private Vector mAllDeletedJobs; /** * the files whose locations are * returned from the ReplicaCatalog */ private Set mFilesInRC; /** * The XML Producer object that records the actions. */ private XMLProducer mXMLStore; /** * The workflow object being worked upon. */ private ADag mWorkflow; /** * The constructor * * @param orgDag The original Dag object * @param bag the bag of initialization objects. */ public ReductionEngine( ADag orgDag, PegasusBag bag ){ super( bag) ; mOriginalDag = orgDag; mOrgDagRelations = mOriginalDag.dagInfo.relations; mOrgJobsInRC = new Vector(); mAddJobsDeleted = new Vector(); mAllDeletedJobs = new Vector(); mXMLStore = XMLProducerFactory.loadXMLProducer( mProps ); mWorkflow = orgDag; } /** * Returns a reference to the workflow that is being refined by the refiner. * * * @return ADAG object. */ public ADag getWorkflow(){ return this.mWorkflow; } /** * Returns a reference to the XMLProducer, that generates the XML fragment * capturing the actions of the refiner. This is used for provenace * purposes. * * @return XMLProducer */ public XMLProducer getXMLProducer(){ return this.mXMLStore; } /** * Reduces the workflow on the basis of the existence of lfn's in the * replica catalog. The existence of files, is determined via the bridge. 
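     * A job is dropped when each of its output LFNs already has a replica
     * registered; e.g. (illustrative) if preprocess_ID000001 only produces
     * f.a and f.a is found in the replica catalog, the job is deleted, and
     * any parent whose children have all been deleted is removed in turn
     * (see getJobsInRC() and secondPass()).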
* * @param rcb instance of the replica catalog bridge. * * @return the reduced dag * */ public ADag reduceDag( ReplicaCatalogBridge rcb ){ //search for the replicas of //the files. The search list //is already present in Replica //Mechanism classes mFilesInRC = rcb.getFilesInReplica(); //we reduce the dag only if the //force option is not specified. if(mPOptions.getForce()) return mOriginalDag; //load the PPS implementation PPS pps = PPSFactory.loadPPS( this.mProps ); //mXMLStore.add( "" ); mXMLStore.add( "" ); //call the begin workflow method try{ pps.beginWorkflowRefinementStep(this, PPS.REFINEMENT_REDUCE , true); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception", e ); } //clear the XML store mXMLStore.clear(); //mLogger.log("Reducing the workflow",LogManager.DEBUG_MESSAGE_LEVEL); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_REDUCE, LoggingKeys.DAX_ID, mOriginalDag.getAbstractWorkflowName() ); mOrgJobsInRC = getJobsInRC(mOriginalDag.vJobSubInfos,mFilesInRC); mAllDeletedJobs = (Vector)mOrgJobsInRC.clone(); firstPass(mOrgJobsInRC); secondPass(); firstPass(mAddJobsDeleted); mLogMsg = "Nodes/Jobs Deleted from the Workflow during reduction "; mLogger.log( mLogMsg,LogManager.INFO_MESSAGE_LEVEL ); for(Enumeration e = mAllDeletedJobs.elements();e.hasMoreElements();){ String deletedJob = (String) e.nextElement(); mLogger.log("\t" + deletedJob, LogManager.INFO_MESSAGE_LEVEL ); mXMLStore.add( "" ); mXMLStore.add( "\n" ); } mLogger.log( mLogMsg + " - DONE", LogManager.INFO_MESSAGE_LEVEL ); mReducedDag = makeRedDagObject( mOriginalDag, mAllDeletedJobs ); //call the end workflow method for pasoa interactions try{ mWorkflow = mReducedDag; for( Iterator it = mWorkflow.jobIterator(); it.hasNext(); ){ Job job = ( Job )it.next(); pps.isIdenticalTo( job.getName(), job.getName() ); } pps.endWorkflowRefinementStep( this ); } catch( Exception e ){ throw new RuntimeException( "PASOA Exception", e ); } mLogger.logEventCompletion(); return mReducedDag; } /** * This determines the jobs which are in * the RC corresponding to the files found * in the Replica Catalog. A job is said to * be in the RC if all the outfiles for * that job are found to be in the RC. * A job in RC can be removed from the Dag * and the Dag correspondingly reduced. * * @param vSubInfos Vector of Job * objects corresponding to all * the jobs of a Abstract Dag * * @param filesInRC Set of String * objects corresponding to the * logical filenames of files * which are found to be in the * Replica Catalog * * @return a Vector of jobNames (Strings) * * @see org.griphyn.cPlanner.classes.Job */ private Vector getJobsInRC(Vector vSubInfos,Set filesInRC){ Job subInfo; Set vJobOutputFiles; String jobName; Vector vJobsInReplica = new Vector(); int noOfOutputFilesInJob = 0; int noOfSuccessfulMatches = 0; if( vSubInfos.isEmpty() ){ String msg = "ReductionEngine: The set of jobs in the workflow " + "\n is empty."; mLogger.log( msg, LogManager.DEBUG_MESSAGE_LEVEL ); return new Vector(); } Enumeration e = vSubInfos.elements(); mLogger.log("Jobs whose o/p files already exist", LogManager.DEBUG_MESSAGE_LEVEL); while(e.hasMoreElements()){ //getting submit information about each submit file of a job subInfo = (Job)e.nextElement(); jobName = subInfo.jobName; //System.out.println(jobName); if(!subInfo.outputFiles.isEmpty()){ vJobOutputFiles = subInfo.getOutputFiles(); }else{ vJobOutputFiles = new HashSet(); } /* Commented on Oct10. 
This ended up making the Planner doing duplicate transfers if(subInfo.stdOut.length()>0) vJobOutputFiles.addElement(subInfo.stdOut); */ //determine the no of output files for that job if(vJobOutputFiles.isEmpty()){ mLogger.log("Job " + subInfo.getName() + " has no o/p files", LogManager.DEBUG_MESSAGE_LEVEL); continue; } noOfOutputFilesInJob = vJobOutputFiles.size(); //traversing through the output files of that particular job for( Iterator en = vJobOutputFiles.iterator(); en.hasNext(); ){ PegasusFile pf = (PegasusFile)en.next(); //jobName = pf.getLFN(); //if(stringInList(jobName,filesInRC)){ if(filesInRC.contains(pf.getLFN()) /*|| pf.getTransientTransferFlag()*/ ){ noOfSuccessfulMatches++; } } //we add a job to list of jobs whose output files already exist //only if noOfSuccessFulMatches is equal to the number of output //files in job if(noOfOutputFilesInJob == noOfSuccessfulMatches){ mLogger.log("\t" + subInfo.jobName, LogManager.DEBUG_MESSAGE_LEVEL); vJobsInReplica.addElement(subInfo.jobName); } //reinitialise the variables noOfSuccessfulMatches = 0; noOfOutputFilesInJob = 0; } mLogger.log("Jobs whose o/p files already exist - DONE", LogManager.DEBUG_MESSAGE_LEVEL); return vJobsInReplica; } /** * If a job is deleted it marks * all the relations related to that * job as deleted * * @param vDelJobs the vector containing the names * of the deleted jobs whose relations * we want to nullify */ private void firstPass(Vector vDelJobs){ Enumeration edeljobs = vDelJobs.elements(); while(edeljobs.hasMoreElements()){ String deljob = (String)edeljobs.nextElement(); Enumeration epcrel = mOrgDagRelations.elements(); while( epcrel.hasMoreElements()){ PCRelation pcrc = (PCRelation)epcrel.nextElement(); if((pcrc.child.equalsIgnoreCase(deljob))|| (pcrc.parent.equalsIgnoreCase(deljob))){ pcrc.isDeleted=true; } } } } /** * In the second pass we find all the * parents of the nodes which have been * found to be in the RC. * Corresponding to each parent, we find * the corresponding siblings for that * deleted job. * If all the siblings are deleted, we * can delete that parent. 
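     * e.g. if deleted job D has parent P whose children are {D, E}
     * (illustrative names), P is deleted only once E is also in the
     * deleted set.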
*/ private void secondPass(){ Enumeration eDelJobs = mAllDeletedJobs.elements(); Enumeration ePcRel; Enumeration eParents; String node; String parentNodeName; PCRelation currentRelPair; Vector vParents = new Vector();//all parents of a particular node Vector vSiblings = new Vector(); while(eDelJobs.hasMoreElements()){ node = (String)eDelJobs.nextElement(); //getting the parents of that node vParents = this.getNodeParents(node); //now for each parent checking if the siblings are deleted //if yes then delete the node eParents = vParents.elements(); while(eParents.hasMoreElements()){ parentNodeName = (String)eParents.nextElement(); //getting all the siblings for parentNodeName vSiblings = this.getChildren(parentNodeName); //now we checking if all the siblings are in vdeljobs Enumeration temp = vSiblings.elements(); boolean siblingsDeleted = true; while(temp.hasMoreElements()){ if(stringInVector( (String)temp.nextElement(),mAllDeletedJobs)){ //means the sibling has been marked deleted } else{ siblingsDeleted = false; } } //if all siblings are deleted add the job to vdeljobs if(siblingsDeleted){ //only add if the parentNodeName is not already in the list if(!stringInVector(parentNodeName,mAllDeletedJobs)){ String msg = "Deleted Node :" + parentNodeName; mLogger.log(msg,LogManager.DEBUG_MESSAGE_LEVEL); mAddJobsDeleted.addElement(new String (parentNodeName)); mAllDeletedJobs.addElement(new String (parentNodeName)); } } //clearing the siblings vector for that parent vSiblings.clear(); }//end of while(eParents.hasMoreElements()){ //clearing the parents Vector for that job vParents.clear(); }//end of while(eDelJobs.hasMoreElements) } /** * Gets all the parents of a particular node. * * @param node the name of the job whose parents are to be found. * * @return Vector corresponding to the parents of the node. */ private Vector getNodeParents(String node){ //getting the parents of that node return mOriginalDag.getParents(node); } /** * Gets all the children of a particular node. * * @param node the name of the node whose children we want to find. * * @return Vector containing the children of the node. */ private Vector getChildren(String node){ return mOriginalDag.getChildren(node); } /** * This returns all the jobs deleted from the workflow after the reduction * algorithm has run. * * @return List containing the Job of deleted leaf jobs. */ public List getDeletedJobs(){ List deletedJobs = new LinkedList(); for( Iterator it = mAllDeletedJobs.iterator(); it.hasNext(); ){ String job = (String)it.next(); deletedJobs.add( mOriginalDag.getSubInfo(job) ); } return deletedJobs; } /** * This returns all the deleted jobs that happen to be leaf nodes. This * entails that the output files of these jobs be transferred * from the location returned by the Replica Catalog to the * pool specified. This is a subset of mAllDeletedJobs * Also to determine the deleted leaf jobs it refers the original * dag, not the reduced dag. * * @return List containing the Job of deleted leaf jobs. 
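     *         e.g. (illustrative) a leaf analyze_ID000004 job whose
     *         registered outputs already exist would be returned here, so
     *         its files can be staged out directly from their replica
     *         locations.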
*/ public List getDeletedLeafJobs(){ List delLeafJobs = new LinkedList(); mLogger.log("Finding deleted leaf jobs",LogManager.DEBUG_MESSAGE_LEVEL); for( Iterator it = mAllDeletedJobs.iterator(); it.hasNext(); ){ String job = (String)it.next(); if(getChildren(job).isEmpty()){ //means a leaf job String msg = "Found deleted leaf job :" + job; mLogger.log(msg,LogManager.DEBUG_MESSAGE_LEVEL); delLeafJobs.add( mOriginalDag.getSubInfo(job) ); } } mLogger.log("Finding deleted leaf jobs - DONE", LogManager.DEBUG_MESSAGE_LEVEL); return delLeafJobs; } /** * makes the Reduced Dag object which * corresponding to the deleted jobs * which are specified. * * Note : We are plainly copying the * inputFiles and the outputFiles. After * reduction this changes but since we * need those only to look up the RC, * which we have done. * * @param orgDag the original Dag * @param vDelJobs the Vector containing the * names of the jobs whose * SubInfos and Relations we * want to remove. * * @return the reduced dag, which doesnot * have the deleted jobs * */ public ADag makeRedDagObject(ADag orgDag, Vector vDelJobs){ ADag redDag = new ADag(); redDag.dagInfo = constructNewDagInfo(mOriginalDag.dagInfo,vDelJobs); redDag.vJobSubInfos = constructNewSubInfos(mOriginalDag.vJobSubInfos,vDelJobs); return redDag; } /** * Constructs a DagInfo object for the * decomposed Dag on the basis of the jobs * which are deleted from the DAG by the * reduction algorithm * * Note : We are plainly copying the * inputFiles and the outputFiles. After reduction * this changes but since we need those * only to look up the RC, which we have done. * * @param dagInfo the object which is reduced on * the basis of vDelJobs * * @param vDelJobs Vector containing the logical file * names of jobs which are to * be deleted * * @return the DagInfo object corresponding * to the Decomposed Dag * */ private DagInfo constructNewDagInfo(DagInfo dagInfo,Vector vDelJobs){ DagInfo newDagInfo = (DagInfo)dagInfo.clone(); String jobName; PCRelation currentRelation; String parentName; String childName; boolean deleted; //populating DagJobs newDagInfo.dagJobs = new Vector(); Enumeration e = dagInfo.dagJobs.elements(); while(e.hasMoreElements()){ jobName = (String)e.nextElement(); if(!stringInVector( jobName,vDelJobs) ){ //that job is to be executed so we add it newDagInfo.dagJobs.addElement(new String(jobName)); } } //populating PCRelation Vector newDagInfo.relations = new Vector(); e = dagInfo.relations.elements(); while(e.hasMoreElements()){ currentRelation = (PCRelation)e.nextElement(); parentName = new String(currentRelation.parent); childName = new String(currentRelation.child); if( !(currentRelation.isDeleted) ){//the pair has not been marked deleted newDagInfo.relations.addElement(new PCRelation(parentName,childName,false)); } } return newDagInfo; }//end of function /** * constructs the Vector of subInfo objects * corresponding to the reduced ADAG. 
* * It also modifies the strargs to remove * them up of markup and display correct paths * to the filenames * * * @param vSubInfos the Job object including * the jobs which are not needed * after the execution of the * reduction algorithm * * @param vDelJobs the jobs which are deleted by * the reduction algo as their * output files are in the Replica Catalog * * @return the Job objects except the ones * for the deleted jobs * */ private Vector constructNewSubInfos(Vector vSubInfos,Vector vDelJobs){ Vector vNewSubInfos = new Vector(); Job newSubInfo; Job currentSubInfo; String jobName; Enumeration e = vSubInfos.elements(); while(e.hasMoreElements()){ currentSubInfo = (Job)e.nextElement(); jobName = currentSubInfo.jobName; //we add only if the jobName is not in vDelJobs if(!stringInVector(jobName,vDelJobs)){ newSubInfo = (Job)currentSubInfo.clone(); //adding to Vector vNewSubInfos.addElement(newSubInfo); } }//end of while return vNewSubInfos; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/AuthenticateEngine.java0000644000175000017500000001456211757531137027343 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.planner.catalog.site.classes.FileServer; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.classes.AuthenticateRequest; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.GridFTPServer; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.JobManager; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import java.util.HashSet; import java.util.Iterator; import java.util.List; import java.util.Set; import edu.isi.pegasus.planner.classes.PegasusBag; /** * It authenticates the user with the sites, that the user specifies at the * execution time. It spawns out a thread for each pool that authenticates * against the jobmanager for the vanilla universe as specified in the pool * configuration file. * * @author Karan Vahi * @version $Revision: 2582 $ */ public class AuthenticateEngine extends Engine { /** * The Set of pools that need to be authenticated against. */ private Set mExecPools; /** * The overloaded constructor. * * @param props the PegasusProperties to be used. * @param pools The set of pools against which you want to authenticate the * user. */ /* public AuthenticateEngine( PegasusProperties props, Set pools) { super( props ); mExecPools = pools; } */ /** * The overloaded constructor. * * @param bag the PegasusBag to be used. * @param pools The set of pools against which you want to authenticate the * user. */ public AuthenticateEngine( PegasusBag bag, Set pools) { super( bag ); mExecPools = pools; } /** * It returns a set of pools against which the user can authenticate to. * * @return the set of authenticated pools. 
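*
* A hypothetical usage sketch (not part of the original source; it mirrors
* the commented-out test code in main() below):
*
*   Set pools = new HashSet();
*   pools.add( "isi_lsf" );
*   Set authenticated = new AuthenticateEngine( bag, pools ).authenticate();
*
* where bag is the populated PegasusBag handed to the constructor.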
*/ public Set authenticate(){ Iterator it = mExecPools.iterator(); ThreadPool manager = new ThreadPool( mProps, mExecPools); String pool; GridGateway jm; FileServer gserv; String contact; //we need synchronization to ensure that threads are started only //when all the requests have been sent to the threadpool, as a //failure to authenticate against a pool leads to its removal from //this set. synchronized(mExecPools){ while(it.hasNext()){ pool = (String)it.next(); // List jmList = mPoolHandle.getJobmanagers(pool); // Iterator it1 = jmList.iterator(); for( Iterator it1 = mSiteStore.lookup( pool ).getGridGatewayIterator(); it1.hasNext() ;){ jm = (GridGateway)it1.next(); // contact = jm.getInfo(JobManager.URL); AuthenticateRequest ar = new AuthenticateRequest('j',pool, jm.getContact()); manager.acceptRequest(ar); } // List gridFtpList = mPoolHandle.getGridFTPServers(pool); // it1 = gridFtpList.iterator(); // while(it1.hasNext()){ for( Iterator it1 = mSiteStore.lookup( pool ).getFileServerIterator(); it1.hasNext();){ gserv = ( FileServer )it1.next(); // contact = gserv.getInfo(GridFTPServer.GRIDFTP_URL); AuthenticateRequest ar = new AuthenticateRequest('g',pool, gserv.getURLPrefix() ); manager.acceptRequest(ar); } } } manager.shutdown(); purgePools(); return mExecPools; } /** * It removes from the list of pools any pool that was not authenticated * against. It queries the soft state of the pool config to see if there * is at least one jobmanager and gridftp server on the pool. * Due to the authentication, the unauthenticated jobmanagers and servers * would have been removed from the soft state of the pool config. */ private synchronized void purgePools(){ Iterator it = mExecPools.iterator(); String pool; List l; while(it.hasNext()){ pool = (String)it.next(); l = mSiteStore.lookup( pool ).getFileServers(); if(l == null || l.isEmpty()){ mLogger.log("Removing Exec pool " + pool + " as no authenticated gridftp server", LogManager.DEBUG_MESSAGE_LEVEL); it.remove(); continue; } List l1 = mSiteStore.lookup( pool ).getGridGateways( ); // List l1 = mPoolHandle.getJobmanagers(pool,"transfer"); if( (l == null || l.isEmpty()) || (l1 == null || l1.isEmpty())){ //we have no jobmanagers for universe vanilla or transfer universe mLogger.log("Removing Exec pool " + pool + " as no authenticated jobmanager", LogManager.DEBUG_MESSAGE_LEVEL); it.remove(); continue; } } } /** * The main testing method. * */ public static void main(String[] args){ Set s = new HashSet(); //s.add("isi_condor"); s.add("isi_lsf"); /* AuthenticateEngine a = new AuthenticateEngine( PegasusProperties.getInstance(),s); a.mLogger.setLevel(1); a.authenticate(); System.out.println("Authentication Done!!"); System.out.println(a.mPoolHandle.getGridFTPServers("isi_lsf")); a.mLogger.log("Vanilla JMS " + a.mPoolHandle.getJobmanagers("isi_lsf"), LogManager.DEBUG_MESSAGE_LEVEL); */ } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/Authenticate.java0000644000175000017500000003723411757531137026216 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.classes.AuthenticateRequest; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.catalog.site.impl.old.PoolInfoProvider; import org.globus.gram.Gram; import org.globus.gram.GramException; import org.ietf.jgss.GSSCredential; import org.ietf.jgss.GSSException; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.IOException; import java.io.InputStreamReader; import java.io.InterruptedIOException; import java.io.OutputStreamWriter; import java.net.ConnectException; import java.net.InetSocketAddress; import java.net.Socket; import java.util.StringTokenizer; /** * It takes in a authenticate request and authenticates against the resource * on the basis of the type of the resource against which authentication is * required. * * @author Karan Vahi * @version $Revision: 2582 $ */ public class Authenticate { /** * The standard port at which Grid FTP runs. */ public static final int GRID_FTP_STANDARD_PORT = 2811; /** * The timeout in seconds. All sockets opened timeout after this period. */ public static final int TIMEOUT_VALUE = 120; /** * The timeout value that is to be used in milliseconds */ private int mTimeout; /** * The object containing the authenticate request. */ private AuthenticateRequest mAuthRequest; /** * The handle to the Pool Info Provider. */ private PoolInfoProvider mPoolHandle; /** * The handle to the LogManager object. */ private LogManager mLogger; /** * The handle to the PegasusProperties object. */ private PegasusProperties mProps; /** * The credential to be used while authentication to jobmanager. */ private GSSCredential mCredential; /** * The overloaded constructor. * * @param properties the PegasusProperties to be used. */ public Authenticate( PegasusProperties properties, PoolInfoProvider poolHandle ) { mPoolHandle = poolHandle; mLogger = LogManagerFactory.loadSingletonInstance( ); mProps = properties; mTimeout = (mProps.getGridFTPTimeout() == null)? this.TIMEOUT_VALUE: Integer.parseInt(mProps.getGridFTPTimeout()); mTimeout *= 1000; } /** * Sets the credential that has to be used for authentication. * * @param credential the credential to be set. */ public void setCredential(GSSCredential credential){ mCredential = credential; } /** * Authenticates against a resource referred to in the authenticate request * object. */ public boolean authenticate(AuthenticateRequest ar) { mAuthRequest = ar; char type = ar.getResourceType(); boolean alive = false; //check if the request is invalid if (ar.requestInvalid()) { throw new RuntimeException("Invalid authentication request " + ar); } if (type == AuthenticateRequest.GRIDFTP_RESOURCE) { //check if the grid ftp server is alive. HostPort hp = getHostPort(ar.getResourceContact()); alive = gridFTPAlive(hp.getHost(),hp.getPort()); } if (type == AuthenticateRequest.JOBMANAGER_RESOURCE) { alive = authenticateJobManager(ar.getResourceContact()); } return alive; } /** * It tries to remove a resource from the soft state of the pool. This is * possible only if the underlying pool interface implementation is soft * state. * * @param ar the AuthenticateRequest containing the resource info * * @return boolean true removal was successful. * false unable to remove. 
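*
* (Dispatch sketch, summarising the body below: a GRIDFTP_RESOURCE request
* maps to mPoolHandle.removeGridFtp( pool, contact ), a JOBMANAGER_RESOURCE
* request to mPoolHandle.removeJobManager( pool, null, contact ); any other
* resource type returns false.)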
*/ public boolean removeResource(AuthenticateRequest ar){ char type = ar.getResourceType(); if(type == AuthenticateRequest.GRIDFTP_RESOURCE){ return mPoolHandle.removeGridFtp(ar.getPool(), ar.getResourceContact()); } if(type == AuthenticateRequest.JOBMANAGER_RESOURCE){ return mPoolHandle.removeJobManager(ar.getPool(),null,ar.getResourceContact()); } return false; } /** * It authenticates against the jobmanager specified. * * @param contact the jobmanager contact. * * @return boolean indicating whether the authentication succeeded. */ public boolean authenticateJobManager(String contact){ boolean val = true; try{ mLogger.log( "Authenticating " + contact, LogManager.DEBUG_MESSAGE_LEVEL); if(mCredential == null){ //try authenticating the default credential Gram.ping(contact); } else Gram.ping(mCredential,contact); } catch(GramException gex){ mLogger.log("Unable to authenticate against jobmanager " + contact + " because " + gex.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); val = false; } catch(GSSException gss){ String message = (gss.getMajor() == GSSException.CREDENTIALS_EXPIRED)? "Your credentials have expired. You need to do a grid-proxy-init.": "GssException caught " +gss.getMajorString() + gss.getMinorString(); mLogger.log(message,LogManager.ERROR_MESSAGE_LEVEL); val = false; } catch(Exception e){ //an unknown exception occurred. print a message and return false mLogger.log("Unknown Exception occurred " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); val = false; } finally{ mLogger.log("Authenticating completed for " + contact,LogManager.DEBUG_MESSAGE_LEVEL); } return val; } /** * It checks with a grid ftp server running at a particular host * and port, to see if it is up or not. This is done by opening a * socket to the specified host at the specified port. If the socket * times out (which could be due to excessive load on the server or * the server being hung) false is returned. * * @param host the host at which the gridftp server is running. * @param port the port at which the server is running on the host. * * @return true the gridftp server is alive and kicking. * false - the submit host is not connected to the network. * - the server is not running. * - we were able to connect but timed out. * - version is not compatible. * */ public boolean gridFTPAlive(String host, int port) { Socket s = new Socket(); String hp = combine(host, port); boolean alive = false; mLogger.log("Checking status of " + hp, LogManager.DEBUG_MESSAGE_LEVEL); InetSocketAddress addrs = new InetSocketAddress(host, port); if (addrs.isUnresolved()) { //either the host on which Pegasus is running is not connected //to the network, or the hostname is invalid. Either way we return //false; mLogger.log("Unresolved address to " + hp, LogManager.DEBUG_MESSAGE_LEVEL); return false; } try { s.connect(addrs,mTimeout); //set the timeout for the input streams // gotten from this socket s.setSoTimeout(mTimeout); String response; char type = 'c'; BufferedReader rd = new BufferedReader(new InputStreamReader( s.getInputStream())); BufferedWriter out = new BufferedWriter(new OutputStreamWriter( s.getOutputStream())); while ( (response = rd.readLine()) != null) { /*mLogger.logMessage("Response from server " + hp + " " + response, 1);*/ alive = parseGridFTPResponse(response, type); if (type == 'c' && alive) { //send the quit command to the server out.write("quit\r\n"); //flush explicitly, else the buffered command may never //reach the server before the half close below out.flush(); //do a half close. We just need to wait for the response //from server now s.shutdownOutput(); type = 'q'; } else { //invalid response or the server is stuck. //break out of the infinite waiting. 
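//(For reference, a healthy exchange -- an assumption based on standard
//FTP reply codes, not taken from this source -- is a "220 ..." greeting
//on connect followed by a "221 ..." reply to our quit; reaching this
//branch means we saw neither.)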
break; } } } catch(java.net.SocketTimeoutException se){ //means we experienced a timeout on read mLogger.log("Timeout experienced while reading from ip" + " stream of " + hp, LogManager.ERROR_MESSAGE_LEVEL); alive = false; } catch (InterruptedIOException e) { //timeout was reached. mLogger.log("Timeout experienced while contacting " + hp, LogManager.ERROR_MESSAGE_LEVEL); alive = false; } catch (ConnectException ce) { //probably no process running at the port mLogger.log("GridFtp server on " + host + " not running on port " + port + " .Exception " + ce.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); alive = false; } catch (IOException ie) { mLogger.log("Unable to contact " + hp + " due to " + ie.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); alive = false; } catch(Exception e){ //an unknown exception occured. print a message and return false mLogger.log("Unknown Exception occured " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); alive = false; } finally{ try{ s.close(); } catch(IOException e){ mLogger.log("Unable to close socket to " + hp + " because" + e.getMessage(),LogManager.ERROR_MESSAGE_LEVEL); alive = false; } } return alive; } /** * The parses the grid ftp server response and returns if the response * was valid or not. * * @param response the response got from the grid ftp server. * @param type c response when first connected to server. * q response when sent the quit command. * * @return boolean true if the response was valid * false invalid response. */ private boolean parseGridFTPResponse(String response, char type) { StringTokenizer st = new StringTokenizer(response); boolean valid = false; switch (type) { case 'c': //valid response should be of type 220 blah while (st.hasMoreTokens()) { if (st.nextToken().equals("220")) { valid = true; } break; } break; case 'q': //valid response would be type 221 blah while (st.hasMoreTokens()) { if (st.nextToken().equals("221")) { valid = true; } break; } break; default: valid = false; } if(valid == false) mLogger.log(response,LogManager.ERROR_MESSAGE_LEVEL); return valid; } /** * A small helper method that returns the standard host and port * combination to be used for logging purposes. * * @param host the host. * @param port the port. * * @return combined string. */ private String combine(String host, int port) { String st = host + ":" + port; return st; } /** * Determines the hostname from the urlPrefix string in the pool file. * * @param urlPrefix the protocol, hostname and port combination. * * @return the host name. */ private HostPort getHostPort(String urlPrefix) { StringTokenizer st = new StringTokenizer(urlPrefix); String hostPort; String hostName = new String(); String token = new String(); int count = 0; int port = this.GRID_FTP_STANDARD_PORT; HostPort hp = null; while (st.hasMoreTokens()) { token = st.nextToken("/"); count++; if (count == 2) { hostPort = token.trim(); StringTokenizer st1 = new StringTokenizer(hostPort,":"); hostName = st1.nextToken(); if(st1.hasMoreTokens()){ //port is specified try{ port = Integer.parseInt(st1.nextToken()); } catch(NumberFormatException e){ port = this.GRID_FTP_STANDARD_PORT; } } //System.out.println("Host->" + hostName + " Port->" + port); hp = new HostPort(hostName,port); //System.out.println(hp); return hp; } } return null; } /** * A convenience inner class that stores the host and the port associated * with a server. */ class HostPort{ /** * The host at which the server is running. */ private String mHost; /** * The port at which the server is running. 
*/ private int mPort; /** * The overloaded constructor */ public HostPort(String host, int port){ mHost = host; mPort = port; } /** * Returns the host associated with this object. * * @return String */ public String getHost(){ return mHost; } /** * Returns the port associated with this object. * * @return int */ public int getPort(){ return mPort; } /** * Returns the string version of this object. */ public String toString(){ StringBuffer sb = new StringBuffer(); sb.append("host name ").append(mHost). append(" port ").append(mPort); return sb.toString(); } } public static void main(String[] args){ Authenticate a = new Authenticate( PegasusProperties.getInstance(), null ); String contact = "dc-user2.isi.edu/jobmanager-lsf"; String contact1 = "dc-n1.isi.edu"; System.out.println("Authenticating " + contact1); //a.authenticateJobManager(contact); a.gridFTPAlive("dc-n1.isi.edu",a.GRID_FTP_STANDARD_PORT); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/TransferEngine.java0000644000175000017500000020170511757531137026506 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.planner.catalog.site.classes.FileServer; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.NameValue; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.common.Utility; //import edu.isi.pegasus.planner.refiner.createdir.S3; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.Adapter; import edu.isi.pegasus.planner.selector.ReplicaSelector; import edu.isi.pegasus.planner.selector.replica.ReplicaSelectorFactory; import edu.isi.pegasus.planner.transfer.Refiner; import edu.isi.pegasus.planner.transfer.refiner.RefinerFactory; import edu.isi.pegasus.planner.refiner.createdir.Implementation; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import edu.isi.pegasus.common.util.FactoryException; import edu.isi.pegasus.planner.classes.DAGJob; import edu.isi.pegasus.planner.classes.DAXJob; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.transfer.SLS; import edu.isi.pegasus.planner.transfer.sls.SLSFactory; import org.griphyn.vdl.euryale.FileFactory; import org.griphyn.vdl.euryale.VirtualDecimalHashedFileFactory; import org.griphyn.vdl.euryale.VirtualFlatFileFactory; import java.io.File; import 
java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import java.util.Vector; import java.util.Properties; /** * The transfer engine, which on the basis of the pools on which the jobs are to * run, adds nodes to transfer the data products. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4950 $ * */ public class TransferEngine extends Engine { /** * The MAX level is assigned as the level for deleted jobs. * We can put it to Integer.MAX_VALUE, but it is rare that number of levels * in a workflows exceed 1000. */ public static final int DELETED_JOBS_LEVEL = 1000; /** * The scheme name for file url. */ public static final String FILE_URL_SCHEME = "file:"; /** * The scheme name for file url. */ public static final String SYMLINK_URL_SCHEME = "symlink:"; /** * The property prefix for retrieving SRM properties. */ public static final String SRM_PROPERTIES_PREFIX = "pegasus.transfer.srm"; /** * The suffix to retrive the service url for SRM server. */ public static final String SRM_SERVICE_URL_PROPERTIES_SUFFIX = "service.url"; /** * The suffix to retrive the mount point for SRM server. */ public static final String SRM_MOUNT_POINT_PROPERTIES_SUFFIX = "mountpoint"; /** * A map that associates the site name with the SRM server url and mount point. */ private Map mSRMServiceURLToMountPointMap; /** * The DAG object to which the transfer nodes are to be added. This is the * reduced Dag, which is got from the Reduction Engine. */ private ADag mDag; /** * The bridge to the Replica Catalog. */ private ReplicaCatalogBridge mRCBridge; /** * The handle to the replica selector that is to used to select the various * replicas. */ private ReplicaSelector mReplicaSelector; /** * The handle to the transfer refiner that adds the transfer nodes into the * workflow. */ private Refiner mTXRefiner; /** * Holds all the jobs deleted by the reduction algorithm. */ private List mDeletedJobs; /** * Holds the jobs from the original dags which are deleted by the reduction * algorithm. */ private List mDeletedLeafJobs; /** * A SimpleFile Replica Catalog, that tracks all the files that are being * materialized as part of workflow executaion. */ private ReplicaCatalog mTransientRC; /** * The handle to the file factory, that is used to create the top level * directories for each of the partitions. */ private FileFactory mFactory; /** * The base path for the stageout directory on the output site where all * the files are staged out. */ private String mStageOutBaseDirectory; /** * The working directory relative to the mount point of the execution pool. * It is populated from the pegasus.dir.exec property from the properties file. * If not specified then it work_dir is supposed to be the exec mount point * of the execution pool. */ protected String mWorkDir; /** * This contains the storage directory relative to the se mount point of the * pool. It is populated from the pegasus.dir.storage property from the properties * file. If not specified then the storage directory is the se mount point * from the pool.config file. */ protected String mStorageDir; /** * A boolean indicating whether to have a deep directory structure for * the storage directory or not. 
*/ protected boolean mDeepStorageStructure; /** * This member variable if set causes the source url for the pull nodes from * the RLS to have file:// url if the pool attributed associated with the pfn * is same as a particular jobs execution pool. */ protected boolean mUseSymLinks; /** * A boolean indicating whether we are doing worker node execution or not. */ private boolean mWorkerNodeExecution; /** * The handle to the SLS implementor */ private SLS mSLS; /** * Overloaded constructor. * * @param reducedDag the reduced workflow. * @param bag bag of initialization objects * @param deletedJobs list of all jobs deleted by reduction algorithm. * @param deletedLeafJobs list of deleted leaf jobs by reduction algorithm. */ public TransferEngine( ADag reducedDag, PegasusBag bag, List deletedJobs , List deletedLeafJobs){ super( bag ); mWorkDir = mProps.getExecDirectory(); mStorageDir = mProps.getStorageDirectory(); mDeepStorageStructure = mProps.useDeepStorageDirectoryStructure(); mUseSymLinks = mProps.getUseOfSymbolicLinks(); mWorkerNodeExecution = mProps.executeOnWorkerNode(); mSRMServiceURLToMountPointMap = constructSiteToSRMServerMap( mProps ); mDag = reducedDag; mDeletedJobs = deletedJobs; mDeletedLeafJobs = deletedLeafJobs; if( mWorkerNodeExecution ){ //load SLS mSLS = SLSFactory.loadInstance( mBag ); } try{ mTXRefiner = RefinerFactory.loadInstance( reducedDag, bag ); mReplicaSelector = ReplicaSelectorFactory.loadInstance(mProps); } catch(Exception e){ //wrap all the exceptions into a factory exception throw new FactoryException("Transfer Engine ", e); } this.initializeStageOutSiteDirectoryFactory( reducedDag ); //log some configuration messages mLogger.log("Transfer Refiner loaded is [" + mTXRefiner.getDescription() + "]",LogManager.CONFIG_MESSAGE_LEVEL); mLogger.log("ReplicaSelector loaded is [" + mReplicaSelector.description() + "]",LogManager.CONFIG_MESSAGE_LEVEL); } /** * Returns whether to run a transfer job on local site or not. * * * @param site the site handle associated with the destination URL. * @param destURL the destination URL * @param type the type of transfer job for which the URL is being constructed. * * @return true indicating if the associated transfer job should run on local * site or not. */ public boolean runTransferOnLocalSite( String site, String destinationURL, int type) { //check if user has specified any preference in config boolean result = true; //short cut for local site if( site.equals( "local" ) ){ //transfer to run on local site return result; } if( mTXRefiner.refinerPreferenceForTransferJobLocation() ){ //refiner is advertising a preference for where transfer job //should be run. Use that. return mTXRefiner.refinerPreferenceForLocalTransferJobs( type ); } if( mTXRefiner.runTransferRemotely( site, type )){ //always use user preference return !result; } //check to see if destination URL is a file url else if( destinationURL != null && destinationURL.startsWith( TransferEngine.FILE_URL_SCHEME ) ){ result = false; } return result; } /** * Returns the Job object for the job specified. * * @param jobName the name of the job * * @return the Job object for a job. */ private Job getSubInfo(String jobName) { return mDag.getSubInfo(jobName); } /** * Adds the transfer nodes to the workflow. * * @param rcb the bridge to the ReplicaCatalog. * @param transientCatalog an instance of the replica catalog that will * store the locations of the files on the remote * sites. 
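*
* Overall flow, summarising the method body: the reduced workflow is
* walked top down; for every job, inter-site and stage-in transfer nodes
* are added via processParents(), and, if an output site was specified,
* stage-out transfer nodes are added as well, including direct stage-outs
* for jobs deleted by the data reuse algorithm.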
*/ public void addTransferNodes( ReplicaCatalogBridge rcb, ReplicaCatalog transientCatalog ) { mRCBridge = rcb; mTransientRC = transientCatalog; Job currentJob; String currentJobName; Vector vOutPoolTX; String msg; String outputSite = mPOptions.getOutputSite(); //convert the dax to a graph representation and walk it //in a top down manner Graph workflow = Adapter.convert( mDag ); //go through each job in turn boolean stageOut = (( outputSite != null ) && ( outputSite.trim().length() > 0 )); for( Iterator it = workflow.iterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); currentJob = (Job)node.getContent(); //set the staging site for the job //currentJob.setStagingSiteHandle( getStagingSite( currentJob ) ); //For 3.2 release we may not need this, as credentials will be handled //explicitly. And no SLS files exist any longer. /* //modify the jobs if required for worker node execution if( mWorkerNodeExecution ){ mSLS.modifyJobForFirstLevelStaging( currentJob, mPOptions.getSubmitDirectory(), mSLS.getSLSInputLFN( currentJob ), mSLS.getSLSOutputLFN( currentJob ) ); } */ //set the node depth as the level currentJob.setLevel( node.getDepth() ); currentJobName = currentJob.getName(); mLogger.log("",LogManager.DEBUG_MESSAGE_LEVEL); msg = "Job being traversed is " + currentJobName; mLogger.log(msg, LogManager.DEBUG_MESSAGE_LEVEL); msg = "To be run at " + currentJob.executionPool; mLogger.log(msg, LogManager.DEBUG_MESSAGE_LEVEL); //getting the parents of that node Vector vParents = mDag.getParents(currentJobName); mLogger.log(vectorToString("Parents of job:", vParents), LogManager.DEBUG_MESSAGE_LEVEL); processParents(currentJob, vParents); //transfer the nodes output files //to the output pool if ( stageOut ) { SiteCatalogEntry stagingSite = mSiteStore.lookup( currentJob.getStagingSiteHandle() ); if (stagingSite == null ) { mLogMsg = this.poolNotFoundMsg( currentJob.getSiteHandle(), "vanilla"); mLogger.log( mLogMsg, LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( mLogMsg ); } boolean localTransfer = runTransferOnLocalSite( currentJob.getSiteHandle(), stagingSite.getHeadNodeFS().selectScratchSharedFileServer().getURLPrefix(), Job.STAGE_OUT_JOB); vOutPoolTX = getFileTX(outputSite, currentJob, localTransfer ); mTXRefiner.addStageOutXFERNodes( currentJob, vOutPoolTX, rcb, localTransfer ); } else{ //create the cache file always //Pegasus Bug PM-32 and PM-356 trackInTransientRC( currentJob ); } } //we are done with the traversal. //mTXRefiner.done(); //get the deleted leaf jobs o/p files to output pool //only if output pool is specified //should be moved upwards in the pool. redundancy at present if (outputSite != null && outputSite.trim().length() > 0 && !mDeletedJobs.isEmpty() ) { mLogger.log( "Adding stage out jobs for jobs deleted from the workflow", LogManager.INFO_MESSAGE_LEVEL ); for( Iterator it = this.mDeletedJobs.iterator(); it.hasNext() ;) { currentJob = (Job)it.next(); currentJob.setLevel( TransferEngine.DELETED_JOBS_LEVEL ); //for a deleted node, to transfer it's output //the execution pool should be set to local i.e submit host currentJob.setSiteHandle( "local" ); //set the staging site for the deleted job currentJob.setStagingSiteHandle( getStagingSite( currentJob ) ); //for jobs deleted during data reuse we dont //go through the staging site. they are transferred //directly to the output site. 
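//(Illustration with hypothetical names: if deleted job J produced f.out
//and the Replica Catalog lists gsiftp://siteA.example.org/data/f.out,
//the stage-out node copies that URL straight to the output site's
//storage directory.)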
vOutPoolTX = getDeletedFileTX(outputSite, currentJob); if( !vOutPoolTX.isEmpty() ){ mTXRefiner.addStageOutXFERNodes( currentJob, vOutPoolTX, rcb, true ); } } } //we are done with the traversal. mTXRefiner.done(); //close the handle to the cache file if it is written //closeTransientRC(); } /** * Returns the staging site to be used for a job. If a staging site is not * determined from the options it is set to be the execution site for the job * * @param job the job for which to determine the staging site * * @return the staging site */ public String getStagingSite( Job job ){ String ss = this.mPOptions.getStagingSite( job.getSiteHandle() ); return (ss == null) ? job.getSiteHandle(): ss; } /** * This gets the file transfer objects corresponding to the location of files * found in the replica mechanism, and transfers it to the output pool asked * by the user. If the output pool path and the one returned by the replica * mechanism match then that object is not transferred. * * @param pool this the output pool which the user specifies at runtime. * @param job The Job object corresponding to the leaf job which was * deleted by the Reduction algorithm * * @return Vector of FileTransfer objects */ private Vector getDeletedFileTX( String pool, Job job ) { Vector vFileTX = new Vector(); SiteCatalogEntry p = mSiteStore.lookup(pool); for( Iterator it = job.getOutputFiles().iterator(); it.hasNext(); ){ PegasusFile pf = (PegasusFile)it.next(); String lfn = pf.getLFN(); //we only have to get a deleted file that user wants to be transferred if( pf.getTransientTransferFlag() ){ continue; } ReplicaLocation rl = mRCBridge.getFileLocs( lfn ); //sanity check if( rl == null ){ throw new RuntimeException( "Unable to find a location in the Replica Catalog for output file " + lfn ); } //definite inconsitency as url prefix and mount point //are not picked up from the same server String destURL = p.getHeadNodeFS().selectScratchSharedFileServer().getURLPrefix() + this.getPathOnStageoutSite( lfn ); //selLocs are all the locations found in ReplicaMechanism corr //to the pool pool ReplicaLocation selLocs = mReplicaSelector.selectReplicas( rl, pool, this.runTransferOnLocalSite( pool,destURL, Job.STAGE_OUT_JOB )); boolean flag = false; FileTransfer ft = null; //checking through all the pfn's returned on the pool for ( Iterator selIt = selLocs.pfnIterator(); selIt.hasNext(); ) { ReplicaCatalogEntry selLoc = ( ReplicaCatalogEntry ) selIt.next(); String sourceURL = selLoc.getPFN(); //check if the URL's match if (sourceURL.trim().equalsIgnoreCase(destURL.trim())){ String msg = "The leaf file " + lfn + " is already at the output pool " + pool; mLogger.log(msg,LogManager.INFO_MESSAGE_LEVEL); flag = true; break; } ft = new FileTransfer( lfn, job.getName() ); ft.addSource( selLoc.getResourceHandle() , sourceURL ); ft.addDestination( pool, destURL ); ft.setSize( pf.getSize() ); //System.out.println("Deleted Leaf Job File transfer object " + ft); } if (!flag) { // adding the last pfn vFileTX.addElement(ft); } } return vFileTX; } /** * It processes a nodes parents and determines if nodes are to be added * or not. All the input files for the job are searched in the output files of * the parent nodes and the Replica Mechanism. * * @param job the Job object containing all the * details of the job. * @param vParents Vector of String objects corresponding to the Parents * of the node. 
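*
* Decision sketch for each input file f of the job (a summary of the
* body below):
*   f is produced by a parent job        : candidate for an inter-site
*                                          transfer between staging sites
*   f is not produced and not transient  : looked up in the Replica Catalog
*   f is marked transient                : no transfer node is added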
*/ private void processParents(Job job, Vector vParents) { Set nodeIpFiles = job.getInputFiles(); Vector vRCSearchFiles = new Vector(); //vector of PegasusFile Vector vIPTxFiles = new Vector(); Vector vParentSubs = new Vector(); //getAll the output Files of the parents Set parentsOutFiles = getOutputFiles(vParents, vParentSubs); //interpool transfer of the nodes parents //output files Collection[] interSiteFileTX = this.getInterpoolFileTX(job, vParentSubs); Collection localInterSiteTX = interSiteFileTX[0]; Collection remoteInterSiteTX = interSiteFileTX[1]; //only add if there are files to transfer if( !localInterSiteTX.isEmpty()){ mTXRefiner.addInterSiteTXNodes(job, localInterSiteTX, true ); } if( !remoteInterSiteTX.isEmpty() ){ mTXRefiner.addInterSiteTXNodes(job, remoteInterSiteTX, false ); } //check if node ip files are in the parents out files //if files are not, then these are to be got //from the RC based on the transiency characteristic for( Iterator it = nodeIpFiles.iterator(); it.hasNext(); ){ PegasusFile pf = (PegasusFile) it.next(); if( !parentsOutFiles.contains( pf ) ){ if (!pf.getTransientTransferFlag()) { vRCSearchFiles.addElement(pf); } } } if (!vRCSearchFiles.isEmpty()) { if( job instanceof DAXJob ){ getFilesFromRC( (DAXJob)job, vRCSearchFiles); } else if( job instanceof DAGJob ){ getFilesFromRC( (DAGJob)job, vRCSearchFiles); } else{ //get the locations from the RC getFilesFromRC(job, vRCSearchFiles); } } } /** * This gets the Vector of FileTransfer objects for the files which have to * be transferred to an one destination pool. It checks for the transient * flags for files. If the transfer transient flag is set, it means the file * does not have to be transferred to the destination pool. * * @param destSiteHandle The pool to which the files are to be transferred to. * @param job The Jobobject of the job whose output files * are needed at the destination pool. * @param localTransfer boolean indicating that associated transfer job will run * on local site. * * @return Vector of FileTransfer objects */ private Vector getFileTX(String destPool, Job job, boolean localTransfer ) { Vector vFileTX = new Vector(); //check if there is a remote initialdir set String path = job.vdsNS.getStringValue( Pegasus.REMOTE_INITIALDIR_KEY ); for( Iterator it = job.getOutputFiles().iterator(); it.hasNext(); ){ PegasusFile pf = (PegasusFile) it.next(); String file = pf.getLFN(); FileTransfer ft = this.constructFileTX( pf, job.getStagingSiteHandle(), destPool, job.logicalName, path, localTransfer ); if (ft != null) { vFileTX.add(ft); } } return vFileTX; } /** * Constructs the FileTransfer object on the basis of the transiency * information. If the transient flag for transfer is set, the destURL for the * FileTransfer object would be the execution directory, as this is the entry * that has to be registered in the ReplicaMechanism * * @param pf the PegasusFile for which the transfer has to be done. * @param stagingSiteHandle the staging site at which file is placed after execution. * @param destSiteHandle the output pool where the job should be transferred * @param job the name of the associated job. * @param path the path that a user specifies in the profile for key * remote_initialdir that results in the workdir being * changed for a job on a execution pool. * @param localTransfer boolean indicating that associated transfer job will run * on local site. 
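*
* Behaviour by transiency flags, as implemented in the body below:
*   transfer transient and registration transient : returns null
*   transfer transient only                       : source and destination
*     both point at the execution directory on the staging site
*   otherwise                                     : execution directory on
*     the staging site to the storage directory on the output site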
* * @return the corresponding FileTransfer object */ private FileTransfer constructFileTX( PegasusFile pf, String stagingSiteHandle, String destSiteHandle, String job, String path, boolean localTransfer ) { String lfn = pf.getLFN(); FileTransfer ft = null; SiteCatalogEntry stagingSite = mSiteStore.lookup( stagingSiteHandle ); SiteCatalogEntry destinationSite = mSiteStore.lookup( destSiteHandle ); if (stagingSite == null || destinationSite == null) { mLogMsg = (stagingSite == null) ? this.poolNotFoundMsg(stagingSiteHandle, "vanilla") : this.poolNotFoundMsg(destSiteHandle, "vanilla"); mLogger.log( mLogMsg, LogManager.ERROR_MESSAGE_LEVEL ); throw new RuntimeException( mLogMsg ); } String execURL = stagingSite.getHeadNodeFS().selectScratchSharedFileServer().getURLPrefix() + mSiteStore.getExternalWorkDirectory(stagingSite.getHeadNodeFS().selectScratchSharedFileServer(), stagingSiteHandle) + File.separatorChar + lfn; //write out the exec url to the cache file trackInTransientRC(lfn,execURL,stagingSiteHandle); //if both transfer and registration //are transient return null if (pf.getTransientRegFlag() && pf.getTransientTransferFlag()) { return null; } //if only transient transfer flag //means destURL and sourceURL //are same and are equal to //execution directory on stagingSiteHandle if (pf.getTransientTransferFlag()) { ft = new FileTransfer(lfn,job,pf.getFlags()); //set the transfer mode ft.setSize( pf.getSize() ); ft.setTransferFlag(pf.getTransferFlag()); ft.addSource(stagingSiteHandle,execURL); ft.addDestination(stagingSiteHandle,execURL); } //the source dir is the exec dir //on exec pool and dest dir //would be on the output pool else { //construct the source url depending on whether third party tx String sourceURL = localTransfer ? execURL : "file://" + mSiteStore.getInternalWorkDirectory(stagingSiteHandle,path) + File.separator + lfn; ft = new FileTransfer(lfn,job,pf.getFlags()); ft.setSize( pf.getSize() ); //set the transfer mode ft.setTransferFlag(pf.getTransferFlag()); ft.addSource(stagingSiteHandle,sourceURL); //if the PegasusFile is already an instance of //FileTransfer the user has specified the destination //that they want to use in the DAX 3.0 if( pf instanceof FileTransfer ){ ft.addDestination( ((FileTransfer)pf).removeDestURL() ); return ft; } //add all the possible destination urls iterating through //the list of grid ftp servers associated with the dest pool. Iterator it = mSiteStore.lookup( destSiteHandle ).getHeadNodeFS().getStorage().getSharedDirectory().getFileServersIterator(); //sanity check if( !it.hasNext() ){ // no file servers were returned throw new RuntimeException( " No File Servers specified for Shared Storage on Headnode for site " + destSiteHandle ); } String destURL = null; boolean first = true; while(it.hasNext()){ FileServer fs = (FileServer)it.next(); destURL = fs.getURLPrefix() ; //assumption of same external mount point for each storage //file server on output site destURL += this.getPathOnStageoutSite( lfn ); //if the paths match of dest URI //and execDirURL we return null if (execURL.equalsIgnoreCase(destURL)) { /*ft = new FileTransfer(file, job); ft.addSource(stagingSiteHandle, execURL);*/ ft.addDestination(stagingSiteHandle, execURL); //make the transfer transient? ft.setTransferFlag(PegasusFile.TRANSFER_NOT); return ft; } ft.addDestination(destSiteHandle, destURL); first = false; } } return ft; } /** * This generates a error message for pool not found in the pool * config file. * * @param poolName the name of pool that is not found. 
* @param universe the condor universe * * @return the message. */ private String poolNotFoundMsg(String poolName, String universe) { String st = "Error: No matching entry to pool = " + poolName + " ,universe = " + universe + "\n found in the pool configuration file "; return st; } /** * This gets the Vector of FileTransfer objects for all the files which have * to be transferred to the destination pool in case of Interpool transfers. * Each FileTransfer object has the source and the destination URLs. the * source URI is determined from the pool on which the jobs are executed. * * @param job the job with reference to which interpool file transfers * need to be determined. * @param nodes Vector of Job objects for the nodes, whose * outputfiles are to be transferred to the dest pool. * * @return Vector of FileTransfer objects */ private Collection[] getInterpoolFileTX(Job job, Vector nodes) { String destSiteHandle = job.getStagingSiteHandle(); //contains the remote_initialdir if specified for the job String destRemoteDir = job.vdsNS.getStringValue( Pegasus.REMOTE_INITIALDIR_KEY); SiteCatalogEntry destSite = mSiteStore.lookup( destSiteHandle ); SiteCatalogEntry sourceSite; Collection[] result = new Collection[2]; Collection localTransfers = new LinkedList(); Collection remoteTransfers = new LinkedList(); for (Iterator it = nodes.iterator();it.hasNext();) { //get the parent job Job pJob = (Job)it.next(); sourceSite = mSiteStore.lookup( pJob.getStagingSiteHandle() ); if( sourceSite.getSiteHandle().equalsIgnoreCase( destSiteHandle ) ){ //no need to add transfers, as the parent job and child //job are run in the same directory on the pool continue; } String sourceURI = null; String thirdPartyDestURI = destSite.getHeadNodeFS().selectScratchSharedFileServer().getURLPrefix() + mSiteStore.getExternalWorkDirectory( destSite.getHeadNodeFS().selectScratchSharedFileServer(), destSiteHandle); //definite inconsitency as url prefix and mount point //are not picked up from the same server boolean localTransfer = runTransferOnLocalSite( destSiteHandle, thirdPartyDestURI, Job.INTER_POOL_JOB ); String destURI = localTransfer ? //construct for third party transfer thirdPartyDestURI : //construct for normal transfer "file://" + mSiteStore.getInternalWorkDirectory( destSiteHandle, destRemoteDir ); for (Iterator fileIt = pJob.getOutputFiles().iterator(); fileIt.hasNext(); ){ PegasusFile pf = (PegasusFile) fileIt.next(); String outFile = pf.getLFN(); if( job.getInputFiles().contains( pf ) ){ String sourceURL = null; String destURL = destURI + File.separator + outFile; String thirdPartyDestURL = thirdPartyDestURI + File.separator + outFile; FileTransfer ft = new FileTransfer(outFile,pJob.jobName); ft.setSize( pf.getSize() ); ft.addDestination(destSiteHandle,destURL); //add all the possible source urls iterating through //the list of grid ftp servers associated with the dest pool. 
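//(Hypothetical example of the URLs assembled here: for an output file
//f.out, a source may look like gsiftp://head.siteA.example.org/scratch/f.out
//and the third party destination like gsiftp://head.siteB.example.org/scratch/f.out;
//a source is skipped when it equals the third party destination URL.)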
boolean first = true; for( Iterator it1 = mSiteStore.lookup( pJob.getSiteHandle() ).getHeadNodeFS().getScratch().getSharedDirectory().getFileServersIterator(); it1.hasNext();){ FileServer server = ( FileServer)it1.next(); //definite inconsitency as url prefix and mount point //are not picked up from the same server sourceURI = server.getURLPrefix(); //sourceURI += server.getMountPoint(); sourceURI += mSiteStore.getExternalWorkDirectory(server, pJob.getSiteHandle()); sourceURL = sourceURI + File.separator + outFile; if(!(sourceURL.equalsIgnoreCase(thirdPartyDestURL))){ //add the source url only if it does not match to //the third party destination url ft.addSource(pJob.getStagingSiteHandle(), sourceURL); } first = false; } if( ft.isValid() ){ if( localTransfer ){ localTransfers.add(ft); } else{ remoteTransfers.add(ft); } } } } } result[0] = localTransfers; result[1] = remoteTransfers; return result; } /** * Special Handling for a DAGJob for retrieving files from the Replica Catalog. * * @param job the DAGJob * @param searchFiles file that need to be looked in the Replica Catalog. */ private void getFilesFromRC( DAGJob job, Collection searchFiles ){ //dax appears in adag element String dag = null; //go through all the job input files //and set transfer flag to false for (Iterator it = job.getInputFiles().iterator(); it.hasNext();) { PegasusFile pf = it.next(); //at the moment dax files are not staged in. //remove from input set of files //part of the reason is about how to handle where //to run the DAGJob. We dont have much control over it. it.remove(); } String lfn = job.getDAGLFN(); ReplicaLocation rl = mRCBridge.getFileLocs( lfn ); if (rl == null) { //flag an error throw new RuntimeException( "TransferEngine.java: Can't determine a location to " + "transfer input file for DAG lfn " + lfn + " for job " + job.getName()); } ReplicaCatalogEntry selLoc = mReplicaSelector.selectReplica( rl, job.getSiteHandle(), true ); String pfn = selLoc.getPFN(); //some extra checks to ensure paths if( pfn.startsWith( File.separator ) ){ dag = pfn; } else if( pfn.startsWith( TransferEngine.FILE_URL_SCHEME ) ){ dag = Utility.getAbsolutePath( pfn ); } else{ throw new RuntimeException( "Invalid URL Specified for DAG Job " + job.getName() + " -> " + pfn ); } job.setDAGFile(dag); //set the directory if specified job.setDirectory((String) job.dagmanVariables.removeKey(Dagman.DIRECTORY_EXTERNAL_KEY)); } /** * Special Handling for a DAXJob for retrieving files from the Replica Catalog. * * @param job the DAXJob * @param searchFiles file that need to be looked in the Replica Catalog. */ private void getFilesFromRC( DAXJob job, Collection searchFiles ){ //dax appears in adag element String dax = null; //go through all the job input files //and set transfer flag to false for (Iterator it = job.getInputFiles().iterator(); it.hasNext();) { PegasusFile pf = it.next(); //at the moment dax files are not staged in. //remove from input set of files //part of the reason is about how to handle where //to run the DAGJob. We dont have much control over it. 
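//(Note: this follows the same convention as the DAGJob variant above; the
//DAX file itself is located via the Replica Catalog below and handed to
//the job through the " --dax <path>" argument instead of being staged in.)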
it.remove(); } String lfn = job.getDAXLFN(); ReplicaLocation rl = mRCBridge.getFileLocs( lfn ); if (rl == null) { //flag an error throw new RuntimeException( "TransferEngine.java: Can't determine a location to " + "transfer input file for DAX lfn " + lfn + " for job " + job.getName()); } ReplicaCatalogEntry selLoc = mReplicaSelector.selectReplica( rl, job.getSiteHandle(), true ); String pfn = selLoc.getPFN(); //some extra checks to ensure paths if( pfn.startsWith( File.separator ) ){ dax = pfn; } else if( pfn.startsWith( TransferEngine.FILE_URL_SCHEME ) ){ dax = Utility.getAbsolutePath( pfn ); } else{ throw new RuntimeException( "Invalid URL Specified for DAX Job " + job.getName() + " -> " + pfn ); } //add the dax to the argument StringBuffer arguments = new StringBuffer(); arguments.append(job.getArguments()). append(" --dax ").append(dax); job.setArguments(arguments.toString()); } /** * It looks up the RCEngine Hashtable to lookup the locations for the * files and add nodes to transfer them. If a file is not found to be in * the Replica Catalog the Transfer Engine flags an error and exits * * @param job the Jobobject for whose ipfile have * to search the Replica Mechanism for. * @param searchFiles Vector containing the PegasusFile objects corresponding * to the files that need to have their mapping looked * up from the Replica Mechanism. */ private void getFilesFromRC( Job job, Collection searchFiles ) { //Vector vFileTX = new Vector(); //Collection symLinkFileTransfers = new LinkedList(); Collection localFileTransfers = new LinkedList(); Collection remoteFileTransfers = new LinkedList(); String jobName = job.logicalName; String stagingSiteHandle = job.getStagingSiteHandle(); //contains the remote_initialdir if specified for the job String eRemoteDir = job.vdsNS.getStringValue( Pegasus.REMOTE_INITIALDIR_KEY); SiteCatalogEntry stagingSite = mSiteStore.lookup( stagingSiteHandle ); //we are using the pull mode for data transfer String scheme = "file"; //sAbsPath would be just the source directory absolute path //dAbsPath would be just the destination directory absolute path String dAbsPath = mSiteStore.getExternalWorkDirectory( stagingSite.getHeadNodeFS().selectScratchSharedFileServer(), stagingSiteHandle); String sAbsPath = null; //sDirURL would be the url to the source directory. //dDirURL would be the url to the destination directoy //and is always a networked url. //definite inconsitency as url prefix and mount point //are not picked up from the same server String dDirURL = stagingSite.getHeadNodeFS().selectScratchSharedFileServer( ).getURLPrefix() + dAbsPath; String sDirURL = null; //file dest dir is destination dir accessed as a file URL String fileDestDir = scheme + "://" + mSiteStore.getInternalWorkDirectory( stagingSiteHandle, eRemoteDir ); //check if the execution pool is third party or not boolean runTransferOnLocalSite = runTransferOnLocalSite( stagingSiteHandle, dDirURL, Job.STAGE_IN_JOB); String destDir = ( runTransferOnLocalSite ) ? //use the full networked url to the directory dDirURL : //use the default pull mode fileDestDir; for( Iterator it = searchFiles.iterator(); it.hasNext(); ){ String sourceURL,destURL=null; PegasusFile pf = (PegasusFile) it.next(); List pfns = null; ReplicaLocation rl = null; String lfn = pf.getLFN(); NameValue nv = null; //see if the pf is infact an instance of FileTransfer if( pf instanceof FileTransfer ){ //that means we should be having the source url already. //nv contains both the source pool and the url. 
//This happens in case of AI Planner or transfer of executables nv = ((FileTransfer)pf).getSourceURL(); NameValue destNV = ((FileTransfer)pf).removeDestURL(); if( destNV == null ){ //the source URL was specified in the DAX //no transfer of executables case throw new RuntimeException( "Unreachable code . Signifies error in internal logic " ); } else{//staging of executables case destURL = destNV.getValue(); destURL = (runTransferOnLocalSite( stagingSiteHandle, destURL, Job.STAGE_IN_JOB))? //the destination URL is already third party //enabled. use as it is destURL: //explicitly convert to file URL scheme scheme + "://" + Utility.getAbsolutePath(destURL); } } else{ //query the replica services and get hold of pfn rl = mRCBridge.getFileLocs( lfn ); pfns = (rl == null) ? null: rl.getPFNList(); } if ( pfns == null && nv == null ) { //check to see if the input file is optional if(pf.fileOptional()){ //no need to add a transfer node for it if no location found //remove the PegasusFile object from the list of //input files for the job boolean removed = job.getInputFiles().remove( pf ); //System.out.println( "Removed " + pf.getLFN() + " " + removed ); continue; } //flag an error throw new RuntimeException( "TransferEngine.java: Can't determine a location to " + "transfer input file for lfn " + lfn + " for job " + job.getName()); } ReplicaCatalogEntry selLoc = (nv == null)? //select from the various replicas mReplicaSelector.selectReplica( rl, job.getSiteHandle(), runTransferOnLocalSite ): //we have the replica already selected new ReplicaCatalogEntry( nv.getValue(), nv.getKey() ); //check if we need to replace url prefix for //symbolic linking boolean symLinkSelectedLocation; if ( symLinkSelectedLocation = (mUseSymLinks && selLoc.getResourceHandle().equals( job.getStagingSiteHandle() )) ) { //resolve any srm url's that are specified selLoc = replaceSourceProtocolFromURL( selLoc ); } //get the file to the job's execution pool //this is assuming that there are no directory paths //in the pfn!!! sDirURL = selLoc.getPFN().substring( 0, selLoc.getPFN().lastIndexOf(File.separator) ); //try to get the directory absolute path //yes i know that we sending the url to directory //not the file. sAbsPath = Utility.getAbsolutePath(sDirURL); //the final source and destination url's to the file sourceURL = selLoc.getPFN(); if( destURL == null ){ //no staging of executables case. //we construct destination URL to file. StringBuffer destPFN = new StringBuffer(); if( symLinkSelectedLocation ){ //we use the file URL location to dest dir //in case we are symlinking //destPFN.append( fileDestDir ); destPFN.append( this.replaceProtocolFromURL( destDir ) ); } else{ //we use whatever destDir was set to earlier destPFN.append( destDir ); } destPFN.append( File.separator).append( lfn ); destURL = destPFN.toString(); } //we have all the chopped up combos of the urls. 
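//(Illustration with hypothetical values: a sourceURL of
//gsiftp://head.siteA.example.org/scratch/f.in against a dDirURL of
//gsiftp://head.siteA.example.org/scratch means the file already sits in
//the staging directory, so the check below skips adding a stage-in node.)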
//do some funky matching on the basis of the fact //that each pool has one shared filesystem //match the source and dest 3rd party urls or //match the directory url knowing that lfn and //(source and dest pool) are same try{ if(sourceURL.equalsIgnoreCase(dDirURL + File.separator + lfn)|| ( selLoc.getResourceHandle().equalsIgnoreCase( stagingSiteHandle ) && lfn.equals( sourceURL.substring(sourceURL.lastIndexOf(File.separator) + 1)) && //sAbsPath.equals( dAbsPath ) new File( sAbsPath ).getCanonicalPath().equals( new File( dAbsPath).getCanonicalPath()) ) ){ //do not need to add any transfer node StringBuffer message = new StringBuffer( ); message.append( sAbsPath ).append( " same as " ).append( dAbsPath ); mLogger.log( message.toString() , LogManager.DEBUG_MESSAGE_LEVEL ); message = new StringBuffer(); message.append( " Not transferring ip file as ").append( lfn ). append( " for job " ).append( job.jobName ).append( " to site " ).append( stagingSiteHandle); mLogger.log( message.toString() , LogManager.DEBUG_MESSAGE_LEVEL ); continue; } }catch( IOException ioe ){ /*ignore */ } //add locations of input data on the remote site to the transient RC boolean bypassFirstLevelStaging = mWorkerNodeExecution && selLoc.getResourceHandle().equals( job.getSiteHandle() ); //for 3.2 first level staging cannot be bypassed //bypassing staging creates problems with in place cleanup and condor //IO case. bypassFirstLevelStaging = false; if( bypassFirstLevelStaging ){ //the selected replica already exists on //the compute site. we can bypass first level //staging of the data //we add into transient RC the source URL without any modifications trackInTransientRC( lfn, sourceURL, job.getSiteHandle(), false ); continue; } else{ //track the location where the data is staged as //part of the first level staging //we always store the thirdparty url //trackInTransientRC( lfn, destURL, job.getSiteHandle() ); trackInTransientRC( lfn, dDirURL + File.separator + lfn, job.getStagingSiteHandle()); } //construct the file transfer object FileTransfer ft = (pf instanceof FileTransfer) ? (FileTransfer)pf: new FileTransfer( lfn, jobName ); //make sure the type information is set in file transfer ft.setType( pf.getType() ); ft.setSize( pf.getSize() ); //the transfer mode for the file needs to be //propogated for optional transfers. ft.setTransferFlag(pf.getTransferFlag()); //to prevent duplicate source urls if(ft.getSourceURL() == null){ ft.addSource( selLoc.getResourceHandle(), sourceURL); } //to prevent duplicate destination urls if(ft.getDestURL() == null) ft.addDestination(stagingSiteHandle,destURL); if( symLinkSelectedLocation || !runTransferOnLocalSite ){ //all symlink transfers and user specified remote transfers remoteFileTransfers.add(ft); } else{ localFileTransfers.add(ft); } //we need to set destURL to null destURL = null; } //call addTransferNode if (!localFileTransfers.isEmpty() || !remoteFileTransfers.isEmpty()) { mTXRefiner.addStageInXFERNodes(job, localFileTransfers, remoteFileTransfers ); } } /** * Replaces the SRM URL scheme from the url, and replaces it with the * file url scheme and returns in a new object if replacement happens. * The original object passed as a parameter still remains the same. * * @param rce the ReplicaCatalogEntry object whose url need to be * replaced. * * @return the object with the url replaced. */ protected ReplicaCatalogEntry replaceSourceProtocolFromURL( ReplicaCatalogEntry rce ) { String pfn = rce.getPFN(); //if the pfn starts with a file url we //dont need to replace . 
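        // An illustrative example of the replacement this method performs,
        // assuming the SRM service url and mount point properties documented
        // with constructSiteToSRMServerMap() (the trailing file path is
        // hypothetical):
        //     pfn     : srm://osg-se.ligo.caltech.edu:10443/srm/v2/server?SFN=/mnt/hadoop/data/f.a
        //     becomes : file:///mnt/hadoop/data/f.a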
// a sanity check
if( pfn.startsWith( FILE_URL_SCHEME ) ){ return rce; }
/* special handling for SRM urls */
StringBuffer newPFN = new StringBuffer();
if( mSRMServiceURLToMountPointMap.containsKey( rce.getResourceHandle() ) ){
//try to do replacement of URL with internal mount point
NameValue nv = mSRMServiceURLToMountPointMap.get( rce.getResourceHandle() );
String urlPrefix = nv.getKey();
if( pfn.startsWith( urlPrefix ) ){
//replace the url prefix with the mount point
newPFN.append( FILE_URL_SCHEME ).append( "//" );
newPFN.append( nv.getValue() );
newPFN.append( pfn.substring( urlPrefix.length(), pfn.length() ));
mLogger.log( "Replaced pfn " + pfn + " with " + newPFN.toString() , LogManager.TRACE_MESSAGE_LEVEL );
} }
if( newPFN.length() == 0 ){
//there is no SRM replacement to do.
//Still do the FILE replacement, which we have to do manually
String hostName = Utility.getHostName( pfn );
newPFN.append( FILE_URL_SCHEME ).append( "//" );
//we want to skip out the hostname
newPFN.append( pfn.substring( pfn.indexOf( hostName ) + hostName.length() ) );
}
//we do not need a full clone, just the PFN
ReplicaCatalogEntry result = new ReplicaCatalogEntry( newPFN.toString(), rce.getResourceHandle() );
for( Iterator it = rce.getAttributeIterator(); it.hasNext();){ String key = (String)it.next(); result.addAttribute( key, rce.getAttribute( key ) ); }
return result; }
/** * Replaces the gsiftp URL scheme from the url, and replaces it with the * symlink url scheme, returning the result in a new object. The original object * passed as a parameter still remains the same. * * @param pfn the pfn that needs to be replaced * * @return the replaced PFN */
protected String replaceProtocolFromURL( String pfn ) {
/* special handling for SRM urls */
StringBuffer newPFN = new StringBuffer();
if( pfn.startsWith(FILE_URL_SCHEME) ){
//special handling for FILE URL's as
//utility hostname functions don't hold up
newPFN.append( TransferEngine.SYMLINK_URL_SCHEME ). append( pfn.substring( FILE_URL_SCHEME.length() ) );
//System.out.println( "Original PFN " + pfn + " \nReplaced PFN " + newPFN.toString() );
return newPFN.toString(); }
//we have to do the manual replacement
String hostName = Utility.getHostName( pfn );
newPFN.append( TransferEngine.SYMLINK_URL_SCHEME ).append( "//" );
//we want to skip out the hostname
newPFN.append( pfn.substring( pfn.indexOf( hostName ) + hostName.length() ) );
return newPFN.toString(); }
/** * Constructs a map by parsing the relevant SRM * pegasus properties. * * For example, if users have the following specified in the properties file *
     * pegasus.transfer.srm.ligo-cit.service.url          srm://osg-se.ligo.caltech.edu:10443/srm/v2/server?SFN=/mnt/hadoop
     * pegasus.transfer.srm.ligo-cit.service.mountpoint   /mnt/hadoop
     * 
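     * 
     * (an illustrative continuation of the above example) the resulting map
     * would contain the entry
     *   ligo-cit -> NameValue( "srm://osg-se.ligo.caltech.edu:10443/srm/v2/server?SFN=/mnt/hadoop", "/mnt/hadoop" )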
* *  then, a Map is created that associates ligo-cit with a NameValue object * containing the service url and the mount point. * * @param props the PegasusProperties object * * @return Map that maps a site name to a NameValue object that has the * URL prefix and the mount point */
private Map constructSiteToSRMServerMap( PegasusProperties props ) {
Map m = new HashMap();
//first strip off the prefix from the properties and get the matching subset
Properties siteProps = props.matchingSubset( TransferEngine.SRM_PROPERTIES_PREFIX, false );
//retrieve all the sites for which SRM servers are specified
Map m1 = new HashMap(); //associates site name to url prefix
Map m2 = new HashMap(); //associates site name to mount point
for( Iterator it = siteProps.keySet().iterator(); it.hasNext(); ){
String key = (String) it.next();
//determine the site name
String site = key.substring( 0, key.indexOf( "." ) );
if( key.endsWith( TransferEngine.SRM_SERVICE_URL_PROPERTIES_SUFFIX ) ){ m1.put( site, siteProps.getProperty( key ) ); }
else if( key.endsWith( TransferEngine.SRM_MOUNT_POINT_PROPERTIES_SUFFIX ) ){ m2.put( site, siteProps.getProperty( key ) ); } }
//now merge the information into m and return
for( Iterator it = m1.entrySet().iterator(); it.hasNext(); ){
Map.Entry entry = (Map.Entry) it.next();
String site = (String) entry.getKey();
String url = (String) entry.getValue();
String mountPoint = (String) m2.get( site );
if( mountPoint == null ){ mLogger.log( "Mount Point for SRM server not specified in properties for site " + site, LogManager.WARNING_MESSAGE_LEVEL ); continue; }
m.put( site, new NameValue( url, mountPoint ) ); }
mLogger.log( "SRM Server map is " + m, LogManager.DEBUG_MESSAGE_LEVEL );
return m; }
/** * Gets the output files for all the nodes that are specified in * the Vector of nodes passed. * * @param nodes Vector of node job names whose output files are required. * * @param parentSubs Vector of Job objects. One passes an * empty vector as a parameter, and it is populated with the * Job objects of the nodes whose output files are * being determined. * * @return Set of PegasusFile objects */
private Set getOutputFiles(Vector nodes, Vector parentSubs) {
Set files = new HashSet();
for( Iterator it = nodes.iterator(); it.hasNext(); ){
String jobName = (String) it.next();
Job sub = getSubInfo(jobName);
parentSubs.addElement(sub);
files.addAll( sub.getOutputFiles() ); }
return files; }
/** * Returns the full path on the remote output site where the lfn will reside. * Each call to this function could trigger a change in the directory * returned, depending upon the file factory being used. * * @param lfn the logical filename of the file. * * @return the storage mount point. */
protected String getPathOnStageoutSite( String lfn ){
String file;
try{ file = mFactory.createFile( lfn ).toString(); }
catch( IOException e ){ throw new RuntimeException( "IOException " , e ); }
return file; }
/** * Initialize the Stageout Site Directory factory. * The factory is used to return the relative directory that a particular * file needs to be staged to on the output site. * * @param workflow the workflow to which the transfer nodes need to be * added. 
* */
protected void initializeStageOutSiteDirectoryFactory( ADag workflow ){
String outputSite = mPOptions.getOutputSite();
boolean stageOut = (( outputSite != null ) && ( outputSite.trim().length() > 0 ));
if (!stageOut ){
//no initialization and return
mLogger.log( "No initialization of StageOut Site Directory Factory", LogManager.DEBUG_MESSAGE_LEVEL );
return; }
// create files in the directory, unless anything else is known.
mStageOutBaseDirectory = mSiteStore.getExternalStorageDirectory( outputSite );
if( mProps.useDeepStorageDirectoryStructure() ){
// create hashed, and levelled directories
try {
VirtualDecimalHashedFileFactory temp = null;
//get the total number of files that need to be staged out
int totalFiles = 0;
for ( Iterator it = workflow.jobIterator(); it.hasNext(); ){
Job job = ( Job )it.next();
//traverse through all the job output files
for( Iterator opIt = job.getOutputFiles().iterator(); opIt.hasNext(); ){
if( !((PegasusFile)opIt.next()).getTransientTransferFlag() ){
//means we have to stage to the output site
totalFiles++; } } }
temp = new VirtualDecimalHashedFileFactory( mStageOutBaseDirectory, totalFiles );
//each stageout file has only 1 file associated with it
temp.setMultiplicator( 1 );
mFactory = temp; }
catch (IOException e) {
//wrap into runtime and throw
throw new RuntimeException( "While initializing HashedFileFactory", e ); } }
else{
try {
//Create a flat file factory
mFactory = new VirtualFlatFileFactory( mStageOutBaseDirectory ); // minimum default
}
catch ( IOException ioe ) {
throw new RuntimeException( "Unable to generate files in the storage directory " , ioe ); } } }
/** * Tracks the files created by a job in the Transient Replica Catalog. * * @param job the job whose output files need to be tracked. */
protected void trackInTransientRC( Job job ){
//check if there is a remote initialdir set
String path = job.vdsNS.getStringValue( Pegasus.REMOTE_INITIALDIR_KEY );
// SiteInfo stagingSite = mPoolHandle.getPoolEntry( job.getSiteHandle(), "vanilla" );
SiteCatalogEntry stagingSiteEntry = mSiteStore.lookup( job.getStagingSiteHandle() );
if ( stagingSiteEntry == null ) {
this.poolNotFoundMsg( job.getStagingSiteHandle(), "vanilla" ) ;
mLogger.log( mLogMsg, LogManager.ERROR_MESSAGE_LEVEL );
throw new RuntimeException( mLogMsg ); }
for( Iterator it = job.getOutputFiles().iterator(); it.hasNext(); ){
PegasusFile pf = (PegasusFile) it.next();
String lfn = pf.getLFN();
StringBuffer stagingSiteURL = new StringBuffer();
FileServer server = stagingSiteEntry.getHeadNodeFS().selectScratchSharedFileServer();
stagingSiteURL.append( server.getURLPrefix() ). append( mSiteStore.getExternalWorkDirectory(server, job.getSiteHandle() ) ). append( File.separatorChar ).append( lfn );
trackInTransientRC( lfn, stagingSiteURL.toString(), job.getSiteHandle() ); } }
/** * Inserts an entry into the Transient RC. It modifies the PFN if the * workflow is running on the cloud and S3 is being used for storage. * * @param lfn the logical name of the file. * @param pfn the pfn * @param site the site handle */
private void trackInTransientRC( String lfn, String pfn, String site ){
this.trackInTransientRC( lfn, pfn, site, true ); }
/** * Inserts an entry into the Transient RC. It modifies the PFN if the * workflow is running on the cloud and S3 is being used for storage, * dependent on the modifyURL parameter passed. * * @param lfn the logical name of the file. * @param pfn the pfn * @param site the site handle * @param modifyURL whether to modify the URL in case of S3 or not. 
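     * 
     * An illustrative call (lfn, pfn and site are hypothetical):
     *   trackInTransientRC( "f.a", "gsiftp://host.isi.edu/scratch/f.a", "isi", false )
     * records the lfn to pfn mapping for site isi in the transient replica
     * catalog, leaving the pfn untouched.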
*/ private void trackInTransientRC( String lfn, String pfn, String site, boolean modifyURL ){ /* // For JIRA PM-501. We dont want to do anyting specific on basis of custom // S3 implementations. if( this.mS3BucketUsedForStorage ){ //modify the PFN only for non raw input files. //This takes care of the case, where //the data already might be on the cloud , and first level //staging is bypassed. if( modifyURL ){ StringBuffer execURL = new StringBuffer(); execURL.append( ((S3)mCreateDirImpl).getBucketNameURL( site ) ).append( File.separatorChar ).append( lfn ); pfn = execURL.toString(); } } */ mTransientRC.insert( lfn, pfn, site ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/MainEngine.java0000644000175000017500000003451611757531137025612 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.common.logging.LoggingKeys; import java.util.Iterator; import java.util.Set; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.common.logging.LogManager; import java.io.File; import java.util.Properties; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.replica.ReplicaFactory; /** * The central class that calls out to the various other components of Pegasus. * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4689 $ */ public class MainEngine extends Engine { /** * The basename of the directory that contains the submit files for the * cleanup DAG that for the concrete dag generated for the workflow. */ public static final String CLEANUP_DIR = "cleanup"; /** * The name of the source key for Replica Catalog Implementer that serves as * cache */ public static final String TRANSIENT_REPLICA_CATALOG_KEY = "file"; /** * The name of the Replica Catalog Implementer that serves as the source for * cache files. */ public static final String TRANSIENT_REPLICA_CATALOG_IMPLEMENTER = "SimpleFile"; /** * The Original Dag object which is constructed by parsing the dag file. */ private ADag mOriginalDag; /** * The reduced Dag object which is got from the Reduction Engine. */ private ADag mReducedDag; /** * The cleanup dag for the final concrete dag. */ private ADag mCleanupDag; /** * The pools on which the Dag should be executed as specified by the user. */ private Set mExecPools; /** * The bridge to the Replica Catalog. */ private ReplicaCatalogBridge mRCBridge; /** * The handle to the InterPool Engine that calls out to the Site Selector * and maps the jobs. */ private InterPoolEngine mIPEng; /** * The handle to the Reduction Engine that performs reduction on the graph. */ private DataReuseEngine mRedEng; /** * The handle to the Transfer Engine that adds the transfer nodes in the * graph to transfer the files from one site to another. 
*/ private TransferEngine mTransEng; /** * The engine that ends up creating random directories in the remote * execution pools. */ private CreateDirectory mCreateEng; /** * The engine that ends up creating the cleanup dag for the dag. */ private RemoveDirectory mRemoveEng; /** * The handle to the Authentication Engine that performs the authentication * with the various sites. */ private AuthenticateEngine mAuthEng; /** * The handle to the node collapser. */ private NodeCollapser mNodeCollapser; /** * This constructor initialises the class variables to the variables * passed. The pool names specified should be present in the pool.config file * * @param orgDag the dag to be worked on. * @param bag the bag of initialization objects */ public MainEngine( ADag orgDag, PegasusBag bag ) { super( bag ); mOriginalDag = orgDag; mExecPools = (Set)mPOptions.getExecutionSites(); mOutputPool = mPOptions.getOutputSite(); if (mOutputPool != null && mOutputPool.length() > 0) { Engine.mOutputPool = mOutputPool; } } /** * The main function which calls the other engines and does the necessary work. * * @return the planned worflow. */ public ADag runPlanner() { //create the main event refinement event mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_REFINEMENT, LoggingKeys.DAX_ID, mOriginalDag.getAbstractWorkflowName() ); //refinement process starting mOriginalDag.setWorkflowRefinementStarted( true ); //do the authentication against the pools if (mPOptions.authenticationSet()) { mAuthEng = new AuthenticateEngine( mBag, new java.util.HashSet(mPOptions.getExecutionSites())); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_AUTHENTICATION, LoggingKeys.DAX_ID, mOriginalDag.getAbstractWorkflowName() ); Set authenticatedSet = mAuthEng.authenticate(); if (authenticatedSet.isEmpty()) { StringBuffer error = new StringBuffer( ); error.append( "Unable to authenticate against any site. "). append( "Probably your credentials were not generated" ). 
append( " or have expired" );
throw new RuntimeException( error.toString() ); }
mLogger.log("Sites authenticated are " + setToString(authenticatedSet, ","), LogManager.DEBUG_MESSAGE_LEVEL);
mLogger.logEventCompletion();
mPOptions.setExecutionSites(authenticatedSet); }
String message = null;
mRCBridge = new ReplicaCatalogBridge( mOriginalDag, mBag );
mRedEng = new DataReuseEngine( mOriginalDag, mBag );
mReducedDag = mRedEng.reduceWorkflow(mOriginalDag, mRCBridge );
//mReducedDag = new ReductionEngine( mOriginalDag, mBag ).reduceDag(mRCBridge);
//System.out.print( mReducedDag );
//unmark arg strings
//unmarkArgs();
mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_SITESELECTION, LoggingKeys.DAX_ID, mOriginalDag.getAbstractWorkflowName() );
mIPEng = new InterPoolEngine( mReducedDag, mBag );
mIPEng.determineSites();
mBag = mIPEng.getPegasusBag();
mIPEng = null;
mLogger.logEventCompletion();
//initialize the deployment engine
DeployWorkerPackage deploy = DeployWorkerPackage.loadDeployWorkerPackage( mBag );
deploy.initialize( mReducedDag );
//do the node clustering
if( mPOptions.getClusteringTechnique() != null ){
mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_CLUSTER, LoggingKeys.DAX_ID, mOriginalDag.getAbstractWorkflowName() );
mNodeCollapser = new NodeCollapser( mBag );
try{ mReducedDag = mNodeCollapser.cluster( mReducedDag ); }
catch ( Exception e ){
//message is still null at this point, so supply an explicit one
throw new RuntimeException( "Error while clustering the workflow", e ); }
mNodeCollapser = null;
mLogger.logEventCompletion(); }
message = "Grafting transfer nodes in the workflow";
ReplicaCatalog transientRC = initializeTransientRC( mReducedDag ) ;
mLogger.log(message,LogManager.INFO_MESSAGE_LEVEL);
mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_ADD_TRANSFER_NODES, LoggingKeys.DAX_ID, mOriginalDag.getAbstractWorkflowName() );
mTransEng = new TransferEngine( mReducedDag, mBag, mRedEng.getDeletedJobs(), mRedEng.getDeletedLeafJobs());
mTransEng.addTransferNodes( mRCBridge , transientRC );
mTransEng = null;
mRedEng = null;
mLogger.logEventCompletion();
//populate the transient RC into PegasusBag
mBag.add( PegasusBag.TRANSIENT_REPLICA_CATALOG, transientRC );
//close the connection to RLI explicitly
mRCBridge.closeConnection();
//add the deployment of setup jobs if required
mReducedDag = deploy.addSetupNodes( mReducedDag );
if (mPOptions.generateRandomDirectory()) {
//add the nodes that create
//random directories at the remote
//execution pools. 
message = "Grafting the remote workdirectory creation jobs " + "in the workflow"; //mLogger.log(message,LogManager.INFO_MESSAGE_LEVEL); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_GENERATE_WORKDIR, LoggingKeys.DAX_ID, mOriginalDag.getAbstractWorkflowName() ); mCreateEng = new CreateDirectory( mBag ); mCreateEng.addCreateDirectoryNodes( mReducedDag ); mCreateEng = null; mLogger.logEventCompletion(); // CLEANUP WORKFLOW GENERATION IS DISABLED FOR 3.2 // JIRA PM-529 // //create the cleanup dag // message = "Generating the cleanup workflow"; // //mLogger.log(message,LogManager.INFO_MESSAGE_LEVEL); // mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_GENERATE_CLEANUP_WF, LoggingKeys.DAX_ID, mOriginalDag.getAbstractWorkflowName() ); // //for the cleanup dag the submit directory is the cleanup // //subdir // File submitDir = new File( this.mPOptions.getSubmitDirectory(), MainEngine.CLEANUP_DIR ); // mRemoveEng = new RemoveDirectory( mReducedDag, mBag, submitDir.getAbsolutePath() ); // mCleanupDag = mRemoveEng.generateCleanUPDAG( ); // mLogger.logEventCompletion(); // END OF COMMENTED OUT CODE } //add the cleanup nodes in place if ( mPOptions.getCleanup() ){ /* should be exposed via command line option */ message = "Adding cleanup jobs in the workflow"; // mLogger.log( message, LogManager.INFO_MESSAGE_LEVEL ); mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_GENERATE_CLEANUP, LoggingKeys.DAX_ID, mOriginalDag.getAbstractWorkflowName() ); CleanupEngine cEngine = new CleanupEngine( mBag ); mReducedDag = cEngine.addCleanupJobs( mReducedDag ); mLogger.logEventCompletion(); //for the non pegasus lite case we add the clenaup nodes //for the worker package. if( !mProps.executeOnWorkerNode() ){ //add the cleanup of setup jobs if required mReducedDag = deploy.addCleanupNodesForWorkerPackage( mReducedDag ); } } mLogger.logEventCompletion(); return mReducedDag; } /** * Returns the cleanup dag for the concrete dag. * * @return the cleanup dag if the random dir is given. * null otherwise. */ public ADag getCleanupDAG(){ return mCleanupDag; } /** * Returns the bag of intialization objects. * * @return PegasusBag */ public PegasusBag getPegasusBag(){ return mBag; } /** * Initializes the transient replica catalog and returns a handle to it. * * @param dag the workflow being planned * * @return handle to transient catalog */ private ReplicaCatalog initializeTransientRC( ADag dag ){ ReplicaCatalog rc = null; mLogger.log("Initialising Transient Replica Catalog", LogManager.DEBUG_MESSAGE_LEVEL ); Properties cacheProps = mProps.getVDSProperties().matchingSubset( ReplicaCatalog.c_prefix, false ); String file = mPOptions.getSubmitDirectory() + File.separatorChar + getCacheFileName( dag ); //set the appropriate property to designate path to file cacheProps.setProperty( MainEngine.TRANSIENT_REPLICA_CATALOG_KEY, file ); try{ rc = ReplicaFactory.loadInstance( TRANSIENT_REPLICA_CATALOG_IMPLEMENTER, cacheProps); } catch( Exception e ){ throw new RuntimeException( "Unable to initialize the transient replica catalog " + file, e ); } return rc; } /** * Constructs the basename to the cache file that is to be used * to log the transient files. The basename is dependant on whether the * basename prefix has been specified at runtime or not. * * @param adag the ADag object containing the workflow that is being * concretized. 
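     * 
     * For example (hypothetical values), with a basename prefix of
     * "blackdiamond" the returned name is "blackdiamond.cache", while
     * without a prefix a dag named "diamond" with index "0" yields
     * "diamond-0.cache".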
* * @return the name of the cache file */ private String getCacheFileName(ADag adag){ StringBuffer sb = new StringBuffer(); String bprefix = mPOptions.getBasenamePrefix(); if(bprefix != null){ //the prefix is not null using it sb.append(bprefix); } else{ //generate the prefix from the name of the dag sb.append(adag.dagInfo.nameOfADag).append("-"). append(adag.dagInfo.index); } //append the suffix sb.append(".cache"); return sb.toString(); } /** * Unmarks the arguments , that are tagged in the DaxParser. At present there are * no tagging. * * @deprecated */ private void unmarkArgs() { /*Enumeration e = mReducedDag.vJobSubInfos.elements(); while(e.hasMoreElements()){ SubInfo sub = (SubInfo)e.nextElement(); sub.strargs = new String(removeMarkups(sub.strargs)); }*/ } /** * A small helper method that displays the contents of a Set in a String. * * @param s the Set whose contents need to be displayed * @param delim The delimited between the members of the set. * @return String */ public String setToString(Set s, String delim) { StringBuffer sb = new StringBuffer(); for( Iterator it = s.iterator(); it.hasNext(); ) { sb.append( (String) it.next() ).append( delim ); } String result = sb.toString(); result = (result.length() > 0) ? result.substring(0, result.lastIndexOf(delim)) : result; return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/refiner/Refiner.java0000644000175000017500000000275311757531137025170 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.refiner; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.provenance.pasoa.XMLProducer; /** * A first cut at a separate refiner interface. Right now it only has method * required for the PASOA integration. * * @author Karan Vahi * @version $Revision: 2582 $ */ public interface Refiner { /** * The version of the API. */ public static final String VERSION = "1.0"; /** * Returns a reference to the workflow that is being refined by the refiner. * * * @return ADAG object. */ public ADag getWorkflow(); /** * Returns a reference to the XMLProducer, that generates the XML fragment * capturing the actions of the refiner. This is used for provenace * purposes. * * @return XMLProducer */ public XMLProducer getXMLProducer(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/0000755000175000017500000000000011757531667022214 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/GridStartFactory.java0000644000175000017500000004672511757531137026320 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
package edu.isi.pegasus.planner.code;
import edu.isi.pegasus.planner.code.gridstart.*;
import edu.isi.pegasus.planner.code.GridStart;
import edu.isi.pegasus.planner.code.POSTScript;
import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.Job;
import edu.isi.pegasus.planner.classes.AggregatedJob;
import edu.isi.pegasus.planner.common.PegasusProperties;
import edu.isi.pegasus.planner.namespace.Pegasus;
import edu.isi.pegasus.planner.namespace.Dagman;
import edu.isi.pegasus.common.util.DynamicLoader;
import java.util.Map;
import java.util.HashMap;
import edu.isi.pegasus.planner.classes.PegasusBag;
/** * An abstract factory class to load the appropriate type of GridStart * implementations, and their corresponding POSTScript classes. * This factory class is different from other factories, in the sense that it * must be instantiated and initialized first before calling out to any * of the factory methods. * * * @author Karan Vahi * @version $Revision: 4627 $ */
public class GridStartFactory {
/** * The package name where the implementations of this interface reside * by default. */
public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.code.gridstart";
/** * The corresponding short names for the implementations. */
public static String[] GRIDSTART_SHORT_NAMES = { "kickstart", "none", "seqexec" };
/** * The index in the constant arrays for Kickstart. */
public static final int KICKSTART_INDEX = 0;
/** * The index in the constant arrays for NoGridStart. */
public static final int NO_GRIDSTART_INDEX = 1;
/** * The index in the constant arrays for SeqExec. */
public static final int SEQEXEC_INDEX = 2;
/** * The postscript mode in which post scripts are added only for essential * jobs. */
public static final String ESSENTIAL_POST_SCRIPT_SCOPE = "essential";
/** * The postscript mode in which post scripts are added for all * jobs. */
public static final String ALL_POST_SCRIPT_SCOPE = "all";
/** * The known gridstart implementations. */
public static String[] GRIDSTART_IMPLEMENTING_CLASSES = { "Kickstart", "NoGridStart" };
//
/** * A table that associates POSTScript implementing classes with their * SHORT_NAMES. */
private static Map POSTSCRIPT_IMPLEMENTING_CLASS_TABLE;
/** * Initializes the POSTScript implementation table, associating * short names for the POSTScript with the names of the classes themselves. */
static{
POSTSCRIPT_IMPLEMENTING_CLASS_TABLE = new HashMap( 8 );
//not really the best way. should have avoided creating objects
//but then too many constants everywhere.
associate( new ExitPOST() );
associate( new ExitCode() );
associate( new UserPOSTScript() );
associate( new NoPOSTScript() );
associate( new NetloggerPostScript() );
associate( new PegasusExitCode() ); }
/** * Associates a shortname with the classname. * * @param ps the POSTScript implementation. */
private static void associate( POSTScript ps ){
POSTSCRIPT_IMPLEMENTING_CLASS_TABLE.put( ps.shortDescribe(), ps.getClass().getName() ); }
/** * Associates a shortname with the classname. 
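     * 
     * For example, the static initializer above effectively performs
     *   associate( "pegasus-exitcode",
     *              "edu.isi.pegasus.planner.code.gridstart.PegasusExitCode" )
     * for the PegasusExitCode implementation.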
* * @param shortName the shortName for the POSTScript implementation * @param className the fully qualified className of the implementing class. */ private static void associate( String shortName, String className ){ POSTSCRIPT_IMPLEMENTING_CLASS_TABLE.put( shortName, className ); } /** * Returns the name of the implementing POSTSCript class. * * @param shortName the shortName for the POSTScript implementation * * @return the className the fully qualified className of the implementing class, * else null. */ private static String implementingPOSTScriptClass( String shortName ){ Object obj = POSTSCRIPT_IMPLEMENTING_CLASS_TABLE.get( shortName ); return ( obj == null ) ? null : ( String )obj; } /** * The postscript mode. Whether to add postscripts for the jobs or not. * At present just two modes supported * all add postscripts for jobs where kickstart is present. * none do not add postscripts to anyjob */ // private String mPostScriptScope; /** * A table that maps short names of POSTScript implementations * with the implementations themselves. */ private Map mPOSTScriptImplementationTable; /** * A table that maps short names of GridStart implementations * with the implementations themselves. */ private Map mGridStartImplementationTable ; /** * The bag of objects used for initialization. */ private PegasusBag mBag; /** * The properties object holding all the properties. */ private PegasusProperties mProps; /** * The submit directory where the submit files are being generated for * the workflow. */ private String mSubmitDir; /** * The workflow object. */ private ADag mDAG; /** * A boolean indicating that the factory has been initialized. */ private boolean mInitialized; /** * The default constructor. */ public GridStartFactory() { mGridStartImplementationTable = new HashMap( 3 ); mPOSTScriptImplementationTable = new HashMap( 3 ); mInitialized = false; } /** * Initializes the factory with known GridStart implementations. * * @param bag the bag of objects that is used for initialization. * @param dag the concrete dag so far. */ public void initialize( PegasusBag bag, ADag dag ){ mBag = bag; mProps = bag.getPegasusProperties(); mSubmitDir = bag.getPlannerOptions().getSubmitDirectory() ; mDAG = dag; // mPostScriptScope = mProps.getPOSTScriptScope(); //load all the known implementations and initialize them for( int i = 0; i < GRIDSTART_IMPLEMENTING_CLASSES.length; i++){ //load via reflection just once registerGridStart( GRIDSTART_SHORT_NAMES[i], this.loadGridStart( bag, dag, GRIDSTART_IMPLEMENTING_CLASSES[i] ) ); } mInitialized = true; } /** * Loads the appropriate gridstart implementation for a job on the basis of * the value of the GRIDSTART_KEY in the Pegasus namepsace. If no value is * specified then the value in the properties file is picked up. * * @param job the job for which we want the gridstart handle. * @param gridStartPath the path to the gridstart from the site catalog. * * @return a handle to appropriate GridStart implementation. * * @see org.griphyn.cPlanner.namespace.Pegasus#GRIDSTART_KEY * @see org.griphyn.cPlanner.common.PegasusProperties#getGridStart() * * @throws GridStartFactoryException that nests any error that * might occur during the instantiation of the implementation. 
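     * 
     * A minimal usage sketch (the bag, dag, job and gridStartPath variables
     * are assumed to be wired up by the caller):
     *   GridStartFactory factory = new GridStartFactory();
     *   factory.initialize( bag, dag );
     *   GridStart gs = factory.loadGridStart( job, gridStartPath );
     *   POSTScript ps = factory.loadPOSTScript( job, gs );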
*/ public GridStart loadGridStart( Job job, String gridStartPath ) throws GridStartFactoryException { //sanity checks first if( !mInitialized ){ throw new GridStartFactoryException( "GridStartFactory needs to be initialized first before using" ); } GridStart gs = null; if ( /*gridStartPath == null ||*/ job.isMPIJob()){ //return NoGridStart implementation gs = (GridStart)this.gridStart( GRIDSTART_SHORT_NAMES[ NO_GRIDSTART_INDEX ] ); } else{ //determine the short name of GridStart implementation //on the basis of any profile associated or from the properties file String shortName = ( job.vdsNS.containsKey( Pegasus.GRIDSTART_KEY) ) ? //pick the one associated in profiles ( String ) job.vdsNS.get( Pegasus.GRIDSTART_KEY ): //pick the one in the properties file mProps.getGridStart(); //try loading on the basis of short name from the cache Object obj = this.gridStart( shortName ); if( obj == null ){ //load via reflection and register in the cache obj = this.loadGridStart( mBag, mDAG, shortName ); this.registerGridStart( shortName, (GridStart)obj ); } gs = (GridStart) obj; } return gs; } /** * Loads the appropriate POST Script implementation for a job on the basis of * the value of the Pegasus profile GRIDSTART_KEY, and the DAGMan profile * POST_SCRIPT_KEY in the Pegasus namepsace. If no value is * specified then the value in the properties file is picked up. * * @param job the job for which we want the gridstart handle. * @param gridStart the GridStart for which we want to load * the POSTSCRIPT implementation. * * @return a handle to appropriate POSTScript implementation. * * @see org.griphyn.cPlanner.namespace.Pegasus#GRIDSTART_KEY * @see org.griphyn.cPlanner.namespace.Dagman#POST_SCRIPT_KEY * @see org.griphyn.cPlanner.common.PegasusProperties#getGridStart() * * @throws GridStartFactoryException that nests any error that * might occur during the instantiation of the implementation. */ public POSTScript loadPOSTScript( Job job, GridStart gridStart ) throws GridStartFactoryException { //sanity checks first if( !mInitialized ){ throw new GridStartFactoryException( "GridStartFactory needs to be initialized first before using" ); } if ( gridStart == null ){ throw new GridStartFactoryException( "POSTScript can only be instantiated if supplied a GridStart implementation" ); } //figure out the postscript type. the scope takes precedence String postScriptType; String postScriptScope = (String) job.dagmanVariables.get( Dagman.POST_SCRIPT_SCOPE_KEY ); postScriptScope = ( postScriptScope == null )? GridStartFactory.ALL_POST_SCRIPT_SCOPE: postScriptScope; if ( postScriptScope.equals( GridStartFactory.ALL_POST_SCRIPT_SCOPE ) || ( postScriptScope.equals( GridStartFactory.ESSENTIAL_POST_SCRIPT_SCOPE ) && job.getJobType() != Job.REPLICA_REG_JOB) ) { //we need to apply some postscript //let us figure out the type of postscript to instantiate Object profileValue = job.dagmanVariables.get( Dagman.POST_SCRIPT_KEY ); postScriptType = ( profileValue == null )? 
//get the default associated with gridstart gridStart.defaultPOSTScript(): //use the one specified in profiles/properties ( String ) profileValue; } else{ //mode is none , make sure to remove post key and the arguments postScriptType = NoPOSTScript.SHORT_NAME; } //try loading on the basis of postscript type from the cache Object obj = this.postScript( postScriptType ); POSTScript ps = null; if( obj == null ){ //determine the className for postScriptType String className = GridStartFactory.implementingPOSTScriptClass( postScriptType ); if( className == null ){ //so this is a user specified postscript className = GridStartFactory.implementingPOSTScriptClass( UserPOSTScript.SHORT_NAME ); } //load via reflection and register in the cache obj = this.loadPOSTScript( mProps, mSubmitDir, //mProps.getPOSTScriptPath( postScriptType ), job.dagmanVariables.getPOSTScriptPath( postScriptType ), className ); this.registerPOSTScript( postScriptType, (POSTScript)obj ); } ps = ( POSTScript ) obj; return ps; } /** * Loads the implementing class corresponding to the class. If the package * name is not specified with the class, then class is assumed to be * in the DEFAULT_PACKAGE. The properties object passed should not be null. * * @param bag the bag of initialization objects * @param dag the concrete dag so far. * @param className the name of the class that implements the mode. It is the * name of the class, not the complete name with package. That * is added by itself. * * @return the instance of the class implementing this interface. * * @throws GridStartFactoryException that nests any error that * might occur during the instantiation of the implementation. * * @see #DEFAULT_PACKAGE_NAME */ private GridStart loadGridStart( PegasusBag bag, ADag dag, String className ) throws GridStartFactoryException { //prepend the package name className = (className.indexOf('.') == -1)? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className: //load directly className; //try loading the class dynamically GridStart gs = null; try{ DynamicLoader dl = new DynamicLoader( className); gs = (GridStart) dl.instantiate( new Object[0] ); gs.initialize( bag, dag); } catch (Exception e) { throw new GridStartFactoryException("Instantiating GridStart ", className, e); } return gs; } /** * Loads the implementing class corresponding to the class. If the package * name is not specified with the class, then class is assumed to be * in the DEFAULT_PACKAGE. The properties object passed should not be null. * * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param submitDir the submit directory where the submit file for the job * has to be generated. * @param path the path to the postscript on the submit host. * @param className the name of the class that implements the mode. It is the * name of the class, not the complete name with package. That * is added by itself. * * @return the instance of the class implementing this interface. * * @throws GridStartFactoryException that nests any error that * might occur during the instantiation of the implementation. * * @see #DEFAULT_PACKAGE_NAME */ private POSTScript loadPOSTScript( PegasusProperties properties, String submitDir, String path, String className ) throws GridStartFactoryException { //prepend the package name className = (className.indexOf('.') == -1)? //pick up from the default package DEFAULT_PACKAGE_NAME + "." 
+ className: //load directly className; //try loading the class dynamically POSTScript ps = null; try{ DynamicLoader dl = new DynamicLoader( className); ps = ( POSTScript ) dl.instantiate( new Object[0] ); ps.initialize( properties, path, submitDir ); } catch (Exception e) { throw new GridStartFactoryException("Instantiating GridStart ", className, e); } return ps; } /** * Returns the cached implementation of POSTScript * from the implementing class table. * * @param type the short name for a POSTScript implementation * * @return implementation the object class implementing that style, else null */ private POSTScript postScript( String type ){ Object obj = mPOSTScriptImplementationTable.get( type.toLowerCase() ); return ( obj == null ) ? null : (POSTScript)obj ; } /** * Inserts an entry into the implementing class table. The name is * converted to lower case before being stored. * * @param name the short name for a POSTScript implementation * @param implementation the object of the class implementing that style. */ private void registerPOSTScript( String name, POSTScript implementation){ mPOSTScriptImplementationTable.put( name.toLowerCase(), implementation ); } /** * Returns the cached implementation of GridStart from the implementing * class table. * * @param name the short name for a GridStart implementation * * @return implementation the object of the class implementing that style, else null */ private GridStart gridStart( String name ){ Object obj = mGridStartImplementationTable.get( name.toLowerCase() ); return ( obj == null ) ? null : (GridStart)obj ; } /** * Inserts an entry into the implementing class table. The name is * converted to lower case before being stored. * * @param name the short name for a GridStart implementation * @param implementation the object of the class implementing that style. */ private void registerGridStart( String name, GridStart implementation){ mGridStartImplementationTable.put( name.toLowerCase(), implementation ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/CodeGeneratorException.java0000644000175000017500000000435711757531137027460 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code; /** * The baseclass of the exception that is thrown by all Code Generators. * It is a checked exception. * * @author Karan Vahi * @version $Revision: 2090 $ */ public class CodeGeneratorException extends Exception { /** * Constructs a CodeGeneratorException with no detail * message. */ public CodeGeneratorException() { super(); } /** * Constructs a CodeGeneratorException with the specified detailed * message. * * @param message is the detailled message. */ public CodeGeneratorException(String message) { super(message); } /** * Constructs a CodeGeneratorException with the specified detailed * message and a cause. * * @param message is the detailled message. 
* @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */
public CodeGeneratorException(String message, Throwable cause) { super(message, cause); }
/** * Constructs a CodeGeneratorException with * just a cause. * * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */
public CodeGeneratorException(Throwable cause) { super(cause); } }
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/CodeGenerator.java0000644000175000017500000000711211757531137025571 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
package edu.isi.pegasus.planner.code;
import edu.isi.pegasus.planner.classes.ADag;
import edu.isi.pegasus.planner.classes.Job;
import edu.isi.pegasus.planner.classes.PlannerOptions;
import edu.isi.pegasus.planner.classes.PegasusBag;
import edu.isi.pegasus.planner.common.PegasusProperties;
import java.util.Collection;
import java.io.File;
/** * The interface that allows us to plug in various code generators for writing * out the concrete plan. Each Code Generator is dependent upon the * underlying workflow executor being used. A Code Generator implementation * generates the concrete plan in the input format of the underlying Workflow * Executor. * * The appropriate format can be condor submit files, or some XML description. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2590 $ */
public interface CodeGenerator {
/** * The version number associated with this API of Code Generator. */
public static final String VERSION = "1.5";
/** * Initializes the Code Generator implementation. * * @param bag the bag of initialization objects. * * @throws CodeGeneratorException in case of any error occurring during code generation. */
public void initialize( PegasusBag bag ) throws CodeGeneratorException;
/** * Generates the code for the concrete workflow in the input format of the * workflow executor being used. * * @param dag the concrete workflow. * * @return the Collection of File objects for the files written * out. * * @throws CodeGeneratorException in case of any error occurring during code generation. */
public Collection generateCode( ADag dag ) throws CodeGeneratorException;
/** * Generates the code for a single job in the input format of the workflow * executor being used. * * @param dag the dag of which the job is a part. * @param job the Job object holding the information about * that particular job. * * @throws CodeGeneratorException in case of any error occurring during code generation. */
public void generateCode( ADag dag, Job job ) throws CodeGeneratorException;
/** * Starts monitoring of the workflow by invoking a workflow monitor daemon. 
* The monitoring should start only after the output files have been generated. * FIXME: It should actually happen after the workflow has been submitted. * Eventually should be a separate monitor interface, and submit writers * should be loaded by an AbstractFactory. * * @return boolean indicating whether could successfully start the monitor * daemon or not. */ public boolean startMonitoring(); /** * Resets the Code Generator implementation. * * @throws CodeGeneratorException in case of any error occuring code generation. */ public void reset( )throws CodeGeneratorException; } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/POSTScript.java0000644000175000017500000000517511757531137025031 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.common.PegasusProperties; /** * The interface that defines the creation of a POSTSCRIPT for a job. * The POSTSCRIPT created is dependant on the GridStart being used to enable * the job. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2590 $ */ public interface POSTScript { /** * The version number associated with this API of GridStart. */ public static final String VERSION = "1.0"; /** * Initialize the POSTScript implementation. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param path the path to the POSTScript on the submit host. * @param submitDir the submit directory where the submit file for the job * has to be generated. */ public void initialize( PegasusProperties properties, String path, String submitDir ); /** * Constructs the postscript that has to be invoked on the submit host * after the job has executed on the remote end. The postscript usually * works on the output generated by the executable that is used to grid * enable a job, and has been piped back by Condor. *
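     * An illustrative outcome (job name and installation path are
     * hypothetical): a construct() implementation invoked with key "POST"
     * typically results in a line such as
     *   SCRIPT POST preprocess_ID1 /path/to/pegasus-exitcode preprocess_ID1.out
     * in the DAGMan file generated for the workflow.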

* The postscript should be constructed and populated as a profile * in the DAGMAN namespace. * * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param key the DAGMan profile key that has to be inserted. * * @return boolean true if postscript was generated,else false. */ public boolean construct( Job job, String key ) ; /** * Returns a short textual description of the implementing class. * * @return short textual description. */ public String shortDescribe(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/0000755000175000017500000000000011757531667024217 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/PegasusExitCode.java0000644000175000017500000000421611757531137030111 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.gridstart; import java.io.File; import edu.isi.pegasus.planner.common.PegasusProperties; /** * The exitcode wrapper, that can parse kickstart output's and put them in the * database also. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4507 $ */ public class PegasusExitCode extends VDSPOSTScript { /** * The SHORTNAME for this implementation. */ public static final String SHORT_NAME = "pegasus-exitcode"; /** * The default constructor. */ public PegasusExitCode(){ super(); } /** * Returns a short textual description of the implementing class. * * @return short textual description. */ public String shortDescribe(){ return PegasusExitCode.SHORT_NAME; } /** * Returns the path to exitcode that is to be used on the kickstart * output. * * @return the path to the exitcode script to be invoked. */ public String getDefaultExitCodePath(){ StringBuffer sb = new StringBuffer(); sb.append( mProps.getBinDir() ); sb.append( File.separator ).append( "pegasus-exitcode" ); return sb.toString(); } /** * Returns an empty string, as python version of exitcode cannot parse properties * file. * * @param properties the properties object * * @return the properties list, else empty string. */ protected String getPostScriptProperties( PegasusProperties properties ){ return ""; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/ExitPOST.java0000644000175000017500000000331311757531137026471 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/
package edu.isi.pegasus.planner.code.gridstart;
import java.io.File;
/** * A wrapper around the Exitcode, that takes care of backing up output and * error files. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4507 $ */
public class ExitPOST extends VDSPOSTScript {
/** * The SHORTNAME for this implementation. */
public static final String SHORT_NAME = "exitpost";
/** * The default constructor. */
public ExitPOST(){ super(); }
/** * Returns a short textual description of the implementing class. * * @return short textual description. */
public String shortDescribe(){ return ExitPOST.SHORT_NAME; }
/** * Returns the path to exitcode that is to be used on the kickstart * output. * * @return the path to the exitcode script to be invoked. */
public String getDefaultExitCodePath(){
StringBuffer sb = new StringBuffer();
sb.append(mProps.getBinDir());
sb.append(File.separator).append("exitpost");
return sb.toString(); } }
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/VDSPOSTScript.java0000644000175000017500000002143311757531137027404 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */
package edu.isi.pegasus.planner.code.gridstart;
import edu.isi.pegasus.common.logging.LogManagerFactory;
import edu.isi.pegasus.planner.classes.Job;
import edu.isi.pegasus.planner.common.PegasusProperties;
import edu.isi.pegasus.common.logging.LogManager;
import edu.isi.pegasus.planner.namespace.Dagman;
import edu.isi.pegasus.planner.code.POSTScript;
/** * An abstract implementation of the interface, that is a superclass for * all the VDS supplied postscripts. These postscripts can parse * kickstart records. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2590 $ */
public abstract class VDSPOSTScript implements POSTScript {
/** * The suffix for the exitcode output file, that is generated at the * submit host. */
public static final String EXITCODE_OUTPUT_SUFFIX = "exit.log";
/** * The LogManager object which is used to log all the messages. */
protected LogManager mLogger;
/** * The object holding all the properties pertaining to Pegasus. */
protected PegasusProperties mProps;
/** * The path to the exitcode client that parses the exit status of * the kickstart. The client is run as a postscript. It also * includes the option to the command, since at present it is the same for all. * It is $PEGASUS_HOME/bin/exitcode (no -n)! */
protected String mExitParserPath;
/** * A boolean indicating whether to turn the debug on for the postscript or * not. */
protected boolean mPostScriptDebug;
/** * The properties that need to be passed to the postscript invocation * on the command line in the java format. */
protected String mPostScriptProperties;
/** * The submit directory where the submit files are being generated for * the workflow. */
protected String mSubmitDir;
/** * Returns the path to exitcode that is to be used on the kickstart * output. * * @return the path to the exitcode script to be invoked. 
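     * 
     * For example, the PegasusExitCode subclass returns
     * $PEGASUS_HOME/bin/pegasus-exitcode, while the ExitPOST subclass returns
     * $PEGASUS_HOME/bin/exitpost.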
*/ protected abstract String getDefaultExitCodePath(); /** * The default constructor. */ public VDSPOSTScript(){ //mLogger = LogManager.getInstance(); } /** * Initialize the POSTScript implementation. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param path the path to the POSTScript on the submit host. * @param submitDir the submit directory where the submit file for the job * has to be generated. */ public void initialize( PegasusProperties properties, String path, String submitDir ){ mProps = properties; mSubmitDir = submitDir; mLogger = LogManagerFactory.loadSingletonInstance( properties ); //construct the exitcode paths and arguments mExitParserPath = (path == null ) ? getDefaultExitCodePath() : path; mPostScriptDebug = mProps.setPostSCRIPTDebugON(); mPostScriptProperties = getPostScriptProperties( properties ); } /** * Constructs the postscript that has to be invoked on the submit host * after the job has executed on the remote end. The postscript usually * works on the xml output generated by kickstart. The postscript invoked * is exitcode that is shipped with VDS, and can usually be found at * $PEGASUS_HOME/bin/exitcode. *

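 * As an illustration (the job name, path and output file below are assumed
 * values, not taken from the original source), the profile keys constructed
 * here typically surface in the generated DAG file as a post script
 * declaration of the form
 * 
 *   SCRIPT POST preprocess_ID0000001 /path/to/pegasus-exitcode preprocess_ID0000001.out
 * 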
* The postscript is constructed and populated as a profile * in the DAGMAN namespace. * * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param key the key for the profile that has to be inserted. * * @return boolean true if postscript was generated,else false. */ public boolean construct( Job job, String key ) { String postscript = mExitParserPath; // //NO NEED TO REMOVE AS WE ARE HANDLING CORRECTLY IN DAGMAN NAMESPACE // //NOW. THERE THE ARGUMENTS AND KEY ARE COMBINED. Karan May 11,2006 // //arguments are already taken // //care of in the profile incorporation // postscript += " " + // (String)job.dagmanVariables.removeKey( // Dagman.POST_SCRIPT_ARGUMENTS_KEY); //check if the initialdir condor key has been set //for the job or not // This is no longer required, as to support // submit host execution , all kickstart outputs // are propogated back to the submit directory // Karan Aug 22, 2005 // if(job.condorVariables.containsKey("initialdir") && // !job.executionPool.equalsIgnoreCase("local")){ // String dir = (String)job.condorVariables.get("initialdir"); // //means that the kickstart output is being // //generated in the initialdir instead of the directory // //from where the dag is submitted // sb.append(dir).append(File.separator); // } // //append the name of kickstart output // sb.append(job.jobName).append(".out"); //pick up whatever output has been set and set it //as a corresponding DAGMAN profile. Bug Fix for VDS Bug 144 // postscript += " " + (String)job.condorVariables.get("output"); job.dagmanVariables.construct( Dagman.OUTPUT_KEY, (String)job.condorVariables.get("output")); StringBuffer extraOptions = new StringBuffer(); if( mPostScriptDebug ){ //add in the debug options appendProperty( extraOptions, "pegasus.log.default", getPostScriptLogFile( job ) ); appendProperty( extraOptions, "pegasus.verbose", "5" ); } //put in the postscript properties if any extraOptions.append( this.mPostScriptProperties ); //put the extra options into the exitcode arguments //in the correct order. Object args = job.dagmanVariables.get( Dagman.POST_SCRIPT_ARGUMENTS_KEY ); StringBuffer arguments = (args == null ) ? //only have extra options extraOptions : //have extra options in addition to existing args new StringBuffer().append( extraOptions ) .append( " " ).append( args ); job.dagmanVariables.construct( Dagman.POST_SCRIPT_ARGUMENTS_KEY, arguments.toString() ); //put in the postscript mLogger.log("Postscript constructed is " + postscript, LogManager.DEBUG_MESSAGE_LEVEL); job.dagmanVariables.checkKeyInNS( key, postscript ); return true; } /** * Returns the path to the postscript log file for a job. * * @param job the Job containing job description */ protected String getPostScriptLogFile( Job job ){ StringBuffer sb = new StringBuffer(); sb.append( job.getName() ).append( "." ).append( this.EXITCODE_OUTPUT_SUFFIX ); return sb.toString(); } /** * Returns the properties that need to be passed to the the postscript * invocation in the java format. It is of the form * "-Dprop1=value1 -Dprop2=value2 .." * * @param properties the properties object * * @return the properties list, else empty string. */ protected String getPostScriptProperties( PegasusProperties properties ){ StringBuffer sb = new StringBuffer(); appendProperty( sb, "pegasus.user.properties", properties.getPropertiesInSubmitDirectory( )); return sb.toString(); } /** * Appends a property to the StringBuffer, in the java command line format. 
* * @param sb the StringBuffer to append the property to. * @param key the property. * @param value the property value. */ protected void appendProperty( StringBuffer sb, String key, String value ){ sb.append( " ").append("-D").append( key ).append( "=" ).append( value ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/NoPOSTScript.java0000644000175000017500000000740011757531137027322 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.gridstart; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.code.POSTScript; import java.io.File; /** * This class refers to having no postscript associated with the job. * In addition, it removes from the job postscript specific arguments, * and other profiles. * * The postscript is only constructed if the job already contains the * Dagman profile key passed. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 2590 $ */ public class NoPOSTScript implements POSTScript { /** * The SHORTNAME for this implementation. */ public static final String SHORT_NAME = "none"; /** * The LogManager object which is used to log all the messages. */ protected LogManager mLogger; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The default constructor. */ public NoPOSTScript(){ //mLogger = LogManager.getInstance(); } /** * Initialize the POSTScript implementation. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param path the path to the POSTScript on the submit host. * @param submitDir the submit directory where the submit file for the job * has to be generated. */ public void initialize( PegasusProperties properties, String path, String submitDir ){ mProps = properties; mLogger = LogManagerFactory.loadSingletonInstance( properties ); } /** * Returns a short textual description of the implementing class. * * @return short textual description. */ public String shortDescribe(){ return this.SHORT_NAME; } /** * Constructs the postscript that has to be invoked on the submit host * after the job has executed on the remote end. The postscript works on the * stdout of the remote job, that has been transferred back to the submit * host by Condor. *

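 * A minimal usage sketch of a POSTScript implementation follows; it is
 * illustrative only, and Dagman.POST_SCRIPT_KEY is assumed to be the profile
 * key passed in by the caller:
 * 
 *   POSTScript ps = new NoPOSTScript();
 *   ps.initialize( properties, null, submitDir );
 *   boolean added = ps.construct( job, Dagman.POST_SCRIPT_KEY ); // always false here
 * 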
* In this implementation, however, no postscript is constructed: the method * simply removes the postscript specific profile keys from the job. * * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param key the DAGMan profile key that has to be inserted. * * @return false as postscript is never created for the job. */ public boolean construct(Job job, String key) { //mode is none, make sure to remove the post key and the arguments //Karan Nov 15,2005 VDS BUG FIX 128 //Always remove POST_SCRIPT_ARGUMENTS job.dagmanVariables.removeKey( key ); job.dagmanVariables.removeKey( Dagman.POST_SCRIPT_ARGUMENTS_KEY ); return false; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/NetloggerPostScript.java0000644000175000017500000002004711757531137031036 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.gridstart; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.code.POSTScript; import java.io.File; /** * This postscript invokes the netlogger-exitcode to parse the kickstart * output and write it out in netlogger format. * * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4507 $ */ public class NetloggerPostScript implements POSTScript { /** * The SHORTNAME for this implementation. */ public static final String SHORT_NAME = "Netlogger"; /** * The property to be set for the postscript to pick up the workflow id. */ public static final String WORKFLOW_ID_PROPERTY = "pegasus.gridstart.workflow.id" ; /** * The LOG4j system configuration property. */ private static String LOG4J_CONF_PROPERTY = "log4j.configuration"; /** * The LogManager object which is used to log all the messages. */ protected LogManager mLogger; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The path to the user postscript on the submit host. * */ protected String mPOSTScriptPath; /** * The path to the properties file created in the submit directory. */ private String mPostScriptProperties; /** * The workflow id used. */ private String mWorkflowID; /** * The log4j system property. */ private String mLog4jConf; /** * The default constructor. */ public NetloggerPostScript(){ //mLogger = LogManager.getInstance(); mLog4jConf = System.getProperty( NetloggerPostScript.LOG4J_CONF_PROPERTY ); } /** * Initialize the POSTScript implementation. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param path the path to the POSTScript on the submit host. * @param submitDir the submit directory where the submit file for the job * has to be generated. * * @throws RuntimeException in case of path being null. 
*/ public void initialize( PegasusProperties properties, String path, String submitDir ){ mProps = properties; mPOSTScriptPath = path == null ? this.getNetloggerExitCodePath() :path ; mLogger = LogManagerFactory.loadSingletonInstance( properties ); mPostScriptProperties = getPostScriptProperties( properties ); mWorkflowID = properties.getProperty( this.WORKFLOW_ID_PROPERTY ); } /** * Constructs the postscript that has to be invoked on the submit host * after the job has executed on the remote end. The postscript works on the * stdout of the remote job, that has been transferred back to the submit * host by Condor. *

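 * As an illustration (job id, workflow id and property path are assumed
 * values), the extra options assembled below take roughly the form
 * 
 *   -Dpegasus.user.properties=/submit/dir/pegasus.properties -j preprocess_ID0000001 -w wf-20120501 -f
 * 
 * and are prepended to any pre-existing postscript arguments.
 * 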
* The postscript is constructed and populated as a profile * in the DAGMAN namespace. * * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param key the DAGMan profile key that has to be inserted. * * @return boolean true if postscript was generated,else false. */ public boolean construct(Job job, String key) { boolean constructed = false; //see if any specific postscript was specified for this job //get the value user specified for the job String postscript = mPOSTScriptPath; job.dagmanVariables.construct( Dagman.OUTPUT_KEY, (String) job.condorVariables.get( "output" ) ); ///// StringBuffer extraOptions = new StringBuffer(); //put in the postscript properties if any extraOptions.append( this.mPostScriptProperties ); //add the log4j conf option if specified if( mLog4jConf != null ){ extraOptions.append( " -D" ).append( NetloggerPostScript.LOG4J_CONF_PROPERTY ). append( "=" ).append( mLog4jConf ); } //add the -j and -w options extraOptions.append( " -j " ).append( job.getID() ). append( " -w " ).append( mWorkflowID ).append( " -f "); //put the extra options into the exitcode arguments //in the correct order. Object args = job.dagmanVariables.get( Dagman.POST_SCRIPT_ARGUMENTS_KEY ); StringBuffer arguments = (args == null ) ? //only have extra options extraOptions : //have extra options in addition to existing args new StringBuffer().append( extraOptions ) .append( " " ).append( args ); job.dagmanVariables.construct( Dagman.POST_SCRIPT_ARGUMENTS_KEY, arguments.toString() ); ////// constructed = true; //put in the postscript mLogger.log("Postscript constructed is " + postscript, LogManager.DEBUG_MESSAGE_LEVEL); job.dagmanVariables.checkKeyInNS( key, postscript ); // else{ // //Karan Nov 15,2005 VDS BUG FIX 128 // //Always remove POST_SCRIPT_ARGUMENTS // job.dagmanVariables.removeKey(Dagman.POST_SCRIPT_ARGUMENTS_KEY); // } return constructed; } /** * Returns a short textual description of the implementing class. * * @return short textual description. */ public String shortDescribe(){ return this.SHORT_NAME; } /** * Returns the path to exitcode that is to be used on the kickstart * output. * * @return the path to the exitcode script to be invoked. */ public String getNetloggerExitCodePath(){ StringBuffer sb = new StringBuffer(); sb.append(mProps.getBinDir()); sb.append(File.separator).append("netlogger-exitcode"); return sb.toString(); } /** * Returns the properties that need to be passed to the the postscript * invocation in the java format. It is of the form * "-Dprop1=value1 -Dprop2=value2 .." * * @param properties the properties object * * @return the properties list, else empty string. */ protected String getPostScriptProperties( PegasusProperties properties ){ StringBuffer sb = new StringBuffer(); appendProperty( sb, "pegasus.user.properties", properties.getPropertiesInSubmitDirectory( )); return sb.toString(); } /** * Appends a property to the StringBuffer, in the java command line format. * * @param sb the StringBuffer to append the property to. * @param key the property. * @param value the property value. 
*/ protected void appendProperty( StringBuffer sb, String key, String value ){ sb.append( " ").append("-D").append( key ).append( "=" ).append( value ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/Kickstart.java0000644000175000017500000013676211757531137027030 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.gridstart; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.code.GridStart; import edu.isi.pegasus.planner.code.generator.condor.CondorQuoteParser; import edu.isi.pegasus.planner.code.generator.condor.CondorQuoteParserException; import edu.isi.pegasus.planner.transfer.SLS; import edu.isi.pegasus.common.util.Separator; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.cluster.JobAggregator; import java.util.Iterator; import java.util.StringTokenizer; import java.util.Set; import java.util.List; import java.util.ArrayList; import java.io.File; import java.io.FileWriter; import java.io.IOException; /** * This enables a constituentJob to be run on the grid, by launching it through kickstart. * The kickstart executable is a light-weight program which connects the * stdin, stdout and stderr filehandles for Pegasus jobs on the remote * site. *

* Sitting in between the remote scheduler and the executable, it is * possible for kickstart to gather additional information about the * executable run-time behavior, including the exit status of jobs. *

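 * As an illustration (transformation name, site handle and application path
 * are assumed values), the enable() methods below wrap a job into an
 * invocation of roughly the form
 * 
 *   pegasus-kickstart -n pegasus::preprocess:1.0 -N ID0000001 -R condorpool /path/to/app arg1 arg2
 * 
 * where -n carries the complete transformation name, -N the DAX id of the
 * job, and -R the resource handle of the execution site.
 * 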
* Kickstart is an executable distributed with Pegasus that can generally be found * at $PEGASUS_HOME/bin/kickstart * * @author Karan Vahi vahi@isi.edu * @version $Revision: 5152 $ */ public class Kickstart implements GridStart { /** * The transformation namespace for the kickstart */ public static final String TRANSFORMATION_NAMESPACE = "pegasus"; /** * The logical name of kickstart */ public static final String TRANSFORMATION_NAME = "kickstart"; /** * The version number for kickstart. */ public static final String TRANSFORMATION_VERSION = null; /** * The basename of the kickstart executable. */ public static final String EXECUTABLE_BASENAME = "pegasus-kickstart"; /** * The complete TC name for kickstart. */ public static final String COMPLETE_TRANSFORMATION_NAME = Separator.combine( TRANSFORMATION_NAMESPACE, TRANSFORMATION_NAME, TRANSFORMATION_VERSION ); /** * The suffix for the kickstart input file, that is generated to use * invoke at the remote end. */ public static final String KICKSTART_INPUT_SUFFIX = "arg"; /** * The basename of the class that is implmenting this. Could have * been determined by reflection. */ public static final String CLASSNAME = "Kickstart"; /** * The SHORTNAME for this implementation. */ public static final String SHORT_NAME = "kickstart"; /** * The environment variable used to the set Kickstart SETUP JOB. */ public static final String KICKSTART_SETUP = "GRIDSTART_SETUP"; /** * The environment variable used to the set Kickstart PREJOB. */ public static final String KICKSTART_PREJOB = "GRIDSTART_PREJOB"; /** * The environment variable used to the set Kickstart POSTJOB. */ public static final String KICKSTART_POSTJOB = "GRIDSTART_POSTJOB"; /** * The environment variable used to the set Kickstart CLEANUP JOB. */ public static final String KICKSTART_CLEANUP = "GRIDSTART_CLEANUP"; /** * The LogManager object which is used to log all the messages. */ private LogManager mLogger; /** * The object holding all the properties pertaining to Pegasus. */ private PegasusProperties mProps; /** * The options passed to the planner. */ private PlannerOptions mPOptions; /** * The handle to the workflow that is being enabled. */ private ADag mConcDAG; /** * Handle to the site catalog store. */ private SiteStore mSiteStore; //private PoolInfoProvider mSiteHandle; /** * Handle to Transformation Catalog. */ private TransformationCatalog mTCHandle; /** * The submit exectionSiteDirectory where the submit files are being generated for * the workflow. */ private String mSubmitDir; /** * A boolean indicating whether to use invoke always or not. */ private boolean mInvokeAlways; /** * A boolean indicating whether to stat files or not. */ private boolean mDoStat; /** * A boolean indicating whether to generate lof files or not. */ private boolean mGenerateLOF; /** * The invoke limit trigger. */ private long mInvokeLength; /** * A boolean indicating whether to have worker node execution or not. */ boolean mWorkerNodeExecution; /** * The handle to the SLS implementor */ private SLS mSLS; /** * An instance variable to track if enabling is happening as part of a clustered constituentJob. * See Bug 21 comments on Pegasus Bugzilla */ private boolean mEnablingPartOfAggregatedJob; /** * A boolean indicating whether kickstart is deployed dynamically or not. */ private boolean mDynamicDeployment; /** * The label that is passed to kickstart. */ private String mKickstartLabel; /** * Whether kickstart should set the X Bit on the staged executables. 
*/ private boolean mSetXBit; /** * Handle to NoGridStart implementation. */ private GridStart mNoGridStartImpl; /** * Boolean indicating whether to use full path or not */ private boolean mUseFullPathToGridStart; /** * Boolean indicating whether to disable invoke functionality. */ private boolean mDisableInvokeFunctionality; /** * Initializes the GridStart implementation. * * @param bag the bag of objects that is used for initialization. * @param dag the concrete dag so far. */ public void initialize( PegasusBag bag, ADag dag ){ mProps = bag.getPegasusProperties(); mPOptions = bag.getPlannerOptions(); mLogger = bag.getLogger(); mSubmitDir = mPOptions.getSubmitDirectory(); mKickstartLabel = ( dag == null ) ? null : ( mPOptions.getBasenamePrefix() == null )? dag.getLabel(): mPOptions.getBasenamePrefix() ; mInvokeAlways = mProps.useInvokeInGridStart(); mInvokeLength = mProps.getGridStartInvokeLength(); mDoStat = mProps.doStatWithKickstart(); mGenerateLOF = mProps.generateLOFFiles(); mConcDAG = dag; mSiteStore = bag.getHandleToSiteStore(); mTCHandle = bag.getHandleToTransformationCatalog(); mDynamicDeployment = mProps.transferWorkerPackage(); mWorkerNodeExecution = mProps.executeOnWorkerNode(); mEnablingPartOfAggregatedJob = false; mSetXBit = mProps.setXBitWithKickstart(); mNoGridStartImpl = new NoGridStart(); mNoGridStartImpl.initialize( bag, dag ); mUseFullPathToGridStart = true; mDisableInvokeFunctionality = mProps.disableInvokeInGridStart(); } /** * Setter method to control whether a full path to Gridstart should be * returned while wrapping a job or not. * * @param fullPath if set to true, indicates that full path would be used. */ public void useFullPathToGridStarts( boolean fullPath ){ this.mUseFullPathToGridStart = fullPath; } /** * Enables a constituentJob to run on the grid. This also determines how the * stdin,stderr and stdout of the constituentJob are to be propogated. * To grid enable a constituentJob, the constituentJob may need to be wrapped into another * constituentJob, that actually launches the constituentJob. It usually results in the constituentJob * description passed being modified modified. * * @param constituentJob the Job object containing the constituentJob description * of the constituentJob that has to be enabled on the grid. * @param isGlobusJob is true, if the constituentJob generated a * line universe = globus, and thus runs remotely. * Set to false, if the constituentJob runs on the submit * host in any way. * * @return boolean true if enabling was successful,else false. 
*/ public boolean enable( AggregatedJob job,boolean isGlobusJob){ boolean first = true; //get hold of the JobAggregator determined for this clustered job //during clustering JobAggregator aggregator = job.getJobAggregator(); if( aggregator == null ){ throw new RuntimeException( "Clustered job not associated with a job aggregator " + job.getID() ); } boolean partOfClusteredJob = true; //we want to evaluate the exectionSiteDirectory only once //for the clustered job for (Iterator it = job.constituentJobsIterator(); it.hasNext(); ) { Job constituentJob = (Job)it.next(); //earlier was set in SeqExec JobAggregator in the enable function constituentJob.vdsNS.construct( Pegasus.GRIDSTART_KEY, this.getVDSKeyValue() ); if(first){ first = false; } else{ //we need to pass -H to kickstart //to suppress the header creation constituentJob.vdsNS.construct(Pegasus.GRIDSTART_ARGUMENTS_KEY,"-H"); } //no worker node case //always pass isGlobus true as always //interested only in executable strargs //due to the fact that seqexec does not allow for setting environment //per constitutent constituentJob, we cannot set the postscript removal option this.enable( constituentJob, isGlobusJob, mDoStat, false, partOfClusteredJob ); } //all the constitutent jobs are enabled. //get the job aggregator to render the job //to it's executable form aggregator.makeAbstractAggregatedJobConcrete( job ); //the aggregated job itself needs to be enabled via NoGridStart mNoGridStartImpl.enable( (Job)job, isGlobusJob); return true; } /** * Enables a constituentJob to run on the grid by launching it through kickstart. * Does the stdio, and stderr handling of the constituentJob to be run on the grid. * It modifies the constituentJob description, and also constructs all the valid * option to be passed to kickstart for launching the executable. * * @param constituentJob the Job object containing the constituentJob description * of the constituentJob that has to be enabled on the grid. * @param isGlobusJob is true, if the constituentJob generated a * line universe = globus, and thus runs remotely. * Set to false, if the constituentJob runs on the submit * host in any way. * * @return boolean true if enabling was successful,else false in case when * the path to kickstart could not be determined on the site where * the constituentJob is scheduled. */ public boolean enable( Job job, boolean isGlobusJob ){ return this.enable( job, isGlobusJob, mDoStat , true, false ); } /** * Enables a constituentJob to run on the grid by launching it through kickstart. * Does the stdio, and stderr handling of the constituentJob to be run on the grid. * It modifies the constituentJob description, and also constructs all the valid * option to be passed to kickstart for launching the executable. * * @param constituentJob the Job object containing the constituentJob description * of the constituentJob that has to be enabled on the grid. * @param isGlobusJob is true, if the constituentJob generated a * line universe = globus, and thus runs remotely. * Set to false, if the constituentJob runs on the submit * host in any way. * @param stat boolean indicating whether to generate the lof files * for kickstart stat option or not. * @param addPostScript boolean indicating whether to add a postscript or not. * @param partOfClusteredJob boolean indicating whether the job being enabled * is part of a clustered job or not. 
* * @return boolean true if enabling was successful,else false in case when * the path to kickstart could not be determined on the site where * the constituentJob is scheduled. */ protected boolean enable( Job job, boolean isGlobusJob, boolean stat, boolean addPostScript , boolean partOfClusteredJob) { //take care of relative submit exectionSiteDirectory if specified. String submitDir = mSubmitDir + mSeparator; // String submitDir = getSubmitDirectory( mSubmitDir , constituentJob) + mSeparator; //To get the gridstart/kickstart path on the remote //pool, querying with entry for vanilla universe. //In the new format the gridstart is associated with the //pool not pool, condor universe SiteCatalogEntry site = mSiteStore.lookup( job.getSiteHandle() ); //the executable path and arguments are put //in the Condor namespace and not printed to the //file so that they can be overriden if desired //later through profiles and key transfer_executable String gridStartPath = handleTransferOfExecutable( job, getKickstartPath( site ) ); //sanity check if (gridStartPath == null){ return false; } StringBuffer gridStartArgs = new StringBuffer(); // the executable is gridstart, the application becomes its argument gridStartArgs.append(' '); gridStartArgs.append("-n "); gridStartArgs.append(job.getCompleteTCName()); gridStartArgs.append(' '); //for derivation we now pass the logical id in the DAX //for the job JIRA PM-329 gridStartArgs.append("-N ").append( job.getDAXID() ).append( " " ); // handle stdin if (job.stdIn.length() > 0) { //for using the transfer script and other vds executables the //input file is transferred from the submit host by Condor to //stdin. We fool the kickstart to pick up the input file from //standard stdin by giving the input file name as - if (job.logicalName.equals( edu.isi.pegasus.planner.transfer.implementation.Transfer.TRANSFORMATION_NAME) ||job.logicalName.equals(edu.isi.pegasus.planner.cluster.aggregator.SeqExec. COLLAPSE_LOGICAL_NAME) ||job.logicalName.equals(edu.isi.pegasus.planner.cluster.aggregator.MPIExec. COLLAPSE_LOGICAL_NAME) ||job.logicalName.equals(edu.isi.pegasus.planner.refiner.cleanup.Cleanup.TRANSFORMATION_NAME ) ) { //condor needs to pick up the constituentJob stdin and //transfer it to the remote end construct( job, "input" , submitDir + job.getStdIn() ); gridStartArgs.append("-i ").append("-").append(' '); } else { //kickstart provides the app's *tracked* stdin gridStartArgs.append("-i ").append(job.stdIn).append(' '); } } // handle stdout if (job.stdOut.length() > 0) { // gridstart saves the app's *tracked* stdout gridStartArgs.append("-o ").append(job.stdOut).append(' '); } // the Condor output variable and kickstart -o option // must not point to the same file for any local constituentJob. if (job.stdOut.equals(job.jobName + ".out") && !isGlobusJob) { mLogger.log("Detected WAW conflict for stdout",LogManager.WARNING_MESSAGE_LEVEL); } // the output of gridstart is propagated back to the submit host construct(job,"output",submitDir + job.jobName + ".out"); if (isGlobusJob) { construct(job,"transfer_output","true"); } // handle stderr if (job.stdErr.length() > 0) { // gridstart saves the app's *tracked* stderr gridStartArgs.append("-e ").append(job.stdErr).append(' '); } // the Condor error variable and kickstart -e option // must not point to the same file for any local constituentJob. 
if (job.stdErr.equals(job.jobName + ".err") && !isGlobusJob) { mLogger.log("Detected WAW conflict for stderr",LogManager.WARNING_MESSAGE_LEVEL); } // the error from gridstart is propagated back to the submit host construct(job,"error",submitDir + job.jobName + ".err"); if (isGlobusJob) { construct(job,"transfer_error","true"); } //we need to pass the resource handle //to kickstart as argument gridStartArgs.append("-R ").append(job.executionPool).append(' '); // Added for JIRA PM-543 String directory = this.getDirectory( job ); //handle the -W option that asks kickstart to create and change //exectionSiteDirectory before launching an executable. if(job.vdsNS.getBooleanValue(Pegasus.CREATE_AND_CHANGE_DIR_KEY ) ){ // Commented to take account of submitting to condor pool // directly or glide in nodes. However, does not work for // standard universe jobs. Also made change in Kickstart // to pick up only remote_initialdir Karan Nov 15,2005 // Removed for JIRA PM-543 // String directory = null; // String key = getDirectoryKey( job ); // //we remove the key JIRA PM-80 // directory = (String)job.condorVariables.removeKey( key ); //pass the exectionSiteDirectory as an argument to kickstart gridStartArgs.append(" -W ").append(directory).append(' '); } else if(job.vdsNS.getBooleanValue(Pegasus.CHANGE_DIR_KEY) ){ //handle the -w option that asks kickstart to change //exectionSiteDirectory before launching an executable. // Removed for JIRA PM-543 // String directory = null; // String key = getDirectoryKey( job );\ // //we remove the key JIRA PM-80 // directory = (String)job.condorVariables.removeKey( key ); //pass the exectionSiteDirectory as an argument to kickstart gridStartArgs.append(" -w ").append( directory ).append(' '); } else{ //set the directory key with the job if( requiresToSetDirectory( job ) ){ job.setDirectory( directory ); } } if( job.vdsNS.getBooleanValue(Pegasus.TRANSFER_PROXY_KEY) ){ // Removed for JIRA PM-543 // // String key = getDirectoryKey( job ); // //just remove the remote_initialdir key // //the constituentJob needs to be run in the exectionSiteDirectory // //Condor or GRAM decides to run // job.condorVariables.removeKey( key ); job.setDirectory( null ); } //check if the constituentJob type indicates staging of executable //The -X functionality is handled by the setup jobs that //are added as childern to the stage in jobs, unless they are //disabled and users set a property to set the xbit //Karan November 22, 2005 if( mSetXBit && job.userExecutablesStagedForJob() ){ //add the -X flag to denote turning on gridStartArgs.append( " -X " ); } //add the stat options to kickstart only for certain jobs for time being //and if the input variable is true if ( stat ){ if (job.getJobType() == Job.COMPUTE_JOB || // job.getJobType() == Job.STAGED_COMPUTE_JOB || job.getJobType() == Job.CLEANUP_JOB || job.getJobType() == Job.STAGE_IN_JOB || job.getJobType() == Job.INTER_POOL_JOB) { String lof; List files = new ArrayList(2); //inefficient check here again. just a prototype //we need to generate -S option only for non transfer jobs //generate the list of filenames file for the input and output files. if (! (job instanceof TransferJob)) { lof = generateListofFilenamesFile(job.getInputFiles(), job.getID() + ".in.lof"); if (lof != null) { File file = new File(lof); job.condorVariables.addIPFileForTransfer(lof); //arguments just need basename . no path component gridStartArgs.append("-S @").append(file.getName()). 
append(" "); files.add(file.getName()); } } //for cleanup jobs no generation of stats for output files if (job.getJobType() != Job.CLEANUP_JOB) { lof = generateListofFilenamesFile(job.getOutputFiles(), job.getID() + ".out.lof"); if (lof != null) { File file = new File(lof); job.condorVariables.addIPFileForTransfer(lof); //arguments just need basename . no path component gridStartArgs.append("-s @").append(file.getName()).append(" "); files.add(file.getName()); } } //add kickstart postscript that removes these files if( addPostScript ) {addCleanupPostScript(job, files); } } }//end of if ( stat ) else if( mGenerateLOF ){ //dostat is false. so no generation of stat option //but generate lof files nevertheless //inefficient check here again. just a prototype //we need to generate -S option only for non transfer jobs //generate the list of filenames file for the input and output files. if (! (job instanceof TransferJob)) { generateListofFilenamesFile( job.getInputFiles(), job.getID() + ".in.lof"); } //for cleanup jobs no generation of stats for output files if (job.getJobType() != Job.CLEANUP_JOB) { generateListofFilenamesFile(job.getOutputFiles(), job.getID() + ".out.lof"); } }///end of mGenerateLOF //append any arguments that need to be passed //kickstart directly, set elsewhere if(job.vdsNS.containsKey(Pegasus.GRIDSTART_ARGUMENTS_KEY)){ gridStartArgs.append(job.vdsNS.get(Pegasus.GRIDSTART_ARGUMENTS_KEY)) .append(' '); } if(mProps.generateKickstartExtraOptions() && mConcDAG != null){ gridStartArgs.append("-L ").append( mKickstartLabel ).append(" "); gridStartArgs.append("-T ").append(mConcDAG.getMTime()).append(" "); } /* mLogger.log( "User executables staged for job " + job.getID() + " " + job.userExecutablesStagedForJob() , LogManager.DEBUG_MESSAGE_LEVEL ); */ //figure out job executable String jobExecutable = ( !this.mUseFullPathToGridStart && job.userExecutablesStagedForJob() )? //the basename of the executable used for pegasus lite //and staging of executables "." + File.separator + job.getStagedExecutableBaseName( ): //use whatever is set in the executable field job.executable; long argumentLength = gridStartArgs.length() + jobExecutable.length() + 1 + job.strargs.length(); //invoke is disabled if part of clustered job or because of a global disable //JIRA PM-526 boolean disableInvoke = mDisableInvokeFunctionality || partOfClusteredJob; if( !disableInvoke && (mInvokeAlways || argumentLength > mInvokeLength) ){ if(!useInvoke(job, jobExecutable, gridStartArgs)){ mLogger.log("Unable to use invoke for job ", LogManager.ERROR_MESSAGE_LEVEL); return false; } } else{ gridStartArgs.append( jobExecutable ); gridStartArgs.append(' ').append(job.strargs); } //the executable path and arguments are put //in the Condor namespace and not printed to the //file so that they can be overriden if desired //later through profiles and key transfer_executable // the arguments are no longer set as condor profiles // they are now set to the corresponding profiles in // the Condor Code Generator only. /* construct(job, "executable", gridStartPath ); construct(job, "arguments", gridStartArgs.toString()); */ job.setArguments( gridStartArgs.toString() ); job.setRemoteExecutable( gridStartPath ); //all finished successfully return true; } /** * It changes the paths to the executable depending on whether we want to * transfer the executable or not. * * If the transfer_executable is set to true, then the executable needs to be * shipped from the submit host meaning the local pool. 
This function changes * the path of the executable to the one on the local pool, so that it can * be shipped. * * If the worker package is being deployed dynamically, then the path is set * to the exectionSiteDirectory where the worker package is deployed. * * Else, we pick up the path from the site catalog that is passed as input * * @param constituentJob the Job containing the constituentJob description. * @param path the path to kickstart on the remote compute site, as determined * from the site catalog. * * @return the path that needs to be set as the executable */ protected String handleTransferOfExecutable( Job job, String path ) { Condor cvar = job.condorVariables; if ( cvar.getBooleanValue("transfer_executable")) { SiteCatalogEntry site = mSiteStore.lookup( "local" ); TransformationCatalogEntry entry = this.getTransformationCatalogEntry( site.getSiteHandle() ); String gridStartPath = ( entry == null )? //rely on the path determined from sc getKickstartPath( site ): //the tc entry has highest priority entry.getPhysicalTransformation(); if (gridStartPath == null) { mLogger.log( "Gridstart needs to be shipped from the submit host to pool" + job.getSiteHandle() + ".\nNo entry for it in pool local", LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( "GridStart needs to be shipped from submit host to site " + job.getSiteHandle() + " for job " + job.getName()); } return gridStartPath; } else if( mDynamicDeployment && job.runInWorkDirectory() && !mWorkerNodeExecution ){ //worker package deployment //pick up the path from the transformation catalog of //dynamic deployment //in case of pegasus lite mode, we dont look up here. TransformationCatalogEntry entry = this.getTransformationCatalogEntry( job.getSiteHandle() ); if( entry == null ){ //NOW THROWN AN EXCEPTION //should throw a TC specific exception StringBuffer error = new StringBuffer(); error.append("Could not find entry in tc for lfn "). append( COMPLETE_TRANSFORMATION_NAME ). append(" at site ").append( job.getSiteHandle() ); if ( job.getSiteHandle().equalsIgnoreCase( "local" ) ){ //for local site in case of worker package staging also //we can pick up the path on submit host, if not staged //PM-497 SiteCatalogEntry site = mSiteStore.lookup( "local" ); String p = this.getKickstartPath( site ); if( p != null ){ return p; } } mLogger.log( error.toString(), LogManager.ERROR_MESSAGE_LEVEL); throw new RuntimeException( error.toString() ); } return entry.getPhysicalTransformation(); } else{ //the vanilla case where kickstart is pre installed. TransformationCatalogEntry entry = this.getTransformationCatalogEntry( job.getSiteHandle() ); String ksPath = ( entry == null )? //rely on the path determined from profiles (String)job.vdsNS.get( Pegasus.GRIDSTART_PATH_KEY ): //the tc entry has highest priority entry.getPhysicalTransformation(); //we use full paths for pegasus auxillary jobs //even when pegasus lite is used i.e mUseFullPathToGridStart is set to true boolean useFullPath = mUseFullPathToGridStart || job.getJobType() != Job.COMPUTE_JOB ; if( useFullPath ){ ksPath = ( ksPath == null )? //rely on the path from the site catalog path: ksPath; } else{ //pegasus lite case. we dont want to rely on site catalog //constructed path ksPath = ( ksPath == null )? 
this.EXECUTABLE_BASENAME ://use the basename ksPath; } //sanity check if( ksPath == null ){ throw new RuntimeException( "Unable to determine path to kickstart for site " + job.getSiteHandle() + " for job " + job.getID() ); } return ksPath; } } /** * Returns the transformation catalog entry for kickstart on a site * * @param site the site on which the entry is required * * @return the entry if found else null */ public TransformationCatalogEntry getTransformationCatalogEntry( String site ){ List entries = null; try { entries = mTCHandle.lookup( Kickstart.TRANSFORMATION_NAMESPACE, Kickstart.TRANSFORMATION_NAME, Kickstart.TRANSFORMATION_VERSION, site, TCType.INSTALLED ); } catch (Exception e) { //non sensical catching mLogger.log("Unable to retrieve entries from TC " + e.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } return ( entries == null ) ? null : (TransformationCatalogEntry) entries.get(0); } /** * Returns the default path to kickstart as constructed from the * environment variable associated with a site in the site catalog * * @param site the SiteCatalogEntry object for the site. * * @return value if set else null */ public String getKickstartPath( SiteCatalogEntry site ) { //try to construct the default path on basis of //PEGASUS_HOME environment variable. String home = site.getPegasusHome(); if( home == null ){ return null; } StringBuffer ks = new StringBuffer(); ks.append( home ).append( File.separator ). append( "bin").append( File.separator ). append( Kickstart.EXECUTABLE_BASENAME ); return ks.toString(); } /** * Returns the exectionSiteDirectory in which the constituentJob executes on the worker node. * * * @param constituentJob * * @return the full path to the exectionSiteDirectory where the constituentJob executes */ public String getWorkerNodeDirectory( Job job ){ //check for Pegasus Profile if( job.vdsNS.containsKey( Pegasus.WORKER_NODE_DIRECTORY_KEY ) ){ return job.vdsNS.getStringValue( Pegasus.WORKER_NODE_DIRECTORY_KEY ); } if( mSLS.doesCondorModifications() ){ //indicates the worker node exectionSiteDirectory is the exectionSiteDirectory //in which condor launches the job // JIRA PM-380 return "."; } StringBuffer workerNodeDir = new StringBuffer(); String destDir = mSiteStore.getEnvironmentVariable( job.getSiteHandle() , "wntmp" ); destDir = ( destDir == null ) ? "/tmp" : destDir; String relativeDir = mPOptions.getRelativeDirectory(); workerNodeDir.append( destDir ).append( File.separator ). append( relativeDir.replaceAll( "/" , "-" ) ). //append( File.separator ).append( constituentJob.getCompleteTCName().replaceAll( ":[:]*", "-") ); append( "-" ).append( job.getID() ); return workerNodeDir.toString(); } /** * Indicates whether the enabling mechanism can set the X bit * on the executable on the remote grid site, in addition to launching * it on the remote grid site. * * @return true indicating Kickstart can set the X bit or not. */ public boolean canSetXBit(){ return true; } /** * Returns the value of the vds profile with key as Pegasus.GRIDSTART_KEY, * that would result in the loading of this particular implementation. * It is usually the name of the implementing class without the * package name. * * @return the value of the profile key. * @see org.griphyn.cPlanner.namespace.Pegasus#GRIDSTART_KEY */ public String getVDSKeyValue(){ return Kickstart.CLASSNAME; } /** * Returns a short textual description in the form of the name of the class. * * @return short textual description. 
*/ public String shortDescribe(){ return Kickstart.SHORT_NAME; } /** * Returns the SHORT_NAME for the POSTScript implementation that is used * to be as default with this GridStart implementation. * * @return the identifier for the PegasusExitCode POSTScript implementation. * * */ public String defaultPOSTScript(){ return PegasusExitCode.SHORT_NAME; } /** * Returns a boolean indicating whether we need to set the directory for * the job or not. * * @param job the job for which to set directory. * * @return */ protected boolean requiresToSetDirectory( Job job ) { //the cleanup jobs should never have directory set as full path //is specified return ( job.getJobType() != Job.CLEANUP_JOB && job.getJobType() != Job.REPLICA_REG_JOB ); } /** * Returns the directory in which the job should run. * * @param job the job in which the directory has to run. * * @return */ protected String getDirectory( Job job ){ String execSiteWorkDir = mSiteStore.getInternalWorkDirectory(job); String workdir = (String) job.globusRSL.removeKey("directory"); // returns old value workdir = (workdir == null)?execSiteWorkDir:workdir; return workdir; } /** * Returns the exectionSiteDirectory that is associated with the constituentJob to specify * the exectionSiteDirectory in which the constituentJob needs to run * * @param constituentJob the constituentJob * * @return the condor key . can be initialdir or remote_initialdir */ private String getDirectoryKey(Job job) { /*String exectionSiteDirectory = (style.equalsIgnoreCase(Pegasus.GLOBUS_STYLE) || style.equalsIgnoreCase(Pegasus.GLIDEIN_STYLE) || style.equalsIgnoreCase(Pegasus.GLITE_STYLE))? (String)constituentJob.condorVariables.removeKey("remote_initialdir"): (String)constituentJob.condorVariables.removeKey("initialdir"); */ String universe = (String) job.condorVariables.get( Condor.UNIVERSE_KEY ); return ( universe.equals( Condor.STANDARD_UNIVERSE ) || universe.equals( Condor.LOCAL_UNIVERSE) || universe.equals( Condor.SCHEDULER_UNIVERSE ) )? "initialdir" : "remote_initialdir"; } /** * Triggers the creation of the kickstart input file, that contains the * the remote executable and the arguments with which it has to be invoked. * The kickstart input file is created in the submit directory. * * @param constituentJob the Job object containing the constituentJob description. * @param executable the path to the executable used. * @param args the arguments buffer for gridstart invocation so far. * * @return boolean indicating whether kickstart input file was generated or not. * false in case of any error. */ private boolean useInvoke(Job job, String executable, StringBuffer args){ boolean result = true; String inputBaseName = job.jobName + "." 
+ Kickstart.KICKSTART_INPUT_SUFFIX; //writing the stdin file File argFile = new File(mSubmitDir, inputBaseName); try { FileWriter input; input = new FileWriter( argFile ); //the first thing that goes in is the executable name input.write( executable ); input.write("\n"); //write out all the arguments //one on each line StringTokenizer st = new StringTokenizer(job.strargs); while(st.hasMoreTokens()){ input.write(st.nextToken()); input.write("\n"); } //close the stream input.close(); } catch (Exception e) { mLogger.log("Unable to write the kickstart input file for job " + job.getCompleteTCName() + " " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); return false; } //check if a directory is associated with the job String directory = job.getDirectory(); if( directory != null ){ //for PM-526 //we want to trigger the -w option if a directory is associated with //the jobs args.append( " -w " ).append( directory ); job.setDirectory( null ); } job.condorVariables.addIPFileForTransfer( argFile.getAbsolutePath() ); //add the -I argument to kickstart args.append(" -I ").append(inputBaseName).append(" "); return result; } /** * Constructs a kickstart setup constituentJob * * @param constituentJob the constituentJob to be run. * @param workerNodeTmp the worker node tmp to run the constituentJob in. * * @return String */ protected String constructSetupJob( Job job, String workerNodeTmp ){ StringBuffer setup = new StringBuffer(); setup.append( "/bin/mkdir -p " ).append( workerNodeTmp ); return setup.toString(); } /** * Constructs a kickstart setup constituentJob * * @param constituentJob the constituentJob to be run. * @param workerNodeTmp the worker node tmp to run the constituentJob in. * * @return String */ protected String constructCleanupJob( Job job, String workerNodeTmp ){ StringBuffer setup = new StringBuffer(); setup.append( "/bin/rm -rf " ).append( workerNodeTmp ); return setup.toString(); } /** * Constructs the prejob that fetches sls file, and then invokes transfer * again. * * @param constituentJob the constituentJob for which the prejob is being created * @param headNodeURLPrefix String * @param headNodeDirectory String * @param workerNodeDirectory String * @param slsFile String * * @return String containing the prescript invocation */ protected String constructPREJob( Job job, String headNodeURLPrefix, String headNodeDirectory, String workerNodeDirectory, String slsFile ){ File headNodeSLS = new File( headNodeDirectory, slsFile ); return mSLS.invocationString( job, headNodeSLS ); //first we need to get the sls file to worker node /* preJob.append( "/bin/echo -e \" " ). append( headNodeURLPrefix ).append( File.separator ). append( headNodeDirectory ).append( File.separator ). append( slsFile ).append( " \\n " ). append( "file://" ).append( workerNodeDirectory ).append( File.separator ). append( slsFile ).append( "\"" ). append( " | " ).append( transfer ).append( " base mnt " ); preJob.append( " && " ); //now we need to get transfer to execute this sls file preJob.append( transfer ).append( " base mnt < " ).append( slsFile ); */ } /** * Writes out the list of filenames file for the constituentJob. * * @param files the list of PegasusFile objects contains the files * whose stat information is required. * * @param basename the basename of the file that is to be created * * @return the full path to lof file created, else null if no file is written out. 
*/ public String generateListofFilenamesFile( Set files, String basename ){ //sanity check if ( files == null || files.isEmpty() ){ return null; } String result = null; //writing the stdin file try { File f = new File( mSubmitDir, basename ); FileWriter input; input = new FileWriter( f ); PegasusFile pf; for( Iterator it = files.iterator(); it.hasNext(); ){ pf = ( PegasusFile ) it.next(); input.write( pf.getLFN() ); input.write( "\n" ); } //close the stream input.close(); result = f.getAbsolutePath(); } catch ( IOException e) { mLogger.log("Unable to write the lof file " + basename, e , LogManager.ERROR_MESSAGE_LEVEL); } return result; } /** * Constructs a condor variable in the condor profile namespace * associated with the constituentJob. Overrides any preexisting key values. * * @param constituentJob contains the constituentJob description. * @param key the key of the profile. * @param value the associated value. */ private void construct(Job job, String key, String value){ job.condorVariables.construct(key,value); } /** * Condor Quotes a string * * @param string the string to be quoted. * * @return quoted string. */ private String quote( String string ){ String result; try{ mLogger.log("Unquoted Prejob is " + string, LogManager.DEBUG_MESSAGE_LEVEL); result = CondorQuoteParser.quote( string, false ); mLogger.log("Quoted Prejob is " + result, LogManager.DEBUG_MESSAGE_LEVEL ); } catch (CondorQuoteParserException e) { throw new RuntimeException("CondorQuoting Problem " + e.getMessage()); } return result; } /** * Adds a /bin/rm post constituentJob to kickstart that removes the files passed. * The post jobs is added as an environment variable. * * @param constituentJob the constituentJob in which the post constituentJob needs to be added. * @param files the files to be deleted. */ private void addCleanupPostScript( Job job, List files ){ //sanity check if ( files == null || !mDoStat || files.isEmpty() ) { return; } //do not add if constituentJob already has a postscript specified if( job.envVariables.containsKey( this.KICKSTART_CLEANUP ) ){ mLogger.log( "Not adding lof cleanup as another kickstart cleanup already exists", LogManager.DEBUG_MESSAGE_LEVEL ); return; } StringBuffer ps = new StringBuffer(); //maybe later we might want to pick it up from the TC ps.append( "/bin/rm -rf").append( " " ); for( Iterator it = files.iterator(); it.hasNext(); ){ ps.append( it.next() ).append( " " ); } job.envVariables.construct( this.KICKSTART_CLEANUP, ps.toString() ); return; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/PegasusLite.java0000644000175000017500000011660111757531137027304 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.code.gridstart; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.DefaultStreamGobblerCallback; import edu.isi.pegasus.common.util.StreamGobbler; import edu.isi.pegasus.common.util.StreamGobblerCallback; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.code.GridStart; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.transfer.sls.SLSFactory; import edu.isi.pegasus.planner.transfer.SLS; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.site.classes.FileServer; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.classes.FileTransfer; import edu.isi.pegasus.planner.classes.NameValue; import java.io.BufferedReader; import java.io.File; import java.io.FileOutputStream; import java.io.BufferedWriter; import java.io.FileReader; import java.io.IOException; import java.io.FileWriter; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.PrintWriter; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; /** * This class launches all the jobs using Pegasus Lite a shell script based wrapper. * * The Pegasus Lite shell script for the compute jobs contains the commands to * *

 * 1) create directory on worker node
 * 2) fetch input data files
 * 3) execute the job
 * 4) transfer the output data files
 * 5) cleanup the directory
 * 
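 * An abbreviated pseudo-shell sketch of such a generated wrapper follows;
 * it is illustrative only (the list file names and the work directory
 * variable are assumptions, and the real script is generated at planning
 * time from pegasus-lite-common.sh and the job description):
 * 
 *   #!/bin/bash
 *   . pegasus-lite-common.sh
 *   mkdir -p $workdir && cd $workdir    # 1) create directory
 *   pegasus-transfer < stage-in.lst     # 2) fetch input data files
 *   pegasus-kickstart ./app args        # 3) execute the job
 *   pegasus-transfer < stage-out.lst    # 4) transfer output data files
 *   cd .. && rm -rf $workdir            # 5) cleanup
 * 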
* * * The following property should be set to false to disable the staging of the * SLS files via the first-level staging jobs: * *
 * pegasus.transfer.stage.sls.file     false
 * 
* * To enable this implementation at runtime, set the following property: *
 * pegasus.gridstart PegasusLite
 * 
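 * (Usage note: as with other Pegasus properties, this can also be supplied
 * on the planner command line, e.g. as -Dpegasus.gridstart=PegasusLite.)
 * 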
* * * @author Karan Vahi * @version $Revision: 4781 $ */ public class PegasusLite implements GridStart { private PegasusBag mBag; private ADag mDAG; /** * The basename of the class that is implmenting this. Could have * been determined by reflection. */ public static final String CLASSNAME = "PegasusLite"; /** * The SHORTNAME for this implementation. */ public static final String SHORT_NAME = "pegasus-lite"; /** * The basename of the pegasus lite common shell functions file. */ public static final String PEGASUS_LITE_COMMON_FILE_BASENAME = "pegasus-lite-common.sh"; /** * The logical name of the transformation that creates directories on the * remote execution pools. */ public static final String XBIT_TRANSFORMATION = "chmod"; /** * The basename of the pegasus dirmanager executable. */ public static final String XBIT_EXECUTABLE_BASENAME = "chmod"; /** * The transformation namespace for the setXBit jobs. */ public static final String XBIT_TRANSFORMATION_NS = "system"; /** * The version number for the derivations for setXBit jobs. */ public static final String XBIT_TRANSFORMATION_VERSION = null; /** * The derivation namespace for the setXBit jobs. */ public static final String XBIT_DERIVATION_NS = "system"; /** * The version number for the derivations for setXBit jobs. */ public static final String XBIT_DERIVATION_VERSION = null; /** * Stores the major version of the planner. */ private String mMajorVersionLevel; /** * Stores the major version of the planner. */ private String mMinorVersionLevel; /** * Stores the major version of the planner. */ private String mPatchVersionLevel; /** * The LogManager object which is used to log all the messages. */ protected LogManager mLogger; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The submit directory where the submit files are being generated for * the workflow. */ protected String mSubmitDir; /** * The argument string containing the arguments with which the exitcode * is invoked on kickstart output. */ // protected String mExitParserArguments; /** * A boolean indicating whether to generate lof files or not. */ protected boolean mGenerateLOF; /** * A boolean indicating whether to have worker node execution or not. */ protected boolean mWorkerNodeExecution; /** * The handle to the SLS implementor */ protected SLS mSLS; /** * The options passed to the planner. */ protected PlannerOptions mPOptions; /** * Handle to the site catalog store. */ //protected PoolInfoProvider mSiteHandle; protected SiteStore mSiteStore; /** * An instance variable to track if enabling is happening as part of a clustered job. * See Bug 21 comments on Pegasus Bugzilla */ protected boolean mEnablingPartOfAggregatedJob; /** * Handle to kickstart GridStart implementation. */ private Kickstart mKickstartGridStartImpl; /** * Handle to Transformation Catalog. */ private TransformationCatalog mTCHandle; /** * Boolean to track whether to stage sls file or not */ protected boolean mStageSLSFile; /** * The local path on the submit host to pegasus-lite-common.sh */ protected String mLocalPathToPegasusLiteCommon; /** * Boolean indicating whether worker package transfer is enabled or not */ protected boolean mTransferWorkerPackage; /** A map indexed by execution site and the corresponding worker package *location in the submit directory */ Map mWorkerPackageMap ; /** * A map indexed by the execution site and value is the path to chmod on * that site. 
*/ private Map mChmodOnExecutionSiteMap; /** * Initializes the GridStart implementation. * * @param bag the bag of objects that is used for initialization. * @param dag the concrete dag so far. */ public void initialize( PegasusBag bag, ADag dag ){ mBag = bag; mDAG = dag; mLogger = bag.getLogger(); mSiteStore = bag.getHandleToSiteStore(); mPOptions = bag.getPlannerOptions(); mSubmitDir = mPOptions.getSubmitDirectory(); mProps = bag.getPegasusProperties(); mGenerateLOF = mProps.generateLOFFiles(); mTCHandle = bag.getHandleToTransformationCatalog(); mTransferWorkerPackage = mProps.transferWorkerPackage(); if( mTransferWorkerPackage ){ mWorkerPackageMap = bag.getWorkerPackageMap(); if( mWorkerPackageMap == null ){ mWorkerPackageMap = new HashMap(); } } mChmodOnExecutionSiteMap = new HashMap(); Version version = Version.instance(); mMajorVersionLevel = Integer.toString( Version.MAJOR ); mMinorVersionLevel = Integer.toString( Version.MINOR ); mPatchVersionLevel = Integer.toString( Version.PLEVEL ); if( version.toString().endsWith( "cvs" ) ){ mPatchVersionLevel += "cvs"; } mWorkerNodeExecution = mProps.executeOnWorkerNode(); if( mWorkerNodeExecution ){ //load SLS mSLS = SLSFactory.loadInstance( bag ); } else{ //sanity check throw new RuntimeException( "PegasusLite only works if worker node execution is set. Please set " + PegasusProperties.PEGASUS_WORKER_NODE_EXECUTION_PROPERTY + " to true ."); } //pegasus lite needs to disable invoke functionality mProps.setProperty( PegasusProperties.DISABLE_INVOKE_PROPERTY, "true" ); mEnablingPartOfAggregatedJob = false; mKickstartGridStartImpl = new Kickstart(); mKickstartGridStartImpl.initialize( bag, dag ); //for pegasus lite we dont want ot use the full path, unless //a user has specifically catalogued in the transformation catalog mKickstartGridStartImpl.useFullPathToGridStarts( false ); //for pegasus-lite work, worker node execution is no //longer handled in kickstart/no kickstart cases //mKickstartGridStartImpl.mWorkerNodeExecution = false; mStageSLSFile = mProps.stageSLSFilesViaFirstLevelStaging(); mLocalPathToPegasusLiteCommon = getSubmitHostPathToPegasusLiteCommon( ); } /** * Enables a job to run on the grid. This also determines how the * stdin,stderr and stdout of the job are to be propogated. * To grid enable a job, the job may need to be wrapped into another * job, that actually launches the job. It usually results in the job * description passed being modified modified. * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param isGlobusJob is true, if the job generated a * line universe = globus, and thus runs remotely. * Set to false, if the job runs on the submit * host in any way. * * @return boolean true if enabling was successful,else false. 
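 *
 * Note: at present clustered (aggregated) jobs are treated the same as
 * normal jobs; this method only disables kickstart directory creation for
 * each constituent job and then delegates to enable( Job, boolean ).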
*/ public boolean enable( AggregatedJob job,boolean isGlobusJob){ //in pegasus lite mode we dont want kickstart to change or create //worker node directories for( Iterator it = job.constituentJobsIterator(); it.hasNext() ; ){ Job j = (Job) it.next(); j.vdsNS.construct( Pegasus.CHANGE_DIR_KEY , "false" ); j.vdsNS.construct( Pegasus.CREATE_AND_CHANGE_DIR_KEY, "false" ); } //for time being we treat clustered jobs same as normal jobs //in pegasus-lite return this.enable( (Job)job, isGlobusJob ); /* boolean result = true; if( mWorkerNodeExecution ){ File jobWrapper = wrapJobWithPegasusLite( job, isGlobusJob ); //the .sh file is set as the executable for the job //in addition to setting transfer_executable as true job.setRemoteExecutable( jobWrapper.getAbsolutePath() ); job.condorVariables.construct( "transfer_executable", "true" ); } return result; */ } /** * Enables a job to run on the grid by launching it directly. It ends * up running the executable directly without going through any intermediate * launcher executable. It connects the stdio, and stderr to underlying * condor mechanisms so that they are transported back to the submit host. * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param isGlobusJob is true, if the job generated a * line universe = globus, and thus runs remotely. * Set to false, if the job runs on the submit * host in any way. * * @return boolean true if enabling was successful,else false in case when * the path to kickstart could not be determined on the site where * the job is scheduled. */ public boolean enable(Job job, boolean isGlobusJob) { //take care of relative submit directory if specified String submitDir = mSubmitDir + mSeparator; //NOT CLEAR HOW Pegasus-Lite will handle stdout and stdin // handle stdin if (job.stdIn.length() > 0) { construct(job,"input",submitDir + job.stdIn); if (isGlobusJob) { //this needs to be true as you want the stdin //to be transfered to the remote execution //pool, as in case of the transfer script. //it needs to be set if the stdin is already //prepopulated at the remote side which //it is not. construct(job,"transfer_input","true"); } } if (job.stdOut.length() > 0) { //handle stdout construct(job,"output",job.stdOut); if (isGlobusJob) { construct(job,"transfer_output","false"); } } else { // transfer output back to submit host, if unused construct(job,"output",submitDir + job.jobName + ".out"); if (isGlobusJob) { construct(job,"transfer_output","true"); } } if (job.stdErr.length() > 0) { //handle stderr construct(job,"error",job.stdErr); if (isGlobusJob) { construct(job,"transfer_error","false"); } } else { // transfer error back to submit host, if unused construct(job,"error",submitDir + job.jobName + ".err"); if (isGlobusJob) { construct(job,"transfer_error","true"); } } //consider case for non worker node execution first if( !mWorkerNodeExecution ){ //shared filesystem case. //for now a single job is launched via kickstart only //no point launching it via seqexec and then kickstart return mKickstartGridStartImpl.enable( job, isGlobusJob ); }//end of handling of non worker node execution else{ //handle stuff differently enableForWorkerNodeExecution( job, isGlobusJob ); }//end of worker node execution if( mGenerateLOF ){ //but generate lof files nevertheless //inefficient check here again. just a prototype //we need to generate -S option only for non transfer jobs //generate the list of filenames file for the input and output files. if (! 
(job instanceof TransferJob)) { generateListofFilenamesFile( job.getInputFiles(), job.getID() + ".in.lof"); } //for cleanup jobs no generation of stats for output files if (job.getJobType() != Job.CLEANUP_JOB) { generateListofFilenamesFile(job.getOutputFiles(), job.getID() + ".out.lof"); } }///end of mGenerateLOF return true; } /** * Enables jobs for worker node execution. * * * * @param job the job to be enabled. * @param isGlobusJob is true, if the job generated a * line universe = globus, and thus runs remotely. * Set to false, if the job runs on the submit * host in any way. */ private void enableForWorkerNodeExecution(Job job, boolean isGlobusJob ) { if( job.getJobType() == Job.COMPUTE_JOB ){ //in pegasus lite mode we dont want kickstart to change or create //worker node directories job.vdsNS.construct( Pegasus.CHANGE_DIR_KEY , "false" ); job.vdsNS.construct( Pegasus.CREATE_AND_CHANGE_DIR_KEY, "false" ); File jobWrapper = wrapJobWithPegasusLite( job, isGlobusJob ); //the job wrapper requires the common functions file //from the submit host job.condorVariables.addIPFileForTransfer( this.mLocalPathToPegasusLiteCommon ); //figure out transfer of worker package if( mTransferWorkerPackage ){ //sanity check to see if PEGASUS_HOME is defined if( mSiteStore.getEnvironmentVariable( job.getSiteHandle(), "PEGASUS_HOME" ) == null ){ //yes we need to add from the location in the worker package map String location = this.mWorkerPackageMap.get( job.getSiteHandle() ); if( location == null ){ throw new RuntimeException( "Unable to figure out worker package location for job " + job.getID() ); } job.condorVariables.addIPFileForTransfer(location); } else{ mLogger.log( "No worker package staging for job " + job.getSiteHandle() + " PEGASUS_HOME specified in the site catalog for site " + job.getSiteHandle(), LogManager.DEBUG_MESSAGE_LEVEL ); } } //the .sh file is set as the executable for the job //in addition to setting transfer_executable as true job.setRemoteExecutable( jobWrapper.getAbsolutePath() ); job.condorVariables.construct( "transfer_executable", "true" ); } //for all auxillary jobs let kickstart figure what to do else{ mKickstartGridStartImpl.enable( job, isGlobusJob ); } } /** * Indicates whether the enabling mechanism can set the X bit * on the executable on the remote grid site, in addition to launching * it on the remote grid stie * * @return false, as no wrapper executable is being used. */ public boolean canSetXBit(){ return false; } /** * Returns the value of the vds profile with key as Pegasus.GRIDSTART_KEY, * that would result in the loading of this particular implementation. * It is usually the name of the implementing class without the * package name. * * @return the value of the profile key. * @see org.griphyn.cPlanner.namespace.Pegasus#GRIDSTART_KEY */ public String getVDSKeyValue(){ return PegasusLite.CLASSNAME; } /** * Returns a short textual description in the form of the name of the class. * * @return short textual description. */ public String shortDescribe(){ return PegasusLite.SHORT_NAME; } /** * Returns the SHORT_NAME for the POSTScript implementation that is used * to be as default with this GridStart implementation. * * @return the identifier for the default POSTScript implementation for * kickstart gridstart module. 
* * @see Kickstart#defaultPOSTScript() */ public String defaultPOSTScript(){ return this.mKickstartGridStartImpl.defaultPOSTScript(); } /** * Returns the directory that is associated with the job to specify * the directory in which the job needs to run * * @param job the job * * @return the condor key . can be initialdir or remote_initialdir */ private String getDirectoryKey(Job job) { /* String style = (String)job.vdsNS.get( Pegasus.STYLE_KEY ); //remove the remote or initial dir's for the compute jobs String key = ( style.equalsIgnoreCase( Pegasus.GLOBUS_STYLE ) )? "remote_initialdir" : "initialdir"; */ String universe = (String) job.condorVariables.get( Condor.UNIVERSE_KEY ); return ( universe.equals( Condor.STANDARD_UNIVERSE ) || universe.equals( Condor.LOCAL_UNIVERSE) || universe.equals( Condor.SCHEDULER_UNIVERSE ) )? "initialdir" : "remote_initialdir"; } /** * Returns a boolean indicating whether to remove remote directory * information or not from the job. This is determined on the basis of the * style key that is associated with the job. * * @param job the job in question. * * @return boolean */ private boolean removeDirectoryKey(Job job){ String style = job.vdsNS.containsKey(Pegasus.STYLE_KEY) ? null : (String)job.vdsNS.get(Pegasus.STYLE_KEY); //is being run. Remove remote_initialdir if there //condor style associated with the job //Karan Nov 15,2005 return (style == null)? false: style.equalsIgnoreCase(Pegasus.CONDOR_STYLE); } /** * Constructs a condor variable in the condor profile namespace * associated with the job. Overrides any preexisting key values. * * @param job contains the job description. * @param key the key of the profile. * @param value the associated value. */ private void construct(Job job, String key, String value){ job.condorVariables.construct(key,value); } /** * Writes out the list of filenames file for the job. * * @param files the list of PegasusFile objects contains the files * whose stat information is required. * * @param basename the basename of the file that is to be created * * @return the full path to lof file created, else null if no file is written out. */ public String generateListofFilenamesFile( Set files, String basename ){ //sanity check if ( files == null || files.isEmpty() ){ return null; } String result = null; //writing the stdin file try { File f = new File( mSubmitDir, basename ); FileWriter input; input = new FileWriter( f ); PegasusFile pf; for( Iterator it = files.iterator(); it.hasNext(); ){ pf = ( PegasusFile ) it.next(); input.write( pf.getLFN() ); input.write( "\n" ); } //close the stream input.close(); result = f.getAbsolutePath(); } catch ( IOException e) { mLogger.log("Unable to write the lof file " + basename, e , LogManager.ERROR_MESSAGE_LEVEL); } return result; } /** * Returns the directory in which the job executes on the worker node. * * @param job * * @return the full path to the directory where the job executes */ public String getWorkerNodeDirectory( Job job ){ //for pegasus-lite for time being we rely on //$PWD that is resolved in the directory at runtime return "$PWD"; } /** * Generates a seqexec input file for the job. The function first enables the * job via kickstart module for worker node execution and then retrieves * the commands to put in the input file from the environment variables specified * for kickstart. * * It creates a single input file for the seqexec invocation. * The input file contains commands to * *
     * 1) create directory on worker node
     * 2) fetch input data files
     * 3) execute the job
     * 4) transfer the output data files
     * 5) cleanup the directory
     * 
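 *
 * As a sketch (assembled from this method's code; exact contents vary per
 * job and the values below are illustrative), the generated wrapper is of
 * the form:
 *
 *   #!/bin/bash
 *   set -e
 *   pegasus_lite_version_major="4"
 *   . pegasus-lite-common.sh
 *   pegasus_lite_init
 *   trap pegasus_lite_exit INT TERM EXIT
 *   pegasus_lite_setup_work_dir
 *   pegasus_lite_worker_package
 *   # stage in  (transfer invocation reading a heredoc, if required)
 *   # set the xbit for any staged executables (chmod +x ..., if required)
 *   # execute the tasks (the kickstart-enabled invocation)
 *   # stage out (transfer invocation reading a heredoc, if required)
 *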
* * @param job the job to be enabled. * @param isGlobusJob is true, if the job generated a * line universe = globus, and thus runs remotely. * Set to false, if the job runs on the submit * host in any way. * * @return the file handle to the seqexec input file */ protected File wrapJobWithPegasusLite(Job job, boolean isGlobusJob) { File shellWrapper = new File( mSubmitDir, job.getID() + ".sh" ); // Removed for JIRA PM-543 // // //remove the remote or initial dir's for the compute jobs // String key = getDirectoryKey( job ); // // String exectionSiteDirectory = (String)job.condorVariables.removeKey( key ); FileServer stagingSiteFileServer = mSiteStore.lookup( job.getStagingSiteHandle() ).getHeadNodeFS().selectScratchSharedFileServer(); String stagingSiteDirectory = mSiteStore.getExternalWorkDirectory(stagingSiteFileServer, job.getStagingSiteHandle() ); String workerNodeDir = getWorkerNodeDirectory( job ); try{ OutputStream ostream = new FileOutputStream( shellWrapper , true ); PrintWriter writer = new PrintWriter( new BufferedWriter(new OutputStreamWriter(ostream)) ); StringBuffer sb = new StringBuffer( ); sb.append( "#!/bin/bash" ).append( '\n' ); sb.append( "set -e" ).append( '\n' ); sb.append( "pegasus_lite_version_major=\"" ).append( this.mMajorVersionLevel ).append( "\"").append( '\n' ); sb.append( "pegasus_lite_version_minor=\"" ).append( this.mMinorVersionLevel ).append( "\"").append( '\n' ); sb.append( "pegasus_lite_version_patch=\"" ).append( this.mPatchVersionLevel ).append( "\"").append( '\n' ); sb.append( '\n' ); sb.append( ". " ).append( PegasusLite.PEGASUS_LITE_COMMON_FILE_BASENAME ).append( '\n' ); sb.append( '\n' ); sb.append( "pegasus_lite_init\n" ); sb.append( '\n' ); sb.append( "# cleanup in case of failures" ).append( '\n' ); sb.append( "trap pegasus_lite_exit INT TERM EXIT" ).append( '\n' ); sb.append( '\n' ); sb.append( "# work dir" ).append( '\n' ); if( mSLS.doesCondorModifications() ){ //when using condor IO with pegasus lite we dont want //pegasus lite to change the directory where condor //launches the jobs sb.append( "export pegasus_lite_work_dir=$PWD" ).append( '\n' ); } sb.append( "pegasus_lite_setup_work_dir" ).append( '\n' ); sb.append( '\n' ); sb.append( "# figure out the worker package to use" ).append( '\n' ); sb.append( "pegasus_lite_worker_package" ).append( '\n' ); sb.append( '\n' ); if( mSLS.needsSLSInputTransfers( job ) ){ //generate the sls file with the mappings in the submit exectionSiteDirectory Collection files = mSLS.determineSLSInputTransfers( job, mSLS.getSLSInputLFN( job ), mSubmitDir, stagingSiteDirectory, workerNodeDir ); sb.append( "# stage in " ).append( '\n' ); sb.append( mSLS.invocationString( job, null ) ); sb.append( " 1>&2" ).append( " << EOF" ).append( '\n' ); sb.append( convertToTransferInputFormat( files ) ); sb.append( "EOF" ).append( '\n' ); sb.append( '\n' ); //associate any credentials if required with the job associateCredentials( job, files ); } if( job.userExecutablesStagedForJob() ){ sb.append( "# set the xbit for any executables staged" ).append( '\n' ); sb.append( getPathToChmodExecutable( job.getSiteHandle() ) ); sb.append( " +x " ); for( Iterator it = job.getInputFiles().iterator(); it.hasNext(); ){ PegasusFile pf = ( PegasusFile )it.next(); if( pf.getType() == PegasusFile.EXECUTABLE_FILE ){ sb.append( pf.getLFN() ).append( " " ); } } sb.append( '\n' ); sb.append( '\n' ); } sb.append( "# execute the tasks" ).append( '\n' ); writer.print( sb.toString() ); writer.flush(); sb = new StringBuffer(); //enable the job via 
            // kickstart; separate calls for aggregated and normal jobs
            if( job instanceof AggregatedJob ){
                this.mKickstartGridStartImpl.enable( (AggregatedJob)job, isGlobusJob );

                //for clustered jobs we embed the contents of the input
                //file in the shell wrapper itself
                sb.append( job.getRemoteExecutable() ).append( " " ).append( job.getArguments() );
                sb.append( " << EOF" ).append( '\n' );
                sb.append( slurpInFile( mSubmitDir, job.getStdIn() ) );
                sb.append( "EOF" ).append( '\n' );

                //reset the job's stdin
                job.setStdIn( "" );
                job.condorVariables.removeKey( "input" );
            }
            else{
                this.mKickstartGridStartImpl.enable( job, isGlobusJob );
                sb.append( job.getRemoteExecutable() ).append( " " ).append( job.getArguments() ).append( '\n' );
            }
            sb.append( '\n' );

            //the pegasus lite wrapped job itself does not have any
            //arguments passed
            job.setArguments( "" );

            if( mSLS.needsSLSOutputTransfers( job ) ){
                //construct the post job that transfers the output files
                //back to the head node execution site directory.
                //to fix later: right now the post job is only created if the pre job is created
                Collection files = mSLS.determineSLSOutputTransfers( job,
                                                                     mSLS.getSLSOutputLFN( job ),
                                                                     mSubmitDir,
                                                                     stagingSiteDirectory,
                                                                     workerNodeDir );

                //generate the post job
                String postJob = mSLS.invocationString( job, null );
                sb.append( "# stage out" ).append( '\n' );
                sb.append( postJob );
                sb.append( " 1>&2" ).append( " << EOF" ).append( '\n' );
                sb.append( convertToTransferInputFormat( files ) );
                sb.append( "EOF" ).append( '\n' );
                sb.append( '\n' );

                //associate any credentials if required with the job
                associateCredentials( job, files );
            }

            writer.print( sb.toString() );
            writer.flush();

            writer.close();
            ostream.close();

            //set the xbit on the shell script
            //for 3.2, we will have 1.6 as the minimum jdk requirement
            shellWrapper.setExecutable( true );

            //JIRA PM-543
            job.setDirectory( null );

            //this.setXBitOnFile( shellWrapper.getAbsolutePath() );
        }
        catch( IOException ioe ){
            throw new RuntimeException( "[Pegasus-Lite] Error while writing out pegasus lite wrapper " + shellWrapper , ioe );
        }

        //modify the job if required
        if ( !mSLS.modifyJobForWorkerNodeExecution( job,
                                                    stagingSiteFileServer.getURLPrefix(),
                                                    stagingSiteDirectory,
                                                    workerNodeDir ) ){
            throw new RuntimeException( "Unable to modify job " + job.getName() + " for worker node execution" );
        }

        return shellWrapper;
    }

    /**
     * Converts the collection of files into an input format suitable for the
     * transfer executable.
     *
     * @param files  Collection of FileTransfer objects.
     *
     * @return the blurb containing the files in the input format for the transfer
     *         executable
     */
    protected StringBuffer convertToTransferInputFormat( Collection files ){
        StringBuffer sb = new StringBuffer();

        int num = 1;
        for( FileTransfer ft : files ){
            NameValue nv = ft.getSourceURL();
            sb.append( "# " ).append( "src " ).append( num ).append( " " ).append( nv.getKey() ).append( '\n' );
            sb.append( nv.getValue() );
            sb.append( '\n' );

            nv = ft.getDestURL();
            sb.append( "# " ).append( "dst " ).append( num ).append( " " ).append( nv.getKey() ).append( '\n' );
            sb.append( nv.getValue() );
            sb.append( '\n' );

            num++;
        }
        return sb;
    }

    /**
     * Convenience method to slurp in the contents of a file into memory.
     *
     * @param directory  the directory where the file resides
     * @param file       the file to be slurped in.
* * @return StringBuffer containing the contents */ protected StringBuffer slurpInFile( String directory, String file ) throws IOException{ StringBuffer result = new StringBuffer(); //sanity check if( file == null ){ return result; } BufferedReader in = new BufferedReader( new FileReader( new File( directory, file )) ); String line = null; while(( line = in.readLine() ) != null ){ //System.out.println( line ); result.append( line ).append( '\n' ); } in.close(); return result; } /** * Returns the path to the chmod executable for a particular execution * site by looking up the transformation executable. * * @param site the execution site. * * @return the path to chmod executable */ protected String getPathToChmodExecutable( String site ){ String path; //check if the internal map has anything path = mChmodOnExecutionSiteMap.get( site ); if( path != null ){ //return the cached path return path; } List entries; try { //try to look up the transformation catalog for the path entries = mTCHandle.lookup( PegasusLite.XBIT_TRANSFORMATION_NS, PegasusLite.XBIT_TRANSFORMATION, PegasusLite.XBIT_TRANSFORMATION_VERSION, site, TCType.INSTALLED ); } catch (Exception e) { //non sensical catching mLogger.log("Unable to retrieve entries from TC " + e.getMessage(), LogManager.ERROR_MESSAGE_LEVEL ); return null; } TransformationCatalogEntry entry = ( entries == null ) ? null: //try using a default one (TransformationCatalogEntry) entries.get(0); if( entry == null ){ //construct the path the default path. //construct the path to it StringBuffer sb = new StringBuffer(); sb.append( File.separator ).append( "bin" ).append( File.separator ). append( PegasusLite.XBIT_EXECUTABLE_BASENAME ); path = sb.toString(); } else{ path = entry.getPhysicalTransformation(); } mChmodOnExecutionSiteMap.put( site, path ); return path; } /** * Sets the xbit on the file. * * @param file the file for which the xbit is to be set * * @return boolean indicating whether xbit was set or not. */ protected boolean setXBitOnFile( String file ) { boolean result = false; //do some sanity checks on the source and the destination File f = new File( file ); if( !f.exists() || !f.canRead()){ mLogger.log("The file does not exist " + file, LogManager.ERROR_MESSAGE_LEVEL); return result; } try{ //set the callback and run the grep command Runtime r = Runtime.getRuntime(); String command = "chmod +x " + file; mLogger.log("Setting xbit " + command, LogManager.DEBUG_MESSAGE_LEVEL); Process p = r.exec(command); //the default gobbler callback always log to debug level StreamGobblerCallback callback = new DefaultStreamGobblerCallback(LogManager.DEBUG_MESSAGE_LEVEL); //spawn off the gobblers with the already initialized default callback StreamGobbler ips = new StreamGobbler(p.getInputStream(), callback); StreamGobbler eps = new StreamGobbler(p.getErrorStream(), callback); ips.start(); eps.start(); //wait for the threads to finish off ips.join(); eps.join(); //get the status int status = p.waitFor(); if( status != 0){ mLogger.log("Command " + command + " exited with status " + status, LogManager.DEBUG_MESSAGE_LEVEL); return result; } result = true; } catch(IOException ioe){ mLogger.log("IOException while creating symbolic links ", ioe, LogManager.ERROR_MESSAGE_LEVEL); } catch( InterruptedException ie){ //ignore } return result; } /** * Determines the path to common shell functions file that Pegasus Lite * wrapped jobs use. * * @return the path on the submit host. 
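 *
 * The returned path has the form <share-dir>/sh/pegasus-lite-common.sh,
 * where <share-dir> is the Pegasus share directory obtained from the
 * properties; a RuntimeException is thrown if that property is not set.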
*/ protected String getSubmitHostPathToPegasusLiteCommon() { StringBuffer path = new StringBuffer(); //first get the path to the share directory File share = mProps.getSharedDir(); if( share == null ){ throw new RuntimeException( "Property for Pegasus share directory is not set" ); } path.append( share.getAbsolutePath() ).append( File.separator ). append( "sh" ).append( File.separator ).append( PegasusLite.PEGASUS_LITE_COMMON_FILE_BASENAME ); return path.toString(); } public void useFullPathToGridStarts(boolean fullPath) { throw new UnsupportedOperationException("Not supported yet."); } /** * Associates credentials with the job corresponding to the files that * are being transferred. * * @param job the job for which credentials need to be added. * @param files the files that are being transferred. */ private void associateCredentials(Job job, Collection files) { for( FileTransfer ft: files ){ job.addCredentialType( ft.getSourceURL().getValue() ); job.addCredentialType( ft.getDestURL().getValue() ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/ExitCode.java0000644000175000017500000000332111757531137026555 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.gridstart; import java.io.File; /** * The exitcode wrapper, that can parse kickstart output's and put them in the * database also. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4507 $ */ public class ExitCode extends VDSPOSTScript { /** * The SHORTNAME for this implementation. */ public static final String SHORT_NAME = "exitcode"; /** * The default constructor. */ public ExitCode(){ super(); } /** * Returns a short textual description of the implementing class. * * @return short textual description. */ public String shortDescribe(){ return this.SHORT_NAME; } /** * Returns the path to exitcode that is to be used on the kickstart * output. * * @return the path to the exitcode script to be invoked. */ public String getDefaultExitCodePath(){ StringBuffer sb = new StringBuffer(); sb.append(mProps.getBinDir()); sb.append(File.separator).append("exitcode"); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/NoGridStart.java0000644000175000017500000005521711757531137027264 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.code.gridstart; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.site.classes.FileServer; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.code.GridStart; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.code.POSTScript; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.PegasusFile; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.transfer.sls.SLSFactory; import edu.isi.pegasus.planner.transfer.SLS; import edu.isi.pegasus.planner.namespace.Pegasus; import java.io.File; import java.io.FileInputStream; import java.util.Collection; import java.util.Iterator; import java.util.Set; import java.io.IOException; import java.io.FileWriter; import java.io.InputStream; import java.io.OutputStream; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.cluster.JobAggregator; import edu.isi.pegasus.planner.namespace.Condor; /** * This class ends up running the job directly on the grid, without wrapping * it in any other launcher executable. * It ends up connecting the jobs stdio and stderr to condor commands to * ensure they are sent back to the submit host. * * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4731 $ */ public class NoGridStart implements GridStart { private PegasusBag mBag; private ADag mDAG; /** * The basename of the class that is implmenting this. Could have * been determined by reflection. */ public static final String CLASSNAME = "NoGridStart"; /** * The SHORTNAME for this implementation. */ public static final String SHORT_NAME = "none"; /** * The LogManager object which is used to log all the messages. */ protected LogManager mLogger; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The submit directory where the submit files are being generated for * the workflow. */ protected String mSubmitDir; /** * The argument string containing the arguments with which the exitcode * is invoked on kickstart output. */ protected String mExitParserArguments; /** * A boolean indicating whether to generate lof files or not. */ protected boolean mGenerateLOF; /** * A boolean indicating whether to have worker node execution or not. */ //protected boolean mWorkerNodeExecution; /** * The handle to the SLS implementor */ protected SLS mSLS; /** * The options passed to the planner. */ protected PlannerOptions mPOptions; /** * Handle to the site catalog store. */ //protected PoolInfoProvider mSiteHandle; protected SiteStore mSiteStore; /** * An instance variable to track if enabling is happening as part of a clustered job. * See Bug 21 comments on Pegasus Bugzilla */ protected boolean mEnablingPartOfAggregatedJob; /** * Boolean indicating whether worker package staging is enabled or not. */ protected boolean mWorkerPackageStagingEnabled; /** * Initializes the GridStart implementation. * * @param bag the bag of objects that is used for initialization. * @param dag the concrete dag so far. 
*/ public void initialize( PegasusBag bag, ADag dag ){ mBag = bag; mDAG = dag; mLogger = bag.getLogger(); mSiteStore = bag.getHandleToSiteStore(); mPOptions = bag.getPlannerOptions(); mSubmitDir = mPOptions.getSubmitDirectory(); mProps = bag.getPegasusProperties(); mGenerateLOF = mProps.generateLOFFiles(); mWorkerPackageStagingEnabled = mProps.transferWorkerPackage(); // mExitParserArguments = getExitCodeArguments(); /* JIRA PM-495 mWorkerNodeExecution = mProps.executeOnWorkerNode(); if( mWorkerNodeExecution ){ //load SLS mSLS = SLSFactory.loadInstance( bag ); } */ mEnablingPartOfAggregatedJob = false; } /** * Enables a collection of jobs and puts them into an AggregatedJob. * The assumption here is that all the jobs are being enabled by the same * implementation. It enables the jobs and puts them into the AggregatedJob * that is passed to it. * * @param aggJob the AggregatedJob into which the collection has to be * integrated. * @param jobs the collection of jobs (Job) that need to be enabled. * * @return the AggregatedJob containing the enabled jobs. * @see #enable(Job,boolean) */ public AggregatedJob enable(AggregatedJob aggJob,Collection jobs){ //sanity check for the arguments if( aggJob.strargs != null && aggJob.strargs.length() > 0){ //construct( aggJob, "arguments", aggJob.strargs); // the arguments are no longer set as condor profiles // they are now set to the corresponding profiles in // the Condor Code Generator only. aggJob.setArguments( aggJob.strargs ); } //we do not want the jobs being clustered to be enabled //for worker node execution just yet. mEnablingPartOfAggregatedJob = true; for (Iterator it = jobs.iterator(); it.hasNext(); ) { Job job = (Job)it.next(); //always pass isGlobus true as always //interested only in executable strargs this.enable(job, true); aggJob.add(job); } //set the flag back to false mEnablingPartOfAggregatedJob = false; return aggJob; } /** * Enables a job to run on the grid. This also determines how the * stdin,stderr and stdout of the job are to be propogated. * To grid enable a job, the job may need to be wrapped into another * job, that actually launches the job. It usually results in the job * description passed being modified modified. * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param isGlobusJob is true, if the job generated a * line universe = globus, and thus runs remotely. * Set to false, if the job runs on the submit * host in any way. * * @return boolean true if enabling was successful,else false. 
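 *
 * Each constituent job is enabled individually; every constituent after
 * the first is passed -H via the Pegasus.GRIDSTART_ARGUMENTS_KEY profile to
 * suppress the kickstart header, after which the associated job aggregator
 * renders the clustered job into its executable form.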
*/ public boolean enable( AggregatedJob job,boolean isGlobusJob){ //get hold of the JobAggregator determined for this clustered job //during clustering JobAggregator aggregator = job.getJobAggregator(); if( aggregator == null ){ throw new RuntimeException( "Clustered job not associated with a job aggregator " + job.getID() ); } boolean first = true; for (Iterator it = job.constituentJobsIterator(); it.hasNext(); ) { Job constituentJob = (Job)it.next(); //earlier was set in SeqExec JobAggregator in the enable function constituentJob.vdsNS.construct( Pegasus.GRIDSTART_KEY, this.getVDSKeyValue() ); if(first){ first = false; } else{ //we need to pass -H to kickstart //to suppress the header creation constituentJob.vdsNS.construct(Pegasus.GRIDSTART_ARGUMENTS_KEY,"-H"); } //always pass isGlobus true as always //interested only in executable strargs //due to the fact that seqexec does not allow for setting environment //per constitutent constituentJob, we cannot set the postscript removal option this.enable( constituentJob, isGlobusJob ); } //all the constitutent jobs are enabled. //get the job aggregator to render the job //to it's executable form aggregator.makeAbstractAggregatedJobConcrete( job ); //set the flag back to false //mEnablingPartOfAggregatedJob = false; //the aggregated job itself needs to be enabled via NoGridStart this.enable( (Job)job, isGlobusJob); return true; } /** * Enables a job to run on the grid by launching it directly. It ends * up running the executable directly without going through any intermediate * launcher executable. It connects the stdio, and stderr to underlying * condor mechanisms so that they are transported back to the submit host. * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param isGlobusJob is true, if the job generated a * line universe = globus, and thus runs remotely. * Set to false, if the job runs on the submit * host in any way. * * @return boolean true if enabling was successful,else false in case when * the path to kickstart could not be determined on the site where * the job is scheduled. */ public boolean enable(Job job, boolean isGlobusJob) { //take care of relative submit directory if specified String submitDir = mSubmitDir + mSeparator; // String submitDir = getSubmitDirectory( mSubmitDir , job) + mSeparator; // the arguments are no longer set as condor profiles // they are now set to the corresponding profiles in // the Condor Code Generator only. job.setRemoteExecutable( handleTransferOfExecutable( job ) ); //JIRA PM-543 //set the directory key with the job if( requiresToSetDirectory( job ) ){ job.setDirectory( this.getDirectory( job ) ); } /* //the executable path and arguments are put //in the Condor namespace and not printed to the //file so that they can be overriden if desired //later through profiles and key transfer_executable construct(job,"executable", handleTransferOfExecutable( job ) ); //sanity check for the arguments if(job.strargs != null && job.strargs.length() > 0){ construct(job, "arguments", job.strargs); } */ // handle stdin if (job.stdIn.length() > 0) { construct(job,"input",submitDir + job.stdIn); if (isGlobusJob) { //this needs to be true as you want the stdin //to be transfered to the remote execution //pool, as in case of the transfer script. //it needs to be set if the stdin is already //prepopulated at the remote side which //it is not. 
construct(job,"transfer_input","true"); } } if (job.stdOut.length() > 0) { //handle stdout construct(job,"output",job.stdOut); if (isGlobusJob) { construct(job,"transfer_output","false"); } } else { // transfer output back to submit host, if unused construct(job,"output",submitDir + job.jobName + ".out"); if (isGlobusJob) { construct(job,"transfer_output","true"); } } if (job.stdErr.length() > 0) { //handle stderr construct(job,"error",job.stdErr); if (isGlobusJob) { construct(job,"transfer_error","false"); } } else { // transfer error back to submit host, if unused construct(job,"error",submitDir + job.jobName + ".err"); if (isGlobusJob) { construct(job,"transfer_error","true"); } } if( mGenerateLOF ){ //but generate lof files nevertheless //inefficient check here again. just a prototype //we need to generate -S option only for non transfer jobs //generate the list of filenames file for the input and output files. if (! (job instanceof TransferJob)) { generateListofFilenamesFile( job.getInputFiles(), job.getID() + ".in.lof"); } //for cleanup jobs no generation of stats for output files if (job.getJobType() != Job.CLEANUP_JOB) { generateListofFilenamesFile(job.getOutputFiles(), job.getID() + ".out.lof"); } }///end of mGenerateLOF return true; } /** * It changes the paths to the executable depending on whether we want to * transfer the executable or not. Currently, the transfer_executable is only * handled for staged compute jobs, where Pegasus is staging the binaries * to the remote site. * * @param job the Job containing the job description. * * @return the path that needs to be set as the executable key. If * transfer_executable is not set the path to the executable is * returned as is. */ protected String handleTransferOfExecutable( Job job ) { Condor cvar = job.condorVariables; String path = job.executable; if ( cvar.getBooleanValue( "transfer_executable" )) { //explicitly check for whether the job is a staged compute job or not // if( job.getJobType() == Job.STAGED_COMPUTE_JOB ){ if( job.userExecutablesStagedForJob() ){ //the executable is being staged to the remote site. //all we need to do is unset transfer_executable cvar.construct( "transfer_executable", "false" ); } else if ( mWorkerPackageStagingEnabled && job.getJobType() == Job.CREATE_DIR_JOB ){ //we dont complain. //JIRA PM-281 } else{ mLogger.log( "Transfer of Executables in NoGridStart only works for staged computes jobs " + job.getName(), LogManager.ERROR_MESSAGE_LEVEL ); } } else{ //the executable paths are correct and //point to the executable on the remote pool } return path; } /** * Indicates whether the enabling mechanism can set the X bit * on the executable on the remote grid site, in addition to launching * it on the remote grid stie * * @return false, as no wrapper executable is being used. */ public boolean canSetXBit(){ return false; } /** * Returns the value of the vds profile with key as Pegasus.GRIDSTART_KEY, * that would result in the loading of this particular implementation. * It is usually the name of the implementing class without the * package name. * * @return the value of the profile key. * @see org.griphyn.cPlanner.namespace.Pegasus#GRIDSTART_KEY */ public String getVDSKeyValue(){ return this.CLASSNAME; } /** * Returns a short textual description in the form of the name of the class. * * @return short textual description. 
*/ public String shortDescribe(){ return this.SHORT_NAME; } /** * Returns the SHORT_NAME for the POSTScript implementation that is used * to be as default with this GridStart implementation. * * @return the identifier for the NoPOSTScript POSTScript implementation. * * @see POSTScript#shortDescribe() */ public String defaultPOSTScript(){ return NoPOSTScript.SHORT_NAME; } /** * Returns a boolean indicating whether we need to set the directory for * the job or not. * * @param job the job for which to set directory. * * @return */ protected boolean requiresToSetDirectory( Job job ) { //the cleanup jobs should never have directory set as full path //is specified return ( job.getJobType() != Job.CLEANUP_JOB ); } /** * Returns the directory in which the job should run. * * @param job the job in which the directory has to run. * * @return */ protected String getDirectory( Job job ){ String execSiteWorkDir = mSiteStore.getInternalWorkDirectory(job); String workdir = (String) job.globusRSL.removeKey("directory"); // returns old value workdir = (workdir == null)?execSiteWorkDir:workdir; return workdir; } /** * Returns the directory that is associated with the job to specify * the directory in which the job needs to run * * @param job the job * * @return the condor key . can be initialdir or remote_initialdir */ private String getDirectoryKey(Job job) { /* String style = (String)job.vdsNS.get( Pegasus.STYLE_KEY ); //remove the remote or initial dir's for the compute jobs String key = ( style.equalsIgnoreCase( Pegasus.GLOBUS_STYLE ) )? "remote_initialdir" : "initialdir"; */ String universe = (String) job.condorVariables.get( Condor.UNIVERSE_KEY ); return ( universe.equals( Condor.STANDARD_UNIVERSE ) || universe.equals( Condor.LOCAL_UNIVERSE) || universe.equals( Condor.SCHEDULER_UNIVERSE ) )? "initialdir" : "remote_initialdir"; } /** * Returns a boolean indicating whether to remove remote directory * information or not from the job. This is determined on the basis of the * style key that is associated with the job. * * @param job the job in question. * * @return boolean */ private boolean removeDirectoryKey(Job job){ String style = job.vdsNS.containsKey(Pegasus.STYLE_KEY) ? null : (String)job.vdsNS.get(Pegasus.STYLE_KEY); //is being run. Remove remote_initialdir if there //condor style associated with the job //Karan Nov 15,2005 return (style == null)? false: style.equalsIgnoreCase(Pegasus.CONDOR_STYLE); } /** * Constructs a condor variable in the condor profile namespace * associated with the job. Overrides any preexisting key values. * * @param job contains the job description. * @param key the key of the profile. * @param value the associated value. */ private void construct(Job job, String key, String value){ job.condorVariables.construct(key,value); } /** * Returns a string containing the arguments with which the exitcode * needs to be invoked. * * @return the argument string. */ /* private String getExitCodeArguments(){ return mProps.getPOSTScriptArguments(); } */ /** * Writes out the list of filenames file for the job. * * @param files the list of PegasusFile objects contains the files * whose stat information is required. * * @param basename the basename of the file that is to be created * * @return the full path to lof file created, else null if no file is written out. 
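 *
 * The generated .lof file simply lists one logical file name per line,
 * for example (names illustrative):
 *
 *   f.a
 *   f.b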
*/ public String generateListofFilenamesFile( Set files, String basename ){ //sanity check if ( files == null || files.isEmpty() ){ return null; } String result = null; //writing the stdin file try { File f = new File( mSubmitDir, basename ); FileWriter input; input = new FileWriter( f ); PegasusFile pf; for( Iterator it = files.iterator(); it.hasNext(); ){ pf = ( PegasusFile ) it.next(); input.write( pf.getLFN() ); input.write( "\n" ); } //close the stream input.close(); result = f.getAbsolutePath(); } catch ( IOException e) { mLogger.log("Unable to write the lof file " + basename, e , LogManager.ERROR_MESSAGE_LEVEL); } return result; } /** * Adds contents to an output stream. * @param src * @param out * @throws java.io.IOException */ private void addToFile( File src, OutputStream out ) throws IOException{ InputStream in = new FileInputStream(src); // Transfer bytes from in to out byte[] buf = new byte[1024]; int len; while ((len = in.read(buf)) > 0) { out.write(buf, 0, len); } in.close(); } /** * Returns the directory in which the job executes on the worker node. * * @param job * * @return the full path to the directory where the job executes */ public String getWorkerNodeDirectory( Job job ){ StringBuffer workerNodeDir = new StringBuffer(); String destDir = mSiteStore.getEnvironmentVariable( job.getSiteHandle() , "wntmp" ); destDir = ( destDir == null ) ? "/tmp" : destDir; String relativeDir = mPOptions.getRelativeDirectory(); workerNodeDir.append( destDir ).append( File.separator ). append( relativeDir.replaceAll( "/" , "-" ) ). //append( File.separator ).append( job.getCompleteTCName().replaceAll( ":[:]*", "-") ); append( "-" ).append( job.getID() ); return workerNodeDir.toString(); } public void useFullPathToGridStarts(boolean fullPath) { throw new UnsupportedOperationException("Not supported yet."); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/gridstart/UserPOSTScript.java0000644000175000017500000001223411757531137027665 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.gridstart; import edu.isi.pegasus.common.logging.LogManagerFactory; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.code.POSTScript; import java.io.File; /** * A user defined post script. By default, the postscript is given the name * of the job output file on the submit host, to work upon. Additional arguments * to the post script can be specified via properties or profiles. * * The postscript is only constructed if the job already contains the * Dagman profile key passed. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4507 $ */ public class UserPOSTScript implements POSTScript { /** * The SHORTNAME for this implementation. 
*/ public static final String SHORT_NAME = "user"; /** * The LogManager object which is used to log all the messages. */ protected LogManager mLogger; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The path to the user postscript on the submit host. * */ protected String mPOSTScriptPath; /** * The default constructor. */ public UserPOSTScript(){ //mLogger = LogManager.getInstance(); } /** * Initialize the POSTScript implementation. * * @param properties the PegasusProperties object containing all * the properties required by Pegasus. * @param path the path to the POSTScript on the submit host. * @param submitDir the submit directory where the submit file for the job * has to be generated. * * @throws RuntimeException in case of path being null. */ public void initialize( PegasusProperties properties, String path, String submitDir ){ mProps = properties; mPOSTScriptPath = path; mLogger = LogManagerFactory.loadSingletonInstance( properties ); if ( path == null ){ throw new RuntimeException( "Path to user specified postscript not given" ); } } /** * Constructs the postscript that has to be invoked on the submit host * after the job has executed on the remote end. The postscript works on the * stdout of the remote job, that has been transferred back to the submit * host by Condor. *

* The postscript is constructed and populated as a profile * in the DAGMAN namespace. * * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param key the DAGMan profile key that has to be inserted. * * @return boolean true if postscript was generated,else false. */ public boolean construct(Job job, String key) { boolean constructed = false; //see if any specific postscript was specified for this job //get the value user specified for the job String postscript = mPOSTScriptPath; job.dagmanVariables.construct( Dagman.OUTPUT_KEY, (String) job.condorVariables.get( "output" ) ); //arguments are taken care of automatically in DagMan namespace constructed = true; //put in the postscript mLogger.log("Postscript constructed is " + postscript, LogManager.DEBUG_MESSAGE_LEVEL); job.dagmanVariables.checkKeyInNS( key, postscript ); // else{ // //Karan Nov 15,2005 VDS BUG FIX 128 // //Always remove POST_SCRIPT_ARGUMENTS // job.dagmanVariables.removeKey(Dagman.POST_SCRIPT_ARGUMENTS_KEY); // } return constructed; } /** * Returns a short textual description of the implementing class. * * @return short textual description. */ public String shortDescribe(){ return this.SHORT_NAME; } /** * Returns the path to exitcode that is to be used on the kickstart * output. * * @return the path to the exitcode script to be invoked. */ public String getExitCodePath(){ StringBuffer sb = new StringBuffer(); sb.append(mProps.getBinDir()); sb.append(File.separator).append("exitcode"); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/GridStartFactoryException.java0000644000175000017500000000646111757531137030170 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating GridStart implementations. * * @author Karan Vahi * @version $Revision: 2090 $ */ public class GridStartFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "GridStart"; /** * Constructs a GridStartFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public GridStartFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a GridStartFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public GridStartFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a GridStartFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. 
* * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public GridStartFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a GridStartFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public GridStartFactoryException( String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/0000755000175000017500000000000011757531667024202 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/Abstract.java0000644000175000017500000002700611757531137026605 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.code.CodeGenerator; import edu.isi.pegasus.planner.code.CodeGeneratorException; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.classes.PegasusBag; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.Map; /** * An Abstract Base class implementing the CodeGenerator interface. Introduces * helper methods for determining basenames of files, that contain concrete * job descriptions. * * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 3930 $ */ public abstract class Abstract implements CodeGenerator{ /** * The bag of initialization objects. */ protected PegasusBag mBag; /** * The directory where all the submit files are to be generated. */ protected String mSubmitFileDir; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The object containing the command line options specified to the planner * at runtime. */ protected PlannerOptions mPOptions; /** * The LogManager object which is used to log all the messages. */ protected LogManager mLogger; /** * Returns the name of the file on the basis of the metadata associated * with the DAG. 
* In case of Condor dagman, it is the name of the .dag file that is * written out. The basename of the .dag file is dependant on whether the * basename prefix has been specified at runtime or not by command line * options. * * @param dag the dag for which the .dag file has to be created. * @param suffix the suffix to be applied at the end. * * @return the name of the dagfile. */ protected String getDAGFilename( ADag dag, String suffix ){ return getDAGFilename( mPOptions, dag.dagInfo.nameOfADag, dag.dagInfo.index, suffix ); } /** * Returns the name of the file on the basis of the metadata associated * with the DAG. * In case of Condor dagman, it is the name of the .dag file that is * written out. The basename of the .dag file is dependant on whether the * basename prefix has been specified at runtime or not by command line * options. * * @param options the options passed to the planner. * @param name the name attribute in dax * @param index the index attribute in dax. * @param suffix the suffix to be applied at the end. * * @return the name of the dagfile. */ public static String getDAGFilename( PlannerOptions options, String name, String index, String suffix ){ //constructing the name of the dagfile StringBuffer sb = new StringBuffer(); String bprefix = options.getBasenamePrefix(); if( bprefix != null){ //the prefix is not null using it sb.append(bprefix); } else{ //generate the prefix from the name of the dag sb.append( name ).append("-"). append( index ); } //append the suffix sb.append( suffix ); return sb.toString(); } /** * Initializes the Code Generator implementation. * * @param bag the bag of initialization objects. * * @throws CodeGeneratorException in case of any error occuring code generation. */ public void initialize( PegasusBag bag ) throws CodeGeneratorException{ mBag = bag; mProps = bag.getPegasusProperties(); mPOptions = bag.getPlannerOptions(); mSubmitFileDir = mPOptions.getSubmitDirectory(); mLogger = bag.getLogger(); } /** * Starts monitoring of the workflow by invoking a workflow monitor daemon. * The monitoring should start only after the output files have been generated. * FIXME: It should actually happen after the workflow has been submitted. * Eventually should be a separate monitor interface, and submit writers * should be loaded by an AbstractFactory. * * @return boolean indicating whether could successfully start the monitor * daemon or not. */ public boolean startMonitoring(){ //by default not all code generators support monitoring. return false; } /** * Writes out the workflow metrics file for the workflow. * * @param workflow the workflow whose metrics file needs to be generated. */ protected void writeOutWorkflowMetrics( ADag workflow ){ try{ Metrics metrics = new Metrics(); metrics.initialize(mBag); Collection result = metrics.generateCode(workflow ); for( Iterator it = result.iterator(); it.hasNext() ;){ mLogger.log("Written out workflow metrics file to " + it.next(), LogManager.DEBUG_MESSAGE_LEVEL); } } catch(CodeGeneratorException ioe){ //log the message and return mLogger.log("Unable to write out the workflow metrics file ", ioe, LogManager.ERROR_MESSAGE_LEVEL ); } } /** * Writes out the stampedeEventGenerator events for the workflow. * * @param workflow the workflow whose metrics file needs to be generated. 
*/ protected void writeOutStampedeEvents( ADag workflow ) throws CodeGeneratorException{ Stampede stampedeEventGenerator = new Stampede(); stampedeEventGenerator.initialize(mBag); Collection result = stampedeEventGenerator.generateCode( workflow ); for( Iterator it = result.iterator(); it.hasNext() ;){ mLogger.log("Written out stampede events for the executable workflow to " + it.next(), LogManager.DEBUG_MESSAGE_LEVEL); } } /** * Writes out the braindump file for the workflow. * * @param workflow the workflow whose braindump file needs to be generated. */ protected void writeOutBraindump( ADag workflow ){ //generate some extra keys for the braindump file Map entries = getAdditionalBraindumpEntries( workflow ); try{ Braindump braindump = new Braindump(); braindump.initialize(mBag); Collection result = braindump.generateCode(workflow, entries); for( Iterator it = result.iterator(); it.hasNext() ;){ mLogger.log("Written out braindump to " + it.next(), LogManager.DEBUG_MESSAGE_LEVEL); } } catch(CodeGeneratorException ioe){ //log the message and return mLogger.log("Unable to write out the braindump file for pegasus-monitord", ioe, LogManager.ERROR_MESSAGE_LEVEL ); } } /** * Writes out the DAX replica store for the workflow. * * @param workflow the workflow */ protected void writeOutDAXReplicaStore( ADag workflow ){ try{ DAXReplicaStore generator = new DAXReplicaStore (); generator.initialize(mBag); Collection<File> result = generator.generateCode(workflow); for( File f : result){ mLogger.log("Written out dax replica store to " + f.getName(), LogManager.DEBUG_MESSAGE_LEVEL); } } catch(CodeGeneratorException ioe){ //log the message and return mLogger.log("Unable to write out the dax replica store file ", ioe, LogManager.ERROR_MESSAGE_LEVEL ); } } /** * Writes out the notifications input file for the workflow. * * @param workflow the workflow whose notifications file needs to be generated. */ protected void writeOutNotifications( ADag workflow ){ try{ MonitordNotify notifications = new MonitordNotify (); notifications.initialize(mBag); Collection<File> result = notifications.generateCode(workflow); for( File f : result){ mLogger.log("Written out notifications to " + f.getName(), LogManager.DEBUG_MESSAGE_LEVEL); } } catch(CodeGeneratorException ioe){ //log the message and return mLogger.log("Unable to write out the notifications file ", ioe, LogManager.ERROR_MESSAGE_LEVEL ); } } /** * Returns a Map containing additional braindump entries that are specific * to a Code Generator. * * @param workflow the workflow whose braindump file needs to be generated. * * @return Map */ public abstract Map getAdditionalBraindumpEntries( ADag workflow ); /** * Resets the Code Generator implementation. * * @throws CodeGeneratorException in case of any error occurring during code generation. */ public void reset( ) throws CodeGeneratorException{ mSubmitFileDir = null; mProps = null; mPOptions = null; } /** * Returns an open stream to the file that is used for writing out the * job information for the job. * * @param job the job whose job information needs to be written. * * @return the writer to the open file. * @exception IOException if unable to open a write handle to the file. 
*/ public PrintWriter getWriter( Job job ) throws IOException{ // String jobDir = job.getSubmitDirectory(); StringBuffer sb = new StringBuffer(); //determine the absolute submit directory for the job // sb.append( GridStart.getSubmitDirectory( mSubmitFileDir, job )); sb.append( mSubmitFileDir ); //append the base name of the job sb.append( File.separatorChar ).append( getFileBaseName(job) ); // intialize the print stream to the file return new PrintWriter(new BufferedWriter(new FileWriter(sb.toString()))); } /** * Returns the basename of the file to which the job is written to. * * @param job the job whose job information needs to be written. * * @return the basename of the file. */ public String getFileBaseName(Job job){ StringBuffer sb = new StringBuffer(); sb.append(job.jobName).append(".sub"); return sb.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/HashedFile.java0000644000175000017500000000720311757531137027033 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.code.CodeGenerator; import edu.isi.pegasus.planner.code.generator.condor.CondorGenerator; import edu.isi.pegasus.planner.common.PegasusProperties; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.PrintWriter; import java.io.IOException; /** * A Condor Submit Writer, that understands the notion of hashed file directories. * * @author Karan Vahi * @version $Revision: 2590 $ */ public class HashedFile extends CondorGenerator { /** * The default constructor. */ public HashedFile() { super(); } /** * Returns an open stream to the file that is used for writing out the * job information for the job. * * @param job the job whose job information needs to be written. * * @return the writer to the open file. * @exception IOException if unable to open a write handle to the file. */ public PrintWriter getWriter(Job job) throws IOException{ // String jobDir = job.getSubmitDirectory(); StringBuffer sb = new StringBuffer(); //determine the absolute submit directory for the job // sb.append( GridStart.getSubmitDirectory( mSubmitFileDir, job )); sb.append(mSubmitFileDir); //append the base name of the job sb.append( File.separatorChar ).append(getFileBaseName(job)); // intialize the print stream to the file return new PrintWriter(new BufferedWriter(new FileWriter(sb.toString()))); } /** * Returns the path relative to the workflow submit directory of the file to * which the job is written to. * * @param job the job whose job information needs to be written. * * @return the relative path of the file. */ /* public String getDAGMANFilename(Job job){ //do the correct but the inefficient way. 
String name = ""; //get the absolute directory first String absolute = GridStart.getSubmitDirectory( mSubmitFileDir, job ); if (absolute.indexOf( mSubmitFileDir ) == 0){ if(absolute.length() > mSubmitFileDir.length()){ name = absolute.substring(mSubmitFileDir.length()); //remove the file separator if present at the starting name = (name.indexOf( File.separatorChar) == 0)? name.substring(1): name; name += File.separatorChar; } else{ //empty. no relative directory } } else{ //the absolute path does not contain the submit file directory //root. Should not really be the case. name = absolute; name += File.separatorChar; } name += this.getFileBaseName(job); return name; } */ } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/0000755000175000017500000000000011757531667025466 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/CondorQuoteParser.java0000644000175000017500000002341411757531137031744 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor; /** * A utility class to correctly quote arguments strings before handing over * to Condor. *

* The following Condor Quoting Rules are followed while quoting a String. * *

 * 1) \' => ''   e.g \'Test\' is converted to ''Test''
 * 2) \" => ""   e.g \"Test\" is converted to ""Test""
 * 3) '  => '    if not enclosed in surrounding double quotes
 *               e.g 'Test' is converted to 'Test'
 * 4) '  => ''   if enclosed in surrounding double quotes
 *               e.g "'Test'" is converted to ''Test''
 * 5) "  => '    if not enclosed in surrounding single quotes
 *               e.g Karan "Vahi" is converted to Karan 'Vahi'
 * 6) "  => ""   if enclosed in surrounding single quotes.
 *               e.g 'Karan "Vahi"' is converted to 'Karan ""Vahi""'.
 * 7) *  =>  *   if enclosed in single or double quotes, the enclosed characters
 *               are copied literally including \ (no escaping rules apply)
 * 8) \\ => \    escaping rules apply if not enclosed in single or double quotes.
 *               e.g \\\\ becomes \\, and \\\ throws error.
 * 
* * In order to pass \n etc. in the arguments, either quote it or escape it. * For e.g., in the DAX the following are valid ways to pass Karan\nVahi to the * job as arguments *
 *   1) "Karan\nVahi"
 *   2) 'Karan\nVahi'
 *   3) Karan\\nVahi
 * 
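 *
 * As a quick illustration (a sketch added for clarity, using only the
 * static methods defined below in this class):
 *
 *   String quoted = CondorQuoteParser.quote( "Karan \"Vahi\"", true );
 *   // per rule 5 above, quoted now holds: "Karan 'Vahi'"
 *   // (including the enclosing double quotes, since enclose was set)
 *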
* * In addition, while writing out to the submit file, the whole argument String * should be enclosed in ", e.g. arguments = "Test"; * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2090 $ */ public class CondorQuoteParser { /** * Table to contain the state transition diagram for the parser. The * rows are the current states 0 through 3 (states 4 through 7 are the * terminal states listed below the table). The columns are the current * input character class. The cell contains first the action to be * taken, followed by the new state to transition to: * *
     *      | EOS |  \  |  '  |  "  |other|
     *      |  0  |  1  |  2  |  3  |  4  |
     * -----+-----+-----+-----+-----+-----+
     *   0  | -,F |  -,1| A2,2| A2,3| A1,0|
     *   1  | -,E1| A1,0| A3,0| A4,0| A1,0|
     *   2  | -,E2| A1,2| A2,0| A4,2| A1,2|
     *   3  | -,E3| A1,3| A3,3| A2,0| A1,3|
     * -----+-----+-----+-----+-----+-----+
     *   F  |  4  | final state
     *   E1 |  5  | error1: unexpected end of input
     *   E2 |  6  | error2: unmatched single quotes
     *   E3 |  7  | error3: unmatched double quotes
     * 
* * The state variable collects the new state for a given * state (rows) and input character set (column) identifier. * *

* The state diagram for the above table is shown as follows * * <img src="doc-files/CondorQuote.jpg"> * */ private static final byte cState[][] = { // E \ ' " O { 4, 1, 2, 3, 0}, // 0: starting state { 5, 0, 0, 0, 0}, // 1: found a \ { 6, 2, 0, 2, 2}, // 2: found an opening single quote { 7, 3, 3, 0, 3}, // 3: found an opening double quote }; /** * There are five identified actions. * *

     *  -   | 0 | noop
     *  A1  | 1 | append input character to result
     *  A2  | 2 | append '  to result
     *  A3  | 3 | append '' to result
     *  A4  | 4 | append "" to result
     * 
* * The action variable collects the action to take for a * given state (rows) and input character set (column). */ private static final byte cAction[][] = { // E \ ' " O { 0, 0, 2, 2, 1}, // 0: starting state { 0, 1, 3, 4, 1}, // 1: found a \ { 0, 1, 2, 4, 1}, // 2: found an opening single quote { 0, 1, 3, 2, 1}, // 3: found an opening double quote }; /** * Parses a string and condor quotes it. The enclosing quotes are not * generated around the String. * * @param s is the input string to parse and quote. * * @return the quoted String. * @throws CondorQuoteParserException if the input cannot be recognized. */ public static String quote( String s) throws CondorQuoteParserException{ return quote( s , false ); } /** * Parses a string and condor quotes it. Enclosing quotes are generated * around the whole string if the boolean enclose parameter is set. * * @param s is the input string to parse and quote. * @param enclose boolean indicating whether to generate enclosing quotes or * not. * * @return the quoted String. * @throws CondorQuoteParserException if the input cannot be recognized. */ public static String quote( String s, boolean enclose ) throws CondorQuoteParserException{ StringBuffer result = new StringBuffer(); //enclose the string with the mandatory " to start if(enclose) result.append("\""); int index = 0; byte charset, state = 0; char ch = '?'; while ( state < 4 ) { // // determine character class // switch ( (ch = ( index < s.length() ? s.charAt(index++) : '\0' )) ) { case '\0': charset = 0; break; case '\\': charset = 1; break; case '\'': charset = 2; break; case '\"': charset = 3; break; default: charset = 4; break; } // // perform action // switch ( cAction[state][charset] ) { case 0 :// do nothing break; case 1: // append the input character to the result (A1) result.append(ch); break; case 2: // append ' to the result (A2) result.append('\''); break; case 3: // append '' to the result (A3) result.append("\'\'"); break; case 4: // append "" to the result (A4) result.append("\"\""); break; } // // progress state // state = cState[state][charset]; } if ( state > 4 ) { switch ( state ) { case 5: //we have an unexpected end of input throw new CondorQuoteParserException("Unexpected end of input in string " + s, index); case 6: //we have unmatched single quotes throw new CondorQuoteParserException("Unmatched Single Quotes in string " + s, index); case 7: //we have unmatched double quotes throw new CondorQuoteParserException("Unmatched Double Quotes in string " + s, index ); default: throw new CondorQuoteParserException( "Unknown error", index ); } } //end the result with the mandatory closing " if( enclose ) result.append("\""); return result.toString(); } /** * A Test program. 
*/ public static void main(String[] args) { test("Test Input"); //result should be Test Input test("'Test Input'"); //result should be 'Test Input' test("\"Test Input\""); //result should be 'Test Input' test("\\'Test Input\\'"); //result should be ''Test Input'' test("\\\"Test Input\\\""); //result should be ""Test Input"" test("\\\'Test Input\\\'"); //result should be ''Test Input'' test("\"\'Test Input\'\""); //result should be '''Test Input''' test("\'\"Test Input\"\'"); //result should be '""Test Input""' test("\"\'Test \\Input\'\""); //result should be '''Test \Input''' test("\\\"Test Input\\\""); //result should be ""Test Input"" test("\'Test \"Input\"\'"); //result should be 'Test ""Input""' test("Test \"Input\""); //result should be Test 'Input' test("\\\\Test Input"); //result should be \Test Input test("\\\\"); //result should be \ test("\'\"Test Input\'"); //result should be '""Test Input' //erroneous inputs test("\'\"Test Input\" "); test(" \"\"\" "); test(" ''' "); } /** * Helper test method that quotes the string and catches the exception. * * @param s the string to be parsed. */ private static void test(String s){ try{ System.out.println(s + " condor quoted is " + quote(s) ); } catch(CondorQuoteParserException e){ System.out.println("Error " + e + " at position " + e.getPosition()); } //System.out.println(); } } ././@LongLink0000000000000000000000000000015100000000000011562 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/CondorQuoteParserException.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/CondorQuoteParserException.0000644000175000017500000000364211757531137032762 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor; /** * This class is used to signal errors while parsing argument strings for * Condor Quoting. * * * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2090 $ * * @see CondorQuoteParser */ public class CondorQuoteParserException extends Exception{ /** * Remembers the position that caused the exception to be thrown. */ private int mPosition; /** * The overloaded constructor * * @param msg the message to be thrown in exception * @param position the position at which the error occurred */ public CondorQuoteParserException( String msg, int position ){ super(msg); mPosition = position; } /** * The overloaded constructor * * @param msg the message to be thrown in exception. * @param position the position at which the error occurred. * @param cause the cause of the error. */ public CondorQuoteParserException( String msg, int position, Throwable cause ){ super(msg,cause); mPosition = position; } /** * Obtains the position at which the exception was thrown. 
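 *
 * For example (a sketch added for clarity), a caller can report the
 * offending column as:
 *
 *   try{
 *       CondorQuoteParser.quote( s );
 *   }
 *   catch( CondorQuoteParserException e ){
 *       System.err.println( e.getMessage() + " at column " + e.getPosition() );
 *   }
 *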
* @return a column position into the string */ public int getPosition(){ return this.mPosition; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/SUBDAXGenerator.java0000644000175000017500000015472511757531137031174 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.CondorVersion; import edu.isi.pegasus.common.util.FindExecutable; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.parser.DAXParserFactory; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.common.RunDirectoryFilenameFilter; import edu.isi.pegasus.planner.client.CPlanner; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.classes.DAXJob; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.code.generator.DAXReplicaStore; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.namespace.ENV; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.PrintWriter; import java.text.NumberFormat; import java.text.DecimalFormat; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; /** * The class that takes in a dax job specified in the DAX and renders it into * a SUBDAG with pegasus-plan as the appropriate prescript. * * @author Karan Vahi * @version $Revision: 4507 $ */ public class SUBDAXGenerator{ /** * The default category for the sub dax jobs. */ public static final String DEFAULT_SUBDAX_CATEGORY_KEY = "subwf"; /** * Whether to generate the SUBDAG keyword or not. */ public static final boolean GENERATE_SUBDAG_KEYWORD = false; /** * Suffix to be applied for cache file generation. */ private static final String CACHE_FILE_SUFFIX = ".cache"; /** * The logical name with which to query the transformation catalog for * cPlanner executable. */ public static final String CPLANNER_LOGICAL_NAME = "pegasus-plan"; /** * The namespace to use for condor dagman. */ public static final String CONDOR_DAGMAN_NAMESPACE = "condor"; /** * The logical name with which to query the transformation catalog for the * condor_dagman executable, that ends up running the mini dag as one * job. 
*/ public static final String CONDOR_DAGMAN_LOGICAL_NAME = "dagman"; /** * The namespace to which the jobs in the MEGA DAG being created refer. */ public static final String NAMESPACE = "pegasus"; /** * The planner utility that needs to be called as a prescript. */ public static final String RETRY_LOGICAL_NAME = "pegasus-plan"; /** * The dagman knobs controlled through properties. They map the property name to * the corresponding dagman option. */ public static final String DAGMAN_KNOBS[][]={ { Dagman.MAXPRE_KEY, " -MaxPre " }, { Dagman.MAXPOST_KEY, " -MaxPost " }, { Dagman.MAXJOBS_KEY, " -MaxJobs " }, { Dagman.MAXIDLE_KEY, " -MaxIdle " }, }; /** * The username of the user running the program. */ private String mUser; /** * The number formatter to format the run submit dir entries. */ private NumberFormat mNumFormatter; /** * The object containing all the options passed to the Concrete Planner. */ private PlannerOptions mPegasusPlanOptions; /** * The handle to Pegasus Properties. */ private PegasusProperties mProps; /** * Handle to the logging manager. */ private LogManager mLogger; /** * Bag of Pegasus objects */ private PegasusBag mBag; /** * The print writer handle to the DAG file being written out. */ private PrintWriter mDAGWriter; /** * The handle to the transformation catalog */ private TransformationCatalog mTCHandle; /** * The cleanup scope for the workflows. */ private PegasusProperties.CLEANUP_SCOPE mCleanupScope; /** * The long value of condor version. */ private long mCondorVersion; /** * Any extra arguments that need to be passed to dagman, as determined * from the properties file. */ // String mDAGManKnobs; /** * Maps a sub dax job id to the cache file in its submit directory. The population relies * on top down traversal during Code Generation. */ private Map<String, String> mDAXJobIDToSubmitDirectoryCacheFile; private Graph mWorkflow; private ADag mDAG; /** * The default constructor. */ public SUBDAXGenerator() { mNumFormatter = new DecimalFormat( "0000" ); } /** * Initializes the class. * * @param bag the bag of objects required for initialization * @param dag the dag for which code is being generated * @param workflow the graph representation of the dag * @param dagWriter handle to the dag writer */ public void initialize( PegasusBag bag, ADag dag, Graph workflow, PrintWriter dagWriter ){ mBag = bag; mWorkflow = workflow; mDAG = dag; mDAGWriter = dagWriter; mProps = bag.getPegasusProperties(); mLogger = bag.getLogger(); mTCHandle = bag.getHandleToTransformationCatalog(); this.mPegasusPlanOptions = bag.getPlannerOptions(); mCleanupScope = mProps.getCleanupScope(); mDAXJobIDToSubmitDirectoryCacheFile = new HashMap<String, String>(); mUser = mProps.getProperty( "user.name" ) ; if ( mUser == null ){ mUser = "user"; } //hardcoded options for the time being. mPegasusPlanOptions.setPartitioningType( "Whole" ); mCondorVersion = CondorVersion.getInstance( mLogger ).numericValue(); if( mCondorVersion == -1 ){ mLogger.log( "Unable to determine the version of condor " , LogManager.WARNING_MESSAGE_LEVEL ); } else{ mLogger.log( "Condor Version detected is " + mCondorVersion , LogManager.DEBUG_MESSAGE_LEVEL ); } } /** * Generates code for a job. * * @param job the job for which code has to be generated. * * @return a Job if a submit file needs to be generated * for the job. Else return null. 
* */ public Job generateCode( Job job ){ String arguments = job.getArguments(); String [] args = arguments.split( " " ); mLogger.log( "Generating code for DAX job " + job.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Arguments passed to SUBDAX Generator are " + arguments, LogManager.DEBUG_MESSAGE_LEVEL ); //convert the args to pegasus-plan options PlannerOptions options = new CPlanner( mLogger ).parseCommandLineArguments( args, false ); //figure out the label and index for SUBDAX String label = null; String index = null; File dax = new File( options.getDAX() ); String labelBasedDir = null; if( dax.exists() ){ //retrieve the metadata in the subdax. //means the the dax needs to be generated beforehand. //Map metadata = getDAXMetadata( options.getDAX() ); Map metadata = DAXParserFactory.getDAXMetadata( mBag, options.getDAX() ); label = (String) metadata.get( "name" ); index = (String) metadata.get( "index" ); //the label for directory purposes includes the logical id too labelBasedDir = label + "_" + job.getLogicalID(); } else{ //try and construct on basis of basename prefix option String basenamePrefix = options.getBasenamePrefix() ; if( basenamePrefix == null ){ StringBuffer error = new StringBuffer(); error.append( "DAX file for subworkflow does not exist " ).append( dax ). append( " . Either set the --basename option to subworkflow or make sure dax exists" ); throw new RuntimeException( error.toString() ); } label = options.getBasenamePrefix(); index = "0"; labelBasedDir = label; mLogger.log( "DAX File for subworkflow does not exist. Set label value to the basename option passed ", LogManager.DEBUG_MESSAGE_LEVEL ); } //check if we want a label based submit directory for the sub workflow // if( mProps.labelBasedSubmitDirectoryForSubWorkflows() ){ //From 3.0 onwards if a user does not specify a relative submit //we always create a label/job id based directory structure String relative = options.getRelativeSubmitDirectoryOption(); relative = ( relative == null )? labelBasedDir ://no relative-submit-dir option specified. set to label new File( relative, labelBasedDir ).getPath(); options.setRelativeSubmitDirectory( relative ); //} String submit = options.getSubmitDirectory(); mLogger.log( "Submit directory in sub dax specified is " + submit, LogManager.DEBUG_MESSAGE_LEVEL ); if( submit == null || !submit.startsWith( File.separator ) ){ //then set the submit directory relative to the parent workflow basedir String innerBase = mPegasusPlanOptions.getBaseSubmitDirectory(); String innerRelative = mPegasusPlanOptions.getRelativeSubmitDirectory(); innerRelative = ( innerRelative == null && mPegasusPlanOptions.partOfDeferredRun() )? mPegasusPlanOptions.getRandomDir(): //the random dir is the relative submit dir? innerRelative; //FIX for JIRA bug 65 to ensure innerRelative is resolved correctly //in case of innerRelative being ./ . 
We don't want inner relative //to compute to .// Instead we want it to compute to ././ //innerRelative += File.separator + submit ; innerRelative = new File( innerRelative, submit ).getPath(); //options.setSubmitDirectory( mPegasusPlanOptions.getSubmitDirectory(), submit ); options.setSubmitDirectory( innerBase, innerRelative ); mLogger.log( "Base Submit directory for inner workflow set to " + innerBase, LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Relative Submit Directory for inner workflow set to " + innerRelative, LogManager.DEBUG_MESSAGE_LEVEL ); mLogger.log( "Submit directory for inner workflow set to " + options.getSubmitDirectory(), LogManager.DEBUG_MESSAGE_LEVEL ); } if( options.getExecutionSites().isEmpty() ){ //for JIRA feature request PM-64 //no sites are specified. use the execution sites for //the parent workflow mLogger.log( "Setting list of execution sites to the same as outer workflow", LogManager.DEBUG_MESSAGE_LEVEL ); options.getExecutionSites().addAll( mPegasusPlanOptions.getExecutionSites() ); } //we propagate force-replan if set in the outer level workflow //to the sub workflow if( mPegasusPlanOptions.getForceReplan() ){ options.setForceReplan( true ); } //we propagate the rescue option also if( mPegasusPlanOptions.getNumberOfRescueTries() != PlannerOptions.DEFAULT_NUMBER_OF_RESCUE_TRIES ){ //user specified a value. //put that for the sub workflow if not specified in there if( options.getNumberOfRescueTries() == PlannerOptions.DEFAULT_NUMBER_OF_RESCUE_TRIES ){ options.setNumberOfRescueTries( mPegasusPlanOptions.getNumberOfRescueTries() ); } } //add the parents generated transient rc to the cache files //arguments for the sub workflow Set cacheFiles = options.getCacheFiles(); Set parentsTransientRCs = getParentsTransientRC( job ); if ( !parentsTransientRCs.isEmpty() ){ mLogger.log( "Parent DAX Jobs Transient RC's are " + parentsTransientRCs, LogManager.DEBUG_MESSAGE_LEVEL ); cacheFiles.addAll( parentsTransientRCs ); } //do some sanitization of the path to the dax file. //if it is a relative path, then ??? options.setSanitizePath( true ); String baseDir = options.getBaseSubmitDirectory(); String relativeDir = null; //construct the submit directory structure for the subdax try{ relativeDir = (options.getRelativeSubmitDirectory() == null) ? //create our own relative dir createSubmitDirectory(label, baseDir, mUser, options.getVOGroup(), mProps.useTimestampForDirectoryStructure()) : options.getRelativeSubmitDirectory(); } catch( IOException ioe ){ String error = "Unable to write to directory " ; throw new RuntimeException( error + options.getSubmitDirectory() , ioe ); } options.setSubmitDirectory( baseDir, relativeDir ); mLogger.log( "Submit Directory for SUB DAX is " + options.getSubmitDirectory() , LogManager.DEBUG_MESSAGE_LEVEL ); if( options.getRelativeDirectory() == null || !options.getRelativeDirectory().startsWith( File.separator ) ){ //then set the relative directory relative to the parent workflow relative dir String baseRelativeExecDir = mPegasusPlanOptions.getRelativeDirectory(); if( baseRelativeExecDir == null ){ //set the relative execution directory to the relative submit directory options.setRelativeDirectory( options.getRelativeSubmitDirectory() ); } else{ //the else block should not be there. 
//construct path from base relative exec dir File innerRelativeExecDir = null; if( mProps.labelBasedSubmitDirectoryForSubWorkflows() ){ innerRelativeExecDir = new File( baseRelativeExecDir, options.getRelativeSubmitDirectory() ); //this is temporary till LIGO fixes it's dax //and above property will go away. //we dont want label in the exec dir innerRelativeExecDir = innerRelativeExecDir.getParentFile(); } else{ //starting 3.0 onwards we dont want long paths //in execution directories for sub workflows //JIRA PM-260 String innerRelative = options.getRelativeDirectory(); innerRelative = ( innerRelative == null )? //construct something on basis of label labelBasedDir : innerRelative; innerRelativeExecDir = new File( baseRelativeExecDir, innerRelative); } options.setRelativeDirectory(innerRelativeExecDir.getPath() ); } } mLogger.log( "Relative Execution Directory for SUB DAX is " + options.getRelativeDirectory() , LogManager.DEBUG_MESSAGE_LEVEL ); //no longer create a symbolic link at this point Karan. June 1, 2011 /* //create a symbolic link to dax in the subdax submit directory String linkedDAX = createSymbolicLinktoDAX( options.getSubmitDirectory(), options.getDAX() ); //update options with the linked dax options.setDAX( linkedDAX ); */ //for time being for LIGO , try and create a symlink for //the cache file that is created during sub workflow execution //in parent directory of the submit directory //JIRA PM-116 if( mProps.labelBasedSubmitDirectoryForSubWorkflows() ){ this.createSymbolicLinktoCacheFile( options, label, index); } /* //write out the properties in the submit directory String propertiesFile = null; try{ //we dont want to store the path to sub workflow properties files in the //internal variable in PegasusProperties. propertiesFile = this.mProps.writeOutProperties( options.getSubmitDirectory(), true, false ); } catch( IOException ioe ){ throw new RuntimeException( "Unable to write out properties to directory " + options.getSubmitDirectory() ); } */ //refer to the parent workflow's properties file only instead. //Karan June 1, 2011 String propertiesFile = this.mProps.getPropertiesInSubmitDirectory(); //check if a encompassing DAX to which the dax job belongs has a //replica store associated. if( !this.mDAG.getReplicaStore().isEmpty() ){ //construct the path to mDAG replica store StringBuffer inheritedRCFile = new StringBuffer(); //point to the outer level workflow DAX replica store file inheritedRCFile.append( DAXReplicaStore.getDAXReplicaStoreFile( this.mPegasusPlanOptions, this.mDAG.getLabel(), this.mDAG.dagInfo.index ) ); options.setInheritedRCFiles( inheritedRCFile.toString() ); } //construct the pegasus-plan prescript for the JOB //the log file for the prescript should be in the //submit directory of the outer level workflow StringBuffer log = new StringBuffer(); log.append( mPegasusPlanOptions.getSubmitDirectory() ).append( File.separator ). append( job.getName() ).append( ".pre.log" ); String prescript = constructPegasusPlanPrescript( job, options, mDAG.getRootWorkflowUUID(), propertiesFile, log.toString() ); job.setPreScript( prescript ); //determine the path to the dag file that will be constructed if( GENERATE_SUBDAG_KEYWORD ){ StringBuffer dag = new StringBuffer(); dag.append( options.getSubmitDirectory() ).append( File.separator ). append( CondorGenerator.getDAGFilename( options, label, index, ".dag") ); //print out the SUBDAG keyword for the job StringBuffer sb = new StringBuffer(); sb.append( Dagman.SUBDAG_EXTERNAL_KEY ).append( " " ). 
append( job.getName() ).append( " " ). append( dag.toString() ); mDAGWriter.println( sb.toString() ); return null; } else{ StringBuffer basenamePrefix = new StringBuffer(); if( options.getBasenamePrefix() == null ){ basenamePrefix.append( label ); basenamePrefix.append( "-" ).append( index ); }else{ //add the prefix from options basenamePrefix.append( options.getBasenamePrefix() ); } mLogger.log( "Basename prefix for the sub workflow is " + basenamePrefix, LogManager.DEBUG_MESSAGE_LEVEL ); String subDAXCache = new File( options.getSubmitDirectory(), basenamePrefix + CACHE_FILE_SUFFIX ).getAbsolutePath(); mLogger.log( "Cache File for the sub workflow is " + subDAXCache, LogManager.DEBUG_MESSAGE_LEVEL ); mDAXJobIDToSubmitDirectoryCacheFile.put( job.getID(), subDAXCache); Job dagJob = constructDAGJob( job, new File( mPegasusPlanOptions.getSubmitDirectory() ), new File( options.getSubmitDirectory()), basenamePrefix.toString() ); //set the prescript dagJob.setPreScript( prescript ); return dagJob; } } /** * Constructs a job that plans and submits the partitioned workflow, * referred to by a Partition. The main job itself is a condor dagman job * that submits the concrete workflow. The concrete workflow is generated by * running the planner in the prescript for the job. * * @param subdaxJob the original subdax job. * @param directory the directory where the submit file for dagman job has * to be written out to. * @param subdaxDirectory the submit directory where the submit files for the * subdag reside. * @param basenamePrefix the basename to be assigned to the files associated * with DAGMan * * @return the constructed DAG job. */ protected Job constructDAGJob( Job subdaxJob, File directory, File subdaxDirectory, String basenamePrefix){ //for time being use the old functions. Job job = new DAXJob(); //the parent directory where the submit file for condor dagman has to //reside. the submit files for the corresponding partition are one level //deeper. String parentDir = directory.getAbsolutePath(); //set the logical transformation job.setTransformation(CONDOR_DAGMAN_NAMESPACE, CONDOR_DAGMAN_LOGICAL_NAME, null); //set the logical derivation attributes of the job. job.setDerivation(CONDOR_DAGMAN_NAMESPACE, CONDOR_DAGMAN_LOGICAL_NAME, null); //always runs on the submit host job.setSiteHandle("local"); //we want the DAG job to inherit the dagman profile keys //cannot inherit condor and pegasus profiles, as //they may get the dag job to run incorrectly //example, pegasus style keys if specified for site local job.dagmanVariables.merge( subdaxJob.dagmanVariables ); //set the partition id only as the unique id //for the time being. // job.setName(partition.getID()); //set the logical id for the job same as the partition id. job.setLogicalID( subdaxJob.getLogicalID() ); //construct the name of the DAG job as same as subdax job job.setName( subdaxJob.getName() ); List entries; TransformationCatalogEntry entry = null; //get the path to condor dagman try{ entries = mTCHandle.lookup(job.namespace, job.logicalName, job.version, job.getSiteHandle(), TCType.INSTALLED); entry = (entries == null) ? defaultTCEntry( "local") ://construct from site catalog //Gaurang assures that if no record is found then //TC Mechanism returns null (TransformationCatalogEntry) entries.get(0); if( entry == null ){ mLogger.log( "DAGMan not catalogued in the TC. 
Trying to construct from the environment", LogManager.DEBUG_MESSAGE_LEVEL ); entry = constructTCEntryFromEnvironment( ); } else{ mLogger.log( "Picked path to DAGMan from the TC " + entry.getPhysicalTransformation() , LogManager.DEBUG_MESSAGE_LEVEL ); } } catch(Exception e){ throw new RuntimeException( "ERROR: While accessing the Transformation Catalog",e); } if(entry == null){ //throw appropriate error throw new RuntimeException("ERROR: Entry not found in tc for job " + job.getCompleteTCName() + " on site " + job.getSiteHandle()); } //set the path to the executable and environment string job.executable = entry.getPhysicalTransformation(); //the environment variable are set later automatically from the tc //job.envVariables = entry.envString; //the job itself is the main job of the super node //construct the classad specific information job.jobID = job.getName(); // job.jobClass = Job.COMPUTE_JOB; //directory where all the dagman related files for the nested dagman //reside. Same as the directory passed as an input parameter String subdaxDir = subdaxDirectory.getAbsolutePath(); //make the initial dir point to the submit file dir for the subdag //we can do this as we are running this job both on local host, and scheduler //universe. Hence, no issues of shared filesystem or anything. job.condorVariables.construct("initialdir", subdaxDir ); //construct the argument string, with all the dagman files //being generated in the partition directory. Using basenames as //initialdir has been specified for the job. StringBuffer sb = new StringBuffer(); sb.append(" -f -l . -Debug 3"). append(" -Lockfile ").append( getBasename( basenamePrefix, ".dag.lock") ). append(" -Dag ").append( getBasename( basenamePrefix, ".dag")); //specify condor log for condor version less than 7.1.2 if( mCondorVersion < CondorVersion.v_7_1_2 ){ sb.append(" -Condorlog ").append(getBasename( basenamePrefix, ".log")); } //allow for version mismatch as after 7.1.3 condor does tight //checking on dag.condor.sub file and the condor version used if( mCondorVersion >= CondorVersion.v_7_1_3 ){ sb.append( " -AllowVersionMismatch " ); } //we append the Rescue DAG option only if old version //of Condor is used < 7.1.0. 
To detect we check for a non //zero value of the --rescue option to pegasus-plan //Karan June 27, 2007 mLogger.log( "Number of Rescue retries " + mPegasusPlanOptions.getNumberOfRescueTries() , LogManager.DEBUG_MESSAGE_LEVEL ); if( mCondorVersion >= CondorVersion.v_7_1_0 || mPegasusPlanOptions.getNumberOfRescueTries() > 0 ){ mLogger.log( "Constructing arguments to dagman in 7.1.0 and later style", LogManager.DEBUG_MESSAGE_LEVEL ); sb.append( " -AutoRescue 1 -DoRescueFrom 0 "); } else{ mLogger.log( "Constructing arguments to dagman in pre 7.1.0 style", LogManager.DEBUG_MESSAGE_LEVEL ); sb.append(" -Rescue ").append(getBasename( basenamePrefix, ".dag.rescue")); } //pass any dagman knobs that were specified in the properties file sb.append( this.constructDAGManKnobs( job ) ); //put in the environment variables that are required job.envVariables.construct("_CONDOR_DAGMAN_LOG", new File( subdaxDir, getBasename( basenamePrefix, ".dag.dagman.out")).getAbsolutePath() ); job.envVariables.construct("_CONDOR_MAX_DAGMAN_LOG","0"); //set the arguments for the job job.setArguments(sb.toString()); //the environment needs to be propagated for exitcode to be picked up job.condorVariables.construct("getenv","TRUE"); job.condorVariables.construct("remove_kill_sig","SIGUSR1"); //the log file for condor dagman for the dagman also needs to be created //it is different from the log file that is shared by jobs of //the partition. That is referred to by Condorlog // keep the log file common for all jobs and dagman albeit without // dag.dagman.log suffix // job.condorVariables.construct("log", getAbsolutePath( partition, dir,".dag.dagman.log")); // String dagName = mMegaDAG.dagInfo.nameOfADag; // String dagIndex= mMegaDAG.dagInfo.index; // job.condorVariables.construct("log", dir + mSeparator + // dagName + "_" + dagIndex + ".log"); //the job needs to be explicitly launched in //scheduler universe instead of local universe job.condorVariables.construct( Condor.UNIVERSE_KEY, Condor.SCHEDULER_UNIVERSE ); //incorporate profiles from the transformation catalog //and properties for the time being. Not from the site catalog. //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 job.addNotifications( entry ); //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from the pool catalog. job.updateProfiles( entry ); //the profile information from the properties file //is assimilated overriding the one from the transformation //catalog. job.updateProfiles(mProps); //if no category is associated with the job, add a default //category if( !job.dagmanVariables.containsKey( Dagman.CATEGORY_KEY ) ){ job.dagmanVariables.construct( Dagman.CATEGORY_KEY, DEFAULT_SUBDAX_CATEGORY_KEY ); } //we do not want the job to be launched via kickstart //Fix for Pegasus bug number 143 //http://bugzilla.globus.org/vds/show_bug.cgi?id=143 job.vdsNS.construct( Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[GridStartFactory.NO_GRIDSTART_INDEX] ); return job; } /** * Constructs any extra arguments that need to be passed to dagman, as determined * from the properties file. 
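 *
 * For example (a sketch added for clarity): if the job's dagman profiles
 * carry MAXJOBS=10 and MAXIDLE=5, then per the DAGMAN_KNOBS table above
 * the returned fragment would be " -MaxJobs 10 -MaxIdle 5".
 *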
* * @param job the job * * @return any arguments to be added, else empty string */ public String constructDAGManKnobs( Job job ){ StringBuffer sb = new StringBuffer(); //get all the values for the dagman knobs int value; for( int i = 0; i < SUBDAXGenerator.DAGMAN_KNOBS.length; i++ ){ value = parseInt( (String)job.dagmanVariables.get( SUBDAXGenerator.DAGMAN_KNOBS[i][0] ) ); if ( value > 0 ){ //add the option sb.append( SUBDAXGenerator.DAGMAN_KNOBS[i][1] ); sb.append( value ); } } return sb.toString(); } /** * Parses a string into an integer. Invalid values are returned as -1. * * @param s the String to be parsed as an integer * * @return the int value if valid, else -1 */ protected static int parseInt( String s ){ int value = -1; try{ value = Integer.parseInt( s ); } catch( Exception e ){ //ignore } return value; } /** * Returns the basename of a dagman (usually) related file for a particular * partition. * * @param prefix the prefix. * @param suffix the suffix for the file basename. * * @return the basename. */ protected String getBasename( String prefix, String suffix ){ StringBuffer sb = new StringBuffer( 16 ); //concatenate the prefix and the suffix sb.append( prefix ).append( suffix ); return sb.toString(); } /** * Constructs the basename of the cache file that is to be used * to log the transient files. The basename is dependent on whether the * basename prefix has been specified at runtime or not. * * @param options the options for the sub workflow. * @param label the label for the workflow. * @param index the index for the workflow. * * @return the name of the cache file */ protected String getCacheFileName( PlannerOptions options, String label , String index ){ return this.getWorkflowFileName(options, label, index, SUBDAXGenerator.CACHE_FILE_SUFFIX ); } /** * Constructs the basename of a workflow file. The basename is dependent * on whether the basename prefix has been specified at runtime or not. * * @param options the options for the sub workflow. * @param label the label for the workflow. * @param index the index for the workflow. * @param suffix the suffix to be applied at the end. * * @return the name of the workflow file */ protected String getWorkflowFileName( PlannerOptions options, String label , String index, String suffix ){ StringBuffer sb = new StringBuffer(); String bprefix = options.getBasenamePrefix(); if(bprefix != null){ //the prefix is not null. use it sb.append(bprefix); } else{ //generate the prefix from the name of the dag sb.append( label ).append("-"). append( index ); } //append the suffix sb.append( suffix ); return sb.toString(); } /** * Returns a default TC entry to be used in case an entry is not found in the * transformation catalog. * * @param site the site for which the default entry is required. * * * @return the default entry. */ private TransformationCatalogEntry defaultTCEntry( String site ){ //not implemented as we don't have a handle to the site catalog in this class return null; } /** * Returns a transformation catalog entry object constructed from the environment. * * An entry is constructed if either of the following environment variables * are defined * 1) CONDOR_HOME * 2) CONDOR_LOCATION * * CONDOR_HOME takes precedence over CONDOR_LOCATION * * * @return the constructed entry else null. 
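 *
 * For example (a sketch added for clarity): with CONDOR_HOME=/opt/condor
 * set in the environment, the helpers below resolve dagman to
 * /opt/condor/bin/condor_dagman and register it as an INSTALLED
 * transformation for site "local".
 *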
*/ private TransformationCatalogEntry constructTCEntryFromEnvironment( ){ //construct environment profiles Map m = System.getenv(); ENV env = new ENV(); String key = "CONDOR_HOME"; if( m.containsKey( key ) ){ env.construct( key, m.get( key ) ); } key = "CONDOR_LOCATION"; if( m.containsKey( key ) ){ env.construct( key, m.get( key ) ); } return ( env.isEmpty() ) ? constructTCEntryFromPath( )://construct entry from PATH environment variable constructTCEntryFromEnvProfiles( env ); //construct entry from environment } /** * Returns a transformation catalog entry object constructed from the PATH * environment variable. * * @return the entry constructed else null if the executable could not be found. */ private TransformationCatalogEntry constructTCEntryFromPath( ) { //find path to condor_dagman TransformationCatalogEntry entry = null; File condorDagMan = FindExecutable.findExec( "condor_dagman" ); if( condorDagMan == null ){ mLogger.log( "Unable to determine path to condor_dagman using PATH environment variable ", LogManager.DEBUG_MESSAGE_LEVEL ); return entry; } String dagManPath = condorDagMan.getAbsolutePath(); mLogger.log( "Constructing path to dagman on basis of env variable PATH " + dagManPath , LogManager.DEBUG_MESSAGE_LEVEL ); return this.constructTransformationCatalogEntryForDAGMan( dagManPath ); } /** * Returns a transformation catalog entry object constructed from the environment. * * An entry is constructed if either of the following environment variables * are defined * 1) CONDOR_HOME * 2) CONDOR_LOCATION * * CONDOR_HOME takes precedence over CONDOR_LOCATION * * @param env the environment profiles. * * * @return the entry constructed else null if environment variables not defined. */ private TransformationCatalogEntry constructTCEntryFromEnvProfiles( ENV env ) { //check if either CONDOR_HOME or CONDOR_LOCATION is defined String key = null; if( env.containsKey( "CONDOR_HOME") ){ key = "CONDOR_HOME"; } else if( env.containsKey( "CONDOR_LOCATION") ){ key = "CONDOR_LOCATION"; } if( key == null ){ //environment variables are not defined. return null; } mLogger.log( "Constructing path to dagman on basis of env variable " + key, LogManager.DEBUG_MESSAGE_LEVEL ); //construct path to condor dagman StringBuffer path = new StringBuffer(); path.append( env.get( key ) ).append( File.separator ). append( "bin" ).append( File.separator). append( "condor_dagman" ); return this.constructTransformationCatalogEntryForDAGMan( path.toString() ); } /** * Constructs a TransformationCatalogEntry for DAGMan. * * @param path path to dagman * * @return TransformationCatalogEntry for dagman if path is not null, else null. */ private TransformationCatalogEntry constructTransformationCatalogEntryForDAGMan( String path ){ if( path == null ){ return null; } TransformationCatalogEntry entry = new TransformationCatalogEntry(); entry.setLogicalTransformation( CONDOR_DAGMAN_NAMESPACE, CONDOR_DAGMAN_LOGICAL_NAME, null ); entry.setType( TCType.INSTALLED ); entry.setResourceId( "local" ); entry.setPhysicalTransformation( path ); return entry; } /** * Constructs the pegasus plan prescript for the subdax. * * @param job the subdax job * @param options the planner options with which subdax has to be invoked * @param rootUUID the root workflow uuid * @param properties the properties file. 
* @param log the log for the prescript output * * @return the prescript */ public String constructPegasusPlanPrescript( Job job, PlannerOptions options, String rootUUID, String properties, String log ){ StringBuffer prescript = new StringBuffer(); String site = job.getSiteHandle(); TransformationCatalogEntry entry = null; //get the path to script wrapper from the try{ List entries = mTCHandle.lookup( "pegasus", "pegasus-plan", null, "local", TCType.INSTALLED); //get the first entry from the list returned entry = ( entries == null ) ? null : //Gaurang assures that if no record is found then //TC Mechanism returns null ((TransformationCatalogEntry) entries.get(0)); } catch(Exception e){ throw new RuntimeException( "ERROR: While accessing the Transformation Catalog",e); } //construct the prescript path StringBuffer script = new StringBuffer(); if(entry == null){ //log to debug mLogger.log("Constructing the default path to the pegasus-plan", LogManager.DEBUG_MESSAGE_LEVEL); //construct the default path to the executable script.append( mProps.getBinDir() ). append( File.separator ). append( "pegasus-plan" ); } else{ script.append(entry.getPhysicalTransformation()); } //set the flag designating that the planning invocation is part //of a deferred planning run options.setPartOfDeferredRun( true ); //in case of deferred planning cleanup wont work //explicitly turn it off if the file cleanup scope if fullahead if( mCleanupScope.equals( PegasusProperties.CLEANUP_SCOPE.fullahead ) ){ options.setCleanup( false ); } //we want monitoring to happen options.setMonitoring( true ); //construct the argument string. //add the jvm options and the pegasus options if any StringBuffer arguments = new StringBuffer(); arguments./*append( mPOptions.toJVMOptions())*/ append( " -Dpegasus.log.*=").append(log). append( " -D" ).append( PegasusProperties.ROOT_WORKFLOW_UUID_PROPERTY_KEY ). append( "=" ).append( rootUUID ). //add other jvm options that user may have specified append( options.toJVMOptions() ). append(" --conf ").append( properties ). //put in all the other options. append( options.toOptions()); //add the --dax option explicitly in the end arguments.append( " --dax " ).append( options.getDAX() ); prescript.append( script ).append( " " ).append( arguments ); return prescript.toString(); } /** * Creates a symbolic link to the DAX file in a dax sub directory in the * submit directory * * @param options the options for the sub workflow. * @param label the label for the workflow. * @param index the index for the workflow. * * @return boolean whether symlink is created or not */ public boolean createSymbolicLinktoCacheFile( PlannerOptions options , String label, String index ){ File f = new File( options.getSubmitDirectory() ); String cache = this.getCacheFileName(options, label, index); File source = new File( f, cache ); File dest = new File ( f.getParent(), cache ); StringBuffer sb = new StringBuffer(); sb.append( "Creating symlink " ).append( source.getAbsolutePath() ). append( " -> ").append( dest.getAbsolutePath() ); mLogger.log( sb.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); return this.createSymbolicLink( source.getAbsolutePath(), dest.getAbsolutePath(), true ); } /** * Creates a symbolic link to the DAX file in a dax sub directory in the * submit directory * * * @param submitDirectory the submit directory for the sub workflow. * @param dax the dax file to which the symbolic link has * to be created. * * @return the symbolic link created. 
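 *
 * For example (a sketch added for clarity): for submitDirectory
 * /scratch/run0001 and dax /home/user/wf.dax, the symlink created would
 * be /scratch/run0001/dax/wf.dax.
 *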
*/ public String createSymbolicLinktoDAX( String submitDirectory , String dax ){ File dir = new File( submitDirectory, "dax" ); //create a symbolic in the dax subdirectory to the //dax file specified in the sub dax //create the dir if it does not exist try{ sanityCheck( dir ); } catch( IOException ioe ){ throw new RuntimeException( "Unable to create the submit directory for sub dax " + dir ); } //we have the partition written out //now create a symlink to the DAX file StringBuffer destinationDAX = new StringBuffer(); destinationDAX.append( dir ).append( File.separator ). append( new File(dax).getName() ); if ( !createSymbolicLink( dax , destinationDAX.toString() ) ){ throw new RuntimeException( "Unable to create symbolic link between " + dax + " and " + destinationDAX.toString() ); } return destinationDAX.toString(); } /** * Creates the submit directory for the workflow. This is not thread safe. * * @param dag the workflow being worked upon. * @param dir the base directory specified by the user. * @param user the username of the user. * @param vogroup the vogroup to which the user belongs to. * @param timestampBased boolean indicating whether to have a timestamp based dir or not * * @return the directory name created relative to the base directory passed * as input. * * @throws IOException in case of unable to create submit directory. */ protected String createSubmitDirectory( ADag dag, String dir, String user, String vogroup, boolean timestampBased ) throws IOException { return createSubmitDirectory( dag.getLabel(), dir, user, vogroup, timestampBased ); } /** * Creates the submit directory for the workflow. This is not thread safe. * * @param label the label of the workflow * @param dir the base directory specified by the user. * @param user the username of the user. * @param vogroup the vogroup to which the user belongs to. * @param timestampBased boolean indicating whether to have a timestamp based dir or not * * @return the directory name created relative to the base directory passed * as input. * * @throws IOException in case of unable to create submit directory. 
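 *
 * For example (a sketch, assuming the run-directory prefix used by
 * RunDirectoryFilenameFilter is "run"): for user "rynge", vogroup
 * "pegasus" and label "diamond", the first invocation would create and
 * return rynge/pegasus/diamond/run0001 relative to the base directory.
 *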
*/ protected String createSubmitDirectory( String label, String dir, String user, String vogroup, boolean timestampBased ) throws IOException { File base = new File( dir ); StringBuffer result = new StringBuffer(); //do a sanity check on the base sanityCheck( base ); //add the user name if possible base = new File( base, user ); result.append( user ).append( File.separator ); //add the vogroup base = new File( base, vogroup ); sanityCheck( base ); result.append( vogroup ).append( File.separator ); //add the label of the DAX base = new File( base, label ); sanityCheck( base ); result.append( label ).append( File.separator ); //create the directory name StringBuffer leaf = new StringBuffer(); if( timestampBased ){ leaf.append( mPegasusPlanOptions.getDateTime( mProps.useExtendedTimeStamp() ) ); } else{ //get all the files in this directory String[] files = base.list( new RunDirectoryFilenameFilter() ); //find the maximum run directory int num, max = 1; for( int i = 0; i < files.length ; i++ ){ num = Integer.parseInt( files[i].substring( RunDirectoryFilenameFilter.SUBMIT_DIRECTORY_PREFIX.length() ) ); if ( num + 1 > max ){ max = num + 1; } } //create the directory name leaf.append( RunDirectoryFilenameFilter.SUBMIT_DIRECTORY_PREFIX ).append( mNumFormatter.format( max ) ); } result.append( leaf.toString() ); base = new File( base, leaf.toString() ); mLogger.log( "Directory to be created is " + base.getAbsolutePath(), LogManager.DEBUG_MESSAGE_LEVEL ); sanityCheck( base ); return result.toString(); } /** * Checks the destination location for existence, if it can * be created, if it is writable etc. * * @param dir is the new base directory to optionally create. * * @throws IOException in case of error while writing out files. */ protected static void sanityCheck( File dir ) throws IOException{ if ( dir.exists() ) { // location exists if ( dir.isDirectory() ) { // ok, isa directory if ( dir.canWrite() ) { // can write, all is well return; } else { // all is there, but I cannot write to dir throw new IOException( "Cannot write to existing directory " + dir.getPath() ); } } else { // exists but not a directory throw new IOException( "Destination " + dir.getPath() + " already " + "exists, but is not a directory." ); } } else { // does not exist, try to make it if ( ! dir.mkdirs() ) { //try to get around JVM bug. JIRA PM-91 if( dir.getPath().endsWith( "." 
) ){
                //just try to create the parent directory
                if( !dir.getParentFile().mkdirs() ){
                    //tried everything and failed
                    throw new IOException( "Unable to create directory " + dir.getPath() );
                }
                return;
            }
            throw new IOException( "Unable to create directory " + dir.getPath() );
        }
    }
}

/**
 * This method generates a symlink between two files.
 *
 * @param source      the file that has to be symlinked
 * @param destination the destination of the symlink
 *
 * @return boolean indicating if creation of symlink was successful or not
 */
protected boolean createSymbolicLink( String source, String destination ) {
    return this.createSymbolicLink( source, destination, false );
}

/**
 * This method generates a symlink between two files.
 *
 * @param source          the file that has to be symlinked
 * @param destination     the destination of the symlink
 * @param logErrorToDebug whether to log error messages at debug level or not
 *
 * @return boolean indicating if creation of symlink was successful or not
 */
protected boolean createSymbolicLink( String source, String destination, boolean logErrorToDebug ) {
    try{
        Runtime rt = Runtime.getRuntime();
        String command = "ln -sf " + source + " " + destination;
        mLogger.log( "Creating symlink between " + source + " " + destination,
                     LogManager.DEBUG_MESSAGE_LEVEL);
        Process p = rt.exec( command, null );

        // set up to read subprogram output
        InputStream is = p.getInputStream();
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);

        // set up to read subprogram error
        InputStream er = p.getErrorStream();
        InputStreamReader err = new InputStreamReader(er);
        BufferedReader ebr = new BufferedReader(err);

        // read output from subprogram
        // and display it
        String s, se = null;
        while ( ((s = br.readLine()) != null) || ((se = ebr.readLine()) != null ) ) {
            if( s != null ){
                mLogger.log(s, LogManager.DEBUG_MESSAGE_LEVEL);
            }
            else {
                if( logErrorToDebug ){
                    mLogger.log( se, LogManager.DEBUG_MESSAGE_LEVEL );
                }
                else{
                    mLogger.log( se, LogManager.ERROR_MESSAGE_LEVEL );
                }
            }
        }

        br.close();
        return true;
    }
    catch(Exception ex){
        if( logErrorToDebug ){
            mLogger.log( "Unable to create symlink to the log file", ex,
                         LogManager.DEBUG_MESSAGE_LEVEL );
        }
        else{
            mLogger.log( "Unable to create symlink to the log file", ex,
                         LogManager.ERROR_MESSAGE_LEVEL );
        }
        return false;
    }
}

/**
 * Returns a set containing the paths to the parent DAX jobs'
 * transient replica catalogs.
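 * (Illustrative: if the job's parents include DAX jobs ID000002 and
 * ID000005, the returned set holds the submit-directory cache file paths
 * recorded for those two jobs in mDAXJobIDToSubmitDirectoryCacheFile;
 * the job IDs here are hypothetical.)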
 *
 * @param job the job
 *
 * @return Set of paths
 */
public Set getParentsTransientRC( Job job ){
    Set s = new HashSet();

    //get the graph node corresponding to the job
    GraphNode node = this.mWorkflow.getNode( job.getID() );

    for( GraphNode parent : node.getParents() ){
        Job p = ( Job )parent.getContent();
        if( p instanceof DAXJob ){
            s.add( this.mDAXJobIDToSubmitDirectoryCacheFile.get( p.getID() ));
        }
    }

    return s;
}
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/doc-files/ (directory)
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/doc-files/.xvpics/ (directory)
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/doc-files/.xvpics/CondorQuote1.jpg [binary .xvpics thumbnail omitted; 1097x741 RGB preview]
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/doc-files/CondorQuote.jpg [binary JPEG image data omitted]
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/CondorGenerator.java0000644000175000017500000020537111757531137031424 0ustar ryngerynge/**
 * Copyright 2007-2008 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
*/ package edu.isi.pegasus.planner.code.generator.condor; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.catalog.transformation.TransformationCatalogEntry; import edu.isi.pegasus.planner.catalog.transformation.classes.TCType; import edu.isi.pegasus.planner.catalog.TransformationCatalog; import edu.isi.pegasus.planner.code.CodeGenerator; import edu.isi.pegasus.planner.code.CodeGeneratorException; import edu.isi.pegasus.planner.code.GridStart; import edu.isi.pegasus.planner.code.POSTScript; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.code.generator.Abstract; import edu.isi.pegasus.planner.code.CodeGeneratorFactory; import edu.isi.pegasus.planner.code.generator.Braindump; import edu.isi.pegasus.planner.code.generator.NetloggerJobMapper; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.CondorVersion; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.DagInfo; import edu.isi.pegasus.planner.classes.PCRelation; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.DAGJob; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.Boolean; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.code.generator.MonitordNotify; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.namespace.Globus; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.namespace.ENV; import edu.isi.pegasus.planner.partitioner.graph.Adapter; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; import org.griphyn.vdl.euryale.VTorInUseException; import java.io.BufferedWriter; import java.io.File; import java.io.Writer; import java.io.FileWriter; import java.io.PrintWriter; import java.io.IOException; import java.io.BufferedReader; import java.io.InputStreamReader; import java.io.InputStream; import java.util.ArrayList; import java.util.Enumeration; import java.util.Iterator; import java.util.Map; import java.util.StringTokenizer; import java.util.Vector; import java.util.Collection; import java.util.List; import java.util.Properties; import java.util.HashMap; /** * This class generates the condor submit files for the DAG which has to * be submitted to the Condor DagMan. * * @author Gaurang Mehta * @author Karan Vahi * @version $Revision: 4782 $ */ public class CondorGenerator extends Abstract { /** * The default category for the sub dax jobs. */ public static final String DEFAULT_SUBDAG_CATEGORY_KEY = "subwf"; /** * The nice separator, define once, use often. */ public static final String mSeparator = "######################################################################"; /** * The namespace to use for condor dagman. */ public static final String CONDOR_DAGMAN_NAMESPACE = "condor"; /** * The logical name with which to query the transformation catalog for the * condor_dagman executable, that ends up running the mini dag as one * job. 
*/ public static final String CONDOR_DAGMAN_LOGICAL_NAME = "dagman"; /** * The prefix for DAGMan specific properties */ public static final String DAGMAN_PROPERTIES_PREFIX = "dagman."; /** * The default priority key associated with the stagein jobs. */ public static final int DEFAULT_STAGE_IN_PRIORITY_KEY = 700; /** * The default priority key associated with the inter site transfer jobs. */ public static final int DEFAULT_INTER_SITE_PRIORITY_KEY = 700; /** * The default priority key associated with the create dir jobs. */ public static final int DEFAULT_CREATE_DIR_PRIORITY_KEY = 800; /** * The default priority key associated with chmod jobs. */ public static final int DEFAULT_CHMOD_PRIORITY_KEY = 800; /** * The default priority key associated with the stage out jobs. */ public static final int DEFAULT_STAGE_OUT_PRIORITY_KEY = 900; /** * The default priority key associated with the replica registration jobs. */ public static final int DEFAULT_REPLICA_REG_PRIORITY_KEY = 900; /** * The default priority key associated with the cleanup jobs. */ public static final int DEFAULT_CLEANUP_PRIORITY_KEY = 1000; /** * Handle to the Transformation Catalog. */ protected TransformationCatalog mTCHandle; /** * Handle to the pool provider. */ //private PoolInfoProvider mPoolHandle; /** * The handle to the site catalog store. */ private SiteStore mSiteStore; /** * Specifies the implementing class for the pool interface. Contains * the name of the class that implements the pool interface the user has * asked at runtime. */ protected String mPoolClass; /** * The file handle to the .dag file. A part of the dag file is printed * as we write the submit files, to insert the appropriate postscripts * for handling exit codes. */ protected PrintWriter mDagWriter; /** * The name of the log file in the /tmp directory */ protected String mTempLogFile; /** * A boolean indicating whether the files have been generated or not. */ protected boolean mDone; /** * The workflow for which the code has to be generated. */ protected ADag mConcreteWorkflow; /** * Handle to the Style factory, that is used for this workflow. */ protected CondorStyleFactory mStyleFactory; /** * The handle to the GridStart Factory. */ protected GridStartFactory mGridStartFactory; /** * A boolean indicating whether grid start has been initialized or not. */ protected boolean mInitializeGridStart; /** * The long value of condor version. */ private long mCondorVersion; /** * Boolean indicating whether to assign job priorities or not. */ private boolean mAssignDefaultJobPriorities; /** * The default constructor. */ public CondorGenerator(){ super(); mInitializeGridStart = true; mStyleFactory = new CondorStyleFactory(); mGridStartFactory = new GridStartFactory(); } /** * Initializes the Code Generator implementation. Initializes the various * writers. * * @param bag the bag of initialization objects. * * @throws CodeGeneratorException in case of any error occuring code generation. 
*/ public void initialize( PegasusBag bag ) throws CodeGeneratorException{ super.initialize( bag ); //create the base directory recovery File wdir = new File(mSubmitFileDir); wdir.mkdirs(); mTCHandle = bag.getHandleToTransformationCatalog(); mSiteStore = bag.getHandleToSiteStore(); mAssignDefaultJobPriorities = mProps.assignDefaultJobPriorities(); //instantiate and intialize the style factory mStyleFactory.initialize( bag ); //determine the condor version mCondorVersion = CondorVersion.getInstance( mLogger ).numericValue(); if( mCondorVersion == -1 ){ mLogger.log( "Unable to determine the version of condor " , LogManager.WARNING_MESSAGE_LEVEL ); } else{ mLogger.log( "Condor Version detected is " + mCondorVersion , LogManager.DEBUG_MESSAGE_LEVEL ); } } /** * Generates the code for the concrete workflow in Condor DAGMAN and CondorG * input format. Returns only the File object for the DAG file that is written * out. * * @param dag the concrete workflow. * * @return the Collection of File objects for the files written * out. * * @throws CodeGeneratorException in case of any error occuring code generation. */ public Collection generateCode( ADag dag ) throws CodeGeneratorException{ DagInfo ndi = dag.dagInfo; Vector vSubInfo = dag.vJobSubInfos; if ( mInitializeGridStart ){ mConcreteWorkflow = dag; mGridStartFactory.initialize( mBag, dag ); mInitializeGridStart = false; } CodeGenerator storkGenerator = CodeGeneratorFactory.loadInstance( mBag, "Stork" ); String className = this.getClass().getName(); String dagFileName = getDAGFilename( dag, ".dag" ); mDone = false; File dagFile = null; Collection result = new ArrayList(1); if (ndi.dagJobs.isEmpty()) { //call the callout before returns concreteDagEmpty( dagFileName, dag ); return result ; } else { //initialize the file handle to the dag //file and print it's header dagFile = initializeDagFileWriter( dagFileName, ndi ); result.add( dagFile ); //write out any category based dagman knobs to the dagman file printDagString( this.getCategoryDAGManKnobs( mProps ) ); } if( mProps.symlinkCommonLog() ){ //figure out the logs directory for condor logs String dir = mProps.getSubmitLogsDirectory(); File directory = null; if( dir != null ){ directory = new File( dir ); //try to create this directory if it does not exist if( !directory.exists() && !directory.mkdirs() ){ //directory does not exist and cannot be created directory = null; } } mLogger.log( "Condor logs directory to be used is " + directory, LogManager.DEBUG_MESSAGE_LEVEL ); //Create a file in the submit logs directory for the log //and symlink it to the submit directory. 
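/*
 * A minimal standalone sketch of the arrangement the block below sets up
 * (directory and workflow names here are hypothetical, not taken from
 * this source): the real common Condor log is created as a temp file in
 * a logs directory -- typically on local disk -- and the submit
 * directory later receives only a symlink to it (see
 * generateLogFileSymlink() further down in this class).
 *
 *     File logsDir = new File( "/tmp/condor-logs" );   // assumed local-disk directory
 *     File realLog = File.createTempFile( "blackdiamond-0", ".log", logsDir );
 *     // symlink the submit-directory log name to the real file
 *     Process ln = Runtime.getRuntime().exec( new String[]{ "ln", "-s",
 *                      realLog.getAbsolutePath(),
 *                      "/path/to/submitdir/blackdiamond-0.log" } );
 *     ln.waitFor();   // InterruptedException must be handled in real code
 */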
try{ File f = File.createTempFile( dag.dagInfo.nameOfADag + "-" + dag.dagInfo.index,".log", directory ); mTempLogFile=f.getAbsolutePath(); } catch (IOException ioe) { mLogger.log( "Error while creating an empty log file in " + "the local temp directory " + ioe.getMessage(), LogManager.ERROR_MESSAGE_LEVEL); } } mLogger.logEventStart( LoggingKeys.EVENT_PEGASUS_CODE_GENERATION, LoggingKeys.DAX_ID, dag.getAbstractWorkflowName(), LogManager.DEBUG_MESSAGE_LEVEL); //convert the dax to a graph representation and walk it //in a top down manner Graph workflow = Adapter.convert( dag ); SUBDAXGenerator subdaxGen = new SUBDAXGenerator(); subdaxGen.initialize( mBag, dag, workflow, mDagWriter ); for( Iterator it = workflow.iterator(); it.hasNext(); ){ GraphNode node = ( GraphNode )it.next(); Job job = (Job)node.getContent(); //only apply priority if job is not associated with a priority //beforehand and assign priorities by default is true if( !job.condorVariables.containsKey( Condor.PRIORITY_KEY ) && this.mAssignDefaultJobPriorities ){ int priority = getJobPriority( job, node.getDepth() ); //apply a priority to the job overwriting any preexisting priority job.condorVariables.construct( Condor.PRIORITY_KEY, new Integer(priority).toString() ); //log to debug StringBuffer sb = new StringBuffer(); sb.append( "Applying priority of " ).append( priority ). append( " to " ).append( job.getID() ); mLogger.log( sb.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); } //write out the submit file for each job //in addition makes the corresponding //entries in the .dag file corresponding //to the job and it's postscript if ( job.getSiteHandle().equals( "stork" ) ) { //write the job information in the .dag file StringBuffer dagString = new StringBuffer(); dagString.append( "DATA " ).append( job.getName() ).append( " " ); dagString.append( job.getName() ).append( ".stork" ); printDagString( dagString.toString() ); storkGenerator.generateCode( dag, job ); } //for dag jobs we dont need to generate submit file else if( job instanceof DAGJob ){ //SUBDAG EXTERNAL B inner.dag DAGJob djob = ( DAGJob )job; //djob.dagmanVariables.checkKeyInNS( Dagman.SUBDAG_EXTERNAL_KEY, // djob.getDAGFile() ); StringBuffer sb = new StringBuffer(); sb.append( Dagman.SUBDAG_EXTERNAL_KEY ).append( " " ).append( job.getName() ). append( " " ).append( djob.getDAGFile() ); //check if dag needs to run in a specific directory String dagDir = djob.getDirectory(); if( dagDir != null){ sb.append( " " ).append( Dagman.DIRECTORY_EXTERNAL_KEY ). append( " " ).append( dagDir ); } //if no category is associated with the job, add a default //category if( !job.dagmanVariables.containsKey( Dagman.CATEGORY_KEY ) ){ job.dagmanVariables.construct( Dagman.CATEGORY_KEY, DEFAULT_SUBDAG_CATEGORY_KEY ); } printDagString( sb.toString() ); printDagString( job.dagmanVariables.toString( job.getName()) ); } else{ //normal jobs and subdax jobs if( job.typeRecursive() ){ Job daxJob = job; job = subdaxGen.generateCode( job ); //set the arguments to the DAX job to the ones //in the generated DAGJob to ensure stampede event //is generated correctly daxJob.setRemoteExecutable( job.getRemoteExecutable() ); daxJob.setArguments( job.getArguments() ); } if( job != null ){ //the submit file for the job needs to be written out //write out a condor submit file generateCode( dag, job ); } //write out all the dagman profile variables associated //with the job to the .dag file. 
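/*
 * An illustrative .dag fragment of the entries this loop emits per job
 * (job name, post script path and retry count are hypothetical values):
 *
 *     JOB preprocess_ID000001 preprocess_ID000001.sub
 *     SCRIPT POST preprocess_ID000001 /path/to/postscript
 *     RETRY preprocess_ID000001 3
 *
 * The PARENT ... CHILD ... relation lines are appended later by
 * writeDagFileTail().
 */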
printDagString( job.dagmanVariables.toString( job.getName()) );
        }
        mLogger.log("Written Submit file : " + getFileBaseName(job),
                    LogManager.DEBUG_MESSAGE_LEVEL);
    }
    mLogger.logEventCompletion( LogManager.DEBUG_MESSAGE_LEVEL );

    //writing the tail of .dag file
    //that contains the relation pairs
    this.writeDagFileTail( ndi );
    mLogger.log("Written Dag File : " + dagFileName,
                LogManager.DEBUG_MESSAGE_LEVEL);

    //symlink the log file to a file in the temp directory if possible
    if( mProps.symlinkCommonLog() ){
        this.generateLogFileSymlink( this.getCondorLogInTmpDirectory(),
                                     this.getCondorLogInSubmitDirectory( dag ) );
    }

    //write out the DOT file
    mLogger.log( "Writing out the DOT file ", LogManager.DEBUG_MESSAGE_LEVEL );
    this.writeDOTFile( getDAGFilename( dag, ".dot"), dag );

    /*
    //we no longer write out the job.map file
    //write out the netlogger file
    mLogger.log( "Written out job.map file", LogManager.DEBUG_MESSAGE_LEVEL );
    this.writeJobMapFile( getDAGFilename( dag, ".job.map"), dag );
    */

    //write out the notifications input file
    this.writeOutNotifications( dag );

    //the dax replica store
    this.writeOutDAXReplicaStore( dag );

    //write out the stampede events file
    this.writeOutStampedeEvents( dag );

    //write out the metrics file
    this.writeOutWorkflowMetrics(dag);

    //write out the braindump file
    this.writeOutBraindump( dag );

    //we are done
    mDone = true;

    return result;
}

/**
 * Generates the code (condor submit file) for a single job.
 *
 * @param dag the dag of which the job is a part.
 * @param job the Job object holding the information about
 *            that particular job.
 *
 * @throws CodeGeneratorException in case of any error occurring during code generation.
 */
public void generateCode( ADag dag, Job job ) throws CodeGeneratorException{
    String dagname  = dag.dagInfo.nameOfADag;
    String dagindex = dag.dagInfo.index;
    String dagcount = dag.dagInfo.count;
    String subfilename = this.getFileBaseName( job );
    String envStr = null;

    //initialize GridStart if required.
    if ( mInitializeGridStart ){
        mConcreteWorkflow = dag;
        mGridStartFactory.initialize( mBag, dag );
        mInitializeGridStart = false;
    }

    //for recursive dax's trigger partition and plan and exit.
    //Commented out to be replaced with SUBDAG rendering.
//Karan September 10th 2009 /* if ( job.typeRecursive() ){ String args = job.getArguments(); PartitionAndPlan pap = new PartitionAndPlan(); pap.initialize( mBag ); Collection files = pap.doPartitionAndPlan( args ); File dagFile = null; for( Iterator it = files.iterator(); it.hasNext(); ){ File f = (File) it.next(); if ( f.getName().endsWith( ".dag" ) ){ dagFile = f; break; } } mLogger.log( "The DAG for the recursive job created is " + dagFile, LogManager.DEBUG_MESSAGE_LEVEL ); //translate the current job into DAGMan submit file Job dagCondorJob = this.constructDAGJob( job.getName(), dagFile.getParent(), dagFile.getName() ); //write out the dagCondorJob for it mLogger.log( "Generating submit file for DAG " , LogManager.DEBUG_MESSAGE_LEVEL ); this.generateCode( dag, dagCondorJob ); //setting the dagman variables of dagCondorJob to original job //so that the right information is printed in the .dag file job.dagmanVariables = dagCondorJob.dagmanVariables; return; } */ // intialize the print stream to the file PrintWriter writer = null; try{ writer = getWriter(job); }catch(IOException ioe ){ throw new CodeGeneratorException( "IOException while writing submit file for job " + job.getName(), ioe); } //handle the globus rsl parameters //for the job from various resources handleGlobusRSLForJob( job ); writer.println(this.mSeparator); writer.println("# PEGASUS WMS GENERATED SUBMIT FILE"); writer.println("# DAG : " + dagname + ", Index = " + dagindex + ", Count = " + dagcount); writer.println("# SUBMIT FILE NAME : " + subfilename); writer.println(this.mSeparator); //figure out the style to apply for a job applyStyle( job, writer ); // handling of log file is now done through condor profile //bwSubmit.println("log = " + dagname + "_" + dagindex + ".log"); // handle environment settings handleEnvVarForJob( job ); envStr = job.envVariables.toString(); if (envStr != null) { writer.print( job.envVariables ); } // handle Condor variables handleCondorVarForJob( job ); writer.print( job.condorVariables ); //write the classad's that have the information regarding //which Pegasus super node is a node part of, in addition to the //release version of Chimera/Pegasus, the jobClass and the //workflow id ClassADSGenerator.generate( writer, dag, job ); // DONE writer.println("queue"); writer.println(this.mSeparator); writer.println("# END OF SUBMIT FILE"); writer.println(this.mSeparator); // close the print stream to the file (flush) writer.close(); return; } /** * Starts monitoring of the workflow by invoking a workflow monitor daemon * tailstatd. The tailstatd is picked up from the default path of * $PEGASUS_HOME/bin/tailstatd. * * @return boolean indicating whether could successfully start the monitor * daemon or not. * * @throws VTorInUseException in case the method is called before the * submit files have been generated. */ public boolean startMonitoring() throws VTorInUseException{ //do nothing. //earlier the braindump file was generated when this function //was called. return true; } /** * Resets the Code Generator implementation. * * @throws CodeGeneratorException in case of any error occuring code generation. */ public void reset( )throws CodeGeneratorException{ super.reset(); mDone = false; mInitializeGridStart = true; } /** * Constructs a map with the numbers/values to be passed in the RSL handle * for certain pools. The user ends up specifying these through the * properties file. The value of the property is of the form * poolname1=value,poolname2=value.... 
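 *
 * For example (illustrative property value): "isi=2048,sdsc=1024" is
 * parsed into the map { isi -> 2048, sdsc -> 1024 }, with whitespace
 * around each key and value trimmed.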
* * @param propValue the value of the property got from the properties file. * * @return Map */ private Map constructMap(String propValue) { Map map = new java.util.TreeMap(); if (propValue != null) { StringTokenizer st = new StringTokenizer(propValue, ","); while (st.hasMoreTokens()) { String raw = st.nextToken(); int pos = raw.indexOf('='); if (pos > 0) { map.put(raw.substring(0, pos).trim(), raw.substring(pos + 1).trim()); } } } return map; } /** * Constructs a job that plans and submits the partitioned workflow, * referred to by a Partition. The main job itself is a condor dagman job * that submits the concrete workflow. The concrete workflow is generated by * running the planner in the prescript for the job. * * @param name the name to be assigned to the job. * @param directory the submit directory where the submit files for the * partition should reside. this is where the dag file is * created * @param dagBasename the basename of the dag file created. * * @return the constructed DAG job. */ protected Job constructDAGJob( String name, String directory, String dagBasename){ //for time being use the old functions. Job job = new Job(); //set the logical transformation job.setTransformation( CONDOR_DAGMAN_NAMESPACE, CONDOR_DAGMAN_LOGICAL_NAME, null); //set the logical derivation attributes of the job. job.setDerivation( CONDOR_DAGMAN_NAMESPACE, CONDOR_DAGMAN_LOGICAL_NAME, null ); //always runs on the submit host job.setSiteHandle( "local" ); //set the partition id only as the unique id //for the time being. // job.setName(partition.getID()); //set the logical id for the job same as the partition id. job.setName( name ); List entries; TransformationCatalogEntry entry = null; //get the path to condor dagman try{ //try to construct the path from the environment entry = constructTCEntryFromEnvironment( ); //try to construct from the TC if( entry == null ){ entries = mTCHandle.lookup(job.namespace, job.logicalName, job.version, job.getSiteHandle(), TCType.INSTALLED); entry = (entries == null) ? defaultTCEntry( "local" ) ://construct from site catalog //Gaurang assures that if no record is found then //TC Mechanism returns null (TransformationCatalogEntry) entries.get(0); } } catch(Exception e){ throw new RuntimeException( "ERROR: While accessing the Transformation Catalog",e); } if(entry == null){ //throw appropriate error throw new RuntimeException("ERROR: Entry not found in tc for job " + job.getCompleteTCName() + " on site " + job.getSiteHandle()); } //set the path to the executable and environment string job.setRemoteExecutable( entry.getPhysicalTransformation() ); //the job itself is the main job of the super node //construct the classad specific information job.jobID = job.getName(); job.jobClass = Job.COMPUTE_JOB; //directory where all the dagman related files for the nested dagman //reside. Same as the directory passed as an input parameter String dir = directory; //make the initial dir point to the submit file dir for the partition //we can do this as we are running this job both on local host, and scheduler //universe. Hence, no issues of shared filesystem or anything. job.condorVariables.construct( "initialdir", dir ); //construct the argument string, with all the dagman files //being generated in the partition directory. Using basenames as //initialdir has been specified for the job. StringBuffer sb = new StringBuffer(); sb.append(" -f -l . -Debug 3"). append(" -Lockfile ").append( getBasename( dagBasename, ".lock") ). 
append(" -Dag ").append( dagBasename ); //append(" -Rescue ").append( getBasename( dagBasename, ".rescue")). //specify condor log for condor version less than 7.1.2 if( mCondorVersion < CondorVersion.v_7_1_2 ){ sb.append(" -Condorlog ").append( getBasename( dagBasename, ".log")); } //allow for version mismatch as after 7.1.3 condor does tight //checking on dag.condor.sub file and the condor version used if( mCondorVersion >= CondorVersion.v_7_1_3 ){ sb.append( " -AllowVersionMismatch " ); } //for condor 7.1.0 sb.append( " -AutoRescue 1 -DoRescueFrom 0 "); //pass any dagman knobs that were specified in properties file // sb.append( this.mDAGManKnobs ); //put in the environment variables that are required job.envVariables.construct("_CONDOR_DAGMAN_LOG", directory + File.separator + dagBasename + ".dagman.out" ); job.envVariables.construct("_CONDOR_MAX_DAGMAN_LOG","0"); //set the arguments for the job job.setArguments(sb.toString()); //the environment need to be propogated for exitcode to be picked up job.condorVariables.construct("getenv","TRUE"); job.condorVariables.construct("remove_kill_sig","SIGUSR1"); //the log file for condor dagman for the dagman also needs to be created //it is different from the log file that is shared by jobs of //the partition. That is referred to by Condorlog // keep the log file common for all jobs and dagman albeit without // dag.dagman.log suffix // job.condorVariables.construct("log", getAbsolutePath( partition, dir,".dag.dagman.log")); // String dagName = mMegaDAG.dagInfo.nameOfADag; // String dagIndex= mMegaDAG.dagInfo.index; // job.condorVariables.construct("log", dir + mSeparator + // dagName + "_" + dagIndex + ".log"); //the job needs to be explicitly launched in //scheduler universe instead of local universe job.condorVariables.construct( Condor.UNIVERSE_KEY, Condor.SCHEDULER_UNIVERSE ); //add any notifications specified in the transformation //catalog for the job. JIRA PM-391 job.addNotifications( entry ); //incorporate profiles from the transformation catalog //and properties for the time being. Not from the site catalog. //the profile information from the transformation //catalog needs to be assimilated into the job //overriding the one from pool catalog. job.updateProfiles( entry ); //the profile information from the properties file //is assimilated overidding the one from transformation //catalog. job.updateProfiles(mProps); //we do not want the job to be launched via kickstart //Fix for Pegasus bug number 143 //http://bugzilla.globus.org/vds/show_bug.cgi?id=143 job.vdsNS.construct( Pegasus.GRIDSTART_KEY, GridStartFactory.GRIDSTART_SHORT_NAMES[GridStartFactory.NO_GRIDSTART_INDEX] ); return job; } /** * Returns a default TC entry to be used in case entry is not found in the * transformation catalog. * * @param site the site for which the default entry is required. * * * @return the default entry. */ private TransformationCatalogEntry defaultTCEntry( String site ){ //not implemented as we dont have handle to site catalog in this class return null; } /** * Returns a tranformation catalog entry object constructed from the environment * * An entry is constructed if either of the following environment variables * are defined * 1) CONDOR_HOME * 2) CONDOR_LOCATION * * CONDOR_HOME takes precedence over CONDOR_LOCATION * * * @return the constructed entry else null. 
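     *
     * For example (illustrative): with CONDOR_HOME=/opt/condor set in the
     * environment, the returned entry's physical transformation resolves to
     * /opt/condor/bin/condor_dagman.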
*/ private TransformationCatalogEntry constructTCEntryFromEnvironment( ){ //construct environment profiles Map m = System.getenv(); ENV env = new ENV(); String key = "CONDOR_HOME"; if( m.containsKey( key ) ){ env.construct( key, m.get( key ) ); } key = "CONDOR_LOCATION"; if( m.containsKey( key ) ){ env.construct( key, m.get( key ) ); } return constructTCEntryFromEnvProfiles( env ); } /** * Returns a tranformation catalog entry object constructed from the environment * * An entry is constructed if either of the following environment variables * are defined * 1) CONDOR_HOME * 2) CONDOR_LOCATION * * CONDOR_HOME takes precedence over CONDOR_LOCATION * * @param env the environment profiles. * * * @return the entry constructed else null if environment variables not defined. */ private TransformationCatalogEntry constructTCEntryFromEnvProfiles( ENV env ) { TransformationCatalogEntry entry = null; //check if either CONDOR_HOME or CONDOR_LOCATION is defined String key = null; if( env.containsKey( "CONDOR_HOME") ){ key = "CONDOR_HOME"; } else if( env.containsKey( "CONDOR_LOCATION") ){ key = "CONDOR_LOCATION"; } if( key == null ){ //environment variables are not defined. return entry; } mLogger.log( "Constructing path to dagman on basis of env variable " + key, LogManager.DEBUG_MESSAGE_LEVEL ); entry = new TransformationCatalogEntry(); entry.setLogicalTransformation( CONDOR_DAGMAN_NAMESPACE, CONDOR_DAGMAN_LOGICAL_NAME, null ); entry.setType( TCType.INSTALLED ); entry.setResourceId( "local" ); //construct path to condor dagman StringBuffer path = new StringBuffer(); path.append( env.get( key ) ).append( File.separator ). append( "bin" ).append( File.separator). append( "condor_dagman" ); entry.setPhysicalTransformation( path.toString() ); return entry; } /** * A covenience method to construct the basename. * * @param prefix the first half of basename * @param suffix the latter half of basename * * @return basename */ protected String getBasename( String prefix, String suffix ){ StringBuffer sb = new StringBuffer(); sb.append( prefix ).append( suffix ); return sb.toString(); } /** * Initializes the file handler to the dag file and writes the header to it. * * @param filename basename of dag file to be written. * @param dinfo object containing daginfo of type DagInfo . * * @return the File object for the DAG file. * * @throws CodeGeneratorException in case of any error occuring code generation. */ protected File initializeDagFileWriter(String filename, DagInfo dinfo) throws CodeGeneratorException{ // initialize file handler filename = mSubmitFileDir + "/" + filename; File dag = new File(filename); try { //initialize the print stream to the file mDagWriter = new PrintWriter(new BufferedWriter(new FileWriter(dag))); printDagString(this.mSeparator); printDagString("# PEGASUS WMS GENERATED DAG FILE"); printDagString("# DAG " + dinfo.nameOfADag); printDagString("# Index = " + dinfo.index + ", Count = " + dinfo.count); printDagString(this.mSeparator); } catch (Exception e) { throw new CodeGeneratorException( "While writing to DAG FILE " + filename, e); } return dag; } /** * Write out the DAGMan knobs for each category the user mentions in * the properties. 
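     *
     * The header printed at the top of the .dag file looks like this
     * (workflow name, index and count are illustrative):
     *
     *   ######################################################################
     *   # PEGASUS WMS GENERATED DAG FILE
     *   # DAG blackdiamond
     *   # Index = 0, Count = 1
     *   ######################################################################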
* * @param properties the pegasus properties * * @return the String */ protected String getCategoryDAGManKnobs( PegasusProperties properties ){ //get all dagman properties Properties dagman = properties.matchingSubset( DAGMAN_PROPERTIES_PREFIX, false ); StringBuffer result = new StringBuffer(); String newLine = System.getProperty( "line.separator", "\r\n" ); //iterate through all the properties for( Iterator it = dagman.keySet().iterator(); it.hasNext(); ){ String name = ( String ) it.next();//like bigjob.maxjobs //System.out.println( name ); //figure out whether it is a category property or not //really a short cut way of doing it //if( (dotIndex = name.indexOf( "." )) != -1 && dotIndex != name.length() - 1 ){ if( Dagman.categoryRelatedKey( name.toUpperCase() ) ){ //we have a category and a key int dotIndex = name.indexOf( "." ); String category = name.substring( 0, dotIndex );//like bigjob String knob = name.substring( dotIndex + 1 );//like maxjobs String value = dagman.getProperty( name );//the value of the property in the properties //System.out.println( category + " " + knob + " " + value); result.append( knob.toUpperCase( ) ).append( " " ).append( category ). append( " " ).append( value ).append( newLine ); } } return result.toString(); } /** * Writes out the DOT file in the submit directory. * * @param filename basename of dot file to be written . * @param dag the ADag object. * * @throws CodeGeneratorException in case of any error occuring code generation. */ protected void writeDOTFile( String filename, ADag dag ) throws CodeGeneratorException{ // initialize file handler filename = mSubmitFileDir + File.separator + filename; try { Writer stream = new PrintWriter( new BufferedWriter ( new FileWriter( filename ) ) ); dag.toDOT( stream, null ); stream.close(); } catch (Exception e) { throw new CodeGeneratorException( "While writing to DOT FILE " + filename, e); } } /** * Writes out the job map file in the submit directory. * * @param filename basename of dot file to be written . * @param dag the ADag object. * * @throws CodeGeneratorException in case of any error occuring code generation. */ protected void writeJobMapFile( String filename, ADag dag ) throws CodeGeneratorException{ // initialize file handler filename = mSubmitFileDir + File.separator + filename; try { Writer stream = new PrintWriter( new BufferedWriter ( new FileWriter( filename ) ) ); NetloggerJobMapper njm = new NetloggerJobMapper( mLogger ); njm.writeOutMappings( stream, dag ); stream.close(); } catch (Exception e) { throw new CodeGeneratorException( "While writing to DOT FILE " + filename, e); } } /** * It writes the relations making up the DAG in the dag file and and closes * the file handle to it. * * @param dinfo object containing daginfo of type DagInfo. * * @throws CodeGeneratorException */ protected void writeDagFileTail(DagInfo dinfo) throws CodeGeneratorException{ try { // read the contents from the Daginfo object and //print out the parent child relations. for (Enumeration dagrelationsenum = dinfo.relations.elements(); dagrelationsenum.hasMoreElements(); ) { PCRelation pcrl = (PCRelation) dagrelationsenum.nextElement(); printDagString("PARENT " + pcrl.parent + " CHILD " + pcrl.child); } printDagString(this.mSeparator); printDagString("# End of DAG"); printDagString(this.mSeparator); // close the print stream to the file mDagWriter.close(); } catch (Exception e) { throw new CodeGeneratorException( "Error Writing to Dag file " + e.getMessage(), e ); } } /** * Writes a string to the dag file. 
When calling this function the * file handle to file is already initialized. * * @param str The String to be printed to the dag file. * * @throws CodeGeneratorException */ protected void printDagString(String str) throws CodeGeneratorException{ try { mDagWriter.println(str); } catch (Exception e) { throw new CodeGeneratorException( "Writing to Dag file " + e.getMessage(), e ); } } /** * Returns the name of Condor log file in a tmp directory that is created * if generation of symlink for condor logs is turned on. * * @return the name of the log file. */ protected String getCondorLogInTmpDirectory(){ return this.mTempLogFile; } /** * Returns the path to the condor log file in the submit directory. * It can be a symlink. * * @param dag the concrete workflow. * * @return the path to condor log file in the submit directory. */ protected String getCondorLogInSubmitDirectory( ){ return this.getCondorLogInSubmitDirectory( this.mConcreteWorkflow ); } /** * Returns the path to the condor log file in the submit directory. * It can be a symlink. * * @param dag the concrete workflow. * * @return the path to condor log file in the submit directory. */ protected String getCondorLogInSubmitDirectory( ADag dag ){ StringBuffer sb = new StringBuffer(); sb.append(this.mSubmitFileDir) .append(File.separator); String bprefix = mPOptions.getBasenamePrefix(); if( bprefix != null){ //the prefix is not null using it sb.append(bprefix); } else{ //generate the prefix from the name of the dag sb.append(dag.dagInfo.nameOfADag).append("-"). append(dag.dagInfo.index); } //append the suffix sb.append(".log"); return sb.toString(); } /** * Returns a Map containing additional braindump entries that are specific * to a Code Generator. * * @param workflow the executable workflow * * @return Map containing entries for dag and condor_log */ public Map getAdditionalBraindumpEntries( ADag workflow ) { Map entries = new HashMap(); entries.put( Braindump.GENERATOR_TYPE_KEY, "dag" ); entries.put( "dag", this.getDAGFilename( workflow, ".dag") ); entries.put( "condor_log", new File(this.getCondorLogInSubmitDirectory( workflow )).getName() ); entries.put( "notify", this.getDAGFilename( workflow, MonitordNotify.NOTIFICATIONS_FILE_SUFFIX ) ); return entries; } /** * This method generates a symlink to the actual log file written in the * local temp directory. The symlink is placed in the dag directory. * * @param logFile the full path to the log file. * @param symlink the full path to the symlink. 
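     *
     * (An aside, assuming a Java 7+ runtime: the "ln -s" fork below could be
     * replaced with
     * java.nio.file.Files.createSymbolicLink( Paths.get(symlink), Paths.get(logFile) );
     * the exec-based approach here predates that API.)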
* * @return boolean indicating if creation of symlink was successful or not */ protected boolean generateLogFileSymlink(String logFile, String symlink) { try{ Runtime rt = Runtime.getRuntime(); String command = "ln -s " +logFile + " " + symlink; mLogger.log("Creating symlink to the log file in the local temp directory\n" + command ,LogManager.DEBUG_MESSAGE_LEVEL); Process p = rt.exec(command,null); // set up to read subprogram output InputStream is = p.getInputStream(); InputStreamReader isr = new InputStreamReader(is); BufferedReader br = new BufferedReader(isr); // set up to read subprogram error InputStream er = p.getErrorStream(); InputStreamReader err = new InputStreamReader(er); BufferedReader ebr = new BufferedReader(err); // read output from subprogram // and display it String s,se=null; while ( ((s = br.readLine()) != null) || ((se = ebr.readLine()) != null ) ) { if(s!=null){ mLogger.log(s,LogManager.DEBUG_MESSAGE_LEVEL); } else { mLogger.log(se,LogManager.ERROR_MESSAGE_LEVEL ); } } br.close(); return true; } catch(Exception ex){ mLogger.log("Unable to create symlink to the log file" , ex, LogManager.ERROR_MESSAGE_LEVEL); return false; } } /** * Returns the basename of the file, that contains the output of the * dagman while running the dag generated for the workflow. * The basename of the .out file is dependant on whether the * basename prefix has been specified at runtime or not by command line * options. * * @param dag the DAG containing the concrete workflow * * @return the name of the dagfile. */ protected String getDAGMANOutFilename( ADag dag ){ //constructing the name of the dagfile StringBuffer sb = new StringBuffer(); String bprefix = mPOptions.getBasenamePrefix(); if( bprefix != null){ //the prefix is not null using it sb.append(bprefix); } else{ //generate the prefix from the name of the dag sb.append(dag.dagInfo.nameOfADag).append("-"). append(dag.dagInfo.index); } //append the suffix sb.append(".dag.dagman.out"); return sb.toString(); } /** * A callout method that dictates what needs to be done in case the concrete * plan that is generated is empty. * It just logs a message saying the plan is empty. * * @param filename Filename of the dag to be written of type String. * @param dag the concrete dag that is empty. * * @throws CodeGeneratorException in case of any error occuring code generation. */ protected void concreteDagEmpty(String filename, ADag dag) throws CodeGeneratorException{ StringBuffer sb = new StringBuffer(); sb.append( "The concrete plan generated contains no nodes. "). append( "\nIt seems that the output files are already at the output pool" ); mLogger.log( sb.toString(), LogManager.INFO_MESSAGE_LEVEL ); } /** * It updates/adds the condor variables that are got through the Dax with * the values specified in the properties file, pool config file or adds some * variables internally. In case of clashes of Condor variables from * various sources the following order is followed,property file, pool config * file and then dax. * * @param job The Job object containing the information about the job. 
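     *
     * In summary, the defaults applied below when the job does not already
     * carry a value are: notification = NEVER, transfer_executable = false,
     * copy_to_spool = false, and log pointing at the common Condor log in
     * the submit directory.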
* * * @throws CodeGeneratorException */ protected void handleCondorVarForJob(Job job) throws CodeGeneratorException{ Condor cvar = job.condorVariables; String key = null; String value = null; //put in the classad expression for the values //construct the periodic_release and periodic_remove //values only if their final computed values are > 0 this.populatePeriodicReleaseAndRemoveInJob( job ); // have to change this later maybe key = "notification"; value = (String) cvar.removeKey(key); if (value == null) { cvar.construct(key, "NEVER"); } else { cvar.construct(key, value); //check if transfer_executable was set to //true by the user at runtime } key = "transfer_executable"; if (cvar.containsKey(key)) { //we do not put in the default value } else { // we assume pre-staged executables through the GVDS cvar.construct(key, "false"); } key = "copy_to_spool"; if (cvar.containsKey(key)) { //we do not put in the default value } else // no sense copying files to spool for globus jobs // and is mandatory for the archstart stuff to work // for local jobs cvar.construct(key, "false"); //construct the log file for the submit job key = "log"; if(!cvar.containsKey(key)){ //we put in the default value //cvar.construct("log",dagname + "_" + dagindex + ".log"); cvar.construct("log",this.getCondorLogInSubmitDirectory( ) ); } //also add the information as for the submit event trigger //for mei retry mechanism cvar.construct("submit_event_user_notes","pool:" + job.executionPool); //correctly quote the arguments according to //Condor Quoting Rules. // String args = (String) job.condorVariables.get("arguments"); String args = job.getArguments(); //put the arguments as appropriate condor profile if( args != null && args.length() > 0){ if( mProps.useCondorQuotingForArguments() && args != null){ try { mLogger.log("Unquoted arguments are " + args, LogManager.DEBUG_MESSAGE_LEVEL); //insert a comment for the old args //job.condorVariables.construct("#arguments",args); args = CondorQuoteParser.quote(args, true); job.condorVariables.construct( Condor.ARGUMENTS_KEY, args); mLogger.log("Quoted arguments are " + args, LogManager.DEBUG_MESSAGE_LEVEL); } catch (CondorQuoteParserException e) { throw new RuntimeException("CondorQuoting Problem " + e.getMessage()); } } else{ //add without quoting job.condorVariables.construct( Condor.ARGUMENTS_KEY, args); } } //set the remote executable as condor executable job.condorVariables.construct( Condor.EXECUTABLE_KEY, job.getRemoteExecutable() ); return; } /** * Populates the periodic release and remove values in the job. * If an integer value is specified it is used to construct the default * expression, else the value specified in the profiles is used as is. * * The default expression for periodic_release and periodic_remove is *
     *   periodic_release = (NumSystemHolds <= releasevalue)
     *   periodic_remove = (NumSystemHolds > removevalue)
     * 
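     *
     *   e.g. with the default profile value of 3 for both keys this yields
     *   periodic_release = (NumSystemHolds <= 3)
     *   periodic_remove = (NumSystemHolds > 3)
     *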
* where releasevalue is the value of the condor profile periodic_release * and removevalue is the value of the condor profile periodic_remove * * @param job the job object. */ public void populatePeriodicReleaseAndRemoveInJob( Job job ){ //Karan Oct 19, 2005. The values in property file //should only be treated as default. Need to reverse //below. //get the periodic release values; a default //value is used if not specified. String releaseval = (String) job.condorVariables.get( Condor.PERIODIC_RELEASE_KEY ); releaseval = (releaseval == null) ? //put in default value "3" : //keep the one from profiles or dax releaseval; String removeval = (String) job.condorVariables.get( Condor.PERIODIC_REMOVE_KEY ); removeval = (removeval == null) ? //put in default value "3" : //keep the one from profiles or dax removeval; int removeint = this.getNaturalNumberValue( removeval ); int releaseint = this.getNaturalNumberValue( releaseval ); if( removeint > 0 && releaseint > 0 ){ if( removeint > releaseint ){ //clamp the remove value down to the release value removeval = releaseval; removeint = releaseint; //throw a warning down mLogger.log( " periodic_remove > periodic_release " + "for job " + job.getID() + ". Setting periodic_remove=periodic_release", LogManager.WARNING_MESSAGE_LEVEL); } } String value = null; if( releaseint > 0){ value = "(NumSystemHolds <= " + releaseint + ")"; job.condorVariables.construct( Condor.PERIODIC_RELEASE_KEY, value); } else{ job.condorVariables.construct( Condor.PERIODIC_RELEASE_KEY, releaseval ); } if( removeint > 0){ value = "(NumSystemHolds > " + removeint + ")"; job.condorVariables.construct( Condor.PERIODIC_REMOVE_KEY, value ); } else{ job.condorVariables.construct( Condor.PERIODIC_REMOVE_KEY, removeval ); } } /** * Returns a natural number value ( > 0 ) if the parameter passed is an integer * and greater than zero, else -1 * * @param value the String passed * * @return the parsed integer if it is a natural number, else -1 */ protected int getNaturalNumberValue( String value ){ int result = -1; try{ result = Integer.parseInt(value); } catch( Exception e ){ //not an integer; leave result as -1 } return result; } /** * It changes the paths to the executable depending on whether we want to * transfer the executable or not. If the transfer_executable is set to true, * then the executable needs to be shipped from the submit host meaning the * local pool. This function changes the path of the executable to the one on * the local pool, so that it can be shipped. * * @param job the Job containing the job description. * * @throws CodeGeneratorException */ /* protected void handleTransferOfExecutable(Job sinfo) throws CodeGeneratorException{ Condor cvar = sinfo.condorVariables; if (!cvar.getBooleanValue("transfer_executable")) { //the executable paths are correct and //point to the executable on the remote pool return; } SiteCatalogEntry site = mSiteStore.lookup( sinfo.getSiteHandle() ); String gridStartPath = site.getKickstartPath(); if (gridStartPath == null) { //not using grid start //we need to stage in the executable from //the local pool.
Not yet implemented mLogger.log("At present only the transfer of gridstart is supported", LogManager.ERROR_MESSAGE_LEVEL); return; } else { site = mSiteStore.lookup( "local" ); gridStartPath = site.getKickstartPath(); if (gridStartPath == null) { mLogger.log( "Gridstart needs to be shipped from the submit host to pool" + sinfo.executionPool + ".\nNo entry for it in pool local", LogManager.ERROR_MESSAGE_LEVEL); throw new CodeGeneratorException( "GridStart needs to be shipped from submit host to site " + sinfo.getSiteHandle() ); } else { //the jobs path to executable is updated //by the path on the submit host cvar.construct("executable", gridStartPath); //the arguments to gridstart need to be //appended with the true remote directory String args = (String) cvar.removeKey("arguments"); args = " -w " + mSiteStore.getInternalWorkDirectory( sinfo ) + " " + args; cvar.construct("arguments", args); //we have to remove the remote_initial dir for it. //as this is required for the LCG sites //Actually this should be done thru a LCG flag cvar.removeKey("remote_initialdir"); } } } */ /** * Applies a submit file style to the job, according to whether * the job has to be submitted directly to condor or to a remote jobmanager * via CondorG and GRAM. * If no style is associated with the job, then for a job running on the * local site the condor style is applied. For a job running on non local sites, * the globus style is applied if none is associated with the job. * * @param job the job on which the style needs to be applied. * @param writer the PrintWriter stream to the submit file for the job. * * @throws CodeGeneratorException in case of any error occurring during code generation. */ protected void applyStyle( Job job, PrintWriter writer ) throws CodeGeneratorException{ //load the appropriate style for the job CondorStyle cs = mStyleFactory.loadInstance( job ); String style = (String)job.vdsNS.get( Pegasus.STYLE_KEY ); boolean isGlobus = Pegasus.GLOBUS_STYLE.equals( style ); //handle GLOBUS RSL if required, and stdio appropriately String rslString = job.globusRSL.toString(); rslString += gridstart( writer, job, isGlobus ); if( isGlobus ){ //only for CondorG style does RSL make sense //instead of writing directly //incorporate as condor profile //job.condorVariables.construct( "globusrsl", rslString ); job.condorVariables.construct( "globusrsl", job.globusRSL.toString() ); } //apply the appropriate style on the job. if( job instanceof AggregatedJob ){ cs.apply( (AggregatedJob)job ); } else{ cs.apply( job ); } } /** * It updates/adds the environment variables that come in through the DAX with * the values specified in the properties file, pool config file or adds some * variables internally. In case of clashes of environment variables from * various sources the following order is followed: property file, * transformation catalog, pool config file and then the DAX. * At present values are not picked from the properties file. * * @param sinfo The Job object containing the information about the job. */ protected void handleEnvVarForJob(Job sinfo) { } /** * It updates/adds the Globus RSL parameters that come in through the DAX and are * in the Job object. In addition it inserts the additional rsl attributes * that can be specified in the properties file or the pool config files in * the profiles tags. In case of clashes of RSL attributes from various * sources the following order is followed: property file, pool config file * and then the DAX. * * @param sinfo The Job object containing the information about the job.
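     *
     * For example (an assumed scenario, not from a real catalog): a transfer
     * job bound to a jobmanager URL ending in "fork", such as
     * <pre>
     *    host.example.edu/jobmanager-fork
     * </pre>
     * has its RSL forced back to jobtype "single" by the sanitization below,
     * even if the site catalog had requested jobtype "condor" for the site.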
*/ protected void handleGlobusRSLForJob(Job sinfo) { Globus rsl = sinfo.globusRSL; String key = null; String value = null; //Getting all the rsl parameters specified //in dax /* if (job.globusRSL != null) { rsl.putAll(job.globusRSL); // 19-05 jsv: Need to change to {remote_}initialdir commands // allow TR to spec its own directory } */ // check job type, unless already specified // Note, we may need to adjust this again later if (!rsl.containsKey("jobtype")) { rsl.construct("jobtype", "single"); } //sanitize jobtype on basis of jobmanager //Karan Sept 12,2005 //This is to overcome specifically Duncan's problem //while running condor universe standard jobs. //For that the jobtype=condor needs to be set for the compute //job. This is set in the site catalog, but ends up //breaking transfer jobs that are run on jobmanager-fork String jmURL = sinfo.globusScheduler; if(jmURL != null && jmURL.endsWith("fork")){ rsl.construct("jobtype","single"); } } /** * Computes the priority for a job based on job type and depth in the workflow. * * @param job the job whose priority needs to be computed * @param depth the depth in the workflow * * @return the computed priority */ protected int getJobPriority(Job job, int depth) { int priority = 0; int type = job.getJobType(); switch ( type ){ case Job.CREATE_DIR_JOB: priority = CondorGenerator.DEFAULT_CREATE_DIR_PRIORITY_KEY; break; case Job.CHMOD_JOB: priority = CondorGenerator.DEFAULT_CHMOD_PRIORITY_KEY; break; case Job.CLEANUP_JOB: priority = CondorGenerator.DEFAULT_CLEANUP_PRIORITY_KEY; break; case Job.STAGE_IN_JOB: priority = CondorGenerator.DEFAULT_STAGE_IN_PRIORITY_KEY; break; case Job.INTER_POOL_JOB: priority = CondorGenerator.DEFAULT_INTER_SITE_PRIORITY_KEY; break; case Job.STAGE_OUT_JOB: priority = CondorGenerator.DEFAULT_STAGE_OUT_PRIORITY_KEY; break; case Job.REPLICA_REG_JOB: priority = CondorGenerator.DEFAULT_REPLICA_REG_PRIORITY_KEY; //break was missing here, which let this case fall through //to the depth based default and clobber the priority break; default: //compute on the basis of the depth priority = depth * 10; break; } return priority; } /** * This function creates the stdio handling with and without gridstart. * Please note that gridstart will become the default by end 2003, and * non-gridstart support will be phased out. * * @param writer is an open stream for the Condor submit file. * @param job is the job information structure. * @param isGlobusJob is true, if the job generated a * line universe = globus, and thus runs remotely. * Set to false, if the job runs on the submit * host in any way. * * @return A possibly empty string which contains things that * need to be added to the "globusrsl" clause. The return * value is only of interest for isGlobusJob==true calls. * * @throws CodeGeneratorException in case of any error occurring during code generation. */ private String gridstart(PrintWriter writer, Job job, boolean isGlobusJob) throws CodeGeneratorException { //To get the gridstart/kickstart path on the remote //pool, querying with entry for vanilla universe. //In the new format the gridstart is associated with the //pool, not the (pool, condor universe) pair // SiteInfo site = mPoolHandle.getPoolEntry(job.executionPool, // Condor.VANILLA_UNIVERSE); SiteCatalogEntry site = mSiteStore.lookup( job.getSiteHandle() ); //JIRA PM-491 . Path to kickstart should not be passed //to the factory.
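        // Illustrative submit file fragment emitted further below for the
        // common case where neither streaming profile is set (both keys
        // default to false):
        //    stream_error  = false
        //    stream_output = false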
// String gridStartPath = site.getKickstartPath(); StringBuffer rslString = new StringBuffer(); String jobName = job.jobName; String script = null; job.dagmanVariables.checkKeyInNS(Dagman.JOB_KEY, getFileBaseName(job)); //remove the prescript arguments key //should be already be set to the prescript key // //NO NEED TO REMOVE AS WE ARE HANDLING CORRECTLY IN DAGMAN NAMESPACE // //NOW. THERE THE ARGUMENTS AND KEY ARE COMBINED. Karan May 11,2006 // //job.dagmanVariables.removeKey(Dagman.PRE_SCRIPT_ARGUMENTS_KEY); // script = (String)job.dagmanVariables.removeKey(Dagman.PRE_SCRIPT_KEY); // if(script != null){ // //put in the new key with the prescript // job.dagmanVariables.checkKeyInNS(PRE_SCRIPT_KEY,script); // } //condor streaming is now for both grid and non grid universe jobs // we always put in the streaming keys. they default to false boolean stream = Boolean.parse( (String)job.condorVariables.removeKey( Condor.STREAM_STDERR_KEY ), false ); if ( stream ) { //we want it to be staged writer.println("stream_error = true"); } else { writer.println("stream_error = false"); } stream = Boolean.parse( (String)job.condorVariables.removeKey( Condor.STREAM_STDOUT_KEY ), false ); if ( stream ) { //we want it to be staged writer.println("stream_output = true" ); } else{ //we want it to be staged writer.println("stream_output = false" ); } GridStart gridStart = mGridStartFactory.loadGridStart( job, null ); //enable the job boolean enable = false; if( job instanceof AggregatedJob ){ enable = gridStart.enable( (AggregatedJob) job, isGlobusJob ); } else{ enable = gridStart.enable( job,isGlobusJob ); } if( !enable ){ String msg = "Job " + jobName + " cannot be enabled by " + gridStart.shortDescribe() + " to run at " + job.getSiteHandle(); mLogger.log( msg, LogManager.FATAL_MESSAGE_LEVEL ); throw new CodeGeneratorException( msg ); } //apply the appropriate POSTScript POSTScript ps = mGridStartFactory.loadPOSTScript( job, gridStart ); boolean constructed = ps.construct( job, Dagman.POST_SCRIPT_KEY ); //write out all the dagman profile variables associated //with the job to the .dag file. // printDagString(job.dagmanVariables.toString(jobName)); return rslString.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/CondorStyleException.java0000644000175000017500000000441111757531137032445 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor; import edu.isi.pegasus.planner.code.CodeGeneratorException; /** * A specific exception for the Condor Style generators. * * @author Karan Vahi * @version $Revision: 2090 $ */ public class CondorStyleException extends CodeGeneratorException { /** * Constructs a CondorStyleException with no detail * message. */ public CondorStyleException() { super(); } /** * Constructs a CondorStyleException with the specified detailed * message. * * @param message is the detailled message. 
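     *
     * A typical (illustrative) use from a style implementation:
     * <pre>
     *    throw new CondorStyleException( "No valid grid gateway for site " + site );
     * </pre>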
*/ public CondorStyleException(String message) { super(message); } /** * Constructs a CondorStyleException with the specified detailed * message and a cause. * * @param message is the detailled message. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public CondorStyleException(String message, Throwable cause) { super(message, cause); } /** * Constructs a CondorStyleException with the * specified just a cause. * * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public CondorStyleException(Throwable cause) { super(cause); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/ClassADSGenerator.java0000644000175000017500000002275511757531137031600 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.DagInfo; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.namespace.Pegasus; import java.io.PrintWriter; /** * A helper class, that generates Pegasus specific classads for the jobs. * * @author Karan Vahi * @version $Revision: 4850 $ */ public class ClassADSGenerator { /** * The name of the generator. */ public static final String GENERATOR = "Pegasus"; /** * The complete classad designating Pegasus as the generator. */ public static final String GENERATOR_AD_KEY = "pegasus_generator"; /** * The class ad key for the version id. * * @see org.griphyn.cPlanner.classes.DagInfo#releaseVersion */ public static final String VERSION_AD_KEY = "pegasus_version"; /** * The classad key for the pegasus build. */ public static final String BUILD_AD_KEY = "pegasus_build"; /** * The classad for the root workflow uuid */ public static final String ROOT_WF_UUID_KEY = "pegasus_root_wf_uuid"; /** * The classad for the workflow uuid */ public static final String WF_UUID_KEY = "pegasus_wf_uuid"; /** * The classad for the flow id. * * @see org.griphyn.cPlanner.classes.DagInfo#flowIDName */ public static final String WF_NAME_AD_KEY = "pegasus_wf_name"; /** * The classad for the timestamp. * * @see org.griphyn.cPlanner.classes.DagInfo#mFlowTimestamp */ public static final String WF_TIME_AD_KEY = "pegasus_wf_time"; /** * The classad for the complete transformation name. */ public static final String XFORMATION_AD_KEY = "pegasus_wf_xformation"; /** * The classad for generating the DAX ID */ public static final String DAX_JOB_ID_KEY = "pegasus_wf_dax_job_id"; /** * The class ad for job Class. 
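     * Rendered in the submit file as, e.g. (the value is illustrative):
     * <pre>
     *    +pegasus_job_class = 1
     * </pre>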
* * @see org.griphyn.cPlanner.classes.Job#jobClass */ public static final String JOB_CLASS_AD_KEY = "pegasus_job_class"; /** * The class ad for the jobId. * * @see org.griphyn.cPlanner.classes.Job#jobID */ public static final String DAG_JOB_ID_KEY = "pegasus_wf_dag_job_id"; /** * The class ad for the expected job value */ public static final String JOB_RUNTIME_AD_KEY = "pegasus_job_runtime"; /** * The key for the number of cores for the multiplier factor in stampede. */ public static final String CORES_KEY = "pegasus_cores"; /** * The class ad to store the execution pool at which the job is run. The * globusscheduler specified in the submit file refers to the jobmanager on * this execution pool. */ public static final String RESOURCE_AD_KEY = "pegasus_site"; /** * The class ad to designate the size of the clustered jobs. */ public static final String JOB_CLUSTER_SIZE_AD_KEY = "pegasus_cluster_size"; /** * Writes out the classads for a workflow to corresponding writer stream. * * @param writer is an open stream for the Condor submit file. * @param dag the workflow object containing metadata about the workflow * like the workflow id and the release version. */ public static void generate( PrintWriter writer, ADag dag ) { //get hold of the object holding the metadata //information about the workflow DagInfo dinfo = dag.dagInfo; //pegasus is the generator writer.println( generateClassAdAttribute( GENERATOR_AD_KEY, GENERATOR) ); //the root workflow and workflow uuid writer.println( generateClassAdAttribute( ROOT_WF_UUID_KEY, dag.getRootWorkflowUUID()) ); writer.println( generateClassAdAttribute( WF_UUID_KEY, dag.getWorkflowUUID()) ); //the vds version if (dinfo.releaseVersion != null) { writer.println( generateClassAdAttribute(VERSION_AD_KEY, dinfo.releaseVersion)); } //the workflow name if (dinfo.flowIDName != null) { writer.println( generateClassAdAttribute(WF_NAME_AD_KEY, dinfo.flowIDName)); } //the workflow time if (dinfo.getMTime() != null) { writer.println( generateClassAdAttribute(WF_TIME_AD_KEY, dinfo.getFlowTimestamp())); } } /** * Writes out the classads for a job to corresponding writer stream. * The writer stream points to a Condor Submit file for the job. * * @param writer is an open stream for the Condor submit file. * @param dag the workflow object containing metadata about the workflow * like the workflow id and the release version. * @param job the Job object for which the writer stream * is passed. **/ public static void generate( PrintWriter writer, ADag dag, Job job ) { //get all the workflow classads generate( writer, dag ); //get the job classads //the tranformation name writer.println( generateClassAdAttribute( ClassADSGenerator.XFORMATION_AD_KEY, job.getCompleteTCName() ) ); //put in the DAX writer.println( generateClassAdAttribute( DAX_JOB_ID_KEY, job.getDAXID() ) ); //the supernode id writer.println(generateClassAdAttribute( ClassADSGenerator.DAG_JOB_ID_KEY, job.getID() )); //the class of the job writer.println(generateClassAdAttribute( ClassADSGenerator.JOB_CLASS_AD_KEY, job.getJobType() ) ); //the resource on which the job is scheduled writer.println(generateClassAdAttribute( ClassADSGenerator.RESOURCE_AD_KEY, job.getSiteHandle() ) ); //add the pegasus value if defined. String value = (String)job.vdsNS.getStringValue( Pegasus.RUNTIME_KEY ); //else see if globus maxwalltime defined value = ( value == null )? (String)job.globusRSL.get( "maxwalltime" ) : value; int runtime = 0; try{ runtime = ( value == null )? 
0: Integer.parseInt(value); } catch( Exception e ){ //ignore; runtime stays at the default of 0 } writer.println(generateClassAdAttribute( ClassADSGenerator.JOB_RUNTIME_AD_KEY, runtime ) ); //write out the cores if specified for job String coresvalue = job.vdsNS.getStringValue( Pegasus.CORES_KEY ); int cores = 1; try{ cores = ( coresvalue == null ) ? 1 : Integer.parseInt( coresvalue ); } catch( Exception e ){ //ignore; cores stays at the default of 1 } writer.println(generateClassAdAttribute( ClassADSGenerator.CORES_KEY, cores ) ); //determine the cluster size int csize = ( job instanceof AggregatedJob ) ? ( (AggregatedJob)job ).numberOfConsitutentJobs() : 1; writer.println( generateClassAdAttribute( ClassADSGenerator.JOB_CLUSTER_SIZE_AD_KEY, csize ) ); } /** * Generates a classad attribute given the name and the value. * * @param name the attribute name. * @param value the value/expression making the classad attribute. * * @return the classad attribute. */ private static String generateClassAdAttribute(String name, String value) { return generateClassAdAttribute( name, value, false); } /** * Generates a classad attribute given the name and an integer value. * The value is emitted without the surrounding quotes used for string * values. * * @param name the attribute name. * @param value the value/expression making the classad attribute. * * @return the classad attribute. */ private static String generateClassAdAttribute(String name, int value) { StringBuffer sb = new StringBuffer(10); sb.append("+"); sb.append(name).append(" = "); sb.append(value); return sb.toString(); } /** * Generates a classad attribute given the name and the value. * * @param name the attribute name. * @param value the value/expression making the classad attribute. * @param newLine boolean denoting whether to add a new line character at * start or not. * * @return the classad attribute. */ private static String generateClassAdAttribute( String name, String value, boolean newLine) { StringBuffer sb = new StringBuffer(10); if(newLine) sb.append("\n"); sb.append("+"); sb.append(name).append(" = "); sb.append("\""); sb.append(value); sb.append("\""); return sb.toString(); } } ././@LongLink0000000000000000000000000000015200000000000011563 Lustar rootrootpegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/CondorStyleFactoryException.javapegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/CondorStyleFactoryException0000644000175000017500000000654111757531137033063 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Condor Style implementations. * * @author Karan Vahi * @version $Revision: 2090 $ */ public class CondorStyleFactoryException extends FactoryException { /** * The default classname that is associated with the exception.
*/ public static final String DEFAULT_NAME = "Code Generator"; /** * Constructs a CondorStyleFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public CondorStyleFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a CondorStyleFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public CondorStyleFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a CondorStyleFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public CondorStyleFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a CondorStyleFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public CondorStyleFactoryException( String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/style/0000755000175000017500000000000011757531667026626 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/style/Abstract.java0000644000175000017500000002055111757531137031227 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.code.generator.condor.style; import java.util.Iterator; import java.util.Set; import edu.isi.pegasus.common.credential.CredentialHandler; import edu.isi.pegasus.common.credential.CredentialHandlerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.code.generator.condor.CondorStyle; import edu.isi.pegasus.planner.code.generator.condor.CondorStyleException; import edu.isi.pegasus.planner.code.generator.condor.CondorStyleFactoryException; import edu.isi.pegasus.planner.common.PegasusProperties; /** * An abstract implementation of the CondorStyle interface. * Impelements the initialization method. * * @author Karan Vahi * @version $Revision: 4929 $ */ public abstract class Abstract implements CondorStyle { /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The handle to the Site Catalog Store. */ protected SiteStore mSiteStore; /** * A handle to the logging object. */ protected LogManager mLogger; /** * Handle to the Credential Handler Factory */ protected CredentialHandlerFactory mCredentialFactory ; /** * The default constructor. */ public Abstract() { //mLogger = LogManager.getInstance(); } /** * Initializes the Code Style implementation. * * @param bag the bag of initialization objects * @param credentialFactory the credential handler factory * * * @throws CondorStyleFactoryException that nests any error that * might occur during the instantiation of the implementation. */ public void initialize( PegasusBag bag , CredentialHandlerFactory credentialFactory )throws CondorStyleException{ mProps = bag.getPegasusProperties(); mSiteStore = bag.getHandleToSiteStore(); mLogger = bag.getLogger(); mCredentialFactory = credentialFactory; } /** * Constructs an error message in case of style mismatch. * * @param job the job object. * @param style the name of the style. * @param universe the universe associated with the job. */ protected String errorMessage( Job job, String style, String universe){ StringBuffer sb = new StringBuffer(); sb.append( "( " ). append( style ).append( "," ). append( universe ).append( "," ). append( job.getSiteHandle() ). append( ")" ). append( " mismatch for job " ).append( job.getName() ); return sb.toString(); } /** * Apply a style to an AggregatedJob * * @param job the AggregatedJob object containing the job. * * @throws CondorStyleException in case of any error occuring code generation. */ public void apply( AggregatedJob job ) throws CondorStyleException{ //apply style to all constituent jobs for( Iterator it = job.constituentJobsIterator(); it.hasNext(); ){ Job j = (Job) it.next(); this.apply( j ); } //also apply style to the aggregated job itself this.apply( (Job)job ); } /** * Examines the credential requirements for a job and adds appropiate * transfer and environment directives for the credentials to be staged * and picked up by the job. 
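     *
     * A sketch of the resulting submit file directives (the path is assumed,
     * not from a real run): an x509 requirement yields
     * <pre>
     *    x509userproxy = /tmp/x509up_u501
     * </pre>
     * while irods/s3/ssh credentials are added to the Condor input file
     * transfer list and advertised to the job through their environment
     * variable, set to the credential's basename.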
* @param job */ protected void applyCredentialsForRemoteExec(Job job) throws CondorStyleException { Set credsNeeded = job.getCredentialTypes(); if (credsNeeded == null || credsNeeded.isEmpty()) { return; } // jobs can have multiple credential requirements Iterator iter = credsNeeded.iterator(); while (iter.hasNext()) { CredentialHandler.TYPE credType = iter.next(); CredentialHandler handler = mCredentialFactory.loadInstance(credType); // if the credential is listed in the remote sites environment, don't do anything SiteCatalogEntry site = mSiteStore.lookup(job.getSiteHandle()); if (site.getEnvironmentVariable(handler.getEnvironmentVariable()) != null) { continue; } switch(credType) { case x509: // x509 credentials are transfered automatically by condor if x509userproxy is set job.condorVariables.construct("x509userproxy", handler.getPath()); break; case irods: case s3: case ssh: // transfer using condor file transfer, and advertise in env // but first make sure it is specified in our environment if (handler.getPath() == null) { throw new CondorStyleException("Unable to find required credential for file transfers. " + "Please make sure " + handler.getEnvironmentVariable() + " is set either in the site catalog or your environment."); } job.condorVariables.addIPFileForTransfer(handler.getPath()); job.envVariables.construct(handler.getEnvironmentVariable(), handler.getBaseName()); break; default: throw new CondorStyleException("Job has been tagged with unknown credential type"); } } } /** * Examines the credential requirements for a job and adds appropiate * transfer and environment directives for the credentials to be picked * up for the local job * @param job */ protected void applyCredentialsForLocalExec(Job job) throws CondorStyleException { Set credsNeeded = job.getCredentialTypes(); if (credsNeeded == null || credsNeeded.isEmpty()) { return; } // jobs can have multiple credential requirements Iterator iter = credsNeeded.iterator(); while (iter.hasNext()) { CredentialHandler.TYPE credType = iter.next(); CredentialHandler handler = mCredentialFactory.loadInstance(credType); switch(credType) { case x509: case irods: case s3: case ssh: // for local exec, just set envionment variables to full path if (handler.getPath() == null) { throw new CondorStyleException("Unable to find required credential for file transfers. " + "Please make sure " + handler.getEnvironmentVariable() + " is set either in the site catalog or your environment."); } job.envVariables.construct(handler.getEnvironmentVariable(), handler.getPath()); break; default: throw new CondorStyleException("Job has been tagged with unknown credential type"); } } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/style/Condor.java0000644000175000017500000004514011757531137030711 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.code.generator.condor.style; import java.io.File; import edu.isi.pegasus.common.credential.CredentialHandlerFactory; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.code.generator.condor.CondorStyleException; import edu.isi.pegasus.planner.code.generator.condor.CondorStyleFactoryException; import edu.isi.pegasus.planner.namespace.Pegasus; /** * Enables a job to be directly submitted to the condor pool of which the * submit host is a part of. * This style is applied for jobs to be run * - on the submit host in the scheduler universe (local pool execution) * - on the local condor pool of which the submit host is a part of * * @author Karan Vahi * @version $Revision: 5152 $ */ public class Condor extends Abstract { //some constants imported from the Condor namespace. public static final String UNIVERSE_KEY = edu.isi.pegasus.planner.namespace.Condor.UNIVERSE_KEY; public static final String VANILLA_UNIVERSE = edu.isi.pegasus.planner.namespace.Condor.VANILLA_UNIVERSE; public static final String SCHEDULER_UNIVERSE = edu.isi.pegasus.planner.namespace.Condor.SCHEDULER_UNIVERSE; public static final String STANDARD_UNIVERSE = edu.isi.pegasus.planner.namespace.Condor.STANDARD_UNIVERSE; public static final String LOCAL_UNIVERSE = edu.isi.pegasus.planner.namespace.Condor.LOCAL_UNIVERSE; public static final String PARALLEL_UNIVERSE = edu.isi.pegasus.planner.namespace.Condor.PARALLEL_UNIVERSE; public static final String TRANSFER_EXECUTABLE_KEY = edu.isi.pegasus.planner.namespace.Condor.TRANSFER_EXECUTABLE_KEY; // /** * The name of the style being implemented. */ public static final String STYLE_NAME = "Condor"; /** * The Pegasus Lite local wrapper basename. */ public static final String PEGASUS_LITE_LOCAL_FILE_BASENAME = "pegasus-lite-local.sh"; /** * The name of the environment variable for transferring input files */ public static final String PEGASUS_TRANSFER_INPUT_FILES_KEY = "_PEGASUS_TRANSFER_INPUT_FILES"; /** * The name of the environment variable for transferring output files */ public static final String PEGASUS_TRANSFER_OUTPUT_FILES_KEY = "_PEGASUS_TRANSFER_OUTPUT_FILES"; /** * The name of the environment variable for the initial dir for pegasus lite local */ public static final String PEGASUS_INITIAL_DIR_KEY = "_PEGASUS_INITIAL_DIR"; /** * The name of the environment variable that determines if job should be executed in initial dir or not */ public static final String PEGASUS_EXECUTE_IN_INITIAL_DIR = "_PEGASUS_EXECUTE_IN_INITIAL_DIR"; /** * Whether to connect stdin or not */ public static final String PEGASUS_CONNECT_STDIN_KEY = "_PEGASUS_CONNECT_STDIN"; /** * A boolean indicating whether pegasus lite mode is picked up or not. */ private boolean mPegasusLiteEnabled; /** * Path to Pegasus Lite local wrapper script. */ private String mPegasusLiteLocalWrapper; /** * The default constructor. */ public Condor() { super(); } /** * Initializes the Code Style implementation. * * @param bag the bag of initialization objects * @param credentialFactory the credential handler factory * * * @throws CondorStyleFactoryException that nests any error that * might occur during the instantiation of the implementation. 
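     *
     * As a side effect this also computes the submit host path to the local
     * PegasusLite wrapper, which resolves to (the share directory itself is
     * installation specific)
     * <pre>
     *    [pegasus share dir]/sh/pegasus-lite-local.sh
     * </pre>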
*/ public void initialize( PegasusBag bag , CredentialHandlerFactory credentialFactory )throws CondorStyleException{ super.initialize( bag, credentialFactory ); mPegasusLiteEnabled = mProps.getGridStart().equalsIgnoreCase( "PegasusLite" ); mPegasusLiteLocalWrapper = this.getSubmitHostPathToPegasusLiteLocal(); } /** * Applies the condor style to the job. Changes the job so that it results * in generation of a condor style submit file that can be directly * submitted to the underlying condor scheduler on the submit host, without * going through CondorG. This applies to the case of * - local site execution * - submitting directly to the condor pool of which the submit host * is a part of. * * @param job the job on which the style needs to be applied. * * @throws CondorStyleException in case of any error occuring code generation. */ public void apply(Job job) throws CondorStyleException{ //mLogger.log( "Credentials required for job " + job.getID() + " " + job.getCredentialTypes() , // LogManager.DEBUG_MESSAGE_LEVEL ); // Removed for JIRA PM-543 // String execSiteWorkDir = mSiteStore.getInternalWorkDirectory(job); // String workdir = (String) job.globusRSL.removeKey("directory"); // returns old value // workdir = (workdir == null)?execSiteWorkDir:workdir; String workdir = job.getDirectory(); String defaultUniverse = job.getSiteHandle().equalsIgnoreCase("local")? Condor.LOCAL_UNIVERSE: Condor.VANILLA_UNIVERSE; String universe = job.condorVariables.containsKey( Condor.UNIVERSE_KEY )? (String)job.condorVariables.get( Condor.UNIVERSE_KEY): defaultUniverse; //boolean to indicate whether to use remote_initialdir or not //remote_initialdir does not work for standard universe boolean useRemoteInitialDir = !universe.equals( Condor.STANDARD_UNIVERSE ); //extra check for standard universe if( universe.equals( Condor.STANDARD_UNIVERSE ) ){ //standard universe should be only applied for compute jobs int type = job.getJobType(); if ( !( type == Job.COMPUTE_JOB ) ) { //set universe to vanilla universe universe = Condor.VANILLA_UNIVERSE; //fix for JIRA PM-531 //vanilla universe jobs need to have remote_initialdir key useRemoteInitialDir = true; } else{ //job is a compute job. //check if it is clustered . if( job instanceof AggregatedJob ){ //clustered jobs can never execute in standard universe //update to vanilla universe. JIRA PM-530 universe = Condor.VANILLA_UNIVERSE; //fix for JIRA PM-531 //vanilla universe jobs need to have remote_initialdir key useRemoteInitialDir = true; } } } //set the universe for the job // Karan Jan 28, 2008 job.condorVariables.construct( "universe", universe ); if( universe.equalsIgnoreCase( Condor.VANILLA_UNIVERSE ) || universe.equalsIgnoreCase( Condor.STANDARD_UNIVERSE ) || universe.equalsIgnoreCase( Condor.PARALLEL_UNIVERSE ) ){ //the glide in/ flocking case //submitting directly to condor //check if it is a glide in job. //vanilla jobs are glide in jobs? //No they are not. //set the vds change dir key to trigger -w //to kickstart invocation for all non transfer jobs if(!(job instanceof TransferJob)){ job.vdsNS.checkKeyInNS(Pegasus.CHANGE_DIR_KEY, "true"); //set remote_initialdir for the job only for non transfer jobs //this is removed later when kickstart is enabling. 
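                    // Illustrative outcome (the directory is hypothetical):
                    // a vanilla universe job with workdir /scratch/run0001
                    // gets
                    //    remote_initialdir = /scratch/run0001
                    // whereas a standard universe job gets initialdir
                    // instead, since remote_initialdir is not supported
                    // there.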
//added if loop for JIRA PM-543 if( workdir != null ){ if( useRemoteInitialDir ){ job.condorVariables.construct("remote_initialdir", workdir); }else{ job.condorVariables.construct("initialdir", workdir); } } } else{ //we need to set s_t_f and w_t_f_o to ensure //that condor transfers the proxy to the remote end //also the keys below are mutually exclusive to initialdir keys. job.condorVariables.construct("should_transfer_files", "YES"); job.condorVariables.construct("when_to_transfer_output", "ON_EXIT"); } //isGlobus = false; applyCredentialsForRemoteExec(job); } else if(universe.equalsIgnoreCase(Condor.SCHEDULER_UNIVERSE) || universe.equalsIgnoreCase( Condor.LOCAL_UNIVERSE )){ String ipFiles = job.condorVariables.getIPFilesForTransfer(); //check if the job can be run in the workdir or not //and whether intial dir is populated before hand or not. if(job.runInWorkDirectory() && !job.condorVariables.containsKey("initialdir")){ //for local jobs we need initialdir //instead of remote_initialdir //added if loop for JIRA PM-543 if( workdir != null ){ job.condorVariables.construct("initialdir", workdir); } } wrapJobWithLocalPegasusLite( job ); /* if( this.mPegasusLiteEnabled ){ //wrap the job with local pegasus lite wrapped job //to work around the Condor IO bug for PegasusLite //PM-542 wrapJobWithLocalPegasusLite( job ); } else{ //do same as earlier for time being. //check explicitly for any input files transferred via condor //file transfer mechanism if( ipFiles != null ){ //log a debug message before removing the files StringBuffer sb = new StringBuffer(); sb.append( "Removing the following ip files from condor file tx for job " ). append( job.getID() ).append( " " ).append( ipFiles ); mLogger.log( sb.toString(), LogManager.DEBUG_MESSAGE_LEVEL ); job.condorVariables.removeIPFilesForTransfer(); } //check for transfer_executable and remove if set //transfer_executable does not work in local/scheduler universe if( job.condorVariables.containsKey( Condor.TRANSFER_EXECUTABLE_KEY )){ job.condorVariables.removeKey( Condor.TRANSFER_EXECUTABLE_KEY ); job.condorVariables.removeKey( "should_transfer_files" ); job.condorVariables.removeKey( "when_to_transfer_output" ); } //for local or scheduler universe we never should have //should_transfer_file or w_t_f //the keys can appear if a user in site catalog for local sites //specifies these keys for the vanilla universe jobs if( job.condorVariables.containsKey( "should_transfer_files" ) || job.condorVariables.containsKey( "when_to_transfer_output" )){ job.condorVariables.removeKey( "should_transfer_files" ); job.condorVariables.removeKey( "when_to_transfer_output" ); } } */ applyCredentialsForLocalExec(job); } else{ //Is invalid state throw new CondorStyleException( errorMessage( job, STYLE_NAME, universe ) ); } } /** * Wraps the local universe jobs with a local Pegasus Lite wrapper to get * around the Condor file IO bug for local universe job * * @param job the job that needs to be wrapped. 
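     *
     * A sketch of the rewrite (paths are illustrative): a job with
     * <pre>
     *    executable = /bin/keg
     *    arguments  = -o f.out
     * </pre>
     * becomes
     * <pre>
     *    executable = [pegasus share dir]/sh/pegasus-lite-local.sh
     *    arguments  = /bin/keg -o f.out
     * </pre>
     * with the Condor file transfer lists moved into the wrapper's
     * environment variables.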
*/ private void wrapJobWithLocalPegasusLite(Job job) throws CondorStyleException { //for the time being doing nothing for dax or dag jobs if( job.getJobType() == Job.DAG_JOB || job.getJobType() == Job.DAX_JOB ){ //do nothing return return; } String ipFiles = job.condorVariables.getIPFilesForTransfer(); String opFiles = job.condorVariables.getOutputFilesForTransfer(); if( ipFiles == null && opFiles == null ){ if( job.getRemoteExecutable().startsWith( File.separator ) ){ //absoluate path specified //nothing to do other than check for transfer_executable //check for transfer_executable and remove if set //transfer_executable does not work in local/scheduler universe if( job.condorVariables.containsKey( Condor.TRANSFER_EXECUTABLE_KEY )){ job.condorVariables.removeKey( Condor.TRANSFER_EXECUTABLE_KEY ); job.condorVariables.removeKey( "should_transfer_files" ); job.condorVariables.removeKey( "when_to_transfer_output" ); } return; } //for relative paths for local universe jobs it is better to wrap //with wrapper as condor else assumes the executable is in the //directory where the job is launched. } String workdir = (String)job.condorVariables.get( "initialdir" ); if( workdir != null ){ job.envVariables.construct( Condor.PEGASUS_INITIAL_DIR_KEY, workdir ); if ( !this.mPegasusLiteEnabled ){ //for shared file system mode we want the wrapped job //to execute in workdir job.envVariables.construct( Condor.PEGASUS_EXECUTE_IN_INITIAL_DIR, "true" ); } } //check if any transfer_input_files is transferred if( ipFiles != null ){ String[] files = ipFiles.split( "," ); StringBuffer value = new StringBuffer(); for( String f: files ){ if( f.startsWith( File.separator) ){ //absolute path to file specified value.append( f ); } else{ //make sure workdir is not null if( workdir == null ){ throw new CondorStyleException( "Condor initialdir not set for job " + job.getID() ); } value.append( f ); } value.append( "," ); } job.envVariables.construct( Condor.PEGASUS_TRANSFER_INPUT_FILES_KEY, value.toString() ); job.condorVariables.removeIPFilesForTransfer(); } //check if any transfer_output_files is transferred if( opFiles != null ){ //sanity check as wrapper requires initialdir to be set if( workdir == null ){ throw new CondorStyleException( "Condor initialdir not set for job " + job.getID() ); } String[] files = opFiles.split( "," ); StringBuffer value = new StringBuffer(); for( String f: files ){ value.append( f ); value.append( "," ); } job.envVariables.construct( Condor.PEGASUS_TRANSFER_OUTPUT_FILES_KEY, value.toString() ); job.condorVariables.removeOutputFilesForTransfer(); } //check for transfer_executable and remove if set //transfer_executable does not work in local/scheduler universe if( job.condorVariables.containsKey( Condor.TRANSFER_EXECUTABLE_KEY )){ job.condorVariables.removeKey( Condor.TRANSFER_EXECUTABLE_KEY ); job.condorVariables.removeKey( "should_transfer_files" ); job.condorVariables.removeKey( "when_to_transfer_output" ); } //the job executable is now an argument to pegasus-lite-local String executable = job.getRemoteExecutable(); String arguments = job.getArguments(); job.setRemoteExecutable( this.mPegasusLiteLocalWrapper ); StringBuffer args = new StringBuffer(); args.append( executable ).append( " " ).append( arguments ); job.setArguments( args.toString() ); String stdin = (String)job.condorVariables.get( "input" ) ; if( stdin != null ){ //tell the wrapper to connect the stdin job.envVariables.construct( Condor.PEGASUS_CONNECT_STDIN_KEY, "true" ); } //for local or scheduler universe we never 
should have //should_transfer_file or w_t_f //the keys can appear if a user in site catalog for local sites //specifies these keys for the vanilla universe jobs if( job.condorVariables.containsKey( "should_transfer_files" ) ){ job.condorVariables.removeKey( "should_transfer_files" ); job.condorVariables.removeKey( "when_to_transfer_output" ); } } /** * Determines the path to PegasusLite local job * * @return the path on the submit host. */ protected String getSubmitHostPathToPegasusLiteLocal() { StringBuffer path = new StringBuffer(); //first get the path to the share directory File share = mProps.getSharedDir(); if( share == null ){ throw new RuntimeException( "Property for Pegasus share directory is not set" ); } path.append( share.getAbsolutePath() ).append( File.separator ). append( "sh" ).append( File.separator ).append( Condor.PEGASUS_LITE_LOCAL_FILE_BASENAME ); return path.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/style/CondorGlideIN.java0000644000175000017500000001147411757531137032110 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor.style; import edu.isi.pegasus.planner.code.generator.condor.CondorStyle; import edu.isi.pegasus.planner.code.generator.condor.CondorStyleException; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.namespace.Condor; /** * Enables a job to be submitted to nodes that are logically part of the local pool, * but physically are not. * * This style is applied for jobs to be run * - on the nodes that have been glided into the local pool * - on the nodes that have been flocked to the local pool (NOT TESTED) * * @author Karan Vahi * @version $Revision: 4817 $ */ public class CondorGlideIN extends Abstract { /** * The name of the style being implemented. */ public static final String STYLE_NAME = "CondorGlideIN"; /** * The default constructor. */ public CondorGlideIN() { super(); } /** * Applies the style to the job to be run in a condor glide in environment. * condor style to the job. Changes the job so that it results * in generation of a submit file that can be directly submitted to the * underlying condor scheduler on the submit host, without * going through CondorG and the jobs run only on the nodes that have * been glided in from a particular remote pool. * Please note that GlideIn only works if all the application jobs are * being run via kickstart, as it relies heavily on the ability of the * launcher to change the directory before running the application job * on the remote end. * * This applies to the case of * - nodes glided in to a local pool * - jobs flocking to remote pools? * * @param job the job on which the style needs to be applied. 
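     *
     * The keys constructed for a glide-in job include (per the logic below):
     * <pre>
     *    should_transfer_files   = YES
     *    when_to_transfer_output = ON_EXIT
     *    remote_initialdir       = [work directory, non-transfer jobs only]
     * </pre>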
* * @throws CondorStyleException in case of any error occuring code generation. */ public void apply( Job job ) throws CondorStyleException{ // Removed for JIRA PM-543 // String execSiteWorkDir = mSiteStore.getInternalWorkDirectory( job ); // String workdir = (String) job.globusRSL.removeKey( "directory" ); // returns old value // workdir = (workdir == null)?execSiteWorkDir:workdir; String workdir = job.getDirectory(); String universe = job.condorVariables.containsKey( Condor.UNIVERSE_KEY )? (String)job.condorVariables.get( Condor.UNIVERSE_KEY ): //default is vanilla universe for glidein style Condor.VANILLA_UNIVERSE; if( universe.equalsIgnoreCase( Condor.VANILLA_UNIVERSE ) || universe.equalsIgnoreCase( Condor.STANDARD_UNIVERSE ) || universe.equalsIgnoreCase( Condor.PARALLEL_UNIVERSE ) ){ //the glide in/ flocking case //submitting directly to condor //set the vds change dir key to trigger -w //to kickstart invocation for all non transfer jobs if( !( job instanceof TransferJob ) ){ job.vdsNS.checkKeyInNS( Pegasus.CHANGE_DIR_KEY, "true" ); //set remote_initialdir for the job only for non transfer jobs //this is removed later when kickstart is enabling. if( workdir != null ){ job.condorVariables.construct( "remote_initialdir", workdir ); } } //we want the stdout and stderr to be transferred back //by Condor to the submit host always job.condorVariables.construct( "should_transfer_files", "YES" ); job.condorVariables.construct( "when_to_transfer_output", "ON_EXIT" ); //isGlobus = false; } else{ //Is invalid state throw new CondorStyleException( errorMessage( job, STYLE_NAME, universe ) ); } //the condor universe that is determined //should be set back in the job. job.condorVariables.construct( Condor.UNIVERSE_KEY, universe ); applyCredentialsForRemoteExec(job); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/style/CondorC.java0000644000175000017500000001652211757531137031016 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor.style; import java.util.logging.Level; import java.util.logging.Logger; import edu.isi.pegasus.planner.code.generator.condor.CondorQuoteParserException; import edu.isi.pegasus.planner.code.generator.condor.CondorStyle; import edu.isi.pegasus.planner.code.generator.condor.CondorStyleException; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.code.generator.condor.CondorQuoteParser; import edu.isi.pegasus.planner.namespace.Pegasus; /** * Enables a job to be directly submitted to the condor pool of which the * submit host is a part of. 
* This style is applied for jobs to be run * - on the submit host in the scheduler universe (local pool execution) * - on the local condor pool of which the submit host is a part of * * @author Karan Vahi * @version $Revision: 3985 $ */ public class CondorC extends Condor { /** * The constant for the remote universe key. */ public static final String REMOTE_UNIVERSE_KEY = edu.isi.pegasus.planner.namespace.Condor.REMOTE_UNIVERSE_KEY; /** * The name of the key that designates that files should be transferred * via Condor File Transfer mechanism. */ public static final String SHOULD_TRANSFER_FILES_KEY = edu.isi.pegasus.planner.namespace.Condor.SHOULD_TRANSFER_FILES_KEY; /** * The corresponding remote kye name that designates that files should be * transferred via Condor File Transfer mechanism. */ public static final String REMOTE_SHOULD_TRANSFER_FILES_KEY = edu.isi.pegasus.planner.namespace.Condor.REMOTE_SHOULD_TRANSFER_FILES_KEY; /** * The name of key that designates when to transfer output. */ public static final String WHEN_TO_TRANSFER_OUTPUT_KEY = edu.isi.pegasus.planner.namespace.Condor.WHEN_TO_TRANSFER_OUTPUT_KEY; /** * The corresponding name of the remote key that designated when to transfer output. */ public static final String REMOTE_WHEN_TO_TRANSFER_OUTPUT_KEY = edu.isi.pegasus.planner.namespace.Condor.REMOTE_WHEN_TO_TRANSFER_OUTPUT_KEY; /** * The key that designates the collector associated with the job */ public static final String COLLECTOR_KEY = edu.isi.pegasus.planner.namespace.Condor.COLLECTOR_KEY; /** * The name of the style being implemented. */ public static final String STYLE_NAME = "CondorC"; /** * The default constructor. */ public CondorC() { super(); } /** * Applies the CondorC style to the job. * * @param job the job on which the style needs to be applied. * * @throws CondorStyleException in case of any error occuring code generation. */ public void apply(Job job) throws CondorStyleException{ //lets apply the Condor style first and then make //some modifications super.apply(job); //the job universe key is translated to +remote_universe String remoteUniverse = (String) job.condorVariables.removeKey( Condor.UNIVERSE_KEY ); job.condorVariables.construct( CondorC.REMOTE_UNIVERSE_KEY, remoteUniverse); //the universe for CondorC is always grid job.condorVariables.construct( CondorC.UNIVERSE_KEY, "grid" ); //construct the grid_resource for the job String gridResource = constructGridResource( job ); //check if s_t_f and w_t_f keys are associated. try { String s_t_f = (String)job.condorVariables.removeKey( CondorC.SHOULD_TRANSFER_FILES_KEY ); if( s_t_f != null ){ //convert to remote key and quote it job.condorVariables.construct( CondorC.REMOTE_SHOULD_TRANSFER_FILES_KEY, CondorQuoteParser.quote(s_t_f, true)); } String w_t_f = (String)job.condorVariables.removeKey( CondorC.WHEN_TO_TRANSFER_OUTPUT_KEY ); if( s_t_f != null ){ //convert to remote key and quote it job.condorVariables.construct( CondorC.REMOTE_WHEN_TO_TRANSFER_OUTPUT_KEY, CondorQuoteParser.quote(w_t_f, true)); } //initialdir makes sense only on submit node //so convert that to remote_initialdir String dir = (String)job.condorVariables.removeKey( "initialdir" ); if( dir != null ){ job.condorVariables.construct( "remote_initialdir", dir ); } } catch ( CondorQuoteParserException ex) { throw new CondorStyleException( "Condor Quote Exception", ex); } } /** * Constructs the grid_resource entry for the job. The grid resource is a * tuple consisting of three fields. * * The first field is the grid type, which is condor. 
* The second field is the name of the remote condor_schedd daemon. * The third field is the name of the remote pool's condor_collector. * * @param job the job * * @return the grid_resource entry * @throws CondorStyleException in case of any error occuring code generation. */ protected String constructGridResource( Job job ) throws CondorStyleException{ StringBuffer gridResource = new StringBuffer(); //first field is always condor gridResource.append( "condor" ).append( " " ); //the second field is the remote condor schedd //specified in the grid gateway for the site // gridResource.append( job.globusScheduler ).append( " " ); SiteCatalogEntry s = mSiteStore.lookup( job.getSiteHandle() ); GridGateway g = s.selectGridGateway( job.getGridGatewayJobType() ); gridResource.append( g.getContact() ).append( " " ); //the job should have the collector key associated String collector = (String) job.condorVariables.removeKey( CondorC.COLLECTOR_KEY ); if( collector == null ){ StringBuffer error = new StringBuffer(); error.append( "Condor Profile " ).append( CondorC.COLLECTOR_KEY ). append( " not associated with job " ).append( job.getID() ); throw new CondorStyleException( error.toString() ); } gridResource.append( collector ); return gridResource.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/style/CondorG.java0000644000175000017500000001122011757531137031010 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor.style; import edu.isi.pegasus.planner.code.generator.condor.CondorStyle; import edu.isi.pegasus.planner.code.generator.condor.CondorStyleException; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway; import edu.isi.pegasus.planner.catalog.site.classes.GridGateway.JOB_TYPE; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.namespace.Condor; /** * This implementation enables a job to be submitted via CondorG to remote * grid sites. This is the default style, that is applied to all the jobs * in the concrete workflow. * * @author Karan Vahi * @version $Revision: 4886 $ */ public class CondorG extends Abstract { /** * The default Constructor. */ public CondorG() { super(); } /** * The name of the style being implemented. */ public static final String STYLE_NAME = "CondorG"; /** * Applies the globus style to the job. Changes the job so that it results * in generation of a condor style submit file that can be submitted * via CondorG to a remote jobmanager. This is the default case. * * @param job the job on which the style needs to be applied. * * @throws CondorStyleException in case of any error occuring code generation. 
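 *
 * As a hedged illustration (the gateway contact below is invented, not taken
 * from this code): for a vanilla universe job scheduled to a site whose grid
 * gateway has type gt2 and contact grid.example.edu/jobmanager-pbs, the
 * resulting submit file would carry entries along the lines of
 *
 *      universe          = grid
 *      grid_resource     = gt2 grid.example.edu/jobmanager-pbs
 *      remote_initialdir = /path/to/the/remote/workdir
 *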
*/ public void apply( Job job ) throws CondorStyleException { // Removed for JIRA PM-543 // // String execSiteWorkDir = mSiteStore.getInternalWorkDirectory( job ); // String workdir = (String) job.globusRSL.removeKey( "directory" ); // returns old value // workdir = (workdir == null) ? execSiteWorkDir : workdir; String workdir = job.getDirectory(); String universe = job.condorVariables.containsKey(Condor.UNIVERSE_KEY)? (String)job.condorVariables.get(Condor.UNIVERSE_KEY): //default is VANILLA universe for globus style Condor.VANILLA_UNIVERSE; if( universe.equalsIgnoreCase( Condor.STANDARD_UNIVERSE ) ){ //construct the appropriate jobtype RSL job.globusRSL.construct( "jobtype", "condor" ); } else if(universe.equalsIgnoreCase( Condor.VANILLA_UNIVERSE )){ //the default case where no universe specified //or a vanilla universe specified //by default pegasus creates globus universe jobs //sinfo.condorVariables.construct("universe",Condor.GLOBUS_UNIVERSE); //since condor 6.7.6 we have the notion of grid universe //and grid types. job.condorVariables.construct( Condor.UNIVERSE_KEY,Condor.GRID_UNIVERSE ); StringBuffer gridResource = new StringBuffer(); //default type is gt2 SiteCatalogEntry s = mSiteStore.lookup( job.getSiteHandle() ); GridGateway g = s.selectGridGateway( job.getGridGatewayJobType() ); if( g == null ){ throw new CondorStyleException( "No valid grid gateway found for site " + job.getSiteHandle() + " for job " + job.getID() ); } gridResource.append( g.getType() ).append( " " ).append( g.getContact() ); //System.out.println( "Grid Resource for job " + job.getName() + " is " + gridResource.toString() ); job.condorVariables.construct( Condor.GRID_RESOURCE_KEY, gridResource.toString() ); } else{ //running jobs in scheduler universe //or some other universe //through CondorG does not make sense. //Is invalid state throw new CondorStyleException( errorMessage( job, STYLE_NAME, universe ) ); } //remote_initialdir might be needed to removed //later if running for a LCG site //bwSubmit.println("remote_initialdir = " + workdir); job.condorVariables.construct( "remote_initialdir", workdir ); applyCredentialsForRemoteExec(job); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/style/GLite.java0000644000175000017500000003036711757531137030476 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.code.generator.condor.style; import edu.isi.pegasus.planner.code.generator.condor.CondorStyleException; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.namespace.Condor; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.planner.code.generator.condor.CondorQuoteParser; import edu.isi.pegasus.planner.code.generator.condor.CondorQuoteParserException; import java.util.Iterator; import edu.isi.pegasus.planner.classes.TransferJob; /** * This implementation enables a job to be submitted via gLite to a * grid site. This is the style applied when a job has a pegasus profile style key * with value GLite associated with it. * * *
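 *
 * As an illustrative sketch only (the exact profile syntax below is an
 * assumption based on the pegasus style profile key this javadoc refers to,
 * not taken verbatim from this file), a user could associate the style with
 * a compute site in the site catalog via
 *
 *      <profile namespace="pegasus" key="style">glite</profile>
 *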

* This style should only be used when the Condor installation on the submit host can directly * talk to the scheduler running on the cluster. In Pegasus there should be a separate * compute site that has this style associated with it. This style should not be * specified for the local site. * * As part of applying the style to the job, this style adds the following * classad expressions to the job description *

 *      +remote_queue  - value picked up from globus profile queue
 *      +remote_cerequirements - See below
 * 
* *

* The remote CE requirements are constructed from the following profiles * associated with the job. The profiles for a job are derived from various * sources: * - user properties * - transformation catalog * - site catalog * - DAX * * Note it is up to the user to specify these or a subset of them. * * The following globus profiles, if associated with the job, are picked up *

 * hostcount  -> PROCS
 * count      -> NODES
 * maxwalltime-> WALLTIME
 * 
* * The following condor profiles, if associated with the job, are picked up *
 * priority  -> PRIORITY
 * 
* * All the env profiles are translated to MYENV. * * For example, the expression in the submit file may look as *
 * +remote_cerequirements = "PROCS==18 && NODES==1 && PRIORITY==10 && WALLTIME==3600
 *   && PASSENV==1 && JOBNAME==\"TEST JOB\" && MYENV ==\"GAURANG=MEHTA,KARAN=VAHI\""
 * 
* * All the jobs that have this style applied dont have a remote directory * specified in the submit directory. They rely on kickstart to change to the * working directory when the job is launched on the remote node. * * @author Karan Vahi * @version $Revision: 4817 $ */ public class GLite extends Abstract { /** * The name of the style being implemented. */ public static final String STYLE_NAME = "GLite"; /** * The default Constructor. */ public GLite() { super(); } /** * Applies the gLite style to the job. * * @param job the job on which the style needs to be applied. * * @throws CondorStyleException in case of any error occuring code generation. */ public void apply( Job job ) throws CondorStyleException { String execSiteWorkDir = mSiteStore.getInternalWorkDirectory( job ); String workdir = (String) job.globusRSL.removeKey( "directory" ); // returns old value workdir = (workdir == null) ? execSiteWorkDir : workdir; /* universe is always set to grid*/ job.condorVariables.construct( Condor.UNIVERSE_KEY,Condor.GRID_UNIVERSE ); /* figure out the remote scheduler. should be specified with the job*/ if( !job.condorVariables.containsKey( Condor.GRID_RESOURCE_KEY ) ){ throw new CondorStyleException( missingKeyError( job, Condor.GRID_RESOURCE_KEY ) ); } /* remote_initialdir does not work with gLite * Rely on Gridstart to do the right thing */ job.condorVariables.construct( "remote_initialdir", workdir ); job.vdsNS.construct( Pegasus.CHANGE_DIR_KEY, "true" ); /* transfer_executable does not work with gLite * Explicitly set to false */ job.condorVariables.construct( Condor.TRANSFER_EXECUTABLE_KEY, "false" ); /* retrieve some keys from globus rsl and convert to gLite format */ if( job.globusRSL.containsKey( "queue" ) ){ job.condorVariables.construct( "+remote_queue" , (String)job.globusRSL.get( "queue" ) ); } /* convert some condor keys and globus keys to remote ce requirements +remote_cerequirements = blah */ job.condorVariables.construct( "+remote_cerequirements", getCERequirementsForJob( job ) ); /* do special handling for jobs scheduled to local site * as condor file transfer mechanism does not work * Special handling for the JPL cluster */ if( job.getSiteHandle().equals( "local" ) && job instanceof TransferJob ){ /* remove the change dir requirments for the * third party transfer on local host */ job.vdsNS.construct( Pegasus.CHANGE_DIR_KEY, "false" ); job.condorVariables.removeKey( "remote_initialdir" ); } /* similar handling for registration jobs */ if( job.getSiteHandle().equals( "local" ) && job.getJobType() == Job.REPLICA_REG_JOB ){ /* remove the change dir requirments for the * third party transfer on local host */ job.vdsNS.construct( Pegasus.CHANGE_DIR_KEY, "false" ); job.condorVariables.removeKey( "remote_initialdir" ); } if ( job.getSiteHandle().equals("local") ) { applyCredentialsForLocalExec(job); } else { applyCredentialsForRemoteExec(job); } } /** * Constructs the value for remote CE requirements expression for the job . * * For e.g. the expression in the submit file may look as *
     * +remote_cerequirements = "PROCS==18 && NODES==1 && PRIORITY==10 && WALLTIME==3600
     *   && PASSENV==1 && JOBNAME==\"TEST JOB\" && MYENV ==\"GAURANG=MEHTA,KARAN=VAHI\""
     * 
     * 
* * The requirements are generated on the basis of certain profiles associated * with the job. * The following globus profiles, if associated with the job, are picked up *
     * hostcount  -> PROCS
     * count      -> NODES
     * maxwalltime-> WALLTIME
     * 
* * The following condor profiles, if associated with the job, are picked up *
     * priority  -> PRIORITY
     * 
* * All the env profiles are translated to MYENV * * @param job * * @return the value to the expression and it is condor quoted * * @throws CondorStyleException in case of condor quoting error */ protected String getCERequirementsForJob( Job job ) throws CondorStyleException { StringBuffer value = new StringBuffer(); //do quoting ourselves value.append( "\"" ); /* append the job name */ /* job name cannot have - or _ */ String id = job.getID().replace( "-", "" ); id = id.replace( "_", "" ); //the jobname in case of pbs can only be 15 characters long id = ( id.length() > 15 )? id.substring( 0, 15 ) : id; /* Not adding JOBNAME the GAHP keeps on crashing if specified * on pollux. Karan Feb 18, 2010 addSubExpression( value, "JOBNAME" , id ); */ /* always have PASSENV to true */ //value.append( " && "); addSubExpression( value, "PASSENV", 1 ); /* specifically pass the queue in the requirement since some versions dont handle +remote_queue correctly */ if( job.globusRSL.containsKey( "queue" ) ){ value.append( " && "); addSubExpression( value, "QUEUE", (String)job.globusRSL.get( "queue" ) ); } /* the globus key hostCount is PROCS */ if( job.globusRSL.containsKey( "hostcount" ) ){ value.append( " && " ); addSubExpression( value, "PROCS" , Integer.parseInt( (String)job.globusRSL.get( "hostcount" ) ) ) ; } /* the globus key count is NODES */ if( job.globusRSL.containsKey( "count" ) ){ value.append( " && " ); addSubExpression( value, "NODES" , Integer.parseInt( (String)job.globusRSL.get( "count" ) ) ); } /* the globus key maxwalltime is WALLTIME */ if( job.globusRSL.containsKey( "maxwalltime" ) ){ value.append( " && " ); addSubExpression( value,"WALLTIME" , Integer.parseInt( (String)job.globusRSL.get( "maxwalltime" ) ) ); } /* the condor key priority is PRIORITY */ if( job.condorVariables.containsKey( "priority" ) ){ value.append( " && " ); addSubExpression( value, "PRIORITY" , Integer.parseInt( (String)job.condorVariables.get( "priority" ) ) ); } /* add the environment that is to be associated with the job */ StringBuffer env = new StringBuffer(); for( Iterator it = job.envVariables.getProfileKeyIterator(); it.hasNext(); ){ String key = (String)it.next(); env.append( key ).append( "=" ).append( job.envVariables.get(key) ); if( it.hasNext() ){ env.append( "," ); } } if( env.length() > 0 ){ value.append( " && " ); addSubExpression( value, "MYENV" , env.toString() ); } //No quoting to be applied // JIRA PM-109 //return this.quote( value.toString() ); //do quoting ourselves value.append( "\"" ); return value.toString(); } /** * Adds a sub expression to a string buffer * * @param sb the StringBuffer * @param key the key * @param value the value */ protected void addSubExpression( StringBuffer sb, String key, String value ) { sb.append( key ).append( "==" ). append( "\\" ).append( "\"" ). append( value ). append( "\\" ).append( "\"" ); } /** * Adds a sub expression to a string buffer * * @param sb the StringBuffer * @param key the key * @param value the value */ protected void addSubExpression( StringBuffer sb, String key, Integer value ) { sb.append( key ).append( "==" ).append( value ); } /** * Constructs an error message in case of style mismatch. * * @param job the job object. * @param key the missing key */ protected String missingKeyError( Job job, String key ){ StringBuffer sb = new StringBuffer(); sb.append( "( " ). append( "Missing key " ).append( key ). append( " for job " ).append( job.getName() ). 
append( "with style " ).append( STYLE_NAME ); return sb.toString(); } /** * Condor Quotes a string * * @param string the string to be quoted. * * @return quoted string. * * @throws CondorStyleException in case of condor quoting error */ private String quote( String string ) throws CondorStyleException{ String result; try{ mLogger.log("Unquoted Prejob is " + string, LogManager.DEBUG_MESSAGE_LEVEL); result = CondorQuoteParser.quote( string, true ); mLogger.log("Quoted Prejob is " + result, LogManager.DEBUG_MESSAGE_LEVEL ); } catch (CondorQuoteParserException e) { throw new CondorStyleException("CondorQuoting Problem " + e.getMessage()); } return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/style/CondorGlideinWMS.java0000644000175000017500000000564411757531137032601 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor.style; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.code.generator.condor.CondorStyleException; /** * Jobs targeting glidinWMS pools. Basically standard Condor jobs with special * requirements and ranks * * @author Mats Rynge * @version $Revision: 2090 $ */ public class CondorGlideinWMS extends Condor { /** * The name of the style being implemented. */ public static final String STYLE_NAME = "CondorGlideinWMS"; /** * The default constructor. */ public CondorGlideinWMS() { super(); } /** * @param job the job on which the style needs to be applied. * * @throws CondorStyleException in case of any error occuring code generation. */ public void apply( Job job ) throws CondorStyleException{ // default is vanilla universe for glideinwms jobs String universe = job.condorVariables.containsKey( Condor.UNIVERSE_KEY )? 
(String)job.condorVariables.get( Condor.UNIVERSE_KEY): Condor.VANILLA_UNIVERSE; job.condorVariables.construct(Condor.UNIVERSE_KEY, universe); // glideinWMS jobs are basic Condor jobs super.apply(job); universe = (String)job.condorVariables.get( Condor.UNIVERSE_KEY); if (universe.equalsIgnoreCase( Condor.VANILLA_UNIVERSE ) || universe.equalsIgnoreCase( Condor.STANDARD_UNIVERSE ) || universe.equalsIgnoreCase( Condor.PARALLEL_UNIVERSE ) ){ job.condorVariables.construct("should_transfer_files", "YES"); job.condorVariables.construct("when_to_transfer_output", "ON_EXIT"); // job requirements - steer the jobs to the glideins at the right site String req = "(IS_MONITOR_VM == False)" + " && (Arch != \"\") && (OpSys != \"\") && (Disk != -42)" + " && (Memory > 1) && (FileSystemDomain != \"\")"; job.condorVariables.construct("requirements", req); // rank - steer jobs to the newest available glideins - this is so we can // identify and remove unused glideins job.condorVariables.construct("rank", "DaemonStartTime"); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/CondorStyle.java0000644000175000017500000000464711757531137030601 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor; import edu.isi.pegasus.common.credential.CredentialHandlerFactory; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; /** * An interface to allow us to apply different execution styles to a job * via Condor DAGMAN. * * Some of the common styles supported are * - CondorG * - Condor * - Condor GlideIn * * @version $Revision: 4785 $ */ public interface CondorStyle { /** * The version number associated with this API of Code Generator. */ public static final String VERSION = "1.3"; /** * Initializes the Code Style implementation. * * @param bag the bag of initialization objects * @param credentialFactory the credential handler factory * * * @throws CondorStyleFactoryException that nests any error that * might occur during the instantiation of the implementation. */ public void initialize( PegasusBag bag , CredentialHandlerFactory credentialFactory ) throws CondorStyleException; /** * Apply a style to a job. Involves changing the job object, and optionally * writing out to the Condor submit file. * * @param job the Job object containing the job. * * @throws CondorStyleException in case of any error occuring code generation. */ public void apply( Job job ) throws CondorStyleException; /** * Apply a style to an AggregatedJob * * @param job the AggregatedJob object containing the job. * * @throws CondorStyleException in case of any error occuring code generation. 
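 *
 * As a hedged usage sketch for this interface as a whole (the factory
 * variable is assumed to be an initialized CondorStyleFactory, the factory
 * class that follows), callers typically obtain a style and apply it as
 *
 *      CondorStyle style = factory.loadInstance( job );
 *      style.apply( job );
 *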
*/ public void apply( AggregatedJob job ) throws CondorStyleException; } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/condor/CondorStyleFactory.java0000644000175000017500000002351311757531137032122 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator.condor; import edu.isi.pegasus.common.credential.CredentialHandlerFactory; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.code.generator.condor.CondorStyle; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.Pegasus; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.classes.PegasusBag; import java.util.Map; import java.util.HashMap; import java.util.Iterator; import java.io.IOException; /** * A factory class to load the appropriate type of Condor Style implementations. * This factory class is different from other factories, in the sense that it * must be instantiated and initialized first before calling out to any * of the Factory methods. * * @author Karan Vahi * @version $Revision: 4817 $ */ public class CondorStyleFactory { /** * The default package where all the implementing classes are supposed to * reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.code.generator.condor.style"; // /** * The name of the class implementing the Condor Style. */ private static final String CONDOR_STYLE_IMPLEMENTING_CLASS = "Condor"; /** * The name of the class implementing the Condor GlideIN Style. */ private static final String GLIDEIN_STYLE_IMPLEMENTING_CLASS = "CondorGlideIN"; /** * The name of the class implementing the Condor GlideinWMS Style. */ private static final String GLIDEINWMS_STYLE_IMPLEMENTING_CLASS = "CondorGlideinWMS"; /** * The name of the class implementing the CondorG Style. */ private static final String GLOBUS_STYLE_IMPLEMENTING_CLASS = "CondorG"; /** * The name of the class implementing the CondorC Style. */ private static final String CONDORC_STYLE_IMPLEMENTING_CLASS = "CondorC"; /** * The name of the class implementing the GLite Style. */ private static final String GLITE_STYLE_IMPLEMENTING_CLASS = "GLite"; /** * Returns a table that maps the Pegasus style keys to the names of implementing * classes. * * @return a Map indexed by Pegasus styles, and values as names of implementing * classes. 
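 *
 * A sketch of the mapping that gets built (the key strings on the left are
 * assumptions for the values of the Pegasus style constants, not verified
 * from this file):
 *
 *      condor     -> Condor
 *      glidein    -> CondorGlideIN
 *      glideinwms -> CondorGlideinWMS
 *      globus     -> CondorG
 *      glite      -> GLite
 *      condorc    -> CondorC
 *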
*/ private static Map implementingClassNameTable(){ if( mImplementingClassNameTable == null ){ mImplementingClassNameTable = new HashMap(3); mImplementingClassNameTable.put( Pegasus.CONDOR_STYLE, CONDOR_STYLE_IMPLEMENTING_CLASS); mImplementingClassNameTable.put( Pegasus.GLIDEIN_STYLE, GLIDEIN_STYLE_IMPLEMENTING_CLASS); mImplementingClassNameTable.put( Pegasus.GLIDEINWMS_STYLE, GLIDEINWMS_STYLE_IMPLEMENTING_CLASS); mImplementingClassNameTable.put( Pegasus.GLOBUS_STYLE, GLOBUS_STYLE_IMPLEMENTING_CLASS); mImplementingClassNameTable.put( Pegasus.GLITE_STYLE, GLITE_STYLE_IMPLEMENTING_CLASS); mImplementingClassNameTable.put( Pegasus.CONDORC_STYLE, CONDORC_STYLE_IMPLEMENTING_CLASS ); } return mImplementingClassNameTable; } /** * A table that maps, Pegasus style keys to the names of the corresponding classes * implementing the CondorStyle interface. */ private static Map mImplementingClassNameTable; /** * A table that maps, Pegasus style keys to appropriate classes implementing the * CondorStyle interface */ private Map mImplementingClassTable ; /** * A boolean indicating that the factory has been initialized. */ private boolean mInitialized; /** * Handler to the Credential Handler factory. */ private CredentialHandlerFactory mCredentialFactory; /** * The default constructor. */ public CondorStyleFactory(){ mImplementingClassTable = new HashMap(3); mInitialized = false; } /** * Initializes the Factory. Loads all the implementations just once. * * @param bag the bag of initialization objects * * @throws CondorStyleFactoryException that nests any error that * might occur during the instantiation of the implementation. */ public void initialize( PegasusBag bag ) throws CondorStyleFactoryException{ //load and intialize the CredentialHandler Factory mCredentialFactory = new CredentialHandlerFactory(); mCredentialFactory.initialize( bag ); //load all the implementations that correspond to the Pegasus style keys for( Iterator it = this.implementingClassNameTable().entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry) it.next(); String style = (String)entry.getKey(); String className= (String)entry.getValue(); //load via reflection. not required in this case though put( style, this.loadInstance( bag, className )); } //we have successfully loaded all implementations mInitialized = true; } /** * This method loads the appropriate implementing CondorStyle as specified * by the user at runtime. The CondorStyle is initialized and returned. * * @param job the job for which the corresponding style is required. * * @throws CondorStyleFactoryException that nests any error that * might occur during the instantiation of the implementation. */ public CondorStyle loadInstance( Job job ) throws CondorStyleFactoryException{ //sanity checks first if( !mInitialized ){ throw new CondorStyleFactoryException( "CondorStyleFactory needs to be initialized first before using" ); } String defaultStyle = job.getSiteHandle().equalsIgnoreCase( "local" )? //jobs scheduled on local site have //default style as condor Pegasus.CONDOR_STYLE: Pegasus.GLOBUS_STYLE; String style = job.vdsNS.containsKey( Pegasus.STYLE_KEY )? (String)job.vdsNS.get( Pegasus.STYLE_KEY ): defaultStyle; //need to check if the style isvalid or not //missing for now. 
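        //a hedged illustration of what the missing validity check could look
        //like; this is not part of the original source and simply reuses the
        //class name table above
        //
        //    if( !implementingClassNameTable().containsKey( style ) ){
        //        throw new CondorStyleFactoryException( "Unsupported style " + style );
        //    }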
//update the job with style determined job.vdsNS.construct( Pegasus.STYLE_KEY, style ); //now just load from the implementing classes Object cs = this.get( style ); if ( cs == null ) { throw new CondorStyleFactoryException( "Unsupported style " + style); } return (CondorStyle)cs; } /** * This method loads the appropriate Condor Style using reflection. * * * @param bag the bag of initialization objects * @param className the name of the implementing class. * * @return the instance of the class implementing this interface. * * @throws CondorStyleFactoryException that nests any error that * might occur during the instantiation of the implementation. * * @see #DEFAULT_PACKAGE_NAME */ private CondorStyle loadInstance( PegasusBag bag, String className ) throws CondorStyleFactoryException{ //sanity check PegasusProperties properties = bag.getPegasusProperties(); if (properties == null) { throw new RuntimeException( "Invalid properties passed" ); } if (className == null) { throw new RuntimeException( "Invalid className specified" ); } //prepend the package name if classname is actually just a basename className = (className.indexOf('.') == -1) ? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className : //load directly className; //try loading the class dynamically CondorStyle cs = null; try { DynamicLoader dl = new DynamicLoader( className ); cs = (CondorStyle) dl.instantiate( new Object[0] ); //initialize the loaded condor style cs.initialize( bag, mCredentialFactory ); } catch (Exception e) { throw new CondorStyleFactoryException( "Instantiating Condor Style ", className, e); } return cs; } /** * Returns the implementation from the implementing class table. * * @param style the Pegasus style * * @return implementation the class implementing that style, else null */ private Object get( String style ){ return mImplementingClassTable.get( style); } /** * Inserts an entry into the implementing class table. * * @param style the Pegasus style * @param implementation the class implementing that style. */ private void put( String style, Object implementation){ mImplementingClassTable.put( style, implementation ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/Shell.java0000644000175000017500000005226011757531137026111 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.code.generator; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import edu.isi.pegasus.common.credential.CredentialHandler; import edu.isi.pegasus.common.credential.CredentialHandler.TYPE; import edu.isi.pegasus.common.credential.CredentialHandlerFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.DefaultStreamGobblerCallback; import edu.isi.pegasus.common.util.StreamGobbler; import edu.isi.pegasus.common.util.StreamGobblerCallback; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.code.CodeGeneratorException; import edu.isi.pegasus.planner.code.GridStart; import edu.isi.pegasus.planner.code.GridStartFactory; import edu.isi.pegasus.planner.code.POSTScript; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.partitioner.graph.Adapter; import edu.isi.pegasus.planner.partitioner.graph.Graph; import edu.isi.pegasus.planner.partitioner.graph.GraphNode; /** * This code generator generates a shell script in the submit directory. * The shell script can be executed on the submit host to run the workflow * locally. * * @author Karan Vahi * @version $Revision: 4818 $ */ public class Shell extends Abstract { public static final String PEGASUS_SHELL_RUNNER_FUNCTIONS_BASENAME = "shell-runner-functions.sh "; /** * The prefix for events associated with job in jobstate.log file */ public static final String JOBSTATE_JOB_PREFIX = "JOB"; /** * The prefix for events associated with POST_SCRIPT in jobstate.log file */ public static final String JOBSTATE_POST_SCRIPT_PREFIX = "POST_SCRIPT"; /** * The prefix for events associated with PRE_SCRIPT in jobstate.log file */ public static final String JOBSTATE_PRE_SCRIPT_PREFIX = "PRE_SCRIPT"; /** * The handle to the output file that is being written to. */ private PrintWriter mWriteHandle; /** * Handle to the Site Store. */ private SiteStore mSiteStore; /** * The handle to the GridStart Factory. */ protected GridStartFactory mGridStartFactory; /** * A boolean indicating whether grid start has been initialized or not. */ protected boolean mInitializeGridStart; /** * The default constructor. */ public Shell( ){ super(); mInitializeGridStart = true; mGridStartFactory = new GridStartFactory(); } /** * Initializes the Code Generator implementation. * * @param bag the bag of initialization objects. * * @throws CodeGeneratorException in case of any error occurring during code generation. */ public void initialize( PegasusBag bag ) throws CodeGeneratorException{ super.initialize( bag ); mLogger = bag.getLogger(); //create the base directory if required File wdir = new File(mSubmitFileDir); wdir.mkdirs(); //get the handle to pool file mSiteStore = bag.getHandleToSiteStore(); } /** * Generates the shell script for the concrete workflow in the submit * directory. One shell script is generated per workflow. * * @param dag the concrete workflow. * * @return the Collection of File objects for the files written out. * * @throws CodeGeneratorException in case of any error occurring during code generation. 
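 *
 * For orientation, the generated script has roughly the shape below (the
 * paths and the job id are illustrative; the exact lines are assembled by
 * the header, per-job and footer helper methods that follow):
 *
 *      #!/bin/bash
 *      . /path/to/share/sh/shell-runner-functions.sh
 *      PEGASUS_SUBMIT_DIR=/path/to/submit/dir
 *      JOBSTATE_LOG=jobstate.log
 *      touch $JOBSTATE_LOG
 *      echo "INTERNAL *** SHELL_SCRIPT_STARTED ***" >> $JOBSTATE_LOG
 *      execute_job job1 /scratch/wf/dir /bin/echo "hello world" "" "K=V"
 *      check_exitcode job1 JOB $?
 *      echo "INTERNAL *** SHELL_SCRIPT_FINISHED 0 ***" >> $JOBSTATE_LOG
 *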
*/ public Collection generateCode( ADag dag ) throws CodeGeneratorException{ String opFileName = this.getPathToShellScript( dag ) ; initializeWriteHandle( opFileName ); Collection result = new ArrayList( 1 ); result.add( new File( opFileName ) ); //write out the script header writeString(this.getScriptHeader( mSubmitFileDir ) ); //we first need to convert internally into graph format Graph workflow = Adapter.convert( dag ); //traverse the workflow in topological sort order for( Iterator it = workflow.topologicalSortIterator(); it.hasNext(); ){ GraphNode node = it.next(); Job job = (Job)node.getContent(); generateCode( dag, job ); } //write out the footer writeString(this.getScriptFooter()); mWriteHandle.close(); //set the XBit on the generated shell script setXBitOnFile( opFileName ); //the dax replica store this.writeOutDAXReplicaStore( dag ); //write out the braindump file this.writeOutBraindump( dag ); //write out the nelogger file this.writeOutStampedeEvents( dag ); //write out the metrics file this.writeOutWorkflowMetrics(dag); return result; } /** * Generates the code for a single job in the input format of the workflow * executor being used. * * @param dag the dag of which the job is a part of. * @param job the Job object holding the information about * that particular job. * * @throws CodeGeneratorException in case of any error occuring code generation. */ public void generateCode( ADag dag, Job job ) throws CodeGeneratorException{ mLogger.log( "Generating code for job " + job.getID() , LogManager.DEBUG_MESSAGE_LEVEL ); //sanity check if( !job.getSiteHandle().equals( "local" ) ){ throw new CodeGeneratorException( "Shell Code generator only works for jobs scheduled to site local" ); } CredentialHandlerFactory factory = new CredentialHandlerFactory(); factory.initialize( mBag ); for (TYPE type : job.getCredentialTypes()) { CredentialHandler handler = factory.loadInstance( type ); job.addProfile( new Profile( Profile.ENV, handler .getEnvironmentVariable(), handler.getPath() ) ); } factory = null; //initialize GridStart if required. if ( mInitializeGridStart ){ mGridStartFactory.initialize( mBag, dag ); mInitializeGridStart = false; } //determine the work directory for the job String execDir = getExecutionDirectory( job ); //for local jobs we need initialdir //instead of remote_initialdir job.condorVariables.construct("initialdir", execDir ); job.condorVariables.construct( "universe", "local" ); SiteCatalogEntry site = mSiteStore.lookup( job.getSiteHandle() ); //JIRA PM-491 . Path to kickstart should not be passed //to the factory. 
// String gridStartPath = site.getKickstartPath(); GridStart gridStart = mGridStartFactory.loadGridStart( job , null ); //enable the job if( !gridStart.enable( job,false ) ){ String msg = "Job " + job.getName() + " cannot be enabled by " + gridStart.shortDescribe() + " to run at " + job.getSiteHandle(); mLogger.log( msg, LogManager.FATAL_MESSAGE_LEVEL ); throw new CodeGeneratorException( msg ); } //apply the appropriate POSTScript POSTScript ps = mGridStartFactory.loadPOSTScript( job, gridStart ); boolean constructed = ps.construct( job, Dagman.POST_SCRIPT_KEY ); //generate call to executeJob writeString( generateCallToExecuteJob( job, execDir, this.mSubmitFileDir ) ); if( constructed ){ //execute postscript and check for exitcode writeString( generateCallToExecutePostScript( job, mSubmitFileDir ) ); writeString( generateCallToCheckExitcode( job, JOBSTATE_POST_SCRIPT_PREFIX ) ); } else{ //no postscript generated //generate the call to check_exitcode //check_exitcode test1 JOB $? writeString( generateCallToCheckExitcode( job, JOBSTATE_JOB_PREFIX ) ); } writeString( "" ); } /** * Returns a Map containing additional braindump entries that are specific * to a Code Generator * * @param workflow the executable workflow * * @return Map */ public Map getAdditionalBraindumpEntries( ADag workflow ) { Map entries = new HashMap(); entries.put( Braindump.GENERATOR_TYPE_KEY, "shell" ); entries.put( "script", this.getPathToShellScript( workflow ) ); return entries; } /** * Generates a call to check_exitcode function that is used * * @param job the associated job * @param prefix the prefix for the jobstate.log events * * @return the call to execute job function. */ protected String generateCallToCheckExitcode( Job job, String prefix ){ StringBuffer sb = new StringBuffer(); sb.append( "check_exitcode" ).append( " " ). append( job.getID() ).append( " " ). append( prefix ).append( " " ). append( "$?" ); return sb.toString(); } /** * Generates a call to execute_post_script function , that is used to launch * a job from the shell script. * * @param job the job to be launched * @param directory the directory in which the job needs to be launched. * * @return the call to execute job function. */ protected String generateCallToExecutePostScript( Job job, String directory ){ StringBuffer sb = new StringBuffer(); //gridstart modules right now store the executable //and arguments as condor profiles. Should be fixed. //This setting should happen only in Condor Generator String executable = (String) job.dagmanVariables.get( Dagman.POST_SCRIPT_KEY ); StringBuffer args = new StringBuffer(); args.append( (String)job.dagmanVariables.get( Dagman.POST_SCRIPT_ARGUMENTS_KEY ) ). append( " " ).append( (String)job.dagmanVariables.get( Dagman.OUTPUT_KEY) ); String arguments = args.toString(); //generate the call to execute job function //execute_job $jobstate test1 /tmp /bin/echo "Karan Vahi" "stdin file" "k=v" "g=m" sb.append( "execute_post_script" ).append( " " ). append( job.getID() ).append( " " ).//the job id append( directory ).append( " " ). //the directory in which we want the job to execute append( executable ).append( " " ). //the executable to be invoked append( "\"" ).append( arguments ).append( "\"" ).append( " " );//the arguments //handle stdin sb.append( "\"\"" ); sb.append( " " ); //add the environment variables return sb.toString(); } /** * Generates a call to execute_job function , that is used to launch * a job from the shell script. 
* * @param job the job to be launched * @param scratchDirectory the workflow specific execution directory created during running of the workflow * @param submitDirectory the submit directory of the workflow * * @return the call to execute job function. */ protected String generateCallToExecuteJob( Job job, String scratchDirectory, String submitDirectory ){ StringBuffer sb = new StringBuffer(); //gridstart modules right now store the executable //and arguments as condor profiles. Should be fixed. //This setting should happen only in Condor Generator /* String executable = (String) job.condorVariables.get( "executable" ); String arguments = (String)job.condorVariables.get( Condor.ARGUMENTS_KEY ); */ String executable = job.getRemoteExecutable(); String arguments = job.getArguments(); arguments = ( arguments == null ) ? "" : arguments; String directory = job.runInWorkDirectory() ? scratchDirectory : submitDirectory; //generate the call to execute job function //execute_job $jobstate test1 /tmp /bin/echo "Karan Vahi" "stdin file" "k=v" "g=m" sb.append( "execute_job" ).append( " " ). append( job.getID() ).append( " " ).//the job id append( directory ).append( " " ). //the directory in which we want the job to execute append( executable ).append( " " ). //the executable to be invoked append( "\"" ).append( arguments ).append( "\"" ).append( " " );//the arguments //handle stdin for jobs String stdin = job.getStdIn(); if( stdin == null || stdin.length() == 0 ){ sb.append( "\"\"" ); } else{ if( stdin.startsWith( File.separator ) ){ sb.append( stdin ); } else{ sb.append( this.mSubmitFileDir ).append( File.separator ).append( stdin ); } } sb.append( " " ); //add the environment variables for( Iterator it = job.envVariables.getProfileKeyIterator(); it.hasNext(); ){ String key = (String)it.next(); sb.append( "\"" ). append( key ).append( "=" ).append( job.envVariables.get( key ) ). append( "\"" ).append( " " ); } return sb.toString(); } /** * Returns the header for the generated shell script. The header contains * the code block that sources the common plan script from $PEGASUS_HOME/bin * and initializes the jobstate.log file. * * @param submitDirectory the submit directory for the workflow. * * @return the script header */ protected String getScriptHeader( String submitDirectory ){ StringBuffer sb = new StringBuffer(); sb.append( "#!/bin/bash" ).append( "\n" ). append( "#" ).append( "\n" ). append( "# executes the workflow in shell mode " ).append( "\n" ). append( "#" ).append( "\n" ). append( "\n"); String runnerFunctionsFile = getSubmitHostPathToShellRunnerFunctions(); //check for common shell script before sourcing sb.append( "if [ ! -e " ) .append( runnerFunctionsFile ).append( " ];then" ).append( "\n" ). append( " echo \"Unable to find shell-runner-functions.sh file.\"" ).append( "\n" ). append( " echo \"You need to use Pegasus Version 3.2 or higher\"").append( "\n" ). append( " exit 1 " ).append( "\n" ). append( "fi" ).append( "\n" ); //source the common shell script sb.append( ". ").append( runnerFunctionsFile ).append( "\n" ). append( "" ).append( "\n" ); sb.append( "PEGASUS_SUBMIT_DIR" ).append( "=" ).append( submitDirectory ).append( "\n" ). append( "\n"); sb.append( "#initialize jobstate.log file" ).append( "\n" ). append( "JOBSTATE_LOG=jobstate.log" ).append( "\n" ). append( "touch $JOBSTATE_LOG" ).append( "\n" ). 
append( "echo \"INTERNAL *** SHELL_SCRIPT_STARTED ***\" >> $JOBSTATE_LOG" ).append( "\n" ); return sb.toString(); } /** * Determines the path to common shell functions file that the generated * shell script will use. * * @return the path on the submit host. */ protected String getSubmitHostPathToShellRunnerFunctions() { StringBuffer path = new StringBuffer(); //first get the path to the share directory File share = mProps.getSharedDir(); if( share == null ){ throw new RuntimeException( "Property for Pegasus share directory is not set" ); } path.append( share.getAbsolutePath() ).append( File.separator ). append( "sh" ).append( File.separator ).append( Shell.PEGASUS_SHELL_RUNNER_FUNCTIONS_BASENAME ); return path.toString(); } /** * Returns the footer for the generated shell script. * * @return the script footer. */ protected String getScriptFooter(){ StringBuffer sb = new StringBuffer(); sb.append( "echo \"INTERNAL *** SHELL_SCRIPT_FINISHED 0 ***\" >> $JOBSTATE_LOG" ); return sb.toString(); } /** * Returns path to the shell script that is generated * * @param dag the workflow * @return path */ protected String getPathToShellScript(ADag dag) { StringBuffer script = new StringBuffer(); script.append( this.mSubmitFileDir ).append( File.separator ). append( dag.dagInfo.nameOfADag ).append( ".sh" ); return script.toString(); } /** * It initializes the write handle to the output file. * * @param filename the name of the file to which you want the write handle. */ private void initializeWriteHandle(String filename) throws CodeGeneratorException{ try { File f = new File( filename ); mWriteHandle = new PrintWriter(new FileWriter( f )); mLogger.log("Writing to file " + filename , LogManager.DEBUG_MESSAGE_LEVEL); } catch (Exception e) { throw new CodeGeneratorException( "Unable to initialize file handle for shell script ", e ); } } /** * Writes a string to the associated write handle with the class * * @param st the string to be written. */ protected void writeString(String st){ //try{ //write the xml header mWriteHandle.println(st); /*} catch(IOException ex){ System.out.println("Error while writing to xml " + ex.getMessage()); }*/ } /** * Returns the directory in which a job should be executed. * * @param job the job. * * @return the directory */ protected String getExecutionDirectory(Job job) { String execSiteWorkDir = mSiteStore.getInternalWorkDirectory(job); String workdir = (String) job.globusRSL.removeKey("directory"); // returns old value workdir = (workdir == null)?execSiteWorkDir:workdir; return workdir; } /** * Sets the xbit on the file. * * @param file the file for which the xbit is to be set * * @return boolean indicating whether xbit was set or not. 
*/ protected boolean setXBitOnFile( String file ) { boolean result = false; //do some sanity checks on the source and the destination File f = new File( file ); if( !f.exists() || !f.canRead()){ mLogger.log("The file does not exist " + file, LogManager.ERROR_MESSAGE_LEVEL); return result; } try{ //set the callback and run the grep command Runtime r = Runtime.getRuntime(); String command = "chmod +x " + file; mLogger.log("Setting xbit " + command, LogManager.DEBUG_MESSAGE_LEVEL); Process p = r.exec(command); //the default gobbler callback always log to debug level StreamGobblerCallback callback = new DefaultStreamGobblerCallback(LogManager.DEBUG_MESSAGE_LEVEL); //spawn off the gobblers with the already initialized default callback StreamGobbler ips = new StreamGobbler(p.getInputStream(), callback); StreamGobbler eps = new StreamGobbler(p.getErrorStream(), callback); ips.start(); eps.start(); //wait for the threads to finish off ips.join(); eps.join(); //get the status int status = p.waitFor(); if( status != 0){ mLogger.log("Command " + command + " exited with status " + status, LogManager.DEBUG_MESSAGE_LEVEL); return result; } result = true; } catch(IOException ioe){ mLogger.log("IOException while creating symbolic links ", ioe, LogManager.ERROR_MESSAGE_LEVEL); } catch( InterruptedException ie){ //ignore } return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/DAXReplicaStore.java0000644000175000017500000001637311757531137030000 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import edu.isi.pegasus.planner.catalog.replica.ReplicaFactory; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.ReplicaLocation; import edu.isi.pegasus.planner.classes.ReplicaStore; import edu.isi.pegasus.planner.code.CodeGenerator; import edu.isi.pegasus.planner.code.CodeGeneratorException; import edu.isi.pegasus.planner.common.PegasusProperties; import java.io.File; import java.util.Collection; import java.util.Iterator; import java.util.LinkedList; import java.util.Properties; /** * A generator that writes out the replica store containing a file based replica * catalog that has the file locations mentioned in the DAX. * * @author Karan Vahi * @version $Revision: 3930 $ */ public class DAXReplicaStore implements CodeGenerator { /** * The name of the source key for Replica Catalog Implementer that serves as * the repository for DAX Replica Store */ public static final String DAX_REPLICA_STORE_CATALOG_KEY = "file"; /** * The name of the Replica Catalog Implementer that serves as the source for * cache files. 
*/ public static final String DAX_REPLICA_STORE_CATALOG_IMPLEMENTER = "SimpleFile"; /** * Suffix to be applied for cache file generation. */ private static final String CACHE_FILE_SUFFIX = ".cache"; /** * Suffix to be applied for the DAX Replica Store. */ private static final String DAX_REPLICA_STORE_SUFFIX = ".replica.store"; /** * The bag of initialization objects. */ protected PegasusBag mBag; /** * The directory where all the submit files are to be generated. */ protected String mSubmitFileDir; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The object containing the command line options specified to the planner * at runtime. */ protected PlannerOptions mPOptions; /** * The handle to the logging object. */ protected LogManager mLogger; /** * Returns the path to the DAX Replica Store File. * * @param options the options for the sub workflow. * @param label the label for the workflow. * @param index the index for the workflow. * * @return the path to the DAX replica store file */ public static String getDAXReplicaStoreFile( PlannerOptions options, String label , String index ){ StringBuffer sb = new StringBuffer(); sb.append( options.getSubmitDirectory()). append( File.separator ). append( Abstract.getDAGFilename(options, label, index, DAXReplicaStore.DAX_REPLICA_STORE_SUFFIX ) ); return sb.toString(); } /** * Initializes the Code Generator implementation. * * @param bag * the bag of initialization objects. * * @throws CodeGeneratorException * in case of any error occurring during code generation. */ public void initialize(PegasusBag bag) throws CodeGeneratorException { mBag = bag; mProps = bag.getPegasusProperties(); mPOptions = bag.getPlannerOptions(); mSubmitFileDir = mPOptions.getSubmitDirectory(); mLogger = bag.getLogger(); } /** * Writes out the DAX replica store for the work-flow, as a file based * replica catalog in the submit directory that lists the file locations * mentioned in the DAX. * * @param dag the concrete work-flow. * * @return the Collection of File objects for the files written * out. * * @throws CodeGeneratorException * in case of any error occurring during code generation. 
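 *
 * A hypothetical entry in the resulting file based replica catalog (the
 * LFN, PFN and attribute shown are invented for illustration; the exact
 * rendering is up to the SimpleFile implementation):
 *
 *      f.input file:///data/inputs/f.input pool="local"
 *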
*/ public Collection generateCode(ADag dag) throws CodeGeneratorException { //sanity check if( dag.getReplicaStore().isEmpty() ){ return new LinkedList(); } ReplicaCatalog rc = null; Properties replicaStoreProps = mProps.getVDSProperties().matchingSubset( ReplicaCatalog.c_prefix, false ); File file = new File ( getDAXReplicaStoreFile( this.mPOptions, dag.dagInfo.getLabel(), dag.dagInfo.index ) ); //set the appropriate property to designate path to file replicaStoreProps.setProperty( DAXReplicaStore.DAX_REPLICA_STORE_CATALOG_KEY, file.getAbsolutePath() ); mLogger.log("Writing out the DAX Replica Store to file " + file.getAbsolutePath(), LogManager.DEBUG_MESSAGE_LEVEL ); try{ rc = ReplicaFactory.loadInstance( DAXReplicaStore.DAX_REPLICA_STORE_CATALOG_IMPLEMENTER, replicaStoreProps); } catch( Exception e ){ throw new RuntimeException( "Unable to initialize the DAX Replica Store File " + file, e ); } //get hold of DAX Replica Store ReplicaStore store = dag.getReplicaStore(); for( Iterator it = store.replicaLocationIterator(); it.hasNext() ;){ ReplicaLocation rl = (ReplicaLocation)it.next(); String lfn = rl.getLFN(); for( Iterator rceIt = rl.pfnIterator(); rceIt.hasNext(); ){ ReplicaCatalogEntry rce = (ReplicaCatalogEntry) rceIt.next(); rc.insert(lfn, rce); } } rc.close(); Collection result = new LinkedList(); result.add( file ); return result; } /** * * Not implemented * * @param dag the work-flow * @param job the job for which the code is to be generated. * * @throws edu.isi.pegasus.planner.code.CodeGeneratorException */ public void generateCode(ADag dag, Job job) throws CodeGeneratorException { throw new CodeGeneratorException( "Replica Store generator only generates code for the whole workflow" ); } /** * Not implemented */ public boolean startMonitoring() { throw new UnsupportedOperationException("Not supported yet."); } /** * Not implemented */ public void reset() throws CodeGeneratorException { throw new UnsupportedOperationException("Not supported yet."); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/NetloggerJobMapper.java0000644000175000017500000001520611757531137030567 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator; import edu.isi.pegasus.common.logging.LogFormatterFactory; import edu.isi.pegasus.common.logging.LogFormatter; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LoggingKeys; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.Job; import java.io.IOException; import java.io.Writer; import java.util.Iterator; /** * This class can write out the job mappings that link jobs with jobs in the DAX * to a Writer stream in the netlogger format. 
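 *
 * For illustration, a job event followed by a mapping event could look
 * roughly like the lines below (the key names match the ones added by this
 * class; the job id key, the values and the exact rendering by the
 * Netlogger formatter are assumptions):
 *
 *      event=pegasus.job job.id=merge_ID000001 job.class=1 job.xform=pegasus::merge task.count=1
 *      event=pegasus.job.map job.id=merge_ID000001 task.id=ID000001 task.class=1 task.xform=pegasus::merge
 *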
* * @author Karan Vahi * @version $Revision: 3417 $ */ public class NetloggerJobMapper{ public static final String NETLOGGER_LOG_FORMATTER_IMPLEMENTOR = "Netlogger"; /** * The handle to the netlogger log formatter. */ private LogFormatter mLogFormatter; /** * The handle to pegasus logger used for run. */ private LogManager mLogger; /** * The default constructor. * * @param logger the logger instance to use for logging */ public NetloggerJobMapper( LogManager logger ){ mLogFormatter = LogFormatterFactory.loadInstance( NETLOGGER_LOG_FORMATTER_IMPLEMENTOR ); mLogger = logger; } /** * Writes out the job mappings for a workflow. * * @param writer the writer stream to which to write out the mappings * @param dag the dag for which to write out the mappings * * @throws IOException */ public void writeOutMappings( Writer writer , ADag dag ) throws IOException{ for( Iterator it = dag.jobIterator(); it.hasNext(); ){ Job job = it.next(); int type = job.getJobType(); mLogFormatter.addEvent( "pegasus.job", LoggingKeys.JOB_ID, job.getID() ); mLogFormatter.add( "job.class" , Integer.toString( type ) ); mLogFormatter.add( "job.xform" , job.getCompleteTCName() ); //determine count of jobs int taskCount = getTaskCount( job ); mLogFormatter.add( "task.count", Integer.toString( taskCount ) ); writer.write( mLogFormatter.createLogMessage() ); writer.write( "\n" ); mLogFormatter.popEvent(); // add mapping events only if task count > 0 if( taskCount > 0 ){ if ( job instanceof AggregatedJob ){ AggregatedJob j = (AggregatedJob)job; for( Iterator jit = j.constituentJobsIterator(); jit.hasNext(); ){ Job cJob = jit.next(); mLogFormatter.addEvent( "pegasus.job.map", LoggingKeys.JOB_ID, job.getID() ); writer.write( generateLogEvent( cJob, "task." ) ); writer.write( "\n" ); mLogFormatter.popEvent(); } } else{ mLogFormatter.addEvent( "pegasus.job.map", LoggingKeys.JOB_ID, job.getID() ); writer.write( generateLogEvent( job, "task." ) ); writer.write( "\n" ); mLogFormatter.popEvent(); } } } } /** * Generates a log event message in the netlogger format for a job * * @param job the job * @param prefix prefix if any to add to the keys * * @return netlogger formatted message */ private String generateLogEvent ( Job job, String prefix ) { String result = null; /*String taskID = (( job.getJobType() == Job.COMPUTE_JOB || job.getJobType() == Job.STAGED_COMPUTE_JOB ) && !(job instanceof AggregatedJob) )? job.getLogicalID(): ""; */ mLogFormatter.add( "task.id" , job.getLogicalID() ); mLogFormatter.add( getKey( prefix, "class" ), Integer.toString( job.getJobType() ) ); mLogFormatter.add( getKey( prefix, "xform" ), job.getCompleteTCName() ); result = mLogFormatter.createLogMessage(); return result; } /** * Adds a prefix to the key and returns it. * * @param prefix the prefix to be added * @param key the key * * @return the key with prefix added. */ private String getKey( String prefix, String key ){ if( prefix == null || prefix.length() == 0 ){ return key; } StringBuffer result = new StringBuffer(); result.append( prefix ).append( key); return result.toString(); } /** * Returns the task count for a job. The task count is the number of * jobs associated with the job in the DAX * * @param job * * @return task count */ private int getTaskCount( Job job ) { int count = 0; int type = job.getJobType(); //explicitly exclude cleanup jobs that are instance //of aggregated jobs. This is because while creating //the cleanup job we use the clone method. To be fixed. 
//Karan April 17 2009 if ( job instanceof AggregatedJob && type != Job.CLEANUP_JOB ){ //a clustered job the number of constituent is count count = ((AggregatedJob)job).numberOfConsitutentJobs(); } else if ( type == Job.COMPUTE_JOB /*|| type == Job.STAGED_COMPUTE_JOB*/ ){ //non clustered job check whether compute or not //and make sure there is dax job associated with it if( job.getLogicalID().length() == 0 ){ //takes care of the untar job that is tagged as compute mLogger.log( "Not creating event pegasus.job.map for job " + job.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); count = 0; } else{ count = 1; } } return count; } }pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/Metrics.java0000644000175000017500000001244011757531137026444 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.code.CodeGeneratorException; import java.net.UnknownHostException; import org.globus.gsi.GlobusCredentialException; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.common.PegasusProperties; import org.globus.gsi.GlobusCredential; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.net.InetAddress; import java.util.Collection; import java.util.LinkedList; /** * A Metrics file generator that generates a metrics file in the submit directory * * The following metrics are logged in the metrics file * *
 * 
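 * (Per the writeOutMetricsFile method below, the file body is the value
 * returned by dag.getWorkflowMetrics(), wrapped in a single pair of braces.)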
* * @author Karan Vahi * @version $Revision: 3409 $ */ public class Metrics { /** * The suffix to use while constructing the name of the metrics file */ public static final String METRICS_FILE_SUFFIX = ".metrics"; /** * The bag of initialization objects. */ protected PegasusBag mBag; /** * The directory where all the submit files are to be generated. */ protected String mSubmitFileDir; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The object containing the command line options specified to the planner * at runtime. */ protected PlannerOptions mPOptions; /** * The handle to the logging object. */ protected LogManager mLogger; /** * Initializes the Code Generator implementation. * * @param bag the bag of initialization objects. * * @throws CodeGeneratorException in case of any error occurring during code generation. */ public void initialize( PegasusBag bag ) throws CodeGeneratorException{ mBag = bag; mProps = bag.getPegasusProperties(); mPOptions = bag.getPlannerOptions(); mSubmitFileDir = mPOptions.getSubmitDirectory(); mLogger = bag.getLogger(); } /** * Generates the code for the executable workflow in terms of a metrics * file that contains workflow metrics useful for monitoring daemons etc. * * @param dag the concrete workflow. * * @return the Collection of File objects for the files written * out. * * @throws CodeGeneratorException in case of any error occurring during code generation. */ public Collection generateCode(ADag dag) throws CodeGeneratorException { try { Collection result = new LinkedList(); result.add( writeOutMetricsFile( dag) ); return result; } catch (IOException ioe) { throw new CodeGeneratorException( "IOException while writing out the metrics file" , ioe ); } } /** * Method not implemented. Throws an exception. * * @param dag the workflow * @param job the job for which the code is to be generated. * * @throws edu.isi.pegasus.planner.code.CodeGeneratorException */ public void generateCode( ADag dag, Job job ) throws CodeGeneratorException { throw new CodeGeneratorException( "Metrics generator only generates code for the whole workflow" ); } /** * Writes out the workflow metrics file in the submit directory * * @param dag the final executable workflow * * @return the absolute path to the metrics file written in the directory. * * @throws IOException in case of error while writing out the file. */ protected File writeOutMetricsFile( ADag dag ) throws IOException{ //create a writer to the metrics file in the directory. File f = new File( mSubmitFileDir , Abstract.getDAGFilename( this.mPOptions, dag.dagInfo.nameOfADag, dag.dagInfo.index, Metrics.METRICS_FILE_SUFFIX ) ); PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(f))); writer.println( "{\n" ); writer.println( dag.getWorkflowMetrics() ); writer.write( "}\n" ); writer.close(); return f; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/MonitordNotify.java0000644000175000017500000002111511757531137030021 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
* See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.Notifications; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.code.CodeGenerator; import edu.isi.pegasus.planner.code.CodeGeneratorException; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.dax.Invoke; import edu.isi.pegasus.planner.dax.Invoke.WHEN; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.util.Collection; import java.util.Iterator; import java.util.LinkedList; /** * A MonitordNotify Input File Generator that generates the input file required * for pegasus-monitord. * * @author Rajiv Mayani * @version $Revision: 3855 $ */ public class MonitordNotify implements CodeGenerator { /** * The suffix to use while constructing the name of the metrics file */ public static final String NOTIFICATIONS_FILE_SUFFIX = ".notify"; /** * The constant string to write for work flow notifications. */ public static final String WORKFLOW = "WORKFLOW"; /** * The constant string to write for job notifications. */ public static final String JOB = "JOB"; /** * The constant string to write for invocation notifications. */ public static final String INVOCATION = "INVOCATION"; /** * The constant string to write for dag job notifications. */ public static final String DAG_JOB = "DAGJOB"; /** * The constant string to write for dax job notifications. */ public static final String DAX_JOB = "DAXJOB"; /** * The delimiter with which to separate different fields in the * notifications file. */ public static final String DELIMITER = " "; /** * The bag of initialization objects. */ protected PegasusBag mBag; /** * The directory where all the submit files are to be generated. */ protected String mSubmitFileDir; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The object containing the command line options specified to the planner * at runtime. */ protected PlannerOptions mPOptions; /** * The handle to the logging object. */ protected LogManager mLogger; /** * The handle to the PrintWriter that writes out the notifications file */ private PrintWriter mNotificationsWriter; /** * Initializes the Code Generator implementation. * * @param bag * the bag of initialization objects. * * @throws CodeGeneratorException * in case of any error occurring code generation. */ public void initialize(PegasusBag bag) throws CodeGeneratorException { mNotificationsWriter = null; mBag = bag; mProps = bag.getPegasusProperties(); mPOptions = bag.getPlannerOptions(); mSubmitFileDir = mPOptions.getSubmitDirectory(); mLogger = bag.getLogger(); } /** * Generates the notifications input file. The method initially generates * work-flow level notification records, followed by job-level notification * records. * * @param dag the concrete work-flow. * * @return the Collection of File objects for the files written * out. * * @throws CodeGeneratorException * in case of any error occurring code generation. 
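 *
 * <p>Illustrative shape of the generated records, one per line, with fields
 * separated by DELIMITER (the WHEN values and script paths here are made up):
 *
 * <pre>
 *   WORKFLOW wf_uuid_value on_error /bin/notify-admin
 *   JOB ID0000001 at_end /bin/notify-user
 * </pre>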
*/ public Collection generateCode(ADag dag) throws CodeGeneratorException { File f = new File( mSubmitFileDir, Abstract.getDAGFilename( this.mPOptions, dag.dagInfo.nameOfADag, dag.dagInfo.index, MonitordNotify.NOTIFICATIONS_FILE_SUFFIX) ); try { mNotificationsWriter = new PrintWriter( new BufferedWriter( new FileWriter(f, true) ) ); } catch (IOException ioe) { mLogger.log("Unable to initialize writer for notifications file ", ioe, LogManager.ERROR_MESSAGE_LEVEL); throw new CodeGeneratorException( "Unable to initialize writer for notifications file ", ioe); } //let's first generate code for the workflow level //notifications String uuid = dag.getWorkflowUUID(); Notifications notifications = dag.getNotifications(); for (WHEN when : WHEN.values()) { for (Invoke invoke : notifications.getNotifications(when)) { mNotificationsWriter.println(MonitordNotify.WORKFLOW + DELIMITER + uuid + DELIMITER + when.toString() + DELIMITER + invoke.getWhat()); } } //walk through the workflow and generate code for //job notifications if specified for ( Iterator it = dag.jobIterator(); it.hasNext();) { Job job = it.next(); this.generateCode( dag, job ); } mNotificationsWriter.close(); Collection result = new LinkedList(); result.add(f); return result; } /** * Generates the notification records for a single job, and for clustered * jobs, for each constituent invocation, and appends them to the * notifications file. * * @param dag the workflow * @param job the job for which the code is to be generated. * * @throws edu.isi.pegasus.planner.code.CodeGeneratorException */ public void generateCode(ADag dag, Job job) throws CodeGeneratorException { String sType = null; String sJobId = job.getID(); switch (job.getJobType()) { case Job.DAG_JOB: sType = MonitordNotify.DAG_JOB; break; case Job.DAX_JOB: sType = MonitordNotify.DAX_JOB; break; default: sType = MonitordNotify.JOB; break; } //a new line only if there are some notifications //to print out. if( !job.getNotifications().isEmpty() ){ mNotificationsWriter.println(); } for ( WHEN when : WHEN.values() ) { for ( Invoke invoke : job.getNotifications(when) ) { mNotificationsWriter.println(sType + DELIMITER + sJobId + DELIMITER + when.toString() + DELIMITER + invoke.getWhat()); } } //for clustered jobs we need to list notifications //per invocation of the clustered job. if( job instanceof AggregatedJob ){ AggregatedJob aggJob = ( AggregatedJob )job; int invID = 1; for( Iterator it = aggJob.constituentJobsIterator(); it.hasNext(); invID++ ){ Job j = (Job)it.next(); //a new line only if there are some notifications //to print out. if( !j.getNotifications().isEmpty() ){ mNotificationsWriter.println(); } for ( WHEN when : WHEN.values() ) { for ( Invoke invoke : j.getNotifications(when) ) { StringBuffer sb = new StringBuffer(); sb.append( MonitordNotify.INVOCATION ) .append( DELIMITER ). append( job.getID() ).append( DELIMITER ). append( invID ).append( DELIMITER ). append( when.toString() ).append( DELIMITER ). append( invoke.getWhat()); mNotificationsWriter.println( sb.toString() ); } } } } } /** * Not implemented */ public boolean startMonitoring() { throw new UnsupportedOperationException("Not supported yet."); } /** * Not implemented */ public void reset() throws CodeGeneratorException { throw new UnsupportedOperationException("Not supported yet."); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/Stork.java0000644000175000017500000001473611757531137026146 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.TransferJob; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.code.CodeGenerator; import edu.isi.pegasus.planner.code.CodeGeneratorException; import edu.isi.pegasus.common.logging.LogManager; import java.io.File; import java.io.PrintWriter; import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.Map; import java.util.StringTokenizer; /** * This implementation generates files that can be understood by Stork. * * @author Karan Vahi * @version $Revision: 3832 $ */ public class Stork extends Abstract { /** * The nice start separator, define once, use often. */ public final static String mStartSeparator = "/**********************************************************************"; /** * The nice end separator, define once, use often. */ public final static String mEndSeparator = " **********************************************************************/"; /** * The LogManager object which is used to log all the messages. */ private LogManager mLogger; /** * The name of the credential that is to be used for submitting the stork * job. */ // private String mCredName; /** * The default constructor. */ public Stork(){ super(); } /** * Initializes the Code Generator implementation. * * @param bag the bag of initialization objects. * * @throws CodeGeneratorException in case of any error occuring code generation. */ public void initialize( PegasusBag bag ) throws CodeGeneratorException{ super.initialize( bag ); mLogger = bag.getLogger(); // mCredName = mProps.getCredName(); } /** * Generates the code for the concrete workflow in the input format of the * workflow executor being used. The method is not yet implemented. * * @param dag the concrete workflow. * * @return null * * @throws CodeGeneratorException in case of any error occuring code generation. */ public Collection generateCode( ADag dag ) throws CodeGeneratorException{ throw new CodeGeneratorException( new UnsupportedOperationException( "Stork Code Generator: Method generateCode( ADag) not implemeneted")); } /** * Generates the code for a single job in the Stork format. * * @param dag the dag of which the job is a part of. * @param job the Job object holding the information about * that particular job. * * * @throws CodeGeneratorException in case of any error occuring code generation. */ public void generateCode( ADag dag, Job job ) throws CodeGeneratorException{ String dagname = dag.dagInfo.nameOfADag; String dagindex = dag.dagInfo.index; String dagcount = dag.dagInfo.count; StringTokenizer st = new StringTokenizer(job.strargs,"\n"); String srcUrl = (st.hasMoreTokens())?st.nextToken():null; String dstUrl = (st.hasMoreTokens())?st.nextToken():null; //sanity check // Credential name is no longer required. Karan Feb 04, 2008 // if(mCredName == null){ // mLogger.log("Credential name needs to be specified for " + // " stork job. 
Set pegasus.transfer.stork.cred property", // LogManager.ERROR_MESSAGE_LEVEL); // throw new CodeGeneratorException( // "Credential name needs to be specified for " + // " stork job. Set pegasus.transfer.stork.cred property"); // // } //check for type of job. Stork only understands Transfer Jobs if (!(job instanceof TransferJob )){ throw new CodeGeneratorException( "Stork Code Generator can only generate code for transfer jobs" ); } PrintWriter writer = null; try{ writer = this.getWriter( job ); } catch( IOException ioe ){ throw new RuntimeException( "Unable to get Writer to write the Stork Submit file", ioe ); } writer.println(this.mStartSeparator); writer.println(" * PEGASUS WMS STORK FILE GENERATOR"); writer.println(" * DAG : " + dagname + ", Index = " + dagindex + ", Count = " + dagcount); writer.println(" * STORK FILE NAME : " + this.getFileBaseName(job)); writer.println(this.mEndSeparator); writer.println("["); writer.println("\tdap_type = \"" + "transfer" + "\";"); writer.println("\tsrc_url = \"" + srcUrl + "\";"); writer.println("\tdest_url = \"" + dstUrl + "\";"); writer.println("\tx509proxy = \"" + "default" + "\";"); writer.println("\tlog = \"" + this.getFileBaseName(job) + ".log" + "\";"); // DONE writer.println("]"); writer.println(this.mStartSeparator); writer.println(" * END OF STORK FILE"); writer.println(this.mEndSeparator); //flush the contents writer.close(); } /** * Returns the basename of the file to which the job is written to. * * @param job the job whose job information needs to be written. * * @return the basename of the file. */ public String getFileBaseName(Job job){ StringBuffer sb = new StringBuffer(); sb.append(job.jobName).append(".stork"); return sb.toString(); } /** * Returns an empty map * * @param workflow the workflow. * * @return map containing extra entries */ public Map getAdditionalBraindumpEntries(ADag workflow) { return new HashMap(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/Stampede.java0000644000175000017500000004626111757531137026610 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.code.generator; import edu.isi.pegasus.common.logging.LogFormatter; import edu.isi.pegasus.common.logging.LogFormatterFactory; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.code.CodeGeneratorException; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.AggregatedJob; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PCRelation; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.code.CodeGenerator; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.planner.namespace.Dagman; import edu.isi.pegasus.planner.refiner.DeployWorkerPackage; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.util.Collection; import java.util.Iterator; import java.util.LinkedList; /** * A Stampede Events Code Generator that generates events in netlogger format * for the executable workflow. This generator generates events about * *
 *   the tasks in the abstract workflow
 *   the edges in the abstract workflow
 *   jobs in the executable workflow
 *   the edges in the executable workflow
 *   the relationships describing how the tasks in the abstract workflow map to jobs in the
 *   executable workflow.
 * 
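 *
 * <p>The events are written in netlogger format to a .static.bp file in the
 * submit directory. Illustrative record shapes (attribute names come from the
 * constants defined below; the values and ordering here are made up):
 *
 * <pre>
 *   event=task.info xwf.id=... task.id=ID0000001 type=... type_desc=compute transformation=...
 *   event=job.edge xwf.id=... parent.job.id=... child.job.id=...
 * </pre>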
* * @author Karan Vahi * @version $Revision: 4369 $ */ public class Stampede implements CodeGenerator { /** * The suffix to use while constructing the name of the netlogger bp file */ public static final String NETLOGGER_BP_FILE_SUFFIX = ".static.bp"; public static final String NETLOGGER_LOG_FORMATTER_IMPLEMENTOR = "Netlogger"; /** * The attribute key for workflow id. */ public static final String WORKFLOW_ID_KEY = "xwf.id"; /** * The event name for task info */ public static final String TASK_EVENT_NAME = "task.info"; /** * The attribute key for task id */ public static final String TASK_ID_KEY = "task.id"; /** * The attribute key for task type */ public static final String TYPE_KEY = "type"; /** * The attribute key for type description */ public static final String TYPE_DESCRIPTION_KEY = "type_desc"; /** * The attribute key for transformation */ public static final String TASK_TRANSFORMATION_KEY = "transformation"; /** * The attribute key for task arguments. */ public static final String ARGUMENTS_KEY = "argv"; /** * The event name for task edge */ public static final String TASK_EDGE_EVENT_NAME = "task.edge"; /** * The attribute key for parent task id. */ public static final String PARENT_TASK_ID_KEY = "parent.task.id"; /** * The attribute key for child task id. */ public static final String CHILD_TASK_ID_KEY = "child.task.id"; /** * The event name for a job */ public static final String JOB_EVENT_NAME = "job.info"; /** * The attribute key for job id */ public static final String JOB_ID_KEY = "job.id"; /** * The attribute key for the submit file */ public static final String JOB_SUBMIT_FILE_KEY = "submit_file"; /** * The attribute key for whether a job is clustered or not */ public static final String JOB_CLUSTERED_KEY = "clustered"; /** * The attribute key for how many times a job is retried */ public static final String JOB_MAX_RETRIES_KEY = "max_retries"; /** * The attribute key for the number of tasks in the job */ public static final String JOB_TASK_COUNT_KEY = "task_count"; /** * The attribute key for the executable */ public static final String JOB_EXECUTABLE_KEY = "executable"; /** * The event name for job edge */ public static final String JOB_EDGE_EVENT_NAME = "job.edge"; /** * The attribute key for parent job id. */ public static final String PARENT_JOB_ID_KEY = "parent.job.id"; /** * The attribute key for child job id. */ public static final String CHILD_JOB_ID_KEY = "child.job.id"; /** * The event name for task map event */ public static final String TASK_MAP_EVENT_NAME = "wf.map.task_job"; /** * The handle to the netlogger log formatter. */ private LogFormatter mLogFormatter; /** * The bag of initialization objects. */ protected PegasusBag mBag; /** * The directory where all the submit files are to be generated. */ protected String mSubmitFileDir; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The object containing the command line options specified to the planner * at runtime. */ protected PlannerOptions mPOptions; /** * The handle to the logging object. */ protected LogManager mLogger; /** * Initializes the Code Generator implementation. * * @param bag the bag of initialization objects. * * @throws CodeGeneratorException in case of any error occurring during code generation. 
*/ public void initialize( PegasusBag bag ) throws CodeGeneratorException{ mBag = bag; mProps = bag.getPegasusProperties(); mPOptions = bag.getPlannerOptions(); mSubmitFileDir = mPOptions.getSubmitDirectory(); mLogger = bag.getLogger(); mLogFormatter = LogFormatterFactory.loadInstance( NETLOGGER_LOG_FORMATTER_IMPLEMENTOR ); } /** * Generates the code for the executable workflow in terms of a braindump * file that contains workflow metadata useful for monitoring daemons etc. * * @param dag the concrete workflow. * * @return the Collection of File objects for the files written * out. * * @throws CodeGeneratorException in case of any error occuring code generation. */ public Collection generateCode(ADag dag) throws CodeGeneratorException { PrintWriter writer = null; File f = new File( mSubmitFileDir , Abstract.getDAGFilename( this.mPOptions, dag.dagInfo.nameOfADag, dag.dagInfo.index, Stampede.NETLOGGER_BP_FILE_SUFFIX ) ); boolean generateCodeForExecutableWorkflow = dag.hasWorkflowRefinementStarted(); String uuid = dag.getWorkflowUUID(); try { writer = new PrintWriter(new BufferedWriter(new FileWriter(f, true) )); } catch ( IOException ioe ) { throw new CodeGeneratorException( "Unable to intialize writer to netlogger file " , ioe ); } if( generateCodeForExecutableWorkflow ){ //events generation for executable workflow for( Iterator it = dag.jobIterator(); it.hasNext(); ){ Job job = it.next(); generateEventsForExecutableJob( writer, dag, job ); } //monte wants the task map events generated separately //en mass. Lets iterate again for( Iterator it = dag.jobIterator(); it.hasNext(); ){ Job job = it.next(); generateTaskMapEvents( writer, dag, job ); } //write out the edge informatiom for the workflow for ( Iterator it = dag.dagInfo.relations.iterator(); it.hasNext(); ){ PCRelation relation = it.next(); mLogFormatter.addEvent( Stampede.JOB_EDGE_EVENT_NAME, Stampede.WORKFLOW_ID_KEY, uuid ); mLogFormatter.add( Stampede.PARENT_JOB_ID_KEY, relation.getParent() ); mLogFormatter.add( Stampede.CHILD_JOB_ID_KEY, relation.getChild() ); writer.println( mLogFormatter.createLogMessage() ); mLogFormatter.popEvent(); } } else{ //events generation for abstract workflow for( Iterator it = dag.jobIterator(); it.hasNext(); ){ Job job = it.next(); generateEventsForDAXTask( writer, dag, job ); } //write out the edge informatiom for the workflow for ( Iterator it = dag.dagInfo.relations.iterator(); it.hasNext(); ){ PCRelation relation = it.next(); mLogFormatter.addEvent( Stampede.TASK_EDGE_EVENT_NAME, Stampede.WORKFLOW_ID_KEY, uuid ); mLogFormatter.add( Stampede.PARENT_TASK_ID_KEY, relation.getAbstractParentID() ); mLogFormatter.add( Stampede.CHILD_TASK_ID_KEY, relation.getAbstractChildID() ); writer.println( mLogFormatter.createLogMessage() ); mLogFormatter.popEvent(); } } writer.close(); Collection result = new LinkedList(); result.add(f); return result; } /** * Generates stampede events corresponding to jobs/tasks in the DAX * * @param writer the writer stream to write the events too * @param workflow the workflow. * @param job the job for which to generate the events. 
*/ protected void generateEventsForDAXTask(PrintWriter writer, ADag workflow, Job job) throws CodeGeneratorException { String wfuuid = workflow.getWorkflowUUID(); //sanity check if ( !( job.getJobType() == Job.COMPUTE_JOB || job.getJobType() == Job.DAG_JOB || job.getJobType() == Job.DAX_JOB ) ){ //jobs/tasks in the dax can only be of the above types throw new CodeGeneratorException( "Invalid Job Type for a DAX Task while generating Stampede Events of type " + job.getJobTypeDescription() + " for workflow " + workflow.getAbstractWorkflowName() ); } mLogFormatter.addEvent( Stampede.TASK_EVENT_NAME, Stampede.WORKFLOW_ID_KEY , wfuuid ); mLogFormatter.add( Stampede.TASK_ID_KEY, job.getLogicalID() ); mLogFormatter.add( Stampede.TYPE_KEY, Integer.toString( job.getJobType() )); mLogFormatter.add( Stampede.TYPE_DESCRIPTION_KEY, job.getJobTypeDescription() ); mLogFormatter.add( Stampede.TASK_TRANSFORMATION_KEY, job.getCompleteTCName() ); //only add arguments attribute if arguments are not //null and length > 0 . Job constructor initializes arguments to "" if( job.getArguments() != null && job.getArguments().length() > 0 ){ mLogFormatter.add( Stampede.ARGUMENTS_KEY, job.getArguments() ); } writer.println( mLogFormatter.createLogMessage() ); mLogFormatter.popEvent(); } /** * Generates stampede events corresponding to an executable job * * @param writer the writer stream to write the events too * @param workflow the workflow. * @param job the job for which to generate the events. */ protected void generateEventsForExecutableJob(PrintWriter writer, ADag dag, Job job) throws CodeGeneratorException{ String wfuuid = dag.getWorkflowUUID(); mLogFormatter.addEvent( Stampede.JOB_EVENT_NAME, Stampede.WORKFLOW_ID_KEY , wfuuid ); mLogFormatter.add( Stampede.JOB_ID_KEY, job.getID() ); mLogFormatter.add( Stampede.JOB_SUBMIT_FILE_KEY, job.getID() + ".sub" ); mLogFormatter.add( Stampede.TYPE_KEY, Integer.toString( job.getJobType() )); mLogFormatter.add( Stampede.TYPE_DESCRIPTION_KEY, job.getJobTypeDescription() ); mLogFormatter.add( Stampede.JOB_CLUSTERED_KEY, booleanToInt( job instanceof AggregatedJob ) ); mLogFormatter.add( Stampede.JOB_MAX_RETRIES_KEY, job.dagmanVariables.containsKey( Dagman.RETRY_KEY ) ? (String)job.dagmanVariables.get( Dagman.RETRY_KEY ): "0" ); mLogFormatter.add( Stampede.JOB_EXECUTABLE_KEY , job.getRemoteExecutable() ); //only add arguments attribute if arguments are not //null and length > 0 . Job constructor initializes arguments to "" if( job.getArguments() != null && job.getArguments().length() > 0 ){ mLogFormatter.add( Stampede.ARGUMENTS_KEY , job.getArguments() ); } //determine count of jobs int taskCount = getTaskCount( job ); mLogFormatter.add( Stampede.JOB_TASK_COUNT_KEY, Integer.toString( taskCount ) ); writer.println( mLogFormatter.createLogMessage() ); mLogFormatter.popEvent(); } /** * Generates the task.map events that link the jobs in the DAX with the * jobs in the executable workflow * * * @param writer the writer stream to write the events too * @param workflow the workflow. * @param job the job for which to generate the events. 
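 *
 * <p>Illustrative output for a (hypothetical) clustered job merge_ID1 whose
 * constituents map to DAX tasks ID0000001 and ID0000002; one event is
 * emitted per constituent compute task:
 *
 * <pre>
 *   event=wf.map.task_job xwf.id=... job.id=merge_ID1 task.id=ID0000001
 *   event=wf.map.task_job xwf.id=... job.id=merge_ID1 task.id=ID0000002
 * </pre>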
*/ protected void generateTaskMapEvents(PrintWriter writer, ADag dag, Job job) { String wfuuid = dag.getWorkflowUUID(); //add task map events //only compute jobs/ dax and dag jobs have task events associated if( job.getJobType() == Job.COMPUTE_JOB || job.getJobType() == Job.DAG_JOB || job.getJobType() == Job.DAX_JOB ){ // untar jobs created as part of worker package staging //are of type compute but we don't want if( job.getLogicalID() == null || job.getLogicalID().isEmpty() ){ //dont warn if a job is compute and transformation name is untar if( job.getJobType() == Job.COMPUTE_JOB && job.getCompleteTCName().equals( DeployWorkerPackage.COMPLETE_UNTAR_TRANSFORMATION_NAME ) ){ //dont do anything return; } else{ //warn and return mLogger.log( "No corresponding DAX task for compute job " + job.getName() , LogManager.WARNING_MESSAGE_LEVEL ); return; } } if( job instanceof AggregatedJob ){ AggregatedJob j = (AggregatedJob)job; //go through the job constituents and task.map events for( Iterator cit = j.constituentJobsIterator(); cit.hasNext(); ){ Job constituentJob = cit.next(); if( constituentJob.getJobType() == Job.COMPUTE_JOB ){ //create task.map event //to the job in the DAX mLogFormatter.addEvent( Stampede.TASK_MAP_EVENT_NAME, Stampede.WORKFLOW_ID_KEY , wfuuid ); //to be retrieved mLogFormatter.add( Stampede.JOB_ID_KEY, job.getID() ); //mLogFormatter.add( "exec_job.id", job.getID() ); mLogFormatter.add( Stampede.TASK_ID_KEY, constituentJob.getLogicalID() ); writer.println( mLogFormatter.createLogMessage() ); //writer.write( "\n" ); mLogFormatter.popEvent(); } else{ //for time being lets warn mLogger.log( "Constituent Job " + constituentJob.getName() + " not of type compute for clustered job " + j.getName(), LogManager.WARNING_MESSAGE_LEVEL ); } } } else{ //create a single task.map event that maps compute job //to the job in the DAX mLogFormatter.addEvent( Stampede.TASK_MAP_EVENT_NAME, Stampede.WORKFLOW_ID_KEY , wfuuid ); //to be retrieved mLogFormatter.add( Stampede.JOB_ID_KEY, job.getID() ); mLogFormatter.add( Stampede.TASK_ID_KEY, job.getLogicalID() ); writer.println( mLogFormatter.createLogMessage() ); mLogFormatter.popEvent(); } } } /** * Method not implemented. Throws an exception. * * @param dag the workflow * @param job the job for which the code is to be generated. * * @throws edu.isi.pegasus.planner.code.CodeGeneratorException */ public void generateCode( ADag dag, Job job ) throws CodeGeneratorException { throw new CodeGeneratorException( "Stampede generator only generates code for the whole workflow" ); } /** * Returns the task count for a job. The task count is the number of tasks/jobs * in the DAX that map to this job. jobs inserted by Pegasus, which do not * have a mapped task from the DAX, will have its task_count set to 0. * * @param job the executable job. 
* * @return task count */ private int getTaskCount( Job job ) { int count = 0; int type = job.getJobType(); if ( job instanceof AggregatedJob && type == Job.COMPUTE_JOB ){ //a clustered job the number of constituent is count count = ((AggregatedJob)job).numberOfConsitutentJobs(); } else if ( type == Job.COMPUTE_JOB ){ //non clustered job check whether compute or not //and make sure there is dax job associated with it if( job.getLogicalID().length() == 0 ){ //takes care of the untar job that is tagged as compute mLogger.log( "Not creating event pegasus.task.count for job " + job.getID(), LogManager.DEBUG_MESSAGE_LEVEL ); count = 0; } else{ count = 1; } } return count; } /** * Returns boolean as an integer * * @param value the boolean value * * @return 0 for false and 1 for true */ public String booleanToInt( boolean value ){ return value ? "1" : "0"; } public boolean startMonitoring() { throw new UnsupportedOperationException("Not supported yet."); } public void reset() throws CodeGeneratorException { throw new UnsupportedOperationException("Not supported yet."); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/generator/Braindump.java0000644000175000017500000003334311757531137026764 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code.generator; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.planner.code.CodeGeneratorException; import java.net.UnknownHostException; import org.globus.gsi.GlobusCredentialException; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.DagInfo; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.common.PegasusProperties; import org.globus.gsi.GlobusCredential; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.io.PrintWriter; import java.net.InetAddress; import java.util.Collection; import java.util.LinkedHashMap; import java.util.LinkedList; import java.util.Map; /** * Braindump file code generator that generates a Braindump file for the * executable workflow in the submit directory. * * The following keys are generated in the braindump file. * *
 * wf_uuid
 * submit_hostname
 * planner_arguments
 * user
 * grid_dn
 * dax_label
 * timestamp
 * submit_dir
 * planner_version
 * type
 * properties
 * 
* * Additionally, the following duplicate keys exist till pegasus-run is modified. * *
 * old keyname -> new keyname
 * =============================
 * label --> dax_label
 * pegasus_wf_time --> timestamp
 * run --> submit_dir
 * pegasus_version --> planner_version
 * 
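 *
 * <p>Illustrative fragment of a generated braindump.txt, one key-value pair
 * per line separated by a single space (all values here are made up):
 *
 * <pre>
 *   user someuser
 *   wf_uuid 8f2b3c4d-....
 *   dax_label diamond
 *   submit_dir /home/someuser/submit/run0001
 *   planner_version 4.0.1
 * </pre>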
* * @author Karan Vahi * @version $Revision: 4507 $ */ public class Braindump { /** * The basename of the braindump file. */ public static final String BRAINDUMP_FILE = "braindump.txt"; /** * The Key designating type of Pegasus Code Generator. */ public static final String GENERATOR_TYPE_KEY = "type"; /** * The user who submitted the workflow. */ public static final String USER_KEY = "user"; /** * The Grid DN of the user. */ public static final String GRID_DN_KEY = "grid_dn"; /** * The path to the pegasus properties file */ public static final String PROPERTIES_KEY = "properties"; /** * The key for the submit hostname. */ public static final String SUBMIT_HOSTNAME_KEY = "submit_hostname"; /** * The arguments passed to the planner. */ public static final String PLANNER_ARGUMENTS_KEY = "planner_arguments"; /** * The key for UUID of the workflow. */ public static final String ROOT_UUID_KEY = "root_wf_uuid" ; /** * The key for UUID of the workflow. */ public static final String UUID_KEY = "wf_uuid" ; /** * The DAX label. */ public static final String DAX_LABEL_KEY = "dax_label"; /** * The dax index */ public static final String DAX_INDEX_KEY = "dax_index"; /** * The DAX version. */ public static final String DAX_VERRSION_KEY = "dax_version"; /** * The workflow timestamp. */ public static final String TIMESTAMP_KEY = "timestamp"; /** * The submit directory for the workflow. */ public static final String SUBMIT_DIR_KEY = "submit_dir"; /** * The Key for the version id. * * @see org.griphyn.cPlanner.classes.DagInfo#releaseVersion */ public static final String VERSION_KEY = "pegasus_version"; /** * The Key for the planner version */ public static final String PLANNER_VERSION_KEY = "planner_version"; /** * The Key for the pegasus build. */ public static final String BUILD_KEY = "pegasus_build"; /** * The Key for the flow id. * * @see org.griphyn.cPlanner.classes.DagInfo#flowIDName */ public static final String WF_NAME_KEY = "pegasus_wf_name"; /** * The Key for the timestamp. * * @see org.griphyn.cPlanner.classes.DagInfo#mFlowTimestamp */ public static final String WF_TIME_KEY = "pegasus_wf_time"; /** * The Key for the timestamp. * * @see org.griphyn.cPlanner.classes.DagInfo#mFlowTimestamp */ public static final String WF_TIMESTAMP_KEY = "timestamp"; /** * The bag of initialization objects. */ protected PegasusBag mBag; /** * The directory where all the submit files are to be generated. */ protected String mSubmitFileDir; /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The object containing the command line options specified to the planner * at runtime. */ protected PlannerOptions mPOptions; /** * The handle to the logging object. */ protected LogManager mLogger; /** * Initializes the Code Generator implementation. * * @param bag the bag of initialization objects. * * @throws CodeGeneratorException in case of any error occuring code generation. */ public void initialize( PegasusBag bag ) throws CodeGeneratorException{ mBag = bag; mProps = bag.getPegasusProperties(); mPOptions = bag.getPlannerOptions(); mSubmitFileDir = mPOptions.getSubmitDirectory(); mLogger = bag.getLogger(); } /** * Returns default braindump entries. 
* * @return default entries */ public Map defaultBrainDumpEntries( ADag workflow ) throws CodeGeneratorException { DagInfo dinfo = workflow.dagInfo; //to preserve order while writing out Map entries = new LinkedHashMap(); File directory = new File( mSubmitFileDir ); String absPath = directory.getAbsolutePath(); //user String user = mProps.getProperty( "user.name" ) ; if ( user == null ){ user = "unknown"; } entries.put( Braindump.USER_KEY, user ); //grid dn entries.put( Braindump.GRID_DN_KEY, getGridDN( ) ); //submit hostname entries.put( Braindump.SUBMIT_HOSTNAME_KEY, getSubmitHostname() ); entries.put( Braindump.ROOT_UUID_KEY, workflow.getRootWorkflowUUID() ); entries.put( Braindump.UUID_KEY, workflow.getWorkflowUUID() ); //dax and dax label entries.put( "dax", mPOptions.getDAX() ); entries.put( Braindump.DAX_LABEL_KEY, workflow.getLabel() ); entries.put( Braindump.DAX_INDEX_KEY, workflow.dagInfo.index ); entries.put( Braindump.DAX_VERRSION_KEY, workflow.getDAXVersion() ); //the workflow name if (dinfo.flowIDName != null) { entries.put( Braindump.WF_NAME_KEY, dinfo.flowIDName ); } //the workflow timestamp if (dinfo.getMTime() != null) { entries.put( Braindump.WF_TIMESTAMP_KEY, dinfo.getFlowTimestamp() ); } //basedir and submit directory entries.put( "basedir", mPOptions.getBaseSubmitDirectory() ); //append( "dag " ).append(dagFile).append("\n"). entries.put( Braindump.SUBMIT_DIR_KEY, absPath ); //the properties file entries.put( Braindump.PROPERTIES_KEY, new File( mProps.getPropertiesInSubmitDirectory() ).getName() ); //information about the planner StringBuffer planner = new StringBuffer(); planner.append( mProps.getBinDir() ).append( File.separator ).append( "pegasus-plan" ); entries.put( "planner", planner.toString() ); //planner version and build entries.put( Braindump.PLANNER_VERSION_KEY, Version.instance().toString() ); entries.put( Braindump.BUILD_KEY, Version.instance().determineBuilt() ); //planner arguments StringBuffer arguments = new StringBuffer( ); arguments.append( "\"" ).append( mPOptions.getOriginalArgString() ).append( "\"" ); entries.put( Braindump.PLANNER_ARGUMENTS_KEY, arguments.toString() ); //required by tailstatd entries.put( "jsd" , "jobstate.log"); entries.put( "rundir" , directory.getName()); entries.put( "bindir" , mProps.getBinDir().getAbsolutePath()); entries.put( "vogroup" , mPOptions.getVOGroup() ); return entries; } /** * Generates the code for the executable workflow in terms of a braindump * file that contains workflow metadata useful for monitoring daemons etc. * * @param dag the concrete workflow. * * @return the Collection of File objects for the files written * out. * * @throws CodeGeneratorException in case of any error occuring code generation. */ public Collection generateCode(ADag dag) throws CodeGeneratorException { try { Collection result = new LinkedList(); result.add(writeOutBraindumpFile(this.defaultBrainDumpEntries(dag))); return result; } catch (IOException ioe) { throw new CodeGeneratorException( "IOException while writing out the braindump file" , ioe ); } } /** * Generates the code for the executable workflow in terms of a braindump * file that contains workflow metadata useful for monitoring daemons etc. * * @param dag the concrete workflow. * @param additionalEntries additional entries to go in the braindump file, * overwriting the default entries. * * @return the Collection of File objects for the files written * out. * * @throws CodeGeneratorException in case of any error occuring code generation. 
*/ public Collection generateCode( ADag dag, Map additionalEntries ) throws CodeGeneratorException { try { Collection result = new LinkedList(); Map entries = this.defaultBrainDumpEntries(dag); entries.putAll(additionalEntries); result.add(writeOutBraindumpFile(entries)); return result; } catch (IOException ioe) { throw new CodeGeneratorException( "IOException while writing out the braindump file" , ioe ); } } /** * Method not implemented. Throws an exception. * * @param dag the workflow * @param job the job for which the code is to be generated. * * @throws edu.isi.pegasus.planner.code.CodeGeneratorException */ public void generateCode( ADag dag, Job job ) throws CodeGeneratorException { throw new CodeGeneratorException( "Braindump generator only generates code for the whole workflow" ); } /** * Writes out the braindump.txt file for a workflow in the submit * directory. The braindump.txt file is used for passing to the tailstatd * daemon that monitors the state of execution of the workflow. * * @param entries the Map containing the entries going into the braindump file. * * @return the absolute path to the braindump file.txt written in the directory. * * @throws IOException in case of error while writing out file. */ protected File writeOutBraindumpFile( Map entries ) throws IOException{ //create a writer to the braindump.txt in the directory. File f = new File( mSubmitFileDir , BRAINDUMP_FILE ); PrintWriter writer = new PrintWriter(new BufferedWriter(new FileWriter(f))); //go through all the keys and write out to the file for( Map.Entry entry: entries.entrySet() ){ StringBuffer sb = new StringBuffer(); sb.append( entry.getKey() ).append( " " ).append( entry.getValue() ); writer.println( sb.toString() ); } writer.close(); return f; } /** * Returns the submit hostname * * @return hostname * * @throws edu.isi.pegasus.planner.code.CodeGeneratorException */ protected String getSubmitHostname( ) throws CodeGeneratorException{ try { InetAddress localMachine = java.net.InetAddress.getLocalHost(); return localMachine.getHostName(); } catch ( UnknownHostException ex) { throw new CodeGeneratorException( "Unable to determine hostname", ex ); } } /** * Returns the distinguished name from the proxy * * * @return the DN else null if proxy file not found. */ protected String getGridDN( ){ String dn = null; try { GlobusCredential credential = GlobusCredential.getDefaultCredential(); //new GlobusCredential(proxyFile); dn = credential.getIdentity(); } catch (GlobusCredentialException ex) { mLogger.log( "Unable to determine GRID DN", ex, LogManager.DEBUG_MESSAGE_LEVEL ); } return dn; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/CodeGeneratorFactory.java0000644000175000017500000001306411757531137027124 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.code; import edu.isi.pegasus.planner.classes.PlannerOptions; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.code.CodeGenerator; import edu.isi.pegasus.planner.common.PegasusProperties; import edu.isi.pegasus.common.util.DynamicLoader; /** * A factory class to load the appropriate type of Code Generator. The * CodeGenerator implementation is used to write out the concrete plan. * * @author Karan Vahi * @version $Revision: 3481 $ */ public class CodeGeneratorFactory { /** * The default package where the all the implementing classes are supposed to * reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.planner.code.generator"; /** * The name of the class implementing the condor code generator. */ public static final String CONDOR_CODE_GENERATOR_CLASS = "edu.isi.pegasus.planner.code.generator.condor.CondorGenerator"; /** * The name of the class implementing the Stampede Event Generator */ public static final String STAMPEDE_EVENT_GENERATOR_CLASS = "edu.isi.pegasus.planner.code.generator.Stampede"; /** * This method loads the appropriate implementing code generator as specified * by the user at runtime. If the megadag mode is specified in the options, * then that is used to load the implementing class, overriding the submit * mode specified in the properties file. * * * @param bag the bag of initialization objects. * * @return the instance of the class implementing this interface. * * @exception CodeGeneratorFactoryException that nests any error that * might occur during the instantiation of the implementation. * * @see #DEFAULT_PACKAGE_NAME */ public static CodeGenerator loadInstance( PegasusBag bag ) throws CodeGeneratorFactoryException{ PegasusProperties properties = bag.getPegasusProperties(); PlannerOptions options = bag.getPlannerOptions(); //sanity check if(properties == null){ throw new RuntimeException("Invalid properties passed"); } if(options == null){ throw new RuntimeException("Invalid Options specified"); } //resolve the basename of the class on the basis of the megadag mode String mode = (options == null) ? null: options.getMegaDAGMode(); String className = null; if(mode != null){ //try to see if a special writer needs to be loaded className = (mode.equalsIgnoreCase("daglite"))? "DAGLite": null; //we pick from the properties. } if(className == null){ //pick up the basename/classname from the properties. String submitMode = properties.getSubmitMode(); className = ( submitMode.equals( "condor" ) ) ? CONDOR_CODE_GENERATOR_CLASS : submitMode; } return loadInstance( bag, className ); } /** * This method loads the appropriate code generator as specified by the * user at runtime. * * * @param bag the bag of initialization objects. * @param className the name of the implementing class. * * @return the instance of the class implementing this interface. * * @exception CodeGeneratorFactoryException that nests any error that * might occur during the instantiation of the implementation. 
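 *
 * <p>Illustrative call (assumes a fully populated PegasusBag; the basename
 * "Stampede" resolves against DEFAULT_PACKAGE_NAME):
 *
 * <pre>
 *   CodeGenerator generator = CodeGeneratorFactory.loadInstance( bag, "Stampede" );
 *   Collection files = generator.generateCode( dag );
 * </pre>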
* * @see #DEFAULT_PACKAGE_NAME */ public static CodeGenerator loadInstance( PegasusBag bag, String className) throws CodeGeneratorFactoryException{ PegasusProperties properties = bag.getPegasusProperties(); PlannerOptions options = bag.getPlannerOptions(); //sanity check if (properties == null) { throw new RuntimeException( "Invalid properties passed" ); } if (className == null) { throw new RuntimeException( "Invalid className specified" ); } //prepend the package name if classname is actually just a basename className = (className.indexOf('.') == -1) ? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className : //load directly className; //try loading the class dynamically CodeGenerator cGen = null; try { DynamicLoader dl = new DynamicLoader( className ); cGen = (CodeGenerator) dl.instantiate( new Object[0] ); //initialize the loaded code generator cGen.initialize( bag ); } catch (Exception e) { throw new CodeGeneratorFactoryException( "Instantiating Code Generator ", className, e); } return cGen; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/GridStart.java0000644000175000017500000001711211757531137024754 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.planner.code; import edu.isi.pegasus.planner.classes.ADag; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.classes.Job; import edu.isi.pegasus.planner.classes.AggregatedJob; import java.util.Collection; import java.io.File; /** * The interface that defines how a job specified in the abstract workflow * is launched on the grid. This allows to specify different ways to wrap * an executable while running on the grid. One may do this, to gather * additional information about the job like provenance information. * * If the implementation returns true for canSetXBit, then it should be setting * the X bit for the staged compute jobs. * * @author Karan Vahi vahi@isi.edu * @version $Revision: 4602 $ */ public interface GridStart { /** * The version number associated with this API of GridStart. */ public static final String VERSION = "1.5"; /** * The File separator to be used on the submit host. */ public static char mSeparator = File.separatorChar; /** * Initializes the GridStart implementation. * * @param bag the bag of objects that is used for initialization. * @param dag the concrete dag so far. */ public void initialize( PegasusBag bag, ADag dag ); /** * Enables a job to run on the grid. This also determines how the * stdin,stderr and stdout of the job are to be propogated. * To grid enable a job, the job may need to be wrapped into another * job, that actually launches the job. It usually results in the job * description passed being modified modified. * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param isGlobusJob is true, if the job generated a * line universe = globus, and thus runs remotely. 
* Set to false, if the job runs on the submit * host in any way. * * @return boolean true if enabling was successful, else false. */ public boolean enable( AggregatedJob job, boolean isGlobusJob ); /** * Enables a job to run on the grid. This also determines how the * stdin, stderr and stdout of the job are to be propagated. * To grid enable a job, the job may need to be wrapped into another * job, that actually launches the job. It usually results in the job * description passed being modified. * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param isGlobusJob is true, if the job generated a * line universe = globus, and thus runs remotely. * Set to false, if the job runs on the submit * host in any way. * * @return boolean true if enabling was successful, else false. */ public boolean enable( Job job, boolean isGlobusJob ); /** * Setter method to control whether a full path to Gridstart should be * returned while wrapping a job or not. * * @param fullPath if set to true, indicates that the full path would be used. */ public void useFullPathToGridStarts( boolean fullPath ); /** * Constructs the postscript that has to be invoked on the submit host * after the job has executed on the remote end. The postscript usually * works on the output generated by the executable that is used to grid * enable a job, and has been piped back by Condor. *

* The postscript should be constructed and populated as a profile * in the DAGMAN namespace. * * * @param job the Job object containing the job description * of the job that has to be enabled on the grid. * @param key the key for the profile that has to be inserted. * * @return boolean true if postscript was generated,else false. */ // public boolean constructPostScript( Job job, String key ) ; /** * Indicates whether the enabling mechanism can set the X bit * on the executable on the remote grid site, in addition to launching * it on the remote grid stie * * @return boolean indicating whether can set the X bit or not. */ public boolean canSetXBit(); /** * Returns the directory in which the job executes on the worker node. * * @param job * * @return the full path to the directory where the job executes */ public String getWorkerNodeDirectory( Job job ); /** * Returns the value of the vds profile with key as VDS.GRIDSTART_KEY, * that would result in the loading of this particular implementation. * It is usually the name of the implementing class without the * package name. * * @return the value of the profile key. * @see org.griphyn.cPlanner.namespace.VDS#GRIDSTART_KEY */ public String getVDSKeyValue(); /** * Returns a short textual description of the implementing class. * Should usually be the name of the implementing class. * * @return short textual description. */ public String shortDescribe(); /** * Returns the SHORT_NAME for the POSTScript implementation that is used * to be as default with this GridStart implementation. * * @return the id for the POSTScript. * * @see POSTScript#shortDescribe() */ public String defaultPOSTScript(); /** * Returns the full path to the submit directory, for the job. * * @param root the base of the submit directory hierarchy for the workflow. * @param job the job for which the submit directory is to be determined. * * @return the path to the submit directory. */ // public static String getSubmitDirectory( String root, Job job ){ // String jobDir = job.getSubmitDirectory(); // StringBuffer sb = new StringBuffer(); // // //some sanity checks // if( jobDir == null && root == null){ // throw new NullPointerException( // "Both the root directory, and job directory are null"); // } // // // //determine the submit directory for the job // if(jobDir == null){ // sb.append(root); // } // else if(jobDir.indexOf(mSeparator) == 0){ // //absolute path use that // sb.append(jobDir); // } // else{ // //handle the . if given // sb.append(root).append(mSeparator); // sb.append((jobDir.indexOf('.') == 0)? // //handle separator if given // (jobDir.indexOf(mSeparator) == 1)? // jobDir.substring(2):jobDir.substring(1) // //just append whatever is given // :jobDir); // } // // return sb.toString(); // } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/planner/code/CodeGeneratorFactoryException.java0000644000175000017500000000654411757531137031010 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.planner.code; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Code Generator implementations. * * @author Karan Vahi * @version $Revision: 2090 $ */ public class CodeGeneratorFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Code Generator"; /** * Constructs a CodeGeneratorFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public CodeGeneratorFactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a CodeGeneratorFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public CodeGeneratorFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a CodeGeneratorFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public CodeGeneratorFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a CodeGeneratorFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public CodeGeneratorFactoryException( String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/0000755000175000017500000000000011757531667021133 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/0000755000175000017500000000000011757531667022110 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/FindExecutable.java0000644000175000017500000000412411757531137025626 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.common.util; import java.io.File; /** * A convenience class that allows us to determine the path to an executable. * * @author Jens Voeckler * @author Karan Vahi * @version $Revision: 3301 $ */ public class FindExecutable { /** * Finds the path to an executable of a given name, based on the value of * the PATH environment variable. * * @param name the name of the executable to search for. * * @return the File object corresponding to the executable if found, else * null */ public static File findExec( String name ) { if ( name == null ) { return null; } String path = System.getenv("PATH"); if ( path == null ) { return null; } String[] list = path.split( ":" ); for ( int i=0; i < list.length; ++i ) { File result = new File( list[i], name ); if ( result.isFile() && result.canExecute() ){ return result; } } return null; } /** * Test function for the class. * * @param args the executable names to look up. */ public static void main(String args[]) { for (int i = 0; i < args.length; ++i) { File f = FindExecutable.findExec( args[i] ); if (f == null) { System.out.println(args[i] + " not found"); } else { System.out.println(args[i] + " -> " + f); } } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/XMLOutput.java0000644000175000017500000002121411757531137024624 0ustar ryngerynge/** * Copyright 2007-2010 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; import java.io.Writer; import java.io.StringWriter; import java.io.IOException; import java.text.StringCharacterIterator; /** * This abstract class defines a common base for certain classes that * deal with the generation of XML files. Historically, this class also * dealt with text generation, but those methods have been mostly * removed. * * @author Jens-S. Vöckler * @version $Revision: 2574 $ */ public abstract class XMLOutput { /** * Escapes certain characters inappropriate for textual output. * * Since this method does not hurt, and may be useful in other * regards, it will be retained for now. * * @param original is a string that needs to be quoted * @return a string that is "safe" to print. */ static public String escape( String original ) { if ( original == null ) return null; StringBuilder result = new StringBuilder( 2 * original.length() ); StringCharacterIterator i = new StringCharacterIterator(original); for ( char ch = i.first(); ch != StringCharacterIterator.DONE; ch = i.next() ) { if ( ch == '\r' ) { result.append("\\r"); } else if ( ch == '\n' ) { result.append("\\n"); } else if ( ch == '\t' ) { result.append("\\t"); } else { // DO NOT escape apostrophe. If apostrophe escaping is required, // do it beforehand. if ( ch == '\"' || ch == '\\' ) result.append('\\'); result.append(ch); } } return result.toString(); } /** * Escapes certain characters inappropriate for XML content output. * * According to the XML 1.0 Specification, an attribute cannot contain * ampersand, less-than, nor the character that was used to quote the * attribute value. 
* * @param original is a string that needs to be quoted * @param isAttribute denotes an attributes value, if set to true. * If false, it denotes regular XML content outside of attributes. * @return a string that is "safe" to print as XML. */ static public String quote( String original, boolean isAttribute ) { if ( original == null ) return null; StringBuilder result = new StringBuilder( 2 * original.length() ); StringCharacterIterator i = new StringCharacterIterator(original); for ( char ch = i.first(); ch != StringCharacterIterator.DONE; ch = i.next() ) { switch (ch) { case '<': result.append("&lt;"); break; case '&': result.append("&amp;"); break; case '>': // greater-than does not need to be attribute-escaped, // but standard does not say we must not do it, either. result.append("&gt;"); break; case '\'': if ( isAttribute ) result.append("&apos;"); else result.append(ch); break; case '\"': if ( isAttribute ) result.append("&quot;"); else result.append(ch); break; default: result.append(ch); break; } } return result.toString(); } /** * XML write helper method writes a quoted attribute onto a stream. * The terminating quote will be appended automatically. Values will * be XML-escaped. No action will be taken, if the value is null. * * @param stream is the stream to append to * @param key is the attribute including initial space, attribute name, * equals sign, and opening quote. The string passed as key must never * be null. * @param value is a string value, which will be put within the quotes * and which will be escaped. If the value is null, no action will be * taken * @throws IOException for stream errors. */ public void writeAttribute( Writer stream, String key, String value ) throws IOException { if ( value != null ) { stream.write( key ); stream.write( quote(value,true) ); stream.write( '"' ); } } /** * Saner XML write helper method writes a quoted attribute onto a * stream. The value will be put properly into quotes. Values will be * XML-escaped. No action will be taken, if the key or value are * null. * * @param stream is the stream to append to * @param key is the attribute identifier, and just that. * @param value is a string value, which will be put within the quotes * and which will be escaped. If the value is null, no action will be * taken. * @throws IOException for stream errors. */ public void writeAttribute2( Writer stream, String key, String value ) throws IOException { if ( key != null && value != null ) { stream.write( key ); stream.write( "=\"" ); stream.write( quote(value,true) ); stream.write( '"' ); } } /** * Dumps the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output.

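 * A typical invocation is sketched below (element stands for any
 * concrete subclass instance; exception handling is omitted; the
 * arguments request no indentation and no namespace prefix):
 *
 *    String xml = element.toXML( "", null );
 *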
* * Sibling classes which represent small leaf objects, and can return * the necessary data more efficiently, are encouraged to override * this method. * * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If null, avoidable whitespace and linefeeds in the output will be * omitted. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @return a String which contains the state of the current class and * its siblings using XML. Note that these strings might become large. * @throws IOException when encountering an error constructing the * string. */ public String toXML( String indent, String namespace ) throws IOException { StringWriter sw = new StringWriter(); this.toXML( sw, indent, namespace ); sw.flush(); return sw.toString(); } /** * Convenience adaptor method invoking the equivalent of: *

   * toXML( stream, indent, (String) null );
   * 
* * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @throws IOException if something fishy happens to the stream. * @see #toXML( Writer, String, String ) */ public void toXML( Writer stream, String indent ) throws IOException { toXML( stream, indent, (String) null ); } /** * Dumps the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered * writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, and * the root element will map the XML namespace. Use null, * if you do not need an XML namespace. * @throws IOException if something fishy happens to the stream. * @see java.io.BufferedWriter */ public abstract void toXML( Writer stream, String indent, String namespace ) throws IOException; } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/Boolean.java0000644000175000017500000000711211757531137024323 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; /** * This class converts a boolean property specification (string) in various * representations into a boolean value. It is liberal in the representation * it accepts, but strict in what it produces.

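 * A minimal usage sketch, derived from the parse semantics documented
 * below (the input strings are illustrative):
 *
 *    boolean a = Boolean.parse( "yes" );          // true
 *    boolean b = Boolean.parse( "0" );            // false, numerical zero
 *    boolean c = Boolean.parse( "maybe", true );  // unrecognized, yields default
 *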
* * @author Gaurang Mehta * @author Karan Vahi * @author Jens-S. Vöckler * @version $Revision: 2079 $ */ public class Boolean { /** * The character representation of a true value. */ public static final String TRUE = "true"; /** * The character representation of a false value. */ public static final String FALSE = "false"; /** * Converts a boolean value into a strict representation of it. * * @param val is the boolean input value * @return a string representing the boolean value. */ public static String print( boolean val ) { return ( val ? TRUE : FALSE ); } /** * Converts a boolean string representation into a boolean value. * Representations may include non-negative integers, where only * 0 means false. Other valid string representations * of true include: * *

   * true
   * yes
   * on
   * 
* * Any other string representation is taken to mean false * * @param rep is the input string representing a boolean value. * @return a boolean value from the representation. */ public static boolean parse( String rep ) { return parse(rep,false); } /** * Converts a boolean string representation into a boolean value. * Representations may include non-negative integers, where only * 0 means false. Other valid string representations * of true include: * *
   * true
   * yes
   * on
   * 
* * Other valid string representations of false include, * besides the numerical zero: * *
   * false
   * no
   * off
   * 
* * Any other string representation is taken to mean the boolean value * indicated by the parameter deflt. * * @param rep is the input string representing a boolean value. * @param deflt is the default value to use in case rep does not * represent a valid boolean value. * * @return a boolean value from the representation. */ public static boolean parse( String rep, boolean deflt ) { if ( rep == null ) return deflt; String s = rep.trim().toLowerCase(); if ( s.length() == 0 ) return deflt; if ( Character.isDigit(s.charAt(0)) ) { // check for number long value; try { value = Long.parseLong(s); } catch ( NumberFormatException nfe ) { value = deflt ? 1 : 0; } return ( value != 0 ); } else { // check for key words return ( (s.equals("true") || s.equals("yes") || s.equals("on") ) ? true : ( (s.equals("false") || s.equals("no") || s.equals("off") )? false : deflt) ); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/DynamicLoader.java0000644000175000017500000003332411757531137025463 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; import java.lang.reflect.*; /** * This class provides a dynamic class loading facility. It is * tightly coupled to the property facility. To dynamically obtain * an instance of a class through its constructor: * *
 * Integer i = null;
 * DynamicLoader dl = new DynamicLoader( "java.lang.Integer" );
 * try {
 *   // instantiate as Integer("42")
 *   String arg[] = new String[1];
 *   arg[0] = "42";
 *   i = (Integer) dl.instantiate(arg);
 * } catch ( Exception e ) {
 *   System.err.println( dl.convertException(e) );
 *   System.exit(1);
 * }
 * 
* * Similarly, to obtain an instance of a class through a static * method provided by the same class, or another class: * *
 * Integer i = null;
 * DynamicLoader dl = new DynamicLoader( "java.lang.Integer" );
 * try {
 *   // instantiate as Integer("42")
 *   String arg[] = new String[1];
 *   arg[0] = "42";
 *   i = (Integer) dl.static_method( "valueOf", arg );
 * } catch ( Exception e ) {
 *   System.err.println( dl.convertException(e) );
 *   System.exit(1);
 * }
 * 
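 *
 * When a constructor declares interface types as formal arguments, the
 * two-argument instantiate variant selects the constructor from
 * explicitly given classes. A sketch in the same pattern (the class and
 * argument values are illustrative):
 *
 *   java.util.List l = null;
 *   DynamicLoader dl = new DynamicLoader( "java.util.ArrayList" );
 *   try {
 *     // select the ArrayList(Collection) constructor explicitly
 *     Class arg[] = new Class[1];
 *     arg[0] = java.util.Collection.class;
 *     Object val[] = new Object[1];
 *     val[0] = new java.util.LinkedList();
 *     l = (java.util.List) dl.instantiate( arg, val );
 *   } catch ( Exception e ) {
 *     System.err.println( dl.convertException(e) );
 *     System.exit(1);
 *   }
 *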
* * @author Karan Vahi * @author Jens-S. Vöckler * @version $Revision: 2079 $ */ public class DynamicLoader { /** * Stores the fully qualified class name to dynamically instantiate. */ private String m_classname; /** * Constructs a dynamic loader for the given class. * * @param classname is the fully-qualified name of the class to load. */ public DynamicLoader( String classname ) { if ( (this.m_classname = classname) == null ) throw new NullPointerException( "You must specify a fully-qualified class name" ); } /** * Sets the fully-qualified class name to load. * @param classname is the new class name. * @see #getClassName() */ public void setClassName( String classname ) { if ( (this.m_classname = classname) == null ) throw new NullPointerException( "You must specify a fully-qualified class name" ); } /** * Obtains the fully-qualified class name that this instance works with. * @return the class name. * @see #setClassName( String ) */ public String getClassName() { return this.m_classname; } /** * Dynamically instantiates a class from a constructor. You must have * set the class name before invoking this method. Please note that * any exceptions thrown by the constructor will be wrapped into a * InvocationTargetException. * * @param arguments are arguments to the constructor of the class * to load. Please use "new Object[0]" for the argumentless default * constructor. * @return an instance that must be cast to the correct class. * * @exception ClassNotFoundException if the driver for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the driver's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the driver class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the driver * class is not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the driver * throws an exception while being dynamically loaded. * @exception SQLException if the driver for the database can be * loaded, but faults when initially accessing the database * * @see #setClassName( String ) */ public Object instantiate( Object[] arguments ) throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { // generate class array and populate with class of each argument Class[] temp = new Class[ arguments.length ]; for ( int i=0; i < arguments.length; ++i ) temp[i] = arguments[i].getClass(); // load class into memory and obtain an instance of it return Class.forName(m_classname).getConstructor(temp) .newInstance(arguments); } /** * Dynamically instantiates a class from a constructor, selecting the * constructor from an explicitly specified vector of argument classes. * You must have set the class name before invoking this method. Please * note that any exceptions thrown by the constructor will be wrapped * into a InvocationTargetException
.

* This method should be invoked, if the constructor declares * interface types as formal arguments, but the actual arguments * are implementation classes. * * @param classes is a vector of the classes involved. Each item * in the classes vector matches the item in the arguments vector. * The classes vector will be used to select the correct constructor. * Please use "new Class[0]" for the argumentless default ctor. * @param arguments are arguments to the constructor of the class * to load. Please use "new Object[0]" for the argumentless default * constructor. * @return an instance that must be cast to the correct class. * * @exception ClassNotFoundException if the driver for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the driver's constructor interface * does not comply with the database driver API. * @exception IllegalArgumentException is thrown, if the number of * arguments do not match the number of types, ie the vector have * mismatching sizes. * @exception InstantiationException if the driver class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the driver * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the driver * throws an exception while being dynamically loaded. * @exception SQLException if the driver for the database can be * loaded, but faults when initially accessing the database * * @see #setClassName( String ) */ public Object instantiate( Class[] classes, Object[] arguments ) throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { // complain on argument mismatch if ( classes.length != arguments.length ) throw new IllegalArgumentException( "vector sizes must match" ); // load class into memory and obtain an instance of it return Class.forName(m_classname).getConstructor(classes) .newInstance(arguments); } /** * Dynamically instantiates a class from a static method which * constructs the resulting object. You must have set the class name * before invoking this method. Please note that any exceptions thrown * by the constructor will be wrapped into a * InvocationTargetException. * * @param method is the name of the static method to call. * @param arguments are arguments to the constructor of the class * to load. Please use "new Object[0]" for the argumentless default * constructor. * @return an instance that must be cast to the correct class. * * @exception ClassNotFoundException if the driver for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the driver's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the driver class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the driver * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the driver * throws an exception while being dynamically loaded. * @exception SQLException if the driver for the database can be * loaded, but faults when initially accessing the database * @exception SecurityException if you are not permitted to invoke the * method, or even list the methods provided by the class. * @exception NullPointerException if the method name is * null. 
* @exception IllegalArgumentException if the number of actual and * formal parameter differ, unwrapping a primitive type failed, or * a parameter value cannot be converted to the formal argument type. * * @see #setClassName( String ) */ public Object static_method( String method, Object[] arguments ) throws ClassNotFoundException, SecurityException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, NullPointerException, IllegalArgumentException { // generate class array and populate with class of each argument Class[] temp = new Class[ arguments.length ]; for ( int i=0; i $ * where may or may not be there, and can include spaces but is * really completely arbitrary. * * e.g. $CondorVersion: 7.1.0 Apr 1 2008 BuildID: 80895$ */ private static final String mRegexExpression = // "\\$CondorVersion:\\s*([0-9][\\.][0-9][\\.][0-9])[a-zA-Z:0-9\\s]*\\$"; //"\\$CondorVersion:\\s*([0-9][\\.][0-9][\\.][0-9])[\\w\\W\\s]*\\$"; "\\$CondorVersion:\\s*([0-9][\\.][0-9][\\.][0-9])[\\p{ASCII}\\s]*\\$"; /** * Stores compiled patterns at first use, quasi-Singleton. */ private static Pattern mPattern = null; /** * Converts a string into the corresponding integer value. * * @param version * * @return int value of the version, else -1 in case of null version * or incorrect formatted string * * @deprecated */ public static int intValue( String version ){ int result = 0; if( version == null ){ return -1; } //split on . try{ String[] subs = version.split( "\\." ); int index = subs.length; for( int i = 0, y = subs.length - 1; y >= 0; y--,i++){ result += (int) (Math.pow(10, y) * (Integer.parseInt(subs[i]))); } } catch( NumberFormatException nfe ){ result = -1; } return result; } /** * Converts a string into the corresponding numeric value. * * @param version in form of major.minor.patch. You can opt to omit the * minor and patch versions if you want * * @return float value of the version, else -1 in case of null version * or incorrect formatted string */ public static long numericValue( String version ){ long result = 0; if( version == null ){ return -1; } //we are converting to XX.XX.XX //add extra padding that is leading zero if only one digit char[] arr = new char[6]; //split on . try{ String[] subs = version.split( "\\." ); int y = subs.length; if ( y > CondorVersion.MAX_NUMBER_OF_VERSION_COMPONENTS ){ throw new IllegalArgumentException( "Only version numbers with max two dots are accepted i.e ( MAJOR.MINOR.PATCH ) " + version ); } int i = 0; //for each sub convert to a two digit form for( int z = 0; z < y; z++ ){ //compute the sub length int len = subs[z].length(); if( len > CondorVersion.MAX_VERSION_PRECISION ){ throw new IllegalArgumentException( "Only two digit precision is allowed in version numbers " + version); } //add leading zeros if required for ( int d = CondorVersion.MAX_VERSION_PRECISION - len; d > 0; d--){ arr[i++] = '0'; } //copy into arr the sub[z] for( int d = 0; d < len; d++){ char ch = subs[z].charAt( d ); if( !Character.isDigit(ch) ){ throw new IllegalArgumentException( "Non digit specified in version " + version); } arr[i++] = ch; } } //add trailing zeroes if required while( i < 6 ){ arr[i++] = '0'; } } catch( NumberFormatException nfe ){ result = -1; } return Long.parseLong( new String(arr) ); } /** * The default logger. */ private LogManager mLogger; /** * Factory method to instantiate the class. 
* * @return instance to the class */ public static CondorVersion getInstance( ){ return getInstance( null ); } /** * Factory method to instantiate the class. * * * @param logger the logger object * * @return instance to the class. */ public static CondorVersion getInstance( LogManager logger ){ if( logger == null ){ logger = LogManagerFactory.loadSingletonInstance(); } return new CondorVersion( logger ); } /** * The default constructor. * * @param logger the logger object */ private CondorVersion( LogManager logger ){ mLogger = logger; if( mPattern == null ){ mPattern = Pattern.compile( mRegexExpression ); } } /** * Returns the condor version parsed by executing the condor_version * command. * * @return the version number as long, else -1 if unable to determine. */ public long numericValue(){ long result = -1; try{ result = CondorVersion.numericValue( version() ); } catch( Exception e ){ mLogger.log("Exception while parsing condor_version ", e, LogManager.ERROR_MESSAGE_LEVEL); } return result; } /** * Returns the condor version parsed by executing the condor_version * command. * * @return the version number as String, else null if unable to determine. */ public String version(){ String version = null; try{ //set the callback and run the condor_version command CondorVersionCallback c = new CondorVersionCallback( ); Runtime r = Runtime.getRuntime(); Process p = r.exec( CONDOR_VERSION_COMMAND ); //spawn off the gobblers StreamGobbler ips = new StreamGobbler(p.getInputStream(), c); StreamGobbler eps = new StreamGobbler(p.getErrorStream(), new StreamGobblerCallback(){ //we cannot log to any of the default streams; refer to //the enclosing instance's logger explicitly LogManager mLogger = CondorVersion.this.mLogger; public void work(String s){ mLogger.log("Output on stream gobbler error stream " + s, LogManager.DEBUG_MESSAGE_LEVEL); } }); ips.start(); eps.start(); //wait for the threads to finish off ips.join(); version = c.getVersion(); eps.join(); //get the status int status = p.waitFor(); if( status != 0){ mLogger.log("Command " + CONDOR_VERSION_COMMAND + " exited with status " + status, LogManager.WARNING_MESSAGE_LEVEL); } } catch(IOException ioe){ mLogger.log("IOException while determining condor_version ", ioe, LogManager.ERROR_MESSAGE_LEVEL); } catch( InterruptedException ie){ //ignore } mLogger.log( "Condor Version as string " + version, LogManager.DEBUG_MESSAGE_LEVEL ); return version; } /** * An inner class, that implements the StreamGobblerCallback to determine * the version of Condor being used. * */ private class CondorVersionCallback implements StreamGobblerCallback{ /** * The version detected. */ private String mVersion; /** * The Default Constructor */ public CondorVersionCallback( ){ mVersion = null; } /** * Callback whenever a line is read from the stream by the StreamGobbler. * Matches the line against the Condor version pattern, and records the * version on a successful match. * * @param line the line that is read. */ public void work( String line ){ Matcher matcher = mPattern.matcher( line ); if( matcher.matches( ) ){ mVersion = matcher.group( 1 ); } } /** * Returns the condor version detected. * * @return the condor version else null */ public String getVersion(){ return mVersion; } } /** * The main program to test. 
* * @param args */ public static void main( String[] args ){ LogManager logger = LogManagerFactory.loadSingletonInstance(); CondorVersion cv = CondorVersion.getInstance(); logger.logEventStart( "CondorVersion", "CondorVersion", "Version"); System.out.println( "Condor Version is " + cv.version() ); System.out.println( "10.0.0 is " + CondorVersion.numericValue( "10.0.0") ); System.out.println( "7.1.2 is " + CondorVersion.numericValue( "7.1.2") ); System.out.println( "7.1.18 is " + CondorVersion.numericValue( "7.1.18" ) ); System.out.println( "7.1.19 is " + CondorVersion.numericValue( "7.1.19" ) ); System.out.println( "6.99.9 is " + CondorVersion.numericValue( "6.99.9" ) ); System.out.println( "7 is " + CondorVersion.numericValue( "7.2.2" ) ); logger.logEventCompletion(); //some sanity checks on the Regex String version = "$CondorVersion: 7.4.1 Dec 17 2009 UWCS-PRE $"; Matcher matcher = cv.mPattern.matcher( version ); if( matcher.matches() ){ System.out.println( "Version for " + version + " is " + matcher.group( 1 )); } version = "$CondorVersion: 7.4.1 Dec 17 2009 BuildID: 204351 $"; matcher = cv.mPattern.matcher( version ); if( matcher.matches() ){ System.out.println( "Version for " + version + " is " + matcher.group( 1 )); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/CommonProperties.java0000644000175000017500000005262411757531137026261 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; import java.io.BufferedInputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.io.PrintStream; import java.util.Enumeration; import java.util.MissingResourceException; import java.util.Properties; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * This class creates a common interface to handle package properties. * The package properties are meant as read-only (so far, until * requirements crop up for write access). The class is implemented * as a Singleton pattern. * * @author Jens-S. Vöckler * @author Yong Zhao * @author Karan Vahi * @author Mats Rynge * @version $Revision: 4651 $ * */ public class CommonProperties { /** * implements the singleton access via class variable. */ private static CommonProperties m_instance = null; /** * internal set of properties. Direct access is expressly forbidden. */ private Properties m_props; /** * The bin dir of the Pegasus install */ private File m_binDir; /** * GNU: read-only single-machine data in DIR [PREFIX/etc]. * The files in this directory have a low change frequency, are * effectively read-only, they reside on a per-machine basis, and they * are usually valid for a single user. */ private File m_sysConfDir; /** * GNU: modifiable architecture-independent data in DIR [PREFIX/share/pegasus]. 
* The files in this directory have a high change frequency, are * effectively read-write, can be shared via a networked FS, and they * are usually valid for multiple users. */ private File m_sharedStateDir; /** * Location of our schemas */ private File m_schemaDir; /** * Basename of the file to read to obtain system properties */ public static final String PROPERTY_FILENAME = "properties"; /** * Basename of the (new) file to read for user properties. */ public static final String USER_PROPERTY_FILENAME = ".pegasusrc"; /** * Adds new properties to an existing set of properties while * substituting variables. This function will allow value * substitutions based on other property values. Value substitutions * may not be nested. A value substitution will be ${property.key}, * where the dollar-brace and close-brace are being stripped before * looking up the value to replace it with. Note that the ${..} * combination must be escaped from the shell. * * @param a is the initial set of known properties (besides System ones) * @param b is the set of properties to add to a * @return the combined set of properties from a and b. */ protected static Properties addProperties( Properties a, Properties b ) { // initial Properties result = new Properties(a); Properties sys = System.getProperties(); Pattern pattern = Pattern.compile( "\\$\\{[-a-zA-Z0-9._]+\\}" ); for ( Enumeration e = b.propertyNames(); e.hasMoreElements(); ) { String key = (String) e.nextElement(); String value = b.getProperty(key); // unparse value ${prop.key} inside braces Matcher matcher = pattern.matcher(value); StringBuffer sb = new StringBuffer(); while ( matcher.find() ) { // extract name of properties from braces String newKey = value.substring( matcher.start()+2, matcher.end()-1 ); // try to find a matching value in result properties String newVal = result.getProperty(newKey); /* * // if not found, try b's properties * if ( newVal == null ) newVal = b.getProperty(newKey); */ // try myself if ( newVal == null ) newVal = result.getProperty(newKey); // if still not found, try system properties if ( newVal == null ) newVal = sys.getProperty(newKey); // replace braced string with the actual value or empty string matcher.appendReplacement( sb, newVal == null ? "" : newVal ); } matcher.appendTail( sb ); result.setProperty( key, sb.toString() ); } // final return result; } /** * Set some defaults, should values be missing in the dataset. * * @return the properties. */ private static Properties defaultProps() { // initial Properties result = new Properties(); // copy pegasus keys as specified in the system properties to defaults Properties sys = System.getProperties(); for ( Enumeration e = sys.propertyNames(); e.hasMoreElements(); ) { String key = (String) e.nextElement(); if ( key.startsWith("pegasus.") ) result.setProperty( key, sys.getProperty(key) ); } // INSERT HERE! // final return addProperties( new Properties(), result ); } /** * ctor. This initializes the local instance of properties * from a central file. * * @param confProperties the path to conf properties, that supersede the loading * of properties from $PEGASUS_HOME/.pegasusrc * * @exception IOException will be thrown if reading the property file * goes awry. 
* @exception MissingResourceException will be thrown if not all * required properties are set */ protected CommonProperties( String confProperties ) throws IOException, MissingResourceException { // create empty new instance this.m_props = new Properties( defaultProps() ); // first check for old -D option - this is just a warning if ( System.getProperty("pegasus.properties") != null) { File props = new File( System.getProperty("pegasus.properties") ); if( props.exists() ){ System.err.println( "[WARNING] Properties are no longer loaded from by" + " -Dpegasus.properties property. " + props.getAbsolutePath() + " will not be loaded. Use --conf option instead." ); } } // first check for old -D option - this is just a warning if ( System.getProperty( "pegasus.user.properties" ) != null ) { File props = new File(System.getProperty( "pegasus.user.properties" )); if( props.exists() ){ System.err.println( "[WARNING] Properties are no longer loaded by " + "specifying -Dpegasus.user.properties property. " + props.getAbsolutePath() + " will not be loaded. Use --conf option instead." ); } } // add user properties afterwards to have higher precedence String userHome = System.getProperty( "user.home", "." ); // try loading $HOME/.pegasusrc File props = new File( userHome, CommonProperties.USER_PROPERTY_FILENAME ); //Prefer conf option over $HOME/.pegasusrc File confProps = null; props = ( confProperties != null && (confProps = new File( confProperties )).exists() )? confProps : props; if ( props.exists() ) { // if this file exists, read the properties (will throw IOException) Properties temp = new Properties(); InputStream stream = new BufferedInputStream( new FileInputStream(props) ); temp.load( stream ); stream.close(); this.m_props = addProperties( this.m_props, temp ); } // now set the paths: set sysconfdir to correct latest value this.m_binDir = pickPath( this.m_props.getProperty( "pegasus.home.bindir" ), System.getProperty( "pegasus.home.bindir" ) ); this.m_sysConfDir = pickPath( this.m_props.getProperty( "pegasus.home.sysconfdir" ), System.getProperty( "pegasus.home.sysconfdir" ) ); this.m_sharedStateDir = pickPath( this.m_props.getProperty( "pegasus.home.sharedstatedir" ), System.getProperty( "pegasus.home.sharedstatedir" ) ); this.m_schemaDir = pickPath( this.m_props.getProperty( "pegasus.home.schemadir" ), System.getProperty( "pegasus.home.schemadir" ) ); } private File pickPath(String p1, String p2) { String winner = null; if (p1 != null) { winner = p1; } else if (p2 != null) { winner = p2; } if (winner != null) { return new File(winner); } return null; } /** * Singleton threading: Creates the one and only instance of the * properties in the current application. * * @return a reference to the properties. * @exception IOException will be thrown if reading the property file * goes awry. * @exception MissingResourceException will be thrown if you forgot * to specify the -Dpegasus.home=$PEGASUS_HOME to the runtime * environment. * @see #noHassleInstance() */ public static CommonProperties instance() throws IOException, MissingResourceException { if ( CommonProperties.m_instance == null ) CommonProperties.m_instance = new CommonProperties( null ); return CommonProperties.m_instance; } /** * Create a temporary property that is not attached to the Singleton. * This may be helpful with portal, which do magic things during the * lifetime of a process. 
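 * A minimal usage sketch (the properties file path is illustrative, and
 * exception handling is omitted):
 *
 *    CommonProperties p =
 *       CommonProperties.nonSingletonInstance( "/tmp/my.properties" );
 *    String bindir = p.getProperty( "pegasus.home.bindir", "/usr/bin" );
 *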
* * * @param confProperties the path to conf properties, that supersede the * loading of properties from $PEGASUS_HOME/.pegasusrc * * @return a reference to the parsed properties. * @exception IOException will be thrown if reading the property file * goes awry. * @exception MissingResourceException will be thrown if you forgot * to specify the -Dpegasus.home=$PEGASUS_HOME to the runtime * environment. * @see #instance() */ public static CommonProperties nonSingletonInstance( String confProperties ) throws IOException, MissingResourceException { return new CommonProperties( confProperties ); } /** * Singleton interface: Creates the one and only instance of the * properties in the current application, and does not bother the * programmer with exceptions. Rather, exceptions from the underlying * instance() call are caught, converted to an error * message on stderr, and the program is exited. * * @return a reference to the properties. * @see #instance() */ public static CommonProperties noHassleInstance() { CommonProperties result = null; try { result = instance(); } catch ( IOException e ) { System.err.println( "While reading property file: " + e.getMessage() ); System.exit(1); } catch ( MissingResourceException mre ) { System.err.println( mre.getMessage() ); System.exit(1); } return result; } /** * Accessor pegasus bin directory * * @return the "bin" directory of the Pegasus runtime system. */ public File getBinDir() { return this.m_binDir; } /** * Accessor to $PEGASUS_HOME/etc. The files in this directory have a low * change frequency, are effectively read-only, they reside on a * per-machine basis, and they are valid usually for a single user. * * @return the "etc" directory of the VDS runtime system. */ public File getSysConfDir() { return this.m_sysConfDir; } /** * Accessor to $PEGASUS_HOME/com. The files in this directory have a high * change frequency, are effectively read-write, they reside on a * per-machine basis, and they are valid usually for a single user. * * @return the "com" directory of the VDS runtime system. */ public File getSharedStateDir() { return this.m_sharedStateDir; } /** * Accessor to schema directory * * @return the schema directoru */ public File getSchemaDir() { return this.m_schemaDir; } /** * Accessor: Obtains the number of properties known to the project. * * @return number of properties in the project property space. */ public int size() { return this.m_props.size(); } /** * Accessor: access to the internal properties as read from file. * An existing system property of the same key will have precedence * over any project property. This method will remove leading and * trailing ASCII control characters and whitespaces. * * @param key is the key to look up * @return the value for the key, or null, if not found. */ public String getProperty( String key ) { String result = System.getProperty( key, this.m_props.getProperty(key) ); return ( result == null ? result : result.trim() ); } /** * Accessor: access to the internal properties as read from file * An existing system property of the same key will have precedence * over any project property. This method will remove leading and * trailing ASCII control characters and whitespaces. * * @param key is the key to look up * @param defValue is a default to use, if no value can be found for the key. * @return the value for the key, or the default value, if not found. 
*/ public String getProperty( String key, String defValue ) { String result = System.getProperty( key, this.m_props.getProperty(key,defValue) ); return ( result == null ? result : result.trim() ); } /** * Accessor: Overwrite any properties from within the program. * * @param key is the key to look up * @param value is the new property value to place in the system. * @return the old value, or null if it didn't exist before. */ public Object setProperty( String key, String value ) { //set in internal properties object also //else prefix option does not work. Karan Oct 1, 2008 this.m_props.setProperty( key, value ); return System.setProperty( key, value ); } /** * Accessor: enumerate all keys known to this property collection * @return an enumerator for the keys of the properties. */ public Enumeration propertyNames() { return this.m_props.propertyNames(); } /** * Extracts a specific property key subset from the known properties. * The prefix may be removed from the keys in the resulting dictionary, * or it may be kept. In the latter case, exact matches on the prefix * will also be copied into the resulting dictionary. * * @param prefix is the key prefix to filter the properties by. * @param keepPrefix if true, the key prefix is kept in the resulting * dictionary. As side-effect, a key that matches the prefix exactly * will also be copied. If false, the resulting dictionary's keys are * shortened by the prefix. An exact prefix match will not be copied, * as it would result in an empty string key. * @return a property dictionary matching the filter key. May be * an empty dictionary, if no prefix matches were found. * * @see #getProperty( String ) is used to assemble matches */ public Properties matchingSubset( String prefix, boolean keepPrefix ) { Properties result = new Properties(); // sanity check if ( prefix == null || prefix.length() == 0 ) return result; String prefixMatch; // match prefix strings with this String prefixSelf; // match self with this if ( prefix.charAt(prefix.length()-1) != '.' ) { // prefix does not end in a dot prefixSelf = prefix; prefixMatch = prefix + '.'; } else { // prefix does end in one dot, remove for exact matches prefixSelf = prefix.substring( 0, prefix.length()-1 ); prefixMatch = prefix; } // POSTCONDITION: prefixMatch and prefixSelf are initialized! // now add all matches into the resulting properties. // Remark 1: #propertyNames() will contain the System properties! // Remark 2: We need to give priority to System properties. This is done // automatically by calling this class's getProperty method. String key; for ( Enumeration e = propertyNames(); e.hasMoreElements(); ) { key = (String) e.nextElement(); if ( keepPrefix ) { // keep full prefix in result, also copy direct matches if ( key.startsWith(prefixMatch) || key.equals(prefixSelf) ) result.setProperty( key, getProperty(key) ); } else { // remove full prefix in result, dont copy direct matches if ( key.startsWith(prefixMatch) ) result.setProperty( key.substring( prefixMatch.length() ), getProperty(key) ); } } // done return result; } /** * Extracts a specific property key subset from the properties passed. * The prefix may be removed from the keys in the resulting dictionary, * or it may be kept. In the latter case, exact matches on the prefix * will also be copied into the resulting dictionary. * * * @param prefix is the key prefix to filter the properties by. * @param keepPrefix if true, the key prefix is kept in the resulting * dictionary. 
As side-effect, a key that matches the prefix exactly * will also be copied. If false, the resulting dictionary's keys are * shortened by the prefix. An exact prefix match will not be copied, * as it would result in an empty string key. * @return a property dictionary matching the filter key. May be * an empty dictionary, if no prefix matches were found. * * @see #getProperty( String ) is used to assemble matches */ public static Properties matchingSubset( Properties properties, String prefix, boolean keepPrefix ) { Properties result = new Properties(); // sanity check if ( prefix == null || prefix.length() == 0 ) return result; String prefixMatch; // match prefix strings with this String prefixSelf; // match self with this if ( prefix.charAt(prefix.length()-1) != '.' ) { // prefix does not end in a dot prefixSelf = prefix; prefixMatch = prefix + '.'; } else { // prefix does end in one dot, remove for exact matches prefixSelf = prefix.substring( 0, prefix.length()-1 ); prefixMatch = prefix; } // POSTCONDITION: prefixMatch and prefixSelf are initialized! // now add all matches into the resulting properties. // Remark 1: #propertyNames() will contain the System properties! // Remark 2: We need to give priority to System properties. This is done // automatically by calling this class's getProperty method. String key; for ( Enumeration e = properties.propertyNames(); e.hasMoreElements(); ) { key = (String) e.nextElement(); if ( keepPrefix ) { // keep full prefix in result, also copy direct matches if ( key.startsWith(prefixMatch) || key.equals(prefixSelf) ) result.setProperty( key, properties.getProperty(key) ); } else { // remove full prefix in result, dont copy direct matches if ( key.startsWith(prefixMatch) ) result.setProperty( key.substring( prefixMatch.length() ), properties.getProperty(key) ); } } // done return result; } /** * Print out the property list onto the specified stream. This method * is useful for debugging, and meant for debugging. * * @param out an output stream * @throws ClassCastException if any key is not a string. * * @see java.util.Properties#list( PrintStream ) */ public void list(PrintStream out) { m_props.list(out); } public static void main( String[] args ) throws java.io.IOException { CommonProperties cp = null; if ( args.length > 0 ) cp = CommonProperties.nonSingletonInstance(args[0]); else cp = CommonProperties.instance(); cp.list( System.out ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/Version.in0000644000175000017500000001020111757531430024043 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package edu.isi.pegasus.common.util; import java.io.*; /* DONOTEDITWARNING */ /** * This class solely defines the version numbers of PEGASUS. The template * file will be substituted by ant during the built process to compile * in the built timestamp.

* * When using the class, the methods for the version number digits * will always return an integer. In order to obtain the full version * number, including any "release candidate" suffices, please prefer * the toString method over combining the separate version * integers yourself. * * @author Karan Vahi * @author Jens-S. Vöckler * @version $Revision: 5069 $ * $Id: Version.in 5069 2012-03-01 00:32:01Z rynge $ */ public class Version { /** * This constant defines the major version number. */ public static final int MAJOR = 4; /** * This constant defines the minor version number. */ public static final int MINOR = 0; /** * This constant defines the patch level version. */ public static final int PLEVEL = 1; /** * Instance variable keeping track of the Singleton. */ protected static Version c_instance = null; /** * C'tor: construct an element, which is empty anyway. */ private Version() { // empty } /** * Accessor to the singleton. */ public static Version instance() { if ( Version.c_instance == null ) Version.c_instance = new Version(); return Version.c_instance; } /** * Returns a string containing the concatenated version. * Note: This class may also return suffixes beyond the version. * * @return a concatenated string. */ public String toString() { return ( Integer.toString(MAJOR) + '.' + Integer.toString(MINOR) + '.' + Integer.toString(PLEVEL) + "" ); // !! HERE !! } /** * Basename of the build stamp file. */ public static final String STAMP_FILENAME = "stamp"; /** * Determines the built as a time stamp. * @return the formatted time stamp of the built. */ public String determineBuilt() { return "BUILDTIMESTAMP"; } /** * Determines the build platform. * @return an identifier for the build platform. */ public String determinePlatform() { return "BUILDARCHITECTURE"; } /** * Determines the built and architecture of the installation. These * are usually separated by a linear white-space. * * @return the formatted time stamp of the built, if available, and an * identifier for the architecture. An string indicating that the * build is unknown is returned on failure. */ public String determineInstalled() { String result = "unknown unknown"; String pegasushome = System.getProperty( "pegasus.home" ); if ( pegasushome != null ) { try { File stampfile = new File( pegasushome, STAMP_FILENAME ); if ( stampfile.canRead() ) { BufferedReader br = new BufferedReader( new FileReader(stampfile) ); String built = br.readLine(); br.close(); if ( built != null && built.length() > 0 ) result = built; } } catch ( IOException ioe ) { // ignore } } return result; } /** * Determines, if the compiled version and the installed version * match. The match is done by comparing the timestamps and * architectures. * * @return true, if versions match, false otherwise. */ public boolean matches() { String s[] = determineInstalled().split("\\s+"); return ( s.length >= 2 && s[0].equalsIgnoreCase( determineBuilt() ) && s[1].equalsIgnoreCase( determinePlatform() ) ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/StreamGobblerCallback.java0000644000175000017500000000206411757531137027112 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; /** * This interface defines the callback calls that are called from within the * StreamGobbler while working on a stream. * * @author Karan Vahi * @version $Revision: 2567 $ */ public interface StreamGobblerCallback { /** * Callback whenever a line is read from the stream by the StreamGobbler. * * @param line the line that is read. */ public void work(String line); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/XMLWriter.java0000644000175000017500000002541311757531137024605 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; import java.io.IOException; import java.io.Writer; import java.util.Stack; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogManagerFactory; /** * A simple stream-based writer for generating XML output, with support * for namespace prefixes, attributes, comments and CDATA sections. * * @author gmehta */ public class XMLWriter { private Writer mWriter; private Stack<String> mStack; private StringBuffer mAttributes; private boolean mEmptyElement; private boolean mClosedElement; private boolean mWriteLine = true; private boolean mHeader = true; private LogManager mLogger; private static String START_ELEMENT_TAG = "<"; private static String CLOSE_ELEMENT_TAG = ">"; private static String START_END_ELEMENT_TAG = "</"; private static String CLOSE_EMPTY_ELEMENT_TAG = "/>"; private static String START_COMMENT_TAG = "<!-- "; private static String CLOSE_COMMENT_TAG = " -->"; private static String INDENT = " "; private static String XML_HEADER = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"; private String mLineSeparator; private String mNamespace; public XMLWriter(Writer writer) { this(writer, ""); } public XMLWriter(Writer writer, String namespace) { mWriter = writer; mNamespace = (namespace == null) ? "" : namespace; mStack = new Stack<String>(); mAttributes = new StringBuffer(); mClosedElement = true; mLogger = LogManagerFactory.loadSingletonInstance(); mLineSeparator = System.getProperty("line.separator", "\r\n"); this.writeXMLHeader(); this.writeXMLComment("generated on: " + Currently.iso8601(false)); this.writeXMLComment("generated by: " + System.getProperties().getProperty("user.name", "unknown") + " [ " + System.getProperties().getProperty("user.region", "??") + " ]"); } public XMLWriter startElement(String name) { startElement(name, 0); return this; } public XMLWriter startElement(String name, int indent) { try { //check if there are any previous elements open and close them closeElement(); indent(indent); mClosedElement = false; mWriter.write(START_ELEMENT_TAG); if (!mNamespace.isEmpty()) { mWriter.write(mNamespace + ":"); } mWriter.write(name); } catch (IOException ioe) { mLogger.log("Could not write element " + name + " using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } mStack.add(name); mEmptyElement = true; return this; } public XMLWriter endElement() { endElement(0); return this; } public XMLWriter endElement(int indent) { if (mStack.empty()) { mLogger.log("No elements left to close", LogManager.WARNING_MESSAGE_LEVEL); } String element = mStack.pop(); try { if (element != null) { if (mEmptyElement) { writeAttributes(); mWriter.write(CLOSE_EMPTY_ELEMENT_TAG); if (mWriteLine) { writeLine(); } // mEmptyElement=false; } else { indent(indent); mWriter.write(START_END_ELEMENT_TAG); if (!mNamespace.isEmpty()) { mWriter.write(mNamespace + ":"); } mWriter.write(element); mWriter.write(CLOSE_ELEMENT_TAG); if (mWriteLine) { writeLine(); } } mClosedElement = true; mEmptyElement = false; mWriteLine = true; } } catch (IOException ioe) { mLogger.log( "Could not close element " + element + " using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } return this; } public XMLWriter writeData(String data) { try { mEmptyElement = false; closeElement(); mWriter.write(escapeXML(data)); } catch (IOException ioe) { mLogger.log( "Could not write data for element " + mStack.peek() + " using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } return this; } public XMLWriter writeUnEscapedData(String data) { try { mEmptyElement = false; closeElement(); mWriter.write(data); } catch (IOException ioe) { mLogger.log( "Could not write data for element " + mStack.peek() + " using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } return this; } public XMLWriter writeLine() { try { mWriter.write(mLineSeparator); } catch (IOException ioe) { mLogger.log( "Could not write empty line using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } return this; } public XMLWriter noLine() { mWriteLine = false; return this; } public XMLWriter writeCData(String data) { try { mEmptyElement = false; closeElement(); mWriter.write("<![CDATA[" + data + "]]>"); } catch (IOException ioe) { mLogger.log( "Could not write data for element " + mStack.peek() + " using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } return this; } public XMLWriter writeAttribute(String key, String value) { mAttributes.append(" "); if (!mNamespace.isEmpty()) { mAttributes.append(mNamespace).append(":"); } 
mAttributes.append(key).append("=\"").append( escapeXML(value)).append("\""); return this; } /** * Writes out the attributes of a given element to the writer */ private void writeAttributes() { try { mWriter.write(mAttributes.toString()); mAttributes.setLength(0); } catch (IOException ioe) { mLogger.log( "Could not write attributes for element " + mStack.peek() + " using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } } /** * Close open elements start tag. Write any attributes. * This is called when either a new child element is added to existing element or data is added. * @return XMLWriter */ private void closeElement() { try { if (!mClosedElement) { writeAttributes(); mClosedElement = true; mWriter.write(CLOSE_ELEMENT_TAG); if (mEmptyElement) { writeLine(); } } } catch (IOException ioe) { mLogger.log( "Could not close open element " + mStack.peek() + " using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } } public XMLWriter writeXMLHeader() { if (mHeader) { try { mWriter.write(XML_HEADER); writeLine(); mHeader = true; } catch (IOException ioe) { mLogger.log( "Could not write xml header using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } } return this; } public XMLWriter writeXMLComment(String comment, boolean linepadded) { try { closeElement(); if (linepadded) { writeLine(); } mWriter.write(START_COMMENT_TAG); mWriter.write(comment); mWriter.write(CLOSE_COMMENT_TAG); writeLine(); if (linepadded) { writeLine(); } } catch (IOException ioe) { mLogger.log( "Could not write xml comment using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } return this; } public XMLWriter writeXMLComment(String comment) { this.writeXMLComment(comment, false); return this; } private XMLWriter indent(int indent) { try { mWriter.write( (indent <= 0) ? "" : String.format(String.format("%%0%dd", indent), 0).replace("0", INDENT)); } catch (IOException ioe) { mLogger.log( "Could not write xml comment using XMLWriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } return this; } private static String escapeXML(String str) { String st = str; st = st.replaceAll("&", "&"); st = st.replaceAll("<", "<"); st = st.replaceAll(">", ">"); st = st.replaceAll("\"", """); st = st.replaceAll("'", "'"); return st; } public void close() { try { mWriter.close(); } catch (IOException ioe) { mLogger.log( "Could not close XMLwriter", LogManager.ERROR_MESSAGE_LEVEL); mLogger.log(ioe.getMessage(), LogManager.DEBUG_MESSAGE_LEVEL); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/Escape.java0000644000175000017500000000763411757531137024155 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.common.util; /** * This class tries to define an interface to deal with quoting, escaping, * and the way back. The quoting algorithm is safe to only itself. Thus, * *

 * unescape( escape( s ) ) === s
 * 
 * holds true, but
 * escape( unescape( s ) ) =?= s
 * 
* * does not necessarily hold. * * @author Gaurang Mehta * @author Karan Vahi * @author Jens-S. Vöckler * @version $Revision: 2079 $ */ public class Escape { /** * Defines the character used to escape characters. */ private char m_escape; /** * Defines the set of characters that require escaping. */ private String m_escapable; /** * Defines the default quoting and escaping rules, escaping the * apostrophe, double quote and backslash. The escape character * is the backslash. * */ public Escape() { m_escapable = "\"'\\"; m_escape = '\\'; } /** * Constructs arbitrary escaping rules. * * @param escapable is the set of characters that require escaping * @param escape is the escape character itself. */ public Escape( String escapable, char escape ) { m_escape = escape; m_escapable = escapable; // ensure that the escape character is part of the escapable char set if ( escapable.indexOf(escape) == -1 ) m_escapable += m_escape; } /** * Transforms a given string by escaping all characters inside the * quotable characters set with the escape character. The escape * character itself is also escaped. * * @param s is the string to escape. * @return the quoted string * @see #unescape( String ) */ public String escape( String s ) { // sanity check if ( s == null ) return null; StringBuffer result = new StringBuffer( s.length() ); for ( int i=0; i " + args[i] ); System.out.println( "e(s) > " + e ); System.out.println( "u(e(s))> " + me.unescape(e) ); System.out.println( "u(s) > " + u ); System.out.println( "e(u(s))> " + me.escape(u) ); System.out.println(); } } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/Separator.java0000644000175000017500000003167011757531137024712 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; /** * This class solely defines the separators used in the textual in- * and output between namespace, name and version(s). A textual * representation of a definition looks like ns::name:version, and * a textual representation of a uses like ns::name:min,max.

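 *
 * For example (illustrative values), Separator.combine( "pegasus",
 * "transfer", "1.0" ) yields "pegasus::transfer:1.0", and the
 * four-argument Separator.combine( "pegasus", "transfer", "1.0", "2.0" )
 * yields the range form "pegasus::transfer:1.0,2.0".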
* * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * * @see org.griphyn.vdl.classes.Definition */ public class Separator { /** * This constant defines the separator between a namespace and * the identifier. */ public static final String NAMESPACE = "::"; /** * This constant defines the separator between an identifier and * its version. */ public static final String NAME = ":"; /** * This constant defines the separator that denotes a version range. * Version ranges are only used with the "uses" clause, which maps * from a derivation to a transformation. */ public static final String VERSION = ","; /** * Although not truly a separator, this is the name of the default * namespace, which is used in the absence of a namespace. * @deprecated The default namespace is null. */ public static final String DEFAULT = "default"; /** * Combines the three components that constitute a fully-qualified * definition identifier into a single string. * * @param namespace is the namespace, may be empty or null. * @param name is the name to use, must not be empty nor null. * @param version is the version to attach, may be empty or null. * @return the combination of namespace, name and version with separators. * @exception NullPointerException will be thrown on an empty or null * name, as no such identifier can be constructed. */ public static String combine( String namespace, String name, String version ) { StringBuffer result = new StringBuffer(32); if ( namespace != null && namespace.length() > 0 ) result.append(namespace).append(Separator.NAMESPACE); // postcondition: no namespace, no double colon if ( name != null && name.length() > 0 ) { result.append(name); } else { // gotta have a name throw new NullPointerException( "Missing identifier for definition" ); } if ( version != null && version.length() > 0 ) result.append( Separator.NAME ).append(version); // postcondition: If there is a version, it will be appended return result.toString(); } /** * Combines the four components that reference a fully-qualified * definition identifier into a single string. * * @param namespace is the namespace, may be empty or null. * @param name is the name to use, must not be empty nor null. * @param min is the lower version to attach, may be empty or null. * @param max is the upper version to attach, may be empty or null. * @return the combination of namespace, name and versions with * appropriate separators. * @exception NullPointerException will be thrown on an empty or null * name, as no such identifier can be constructed. */ public static String combine( String namespace, String name, String min, String max ) { StringBuffer result = new StringBuffer(32); if ( namespace != null && namespace.length() > 0 ) result.append(namespace).append(Separator.NAMESPACE); // postcondition: no namespace, no double colon if ( name != null && name.length() > 0 ) { result.append(name); } else { // gotta have a name throw new NullPointerException( "Missing identifier for definition" ); } if ( min != null && min.length() > 0 ) { // minimum version exists result.append(Separator.NAME).append(min).append(Separator.VERSION); if ( max != null && max.length() > 0 ) result.append(max); } else { // minimum version does not exist if ( max != null && max.length() > 0 ) result.append(Separator.NAME).append(Separator.VERSION).append(max); } return result.toString(); } /** * Maps the action associated with a state and char class. The following * actions were determined: * * * * * * * *
 * 0 | no operation
 * 1 | save character
 * 2 | empty save into ns
 * 3 | empty save into id
 * 4 | empty save into vs
 * 5 | empty save into id, save
*/ private static short actionmap2[][] = { { 3, 0, 1 }, { 3, 2, 5 }, { 3, 3, 1 }, { 4, 0, 1 } }; /** * Maps the new state from current state and character class. The * following character classes are distinguished: * * * * *
 * 0 | EOS
 * 1 | colon (:)
 * 2 | other (*)
*/ private static short statemap2[][] = { { 8, 1, 0 }, { 3, 2, 3 }, { 8, 3, 2 }, { 8, 9, 3 } }; /** * Splits a fully-qualified definition identifier into separate * namespace, name and version. Certain extensions permit a spec * to distinguish between an empty namespace or version and a * null (wildcard match) namespace and version.

* * There is a subtle distinction between a null value and an * empty value for the namespace and version. A null value is * usually taken as a wildcard match. An empty string however * is an exact match of a definition without the namespace or * version.

 * In order to enable the DAX generation function to distinguish
 * these cases when specifying user input, the following convention
 * is supported, where * stands in for wild-card matches, and
 * (-) for a match of an empty element:
 * INPUT  | NS  | ID | VS
 * -------+-----+----+-----
 * id     | *   | id | *
 * ::id   | (-) | id | *
 * ::id:  | (-) | id | (-)
 * id:    | *   | id | (-)
 * id:vs  | *   | id | vs
 * n::id  | n   | id | *
 * n::id: | n   | id | (-)
 * n::i:v | n   | i  | v
 * ::i:v  | (-) | i  | v
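 *
 * For example (illustrative calls), splitFQDI( "n::id:1.0" ) returns
 * { "n", "id", "1.0" }, while splitFQDI( "::id:" ) returns
 * { "", "id", "" }, the exact-match form with an empty namespace
 * and an empty version.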
* * @param fqdi is the fully-qualified definition identifier. * @return an array with 3 entries representing namespace, name * and version. Namespace and version may be empty or even null. */ public static String[] splitFQDI( String fqdi ) throws IllegalArgumentException { String[] result = new String[3]; result[0] = result[1] = result[2] = null; StringBuffer save = new StringBuffer(); short state = 0; int pos = 0; char ch; int chclass; do { // obtain next character and character class if ( pos < fqdi.length() ) { // regular char ch = fqdi.charAt(pos); chclass = ( ch == ':' ) ? 1 : 2; ++pos; } else { // EOS ch = Character.MIN_VALUE; chclass = 0; } // perform the action appropriate for state transition switch ( actionmap2[state][chclass] ) { case 0: // no-op break; case 5: // Vi+S result[1] = save.toString(); save = new StringBuffer(); // NO break on purpose case 1: // S save.append( ch ); break; case 2: // Vn result[0] = save.toString(); save = new StringBuffer(); break; case 3: // Vi result[1] = save.toString(); save = new StringBuffer(); break; case 4: // Vv result[2] = save.toString(); save = new StringBuffer(); break; } // perform state transition state = statemap2[state][chclass]; } while ( state < 8 ); if ( state == 9 || result[1] == null || result[1].trim().length() == 0 ) throw new IllegalArgumentException( "Malformed fully-qualified definition identifier" ); // POSTCONDITION: state == 8 return result; } /** * Maps the action associated with a state and a character class. * The actions are as follows: * * * * * * * * * * *
 * 0 | no operation
 * 1 | save character
 * 2 | empty save into ns
 * 3 | empty save into name
 * 4 | empty save into vs
 * 5 | empty save into vs, 4args
 * 6 | empty save into max
 * 7 | empty save into max, 4args
 * 8 | empty save into name, save
*/ private static int actionmap[][] = { { 0, 0, 0, 1 }, // 0 { 3, 0, 0, 1 }, // 1 { 0, 2, 0, 8 }, // 2 { 0, 0, 0, 1 }, // 3 { 3, 3, 0, 1 }, // 4 { 4, 0, 5, 1 }, // 5 { 7, 0, 0, 1 } // 6 }; /** * Maps the state and character class to the follow-up state. The * final state 16 is a regular final state, and final state 17 is * the error final state. All other states are intermediary states.

 * Four character classes are distinguished:
 * 0 | end of string (EOS)
 * 1 | colon (:)
 * 2 | comma (,)
 * 3 | any other
*/ private static short statemap[][] = { { 17, 17, 17, 1 }, // 0 { 16, 2, 17, 1 }, // 1 { 17, 3, 17, 5 }, // 2 { 17, 17, 6, 4 }, // 3 { 16, 5, 17, 4 }, // 4 { 16, 17, 6, 5 }, // 5 { 16, 17, 17, 6 } // 6 }; /** * Splits a fully-qualified identifier into its components. Please note * that you must check the length of the result. If it contains three * elements, it is a regular FQDN. If it contains four results, it is * a tranformation reference range. Note though, if the version portion * is not specified, a 3 argument string will always be returned, even * if the context requires a 4 argument string. * * @param fqdn is the string to split into components. * @return a vector with three or four Strings, if it was parsable. *

 *
 * 1. namespace, may be null
 * 2. name, never null
 * 3. version for 3arg, or minimum version for 4arg, may be null
 * 4. maximum version for 4arg, may be null
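 *
 * For example (illustrative calls), split( "n::id:1.0,2.0" ) returns
 * the four-element array { "n", "id", "1.0", "2.0" }, whereas
 * split( "n::id:1.0" ) returns the three-element { "n", "id", "1.0" }.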
* @exception IllegalArgumentException, if the identifier cannot * be parsed correctly. */ public static String[] split( String fqdn ) throws IllegalArgumentException { String namespace = null; String name = null; String version = null; String max = null; short state = 0; int pos = 0; boolean is4args = false; StringBuffer save = new StringBuffer(); char ch; int chclass; do { // obtain next character and character class if ( pos < fqdn.length() ) { // regular char ch = fqdn.charAt(pos); if ( ch == ':' ) chclass = 1; else if ( ch == ',' ) chclass = 2; else chclass = 3; ++pos; } else { // EOS ch = Character.MIN_VALUE; chclass = 0; } // perform the action appropriate for state transition switch ( actionmap[state][chclass] ) { case 0: // no-op break; case 8: if ( save.length() > 0 ) name = save.toString(); save = new StringBuffer(); // NO break on purpose case 1: // save save.append( ch ); break; case 2: // save(ns) if ( save.length() > 0 ) namespace = save.toString(); save = new StringBuffer(); break; case 3: // save(name) if ( save.length() > 0 ) name = save.toString(); save = new StringBuffer(); break; case 5: // save(version), 4args is4args = true; // NO break on purpose case 4: // save(version) if ( save.length() > 0 ) version = save.toString(); save = new StringBuffer(); break; case 7: // save(max), 4args is4args = true; // NO break on purpose case 6: // save(max) if ( save.length() > 0 ) max = save.toString(); save = new StringBuffer(); break; } // perform state transition state = statemap[state][chclass]; } while ( state < 16 ); if ( state == 17 || ( is4args && version == null && max == null ) ) throw new IllegalArgumentException( "Malformed fully-qualified definition identifier" ); // POSTCONDITION: state == 16 // assemble result String[] result = new String[ is4args ? 4 : 3 ]; result[0] = namespace; result[1] = name; result[2] = version; if ( is4args ) result[3] = max; return result; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/ProfileParserException.java0000644000175000017500000000265711757531137027411 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; /** * This class is used to signal errors while parsing profile strings * @see ProfileParser * * @author Gaurang Mehta * @author Jens-S. Vöckler * @version $Revision: 2079 $ */ public class ProfileParserException extends Exception { /** * Remembers the position that cause the exception to be thrown. */ private int m_position; public ProfileParserException( String msg, int position ) { super(msg); m_position = position; } public ProfileParserException( String msg, int position, Throwable cause ) { super(msg,cause); m_position = position; } /** * Obtains the position at which point the exception was thrown. 
* @return a column position into the string */ public int getPosition() { return this.m_position; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/Separator2Test.java0000644000175000017500000000406511757531137025632 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; /** * This is the test program for the Separator class. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * * @see org.griphyn.vdl.classes.Definition */ public class Separator2Test { public static void x( String what, int len ) { String s = ( what == null ? "(null)" : ("\"" + what + "\"") ); System.out.print( s ); for ( int i=s.length(); i [" ); try { String[] x = Separator.splitFQDI(what); for ( int i=0; i 0 ) { for ( int i=0; inull
, if unparsable. */ public static Date parse( String stamp ) { // initialize the compiled expressions once if ( c_pattern == null ) { c_pattern = new Pattern[ c_expression.length ]; for ( int i=0; i 0 ) { boolean utc = ( Character.toUpperCase( m.group(9).charAt(0) ) == 'Z' ); if ( utc ) z = TimeZone.getTimeZone( "GMT+0" ); else z = TimeZone.getTimeZone( "GMT" + m.group(9) ); } c.setTimeZone(z); c.set( Calendar.YEAR, Integer.parseInt(m.group(1)) ); c.set( Calendar.MONTH, Integer.parseInt(m.group(2)) + (Calendar.JANUARY-1) ); c.set( Calendar.DAY_OF_MONTH, Integer.parseInt(m.group(3)) ); if ( m.group(4).length() > 0 ) { c.set( Calendar.HOUR_OF_DAY, Integer.parseInt(m.group(5)) ); c.set( Calendar.MINUTE, Integer.parseInt(m.group(6)) ); if ( m.group(7) != null && m.group(7).length() > 0 ) c.set( Calendar.SECOND, Integer.parseInt(m.group(7)) ); if ( m.group(8) != null && m.group(8).length() > 1 ) { String millis = m.group(8).substring(1); while ( millis.length() < 3 ) millis += "0"; millis = millis.substring(0,3); c.set( Calendar.MILLISECOND, Integer.parseInt(millis) ); } } return c.getTime(); } } // not found return null; } /** * Ignores any internal date format, and tries to show a complete * date/timp stamp of the current time in extended ISO 8601 format. * UTC time (Zulu time) or a local timezone will be used. A sample for * UTC output is 2002-04-23T02:49:58Z A sample for local zone * (CDT) is 2002-04-22T21:49:58-05:00 * * @param zuluTime returns a UTC formatted stamp, if true. Otherwise * the time will be formatted according to the local zone. * @return an ISO 8601 formatted date and time representation for the * current time in extended format without millisecond resolution * @see #iso8601( boolean, boolean, boolean, Date ) */ public static String iso8601( boolean zuluTime ) { return Currently.iso8601( zuluTime, true, false, new Date() ); } /** * Ignores any internal date format, and tries to show a complete * date/timp stamp in extended ISO 8601 format. UTC time (Zulu time) * or a local timezone will be used.

 *
 * zone  | format | fraction | example
 * ------+--------+----------+-------------------------------
 * local | basic  | integral | 20020523T140427-0500
 * UTC   | basic  | integral | 20020523190427Z
 * local | extd.  | integral | 2002-05-23T14:04:27-05:00
 * UTC   | extd.  | integral | 2002-05-23T19:04:27Z
 * local | basic  | millis   | 20020523T140427.166-0500
 * UTC   | basic  | millis   | 20020523190427.166Z
 * local | extd.  | millis   | 2002-05-23T14:04:27.166-05:00
 * UTC   | extd.  | millis   | 2002-05-23T19:04:27.166Z
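 *
 * For instance (illustrative call), iso8601( true, true, false, new Date() )
 * produces the "UTC extd. integral" form shown above, e.g.
 * 2002-05-23T19:04:27Z.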

* * @param zuluTime returns a UTC formatted stamp, if true. Otherwise * the time will be formatted according to the local zone. Local time * should be prefixed with the 'T'. * @param extendedFormat will use the extended ISO 8601 format which * separates the different timestamp items. If false, the basic * format will be used. In UTC and basic format, the 'T' separator * will be omitted. * @param withMillis will put the millisecond extension into the timestamp. * If false, the time will be without millisecond fraction. The separator * is taken from {@link java.text.DecimalFormatSymbols#getMinusSign()}, * which usually is a period or a comma. * @param now is a time stamp as Date. * @return an ISO 8601 formatted date and time representation for * the given point in time. */ public static String iso8601( boolean zuluTime, boolean extendedFormat, boolean withMillis, Date now ) { Calendar c = Calendar.getInstance( zuluTime ? TimeZone.getTimeZone("UTC") : TimeZone.getDefault() ); c.setTime( now ); // set up formattting options DecimalFormat nf2 = new DecimalFormat("##00"); DecimalFormat nf3 = new DecimalFormat("###000"); DecimalFormat nf4 = new DecimalFormat("####0000"); DecimalFormatSymbols dfs = nf2.getDecimalFormatSymbols(); // allocate result string buffer int size = extendedFormat ? (zuluTime ? 25 : 30) : (zuluTime ? 21 : 25); if ( ! withMillis ) size -= 4; StringBuffer result = new StringBuffer(size); result.append( nf4.format(c.get(Calendar.YEAR)) ); if ( extendedFormat ) result.append('-'); // position 5 result.append( nf2.format(c.get(Calendar.MONTH)+1) ); if ( extendedFormat ) result.append('-'); // position 8 result.append( nf2.format(c.get(Calendar.DAY_OF_MONTH)) ); // generating UTC in basic format may leave out the 'T' separator if ( extendedFormat || ! zuluTime ) result.append('T'); // position 11 result.append( nf2.format(c.get(Calendar.HOUR_OF_DAY)) ); if ( extendedFormat ) result.append(':'); // position 14 result.append( nf2.format(c.get(Calendar.MINUTE)) ); if ( extendedFormat ) result.append(':'); // position 17 result.append( nf2.format(c.get(Calendar.SECOND)) ); if ( withMillis ) { // Though there is no explicit spec which allows a complete // timestamp with milliseconds, it is implied through two // levels, sigh. 5.3.4.2 allows decimal fractions with // time-only stamps that have a timezone. The intro of 5.4.2 // allows 5.3.1.3. result.append( dfs.getDecimalSeparator() ); // position 20 result.append( nf3.format(c.get(Calendar.MILLISECOND)) ); } if ( zuluTime ) { // this is easy result.append('Z'); } else { // time zone calculations int zone_offset = c.get(Calendar.ZONE_OFFSET)/1000; int save_offset = c.get(Calendar.DST_OFFSET)/1000; int diff = (zone_offset + save_offset) / 60; result.append( diff < 0 ? dfs.getMinusSign() : '+' ); // position 24 if ( diff < 0 ) diff = Math.abs(diff); result.append( nf2.format( diff / 60 ) ); if ( extendedFormat ) result.append( ':' ); result.append( nf2.format( diff % 60 ) ); } return result.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/FactoryException.java0000644000175000017500000001473611757531137026244 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; import edu.isi.pegasus.common.util.DynamicLoader; /** * The base exception class to notify of errors, while instantiating classes * via any of the factories. Each factory, should throw an exception that is * a subclass of this class. * * @author Karan Vahi * @author Jens-S. Vöckler * @author Gaurang Mehta */ public class FactoryException // method A: no need to change interface, obsfuscated use, though extends java.lang.RuntimeException //method B: needs API small change, but makes things clear. //extends java.lang.Exception { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Object"; /** * The name of the class that was trying to be instantiated when this * error occured, or some other signifier like module name. */ protected String mClassname; /** * Converts most of the common instantiation exceptions from the class * loader into an error message. Use for getting sensible error messages, * of the causes that are associated with a FactoryException. The index * in the messages starts from 0. * * @return the error message */ public String convertException(){ return convertException( mClassname, this, 0 ); } /** * Converts most of the common instantiation exceptions from the class * loader into an error message. Use for getting sensible error messages, * of the causes that are associated with a FactoryException. The index * in the messages starts from 0. * * @param index the index to start from. * * @return the error message. */ public String convertException( int index ){ return convertException( mClassname, this , index ); } /** * Converts most of the common instantiation exceptions from the class * loader into an error message. Use for getting sensible error messages, * of the causes that are associated with a FactoryException. * * @param classname the class that was trying to be loaded or some other * signifier. * @param e the FactoryException that is thrown. * @param index the index to start from. * * @return the error message. */ public static String convertException( String classname, Throwable e , int index){ Throwable prev = null; StringBuffer message = new StringBuffer(); int i = index; //append all the causes for(Throwable cause = e; cause != null ; cause = cause.getCause()){ message.append("\n [").append(Integer.toString(++i) ).append( "]: "); if ( cause instanceof FactoryException ){ message.append( cause.getMessage() ); classname = ((FactoryException)cause).getClassname(); } else if ( prev != null && prev instanceof FactoryException ){ //we can use the convert method message.append( DynamicLoader.convertExceptionToString( classname, cause ) ); } else{ //for all other exceptions just chain the message message.append( cause.getMessage() ); } //append just one elment of stack trace for each exception message.append( " at " ).append( cause.getStackTrace()[0] ); prev = cause; } return message.toString(); } /** * Constructs a FactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. 
* * @see #DEFAULT_NAME */ public FactoryException( String msg ) { super( msg ); mClassname = this.DEFAULT_NAME; } /** * Constructs a FactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public FactoryException(String msg, String classname) { super( msg ); mClassname = classname; } /** * Constructs a FactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public FactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a FactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public FactoryException(String msg, String classname, Throwable cause) { super(msg, cause); mClassname = classname; } /** * Returns the name of the class that was trying to be loaded when this * error occured, or some other signifier like a module name. * * @return the name of the class. */ public String getClassname(){ return this.mClassname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/util/ProfileParser.java0000644000175000017500000002173111757531137025524 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.util; import edu.isi.pegasus.planner.catalog.classes.Profiles; import java.util.List; import java.util.ArrayList; import java.util.Iterator; import edu.isi.pegasus.planner.classes.Profile; /** * Converts between the string version of a profile specification * and the parsed triples and back again. * * @author Gaurang Mehta * @author Jens-S. Vöckler */ public class ProfileParser { /** * Table to contain the state transition diagram for the parser. The * rows are defined as current states 0 through 7. The columns is the * current input character. The cell contains first the action to be * taken, followed by the new state to transition to: * *

   *      | EOS | adu |  ,  |  ;  |  :  |  \  |  "  |  =  |other|
   *      |  0  |  1  |  2  |  3  |  4  |  5  |  6  |  7  |  8  |
   * -----+-----+-----+-----+-----+-----+-----+-----+-----+-----+
   *   0  | -,F |Cn,0 | -,E1| -,E1| -,1 | -,E1| -,E1| -,E1| -,E1|
   *   1  | -,E2| -,E1| -,E1| -,E1| -,2 | -,E1| -,E1| -,E1| -,E1|
   *   2  | -,F |Ck,2 | -,E1| -,E1| -,E1| -,E1| -,E1| -,3 |Ck,E1|
   *   3  | -,E2|Cv,6 | -E1 | -,E1| -,E1| -,E1| -,4 | -,E1|Cv,6 |
   *   4  | -,E2|Cv,4 |Cv,4 |Cv,4 |Cv,4 | -,5 | -,7 |Cv,4 |Cv,4 |
   *   5  | -,E2|Cv,4 |Cv,4 |Cv,4 |Cv,4 |Cv,4 |Cv,4 |Cv,4 |Cv,4 |
   *   6  |A1,F |Cv,6 |A2,2 |A1,0 | -,E1| -,E1| -,E1| -,E1|Cv,6 |
   *   7  |A1,F | -,E1|A2,2 |A1,0 | -,E1| -,E1| -,E1| -,E1| -,E1|
   * -----+-----+-----+-----+-----+-----+-----+-----+-----+-----+
   *   F  |  8  | final state
   *   E1 |  9  | error1: illegal character in input
   *   E2 | 10  | error2: premature end of input
   * 
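 *
 * As an illustrative example (made-up keys and values), the string
 * env::PATH="/bin",FOO="bar";globus::queue="main" parses into the
 * triples (env,PATH,/bin), (env,FOO,bar) and (globus,queue,main).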
* * The state variable collects the new state for a given * state (rows) and input character set (column) identifier. */ private static final byte c_state[][] = { // E A , ; : \ " = O { 8, 0, 9, 9, 1, 9, 9, 9, 9 }, // 0: recognize ns { 10, 9, 9, 9, 2, 9, 9, 9, 9 }, // 1: found colon { 8, 2, 9, 9, 9, 9, 9, 3, 9 }, // 2: recognize key { 10, 6, 9, 9, 9, 9, 4, 9, 6 }, // 3: seen equals { 10, 4, 4, 4, 4, 5, 7, 4, 4 }, // 4: quoted value { 10, 4, 4, 4, 4, 4, 4, 4, 4 }, // 5: backslashed qv { 8, 6, 2, 0, 9, 9, 9, 9, 6 }, // 6: unquoted value { 8, 9, 2, 0, 9, 9, 9, 9, 9 } // 7: closed quote }; /** * There are six identified actions. * *
   *  -   | 0 | noop
   *  Cn  | 1 | append input character to namespace field
   *  Ck  | 2 | append input character to key field
   *  Cv  | 3 | append input character to value field
   *  A1  | 4 | create triple and flush all fields
   *  A2  | 5 | create triple and flush key and value only
   * 
* * The action variable collects the action to take for a * given state (rows) and input character set (column). */ private static final byte c_action[][] = { // E A , ; : \ " = O { 0, 1, 0, 0, 0, 0, 0, 0, 0 }, // 0: recognize ns { 0, 0, 0, 0, 0, 0, 0, 0, 0 }, // 1: found colon { 0, 2, 0, 0, 0, 0, 0, 0, 0 }, // 2: recognize key { 0, 3, 0, 0, 0, 0, 0, 0, 3 }, // 3: seen equals { 0, 3, 3, 3, 3, 0, 0, 3, 3 }, // 4: quoted value { 0, 3, 3, 3, 3, 3, 3, 3, 3 }, // 5: backslashed qv { 4, 3, 5, 4, 0, 0, 0, 0, 3 }, // 6: unquoted value { 4, 0, 5, 4, 0, 0, 0, 0, 0 } // 7: closed quote }; /** * Parses a given user profile specification into a map of maps. * * @param s is the input string to parse * @return a map of namespaces mapping to maps of key value pairs. * @throws ProfileParserException if the input cannot be recognized * @see #combine( List m ) */ public static List parse( String s ) throws ProfileParserException { char ch = '?'; List result = new ArrayList(); // sanity check if ( s == null ) return result; StringBuffer namespace = new StringBuffer(); StringBuffer key = new StringBuffer(); StringBuffer value = new StringBuffer(); int index = 0; byte charset, state = 0; while ( state < 8 ) { // // determine character class // switch ( (ch = ( index < s.length() ? s.charAt(index++) : '\0' )) ) { case '\0': charset = 0; break; case '_': charset = 1; break; case '.': charset = 1; break; case '@': charset = 1; break; case '-': charset = 1; break; case '+': charset = 1; break; case '/': charset = 1; break; case ',': charset = 2; break; case ';': charset = 3; break; case ':': charset = 4; break; case '\\': charset = 5; break; case '"': charset = 6; break; case '=': charset = 7; break; default: if ( Character.isLetter(ch) || Character.isDigit(ch) ) charset = 1; else charset = 8; break; } // // perform action // switch ( c_action[state][charset] ) { case 1: // collect namespace namespace.append(ch); break; case 2: // collect key key.append(ch); break; case 3: // collect value value.append(ch); break; case 4: // flush result.add( new Profile( namespace.toString().toLowerCase(), key.toString(), value.toString() ) ); namespace.delete( 0, namespace.length() ); key.delete( 0, key.length() ); value.delete( 0, value.length() ); break; case 5: // partial flush result.add( new Profile( namespace.toString(), key.toString(), value.toString() ) ); key.delete( 0, key.length() ); value.delete( 0, value.length() ); break; } // // progress state // state = c_state[state][charset]; } if ( state > 8 ) { switch ( state ) { case 9: throw new ProfileParserException( "Illegal character '" + ch + "'", index ); case 10: throw new ProfileParserException( "Premature end-of-string", index ); default: throw new ProfileParserException( "Unknown error", index ); } } return result; } /*** * Creates a profile string from the internal representation. * * @param l is a list of profiles * @return a string containing the representation. The string can be * empty (FIXME: should it be "null" or null?) for an empty list. * @see #parse( String s ) */ public static String combine( Profiles p ) { return combine( p.getProfiles() ); } /** * Creates a profile string from the internal representation. * * @param l is a list of profiles * @return a string containing the representation. The string can be * empty (FIXME: should it be "null" or null?) for an empty list. 
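 *
 * Illustratively, combining the three triples from the example above
 * reproduces env::PATH="/bin",FOO="bar";globus::queue="main", with any
 * embedded double quotes or backslashes escaped by a backslash.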
* @see #parse( String s ) */ public static String combine( List l ) { StringBuffer result = new StringBuffer(); // faster, shorter, less mem, retains ordering; alas, no minimal output boolean flag = false; String previous = "invalid namespace"; for ( Iterator i=l.iterator(); i.hasNext(); ) { Profile p = (Profile) i.next(); String ns = p.getProfileNamespace(); if ( ns.equals(previous) ) result.append(','); else { if ( flag ) result.append(';'); result.append(ns).append("::"); } result.append( p.getProfileKey() ).append('=').append('"'); // escape all dquote and backslash with backslash String value = p.getProfileValue(); for ( int k=0; k [" ); try { String[] x = Separator.split(what); for ( int i=0; i 0 ) { for ( int i=0; i implementingClassNameTable(){ if( mImplementingClassNameTable == null ){ mImplementingClassNameTable = new HashMap(3); mImplementingClassNameTable.put( CredentialHandler.TYPE.x509, X509_IMPLEMENTING_CLASS); mImplementingClassNameTable.put( CredentialHandler.TYPE.irods, IRODS_IMPLEMENTING_CLASS); mImplementingClassNameTable.put( CredentialHandler.TYPE.s3, S3_IMPLEMENTING_CLASS); mImplementingClassNameTable.put( CredentialHandler.TYPE.ssh, SSH_IMPLEMENTING_CLASS); } return mImplementingClassNameTable; } /** * A table that maps, Pegasus style keys to the names of the corresponding classes * implementing the CondorStyle interface. */ private static Map mImplementingClassNameTable; /** * A table that maps, Pegasus style keys to appropriate classes implementing the * CredentialHandler interface */ private Map mImplementingClassTable ; /** * A boolean indicating that the factory has been initialized. */ private boolean mInitialized; /** * Handle to the PegasusBag */ private PegasusBag mBag; /** * The default constructor. */ public CredentialHandlerFactory(){ mImplementingClassTable = new HashMap(3); mInitialized = false; } /** * Initializes the Factory. Loads all the implementations just once. * * @param bag the bag of initialization objects * * @throws CredentialHandlerFactoryException that nests any error that * might occur during the instantiation of the implementation. */ public void initialize( PegasusBag bag ) throws CredentialHandlerFactoryException{ mBag = bag; //load all the implementations that correspond to the Pegasus style keys for( Iterator it = this.implementingClassNameTable().entrySet().iterator(); it.hasNext(); ){ Map.Entry entry = (Map.Entry) it.next(); CredentialHandler.TYPE type = (CredentialHandler.TYPE)entry.getKey(); String className= (String)entry.getValue(); //load via reflection. not required in this case though put( type, this.loadInstance( bag, className )); } //we have successfully loaded all implementations mInitialized = true; } /** * This method loads the appropriate implementing CondorStyle as specified * by the user at runtime. The CondorStyle is initialized and returned. * * @param type the credential type that needs to be loaded. * * @throws CredentialHandlerFactoryException that nests any error that * might occur during the instantiation of the implementation. 
*/ public CredentialHandler loadInstance( CredentialHandler.TYPE type ) throws CredentialHandlerFactoryException{ //sanity checks first if( !mInitialized ){ throw new CredentialHandlerFactoryException( "Credential impelmentors needs to be initialized first before using" ); } //now just load from the implementing classes Object credentialHandler = this.get( type ); if ( credentialHandler == null ) { //then load the class named type via reflection and register CredentialHandler handler = this.loadInstance( mBag, type.toString() ); this.put( type, handler ); } return (CredentialHandler)credentialHandler; } /** * This method loads the appropriate CredentialHandler using reflection. * * * @param bag the bag of initialization objects * @param className the name of the implementing class. * * @return the instance of the class implementing this interface. * * @throws CredentialHandlerFactoryException that nests any error that * might occur during the instantiation of the implementation. * * @see #DEFAULT_PACKAGE_NAME */ private CredentialHandler loadInstance( PegasusBag bag, String className ) throws CredentialHandlerFactoryException{ //sanity check PegasusProperties properties = bag.getPegasusProperties(); if (properties == null) { throw new RuntimeException( "Invalid properties passed" ); } if (className == null) { throw new RuntimeException( "Invalid className specified" ); } //prepend the package name if classname is actually just a basename className = (className.indexOf('.') == -1) ? //pick up from the default package DEFAULT_PACKAGE_NAME + "." + className : //load directly className; //try loading the class dynamically CredentialHandler credential = null; try { DynamicLoader dl = new DynamicLoader( className ); credential = (CredentialHandler) dl.instantiate( new Object[0] ); //initialize the loaded condor style credential.initialize( bag ); } catch (Exception e) { throw new CredentialHandlerFactoryException( "Instantiating Credential Handler ", className, e); } return credential; } /** * Returns the implementation from the implementing class table. * * @param type the credential handler type * * @return implementation the class implementing that style, else null */ private Object get( CredentialHandler.TYPE type ){ return mImplementingClassTable.get( type ); } /** * Inserts an entry into the implementing class table. * * @param type the credential handler type * @param implementation the class implementing that style. */ private void put(CredentialHandler.TYPE type, CredentialHandler implementation){ mImplementingClassTable.put( type, implementation ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/credential/CredentialHandlerFactoryException.java0000644000175000017500000000663011757531137032664 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.credential; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Condor Style implementations. 
* * @author Karan Vahi * @version $Revision: 4784 $ */ public class CredentialHandlerFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Credential Implementor"; /** * Constructs a CondorStyleFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public CredentialHandlerFactoryException( String msg ) { super( msg ); mClassname = CredentialHandlerFactoryException.DEFAULT_NAME; } /** * Constructs a CondorStyleFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public CredentialHandlerFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a CondorStyleFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public CredentialHandlerFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = this.DEFAULT_NAME; } /** * Constructs a CondorStyleFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public CredentialHandlerFactoryException( String msg, String classname, Throwable cause) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/credential/CredentialHandler.java0000644000175000017500000000442511757531137027455 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.credential; import edu.isi.pegasus.planner.classes.PegasusBag; /** * The credential interface that defines the credentials that can be associated * with jobs. * * @author Karan Vahi */ public interface CredentialHandler { /** * The version of the API being used. */ public static final String VERSION = "1.0"; //type of credentials associated /** * An enumeration of valid types of credentials that are supported. */ public static enum TYPE { x509, s3, irods, ssh }; /** * Initializes the credential implementation. Implementations require * access to the logger, properties and the SiteCatalog Store. * * @param bag the bag of Pegasus objects. 
*/ public void initialize( PegasusBag bag ); /** * Returns the path to the credential on the submit host. * * @return */ public String getPath(); /** * Returns the path to the credential for a particular site handle * * @param site the site catalog entry object. * * @return the path to the credential */ public String getPath( String site ); /** * Returns the name of the environment variable that needs to be set * for the job associated with the credential. * * @return the name of the environment variable. */ public String getEnvironmentVariable(); /** * Returns the description for the implementing handler * * @return description */ public String getDescription(); /** * returns the basename of the credential file name * @return */ public String getBaseName(); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/credential/impl/0000755000175000017500000000000011757531667024206 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/credential/impl/Ssh.java0000644000175000017500000000566411757531137025611 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.credential.impl; import java.io.File; import java.util.Map; import edu.isi.pegasus.common.credential.CredentialHandler; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; /** * A convenience class that allows us to determine the path to the user ssh private key file. * * @author Mats Rynge * @author Karan Vahi * * @version $Revision: 4817 $ */ public class Ssh extends Abstract implements CredentialHandler { /** * The name of the environment variable that specifies the path to the * s3cfg file. */ public static final String SSH_PRIVATE_KEY_VARIABLE = "SSH_PRIVATE_KEY"; /** * The description */ private static final String DESCRIPTION = "SSH private key Credential Handler"; /** * The default constructor. */ public Ssh(){ super(); } /** * Returns the path to ssh private key. The key has to be specifically listed in the environment * @param site the site handle * * @return the path to s3cfg. */ public String getPath( String site ){ SiteCatalogEntry siteEntry = mSiteStore.lookup( site ); Map envs = System.getenv(); // check if one is specified in site catalog entry String path = ( siteEntry == null )? null :siteEntry.getEnvironmentVariable( Ssh.SSH_PRIVATE_KEY_VARIABLE); if( path == null){ //check if specified in the environment if( envs.containsKey( Ssh.SSH_PRIVATE_KEY_VARIABLE ) ){ path = envs.get( Ssh.SSH_PRIVATE_KEY_VARIABLE ); } } return path; } /** * returns the basename of the path to the local credential */ public String getBaseName() { File path = new File(this.getPath()); return path.getName(); } /** * Returns the name of the environment variable that needs to be set * for the job associated with the credential. * * @return the name of the environment variable. 
*/ public String getEnvironmentVariable(){ return Ssh.SSH_PRIVATE_KEY_VARIABLE; } /** * Returns the description for the implementing handler * * @return description */ public String getDescription(){ return Ssh.DESCRIPTION; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/credential/impl/Abstract.java0000644000175000017500000000314411757531137026606 0ustar ryngerynge/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edu.isi.pegasus.common.credential.impl; import java.io.File; import edu.isi.pegasus.common.credential.CredentialHandler; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.planner.catalog.site.classes.SiteStore; import edu.isi.pegasus.planner.classes.PegasusBag; import edu.isi.pegasus.planner.common.PegasusProperties; /** * An abstract base class to be used by other CredentialHandler implementations. * * @author Karan Vahi * @version $Revision: 4817 $ */ public abstract class Abstract implements CredentialHandler { /** * The object holding all the properties pertaining to Pegasus. */ protected PegasusProperties mProps; /** * The handle to the Site Catalog Store. */ protected SiteStore mSiteStore; /** * A handle to the logging object. */ protected LogManager mLogger; /** * The default constructor. */ public Abstract() { } /** * Initializes the credential implementation. Implementations require * access to the logger, properties and the SiteCatalog Store. * * @param bag the bag of Pegasus objects. */ public void initialize( PegasusBag bag ){ mProps = bag.getPegasusProperties(); mSiteStore = bag.getHandleToSiteStore(); mLogger = bag.getLogger(); } /** * Returns the path to the credential on the submit host. * * @return */ public String getPath(){ return this.getPath( "local" ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/credential/impl/Proxy.java0000644000175000017500000000721611757531137026170 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.credential.impl; import edu.isi.pegasus.common.credential.CredentialHandler; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; import org.globus.common.CoGProperties; import java.io.File; import java.util.Map; /** * A convenice class that allows us to determine the path to the user proxy. * * @author Karan Vahi * @version $Revision: 4817 $ */ public class Proxy extends Abstract implements CredentialHandler{ /** * The name of the environment variable that specifies the path to the * proxy. */ public static final String X509_USER_PROXY_KEY = "X509_USER_PROXY"; /** * The description. */ private static final String DESCRIPTION = "X509 Proxy Handler"; /** * The default constructor. */ public Proxy(){ super(); } /** * Returns the path to user proxy. 
The order of preference is as follows * * - If a proxy is specified in the site catalog entry that is used * - Else the one pointed to by the environment variable X509_USER_PROXY * - Else the default path to the proxy in /tmp is created as determined by * CoGProperties.getDefault().getProxyFile() * * @param site the site catalog entry object. * * @return the path to user proxy. */ public String getPath( String site ){ SiteCatalogEntry siteEntry = mSiteStore.lookup( site ); //check if one is specified in site catalog entry String proxy = ( siteEntry == null )? null :siteEntry.getEnvironmentVariable( Proxy.X509_USER_PROXY_KEY); if( proxy == null){ //check if X509_USER_PROXY is specified in the environment Map envs = System.getenv(); if( envs.containsKey( Proxy.X509_USER_PROXY_KEY ) ){ proxy = envs.get( Proxy.X509_USER_PROXY_KEY ); } } if( proxy == null ){ //construct default path to user proxy in /tmp proxy = CoGProperties.getDefault().getProxyFile(); } //overload from the properties file /* ENV env = new ENV(); env.checkKeyInNS( mProps,"local" ); proxy = env.containsKey( ENV.X509_USER_PROXY_KEY )? (String)env.get( ENV.X509_USER_PROXY_KEY ): proxy; */ return proxy; } /** * returns the basename of the path to the local credential */ public String getBaseName() { File path = new File(this.getPath()); return path.getName(); } /** * Returns the name of the environment variable that needs to be set * for the job associated with the credential. * * @return the name of the environment variable. */ public String getEnvironmentVariable(){ return Proxy.X509_USER_PROXY_KEY; } /** * Returns the description for the implementing handler * * @return description */ public String getDescription(){ return Proxy.DESCRIPTION; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/credential/impl/S3CFG.java0000644000175000017500000000631111757531137025647 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.credential.impl; import edu.isi.pegasus.common.credential.CredentialHandler; import java.io.File; import java.util.Map; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; /** * A convenience class that allows us to determine the path to the user s3cfg file. * * @author Mats Rynge * @author Karan Vahi * * @version $Revision: 4929 $ */ public class S3CFG extends Abstract implements CredentialHandler { /** * The name of the environment variable that specifies the path to the * s3cfg file. */ public static final String S3CFG_FILE_VARIABLE = "S3CFG"; /** * The description */ private static final String DESCRIPTION = "S3 Conf File Credential Handler"; /** * The default constructor. */ public S3CFG(){ super(); } /** * Returns the path to s3cfg. 
The order of preference is as follows * * - If a s3cfg is specified in the site catalog entry, that is used * - Else the one pointed to by the environment variable S3CFG * - Else the default path ~/.s3cfg * * @param site the site handle * * @return the path to s3cfg. */ public String getPath( String site ){ SiteCatalogEntry siteEntry = mSiteStore.lookup( site ); Map envs = System.getenv(); // check if one is specified in site catalog entry String path = ( siteEntry == null )? null :siteEntry.getEnvironmentVariable( S3CFG.S3CFG_FILE_VARIABLE ); if( path == null){ //check if S3CFG is specified in the environment if( envs.containsKey( S3CFG.S3CFG_FILE_VARIABLE ) ){ path = envs.get( S3CFG.S3CFG_FILE_VARIABLE ); } } if (path == null) { // default location path = envs.get("HOME") + "/.s3cfg"; } return path; } /** * Returns the basename of the path to the local credential. * * @return the basename of the credential path. */ public String getBaseName() { File path = new File(this.getPath()); return path.getName(); } /** * Returns the name of the environment variable that needs to be set * for the job associated with the credential. * * @return the name of the environment variable. */ public String getEnvironmentVariable(){ return S3CFG.S3CFG_FILE_VARIABLE; } /** * Returns the description for the implementing handler * * @return description */ public String getDescription(){ return S3CFG.DESCRIPTION; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/credential/impl/Irods.java0000644000175000017500000000576511757531137026132 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.credential.impl; import edu.isi.pegasus.common.credential.CredentialHandler; import java.io.File; import java.util.Map; import edu.isi.pegasus.planner.catalog.site.classes.SiteCatalogEntry; /** * A convenience class that allows us to determine the path to the user irodsEnvFile. * * @author Mats Rynge * @version $Revision: 4929 $ */ public class Irods extends Abstract implements CredentialHandler{ /** * The name of the environment variable that specifies the path to the * irods env file. */ public static final String IRODSENVFILE = "irodsEnvFile"; /** * The description. */ public static final String DESCRIPTION = "IRODS Credentials Handler"; /** * The default constructor. */ public Irods(){ super(); } /** * Returns the path to the irods env file. The order of preference is as follows * * - If an irodsEnvFile is specified in the site catalog entry, that is used * - Else the one pointed to by the environment variable irodsEnvFile * * @param site the site handle * * @return the path to the irods env file, else null. */ public String getPath( String site ){ SiteCatalogEntry siteEntry = mSiteStore.lookup( site ); Map envs = System.getenv(); // check if one is specified in site catalog entry String path = ( siteEntry == null )?
null :siteEntry.getEnvironmentVariable( Irods.IRODSENVFILE ); if( path == null){ //check if irodsEnvFile is specified in the environment if( envs.containsKey( Irods.IRODSENVFILE ) ){ path = envs.get( Irods.IRODSENVFILE ); } } return path; } /** * Returns the basename of the path to the local credential. * * @return the basename of the credential path. */ public String getBaseName() { File path = new File(this.getPath()); return path.getName(); } /** * Returns the name of the environment variable that needs to be set * for the job associated with the credential. * * @return the name of the environment variable. */ public String getEnvironmentVariable(){ return Irods.IRODSENVFILE; } /** * Returns the description for the implementing handler * * @return description */ public String getDescription(){ return Irods.DESCRIPTION; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/0000755000175000017500000000000011757531667022561 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/LoggingKeys.java0000644000175000017500000001322111757531137025635 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.logging; /** * Some predefined logging keys to be used for logging. * * @author Karan Vahi * @author gmehta * * @version $Revision: 1010 $ */ public class LoggingKeys { public static final String EVENT_ID_KEY = "eventId"; public static final String PORTFOLIO_ID = "portfolio.id"; public static final String REQUEST_ID = "request.id"; public static final String DAX_ID = "dax.id"; public static final String DAG_ID = "dag.id"; public static final String JOB_NUMBER = "job.id"; public static final String JOB_ID = "job.id"; public static final String SEED_ID = "seed.id"; public static final String TEMPLATE_ID = "template.id"; public static final String SEED_NAME = "seed.name"; public static final String EVENT_QUERY_PROCESSCATALOG = "event.query.processcatalog"; public static final String EVENT_QUERY_DATACATALOG = "event.query.datacatalog"; public static final String EVENT_ENSEMBLE_RANKING = "event.ensemble.ranking"; public static final String EVENT_ENSEMBLE_PLANNING = "event.ensemble.planning"; public static final String EVENT_ENSEMBLE_WG = "event.ensemble.wings"; public static final String EVENT_ENSEMBLE_EXECUTE = "event.ensemble.workflow.execute"; public static final String EVENT_PEGASUS_RANKING = "event.ranking"; public static final String EVENT_PEGASUS_RANKING_RETRIEVE_DAX = "event.ranking.retrive.dax"; public static final String EVENT_PEGASUS_REFINEMENT = "event.pegasus.refinement"; public static final String EVENT_PEGASUS_AUTHENTICATION = "event.pegasus.authenticate"; public static final String EVENT_PEGASUS_PLAN = "event.pegasus.plan"; public static final String EVENT_PEGASUS_REDUCE = "event.pegasus.reduce"; public static final String EVENT_PEGASUS_SITESELECTION = "event.pegasus.siteselection"; public static final String EVENT_PEGASUS_ADD_TRANSFER_NODES = "event.pegasus.generate.transfer-nodes"; public static final String
EVENT_PEGASUS_CLUSTER = "event.pegasus.cluster"; public static final String EVENT_PEGASUS_PARTITION = "event.pegasus.partition"; public static final String EVENT_PEGASUS_GENERATE_CLEANUP = "event.pegasus.generate.cleanup-nodes"; public static final String EVENT_PEGASUS_GENERATE_CLEANUP_WF = "event.pegasus.generate.cleanup-wf"; public static final String EVENT_PEGASUS_GENERATE_WORKDIR = "event.pegasus.generate.workdir-nodes"; public static final String EVENT_PEGASUS_CODE_GENERATION = "event.pegasus.code.generation"; public static final String EVENT_PEGASUS_LOAD_TRANSIENT_CACHE = "event.pegasus.load.cache"; public static final String EVENT_PEGASUS_PARSE_SITE_CATALOG = "event.pegasus.parse.site-catalog"; public static final String EVENT_PEGASUS_PARSE_DAX = "event.pegasus.parse.dax"; public static final String EVENT_PEGASUS_PARSE_PDAX = "event.pegasus.parse.pdax"; public static final String EVENT_WORKFLOW_JOB_STATUS = "event.workflow.job.status"; public static final String QUERY_NUMBER = "query.number"; public static final String QUERY_NAME = "query.name"; public static final String QUERY_INPUT = "query.input"; public static final String QUERY_OUTPUT = "query.output"; public static final String QUERY_ID = "query.id"; public static final String QUERY_ARGUMENTS = "query.arguments"; public static final String QUERY_RESPONSE = "query.response"; public static final String FILE_OUTPUT_NAME = "file.output.name"; public static final String FILE_OUTPUT_CONTENTS = "file.output.contents"; public static final String FILE_PRIORTY = "file.priorty"; public static final String FILE_TYPE = "file.type"; public static final String TIME_START = "time.start"; public static final String TIME_END = "time.end"; public static final String SYSTEM_HOSTNAME = "system.hostname"; public static final String SYSTEM_HOSTADDR = "system.hostaddr"; public static final String SYSTEM_OS = "system.os"; public static final String SYSTEM_ARCHITECTURE = "system.architecture"; public static final String SYSTEM_NODENAME = "system.nodename"; public static final String SYSTEM_NUMBEROFPROCESSORS = "system.numberOfProcessors"; public static final String JOB_EXITTCODE = "job.exittcode"; public static final String JOB_ARGUMENTS = "job.arguments"; public static final String JOB_ENVIRONMENTVARIABLE = "job.environmentVariable"; public static final String JOB_RESOURCE_INFORMATION = "job.resource.information"; public static final String PERFMETRIC_CPU_UTILIZATION = "perfmetric.cpu.utilization"; public static final String PERFMETRIC_MEMORY_UTILIZATION = "perfmetric.memory.utilization"; public static final String PERFMETRIC_NETWORK_BANDWIDTH = "perfmetric.network.bandwidth"; public static final String PERFMETRIC_TIME_DURATION = "perfmetric.time.duration"; } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/LogFormatter.java0000644000175000017500000001054111757531137026022 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ package edu.isi.pegasus.common.logging; import java.util.Collection; import java.util.Map; /** * The interface that defines how the messages need to be formatted for logging * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2003 $ */ public interface LogFormatter { /** * Sets the program name for the software whose log statements are logged. * * @param name */ public void setProgramName( String name ); /** * Returns the program name for the software whose log statements are logged. * * @param name * * @return name of the program */ public String getProgramName( String name ); /** * Adds the event that is to be associated with the log messages onto an * internal stack * * @param name the name of the event to be associated * @param entityName the primary entity that is associated with the event e.g. workflow * @param entityID the id of that entity. */ public void addEvent( String name, String entityName, String entityID ); /** * Adds the event that is to be associated with the log messages onto an * internal stack * * @param name the name of the event to be associated * @param map Map indexed by entity name. The value is the corresponding * EntityID */ public void addEvent( String name, Map map ); /** * Pop the event on top of the internal stack. * * @return event on top, else null */ public Event popEvent(); /** * Returns the name of the event that is currently associated with the log messages * and is on the top of the stack * * @return name of the event. */ public String getEventName( ); /** * Creates the start message for the event on top of the internal stack * * @return start event message */ public String getStartEventMessage(); /** * Creates the end message for the event on top of the stack. * * @return end event message */ public String getEndEventMessage(); /** * Add to the log message with just a value. * * @param value * * @return self-reference */ public LogFormatter add( String value ); /** * Add to the log message. * * @param key * @param value * * @return Self-reference, so calls can be chained */ public LogFormatter add( String key, String value ); /** * Creates a log message with the contents of the internal log buffer. * * @return log message */ public String createLogMessage(); /** * Creates a log message with the contents of the internal log buffer. * It then resets the buffer before returning the log message * * @return the log message */ public String createLogMessageAndReset(); /** * Creates a log message that connects the parent entities with the * children. For example, it can be used to create the log messages connecting the * jobs with the workflow they are part of. * * @param parentType the type of parent entity * @param parentID the id of the parent entity * @param childIdType the type of children entities * @param childIDs Collection of children id's * * @return entity hierarchy message. */ public String createEntityHierarchyMessage( String parentType, String parentID, String childIdType, Collection childIDs ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/format/0000755000175000017500000000000011757531667024051 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/format/AbstractLogFormatter.java0000644000175000017500000001230011757531137030771 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License.
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.logging.format; import edu.isi.pegasus.common.logging.*; import java.util.Collection; import java.util.Stack; /** * The abstract formatter that implements all of the functions except * the addEvent function. * * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2003 $ */ public abstract class AbstractLogFormatter implements LogFormatter { /** * The default key to use for logging messages */ private static String DEFAULT_KEY = "msg"; /** * The name of the program. */ protected String mProgram; /** * The Stack of event objects maintained internally */ protected Stack mStack; /** * The default constructor. */ public AbstractLogFormatter(){ mStack = new Stack(); } /** * Sets the program name for the software whose log statements are logged. * * @param name */ public void setProgramName( String name ){ mProgram = name; } /** * Returns the program name for the software whose log statements are logged. * * @param name * * @return name of the program */ public String getProgramName( String name ){ return mProgram; } /** * Adds the event that is to be associated with the log messages onto an * internal stack * * @param name the name of the event to be associated * @param entityName the primary entity that is associated with the event e.g. workflow * @param entityID the id of that entity. */ public abstract void addEvent( String name, String entityName, String entityID ); /** * Pop the event on top of the internal stack. * * @return event on top, else null */ public Event popEvent(){ return mStack.pop(); } /** * Returns the name of the event that is currently associated with the log messages * and is on the top of the stack * * @return name of the event. */ public String getEventName( ){ return mStack.peek().getEventName(); } /** * Creates the start message for the event on top of the internal stack * * @return start event message */ public String getStartEventMessage(){ return mStack.peek().getStartEventMessage(); } /** * Creates the end message for the event on top of the stack. * * @return end event message */ public String getEndEventMessage(){ return mStack.peek().getEndEventMessage(); } /** * Add to the log message with just a value. * * @param value * * @return self-reference */ public LogFormatter add( String value ){ return this.add( AbstractLogFormatter.DEFAULT_KEY, value ); } /** * Add to the log message for the event on the top. * * @param key * @param value * * @return Self-reference, so calls can be chained */ public LogFormatter add( String key, String value ){ mStack.peek().add( key, value ); return this; } /** * Creates a log message with the contents of the internal log buffer. * * @return the log message */ public String createLogMessage(){ return mStack.peek().createLogMessage(); } /** * Creates a log message with the contents of the internal log buffer. * It then resets the buffer before returning the log message * * @return log message. */ public String createLogMessageAndReset(){ return mStack.peek().createLogMessageAndReset(); } /** * Creates a log message that connects the parent entities with the * children. For example, it
can be used to create the log messages connecting the * jobs with the workflow they are part of. * * @param parentType the type of parent entity * @param parentID the id of the parent entity * @param childIdType the type of children entities * @param childIDs Collection of children id's * * @return the entity hierarchy message */ public String createEntityHierarchyMessage( String parentType, String parentID, String childIdType, Collection childIDs ){ return mStack.peek().createEntityHierarchyMessage(parentType, parentID, childIdType, childIDs); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/format/Simple.java0000644000175000017500000000457311757531137026146 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.logging.format; import edu.isi.pegasus.common.logging.*; import java.util.Map; /** * This formatter formats the messages in the simple format. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2003 $ */ public class Simple extends AbstractLogFormatter { /** * The default constructor. */ public Simple(){ super(); } /** * Add to the log message with just a value. * * @param value * * @return self-reference */ public LogFormatter add( String value ){ return this.add( "", value ); } /** * Adds the event that is to be associated with the log messages onto an * internal stack * * @param name the name of the event to be associated * @param entityName the primary entity that is associated with the event e.g. workflow * @param entityID the id of that entity. */ public void addEvent( String name, String entityName, String entityID ){ Event e = new SimpleEvent(); e.setProgramName( mProgram ); e.setEvent( name, entityName, entityID ); mStack.addElement( e ); return; } /** * Adds the event that is to be associated with the log messages onto an * internal stack * * @param name the name of the event to be associated * @param map Map indexed by entity name. The value is the corresponding * EntityID */ public void addEvent( String name, Map map ){ //use a SimpleEvent; the original instantiated a NetloggerEvent here, which appears to be a copy-paste slip Event e = new SimpleEvent(); e.setProgramName( mProgram ); e.setEvent( name, map ); mStack.addElement( e ); return; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/format/Netlogger.java0000644000175000017500000000425411757531137026637 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License.
*/ package edu.isi.pegasus.common.logging.format; import edu.isi.pegasus.common.logging.*; import java.util.Map; /** * This formatter formats the messages in the netlogger format. * * @author Karan Vahi * @author Gaurang Mehta * * @version $Revision: 2003 $ */ public class Netlogger extends AbstractLogFormatter { /** * The default constructor. */ public Netlogger(){ super(); } /** * Adds the event that is to be associated with the log messages onto an * internal stack * * @param name the name of the event to be associated * @param entityName the primary entity that is associated with the event e.g. workflow * @param entityID the id of that entity. */ public void addEvent( String name, String entityName, String entityID ){ Event e = new NetloggerEvent(); e.setProgramName( mProgram ); e.setEvent( name, entityName, entityID ); mStack.addElement( e ); return; } /** * Adds the event that is to be associated with the log messages onto an * internal stack * * @param name the name of the event to be associated * @param map Map indexed by entity name. The value is the corresponding * EntityID */ public void addEvent( String name, Map map ){ Event e = new NetloggerEvent(); e.setProgramName( mProgram ); e.setEvent( name, map ); mStack.addElement( e ); return; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/format/NetloggerEvent.java0000644000175000017500000001235711757531137027644 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.logging.format; import edu.isi.pegasus.common.logging.*; import edu.isi.ikcap.workflows.util.logging.LogEvent; import edu.isi.ikcap.workflows.util.logging.EventLogMessage; import java.util.Collection; import java.util.List; import java.util.Map; /** * The netlogger event. * * @author Karan Vahi * @version $Revision: 2003 $ */ public class NetloggerEvent implements Event{ /** * The name of the program. */ private String mProgram; /** * The current event object. */ private LogEvent mLogEvent; /** * The current log event message. */ private EventLogMessage mMessage; /** * The default constructor. */ public NetloggerEvent(){ } /** * Sets the program name for the software whose log statements are logged. * * @param name */ public void setProgramName( String name ){ mProgram = name; } /** * Returns the program name for the software whose log statements are logged. * * @param name * * @return the name of the program */ public String getProgramName( String name ){ return mProgram; } /** * Set the event that is to be associated with the log messages. * * @param name the name of the event to be associated * @param entityName the primary entity that is associated with the event e.g. workflow * @param entityID the id of that entity.
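 *
 * <p>
 * For instance (a hypothetical call; the event name and the key come from
 * LoggingKeys in this package, the dax name is illustrative):
 * <pre>
 *    event.setEvent( LoggingKeys.EVENT_PEGASUS_PLAN, LoggingKeys.DAX_ID, "diamond.dax" );
 * </pre>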
*/ public void setEvent( String name, String entityName, String entityID ){ mLogEvent = new LogEvent( name, mProgram, entityName, entityID ); reset(); } /** * Sets the event that is to be associated with the log messages. * * @param name the name of the event to be associated * @param map Map of Entity Names with the entity identifiers. */ public void setEvent( String name, Map map ){ mLogEvent = new LogEvent( name, mProgram, map ); reset(); } /** * Returns the name of event that is currently associated with the log messages. * * @return the name of the event, else null */ public String getEventName( ){ return null; } /** * Creates the start message for the event. * * @return start event message */ public String getStartEventMessage(){ return mLogEvent.createStartLogMsg().toString(); } /** * Creates the end message for the event. * * @return end event message */ public String getEndEventMessage(){ return mLogEvent.createEndLogMsg().toString(); } /** * Reset the internal log message buffer associated with the event * */ public void reset(){ mMessage = mLogEvent.createLogMsg(); } /** * Add to the log message. * * @param key * @param value * * @return Self-reference, so calls can be chained */ public Event add( String key, String value ){ mMessage = mMessage.addWQ( key, value ); return this; } /** * Creates a log message with the contents of the internal log buffer. * * @return the log message */ public String createLogMessage(){ return mMessage.toString(); } /** * Creates a log message with the contents of the internal log buffer. * It then resets the buffer before returning the log message * * @return the log message */ public String createLogMessageAndReset(){ String result = mMessage.toString(); this.reset(); return result; } /** * Creates a log message that connects the parent entities with the * children. For example, it can be used to create the log messages connecting the * jobs with the workflow they are part of. * * @param parentType the type of parent entity * @param parentID the id of the parent entity * @param childIdType the type of children entities * @param childIDs Collection of children id's * * @return entity hierarchy message. */ public String createEntityHierarchyMessage( String parentType, String parentID, String childIdType, Collection childIDs ){ return LogEvent.createIdHierarchyLogMsg( parentType, parentID, childIdType, childIDs.iterator() ).toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/format/SimpleEvent.java0000644000175000017500000001516411757531137027146 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.logging.format; import edu.isi.pegasus.common.logging.*; import java.util.Collection; import java.util.List; import java.util.Map; import edu.isi.pegasus.common.util.Currently; /** * A Simple LogEvent implementation that is backed by a StringBuffer.
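 * Key value pairs handed to add( key, value ) are appended as space-separated
 * text, so (an illustrative call) add( "job.id", "jobGDA" ) contributes
 * "job.id jobGDA " to the message buffer.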
 * * * @author Karan Vahi * @version $Revision: 2079 $ */ public class SimpleEvent implements Event{ /** * This is used to format the time stamp. */ //private static final Currently DATE_FORMATTER = new Currently( "yyyy.MM.dd HH:mm:ss.SSS zzz: " ); /** * The name of the program. */ private String mProgram; /** * The buffer that stores information about the event */ private StringBuffer mEventBuffer; /** * The buffer that stores information about the log message */ private StringBuffer mLogBuffer; /** * The default constructor. */ public SimpleEvent(){ reset(); } /** * Sets the program name for the software whose log statements are logged. * * @param name */ public void setProgramName( String name ){ mProgram = name; } /** * Returns the program name for the software whose log statements are logged. * * @param name * * @return program name */ public String getProgramName( String name ){ return mProgram; } /** * Set the event that is to be associated with the log messages. * * @param name the name of the event to be associated * @param entityName the primary entity that is associated with the event e.g. workflow * @param entityID the id of that entity. */ public void setEvent( String name, String entityName, String entityID ){ mEventBuffer = new StringBuffer(); mEventBuffer.append( name ).append( " " ). append( entityName ).append( " " ). append( entityID ).append( " " ); } /** * Sets the event that is to be associated with the log messages. * * @param name the name of the event to be associated * @param map Map of Entity Names with the entity identifiers. */ public void setEvent( String name, Map map ){ mEventBuffer = new StringBuffer(); mEventBuffer.append( name ).append( " " ); for( String key : map.keySet() ){ mEventBuffer.append( key ).append( " " ). append( map.get(key) ).append( " " ); } } /** * Returns the name of event that is currently associated with the log messages. * * @return the name of the event, else null */ public String getEventName( ){ return null; } /** * Creates the start message for the event. * * @return start event message */ public String getStartEventMessage(){ StringBuffer message = new StringBuffer(); message./*append( DATE_FORMATTER.now() ).append( " " ).*/ append( mEventBuffer.toString() ). append( " - STARTED "); return message.toString(); } /** * Creates the end message for the event. * * @return end event message */ public String getEndEventMessage(){ StringBuffer message = new StringBuffer(); message./*append( DATE_FORMATTER.now() ).append( " " ).*/ append( mEventBuffer ). append( " - FINISHED "); return message.toString(); } /** * Reset the internal log message buffer associated with the event * */ public void reset(){ mEventBuffer = new StringBuffer(); mLogBuffer = new StringBuffer(); } /** * Add to the log message. * * @param key * @param value * * @return Self-reference, so calls can be chained */ public Event add( String key, String value ){ mLogBuffer.append( key ).append( " " ).append( value ).append( " " ); return this; } /** * Creates a log message with the contents of the internal log buffer. * * @return the log message */ public String createLogMessage(){ StringBuffer message = new StringBuffer(); return message./*append( DATE_FORMATTER.now() ).append( " " )*/append( mLogBuffer ).toString(); } /** * Creates a log message with the contents of the internal log buffer.
 * It then resets the buffer before returning the log message * * @return the log message */ public String createLogMessageAndReset(){ String result = this.createLogMessage(); mLogBuffer = new StringBuffer(); return result; } /** * Creates a log message that connects the parent entities with the * children. For example, it can be used to create the log messages connecting the * jobs with the workflow they are part of. * * @param parentType the type of parent entity * @param parentID the id of the parent entity * @param childIdType the type of children entities * @param childIDs Collection of children id's * @return entity hierarchy message. */ public String createEntityHierarchyMessage( String parentType, String parentID, String childIdType, Collection childIDs ){ StringBuffer result = new StringBuffer(); //result.append( DATE_FORMATTER.now() ).append( " " ); result.append( parentType ).append( "<" ).append( parentID ).append( ">" ). append( " -> ").append( childIdType ).append( "<" ); for( String child: childIDs ){ result.append( child ).append( "," ); } result.append( ">" ); return result.toString(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/logger/0000755000175000017500000000000011757531667024040 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/logger/Log4j.java0000644000175000017500000002513411757531137025657 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.logging.logger; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogFormatter; import java.util.Collection; import org.apache.log4j.Logger; import org.apache.log4j.Level; import org.apache.log4j.Appender; import org.apache.log4j.PropertyConfigurator; import org.apache.log4j.ConsoleAppender; import org.apache.log4j.PatternLayout; import java.util.Enumeration; import java.util.Properties; /** * A Log4j implementation of the LogManager interface. Using this allows us * to log messages using Log4j * * @author Karan Vahi * @version $Revision: 2567 $ */ public class Log4j extends LogManager{ /** * The property that specifies the path to the log4j properties file. */ private static final String LOG4J_CONF_PROPERTY = "log4j.conf"; //level constants that loosely match Log4J and are used //to generate the appropriate mask values. /** * The handle to a log4j logger object. */ private Logger mLogger; /** * Keeps track of log4j's root logger as singleton. */ private static Logger mRoot; /** * Initializes the root logger when this class is loaded.
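 * The initializer below forces a pattern layout on any console appender it
 * finds and sets the root level to INFO; a log4j properties file handed in
 * through the "log4j.conf" property (see initialize) can later override this
 * via PropertyConfigurator.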
*/ static { if ( (mRoot = Logger.getRootLogger()) != null ) { //get hold of all appenders and override the console appender for ( Enumeration e = mRoot.getAllAppenders(); e.hasMoreElements(); ){ Appender a = ( Appender )e.nextElement(); if( a instanceof ConsoleAppender ){ //set the layout of the console appender //this can be overridden by the log4j.properties file a.setLayout( new PatternLayout("%d{yyyy-MM-dd HH:mm:ss.SSS} %-5p [%c{1}] %m%n") ); } } mRoot.setLevel( Level.INFO ); mRoot.debug( "starting" ); } } /** * The properties passed at runtime */ private Properties mProperties; /** * The constructor. */ public Log4j(){ //configure properties through log4j.properties file mLogger = Logger.getLogger( "pegasus" ); } /** * Sets the log formatter to use for formatting the messages. * * @param formatter the formatter to use. * @param properties properties that the underlying implementations understand */ public void initialize( LogFormatter formatter, Properties properties ){ mLogFormatter = formatter; mProperties = properties; //set formatter to pegasus always for time being mLogFormatter.setProgramName( "pegasus" ); //specify the path to the log4j properties file if specified. String conf = properties.getProperty( Log4j.LOG4J_CONF_PROPERTY); if( conf != null ){ PropertyConfigurator.configure( conf ) ; } } /** * Log a message that connects the parent entities with the * children. For example, it can be used to create the log messages connecting the * jobs with the workflow they are part of. They are by default logged * to INFO level * * @param parentType the type of parent entity * @param parentID the id of the parent entity * @param childIDType the type of children entities * @param childIDs Collection of children id's * */ public void logEntityHierarchyMessage( String parentType, String parentID, String childIDType, Collection childIDs ){ this.logEntityHierarchyMessage( parentType, parentID, childIDType, childIDs, LogManager.INFO_MESSAGE_LEVEL ); } /** * Sets the debug level. All those messages are logged which have a * level less than or equal to the debug level. * * @param level the level to which the debug level needs to be set to. */ public void setLevel(Level level){ setLevel( level, true ); } /** * Sets the debug level. All those messages are logged which have a * level less than or equal to the debug level. In addition the info messages * are always logged. * * @param level the level to which the debug level needs to be set to. */ public void setLevel(int level){ setLevel( level, true ); } /** * Sets the debug level. All those messages are logged which have a * level less than or equal to the debug level. In case the boolean info * is set, all the info messages are also logged.
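 * <p>
 * For example (an illustrative call): setLevel( Level.DEBUG, true ) records
 * the integer debug level via Level.toInt() and hands Level.DEBUG straight to
 * the underlying log4j logger.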
* * @param level the level to which the debug level needs to be set to. * @param info boolean denoting whether the INFO messages need to be * logged or not. */ protected void setLevel(Level level, boolean info){ mDebugLevel = level.toInt(); mLogger.setLevel( level ); } /** * Returns the debug level. * * @return the level to which the debug level has been set to. */ public int getLevel(){ return mDebugLevel; } /** * Sets both the output writer and the error writer to the same * underlying writer. * * @param out is the name of a file to append to. Special names are * stdout and stderr, which map to the * system's respective streams. * */ public void setWriters(String out){ throw new UnsupportedOperationException( "Log4jLogger does not support setWriters(out)" ); } /** * Logs the exception on the appropriate queue if the level of the message * is less than or equal to the level set for the Logger. For INFO level * message, the boolean indicating that a completion message is to follow * is set to true always. * * @param message the message to be logged. * @param e the exception to be logged * @param level the level on which the message has to be logged. * * @see #setLevel(int) * @see #log(String,int) */ public void log( String message, Exception e, int level ){ switch( level ){ case LogManager.FATAL_MESSAGE_LEVEL: mLogger.fatal( message, e ); break; case LogManager.ERROR_MESSAGE_LEVEL: mLogger.error( message, e ); break; case LogManager.WARNING_MESSAGE_LEVEL: mLogger.warn( message, e ); break; case LogManager.CONFIG_MESSAGE_LEVEL: mLogger.info( message, e ); break; case LogManager.INFO_MESSAGE_LEVEL: mLogger.info( message, e ); break; case LogManager.DEBUG_MESSAGE_LEVEL: mLogger.debug( message, e ); break; } } /** * Logs the message on the appropriate queue if the level of the message * is less than or equal to the level set for the Logger. For INFO level * message, the boolean indicating that a completion message is to follow * is set to true always. * * @param message the message to be logged. * @param level the level on which the message has to be logged. * * @see #setLevel(int) */ protected void logAlreadyFormattedMessage(String message, int level){ switch( level ){ case LogManager.FATAL_MESSAGE_LEVEL: mLogger.fatal( message ); break; case LogManager.ERROR_MESSAGE_LEVEL: mLogger.error( message ); break; case LogManager.WARNING_MESSAGE_LEVEL: mLogger.warn( message ); break; case LogManager.CONFIG_MESSAGE_LEVEL: mLogger.info( message ); break; case LogManager.INFO_MESSAGE_LEVEL: mLogger.info( message ); break; case LogManager.DEBUG_MESSAGE_LEVEL: mLogger.debug( message ); break; } } /** * Logs the completion message on the basis of the debug level. * * @param level the debug level of the start message for whose completion * you want. */ public void logEventCompletion( int level ){ String message = mLogFormatter.getEndEventMessage(); logAlreadyFormattedMessage( message , level ); mLogFormatter.popEvent(); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/logger/Default.java0000644000175000017500000004522211757531137026264 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
 * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.logging.logger; import edu.isi.pegasus.common.logging.LogManager; import edu.isi.pegasus.common.logging.LogFormatter; import edu.isi.pegasus.common.util.Currently; import org.apache.log4j.Level; import java.io.OutputStream; import java.io.PrintWriter; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.PrintStream; import java.io.Writer; import java.util.Properties; /** * The logging class used to log messages at different levels. * Currently the following levels are supported.
 *
 * Eventually, each of the levels can have a different writer stream underneath.
 *
 * <p>
 * The messages can be logged at various levels. The various levels of logging
 * with increasing levels of verbosity are displayed in the following table.
 *
 * <p>
 * <table border="1">
 * <tr align="left"><th>Logging Level</th><th>Description</th></tr>
 * <tr align="left"><th>FATAL</th><td>all fatal error messages are logged in this level.</td></tr>
 * <tr align="left"><th>ERROR</th><td>all non fatal error messages are logged in this level.</td></tr>
 * <tr align="left"><th>WARNING</th><td>all warning messages are logged in this level.</td></tr>
 * <tr align="left"><th>INFO</th><td>all information logging messages are logged in this level.</td></tr>
 * <tr align="left"><th>CONFIG</th><td>all configuration messages are logged in this level.</td></tr>
 * <tr align="left"><th>DEBUG</th><td>all debug messages are logged in this level.</td></tr>
 * </table>
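 *
 * <p>
 * A minimal usage sketch (hypothetical; the message text is illustrative, the
 * factory and the level constants are the ones defined in this package):
 * <pre>
 *    LogManager logger = LogManagerFactory.loadSingletonInstance();
 *    logger.setLevel( LogManager.DEBUG_MESSAGE_LEVEL );
 *    logger.log( "planner starting", LogManager.DEBUG_MESSAGE_LEVEL );
 * </pre>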
* * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 4280 $ */ public class Default extends LogManager{ //level constants that loosely match Log4J and are used //to generate the appropriate mask values. /** * The type value to indicate a FATAL error message. */ private static final int FATAL_MESSAGE_TYPE = 0x1; /** * The type value to indicate an ERROR message. */ private static final int ERROR_MESSAGE_TYPE = 0x2; /** * The type value to indicate a CONSOLE message. */ private static final int CONSOLE_MESSAGE_TYPE = 0x4; /** * The type value to indicate a WARNING message. */ private static final int WARNING_MESSAGE_TYPE = 0x8; /** * The type value to indicate an INFORMATIVE message. */ private static final int INFO_MESSAGE_TYPE = 0x10; /** * The type value to indicate a CONFIG message. */ private static final int CONFIG_MESSAGE_TYPE = 0x20; /** * The type value to indicate a DEBUG message. */ private static final int DEBUG_MESSAGE_TYPE = 0x40; /** * The type value to indicate a DEBUG message. */ private static final int TRACE_MESSAGE_TYPE = 0x80; /** * Ensures only one object is created always. Implements the Singleton. */ private static Default logger; /** * The stream to which one writes. It is System.out by default for the * current release. One can set it using setOutputWriter. * * @see #setOutputWriter */ private PrintStream mOutStream; /** * The stream to which all the error messages are logged.By default it is * System.err */ private PrintStream mErrStream; /** * The mask that needs to be deployed to determine what messages are to be * logged. */ private int mMask; /** * This is used to format the time stamp. */ private static Currently mFormatter ; /** * The constructor. */ public Default(){ mDebugLevel = 0; mOutStream = new PrintStream(System.out,true); mErrStream = new PrintStream(System.err,true); Default.mFormatter = new Currently( "yyyy.MM.dd HH:mm:ss.SSS zzz: " ); //by default we are logging only CONSOLE //and all message less than WARN mMask = generateMask( WARNING_MESSAGE_LEVEL, false ); } /** * Sets the log formatter to use for formatting the messages. * * @param formatter the formatter to use. * @param properties properties that the underlying implementations understand */ public void initialize( LogFormatter formatter, Properties properties ){ mLogFormatter = formatter; } /** * Sets the debug level. All those messages are logged which have a * level less than equal to the debug level. * * @param level the level to which the debug level needs to be set to. */ public void setLevel(Level level){ int value = level.toInt(); switch(value){ case Level.DEBUG_INT: value = Default.DEBUG_MESSAGE_LEVEL; break; case Level.INFO_INT: value = Default.INFO_MESSAGE_LEVEL; break; case Level.WARN_INT: value = Default.WARNING_MESSAGE_LEVEL; break; case Level.ERROR_INT: value = Default.ERROR_MESSAGE_LEVEL; break; default: value = Default.FATAL_MESSAGE_LEVEL; break; } setLevel(value,false); } /** * Sets the debug level. All those messages are logged which have a * level less than equal to the debug level. * * @param level the level to which the debug level needs to be set to. */ public void setLevel(int level){ setLevel(level, false ); } /** * Sets the debug level. All those messages are logged which have a * level less than equal to the debug level. In case the boolean info * is set, all the info messages are also logged. * * @param level the level to which the debug level needs to be set to. * @param info boolean denoting whether the INFO messages need to be * logged or not. 
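 *
 * <p>
 * A worked example (derived from the *_MESSAGE_TYPE constants above): for
 * level 3 (WARNING) with info false, the mask is 0x1|0x2|0x4|0x8 = 0xF, so
 * FATAL, ERROR, CONSOLE and WARNING messages pass the filter.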
*/ protected void setLevel(int level, boolean info){ mDebugLevel = level; mMask = generateMask(level,info); } /** * Returns the debug level. * * @return the level to which the debug level has been set to. */ public int getLevel(){ return mDebugLevel; } /** * Sets both the output writer and the error writer to the same * underlying writer. * * @param out is the name of a file to append to. Special names are * stdout and stderr, which map to the * system's respective streams. * * @see #setWriters(OutputStream) */ public void setWriters(String out){ try{ // mOutStream = (PrintStream)getPrintStream(out); // mErrStream = mOutStream; PrintStream ps = (PrintStream)getPrintStream(out); System.setOut( ps ); System.setErr( ps ); mOutStream = System.out; mErrStream = System.err; } catch(IOException e){ //log on the existing streams !!! log("Unable to set streams for logging ",e, this.WARNING_MESSAGE_LEVEL); } } /** * Sets both the output writer and the error writer to the same * underlying writer. * * Note: The previous stream is not closed automatically. * * @param err the stream to which error messages are to be logged. */ /* public void setWriters(OutputStream err){ mOutStream = new PrintWriter( err, true ); mErrStream = mOutStream; } */ /** * Sets the writer associated with the class to the one specified for all * type of messages other than error messages. * * @param out is the name of a file to append to. Special names are * stdout and stderr, which map to the * system's respective streams. * * @see #setOutputWriter(OutputStream) */ public void setOutputWriter(String out){ try{ mOutStream = (PrintStream)getPrintStream(out); } catch(IOException e){ //log on the existing streams !!! log("Unable to set streams for logging ",e, this.WARNING_MESSAGE_LEVEL); } } /** * Sets the writer associated with the class to the one specified for all * type of messages other than error messages. * By default it is System.out. * * @param out the stream to which the messages are logged. * * @see #setErrorWriter(OutputStream) */ public void setOutputWriter(OutputStream out){ mOutStream = new PrintStream( out, true ); } /** * Certain levels like FATAL, ERROR and WARN can be set to log to a * different stream than the default stream used for writing other messages. * By default, these messages are logged to stderr. * Note: The previous stream is not closed automatically. * * @param out is the name of a file to append to. Special names are * stdout and stderr, which map to the * system's respective streams. * * @see #setErrorWriter(OutputStream) */ public void setErrorWriter(String out){ try{ mErrStream = (PrintStream)getPrintStream(out); } catch(IOException e){ //log on the existing streams !!! log("Unable to set streams for logging ",e, this.WARNING_MESSAGE_LEVEL); } } /** * Certain levels like FATAL, ERROR and WARN can be set to log to a * different stream than the default stream used for writing other messages. * By default, these messages are logged to stderr. * Note: The previous stream is not closed automatically. * * @param err the stream to which error messages are to be logged. */ public void setErrorWriter(OutputStream err){ mErrStream = new PrintStream( err, true ); } /** * Logs the exception on the appropriate queue if the level of the message * is less than or equal to the level set for the Logger. For INFO level * message, the boolean indicating that a completion message is to follow * is set to true always. * * @param message the message to be logged.
 * @param e the exception to be logged * @param level the level on which the message has to be logged. * * @see #setLevel(int) * @see #log(String,int) */ public void log(String message, Exception e,int level){ StringBuffer msg = new StringBuffer(); msg.append(message).append(" ").append( e.getClass() ).append( ": ").append(e.getMessage()); log(msg.toString(),level); } /** * Logs the message on the appropriate queue if the level of the message * is less than or equal to the level set for the Logger. For INFO level * message, the boolean indicating that a completion message is to follow * is set to true always. * * @param message the message to be logged. * @param level the level on which the message has to be logged. * * @see #setLevel(int) * @see #log(String,int,boolean) */ public void logAlreadyFormattedMessage(String message, int level){ log(message,level,(level == this.INFO_MESSAGE_LEVEL) ? true : false); } /** * Logs the message on the appropriate queue if the level of the message * is less than or equal to the level set for the Logger. * * @param message the message to be logged. * @param level the level on which the message has to be logged. * @param comp boolean indicating whether a completion message * follows or not. * * @see #setLevel(int) */ private void log(String message, int level, boolean comp){ int type = (int)Math.pow(2, level); if( (type & mMask) != 0x0 ){ //we need to log the message //get hold of the writer to be used to logging the message. PrintStream writer = getPrintStream(level); writer.print(Default.mFormatter.now()); String prefix = getPrefix(type); message = prefix + " " + message; /* *uncomment if we want completion message for INFO *on same line if(comp){ if((mMask & INFO_MESSAGE_TYPE) == INFO_MESSAGE_TYPE){ //we need to just print the message writer.print(message); } else{ //write out on a new line and //push the message to the stack writer.println(message); // mMsgStack.push(message); } } else{ writer.println(message); } */ writer.println(message); writer.flush(); } } /** * Gets the timestamp nicely formatted. It generates the date-timestamp * in extended ISO 8601 format. It generates the timestamp using * the local timezone, not UTC. An example of the date-timestamp * generated would be 2003-06-06T14:31:27-07:00 where -07:00 denotes the timezone * offset of the local timezone from UTC. * * @return the formatted timestamp; */ public String getTimeStamp(){ String st = Default.mFormatter.now(); st = Currently.iso8601(false); return st; } /** * Logs the completion message on the basis of the debug level. * * @param level the debug level of the start message for whose completion * you want. */ public void logEventCompletion( int level ){ String message = mLogFormatter.getEndEventMessage(); mLogFormatter.popEvent(); int type = (int)Math.pow(2, level); if( (type & mMask) != 0x0 ){ PrintStream writer = getPrintStream(level); /*uncomment if we want completion message for INFO on same line if ( (mMask & INFO_MESSAGE_TYPE) == INFO_MESSAGE_TYPE) { writer.println(" (completed)"); } else { writer.print(LogManager.mFormatter.now()); writer.println(message + " (completed)"); } */ String prefix = getPrefix(type); message = prefix + " " + message; writer.print(Default.mFormatter.now()); writer.println( message ); //writer.println(message + " (completed)"); } } /** * Generates the appropriate mask value, corresponding to the level * passed. * * @param level the level to which the debug level needs to be set to.
 * @param info boolean denoting whether the CONSOLE messages need to be * logged or not. * * @return mask corresponding to the debug level passed. */ private int generateMask(int level,boolean info){ //construct the appropriate mask int mask = 0x0; for(int i = 0; i <= level; i++){ mask |= (int)Math.pow(2,i); } if(info){ mask |= CONSOLE_MESSAGE_TYPE; } return mask; } /** * Returns the prefix that needs to be logged corresponding to a particular * message type, when a message is being logged. * Should be returning an enumerated data type. * * @param type the type for which prefix is required. * * @return the prefix corresponding to the message type */ private String getPrefix(int type){ String result = null; switch(type){ case FATAL_MESSAGE_TYPE: result = "[FATAL ERROR]"; break; case ERROR_MESSAGE_TYPE: result = "[ERROR]"; break; case CONSOLE_MESSAGE_TYPE: result = ""; break; case WARNING_MESSAGE_TYPE: result = "[WARNING]"; break; case INFO_MESSAGE_TYPE: result = "[INFO]"; break; case CONFIG_MESSAGE_TYPE: result = "[CONFIG]"; break; case DEBUG_MESSAGE_TYPE: result = "[DEBUG]"; break; case TRACE_MESSAGE_TYPE: result = "[TRACE]"; break; default: result = "[UNKNOWN]"; } return result; } /** * Sets an internal writer to point to a particular stream. * * @param out is the name of a file to append to. Special names are * stdout and stderr, which map to the * system's respective streams. * * @return the corresponding PrintStream. * * @throws IOException in case of being unable to open a stream. */ private PrintStream getPrintStream( String out ) throws IOException{ //check if value refers to any of the predefined streams OutputStream stream; if( out.equalsIgnoreCase("stdout")){ stream = System.out; } else if( out.equalsIgnoreCase("stderr")){ stream = System.err; } else{ //try to create an output stream to file specified File f = new File( out ); //do some sanity checks on file sanityCheckOnFile( f ); stream = new FileOutputStream( f); } return new PrintStream(stream); } /** * Returns a PrintWriter stream on which to log the message. Later on, this * function would return the appropriate LOG4J queue on which * the message needs to be logged. * * @param level the level * * @return PrintWriter for logging the message. */ private PrintStream getPrintStream(int level){ return ( (level >= FATAL_MESSAGE_LEVEL && level < CONSOLE_MESSAGE_LEVEL) || level == WARNING_MESSAGE_LEVEL )? mErrStream: mOutStream; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/TestLogFormatter.java0000644000175000017500000000541711757531137026670 0ustar ryngerynge/** * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package edu.isi.pegasus.common.logging; import java.util.ArrayList; import java.util.List; /** * Test program to test out LogFormatter API * * @author Karan Vahi * * @version $Revision: 2003 $ */ public class TestLogFormatter { /** * * @param fm the LOG formatter to use.
*/ public void writeTestLog( LogFormatter fm ){ fm.setProgramName( "Pegasus" ); String wf = "se18-gda.dax"; List jobs = new ArrayList(); jobs.add( "gda-job" ); jobs.add( "pattern-matcher-job" ); fm.addEvent( "event.pegasus.ranking", LoggingKeys.DAX_ID, wf ); System.out.println( fm.getStartEventMessage() ); System.out.println( fm.createEntityHierarchyMessage( LoggingKeys.DAX_ID, wf, LoggingKeys.JOB_ID, jobs) ); /* nesting events */ fm.addEvent( "event.pegasus.parsing.dax", LoggingKeys.DAX_ID, "se18-gda-nested.dax" ); System.out.println( fm.getStartEventMessage() ); System.out.println( fm.getEndEventMessage() ); fm.popEvent(); fm.add( LoggingKeys.JOB_ID, "jobGDA" ); System.out.println( fm.createLogMessage() ); fm.add ( "Message supplied without a key "); System.out.println( fm.createLogMessage() ); fm.add( LoggingKeys.QUERY_NAME, "get predicted performance" ).add( "time", "10.00" ); System.out.println( fm.createLogMessageAndReset() ); System.out.println( fm.getEndEventMessage() ); } /** * * @param args */ public static void main(String[] args) { TestLogFormatter test = new TestLogFormatter(); System.out.println("\n Logs in Netlogger Format"); test.writeTestLog( LogFormatterFactory.loadInstance( "Netlogger") ); System.out.println("\n Logs in Simple Format"); test.writeTestLog( LogFormatterFactory.loadInstance( "Simple") ); } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/LogManagerFactory.java0000644000175000017500000001376111757531137026770 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.common.logging; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.common.PegasusProperties; import java.util.Properties; /** * A factory class to load the appropriate implementation of Logger API * as specified by properties. * * @author Karan Vahi * @version $Revision: 2567 $ */ public class LogManagerFactory { /** * The default package where all the implementations reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.common.logging.logger"; /** * Holds a singleton instance that is populated via the loadSingletonInstance() * method. */ private static LogManager mSingletonInstance; /** * Loads the appropriate LogManager class as specified by properties. * * * @return handle to the Log Manager.
     *
     * @throws LogManagerFactoryException that nests any error that
     *         might occur during the instantiation
     *
     * @see #DEFAULT_PACKAGE_NAME
     */
    public static LogManager loadSingletonInstance( PegasusProperties properties )
        throws LogManagerFactoryException {
        return ( mSingletonInstance = ( mSingletonInstance == null )?
                 loadInstance( properties ):
                 mSingletonInstance );
    }

    /**
     * Loads the appropriate LogManager class as specified by properties.
     *
     * @param properties is an instance of properties to use.
     *
     * @return handle to the Log Manager.
     *
     * @throws LogManagerFactoryException that nests any error that
     *         might occur during the instantiation
     *
     * @see #DEFAULT_PACKAGE_NAME
     */
    public static LogManager loadInstance( PegasusProperties properties )
        throws LogManagerFactoryException {

        if( properties == null ){
            throw new LogManagerFactoryException( "Invalid NULL properties passed" );
        }

        /* get the implementor from properties */
        String logImplementor = properties.getLogManager();
        String formatImplementor = properties.getLogFormatter();
        Properties initialize = properties.matchingSubset( LogManager.PROPERTIES_PREFIX, false );

        // determine the class that implements the log manager
        return loadInstance( logImplementor, formatImplementor, initialize );
    }

    /**
     * Loads the Log Manager specified.
     *
     * @param implementor the name of the class implementing LogManager
     * @param formatImplementor the name of the class implementing the formatting technique
     * @param properties properties
     *
     * @return handle to the LogManager
     *
     * @throws LogManagerFactoryException that nests any error that
     *         might occur during the instantiation
     *
     * @see #DEFAULT_PACKAGE_NAME
     */
    public static LogManager loadInstance( String implementor, String formatImplementor, Properties properties )
        throws LogManagerFactoryException{

        //implementor = implementor == null ? "Default" : implementor;
        //formatImplementor = formatImplementor == null ? "Simple" : formatImplementor;

        LogManager result = null;
        try{
            if ( implementor == null ){
                throw new RuntimeException( "You need to specify the Logger implementor " );
            }

            /* prepend the package name if required */
            implementor = ( implementor.indexOf('.') == -1) ?
                          //pick up from the default package
                          DEFAULT_PACKAGE_NAME + "." + implementor :
                          //load directly
                          implementor;

            DynamicLoader dl = new DynamicLoader( implementor );
            result = ( LogManager ) dl.instantiate( new Object[0] );

            if ( result == null ){
                throw new RuntimeException( "Unable to load " + implementor );
            }

            /* load the log formatter and set it */
            result.initialize( LogFormatterFactory.loadInstance( formatImplementor ), properties );
        }
        catch( Exception e ){
            throw new LogManagerFactoryException( "Unable to instantiate Logger ", implementor, e );
        }

        /* store reference for singleton return */
        mSingletonInstance = result;

        return result;
    }
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/LogManagerFactoryException.java0000644000175000017500000000661311757531137030645 0ustar ryngerynge/*
 *
 * Copyright 2007-2008 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.common.logging; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Log Factory * implementations. * * @author Karan Vahi * @version $Revision: 2079 $ */ public class LogManagerFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Logger Factory"; /** * Constructs a LogManagerFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public LogManagerFactoryException( String msg ) { super( msg ); mClassname = LogManagerFactoryException.DEFAULT_NAME; } /** * Constructs a LogManagerFactoryException with the specified detailed * message. * * @param msg is the detailed message. * @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public LogManagerFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a LogManagerFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public LogManagerFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = LogManagerFactoryException.DEFAULT_NAME; } /** * Constructs a LogManagerFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public LogManagerFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/Event.java0000644000175000017500000000655211757531137024505 0ustar ryngerynge/* * To change this template, choose Tools | Templates * and open the template in the editor. */ package edu.isi.pegasus.common.logging; import java.util.Collection; import java.util.Map; /** * * @author vahi */ public interface Event extends Cloneable{ /** * Sets the program name for the software whose log statement are logged. * * @param name */ public void setProgramName( String name ); /** * Returns the program name for the software whose log statement are logged. * * @param name * * @return the name of the program */ public String getProgramName( String name ); /** * Set the event that is to be associated with the log messages. * * @param name the name of the event to be associated * @param entityName the primary entity that is associated with the event e.g. workflow * @param entityID the id of that entity. 
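     *
     * A usage sketch (the event name and identifiers are illustrative, echoing
     * the accompanying TestLogFormatter program):
     * <pre>
     * event.setEvent( "event.pegasus.ranking", LoggingKeys.DAX_ID, "se18-gda.dax" );
     * </pre>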
*/ public void setEvent( String name, String entityName, String entityID ); /** * Adds the event that is to be associated with the log messages onto an * internal stack * * @param name the name of the event to be associated * @param map Map of Entity Names with the entity identifiers. */ public void setEvent( String name, Map map ); /** * Returns the name of event that is currently associated with the log messages. * * @return the event to be associated */ public String getEventName( ); /** * Creates the start message for the event. * * @return start event message */ public String getStartEventMessage(); /** * Creates the end message for the event. * * @return end event message */ public String getEndEventMessage(); /** * Reset the internal log message buffer associated with the event * */ public void reset(); /** * Add to the log message. * * @param key * @param value * * @return Self-reference, so calls can be chained */ public Event add( String key, String value ); /** * Creates a log message with the contents of the internal log buffer. * * @return log message. */ public String createLogMessage(); /** * Creates a log message with the contents of the internal log buffer. * It then resets the buffer before returning the log message * * @return the log message */ public String createLogMessageAndReset(); /** * Creates a log message that connects the parent entities with the * children. For e.g. can we use to create the log messages connecting the * jobs with the workflow they are part of. * * @param parentType the type of parent entity * @param parentID the id of the parent entity * @param childIdType the type of children entities * @param childIDs Collection of children id's * * @return the entity hierarchy message. */ public String createEntityHierarchyMessage( String parentType, String parentID, String childIdType, Collection childIDs ); } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/LogFormatterFactoryException.java0000644000175000017500000000664211757531137031240 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.common.logging; import edu.isi.pegasus.common.util.FactoryException; /** * Class to notify of failures while instantiating Log Formatter * implementations. * * @author Karan Vahi * @version $Revision: 2079 $ */ public class LogFormatterFactoryException extends FactoryException { /** * The default classname that is associated with the exception. */ public static final String DEFAULT_NAME = "Log Formatter"; /** * Constructs a LogFormatterFactoryException with no detail * message. The associated classname is set to value specified by * DEFAULT_NAME. * * @param msg the detailed message. * * @see #DEFAULT_NAME */ public LogFormatterFactoryException( String msg ) { super( msg ); mClassname = LogFormatterFactoryException.DEFAULT_NAME; } /** * Constructs a LogFormatterFactoryException with the specified detailed * message. * * @param msg is the detailed message. 
* @param classname the name of class that was trying to be instantiated or * some other signifier like module name. */ public LogFormatterFactoryException(String msg, String classname) { super( msg , classname ); } /** * Constructs a LogFormatterFactoryException with the * specified detailed message and a cause. The associated classname is set * to value specified by DEFAULT_NAME. * * @param msg is the detailed message that is to be logged. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. * * @see #DEFAULT_NAME */ public LogFormatterFactoryException(String msg, Throwable cause) { super(msg, cause); mClassname = LogFormatterFactoryException.DEFAULT_NAME; } /** * Constructs a LogFormatterFactoryException with the * specified detailed message and a cause. * * @param msg is the detailed message that is to be logged. * @param classname the name of class that was trying to be instantiated. * @param cause is the cause (which is saved for later retrieval by the * {@link java.lang.Throwable#getCause()} method). A null * value is permitted, and indicates that the cause is nonexistent or * unknown. */ public LogFormatterFactoryException( String msg, String classname, Throwable cause ) { super( msg, cause ); mClassname = classname; } } pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/LogFormatterFactory.java0000644000175000017500000001137411757531137027357 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.isi.pegasus.common.logging; import edu.isi.pegasus.common.util.DynamicLoader; import edu.isi.pegasus.planner.common.PegasusProperties; /** * A factory class to load the appropriate implementation of LogFormatter * as specified by properties. * * @author Karan Vahi * @version $Revision: 2567 $ */ public class LogFormatterFactory { /** * The default package where all the implementations reside. */ public static final String DEFAULT_PACKAGE_NAME = "edu.isi.pegasus.common.logging.format"; /** * Holds a singleton instance that is populated via the loadSingletonInstance() * method. */ private static LogFormatter mSingletonInstance; /** * Loads the appropriate LogFormatter class as specified by properties. * * @param implementor the name of the class implementing LogFormatter * * * @return handle to the Log Formatter. * * @throws LogFormatterFactoryException that nests any error that * might occur during the instantiation * * @see #DEFAULT_PACKAGE_NAME */ public static LogFormatter loadSingletonInstance( String implementor ) throws LogFormatterFactoryException { return ( mSingletonInstance = ( mSingletonInstance == null )? loadInstance( implementor ): mSingletonInstance ); } /** * Loads the appropriate LogFormatter class as specified by properties. * * @param properties is an instance of properties to use. * * @return handle to the Log Formatter. 
     *
     * @throws LogFormatterFactoryException that nests any error that
     *         might occur during the instantiation
     *
     * @see #DEFAULT_PACKAGE_NAME
     */
    public static LogFormatter loadInstance( PegasusProperties properties )
        throws LogFormatterFactoryException {

        if( properties == null ){
            throw new LogFormatterFactoryException( "Invalid NULL properties passed" );
        }

        /* get the implementor from properties */
        String formatImplementor = properties.getLogFormatter().trim();

        /* prepend the package name if required */
        formatImplementor = ( formatImplementor.indexOf('.') == -1) ?
                            //pick up from the default package
                            DEFAULT_PACKAGE_NAME + "." + formatImplementor :
                            //load directly
                            formatImplementor;

        // determine the class that implements the log formatter
        return loadInstance( formatImplementor );
    }

    /**
     * Loads the Log Formatter specified.
     *
     * @param implementor the name of the class implementing LogFormatter
     *
     * @return handle to the Log Formatter.
     *
     * @throws LogFormatterFactoryException that nests any error that
     *         might occur during the instantiation
     *
     * @see #DEFAULT_PACKAGE_NAME
     */
    public static LogFormatter loadInstance( String implementor )
        throws LogFormatterFactoryException{

        LogFormatter result = null;
        try{
            if ( implementor == null ){
                throw new RuntimeException( "You need to specify the implementor " );
            }

            /* prepend the package name if required */
            implementor = ( implementor.indexOf('.') == -1) ?
                          //pick up from the default package
                          DEFAULT_PACKAGE_NAME + "." + implementor :
                          //load directly
                          implementor;

            DynamicLoader dl = new DynamicLoader( implementor );
            result = ( LogFormatter ) dl.instantiate( new Object[0] );

            if ( result == null ){
                throw new RuntimeException( "Unable to load " + implementor );
            }
        }
        catch( Exception e ){
            throw new LogFormatterFactoryException( "Unable to instantiate Log Formatter ", implementor, e );
        }
        return result;
    }
}
pegasus-wms_4.0.1+dfsg/src/edu/isi/pegasus/common/logging/LogManager.java0000644000175000017500000004310511757531137025433 0ustar ryngerynge/**
 * Copyright 2007-2008 University Of Southern California
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package edu.isi.pegasus.common.logging;

import edu.isi.pegasus.planner.common.PegasusProperties;

import org.apache.log4j.Level;

import java.io.File;
import java.io.IOException;
import java.util.Collection;
import java.util.Map;
import java.util.Properties;

/**
 * The logging class used to log messages at different levels.
 * Currently the following levels are supported.
 * <p>
 * Eventually, each of the levels can have a different writer stream underneath.
 *
 * <p>
 * The messages can be logged at various levels. The various levels of logging
 * with increasing levels of verbosity are displayed in the following table.
 *
 * <p>
 * <table border="1">
 * <tr><th>Logging Level</th><th>Description</th></tr>
 * <tr><td>FATAL</td><td>all fatal error messages are logged in this level.</td></tr>
 * <tr><td>ERROR</td><td>all non fatal error messages are logged in this level.</td></tr>
 * <tr><td>WARNING</td><td>all warning messages are logged in this level.</td></tr>
 * <tr><td>INFO</td><td>all information logging messages are logged in this level.</td></tr>
 * <tr><td>CONFIG</td><td>all configuration messages are logged in this level.</td></tr>
 * <tr><td>DEBUG</td><td>all debug messages are logged in this level.</td></tr>
 * </table>
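 *
 * <p>
 * A minimal usage sketch against this API (the message text and chosen level
 * are illustrative):
 * <pre>
 * LogManager logger = LogManagerFactory.loadSingletonInstance();
 * logger.setLevel( LogManager.DEBUG_MESSAGE_LEVEL );
 * logger.log( "parsing the dax", LogManager.DEBUG_MESSAGE_LEVEL );
 * </pre>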
* * @author Karan Vahi * @author Gaurang Mehta * @version $Revision: 2818 $ */ public abstract class LogManager { /** * The version of the Logging API */ public static final String VERSION = "2.1"; /** * Prefix for the property subset to use with the LogManager */ public static final String PROPERTIES_PREFIX = "pegasus.log.manager"; /** * Suffx for an event completion message. */ public static final String MESSAGE_DONE_PREFIX = " -DONE"; //level constants that loosely match Log4J and are used //to generate the appropriate mask values. /** * The level value, to indicate a FATAL error message. */ public static final int FATAL_MESSAGE_LEVEL = 0; /** * The level value, to indicate an ERROR message. */ public static final int ERROR_MESSAGE_LEVEL = 1; /** * The level value, to indicate a CONSOLE error message. */ public static final int CONSOLE_MESSAGE_LEVEL = 2; /** * The level value, to indicate a WARNING error message. */ public static final int WARNING_MESSAGE_LEVEL = 3; /** * The level value, to indicate a INFO message. */ public static final int INFO_MESSAGE_LEVEL = 4; /** * The level value, to indicate a CONFIG message. */ public static final int CONFIG_MESSAGE_LEVEL = 5; /** * The level value, to indicate a DEBUG message. */ public static final int DEBUG_MESSAGE_LEVEL = 6; /** * The level value, to indicate a DEBUG message. */ public static final int TRACE_MESSAGE_LEVEL = 7; /** * Ensures only one object is created always. Implements the Singleton. */ private static LogManager mLogger; /** * The default Logger */ public static final String DEFAULT_LOGGER = "Default"; /** * The Log4j logger. */ public static final String LOG4J_LOGGER = "Log4j"; /** * The debug level. Higher the level the more the detail is logged. At present * can be 0 or 1. This is set according to the option given by the user, whether * verbose or not. */ protected int mDebugLevel; /** * The LogFormatter to use to format the message. */ protected LogFormatter mLogFormatter; /** * The constructor. */ public LogManager(){ mDebugLevel = 0; } /** * To get a reference to the the object. * * @param logger the logger to use for logging * @param formatter the log formatter to use for formatting messages * * @return a singleton access to the object. */ public static LogManager getInstance( String logger, String formatter ){ if(mLogger == null){ mLogger = LogManagerFactory.loadSingletonInstance( PegasusProperties.nonSingletonInstance() ); /*if( logger == null || logger.equals( DEFAULT_LOGGER ) ){ mLogger = new Default(); } else if( logger.equals( LOG4J_LOGGER )){ mLogger = new Log4j(); } else{ throw new RuntimeException( "Unknown Logger Implementation Specified" + logger ); } */ } return mLogger; } /** * Sets the log formatter to use for formatting the messages. * * @param formatter the formatter to use. * @param properties properties that the underlying implementations understand */ public abstract void initialize( LogFormatter formatter, Properties properties ); /** * Checks the destination location for existence, if it can * be created, if it is writable etc. * * @param file is the file to write out to. * * @throws IOException in case of error while writing out files. 
*/ protected static void sanityCheckOnFile( File file ) throws IOException{ if (file.exists()) { // location exists if (file.isFile()) { // ok, is a file if (file.canWrite()) { // can write, all is well return; } else { // all is there, but I cannot write to file throw new IOException("Cannot write to existing file " + file.getAbsolutePath()); } } else { // exists but not a file throw new IOException("File " + file.getAbsolutePath() + " already " + "exists, but is not a file."); } } else { // check to see if you can write to the parent directory //could have tried to do just a make dir on parent directory. sanityCheckOnDirectory( file.getParentFile()); } } /** * Checks the destination location for existence, if it can * be created, if it is writable etc. * * @param dir is the new base directory to optionally create. * * @throws IOException in case of error while writing out files. */ protected static void sanityCheckOnDirectory( File dir ) throws IOException{ if ( dir.exists() ) { // location exists if ( dir.isDirectory() ) { // ok, isa directory if ( dir.canWrite() ) { // can write, all is well return; } else { // all is there, but I cannot write to dir throw new IOException( "Cannot write to existing directory " + dir.getPath() ); } } else { // exists but not a directory throw new IOException( "Destination " + dir.getPath() + " already " + "exists, but is not a directory." ); } } else { // does not exist, try to make it if ( ! dir.mkdirs() ) { throw new IOException( "Unable to create directory destination " + dir.getPath() ); } } } /** * Sets the debug level. All those messages are logged which have a * level less than equal to the debug level. * * @param level the level to which the debug level needs to be set to. */ public void setLevel(Level level){ int value = level.toInt(); switch(value){ case Level.DEBUG_INT: value = LogManager.DEBUG_MESSAGE_LEVEL; break; case Level.INFO_INT: value = LogManager.INFO_MESSAGE_LEVEL; break; case Level.WARN_INT: value = LogManager.WARNING_MESSAGE_LEVEL; break; case Level.ERROR_INT: value = LogManager.ERROR_MESSAGE_LEVEL; break; default: value = LogManager.FATAL_MESSAGE_LEVEL; break; } setLevel(value,false); } /** * Sets the debug level. All those messages are logged which have a * level less than equal to the debug level. In addition the console messages * are always logged. * * @param level the level to which the debug level needs to be set to. */ public void setLevel(int level){ setLevel(level,true); } /** * Sets the debug level. All those messages are logged which have a * level less than equal to the debug level. In case the boolean info * is set, all the info messages are also logged. * * @param level the level to which the debug level needs to be set to. * @param info boolean denoting whether the CONSOLE messages need to be * logged or not. */ protected abstract void setLevel(int level, boolean info); /** * Returns the debug level. * * @return the level to which the debug level has been set to. */ public abstract int getLevel(); /** * Sets both the output writer and the error writer to the same * underlying writer. * * @param out is the name of a file to append to. Special names are * stdout and stderr, which map to the * system's respective streams. * */ public abstract void setWriters( String out ); /** * Log the message represented by the internal log buffer. * The log buffer is populated via the add methods. * * @param level the level on which the message has to be logged. 
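     *
     * A short sketch of the buffer-then-log pattern this method supports (the
     * key/value pairs are illustrative):
     * <pre>
     * logger.add( "job", "jobGDA" ).add( "site", "local" );
     * logger.log( LogManager.INFO_MESSAGE_LEVEL );
     * </pre>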
*/ public void log( int level ){ this.log( mLogFormatter.createLogMessage(), level ); } /** * Creates a log message with the contents of the internal log buffer. * The log buffer is populated via the add methods. * It then resets the buffer before logging the log message * * * @param level the level on which the message has to be logged. */ public void logAndReset( int level ){ this.logAlreadyFormattedMessage( mLogFormatter.createLogMessageAndReset(), level ); } /** * Logs the exception on the appropriate queue if the level of the message * is less than or equal to the level set for the Logger. For INFO level * message, the boolean indicating that a completion message is to follow * is set to true always. * * @param message the message to be logged. * @param e the exception to be logged * @param level the level on which the message has to be logged. * * @see #setLevel(int) * @see #log(String,int) */ public abstract void log(String message, Exception e,int level); /** * A stop gap function . * * @param message already formatted message * @param level the level on which to log. */ protected abstract void logAlreadyFormattedMessage( String message, int level ); /** * Logs the message on the appropriate queue if the level of the message * is less than or equal to the level set for the Logger. For INFO level * message, the boolean indicating that a completion message is to follow * is set to true always. * * @param message the message to be logged. * @param level the level on which the message has to be logged. * * @see #setLevel(int) */ public void log ( String message, int level){ mLogFormatter.add( message ); this.logAlreadyFormattedMessage( mLogFormatter.createLogMessageAndReset(), level); } /** * Log an event start message to INFO level * * @param name the name of the event to be associated * @param entityName the primary entity that is associated with the event e.g. workflow * @param entityID the id of that entity. */ public void logEventStart( String name, String entityName, String entityID ){ logEventStart( name, entityName, entityID , LogManager.INFO_MESSAGE_LEVEL ); } /** * Log an event start message. * * @param name the name of the event to be associated * @param entityName the primary entity that is associated with the event e.g. workflow * @param entityID the id of that entity. * @param level the level at which event needs to be logged. */ public void logEventStart( String name, String entityName, String entityID , int level ){ mLogFormatter.addEvent( name, entityName, entityID ); this.logAlreadyFormattedMessage( mLogFormatter.getStartEventMessage() , level ); } /** * Log an event start message to the INFO Level * * @param name the name of the event to be associated * @param map Map indexed by entity name . The values is corresponding * EntityID * */ public void logEventStart( String name, Map map ){ this.logEventStart( name, map, LogManager.INFO_MESSAGE_LEVEL ); } /** * Log an event start message. * * @param name the name of the event to be associated * @param map Map indexed by entity name . The values is corresponding * EntityID * @param level the level to log to */ public void logEventStart( String name, Map map , int level ){ mLogFormatter.addEvent( name, map ); this.logAlreadyFormattedMessage( mLogFormatter.getStartEventMessage() , level ); } /** * Logs the completion message on the basis of the debug level. 
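     *
     * Typically paired with an earlier logEventStart() call (the event name
     * and identifiers are illustrative):
     * <pre>
     * logger.logEventStart( "event.pegasus.parsing.dax", LoggingKeys.DAX_ID, "se18-gda.dax" );
     * // ... work associated with the event ...
     * logger.logEventCompletion();
     * </pre>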
* * */ public void logEventCompletion( ){ //this.log( LogManager.INFO_MESSAGE_LEVEL ); this.logEventCompletion( LogManager.INFO_MESSAGE_LEVEL ); } /** * Logs the completion message on the basis of the debug level. * * @param level the debug level of the start message for whose completion * you want. */ public abstract void logEventCompletion( int level ); /** * Log a message that connects the parent entities with the * children. For e.g. can we use to create the log messages connecting the * jobs with the workflow they are part of. * * @param parentType the type of parent entity * @param parentID the id of the parent entity * @param childIDType the type of children entities * @param childIDs Collection of children id's * */ public void logEntityHierarchyMessage( String parentType, String parentID, String childIDType, Collection childIDs ){ this.logEntityHierarchyMessage( parentType, parentID, childIDType, childIDs, LogManager.DEBUG_MESSAGE_LEVEL ); } /** * Log a message that connects the parent entities with the * children. For e.g. can we use to create the log messages connecting the * jobs with the workflow they are part of. * * @param parentType the type of parent entity * @param parentID the id of the parent entity * @param childIDType the type of children entities * @param childIDs Collection of children id's * @param level the logging level. * */ public void logEntityHierarchyMessage( String parentType, String parentID, String childIDType, Collection childIDs, int level ) { this.logAlreadyFormattedMessage( mLogFormatter.createEntityHierarchyMessage(parentType, parentID, childIDType, childIDs), level ); } /** * Add to the internal log buffer message a value with the default key. * The buffer is logged later when the log() method is called. * * @param value * * @return self-reference */ public LogManager add( String value ){ return add( "msg", value ); } /** * Add to the internal log buffer message a value with the key oassed * The buffer is logged later when the log() method is called. * * * @param key * @param value * * @return Self-reference, so calls can be chained */ public LogManager add( String key, String value ){ mLogFormatter.add( key, value ); return this; } } pegasus-wms_4.0.1+dfsg/src/edu/clemson/0000755000175000017500000000000011757531667017050 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/edu/clemson/SiteWriterMain.java0000644000175000017500000001331211757531137022611 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ package edu.clemson; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; /** * * @author Vikas Patel vikas@vikaspatel.org */ public class SiteWriterMain { private String command="condor_status -any -pool engage-central.renci.org " + "-format %s GlueSiteName " + "-format ; 1 "+ // to force a semicolon, even if the attribute was not found "-format %s OSGMM_Globus_Location "+ "-format ; 1 "+ "-format %s GlueCEInfoContactString "+ "-format ; 1 "+ "-format %s GlueClusterTmpDir "+ "-format ; 1 "+ "-format %s GlueCEInfoHostName "+ "-format ; 1 "+ "-format %s GlueCEInfoApplicationDir "+ "-format ; 1 "+ "-format %s GlueCEInfoDataDir "+ "-format ; 1 "+ "-format %s GlueClusterTmpDir "+ "-format ; 1 "+ "-format %s GlueClusterWNTmpDir "+ "-format ;\\n 1 "; private File file=null; ArrayList outputArray=new ArrayList(); ArrayList errorArray=new ArrayList(); /** * * @param args */ public static void main(String[] args) { SiteWriterMain siteWriter=new SiteWriterMain(); if(args.length>0) { if(args[0].startsWith("--")) siteWriter.argumentHandler(args[0]); else siteWriter.getSitesInfo(args[0]); } else siteWriter.getSitesInfo("sites.xml"); } private void getSitesInfo(String sitesFileName) { printTitle(); try { Runtime runtime=Runtime.getRuntime(); System.out.println( "Command to be run is \n" + command ); Process process=runtime.exec(command); StreamGobbler outputGobbler=new StreamGobbler(process.getInputStream(),this.outputArray); StreamGobbler errorGobbler=new StreamGobbler(process.getErrorStream(), this.errorArray); outputGobbler.start(); errorGobbler.start(); outputGobbler.join(); errorGobbler.join(); int result=process.waitFor(); } catch(IOException e) { System.out.println("\n\nERROR OCCURED:"); System.out.println(" condor_status not found \n This program requires Condor (http://www.cs.wisc.edu/condor/)\n"); return; } catch(Exception e) { System.out.println("\n\nERROR OCCURED:"); System.out.println("Exiting program: Please run the program again."); return; } if(sitesFileName==null || sitesFileName.equals("")) sitesFileName="sites.xml"; FileWriter fileWriter; try { file=new File(sitesFileName); if(file.exists()) { int counter=0; String tmpFileName=sitesFileName+".bkp."+counter; File tmpFile; while((tmpFile=new File(tmpFileName+counter)).exists()) { counter++; } System.out.println("Warning: The file "+sitesFileName+" already exists"); System.out.println("Renaming the existing file to "+tmpFileName+counter); file.renameTo(tmpFile); } fileWriter= new FileWriter(file); } catch(IllegalArgumentException e) { System.out.println("Invalid argument, enter a valid path/name"); return; } catch(IOException e) { System.out.println("Invalid argument, enter a valid path/name"); System.out.println("Exiting program, please try again ...."); return; } System.out.println("Discovering sites from RENCI, writing to site catalog\n"); try { new SiteCatalogGenerator(outputArray,fileWriter).generateSiteCatalog( "engage" ); } catch (Exception e) { System.out.println("\nERROR OCCURED: Please try again !!\n"); System.out.println("Please ensure that Condor (http://www.cs.wisc.edu/condor/) is correctly installed and condor_status is not firewalled."); System.out.println("If the problem persists please contact vikasp@clemson.edu."); System.out.println("Note: the site catalog (ex. 
sites.xml) maybe incomplete, if you already had a site catalog file it was backed up for you."); return; } System.out.println("\nFinished..."); } private void argumentHandler(String arg) { printTitle(); if(arg.equals("--help")) { System.out.println("SYNTAX:"); System.out.println("\n SiteWriter "); } else { System.out.println("Invalid argument"); System.out.println("SYNTAX:"); System.out.println("\n SiteWriter "); } } private void printTitle() { System.out.println("..................................................."); System.out.println("\tSiteWriter, CIRG"); System.out.println("....................................................\n"); } }pegasus-wms_4.0.1+dfsg/src/edu/clemson/SiteCatalogGenerator.java0000644000175000017500000004037211757531137023757 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package edu.clemson; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Map; import java.util.HashMap; import java.util.ArrayList; import java.util.Calendar; import java.util.HashSet; import java.util.List; import java.util.Set; import java.util.regex.Pattern; import java.util.regex.Matcher; /** * * @author Vikas Patel vikas@vikaspatel.org */ public class SiteCatalogGenerator { private ArrayList outputArray; private FileWriter fileWriter; private int siteCount = 0; /** * The sites that need to be parsed */ private Set mSites; /** * Maps OSG site handles to a numeric index */ private Map mSiteNameToIndex; /** * Overloaded Constructor. * * @param outputArray * @param fileWriter */ public SiteCatalogGenerator(ArrayList outputArray, FileWriter fileWriter) { this.outputArray = outputArray; this.fileWriter = fileWriter; this.mSiteNameToIndex = new HashMap(); } /** * Overloaded Constructor for passing the list of sites to load. * * The site handle * is a special handle designating all sites are to be * loaded. * * @param outputArray * */ public SiteCatalogGenerator( ArrayList outputArray){ this.outputArray = outputArray; this.mSiteNameToIndex = new HashMap(); } /** * * Loads the information about the sites that are passed from the condor-status * output. * * The site handle * is a special handle designating all sites are to be * loaded. * * @param sites the sites to be parsed. * @param vo the VO to which the user belongs to. * * * @return List of SiteCatalogEntry objects containing the site information for loaded * sites. 
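     *
     * A minimal call sketch (the site list and VO name are illustrative; the
     * special handle "*" requests all sites):
     * <pre>
     * SiteCatalogGenerator generator = new SiteCatalogGenerator( outputArray );
     * List sites = new ArrayList();
     * sites.add( "*" );
     * List result = generator.loadSites( sites, "engage" );
     * </pre>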
*/ public List loadSites( List sites, String vo ) { List result = new ArrayList(); this.mSites = new HashSet( sites ); boolean all = mSites.contains( "*" ); for (int i = 0; !outputArray.isEmpty() && i < outputArray.size(); i++) { String line = outputArray.get(i); if (!(line == null || line.equals(""))) { Site site = parseSiteInfo(line, vo); if( all || mSites.contains( site.siteName ) ){ result.add( site ); } } } return result; } private Site parseSiteInfo(String line, String vo) { Site site = new Site(); try { String[] siteInfoArray = new String[10]; // split the line into individual fields siteInfoArray = line.split(";"); // do we have a valid site name? if (siteInfoArray[0] == null || siteInfoArray[0].equals("")) { return site; } // Site names (e.g. CIT_CMS_T2) are not unique in OSG as a site can have // multiple gatekeepers. We make them unique by appending the unique CE name. //site.siteName = siteInfoArray[0] + "_" + siteInfoArray[1]; site.siteName = getSiteName( siteInfoArray[0] ); String globusLocation = null; if (siteInfoArray[2] != null && !siteInfoArray[2].equals("") && siteInfoArray[2].lastIndexOf("/") != -1 && !siteInfoArray[2].equals("UNAVAILABLE")) { globusLocation = siteInfoArray[2]; } else { //System.out.println(site.siteName+": Error fetching globusLocation from ... Ignoring site"); return site; } String hostname = siteInfoArray[3]; Pattern pattern = Pattern.compile("[:\\/]+.*"); Matcher matcher = pattern.matcher(hostname); hostname = matcher.replaceAll(""); if (globusLocation != null) { site.globusLocation = globusLocation; site.globusLib = site.globusLocation + "/lib"; int i = globusLocation.lastIndexOf("/"); String temp = globusLocation.substring(i); if (temp.equals("")) /* Incase '/' is the last character of the string */ { i = i - 1; } //Commented by Karan //System.out.println(site.siteName); String osgLocation = globusLocation.substring(0, i); site.pegasusHome = osgLocation + "/pegasus"; site.gridlaunch = site.pegasusHome + "/bin/kickstart"; } if (siteInfoArray[3] != null && !siteInfoArray[3].equals("")) { String jobmanager = siteInfoArray[3]; site.VanillaUniverseJobManager = jobmanager; site.transferUniverseJobManager = jobmanager.substring(0, jobmanager.indexOf("/jobmanager")) + "/jobmanager-fork"; } if (siteInfoArray[5] != null && !siteInfoArray[5].equals("")) { site.gridFtpUrl = "gsiftp" + "://" + siteInfoArray[5]; } if ((siteInfoArray[6] != null && !siteInfoArray[6].equals("")) && !siteInfoArray[6].equals("UNAVAILABLE")) { site.app = siteInfoArray[6] + "/" + vo; } if ((siteInfoArray[7] != null && !siteInfoArray[7].equals("")) && !siteInfoArray[7].equals("UNAVAILABLE")) { // $OSG_DATA/{vo_name}/tmp/{hostname} // Respects the shared nature (across VO, and across VO members) of OSG_DATA // by using a VO specific temporary work directory. Also, for sites with // multiple gatekeepers, keep separate work directories for each gatekeeper. site.data = siteInfoArray[7] + "/" + vo + "/tmp/" + hostname; } if ((siteInfoArray[8] != null && !siteInfoArray[8].equals("")) && !siteInfoArray[8].equals("UNAVAILABLE")) { // is this the same as site.data? 
site.tmp = siteInfoArray[8] + "/" + vo + "/tmp/" + hostname; } if ((siteInfoArray[9] != null && !siteInfoArray[9].equals("")) && !siteInfoArray[9].equals("UNAVAILABLE")) { site.wntmp = siteInfoArray[9]; } // work directory and gridFtpStorage is under OSG_DATA if (site.data != null && !("").equals(site.data)) { site.workingDirectory = site.data; } else { site.workingDirectory = "/tmp"; } site.gridFtpStorage = site.workingDirectory; } catch (ArrayIndexOutOfBoundsException e) { e.printStackTrace(); } return site; } /** * * @param vo * @throws IOException */ public void generateSiteCatalog(String vo) throws IOException { //fileWriter= new FileWriter(file); addHeaderInformationToSiteCatalog(); int outputArraySize = outputArray.size(); for (int i = 0; !outputArray.isEmpty() && i < outputArraySize; i++) { String line = outputArray.get(i); if (!(line == null || line.equals(""))) { Site site = parseSiteInfo(line, vo); addSiteToCatalog(site); } } addSiteToCatalog(addLocalSiteInformation()); endSiteCatalog(fileWriter); } private void addSiteToCatalog(Site site) throws IOException { if (site.globusLocation == null || site.VanillaUniverseJobManager == null) { return; } String comment = ""; String siteBody; siteBody = "\n\n" + " " + site.pegasusHome + "\n" + " " + site.globusLocation + "\n" + " " + site.globusLib + "\n"; if (site.app != null) { siteBody += " " + site.app + "\n"; } if (site.data != null) { siteBody += " " + site.data + "\n"; } if (site.tmp != null) { siteBody += " " + site.tmp + "\n"; } if (site.wntmp != null) { siteBody += " " + site.wntmp + "\n"; } siteBody += " \n" + " \n" + " \n" + " \n" + " " + site.workingDirectory + "\n" + "\n\n"; fileWriter.write(comment + siteBody); fileWriter.flush(); } private void addHeaderInformationToSiteCatalog() throws IOException { String header = "\n" + "" + "\n\n"; fileWriter.write(header); fileWriter.flush(); } private void endSiteCatalog(FileWriter fileWriter) throws IOException { fileWriter.write(""); fileWriter.flush(); } Site addLocalSiteInformation() { Site site = new Site(); site.siteName = "local"; String globusLocation = null; String home = null; try { globusLocation = System.getenv("GLOBUS_LOCATION"); //System.getenv :Undeprecated after jdk 1.4 home = System.getenv("HOME"); } catch (Exception e) { System.out.println("\nWARNING: \nError occured getting 'local' site information, please make sure GLOBUS_LOCATION and HOME " + "environment variables are set"); System.out.println("Skipping local site"); } site.globusLocation = globusLocation; site.globusLib = globusLocation + "/lib"; if (globusLocation == null) { System.out.println("\nWARNING: \nError occured getting 'local' site information, please make sure GLOBUS_LOCATION and HOME " + "environment variables are set"); System.out.println("Skipping local site"); } else { int i = globusLocation.lastIndexOf("/"); String temp = globusLocation.substring(i); if (globusLocation.equals("")) /* Incase '/' is the last character of the string */ { i = i - 1; temp = globusLocation.substring(i); } String osgLocation = globusLocation.substring(0, i); site.pegasusHome = osgLocation + "/pegasus"; site.gridlaunch = site.pegasusHome + "/bin/kickstart"; } if (home != null) { File file = new File(home + File.separator + "vdldemo"); if (!file.exists()) { file.mkdir(); } site.workingDirectory = home + File.separator + "vdldemo"; site.gridFtpStorage = site.workingDirectory; } String hostname = null; try { hostname = java.net.InetAddress.getLocalHost().getHostName(); } catch (java.net.UnknownHostException e) { 
System.out.println("\nWARNING:\nError retrieving local site information..."); System.out.println("Skipping local site..."); } if (hostname != null) { String jobmanager = hostname + "/jobmanager-condor"; site.VanillaUniverseJobManager = jobmanager; site.transferUniverseJobManager = jobmanager.substring(0, jobmanager.indexOf("/jobmanager")) + "/jobmanager-fork"; site.gridFtpUrl = "gsiftp://" + jobmanager.substring(0, jobmanager.indexOf("/jobmanager")); } return site; } /** * Returns the site handle for the site, on the basis of the OSG site handle * retrieved from Ress. OSG Site names (e.g. CIT_CMS_T2) are not unique in OSG * as a site can have multiple gatekeepers. We make them unique by appending * a numeric suffix if in a site already exists in the site catalog. * * @param osgSiteName the osg site name * * @return the site name to use for OSG */ private String getSiteName( String osgSiteName ) { StringBuffer name = new StringBuffer(); //always append the osg site name name.append( osgSiteName ); //do we need to add a suffix. if( mSiteNameToIndex.containsKey( osgSiteName ) ){ //append the suffix and update the index int index = mSiteNameToIndex.get( osgSiteName ); name.append( "__" ).append( ++index ); mSiteNameToIndex.put( osgSiteName, new Integer( index ) ); } else{ //the first entry for the site mSiteNameToIndex.put( osgSiteName, new Integer( 0 ) ); } return name.toString(); } /** * */ public class Site { /** * */ public String siteName; /** * */ public String sysinfo = "INTEL32::LINUX"; //default value /** * */ public String globusLocation; public String globusLib; /** * */ public String gridlaunch; /** * */ public String pegasusHome; /** * */ public String lrcUrl = "rlsn://dummyValue.url.edu"; /** * */ public String gridFtpUrl; /** * */ public String gridFtpStorage; /** * */ public String transferUniverseJobManager; /** * */ public String VanillaUniverseJobManager; /** * */ public String workingDirectory; /** * */ public String app; /** * */ public String data; /** * */ public String tmp; /** * */ public String wntmp; } } pegasus-wms_4.0.1+dfsg/src/edu/clemson/StreamGobbler.java0000644000175000017500000000313711757531137022437 0ustar ryngerynge/* * * Copyright 2007-2008 University Of Southern California * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
* */ package edu.clemson; import java.io.InputStream; import java.io.BufferedReader; import java.io.InputStreamReader; import java.util.ArrayList; /** * * @author Vikas Patel vikas@vikaspatel.org */ public class StreamGobbler extends Thread { InputStream inputStream; SiteWriterMain siteWriterMain; ArrayList outputArrayList; StreamGobbler(InputStream inputStream, ArrayList outputArrayList) { this.inputStream=inputStream; this.outputArrayList=outputArrayList; } @Override public void run() { try{ InputStreamReader inputStreamReader=new InputStreamReader(inputStream); BufferedReader bufferedReader= new BufferedReader(inputStreamReader); String line=bufferedReader.readLine(); while(line!=null) { // System.out.println(line); outputArrayList.add(line); line=bufferedReader.readLine(); } } catch(Exception e) { e.printStackTrace(); } } } pegasus-wms_4.0.1+dfsg/src/jarsrc/0000755000175000017500000000000011757531667016117 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/README0000644000175000017500000000211211757531137015477 0ustar ryngeryngeTHE SOURCE TREE =============== 1. files -------- GTPL The new Globus Toolkit Public License, which applies to *ALL* our source code in these branches -- unless stated otherwise. Some source files may still cite the (old) LICENSE file. The GTPL supercedes the cited LICENSE file. README This file. ShowProperties A small helper to show specific or all System properties. The program is used by test0 test, and may be invoked simple as "java ShowProperties" if you CLASSPATH is set correctly. VersionTask is a bridge class between Ant and the PEGASUS. It provides the PEGASUS version number inside Ant. 2. directories -------------- CVS Don't touch. jarsrc directory with sources (as available) for the 3rd party jars we use. Many GPL and GLPL programs require us to provide the sources. org PEGASUS sources tools C and C++ program that help the PEGASUS working. These programs currently include: keg, kickstart, k.2, T2, condor-log-parser and the various free programs. 3. ignore for now ================= exp some internal test code, do not even look at it pegasus-wms_4.0.1+dfsg/src/org/0000755000175000017500000000000011757531667015422 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/0000755000175000017500000000000011757531667017102 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/0000755000175000017500000000000011757531667017667 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/0000755000175000017500000000000011757531667021645 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/Display.java0000644000175000017500000001100511757531137024102 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ package org.griphyn.vdl.directive; import java.io.*; import java.util.*; import org.griphyn.vdl.dax.*; import org.griphyn.vdl.parser.DAXParser; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.util.DAX2DOT; /** * The class converts a DAX specification into other formats * for visualization purposes. * * @see org.griphyn.vdl.parser.VDLxParser * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ public class Display extends Directive { /** * instance variable that is managed by this interface for web * services. */ private DAX2DOT m_dax; /** * Constructor. * @throws IOException * @throws MissingResourceException */ public Display() throws IOException, MissingResourceException { super(); m_dax = new DAX2DOT(); } /** * Sets the graph size. * * @param h is the height in inches? * @param w is the width in inches? */ public void setSize(double h, double w) { m_dax.setSize(h, w); } /** * Determines whether to show derivations. * @param showDV if true, also show the DVs */ public void setShowDV( boolean showDV ) { m_dax.setShowDV(showDV); } /** * Generates GraphViz dot format from the DAX specification. * @param dax is the InputStream for the DAX * @param showFiles specifies whether to show input/output files in * the graph. * @return a string of the GraphViz dot representation * @throws IOException if there is a problem reading or writing */ public String DAX2DOT( InputStream dax, boolean showFiles ) throws IOException { // parse the dax file Logging.instance().log( "display", 0, "Initializing dax parser"); DAXParser daxparser = new DAXParser(m_props.getDAXSchemaLocation()); Logging.instance().log( "display", 0, "parsing the dax..."); ADAG adag = daxparser.parse(dax); if (adag == null) { Logging.instance().log( "display", 0, "failed parsing the dax."); return null; } return m_dax.toDOT(adag, showFiles); } /** * Generates GraphViz dot format from the DAX specification. * @param dax is the InputStream for the DAX * @param writer is the target to output GraphViz dot representation * @param showFiles specifies whether to show input/output files in * the graph. * @throws IOException if there is a problem reading or writing */ public void DAX2DOT( InputStream dax, Writer writer, boolean showFiles ) throws IOException { // parse the dax file Logging.instance().log( "display", 0, "Initializing dax parser"); DAXParser daxparser = new DAXParser(m_props.getDAXSchemaLocation()); Logging.instance().log( "display", 0, "parsing the dax..."); ADAG adag = daxparser.parse(dax); if (adag == null) { Logging.instance().log( "display", 0, "failed parsing the dax."); return; } m_dax.toDOT(adag, writer, showFiles); } /** * Generates GraphViz dot format from the DAX specification. * @param dax is the InputStream for the DAX * @param writer is the target to output GraphViz dot representation * @param showFiles specifies whether to show input/output files in * the graph. 
* @param jobURL is the base URL for jobs * @param fileURL is the base URL for files * @throws IOException if there is a problem reading or writing */ public void DAX2DOT( InputStream dax, Writer writer, boolean showFiles, String jobURL, String fileURL ) throws IOException { // parse the dax file Logging.instance().log( "display", 0, "Initializing dax parser"); DAXParser daxparser = new DAXParser(m_props.getDAXSchemaLocation()); Logging.instance().log( "display", 0, "parsing the dax..."); ADAG adag = daxparser.parse(dax); if (adag == null) { Logging.instance().log( "display", 0, "failed parsing the dax."); return; } m_dax.toDOT(adag, writer, showFiles, jobURL, fileURL); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/ParseKickstart.java0000644000175000017500000004244611757531137025444 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.directive; import edu.isi.pegasus.planner.invocation.InvocationRecord; import edu.isi.pegasus.planner.invocation.JobStatus; import edu.isi.pegasus.planner.invocation.JobStatusSuspend; import edu.isi.pegasus.planner.invocation.JobStatusRegular; import edu.isi.pegasus.planner.invocation.Architecture; import edu.isi.pegasus.planner.invocation.JobStatusSignal; import edu.isi.pegasus.planner.invocation.Job; import edu.isi.pegasus.planner.invocation.Status; import edu.isi.pegasus.planner.invocation.JobStatusFailure; import java.io.*; import java.sql.SQLException; import java.util.Iterator; import java.util.List; import java.util.Date; import java.util.ArrayList; import java.util.MissingResourceException; import edu.isi.pegasus.common.util.Version; import edu.isi.pegasus.common.util.Currently; import edu.isi.pegasus.planner.parser.InvocationParser; import org.griphyn.vdl.dbschema.*; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.toolkit.FriendlyNudge; import org.griphyn.vdl.util.ChimeraProperties; /** * Main objective of this class is to extract the exit status from * the invocation record returned by kickstart. The expected usage * is another Java class passing a filename, and obtaining the * cooked exit status for the parse. All other details, like removing * non-XML header and tailers, de-concatenation, are handled internally.

 * <p>
 * Usage of the class is divided into typically three steps. The first
 * step is to obtain an instance of the parser, and configure it
 * to fit your needs.
 *
 * <pre>
 * ParseKickstart pks = new ParseKickstart();
 * ... // set flags
 * pks.setDatabaseSchema( ptcschema );
 * </pre>
 *
 * The next step can be executed multiple times, parsing one or more
 * kickstart output files.
 *
 * <pre>
 * List result = null;
 * try {
 *   result = pks.parseFile( file );
 * } catch ( FriendlyNudge fn ) {
 *   // handle failures
 * }
 * </pre>
 *
 * Once you are definitely done, it is recommended to dissociate yourself
 * from the active database connection.
 *
 * <pre>
 * pks.close();
 * pks = null;
 * </pre>
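 *
 * Putting the three steps together (the file name is illustrative, and
 * exception handling is elided for brevity):
 * <pre>
 * ParseKickstart pks = new ParseKickstart( dbschema );
 * List exitcodes = pks.parseFile( "kickstart.out" );
 * pks.close();
 * pks = null;
 * </pre>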
* * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ * * @see org.griphyn.vdl.toolkit.ExitCode * @see org.griphyn.vdl.parser.InvocationParser */ public class ParseKickstart extends Directive { /** * Determines, if an empty output record constitutes a failure or success. * In old Globus 2.0, empty output frequently occurred. With the NFS * bug alleviation, while not fixed, it occurs a lot less frequently. */ private boolean m_emptyFail = true; /** * Determines, if the invocation records go back into the VDC or not. */ private boolean m_noDBase = false; /** * Determines, if the invocation records, when incurring a database failure, * will fail the application or not. */ private boolean m_ignoreDBFail = false; /** * The database schema driver used to connect to the PTC. */ private DatabaseSchema m_dbschema = null; /** * Semi-singleton, dynamically instantiated once for the lifetime. * The properties determine which Xerces parser is being used. */ private InvocationParser m_ip = null; /** * Attaches a workflow label (tag) to all workflows passing thru. */ private String m_wf_label = null; /** * Attaches a workflow mtime to all workflows passing thru. */ private Date m_wf_mtime = null; /** * Default c'tor. */ public ParseKickstart() throws IOException, MissingResourceException { super(); } /** * C'tor which permits the setting of a PTC connection. * * @param dbschema is the database schema to use for the PTC. */ public ParseKickstart( DatabaseSchema dbschema ) throws IOException, MissingResourceException { super(); if ( (m_dbschema = dbschema) == null ) m_noDBase = true; } /** * C'tor which permits the setting of a PTC connection. * * @param dbschema is the database schema to use for the PTC. * @param emptyFail determines, if empty input files are error or OK. */ public ParseKickstart( DatabaseSchema dbschema, boolean emptyFail ) throws IOException, MissingResourceException { super(); if ( (m_dbschema = dbschema) == null ) m_noDBase = true; m_emptyFail = emptyFail; } /** * Sets the database schema. * * @param dbschema is a database schema instance for the PTC. */ public void setDatabaseSchema( DatabaseSchema dbschema ) { m_dbschema = dbschema; } /** * Closes the associated database backend and invalidates the schema. */ public void close() throws SQLException { if ( m_dbschema != null ) m_dbschema.close(); m_dbschema = null; m_ip = null; } /** * Obtains the fail-on-empty-file value. * * @return true, if to fail on empty files. * @see #setEmptyFail( boolean ) */ public boolean getEmptyFail() { return m_emptyFail; } /** * Sets the fail-on-empty-file value. * * @param emptyFail contains the new value, if to fail on empty files. * @see #getEmptyFail() */ public void setEmptyFail( boolean emptyFail ) { m_emptyFail = emptyFail; } /** * Gets the variable to permit connections to the PTC, or * use parse-only mode. * * @return true, if the PTC is intended to be used, false for * parse-only mode. * @see #setNoDBase(boolean) */ public boolean getNoDBase() { return this.m_noDBase; } /** * Sets the parse-only versus PTC mode. * * @param noDBase is true to use the parse-only mode. * @see #getNoDBase() */ public void setNoDBase( boolean noDBase ) { this.m_noDBase = noDBase; } /** * Obtains a dont-fail-on-database-errors mode. * * @return true, if database failures are not fatal. * @see #setIgnoreDBFail(boolean) */ public boolean getIgnoreDBFail() { return this.m_ignoreDBFail; } /** * Sets the dont-fail-on-dbase-errors mode. * * @param ignore is true to render database error non-fatal. 
* @see #getIgnoreDBFail() */ public void setIgnoreDBFail( boolean ignore ) { this.m_ignoreDBFail = ignore; } /** * Obtains the current value of the workflow label to use. * * @return current workflow label to use, may be null. * @see #setWorkflowLabel(String) */ public String getWorkflowLabel() { return this.m_wf_label; } /** * Sets the workflow label. * * @param label is the (new) workflow label. * @see #getWorkflowLabel() */ public void setWorkflowLabel( String label ) { this.m_wf_label = label; } /** * Obtains the current value of the workflow modification time to use. * * @return current workflow mtime, may be null. * @see #setWorkflowTimestamp(Date) */ public Date getWorkflowTimestamp() { return this.m_wf_mtime; } /** * Sets the workflow modification time to record. * * @param mtime is the (new) workflow mtime. * @see #getWorkflowTimestamp() */ public void setWorkflowTimestamp( Date mtime ) { this.m_wf_mtime = mtime; } /** * Determines the exit code of an invocation record. Currently, * we will determine the exit code from all jobs until failure * or no more jobs. However, set-up and clean-up jobs are ignored. * * @param ivr is the invocation record to put into the database * @return the status code as exit code to signal failure etc. *
*   <pre>
*   0   regular exit with exit code 0
*   1   regular exit with exit code > 0
*   2   failure to run program from kickstart
*   3   application had died on signal
*   4   application was suspended (should not happen)
*   5   failure in exit code parsing
*   6   impossible case
*   </pre>
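*
*   <p>Sketch of a hypothetical caller (the variable names are
*   illustrative, not part of this file):</p>
*   <pre>
*     int status = pks.determineExitStatus( ivr );
*     if ( status != 0 )
*       System.err.println( "invocation failed, status " + status );
*   </pre>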
*/
  public int determineExitStatus( InvocationRecord ivr )
  {
    boolean seen = false;
    for ( Iterator i=ivr.iterateJob(); i.hasNext(); ) {
      Job job = (Job) i.next();

      // set-up/clean-up jobs don't count in failure modes
      if ( job.getTag().equals("cleanup") ) continue;
      if ( job.getTag().equals("setup") ) continue;

      // obtains status from job
      Status status = job.getStatus();
      if ( status == null ) return 6;

      JobStatus js = status.getJobStatus();
      if ( js == null ) {
        // should not happen
        return 6;
      } else if ( js instanceof JobStatusRegular ) {
        // regular exit code - success or failure?
        int exitcode = ((JobStatusRegular) js).getExitCode();
        if ( exitcode != 0 ) return 1;
        else seen = true;
        // continue, if exitcode of 0 to implement chaining !!!!
      } else if ( js instanceof JobStatusFailure ) {
        // kickstart failure
        return 2;
      } else if ( js instanceof JobStatusSignal ) {
        // died on signal
        return 3;
      } else if ( js instanceof JobStatusSuspend ) {
        // suspended???
        return 4;
      } else {
        // impossible/unknown case
        return 6;
      }
    }

    // success, or no [matching] jobs
    return seen ? 0 : 5;
  }

  /**
   * Extracts records from the given input file. Since there may be
   * more than one record per file, especially in the case of MPI,
   * multiple results are possible, though traditionally only one
   * will be used.
   *
   * @param input is the name of the file that contains the records
   * @return a list of strings, each representing one invocation record.
   * The result should not be empty (exception will be thrown).
   * @throws FriendlyNudge, if the input format was invalid.
   * The caller has to assume failure to parse the record provided.
   */
  public List extractToMemory( java.io.File input )
    throws FriendlyNudge
  {
    List result = new ArrayList();
    StringWriter out = null;
    Logging log = getLogger();

    // open the files
    int p1, p2, state = 0;
    try {
      BufferedReader in = new BufferedReader( new FileReader(input) );
      out = new StringWriter(4096);
      String line = null;
      while ( (line = in.readLine()) != null ) {
        if ( (state & 1) == 0 ) {
          // try to copy the XML line in any case
          if ( (p1 = line.indexOf( "<?xml" )) > -1 )
            if ( (p2 = line.indexOf( "?>", p1 )) > -1 ) {
              // note: the third argument to write() is a length, not an end offset
              out.write( line, p1, p2 + 2 - p1 );
              log.log( "parser", 2, "state=" + state + ", seen <?xml ... ?>" );
            }
          // start state with the correct root element
          if ( (p1 = line.indexOf( "<invocation" )) > -1 ) {
            if ( p1 > 0 ) line = line.substring( p1 );
            log.log( "parser", 2, "state=" + state + ", seen <invocation>" );
            ++state;
          }
        }
        if ( (state & 1) == 1 ) {
          out.write( line );
          if ( (p1 = line.indexOf( "</invocation>" )) > -1 ) {
            log.log( "parser", 2, "state=" + state + ", seen </invocation>" );
            ++state;
            out.flush();
            out.close();
            result.add( out.toString() );
            out = new StringWriter(4096);
          }
        }
      }
      in.close();
      out.close();
    } catch ( IOException ioe ) {
      throw new FriendlyNudge( "While copying " + input.getPath() +
          " into temp. file: " + ioe.getMessage(), 5 );
    }

    // some sanity checks
    if ( state == 0 )
      throw new FriendlyNudge( "File " + input.getPath() +
          " does not contain invocation records," +
          " assuming failure", 5 );
    if ( (state & 1) == 1 )
      throw new FriendlyNudge( "File " + input.getPath() +
          " contains an incomplete invocation record," +
          " assuming failure", 5 );

    // done
    return result;
  }

  /**
   * Parses the contents of a kickstart output file, and returns a
   * list of exit codes obtained from the records.
   *
   * @param arg0 is the name of the file to read
   * @return a list with one or more exit codes, one for each record.
   * @throws FriendlyNudge, if parsing of the file goes haywire.
   * @throws IOException if something happens while reading properties
   * to instantiate the XML parser.
* @throws SQLException if accessing the database fails. */ public List parseFile( String arg0 ) throws FriendlyNudge, IOException, SQLException { List result = new ArrayList(); Logging me = getLogger(); me.log( "kickstart", 2, "working with file " + arg0 ); // get access to the invocation parser if ( m_ip == null ) { ChimeraProperties props = ChimeraProperties.instance(); String psl = props.getPTCSchemaLocation(); me.log( "kickstart", 2, "using XML schema location " + psl ); m_ip = new InvocationParser( psl ); } // check input file java.io.File check = new java.io.File(arg0); // test 1: file exists if ( ! check.exists() ) { me.log( "kickstart", 2, "file does not exist, fail with 5" ); throw new FriendlyNudge( "file does not exist " + arg0 + ", assuming failure", 5 ); } // test 2: file is readable if ( ! check.canRead() ) { me.log( "kickstart", 2, "file not readable, fail with 5" ); throw new FriendlyNudge( "unable to read file " + arg0 + ", assuming failure", 5 ); } // test 3: file has nonzero size // FIXME: Actually need to check the record size me.log( "kickstart", 2, "file has size " + check.length() ); if ( check.length() == 0 ) { // deal with 0-byte file if ( getEmptyFail() ) { me.log( "kickstart", 2, "zero size file, fail with 5" ); throw new FriendlyNudge( "file has zero length " + arg0 + ", assuming failure", 5 ); } else { me.log( "kickstart", 2, "zero size file, succeed with 0" ); me.log( "app", 1, "file has zero length " + arg0 + ", assuming success" ); result.add( new Integer(0) ); return result; } } // test 4: extract XML into tmp file me.log( "kickstart", 2, "about to extract content into memory" ); List extract = extractToMemory(check); me.log( "kickstart", 2, extract.size() + " records extracted" ); // testme: for each record obtained, work on it Architecture cachedUname = null; for ( int j=1; j-1 < extract.size(); ++j ) { String temp = (String) extract.get(j-1); me.log( "kickstart", 2, "content[" + j + "] extracted, length " + temp.length() ); // test 5: try to parse XML me.log( "app", 2, "starting to parse invocation" ); me.log( "kickstart", 2, "about to parse invocation record" ); InvocationRecord invocation = m_ip.parse( new StringReader(temp) ); me.log( "kickstart", 2, "done parsing invocation" ); if ( invocation == null ) { me.log( "kickstart", 2, "result record " + j + " is invalid (null), fail with 5" ); throw new FriendlyNudge( "invalid XML invocation record " + j + " in " + arg0 + ", assuming failure", 5 ); } else { me.log( "kickstart", 2, "result record " + j + " appears valid" ); me.log( "app", 1, "invocation " + j + " was parsed successfully" ); } // NEW: attached workflow tag and mtime if ( m_wf_label != null ) invocation.setWorkflowLabel( m_wf_label ); if ( m_wf_mtime != null ) invocation.setWorkflowTimestamp( m_wf_mtime ); // Fix for Pegasus Bug 39 // the machine information tag is created only once for a cluster // the -H flag disables the generation of machine information Architecture uname = invocation.getArchitecture(); if( uname == null ){ //attempt to update with cachedUname invocation.setArchitecture( cachedUname ); } else{ cachedUname = uname; } // insert into database -- iff it is available if ( ! 
m_noDBase && m_dbschema != null && m_dbschema instanceof PTC ) { PTC ptc = (PTC) m_dbschema; try { // FIXME: (start,host,pid) may not be a sufficient secondary key me.log( "kickstart", 2,"about to obtain secondary key triple" ); if ( ptc.getInvocationID( invocation.getStart(), invocation.getHostAddress(), invocation.getPID() ) == -1 ) { me.log( "kickstart", 2, "new invocation, adding" ); me.log( "app", 1, "adding invocation to database" ); // may throw SQLException ptc.saveInvocation( invocation ); } else { me.log( "kickstart", 2, "existing invocation, skipping" ); me.log( "app", 1, "invocation already exists, skipping!" ); } } catch ( SQLException sql ) { if ( m_ignoreDBFail ) { // if dbase errors are non-fatal, just log what is going on. for ( int n=0; sql != null; ++n ) { me.log( "default", 0, "While inserting PTR [" + j + "]:" + n + ": " + sql.getMessage() + ", ignoring" ); sql = sql.getNextException(); } } else { // rethrow, if dbase errors are fatal (default) throw sql; } } // catch } // if use dbase // determine result code int status = 0; me.log( "kickstart", 2, "about to determine exit status" ); status = determineExitStatus( invocation ); me.log( "kickstart", 2, "exit status is " + status ); result.add( new Integer(status) ); } // for // done return result; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/WorkflowJob.java0000644000175000017500000000615511757531137024754 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ package org.griphyn.vdl.directive; import java.io.*; import edu.isi.pegasus.common.util.CommonProperties; import org.griphyn.vdl.workflow.*; /** * * A WorkflowJob object defines the state of a job within a Workflow * * @author Mike Wilde * @author Eric Gilbert * @version $Revision: 2585 $ * * @see org.griphyn.vdl.toolkit.VDLc */ public class WorkflowJob { /* Class Variables */ public static final int MAXJOBS = 100000; public static WorkflowJob[] jobs = new WorkflowJob[MAXJOBS]; public static int njobs; public static String wfjobsCmd = "/home/dscheftn/vds/bin/vds-WorkflowJob-script-wfjobs"; /* Instance Variables */ /* db fields select * from wf_jobstate; wfid | jobid | state | mtime | site ------+----------+---------------------+------------------------+------------- 2 | ID000001 | POST_SCRIPT_SUCCESS | 2005-08-21 15:55:10-05 | terminable 4 | ID000001 | PRE_SCRIPT_FAILURE | 2005-08-24 15:51:11-05 | 5 | ID000001 | POST_SCRIPT_SUCCESS | 2005-08-24 16:23:43-05 | terminable 6 | ID000001 | JOB_RELEASED | 2005-08-24 17:26:59-05 | */ public String wfid; public String jobid; public String state; public String mtime; public String site; /* Class Methods */ public static boolean refresh() { Process p; int rc; Reader is; StringBuffer sb = new StringBuffer(); char [] b = new char[100000]; int n; /* Run status command to get job states */ try { p = Runtime.getRuntime().exec(wfjobsCmd); InputStream out = p.getInputStream(); InputStreamReader r = new InputStreamReader(out); BufferedReader in = new BufferedReader(r); String line; njobs=0; while ( (line = in.readLine()) != null ) { WorkflowJob j = new WorkflowJob(); String[] t = line.split("\\|"); int nt = t.length; if (nt > 1) j.wfid=t[1]; if (nt > 2) j.jobid = t[2]; if (nt > 3) j.state = t[3]; if (nt > 4) j.mtime = t[4]; if (nt > 5) j.site = t[5]; if( njobs < MAXJOBS ) { jobs[njobs++] = j; } else { return false; } } rc = p.waitFor(); return true; } catch (Exception e) { System.out.println("WorkflowJob.refresh: Exception: " + e.toString() ); return false; } } /* Instance Methods */ public String asStatusString() { return "jobid=" + jobid + " wfid=" + wfid + " state=" + state + " mtime=" + mtime + " site=" + site; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/Directive.java0000644000175000017500000000466511757531137024431 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.directive; import edu.isi.pegasus.common.util.Currently; import edu.isi.pegasus.common.util.Version; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.util.ChimeraProperties; import java.util.MissingResourceException; import java.io.IOException; /** * The base class for directives. Directives are a set of * high-level common modules that facilitate user interaction * with the Chimera system. * * @author Jens-S. 
Vöckler * @author Yong Zhao * @version $Revision: 2079 $ */ public abstract class Directive { /** * Logging instance */ protected Logging m_logger; /** * verbose logging mode */ protected boolean m_verbose; /** * properties instance */ protected ChimeraProperties m_props; /** * Constructor, initialize logging and properties instance */ public Directive() throws IOException, MissingResourceException { m_logger = Logging.instance(); m_verbose = false; m_props = ChimeraProperties.instance(); } /** * set verbose mode * @param v true for verbose mode, false otherwise */ public void setVerbose(boolean v) { m_verbose = v; } /** * get verbose mode */ public boolean getVerbose() { return m_verbose; } /** * set logging instance * @param logger the logging instance */ public void setLogger(Logging logger) { if (logger != null) m_logger = logger; } /** * get logging instance */ public Logging getLogger() { return m_logger; } /** * get properties instance */ public ChimeraProperties getProperties() { return m_props; } /** * set properties instance * @param props the chimera properties instance */ public void setProperties(ChimeraProperties props) { if (props != null) m_props = props; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/Connect.java0000644000175000017500000000644711757531137024102 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.directive; import org.griphyn.vdl.parser.*; import org.griphyn.vdl.classes.*; import org.griphyn.vdl.dbschema.*; import org.griphyn.vdl.util.Logging; import java.lang.reflect.*; import java.io.IOException; import java.util.MissingResourceException; /** * This class dynamically loads a database schema. * * @see org.griphyn.vdl.dbschema.DatabaseSchema */ public class Connect extends Directive { /** * Constructor */ public Connect() throws IOException, MissingResourceException { super(); } /** * Connects the database backend. This is not done in the c'tor, because * some apps don't need these heavyweight instructions. * * @param schemaName is the name of the schema class to load. This * better be the fully-qualified name in-sync with properties. * * @return the schema class on success, null on non-exceptional failure. * The result is to be cast to appropriate catalog classes. * * @see org.griphyn.vdl.util.ChimeraProperties#getVDCSchemaName() * @see org.griphyn.vdl.util.ChimeraProperties#getPTCSchemaName() */ public DatabaseSchema connectDatabase( String schemaName ) throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { DatabaseSchema result = null; m_logger.log( "connect", 0, "Connecting the database backend" ); Object[] arg = new Object[1]; arg[0] = new String(); return DatabaseSchema.loadSchema( schemaName, null, arg ); } /** * Connects the database backend. This is not done in the c'tor, because * some apps don't need these heavyweight instructions. 
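*
* <p>A minimal usage sketch, assuming the schema name is taken from the
* properties as suggested by the see-tags below:</p>
* <pre>
*   Connect c = new Connect();
*   DatabaseSchema dbs = c.connectDatabase(
*     org.griphyn.vdl.util.ChimeraProperties.instance().getVDCSchemaName(),
*     null );
* </pre>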
* * @param schemaName is the name of the schema class to load. This * better be the fully-qualified name in-sync with properties. * @param dbDriverName is the name of the database driver * * @return the schema class on success, null on non-exceptional failure. * The result is to be cast to appropriate catalog classes. * * @see org.griphyn.vdl.util.ChimeraProperties#getVDCSchemaName() * @see org.griphyn.vdl.util.ChimeraProperties#getPTCSchemaName() */ public DatabaseSchema connectDatabase( String schemaName, String dbDriverName ) throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { DatabaseSchema result = null; m_logger.log( "connect", 0, "Connecting the database backend" ); Object[] arg = new Object[1]; arg[0] = (dbDriverName == null) ? new String() : dbDriverName; return DatabaseSchema.loadSchema( schemaName, null, arg ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/Derive.java0000644000175000017500000002636711757531137023734 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.directive; import java.io.*; import java.util.*; import org.griphyn.vdl.dax.*; import org.griphyn.vdl.classes.LFN; import org.griphyn.vdl.parser.DAXParser; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.planner.*; /** * This class makes concrete plans for a DAX, when planning using the * shell planner. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see org.griphyn.vdl.planner.Scriptor */ public class Derive extends Directive { public Derive() throws IOException, MissingResourceException { super(); } /** * Generates shell scripts for the workflow described by the dax. * For each derivation, there is a shell script generated, and * there is a control script to control the execution sequence of * these shell scripts according to their dependencies. * * @param dax is the InputStream for the dax representation * @param dir is the directory name in which to generate these scripts * @param build specifies whether to force build mode * @param register specifies whether to register output files * @return true if successful, false otherwise */ public boolean genShellScripts( InputStream dax, String dir, boolean build, boolean register ) throws java.sql.SQLException, IOException, InterruptedException { return genShellScripts(dax, dir, build, register, null); } /** * Generates shell scripts for the workflow described by the dax. * For each derivation, there is a shell script generated, and * there is a control script to control the execution sequence of * these shell scripts according to their dependencies. 
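*
* <p>Hypothetical invocation (the DAX file name and run directory are
* assumed for illustration only):</p>
* <pre>
*   Derive derive = new Derive();
*   boolean ok = derive.genShellScripts(
*     new FileInputStream( "blackdiamond.dax" ), "run01",
*     false, false, null );
* </pre>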
* * @param dax is the InputStream for the dax representation * @param dir is the directory name in which to generate these scripts * @param build specifies whether to force build mode * @param register specifies whether to register output files * @param kickstart_path specifies the location of kickstart. If null, * kickstart will not be used. * @return true if successful, false otherwise */ public boolean genShellScripts( InputStream dax, String dir, boolean build, boolean register, String kickstart_path ) throws java.sql.SQLException, IOException, InterruptedException { // sanity check -- is there a destination directory if ( dir == null || dir.equals("") ) { m_logger.log( "planner", 0, "Output directory not specified, using default: test" ); dir = "test"; } else { m_logger.log( "planner", 0, "Using output directory " + dir ); } // parse the dax file m_logger.log( "planner", 1, "Initializing dax parser"); DAXParser daxparser = new DAXParser(m_props.getDAXSchemaLocation()); m_logger.log( "planner", 1, "parsing the dax..."); ADAG adag = daxparser.parse(dax); // sanity check -- do we have a DAX if ( adag == null ) { m_logger.log( "planner", 0, "failed parsing the dax."); return false; } // check output directory -- does it exist? File f = new File(dir); if ( f.exists() ) { if ( ! f.isDirectory() ) { m_logger.log( "planner", 0, "ERROR: '" + dir + "' is not a directory!" ); throw new IOException( dir + " is not a directory!" ); } } else { m_logger.log( "planner", 0, "directory '" + dir + "' does not exist. Creating." ); f.mkdirs(); } // connect to replica catalog RCWrapper rc = null; try { rc = new RCWrapper(); } catch ( Exception e ) { throw new Error( e.getMessage() ); } m_logger.log( "planner", 2, "Using RC " + rc.getName() ); // connect to transformation catalog TCWrapper tc = new TCWrapper(); m_logger.log( "planner", 2, "Using TC " + tc.getName() ); // connect to site catalog, optional SCWrapper sc = new SCWrapper(); m_logger.log( "planner", 2, "Using SC " + sc.getName() ); // lookup all filenames in replica catalog, and populate the // filename map that is passed around. m_logger.log( "planner", 1, "processing logical filenames" ); HashMap filenameMap = new HashMap(); for ( Iterator i=adag.iterateFilename(); i.hasNext(); ) { Filename fn = (Filename) i.next(); String lfn = fn.getFilename(); String pfn = rc.lookup( "local", lfn ); if ( pfn == null ) { // can't find the lfn->pfn mapping in rc m_logger.log( "planner", 1, "Info: Failed to find LFN " + lfn + " in RC, assuming PFN==LFN" ); pfn = lfn; } filenameMap.put(lfn, pfn); } // convert adag to graph Graph graph = DAX2Graph.DAG2Graph(adag); // to build or to make? if ( build ) { // build mode m_logger.log( "planner", 0, "Running in build mode, DAG pruning skipped"); } else { // make mode m_logger.log( "planner", 0, "Checking nodes whose outputs already exist"); // check output file existence, if all output files exist, then // cut this node boolean cut; // make reverse topological sort to the graph, i.e. find last // finished jobs first. 
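// Working from the sinks of the graph upward: a job whose output files all
// exist already can be cut from the graph, and its inputs are then assumed
// to exist when the earlier stages are examined.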
Topology rtp = new Topology( graph.reverseGraph() ); //Hash to keep all existing files HashMap existMap = new HashMap(); //Hash to keep files to add to exist list for this stage HashMap addMap = new HashMap(); //Hash to keep files to remove from exist list for this stage HashMap removeMap = new HashMap(); String jobs[]; // whether we are dealing with last finished jobs boolean last = true; while ( (jobs=rtp.stageSort()) != null ) { int number = jobs.length; int count = 0; for ( int i=0; i list, " + "please check the DAX!" ); return false; } // check if output file exists if ( fn.getLink() == LFN.OUTPUT ) { File fp = new File(pfn); if ( ! fp.exists() ) { // some output file does not exist. cut = false; } } if ( fn.getLink() == LFN.INPUT ) { inputMap.put( lfn, pfn ); } } } if ( cut ) { // cut node m_logger.log( "planner", 1, "Removed job " + jobID + " from DAG" ); graph.removeVertex(jobID); // assume all input files (outputs from upper stages exist) addMap.putAll(inputMap); count++; } else { // assume all input files not exist. removeMap.putAll(inputMap); } } // for enum if ( count == number ) { // output files for all the jobs in this stage exist if ( last ) { // this is the last stage, no need to run the dag m_logger.log( "planner", 0, "All output files already exist, " + "no computation is needed!" ); return true; } // cut all the upper stage jobs while ( (jobs=rtp.stageSort()) != null ) { for ( int i=0; i" ); writer.write( newline ); writer.flush(); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/Search.java0000644000175000017500000001733011757531137023711 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.directive; import java.io.*; import java.util.Iterator; import java.util.List; import java.sql.SQLException; import edu.isi.pegasus.common.util.Currently; import org.griphyn.vdl.parser.VDLxParser; import org.griphyn.vdl.classes.*; import org.griphyn.vdl.dbschema.*; import org.griphyn.vdl.annotation.*; import org.griphyn.vdl.util.Logging; import java.util.MissingResourceException; /** * This class searches for definitions that either match certain * namespace, name, version combination, or contain a certain * LFN. * * @author Jens-S. 
Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * * @see org.griphyn.vdl.parser.VDLxParser * @see org.griphyn.vdl.dbschema.VDC */ public class Search extends Directive { /** * Defines the output format constants */ public static final int FORMAT_FQDN = 0; public static final int FORMAT_VDLT = 1; public static final int FORMAT_VDLX = 2; private DatabaseSchema m_dbschema = null; /** * Constructor */ public Search() throws IOException, MissingResourceException { super(); } /** * Constructor * @param dbs is the database schema instance */ public Search(DatabaseSchema dbs) throws IOException, MissingResourceException { m_dbschema = dbs; } /** * set database schema * @param dbs is the database schema instance */ public void setDatabaseSchema(DatabaseSchema dbs) { m_dbschema = dbs; } /** * Search for definitions that contain LFN of specific name * and link type. This method does not allow jokers. * * @param filename the LFN name * @param link the linkage type of the LFN * @return a list of Definition items that match the criterion. * * @see org.griphyn.vdl.classes.LFN#NONE * @see org.griphyn.vdl.classes.LFN#INPUT * @see org.griphyn.vdl.classes.LFN#OUTPUT * @see org.griphyn.vdl.classes.LFN#INOUT */ public java.util.List searchDefinition(String filename, int link) throws java.sql.SQLException { return ((VDC)m_dbschema).searchFilename(filename, link); } /** * Search the database for definitions by ns::name:version triple * and by type (either Transformation or Derivation). This version * of the search allows for jokers expressed as null values. * * @param namespace namespace, null to match any namespace * @param name name, null to match any name * @param version version, null to match any version * @param clsType type of definition, see below, or -1 as wildcard * @return a list of Definition items, which may be empty * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List searchDefinition( String namespace, String name, String version, int clsType ) throws java.sql.SQLException { return ((VDC)m_dbschema).searchDefinition(namespace, name, version, clsType ); } /** * Checks a string for the presence of joker characters. * * @param s is the input string * @return true if a joker character was detected, false otherwise * (including for null strings). */ private boolean hasJoker( String s ) { return ( s == null ? false : ( s.indexOf('*')+s.indexOf('?') != -2 ) ); } /** * Translates the regular shell-style jokers into SQL jokers. * Simultaneously protects (with backslash for now) any SQL jokers to * make them literal. * * @param hasJoker is flagged, if the string contains shell jokers * @param s input string to translate * @return translated string -- may be the original reference */ private String mask( boolean hasJoker, String s ) { String result = s; if ( s == null ) return result; if ( result.indexOf('%')+result.indexOf('_') != -2 ) { // has SQL jokers, protect them (backslash for now) result = result.replaceAll( "([%_])", "\\\\$1" ); } if ( hasJoker ) { // turn jokers into SQL jokers result = result.replace('*','%').replace('?','_'); } return result; } /** * Search the database for definitions by ns::name:version triple * and by type (either Transformation or Derivation). This version * of the search allows for jokers expressed as null values, or the special * characters '%' and '_'. 
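*
* <p>For example (hypothetical values), to find all transformations whose
* namespace starts with "my":</p>
* <pre>
*   java.util.List defs = search.searchDefinitionEx(
*     "my*", null, null, Definition.TRANSFORMATION );
* </pre>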
* * @param namespace namespace, null to match any namespace * @param name name, null to match any name * @param version version, null to match any version * @param clsType type of definition, see below, or -1 as wildcard * @return a list of Definition items, which may be empty * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List searchDefinitionEx( String namespace, String name, String version, int clsType ) throws java.sql.SQLException { boolean b1 = hasJoker(namespace); boolean b2 = hasJoker(name); boolean b3 = hasJoker(version); if ( b1 || b2 || b3 ) { // protect and translate jokers return ((Advanced)m_dbschema).searchDefinitionEx( mask(b1,namespace), mask(b2,name), mask(b3,version), clsType ); } else { // no jokers, use potentially more efficient query return ((VDC)m_dbschema).searchDefinition(namespace,name,version,clsType); } } /** * Search for LFNs or Definitions that has certain annotations * * @param kind defines the kind/class of object annotated. * @param arg is used only for TR ARG and TR CALL. For the former * it is the name of the argument (String), for the latter the position of * the call (Integer). * @param tree stores the query tree to query the annotation * @return a list of LFNs if search for filenames, otherwise a list of * definitions. * @exception SQLException if something goes wrong with the database. * @see org.griphyn.vdl.annotation.QueryTree */ public java.util.List searchAnnotation( int kind, Object arg, QueryTree tree) throws java.sql.SQLException { return ((Annotation)m_dbschema).searchAnnotation(kind, arg, tree); } /** * Print a list of definitions in different format: fqdn, vdlt, and vdlx * * @param writer the target to output the list * @param defList a list of definitions * @param format the output format * * @see #FORMAT_FQDN * @see #FORMAT_VDLT * @see #FORMAT_VDLX * NOTE: might be better to move into another module? */ public void printDefinitionList(Writer writer, java.util.List defList, int format) throws IOException { if (defList == null || defList.isEmpty()) return; Definitions defs = new Definitions(); if ( format != FORMAT_FQDN ) { defs.setDefinition(defList); if ( format == FORMAT_VDLX ) defs.toXML(writer, ""); else if ( format == FORMAT_VDLT) defs.toString(writer); } else { for ( Iterator i=defList.iterator(); i.hasNext(); ) { Definition def = (Definition) i.next(); writer.write(def.identify()); writer.write("\n"); } writer.flush(); } } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/Define.java0000644000175000017500000001756511757531137023710 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ package org.griphyn.vdl.directive; import java.io.*; import java.sql.SQLException; import java.util.Iterator; import java.util.List; import java.util.ArrayList; import java.util.Set; import java.util.HashSet; import java.util.MissingResourceException; import edu.isi.pegasus.common.util.Currently; import org.griphyn.vdl.parser.*; import org.griphyn.vdl.dbdriver.*; import org.griphyn.vdl.dbschema.*; import org.griphyn.vdl.classes.Definitions; import org.griphyn.vdl.classes.Definition; import org.griphyn.vdl.classes.Derivation; import org.griphyn.vdl.util.Logging; import org.xml.sax.InputSource; /** * This class parses VDL XML specifications and stores * them in the database backend. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * * @see org.griphyn.vdl.parser.VDLxParser * @see org.griphyn.vdl.parser.DefinitionHandler */ public class Define extends Directive implements DefinitionHandler { /** * This determines the behavior: insert mode (false) or update mode (true) */ private boolean m_overwrite; /** * This variable keeps the stream to print rejects onto, may be null. */ private Writer m_rejects = null; /** * Counts the number of successful database manipulations. */ private int m_count = 0; /** * Counts the rejected manipulations. */ private int m_rejected = 0; /** * database manipulator. */ private DatabaseSchema m_dbschema = null; /** * If enabled, collapses the names of DVs that were processed. */ private java.util.Set m_derivations = null; /** * Constructor */ public Define() throws IOException, MissingResourceException { super(); } /** * Constructor, set database schema instance * @param dbs the database schema instance */ public Define(DatabaseSchema dbs) throws IOException, MissingResourceException { m_dbschema = dbs; } /** * set database schema * @param dbs the database schema instance */ public void setDatabaseSchema(DatabaseSchema dbs) { m_dbschema = dbs; } /** * Closes the associated database backend and invalidates the schema. */ public void close() throws SQLException { if ( m_dbschema != null ) m_dbschema.close(); m_dbschema = null; } /** * Returns the remembered derivations. * * @return all remembered derivations, an empty set if none * were found, or null if remembering was off. * @see #setDerivationMemory( boolean ) */ public Set getDerivationMemory() { return m_derivations; } /** * Toggles the remembering of derivations that were processed. * * @param on is true to enable derivation memory * @see #getDerivationMemory() */ public void setDerivationMemory( boolean on ) { if ( on ) { // enable remembering derivations if ( m_derivations == null ) m_derivations = new java.util.HashSet(); } else { // disable remembering derivations m_derivations = null; } } /** * Insert definitions into database, if a definition already * exists in the database, then it is rejected. This method * does not keep track of rejected ones. * * @param reader the reader to vdlx source * @return true if insertion is successful */ public boolean insertVDC(Reader reader) { return updateVDC(reader, null, false); } /** * Insert definitions into database, if a definition already * exists in the database, then it is rejected. This method * keeps track of rejected ones. 
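*
* <p>Sketch of use, with file and stream names assumed for illustration:</p>
* <pre>
*   Define define = new Define( dbs );
*   boolean ok = define.insertVDC( new FileReader( "defs.xml" ),
*                                  new PrintWriter( System.err ) );
* </pre>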
* * @param reader the reader to vdlx source * @param writer writer to output the rejected definitions * @return true if insertion is successful */ public boolean insertVDC(Reader reader, Writer writer) { return updateVDC(reader, writer, false); } /** * Insert definitions into database, if a definition already * exists in the database, then overwrite it. This method * does not keep track of overwritten ones. * * @param reader the reader to vdlx source * @return true if update is successful */ public boolean updateVDC(Reader reader) { return updateVDC(reader, null, true); } /** * Insert definitions into database, if a definition already * exists in the database, then overwrite it. This method * keeps track of overwritten ones. * * @param reader the reader to vdlx source * @param writer writer to output the overwritten definitions * @return true if update is successful */ public boolean updateVDC(Reader reader, Writer writer) { return updateVDC(reader, writer, true); } /** * Insert definitions into database, if a definition already * exists in the database, then either update the definition or * reject the definition. * * @param reader the reader to vdlx source * @param writer writer to output the overwritten/rejected definitions * @param overwrite true to overwrite an existing definition, false to reject the new one * @return true if update is successful */ public boolean updateVDC(Reader reader, Writer writer, boolean overwrite) { m_rejects = writer; m_overwrite = overwrite; m_count = m_rejected = 0; org.griphyn.vdl.parser.VDLxParser parser = new org.griphyn.vdl.parser.VDLxParser(m_props.getVDLSchemaLocation()); return parser.parse( new InputSource(reader), this ); } /** * This method implements the interface defined in DefinitionHandler * to save a definition to the database backend. * * @param d is the Definition that is ready to be stored. * @return true, if new version was stored and database modified, * false, if the definition was rejected for any reason. */ public boolean store( Definition d ) { boolean result = false; VDC vdc = (VDC)m_dbschema; // NEW: remember all DVs we came across if ( m_derivations != null && d instanceof Derivation ) m_derivations.add( d.shortID() ); try { if ( m_rejects == null ) { // rely on saveDefinition to do "the right thing" result = vdc.saveDefinition( d, m_overwrite ); } else { // Is the Definition already in the database? 
if ( vdc.containsDefinition(d) ) { if ( m_overwrite ) { // this is time-consuming and ineffective Definition old = vdc.loadDefinition( d.getNamespace(), d.getName(), d.getVersion(), d.getType() ); old.toXML( m_rejects, " " ); result = vdc.saveDefinition( d, true ); } else { // skip, if not forced to overwrite, but save rejects d.toXML( m_rejects, " " ); } } else { // not found, insert unconditionally result = vdc.saveDefinition( d, true ); } } } catch ( SQLException sql ) { // database problems for ( int i=0; sql != null; ++i ) { m_logger.log( "database", 0, "SQL error " + i + ": " + sql.getErrorCode() + ": " + sql.getMessage() ); sql = sql.getNextException(); } m_logger.log( "database", 0, "ignoring SQL exception(s)" ); } catch ( Exception e ) { m_logger.log( "database", 0, "caught " + e + ", ignoring" ); result = false; } if ( result ) m_count++; else m_rejected++; return result; } /** * return the number of successfully saved definitions */ public int getNumberSaved() { return m_count; } /** * return the number of rejected definitions */ public int getNumberRejected() { return m_rejected; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/Delete.java0000644000175000017500000001304011757531137023700 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.directive; import java.io.*; import java.util.Iterator; import java.util.List; import java.util.ArrayList; import java.sql.SQLException; import edu.isi.pegasus.common.util.Currently; import org.griphyn.vdl.parser.VDLxParser; import org.griphyn.vdl.classes.*; import org.griphyn.vdl.dbschema.*; import org.griphyn.vdl.util.Logging; import java.util.MissingResourceException; /** * This class deletes definitions that either match certain * namespace, name, version combination * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * * @see org.griphyn.vdl.parser.VDLxParser * @see org.griphyn.vdl.dbschema.VDC */ public class Delete extends Directive { private DatabaseSchema m_dbschema = null; /** * Constructor */ public Delete() throws IOException, MissingResourceException { super(); } /** * Constructor, set database schema instance * @param dbs the database schema instance */ public Delete(DatabaseSchema dbs) throws IOException, MissingResourceException { m_dbschema = dbs; } /** * set database schema * @param dbs the database schema instance */ public void setDatabaseSchema(DatabaseSchema dbs) { m_dbschema = dbs; } /** * Delete one or more definitions from the backend database. The key * triple parameters may be wildcards. Wildcards are expressed as * null value. * * @param namespace namespace * @param name name * @param version version * @param clsType definition type (TR or DV) * @return a list of definitions that were deleted. 
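*
* <p>For example (hypothetical triple), deleting a single derivation:</p>
* <pre>
*   java.util.List gone = delete.deleteDefinition(
*     "test", "hello", "1.0", Definition.DERIVATION );
* </pre>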
* * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List deleteDefinition(String namespace, String name, String version, int clsType) throws java.sql.SQLException { return ((VDC)m_dbschema).deleteDefinition(namespace, name, version, clsType ); } /** * Delete one or more definitions from the backend database. The key * triple parameters may be wildcards. Wildcards are expressed as * null value. Output the deleted ones. * * @param namespace namespace * @param name name * @param version version * @param clsType definition type (TR or DV) * @param writer writer to output deleted definitions * @return a list of definitions that were deleted. * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List deleteDefinition(String namespace, String name, String version, int clsType, Writer writer) throws java.sql.SQLException, IOException { java.util.List defList = ((VDC)m_dbschema).deleteDefinition(namespace, name, version, clsType ); if (writer != null) { for ( Iterator i=defList.iterator(); i.hasNext(); ) { Definition d = (Definition) i.next(); d.toXML( writer, " " ); m_logger.log( "delete", 0, "deleted " + d.identify() ); } } return defList; } /** * Delete one or more definitions from the backend database. The key * triple parameters may be wildcards in force mode. Wildcards are * expressed as null value. Output the deleted ones. * * @param namespace namespace * @param name name * @param version version * @param clsType definition type (TR or DV) * @param writer writer to output deleted definitions * @param force force wildcard matching * @return a list of definitions that were deleted. * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List deleteDefinition(String namespace, String name, String version, int clsType, Writer writer, boolean force) throws java.sql.SQLException, IOException { java.util.List defList = null; if (force) { defList = ((VDC)m_dbschema).deleteDefinition(namespace, name, version, clsType ); } else { ArrayList list = new ArrayList(); if (clsType == -1 || clsType == Definition.TRANSFORMATION) { Definition def = ((VDC)m_dbschema).loadDefinition(namespace, name, version, Definition.TRANSFORMATION); if (def != null) { list.add(def); ((VDC)m_dbschema).deleteDefinition(def); } } if (clsType == -1 || clsType == Definition.DERIVATION) { Definition def = ((VDC)m_dbschema).loadDefinition(namespace, name, version, Definition.DERIVATION); if (def != null) { list.add(def); ((VDC)m_dbschema).deleteDefinition(def); } } defList = list; } if (writer != null) { for ( Iterator i=defList.iterator(); i.hasNext(); ) { Definition d = (Definition) i.next(); d.toXML( writer, " " ); m_logger.log( "delete", 0, "deleted " + d.identify() ); } } return defList; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/Explain.java0000644000175000017500000001672511757531137024113 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.directive; import java.io.*; import java.sql.SQLException; import java.util.Enumeration; import java.util.ArrayList; import java.util.StringTokenizer; import java.util.Iterator; import java.util.Collection; import java.util.MissingResourceException; import edu.isi.pegasus.common.util.Separator; import org.griphyn.vdl.parser.VDLxParser; import org.griphyn.vdl.classes.*; import org.griphyn.vdl.router.*; import org.griphyn.vdl.dbschema.*; import org.griphyn.vdl.dax.ADAG; /** * This class generates the DAX per the request for LFNs or DVs. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * * @see org.griphyn.vdl.router.Route * @see org.griphyn.vdl.router.BookKeeper * @see org.griphyn.vdl.dbschema.VDC */ public class Explain extends Directive { /** * Maintains the link to the VDC. */ private DatabaseSchema m_dbschema = null; /** * Provides a routing object to traverse dependencies in the VDC. */ private Route m_route = null; /** * Helpful object for routing requests. */ private BookKeeper m_state = null; /** * Constructor */ public Explain() throws IOException, MissingResourceException { super(); } /** * Constructor: Sets the database schema instance, and * initializes the internal Route accordingly. * * @param dbs the database schema instance * @see org.griphyn.vdl.router.Route * @see org.griphyn.vdl.router.BookKeeper */ public Explain(DatabaseSchema dbs) throws IOException, MissingResourceException { m_dbschema = dbs; m_route = new Route(m_dbschema); m_state = new BookKeeper(); } /** * Sets database schema, and initializes the internal * Route accordingly. * * @param dbs the database schema instance * @see org.griphyn.vdl.router.Route * @see org.griphyn.vdl.router.BookKeeper */ public void setDatabaseSchema(DatabaseSchema dbs) { m_dbschema = dbs; m_route = new Route(m_dbschema); m_state = new BookKeeper(); } /** * Allows limiting the maximum depth that the router is willing to go. * * @param depth is the maximum depth. Use Integer.MAX_VALUE for * (virtually) unlimited depth. */ public void setMaximumDepth( int depth ) { m_route.setMaximumDepth( depth ); } /** * Requests a data product logical filename. As a result, the complete * build-style DAG for producing the requested filename will be * constructed. * * @param filename is the name of the requested LFN. * @return true if the request is successful * * @see #requestLFN( java.util.Collection ) * @see org.griphyn.vdl.router.BookKeeper */ public boolean requestLFN( String filename ) throws java.sql.SQLException { ArrayList al = new ArrayList(1); al.add(filename); return requestLFN(al); } /** * Requests a set of logical filenames. As a result, the complete * build-style DAG for producing the requested LFNs will be * constructed. * * @param filenames is the list or set of logical filenames requested. * @return true if the request is successful * * @see org.griphyn.vdl.router.Route#requestLfn( Collection, BookKeeper ) * @see org.griphyn.vdl.router.BookKeeper */ public boolean requestLFN( java.util.Collection filenames ) throws java.sql.SQLException { // FIXME: What about previous results? m_route.requestLfn( filenames, m_state ); return ( m_state != null && ! 
m_state.isEmpty() ); } /** * Requests for a specific derivation. As a result, a build-style DAG * will be produced and maintained in the book-keeping structure. * * @param namespace is the namespace of the derivation. * @param name is the name of the derivation. * @param version is the version of the derivation. * @return true if the request is successful * * @see org.griphyn.vdl.router.Route#requestDerivation( String, String, String, BookKeeper ) * @see org.griphyn.vdl.router.BookKeeper */ public boolean requestDerivation( String namespace, String name, String version ) { return m_route.requestDerivation( namespace, name, version, m_state ); } /** * Requests for a specific derivation. As a result, a build-style DAG * will be produced and maintained in the book-keeping structure. * * @param fqdn is the fully qualified name of the derivation. * @return true if the request is successful * * @see org.griphyn.common.util.Separator#splitFQDI( String ) * @see org.griphyn.vdl.router.Route#requestDerivation( String, String, String, BookKeeper ) * @see org.griphyn.vdl.router.BookKeeper */ public boolean requestDerivation( String fqdn ) throws IllegalArgumentException { String[] name = Separator.splitFQDI(fqdn); return m_route.requestDerivation( name[0], name[1], name[2], m_state ); } /** * Requests a set of specific derivations. As a result, a build-style * DAG will be produced and maintained in the book-keeping structure. * * @param symbolicList is a collecting of symbolic FQDN derivation triples. * @return true if the request is successful * * @see org.griphyn.common.util.Separator#splitFQDI( String ) * @see org.griphyn.vdl.router.Route#requestDerivation( Collection, BookKeeper ) * @see org.griphyn.vdl.router.BookKeeper */ public boolean requestDerivation( java.util.Collection symbolicList ) throws IllegalArgumentException { return m_route.requestDerivation( symbolicList, m_state ); } /** * Writes the abstract DAX from the accumulated results. * * @param writer the output writer * @param label the label of the dax * * @see org.griphyn.vdl.router.BookKeeper#getDAX */ public void writeDAX( Writer writer, String label ) throws IOException { this.writeDAX( writer, label, null ); } /** * Writes the abstract DAX with a namespace prefix. * * @param writer the output writer * @param label the label of the dax * @param xmlns the xml namespace prefix * * @see org.griphyn.vdl.router.BookKeeper#getDAX */ public void writeDAX(Writer writer, String label, String xmlns) throws IOException { if ( m_state == null || m_state.isEmpty() ) { // whatever we did, there are no results for us m_logger.log( "explain", 0, "WARNING: The requested DAX is empty!\n" ); } else { ADAG dax = m_state.getDAX( label == null ? "test" : label ); m_logger.log( "explain", 2, "DAX has " + dax.getFilenameCount() + " LFNs, " + dax.getJobCount() + " jobs, " + dax.getChildCount() + " deps." ); dax.toXML(writer, "", xmlns); } } /** * Checks if the result is empty or not. * * @return true, if the result is undefined or empty, false otherwise. */ public boolean isEmpty() { return ( m_state == null || m_state.isEmpty() ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/VDLxConvert.java0000644000175000017500000000467511757531137024672 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.directive; import java.io.*; import java.util.MissingResourceException; import edu.isi.pegasus.common.util.Currently; import org.griphyn.vdl.classes.*; import org.griphyn.vdl.parser.*; import org.griphyn.vdl.classes.Definitions; import org.griphyn.vdl.util.Logging; import org.xml.sax.InputSource; /** * This class uses the VDLxParser to parse VDL XML * specification and output VDL textual specification. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * * @see org.griphyn.vdl.parser.VDLxParser * @see org.griphyn.vdl.parser.DefinitionHandler */ public class VDLxConvert extends Directive implements DefinitionHandler { private Writer m_writer; /** * Constructor */ public VDLxConvert() throws IOException, MissingResourceException { super(); } /** * Reads VDLx specification and outputs vdlt specification * @param reader the source vdlx reader * @param writer the target vdlt writer */ public void VDLx2VDLt(Reader reader, Writer writer) { m_writer = writer; org.griphyn.vdl.parser.VDLxParser parser = new org.griphyn.vdl.parser.VDLxParser(m_props.getVDLSchemaLocation()); parser.parse( new InputSource(reader), this ); } /** * This method implements the interface defined in DefinitionHandler * to output the vdlt specification. * * @param d is the Definition that is ready to be stored. * @return true if the write was successful, false otherwise. */ public boolean store( Definition d ) { boolean result = true; try { if ( m_verbose ) m_logger.log( "directive", 3, d.shortID() ); d.toString( m_writer ); } catch ( IOException ioe ) { m_logger.log( "default", 0, "IO Error: " + ioe.getMessage() ); result = false; } return result; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/directive/Workflow.java0000644000175000017500000002006411757531137024314 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.directive; import java.io.*; import org.griphyn.vdl.directive.WorkflowJob; import org.griphyn.vdl.util.*; import java.sql.SQLException; /** * * A Workflow object defines a context for running a * derivation graph on the Grid as a DAG, and managing its execution. * It serves as a front-end to an associated shell script (by default * located in $PEGASUS_HOME/bin/vds-Workflow-script-runwf). * * The workflow to be executed is designated by its terminal derivation (DV). * * The Workflow instance is returned by the class method Workflow.run(). * * The Workflow class variables contain status cached from periodic * queries of the workflow database. 
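*
* <p>Typical use, with an assumed namespace and derivation name:</p>
* <pre>
*   Workflow wf = Workflow.run( "quarknet", "myDV" );
*   wf.updateStatus();
*   System.out.println( wf.state );
* </pre>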
* * @author Douglas Scheftner * @author Mike Wilde * @author Eric Gilbert * @version $Revision: 50 $ * */ public class Workflow { /* Class Variables */ public static String runwfCmd = "/home/dscheftn/vds/bin/vds-Workflow-script-runwf"; public static String wfstatCmd = "/home/dscheftn/vds/bin/vds-Workflow-script-wfstat"; public static String defaultVOGroup = "quarknet"; public static String logicalFileNameBase = "/export/d1/dscheftn/quarknet_testing/runs"; /** parent directory for run-dir tree ala vds-plan/vds-run. */ public static String defaultBaseDir = "/no/base/dir"; public static String rlsURL = "rls://terminable.uchicago.edu"; public static final int MAXWF = 100000; public static Workflow[] workflows = new Workflow[MAXWF]; public static int nworkflows; public static long millisecsToRefreshStatus = 30000; // 30 secs between auto-refresh of status public static long timeOfLastRefresh = 0; // really want this publically read-only // private static String voGroup; /* Instance Variables */ /* sample data from database: id | basedir | vogroup | workflow | run | creator | ----+-----------------+---------+------------+---------+---------+ 1 | /home/wilde/run | ivdgl1 | test | run0001 | wilde | ctime | state | mtime ------------------------+-------+------------------------ 2005-08-20 13:25:27-05 | -2 | 2005-08-20 13:28:09-05 */ /* Instance variables that mirror the database fields */ public String wfid; public String basedir; public String vogroup; public String wfname; public String run; public String creator; public String ctime; public String state; public String exitstatus; public String mtime; /* Instance variables to track workflow state */ public static final int WFMAXJOBS = 20000; /* FIX: can we avoid this hard limit? */ public WorkflowJob[] jobs; public int njobs; public String tmpdir; public String errorMessage; /* Class Methods */ public static Workflow run ( String namespace, String dvName ) { Process p; int rc; Reader is; StringBuffer sb = new StringBuffer(); char [] b = new char[100000]; int n; Workflow wf = new Workflow(); wf.basedir = defaultBaseDir; wf.vogroup = defaultVOGroup; wf.wfname = dvName; try { System.out.println("Running Process " + namespace + " " + dvName); String[] cmd = { runwfCmd, defaultVOGroup, logicalFileNameBase, defaultBaseDir, rlsURL, namespace, dvName }; p = Runtime.getRuntime().exec( cmd ); InputStream out = p.getInputStream(); InputStreamReader r = new InputStreamReader(out); BufferedReader in = new BufferedReader(r); wf.tmpdir = in.readLine(); System.out.println("output from runwf: tmpdir=" + wf.tmpdir); wf.run = in.readLine(); System.out.println("output from runwf: run=" + wf.run); wf.errorMessage = in.readLine(); System.out.println("output from runwf: errorMessage=" + wf.errorMessage); rc = p.waitFor(); System.out.println("Process returned rc=" + rc); return(wf); } catch (Exception e) { System.out.println("Prepare: Exception: " + e.toString() ); return wf; } } public static boolean refresh() { Process p; int rc; Reader is; StringBuffer sb = new StringBuffer(); char [] b = new char[100000]; int n; /* Run status command to get workflow states */ try { p = Runtime.getRuntime().exec(wfstatCmd); InputStream out = p.getInputStream(); InputStreamReader r = new InputStreamReader(out); BufferedReader in = new BufferedReader(r); String line; nworkflows=0; while ( (line = in.readLine()) != null ) { Workflow w = new Workflow(); String[] t = line.split("\\|"); int nt = t.length; if (nt > 1) w.wfid=t[1]; if (nt > 2) w.basedir = t[2]; if (nt > 3) w.vogroup = t[3]; 
        if (nt > 4) w.wfname = t[4];
        if (nt > 5) w.run = t[5];
        if (nt > 6) w.creator = t[6];
        if (nt > 7) w.ctime = t[7];
        if (nt > 8) {
          switch (Integer.parseInt(t[8], 10)) {
            case -2:
              w.state = "WFSTATE_PLANNED";
              w.exitstatus = "";
              break;
            case -1:
              w.state = "WFSTATE_RUNNING";
              w.exitstatus = "";
              break;
            default:
              w.state = "WFSTATE_FINISHED";
              w.exitstatus = t[8];
              break;
          }
        }
        if (nt > 9) w.mtime = t[9];
        if ( nworkflows < (MAXWF) ) {
          workflows[nworkflows++] = w;
        } else {
          return false;
        }
      }
      rc = p.waitFor();
      return true;
    } catch (Exception e) {
      System.out.println("Workflow.refresh: Exception: " + e.toString());
      return false;
    }
  }

  /* Instance Methods */

  /**
   * Sets the status fields in a Workflow instance.
   * @return true if status was successfully obtained, false if not.
   */
  public boolean updateStatus()
  {
    boolean rc;
    long now = System.currentTimeMillis();
    if ( now > (timeOfLastRefresh + millisecsToRefreshStatus) ) {
      rc = Workflow.refresh();
      rc = WorkflowJob.refresh();
      timeOfLastRefresh = now;
    }
    for( int i=0; i

Provides the API to the abstract planner for portals, CLI tools, etc.

Package Specification

The classes in this package provide an implementation which bridges between the intricate, and at times difficult to use, abstract planner classes and the code that uses the abstract planner. The latter includes portals, debuggers, and command-line tools.
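For illustration, a hypothetical sketch of such client code, reusing the portal-facing Workflow wrapper that appears earlier in this source tree (the namespace and derivation name below are invented):

    Workflow wf = Workflow.run( "quarknet", "exampleDV" );
    if ( Workflow.refresh() ) {
        for ( int i = 0; i < Workflow.nworkflows; ++i ) {
            Workflow w = Workflow.workflows[i];
            System.out.println( w.wfname + " run " + w.run + ": " + w.state );
        }
    }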

Related Documentation

For overviews, tutorials, examples, guides, and tool documentation, please see:

@since GVDS 1.2.1

pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/0000755000175000017500000000000011757531667021324 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Meta.java0000644000175000017500000001107411757531137023050 0ustar ryngerynge
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file ../GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
package org.griphyn.vdl.classes;

import org.griphyn.vdl.classes.*;
import java.util.*;
import java.io.IOException;
import java.io.Writer;
import java.io.Serializable;

/**
 * This class implements a mere placeholder for metadata: it stores the
 * raw content, but does nothing with it.
 *
 * @author Jens-S. Vöckler
 * @author Yong Zhao
 * @version $Revision: 50 $
 */
public class Meta extends VDL implements Serializable
{
  /**
   * Stores the raw metadata content.
   */
  private String m_content;

  /**
   * Default ctor. Calls the parent initialization.
   */
  public Meta()
  {
    super();
    this.m_content = new String();
  }

  /**
   * Ctor to initialize the content while constructing the class.
   * This is a convenience ctor.
   */
  public Meta( String content )
  {
    super();
    this.m_content = content;
  }

  /**
   * Appends text to the current internal state. The text may contain
   * other elements which are not quoted or changed in any way.
   *
   * @param content is the new text to append to the current content.
   * @see #getContent()
   */
  public void addContent( String content )
  {
    this.m_content += content;
  }

  /**
   * Gets the content state of this object. The text may contain
   * other elements which are not quoted or changed in any way.
   *
   * @return The current state of content. The text may be null.
   * @see #setContent(String)
   */
  public String getContent()
  {
    return this.m_content;
  }

  /**
   * Overwrites the internal state with new content. The supplied content
   * will become effectively the active state of the object. Usually, this
   * method will be called during SAX assembly of the instance structure.
   *
   * @param content is the new state to register.
   * @see #getContent()
   **/
  public void setContent( String content )
  {
    this.m_content = content;
  }

  /**
   * Converts the active state into something meant for human consumption.
   * This method overwrites the base class default as it can be more
   * efficiently implemented.
   *
   * @return an empty string, since there is no textual representation
   * of metadata in VDLt.
   */
  public String toString()
  {
    return new String();
  }

  /**
   * Prints the current content onto the stream. This is a no-op, because
   * there is no textual representation of metadata in VDLt.
   *
   * @param stream is a stream opened and ready for writing. This can also
   * be a string stream for efficient output.
   * @throws IOException if something happens to the stream.
   */
  public void toString( Writer stream )
    throws IOException
  {
    // do nothing
  }

  /**
   * Dump the state of the current element as XML output. The stream
   * interface should be able to handle large output efficiently, if you
   * use a buffered writer.
   *
   * @param stream is a stream opened and ready for writing.
   * This can also be a string stream for efficient output.
   * @param indent is a String of spaces used for pretty
   * printing. The initial amount of spaces should be an empty string.
   * The parameter is used internally for the recursive traversal.
   * @param namespace is the XML schema namespace prefix. If neither
   * empty nor null, each element will be prefixed with this prefix,
   * and the root element will map the XML namespace.
   * @exception IOException if something fishy happens to the stream.
   */
  public void toXML( Writer stream, String indent, String namespace )
    throws IOException
  {
    String tag = ( namespace != null && namespace.length() > 0 ) ?
      namespace + ":meta" : "meta";

    // just one tag
    if ( indent != null && indent.length() > 0 ) stream.write( indent );
    stream.write( '<' );
    stream.write( tag );
    if ( this.m_content.length() > 0 ) {
      stream.write( '>' );
      stream.write( quote(this.m_content,false) );
      stream.write( "</" );
      stream.write( tag );
      stream.write( '>' );
    } else {
      stream.write( "/>" );
    }
    if ( indent != null )
      stream.write( System.getProperty( "line.separator", "\r\n" ) );
  }
}
pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/List.java0000644000175000017500000003166411757531137023104 0ustar ryngerynge
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file ../GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
package org.griphyn.vdl.classes;

import org.griphyn.vdl.classes.*;
import java.util.*;
import java.io.IOException;
import java.io.Writer;
import java.io.Serializable;

/**
 * This class implements the list argument type used for parameters passed
 * to transformations from derivations.
 *
 * @author Jens-S. Vöckler
 * @author Yong Zhao
 * @version $Revision: 50 $
 *
 * @see Value
 * @see Scalar
 */
public class List extends Value implements Cloneable, Serializable
{
  /**
   * A list is just an ordered bunch of {@link Scalar}.
   */
  private ArrayList m_scalarList;

  /**
   * Creates and returns a copy of this object.
   * @return a new instance.
   */
  public Object clone()
  {
    List result = new List();
    for ( int index=0; index < this.m_scalarList.size(); ++index ) {
      result.addScalar( (Scalar) this.getScalar(index).clone() );
    }
    return result;
  }

  /**
   * Default ctor.
   */
  public List()
  {
    super();
    this.m_scalarList = new ArrayList();
  }

  /**
   * Convenience ctor: Initializes the list, and stores the given
   * {@link Scalar} as first child into the list.
   *
   * @param firstChild is the first element in the list
   */
  public List( Scalar firstChild )
  {
    super();
    this.m_scalarList = new ArrayList();
    this.m_scalarList.add(firstChild);
  }

  /**
   * Accessor: Obtains the value type of this class. By using the abstract
   * method in the parent class, List objects can be
   * distinguished from Scalar objects without using the
   * instanceof operator.
   *
   * @return the fixed value of being a list.
   * @see Value#LIST
   */
  public int getContainerType()
  {
    return Value.LIST;
  }

  /**
   * This method determines which container is being used in the abstract
   * base class, in order to avoid kludgy statements when printing debug info.
   *
   * @return the symbolic identifier for the type of the Value.
   */
  public String getSymbolicType()
  {
    // always
    return new String("List");
  }

  /**
   * Accessor: Appends a Scalar value to the list.
   *
   * @param vScalar is the Scalar to append to the list.
   * @throws IndexOutOfBoundsException if the value cannot be added.
   * @see Scalar
   */
  public void addScalar( Scalar vScalar )
    throws IndexOutOfBoundsException
  {
    this.m_scalarList.add(vScalar);
  }

  /**
   * Accessor: Insert a Scalar at a specific position.
   *
   * @param index is the position to insert the item into
   * @param vScalar is the Scalar to append to the list.
   * @throws IndexOutOfBoundsException if the value cannot be added.
   * @see Scalar
   */
  public void addScalar( int index, Scalar vScalar )
    throws IndexOutOfBoundsException
  {
    this.m_scalarList.add(index, vScalar);
  }

  /**
   * Accessor: constructs the iterator for the List items.
   *
   * @return an enumeration to walk the list with.
   * @deprecated Use the new Collection based interfaces
   */
  public Enumeration enumerateScalar()
  {
    return Collections.enumeration(this.m_scalarList);
  }

  /**
   * Determines all LFN instances of a given scalar that match the
   * specified linkage. This is a higher-level method employing the
   * given API.
   *
   * @param linkage is the linkage to check for, -1 for all filenames.
   * @return a set of logical filename instances that match the linkage
   * and were part of the scalar. The result may be an empty set, if no
   * such result were to be found.
   *
   * @see Scalar#getAllLFN( int )
   * @see LFN
   */
  public java.util.List getAllLFN( int linkage )
  {
    java.util.List result = new ArrayList();
    for ( Iterator i=this.iterateScalar(); i.hasNext(); )
      result.addAll( ((Scalar) i.next()).getAllLFN(linkage) );
    return result;
  }

  /**
   * Determines all LFN instances of a given scalar that match the
   * specified linkage. This is a higher-level method employing the
   * given API. Note that also linkage of NONE will not be found in
   * wildcard search mode.
   *
   * @param linkage is the linkage to check for, -1 for all filenames.
   * @return a set of all logical filenames that match the linkage and
   * were part of the scalar. The result may be an empty set, if no such
   * result were to be found. For a linkage of -1, complete LFNs will be
   * returned, for any other linkage, just the filename will be
   * returned.
   *
   * @see Scalar#getLFNList( int )
   * @see Derivation#getLFNList( int )
   * @see LFN
   */
  public java.util.List getLFNList( int linkage )
  {
    java.util.List result = new ArrayList();
    for ( Iterator i=this.iterateScalar(); i.hasNext(); )
      result.addAll( ((Scalar) i.next()).getLFNList(linkage) );
    return result;
  }

  /**
   * Determines if the list contains an LFN of the specified linkage.
   * The logic uses short-circuit evaluation, thus finding things is
   * faster than not finding things. Searching a list is a potentially
   * expensive method.
   *
   * @param filename is the name of the LFN
   * @param linkage is the linkage to check for, -1 for any linkage type.
   * @return true if the LFN is contained in the scalar, false otherwise.
   *
   * @see org.griphyn.vdl.classes.LFN
   * @see Scalar#containsLFN( String, int )
   */
  public boolean containsLFN( String filename, int linkage )
  {
    for ( Iterator i=this.iterateScalar(); i.hasNext(); )
      if ( ((Scalar) i.next()).containsLFN( filename, linkage ) )
        return true;

    // not found
    return false;
  }

  /**
   * Accessor: Obtains the value of a specific item in the list.
   *
   * @param index is the position of which to obtain the value of.
   * @return The {@link Scalar} at the specified position.
   * @throws IndexOutOfBoundsException if the index points to an element
   * that is beyond the list boundaries.
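   *
   * <p>A hypothetical usage sketch (it assumes Scalar offers a default
   * ctor; this example is illustrative and not part of the original
   * documentation):
   * <pre>
   *   List l = new List( new Scalar() );
   *   Scalar first = l.getScalar( 0 );   // index 0 exists
   *   Scalar fails = l.getScalar( 1 );   // throws IndexOutOfBoundsException
   * </pre>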
   */
  public Scalar getScalar(int index)
    throws IndexOutOfBoundsException
  {
    //-- check bound for index
    if ((index < 0) || (index >= this.m_scalarList.size()))
      throw new IndexOutOfBoundsException();

    return (Scalar) this.m_scalarList.get(index);
  }

  /**
   * Accessor: Gets an array of all Scalars in the list.
   *
   * @return an array of Scalars.
   * @see Scalar
   * @deprecated Use the new Collection based interfaces
   */
  public Scalar[] getScalar()
  {
    int size = this.m_scalarList.size();
    Scalar[] mScalar = new Scalar[size];
    System.arraycopy( this.m_scalarList.toArray( new Scalar[0] ), 0,
                      mScalar, 0, size );
    return mScalar;
  }

  /**
   * Accessor: Obtains the element count of the internal list.
   *
   * @return number of elements in the internal list
   */
  public int getScalarCount()
  {
    return this.m_scalarList.size();
  }

  /**
   * Accessor: Gets a read-only list of all Scalars in the list.
   *
   * @return a read-only list of Scalars.
   * @see Scalar
   */
  public java.util.List getScalarList()
  {
    return Collections.unmodifiableList(this.m_scalarList);
  }

  /**
   * Accessor: constructs the iterator for the List items.
   *
   * @return an iterator to walk the list with.
   */
  public Iterator iterateScalar()
  {
    return this.m_scalarList.iterator();
  }

  /**
   * Accessor: constructs a list iterator for the List items.
   *
   * @return a list iterator to walk the list with.
   */
  public ListIterator listIterateScalar()
  {
    return this.m_scalarList.listIterator();
  }

  /**
   * Accessor: constructs a list iterator for the List items.
   * @param start is the starting position for the sub-iteration.
   * @return a list iterator to walk the list with.
   */
  public ListIterator listIterateScalar( int start )
  {
    return this.m_scalarList.listIterator(start);
  }

  /**
   * Accessor: Removes all elements in the List.
   */
  public void removeAllScalar()
  {
    this.m_scalarList.clear();
  }

  /**
   * Accessor: Removes a single element from the List. Each
   * component in this vector with an index greater or equal to the
   * specified index is shifted downward to have an index one smaller
   * than the value it had previously. The size of this vector is
   * decreased by 1.
   *
   * @param index is the position at which an element is to be removed.
   * @return the {@link Scalar} that was removed.
   * @throws ArrayIndexOutOfBoundsException if the index was invalid.
   */
  public Scalar removeScalar( int index )
  {
    return (Scalar) this.m_scalarList.remove(index);
  }

  /**
   * Accessor: Overwrite an element at a given position.
   *
   * @param index is the position to use. It must be within the list.
   * @param vScalar is the new value to replace the element with.
   * @throws IndexOutOfBoundsException if the position is outside the list.
   */
  public void setScalar(int index, Scalar vScalar)
    throws IndexOutOfBoundsException
  {
    //-- check bounds for index
    if ((index < 0) || (index >= this.m_scalarList.size())) {
      throw new IndexOutOfBoundsException();
    }
    this.m_scalarList.set(index, vScalar);
  } //-- void setScalar(int, Scalar)

  /**
   * Accessor: Replaces all elements with a new list of {@link Scalar}s.
   *
   * @param scalarArray is the list to replace the original list with.
   * @deprecated Use the new Collection based interfaces
   */
  public void setScalar(Scalar[] scalarArray)
  {
    //-- copy array
    this.m_scalarList.clear();
    this.m_scalarList.addAll( Arrays.asList(scalarArray) );
  }

  /**
   * Accessor: Replaces all elements with a new list of {@link Scalar}s.
   *
   * @param scalars is the list to replace the original list with.
   */
  public void setScalar(Collection scalars)
  {
    this.m_scalarList.clear();
    this.m_scalarList.addAll( scalars );
  }

  /**
   * Dumps the list and all its contents into a string. The list will
   * be terminated by brackets, elements separated by comma and space.
   * Elements themselves will be dumped by recursive calls to the element
   * specific method of the same name.
   *
   * @param stream is a stream opened and ready for writing. This can also
   * be a string stream for efficient output.
   * @exception IOException if something fishy happens to the stream.
   */
  public void toString( Writer stream )
    throws IOException
  {
    stream.write( "[ " );
    for ( Iterator i=this.m_scalarList.iterator(); i.hasNext(); ) {
      ((Scalar) i.next()).toString(stream);
      if ( i.hasNext() ) stream.write( ", " );
    }
    stream.write( " ]" );
  }

  /**
   * Dump the state of the current element as XML output. This function
   * traverses all sibling classes as necessary, and converts the data
   * into pretty-printed XML output. The stream interface should be able
   * to handle large output efficiently, if you use a buffered writer.
   *
   * @param stream is a stream opened and ready for writing. This can also
   * be a string stream for efficient output.
   * @param indent is a String of spaces used for pretty
   * printing. The initial amount of spaces should be an empty string.
   * The parameter is used internally for the recursive traversal.
   * @param namespace is the XML schema namespace prefix. If neither
   * empty nor null, each element will be prefixed with this prefix,
   * and the root element will map the XML namespace.
   * @exception IOException if something fishy happens to the stream.
   */
  public void toXML( Writer stream, String indent, String namespace )
    throws IOException
  {
    String newline = System.getProperty( "line.separator", "\r\n" );
    String tag = ( namespace != null && namespace.length() > 0 ) ?
      namespace + ":list" : "list";

    // open tag
    if ( indent != null && indent.length() > 0 ) stream.write( indent );
    stream.write( '<' );
    stream.write( tag );
    stream.write( '>' );
    if ( indent != null ) stream.write( newline );

    // dump content
    String newindent = indent==null ? null : indent+" ";
    for ( Iterator i=this.m_scalarList.iterator(); i.hasNext(); ) {
      // FIXME: If we cast to Value, we can have lists in lists
      ((Scalar) i.next()).toXML( stream, newindent, namespace );
    }

    // close tag
    if ( indent != null && indent.length() > 0 ) stream.write( indent );
    stream.write( "</" );
    stream.write( tag );
    stream.write( '>' );
    if ( indent != null ) stream.write( newline );
  }
}
pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Transformation.java0000644000175000017500000011456511757531137025175 0ustar ryngerynge
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file ../GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
package org.griphyn.vdl.classes;

import org.griphyn.vdl.classes.*;
import java.util.*;
import java.io.IOException;
import java.io.Writer;
import java.io.Serializable;

/**
 * Transformation is an implementation of an abstract VDL
 * Definition.
A transformation describes the immutable * template of an input, processing, and output (IPO) application. The * environment is part of the capture. The template can be parametrized * using formal arguments, which are part of the transformation * definition. Think of a transformation as something similar to a C * function definition. Mutable parts are hidden in the arguments, and * instantiated in Derivation.
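 *
 * <p>A minimal construction sketch (hypothetical names and values; it
 * uses only constructors and methods declared further down in this
 * class, and is not part of the original documentation):
 * <pre>
 *   Transformation tr = new Transformation( "demo", "hello", "1.0" );
 *   boolean leaf = tr.isSimple();       // true: no Call children yet
 *   java.io.StringWriter sw = new java.io.StringWriter();
 *   try {
 *     tr.toXML( sw, "", null );         // dump the definition as XML
 *   } catch ( java.io.IOException ioe ) { }
 * </pre>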

* * FIXME: The mixing of compounds with simple transformations within * the same class instead of hierarchy needs some serious redesigning. * Unfortunately, this entails a redesign of just about everything... * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Definition * @see Definitions * @see Derivation */ public class Transformation extends Definition // thus implements VDL implements Serializable { /** * Captures the argument name indexed map of formal arguments. Each * element is of type {@link Declare} with an optional default value * of type {@link Value}. * * @see Declare */ private TreeMap m_declareMap; /** * The profile list encapsulates scheduler specific data in a * generic structure. * * @see Profile */ private ArrayList m_profileList; /** * The argument list describes the command line arguments as sum of * substrings. Each element is an {@link Argument}, and can be * optionally addressed by a name. * * @see Argument */ private ArrayList m_argumentList; /** * All arguments are, for ease-of-use, separated by a single * space. If this is not wanted, the user can overwrite the * separator, including setting it to the empty string and null. */ private String m_argumentSeparator = " "; /** * Each compound transformation may declare local variables. */ private TreeMap m_localMap; /** * Each entry in the call list describes an invocation of another * transformation, complete with actual arguments etc. * * @see Call */ private ArrayList m_callList; /** * Type accessor for quick distinction between definitions. * @return the value of {@link Definition#TRANSFORMATION} */ public int getType() { return Definition.TRANSFORMATION; } /** * Default ctor: This ctor will frequently be used by the SAX parser * to create a hollow instance. Note that a transformation can be named. */ public Transformation() { super(); this.m_argumentList = new ArrayList(); this.m_declareMap = new TreeMap(); this.m_profileList = new ArrayList(); this.m_localMap = new TreeMap(); this.m_callList = new ArrayList(); } /** * Minimum c'tor: Any transformation should be named. * * @param name is the name of the transformation in the current namespace * and version context. * @see Definition */ public Transformation( String name ) { super(name); this.m_argumentList = new ArrayList(); this.m_declareMap = new TreeMap(); this.m_profileList = new ArrayList(); this.m_localMap = new TreeMap(); this.m_callList = new ArrayList(); } /** * Standard ctor: This will name a transformation with the complete * tripel necessary to access it correctly (w/o guessing). * * @param name is the name of the transformation in the current namespace * and version context. * @param namespace is the namespace that the transformation resides in. * @param version is a version number. * * @see Definition */ public Transformation( String namespace, String name, String version ) { super(namespace,name,version); this.m_argumentList = new ArrayList(); this.m_declareMap = new TreeMap(); this.m_profileList = new ArrayList(); this.m_localMap = new TreeMap(); this.m_callList = new ArrayList(); } /** * Accessor: Adds an argument to the list of arguments * * @param vArgument is the argument to append to the command line * arguments. * @exception IndexOutOfBounds if the argument does not fit into the list. * @see Argument */ public void addArgument( Argument vArgument ) throws IndexOutOfBoundsException { this.m_argumentList.add(vArgument); } /** * Accessor: Inserts an argument at an arbitrary place into the list. 
* Each component in this vector with an index greater or equal to the * specified index is shifted upward to have an index one greater than * the value it had previously. * * @param index is the position to insert an argument * @param vArgument is the argument to append to the command line * arguments. * @exception IndexOutOfBounds if the argument does not fit into the list. * @see #getArgument( int ) * @see #setArgument( int, Argument ) * @see Argument */ public void addArgument( int index, Argument vArgument ) throws IndexOutOfBoundsException { this.m_argumentList.add(index, vArgument); } /** * Accessor: Adds an invocation to the list of calls. * * @param vCall is the invocation to append to the list of calls. * @exception IndexOutOfBounds if the argument does not fit into the list. * @see Call */ public void addCall( Call vCall ) throws IndexOutOfBoundsException { this.m_callList.add(vCall); } /** * Accessor: Inserts an invocation at an arbitrary place into the list. * Each component in this vector with an index greater or equal to the * specified index is shifted upward to have an index one greater than * the value it had previously. * * @param index is the position to insert an invocation * @param vCall is the invocation to append to the list of calls. * @exception IndexOutOfBounds if the argument does not fit into the list. * @see #setCall( int, Call ) * @see #getCall( int ) * @see Call */ public void addCall( int index, Call vCall ) throws IndexOutOfBoundsException { this.m_callList.add(index, vCall); } /** * Accessor: Adds a formal argument declaration to the map of declarations. * * @param vDeclare is the formal argument to add to the declarations. * @see #getDeclare( String ) * @see #setDeclare( Declare ) * @see Declare */ public void addDeclare( Declare vDeclare ) { this.m_declareMap.put( vDeclare.getName(), vDeclare ); } /** * Accessor: Adds a temporary variable declaration to the map of local * variables. * * @param vLocal is the local variable declaration with value. * @see #getLocal( String ) * @see #setLocal( Local ) * @see Local */ public void addLocal( Local vLocal ) { this.m_localMap.put( vLocal.getName(), vLocal ); } /** * Accessor: Appends a profile definition to the list of profiles. * * @param vProfile is the profile to append to remembered profiles. * @exception IndexOutOfBounds if the argument does not fit into the list. * @see Profile */ public void addProfile( Profile vProfile ) throws IndexOutOfBoundsException { this.m_profileList.add(vProfile); } /** * Accessor: Inserts a profile definition at an arbitrary position * into the list of profiles. Each component in this vector with an * index greater or equal to the specified index is shifted upward to * have an index one greater than the value it had previously. * * @param index is the position to insert the definitions into. * @param vProfile is the profile to append to remembered profiles. * @exception IndexOutOfBounds if the argument does not fit into the list. * @see #getProfile( int ) * @see #setProfile( int, Profile ) * @see Profile */ public void addProfile( int index, Profile vProfile ) throws IndexOutOfBoundsException { this.m_profileList.add(index, vProfile); } /** * Accessor: Provides an iterator for the Argument list. * * @return the iterator for the Argument list. 
* @see Argument * @see java.util.Enumeration * @deprecated Use the new Collection based interfaces */ public Enumeration enumerateArgument() { return Collections.enumeration(this.m_argumentList); } /** * Accessor: Provides an iterator for the Call list. * * @return the iterator for the Call list. * @see Call * @see java.util.Enumeration * @deprecated Use the new Collection based interfaces */ public Enumeration enumerateCall() { return Collections.enumeration(this.m_callList); } /** * Accessor: Provides an iterator for the Declare map. * * @return the iterator for the Declare list. * @see Declare * @see java.util.Enumeration * @deprecated Use the new Collection based interfaces */ public Enumeration enumerateDeclare() { return Collections.enumeration(this.m_declareMap.values()); } /** * Accessor: Provides an iterator for the local variables. * * @return the iterator over all locally declared variables. * @see Local * @see java.util.Enumeration * @deprecated Use the new Collection based interfaces */ public Enumeration enumerateLocal() { return Collections.enumeration(this.m_localMap.values()); } /** * Accessor: Provides an iterator for the Profile list. * * @return the iterator for the Profile list. * @see Profile * @see java.util.Enumeration * @deprecated Use the new Collection based interfaces */ public Enumeration enumerateProfile() { return Collections.enumeration(this.m_profileList); } /** * Predicate: Determines, if this is a simple TR (as opposed to compound). * @return true, if this a simple transformation. */ public boolean isSimple() { return this.m_callList.size() == 0; } /** * Accessor: Obtains an Argument at an arbitrary position. * * @param index is the place to look up the element at. * @return the argument at the specified place. * @throws IndexOutOfBoundsException if the referenced position does * not exist. * @see Argument */ public Argument getArgument(int index) throws IndexOutOfBoundsException { //-- check bounds for index if ((index < 0) || (index >= this.m_argumentList.size())) { throw new IndexOutOfBoundsException(); } return (Argument) this.m_argumentList.get(index); } /** * Accessor: Obtains the complete commandline arguments. This array is * a copy to avoid write-through modifications. * * @return an array with all commandline arguments inside. * @see #setArgument( Argument[] ) * @see Argument * @deprecated Use the new Collection based interfaces */ public Argument[] getArgument() { int size = this.m_argumentList.size(); Argument[] mArray = new Argument[size]; System.arraycopy( this.m_argumentList.toArray(new Argument[0]), 0, mArray, 0, size ); return mArray; } /** * Accessor: Obtains the count of items in the argument list. * * @return the number of arguments in the commandline argument list. * @see Argument */ public int getArgumentCount() { return this.m_argumentList.size(); } /** * Accessor: Obtains the complete commandline arguments. The resulting * list is read-only. * * @return an array with all commandline arguments inside. * @see #setArgument( Collection ) * @see Argument */ public java.util.List getArgumentList() { return Collections.unmodifiableList(this.m_argumentList); } /** * Gets the separating string between multiple {@link Argument} * elements. * * @return The current state of the separator. The text may be null. * @see #setArgumentSeparator(String) */ public String getArgumentSeparator() { return this.m_argumentSeparator; } /** * Accessor: Obtains an Call at an arbitrary position. * * @param index is the place to look up the element at. 
* @return the call at the specified place. * @throws IndexOutOfBoundsException if the referenced position does * not exist. * @see #addCall( int, Call ) * @see #setCall( int, Call ) * @see Call */ public Call getCall(int index) throws IndexOutOfBoundsException { //-- check bounds for index if ((index < 0) || (index >= this.m_callList.size())) { throw new IndexOutOfBoundsException(); } return (Call) this.m_callList.get(index); } //-- Call getCall(int) /** * Accessor: Obtains the invocation list. This * array is a copy of the original to avoid write-through modifications. * * @return an array with all calls inside. * @see Call * @deprecated Use the new Collection based interfaces */ public Call[] getCall() { int size = this.m_callList.size(); Call[] mArray = new Call[size]; System.arraycopy( this.m_callList.toArray( new Call[0] ), 0, mArray, 0, size ); return mArray; } /** * Accessor: Obtains the count of items in the call list. * * @return the number of calls in the call list. * @see Call */ public int getCallCount() { return this.m_callList.size(); } /** * Accessor: Obtains the invocation list. * @return a read-only list with all calls inside. * @see #setCall( Collection ) * @see Call */ public java.util.List getCallList() { return Collections.unmodifiableList( this.m_callList ); } /** * Accessor: Obtains the declaration of a formal argument as referenced * by its variable name. * * @param name is the symbolic index and variable name to obtain the * declaration for. * @return the Declare object referenced by the name. May * return null, if there is no such object. * @see #addDeclare( Declare ) * @see #setDeclare( Declare ) * @see Declare */ public Declare getDeclare(String name) { return (Declare) this.m_declareMap.get(name); } /** * Accessor: Obtain all known formal arguments. Note that the array * will be arbitrarily sorted, depending on Java's hash function. * * @return a list of all formal arguments. * @see Declare * @see #setDeclare( Declare[] ) * @deprecated Use the new Collection based interfaces */ public Declare[] getDeclare() { int size = this.m_declareMap.size(); Declare[] mArray = new Declare[size]; this.m_declareMap.values().toArray(mArray); return mArray; } /** * Accessor: Counts the number of formal arguments known to this * transformation. * * @return the formal argument count */ public int getDeclareCount() { return this.m_declareMap.size(); } /** * Accessor: Obtain all known formal arguments. Note that the list * will be arbitrarily sorted, depending on Java's hash function. * It is also a read-only list to avoid modifications outside the API. * * @return a list of all formal arguments. * @see #setDeclare( Collection ) * @see Declare */ public java.util.List getDeclareList() { return Collections.unmodifiableList( new ArrayList(this.m_declareMap.values()) ); } /** * Accessor: Obtains all known formal arguments. The map is a read-only * map to avoid modifications outside the API. * * @return a map with all formal arguments. * @see #setDeclare( Map ) * @see Declare */ public java.util.Map getDeclareMap() { return Collections.unmodifiableMap( this.m_declareMap ); } /** * Accessor: Obtains the declaration of a temporary variable as * referenced by its name. * * @param name is the variable name to obtain the declaration for. * @return the Local object referenced by the name. May * return null, if there is no such object. 
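   *
   * <p>For example (a hypothetical sketch; the variable name is invented):
   * <pre>
   *   Local tmp = tr.getLocal( "tmpfile" );
   *   if ( tmp == null ) System.err.println( "no such local variable" );
   * </pre>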
* @see #addLocal( Local ) * @see #setLocal( Local ) * @see Local */ public Local getLocal(String name) { return (Local) this.m_localMap.get(name); } /** * Accessor: Obtain all known temporary variables. Note that the array * will be arbitrarily sorted, depending on Java's hash function. * * @return a list of all temporary variables. * @see #setLocal( Local[] ) * @see Local * @deprecated Use the new Collection based interfaces */ public Local[] getLocal() { int size = this.m_localMap.size(); Local[] mArray = new Local[size]; this.m_localMap.values().toArray(mArray); return mArray; } /** * Accessor: Counts the number of temporary variables known to this * transformation. * * @return the temporary variable count */ public int getLocalCount() { return this.m_localMap.size(); } /** * Accessor: Obtains all known temporary variables. Note that the list * will be arbitrarily sorted, depending on Java's hash function. It * is also a read-only list to avoid modifications outside the API. * * @return a list of all formal arguments. * @see #setLocal( Collection ) * @see Local */ public java.util.List getLocalList() { return Collections.unmodifiableList( new ArrayList(this.m_localMap.values()) ); } /** * Accessor: Obtains all known temporary variables. The map is a * read-only map to avoid modifications outside the API. * * @return a map with all formal arguments. * @see #setLocal( Map ) * @see Local */ public java.util.Map getLocalMap() { return Collections.unmodifiableMap( this.m_localMap ); } /** * Accessor: Obtains an Profile at an arbitrary position. * * @param index is the place to look up the element at. * @exception IndexOutOfBoundsException if the referenced position * does not exist. * @see #addProfile( int, Profile ) * @see #setProfile( int, Profile ) * @see Profile */ public Profile getProfile(int index) throws IndexOutOfBoundsException { //-- check bounds for index if ((index < 0) || (index >= this.m_profileList.size())) { throw new IndexOutOfBoundsException(); } return (Profile) this.m_profileList.get(index); } /** * Accessor: Obtain a copy of the list of all Profile * specifications. * * @return a collection containing the scheduler specific environment * options for the job. * @see #setProfile( Profile[] ) * @see Profile * @deprecated Use the new Collection based interfaces */ public Profile[] getProfile() { int size = this.m_profileList.size(); Profile[] mProfile = new Profile[size]; System.arraycopy( this.m_profileList.toArray(new Profile[0]), 0, mProfile, 0, size ); return mProfile; } /** * Accessor: Counts the number of profile specifications known to this job. * * @return the number of profiles * @see Profile */ public int getProfileCount() { return this.m_profileList.size(); } /** * Accessor: Obtain a read-only copy of the list of all * Profile specifications. * * @return a collection containing the scheduler specific environment * options for the job. * @see #setProfile( Collection ) * @see Profile */ public java.util.List getProfileList() { return Collections.unmodifiableList(this.m_profileList); } /** * Accessor: Provides an iterator for the Argument list. * * @return the iterator for the Argument list. * @see Argument */ public Iterator iterateArgument() { return this.m_argumentList.iterator(); } /** * Accessor: Provides an iterator for the Call list. * * @return the iterator for the Call list. * @see Call */ public Iterator iterateCall() { return this.m_callList.iterator(); } /** * Accessor: Provides an iterator for the Declare map. 
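   *
   * <p>A typical traversal, shown as a hypothetical sketch:
   * <pre>
   *   for ( Iterator i = tr.iterateDeclare(); i.hasNext(); ) {
   *     Declare d = (Declare) i.next();
   *     System.out.println( d.getName() );
   *   }
   * </pre>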
* * @return the iterator for the Declare list. * @see Declare */ public Iterator iterateDeclare() { return this.m_declareMap.values().iterator(); } /** * Accessor: Provides an iterator for local variables. * * @return the iterator over all local variables. * @see Declare */ public Iterator iterateLocal() { return this.m_localMap.values().iterator(); } /** * Accessor: Provides an iterator for the Profile list. * * @return the iterator for the Profile list. * @see Profile */ public Iterator iterateProfile() { return this.m_profileList.iterator(); } /** * Accessor: Provides a list iterator for the Argument list. * * @return the iterator for the Argument list. * @see Argument */ public ListIterator listIterateArgument() { return this.m_argumentList.listIterator(); } /** * Accessor: Provides a list iterator for the Call list. * * @return the iterator for the Call list. * @see Call */ public ListIterator listIterateCall() { return this.m_callList.listIterator(); } /** * Accessor: Provides a list iterator for the Profile list. * * @return the iterator for the Profile list. * @see Profile */ public ListIterator listIterateProfile() { return this.m_profileList.listIterator(); } /** * Accessor: Provides a list iterator for the Argument list. * * @return the iterator for the Argument list. * @param start is the start index * @see Argument */ public ListIterator listIterateArgument( int start ) { return this.m_argumentList.listIterator(start); } /** * Accessor: Provides a list iterator for the Call list. * * @return the iterator for the Call list. * @param start is the start index * @see Call */ public ListIterator listIterateCall( int start ) { return this.m_callList.listIterator(start); } /** * Accessor: Provides a list iterator for the Profile list. * * @return the iterator for the Profile list. * @param start is the start index * @see Profile */ public ListIterator listIterateProfile( int start ) { return this.m_profileList.listIterator(start); } /** * Accessor: Removes all commandline arguments. * @see Argument */ public void removeAllArgument() { this.m_argumentList.clear(); } /** * Accessor: Removes all calls. * @see Call */ public void removeAllCall() { this.m_callList.clear(); } /** * Accessor: Removes all formal arguments. * @see Declare */ public void removeAllDeclare() { this.m_declareMap.clear(); } /** * Accessor: Removes all temporary variables. * @see Local */ public void removeAllLocal() { this.m_localMap.clear(); } /** * Accessor: Removes all profile declarations. * @see Profile */ public void removeAllProfile() { this.m_profileList.clear(); } /** * Accessor: Removes a commandline argument fragment from the * commandline. Each component in this vector with an index greater or * equal to the specified index is shifted downward to have an index * one smaller than the value it had previously. The size of this * vector is decreased by 1. * * @param index is the position to remove the argument fragment from. * @return the removed Argument. * @exception ArrayIndexOutOfBoundsException if the index was invalid. * @see Argument */ public Argument removeArgument(int index) { return (Argument) this.m_argumentList.remove(index); } /** * Accessor: Removes a single call fragment from the list of calls. * Each component in this vector with an index greater or equal to the * specified index is shifted downward to have an index one smaller * than the value it had previously. The size of this vector is * decreased by 1. * * @param index is the position to remove the call fragment from. 
* @return the removed Call. * @exception ArrayIndexOutOfBoundsException if the index was invalid. * @see Call */ public Call removeCall(int index) { return (Call) this.m_callList.remove(index); } /** * Accessor: Removes a formal argument. * * @param name is the name of the argument to remove * @return the removed formal argument. * @see Declare */ public Declare removeDeclare(String name) { return (Declare) this.m_declareMap.remove(name); } /** * Accessor: Removes a temporary variable. * * @param name is the name of the temporary variable to remove * @return the removed variable. * @see Local */ public Local removeLocal(String name) { return (Local) this.m_localMap.remove(name); } /** * Accessor: Removes a profile. Each component in this vector with an * index greater or equal to the specified index is shifted downward * to have an index one smaller than the value it had previously. The * size of this vector is decreased by 1. * * @param index is the position to remove the profile from. * @return the removed Profile. * @exception ArrayIndexOutOfBoundsException if the index was invalid. * @see Profile */ public Profile removeProfile(int index) { return (Profile) this.m_profileList.remove(index); } /** * Accessor: Overwrites an commandline argument fragment with a new one. * * @param index is the position to overwrite the element at * @param vArgument is the new commandline argument. * @exception IndexOutOfBoundsException if the position does not exist. * @see Argument */ public void setArgument(int index, Argument vArgument) throws IndexOutOfBoundsException { //-- check bounds for index if ((index < 0) || (index >= this.m_argumentList.size())) { throw new IndexOutOfBoundsException(); } this.m_argumentList.set(index, vArgument); } /** * Accessor: Replace the commandline arguments with a new commandline * argument. * * @param argumentArray is the new commandline argument array. * @see Argument * @deprecated Use the new Collection based interfaces */ public void setArgument(Argument[] argumentArray) { this.m_argumentList.clear(); this.m_argumentList.addAll( Arrays.asList(argumentArray) ); } /** * Accessor: Replace the commandline arguments with a new commandline * argument. * * @param arguments is the new commandline argument array. * @see Argument */ public void setArgument(java.util.Collection arguments) { this.m_argumentList.clear(); this.m_argumentList.addAll(arguments); } /** * Overwrites the internal separator string between neighbouring * {@link Argument} elements with new content. * * @param separator is the new string separating neighbouring arguments. * @see #getArgumentSeparator() **/ public void setArgumentSeparator( String separator ) { this.m_argumentSeparator = separator; } /** * Accessor: Overwrites a call with a new one. * * @param index is the position to overwrite the element at * @param vCall is the new call. * @exception IndexOutOfBoundsException if the position does not exist. * @see #addCall( int, Call ) * @see #getCall( int ) * @see Call */ public void setCall(int index, Call vCall) throws IndexOutOfBoundsException { //-- check bounds for index if ((index < 0) || (index >= this.m_callList.size())) { throw new IndexOutOfBoundsException(); } this.m_callList.set(index, vCall); } /** * Accessor: Replace the calls with a new call list. * * @param callArray is the new call array. 
* @see Call * @deprecated Use the new Collection based interfaces */ public void setCall(Call[] callArray) { this.m_callList.clear(); this.m_callList.addAll( Arrays.asList(callArray) ); } /** * Accessor: Replace the calls with a new call list. * * @param calls is the new call array. * @see Call */ public void setCall( Collection calls ) { this.m_callList.clear(); this.m_callList.addAll(calls); } /** * Accessor: Insert or replace a declaration with a new version. * * @param vDeclare is the declaration to insert or replace. * @see Declare * @see java.util.Hashtable#put( Object, Object ) */ public void setDeclare(Declare vDeclare) { this.m_declareMap.put(vDeclare.getName(),vDeclare); } /** * Accessor: Replace all declarations by a new set of declarations. * This method effectively exchanges all formal arguments of a job. * * @param declareArray is the new set of declarations. * @see Declare * @deprecated Use the new Collection based interfaces */ public void setDeclare(Declare[] declareArray) { //-- copy array this.m_declareMap.clear(); for (int i = 0; i < declareArray.length; i++) { this.m_declareMap.put(declareArray[i].getName(),declareArray[i]); } } /** * Accessor: Replace all declarations by a new set of declarations. * This method effectively exchanges all formal arguments of a job. * * @param declares is the new set of declarations. * @see Declare */ public void setDeclare( Collection declares ) { this.m_declareMap.clear(); for ( Iterator i=declares.iterator(); i.hasNext(); ) { Declare d = (Declare) i.next(); this.m_declareMap.put(d.getName(), d); } } /** * Accessor: Replace all declarations by a new set of declarations. * This method effectively exchanges all formal arguments of a job. * * @param declares is the new set of declarations. * @see Declare */ public void setDeclare( Map declares ) { this.m_declareMap.clear(); this.m_declareMap.putAll(declares); } /** * Accessor: Inserts or replaces a temporary variable with a new * version. * * @param vLocal is the temporary variable to insert or replace. * @see Local * @see java.util.Hashtable#put( Object, Object ) */ public void setLocal(Local vLocal) { this.m_localMap.put(vLocal.getName(),vLocal); } /** * Accessor: Replaces all declarations by a new set of declarations. * This method effectively exchanges all temporary variables. * * @param localArray is the new set of local variable declarations. * @see Local * @deprecated Use the new Collection based interfaces */ public void setLocal(Local[] localArray) { //-- copy array this.m_localMap.clear(); for (int i = 0; i < localArray.length; i++) { this.m_localMap.put(localArray[i].getName(),localArray[i]); } } /** * Accessor: Replaces all declarations by a new set of declarations. * This method effectively exchanges all temporary variables. * * @param locals is the new set of temporary variable declarations. * @see Local */ public void setLocal( Collection locals ) { this.m_localMap.clear(); for ( Iterator i=locals.iterator(); i.hasNext(); ) { Local d = (Local) i.next(); this.m_localMap.put(d.getName(), d); } } /** * Accessor: Replace all declarations by a new set of declarations. * This method effectively exchanges all temporary variables. * * @param locals is the new set of declarations. * @see Local */ public void setLocal( Map locals ) { this.m_localMap.clear(); this.m_localMap.putAll(locals); } /** * Accessor: Overwrites a profile with a new profile * * @param index is the position to overwrite the profile at. * @param vProfile is the new profile to use in overwriting. 
* @exception IndexOutOfBoundsException if the position does not exist. * @see Profile */ public void setProfile(int index, Profile vProfile) throws IndexOutOfBoundsException { //-- check bounds for index if ((index < 0) || (index >= this.m_profileList.size())) { throw new IndexOutOfBoundsException(); } this.m_profileList.set(index, vProfile); } /** * Accessor: Replace the internal profiles with a new list. * * @param profileArray is the new list of profiles to use for the job. * @see Profile * @deprecated Use the new Collection based interfaces */ public void setProfile( Profile[] profileArray ) { this.m_profileList.clear(); this.m_profileList.addAll( Arrays.asList(profileArray) ); } /** * Accessor: Replace the internal profiles with a new list. * * @param profiles is the new list of profiles to use for the job. * @see Profile */ public void setProfile( java.util.Collection profiles ) { this.m_profileList.clear(); this.m_profileList.addAll( profiles ); } /** * Constructs dynamically a short descriptive, hopefully unique * identifier for this transformation. * * @return a string describing the transformation * @see Object#hashCode() */ public String identify() { // short and long ID are identical for transformations. return super.shortID(); } /** * Dumps the content of the given element into a string. This function * traverses all sibling classes as necessary and converts the * data into textual output. */ public void toString( Writer stream ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); stream.write( "TR " ); stream.write( this.identify() ); stream.write( '(' ); // write formal args if ( this.m_declareMap.size() > 0 ) { stream.write( newline ); for ( Iterator i=this.m_declareMap.values().iterator(); i.hasNext(); ) { stream.write('\t'); ((Declare) i.next()).toString(stream); if ( i.hasNext() ) stream.write("," + newline); } } stream.write( " )" ); stream.write( newline ); stream.write( '{' ); stream.write( newline ); if ( this.isSimple() ) { for ( Iterator i=this.m_argumentList.iterator(); i.hasNext(); ) { stream.write( " " ); ((Argument) i.next()).toString(stream); stream.write( ';' ); stream.write(newline); } } else { if ( this.m_localMap.size() > 0 ) { for ( Iterator i=this.m_localMap.values().iterator(); i.hasNext(); ) { stream.write( " " ); ((Local) i.next()).toString(stream); stream.write( ';' ); stream.write(newline); } stream.write(newline); } for ( Iterator i=this.m_callList.iterator(); i.hasNext(); ) { stream.write( " " ); ((Call) i.next()).toString(stream); stream.write( ';' ); stream.write(newline); } } for ( Iterator i=this.m_profileList.iterator(); i.hasNext(); ) { stream.write( " " ); ((Profile) i.next()).toString(stream); stream.write( ';' ); stream.write(newline); } stream.write( '}' ); stream.write(newline); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. 
   * If neither
   * empty nor null, each element will be prefixed with this prefix,
   * and the root element will map the XML namespace.
   * @exception IOException if something fishy happens to the stream.
   */
  public void toXML( Writer stream, String indent, String namespace )
    throws IOException
  {
    String newline = System.getProperty( "line.separator", "\r\n" );
    String tag = ( namespace != null && namespace.length() > 0 ) ?
      namespace + ":transformation" : "transformation";

    if ( indent != null && indent.length() > 0 ) stream.write( indent );
    stream.write( '<' );
    stream.write( tag );
    super.toXML(stream);

    // add argument separator, if it is not a single space.
    // FIXME: This attribute can only appear in simple TR!
    if ( this.m_argumentSeparator == null ||
         ! this.m_argumentSeparator.equals(" ") ) {
      stream.write( " argumentSeparator=\"" );
      if ( this.m_argumentSeparator != null )
        stream.write( quote(this.m_argumentSeparator,true) );
      stream.write('"');
    }

    if ( this.m_declareMap.size() + this.m_argumentList.size() +
         this.m_callList.size() + this.m_profileList.size() == 0 ) {
      // empty transformation, no fargs, no body
      // FIXME: A compound TR must not be empty!
      stream.write( "/>" );
    } else {
      // done with opening tag
      stream.write( '>' );
      if ( indent != null ) stream.write( newline );

      String newindent = indent==null ? null : indent + " ";
      for ( Iterator i=this.m_declareMap.values().iterator(); i.hasNext(); ) {
        ((Declare) i.next()).toXML( stream, newindent, namespace );
      }

      if ( this.isSimple() ) {
        for ( Iterator i=this.m_argumentList.iterator(); i.hasNext(); ) {
          ((Argument) i.next()).toXML( stream, newindent, namespace );
        }
      } else {
        for ( Iterator i=this.m_localMap.values().iterator(); i.hasNext(); ) {
          ((Local) i.next()).toXML( stream, newindent, namespace );
        }
        // a compound TR must have at least one call!
        for ( Iterator i=this.m_callList.iterator(); i.hasNext(); ) {
          ((Call) i.next()).toXML( stream, newindent, namespace );
        }
      }

      for ( Iterator i=this.m_profileList.iterator(); i.hasNext(); ) {
        ((Profile) i.next()).toXML( stream, newindent, namespace );
      }

      // close tag
      if ( indent != null && indent.length() > 0 ) stream.write( indent );
      stream.write( "</" );
      stream.write( tag );
      stream.write( '>' );
    }
    if ( indent != null ) stream.write( newline );
  }
}
pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Executable.java0000644000175000017500000000552311757531137024245 0ustar ryngerynge
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file ../GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
package org.griphyn.vdl.classes;

import org.griphyn.vdl.classes.*;
import java.util.*;

/**
 * This class is a leftover from an earlier version, and now solely
 * here for the purposes of providing the Condor universe constants.
 *
 * @author Jens-S. Vöckler
 * @author Yong Zhao
 * @version $Revision: 50 $
 */
public class Executable
{
  /**
   * Condor vanilla universe to run unmodified jobs.
   */
  public static final int CONDOR_VANILLA = 0;

  /**
   * Condor standard universe to run condor_compiled jobs.
   */
  public static final int CONDOR_STANDARD = 1;

  /**
   * Condor scheduler universe to run on the submit host.
   */
  public static final int CONDOR_SCHEDULER = 2;

  /**
   * Condor globus universe to talk to a GRAM system.
   */
  public static final int CONDOR_GLOBUS = 3;

  /**
   * Condor PVM universe to run PVM jobs.
   */
  public static final int CONDOR_PVM = 4;

  /**
   * Condor Java universe to run jobs inside a JVM.
   */
  public static final int CONDOR_JAVA = 5;

  /**
   * Condor MPI universe to run MPI jobs.
   */
  public static final int CONDOR_MPI = 6;

  /**
   * Predicate to determine, if an integer is within the valid range for
   * Condor universes.
   *
   * @param x is the integer to test for interval membership.
   * @return true, if the integer satisfies {@link Executable#CONDOR_VANILLA}
   * ≤ x ≤ {@link Executable#CONDOR_MPI}, false otherwise.
   */
  public static boolean isInRange( int x )
  {
    return ((x >= Executable.CONDOR_VANILLA) && (x <= Executable.CONDOR_MPI));
  }

  /**
   * Converts an integer into the symbolic Condor universe represented by
   * the constant.
   *
   * @param x is the integer with the universe to symbolically convert
   * @return a string with the symbolic universe name, or null, if the
   * constant is out of range.
   */
  public static String toString( int x )
  {
    switch ( x ) {
      case Executable.CONDOR_VANILLA: return "vanilla";
      case Executable.CONDOR_STANDARD: return "standard";
      case Executable.CONDOR_SCHEDULER: return "scheduler";
      case Executable.CONDOR_GLOBUS: return "globus";
      case Executable.CONDOR_PVM: return "pvm";
      case Executable.CONDOR_JAVA: return "java";
      case Executable.CONDOR_MPI: return "mpi";
      default: return null;
    }
  }
}
pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Call.java0000644000175000017500000004352211757531137023040 0ustar ryngerynge
/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file ../GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
package org.griphyn.vdl.classes;

import edu.isi.pegasus.common.util.Separator;
import org.griphyn.vdl.util.SequenceGenerator;
import java.util.*;
import java.io.IOException;
import java.io.Writer;
import java.io.Serializable;

/**
 * Call is an implementation of an anonymous
 * Derivation. A call describes the mutable part
 * concerning input, processing, and output (IPO). Calls can
 * only be part of a CompoundTransformation.

* * A call parametrizes the template provided by a * Transformation with actual values. Think of a call * as something akin to a C function call. The call provides the * actual parameter to a job. The immutable parts are hidden in a * Transformation.
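 *
 * <p>A minimal, hypothetical sketch (the transformation name is invented;
 * only constructors and accessors declared in this class are used):
 * <pre>
 *   Call c = new Call( "hello" );                 // invoke TR "hello"
 *   int n = c.getPassCount();                     // no actual arguments yet: 0
 *   boolean uses = c.containsLFN( "f.dat", -1 );  // any-linkage search
 * </pre>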

* * FIXME: A Call is essentially an anonymous * Derivation that occurs within a * CompoundTransformation. Thus, the first two should * share code. Also, the latter two already share code. Therefore, * the class hierarchies need to be re-designed, and attribute groups * should be out-sourced. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision$ * * @see Definition * @see Definitions * @see Transformation * @see Derivation */ public class Call extends VDL implements HasPass, Serializable { /** * Though a Call may carry a name of its own, most of the time * calls are anonymous. A call must, however, provide the name of * the Transformation that it calls. * * @see Definition * @see Transformation */ private String m_uses; /** * The namespace in which a call resides can differ from the * namespace that the transformation lives in. This argument provides * the namespace of the Transformation to call. * @see Definition * @see Transformation */ private String m_usesspace; /** * Any Transformation may exist in multiple versions. * This argument specifies the maximum permissible version that can * be used. FIXME: versioning is not really supported. */ private String m_maxIncludeVersion; /** * Any Transformation may exist in multiple versions. * This argument specifies the minimum permissible version that can * be used. FIXME: versioning is not really supported. */ private String m_minIncludeVersion; /** * Actual arguments used when calling a {@link Transformation} are * matched up with the formal arguments of the transformation by their * names. */ private TreeMap m_passMap; /** * Since Call is an anonymous Derivation, * we still need some unique identifiers for the call. This is a * sequence generator. */ private static SequenceGenerator s_sequence = new SequenceGenerator(); /** * This is the sequence number assigned by the c'tor to this call. */ private String m_id; /** * ctor. */ public Call() { this.m_id = "anon" + Call.s_sequence.generate(); this.m_passMap = new TreeMap(); } /** * Convenience ctor: Names the used Transformation. * * @param uses is the name of the Transformation. * @see Transformation */ public Call( String uses ) { this.m_id = "anon" + Call.s_sequence.generate(); this.m_passMap = new TreeMap(); this.m_uses = uses; } /** * Convenience ctor: Supplies the used Transformation * as well as the permissible version range. * * @param uses is the name of the Transformation. * @param min is the minimum inclusive permissible version. * @param max is the maximum inclusive permissible version. * @see Transformation */ public Call( String uses, String min, String max ) { this.m_id = "anon" + Call.s_sequence.generate(); this.m_passMap = new TreeMap(); this.m_uses = uses; this.m_minIncludeVersion = min; this.m_maxIncludeVersion = max; } /** * Accessor: Adds an actual argument to the bag of arguments. * * @param vPass is the new actual argument to add. * @see Pass */ public void addPass( Pass vPass ) { this.m_passMap.put(vPass.getBind(),vPass); } /* * won't work with maps * public void addPass( int index, Pass vPass ) throws IndexOutOfBoundsException { this.m_passList.insertElementAt(vPass, index); } */ /** * Accessor: Provides an iterator for the bag of actual arguments. * @return the iterator for Pass elements.
* @see Pass * @see java.util.Enumeration * @deprecated Use the new Collection based interfaces */ public Enumeration enumeratePass() { // return this.m_passMap.elements(); return Collections.enumeration(this.m_passMap.values()); } /** * Determines all LFN instances of a given scalar that match the * specified linkage. This is a higher-level method employing the * given API. Note that a linkage of NONE will also not be found in * wildcard search mode. * * @param linkage is the linkage to check for, -1 for any linkage. * @return a list of all logical filenames that match the linkage and * were part of the scalar. The result may be an empty list, if no such * result were to be found. For a linkage of -1, complete LFNs will be * returned, for any other linkage, just the filename will be * returned. * * @see Value#getLFNList( int ) * @see LFN */ public java.util.List getLFNList( int linkage ) { java.util.List result = new ArrayList(); for ( Iterator i = this.iteratePass(); i.hasNext(); ) { Value actual = ((Pass) i.next()).getValue(); result.addAll( actual.getLFNList(linkage) ); } return result; } /** * Determines if the list contains an LFN of the specified linkage. * The logic uses short-circuit evaluation, thus finding things is * faster than not finding things. Searching a list is a potentially * expensive method. * * @param filename is the name of the LFN * @param linkage is the linkage to check for, -1 for any linkage type. * @return true if the LFN is contained in the scalar, false otherwise. * * @see Value#containsLFN( String, int ) * @see LFN */ public boolean containsLFN( String filename, int linkage ) { for ( Iterator i = this.iteratePass(); i.hasNext(); ) { Value actual = ((Pass) i.next()).getValue(); if ( actual.containsLFN( filename, linkage ) ) return true; } return false; } /** * Accessor: Obtains the maximum inclusive version permissible for * binding to a {@link Transformation}. * * @return the maximum inclusive version number. * @see #setMaxIncludeVersion( java.lang.String ) */ public String getMaxIncludeVersion() { return this.m_maxIncludeVersion; } /** * Accessor: Obtains the minimum inclusive version permissible for * binding to a {@link Transformation}. * * @return the minimum inclusive version number. * @see #setMinIncludeVersion( java.lang.String ) */ public String getMinIncludeVersion() { return this.m_minIncludeVersion; } /** * Accessor: Obtains an actual argument identified by the bound variable. * * @param name is the binding name. * @return the value bound to the given name, or null, if not found. * @see Pass */ public Pass getPass(String name) { return (Pass) this.m_passMap.get(name); } /** * Accessor: Obtains the bag of actual arguments as array. Note that the * order is arbitrary. * * @return an array containing all bound variables. * @see Pass * @deprecated Use the new Collection based interfaces */ public Pass[] getPass() { int size = this.m_passMap.size(); Pass[] mPass = new Pass[size]; this.m_passMap.values().toArray(mPass); return mPass; } /** * Accessor: Counts the number of actual arguments. * * @return the number of actual arguments in the internal bag. */ public int getPassCount() { return this.m_passMap.size(); } /** * Accessor: Gets a list of all values that constitute the current * content. This list is read-only. * * @return a list with Pass elements. * @see Pass */ public java.util.List getPassList() { return Collections.unmodifiableList( new ArrayList(this.m_passMap.values()) ); } /** * Accessor: Obtains all actual arguments.
The map is a read-only * map to avoid modifications outside the API. * * @return a map with all actual arguments. * @see Pass */ public java.util.Map getPassMap() { return Collections.unmodifiableMap( this.m_passMap ); } /** * Accessor: Provides an iterator for the bag of actual arguments. * @return an iterator to walk the Pass list with. * @see Pass */ public Iterator iteratePass() { return this.m_passMap.values().iterator(); } /* NOT APPLICABLE * * Accessor: Provides an iterator for the bag of actual arguments. * @return an iterator to walk the Pass list with. * @see Pass * public ListIterator listIteratePass() { return (new ArrayList( this.m_passMap.values() ).listIterator()); } */ /** * Accessor: Obtains the name of the logical {@link Transformation} * that this call refers to. * * @return the name of the Transformation to call. * @see #setUses( java.lang.String ) */ public java.lang.String getUses() { return this.m_uses; } /** * Accessor: Obtains the namespace of the logical {@link Transformation} * that this call refers to. * * @return the namespace of the Transformation to call. * @see #setUsesspace( java.lang.String ) */ public java.lang.String getUsesspace() { return this.m_usesspace; } /** * Instance method for matching an external version against the inclusive * version range. * @param version is an externally supplied version to be checked, * if it is within the inclusive interval of min and max. * @return true, if the version is in range, false otherwise. * @see Derivation#match( String, String, String ) */ public boolean match( String version ) { return Derivation.match( version, this.m_minIncludeVersion, this.m_maxIncludeVersion ); } /** * Accessor: Removes all actual arguments. Effectively empties the bag. */ public void removeAllPass() { this.m_passMap.clear(); } /** * Accessor: Removes a specific actual argument. * * @param name is the bound variable name of the argument to remove. * @return the object that was removed, or null, if not found. * @see Pass */ public Pass removePass( String name ) { return (Pass) this.m_passMap.remove(name); } /** * Accessor: Sets the maximum inclusive permissible version of * a logical transformation to run with. * * @param miv is the (new) maximum inclusive version. * @see #getMaxIncludeVersion() */ public void setMaxIncludeVersion(String miv ) { this.m_maxIncludeVersion = miv == null ? null : miv.trim(); } /** * Accessor: Sets the minimum inclusive permissible version of * a logical transformation to run with. * * @param miv is the (new) minimum inclusive version. * @see #getMinIncludeVersion() */ public void setMinIncludeVersion(String miv) { this.m_minIncludeVersion = miv == null ? null : miv.trim(); } /** * Accessor: Adds a new or overwrites an existing actual argument. * * @param vPass is a new actual argument with bound name and value. * @see Pass */ public void setPass(Pass vPass) { this.m_passMap.put(vPass.getBind(),vPass); } /** * Accessor: Replaces the bag of actual arguments with new arguments. * * @param passArray is the new actual argument list. * @see Pass * @deprecated Use the new Collection based interfaces */ public void setPass(Pass[] passArray) { //-- copy array this.m_passMap.clear(); for (int i = 0; i < passArray.length; i++) { this.m_passMap.put(passArray[i].getBind(),passArray[i]); } } /** * Accessor: Replaces the bag of actual arguments with a bag of * new arguments. * * @param passes is the new actual argument collection.
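 * <p>
 * A sketch of wholesale replacement (illustrative only; it assumes
 * that Pass and Scalar offer the constructors used below, which are
 * not shown in this file):
 * <pre>
 *   java.util.List args = new java.util.ArrayList();
 *   args.add( new Pass( "a", new Scalar( new Text("x") ) ) ); // assumed ctors
 *   call.setPass( args );   // discards all previous bindings
 * </pre>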
* @see Pass */ public void setPass(Collection passes) { this.m_passMap.clear(); for ( Iterator i=passes.iterator(); i.hasNext(); ) { Pass p = (Pass) i.next(); this.m_passMap.put(p.getBind(),p); } } /** * Accessor: Replaces the bag of actual arguments with a map of * new arguments. * * @param passes is the new actual argument map. * @see Pass */ public void setPass( Map passes ) { this.m_passMap.clear(); this.m_passMap.putAll(passes); } /** * Accessor: Sets a new name for a logical Transformation * to call. * * @param uses is the new name of the Transformation to use. * @see #getUses() * @see Transformation */ public void setUses(String uses) { this.m_uses = uses; } /** * Accessor: Sets a new namespace identifier for a logical * Transformation to call. * * @param usesspace is the new namespace of the * Transformation. * @see #getUsesspace() * @see Transformation */ public void setUsesspace(String usesspace) { this.m_usesspace = usesspace; } /** * Generates a pseudo id for this Call. * FIXME: With the advent of a database, we'll need to fix this to * something like the primary key. */ public String shortID() { return this.m_id; } /** * Since calls are anonymous derivations, this function can only * construct the identification of the mapped transformation. * * @return a string describing the call */ public String identify() { StringBuffer result = new StringBuffer(); result.append( this.m_id ); result.append( "->" ); // and now for the called part result.append( Separator.combine( this.m_usesspace, this.m_uses, getMinIncludeVersion(), getMaxIncludeVersion() ) ); // result return result.toString(); } /** * Dumps the content of the given element into a string. This function * traverses all sibling classes as necessary and converts the * data into textual output. Note that order of the actual arguments * is not preserved. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @exception IOException if something fishy happens to the stream. */ public void toString( Writer stream ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String me = this.identify(); stream.write( "call " ); stream.write( me.substring( me.indexOf("->")+2 ) ); stream.write( '(' ); if ( this.m_passMap.size() > 0 ) { stream.write( newline ); for ( Iterator i=this.m_passMap.values().iterator(); i.hasNext(); ) { stream.write( '\t' ); ((Pass) i.next()).toString(stream); if ( i.hasNext() ) stream.write( "," + newline ); } } stream.write( " )" ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @see org.griphyn.vdl.Chimera#writeAttribute( Writer, String, String ) * @exception IOException if something fishy happens to the stream.
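 * <p>
 * For orientation, the emitted element takes roughly this shape
 * (attribute values and child elements are purely illustrative):
 * <pre>
 *   &lt;call usesspace="test" uses="findrange"&gt;
 *     &lt;!-- one child element per actual argument --&gt;
 *   &lt;/call&gt;
 * </pre>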
*/ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":call" : "call"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " usesspace=\"", this.m_usesspace ); writeAttribute( stream, " uses=\"", this.m_uses ); writeAttribute( stream, " minIncludeVersion=\"", this.m_minIncludeVersion ); writeAttribute( stream, " maxIncludeVersion=\"", this.m_maxIncludeVersion ); if ( this.m_passMap.size() == 0 ) { // empty argument list stream.write( "/>" ); } else { stream.write( '>' ); if ( indent != null ) stream.write( newline ); // dump content String newindent = indent==null ? null : indent+" "; for ( Iterator i=this.m_passMap.values().iterator(); i.hasNext(); ) { ((Pass) i.next()).toXML( stream, newindent, namespace ); } if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/LFN.java0000644000175000017500000007404711757531137022610 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.util.*; import java.io.Writer; import java.io.IOException; import java.io.Serializable; /** * This class captures the logical filename and its linkage. Also, * some static methods allow using the linkage constants outside * the class. * * LFN extends the Leaf class by adding * a filename and linkage type. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2003 $ * * @see Leaf * @see Text * @see Use * * @see Value * @see Scalar * @see List */ public class LFN extends Leaf implements Cloneable, Serializable { /** * Linkage type: no linkage, usually used for constants etc. * It can also be used to indicate that the linkage is unknown. * The NONE linkage does not participate in DAG construction. */ public static final int NONE = 0; /** * Linkage type: input file. */ public static final int INPUT = 1; /** * Linkage type: output file. */ public static final int OUTPUT = 2; /** * Linkage type: file is used as input and output. Please note that * this linkage does not allow for DAG linking. */ public static final int INOUT = 3; /** * The filename is the logical name of the file. With the help of * the replica location service (RLS), the physical filename is * determined by the concrete planner. */ private String m_filename; /** * The linkage type of the logical file aids the linkage process. */ private int m_link = LFN.NONE; /** * Predicate to determine, if an integer is within the valid range for * linkage types. * * @param x is the integer to test for interval containment.
* @return true, if the integer satisfies {@link LFN#NONE} * ≤ x ≤ {@link LFN#INOUT}, false otherwise. */ public static boolean isInRange( int x ) { return ((x >= LFN.NONE) && (x <= LFN.INOUT)); } /** * Converts an integer into the symbolic linkage type represented by * the constant. * * @param x is the integer with the linkage type to symbolically convert * @return a string with the symbolic linkage name, or null, if the * constant is out of range. */ public static String toString( int x ) { switch ( x ) { case LFN.NONE: return "none"; case LFN.INPUT: return "input"; case LFN.OUTPUT: return "output"; case LFN.INOUT: return "inout"; default: return null; } } /** * Marks a filename for registration in a replica catalog. If marked * with true, the replica registration will not take place. This is * useful for transient or non-important results. Regular, tracked * files are marked with false. * * @see #m_dontTransfer * @see #m_temporary */ private boolean m_dontRegister = false; /** * Transfer mode: The transfer of the file to the result collector * is mandatory. Failure to transfer the file will make the workflow * fail. */ public static final int XFER_MANDATORY = 0; // false /** * Transfer mode: The transfer of the file to the result collector * is optional. Failure to transfer the file will not abort * the workflow. */ public static final int XFER_OPTIONAL = 1; /** * Transfer mode: The file will not be transferred to the result * collector. */ public static final int XFER_NOT = 2; // true /** * Predicate to determine, if an integer is within the valid range for * transfer modes. * * @param x is the integer to test for interval containment. * @return true, if the integer satisfies {@link LFN#XFER_MANDATORY} * ≤ x ≤ {@link LFN#XFER_NOT}, false otherwise. */ public static boolean transferInRange( int x ) { return ((x >= LFN.XFER_MANDATORY) && (x <= LFN.XFER_NOT)); } /** * Converts an integer into the symbolic transfer mode represented by * the constant. * * @param x is the integer with the transfer mode to symbolically convert * @return a string with the symbolic transfer mode name, or null, if the * constant is out of range. */ public static String transferString( int x ) { switch ( x ) { case LFN.XFER_MANDATORY: return "true"; case LFN.XFER_OPTIONAL: return "optional"; case LFN.XFER_NOT: return "false"; default: return null; } } /** * Type of File: Denotes a data file. They are generally looked up in a replica * catalog. */ public static final int TYPE_DATA = 0; /** * Type of File: Denotes an executable file. They are generally looked up in a * transformation catalog. */ public static final int TYPE_EXECUTABLE = 1; /** * Type of File: Denotes a pattern. They are generally looked up in a * pattern catalog. */ public static final int TYPE_PATTERN = 2; /** * Predicate to determine, if an integer is within the valid range for * file types. * * @param x is the integer to test for interval containment. * @return true, if the integer satisfies {@link LFN#TYPE_DATA} * ≤ x ≤ {@link LFN#TYPE_PATTERN}, false otherwise. */ public static boolean typeInRange( int x ) { return ((x >= LFN.TYPE_DATA) && (x <= LFN.TYPE_PATTERN)); } /** * Converts an integer into the symbolic file type represented by * the constant. * * @param x is the integer with the file type to symbolically convert * @return a string with the symbolic type name, or null, if the * constant is out of range.
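 * <p>
 * A minimal sketch of the intended round-trip between constant and
 * symbolic name (hypothetical usage):
 * <pre>
 *   String s = LFN.typeString( LFN.TYPE_EXECUTABLE ); // "executable"
 *   int t = LFN.typeInt( s );             // back to LFN.TYPE_EXECUTABLE
 * </pre>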
*/ public static String typeString( int x ) { switch ( x ) { case TYPE_DATA: return "data"; case TYPE_EXECUTABLE: return "executable"; case TYPE_PATTERN: return "pattern"; default: return null; } } /** * Converts a String into the corresponding integer value. * * @param x is the String to symbolically convert * @return an integer with the value or -1 if not valid. */ public static int typeInt( String x ) { int result = -1; if( x == null ){ return result; } if( x.equalsIgnoreCase( "data" ) ){ result = TYPE_DATA; } else if( x.equalsIgnoreCase( "executable" ) ){ result = TYPE_EXECUTABLE; } else if( x.equalsIgnoreCase( "pattern" ) ){ result = TYPE_PATTERN; } return result; } /** * Marks a filename for transfer to the result collector. If marked * with true, the file is usually a temporary file, and will not be * transferred to the output collector. Inter-pool transfers may still * happen in multi-pool mode. Regular, tracked files are marked with * false. Optional transfers have a special mark. * * @see #m_dontRegister * @see #m_temporary */ private int m_dontTransfer = XFER_MANDATORY; /** * If a filename is marked transient, the higher level planners might * have some notion where to place it, or how to name it. Lower level * planners are not necessarily required to follow this hint. * * @see #m_dontRegister * @see #m_dontTransfer */ private String m_temporary = null; /** * If a filename is marked as optional, its non-existence must not * stop a workflow. Regular files, however, are not optional. */ private boolean m_optional = false; /** * The type of the filename, whether it refers to a data, pattern or executable. */ private int m_type = TYPE_DATA; /** * Creates and returns a copy of this object. * @return a new instance. */ public Object clone() { // pass the file type as well, so it survives cloning return new LFN( this.m_filename, this.m_link, this.m_temporary, this.m_dontRegister, this.m_dontTransfer, this.m_optional, this.m_type ); } /** * ctor. */ public LFN() { super(); } /** * Default ctor: create an instance with a logical filename. The linkage * defaults to {@link LFN#NONE}. * * @param filename is the logical filename to store. */ public LFN( String filename ) { super(); this.m_filename = filename; this.m_dontRegister = false; this.m_dontTransfer = XFER_MANDATORY; } /** * ctor: create a file with a name and linkage. * * @param filename is the logical filename to store. * @param link is the linkage of the file to remember. * @throws IllegalArgumentException if the linkage does not match the * legal range. */ public LFN( String filename, int link ) throws IllegalArgumentException { super(); this.m_filename = filename; this.m_dontRegister = false; this.m_dontTransfer = XFER_MANDATORY; if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException(); } /** * ctor: create a possibly transient file with a name, linkage and hint. * * @param filename is the logical filename to store. * @param link is the linkage of the file to remember. * @param hint is an expression for a temporary filename choice. * If it is not null, the files will neither be marked for registration * nor for transfer to the output collector. * @throws IllegalArgumentException if the linkage does not match the * legal range.
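 * <p>
 * A hypothetical transient file: supplying a hint marks the file as
 * neither registered nor transferred:
 * <pre>
 *   LFN f = new LFN( "f.a", LFN.OUTPUT, "/tmp/f.a" );
 *   // now f.getRegister() == false and f.getTransfer() == LFN.XFER_NOT
 * </pre>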
*/ public LFN( String filename, int link, String hint ) throws IllegalArgumentException { super(); this.m_filename = filename; if ( (this.m_temporary = hint) == null ) { this.m_dontRegister = false; this.m_dontTransfer = XFER_MANDATORY; } else { this.m_dontRegister = true; this.m_dontTransfer = XFER_NOT; } if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException(); } /** * ctor: Creates a filename given almost all specs. This is a backward * compatible constructor, as it lacks access to the optional transfer * attribute. * * @param filename is the logical filename to store. * @param link is the linkage of the file to remember. * @param hint is an expression for a temporary filename choice. * @param dontRegister whether to register with a replica catalog. * @param dontTransfer whether to transfer the file to the collector. * @throws IllegalArgumentException if the linkage does not match the * legal range, or the transfer mode does not match its legal range. * @since 1.21 * * @deprecated */ public LFN( String filename, int link, String hint, boolean dontRegister, int dontTransfer ) throws IllegalArgumentException { super(); this.m_filename = filename; this.m_temporary = hint; this.m_dontRegister = dontRegister; if ( LFN.transferInRange(dontTransfer) ) this.m_dontTransfer = dontTransfer; else throw new IllegalArgumentException("Illegal transfer mode"); if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException("Illegal linkage type"); } /** * ctor: Creates a filename given almost all specs. This is a backward * compatible constructor, as it lacks access to the file type * attribute. * * @param filename is the logical filename to store. * @param link is the linkage of the file to remember. * @param hint is an expression for a temporary filename choice. * @param dontRegister whether to register with a replica catalog. * @param dontTransfer whether to transfer the file to the collector. * @param optional whether the file is optional or required. * @throws IllegalArgumentException if the linkage does not match the * legal range, or the transfer mode does not match its legal range. * @since 1.23 * * @deprecated */ public LFN( String filename, int link, String hint, boolean dontRegister, int dontTransfer, boolean optional ) throws IllegalArgumentException { super(); this.m_filename = filename; this.m_temporary = hint; this.m_dontRegister = dontRegister; this.m_optional = optional; if ( LFN.transferInRange(dontTransfer) ) this.m_dontTransfer = dontTransfer; else throw new IllegalArgumentException("Illegal transfer mode"); if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException("Illegal linkage type"); } //The new constructors that need to be added later, after the deprecation //ends for the above constructors. Karan Oct 24, 2007 /** * ctor: Creates a filename given almost all specs. This constructor * uses positive logic for the register and transfer attributes. * * @param filename is the logical filename to store. * @param link is the linkage of the file to remember. * @param hint is an expression for a temporary filename choice. * @param register whether to register with a replica catalog. * @param transfer whether to transfer the file to the collector. * @throws IllegalArgumentException if the linkage does not match the * legal range, or the transfer mode does not match its legal range.
*/ // public LFN( String filename, int link, String hint, // boolean register, int transfer ) // throws IllegalArgumentException // { // super(); // this.m_filename = filename; // this.m_temporary = hint; // this.m_dontRegister = !register; // if ( LFN.transferInRange( transfer ) ) // this.m_dontTransfer = transfer; // else // throw new IllegalArgumentException("Illegal transfer mode"); // // if ( LFN.isInRange(link) ) // this.m_link = link; // else // throw new IllegalArgumentException("Illegal linkage type"); // } /** * ctor: Creates a filename given almost all specs. This constructor * uses positive logic for the register and transfer attributes. * * @param filename is the logical filename to store. * @param link is the linkage of the file to remember. * @param hint is an expression for a temporary filename choice. * @param register whether to register with a replica catalog. * @param transfer whether to transfer the file to the collector. * @param optional whether the file is optional or required. * @throws IllegalArgumentException if the linkage does not match the * legal range, or the transfer mode does not match its legal range. */ // public LFN( String filename, int link, String hint, // boolean register, int transfer, boolean optional ) // throws IllegalArgumentException // { // super(); // this.m_filename = filename; // this.m_temporary = hint; // this.m_dontRegister = !register; // this.m_optional = optional; // if ( LFN.transferInRange( transfer ) ) // this.m_dontTransfer = transfer; // else // throw new IllegalArgumentException("Illegal transfer mode"); // if ( LFN.isInRange(link) ) // this.m_link = link; // else // throw new IllegalArgumentException("Illegal linkage type"); // } /** * ctor: Creates a filename given all specs, including the file type. * * @param filename is the logical filename to store. * @param link is the linkage of the file to remember. * @param hint is an expression for a temporary filename choice. * @param dontRegister whether to register with a replica catalog. * @param dontTransfer whether to transfer the file to the collector. * @param optional whether the file is optional or required. * @param type whether the file is data|executable|pattern * * @throws IllegalArgumentException if the linkage does not match the * legal range, or the transfer mode does not match its legal range. * @since 1.23 */ public LFN( String filename, int link, String hint, boolean dontRegister, int dontTransfer, boolean optional, int type ) throws IllegalArgumentException { this( filename, link, hint, dontRegister, dontTransfer, optional ); if ( LFN.typeInRange( type ) ) this.m_type = type; else throw new IllegalArgumentException("Illegal File type"); } // /** // * @deprecated Use the finer control of {@link #getDontRegister} // * and {@link #getDontTransfer}. // * // * @return true, if the current filename instance points to // * a transient (dontRegister, dontTransfer) file. False for all other // * cases. // */ // public boolean getIsTransient() // { // return ( this.m_dontRegister && this.m_dontTransfer ); // } /** * Accessor: Obtains the linkage type from the object. * * @return the linkage type of the current object. Note that * LFN objects default to no linkage. * @see #setLink(int) */ public int getLink() { return this.m_link; } /** * Accessor: Obtains the logical filename of the object. * * @return the logical filename.
* @see #setFilename( java.lang.String ) */ public String getFilename() { return this.m_filename; } /** * Accessor: Obtains the predicate on registering with a replica * catalog. * * @return true if the file will be registered with a replica catalog. * * @see #setRegister( boolean ) * * @since 2.1 */ public boolean getRegister() { return !this.m_dontRegister; } /** * Accessor: Returns the type of the LFN. * * @return the type of the LFN * * * @see #setType( int ) * * @since 2.1 */ public int getType( ){ return this.m_type; } /** * Accessor: Obtains the predicate on registering with a replica * catalog. * * @return false if the file will be registered with a replica catalog. * @see #setRegister( boolean ) * @see #getRegister() * @deprecated * @since 1.21 */ public boolean getDontRegister() { return this.m_dontRegister; } /** * Accessor: Obtains the transfer mode. * * @return the transfer mode of the file, one of * {@link LFN#XFER_MANDATORY}, {@link LFN#XFER_OPTIONAL}, or * {@link LFN#XFER_NOT}. * * @see #setTransfer( int ) * * @since 2.1 */ public int getTransfer() { return this.m_dontTransfer; } /** * Accessor: Obtains the transfer mode. * * @return the transfer mode of the file. * * @deprecated * * @see #getTransfer() * @see #setTransfer( int ) * * @since 1.21 */ public int getDontTransfer() { return this.m_dontTransfer; } /** * Accessor: Obtains the optionality of the file. * * @return false, if the file is required, or true, if it is optional. * @see #setOptional( boolean ) * @since 1.23 */ public boolean getOptional() { return this.m_optional; } /** * Accessor: Obtains the file name suggestion for a transient file. * If a filename is marked transient, the higher level planners might * have some notion where to place it, or how to name it. Lower level * planners are not necessarily required to follow this hint. * * @return the transient name suggestion of the file. The current * settings will always be returned, regardless of the transiency * state of the file. * @see #setTemporary(String) */ public String getTemporary() { return this.m_temporary; } // /** // * @deprecated Use the finer control of {@link #setDontRegister} and // * {@link #setDontTransfer} for transiency control. // * // * @param transient is the transience state of this filename instance. // * dontRegister and dontTransfer will both be set to the value of // * transient. // * // * @see #getIsTransient() // */ // public void setIsTransient( boolean isTransient ) // { this.m_dontRegister = this.m_dontTransfer = isTransient; } /** * Accessor: Sets the linkage type. * * @param link is the new linkage type to use. Please note that it * must match the range of legal values. * @throws IllegalArgumentException if the range is beyond legal values. * @see #getLink() */ public void setLink( int link ) throws IllegalArgumentException { if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException(); } /** * Accessor: Sets the filename. * * @param fn is the new logical filename. * @see #getFilename() */ public void setFilename( String fn ) { this.m_filename = fn; } /** * Accessor: Sets the predicate on registering with a replica catalog. * * @param register is true, if the file should be registered with a * replica catalog.
* * * @see #getRegister( ) * * @since 2.1 */ public void setRegister( boolean register ) { this.m_dontRegister = !register; } /** * Accessor: Sets the type of the LFN. * * @param type the type of the LFN * * * @see #getType( ) * * @since 2.1 */ public void setType( int type ){ if ( typeInRange( type ) ) { this.m_type = type; } else{ throw new IllegalArgumentException( "Invalid LFN type " + type ); } } /** * Accessor: Sets the predicate on registering with a replica catalog. * * @param dontRegister is false, if the file should be registered with a * replica catalog. * @see #getDontRegister() * @since 1.21 * @deprecated * * @see #setRegister( boolean ) */ public void setDontRegister( boolean dontRegister ) { this.m_dontRegister = dontRegister; } /** * Accessor: Sets the transfer mode. * * @param transfer the transfer flag * * @exception IllegalArgumentException if the transfer mode is outside * its legal range. * @see #getTransfer( ) * @see LFN#XFER_MANDATORY * @see LFN#XFER_OPTIONAL * @see LFN#XFER_NOT * * @since 2.1 */ public void setTransfer( int transfer ) throws IllegalArgumentException { if ( LFN.transferInRange( transfer ) ) this.m_dontTransfer = transfer; else throw new IllegalArgumentException(); } /** * Accessor: Sets the transfer mode. * * @param dontTransfer is the transfer mode; use {@link LFN#XFER_NOT} * if the file should not be transferred to the output collector. * @exception IllegalArgumentException if the transfer mode is outside * its legal range. * * @deprecated * * @see #getDontTransfer( ) * @see LFN#XFER_MANDATORY * @see LFN#XFER_OPTIONAL * @see LFN#XFER_NOT * @since 1.21 */ public void setDontTransfer( int dontTransfer ) throws IllegalArgumentException { if ( LFN.transferInRange(dontTransfer) ) this.m_dontTransfer = dontTransfer; else throw new IllegalArgumentException(); } /** * Accessor: Sets the optionality of the file. * * @param optional false, if the file is required, or true, if it is * optional. * @see #getOptional() * @since 1.23 */ public void setOptional( boolean optional ) { this.m_optional = optional; } /** * Accessor: Sets a file name suggestion for a transient file. If a * filename is marked transient, the higher level planners might have * some notion where to place it, or how to name it. Lower level * planners are not necessarily required to follow this hint. * * @param name is a transient name suggestion for this filename instance. * No automatic marking of transiency will be done! * @see #getTemporary() */ public void setTemporary( String name ) { this.m_temporary = name; } /** * Predicate to determine, if the output can be abbreviated. Filenames * can be abbreviated, if one of these two conditions is met: The * hint is null and dontRegister is false and * dontTransfer is mandatory, or the hint exists, and dontRegister * is true and dontTransfer is no transfer. * * @param temp is the temporary hint * @param dr is the value of dontRegister * @param dt is the value of dontTransfer * @param opt is whether a given file is optional or not * @return true, if the filename can use abbreviated mode * */ public static boolean abbreviatable( String temp, boolean dr, int dt, boolean opt ) { if ( opt ) return false; else return ( ( temp == null && ! dr && dt == LFN.XFER_MANDATORY ) || ( temp != null && dr && dt == LFN.XFER_NOT ) ); } /** * Convenience function to call the static test, if a filename can * use the abbreviated notation. * * @return true, if abbreviatable notation is possible.
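 * <p>
 * A sketch of both abbreviatable cases and one counter-example
 * (all values hypothetical):
 * <pre>
 *   LFN.abbreviatable( null, false, LFN.XFER_MANDATORY, false ); // true
 *   LFN.abbreviatable( "tmp", true, LFN.XFER_NOT, false );       // true
 *   LFN.abbreviatable( null, false, LFN.XFER_MANDATORY, true );  // false
 * </pre>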
* @see #abbreviatable( String, boolean, int, boolean ) */ private boolean abbreviatable() { return LFN.abbreviatable( this.m_temporary, this.m_dontRegister, this.m_dontTransfer, this.m_optional ); } /** * Convert the logical filename and linkage into something human readable. * The output is also slightly nudged towards machine parsability. * * @return a textual description of the element and its attributes. */ public String toString() { // slight over-allocation is without harm StringBuffer result = new StringBuffer( this.m_filename.length() + 32 ); result.append( "@{" ); result.append( LFN.toString(this.m_link) ); result.append( ":\"" ); result.append( escape(this.m_filename) ); if ( this.m_temporary != null ) { result.append( "\":\"" ); result.append( escape(this.m_temporary) ); } result.append('"'); if ( ! abbreviatable() ) { // new mode, generate appendices result.append( '|' ); if ( this.m_optional ) result.append('o'); if ( ! this.m_dontRegister ) result.append('r'); if ( this.m_dontTransfer != LFN.XFER_NOT ) result.append( this.m_dontTransfer == LFN.XFER_OPTIONAL ? 'T' : 't'); } result.append( '}' ); return result.toString(); } /** * Prints the current content onto the stream. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @throws IOException if something happens to the stream. */ public void toString( Writer stream ) throws IOException { stream.write( "@{" ); stream.write( LFN.toString(this.m_link) ); stream.write( ":\"" ); stream.write( escape(this.m_filename) ); // risk NullPointerException if ( this.m_temporary != null ) { stream.write( "\":\"" ); stream.write( escape(this.m_temporary) ); } stream.write( '"' ); if ( ! abbreviatable() ) { // new mode, generate appendices stream.write( '|' ); if ( this.m_optional ) stream.write('o'); if ( ! this.m_dontRegister ) stream.write('r'); if ( this.m_dontTransfer != LFN.XFER_NOT ) stream.write( this.m_dontTransfer == LFN.XFER_OPTIONAL ? 'T' : 't'); } stream.write( "}" ); } /** * Dumps the state of the current element as XML output. This method * converts the data into pretty-printed XML output meant for machine * consumption. * * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * * @return a String which contains the state of the current class * and its siblings using XML. Note that these strings might become large. */ public String toXML( String indent ) { // slight over-allocation is without harm StringBuffer result = new StringBuffer(128); if ( indent != null ) result.append(indent); result.append( "<lfn file=\"" ).append( quote(this.m_filename,true) ); result.append( "\" link=\"" ).append( LFN.toString(this.m_link) ); result.append( "\" register=\"" ).append( Boolean.toString(!this.m_dontRegister) ); result.append( "\" transfer=\"" ).append( LFN.transferString(this.m_dontTransfer) ); result.append( "\" optional=\"" ).append( Boolean.toString(this.m_optional) ); result.append( "\" type=\"" ).append( LFN.typeString(this.m_type) ); if ( this.m_temporary != null ) { result.append( "\" temporaryHint=\"" ).append( quote(this.m_temporary,true) ); } result.append( "\"/>" ); if ( indent != null ) result.append( System.getProperty( "line.separator", "\r\n" ) ); return result.toString(); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace.
* @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); if ( namespace != null && namespace.length() > 0 ) { stream.write( namespace ); stream.write( ':' ); } stream.write( "lfn" ); writeAttribute( stream, " file=\"", this.m_filename ); writeAttribute( stream, " link=\"", LFN.toString(this.m_link) ); writeAttribute( stream, " register=\"", Boolean.toString(!this.m_dontRegister) ); writeAttribute( stream, " transfer=\"", LFN.transferString(this.m_dontTransfer) ); writeAttribute( stream, " optional=\"", Boolean.toString(this.m_optional) ); writeAttribute( stream, " type=\"", LFN.typeString( this.m_type ) ); // null-safe writeAttribute( stream, " temporaryHint=\"", this.m_temporary ); stream.write( "/>" ); if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/VDL.java0000644000175000017500000000216511757531137022610 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.Chimera; import java.io.Serializable; /** * This abstract class defines a common base for all VDLx JAPI objects. * Since all necessary functionality is described in {@link Chimera}, * this class is empty. It exists for grouping purposes. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ public abstract class VDL extends Chimera implements Serializable { // empty class, existence just for grouping purposes } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Text.java0000644000175000017500000001204711757531137023107 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.util.*; import java.io.IOException; import java.io.Writer; import java.io.Serializable; /** * This class extends the base class Leaf by adding * an attribute to store the content of a pure textual element. The * Text element is designed to be a leaf class in the * instance tree. * * @author Jens-S. 
Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Leaf * @see Text * @see LFN */ public class Text extends Leaf implements Cloneable, Serializable { /** * Stores the content of the textual element. */ private String m_content; /** * Creates and returns a copy of this object. * @return a new instance. */ public Object clone() { // java.lang.String implements inherently copy-on-write. return new Text(this.m_content); } /** * Default ctor. Calls the parent initialization. */ public Text() { super(); } /** * Ctor to initialize the content while constructing the class. * This is a convenience ctor. */ public Text( String content ) { super(); this.m_content = content; } /** * Gets the content state of this object. The text may contain * other elements which are not quoted or changed in any way, * because the text element is designed to be a leaf node. * * @return The current state of content. The text may be null. * @see #setContent(String) */ public String getContent() { return this.m_content; } /** * Overwrites the internal state with new content. The supplied content * will become effectively the active state of the object. Usually, this * method will be called during SAX assembly of the instance structure. * * @param content is the new state to register. * @see #getContent() **/ public void setContent( String content ) { this.m_content = content; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. This method overwrites the base class default as it can be * more efficiently implemented. * * @return The current content enclosed in quotes. */ public String toString() { return ( this.m_content == null ? "\"\"" : "\"" + this.m_content + "\"" ); } /** * Prints the current content onto the stream. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @throws IOException if something happens to the stream. */ public void toString( Writer stream ) throws IOException { stream.write( '"' ); if ( this.m_content != null ) stream.write( escape(this.m_content) ); stream.write( '"' ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? 
namespace + ":text" : "text"; if ( indent != null && indent.length() > 0 ) stream.write( indent ); if ( this.m_content != null && this.m_content.length() > 0 ) { stream.write( '<' ); stream.write( tag ); stream.write( '>' ); stream.write( quote(this.m_content,false) ); stream.write( "' ); } else { stream.write( '<' ); stream.write( tag ); stream.write( "/>" ); } if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/IllegalTransformationException.java0000644000175000017500000000313311757531137030336 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; /** * This exception is thrown if a Transformation * simultaneously displays child elements for a simple transformation * and child elements for a compound transformation. A method is not * required to declare in its throws clause any subclasses of * RuntimeException that might be thrown during the execution of the * method but not caught. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Transformation */ public class IllegalTransformationException extends java.lang.RuntimeException { /** * Constructs a IllegalTransformationException with no * detail message. */ public IllegalTransformationException() { super(); } /** * Constructs a IllegalTransformationException with the * specified detailed message. * * @param s is the detailled message. */ public IllegalTransformationException( String s ) { super(s); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Local.java0000644000175000017500000002731111757531137023215 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.util.*; import java.io.IOException; import java.io.Writer; import java.io.Serializable; /** * This class defines the temporary variables in compound * Transformation. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Transformation */ public class Local extends VDL implements Serializable { /** * Each temporary variable has a name to which it is bound. */ private String m_name; /** * Each temporary variable has a specific type. There are currently * only the type of {@link Scalar} and {@link List}. 
*/ private int m_containerType = Value.SCALAR; /** * For linking the DAG we need to know if the argument is passed into * the transformation, produced by the transformation, or has some * other behavior. However, temporary variables are usually transient, * and thus either of type none or inout. */ private int m_link = LFN.NONE; /** * The initialization value of a temporary variable is mandatory. */ private Value m_value; /** * Default ctor: needed for JDO */ public Local() { super(); } /** * ctor: Construct a temporary variable with a binding and default * container type. * * @param name is the binding. * @param ct is the container type, the type of the argument. * @throws IllegalArgumentException if the container type is outside * the legal range [{@link Value#SCALAR}, {@link Value#LIST}]. */ public Local( String name, int ct ) throws IllegalArgumentException { super(); this.m_name = name; if ( Value.isInRange(ct) ) this.m_containerType = ct; else throw new IllegalArgumentException( "container type outside legal range" ); } /** * ctor: Construct a new temporary variable with a binding and default * container type, as well as a linkage for the variable. * * @param name is the binding. * @param ct is the container type, the type of the variable. * @param link is the linkage type for the variable. * @throws IllegalArgumentException if the container type is outside * the legal range [{@link Value#SCALAR}, {@link Value#LIST}], or * the linkage is outside [{@link LFN#NONE}, {@link LFN#INOUT}]. */ public Local( String name, int ct, int link ) throws IllegalArgumentException { super(); this.m_name = name; if ( Value.isInRange(ct) ) this.m_containerType = ct; else throw new IllegalArgumentException( "container type outside legal range" ); if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException( "linkage type outside legal range" ); } /** * ctor: Construct a new temporary variable with a binding and default * value. The container type will be determined from the default value. * * @param name is the binding. * @param value is either a {@link Scalar} or {@link List} value. */ public Local( String name, Value value ) { super(); this.m_name = name; this.m_value = value; this.m_containerType = value.getContainerType(); } /** * ctor: Construct a temporary variable with a binding and default * value. The container type will be determined from the default * value. The linkage is set separately. * * @param name is the binding. * @param value is either a {@link Scalar} or {@link List} value. * @param link is the linkage of the value for DAG creation. * @throws IllegalArgumentException if the linkage is outside * [{@link LFN#NONE}, {@link LFN#INOUT}]. */ public Local( String name, Value value, int link ) throws IllegalArgumentException { super(); this.m_name = name; this.m_value = value; this.m_containerType = value.getContainerType(); if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException( "linkage type outside legal range" ); } /** * Accessor: Obtains the value of the variable. * * @return the default as {@link Scalar} or {@link List}, or * null in case of error. * @see #setValue(Value) */ public Value getValue() { return this.m_value; } /** * Accessor: Obtains the name of this temporary variable. * * @return the name to refer to this variable. * @see #setName( java.lang.String ) */ public String getName() { return this.m_name; } /** * Accessor: Obtains the linkage type of the variable. * * @return the linkage as an integer. 
* @see #setLink(int) * @see LFN#NONE * @see LFN#INPUT * @see LFN#OUTPUT * @see LFN#INOUT */ public int getLink() { return this.m_link; } /** * Accessor: Obtains the container type. Note that the container * type of the value will be considered first. * * @return the container type of the variable. * @see #setContainerType(int) * @see Value#SCALAR * @see Value#LIST */ public int getContainerType() { return ( m_value != null ) ? m_value.getContainerType() : m_containerType; } /** * Accessor: Establishes a new name for this variable. FIXME: Note * that no checks will be done concerning the uniqueness of the new * name. * * @param name is the new binding. * @see #getName() */ public void setName( String name ) { this.m_name = name; } /** * Accessor: Sets a new linkage type for the variable. * * @param link is the new linkage type from {@link LFN}. * @throws IllegalArgumentException if the argument is outside * the valid range. * @see #getLink() */ public void setLink( int link ) throws IllegalArgumentException { if ( ! LFN.isInRange(link) ) throw new IllegalArgumentException(); this.m_link = link; } /** * Accessor: Sets a new container type for the variable. The new * container type must match the value's container type. * * @param containerType is the new integer describing a container type. * @throws IllegalArgumentException if the container type is neither * {@link Value#SCALAR} nor {@link Value#LIST}. * @see #getContainerType() */ public void setContainerType( int containerType ) throws IllegalArgumentException { if ( m_value == null ) { // no default value known, need to set container type if ( Value.isInRange(containerType) ) this.m_containerType = containerType; else throw new IllegalArgumentException( "container type outside legal range"); } else { // there is a default value, new type must match default if ( m_value.getContainerType() != containerType ) throw new IllegalArgumentException( "new container type does not match container type of default value" ); } } /** * Accessor: Sets or overwrites the default value of the variable. * The new default must match the container type. * * @param value is the new default value. * @throws IllegalArgumentException if the container type of the new * value and of the registered container type for the parameter don't * match. * @see #getValue() */ public void setValue(Value value) throws IllegalArgumentException { if ( value.getContainerType() == this.m_containerType ) this.m_value = value; else // container types do not match throw new IllegalArgumentException( "container type of new value does not match the Local's container type" ); } /** * Dumps the content of this element into a string for human * consumption. * * @return a textual description of the element and its attributes. * Be advised that these strings might become large. */ public String toString() { String value = this.m_value == null ? "" : this.m_value.toString(); StringBuffer result = new StringBuffer( 12 + this.m_name.length() + value.length() ); result.append( LFN.toString( this.m_link ) ); result.append( ' ' ); result.append( this.m_name ); if ( this.m_containerType == Value.LIST ) result.append("[]"); if ( this.m_value != null ) { result.append('='); result.append(value); } return result.toString(); } /** * Dumps the content of the variable definition into a string for * human consumption. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @exception IOException if something fishy happens to the stream.
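 * <p>
 * For illustration, a hypothetical scalar variable named tmp with
 * linkage NONE and no default value renders as:
 * <pre>
 *   none tmp
 * </pre>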
*/ public void toString( Writer stream ) throws IOException { stream.write( LFN.toString( this.m_link ) ); stream.write( ' ' ); stream.write( escape(this.m_name) ); if ( this.m_containerType == Value.LIST ) stream.write("[]"); if ( this.m_value != null ) { stream.write('='); this.m_value.toString(stream); } } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":local" : "local"; if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " name=\"", this.m_name ); // null-safe if ( LFN.isInRange(this.m_link) ) writeAttribute( stream, " link=\"", LFN.toString(this.m_link) ); if ( this.m_containerType == Value.LIST ) { stream.write( " container=\"list\"" ); } else if ( this.m_containerType == Value.SCALAR ) { stream.write( " container=\"scalar\"" ); } if ( this.m_value == null ) { // no default value stream.write( "/>" ); } else { // there is a default value String newindent = indent==null ? null : indent + " "; stream.write( '>' ); if ( indent != null ) stream.write( newline ); // dump content this.m_value.toXML( stream, newindent, namespace ); // write close tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Definitions.java0000644000175000017500000006201211757531137024433 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import edu.isi.pegasus.common.util.Currently; import org.griphyn.vdl.classes.*; import org.griphyn.vdl.util.*; import java.util.*; import java.io.*; /** * This class implements the container to carry any number of * Transformation and Derivation instances. * In addition, it captures some attributes from the root element of * the XML document. * * @author Jens-S. 
Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * * @see Definition * @see Transformation * @see Derivation */ public class Definitions extends VDL implements Serializable { /** * The "official" namespace URI of the VDLx schema. */ public static final String SCHEMA_NAMESPACE = "http://www.griphyn.org/chimera/VDL"; /** * The "not-so-official" location URL of the VDLx schema definition. */ public static final String SCHEMA_LOCATION = "http://www.griphyn.org/chimera/vdl-1.24.xsd"; // attributes of "definitions" element /** * Captures the global namespace given to all child elements that * do not set their own namespace definition. */ private String m_vdlns; /** * Captures the version of the XML document. */ private String m_version; /** * Children are either {@link Transformation}s or {@link Derivation}s, * both of which are {@link Definition}s. */ private ArrayList m_definitionList; /** * ctor. It is strongly suggested that you set the namespace and * version of the object before adding any other {@link Definition} * objects. */ public Definitions() { this.m_definitionList = new ArrayList(); this.m_vdlns = null; this.m_version = null; } /** * ctor: Create a new container, using the given namespace. It is * highly recommended that you set the version number before you add * any {@link Definition} instance. * * @param vdlns is the namespace to use for elements w/o namespace. */ public Definitions( String vdlns ) { this.m_definitionList = new ArrayList(); this.m_vdlns = vdlns; this.m_version = null; } /** * ctor: Create a new container, using a namespace and version. * * @param vdlns is the namespace to propagate to children w/o namespace. * @param version is a version of the XML document used to transport * the data. */ public Definitions( String vdlns, String version ) { this.m_definitionList = new ArrayList(); this.m_vdlns = vdlns; this.m_version = version; } /** * Table for updating an old Use.linkage with a new Use.linkage. This table uses * the old/stored linkage in the top row, and the new/found linkage * in the first column. (-) denotes no action to be taken, and # * an illegal combination.

* <pre>

   *       | -1  | NONE| IN  | OUT | IO
   *   ----+-----+-----+-----+-----+----
   *    -1 | (-) | (-) | (-) | (-) | (-)
   *   NONE| NONE| (-) |  #  |  #  |  # 
   *     IN| IN  |  #  | (-) | IO  | (-)
   *    OUT| OUT |  #  | IO  | (-) | (-)
   *     IO| IO  |  #  | IO  | IO  | (-)
   *   ----+-----+-----+-----+-----+----
* </pre>
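* For example, per the table a variable first recorded with linkage IN
* and later used with linkage OUT is promoted to IO, while combining
* NONE with any of IN, OUT, or IO is an illegal mix (#).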
* The table uses -1 for no action to take, and -2 for an illegal state. */ private int m_state[][] = { { -1, -1, -1, -1, -1 }, // newlink == -1 { 0, -1, -2, -2, -2 }, // newlink == NONE { 1, -2, -1, 3, -1 }, // newlink == IN { 2, -2, 3, -1, -1 }, // newlink == OUT { 3, -2, 3, 3, -1 } }; /** * Checks the linkage of a transformation between a declared, previously * used, and currently used variable of the same name. * * @param use is a table of previously used variables and their linkage * @param u is the variable at the "cursor position". * @param tr is the transformation to be checked. */ private void checkLinkage( Map use, Use u, Transformation tr ) { if ( use.containsKey( u.getName() ) ) { // key exists, check/modify linkage int linkage = ((Integer) use.get( u.getName() )).intValue(); int newlink = u.getLink(); int result = this.m_state[newlink+1][linkage+1]; if ( result == -2 ) { // illegal combination of linkages, usually NONE w/ I,O,IO throw new IncompatibleLinkageException( "Transformation " + tr.shortID() + " uses variable " + u.getName() + " with incompatible linkages" ); } else if ( result > -1 ) { // store new result use.put( u.getName(), new Integer(result) ); } } else { // key does not exist, add use.put( u.getName(), new Integer(u.getLink()) ); } } /** * Cleans up a definition and performs abstract type checks before * admitting it into the document. * * @exception IllegalArgumentException will be thrown if the * Definition is neither a Derivation nor a * Transformation. This should not happen. * @exception UndeclaredVariableException will be thrown, if * a Transformation uses a bound variable via Use, * but fails to declare the formal argument with Declare. * @exception IncompatibleLinkageException will be thrown, if * the declared linkage of a formal argument is incompatible with the * usage of such a bound variable within a Transformation. * @exception IllegalTransformationException will be thrown, if * the Transformation has simultaneously Call * and Argument items. This exception is bound to vanish * with the next major re-design. * * @see Transformation * @see Derivation * @see Use * @see Declare */ protected void sanitizeDefinition( Definition d ) throws IllegalArgumentException, IncompatibleLinkageException, UndeclaredVariableException, IllegalTransformationException { String newline = System.getProperty( "line.separator", "\r\n" ); // update definition with namespace and version, if necessary // Note: results may still be null if ( d.getNamespace() == null && this.m_vdlns != null ) d.setNamespace(this.m_vdlns); if ( d.getVersion() == null && this.m_version != null ) d.setVersion(this.m_version); switch ( d.getType() ) { case Definition.TRANSFORMATION: Transformation tr = (Transformation) d; HashMap use = new HashMap(); // a TR must not be simultaneously simple and compound if ( tr.getArgumentCount() > 0 && tr.getCallCount() > 0 ) throw new IllegalTransformationException( "TR " + tr.identify() + " is simultaneously simple and compound" + newline + tr.toXML("\t",null) ); // // collect all unique bindings of class Use // if ( tr.isSimple() ) { // collect from Argument list for ( Iterator e=tr.iterateArgument(); e.hasNext(); ) { for ( Iterator f=((Argument) e.next()).iterateLeaf(); f.hasNext(); ) { Leaf l = (Leaf) f.next(); if ( l instanceof Use ) checkLinkage( use, (Use) l, tr ); } } } else { // collect from Call list, this is slightly more complex... // only 3..4 nested for loops, why do you worry... 
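// The traversal below mirrors the VDLx nesting: a compound TR consists
// of Call elements, each Call carries Pass elements, and each Pass wraps
// a Value that is either a Scalar (a flat list of Leaf nodes) or a List
// (a list of Scalars). Every Use leaf found at the bottom is folded into
// the linkage table via checkLinkage().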
for ( Iterator e=tr.iterateCall(); e.hasNext(); ) { for ( Iterator f=((Call) e.next()).iteratePass(); f.hasNext(); ) { Value v = (Value) ((Pass) f.next()).getValue(); switch ( v.getContainerType() ) { case Value.SCALAR: for ( Iterator g=((Scalar) v).iterateLeaf(); g.hasNext(); ) { Leaf l = (Leaf) g.next(); if ( l instanceof Use ) checkLinkage( use, (Use) l, tr ); } break; case Value.LIST: for ( Iterator h=((List) v).iterateScalar(); h.hasNext(); ) { for ( Iterator g=((Scalar) h.next()).iterateLeaf(); g.hasNext(); ) { Leaf l = (Leaf) g.next(); if ( l instanceof Use ) checkLinkage( use, (Use) l, tr ); } } break; } } } } // collect from Profile list for ( Iterator e=tr.iterateProfile(); e.hasNext(); ) { for ( Iterator f=((Profile) e.next()).iterateLeaf(); f.hasNext(); ) { Leaf l = (Leaf) f.next(); if ( l instanceof Use ) checkLinkage( use, (Use) l, tr ); } } // check usages against all declared and temporary variables. Also // check linkage. Note that the declared variables must have a // linkage. It is permissable to declare variables, but not use // them. for ( Iterator i=use.keySet().iterator(); i.hasNext(); ) { String name = (String) i.next(); // check that the used variable is declared Declare dec = (Declare) tr.getDeclare(name); Local local = (Local) tr.getLocal(name); if ( dec == null && local == null ) throw new UndeclaredVariableException( "variable " + name + " is used, but not declared" + newline + tr.toXML("\t",null) ); // match up linkages. Note that a use linkage of -1 means // that we don't have any information on the used linkage. int dLinkage = ( dec==null ? local.getLink() : dec.getLink() ); int uLinkage = ((Integer) use.get(name)).intValue(); if ( uLinkage > -1 ) { if ( dLinkage == LFN.NONE && uLinkage != LFN.NONE || dLinkage == LFN.INPUT && uLinkage != LFN.INPUT || dLinkage == LFN.OUTPUT && uLinkage != LFN.OUTPUT || dLinkage == LFN.INOUT && uLinkage == LFN.NONE ) throw new IncompatibleLinkageException( "variable " + name + " uses incompatible linkages" + newline + tr.toXML("\t",null) ); } } break; case Definition.DERIVATION: Derivation dv = (Derivation) d; if ( dv.getUsesspace() == null ) { if ( this.m_vdlns != null ) { // either default uses namespace to vdlns dv.setUsesspace(this.m_vdlns); } else if ( d.getNamespace() != null ) { // or default uses namespace to derivation namespace dv.setUsesspace( d.getNamespace() ); } } // nothing really to check for derivations // note: Do *not* check here, if a DV has a matching TR, because // in the future, TR will be stored in distributed database(s). break; default: // this must not happen throw new IllegalArgumentException("Definition " + d.identify() + " is neither TR nor DV"); } } /** * Accessor: Appends a {@link Definition} to the container. The * namespace and version information will be, in case they are * missing, updated from the definitions namespace and version * respectively. * * @param d is the {@link Transformation} or {@link Derivation} * to append to the internal container. * @throws IndexOutOfBoundsException if the definition does not fit into * the container. */ public void addDefinition( Definition d ) throws IndexOutOfBoundsException { this.sanitizeDefinition(d); this.m_definitionList.add(d); } /** * Accessor: Inserts a {@link Definition} at a particular place. The * namespace and version information will be, in case they are * missing, updated from the definitions namespace and version * respectively.
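* <p>Usage sketch (hypothetical variables, illustrating the
* inherit-from-container behaviour described above):
* <pre>
*   Definitions defs = new Definitions( "test", "1.0" );
*   defs.addDefinition( 0, someDefinition );  // namespace/version filled in if unset
* </pre>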

* * Each component in this vector with an index greater or equal to the * specified index is shifted upward to have an index one greater than * the value it had previously. * * @param index is the position to insert a {@link Definition} * @param d is the {@link Transformation} or {@link Derivation} * to append to the internal container. * @throws IndexOutOfBoundsException if the definition does not fit into * the container. */ public void addDefinition( int index, Definition d ) throws IndexOutOfBoundsException { this.sanitizeDefinition(d); this.m_definitionList.add(index, d); } /** * Accessor: Search the database for the existence of a Definition with * the same primary keys and type as the parameter. * @param d is the Definition to search for * @return the position of the selfsame Definition, or -1 if not found. */ public int positionOfDefinition( Definition d ) { int n=0; for ( Iterator i=this.m_definitionList.iterator(); i.hasNext(); ++n ) { if ( d.equals(i.next()) ) return n; } return -1; } /** * Accessor: Provides an iterator for the list of {@link Transformation} * and {@link Derivation}. * * @return the iterator to traverse the container of {@link Definition}s. * @see java.util.Enumeration * @deprecated Use the new Collection based interfaces */ public Enumeration enumerateDefinition() { return Collections.enumeration(this.m_definitionList); } /** * Obtains a vector of all definition instances that share the same * instance type. Please note that the definitions below may change * after the vector is obtained. * * @return a vector with all {@link Transformation} or {@link Derivation} * objects. The vector may have zero size, if no such instances exist. */ public java.util.List getDefinitionOfAKind( int type ) { ArrayList result = new ArrayList(); for ( Iterator i=this.m_definitionList.iterator(); i.hasNext(); ) { Definition d = (Definition) i.next(); if ( d.getType() == type ) { result.add(d); } } return result; } /** * Accessor: Obtains a Definition at a particular place * in this container. * * @param index is the place to look up the element. * @return the Definition at the specified place. * @throws IndexOutOfBoundsException if the referenced position does * not exist. * @see Definition */ public Definition getDefinition(int index) throws IndexOutOfBoundsException { //-- check bound for index if ((index < 0) || (index >= this.m_definitionList.size())) throw new IndexOutOfBoundsException(); return (Definition) this.m_definitionList.get(index); } /** * Accessor: Obtains all {@link Definition}s available. * This array is a copy to avoid write-through modifications. * * @return an array containing either a {@link Transformation} * or {@link Derivation} at each position. * @deprecated Use the new Collection based interfaces */ public Definition[] getDefinition() { int size = this.m_definitionList.size(); Definition[] mDefinition = new Definition[size]; System.arraycopy( this.m_definitionList.toArray(new Definition[0]), 0, mDefinition, 0, size ); return mDefinition; } /** * Accessor: Counts the number of {@link Transformation} and * {@link Derivation} definitions. * @return item count. */ public int getDefinitionCount() { return this.m_definitionList.size(); } /** * Accessor: Obtains all {@link Definition}s available. * This list is read-only. * * @return an array containing either a {@link Transformation} * or {@link Derivation} at each position. 
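* <p>Iteration sketch (hypothetical container defs):
* <pre>
*   for ( Iterator i = defs.getDefinitionList().iterator(); i.hasNext(); ) {
*     Definition d = (Definition) i.next();
*     // inspect d.shortID(), d.getType(), ...
*   }
* </pre>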
*/ public java.util.List getDefinitionList() { return Collections.unmodifiableList(this.m_definitionList); } /** * Accessor: Obtains the document namespace. * @return the namespace of the document, or null, if not used. * @see #setVdlns(java.lang.String) */ public String getVdlns() { return this.m_vdlns; } /** Accessor: Obtains the document version number. * * @return the version number from the document header, or null, * if unset. Since the version number is a required attribute, * it should never return null, only an empty string. */ public String getVersion() { return this.m_version; } /** * Accessor: Provides an iterator for the list of {@link Transformation} * and {@link Derivation} references. * * @return a list iterator to traverse the container of {@link Definition}s. * @see java.util.ListIterator */ public Iterator iterateDefinition() { return this.m_definitionList.iterator(); } /** * Accessor: Provides an iterator for the list of {@link Transformation} * and {@link Derivation} references. * * @return a list iterator to traverse the container of {@link Definition}s. * @see java.util.ListIterator */ public ListIterator listIterateDefinition() { return this.m_definitionList.listIterator(); } /** * Accessor: Provides an iterator for the list of {@link Transformation} * and {@link Derivation} references. * * @param start is the starting point of the iteration. * @return a list iterator to traverse the container of {@link Definition}s. * @see java.util.ListIterator */ public ListIterator listIterateDefinition( int start ) { return this.m_definitionList.listIterator(start); } /** * Accessor: Removes all definitions we know about. * @see Definition */ public void removeAllDefinition() { this.m_definitionList.clear(); } /** * Accessor: Removes a definition. Each component in this vector with * an index greater or equal to the specified index is shifted * downward to have an index one smaller than the value it had * previously. The size of this vector is decreased by 1. * * @param index is the position to remove the argument fragment from. * @return the removed Definition. * @exception ArrayIndexOutOfBoundsException if the index was invalid. * @see Definition */ public Definition removeDefinition(int index) { return (Definition) this.m_definitionList.remove(index); } /** * Accessor: Removes a definition named by its reference. Removes the * first occurrence of the specified element in this Vector. * * @param d is a definition instance that originated from this list. * @return true, if the first occurance of the element was deleted, * false, if there was nothing found to be removed. * @see Definition */ public boolean removeDefinition( Definition d ) { return this.m_definitionList.remove(d); } /** * Accessor: Sets the component at the specified index of this vector * to be the specified object. The previous component at that position * is discarded. The index must be a value greater than or equal to 0 * and less than the current size of the vector. * * @param index is the postion at which to replace a {@link Definition}. * @param d is either a {@link Transformation} or * {@link Derivation} to use for replacement. * @throws IndexOutOfBoundsException if the index was invalid. 
*/ public Definition setDefinition(int index, Definition d) throws IndexOutOfBoundsException { //-- check bounds for index if ((index < 0) || (index >= this.m_definitionList.size())) { throw new IndexOutOfBoundsException(); } this.sanitizeDefinition(d); return (Definition) this.m_definitionList.set(index, d); } /** * Accessor: Replace all {@link Definition}s with a new list. * * @param definitionArray is an array of possibly mixed * {@link Transformation} and {@link Derivation} elements. * @deprecated Use the new Collection based interfaces */ public void setDefinition(Definition[] definitionArray) { this.m_definitionList.clear(); this.m_definitionList.addAll( Arrays.asList(definitionArray) ); } /** * Accessor: Replace all {@link Definition}s with a new list. * * @param definitions is a collection of possibly mixed * {@link Transformation} and {@link Derivation} elements. */ public void setDefinition(Collection definitions) { this.m_definitionList.clear(); this.m_definitionList.addAll(definitions); } /** * Accessor: Sets the document default namespace. * * @param vdlns is the new namespace to use. Note that the change will * not be propagated to contained elements. * @see #getVdlns() */ public void setVdlns(String vdlns) { this.m_vdlns = vdlns; } /** * Accessor: Replaces the version number of the document. * * @param version is the new version number. * @see #getVersion() */ public void setVersion( String version ) { this.m_version = version; } /** * Dumps the content of the given element into a string. This function * traverses all sibling classes as necessary and converts the * data into textual output. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @exception IOException if something fishy happens to the stream. */ public void toString( Writer stream ) throws IOException { for ( Iterator i=this.m_definitionList.iterator(); i.hasNext(); ) { ((Definition) i.next()).toString(stream); } } /** * Writes the header of the XML output. The output contains the special * strings to start an XML document, some comments, and the root element. * The latter points to the XML schema via XML Instances. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. 
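* <p>For illustration (abridged; derived from the code below): with
* namespace prefix "vdl", the root element begins as
* <pre>
*   &lt;vdl:definitions xmlns:vdl="http://www.griphyn.org/chimera/VDL"
*        xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" ...&gt;
* </pre>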
*/ public void writeXMLHeader( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); // NB: the next three literals are reconstructed; the original strings were garbled in this copy of the source. if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" ); stream.write( newline ); if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "<!-- generated: " + Currently.iso8601(false) + " -->" ); stream.write( newline ); if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "<!-- generated by: " + System.getProperties().getProperty( "user.name", "unknown" ) + " -->" ); stream.write( newline ); // start root element if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); if ( namespace != null && namespace.length() > 0 ) { stream.write( namespace ); stream.write( ':' ); } stream.write( "definitions xmlns" ); if ( namespace != null && namespace.length() > 0 ) { stream.write( ':' ); stream.write( namespace ); } stream.write( "=\""); stream.write( SCHEMA_NAMESPACE ); stream.write( "\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\""); stream.write( SCHEMA_NAMESPACE ); stream.write( ' ' ); stream.write( SCHEMA_LOCATION ); stream.write( '"' ); writeAttribute( stream, " vdlns=\"", this.m_vdlns ); writeAttribute( stream, " version=\"", this.m_version ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { // write prefix writeXMLHeader( stream, indent, namespace ); // optionally write content if ( this.m_definitionList.size() > 0 ) { String newindent = indent==null ? null : indent + " "; for ( Iterator i=this.m_definitionList.iterator(); i.hasNext(); ) { ((Definition) i.next()).toXML( stream, newindent, namespace ); } } // finish document if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); if ( namespace != null && namespace.length() > 0 ) { stream.write( namespace ); stream.write( ':' ); } stream.write( "definitions>" ); stream.write( System.getProperty( "line.separator", "\r\n" ) ); stream.flush(); // this is the only time we flush } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Use.java0000644000175000017500000003150711757531137022721 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). 
* * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.util.*; import java.io.IOException; import java.io.Writer; import java.io.Serializable; /** * Use is employed to reference bound actual arguments. Note * that actual arguments are either of type Scalar or * of type List. Each argument has a preferred linkage that * is optionally repeated in this usage class. * * Use extends the base class Leaf by adding * most attributes of all siblings. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Leaf * @see Text * @see LFN * * @see Value * @see Scalar * @see List */ public class Use extends Leaf implements Cloneable, Serializable { /** * The linkage type when refering to an argument that contains a filename. * Legal values range from LFN#NONE to LFN#INOUT. * The initial value is used to flag the non-initialized state. * * @see LFN#NONE * @see LFN#INPUT * @see LFN#OUTPUT * @see LFN#INOUT * @see LFN#isInRange(int) */ private int m_link = -1; /** * Stores the name of the bound variable from the actual argument. * This must not be empty. A value must be filled in to reach a valid * object state. */ private String m_name; /** * Stores the prefix string to be used when rendering a List. * Unused for Scalar content. * * @see Value * @see Scalar * @see List */ private String m_prefix; /** * Stores the separator string used when rendering a List. * Unused for Scalar content. * * @see Value * @see Scalar * @see List */ private String m_separator; /** * Stores the suffix string to terminate a List rendering with. * Unused for Scalar content. * * @see Value * @see Scalar * @see List */ private String m_suffix; /** * Creates and returns a copy of this object. * @return a new instance. */ public Object clone() { Use result = new Use( this.m_name, this.m_prefix, this.m_separator, this.m_suffix ); result.setAnyLink( this.m_link ); return result; } /** * Default ctor. Creates an empty object that is not valid due * to the lack of a bound variable name. To be used by the SAX * parser. */ public Use() { super(); this.m_separator = " "; // default per XML Schema this.m_prefix = this.m_suffix = ""; } /** * Convenience ctor. Creates an empty object with a bound argument * name. This ctor should be used by outside applications to assure * proper initialization of the bound argument name. * * @param name is the name of the bound argument to remember. */ public Use( String name ) { super(); this.m_name = name; this.m_separator = " "; // default per XML Schema this.m_prefix = this.m_suffix = ""; } /** * Convenience ctor. Creates an object with a bound argument name. * This ctor should be used by outside applications to assure proper * initialization of the bound argument name. * * @param name is the name of the bound argument to remember. * @param prefix is a prefix when rendering list content into a string. * @param separator is a string to be placed between list elements when * rendering a list. * @param suffix is a suffix when rendering list content into a string. * * @see Scalar * @see List */ public Use( String name, String prefix, String separator, String suffix ) { super(); this.m_name = name; this.m_prefix = prefix; this.m_separator = separator; this.m_suffix = suffix; } /** * Convenience ctor. Creates an object with a bound argument name. 
* This ctor should be used by outside applications to assure proper * initialization of the bound argument name. * * @param name is the name of the bound argument to remember. * @param link is the linkage type of the bound argument for type checking. * @throws IllegalArgumentException if the linkage is not * within the legal range between {@link LFN#NONE} and * {@link LFN#INOUT}. */ public Use( String name, int link ) throws IllegalArgumentException { super(); this.m_name = name; this.m_separator = " "; // default per XML Schema this.m_prefix = this.m_suffix = ""; if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException(); } /** * Accessor: Obtains the current state of the linkage. * * @return the linkage value. The returned value might be -1 to indicate * that the linkage was not initialized. Note that -1 is an out of range * value for linkage. * @see #setLink(int) */ public int getLink() { return this.m_link; } /** * Accessor: Obtains the name of the bound actual argument. * * @return the bound name. A misconfigured object might return an empty * or null string. * @see #setName(String) */ public String getName() { return this.m_name; } /** * Accessor: Obtains the current prefix rendering information. The * prefix is used in {@link List} rendering as front bracket. * * @return the prefix rendering string, which might be null or empty. * @see #setPrefix(String) */ public String getPrefix() { return this.m_prefix; } /** * Accessor: Obtains the current separator rendering information. The * separator is used in {@link List} rendering as element separator. * * @return the separator rendering string, which might be null or empty. * @see #setSeparator(String) */ public String getSeparator() { return this.m_separator; } /** * Accessor: Obtains the current suffix rendering information. The * suffix is used in {@link List} rendering as rear bracket. * * @return the suffix rendering string, which might be null or empty. * @see #setSuffix(String) */ public String getSuffix() { return this.m_suffix; } /** * Accessor: Sets the linkage of the bound argument. * @param link is the linkage value as integer within the range. * @throws IllegalArgumentException if the linkage is not * within the legal range between {@link LFN#NONE} and * {@link LFN#INOUT}. * @see #getLink() * @see LFN#NONE * @see LFN#INPUT * @see LFN#OUTPUT * @see LFN#INOUT * @see LFN#isInRange(int) */ public void setLink( int link ) throws IllegalArgumentException { if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException(); } private void setAnyLink( int link ) { this.m_link = link; } /** * Accessor: Sets or overwrites the name of the bound argument. * Do not use empty or null strings here. * * @param name is the new variable name to remember. * @see #getName() */ public void setName( String name ) { this.m_name = name; } /** * Accessor: Sets or overwrites the current prefix rendering information. * The prefix is used in {@link List} rendering as front bracket. * * @param prefix is a rendering string, which might be null or empty. * @see #getPrefix() */ public void setPrefix( String prefix ) { this.m_prefix = prefix; } /** * Accessor: Sets or overwrites the current separator rendering information. * The separator is used between {@link List} element during rendering. * * @param separator is a rendering string, which might be null or empty. 
* @see #getSeparator() */ public void setSeparator( String separator ) { this.m_separator = separator; } /** * Accessor: Sets or overwrites the current suffix rendering information. * The suffix is used in {@link List} rendering as rear bracket. * * @param suffix is a rendering string, which might be null or empty. * @see #getSuffix() */ public void setSuffix( String suffix ) { this.m_suffix = suffix; } /** * Dump content of this instance representation into a stream.
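* <p>Rendering sketch (derived from the code that follows): a plain use
* renders as just the escaped name; with a known linkage it renders as a
* type-cast, e.g. (input)name; with prefix, separator, or suffix set it
* renders in the ${"pre":"sep":"suf"|link:name} form.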

* FIXME: The rendering information is not dumped into the non-XML output. * * @param stream is a stream opened and ready for writing. This can * also be a string stream for efficient output. The stream interface * should be able to handle large elements efficiently. * @exception IOException if something fishy happens to the stream. */ public void toString( Writer stream ) throws IOException { boolean has_fix = ( this.m_prefix != null && this.m_prefix.length() > 0 || this.m_suffix != null && this.m_suffix.length() > 0 ); boolean has_sep = ( this.m_separator == null || ! this.m_separator.equals(" ") ); if ( has_fix || has_sep ) { // must use the tedious version stream.write( "${" ); if ( has_fix ) { // this is the ${pre:sep:suf|link:id} version stream.write('"'); if ( this.m_prefix != null ) stream.write( escape(this.m_prefix) ); stream.write("\":\""); if ( this.m_separator != null ) stream.write( escape(this.m_separator) ); stream.write("\":\""); if ( this.m_suffix != null ) stream.write( escape(this.m_suffix) ); stream.write("\"|"); } else if ( has_sep ) { // this is the ${sep|link:id} version, mind that " " is IMPLIED! // thus, ${""|link:id} is the output for any null separator, while // ${link:id} will be the output for a space separator. stream.write('"'); if ( this.m_separator != null ) stream.write( escape(this.m_separator) ); stream.write("\"|"); } if ( LFN.isInRange(this.m_link) ) { stream.write( LFN.toString(this.m_link) ); // no need to escape() stream.write( ':' ); } stream.write( escape(this.m_name) ); stream.write( '}' ); } else if ( LFN.isInRange(this.m_link) ) { // use the type-casting version stream.write( '(' ); stream.write( LFN.toString(this.m_link) ); // no need to escape() stream.write( ')' ); stream.write( escape(this.m_name) ); } else { // can use minimal version stream.write( escape(this.m_name) ); } } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); if ( namespace != null && namespace.length() > 0 ) { stream.write( namespace ); stream.write( ':' ); } stream.write( "use" ); writeAttribute( stream, " name=\"", this.m_name ); if ( LFN.isInRange(this.m_link) ) writeAttribute( stream, " link=\"", LFN.toString(this.m_link) ); if ( this.m_prefix != null && this.m_prefix.length() > 0 ) writeAttribute( stream, " prefix=\"", this.m_prefix ); // If the separator is empty, write it. We may not need to write it, // if the separator is a space. if ( this.m_separator == null || ! 
this.m_separator.equals(" ") ) { stream.write( " separator=\"" ); if ( this.m_separator != null ) stream.write( quote(this.m_separator,true) ); stream.write( '\"' ); } if ( this.m_suffix != null && this.m_suffix.length() > 0 ) writeAttribute( stream, " suffix=\"", this.m_suffix ) ; stream.write( "/>" ); if ( indent != null ) stream.write( System.getProperty( "line.separator", "\r\n" ) ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/UndeclaredVariableException.java0000644000175000017500000000304711757531137027556 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; /** * This exception is thrown if the matching between a used bound * variable and its argument list variable it refers to fails. A method * is not required to declare in its throws clause any subclasses of * RuntimeException that might be thrown during the execution of the * method but not caught. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Derivation * @see Transformation */ public class UndeclaredVariableException extends java.lang.RuntimeException { /** * Constructs a UndeclaredVariableException with no * detail message. */ public UndeclaredVariableException() { super(); } /** * Constructs a UndeclaredVariableException with the * specified detailed message. * * @param s is the detailled message. */ public UndeclaredVariableException( String s ) { super(s); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Definition.java0000644000175000017500000003676411757531137024267 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import edu.isi.pegasus.common.util.Separator; import java.util.*; import java.io.IOException; import java.io.Writer; import java.io.Serializable; /** * This is a base class which collects attributes that belong to * Transformation and Derivation.
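* <p>Sketch (hypothetical instances): d1.equals(d2) compares the concrete
* type and the (namespace, name, version) triple null-safely, while
* d1.compareTo(d2) orders by type first and then by shortID(), as the
* note below explains.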

* * Note: this class has a natural ordering that may be inconsistent with * equals due to differing implementations. The equal method will take * object type and primary key triple into consideration, making extensive * null checks. The compareTo method compares the type and short ids of * the Definitions. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * * @see Transformation * @see Derivation */ public abstract class Definition extends VDL implements Comparable, Serializable { // common constants for quick type access /** * This is the return value for {@link #getType()} from a * {@link Transformation}. */ public static final int TRANSFORMATION = 0x71077345; // shell oil /** * This is the return value for {@link #getType()} from a * {@link Derivation}. */ public static final int DERIVATION = 0xCAFEBABE; /** * This is an abstract function that must be implemented by * instantiable children, of which currently only exist * {@link Transformation} and {@link Derivation} siblings and derivatives. * * @return the integer representing the concrete definition type of a * instance. The value of -1 *might* be used to indicate an unknown * type. */ abstract public int getType(); // common attributes from child elements /** * Each transformation and derivation resides in a namespace. Mind * that namespaces are currently flat. If you need to impose any kind * of hierarchy, please do so yourself, e.g. use periods between * hierarchy intifiers. The namespace is part of the key identifying * a logical transformation. The default is null. * * @see Transformation * @see Derivation */ private String m_namespace; /** * Each transformation and derivation can be identified by a name. * For a transformation, this is part of the logical transformation * name. Derivations can no longer be anonymous. * * @see Transformation * @see Derivation */ private String m_name; /** * Each transformation and derivation has a version associated with * their definition. While a version number is highly recommended for * transformation, being part of the primary key triple into the * (future) transformation catalog, a derivation can remain without * version. The default is null. * * @see Transformation * @see Derivation */ private String m_version; // AttributeGroup "DefinitionsAG" /** * Yong's knowledgebase approach needs this. */ private String m_description; /** * Yong's knowledgebase approach needs this. */ private String m_title; /** * Yong's knowledgebase approach needs this. */ private String m_keyword; /** * Yong's knowledgebase approach needs this. */ private String m_url; /** * ctor: empty */ public Definition() { this.m_namespace = null; this.m_version = null; } /** * Convenience ctor: name the definition. The name is part of a * logical {@link Transformation}. Note that a {@link Derivation} may * remain anonymous. The namespace will be the default namespace, or * taken from the {@link Definitions}. The version remains unset. * * @param name is the name to be used for the defintion. */ public Definition( String name ) { this.m_namespace = null; this.m_version = null; this.m_name = name; } /** * Convenience ctor: name the definition. The name is part of a * logical {@link Transformation}. Note that a {@link Derivation} may * remain anonymous. The version remains unset. * * @param namespace is the namespace the name resides in. * @param name is the name to be used for the defintion. 
*/ public Definition( String namespace, String name ) { this.m_name = name; this.m_namespace = namespace; this.m_version = null; } /** * Convenience ctor: name the definition. The name is part of a * logical {@link Transformation}. Note that a {@link Derivation} may * remain anonymous. * * @param namespace is the namespace the name resides in. * @param name is the name to be used for the defintion. * @param version is the version of this definition. */ public Definition( String namespace, String name, String version ) { this.m_name = name; this.m_namespace = namespace; this.m_version = version; } /** * Implementation of the {@link java.lang.Comparable} interface. * Compares this object with the specified object for order. Returns a * negative integer, zero, or a positive integer as this object is * less than, equal to, or greater than the specified object. The * definitions are compared by their type, and by their short ids. * * @param o is the object to be compared * @return a negative number, zero, or a positive number, if the * object compared against is less than, equals or greater than * this object. * @exception ClassCastException if the specified object's type * prevents it from being compared to this Object. */ public int compareTo( Object o ) { if ( o instanceof Definition ) { Definition d = (Definition) o; int diff = d.getType() - getType(); // order is important return ( diff != 0 ? diff : d.shortID().compareTo(this.shortID()) ); } else { throw new ClassCastException( "object is not a Definition" ); } } /** * Calculate a hash code value for the object to support hash tables. * * @return a hash code value for the object. */ public int hashCode() { int result = m_namespace == null ? 0 : m_namespace.hashCode(); result = ( result << 8 ) ^ ( m_name == null ? 0 : m_name.hashCode() ); result = ( result << 8 ) ^ ( m_version == null ? 0 : m_version.hashCode() ); return ( result ^ getType() ); } /** * Accessor: match the primary key of a definition. * Note, this match is not wildcard capable. The type of the definitions * will also be checked. The primary key of a definition is the triple * namespace, name and version. This function is null-capable. * * @param type is the type identifier TRANSFORMATION or DERIVATION * @param namespace is the namespace * @param name is the name * @param version is the version * * @return true, if the primary keys match, false otherwise. */ public boolean match( int type, String namespace, String name, String version ) { return ( // check type type == this.getType() && // check namespace ( m_namespace == null && namespace == null || m_namespace != null && namespace != null && namespace.equals(m_namespace) ) && // check name ( m_name != null && name != null && name.equals(m_name) ) && // check version string ( m_version == null && version == null || m_version != null && version != null && version.equals(m_version) ) ); } /** * Accessor: match primary keys of two Definitions. * Note, this match is not wildcard capable. The type of the definitions * will also be checked. The primary key of a definition is the triple * namespace, name and version. The equals function is null-capable. * * @param obj the reference object with which to compare. * @return true, if the primary keys match, false otherwise. */ public boolean equals( Object obj ) { // ward against null if ( obj == null ) return false; // shortcut if ( obj == this ) return true; // don't compare apples with oranges if ( ! 
(obj instanceof Definition) ) return false; // now we can safely cast Definition d = (Definition) obj; return match( d.getType(), d.getNamespace(), d.getName(), d.getVersion() ); } /** * Accessor: Obtains the current description state. * * @return a string containing a descriptive remark on the definition, * or null for no description. * @see #setDescription(java.lang.String) */ public String getDescription() { return this.m_description; } /** * Accessor: Obtains the current keyword state. * * @return a string containing a collection of keywords describing the * definition, or null for no keywords. * @see #setKeyword(java.lang.String) */ public String getKeyword() { return this.m_keyword; } /** * Accessor: Obtains the current name of the definition. Note that * a name is mandatory for any {@link Transformation}, but a * {@link Derivation} may remain anonymous. * * @return the current name used for the definition. Note that derivations * may be anonymous. Returns null, if no name exists. * @see #setName(java.lang.String) */ public String getName() { return this.m_name; } /** * Accessor: Obtains the current namespace that is used for the * definition. Note that a namespace is part of the key for any * {@link Transformation}. * * @return the namespace the definition resides in, or null, if * no namespace was defined. * @see #setNamespace(java.lang.String) */ public String getNamespace() { return this.m_namespace; } /** * Accessor: Obtains the current title state. * * @return the title given to this definition, or null, if there * was no title defined. * @see #setTitle(java.lang.String) */ public String getTitle() { return this.m_title; } /** * Accessor: Obtains the current URI definition. * * @return the URL pointing to related information or a project, or * null, if no URL was registered. * @see #setUrl(java.lang.String) */ public String getUrl() { return this.m_url; } /** * Accessor: Obtains the current version of the definition. A version * is an integral part of a logical {@link Transformation}. * * @return the version number of this definition, or null, if no * version number was defined. * @see #setVersion(java.lang.String) */ public String getVersion() { return this.m_version; } /** * Accessor: Sets the description. * * @param description * @see #getDescription() */ public void setDescription(String description) { this.m_description = description; } /** * Accessor: Sets or overwrites the keyword list. * * @param keyword * @see #getKeyword() */ public void setKeyword(String keyword) { this.m_keyword = keyword; } /** * Accessor: Sets or overwrite the currently given name. * * @param name * @see #getName() */ public void setName(String name) { this.m_name = name; } /** * Accessor: Sets or overwrites the namespace identifier. * * @param namespace * @see #getNamespace() */ public void setNamespace(String namespace) { this.m_namespace = namespace; } /** * Accessor: Sets the current title for the definition. * * @param title * @see #getTitle() */ public void setTitle(String title) { this.m_title = title; } /** * Accessor: Sets the project reference. * * @param url * @see #getUrl() */ public void setUrl(String url) { this.m_url = url; } /** * Accessor: Sets the version of the definition. * * @param version * @see #getVersion() */ public void setVersion(String version) { this.m_version = version; } /** * Identify the transformation or derivation by its name. */ public abstract String identify(); /** * Create the short id from ns:id:version. 
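* For example (assuming the usual Separator notation), namespace "test",
* name "foo", and version "1.0" combine into something like
* "test::foo:1.0"; null components are omitted from the combination.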
* @param d is a Definition, or null for non-siblings * @param namespace is the namespace to use, may be null. * @param name is the name to produce the id for, should not be null. * @param version is a version string, may be null. * @return A string which textually identifies a Definition. * @exception RuntimeException, if the name and definition are both null. */ public static String shortID( Definition d, String namespace, String name, String version ) { if ( name != null ) return Separator.combine( namespace, name, version ); else if ( d != null ) return Separator.combine( namespace, Integer.toHexString(d.hashCode()), version ); else throw new RuntimeException("Definitions require valid identifiers" ); } /** * Constructs dynamically a short descriptive, hopefully unique * identifier for this derivation w/o referring to any transformation. * FIXME: Anonymous derivations get their hash code, which is well * for the first versions working without database. Later versions * with database must use some unique sequence mechanism instead. * * @return a string describing the derivation * @see Object#hashCode() */ public String shortID() { return shortID( this, this.m_namespace, this.m_name, this.m_version ); } /** * The toXML method is a partial method, to be incorporated/called * by its sibling class method of the same name. For this reason, * it does not fit the {@link VDL} interface. * * @return a string containing the attributes collected in the base class. */ public String toXML() { StringBuffer result = new StringBuffer(); if ( this.m_namespace != null ) result.append(" namespace=\"").append(quote(this.m_namespace,true)).append("\""); if ( this.m_name != null ) result.append(" name=\"").append(quote(this.m_name,true)).append("\""); if ( this.m_version != null ) result.append(" version=\"").append(quote(this.m_version,true)).append("\""); if ( this.m_description != null ) result.append(" description=\"").append(quote(this.m_description,true)).append("\""); if ( this.m_title != null ) result.append(" title=\"").append(quote(this.m_title,true)).append("\""); if ( this.m_keyword != null ) result.append(" keyword=\"").append(quote(this.m_keyword,true)).append("\""); if ( this.m_url != null ) result.append(" url=\"").append(quote(this.m_url,true)).append("\""); return result.toString(); } /** * The toXML method is a partial method, to be incorporated/called * by its sibling class method of the same name. For this reason, * it does not fit the {@link VDL} interface. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream ) throws IOException { writeAttribute( stream, " namespace=\"", this.m_namespace ); writeAttribute( stream, " name=\"", this.m_name ); writeAttribute( stream, " version=\"", this.m_version ); writeAttribute( stream, " description=\"", this.m_description ); writeAttribute( stream, " title=\"", this.m_title ); writeAttribute( stream, " keyword=\"", this.m_keyword ); writeAttribute( stream, " url=\"", this.m_url ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Scalar.java0000644000175000017500000003513011757531137023366 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.util.*; import java.io.IOException; import java.io.Writer; import java.io.Serializable; /** * This class implements one of the argument types for parameters passed * to transformations from derivations. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Value * @see List */ public class Scalar extends Value implements Cloneable, Serializable { /** * Any value passed down is an arbitrary mix of the three potential * {@link Leaf} types. * * @see Leaf * @see Text * @see LFN */ private ArrayList m_leafList; /** * Creates and returns a copy of this object. * @return a new instance. */ public Object clone() { Scalar result = new Scalar(); for ( int index=0; index < this.m_leafList.size(); ++index ) { result.addLeaf( (Leaf) this.getLeaf(index).clone() ); } return result; } /** * Default ctor. */ public Scalar() { super(); this.m_leafList = new ArrayList(); } /** * Convenience ctor: Initializes the object with the first child to * be put into the list of values. * * @param firstChild is either a LFN or Text * object. * * @see Leaf * @see LFN * @see Text */ public Scalar( Leaf firstChild ) { super(); this.m_leafList = new ArrayList(); this.m_leafList.add(firstChild); } /** * Accessor: Obtains the value type of this class. By using the abstract * method in the parent class, Scalar objects can be * distinguished from List objects without using the * instanceof operator. * * @return the fixed value of being a scalar. * @see Value#SCALAR */ public int getContainerType() { // always return Value.SCALAR; } /** * This method determines which container is being used in the abstract * base class in order to kludgy statements when printing debug info. * * @return the symblic identifier for the type of the Value. */ public String getSymbolicType() { // always return new String("Scalar"); } /** * Accessor: Adds a Leaf value to the list of values gathered * as the content of a Scalar. * * @param vLeaf is the value to append to the list. Note that only leaf * values of LFN or Text are allowed. * @throws IndexOutOfBoundsException if the value cannot be added. * @see Leaf * @see Text * @see LFN */ public void addLeaf( Leaf vLeaf ) throws IndexOutOfBoundsException { this.m_leafList.add(vLeaf); } /** * Accessor: Inserts a Leaf value into a specific position * of the list of gathered values. * * @param index is the position to insert the item into * @param vLeaf is the value to append to the list. Note that only leaf * values of LFN or Text are allowed. * @throws IndexOutOfBoundsException if the value cannot be added. * * @see Text * @see LFN */ public void addLeaf( int index, Leaf vLeaf ) throws IndexOutOfBoundsException { this.m_leafList.add(index, vLeaf); } /** * Accessor: Enumerates the internal values that constitute the content * of the Scalar element. * * @return an enumeration to walk the list with. * @deprecated Use the new Collection based interfaces */ public Enumeration enumerateLeaf() { return Collections.enumeration(this.m_leafList); } /** * Determines all LFN instances of a given scalar that match the * specified linkage. 
This is a higher-level method employing the * given API. * * @param linkage is the linkage to check for, -1 for all filenames. * @return a set of logical filename instances that match the linkage * and were part of the scalar. The result may be an empty set, if no * such result were to be found. * * @see LFN */ public java.util.List getAllLFN( int linkage ) { java.util.List result = new ArrayList(); for ( Iterator i = iterateLeaf(); i.hasNext(); ) { Leaf leaf = (Leaf) i.next(); if ( leaf instanceof LFN ) { LFN lfn = (LFN) leaf; if ( linkage == -1 || lfn.getLink() == linkage ) result.add( lfn ); // add *all* information about this file } } return result; } /** * Determines all LFN instances of a given scalar that match the * specified linkage. This is a higher-level method employing the * given API. Note that also linkage of NONE will not be found in * wildcard search mode. * * @param linkage is the linkage to check for, -1 for all filenames. * @return a set of all logical filenames that match the linkage and * were part of the scalar. The result may be an empty set, if no such * result were to be found. For a linkage of -1, complete LFNs will be * returned, for any other linkage, just the filename will be * returned. * * @see Derivation#getLFNList( int ) * @see LFN */ public java.util.List getLFNList( int linkage ) { java.util.List result = new ArrayList(); for ( Iterator i = iterateLeaf(); i.hasNext(); ) { Leaf leaf = (Leaf) i.next(); if ( leaf instanceof LFN ) { LFN local = (LFN) leaf; if ( linkage == -1 && local.getLink() != LFN.NONE ) { result.add( local ); // we need *all* information about this file } else if ( local.getLink() == linkage ) { result.add( local.getFilename() ); // we may know some things } } } return result; } /** * Determines if the scalar contains an LFN of the specified linkage. * The logic uses short-circuit evaluation, thus finding things is * faster than not finding things. * * @param filename is the name of the LFN * @param linkage is the linkage to check for, -1 for any linkage type. * @return true if the LFN is contained in the scalar, false otherwise. * * @see org.griphyn.vdl.classes.LFN */ public boolean containsLFN( String filename, int linkage ) { // sanity checks if ( filename == null ) throw new NullPointerException( "You are searching for a non-existing filename" ); for ( Iterator i = this.iterateLeaf(); i.hasNext(); ) { Leaf leaf = (Leaf) i.next(); if ( leaf instanceof LFN ) { int l_link = ((LFN) leaf).getLink(); String l_name = ((LFN) leaf).getFilename(); if ( linkage == -1 && l_link != LFN.NONE ) { if ( filename.equals(l_name) ) return true; } else if ( l_link == linkage && filename.equals(l_name) ) { return true; } } } // not found return false; } /** * Accessor: Obtains the Leaf at a certain position in the * list of leaf values. * * @param index is the position in the list to obtain a value from * @return The LFN or Text at the position. * @throws IndexOutOfBoundsException if the index points to an elment * in the list that does not contain any elments. * * @see LFN * @see Text */ public Leaf getLeaf(int index) throws IndexOutOfBoundsException { //-- check bound for index if ((index < 0) || (index >= this.m_leafList.size())) throw new IndexOutOfBoundsException(); return (Leaf) this.m_leafList.get(index); } /** * Accessor: Gets an array of all values that constitute the current * content. This array is a copy to avoid write-through modifications. * * @return an array with a mixture of either Text or * LFN values. 
* * @see LFN * @see Text * @deprecated Use the new Collection based interfaces */ public Leaf[] getLeaf() { int size = this.m_leafList.size(); Leaf[] mLeaf = new Leaf[size]; System.arraycopy( this.m_leafList.toArray(new Leaf[0]), 0, mLeaf, 0, size ); return mLeaf; } /** * Accessor: Obtains the size of the internal list of {@link Leaf}s. * * @return number of elements that an external array needs to be sized to. */ public int getLeafCount() { return this.m_leafList.size(); } /** * Accessor: Gets an array of all values that constitute the current * content. This list is read-only. * * @return an array with a mixture of either Text or * LFN values. * * @see LFN * @see Text */ public java.util.List getLeafList() { return Collections.unmodifiableList(this.m_leafList); } /** * Accessor: Enumerates the internal values that constitute the content * of the Scalar element. * * @return an iterator to walk the list with. */ public Iterator iterateLeaf() { return this.m_leafList.iterator(); } /** * Accessor: Enumerates the internal values that constitute the content * of the Scalar element. * * @return an iterator to walk the list with. */ public ListIterator listIterateLeaf() { return this.m_leafList.listIterator(); } /** * Accessor: Enumerates the internal values that constitute the content * of the Scalar element. * * @param start is the start index * @return a list iterator to walk the list with. */ public ListIterator listIterateLeaf(int start) { return this.m_leafList.listIterator(start); } /** * Accessor: Removes the content of the Scalar. */ public void removeAllLeaf() { this.m_leafList.clear(); } /** * Accessor: Remove a single item from the list of nodes. The list is * shrunken in the process. * @param index is the position at which an element is to be removed. * @return the object that was removed. The object is either a * LFN or a Text. * * @see LFN * @see Text */ public Leaf removeLeaf( int index ) { return (Leaf) this.m_leafList.remove(index); } /** * Accessor: Overwrites a LFN or Text value * at a certain position in the content-constituting list. * * @param index position to overwrite an elment in. * @param vLeaf is either a LFN or Text object. * @throws IndexOutOfBoundsException if the position pointed to is invalid. * * @see LFN * @see Text */ public void setLeaf(int index, Leaf vLeaf) throws IndexOutOfBoundsException { //-- check bounds for index if ((index < 0) || (index >= this.m_leafList.size())) { throw new IndexOutOfBoundsException(); } this.m_leafList.set(index, vLeaf); } /** * Accessor: Overwrites internal list with an external list representing * a Scalar value. * * @param leafArray is the external list of Text or * LFN objects used to overwrite things. * @see Text * @see LFN * @deprecated Use the new Collection based interfaces */ public void setLeaf(Leaf[] leafArray) { this.m_leafList.clear(); this.m_leafList.addAll( Arrays.asList(leafArray) ); } /** * Accessor: Overwrites internal list with an external list representing * a Scalar value. * * @param leaves is the external list of Text or * LFN objects used to overwrite things. * @see Text * @see LFN */ public void setLeaf(Collection leaves) { this.m_leafList.clear(); this.m_leafList.addAll(leaves); } /** * Converts the object state into textual format for human consumption. * * @return a textual description of the element and its sub-classes. * Be advised that these strings might become large. 
*/ public String toString() { StringBuffer result = new StringBuffer(40); for ( Iterator i=this.m_leafList.iterator(); i.hasNext(); ) { result.append( ((Leaf) i.next()).toString() ); } return result.toString(); } /** * Converts the object state into textual format for human consumption. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. */ public void toString( Writer stream ) throws IOException { for ( Iterator i=this.m_leafList.iterator(); i.hasNext(); ) { ((Leaf) i.next()).toString(stream); } } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":scalar" : "scalar"; String newline = System.getProperty( "line.separator", "\r\n" ); // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); // dump content String newindent = indent==null ? null : indent+"  "; for ( Iterator i=this.m_leafList.iterator(); i.hasNext(); ) { ((Leaf) i.next()).toXML( stream, newindent, namespace ); } // finalize if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/HasPass.java0000644000175000017500000001243511757531137023526 0ustar  ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import java.util.Enumeration; import java.util.List; /** * This interface defines a common base for {@link Derivation} and * {@link Call}. The latter is in a sense an anonymous DV. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ public interface HasPass { /** * Accessor: Adds an actual argument to the bag of arguments. * * @param vPass is the new actual argument to add. * @see Pass */ public void addPass( Pass vPass ) throws NullPointerException; /** * Accessor: Provides an iterator for the bag of actual arguments. 
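 * A minimal walk over the bag might look like this (the
 * hasPass instance is assumed for illustration):
 * <pre>
 *   for ( Enumeration e = hasPass.enumeratePass(); e.hasMoreElements(); ) {
 *     Pass p = (Pass) e.nextElement();   // one actual argument
 *     String bind = p.getBind();         // formal argument it binds to
 *   }
 * </pre>
 *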
* @return the iterator for Pass elements. * @see Pass * @see java.util.Enumeration */ public Enumeration enumeratePass(); /** * Determines all LFN instances from the actual arguments of a given * derivation that match the specified linkage. This is a higher-level * method employing the given interface. Note that also linkage of * NONE will not be found in wildcard search mode. * * @param linkage is the linkage type to match against, -1 for all * files. * @return a list of logical filenames from the given derivation which * match the given linkage. For a linkage of -1, complete LFNs will be * returned, for any other linkage, just the filename will be returned. * * @see Value#getLFNList( int ) * @see LFN */ public java.util.List getLFNList( int linkage ); /** * Determines if the list contains an LFN of the specified linkage. * The logic uses short-circuit evaluation, thus finding things is * faster than not finding things. Searching a list is a potentially * expensive method. * * @param filename is the name of the LFN * @param linkage is the linkage to check for, -1 for any linkage type. * @return true if the LFN is contained in the scalar, false otherwise. * * @see Value#containsLFN( String, int ) * @see LFN */ public boolean containsLFN( String filename, int linkage ); /** * Accessor: Obtains an actual argument identified by the bound variable. * * @param name is the binding name. * @return the bound value to the given name. * @see Pass */ public Pass getPass(String name); /** * Accessor: Obtains the bag of actual arguments as array. Note that the * order is arbitrary. * * @return an array containing all bound variables. * @see Pass */ public Pass[] getPass(); /** * Accessor: Counts the number of actual arguments. * * @return the number of actual arguments in the internal bag. */ public int getPassCount(); /** * Accessor: Removes all actual arguments. Effectively empties the bag. */ public void removeAllPass(); /** * Accessor: Removes a specific actual argument. * * @param name is the bound variable name of the argument to remove. * @return the object that was removed, or null, if not found. * @see Pass */ public Pass removePass( String name ); /** * Accessor: Adds a new or overwrites an existing actual argument. * * @param vPass is a new actual argument with bound name and value. * @see Pass */ public void setPass(Pass vPass); /** * Accessor: Replaces the bag of actual argument with new arguments. * * @param passArray is the new actual argument list. * @see Pass */ public void setPass(Pass[] passArray); /** * Accessor: Obtains the name of the logical {@link Transformation} * that this derivation refers to. */ public java.lang.String getUses(); /** * Accessor: Obtains the namespace of the logical {@link Transformation} * that this derivation refers to. */ public java.lang.String getUsesspace(); /** * Accessor: Obtains the maximum inclusive version permissable for * binding to a {@link Transformation}. * * @return the maximum inclusive version number. */ public String getMaxIncludeVersion(); /** * Accessor: Obtains the minimum inclusive version permissable for * binding to a {@link Transformation}. * * @return the minimum inclusive version number. */ public String getMinIncludeVersion(); /** * Identify the transformation or derivation by its name. */ public String identify(); /** * Constructs dynamically a short descriptive, hopefully unique * identifier for this derivation w/o referring to any transformation. 
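 * For illustration, assuming the usual rendering produced by
 * Separator.combine, a derivation "d1" in namespace "test" with
 * version "1.0" would yield something along the lines of
 * <pre>
 *   String id = dv.shortID();   // e.g. "test::d1:1.0" (format assumed)
 * </pre>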
* FIXME: Anonymous derivations get their hash code, which is well * for the first versions working without database. Later versions * with database must use some unique sequence mechanism instead. * * @return a string describing the derivation * @see Object#hashCode() */ public String shortID(); } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Derivation.java0000644000175000017500000005237511757531137024277 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import edu.isi.pegasus.common.util.Separator; import java.util.*; import java.io.IOException; import java.io.Writer; import java.io.Serializable; /** * Derivation is an implementation of an abstract VDL * Definition. A derivation describes the mutable part * concerning input, processing, and output (IPO) when calling an * application. The environment is part of the capture. * * A derivation parametrizes the template provided by a * Transformation with actual values. Think of a derivation * as something akin to a C function call. The derivation provides the * actual parameter to a job. The immutable parts are hidden in a * Transformation. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * * @see Definition * @see Definitions * @see Transformation */ public class Derivation extends Definition // thus implements VDL implements HasPass, Serializable { /** * Though most Derivations may have a name of their own, * most of the times, though, derivations are anonymous. A derivation * must provide the name of the Transformation that * it calls, though. * * @see Definition * @see Transformation */ private String m_uses; /** * The namespace in which a derivation resides can differ from the * namespace that the transformation lives in. This argument provides * the namespace of the Transformation to call. * @see Definition * @see Transformation */ private String m_usesspace; /** * Any Transformation may exist in multiple versions. * This argument specifies the minimum permissable version that can * be used. FIXME: versioning is not really supported. */ private String m_maxIncludeVersion; /** * Any Transformation may exist in multiple versions. * This argument specifies the maximum permissable version that can * be used. FIXME: versioning is not really supported. */ private String m_minIncludeVersion; /** * Actual arguments used when calling a {@link Transformation} are * matched up with the formal arguments of the transformation by their * names. */ private TreeMap m_passMap; /** * Type accessor for quick distinction between definitions. * @return the value of {@link Definition#DERIVATION} */ public int getType() { return Definition.DERIVATION; } /** * ctor. 
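 * A minimal construction sketch; the transformation name and a
 * Text ctor taking a string are assumed for illustration:
 * <pre>
 *   Derivation dv = new Derivation();
 *   dv.setUses( "findrange" );   // hypothetical transformation name
 *   dv.addPass( new Pass( "a", new Scalar( new Text("1.0") ) ) );
 * </pre>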
*/ public Derivation() { super(); this.m_passMap = new TreeMap(); } /** * Convenience ctor: Names a derivation and the used * Transformation * * @param name is the name of the Derivation * @param uses is the name of the Transformation * @see Transformation */ public Derivation( String name, String uses ) { super(name); this.m_passMap = new TreeMap(); this.m_uses = uses; } /** * Convenience ctor: Names a derivation and supplies the used * Transformation as well as the permissable version * range. * * @param name is the name of the Derivation. * @param uses is the name of the Transformation. * @param min is the minimum inclusive permissable version. * @param max is the maximum inclusive permissable version. * @see Transformation */ public Derivation( String name, String uses, String min, String max ) { super(name); this.m_passMap = new TreeMap(); this.m_uses = uses; this.m_minIncludeVersion = min; this.m_maxIncludeVersion = max; } /** * Complete ctor: Constructs the full three part Derivation * identifier, and four part Transformation mapper. * * @param ns is then namespace of the Derivation. * @param name is the name of the Derivation. * @param version is the version of the Derivation. * @param us is the namespace to search for a Transformation. * @param uses is the name of the Transformation. * @param min is the minimum inclusive permissable version. * @param max is the maximum inclusive permissable version. * @see Transformation */ public Derivation( String ns, String name, String version, String us, String uses, String min, String max ) { super(ns,name,version); this.m_usesspace = us; this.m_uses = uses; this.m_minIncludeVersion = min; this.m_maxIncludeVersion = max; this.m_passMap = new TreeMap(); } /** * Convenience ctor: Names the derivation and supplies the used * Transformation, and the first actual argument. * * @param name is the name of the Derivation. * @param uses is the name of the Transformation. * @param firstChild is a first (possibly only) actual argument. * @see Transformation * @see Pass */ public Derivation( String name, String uses, Pass firstChild ) { super(name); this.m_passMap = new TreeMap(); this.m_passMap.put( firstChild.getBind(), firstChild ); this.m_uses = uses; } /** * Accessor: Adds an actual argument to the bag of arguments. * * @param vPass is the new actual argument to add. * @see Pass */ public void addPass( Pass vPass ) { this.m_passMap.put(vPass.getBind(),vPass); } /* * won't work with maps * public void addPass( int index, Pass vPass ) throws IndexOutOfBoundsException { this.m_passList.insertElementAt(vPass, index); } */ /** * Accessor: Provides an iterator for the bag of actual arguments. * @return the iterator for Pass elements. * @see Pass * @see java.util.Enumeration * @deprecated Use the new Collection based interfaces */ public Enumeration enumeratePass() { // return this.m_passMap.elements(); return Collections.enumeration(this.m_passMap.values()); } /** * Determines all LFN instances from the actual arguments of a given * derivation that match the specified linkage. This is a higher-level * method employing the given interface. Note that also linkage of * NONE will not be found in wildcard search mode. * * @param linkage is the linkage type to match against, -1 for all * files. * @return a list of logical filenames from the given derivation which * match the given linkage. For a linkage of -1, complete LFNs will be * returned, for any other linkage, just the filename will be returned. 
* * @see Value#getLFNList( int ) * @see LFN */ public java.util.List getLFNList( int linkage ) { java.util.List result = new ArrayList(); for ( Iterator i = this.iteratePass(); i.hasNext() ; ) { Value value = ((Pass) i.next()).getValue(); result.addAll( value.getLFNList(linkage) ); } return result; } /** * Determines if the list contains an LFN of the specified linkage. * The logic uses short-circuit evaluation, thus finding things is * faster than not finding things. Searching a list is a potentially * expensive method. * * @param filename is the name of the LFN * @param linkage is the linkage to check for, -1 for any linkage type. * @return true if the LFN is contained in the scalar, false otherwise. * * @see Value#containsLFN( String, int ) * @see LFN */ public boolean containsLFN( String filename, int linkage ) { for ( Iterator i = this.iteratePass(); i.hasNext(); ) { Value actual = ((Pass) i.next()).getValue(); if ( actual.containsLFN( filename, linkage ) ) return true; } return false; } /** * Accessor: Obtains the maximum inclusive version permissable for * binding to a {@link Transformation}. * * @return the maximum inclusive version number. * @see #setMaxIncludeVersion( java.lang.String ) */ public String getMaxIncludeVersion() { return this.m_maxIncludeVersion; } /** * Accessor: Obtains the minimum inclusive version permissable for * binding to a {@link Transformation}. * * @return the minimum inclusive version number. * @see #setMinIncludeVersion( java.lang.String ) */ public String getMinIncludeVersion() { return this.m_minIncludeVersion; } /** * Accessor: Obtains an actual argument identified by the bound variable. * * @param name is the binding name. * @return the bound value to the given name. * @see Pass */ public Pass getPass(String name) { return (Pass) this.m_passMap.get(name); } /** * Accessor: Obtains the bag of actual arguments as array. Note that the * order is arbitrary. * * @return an array containing all bound variables. * @see Pass * @deprecated Use the new Collection based interfaces */ public Pass[] getPass() { int size = this.m_passMap.size(); Pass[] mPass = new Pass[size]; this.m_passMap.values().toArray(mPass); return mPass; } /** * Accessor: Counts the number of actual arguments. * * @return the number of actual arguments in the internal bag. */ public int getPassCount() { return this.m_passMap.size(); } /** * Accessor: Gets an array of all values that constitute the current * content. This list is read-only. * * @return an array with Pass elements. * @see Pass */ public java.util.List getPassList() { return Collections.unmodifiableList( new ArrayList(this.m_passMap.values()) ); } /** * Accessor: Obtains all actual arguments. The map is a read-only * map to avoid modifications outside the API. * * @return a map will all actual arguments. * @see Pass */ public java.util.Map getPassMap() { return Collections.unmodifiableMap( this.m_passMap ); } /** * Accessor: Obtains the name of the logical {@link Transformation} * that this derivation refers to. * * @see #setUses( java.lang.String ) */ public java.lang.String getUses() { return this.m_uses; } /** * Accessor: Obtains the namespace of the logical {@link Transformation} * that this derivation refers to. * * @see #setUsesspace( java.lang.String ) */ public java.lang.String getUsesspace() { return this.m_usesspace; } /** * Accessor: Provides an iterator for the bag of actual arguments. * @return an iterator to walk the Pass list with. 
* @see Pass */ public Iterator iteratePass() { return this.m_passMap.values().iterator(); } /* NOT APPLICABLE * * Accessor: Provides an iterator for the bag of actual arguments. * @return an iterator to walk the Pass list with. * @see Pass * public ListIterator listIteratePass() { return (new ArrayList( this.m_passMap.values() ).listIterator()); } */ /** * Matches an external version string against the internal range. This * predicate function uses inclusive matches. Special interpretation * will be applied to null values, internally as well as * an external null value.
 *
 * <pre>
   *   vers.   min    max     result
   *   -----   ----   -----   ------
   *   null    null   null    true
   *   null    *      null    true
   *   null    null   *       true
   *   null    *      *       true
   *
   *   *       null   null    true
   *
   *   "A"     "B"    null    false
   *   "B"     "B"    null    true
   *   "C"     "B"    null    true
   *   "A"     null   "B"     true
   *   "B"     null   "B"     true
   *   "C"     null   "B"     false
   *   "A"     "B"    "B"     false 
   *   "B"     "B"    "B"     true
   *   "C"     "B"    "B"     false 
 * </pre>
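 *
 * For example, with values chosen purely for illustration:
 * <pre>
 *   Derivation.match( "1.3", "1.0", "2.0" );   // true, within range
 *   Derivation.match( "2.5", "1.0", "2.0" );   // false, above maximum
 *   Derivation.match( "1.3", null, null );     // true, open range
 * </pre>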
* * @param version is an externally supplied version to be checked, * if it is within the inclusive interval of min and max. * @param minInc is the minimum inclusive version of the range. * @param maxInc is the maximum inclusive version of the range. * @return true, if the version is in range, false otherwise. */ public static boolean match( String version, String minInc, String maxInc ) { // special null combinations first. if ( minInc == null && maxInc == null || version == null ) return true; String ver = version.trim(); String min = minInc == null ? "" : minInc; String max = maxInc == null ? "" : maxInc; return ( ver.compareTo(min) >= 0 && ver.compareTo(max) <= 0 ); } /** * Instance method for matching an external version against the inclusive * version range. * @param version is an externally supplied version to be checked, * if it is within the inclusive interval of min and max. * @return true, if the version is in range, false otherwise. * @see Derivation#match( String, String, String ) */ public boolean match( String version ) { return Derivation.match( version, this.m_minIncludeVersion, this.m_maxIncludeVersion ); } /** * Accessor: Removes all actual arguments. Effectively empties the bag. */ public void removeAllPass() { this.m_passMap.clear(); } /** * Accessor: Removes a specific actual argument. * * @param name is the bound variable name of the argument to remove. * @return the object that was removed, or null, if not found. * @see Pass */ public Pass removePass( String name ) { return (Pass) this.m_passMap.remove(name); } /** * Accessor: Sets the maximum inclusive permissable version of * a logical transformation to run with. * * @param miv is the (new) maximum inclusive version. * @see #getMaxIncludeVersion() */ public void setMaxIncludeVersion(String miv ) { this.m_maxIncludeVersion = miv == null ? null : miv.trim(); } /** * Accessor: Sets the minimum inclusive permissable version of * a logical transformation to run with. * * @param miv is the (new) minimum inclusive version. * @see #getMinIncludeVersion() */ public void setMinIncludeVersion(String miv) { this.m_minIncludeVersion = miv == null ? null : miv.trim(); } /** * Accessor: Adds a new or overwrites an existing actual argument. * * @param vPass is a new actual argument with bound name and value. * @see Pass */ public void setPass(Pass vPass) { this.m_passMap.put(vPass.getBind(),vPass); } /** * Accessor: Replaces the bag of actual argument with new arguments. * * @param passArray is the new actual argument list. * @see Pass * @deprecated Use the new Collection based interfaces */ public void setPass(Pass[] passArray) { //-- copy array this.m_passMap.clear(); for (int i = 0; i < passArray.length; i++) { this.m_passMap.put(passArray[i].getBind(),passArray[i]); } } /** * Accessor: Replaces the bag of actual argument with a bag of * new arguments. * * @param passes is the new actual argument collection. * @see Pass */ public void setPass(Collection passes) { this.m_passMap.clear(); for ( Iterator i=passes.iterator(); i.hasNext(); ) { Pass p = (Pass) i.next(); this.m_passMap.put(p.getBind(),p); } } /** * Accessor: Replaces the bag of actual argument with a map of * new arguments. * * @param passes is the new actual argument map. * @see Pass */ public void setPass( Map passes ) { this.m_passMap.clear(); this.m_passMap.putAll(passes); } /** * Accessor: Sets a new name for a logical Transformation * to call. * * @param uses is the new name of the Transformation to use. 
* @see #getUses() * @see Transformation */ public void setUses(String uses) { this.m_uses = uses; } /** * Accessor: Sets a new namespace identifier for a logical * Transformation to call. * * @param usesspace is the new namespace of the * Transformation. * @see #getUsesspace() * @see Transformation */ public void setUsesspace(String usesspace) { this.m_usesspace = usesspace; } /** * Constructs dynamically a short descriptive, hopefully unique * identifier for this derivation. Recent modification add the * complete identification in terms of derivation name, namespace, * and version as well as the called transformation name, namespace * and version range. * FIXME: Anonymous derivations get their hash code, which is well * for the first versions working without database. Later versions * with database must use some unique sequence mechanism instead. * * @return a string describing the derivation * @see Object#hashCode() */ public String identify() { StringBuffer result = new StringBuffer(); result.append( shortID() ); result.append("->"); result.append( Separator.combine( this.m_usesspace, this.m_uses, this.getMinIncludeVersion(), this.getMaxIncludeVersion() ) ); // // and now for the called part // result.append( shortID(null, this.m_usesspace, this.m_uses, null) ); // // String vmin = this.getMinIncludeVersion(); // String vmax = this.getMaxIncludeVersion(); // if ( vmin != null && vmin.length() > 0 && // vmax != null && vmax.length() > 0 ) { // result.append(Separator.NAME); // if ( vmin != null ) result.append(vmin); // result.append(Separator.VERSION); // if ( vmax != null ) result.append(vmax); // } // result return result.toString(); } /** * Dumps the content of the given element into a string. This function * traverses all sibling classes as necessary and converts the * data into textual output. Note that order of the actual arguments * is not preserved. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @exception IOException if something fishy happens to the stream. */ public void toString( Writer stream ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); stream.write( "DV " ); stream.write( this.identify() ); stream.write( '(' ); // write arguments if ( this.m_passMap.size() > 0 ) { stream.write( newline ); for ( Iterator i=this.m_passMap.values().iterator(); i.hasNext(); ) { stream.write( " " ); ((Pass) i.next()).toString(stream); if ( i.hasNext() ) stream.write(","+newline); } } stream.write( " );" ); stream.write(newline); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. 
* @see org.griphyn.vdl.Chimera#writeAttribute( Writer, String, String ) * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":derivation" : "derivation"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); super.toXML(stream); writeAttribute( stream, " usesspace=\"", this.m_usesspace ); writeAttribute( stream, " uses=\"", this.m_uses ); writeAttribute( stream, " minIncludeVersion=\"", this.m_minIncludeVersion ); writeAttribute( stream, " maxIncludeVersion=\"", this.m_maxIncludeVersion ); if ( this.m_passMap.size() == 0 ) { // no actual arguments stream.write( "/>" ); } else { // there are actual arguments stream.write( '>' ); if ( indent != null ) stream.write( newline ); String newindent = indent==null ? null : indent + "  "; for ( Iterator i=this.m_passMap.values().iterator(); i.hasNext(); ) { ((Pass) i.next()).toXML( stream, newindent, namespace ); } if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Leaf.java0000644000175000017500000000403111757531137023024 0ustar  ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.util.*; import java.io.Serializable; /** * Leaf is an abstract base class for leaf nodes in the * instance tree. FIXME: The content value is actually handled in the * child classes, and needs to be unified into this class. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Text * @see Use * @see LFN */ public abstract class Leaf extends VDL implements Cloneable, Serializable { /** * Creates and returns a copy of this object. * @return a new instance. */ public abstract Object clone(); /** * This is the name for a value element. * @deprecated Originally we thought to employ this value, but it is * currently unused. */ transient private String m_value; /** * Accessor method. Obtains the current state of the thus named attribute. * * @return the current state of the m_name attribute. * @see #setValue(String) * @deprecated the attribute is currently unused. 
*/ public void setValue( String value ) { this.m_value = value; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Profile.java0000644000175000017500000003755711757531137023600 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.util.*; import java.io.IOException; import java.io.Writer; import java.io.Serializable; /** * A Profile captures scheduler system and application * environment specific stuff in a uniform fashion. Each profile * declaration assigns a value to a key within a namespace. As of * this writing, valid namespaces are * *
 * <dl>
 * <dt>vds</dt>
 * <dd>Virtual Data System specific material, currently empty.</dd>
 * <dt>condor</dt>
 * <dd>If the job runs using the Condor scheduler,
 * certain items like the "universe" or "requirements" can be set.
 * Please note that currently the universe is provided as a hint
 * to the {@link Transformation} itself.</dd>
 * <dt>dagman</dt>
 * <dd>The job graph will usually be run by Condor DAGMan. Some issues,
 * e.g. the number of retries, are specific to DAGMan and not Condor.</dd>
 * <dt>env</dt>
 * <dd>The Unix environment variables that are required for the job.</dd>
 * <dt>hints</dt>
 * <dd>A new section collecting various hints that are passed between
 * planners.</dd>
 * </dl>
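 *
 * A hypothetical construction sketch, assuming a Text ctor that
 * takes the string content:
 * <pre>
 *   Profile path = new Profile( "env", "PATH", new Text("/usr/bin:/bin") );
 * </pre>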
* * In the future, more namespaces may be added. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ public class Profile extends VDL implements Serializable { /** * Throws this message, if neither <text> nor <use> * elements are tried to be added. */ private static final String c_error_message = "Only \"text\" and \"use\" elements are allowed inside an \"argument\"."; /** * The namespace of a profile. All profiles must mention their namespace * in order to generate acceptable behaviour. */ private String m_namespace; /** * The identifier within a namespace. The meaning of the key can differ * between namespaces. Within the unix namespace, it is the name of an * environment variable. Within the condor namespace, it is a Condor * submit file key. */ private String m_key; /** * The value to above keys. Any value passed down is an arbitrary mix * of the three potential {@link Leaf} types. A profile value element * only allows for {@link Text} and {@link LFN} children in arbitrary * number and order. * * @see Leaf * @see Text * @see LFN */ private ArrayList m_leafList; /** * Array ctor. */ public Profile() { this.m_leafList = new ArrayList(); } /** * Standard ctor: set up everything except a value of the ns.key pair. * * @param namespace is the namespace within which to operate. * @param key is an identifier unique within the namespace. */ public Profile( String namespace, String key ) { this.m_leafList = new ArrayList(); this.m_namespace = namespace; this.m_key = key; } /** * Convenience ctor: set up the first piece of the value in one go. * * @param namespace is the namespace within which to operate. * @param key is an identifier unique within the namespace. * @param firstChild is the first fragment of the value. Only * Leafs of type Use or Text * are permissable. * * @see Leaf * @see Use * @see Text */ public Profile( String namespace, String key, Leaf firstChild ) { this.m_leafList = new ArrayList(); this.m_leafList.add(firstChild); this.m_namespace = namespace; this.m_key = key; } /** * Convenience ctor: set up the first piece of the value in one go. * * @param namespace is the namespace within which to operate. * @param key is an identifier unique within the namespace. * @param children is a collection of fragments for the value. Only * Leafs of type Filename or * PseudoText are permissable. * * @see Leaf * @see Use * @see Text */ public Profile( String namespace, String key, Collection children ) { this.m_leafList = new ArrayList(); this.m_leafList.addAll(children); this.m_namespace = namespace; this.m_key = key; } /** * Accessor: Append a value fragment to this profile instance. * * @param vLeaf is the fragment to add. Note that only leaf values of * Use or Text are allowed. * @throws IndexOutOfBoundsException if the value cannot be added. * @throws IllegalArgumentException if the value type is neither * Use nor Text. * @see Leaf * @see Text * @see Use */ public void addLeaf( Leaf vLeaf ) throws IndexOutOfBoundsException, IllegalArgumentException { if ( vLeaf instanceof Text || vLeaf instanceof Use ) this.m_leafList.add(vLeaf); else throw new java.lang.IllegalArgumentException( c_error_message ); } /** * Accessor: Inserts a Leaf value into a specific position * of the list of gathered values. * * @param index is the position to insert the item into * @param vLeaf is the value to append to the list. Note that only leaf * values of Use or Text are allowed. * @throws IndexOutOfBoundsException if the value cannot be added. 
* @throws IllegalArgumentException if the value type is neither * Use nor Text. * * @see Text * @see Use */ public void addLeaf( int index, Leaf vLeaf ) throws IndexOutOfBoundsException, IllegalArgumentException { if ( vLeaf instanceof Text || vLeaf instanceof Use ) this.m_leafList.add(index, vLeaf); else throw new java.lang.IllegalArgumentException( c_error_message ); } /** * Accessor: Enumerates the internal values that constitute the content * of the Profile value. * * @return the iterator to the value fragment list. * @deprecated Use the new Collection based interfaces */ public Enumeration enumerateLeaf() { return Collections.enumeration(this.m_leafList); } /** * Accessor: Obtains the Leaf at a certain position in the * list of profile value fragments. * * @param index is the position in the list to obtain a value from * @return The Use or Text at the position. * @throws IndexOutOfBoundsException if the index points to an element * in the list that does not contain any elments. * * @see Use * @see Text */ public Leaf getLeaf(int index) throws IndexOutOfBoundsException { //-- check bound for index if ((index < 0) || (index >= this.m_leafList.size())) throw new IndexOutOfBoundsException(); return (Leaf) this.m_leafList.get(index); } /** * Accessor: Gets an array of all values that constitute the current * value content of a profile. This array is a copy to avoid * write-through modifications. * * @return an array with a mixture of either Text or * Use values. * * @see Use * @see Text * @deprecated Use the new Collection based interfaces */ public Leaf[] getLeaf() { int size = this.m_leafList.size(); Leaf[] mLeaf = new Leaf[size]; System.arraycopy( this.m_leafList.toArray(new Leaf[0]), 0, mLeaf, 0, size ); return mLeaf; } /** * Accessor: Obtains the number of profile value fragments. * * @return number of elements that an external array needs to be sized to. */ public int getLeafCount() { return this.m_leafList.size(); } /** * Accessor: Gets an array of all values that constitute the current * content. This list is read-only. * * @return an array with a mixture of either Text or * LFN values. * * @see LFN * @see Text */ public java.util.List getLeafList() { return Collections.unmodifiableList(this.m_leafList); } /** * Accessor: Gets the namespace value for the profile. * * @return the currently active namespace for this instance. * @see #setNamespace(java.lang.String) */ public String getNamespace() { return this.m_namespace; } /** * Accessor: Gets the key identifier for the profile. * * @return the currently active key for this instance. * @see #setKey(java.lang.String) */ public String getKey() { return this.m_key; } /** * Accessor: Enumerates the internal values that constitute the content * of the Scalar element. * * @return an iterator to walk the list with. */ public Iterator iterateLeaf() { return this.m_leafList.iterator(); } /** * Accessor: Enumerates the internal values that constitute the content * of the Scalar element. * * @return an enumeration to walk the list with. */ public ListIterator listIterateLeaf() { return this.m_leafList.listIterator(); } /** * Accessor: Enumerates the internal values that constitute the content * of the Scalar element. * * @param start is the start index * @return an enumeration to walk the list with. */ public ListIterator listIterateLeaf(int start) { return this.m_leafList.listIterator(start); } /** * Accessor: Removes all value fragments from the profile. 
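 * For example, to replace the current value wholesale (a sketch; the
 * Text ctor taking a string is assumed):
 * <pre>
 *   profile.removeAllLeaf();
 *   profile.addLeaf( new Text("new value") );
 * </pre>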
*/ public void removeAllLeaf() { this.m_leafList.clear(); } /** * Accessor: Remove a single fragment from the list of value fragments. * @param index is the position at which an element is to be removed. * @return the object that was removed. The removed item is either a * Use or a Text. * * @see Use * @see Text */ public Leaf removeLeaf( int index ) { return (Leaf) this.m_leafList.remove(index); } /** * Accessor: Overwrites a Use or Text value * fragment at a certain position in the profile value fragment list. * * @param index position to overwrite an element in. * @param vLeaf is either a Use or Text object. * @throws IndexOutOfBoundsException if the position pointed to is invalid. * @throws IllegalArgumentException if the added element is of the * incorrect Leaf type. * * @see Use * @see Text */ public void setLeaf(int index, Leaf vLeaf) throws IndexOutOfBoundsException, IllegalArgumentException { //-- check bounds for index if ((index < 0) || (index >= this.m_leafList.size())) { throw new IndexOutOfBoundsException(); } if ( vLeaf instanceof Text || vLeaf instanceof Use ) this.m_leafList.set(index, vLeaf); else throw new IllegalArgumentException( c_error_message ); } //-- void setLeaf(int, Leaf) /** * Accessor: Overwrites internal value fragments list with an external * list representing a profile value. * * @param leafArray is the external list of Text or * Use objects used to overwrite things. * @see Text * @see Use * @deprecated Use the new Collection based interfaces */ public void setLeaf(Leaf[] leafArray) { this.m_leafList.clear(); this.m_leafList.addAll( Arrays.asList(leafArray) ); } /** * Accessor: Overwrites internal list with an external list representing * a profile value. * * @param leaves is the external list of Text or * Use objects used to overwrite things. * @see Text * @see Use */ public void setLeaf(Collection leaves) { this.m_leafList.clear(); this.m_leafList.addAll(leaves); } /** * Accessor: Adjusts a namespace value to a new state. * @param namespace is the new namespace to use. * @see #getNamespace() */ public void setNamespace( String namespace ) { this.m_namespace = namespace; } /** * Accessor: Adjusts the identifier within a namespace. * @param key is the new identifier to use from now on. * @see #getKey() */ public void setKey( String key ) { this.m_key = key; } /** * Converts the profile state into textual format for human consumption. * * @return a textual description of the element and its sub-classes. * Be advised that these strings might become large. */ public String toString() { StringBuffer result = new StringBuffer(); result.append("profile "); result.append(escape(this.m_namespace)); // result.append('.'); result.append("::"); result.append(escape(this.m_key)); result.append(" = "); for ( int i=0; i<getLeafCount(); ++i ) { result.append( getLeaf(i).toString() ); } result.append(';'); return result.toString(); } /** * Converts the profile state into textual format for human consumption. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @exception IOException if something fishy happens to the stream. */ public void toString( Writer stream ) throws IOException { stream.write( "profile " ); stream.write( escape(this.m_namespace) ); stream.write( "::" ); stream.write( escape(this.m_key) ); stream.write( " = " ); for ( Iterator i=this.m_leafList.iterator(); i.hasNext(); ) { ((Leaf) i.next()).toString(stream); } stream.write( ';' ); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":profile" : "profile"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " namespace=\"", this.m_namespace ); writeAttribute( stream, " key=\"", this.m_key ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); // write content String newindent = indent==null ?
null : indent + "  "; for ( Iterator i=this.m_leafList.iterator(); i.hasNext(); ) { ((Leaf) i.next()).toXML( stream, newindent, namespace ); } // close tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Pass.java0000644000175000017500000001316111757531137023067 0ustar  ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.io.IOException; import java.io.Writer; import java.io.Serializable; /** * This class encapsulates a single formal argument that is passed * from a {@link Derivation} to a {@link Transformation}. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Value */ public class Pass extends VDL implements Cloneable, Serializable { /** * Each actual argument must bind to a formal argument. Binding * is done via the name of the formal argument, as stored in the * binding variable. */ private String m_bind; /** * Each actual argument does have a value. This attribute stores * the current state of a value. */ private Value m_value; /** * Creates and returns a copy of this object. * @return a new instance, semi-deep copy */ public Object clone() { return new Pass( this.m_bind, (Value) this.m_value.clone() ); } /** * Ctor. */ public Pass() { super(); } /** * Convenience ctor: Establishes a binding with an empty value. * Note that the value is still null, and must be set explicitly. * * @param bind is the name of the formal argument to bind to. * @see #setValue(Value) */ public Pass( String bind ) { super(); this.m_bind = bind; } /** * Convenience ctor: Establishes a binding with a value. * * @param bind is the name of the formal argument to bind to. * @param value is the value to pass to a {@link Transformation}. */ public Pass( String bind, Value value ) { super(); this.m_bind = bind; this.m_value = value; } /** * Accessor: Gets the current bound variable name. * * @return the name of the variable bound to. May return null on a * default constructed object. * @see #setBind( java.lang.String ) */ public String getBind() { return this.m_bind; } /** * Accessor: Gets the current value to be passed. Note that each * {@link Value} is either a {@link Scalar} or {@link List}. * * @return the value that is to be passed to a {@link Transformation}. * @see #setValue( Value ) */ public Value getValue() { return this.m_value; } /** * Accessor: Sets a new binding with a formal argument. * * @param bind is the new binding name. * @see #getBind() */ public void setBind( String bind ) { this.m_bind = bind; } /** * Accessor: Sets a new value for a bound variable. * * @param value is the new value, which can be a {@link Scalar} or * a {@link List}. 
* @see #getValue() */ public void setValue( Value value ) { this.m_value = value; } /** * Converts the active state into something meant for human consumption. * The method will be called when recursively traversing the instance * tree. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @exception IOException if something fishy happens to the stream. */ public void toString( Writer stream ) throws IOException { // stream.write( escape(this.m_bind) ); stream.write( this.m_bind ); stream.write( '=' ); this.m_value.toString(stream); } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":pass" : "pass"; String newline = System.getProperty( "line.separator", "\r\n" ); // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " bind=\"", this.m_bind ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); // write content String newindent = indent==null ? null : indent+"  "; this.m_value.toXML( stream, newindent, namespace ); // close tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Declare.java0000644000175000017500000003000211757531137023513 0ustar  ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.util.*; import java.io.IOException; import java.io.Writer; import java.io.Serializable; /** * This class defines the formal arguments to a Transformation. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Transformation */ public class Declare extends VDL implements Serializable { /** * Each formal argument has a name to which it is bound. */ private String m_name; /** * Each formal argument has a specific type. There are currently * only the types {@link Scalar} and {@link List}. 
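 * For illustration, a list-typed output argument (names chosen
 * arbitrarily) could be declared as
 * <pre>
 *   Declare outs = new Declare( "outfiles", Value.LIST, LFN.OUTPUT );
 * </pre>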
*/ private int m_containerType = Value.SCALAR; /** * For linking the DAG we need to know if the argument is passed into * the transformation, produced by the transformation, or has some * other behavior. */ private int m_link = LFN.NONE; /** * The default value of a formal argument is optional. The notion of a * default value is taken from C++. */ private Value m_value; /** * Default ctor: needed for JDO */ public Declare() { super(); } /** * ctor: Construct a new formal argument with a binding and default * container type. * * @param name is the binding. * @param ct is the container type, the type of the argument. * @throws IllegalArgumentException if the container type is outside * the legal range [{@link Value#SCALAR}, {@link Value#LIST}]. */ public Declare( String name, int ct ) throws IllegalArgumentException { super(); this.m_name = name; if ( Value.isInRange(ct) ) this.m_containerType = ct; else throw new IllegalArgumentException( "container type outside legal range" ); } /** * ctor: Construct a new formal argument with a binding and default * container type, as well as a linkage for the argument. * * @param name is the binding. * @param ct is the container type, the type of the argument. * @param link is the linkage type for the argument. * @throws IllegalArgumentException if the container type is outside * the legal range [{@link Value#SCALAR}, {@link Value#LIST}], or * the linkage is outside [{@link LFN#NONE}, {@link LFN#INOUT}]. */ public Declare( String name, int ct, int link ) throws IllegalArgumentException { super(); this.m_name = name; if ( Value.isInRange(ct) ) this.m_containerType = ct; else throw new IllegalArgumentException( "container type outside legal range" ); if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException( "linkage type outside legal range" ); } /** * ctor: Construct a new formal argument with a binding and default * value. The container type will be determined from the default value. * * @param name is the binding. * @param value is either a {@link Scalar} or {@link List} value. */ public Declare( String name, Value value ) { super(); this.m_name = name; this.m_value = value; this.m_containerType = value.getContainerType(); } /** * ctor: Construct a new formal argument with a binding and default * value. The container type will be determined from the default value. * The linkage is set separately. * * @param name is the binding. * @param value is either a {@link Scalar} or {@link List} value. * @param link is the linkage of the value for DAG creation. * @throws IllegalArgumentException if the linkage is outside * [{@link LFN#NONE}, {@link LFN#INOUT}]. */ public Declare( String name, Value value, int link ) throws IllegalArgumentException { super(); this.m_name = name; this.m_value = value; this.m_containerType = value.getContainerType(); if ( LFN.isInRange(link) ) this.m_link = link; else throw new IllegalArgumentException( "linkage type outside legal range" ); } /** * Accessor: Obtains the optional default value for the parameter. * * @return the default as {@link Scalar} or {@link List}, or null, * if not default value was registered. * @see #setValue(Value) */ public Value getValue() { return this.m_value; } /** * Accessor: Obtains the bound name of the formal argument. * * @return the name by which an actual arguments in a {@link Derivation} * can refer to this formal argument. 
* @see #setName( java.lang.String ) */ public String getName() { return this.m_name; } /** * Accessor: Obtains the linkage type of the formal argument. * * @return the linkage as an integer. * @see #setLink(int) * @see LFN#NONE * @see LFN#INPUT * @see LFN#OUTPUT * @see LFN#INOUT */ public int getLink() { return this.m_link; } /** * Accessor: Obtains the container type. Note that the registered * container type will be taken in case there is no default value. * Otherwise the container type of the default value will be taken. * * @return the container type of the formal argument. * @see #setContainerType(int) * @see Value#SCALAR * @see Value#LIST */ public int getContainerType() { return ( m_value != null ) ? m_value.getContainerType() : m_containerType; } /** * Accessor: Establishes a new name for this formal argument, to which * an actual argument can bind. FIXME: Note that no checks will be done * concerning the uniqueness of the new name. * * @param name is the new binding. * @see #getName() */ public void setName( String name ) { this.m_name = name; } /** * Accessor: Sets a new linkage type for the formal argument. * * @param link is the new linkage type from {@link LFN}. * @throws IllegalArgumentException if the argument is outside * the valid range. * @see #getLink() */ public void setLink( int link ) throws IllegalArgumentException { if ( ! LFN.isInRange(link) ) throw new IllegalArgumentException(); this.m_link = link; } /** * Accessor: Sets a new container type for the formal argument. If * a default value is known, the new container type must match the * default value's container type. * * @param containerType is the new integer describing a container type. * @throws IllegalArgumentException if the container type is neither * {@link Value#SCALAR} nor {@link Value#LIST}. * @see #getContainerType() */ public void setContainerType( int containerType ) throws IllegalArgumentException { if ( m_value == null ) { // no default value known, need to set container type if ( Value.isInRange(containerType) ) this.m_containerType = containerType; else throw new IllegalArgumentException( "container type outside legal range"); } else { // there is a default value, new type must match default if ( m_value.getContainerType() != containerType ) throw new IllegalArgumentException( "new container type does not match container type of default value" ); } } /** * Accessor: Sets or overwrites the optional default value of a formal * argument. FIXME: A value of null should be usable to kill a default * value. The new default must match the container type. * * @param value is the new default value. * @throws IllegalArgumentException if the container type of the new * value does not match the registered container type of the parameter. * @see #getValue() */ public void setValue(Value value) throws IllegalArgumentException { if ( value.getContainerType() == this.m_containerType ) this.m_value = value; else // container types do not match throw new IllegalArgumentException( "container type of new value does not match declared container type" ); } /** * Dumps the content of the declare element into a string for human consumption. * * @return a textual description of the element and its attributes. * Be advised that these strings might become large. */ public String toString() { String value = this.m_value == null ? "" : this.m_value.toString(); StringBuffer result = new StringBuffer( 12 + this.m_name.length() + value.length() ); result.append( LFN.toString( this.m_link ) ); result.append( ' ' ); result.append( this.m_name ); if ( this.m_containerType == Value.LIST ) result.append("[]"); if ( this.m_value != null ) { result.append('='); result.append(value); } return result.toString(); } /** * Dumps the content of the declaration into a string for human * consumption. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @exception IOException if something fishy happens to the stream. */ public void toString( Writer stream ) throws IOException { stream.write( LFN.toString( this.m_link ) ); stream.write( ' ' ); stream.write( escape(this.m_name) ); if ( this.m_containerType == Value.LIST ) stream.write("[]"); if ( this.m_value != null ) { stream.write('='); this.m_value.toString(stream); } } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. */ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":declare" : "declare"; if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " name=\"", this.m_name ); // null-safe if ( LFN.isInRange(this.m_link) ) writeAttribute( stream, " link=\"", LFN.toString(this.m_link) ); if ( this.m_containerType == Value.LIST ) { stream.write( " container=\"list\"" ); } else if ( this.m_containerType == Value.SCALAR ) { stream.write( " container=\"scalar\"" ); } if ( this.m_value == null ) { // no default value stream.write( "/>" ); } else { // there is a default value String newindent = indent==null ? null : indent + "  "; stream.write( '>' ); if ( indent != null ) stream.write( newline ); // dump content this.m_value.toXML( stream, newindent, namespace ); // write close tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); } if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Argument.java0000644000175000017500000003202211757531137023740 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification.
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.util.*; import java.io.IOException; import java.io.Writer; import java.io.Serializable; /** * A class to encapsulate a command line argument line. The command line * is separated into a list of distinct fragments. Each fragment can only * be of type Use or Text. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Leaf * @see Text * @see Use */ public class Argument extends VDL implements Serializable { /** * Throws this message, if neither <text> nor <use> * elements are tried to be added. */ private static final String c_error_message = "Only \"text\" and \"use\" elements are allowed inside an \"argument\"."; /** * The command line consists of an ordered list of Leaf * pieces, which in their sum create the commandline. Any value passed * down is an arbitrary mix of the three potential Leaf * types. Each element only allows for Text and * Use children in arbitrary number and order. * * @see Leaf * @see Text * @see Use */ private ArrayList m_leafList; /** * Each Argument is a fragment of the complete command * line. Each such group (of fragments) can be given a name. Special * names of the stdio handles refer to these handles. */ private String m_name; /** * Array ctor. */ public Argument() { this.m_leafList = new ArrayList(); } /** * Standard ctor: Constructs a named Argument group. * * @param name is the identifier for the argument group. */ public Argument( String name ) { this.m_name = name; this.m_leafList = new ArrayList(); } /** * Convenience ctor: Constructs a name argument group, and enters the * first (and possibly only) fragment into the group. * * @param name is the unique identifier for the argument group. * @param firstChild is the element to place into the argument group. Only * Leafs of type Use or Text * are permissable. * * @see Leaf * @see Use * @see Text */ public Argument( String name, Leaf firstChild ) { this.m_name = name; this.m_leafList = new ArrayList(); this.m_leafList.add(firstChild); } /** * Accessor: Appends a commandline fragment to the current group. * * @param vLeaf is the fragment to add. Note that only leaf values of * Use or Text are allowed. * @throws IndexOutOfBoundsException if the value cannot be added. * @throws IllegalArgumentException if the value type is neither * Use nor Text. * @see Leaf * @see Text * @see Use */ public void addLeaf( Leaf vLeaf ) throws IndexOutOfBoundsException, IllegalArgumentException { if ( vLeaf instanceof Text || vLeaf instanceof Use ) this.m_leafList.add(vLeaf); else throw new java.lang.IllegalArgumentException( c_error_message ); } /** * Accessor: Inserts a Leaf value into a specific position * of this commandline group. * * @param index is the position to insert the item into * @param vLeaf is the value to append to the list. Note that only leaf * values of Use or Text are allowed. * @throws IndexOutOfBoundsException if the value cannot be added. * @throws IllegalArgumentException if the value type is neither * Use nor Text. 
* * @see Text * @see Use */ public void addLeaf( int index, Leaf vLeaf ) throws IndexOutOfBoundsException, IllegalArgumentException { if ( vLeaf instanceof Text || vLeaf instanceof Use ) this.m_leafList.add(index, vLeaf); else throw new java.lang.IllegalArgumentException( c_error_message ); } /** * Accessor: Enumerates the internal values that constitute the content * this commandline group. * * @return the iterator to the commandline group internal list. * @deprecated Use the new Collection based interfaces */ public Enumeration enumerateLeaf() { return Collections.enumeration(this.m_leafList); } /** * Accessor: Obtains the Leaf at a certain position in the * commandline argument group. * * @param index is the position in the list to obtain a value from * @return The Use or Text at the position. * @throws IndexOutOfBoundsException if the index points to an element * in the list that does not contain any elments. * * @see Use * @see Text */ public Leaf getLeaf(int index) throws IndexOutOfBoundsException { //-- check bound for index if ((index < 0) || (index >= this.m_leafList.size())) throw new IndexOutOfBoundsException(); return (Leaf) this.m_leafList.get(index); } /** * Accessor: Gets an array of all values that constitute the current * value content of this commandline group. This array is a copy to * avoid write-through modifications. * * @return an array with a mixture of either Text or * Use values. * * @see Use * @see Text * @deprecated Use the new Collection based interfaces */ public Leaf[] getLeaf() { int size = this.m_leafList.size(); Leaf[] mLeaf = new Leaf[size]; System.arraycopy( this.m_leafList.toArray(new Leaf[0]), 0, mLeaf, 0, size ); return mLeaf; } /** * Accessor: Obtains the size of the commandline group. * * @return number of elements that an external array needs to be sized to. */ public int getLeafCount() { return this.m_leafList.size(); } /** * Accessor: Gets an array of all values that constitute the current * content. This list is read-only. * * @return an array with a mixture of either Text or * LFN values. * * @see LFN * @see Text */ public java.util.List getLeafList() { return Collections.unmodifiableList(this.m_leafList); } /** * Accessor: Obtains the current name of this commandline group. * * @return the name of this commandline group. * @see #setName(java.lang.String) */ public String getName() { return this.m_name; } /** * Accessor: Enumerates the internal values that constitute the content * of the Scalar element. * * @return an iterator to walk the list with. */ public Iterator iterateLeaf() { return this.m_leafList.iterator(); } /** * Accessor: Enumerates the internal values that constitute the content * of the Scalar element. * * @return an enumeration to walk the list with. */ public ListIterator listIterateLeaf() { return this.m_leafList.listIterator(); } /** * Accessor: Enumerates the internal values that constitute the content * of the Scalar element. * * @param start is the start index * @return an enumeration to walk the list with. */ public ListIterator listIterateLeaf(int start) { return this.m_leafList.listIterator(start); } /** * Accessor: Removes all values from this commandline group. */ public void removeAllLeaf() { this.m_leafList.clear(); } /** * Accessor: Removes a specific fragment from this commandline group. * @param index is the position at which an element is to be removed. * @return the object that was removed. The removed item is either an * Use or a Text. 
* * @see Use * @see Text */ public Leaf removeLeaf( int index ) { return (Leaf) this.m_leafList.remove(index); } /** * Accessor: Overwrites a Use or Text value * fragment at a certain position in this command line group. * * @param index position to overwrite an elment in. * @param vLeaf is either a Use or Text object. * @throws IndexOutOfBoundsException if the position pointed to is invalid. * @throws IllegalArgumentException if the added element is of the * incorrect Leaf type. * * @see Use * @see Text */ public void setLeaf(int index, Leaf vLeaf) throws IndexOutOfBoundsException, IllegalArgumentException { //-- check bounds for index if ((index < 0) || (index >= this.m_leafList.size())) { throw new IndexOutOfBoundsException(); } if ( vLeaf instanceof Text || vLeaf instanceof Use ) this.m_leafList.set(index, vLeaf); else throw new IllegalArgumentException( c_error_message ); } //-- void setLeaf(int, Leaf) /** * Accessor: Replaces the commandline group with another group value. * Warning: The replacements are not checked for being of the correct * leaf types. * * @param leafArray is the external list of Text or * Use objects used to overwrite things. * @see Text * @see Use * @deprecated Use the new Collection based interfaces */ public void setLeaf(Leaf[] leafArray) { this.m_leafList.clear(); this.m_leafList.addAll( Arrays.asList(leafArray) ); } /** * Accessor: Overwrites internal list with an external list representing * a Scalar value. * * @param leaves is the external list of Text or * LFN objects used to overwrite things. * @see Text * @see LFN */ public void setLeaf(Collection leaves) { this.m_leafList.clear(); this.m_leafList.addAll(leaves); } /** * Accessor: Replaces or sets the current identifier for this * commandline group. * @param name is the new identifier to use for this commandline group. * @see #getName() */ public void setName( String name ) { this.m_name = name; } /** * Converts the commandline group into textual format for human * consumption. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @exception IOException if something fishy happens to the stream. */ public void toString( Writer stream ) throws IOException { stream.write( "argument" ); if ( this.m_name != null ) { stream.write(' '); stream.write(escape(this.m_name)); } stream.write(" = "); for ( Iterator i=this.m_leafList.iterator(); i.hasNext(); ) { ((Leaf) i.next()).toString(stream); } } /** * Dump the state of the current element as XML output. This function * traverses all sibling classes as necessary, and converts the data * into pretty-printed XML output. The stream interface should be able * to handle large output efficiently, if you use a buffered writer. * * @param stream is a stream opened and ready for writing. This can also * be a string stream for efficient output. * @param indent is a String of spaces used for pretty * printing. The initial amount of spaces should be an empty string. * The parameter is used internally for the recursive traversal. * If a null value is specified, no indentation nor * linefeeds will be generated. * @param namespace is the XML schema namespace prefix. If neither * empty nor null, each element will be prefixed with this prefix, * and the root element will map the XML namespace. * @exception IOException if something fishy happens to the stream. 
*/ public void toXML( Writer stream, String indent, String namespace ) throws IOException { String newline = System.getProperty( "line.separator", "\r\n" ); String tag = ( namespace != null && namespace.length() > 0 ) ? namespace + ":argument" : "argument"; // open tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( '<' ); stream.write( tag ); writeAttribute( stream, " name=\"", this.m_name ); // null-safe stream.write( '>' ); if ( indent != null ) stream.write( newline ); // dump content String newindent = indent==null ? null : indent + "  "; for ( Iterator i=this.m_leafList.iterator(); i.hasNext(); ) { ((Leaf) i.next()).toXML( stream, newindent, namespace ); } // close tag if ( indent != null && indent.length() > 0 ) stream.write( indent ); stream.write( "</" ); stream.write( tag ); stream.write( '>' ); if ( indent != null ) stream.write( newline ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/IncompatibleLinkageException.java0000644000175000017500000000372111757531137027742 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; /** * This exception is thrown if the linkage of a * declared variable and the combined linkage of a used variable in a * transformation have incompatible values. An example for such an * incompatibility is having declared a variable of type input, but * using it as output. The exception will also be thrown, if multiple * usages of a bound variable use incompatible linkages, e.g. the same * variable is used with linkage NONE in one instance, and with any other * linkage in the same transformation at another place.
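 * <p>A minimal illustration in terms of this package's API (the variable
 * names are hypothetical): a formal argument declared with input linkage,
 * while a use of the bound variable claims output linkage.
 * <pre>
 *   Declare d = new Declare( "f", Value.SCALAR, LFN.INPUT );
 *   // a Use of "f" with LFN.OUTPUT linkage in the same transformation
 *   // is incompatible and would trigger this exception
 * </pre>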

* * A method is not required to declare in its throws clause any * subclasses of RuntimeException that might be thrown during the * execution of the method but not caught. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Derivation * @see Transformation */ public class IncompatibleLinkageException extends java.lang.RuntimeException { /** * Constructs an IncompatibleLinkageException with no * detail message. */ public IncompatibleLinkageException() { super(); } /** * Constructs an IncompatibleLinkageException with the * specified detailed message. * * @param s is the detailed message. */ public IncompatibleLinkageException( String s ) { super(s); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/Value.java0000644000175000017500000001210611757531137023233 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.classes; import org.griphyn.vdl.classes.*; import java.util.*; import java.io.Serializable; /** * This class is the base class for {@link Scalar} and {@link List} * child classes. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ public abstract class Value extends VDL implements Cloneable, Serializable { /** * @deprecated This is the name for a value element. Originally we * thought to employ this value, but it is currently unused. */ private String m_name; /** * Denotes a scalar component, which is always a leaf. * * @see #getContainerType() * @see Scalar#getContainerType() */ public static final int SCALAR = 0; /** * Denotes a list component. * * @see #getContainerType() * @see List#getContainerType() */ public static final int LIST = 1; /** * This method determines which container is being used in the abstract * base class in order to avoid typeof constructs. * * @return either the constant to designate a Scalar, or * a constant to designate a List. * * @see #SCALAR * @see #LIST */ abstract public int getContainerType(); /** * This method determines which container is being used in the abstract * base class in order to avoid kludgy statements when printing debug info. * * @return the symbolic identifier for the type of the Value. */ abstract public String getSymbolicType(); /** * Creates and returns a copy of this object. * @return a new instance. */ public abstract Object clone(); /** * Determines all LFN instances of a given scalar that match the * specified linkage. This is a higher-level method employing the * given API. * * @param linkage is the linkage to check for, -1 for all filenames. * @return a set of logical filename instances that match the linkage * and were part of the scalar. The result may be an empty set, if no * such result were to be found. * * @see LFN */ abstract public java.util.List getAllLFN( int linkage ); /** * Determines all LFN instances of a given scalar that match the * specified linkage. This is a higher-level method employing the * given API.
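 * A usage sketch (illustrative; <code>value</code> stands for any
 * concrete {@link Scalar} or {@link List} instance):
 * <pre>
 *   java.util.List inputs = value.getLFNList( LFN.INPUT );
 * </pre>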
Note that also linkage of NONE will not be found in * wildcard search mode. * * @param linkage is the linkage to check for, -1 for all filenames. * @return a set of all logical filenames that match the linkage and * were part of the scalar. The result may be an empty set, if no such * result were to be found. For a linkage of -1, complete LFNs will be * returned, for any other linkage, just the filename will be * returned. * * @see Derivation#getLFNList( int ) * @see LFN */ abstract public java.util.List getLFNList( int linkage ); /** * Determines if the Value contains an LFN of the specified linkage. * The logic uses short-circuit evaluation, thus finding things is * faster than not finding things. Searching a list is a potentially * expensive method. * * @param filename is the name of the LFN * @param linkage is the linkage to check for, -1 for any linkage type. * @return true if the LFN is contained in the scalar, false otherwise. * * @see org.griphyn.vdl.classes.LFN * @see Derivation#containsLFN( String, int ) */ abstract public boolean containsLFN( String filename, int linkage ); /** * Predicate function to determine, if a container type is within * the legal range of values. * * @param x is the container type * @return true, if the type is within the legal range, or * false, if the container type is outside legal range. * * @see #SCALAR * @see #LIST */ public static boolean isInRange( int x ) { return ((x >= Value.SCALAR) && (x <= Value.LIST)); } /** * Default ctor. */ public Value() { // empty } /** * Accessor method. Obtains the current state of the thus named attribute. * * @return the current state of the m_name attribute. * @see #setName(String) * @deprecated the attribute is currently unused. */ public String getName() { return this.m_name; } /** * Accessor method. Sets the attribute of same name to a new value. * * @param name is the new value to overwrite the current state with. * @see #getName() * @deprecated the name attribute is currently unused. */ public void setName( String name ) { this.m_name = name; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/classes/package.html0000644000175000017500000000230611757531137023576 0ustar ryngerynge Bridges between the Virtual Data Language (VDL) and Java data structures.

Package Specification

This set of related classes represents the Virtual Data Language (VDL) datastructure inside Java classes. The minimum standard interface is expressed in {@link org.griphyn.vdl.Chimera}, and further refined for grouping purposes in the {@link org.griphyn.vdl.classes.VDL} parent.

Most of the classes have their counterparts in the XML schema definition for VDLx, the XML variant of VDL. All classes contain knowledge to convert their state back into VDLt and VDLx.
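
As an illustration (a sketch only; the writer and instance names below are
hypothetical), rendering any of these classes as VDLx amounts to calling
its toXML() method with a suitable java.io.Writer:

  java.io.StringWriter sw = new java.io.StringWriter();
  // "" starts pretty-printing at zero indentation; a null namespace
  // prefix produces unqualified element names
  someVdlObject.toXML( sw, "", null );
  String vdlx = sw.toString();

The corresponding toString( Writer ) methods emit the textual VDLt
representation in the same fashion.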

Related Documentation

For overviews, tutorials, examples, guides, and tool documentation, please see: pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/GTPL0000644000175000017500000001175011757531137020354 0ustar ryngeryngeGlobus Toolkit Public License Version 2, July 31, 2003 Copyright 1999-2003 University of Chicago and The University of Southern California. All rights reserved. This software referred to as the Globus Toolkit software ("Software") includes voluntary contributions made to the Globus Project collaboration. Persons and entities that have made voluntary contributions are hereinafter referred to as "Contributors." This Globus Toolkit Public License is referred to herein as "the GTPL." For more information on the Globus Project, please see http://www.globus.org/. Permission is granted for the installation, use, reproduction, modification, display, performance and redistribution of this Software, with or without modification, in source and binary forms. Permission is granted for the installation, use, reproduction, modification, display, performance and redistribution of user files, manuals, and training and demonstration slides ("Documentation") distributed with or specifically designated as distributed under the GTPL. Any exercise of rights under the GTPL is subject to the following conditions: 1. Redistributions of this Software, with or without modification, must reproduce the GTPL in: (1) the Software, or (2) the Documentation or some other similar material which is provided with the Software (if any). 2. The Documentation, alone or if included with a redistribution of the Software, must include the following notice: "This product includes material developed by the Globus Project (http://www.globus.org/)." Alternatively, if that is where third-party acknowledgments normally appear, this acknowledgment must be reproduced in the Software itself. 3. Globus Toolkit and Globus Project are trademarks of the University of Chicago. Any trademarks of the University of Chicago or the University of Southern California may not be used to endorse or promote software, or products derived therefrom, and except as expressly provided herein may not be affixed to modified redistributions of this Software or Documentation except with prior written approval, obtainable at the discretion of the trademark owner from info@globus.org. 4. To the extent that patent claims licensable by the University of Southern California and/or by the University of Chicago (as Operator of Argonne National Laboratory) are necessarily infringed by the use or sale of the Software, you and your transferees are granted a non-exclusive, worldwide, royalty-free license under such patent claims, with the rights to make, use, sell, offer to sell, import and otherwise transfer the Software in source code and object code form. This patent license shall not apply to Documentation or to any other software combinations which include the Software. No hardware per se is licensed hereunder. If you or any subsequent transferee (a "Recipient") institutes patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Software infringes such Recipient's patent(s), then such Recipient's rights granted under the patent license above shall terminate as of the date such litigation is filed. 5. 
DISCLAIMER SOFTWARE AND DOCUMENTATION ARE PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, OF SATISFACTORY QUALITY, AND FITNESS FOR A PARTICULAR PURPOSE OR USE ARE DISCLAIMED. THE COPYRIGHT HOLDERS AND CONTRIBUTORS MAKE NO REPRESENTATION THAT THE SOFTWARE, DOCUMENTATION, MODIFICATIONS, ENHANCEMENTS OR DERIVATIVE WORKS THEREOF, WILL NOT INFRINGE ANY PATENT, COPYRIGHT, TRADEMARK, TRADE SECRET OR OTHER PROPRIETARY RIGHT. 6. LIMITATION OF LIABILITY THE COPYRIGHT HOLDERS AND CONTRIBUTORS SHALL HAVE NO LIABILITY TO LICENSEE OR OTHER PERSONS FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, CONSEQUENTIAL, EXEMPLARY, OR PUNITIVE DAMAGES OF ANY CHARACTER INCLUDING, WITHOUT LIMITATION, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES, LOSS OF USE, DATA OR PROFITS, OR BUSINESS INTERRUPTION, HOWEVER CAUSED AND ON ANY THEORY OF CONTRACT, WARRANTY, TORT (INCLUDING NEGLIGENCE), PRODUCT LIABILITY OR OTHERWISE, ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE OR DOCUMENTATION, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 7. The Globus Project may publish revised and/or new versions of the GTPL from time to time. Each version will be given a distinguishing version number. Once Software or Documentation has been published under a particular version of the GTPL, you may always continue to use it under the terms of that version. You may also choose to use such Software or Documentation under the terms of any subsequent version of the GTPL published by the Globus Project. No one other than the Globus Project has the right to modify the terms of the GTPL. Globus Toolkit Public License 7-31-03 pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/0000755000175000017500000000000011757531667021435 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/DatabaseSchema.java0000644000175000017500000004246011757531137025123 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import java.sql.*; import java.io.File; import java.io.IOException; import java.util.*; import java.lang.reflect.*; import org.griphyn.vdl.util.ChimeraProperties; import edu.isi.pegasus.common.util.DynamicLoader; import org.griphyn.vdl.classes.Definitions; import org.griphyn.vdl.dbdriver.*; import org.griphyn.vdl.util.Logging; /** * This common schema interface defines the schemas in which the * abstraction layers access any given database. It is independent * of the implementing database, and does so by going via the * database driver class API.

* The separation of database driver and schema lowers the implementation * cost, as only N driver and M schemas need to be implemented, instead * of N x M schema-specific database-specific drivers. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * @see org.griphyn.vdl.dbdriver */ public abstract class DatabaseSchema implements Catalog { /** * This is the variable that connect to the lower level database driver. */ protected DatabaseDriver m_dbdriver; /** * This stores properties specific to the schema. Currently unused. */ protected Properties m_dbschemaprops; // // class methods // /** * Instantiates the appropriate leaf schema according to property values. * This method is a factory. * * @param dbSchemaName is the name of the class that conforms to * the DatabaseSchema API. This class will be dynamically loaded. * If the passed value is null, which should be the * default, the value of property vds.db.schema is taken. * @param propertyPrefix is the property prefix string to use. * @param arguments are arguments to the constructor of the driver * to load. Please use "new Object[0]" for the default constructor. * * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. * * @see org.griphyn.vdl.util.ChimeraProperties */ static public DatabaseSchema loadSchema( String dbSchemaName, String propertyPrefix, Object[] arguments ) throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { Logging log = Logging.instance(); log.log( "dbschema", 3, "accessing loadSchema( " + ( dbSchemaName == null ? "(null)" : dbSchemaName ) + ", " + ( propertyPrefix == null ? "(null)" : propertyPrefix ) + " )" ); // determine the database schema to load if ( dbSchemaName == null ) { // get it by property prefix dbSchemaName = ChimeraProperties.instance() .getDatabaseSchemaName( propertyPrefix ); if ( dbSchemaName == null ) throw new RuntimeException( "You need to specify the " + propertyPrefix + " property" ); } // syntactic sugar adds absolute class prefix if ( dbSchemaName.indexOf('.') == -1 ) { // how about xxx.getClass().getPackage().getName()? dbSchemaName = "org.griphyn.vdl.dbschema." + dbSchemaName; } // POSTCONDITION: we have now a fully-qualified class name log.log( "dbschema", 3, "trying to load " + dbSchemaName ); DynamicLoader dl = new DynamicLoader(dbSchemaName); DatabaseSchema result = (DatabaseSchema) dl.instantiate(arguments); // done if ( result == null ) log.log( "dbschema", 0, "unable to load " + dbSchemaName ); else log.log( "dbschema", 3, "successfully loaded " + dbSchemaName ); return result; } /** * Convenience method instantiates the appropriate child according to * property values. Effectively, the following is being called: * *

   * <pre>
   * loadSchema( null, propertyPrefix, new Object[0] );
   * </pre>
* * @param propertyPrefix is the property prefix string to use. * * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. * * @see #loadSchema( String, String, Object[] ) * @see org.griphyn.vdl.util.ChimeraProperties */ static public DatabaseSchema loadSchema( String propertyPrefix ) throws ClassNotFoundException,IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { return loadSchema( null, propertyPrefix, new Object[0] ); } // // instance methods // /** * Minimalistic default ctor. This constructor does nothing, * and loads nothing. But it initializes the empty schema props. */ protected DatabaseSchema() { Logging.instance().log( "dbschema", 3, "accessing DatabaseSchema()" ); this.m_dbdriver = null; this.m_dbschemaprops = new Properties(); } /** * Connects to the database, this method does not rely on global * property values, instead, each property has to be provided * explicitly. * * @param dbDriverName is the name of the class that conforms to * the DatabaseDriver API. This class will be dynamically loaded. * @param url is the database url * @param dbDriverProperties holds properties specific to the * database driver. * @param dbSchemaProperties holds properties specific to the * database schema. * * @exception ClassNotFoundException if the driver for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the driver's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the driver class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the driver * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the driver * throws an exception while being dynamically loaded. * @exception SQLException if the driver for the database can be * loaded, but faults when initially accessing the database */ public DatabaseSchema( String dbDriverName, String url, Properties dbDriverProperties, Properties dbSchemaProperties) throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, SQLException { Logging.instance().log( "dbschema", 3, "accessing DatabaseSchema(String,String, Properties, Properties)" ); // dynamically load the driver from its default constructor this.m_dbdriver = DatabaseDriver.loadDriver( dbDriverName, null, new Object[0] ); this.m_dbschemaprops = dbSchemaProperties; // create a database connection right now, right here // mind, url may be null, which may be legal for some drivers! Logging.instance().log( "dbschema", 3, "invoking connect( " + url + " )" ); this.m_dbdriver.connect( url, dbDriverProperties, null ); Logging.instance().log( "dbschema", 3, "connected to database backend" ); // prepare statements as necessary in the implementing classes! 
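    // (illustrative sketch only; the key and SQL below are hypothetical)
    // a concrete schema subclass would register its statements here, e.g.
    //   this.m_dbdriver.insertPreparedStatement( "stmt.save.example",
    //       "INSERT INTO example_table(id,name) VALUES (?,?)" );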
} /** * Guesses from the schema prefix the driver prefix. * * @param schemaPrefix is the property key prefix for the schema. * @return the guess for the driver's prefix, may be null */ private static String driverFromSchema( String schemaPrefix ) { String result = null; if ( schemaPrefix != null && schemaPrefix.endsWith(".schema") ) result = schemaPrefix.substring( 0, schemaPrefix.length()-7 ) + ".driver"; Logging.instance().log( "dbschema", 4, "dbdriver prefix guess " + ( result == null ? "(null)" : result ) ); return result; } /** * Guesses from the schema prefix the db prefix. * * @param schemaPrefix is the property key prefix for the schema. * * @return the guess for the db properties prefix, may be null */ private static String dbFromSchema( String schemaPrefix ) { String result = null; if ( schemaPrefix != null && schemaPrefix.endsWith(".schema") ) result = schemaPrefix.substring( 0, schemaPrefix.length()-7 ); Logging.instance().log( "dbschema", 4, "db propertiesr prefix guess " + ( result == null ? "(null)" : result ) ); return result; } /** * Connects to the database as specified by the properties, and * checks the schema implementation. Makes heavy use of global * property values. * * @param dbDriverName is the name of the class that conforms to * the DatabaseDriver API. This class will be dynamically loaded. * If the passed value is null, which should be the * default, the value of property vds.db.*.driver is taken. * @param propertyPrefix is the property prefix string to use. * * @exception ClassNotFoundException if the driver for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the driver's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the driver class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the driver * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the driver * throws an exception while being dynamically loaded. 
* @exception SQLException if the driver for the database can be * loaded, but faults when initially accessing the database */ public DatabaseSchema( String dbDriverName, String propertyPrefix ) throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, SQLException { Logging.instance().log( "dbschema", 3, "accessing DatabaseSchema(String,String)" ); // guess the db driver property prefix from schema prefix String driverPrefix = DatabaseSchema.driverFromSchema(propertyPrefix); // cache the properties - we may need a lot of them ChimeraProperties props = ChimeraProperties.instance(); if ( dbDriverName == null || dbDriverName.equals("") ) { if ( driverPrefix != null ) dbDriverName = props.getDatabaseDriverName(driverPrefix); if ( dbDriverName == null ) throw new RuntimeException( "You need to specify the database driver property" ); } Logging.instance().log( "dbschema", 4, "dbdriver class " + dbDriverName ); // dynamically load the driver from its default constructor this.m_dbdriver = DatabaseDriver.loadDriver( dbDriverName, driverPrefix, new Object[0] ); this.m_dbschemaprops = props.getDatabaseSchemaProperties( propertyPrefix ); //instead of the driverPrefix, use the DB prefix //This is because the DB properties are now gotten from example //pegasus.catalog.provenance.db.* instead of //pegasus.catalog.proveance.db.driver.* //Karan Oct 25, 2007. Pegasus Bug Number: 11 //http://vtcpc.isi.edu/bugzilla/show_bug.cgi?id=11 String dbPrefix = DatabaseSchema.dbFromSchema( propertyPrefix ); // Properties dbdriverprops = props.getDatabaseDriverProperties(driverPrefix); // String url = props.getDatabaseURL(driverPrefix); // extract those properties specific to the database driver. // these properties are transparently passed through MINUS the url key. Properties dbdriverprops = props.getDatabaseDriverProperties( dbPrefix ); String url = props.getDatabaseURL( dbPrefix ); // create a database connection right now, right here // mind, url may be null, which may be legal for some drivers! Logging.instance().log( "dbschema", 3, "invoking connect( " + url + " )" ); this.m_dbdriver.connect( url, dbdriverprops, null ); Logging.instance().log( "dbschema", 3, "connected to database backend" ); // prepare statements as necessary in the implementing classes! } /** * Associates a schema with a given database driver. * * @param driver is an instance conforming to the DatabaseDriver API. * @param propertyPrefix is the property prefix string to use. * * @exception SQLException if the driver for the database can be * loaded, but faults when initially accessing the database */ public DatabaseSchema( DatabaseDriver driver, String propertyPrefix ) throws SQLException, ClassNotFoundException, IOException { Logging.instance().log( "dbschema", 3, "accessing DatabaseSchema(DatabaseDriver,String)" ); this.m_dbdriver = driver; // guess the db driver property prefix from schema prefix String driverPrefix = DatabaseSchema.driverFromSchema(propertyPrefix); // cache the properties - we may need a lot of them ChimeraProperties props = ChimeraProperties.instance(); // get database schema properties this.m_dbschemaprops = props.getDatabaseSchemaProperties( propertyPrefix ); // extract those properties specific to the database driver. // these properties are transparently passed through MINUS the url key. 
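      // (illustration) driverFromSchema() merely rewrites the key suffix:
      // a schemaPrefix of "a.b.schema" yields the guess "a.b.driver",
      // under which the driver properties and the URL are looked up below.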
Properties dbdriverprops = props.getDatabaseDriverProperties(driverPrefix); String url = props.getDatabaseURL(driverPrefix); // create a database connection right now, right here // mind, url may be null, which may be legal for some drivers! Logging.instance().log( "dbschema", 3, "invoking connect( " + url + " )" ); this.m_dbdriver.connect( url, dbdriverprops, null ); Logging.instance().log( "dbschema", 3, "connected to database backend" ); // prepare statements as necessary in the implementing classes! } /** * pass-thru to driver. * @return true, if it is feasible to cache results from the driver * false, if requerying the driver is sufficiently fast (e.g. driver * is in main memory, or driver does caching itself). */ public boolean cachingMakesSense() { return this.m_dbdriver.cachingMakesSense(); } /** * Disassociate from the database driver before finishing. * Mind that performing this action may throw NullPointerException * in later stages! */ public void close() throws SQLException { if ( this.m_dbdriver != null ) { this.m_dbdriver.disconnect(); this.m_dbdriver = null; } } /** * Disassociate the database driver cleanly. */ protected void finalize() throws Throwable { this.close(); super.finalize(); } // // papa's little helpers // /** * Adds a string or a SQL-NULL at the current prepared statement * position, depending if the String value is null or not. * * @param ps is the prepared statement to extend * @param pos is the position at which to insert the value * @param s is the String to use, which may be null. */ protected void stringOrNull( PreparedStatement ps, int pos, String s ) throws SQLException { if ( s == null ) ps.setNull( pos, Types.VARCHAR ); else ps.setString( pos, s ); } /** * Adds a BIGINT or a SQL-NULL at the current prepared statement * position, depending if the value is -1 or not. A value of -1 * will lead to SQL-NULL. * * @param ps is the prepared statement to extend * @param pos is the position at which to insert the value * @param l is the long to use, which may be null. */ protected void longOrNull( PreparedStatement ps, int pos, long l ) throws SQLException { if ( l == -1 ) ps.setNull( pos, Types.BIGINT ); else { if ( m_dbdriver.preferString() ) ps.setString( pos, Long.toString(l) ); else ps.setLong( pos, l ); } } /** * Converts any given string into a guaranteed non-null value. * Especially the definition triples use empty strings instead of * null values. * * @param s is the string object to look at, which may be null. * @return a string that may be empty, but is not null. */ protected String makeNotNull( String s ) { return ( s == null ? new String() : s ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/InvocationSchema.java0000644000175000017500000005430011757531137025524 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ package org.griphyn.vdl.dbschema; import edu.isi.pegasus.planner.invocation.InvocationRecord; import edu.isi.pegasus.planner.invocation.HasText; import edu.isi.pegasus.planner.invocation.JobStatus; import edu.isi.pegasus.planner.invocation.Arguments; import edu.isi.pegasus.planner.invocation.Architecture; import edu.isi.pegasus.planner.invocation.StatCall; import edu.isi.pegasus.planner.invocation.HasDescriptor; import edu.isi.pegasus.planner.invocation.Usage; import edu.isi.pegasus.planner.invocation.StatInfo; import edu.isi.pegasus.planner.invocation.Job; import edu.isi.pegasus.planner.invocation.Status; import edu.isi.pegasus.planner.invocation.HasFilename; import java.sql.*; import java.util.*; import java.io.*; import java.lang.reflect.*; import java.net.InetAddress; import org.griphyn.vdl.util.ChimeraProperties; import edu.isi.pegasus.common.util.Separator; import org.griphyn.vdl.util.Logging; /** * This class provides basic functionalities to interact with the * backend database for invocation records, such as insertion, deletion, * and search. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class InvocationSchema extends DatabaseSchema implements PTC { /** * Default constructor for the provenance tracking. * * @param dbDriverName is the database driver name */ public InvocationSchema( String dbDriverName ) throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, SQLException, IOException { // load the driver from the properties super( dbDriverName, PROPERTY_PREFIX ); Logging.instance().log( "dbschema", 3, "done with parent schema c'tor" ); // Note: Does not rely on optional JDBC3 features this.m_dbdriver.insertPreparedStatement( "stmt.save.uname", "INSERT INTO ptc_uname(id,archmode,sysname,os_release,machine) " + "VALUES (?,?,?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.rusage", "INSERT INTO ptc_rusage(id,utime,stime,minflt,majflt,nswaps," + "nsignals,nvcsw,nivcsw) VALUES (?,?,?,?,?,?,?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.stat", "INSERT INTO ptc_stat(id,errno,fname,fdesc,size,mode,inode,atime," + "ctime,mtime,uid,gid) VALUES (?,?,?,?,?,?,?,?,?,?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.ivr", "INSERT INTO ptc_invocation(id,creator,creationtime,wf_label," + "wf_time,version,start,duration,tr_namespace,tr_name,tr_version," + "dv_namespace,dv_name,dv_version,resource,host,pid," + "uid,gid,cwd,arch,total)" + "VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.job", "INSERT INTO ptc_job(id,type,start,duration,pid,rusage,stat," + "exitcode,exit_msg,args) VALUES (?,?,?,?,?,?,?,?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.lfn", "INSERT INTO ptc_lfn(id,stat,initial,lfn) VALUES (?,?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.ivr.sk", "SELECT id FROM ptc_invocation WHERE start=? AND host=? AND pid=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.uname.sk", "SELECT id FROM ptc_uname WHERE archmode=? AND sysname=? " + "AND os_release=? AND machine=?" ); } /** * Converts a regular datum into an SQL timestamp. * @param date is a regular Java date * @return a SQL timestamp obtained from the Date. */ protected java.sql.Timestamp toStamp( java.util.Date date ) { return new java.sql.Timestamp( date.getTime() ); } /** * Checks the existence of an invocation record in the database. 
* The information is based on the (start,host,pid) tuple, although * with private networks, cases may arise that have this tuple * identical, yet are different. * * @param start is the start time of the grid launcher * @param host is the address of the host it ran upon * @param pid is the process id of the grid launcher itself. * @return the id of the existing record, or -1 */ public long getInvocationID( java.util.Date start, InetAddress host, int pid ) throws SQLException { long result = -1; Logging.instance().log("xaction", 1, "START select invocation id" ); PreparedStatement ps = m_dbdriver.getPreparedStatement("stmt.select.ivr.sk"); int i=1; ps.setTimestamp( i++, toStamp(start) ); ps.setString( i++, host.getHostAddress() ); ps.setInt( i++, pid ); Logging.instance().log( "chunk", 2, "SELECT id FROM invocation" ); ResultSet rs = ps.executeQuery(); if ( rs.next() ) result = rs.getLong(1); rs.close(); Logging.instance().log("xaction", 1, "FINAL select invocation id" ); return result; } /** * Determines the id of an existing identical architecture, or creates * a new entry. * * @param arch is the architecture description * @return the id of the architecture, either new or existing. */ public long saveArchitecture( Architecture arch ) throws SQLException { long result = -1; Logging.instance().log("xaction", 1, "START select uname id" ); int i=1; PreparedStatement ps = m_dbdriver.getPreparedStatement("stmt.select.uname.sk"); stringOrNull( ps, i++, arch.getArchMode() ); stringOrNull( ps, i++, arch.getSystemName() ); stringOrNull( ps, i++, arch.getRelease() ); stringOrNull( ps, i++, arch.getMachine() ); Logging.instance().log( "chunk", 2, "SELECT id FROM uname" ); ResultSet rs = ps.executeQuery(); if ( rs.next() ) result = rs.getLong(1); rs.close(); Logging.instance().log("xaction", 1, "FINAL select uname id" ); if ( result == -1 ) { // nothing found, need to really insert things Logging.instance().log("xaction", 1, "START save uname" ); try { result = m_dbdriver.sequence1( "uname_id_seq" ); } catch ( SQLException e ) { Logging.instance().log( "app", 0, "During rusage sequence number: " + e.toString().trim() ); Logging.instance().log("xaction", 1, "START rollback" ); m_dbdriver.rollback(); Logging.instance().log("xaction", 1, "FINAL rollback" ); throw e; // re-throw } // add ID explicitely from sequence to insertion ps = m_dbdriver.getPreparedStatement("stmt.save.uname"); i=1; longOrNull( ps, i++, result ); stringOrNull( ps, i++, arch.getArchMode() ); stringOrNull( ps, i++, arch.getSystemName() ); stringOrNull( ps, i++, arch.getRelease() ); stringOrNull( ps, i++, arch.getMachine() ); // save prepared values Logging.instance().log( "chunk", 2, "INSERT INTO uname" ); try { int rc = ps.executeUpdate(); if ( result == -1 ) result = m_dbdriver.sequence2( ps, "uname_id_seq", 1 ); } catch ( SQLException e ) { // race condition possibility: try once more to find info result = -1; Logging.instance().log("xaction", 1, "START select uname id" ); i = 1; ps = m_dbdriver.getPreparedStatement("stmt.select.uname.sk"); stringOrNull( ps, i++, arch.getArchMode() ); stringOrNull( ps, i++, arch.getSystemName() ); stringOrNull( ps, i++, arch.getRelease() ); stringOrNull( ps, i++, arch.getMachine() ); Logging.instance().log( "chunk", 2, "SELECT id FROM uname" ); rs = ps.executeQuery(); if ( rs.next() ) result = rs.getLong(1); rs.close(); Logging.instance().log("xaction", 1, "FINAL select uname id" ); if ( result == -1 ) { Logging.instance().log( "app", 0, "While inserting into rusage: " + e.toString().trim() ); // 
rollback in saveInvocation() m_dbdriver.cancelPreparedStatement( "stmt.save.uname" ); throw e; // re-throw } } Logging.instance().log("xaction", 1, "FINAL save uname: ID=" + result ); } // done return result; } /** * Inserts an invocation record into the database. * * @param ivr is the invocation record to store. * @return true, if insertion was successful, false otherwise. */ public boolean saveInvocation( InvocationRecord ivr ) throws SQLException { // big outer try try { long id = -1; try { id = m_dbdriver.sequence1( "invocation_id_seq" ); } catch ( SQLException e ) { Logging.instance().log( "app", 0, "During IVR sequence number: " + e.toString().trim() ); throw e; // re-throw } // add ID explicitely from sequence to insertion Logging.instance().log("xaction", 1, "START save invocation" ); PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.save.ivr" ); int i = 1; longOrNull( ps, i++, id ); // current_user() stringOrNull( ps, i++, System.getProperty("user.name") ); // now() ps.setTimestamp( i++, toStamp(new java.util.Date()) ); // wf_label, wf_time: not available at the moment... if ( ivr.getWorkflowLabel() == null ) ps.setNull( i++, Types.VARCHAR ); else ps.setString( i++, ivr.getWorkflowLabel() ); if ( ivr.getWorkflowTimestamp() == null ) ps.setNull( i++, Types.TIMESTAMP ); else ps.setTimestamp( i++, toStamp(ivr.getWorkflowTimestamp()) ); // version ps.setString( i++, ivr.getVersion() ); // start, duration ps.setTimestamp( i++, toStamp(ivr.getStart()) ); ps.setDouble( i++,ivr.getDuration() ); // TR i = splitDefinition( ps, ivr.getTransformation(), i ); // DV: not available at the moment i = splitDefinition( ps, ivr.getDerivation(), i ); // resource (site handle) if ( ivr.getResource() == null ) ps.setNull( i++, Types.VARCHAR ); else ps.setString( i++, ivr.getResource() ); // host ps.setString( i++, ivr.getHostAddress().getHostAddress() ); // [pug]id ps.setInt( i++, ivr.getPID() ); ps.setInt( i++, ivr.getUID() ); ps.setInt( i++, ivr.getGID() ); // cwd stringOrNull( ps, i++, ivr.getWorkingDirectory().getValue() ); // uname ps.setLong( i++, saveArchitecture( ivr.getArchitecture() ) ); // save usage and remember id ps.setLong( i++, saveUsage( ivr.getUsage() ) ); // save prepared values Logging.instance().log( "chunk", 2, "INSERT INTO invocation" ); int rc = ps.executeUpdate(); if ( id == -1 ) id = m_dbdriver.sequence2( ps, "invocation_id_seq", 1 ); Logging.instance().log("xaction", 1, "FINAL save invocation: ID=" + id ); // save jobs belonging to invocation for ( Iterator j=ivr.iterateJob(); j.hasNext(); ) { saveJob( id, ((Job) j.next()) ); } // jsv 20050815: more stat info for Prophesy for ( Iterator j=ivr.iterateStatCall(); j.hasNext(); ) { StatCall s = (StatCall) j.next(); String sch = s.getHandle().toLowerCase(); if ( sch.equals("initial") || sch.equals("final") ) { saveLFN( id, s ); } } // done m_dbdriver.commit(); return true; } catch ( SQLException e ) { // show complete exception chain for ( SQLException walk=e; walk != null; walk=walk.getNextException() ) { Logging.instance().log( "app", 0, walk.getSQLState() + ": " + walk.getErrorCode() + ": " + walk.getMessage().trim() ); StackTraceElement[] ste = walk.getStackTrace(); for ( int n=0; n<5 && nnull value, or have regular expression. * * @param namespace namespace * @param name name * @param version version * @param type definition type (TR or DV) * @return a list of definitions that were deleted. 
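   * <p>For instance, the transformation-only case evaluates the XQuery
   * below (quoted from the implementation):
   * <pre>
   *   for $n in distinct-values(//transformation/@namespace) order by $n return $n
   * </pre>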
/** * Deletes one or more definitions from the backend database by their * ns::name:version triple and type. Jokers may be expressed as null * value, or have regular expression. * * @param namespace namespace * @param name name * @param version version * @param type definition type (TR or DV) * @return a list of definitions that were deleted. * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List deleteDefinitionEx( String namespace, String name, String version, int type ) throws SQLException { Logging.instance().log("xaction", 1, "START delete definitions ex" ); java.util.List result = searchDefinitionEx(namespace, name, version, type); for (int i=0; i < result.size(); i++) deleteDefinition((Definition)result.get(i)); Logging.instance().log("xaction", 1, "FINAL delete definitions ex" ); return result; }
/** * Searches the database for definitions by ns::name:version triple * and by type (either Transformation or Derivation). This version of * the search allows for jokers expressed as null value. * * @param namespace namespace, null to match any namespace * @param name name, null to match any name * @param version version, null to match any version * @param type type of definition, see below, or -1 as wildcard * @return a list of Definition items, which may be empty * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION * @see #loadDefinition( String, String, String, int ) */ public java.util.List searchDefinitionEx( String namespace, String name, String version, int type ) throws SQLException { String xquery = ""; String triple = ""; if (namespace != null) triple += "[matches(@namespace, '" + namespace + "')]"; if (name != null) triple += "[matches(@name, '" + name + "')]"; if (version != null) triple += "[matches(@version, '" + version + "')]"; if (type != -1) { if (type == Definition.TRANSFORMATION) xquery = "//transformation" + triple; else xquery = "//derivation" + triple; } else xquery = "//derivation" + triple + "|//transformation" + triple; return searchDefinition(xquery); }
/** * Searches the database for all LFNs that match a certain pattern. * The linkage is an additional constraint. This method allows regular * expressions. * * @param lfn the LFN name * @param link the linkage type of the LFN * @return a list of filenames that match the criterion. * * @see org.griphyn.vdl.classes.LFN#NONE * @see org.griphyn.vdl.classes.LFN#INPUT * @see org.griphyn.vdl.classes.LFN#OUTPUT * @see org.griphyn.vdl.classes.LFN#INOUT */ public java.util.List searchLFN( String lfn, int link ) throws SQLException { if ( lfn == null ) throw new NullPointerException("You must query for a filename"); String linkQuery = ""; String type = LFN.toString(link); if (type != null) linkQuery = "[@link = '" + type + "']"; String xquery = // "//lfn[matches(@file, '" + lfn + "')]" + linkQuery + "/@file"; "//lfn" + linkQuery + "/@file[matches(., '" + lfn + "')]"; java.util.List result = searchElements(xquery); Logging.instance().log("xaction", 1, "FINAL select LFNs" ); return result; }
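searchDefinitionEx() and searchLFN() above assemble XQuery strings whose matches() predicates treat the arguments as regular expressions rather than literals. The small, runnable sketch below mirrors that string building and spells out the resulting query for made-up sample arguments.

/** Prints the XQuery string that searchDefinitionEx-style
 * concatenation produces; the sample arguments are illustrative. */
public class QueryDemo {
  public static void main( String[] args ) {
    String namespace = "diamond", name = "find.*";   // regexes, not literals
    String triple = "";
    if ( namespace != null )
      triple += "[matches(@namespace, '" + namespace + "')]";
    if ( name != null )
      triple += "[matches(@name, '" + name + "')]";
    // type == -1: search transformations and derivations alike
    String xquery = "//derivation" + triple + "|//transformation" + triple;
    System.out.println( xquery );
    // -> //derivation[matches(@namespace, 'diamond')][matches(@name, 'find.*')]
    //    |//transformation[matches(@namespace, 'diamond')][matches(@name, 'find.*')]
  }
}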
/** * Searches the database for a list of namespaces of the definitions, * sorted in ascending order. * * @param type type of definition, see below, or -1 for both * @return a list of namespaces * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List getNamespaceList( int type ) throws SQLException { String xquery = ""; if (type == Definition.TRANSFORMATION) xquery = "for $n in distinct-values(//transformation/@namespace) order by $n return $n"; else if (type == Definition.DERIVATION) xquery = "for $n in distinct-values(//derivation/@namespace) order by $n return $n"; else xquery = "for $n in distinct-values(//derivation/@namespace|//transformation/@namespace) order by $n return $n"; java.util.List result = searchElements(xquery); Logging.instance().log("xaction", 1, "FINAL select namespaces" ); return result; }
/** * Searches the database for a list of fully-qualified names of * the definitions, sorted in ascending order. * * @param type type of definition, see below, or -1 for both. * @return a list of FQDNs * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List getFQDNList( int type ) throws SQLException { String xquery = ""; if (type == Definition.TRANSFORMATION) xquery = "for $d in //transformation order by $d/@namespace empty least, $d/@name, $d/@version return string-join((string-join(($d/@namespace, $d/@name), '::'), $d/@version), ':')"; else if (type == Definition.DERIVATION) xquery = "for $d in //derivation order by $d/@namespace empty least, $d/@name, $d/@version return string-join((string-join(($d/@namespace, $d/@name), '::'), $d/@version), ':')"; else xquery = "for $d in (//transformation|//derivation) order by $d/@namespace empty least, $d/@name, $d/@version return string-join((string-join(($d/@namespace, $d/@name), '::'), $d/@version), ':')"; java.util.List result = searchElements(xquery); Logging.instance().log("xaction", 1, "FINAL select FQDNs" ); return result; }
/** * Deletes an annotation with the specified key. * * @param primary is the primary object specifier for the class. * According to the type, this is either the FQDI, or the filename. * @param secondary is a helper argument for annotations to calls * and formal arguments, and should be null for all other classes. * For calls, the argument must be packed into {@link java.lang.Integer}. * @param kind defines the kind/class of object to annotate. * @param key is the annotation key. * @return true, if the database was modified, false otherwise. * @exception SQLException if something went wrong during database * access.
*/ public boolean deleteAnnotation( String primary, Object secondary, int kind, String key ) throws SQLException, IllegalArgumentException { String subject = ""; String select = null; switch ( kind ) { case CLASS_TRANSFORMATION: subject = "tr"; break; case CLASS_DERIVATION: subject = "dv"; break; case CLASS_CALL: // may throw ClassCastException subject = "tr"; select = "call[" + ((Integer) secondary).intValue() + "]"; break; case CLASS_DECLARE: subject = "tr"; // may throw ClassCastException //select = "declare[@name='" + (String)secondary + "']"; select = (String)secondary; break; case CLASS_FILENAME: subject = "lfn"; break; default: throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" ); } try { XMLResource res = null; String xquery = "/annotation/metadata[@subject=\"" + subject + "\"][@name=\"" + primary + "\"]"; if (select == null) { if (kind != CLASS_FILENAME) { xquery += "[empty(@select)]"; } } else { xquery += "[@select=\"" + select + "\"]"; } xquery += "/attribute[@name=\"" + key + "\"]"; if ((res = findAnnotation(xquery)) != null) { String id = res.getDocumentId(); // get the document XMLResource document = (XMLResource)m_meta.getResource(id); m_meta.removeResource(document); return true; } return false; } catch (XMLDBException e) { throw new SQLException(e.getMessage()); } } /** * Deletes a specific key in an annotated transformation. * * @param fqdi is the FQDI of the transformation * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Transformation */ public boolean deleteAnnotationTransformation( String fqdi, String key ) throws SQLException, IllegalArgumentException { return deleteAnnotation(fqdi, null, CLASS_TRANSFORMATION, key); } /** * Deletes a specific key in an annotated derivation. * * @param fqdi is the FQDI of the derivation * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Derivation */ public boolean deleteAnnotationDerivation( String fqdi, String key ) throws SQLException, IllegalArgumentException { return deleteAnnotation(fqdi, null, CLASS_DERIVATION, key); } /** * Deletes a specific key in an annotated formal argument. * * @param fqdi is the FQDI of the transformation * @param farg is the name of the formal argument * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Declare */ public boolean deleteAnnotationDeclare( String fqdi, String farg, String key ) throws SQLException, IllegalArgumentException { return deleteAnnotation(fqdi, farg, CLASS_DECLARE, key); } /** * Deletes a specific key for a call statement. * * @param fqdi is the FQDI of the transformation * @param index is the number of the call to annotate. * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Call */ public boolean deleteAnnotationCall( String fqdi, int index, String key ) throws SQLException, IllegalArgumentException { return deleteAnnotation(fqdi, new Integer(index), CLASS_CALL, key); } /** * Deletes a specific key in an annotated filename. * * @param filename is the name of the file that was annotated. * @param key is the key to search for * @return true, if the database was modified, false otherwise. 
* @see org.griphyn.vdl.classes.LFN */ public boolean deleteAnnotationFilename( String filename, String key ) throws SQLException, IllegalArgumentException { return deleteAnnotation(filename, null, CLASS_FILENAME, key); }
/** * Annotates a transformation with a tuple. * * @param fqdi is the FQDI to annotate * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain. * @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.Transformation */ public long saveAnnotationTransformation( String fqdi, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException { // An earlier inline implementation, which located the metadata // document and patched it through the XUpdate query service, was // superseded by the generic saveAnnotation() call below. return saveAnnotation(fqdi, null, CLASS_TRANSFORMATION, annotation, overwrite); }
/** * Annotates a derivation with a tuple. * * @param fqdi is the FQDI to annotate * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain. * @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.Derivation */ public long saveAnnotationDerivation( String fqdi, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException { return saveAnnotation(fqdi, null, CLASS_DERIVATION, annotation, overwrite); }
/** * Annotates a transformation argument with a tuple. * * @param fqdi is the FQDI to annotate * @param formalname is the name of the formal argument to annotate. * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain. * @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.Declare */ public long saveAnnotationDeclare( String fqdi, String formalname, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException { return saveAnnotation(fqdi, formalname, CLASS_DECLARE, annotation, overwrite); }
/** * Annotates a transformation call with a tuple. * * @param fqdi is the FQDI to annotate * @param index is the number of the call to annotate. * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain.
* @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.Call */ public long saveAnnotationCall( String fqdi, int index, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException { return saveAnnotation(fqdi, new Integer(index), CLASS_CALL, annotation, overwrite); }
/** * Annotates a logical filename with a tuple. * * @param filename is the logical filename to annotate * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain. * @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.LFN */ public long saveAnnotationFilename( String filename, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException { return saveAnnotation(filename, null, CLASS_FILENAME, annotation, overwrite); }
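All of the class-specific entry points funnel into saveAnnotation(), loadAnnotation() and deleteAnnotation(), which share one mapping from the kind constant to the metadata document's subject and select attributes. The sketch below spells that mapping out as a standalone helper; the helper itself is hypothetical, and it assumes the CLASS_* constants are visible as they are inside this class.

// Hypothetical helper, mirroring the switch shared by the
// save/load/deleteAnnotation methods in this class.
static String[] subjectAndSelect( int kind, Object secondary ) {
  switch ( kind ) {
    case CLASS_TRANSFORMATION: return new String[]{ "tr", null };
    case CLASS_DERIVATION:     return new String[]{ "dv", null };
    case CLASS_CALL:           // secondary is an Integer call position
      return new String[]{ "tr", "call[" + secondary + "]" };
    case CLASS_DECLARE:        // secondary is the formal argument name
      return new String[]{ "tr", (String) secondary };
    case CLASS_FILENAME:       return new String[]{ "lfn", null };
    default:
      throw new IllegalArgumentException( "kind=" + kind + " cannot be annotated" );
  }
}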
/** * Annotates any of the annotatable classes with the specified tuple. * This is an interface method to the various class-specific methods. * * @param primary is the primary object specifier for the class. * According to the type, this is either the FQDI, or the filename. * @param secondary is a helper argument for annotations to calls * and formal arguments, and should be null for all other classes. * For calls, the argument must be packed into {@link java.lang.Integer}. * @param kind defines the kind/class of object to annotate. * @param annotation is the value to place into the class. * @param overwrite is a predicate on replace or maintain. * @return the insertion id, or -1, if the database was untouched * @see #saveAnnotationTransformation( String, Tuple, boolean ) * @see #saveAnnotationDerivation( String, Tuple, boolean ) * @see #saveAnnotationCall( String, int, Tuple, boolean ) * @see #saveAnnotationDeclare( String, String, Tuple, boolean ) * @see #saveAnnotationFilename( String, Tuple, boolean ) */ public long saveAnnotation( String primary, Object secondary, int kind, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException { long result = -1; String subject = ""; String select = null; String q_sec = null; String defn = "transformation"; switch ( kind ) { case CLASS_TRANSFORMATION: subject = "tr"; break; case CLASS_DERIVATION: subject = "dv"; defn = "derivation"; break; case CLASS_CALL: // may throw ClassCastException subject = "tr"; select = "call[" + ((Integer) secondary).intValue() + "]"; q_sec = select; break; case CLASS_DECLARE: subject = "tr"; // may throw ClassCastException q_sec = "declare[@name='" + (String)secondary + "']"; select = (String)secondary; break; case CLASS_FILENAME: subject = "lfn"; break; default: throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" ); } try { if (kind != CLASS_FILENAME) { String[] names = Separator.split(primary); String q_ns, q_name, q_ver; if (names[0] == null) q_ns = "[empty(@namespace)]"; else q_ns = "[@namespace='" + names[0] + "']"; if (names[1] == null) q_name = "[empty(@name)]"; else q_name = "[@name='" + names[1] + "']"; if (names[2] == null) q_ver = "[empty(@version)]"; else q_ver = "[@version='" + names[2] + "']"; // check if tr/dv is valid String xquery = "//" + defn + q_ns + q_name + q_ver; if (q_sec != null ) xquery += "/" + q_sec; Logging.instance().log("nxd", 0, "query: " + xquery); ResourceSet rs = m_vdcQrySvc.query(xquery); ResourceIterator i = rs.getIterator(); if (!i.hasMoreResources()) { Logging.instance().log("app", 0, "definition not found!"); return -1; } } String key = annotation.getKey(); String type = annotation.getTypeString(); Object value = annotation.getValue(); String id = null; XMLResource res = null; String xquery = "/annotation/metadata[@subject=\"" + subject + "\"][@name=\"" + primary + "\"]"; if (select == null) { if (kind != CLASS_FILENAME) { xquery += "[empty(@select)]"; } } else { xquery += "[@select=\"" + select + "\"]"; } xquery += "/attribute[@name=\"" + key + "\"]"; if ((res = findAnnotation(xquery)) != null) { if (!overwrite) { System.err.println("key " + key + " already defined!"); return -1; } id = res.getDocumentId(); } // create the annotation document; the element shape matches what // loadAnnotationResource() expects String anno = "<annotation><metadata subject=\"" + subject + "\" name=\"" + primary + "\"" + ( select == null ? "" : " select=\"" + select + "\"" ) + "><attribute name=\"" + key + "\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"" + type + "\">" + value + "</attribute></metadata></annotation>"; // create new XMLResource; an id will be assigned to the new resource XMLResource document = (XMLResource)m_meta.createResource(id, "XMLResource"); document.setContent(anno); m_meta.storeResource(document); return 0; } catch (XMLDBException e) { throw new SQLException(e.getMessage()); } }
/** * Obtains the value to a specific key in an annotated transformation. * * @param fqdi is the FQDI of the transformation * @param key is the key to search for * @return the annotated value, or null if not found. * @see org.griphyn.vdl.classes.Transformation */ public Tuple loadAnnotationTransformation( String fqdi, String key ) throws SQLException, IllegalArgumentException { return loadAnnotation(fqdi, null, CLASS_TRANSFORMATION, key); }
/** * Obtains the value to a specific key in an annotated derivation. * * @param fqdi is the FQDI of the derivation * @param key is the key to search for * @return the annotated value, or null if not found. * @see org.griphyn.vdl.classes.Derivation */ public Tuple loadAnnotationDerivation( String fqdi, String key ) throws SQLException, IllegalArgumentException { return loadAnnotation(fqdi, null, CLASS_DERIVATION, key); }
/** * Obtains the value to a specific key in an annotated formal argument. * * @param fqdi is the FQDI of the transformation * @param farg is the name of the formal argument * @param key is the key to search for * @return the annotated value, or null if not found * @see org.griphyn.vdl.classes.Declare */ public Tuple loadAnnotationDeclare( String fqdi, String farg, String key ) throws SQLException, IllegalArgumentException { return loadAnnotation(fqdi, farg, CLASS_DECLARE, key); }
/** * Obtains the value to a specific key for a call statement. * * @param fqdi is the FQDI of the transformation * @param index is the number of the call to annotate. * @param key is the key to search for * @return the annotated value, or null if not found * @see org.griphyn.vdl.classes.Call */ public Tuple loadAnnotationCall( String fqdi, int index, String key ) throws SQLException, IllegalArgumentException { return loadAnnotation(fqdi, new Integer(index), CLASS_CALL, key); }
/** * Obtains the value to a specific key in an annotated filename. * * @param filename is the name of the file that was annotated. * @param key is the key to search for * @return the annotated value, or null if not found. * @see org.griphyn.vdl.classes.LFN */ public Tuple loadAnnotationFilename( String filename, String key ) throws SQLException, IllegalArgumentException { return loadAnnotation(filename, null, CLASS_FILENAME, key); }
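A usage sketch of the save/load pair above: annotate a transformation, then read the annotation back by key. It assumes the Tuple classes live in org.griphyn.vdl.annotation (as the @see tags suggest), that the surrounding class implements this package's Annotation interface, and that "schema" is an already-connected instance; the FQDI and the key/value are made up.

import org.griphyn.vdl.annotation.Tuple;
import org.griphyn.vdl.annotation.TupleString;

/** Hypothetical annotation round trip against a connected schema. */
public class AnnotationRoundTrip {
  static void demo( Annotation schema ) throws Exception {
    String fqdi = "diamond::findrange:1.0";          // made-up FQDI
    Tuple t = new TupleString( "author", "rynge" );  // made-up key/value
    schema.saveAnnotationTransformation( fqdi, t, true );   // overwrite
    Tuple back = schema.loadAnnotationTransformation( fqdi, "author" );
    if ( back != null )
      System.out.println( back.getKey() + " = " + back.getValue() );
  }
}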
/** * Retrieves a specific annotation from any annotatable class. This is * an interface method to the various class-specific methods. * * @param primary is the primary object specifier for the class. * According to the type, this is either the FQDI, or the filename. * @param secondary is a helper argument for annotations to calls * and formal arguments, and should be null for all other classes. * For calls, the argument must be packed into {@link java.lang.Integer}. * @param kind defines the kind/class of object to annotate. * @param key is the key to look for. * @return null if not found, otherwise the annotation tuple. * @see #loadAnnotationTransformation( String, String ) * @see #loadAnnotationDerivation( String, String ) * @see #loadAnnotationCall( String, int, String ) * @see #loadAnnotationDeclare( String, String, String ) * @see #loadAnnotationFilename( String, String ) */ public Tuple loadAnnotation( String primary, Object secondary, int kind, String key ) throws SQLException, IllegalArgumentException { Tuple result = null; String subject = ""; String select = null; switch ( kind ) { case CLASS_TRANSFORMATION: subject = "tr"; break; case CLASS_DERIVATION: subject = "dv"; break; case CLASS_CALL: // may throw ClassCastException subject = "tr"; select = "call[" + ((Integer) secondary).intValue() + "]"; break; case CLASS_DECLARE: subject = "tr"; // may throw ClassCastException //select = "declare[@name='" + (String)secondary + "']"; select = (String)secondary; break; case CLASS_FILENAME: subject = "lfn"; break; default: throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" ); } try { String id = null; String xquery = "/annotation/metadata[@subject=\"" + subject + "\"][@name=\"" + primary + "\"]"; if (select == null) { if (kind != CLASS_FILENAME) { xquery += "[empty(@select)]"; } } else { xquery += "[@select=\"" + select + "\"]"; } xquery += "/attribute[@name=\"" + key + "\"]"; XMLResource res = null; if ((res = findAnnotation(xquery)) != null) { result = loadAnnotationResource(res); } return result; } catch (Exception e) { throw new SQLException(e.getMessage()); } }
/** * Obtains the annotation tuple from an XML resource. */ protected Tuple loadAnnotationResource(XMLResource res) throws SQLException { Tuple result = null; if (res == null) return result; Element elem; try { elem = (Element)res.getContentAsDOM(); } catch (Exception e) { throw new SQLException( e.getMessage()); } if (elem != null) { String key = elem.getAttribute("name"); String type = elem.getAttributeNS("http://www.w3.org/2001/XMLSchema-instance","type"); String value = elem.getFirstChild().getNodeValue(); if (key == null || type == null || value == null) return result; if (type.equals("xs:string")) { result = new TupleString(key, null); result.setValue(value); return result; } if (type.equals("xs:float")) { result = new TupleFloat(key, 0); result.setValue(value); return result; } if (type.equals("xs:int")) { result = new TupleInteger(key, 0); result.setValue(value); return result; } if (type.equals("xs:boolean")) { result = new TupleBoolean(key, false); result.setValue(value); return result; } if (type.equals("xs:date")) { result = new TupleDate(key, null); result.setValue(value); return result; } } return result; }
/** * Lists all annotations for a transformation. * * @param fqdi is the FQDI of the transformation * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.Transformation */ public java.util.List loadAnnotationTransformation( String fqdi ) throws SQLException, IllegalArgumentException { return loadAnnotation(fqdi, null, CLASS_TRANSFORMATION); }
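loadAnnotationResource() above dispatches on the attribute element's xsi:type to pick the Tuple subclass (xs:string, xs:float, xs:int, xs:boolean, xs:date). The runnable sketch below parses an element of the shape that method reads; the element content is reconstructed from the method's DOM accesses, purely for illustration.

import java.io.ByteArrayInputStream;
import javax.xml.parsers.DocumentBuilderFactory;
import org.w3c.dom.Element;

/** Demonstrates the attribute element shape and the xsi:type lookup. */
public class TypeDispatchDemo {
  public static void main( String[] args ) throws Exception {
    String xml = "<attribute name=\"author\" " +
      "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " +
      "xsi:type=\"xs:string\">rynge</attribute>";
    DocumentBuilderFactory f = DocumentBuilderFactory.newInstance();
    f.setNamespaceAware( true );   // required for getAttributeNS below
    Element e = f.newDocumentBuilder()
      .parse( new ByteArrayInputStream( xml.getBytes("UTF-8") ) )
      .getDocumentElement();
    String type = e.getAttributeNS(
      "http://www.w3.org/2001/XMLSchema-instance", "type" );
    System.out.println( e.getAttribute("name") + " is a " + type );
    // xs:string -> TupleString, xs:int -> TupleInteger, xs:float ->
    // TupleFloat, xs:boolean -> TupleBoolean, xs:date -> TupleDate
  }
}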
/** * Lists all annotations for a derivation. * * @param fqdi is the FQDI of the derivation * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.Derivation */ public java.util.List loadAnnotationDerivation( String fqdi ) throws SQLException, IllegalArgumentException { return loadAnnotation(fqdi, null, CLASS_DERIVATION); }
/** * Lists all annotations for a formal argument. * * @param fqdi is the FQDI of the transformation * @param farg is the name of the formal argument * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.Declare */ public java.util.List loadAnnotationDeclare( String fqdi, String farg ) throws SQLException, IllegalArgumentException { return loadAnnotation(fqdi, farg, CLASS_DECLARE); }
/** * Lists all annotations for a call statement. * * @param fqdi is the FQDI of the transformation * @param index is the number of the call to annotate. * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.Call */ public java.util.List loadAnnotationCall( String fqdi, int index ) throws SQLException, IllegalArgumentException { return loadAnnotation(fqdi, new Integer(index), CLASS_CALL); }
/** * Lists all annotations for a logical filename. * * @param filename is the logical filename. * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.LFN */ public java.util.List loadAnnotationFilename( String filename ) throws SQLException, IllegalArgumentException { return loadAnnotation(filename, null, CLASS_FILENAME); }
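A usage sketch for the list variants above: dump every annotation attached to one derivation. As before, "schema" stands for a hypothetical, already-connected instance implementing this package's Annotation interface, and the FQDI is made up.

import java.util.Iterator;
import java.util.List;
import org.griphyn.vdl.annotation.Tuple;

/** Hypothetical listing of all annotations on a derivation. */
class ListAnnotations {
  static void dump( Annotation schema, String fqdi ) throws Exception {
    List all = schema.loadAnnotationDerivation( fqdi );   // may be empty
    for ( Iterator i = all.iterator(); i.hasNext(); ) {
      Tuple t = (Tuple) i.next();
      System.out.println( t.getKey() + " (" + t.getTypeString() +
                          ") = " + t.getValue() );
    }
  }
}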
/** * Retrieves all annotations from any annotatable class. This is an * interface method to the various class-specific methods. * * @param primary is the primary object specifier for the class. * According to the type, this is either the FQDI, or the filename. * @param secondary is a helper argument for annotations to calls * and formal arguments, and should be null for all other classes. * For calls, the argument must be packed into {@link java.lang.Integer}. * @param kind defines the kind/class of object to annotate. * @return a list of tuples, which may be empty. * @see #loadAnnotationTransformation( String ) * @see #loadAnnotationDerivation( String ) * @see #loadAnnotationCall( String, int ) * @see #loadAnnotationDeclare( String, String ) * @see #loadAnnotationFilename( String ) */ public java.util.List loadAnnotation( String primary, Object secondary, int kind ) throws SQLException, IllegalArgumentException { java.util.List result = new java.util.ArrayList(); String subject = ""; String select = null; switch ( kind ) { case CLASS_TRANSFORMATION: subject = "tr"; break; case CLASS_DERIVATION: subject = "dv"; break; case CLASS_CALL: // may throw ClassCastException subject = "tr"; select = "call[" + ((Integer) secondary).intValue() + "]"; break; case CLASS_DECLARE: subject = "tr"; // may throw ClassCastException //select = "declare[@name='" + (String)secondary + "']"; select = (String)secondary; break; case CLASS_FILENAME: subject = "lfn"; break; default: throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" ); } try { String id = null; String xquery = "/annotation/metadata[@subject=\"" + subject + "\"][@name=\"" + primary + "\"]"; if (select == null) { if (kind != CLASS_FILENAME) { xquery += "[empty(@select)]"; } } else { xquery += "[@select=\"" + select + "\"]"; } xquery += "/attribute"; Logging.instance().log("nxd", 1, "query: " + xquery ); ResourceSet rs = m_metaQrySvc.query(xquery); ResourceIterator i = rs.getIterator(); while (i.hasMoreResources()) { XMLResource res = (XMLResource)i.nextResource(); Tuple tuple = loadAnnotationResource(res); if (tuple != null) { result.add(tuple); } } return result; } catch (Exception e) { throw new SQLException(e.getMessage()); } }
/** * Searches for LFNs or Definitions that have certain annotations. * * @param kind defines the kind/class of object annotated. * @param arg is used only for TR ARG and TR CALL. For the former * it is the name of the argument (String), for the latter the position of * the call (Integer). * @param tree stores the query tree to query the annotation * @return a list of LFNs if searching for filenames, otherwise a list of * definitions. * @exception SQLException if something goes wrong with the database.
* @see org.griphyn.vdl.annotation.QueryTree */ public java.util.List searchAnnotation( int kind, Object arg, QueryTree tree) throws SQLException { java.util.List result = new java.util.ArrayList(); if ( tree == null) return result; String subject = ""; String defn = "transformation"; String select = null; switch ( kind ) { case CLASS_TRANSFORMATION: subject = "tr"; break; case CLASS_DERIVATION: subject = "dv"; defn = "derivation"; break; case CLASS_CALL: // may throw ClassCastException subject = "tr"; select = "call[" + ((Integer) arg).intValue() + "]"; break; case CLASS_DECLARE: subject = "tr"; // may throw ClassCastException //select = "declare[@name='" + (String)arg + "']"; select = (String)arg; break; case CLASS_FILENAME: subject = "lfn"; break; default: throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" ); } try { String id = null; String cond = "[@subject=\"" + subject + "\"]"; if (select == null) { if (kind != CLASS_FILENAME) { cond += "[empty(@select)]"; } } else { cond += "[@select=\"" + select + "\"]"; } String xquery = "for $mn in distinct-values(//annotation/metadata" + cond + "/@name) " + "let $m := //annotation/metadata[@name=$mn]" + cond; String where = " where "; where += tree.toXQuery("$m/attribute"); if (kind == CLASS_FILENAME) { xquery += ", $r := $m"; xquery += where; xquery += " return $mn"; return searchElements(xquery); } else { xquery += ", $n := substring-before($mn, '::'), $na := substring-after($mn, '::'), $iv := if ($na) then $na else $mn, $v := substring-after($iv, ':'), $ib := substring-before($iv, ':'), $i := if ($ib) then $ib else $iv,"; xquery += " $t := if ($n) then if ($v) then //" + defn + "[@namespace=$n][@name=$i][@version=$v] else //" + defn + "[@namespace=$n][@name=$i][empty(@version)] else if ($v) then //" + defn + "[empty(@namespace)][@name=$i][@version=$v] else //" + defn + "[empty(@namespace)][@name=$i][empty(@version)]"; xquery += where; if (kind == CLASS_DECLARE) xquery += " return $t[" + "declare[@name='" + select + "']" + "]"; else xquery += " return $t"; return searchDefinition(xquery); } } catch (Exception e) { throw new SQLException(e.getMessage()); } } /** * pass-thru to driver. * @return true, if it is feasible to cache results from the driver * false, if requerying the driver is sufficiently fast (e.g. driver * is in main memory, or driver does caching itself). */ public boolean cachingMakesSense() { return true; } public void close() throws SQLException { try { //m_vdc.close(); //m_db.close(); } catch (Exception e){ throw new SQLException(e.getMessage()); } } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/InMemorySchema.java0000644000175000017500000003267311757531137025163 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ package org.griphyn.vdl.dbschema; import java.sql.*; import java.util.*; import java.lang.reflect.*; import java.sql.SQLException; import java.io.IOException; import org.griphyn.vdl.util.ChimeraProperties; import org.griphyn.vdl.classes.*; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.dbdriver.*; /** * This is a class that falls back not on a real database backend, * but rather on an existing Definitions data structure in main * memory. This schema is for internal use only.

* * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * @see org.griphyn.vdl.dbdriver * @see org.griphyn.vdl.classes.Definitions */ public class InMemorySchema extends DatabaseSchema implements VDC { /** * Stores a reference to the in-memory data structure that holds * all definitions that we can access from within this instance. */ protected Definitions m_memory;
/** * Default ctor does nothing. */ protected InMemorySchema() { super(); this.m_memory = null; }
/** * Dirty hack: Returns a reference to the in-memory database for * preliminary routing into DAXes. This is to avoid the duplication * of DVs in memory, as memory quickly becomes a scarce resource. * * @return a reference to the in-memory database. */ public Definitions backdoor() { return this.m_memory; }
/** * Fakes a connect to the database. This class never uses any * database, but instead applies all data to the provided * reference to the in-memory structure. Subclasses may refine * this view to work with files or URLs. * * @param memory is a reference to an existing in-memory Java * object holding all our necessary definitions. */ public InMemorySchema( Definitions memory ) throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, SQLException, IOException { super(); // call minimalistic c'tor this.m_memory = memory; this.m_dbschemaprops = ChimeraProperties.instance().getDatabaseSchemaProperties( PROPERTY_PREFIX ); }
/** * Pass-thru to driver. Always returns false, as the backend is * main memory. * * @return true, if it is feasible to cache results from the driver * false, if requerying the driver is sufficiently fast (e.g. driver * is in main memory, or driver does caching itself). */ public boolean cachingMakesSense() { return false; }
// // lower level methods, working directly on specific definitions //
/** * Loads a single Definition from the backend database into a Java object. * This method does not allow wildcarding! * * @param namespace namespace, null will be converted into empty string * @param name name, null will be converted into empty string * @param version version, null will be converted into empty string * @param type type of the definition (TR or DV), must not be -1. * @return the Definition as specified, or null if not found. * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION * @see #saveDefinition( Definition, boolean ) * @see #searchDefinition( String, String, String, int ) */ public Definition loadDefinition( String namespace, String name, String version, int type ) throws SQLException { // walk main memory Definition result = null; for ( Iterator i=this.m_memory.iterateDefinition(); i.hasNext(); ) { Definition d = (Definition) i.next(); if ( d.match( type, namespace, name, version ) ) { result = d; break; } } return result; }
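Because loadDefinition() above walks the Definitions container linearly, lookups are O(n) in the number of definitions. A small usage sketch follows; it assumes Definitions has a no-argument constructor and Transformation a (namespace, name, version) constructor, neither of which is shown in this file, and the names are made up.

import org.griphyn.vdl.classes.Definition;
import org.griphyn.vdl.classes.Definitions;
import org.griphyn.vdl.classes.Transformation;

/** Minimal sketch: wrap an existing Definitions container and look up
 * one entry by its exact secondary key (no wildcards). */
public class InMemoryLookup {
  public static void main( String[] args ) throws Exception {
    Definitions defs = new Definitions();                   // assumed ctor
    defs.addDefinition( new Transformation( "diamond", "preprocess", "1.0" ) );
    InMemorySchema schema = new InMemorySchema( defs );
    Definition d = schema.loadDefinition( "diamond", "preprocess", "1.0",
                                          Definition.TRANSFORMATION );
    System.out.println( d == null ? "not found" : d.shortID() );
  }
}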
/** * Saves a Definition, that is either a Transformation or Derivation, * into the backend database. * * @param definition is the new Definition to store. * @param overwrite true, if existing definitions will be overwritten by * new ones with the same primary (or secondary) key (-set), or false, * if a new definition will be rejected on key matches. * @return true, if the backend database was changed, or * false, if the definition was not accepted into the backend. * * @see org.griphyn.vdl.classes.Definition * @see org.griphyn.vdl.classes.Transformation * @see org.griphyn.vdl.classes.Derivation * @see #loadDefinition( String, String, String, int ) * @see #deleteDefinition( String, String, String, int ) */ public boolean saveDefinition( Definition definition, boolean overwrite ) throws SQLException { int position = this.m_memory.positionOfDefinition(definition); if ( position != -1 ) { // definition already exists if ( overwrite ) { Logging.instance().log( "app", 1, "Modifying " + definition.shortID() ); this.m_memory.setDefinition( position, definition ); return true; } else { Logging.instance().log( "app", 1, "Rejecting " + definition.shortID() ); return false; } } else { // definition does not exist Logging.instance().log( "app", 1, "Adding " + definition.shortID() ); this.m_memory.addDefinition(definition); return true; } }
// // higher level methods, allowing for wildcarding as stated. //
/** * Checks with the backend database, if the given definition exists. * * @param definition is a Definition object to search for * @return true, if the Definition exists, false if not found */ public boolean containsDefinition( Definition definition ) throws SQLException { return ( this.m_memory.positionOfDefinition(definition) != -1 ); }

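The overwrite flag of saveDefinition() above decides between modify and reject when the definition already exists in memory. A hedged usage sketch, assuming a connected InMemorySchema instance and any Definition value:

import org.griphyn.vdl.classes.Definition;

/** Illustrative only: shows the overwrite semantics of saveDefinition. */
class OverwriteDemo {
  static void demo( InMemorySchema schema, Definition d )
      throws java.sql.SQLException {
    schema.saveDefinition( d, false );                     // first save: added
    boolean changed = schema.saveDefinition( d, false );   // duplicate: rejected
    System.out.println( "rejected: " + !changed );         // -> rejected: true
    changed = schema.saveDefinition( d, true );            // overwrite: modified
    System.out.println( "replaced: " + changed );          // -> replaced: true
  }
}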
/** * Deletes a specific Definition object from the database. No wildcard * matching will be done. "Fake" definitions are permissible, meaning * they just carry the secondary key triple. * * @param definition is the definition specification to delete * @return true if something was deleted, false if non-existent. * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public boolean deleteDefinition( Definition definition ) throws SQLException { return this.m_memory.removeDefinition(definition); }

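deleteDefinition(Definition) above removes one exact entry, while the quadruple variant that follows treats null arguments as wildcards. A minimal sketch contrasting the two, again against a hypothetical connected instance:

import java.util.List;
import org.griphyn.vdl.classes.Definition;

/** Illustrative only: exact-match delete versus wildcard delete. */
class DeleteDemo {
  static void demo( InMemorySchema schema, Definition one )
      throws java.sql.SQLException {
    // exact-match delete: the argument must equal a stored definition
    boolean gone = schema.deleteDefinition( one );
    // wildcard delete: null matches anything, so this removes every
    // remaining derivation in the "diamond" namespace
    List removed = schema.deleteDefinition( "diamond", null, null,
                                            Definition.DERIVATION );
    System.out.println( gone + ", removed " + removed.size() );
  }
}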
/** * Deletes one or more definitions from the backend database. The key * triple parameters may be wildcards. Wildcards are expressed as * null value. * * @param namespace namespace * @param name name * @param version version * @param type definition type (TR or DV) * @return a list of definitions that were deleted. * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List deleteDefinition( String namespace, String name, String version, int type ) throws SQLException { java.util.List result = new ArrayList(); // walk the database for ( ListIterator i=this.m_memory.listIterateDefinition(); i.hasNext(); ) { Definition d = (Definition) i.next(); if ( type == -1 || d.getType() == type ) { // yes, type matches, let's continue String ns = d.getNamespace(); String id = d.getName(); String vs = d.getVersion(); if ( ( namespace == null || // match all for null argument ns != null && ns.equals(namespace) ) && ( name == null || // match all for null argument id != null && id.equals(name) ) && ( version == null || // match all for null argument vs != null && vs.equals(version) ) ) { // there was a match including nulls and jokers etc. result.add(d); i.remove(); } } } return result; }

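The null-as-joker predicate that the wildcard delete above and searchDefinition() below share reads as: each non-null argument must equal the stored value. Spelled out as a standalone, runnable helper (the helper itself is hypothetical, mirroring the inlined condition):

/** The null-as-joker match used by the wildcard methods, extracted
 * into a helper for clarity (not part of the original class). */
class TripleMatch {
  static boolean matches( String stored, String wanted ) {
    // null means "match anything"; otherwise require equality
    return wanted == null || ( stored != null && stored.equals(wanted) );
  }
  public static void main( String[] args ) {
    System.out.println( matches( "diamond", null ) );      // true
    System.out.println( matches( "diamond", "diamond" ) ); // true
    System.out.println( matches( null, "diamond" ) );      // false
  }
}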
/** * Searches the database for definitions by ns::name:version triple * and by type (either Transformation or Derivation). This version * of the search allows for jokers expressed as null value. * * @param namespace namespace, null to match any namespace * @param name name, null to match any name * @param version version, null to match any version * @param type type of definition, see below, or -1 as wildcard * @return a list of Definition items, which may be empty * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION * @see #loadDefinition( String, String, String, int ) */ public java.util.List searchDefinition( String namespace, String name, String version, int type ) throws SQLException { java.util.List result = new ArrayList(); // walk the database for ( ListIterator i=this.m_memory.listIterateDefinition(); i.hasNext(); ) { Definition d = (Definition) i.next(); if ( type == -1 || d.getType() == type ) { // yes, type matches, let's continue String ns = d.getNamespace(); String id = d.getName(); String vs = d.getVersion(); if ( ( namespace == null || // match all for null argument ns != null && ns.equals(namespace) ) && ( name == null || // match all for null argument id != null && id.equals(name) ) && ( version == null || // match all for null argument vs != null && vs.equals(version) ) ) { result.add(d); } } } return result; }
/** * Searches the database for all derivations that contain a certain LFN. * The linkage is an additional constraint. This method does not allow * jokers. * * @param lfn the LFN name * @param link the linkage type of the LFN * @return a list of Definition items that match the criterion. * * @see org.griphyn.vdl.classes.LFN#NONE * @see org.griphyn.vdl.classes.LFN#INPUT * @see org.griphyn.vdl.classes.LFN#OUTPUT * @see org.griphyn.vdl.classes.LFN#INOUT */ public java.util.List searchFilename( String lfn, int link ) throws SQLException { java.util.List result = new ArrayList(); // check all Derivations (this may be time consuming!) for ( Iterator i=this.m_memory.iterateDefinition(); i.hasNext(); ) { Definition d = (Definition) i.next(); if ( d instanceof Derivation ) { Derivation dv = (Derivation) d; for ( Iterator j=dv.iteratePass(); j.hasNext(); ) { boolean found = false; Value actual = ((Pass) j.next()).getValue(); switch ( actual.getContainerType() ) { case Value.SCALAR: // this is a regular SCALAR if ( scalarContainsLfn( (Scalar) actual, lfn, link ) ) { // Logging.instance().log("search", 2, "found " + dv.shortID()); result.add(dv); found = true; } break; case Value.LIST: // a LIST is a list of SCALARs org.griphyn.vdl.classes.List list = (org.griphyn.vdl.classes.List) actual; for ( Iterator f = list.iterateScalar(); f.hasNext() ; ) { if ( scalarContainsLfn( (Scalar) f.next(), lfn, link) ) { // Logging.instance().log("search", 2, "found " + dv.shortID()); result.add(dv); found = true; // skip all other scalars break; } } break; default: // this should not happen Logging.instance().log( "default", 0, "WARNING: An actual argument \"" + actual.toString() + "\" is neither SCALAR nor LIST" ); break; } // if found in one Pass, skip all the others if ( found ) break; } } } return result; }
/** * This helper function checks if a given Scalar instance * contains the specified logical filename as LFN instance anywhere * in its sub-structures. * * @param scalar is a Scalar instance to check * @param lfn is a logical filename string to check for * @param link is the linkage type of the lfn; * if -1, do not check the linkage type.
* @return true, if the file was found */ protected boolean scalarContainsLfn( Scalar scalar, String lfn, int link ) { for ( Iterator e = scalar.iterateLeaf(); e.hasNext(); ) { org.griphyn.vdl.classes.Leaf leaf = (org.griphyn.vdl.classes.Leaf) e.next(); if ( leaf instanceof LFN ) { LFN local = (LFN) leaf; if ( (link == -1 || local.getLink() == link) && lfn.compareTo( local.getFilename() ) == 0 ) return true; } } return false; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/doc-files/0000755000175000017500000000000011757531667023302 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/doc-files/AnnotationSchema-1.obj0000644000175000017500000005171711757531137027372 0ustar ryngerynge%TGIF 4.1.43-QPL state(0,37,100.000,0,0,0,16,1,9,1,1,0,0,1,0,1,0,'Helvetica',0,69120,0,0,1,5,0,0,1,1,0,16,1,0,1,1,1,1,1088,1408,1,0,2880,0). % % @(#)$Header: /nfs/asd2/gmehta/GRIPHYN/CVS/cvsroot/vds/src/org/griphyn/vdl/dbschema/doc-files/AnnotationSchema-1.obj,v 1.2 2005/12/14 16:15:46 voeckler Exp $ % %W% % unit("1 pixel/pixel"). color_info(11,65535,0,[ "magenta", 65535, 0, 65535, 65535, 0, 65535, 1, "red", 65535, 0, 0, 65535, 0, 0, 1, "green", 0, 65535, 0, 0, 65535, 0, 1, "blue", 0, 0, 65535, 0, 0, 65535, 1, "yellow", 65535, 65535, 0, 65535, 65535, 0, 1, "pink", 65535, 49344, 52171, 65535, 49344, 52171, 1, "cyan", 0, 65535, 65535, 0, 65535, 65535, 1, "CadetBlue", 24415, 40606, 41120, 24415, 40606, 41120, 1, "white", 65535, 65535, 65535, 65535, 65535, 65535, 1, "black", 0, 0, 0, 0, 0, 0, 1, "DarkSlateGray", 12079, 20303, 20303, 12079, 20303, 20303, 1 ]). script_frac("0.6"). fg_bg_colors('black','white'). dont_reencode("FFDingbests:ZapfDingbats"). page(1,"",1,''). poly('black','',3,[ 330,450,235,450,235,500],1,1,1,1231,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',3,[ 685,250,755,250,755,200],1,1,1,1233,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',3,[ 365,250,295,250,295,200],1,1,1,1234,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). polygon('black','',6,[ 350,525,700,525,775,300,525,125,275,300,350,525],5,1,1,0,1507,0,0,0,0,0,'1',0, "00",[ ]). poly('black','',2,[ 480,350,450,350],1,1,1,592,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 570,350,600,350],1,1,1,593,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 525,300,525,270],1,1,1,594,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 500,385,475,405],1,1,1,595,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 550,385,580,405],1,1,1,596,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',3,[ 725,450,820,450,820,500],1,1,1,1232,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 525,525,525,560],1,1,1,1235,0,2,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). 
group([ box('black','',460,185,590,215,2,1,1,533,0,0,0,0,0,'1',0,[ ]), text('black',525,194,1,1,1,66,15,534,12,3,2,0,0,0,2,66,15,0,0,"",0,0,0,0,206,'',[ minilines(66,15,0,0,1,0,0,[ mini_line(66,12,3,0,0,0,[ str_block(0,66,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,69120,66,12,3,0,-1,0,0,0,0,0, "ANNO_LFN")]) ]) ])]), box('black','',460,215,590,270,2,1,1,535,0,0,0,0,0,'1',0,[ ]), text('black',495,220,3,0,1,30,45,536,12,3,2,0,0,0,2,30,45,0,0,"",0,0,0,0,232,'',[ minilines(30,45,0,0,0,0,0,[ mini_line(10,12,3,0,0,0,[ str_block(0,10,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,10,12,3,0,-1,0,0,0,0,0, "id")]) ]), mini_line(30,12,3,0,0,0,[ str_block(0,30,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,30,12,3,0,-1,0,0,0,0,0, "name")]) ]), mini_line(29,12,3,0,0,0,[ str_block(0,29,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,29,12,3,0,-1,0,0,0,0,0, "mkey")]) ]) ])]), text('black',492,220,3,2,1,27,45,537,12,3,2,0,0,0,2,27,45,0,0,"",0,0,0,0,232,'',[ minilines(27,45,0,0,2,0,0,[ mini_line(27,12,3,0,0,0,[ str_block(0,27,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,27,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(26,12,3,0,0,0,[ str_block(0,26,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,26,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(26,12,3,0,0,0,[ str_block(0,26,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,26,12,3,0,0,0,0,0,0,0, "")]) ]) ])]), text('black',530,220,3,0,1,55,45,538,12,3,2,0,0,0,2,55,45,0,0,"",0,0,0,0,232,'',[ minilines(55,45,0,0,0,0,0,[ mini_line(55,12,3,0,0,0,[ str_block(0,55,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,55,12,3,0,-1,0,0,0,0,0, "sequence")]) ]), mini_line(43,12,3,0,0,0,[ str_block(0,43,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,43,12,3,0,-1,0,0,0,0,0, "vc(255)")]) ]), mini_line(36,12,3,0,0,0,[ str_block(0,36,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,36,12,3,0,-1,0,0,0,0,0, "vc(64)")]) ]) ])]) ], 1804,0,0,[ ]). 
group([ box('black','',600,300,740,330,2,1,1,144,0,0,0,0,0,'1',0,[ ]), text('black',670,309,1,1,1,60,15,145,12,3,2,0,0,0,2,60,15,0,0,"",0,0,0,0,321,'',[ minilines(60,15,0,0,1,0,0,[ mini_line(60,12,3,0,0,0,[ str_block(0,60,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,69120,60,12,3,0,-1,0,0,0,0,0, "ANNO_TR")]) ]) ])]), box('black','',600,330,740,380,2,1,1,146,0,0,0,0,0,'1',0,[ ]), text('black',645,333,3,0,1,29,45,147,12,3,2,0,0,0,2,29,45,0,0,"",0,0,0,0,345,'',[ minilines(29,45,0,0,0,0,0,[ mini_line(10,12,3,0,0,0,[ str_block(0,10,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,10,12,3,0,-1,0,0,0,0,0, "id")]) ]), mini_line(17,12,3,0,0,0,[ str_block(0,17,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,17,12,3,0,-1,0,0,0,0,0, "did")]) ]), mini_line(29,12,3,0,0,0,[ str_block(0,29,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,29,12,3,0,-1,0,0,0,0,0, "mkey")]) ]) ])]), text('black',642,333,3,2,1,39,45,148,12,3,2,0,0,0,2,39,45,0,0,"",0,0,0,0,345,'',[ minilines(39,45,0,0,2,0,0,[ mini_line(27,12,3,0,0,0,[ str_block(0,27,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,27,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(39,12,3,0,0,0,[ str_block(0,39,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,39,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(26,12,3,0,0,0,[ str_block(0,26,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,26,12,3,0,0,0,0,0,0,0, "")]) ]) ])]), text('black',680,333,3,0,1,55,45,149,12,3,0,0,0,0,2,55,45,0,0,"",0,0,0,0,345,'',[ minilines(55,45,0,0,0,0,0,[ mini_line(55,12,3,0,0,0,[ str_block(0,55,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,55,12,3,0,-1,0,0,0,0,0, "sequence")]) ]), mini_line(30,12,3,0,0,0,[ str_block(0,30,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,30,12,3,0,0,0,0,0,0,0, "bigint")]) ]), mini_line(36,12,3,0,0,0,[ str_block(0,36,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,36,12,3,0,-1,0,0,0,0,0, "vc(64)")]) ]) ])]) ], 1811,0,0,[ ]). 
group([ box('black','',540,405,690,435,2,1,1,540,0,0,0,0,0,'1',0,[ ]), text('black',615,414,1,1,1,77,15,541,12,3,2,0,0,0,2,77,15,0,0,"",0,0,0,0,426,'',[ minilines(77,15,0,0,1,0,0,[ mini_line(77,12,3,0,0,0,[ str_block(0,77,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,69120,77,12,3,0,-1,0,0,0,0,0, "ANNO_FARG")]) ]) ])]), box('black','',540,435,690,500,2,1,1,542,0,0,0,0,0,'1',0,[ ]), text('black',590,438,4,0,1,30,60,543,12,3,2,0,0,0,2,30,60,0,0,"",0,0,0,0,450,'',[ minilines(30,60,0,0,0,0,0,[ mini_line(10,12,3,0,0,0,[ str_block(0,10,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,10,12,3,0,-1,0,0,0,0,0, "id")]) ]), mini_line(17,12,3,0,0,0,[ str_block(0,17,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,17,12,3,0,-1,0,0,0,0,0, "did")]) ]), mini_line(30,12,3,0,0,0,[ str_block(0,30,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,30,12,3,0,-1,0,0,0,0,0, "name")]) ]), mini_line(29,12,3,0,0,0,[ str_block(0,29,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,29,12,3,0,-1,0,0,0,0,0, "mkey")]) ]) ])]), text('black',582,438,4,2,1,39,60,544,12,3,2,0,0,0,2,39,60,0,0,"",0,0,0,0,450,'',[ minilines(39,60,0,0,2,0,0,[ mini_line(27,12,3,0,0,0,[ str_block(0,27,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,27,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(39,12,3,0,0,0,[ str_block(0,39,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,39,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(26,12,3,0,0,0,[ str_block(0,26,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,26,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(26,12,3,0,0,0,[ str_block(0,26,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,26,12,3,0,0,0,0,0,0,0, "")]) ]) ])]), text('black',625,438,4,0,1,55,60,545,12,3,0,0,0,0,2,55,60,0,0,"",0,0,0,0,450,'',[ minilines(55,60,0,0,0,0,0,[ mini_line(55,12,3,0,0,0,[ str_block(0,55,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,55,12,3,0,-1,0,0,0,0,0, "sequence")]) ]), mini_line(30,12,3,0,0,0,[ str_block(0,30,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,30,12,3,0,0,0,0,0,0,0, "bigint")]) ]), mini_line(36,12,3,0,0,0,[ str_block(0,36,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,36,12,3,0,-1,0,0,0,0,0, "vc(64)")]) ]), mini_line(36,12,3,0,0,0,[ str_block(0,36,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,36,12,3,0,-1,0,0,0,0,0, "vc(64)")]) ]) ])]) ], 1820,0,0,[ ]). 
group([ box('black','',365,405,505,435,2,1,1,526,0,0,0,0,0,'1',0,[ ]), text('black',435,414,1,1,1,73,15,527,12,3,2,0,0,0,2,73,15,0,0,"",0,0,0,0,426,'',[ minilines(73,15,0,0,1,0,0,[ mini_line(73,12,3,0,0,0,[ str_block(0,73,12,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,69120,73,12,3,0,0,0,0,0,0,0, "ANNO_CALL")]) ]) ])]), box('black','',365,435,505,500,2,1,1,528,0,0,0,0,0,'1',0,[ ]), text('black',410,438,4,0,1,29,60,529,12,3,2,0,0,0,2,29,60,0,0,"",0,0,0,0,450,'',[ minilines(29,60,0,0,0,0,0,[ mini_line(10,12,3,0,0,0,[ str_block(0,10,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,10,12,3,0,-1,0,0,0,0,0, "id")]) ]), mini_line(17,12,3,0,0,0,[ str_block(0,17,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,17,12,3,0,-1,0,0,0,0,0, "did")]) ]), mini_line(20,12,3,0,0,0,[ str_block(0,20,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,20,12,3,0,-1,0,0,0,0,0, "pos")]) ]), mini_line(29,12,3,0,0,0,[ str_block(0,29,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,29,12,3,0,-1,0,0,0,0,0, "mkey")]) ]) ])]), text('black',407,438,4,2,1,39,60,530,12,3,2,0,0,0,2,39,60,0,0,"",0,0,0,0,450,'',[ minilines(39,60,0,0,2,0,0,[ mini_line(27,12,3,0,0,0,[ str_block(0,27,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,27,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(39,12,3,0,0,0,[ str_block(0,39,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,39,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(26,12,3,0,0,0,[ str_block(0,26,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,26,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(26,12,3,0,0,0,[ str_block(0,26,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,26,12,3,0,0,0,0,0,0,0, "")]) ]) ])]), text('black',445,438,4,0,1,55,60,531,12,3,2,0,0,0,2,55,60,0,0,"",0,0,0,0,450,'',[ minilines(55,60,0,0,0,0,0,[ mini_line(55,12,3,0,0,0,[ str_block(0,55,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,55,12,3,0,-1,0,0,0,0,0, "sequence")]) ]), mini_line(30,12,3,0,0,0,[ str_block(0,30,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,30,12,3,0,0,0,0,0,0,0, "bigint")]) ]), mini_line(38,12,3,0,0,0,[ str_block(0,38,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,38,12,3,0,0,0,0,0,0,0, "integer")]) ]), mini_line(36,12,3,0,0,0,[ str_block(0,36,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,36,12,3,0,-1,0,0,0,0,0, "vc(64)")]) ]) ])]) ], 1827,0,0,[ ]). 
group([ box('black','',310,300,450,330,2,1,1,519,0,0,0,0,0,'1',0,[ ]), text('black',380,309,1,1,1,60,15,520,12,3,2,0,0,0,2,60,15,0,0,"",0,0,0,0,321,'',[ minilines(60,15,0,0,1,0,0,[ mini_line(60,12,3,0,0,0,[ str_block(0,60,12,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,69120,60,12,3,0,0,0,0,0,0,0, "ANNO_DV")]) ]) ])]), box('black','',310,330,450,385,2,1,1,521,0,0,0,0,0,'1',0,[ ]), text('black',355,333,3,0,1,29,45,522,12,3,2,0,0,0,2,29,45,0,0,"",0,0,0,0,345,'',[ minilines(29,45,0,0,0,0,0,[ mini_line(10,12,3,0,0,0,[ str_block(0,10,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,10,12,3,0,-1,0,0,0,0,0, "id")]) ]), mini_line(17,12,3,0,0,0,[ str_block(0,17,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,17,12,3,0,-1,0,0,0,0,0, "did")]) ]), mini_line(29,12,3,0,0,0,[ str_block(0,29,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,29,12,3,0,-1,0,0,0,0,0, "mkey")]) ]) ])]), text('black',352,333,3,2,1,39,45,523,12,3,2,0,0,0,2,39,45,0,0,"",0,0,0,0,345,'',[ minilines(39,45,0,0,2,0,0,[ mini_line(27,12,3,0,0,0,[ str_block(0,27,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,27,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(39,12,3,0,0,0,[ str_block(0,39,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,39,12,3,0,0,0,0,0,0,0, "")]) ]), mini_line(26,12,3,0,0,0,[ str_block(0,26,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,26,12,3,0,0,0,0,0,0,0, "")]) ]) ])]), text('black',390,333,3,0,1,55,45,524,12,3,2,0,0,0,2,55,45,0,0,"",0,0,0,0,345,'',[ minilines(55,45,0,0,0,0,0,[ mini_line(55,12,3,0,0,0,[ str_block(0,55,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,55,12,3,0,-1,0,0,0,0,0, "sequence")]) ]), mini_line(30,12,3,0,0,0,[ str_block(0,30,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,30,12,3,0,0,0,0,0,0,0, "bigint")]) ]), mini_line(36,12,3,0,0,0,[ str_block(0,36,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,36,12,3,0,-1,0,0,0,0,0, "vc(64)")]) ]) ])]) ], 1834,0,0,[ ]). group([ box('black','',480,315,570,375,2,1,0,574,0,0,0,0,0,'1',0,[ ]), poly('black','',2,[ 480,315,480,375],0,1,1,577,0,2,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]), oval('black','',480,300,570,330,2,1,1,575,0,0,0,0,0,'1',0,[ ]), arc('black','',2,1,1,0,480,360,525,375,480,375,570,375,0,90,30,11520,11520,576,0,0,8,3,0,0,0,'1','8','3',0,[ ]), poly('black','',2,[ 570,315,570,375],0,1,1,578,0,2,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]), box('black','',484,334,566,386,0,1,0,579,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',525,333,1,1,1,104,17,580,14,3,0,0,0,0,2,104,17,0,0,"",0,0,0,0,347,'',[ minilines(104,17,0,0,1,0,0,[ mini_line(104,14,3,0,0,0,[ str_block(0,104,14,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,80640,104,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "sequence", 1, 0, 0, text('black',525,351,1,1,1,61,17,581,14,3,0,0,0,0,2,61,17,0,0,"",0,0,0,0,365,'',[ minilines(61,17,0,0,1,0,0,[ mini_line(61,14,3,0,0,0,[ str_block(0,61,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,80640,61,14,3,0,-1,0,0,0,0,0, "sequence")]) ]) ])])) ]) ], 1843,0,0,[ ]). 
group([ box('black','',220,135,370,165,2,1,1,337,0,0,0,0,0,'1',0,[ ]), text('black',295,144,1,1,1,64,15,338,12,3,2,0,0,0,2,64,15,0,0,"",0,0,0,0,156,'',[ minilines(64,15,0,0,1,0,0,[ mini_line(64,12,3,0,0,0,[ str_block(0,64,12,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,69120,64,12,3,0,0,0,0,0,0,0, "ANNO_INT")]) ]) ])]), box('black','',220,165,370,200,2,1,1,339,0,0,0,0,0,'1',0,[ ]), text('black',270,168,2,0,1,31,30,340,12,3,2,0,0,0,2,31,30,0,0,"",0,0,0,0,180,'',[ minilines(31,30,0,0,0,0,0,[ mini_line(10,12,3,0,0,0,[ str_block(0,10,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,10,12,3,0,-1,0,0,0,0,0, "id")]) ]), mini_line(31,12,3,0,0,0,[ str_block(0,31,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,31,12,3,0,-1,0,0,0,0,0, "value")]) ]) ])]), text('black',265,168,1,2,1,40,15,341,12,3,2,0,0,0,2,40,15,0,0,"",0,0,0,0,180,'',[ minilines(40,15,0,0,2,0,0,[ mini_line(40,12,3,0,0,0,[ str_block(0,40,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,40,12,3,0,0,0,0,0,0,0, "")]) ]) ])]), text('black',305,168,2,0,1,55,30,342,12,3,2,0,0,0,2,55,30,0,0,"",0,0,0,0,180,'',[ minilines(55,30,0,0,0,0,0,[ mini_line(55,12,3,0,0,0,[ str_block(0,55,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,55,12,3,0,-1,0,0,0,0,0, "sequence")]) ]), mini_line(30,12,3,0,0,0,[ str_block(0,30,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,30,12,3,0,0,0,0,0,0,0, "bigint")]) ]) ])]) ], 1867,0,0,[ ]). group([ box('black','',675,135,835,165,2,1,1,344,0,0,0,0,0,'1',0,[ ]), text('black',755,144,1,1,1,83,15,345,12,3,2,0,0,0,2,83,15,0,0,"",0,0,0,0,156,'',[ minilines(83,15,0,0,1,0,0,[ mini_line(83,12,3,0,0,0,[ str_block(0,83,12,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,69120,83,12,3,0,0,0,0,0,0,0, "ANNO_FLOAT")]) ]) ])]), box('black','',675,165,835,200,2,1,1,346,0,0,0,0,0,'1',0,[ ]), text('black',725,168,2,0,1,31,30,347,12,3,2,0,0,0,2,31,30,0,0,"",0,0,0,0,180,'',[ minilines(31,30,0,0,0,0,0,[ mini_line(10,12,3,0,0,0,[ str_block(0,10,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,10,12,3,0,-1,0,0,0,0,0, "id")]) ]), mini_line(31,12,3,0,0,0,[ str_block(0,31,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,31,12,3,0,-1,0,0,0,0,0, "value")]) ]) ])]), text('black',720,168,1,2,1,40,15,348,12,3,2,0,0,0,2,40,15,0,0,"",0,0,0,0,180,'',[ minilines(40,15,0,0,2,0,0,[ mini_line(40,12,3,0,0,0,[ str_block(0,40,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,40,12,3,0,0,0,0,0,0,0, "")]) ]) ])]), text('black',760,168,2,0,1,67,30,349,12,3,2,0,0,0,2,67,30,0,0,"",0,0,0,0,180,'',[ minilines(67,30,0,0,0,0,0,[ mini_line(55,12,3,0,0,0,[ str_block(0,55,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,55,12,3,0,-1,0,0,0,0,0, "sequence")]) ]), mini_line(67,12,3,0,0,0,[ str_block(0,67,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,67,12,3,0,-1,0,0,0,0,0, "double prec")]) ]) ])]) ], 1874,0,0,[ ]). 
group([ box('black','',745,500,895,530,2,1,1,260,0,0,0,0,0,'1',0,[ ]), text('black',820,509,1,1,1,76,15,261,12,3,2,0,0,0,2,76,15,0,0,"",0,0,0,0,521,'',[ minilines(76,15,0,0,1,0,0,[ mini_line(76,12,3,0,0,0,[ str_block(0,76,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,69120,76,12,3,0,-1,0,0,0,0,0, "ANNO_DATE")]) ]) ])]), box('black','',745,530,895,565,2,1,1,262,0,0,0,0,0,'1',0,[ ]), text('black',795,533,2,0,1,31,30,263,12,3,2,0,0,0,2,31,30,0,0,"",0,0,0,0,545,'',[ minilines(31,30,0,0,0,0,0,[ mini_line(10,12,3,0,0,0,[ str_block(0,10,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,10,12,3,0,-1,0,0,0,0,0, "id")]) ]), mini_line(31,12,3,0,0,0,[ str_block(0,31,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,31,12,3,0,-1,0,0,0,0,0, "value")]) ]) ])]), text('black',790,533,1,2,1,40,15,264,12,3,2,0,0,0,2,40,15,0,0,"",0,0,0,0,545,'',[ minilines(40,15,0,0,2,0,0,[ mini_line(40,12,3,0,0,0,[ str_block(0,40,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,40,12,3,0,0,0,0,0,0,0, "")]) ]) ])]), text('black',830,533,2,0,1,55,30,265,12,3,2,0,0,0,2,55,30,0,0,"",0,0,0,0,545,'',[ minilines(55,30,0,0,0,0,0,[ mini_line(55,12,3,0,0,0,[ str_block(0,55,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,55,12,3,0,-1,0,0,0,0,0, "sequence")]) ]), mini_line(54,12,3,0,0,0,[ str_block(0,54,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,54,12,3,0,-1,0,0,0,0,0, "timestamp")]) ]) ])]) ], 1881,0,0,[ ]). group([ box('black','',450,560,600,590,2,1,1,60,0,0,0,0,0,'1',0,[ ]), text('black',525,569,1,1,1,79,15,61,12,3,2,0,0,0,2,79,15,0,0,"",0,0,0,0,581,'',[ minilines(79,15,0,0,1,0,0,[ mini_line(79,12,3,0,0,0,[ str_block(0,79,12,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,69120,79,12,3,0,0,0,0,0,0,0, "ANNO_BOOL")]) ]) ])]), box('black','',450,590,600,625,2,1,1,62,0,0,0,0,0,'1',0,[ ]), text('black',500,593,2,0,1,31,30,63,12,3,2,0,0,0,2,31,30,0,0,"",0,0,0,0,605,'',[ minilines(31,30,0,0,0,0,0,[ mini_line(10,12,3,0,0,0,[ str_block(0,10,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,10,12,3,0,-1,0,0,0,0,0, "id")]) ]), mini_line(31,12,3,0,0,0,[ str_block(0,31,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,31,12,3,0,-1,0,0,0,0,0, "value")]) ]) ])]), text('black',495,593,1,2,1,40,15,64,12,3,2,0,0,0,2,40,15,0,0,"",0,0,0,0,605,'',[ minilines(40,15,0,0,2,0,0,[ mini_line(40,12,3,0,0,0,[ str_block(0,40,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,40,12,3,0,0,0,0,0,0,0, "")]) ]) ])]), text('black',535,593,2,0,1,55,30,65,12,3,2,0,0,0,2,55,30,0,0,"",0,0,0,0,605,'',[ minilines(55,30,0,0,0,0,0,[ mini_line(55,12,3,0,0,0,[ str_block(0,55,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,55,12,3,0,-1,0,0,0,0,0, "sequence")]) ]), mini_line(45,12,3,0,0,0,[ str_block(0,45,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,45,12,3,0,-1,0,0,0,0,0, "boolean")]) ]) ])]) ], 1888,0,0,[ ]). 
group([ box('black','',160,500,310,530,2,1,1,253,0,0,0,0,0,'1',0,[ ]), text('black',235,509,1,1,1,75,15,254,12,3,2,0,0,0,2,75,15,0,0,"",0,0,0,0,521,'',[ minilines(75,15,0,0,1,0,0,[ mini_line(75,12,3,0,0,0,[ str_block(0,75,12,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,69120,75,12,3,0,0,0,0,0,0,0, "ANNO_TEXT")]) ]) ])]), box('black','',160,530,310,565,2,1,1,255,0,0,0,0,0,'1',0,[ ]), text('black',210,533,2,0,1,31,30,256,12,3,2,0,0,0,2,31,30,0,0,"",0,0,0,0,545,'',[ minilines(31,30,0,0,0,0,0,[ mini_line(10,12,3,0,0,0,[ str_block(0,10,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,10,12,3,0,-1,0,0,0,0,0, "id")]) ]), mini_line(31,12,3,0,0,0,[ str_block(0,31,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,31,12,3,0,-1,0,0,0,0,0, "value")]) ]) ])]), text('black',205,533,1,2,1,40,15,257,12,3,2,0,0,0,2,40,15,0,0,"",0,0,0,0,545,'',[ minilines(40,15,0,0,2,0,0,[ mini_line(40,12,3,0,0,0,[ str_block(0,40,12,3,0,0,0,0,0,[ str_seg('black','Helvetica',0,69120,40,12,3,0,0,0,0,0,0,0, "")]) ]) ])]), text('black',245,533,2,0,1,55,30,258,12,3,2,0,0,0,2,55,30,0,0,"",0,0,0,0,545,'',[ minilines(55,30,0,0,0,0,0,[ mini_line(55,12,3,0,0,0,[ str_block(0,55,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,55,12,3,0,-1,0,0,0,0,0, "sequence")]) ]), mini_line(24,12,3,0,0,0,[ str_block(0,24,12,3,0,-1,0,0,0,[ str_seg('black','Helvetica',0,69120,24,12,3,0,-1,0,0,0,0,0, "clob")]) ]) ])]) ], 1895,0,0,[ ]). 
pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/doc-files/AnnotationSchema-1.png0000644000175000017500000001614011757531137027373 0ustar ryngerynge [binary PNG data omitted: rendered image of the annotation schema diagram whose tgif source appears above]
pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/doc-files/dbschema-1.gif0000644000175000017500000001273511757531137025675 0ustar ryngerynge [binary GIF data omitted: rendered image of the dbschema class diagram whose tgif source follows]
pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/doc-files/dbschema-1.obj0000644000175000017500000004540111757531137025676 0ustar ryngerynge%TGIF 4.1.43-QPL state(0,37,100.000,0,0,0,16,1,9,1,1,0,0,0,0,1,1,'Helvetica-Bold',1,80640,0,0,1,5,0,0,1,0,0,16,0,0,1,1,1,1,1088,1408,1,0,2880,0). % % @(#)$Header: /nfs/asd2/gmehta/GRIPHYN/CVS/cvsroot/vds/src/org/griphyn/vdl/dbschema/doc-files/dbschema-1.obj,v 1.4 2005/10/21 21:34:50 griphyn Exp $ % %W% % file_attr([ attr("template=", "imagemap.obj", 1, 1, 0, text('black',0,0,1,1,1,164,17,563,14,3,0,0,0,0,2,164,17,0,0,"",0,0,0,0,14,'',[ minilines(164,17,0,0,1,0,0,[ mini_line(164,14,3,0,0,0,[ str_block(0,164,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,164,14,3,0,-1,0,0,0,0,0, "template=imagemap.obj")]) ]) ])])) ]). unit("1 pixel/pixel"). color_info(11,65535,0,[ "magenta", 65535, 0, 65535, 65535, 0, 65535, 1, "red", 65535, 0, 0, 65535, 0, 0, 1, "green", 0, 65535, 0, 0, 65535, 0, 1, "blue", 0, 0, 65535, 0, 0, 65535, 1, "yellow", 65535, 65535, 0, 65535, 65535, 0, 1, "pink", 65535, 49344, 52171, 65535, 49344, 52171, 1, "cyan", 0, 65535, 65535, 0, 65535, 65535, 1, "CadetBlue", 24415, 40606, 41120, 24415, 40606, 41120, 1, "white", 65535, 65535, 65535, 65535, 65535, 65535, 1, "black", 0, 0, 0, 0, 0, 0, 1, "DarkSlateGray", 12079, 20303, 20303, 12079, 20303, 20303, 1 ]). script_frac("0.6"). fg_bg_colors('black','white'). html_export_template("map.tmpl"). dont_reencode("FFDingbests:ZapfDingbats"). page(1,"",1,''). poly('black','',2,[ 700,180,700,370],0,1,1,316,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 100,180,100,370],0,1,1,313,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 165,280,165,370],0,1,1,311,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 350,180,350,370],0,1,1,308,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 525,300,525,370],0,1,1,306,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 525,180,525,270],0,1,1,292,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',3,[ 525,125,350,125,350,150],0,1,1,126,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',3,[ 525,125,700,125,700,150],0,1,1,127,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',3,[ 350,125,100,125,100,150],0,1,1,128,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',2,[ 525,80,525,150],0,1,1,123,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). 
group([ polygon('black','',5,[ 458,50,450,80,592,80,600,50,458,50],0,1,1,0,0,2,0,0,0,0,'1',0, "00",[ ]), box('black','',454,54,596,76,0,1,0,1,2,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',525,53,1,1,1,115,17,2,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,67,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "Catalog", 1, 0, 0, text('black',524,56,1,1,1,53,17,3,14,3,0,0,0,0,2,53,17,0,1,"",0,0,0,0,70,'',[ minilines(53,17,0,1,1,0,0,[ mini_line(53,14,3,0,1,0,[ str_block(0,53,14,3,0,1,0,0,0,[ str_seg('black','Helvetica-BoldOblique',3,80640,53,14,3,0,1,0,0,0,0,0, "Catalog")]) ]) ])])) ]) ], 4,0,0,[ attr("href=", "Catalog.html", 0, 1, 0, text('black',450,80,1,1,1,124,17,380,14,3,0,0,0,0,2,124,17,0,0,"",0,0,0,0,94,'',[ minilines(124,17,0,0,1,0,0,[ mini_line(124,14,3,0,0,0,[ str_block(0,124,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,124,14,3,0,-1,0,0,0,0,0, "href=Catalog.html")]) ]) ])])) ]). group([ polygon('black','',5,[ 308,150,300,180,392,180,400,150,308,150],0,1,1,0,50,2,0,0,0,0,'1',0, "00",[ ]), box('black','',304,154,396,176,0,1,0,51,2,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',350,153,1,1,1,115,17,52,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,167,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "VDC", 1, 0, 0, text('black',350,156,1,1,1,32,17,53,14,3,0,0,0,0,2,32,17,0,1,"",0,0,0,0,170,'',[ minilines(32,17,0,1,1,0,0,[ mini_line(32,14,3,0,1,0,[ str_block(0,32,14,3,0,1,0,0,0,[ str_seg('black','Helvetica-BoldOblique',3,80640,32,14,3,0,1,0,0,0,0,0, "VDC")]) ]) ])])) ]) ], 54,0,0,[ attr("href=", "VDC.html", 0, 1, 0, text('black',300,180,1,1,1,102,17,398,14,3,0,0,0,0,2,102,17,0,0,"",0,0,0,0,194,'',[ minilines(102,17,0,0,1,0,0,[ mini_line(102,14,3,0,0,0,[ str_block(0,102,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,102,14,3,0,-1,0,0,0,0,0, "href=VDC.html")]) ]) ])])) ]). group([ polygon('black','',5,[ 658,150,650,180,742,180,750,150,658,150],0,1,1,0,76,2,0,0,0,0,'1',0, "00",[ ]), box('black','',654,154,746,176,0,1,0,77,2,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',700,153,1,1,1,115,17,78,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,167,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "PTC", 1, 0, 0, text('black',699,156,1,1,1,29,17,79,14,3,0,0,0,0,2,29,17,0,1,"",0,0,0,0,170,'',[ minilines(29,17,0,1,1,0,0,[ mini_line(29,14,3,0,1,0,[ str_block(0,29,14,3,0,1,0,0,0,[ str_seg('black','Helvetica-BoldOblique',3,80640,29,14,3,0,1,0,0,0,0,0, "PTC")]) ]) ])])) ]) ], 75,0,0,[ attr("href=", "PTC.html", 0, 1, 0, text('black',650,180,1,1,1,99,17,386,14,3,0,0,0,0,2,99,17,0,0,"",0,0,0,0,194,'',[ minilines(99,17,0,0,1,0,0,[ mini_line(99,14,3,0,0,0,[ str_block(0,99,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,99,14,3,0,-1,0,0,0,0,0, "href=PTC.html")]) ]) ])])) ]). 
group([ polygon('black','',5,[ 458,150,450,180,592,180,600,150,458,150],0,1,1,0,87,0,0,0,0,0,'1',0, "00",[ ]), box('black','',454,154,596,176,0,1,0,88,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',525,153,1,1,1,114,17,89,14,3,0,0,0,0,2,114,17,0,2,"",0,0,0,0,167,'',[ minilines(114,17,0,2,1,0,0,[ mini_line(114,14,3,0,2,0,[ str_block(0,114,14,3,0,2,0,0,0,[ str_seg('black','Helvetica-BoldOblique',3,80640,114,14,3,0,2,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "DatabaseSchema", 1, 0, 0, text('black',525,156,1,1,1,120,17,90,14,3,0,0,0,0,2,120,17,0,1,"",0,0,0,0,170,'',[ minilines(120,17,0,1,1,0,0,[ mini_line(120,14,3,0,1,0,[ str_block(0,120,14,3,0,1,0,0,0,[ str_seg('black','Helvetica-BoldOblique',3,80640,120,14,3,0,1,0,0,0,0,0, "DatabaseSchema")]) ]) ])])) ]) ], 91,0,0,[ attr("href=", "DatabaseSchema.html", 0, 1, 0, text('black',450,180,1,1,1,190,17,453,14,3,0,0,0,0,2,190,17,0,0,"",0,0,0,0,194,'',[ minilines(190,17,0,0,1,0,0,[ mini_line(190,14,3,0,0,0,[ str_block(0,190,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,190,14,3,0,-1,0,0,0,0,0, "href=DatabaseSchema.html")]) ]) ])])) ]). group([ polygon('black','',5,[ 58,150,50,180,142,180,150,150,58,150],0,1,1,0,98,2,0,0,0,0,'1',0, "00",[ ]), box('black','',54,154,146,176,0,1,0,99,2,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',100,153,1,1,1,115,17,100,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,167,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "Annotation", 1, 0, 0, text('black',100,156,1,1,1,74,17,101,14,3,0,0,0,0,2,74,17,0,1,"",0,0,0,0,170,'',[ minilines(74,17,0,1,1,0,0,[ mini_line(74,14,3,0,1,0,[ str_block(0,74,14,3,0,1,0,0,0,[ str_seg('black','Helvetica-BoldOblique',3,80640,74,14,3,0,1,0,0,0,0,0, "Annotation")]) ]) ])])) ]) ], 97,0,0,[ attr("href=", "Annotation.html", 0, 1, 0, text('black',50,180,1,1,1,147,17,404,14,3,0,0,0,0,2,147,17,0,0,"",0,0,0,0,194,'',[ minilines(147,17,0,0,1,0,0,[ mini_line(147,14,3,0,0,0,[ str_block(0,147,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,147,14,3,0,-1,0,0,0,0,0, "href=Annotation.html")]) ]) ])])) ]). group([ polygon('black','',4,[ 525,90,510,116,540,116,525,90],2,1,1,0,107,0,0,0,0,0,'1',0, "0",[ ]) ], 112,0,0,[ ]). group([ polygon('black','',4,[ 525,190,510,216,540,216,525,190],2,1,1,0,163,0,0,0,0,0,'1',0, "0",[ ]) ], 162,0,0,[ ]). group([ polygon('black','',4,[ 700,190,685,216,715,216,700,190],2,1,1,0,165,0,0,0,0,0,'1',0, "0",[ ]) ], 164,0,0,[ ]). group([ polygon('black','',4,[ 350,190,335,216,365,216,350,190],2,1,1,0,167,0,0,0,0,0,'1',0, "0",[ ]) ], 166,0,0,[ ]). group([ polygon('black','',4,[ 100,190,85,216,115,216,100,190],2,1,1,0,169,0,0,0,0,0,'1',0, "0",[ ]) ], 168,0,0,[ ]). 
group([ polygon('black','',5,[ 625,370,625,400,775,400,775,370,625,370],0,1,1,0,180,0,0,0,0,0,'1',0, "00",[ ]), box('black','',629,374,771,396,0,1,0,181,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',700,373,1,1,1,115,17,182,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,387,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "InvocationSchema", 1, 0, 0, text('black',700,376,1,1,1,128,17,183,14,3,0,0,0,0,2,128,17,0,0,"",0,0,0,0,390,'',[ minilines(128,17,0,0,1,0,0,[ mini_line(128,14,3,0,0,0,[ str_block(0,128,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,128,14,3,0,0,0,0,0,0,0, "InvocationSchema")]) ]) ])])) ]) ], 184,0,0,[ attr("href=", "InvocationSchema.html", 0, 1, 0, text('black',625,400,1,1,1,198,17,543,14,3,0,0,0,0,2,198,17,0,0,"",0,0,0,0,414,'',[ minilines(198,17,0,0,1,0,0,[ mini_line(198,14,3,0,0,0,[ str_block(0,198,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,198,14,3,0,-1,0,0,0,0,0, "href=InvocationSchema.html")]) ]) ])])) ]). group([ polygon('black','',5,[ 275,370,275,400,425,400,425,370,275,370],0,1,1,0,210,0,0,0,0,0,'1',0, "00",[ ]), box('black','',279,374,421,396,0,1,0,211,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',350,373,1,1,1,115,17,212,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,387,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "ChunkSchema", 1, 0, 0, text('black',349,376,1,1,1,101,17,213,14,3,0,0,0,0,2,101,17,0,0,"",0,0,0,0,390,'',[ minilines(101,17,0,0,1,0,0,[ mini_line(101,14,3,0,0,0,[ str_block(0,101,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,101,14,3,0,0,0,0,0,0,0, "ChunkSchema")]) ]) ])])) ]) ], 214,0,0,[ attr("href=", "ChunkSchema.html", 0, 1, 0, text('black',275,400,1,1,1,171,17,519,14,3,0,0,0,0,2,171,17,0,0,"",0,0,0,0,414,'',[ minilines(171,17,0,0,1,0,0,[ mini_line(171,14,3,0,0,0,[ str_block(0,171,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,171,14,3,0,-1,0,0,0,0,0, "href=ChunkSchema.html")]) ]) ])])) ]). group([ polygon('black','',5,[ 450,370,450,400,600,400,600,370,450,370],0,1,1,0,236,0,0,0,0,0,'1',0, "00",[ ]), box('black','',454,374,596,396,0,1,0,237,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',525,373,1,1,1,115,17,238,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,387,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "SingleFileSchema", 1, 0, 0, text('black',525,376,1,1,1,124,17,239,14,3,0,0,0,0,2,124,17,0,0,"",0,0,0,0,390,'',[ minilines(124,17,0,0,1,0,0,[ mini_line(124,14,3,0,0,0,[ str_block(0,124,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,124,14,3,0,0,0,0,0,0,0, "SingleFileSchema")]) ]) ])])) ]) ], 235,0,0,[ attr("href=", "SingleFileSchema.html", 0, 1, 0, text('black',450,400,1,1,1,194,17,561,14,3,0,0,0,0,2,194,17,0,0,"",0,0,0,0,414,'',[ minilines(194,17,0,0,1,0,0,[ mini_line(194,14,3,0,0,0,[ str_block(0,194,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,194,14,3,0,-1,0,0,0,0,0, "href=SingleFileSchema.html")]) ]) ])])) ]). 
group([ polygon('black','',5,[ 450,270,450,300,600,300,600,270,450,270],0,1,1,0,261,0,0,0,0,0,'1',0, "00",[ ]), box('black','',454,274,596,296,0,1,0,262,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',525,273,1,1,1,115,17,263,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,287,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "InMemorySchema", 1, 0, 0, text('black',525,276,1,1,1,124,17,264,14,3,0,0,0,0,2,124,17,0,0,"",0,0,0,0,290,'',[ minilines(124,17,0,0,1,0,0,[ mini_line(124,14,3,0,0,0,[ str_block(0,124,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,124,14,3,0,0,0,0,0,0,0, "InMemorySchema")]) ]) ])])) ]) ], 260,0,0,[ attr("href=", "InMemorySchema.html", 0, 1, 0, text('black',450,300,1,1,1,194,17,531,14,3,0,0,0,0,2,194,17,0,0,"",0,0,0,0,314,'',[ minilines(194,17,0,0,1,0,0,[ mini_line(194,14,3,0,0,0,[ str_block(0,194,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,194,14,3,0,-1,0,0,0,0,0, "href=InMemorySchema.html")]) ]) ])])) ]). group([ polygon('black','',5,[ 123,250,115,280,207,280,215,250,123,250],0,1,1,0,271,2,0,0,0,0,'1',0, "00",[ ]), box('black','',119,254,211,276,0,1,0,272,2,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',165,253,1,1,1,115,17,273,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,267,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "Advanced", 1, 0, 0, text('black',165,256,1,1,1,68,17,274,14,3,0,0,0,0,2,68,17,0,2,"",0,0,0,0,270,'',[ minilines(68,17,0,2,1,0,0,[ mini_line(68,14,3,0,2,0,[ str_block(0,68,14,3,0,2,0,0,0,[ str_seg('black','Helvetica-BoldOblique',3,80640,68,14,3,0,2,0,0,0,0,0, "Advanced")]) ]) ])])) ]) ], 270,0,0,[ attr("href=", "Advanced.html", 0, 1, 0, text('black',115,280,1,1,1,139,17,410,14,3,0,0,0,0,2,139,17,0,0,"",0,0,0,0,294,'',[ minilines(139,17,0,0,1,0,0,[ mini_line(139,14,3,0,0,0,[ str_block(0,139,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,139,14,3,0,-1,0,0,0,0,0, "href=Advanced.html")]) ]) ])])) ]). poly('black','',4,[ 380,370,380,250,675,250,675,370],0,1,1,301,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). group([ polygon('black','',4,[ 165,290,150,316,180,316,165,290],2,1,1,0,303,0,0,0,0,0,'1',0, "0",[ ]) ], 302,0,0,[ ]). group([ polygon('black','',4,[ 525,315,510,341,540,341,525,315],2,1,1,0,305,0,0,0,0,0,'1',0, "0",[ ]) ], 304,0,0,[ ]). poly('black','',3,[ 350,225,165,225,165,250],0,1,1,310,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',3,[ 355,225,500,225,500,270],0,1,1,315,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). poly('black','',3,[ 380,250,225,250,225,370],0,1,1,351,0,0,0,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). 
group([ polygon('black','',5,[ 75,370,75,400,250,400,250,370,75,370],0,1,1,0,220,0,0,0,0,0,'1',0, "00",[ ]), box('black','',91,374,233,396,0,1,0,221,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',162,373,1,1,1,115,17,222,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,387,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "AnnotationSchema", 1, 0, 0, text('black',162,376,1,1,1,132,17,223,14,3,0,0,0,0,2,132,17,0,0,"",0,0,0,0,390,'',[ minilines(132,17,0,0,1,0,0,[ mini_line(132,14,3,0,0,0,[ str_block(0,132,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,132,14,3,0,0,0,0,0,0,0, "AnnotationSchema")]) ]) ])])) ]) ], 364,0,0,[ attr("href=", "AnnotationSchema.html", 0, 1, 0, text('black',75,400,1,1,1,202,17,555,14,3,0,0,0,0,2,202,17,0,0,"",0,0,0,0,414,'',[ minilines(202,17,0,0,1,0,0,[ mini_line(202,14,3,0,0,0,[ str_block(0,202,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,202,14,3,0,-1,0,0,0,0,0, "href=AnnotationSchema.html")]) ]) ])])) ]). group([ polygon('black','',5,[ 800,370,800,400,950,400,950,370,800,370],0,1,1,0,565,0,0,0,0,0,'1',0, "00",[ ]), box('black','',804,374,946,396,0,1,0,566,0,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',875,373,1,1,1,115,17,567,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,387,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "WorkflowSchema", 1, 0, 0, text('black',874,376,1,1,1,119,17,568,14,3,0,0,0,0,2,119,17,0,0,"",0,0,0,0,390,'',[ minilines(119,17,0,0,1,0,0,[ mini_line(119,14,3,0,0,0,[ str_block(0,119,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,119,14,3,0,0,0,0,0,0,0, "WorkflowSchema")]) ]) ])])) ]) ], 564,0,0,[ attr("href=", "WorkflowSchema.html", 0, 1, 0, text('black',800,400,1,1,1,189,17,608,14,3,0,0,0,0,2,189,17,0,0,"",0,0,0,0,414,'',[ minilines(189,17,0,0,1,0,0,[ mini_line(189,14,3,0,0,0,[ str_block(0,189,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,189,14,3,0,-1,0,0,0,0,0, "href=WorkflowSchema.html")]) ]) ])])) ]). group([ polygon('black','',5,[ 833,150,825,180,917,180,925,150,833,150],0,1,1,0,571,2,0,0,0,0,'1',0, "00",[ ]), box('black','',829,154,921,176,0,1,0,572,2,0,0,0,0,'1',0,[ attr("", "auto_center_attr", 0, 1, 0, text('black',875,153,1,1,1,115,17,573,14,3,0,0,0,0,2,115,17,0,0,"",0,0,0,0,167,'',[ minilines(115,17,0,0,1,0,0,[ mini_line(115,14,3,0,0,0,[ str_block(0,115,14,3,0,0,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,115,14,3,0,0,0,0,0,0,0, "auto_center_attr")]) ]) ])])), attr("label=", "WF", 1, 0, 0, text('black',875,156,1,1,1,22,17,574,14,3,0,0,0,0,2,22,17,0,3,"",0,0,0,0,170,'',[ minilines(22,17,0,3,1,0,0,[ mini_line(22,14,3,0,3,0,[ str_block(0,22,14,3,0,3,0,0,0,[ str_seg('black','Helvetica-BoldOblique',3,80640,22,14,3,0,3,0,0,0,0,0, "WF")]) ]) ])])) ]) ], 570,0,0,[ attr("href=", "WF.html", 0, 1, 0, text('black',825,180,1,1,1,93,17,601,14,3,0,0,0,0,2,93,17,0,0,"",0,0,0,0,194,'',[ minilines(93,17,0,0,1,0,0,[ mini_line(93,14,3,0,0,0,[ str_block(0,93,14,3,0,-1,0,0,0,[ str_seg('black','Helvetica-Bold',1,80640,93,14,3,0,-1,0,0,0,0,0, "href=WF.html")]) ]) ])])) ]). poly('black','',3,[ 525,125,875,125,875,150],0,1,1,576,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). 
poly('black','',2,[ 875,180,875,370],0,1,1,579,0,0,2,0,0,0,0,'1',0,0, "0","",[ 0,8,3,0,'8','3','0'],[0,8,3,0,'8','3','0'],[ ]). group([ polygon('black','',4,[ 875,190,860,216,890,216,875,190],2,1,1,0,581,0,0,0,0,0,'1',0, "0",[ ]) ], 580,0,0,[ ]). pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/doc-files/map.tmpl0000644000175000017500000000017211757531137024745 0ustar ryngerynge

&tgvmapobjs;
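<!-- Sketch (assumption, not part of the original template): this one-entity
     template is referenced via html_export_template("map.tmpl") in the .obj
     files above. On HTML export, tgif is expected to replace &tgvmapobjs;
     with one image-map entry per object that carries an "href=" attribute,
     roughly like the standard HTML below (coordinates illustrative, exact
     tgif output format may differ):

       <map name="dbschema">
         <area shape="rect" coords="450,50,600,80" href="Catalog.html">
         <area shape="rect" coords="300,150,400,180" href="VDC.html">
       </map>
-->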

pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/ChunkSchema.java0000644000175000017500000007327211757531137024474 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import java.sql.*; import java.util.*; import java.io.*; import java.lang.reflect.*; import java.net.InetAddress; import org.griphyn.vdl.util.ChimeraProperties; import org.griphyn.vdl.classes.*; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.parser.*; import org.griphyn.vdl.router.Cache; import org.xml.sax.InputSource; /** * This class provides the basic functionality to interact with the * backend database, such as insertion, deletion, and search of * entities in the VDC. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ public class ChunkSchema extends DatabaseSchema implements VDC { /** * Name of the four parameter tables in human readable format. */ protected static final String[] c_lfn_names = { "VDC_NLFN", "VDC_ILFN", "VDC_OLFN", "VDC_BLFN" }; /** * Communication between saveDefinition and deleteDefinition in * update mode. */ protected boolean m_deferDeleteCommit; /** * An instance of the VDLx XML parser. */ private org.griphyn.vdl.parser.VDLxParser m_parser; /** * A cache for definitions to avoid reloading from the database. */ protected Cache m_cache; /** * Instantiates an XML parser for VDLx on demand. Since XML parsing * and parser instantiation is an expensive business, the * reader will only be generated on demand. * * @return a valid VDLx parser instance. */ private org.griphyn.vdl.parser.VDLxParser parserInstance() { if ( this.m_parser == null ) { // obtain the schema location URL from the schema properties: // url is a list of strings representing schema locations. The // content exists in pairs, one of the namespace URI, one of the // location URL. String url = null; try { ChimeraProperties props = ChimeraProperties.instance(); url = m_dbschemaprops.getProperty( "xml.url", props.getVDLSchemaLocation() ); } catch (IOException e) { Logging.instance().log("chunk", 0, "ignored " + e); } this.m_parser = new org.griphyn.vdl.parser.VDLxParser(url); } // done return this.m_parser; } /** * Default constructor for the "chunk" schema. * * @param dbDriverName is the database driver name */ public ChunkSchema( String dbDriverName ) throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, SQLException, IOException { // load the driver from the properties super( dbDriverName, PROPERTY_PREFIX ); Logging.instance().log( "dbschema", 3, "done with default schema c'tor" ); this.m_cache = this.m_dbdriver.cachingMakesSense() ? 
new Cache(600) : null; this.m_deferDeleteCommit = false; this.m_parser = null; this.m_dbdriver.insertPreparedStatement( "stmt.save.definition", "INSERT INTO vdc_definition(id,type,name,namespace,version,xml) " + "VALUES (?,?,?,?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.nlfn", "INSERT INTO vdc_nlfn(id,name) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.ilfn", "INSERT INTO vdc_ilfn(id,name) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.olfn", "INSERT INTO vdc_olfn(id,name) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.blfn", "INSERT INTO vdc_blfn(id,name) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.nlfn", "SELECT distinct id FROM vdc_nlfn WHERE name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.ilfn", "SELECT distinct id FROM vdc_ilfn WHERE name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.olfn", "SELECT distinct id FROM vdc_olfn WHERE name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.blfn", "SELECT distinct id FROM vdc_blfn WHERE name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.all.lfn", "SELECT distinct did FROM vdc_nlfn WHERE name=? UNION " + "SELECT distinct did FROM vdc_ilfn WHERE name=? UNION " + "SELECT distinct did FROM vdc_olfn WHERE name=? UNION " + "SELECT distinct did FROM vdc_blfn WHERE name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.xml.id", "SELECT xml FROM vdc_definition WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.xml", "SELECT id,xml FROM vdc_definition WHERE type=? AND name=? AND namespace=? AND version=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.id", "SELECT id FROM vdc_definition WHERE type=? AND name=? AND namespace=? AND version=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.xml", "DELETE FROM vdc_definition WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.nlfn", "DELETE FROM vdc_nlfn WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.ilfn", "DELETE FROM vdc_ilfn WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.olfn", "DELETE FROM vdc_olfn WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.blfn", "DELETE FROM vdc_blfn WHERE id=?" ); } // // lower level methods, working directly on specific definitions // /** * Loads a single Definition from the backend database into a Java object. * This method does not allow wildcarding! * * @param namespace namespace, null will be converted into empty string * @param name name, null will be converted into empty string * @param version version, null will be converted into empty string * @param type type of the definition (TR or DV), must not be -1. * @return the Definition as specified, or null if not found. 
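* * A minimal usage sketch (editorial illustration, not from the original * source; the driver name "Postgres" and the DV coordinates are * hypothetical): * *   ChunkSchema vdc = new ChunkSchema( "Postgres" ); *   Definition dv = vdc.loadDefinition( "test", "hello", "1.0", *                                       Definition.DERIVATION ); *   if ( dv == null ) System.err.println( "no such derivation" ); 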
* * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION * @see #saveDefinition( Definition, boolean ) */ public Definition loadDefinition( String namespace, String name, String version, int type ) throws SQLException { Definition result = null; Logging.instance().log("xaction", 1, "START load definition" ); int i = 1; PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.xml"); ps.setInt( i++, type ); ps.setString( i++, makeNotNull(name) ); ps.setString( i++, makeNotNull(namespace) ); ps.setString( i++, makeNotNull(version) ); Logging.instance().log( "chunk", 2, "SELECT xml FROM definition" ); ResultSet rs = ps.executeQuery(); Logging.instance().log("xaction", 1, "INTER load definition" ); if ( rs.next() ) { MyCallbackHandler cb = new MyCallbackHandler(); Long lid = new Long( rs.getLong("id") ); // FIXME: multiple null handlings missing parserInstance().parse( new org.xml.sax.InputSource(rs.getCharacterStream("xml")), cb ); result = cb.getDefinition(); // add to cache if ( m_cache != null ) m_cache.set( lid, result ); } else { Logging.instance().log( "chunk", 0, "Definition not found" ); } rs.close(); Logging.instance().log("xaction", 1, "FINAL load definition" ); return result; } /** * Load a single Definition from the backend database into a Java * object by its primary key id. This is an internal helper function. * * @param id is a long which represents the primary id. * @return the Definition that was matched by the id. * * @see #loadDefinition( String, String, String, int ) * @see #saveDefinition( Definition, boolean ) */ private Definition loadDefinition( long id ) throws SQLException { Definition result = null; Long lid = new Long(id); Logging.instance().log("xaction", 1, "START load definition " + lid ); // try grabbing from cache if ( m_cache != null ) result = (Definition) m_cache.get(lid); if ( result == null ) { // no cache, or not in cache PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.xml.id" ); if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); ResultSet rs = ps.executeQuery(); Logging.instance().log("xaction", 1, "INTER load definitions" ); if ( rs.next() ) { MyCallbackHandler cb = new MyCallbackHandler(); // FIXME: multiple null handlings missing parserInstance().parse( new org.xml.sax.InputSource(rs.getCharacterStream("xml")), cb ); result = cb.getDefinition(); // add to cache if ( m_cache != null ) m_cache.set( lid, result ); } else { Logging.instance().log( "chunk", 0, "Definition not found" ); } rs.close(); } Logging.instance().log("xaction", 1, "FINAL load definitions" ); return result; } /** * Compiles the name of a DV/TR for log messages. * * @param d is a definition * @return the type plus FQDN of the definition */ private String what( Definition d ) { StringBuffer result = new StringBuffer(); switch ( d.getType() ) { case Definition.DERIVATION: result.append( "DV" ); break; case Definition.TRANSFORMATION: result.append( "TR" ); break; default: result.append( "??" ); break; } result.append(' ').append( d.shortID() ); return result.toString(); } /** * Saves a Definition, that is either a Transformation or Derivation, * into the backend database. This method, of course, does not allow * wildcarding. The definition has to be completely specified and * valid. * * @param definition is the new Definition to store. 
* @param overwrite true, if existing definitions will be overwritten by * new ones with the same primary (or secondary) key (-set), or false, * if a new definition will be rejected on key matches. * * @return true, if the backend database was changed, or * false, if the definition was not accepted into the backend. * * @see org.griphyn.vdl.classes.Definition * @see org.griphyn.vdl.classes.Transformation * @see org.griphyn.vdl.classes.Derivation * @see #loadDefinition( String, String, String, int ) */ public boolean saveDefinition( Definition definition, boolean overwrite ) throws SQLException { Logging.instance().log( "chunk", 2, "SAVE DEFINITION started" ); // figure out if it already exists long probe = -1; try { Long temp = getDefinitionId(definition); if ( temp != null ) probe = temp.longValue(); } catch ( SQLException e ) { String cause = e.getMessage(); Logging.instance().log( "app", 1, "Ignoring SQL exception" + ( cause==null ? "" : ": " + cause ) ); m_dbdriver.clearWarnings(); } if ( probe != -1 ) { if ( overwrite ) { // in overwrite mode, remove old version Logging.instance().log( "app", 1, "Deleting old " + definition.shortID() ); // remove old definition from database (delete-before-insert) try { this.m_deferDeleteCommit = true; deleteDefinition( definition ); } catch ( SQLException e ) { String cause = e.getMessage(); Logging.instance().log( "app", 1, "Ignoring SQL exception" + ( cause==null ? "" : ": " + e.getMessage() ) ); } finally { this.m_deferDeleteCommit = false; } } else { // not overwriting, tell user Logging.instance().log( "app", 0, definition.shortID() + " already exists (SQL vdc_definition.id=" + probe + "), ignoring" ); return false; } } // Definition is pristine (now) Logging.instance().log( "app", 1, "Trying to add " + what(definition) ); long id = -1; try { id = m_dbdriver.sequence1( "def_id_seq" ); } catch ( SQLException e ) { Logging.instance().log( "app", 0, "In " + definition.shortID() + ": " + e.toString().trim() ); Logging.instance().log("xaction", 1, "START rollback" ); m_dbdriver.cancelPreparedStatement( "stmt.save.definition" ); m_dbdriver.rollback(); Logging.instance().log("xaction", 1, "FINAL rollback" ); return false; } // add ID explicitly from sequence to insertion -- -1 is autoinc Logging.instance().log("xaction", 1, "START save definition" ); PreparedStatement ps = m_dbdriver.getPreparedStatement("stmt.save.definition"); int i = 1; longOrNull( ps, i++, id ); ps.setInt( i++, definition.getType() ); if ( definition.getName() == null ) throw new SQLException( "VDS inconsistency: The name of a definition is null" ); else ps.setString( i++, definition.getName() ); ps.setString( i++, makeNotNull(definition.getNamespace()) ); ps.setString( i++, makeNotNull(definition.getVersion()) ); String xml = definition.toXML( (String) null, (String) null ); ps.setCharacterStream( i++, new StringReader(xml), xml.length() ); // save prepared values Logging.instance().log( "chunk", 2, "INSERT INTO Definition" ); try { ps.executeUpdate(); if ( id == -1 ) id = m_dbdriver.sequence2( ps, "def_id_seq", 1 ); } catch ( SQLException e ) { Logging.instance().log( "app", 0, "In " + definition.shortID() + ": " + e.toString().trim() ); Logging.instance().log("xaction", 1, "START rollback" ); m_dbdriver.cancelPreparedStatement( "stmt.save.definition" ); m_dbdriver.rollback(); Logging.instance().log("xaction", 1, "FINAL rollback" ); return false; } Logging.instance().log("xaction", 1, "FINAL save definition: ID=" + id ); /* NOT YET * // add to cache if ( m_cache != null ) 
m_cache.set( new Long(id), definition ); * */ // batch save LFNs from Derivations if ( definition instanceof Derivation ) { Derivation derivation = (Derivation) definition; Set alreadyKnown = new HashSet(); // ordering MUST MATCH classes.LFN constants! PreparedStatement stmt[] = { m_dbdriver.getPreparedStatement("stmt.save.nlfn"), m_dbdriver.getPreparedStatement("stmt.save.ilfn"), m_dbdriver.getPreparedStatement("stmt.save.olfn"), m_dbdriver.getPreparedStatement("stmt.save.blfn") }; int[] count = new int[ stmt.length ]; /* [gap in the source dump: the derivation's arguments are scanned here via saveScalar(), which fills the statement batches and the count array] */ for ( int ii=0; ii < stmt.length; ++ii ) { if ( count[ii] > 0 ) { // batch insert Logging.instance().log( "chunk", 2, "BATCH INSERT for " + count[ii] + ' ' + c_lfn_names[ii] + 's' ); Logging.instance().log( "xaction", 1, "START batch-add " + count[ii] + ' ' + c_lfn_names[ii] ); int[] update = stmt[ii].executeBatch(); Logging.instance().log( "xaction", 1, "FINAL batch-add " + count[ii] + ' ' + c_lfn_names[ii] ); } } } // commit the changes Logging.instance().log("xaction", 1, "START commit" ); this.m_dbdriver.commit(); Logging.instance().log("xaction", 1, "FINAL commit" ); // done return true; } /** * Saves all logical filenames from a Scalar object. This is a helper * function to save a single definition. * * @param id is the definition id in the DEFINITION table * @param scalar is a Scalar instance of which the LFNs are to be saved. * @param already is a set of filenames that were already added during * this session * @param stmt is an array of the ids of the prepared statements for * the different tables. * @param count count the number of entries in a prepared statement. * * @see #saveDefinition( Definition, boolean ) */ private void saveScalar( long id, Scalar scalar, Set already, PreparedStatement[] stmt, int[] count ) throws SQLException { int result = 0; for ( Iterator i=scalar.iterateLeaf(); i.hasNext(); ) { Leaf leaf = (Leaf) i.next(); // only interested in logical filenames, nothing else if ( leaf instanceof LFN ) { LFN lfn = (LFN) leaf; String name = lfn.getFilename(); // already inserted previously? if ( already.contains(name) ) continue; else already.add(name); // which one to choose int link = lfn.getLink(); if ( ! LFN.isInRange(link) ) throw new RuntimeException( "unknown LFN linkage type" ); int n = 1; if ( m_dbdriver.preferString() ) stmt[link].setString( n++, Long.toString(id) ); else stmt[link].setLong( n++, id ); stmt[link].setString( n++, name ); // only keep filenames and linkage in ancillary tables // stringOrNull( stmt[link], n++, lfn.getTemporary() ); // FIXME: dontTransfer, dontRegister? stmt[link].addBatch(); count[link]++; } } } // // higher level methods, allowing for wildcarding unless working on // a single Definition. // /** * Obtains the primary key id for a given definition. "Fake" definitions * are permissible. This is an internal helper function. * * @param d is a definition specification. * @return the id of the definition, or null if not found. 
* * @see #getDefinitionId( String, String, String, int ) */ protected Long getDefinitionId( Definition d ) throws SQLException { Logging.instance().log("xaction", 1, "START select ID from DEFINITION" ); Long result = null; // ps.resetPreparedStatement( "stmt.select.id" ); int i = 1; PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.id"); ps.setInt( i++, d.getType() ); ps.setString( i++, makeNotNull(d.getName()) ); ps.setString( i++, makeNotNull(d.getNamespace()) ); ps.setString( i++, makeNotNull(d.getVersion()) ); Logging.instance().log( "chunk", 2, "SELECT id FROM definition" ); ResultSet rs = ps.executeQuery(); Logging.instance().log("xaction", 1, "INTER select ID from DEFINITION" ); if ( rs.next() ) result = new Long( rs.getLong(1) ); else Logging.instance().log( "chunk", 0, "Definition not found" ); rs.close(); Logging.instance().log("xaction", 1, "FINAL select ID from DEFINITION" ); return result; } /** * Obtains the list of primary key ids for matching definitions. * This method allows for wildcards in the usual fashion. Use null for * strings as wildcards, and -1 for the type wildcard. This method may * return an empty list, but it will not return null. This is an * internal helper function. * * @param namespace namespace, null to match any namespace * @param name name, null to match any name * @param version version, null to match any version * @param type definition type (TR or DV) * @return a possibly empty list containing all matching * definition ids as Longs. * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION * @see #getDefinitionId( Definition ) */ protected java.util.List getDefinitionId( String namespace, String name, String version, int type ) throws SQLException { java.util.List result = new ArrayList(); Logging.instance().log("xaction", 1, "START select IDs from DEFINITION" ); java.util.List select = new ArrayList(1); select.add( new String("distinct id") ); java.util.Map where = new TreeMap(); if ( type != -1 ) where.put( "type", Integer.toString(type) ); if ( namespace != null ) where.put( "namespace", namespace ); if ( name != null ) where.put( "name", name ); if ( version != null ) where.put( "version", version ); Logging.instance().log("xaction", 1, "START select IDs" ); ResultSet rs = m_dbdriver.select(select,"vdc_definition",where,null); while ( rs.next() ) result.add( new Long( rs.getLong("id") ) ); rs.close(); Logging.instance().log("xaction", 1, "FINAL select IDs from DEFINITION" ); return result; } /** * Search the database for the existence of a definition. * * @param definition the definition object to search for * @return true, if the definition exists, false if not found */ public boolean containsDefinition( Definition definition ) throws SQLException { boolean result = false; try { result = ( getDefinitionId(definition) != null ); } catch ( SQLException sql ) { // ignore } return result; } /** * Delete a specific Definition object from the database. No wildcard * matching will be done. "Fake" definitions are permissible, meaning * they need only carry the secondary key triple. * * @param definition is the definition specification to delete * @return true if something was deleted, false if nonexistent. 
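* * A hedged sketch of the delete-before-insert pattern this method * supports (hypothetical variables; "vdc" is a ChunkSchema instance, * "dv" a fully specified or "fake" definition): * *   if ( vdc.containsDefinition( dv ) ) *     vdc.deleteDefinition( dv ); *   vdc.saveDefinition( dv, false ); 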
* * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public boolean deleteDefinition( Definition definition ) throws SQLException { int result = 0; PreparedStatement ps = null; // // TODO: turn into a stored procedure call // Logging.instance().log("xaction", 1, "START delete definition" ); Long defId = getDefinitionId(definition); boolean preferString = m_dbdriver.preferString(); if ( defId != null ) { long id = defId.longValue(); Logging.instance().log( "xaction", 1, "START DELETE FROM nlfn" ); ps = m_dbdriver.getPreparedStatement("stmt.delete.nlfn"); if ( preferString ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); result = ps.executeUpdate(); Logging.instance().log( "xaction", 1, "FINAL DELETE FROM nlfn: " + result ); Logging.instance().log( "xaction", 1, "START DELETE FROM ilfn" ); ps = m_dbdriver.getPreparedStatement( "stmt.delete.ilfn" ); if ( preferString ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); result = ps.executeUpdate(); Logging.instance().log( "xaction", 1, "FINAL DELETE FROM ilfn: " + result ); Logging.instance().log( "xaction", 1, "START DELETE FROM olfn" ); ps = m_dbdriver.getPreparedStatement( "stmt.delete.olfn" ); if ( preferString ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); result = ps.executeUpdate(); Logging.instance().log( "xaction", 1, "FINAL DELETE FROM olfn: " + result ); Logging.instance().log( "xaction", 1, "START DELETE FROM blfn" ); ps = m_dbdriver.getPreparedStatement( "stmt.delete.blfn" ); if ( preferString ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); result = ps.executeUpdate(); Logging.instance().log( "xaction", 1, "FINAL DELETE FROM blfn: " + result ); Logging.instance().log( "xaction", 1, "START DELETE FROM definition" ); ps = m_dbdriver.getPreparedStatement( "stmt.delete.xml" ); if ( preferString ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); result = ps.executeUpdate(); Logging.instance().log( "xaction", 1, "FINAL DELETE FROM definition: " + result ); if ( ! m_deferDeleteCommit ) m_dbdriver.commit(); } Logging.instance().log("xaction", 1, "FINAL delete definition" ); return ( result != 0 ); } /** * Delete Definition objects from the database. This method allows for * wildcards in the usual fashion. Use null for strings as wildcards, * and -1 for the type wildcard. For efficiency reasons, this method * returns an empty list when the deletion is deferred from saveDefinition; * otherwise it returns the definitions it deleted. * * @param namespace namespace, null to match any namespace * @param name name, null to match any name * @param version version, null to match any version * @param type definition type (TR or DV) * @return a list containing all Definitions that were deleted * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List deleteDefinition( String namespace, String name, String version, int type ) throws SQLException { java.util.List result = new ArrayList(); Logging.instance().log("xaction", 1, "START delete definitions" ); java.util.List idlist = getDefinitionId( namespace, name, version, type ); if ( idlist.size() == 0 ) return result; // postcondition: contains all IDs, count(id)>0, to be deleted // save old values if ( ! 
m_deferDeleteCommit ) { // unless we come from saveDefinition (which defers the commit), save the old values for the caller for ( Iterator i=idlist.iterator(); i.hasNext(); ) { Definition d = loadDefinition( ((Long) i.next()).longValue() ); if ( d != null ) result.add(d); } } // list of all statements we need to access PreparedStatement ps[] = { this.m_dbdriver.getPreparedStatement("stmt.delete.nlfn"), this.m_dbdriver.getPreparedStatement("stmt.delete.ilfn"), this.m_dbdriver.getPreparedStatement("stmt.delete.olfn"), this.m_dbdriver.getPreparedStatement("stmt.delete.blfn"), this.m_dbdriver.getPreparedStatement("stmt.delete.xml") }; // prepare and batch all statements boolean preferString = m_dbdriver.preferString(); for ( Iterator i=idlist.iterator(); i.hasNext(); ) { long id = ((Long) i.next()).longValue(); for ( int j=0; j < ps.length; ++j ) { if ( preferString ) ps[j].setString( 1, Long.toString(id) ); else ps[j].setLong( 1, id ); ps[j].addBatch(); } } /* [gap in the source dump: batch execution, commit, and the remaining methods of ChunkSchema.java are missing here, as is the file header of AnnotationSchema.java; the AnnotationSchema class javadoc resumes below] */ * [figure: "annotation schema" diagram -- rendered in doc-files/AnnotationSchema-1.png] 

* * The central five elements that can receive annotations all depend * on the same sequence generator for their primary key. Their secondary * key references the definition in question, or otherwise qualifies the * element to annotate. Note that logical filenames can be annotated * outside of any definitions.
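* * As an illustration, reading an integer annotation under key "mkey" * from a derivation is a two-step lookup; the statements below mirror * the prepared statements "stmt.select.anno_dv" and "stmt.select.anno_int" * registered in the constructor: * *   SELECT id    FROM anno_dv  WHERE did=? AND mkey=? *   SELECT value FROM anno_int WHERE id=? 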

* * Grouped along the outer edges, five primary data type tables store * annotations efficiently. The distinction into separate data types is * necessary to enable efficient searches and appropriate operations. * Their primary key is also a foreign key referencing the five central * elements. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ */ public class AnnotationSchema extends DatabaseSchema implements Advanced, Annotation { /** * Name of the three parameter tables in human readable format. */ protected static final String[] c_lfn_names = { "ANNO_LFN_I", "ANNO_LFN_O", "ANNO_LFN_B" }; /** * Communication between saveDefinition and deleteDefinition in * update mode. */ protected boolean m_deferDeleteCommit; /** * An instance of the VDLx XML parser. */ private org.griphyn.vdl.parser.VDLxParser m_parser; /** * A cache for definitions to avoid reloading from the database. */ protected Cache m_cache; /** * Instantiates an XML parser for VDLx on demand. Since XML parsing * and parser instantiation is an expensive business, the reader will * only be generated on demand, and only once. * * @return a valid VDLx parser instance. */ private org.griphyn.vdl.parser.VDLxParser parserInstance() { if ( this.m_parser == null ) { // obtain the schema location URL from the schema properties: // url is a list of strings representing schema locations. The // content exists in pairs, one of the namespace URI, one of the // location URL. String url = null; try { ChimeraProperties props = ChimeraProperties.instance(); url = m_dbschemaprops.getProperty( "xml.url", props.getVDLSchemaLocation() ); } catch (IOException e) { Logging.instance().log("chunk", 0, "ignored " + e); } this.m_parser = new org.griphyn.vdl.parser.VDLxParser(url); } // done return this.m_parser; } /** * Default constructor for the "annotation" schema. * * @param dbDriverName is the database driver name */ public AnnotationSchema( String dbDriverName ) throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, SQLException, IOException { // load the driver from the properties super( dbDriverName, VDC.PROPERTY_PREFIX ); Logging.instance().log( "dbschema", 3, "done with default schema c'tor" ); this.m_cache = this.m_dbdriver.cachingMakesSense() ? new Cache(600) : null; this.m_deferDeleteCommit = false; this.m_parser = null; this.m_dbdriver.insertPreparedStatement( "stmt.save.definition", "INSERT INTO anno_definition(id,type,name,namespace,version,xml) " + "VALUES (?,?,?,?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.lfn_i", "INSERT INTO anno_lfn_i(did,name) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.lfn_o", "INSERT INTO anno_lfn_o(did,name) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.lfn_b", "INSERT INTO anno_lfn_b(did,name) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.updt.definition", "UPDATE anno_definition SET xml=? WHERE id=?" 
); this.m_dbdriver.insertPreparedStatement( "stmt.save.anno_tr", "INSERT INTO anno_tr(id,did,mkey) VALUES (?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.anno_dv", "INSERT INTO anno_dv(id,did,mkey) VALUES (?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.anno_lfn", "INSERT INTO anno_lfn(id,name,mkey) VALUES (?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.anno_targ", "INSERT INTO anno_targ(id,did,name,mkey) VALUES (?,?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.anno_call", "INSERT INTO anno_call(id,did,pos,mkey) VALUES (?,?,?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.anno_bool", "INSERT INTO anno_bool(id,value) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.anno_int", "INSERT INTO anno_int(id,value) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.anno_float", "INSERT INTO anno_float(id,value) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.anno_date", "INSERT INTO anno_date(id,value) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.save.anno_text", "INSERT INTO anno_text(id,value) VALUES (?,?)" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.lfn_i", "SELECT distinct did FROM anno_lfn_i WHERE name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.lfn_o", "SELECT distinct did FROM anno_lfn_o WHERE name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.lfn_b", "SELECT distinct did FROM anno_lfn_b WHERE name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.lfn_*", "SELECT distinct did FROM anno_lfn_i WHERE name=? UNION " + "SELECT distinct did FROM anno_lfn_o WHERE name=? UNION " + "SELECT distinct did FROM anno_lfn_b WHERE name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.lfn_*.name", "SELECT distinct name FROM anno_lfn_i WHERE did=? UNION " + "SELECT distinct name FROM anno_lfn_o WHERE did=? UNION " + "SELECT distinct name FROM anno_lfn_b WHERE did=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.lfn_i.name.ex", "SELECT distinct name FROM anno_lfn_i WHERE name LIKE ?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.lfn_o.name.ex", "SELECT distinct name FROM anno_lfn_o WHERE name LIKE ?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.lfn_b.name.ex", "SELECT distinct name FROM anno_lfn_b WHERE name LIKE ?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.lfn_*.name.ex", "SELECT distinct name FROM anno_lfn_i WHERE name LIKE ? UNION " + "SELECT distinct name FROM anno_lfn_o WHERE name LIKE ? UNION " + "SELECT distinct name FROM anno_lfn_b WHERE name LIKE ?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.big", "SELECT id FROM anno_tr WHERE did=? UNION " + "SELECT id FROM anno_dv WHERE did=? UNION " + "SELECT id FROM anno_call WHERE did=? UNION " + "SELECT id FROM anno_targ WHERE did=? UNION " + // "SELECT id FROM anno_lfn WHERE name IN (" + // " SELECT distinct name FROM lfn_i WHERE did=? UNION " + // " SELECT distinct name FROM lfn_o WHERE did=? UNION " + // " SELECT distinct name FROM lfn_b WHERE did=? )" ); "SELECT a.id FROM anno_lfn a, anno_lfn_i i WHERE i.name=a.name AND a.id=? UNION " + "SELECT a.id FROM anno_lfn a, anno_lfn_o o WHERE o.name=a.name AND a.id=? UNION " + "SELECT a.id FROM anno_lfn a, anno_lfn_b b WHERE b.name=a.name AND a.id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.xml.id", "SELECT xml FROM anno_definition WHERE id=?" 
); this.m_dbdriver.insertPreparedStatement( "stmt.select.xml", "SELECT id,xml FROM anno_definition WHERE type=? AND name=? AND namespace=? AND version=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.id", "SELECT id FROM anno_definition WHERE type=? AND name=? AND namespace=? AND version=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.xml", "DELETE FROM anno_definition WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.lfn_i", "DELETE FROM anno_lfn_i WHERE did=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.lfn_o", "DELETE FROM anno_lfn_o WHERE did=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.lfn_b", "DELETE FROM anno_lfn_b WHERE did=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.anno_bool", "DELETE FROM anno_bool WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.anno_int", "DELETE FROM anno_int WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.anno_float", "DELETE FROM anno_float WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.anno_date", "DELETE FROM anno_date WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.anno_text", "DELETE FROM anno_text WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.anno_tr", "DELETE FROM anno_tr WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.anno_dv", "DELETE FROM anno_dv WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.anno_lfn", "DELETE FROM anno_lfn WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.anno_targ", "DELETE FROM anno_targ WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.delete.anno_call", "DELETE FROM anno_call WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_tr", "SELECT id FROM anno_tr WHERE did=? AND mkey=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_dv", "SELECT id FROM anno_dv WHERE did=? AND mkey=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_lfn", "SELECT id FROM anno_lfn WHERE name=? AND mkey=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_targ", "SELECT id FROM anno_targ WHERE did=? AND name=? AND mkey=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_call", "SELECT id FROM anno_call WHERE did=? AND pos=? AND mkey=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_bool", "SELECT value FROM anno_bool WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_int", "SELECT value FROM anno_int WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_float", "SELECT value FROM anno_float WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_date", "SELECT value FROM anno_date WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_text", "SELECT value FROM anno_text WHERE id=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_tr2", "SELECT id,mkey FROM anno_tr WHERE did=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_dv2", "SELECT id,mkey FROM anno_dv WHERE did=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_lfn2", "SELECT id,mkey FROM anno_lfn WHERE name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_targ2", "SELECT id,mkey FROM anno_targ WHERE did=? and name=?" ); this.m_dbdriver.insertPreparedStatement( "stmt.select.anno_call2", "SELECT id,mkey FROM anno_call WHERE did=? and pos=?" 
    );

    // updates, take one
    this.m_dbdriver.insertPreparedStatement( "stmt.update.anno_tr",
      "UPDATE anno_tr SET did=? WHERE did=?" );
    // beware: the following statement key is misspelled "udpate"; it is
    // left untouched, because lookups must use the identical spelling.
    this.m_dbdriver.insertPreparedStatement( "stmt.udpate.anno_dv",
      "UPDATE anno_dv SET did=? WHERE did=?" );
    this.m_dbdriver.insertPreparedStatement( "stmt.update.anno_targ",
      "UPDATE anno_targ SET did=? WHERE did=?" );
    this.m_dbdriver.insertPreparedStatement( "stmt.update.anno_call",
      "UPDATE anno_call SET did=? WHERE did=?" );
  }

  //
  // lower level methods, working directly on specific definitions
  //

  /**
   * Loads a single Definition from the backend database into a Java object.
   * This method does not allow wildcarding!
   *
   * @param namespace namespace, null will be converted into empty string
   * @param name name, null will be converted into empty string
   * @param version version, null will be converted into empty string
   * @param type type of the definition (TR or DV), must not be -1.
   * @return the Definition as specified, or null if not found.
   *
   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
   * @see org.griphyn.vdl.classes.Definition#DERIVATION
   * @see #saveDefinition( Definition, boolean )
   */
  public Definition loadDefinition( String namespace, String name,
                                    String version, int type )
    throws SQLException
  {
    Definition result = null;
    Logging.instance().log("xaction", 1, "START load definition" );

    int i = 1;
    PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.xml" );
    ps.setInt( i++, type );
    ps.setString( i++, makeNotNull(name) );
    ps.setString( i++, makeNotNull(namespace) );
    ps.setString( i++, makeNotNull(version) );
    Logging.instance().log( "chunk", 2, "SELECT xml FROM anno_definition" );

    ResultSet rs = ps.executeQuery();
    Logging.instance().log("xaction", 1, "INTER load definition" );

    if ( rs.next() ) {
      MyCallbackHandler cb = new MyCallbackHandler();
      Long lid = new Long( rs.getLong("id") );
      // FIXME: multiple null handlings missing
      parserInstance().parse( new org.xml.sax.InputSource(
        rs.getCharacterStream("xml")), cb );
      result = cb.getDefinition();

      // add to cache
      if ( m_cache != null ) m_cache.set( lid, result );
    } else {
      Logging.instance().log( "chunk", 0, "Definition not found" );
    }

    rs.close();
    Logging.instance().log("xaction", 1, "FINAL load definition" );
    return result;
  }

  /**
   * Load a single Definition from the backend database into a Java
   * object by its primary key id. This is an internal helper function.
   *
   * @param id is a long which represents the primary id.
   * @return the Definition that is matched by the id.
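   *
   * <p>For illustration only, the cache-first protocol that the method
   * body below implements (no database round-trip on a cache hit):
   * <pre>
   *   Long lid = new Long(id);
   *   Definition result = ( m_cache == null )
   *     ? null : (Definition) m_cache.get(lid);
   *   // only on a cache miss does the SELECT against anno_definition run
   * </pre>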
* * @see #loadDefinition( String, String, String, int ) * @see #saveDefinition( Definition, boolean ) */ private Definition loadDefinition( long id ) throws SQLException { Definition result = null; Long lid = new Long(id); Logging.instance().log("xaction", 1, "START load definition " + lid ); // try grabbing from cache if ( m_cache != null ) result = (Definition) m_cache.get(lid); if ( result == null ) { // no cache, or not in cache PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.xml.id" ); if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); ResultSet rs = ps.executeQuery(); Logging.instance().log("xaction", 1, "INTER load definitions" ); if ( rs.next() ) { MyCallbackHandler cb = new MyCallbackHandler(); // FIXME: multiple null handlings missing parserInstance().parse( new org.xml.sax.InputSource(rs.getCharacterStream("xml")), cb ); result = cb.getDefinition(); // add to cache if ( m_cache != null ) m_cache.set( lid, result ); } else { Logging.instance().log( "chunk", 0, "Definition not found" ); } rs.close(); } Logging.instance().log("xaction", 1, "FINAL load definitions" ); return result; } /** * Compiles the name of a DV/TR for log messages. * * @param d is a definition * @return the type plus FQDN of the definition */ private String what( Definition d ) { StringBuffer result = new StringBuffer(); switch ( d.getType() ) { case Definition.DERIVATION: result.append( "DV" ); break; case Definition.TRANSFORMATION: result.append( "TR" ); break; default: result.append( "??" ); break; } result.append(' ').append( d.shortID() ); return result.toString(); } /** * Saves a Definition, that is either a Transformation or Derivation, * into the backend database. This method, of course, does not allow * wildcarding. The definition has to be completely specified and * valid.

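   * <p>For illustration, a plausible call sequence; the schema handle,
   * the driver name, and the derivation value are invented for this
   * example and not prescribed by this class:
   * <pre>
   *   AnnotationSchema schema = new AnnotationSchema( "Postgres" );
   *   Derivation dv = ...;   // e.g. parsed from a VDLx document
   *   boolean changed = schema.saveDefinition( dv, true );
   * </pre>
   *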
 * Please note that updating a definition will remove all the meta-
 * data that was defined for the definition.
 *
 * @param definition is the new Definition to store.
 * @param overwrite true, if existing definitions will be overwritten by
 * new ones with the same primary (or secondary) key (-set), or false,
 * if a new definition will be rejected on key matches.
 *
 * @return true, if the backend database was changed, or
 * false, if the definition was not accepted into the backend.
 *
 * @see org.griphyn.vdl.classes.Definition
 * @see org.griphyn.vdl.classes.Transformation
 * @see org.griphyn.vdl.classes.Derivation
 * @see #loadDefinition( String, String, String, int )
 */
  public boolean saveDefinition( Definition definition, boolean overwrite )
    throws SQLException
  {
    Logging.instance().log( "chunk", 2, "SAVE DEFINITION started" );

    // figure out if it already exists
    long id = -1;
    try {
      Long temp = getDefinitionId(definition);
      if ( temp != null ) id = temp.longValue();
    } catch ( SQLException e ) {
      String cause = e.getMessage();
      Logging.instance().log( "app", 1, "Ignoring SQL exception"
                              + ( cause==null ? "" : ": " + e.getMessage() ) );
      m_dbdriver.clearWarnings();
    }
    boolean useInsert = ( id == -1 );

    // if the definition exists, but we must not overwrite: complain and exit
    if ( ! useInsert && ! overwrite ) {
      Logging.instance().log( "app", 0, definition.shortID() +
        " already exists (SQL anno_definition.id=" + id + "), ignoring" );
      return false;
    }
    Logging.instance().log( "app", 1, "Trying to add " + what(definition) );

    PreparedStatement ps = m_dbdriver.getPreparedStatement(
      useInsert ? "stmt.save.definition" : "stmt.updt.definition" );

    if ( useInsert ) {
      // INSERT
      try {
        id = m_dbdriver.sequence1( "def_id_seq" );
      } catch ( SQLException e ) {
        Logging.instance().log( "app", 0, "In " + definition.shortID() +
                                ": " + e.toString().trim() );
        Logging.instance().log("xaction", 1, "START rollback" );
        m_dbdriver.cancelPreparedStatement( "stmt.save.definition" );
        m_dbdriver.rollback();
        Logging.instance().log("xaction", 1, "FINAL rollback" );
        return false;
      }

      // add ID explicitly from sequence to insertion
      Logging.instance().log("xaction", 1, "START save definition" );
      int i = 1;
      longOrNull( ps, i++, id );
      ps.setInt( i++, definition.getType() );
      if ( definition.getName() == null )
        throw new SQLException( "VDS inconsistency: " +
                                "The name of a definition is null" );
      else
        ps.setString( i++, definition.getName() );
      ps.setString( i++, makeNotNull(definition.getNamespace()) );
      ps.setString( i++, makeNotNull(definition.getVersion()) );
      String xml = definition.toXML( (String) null, (String) null );
      ps.setCharacterStream( i++, new StringReader(xml), xml.length() );

      // save prepared values
      Logging.instance().log( "chunk", 2, "INSERT INTO Definition" );
      try {
        ps.executeUpdate();
        if ( id == -1 ) id = m_dbdriver.sequence2( ps, "def_id_seq", 1 );
      } catch ( SQLException e ) {
        Logging.instance().log( "app", 0, "In " + definition.shortID() +
                                ": " + e.toString().trim() );
        Logging.instance().log("xaction", 1, "START rollback" );
        m_dbdriver.cancelPreparedStatement( "stmt.save.definition" );
        m_dbdriver.rollback();
        Logging.instance().log("xaction", 1, "FINAL rollback" );
        return false;
      }
      Logging.instance().log("xaction", 1, "FINAL save definition: ID=" + id );
    } else {
      // UPDATE
      Logging.instance().log("xaction", 1, "START update definition" );
      int i = 1;
      String xml = definition.toXML( (String) null, (String) null );
      ps.setCharacterStream( i++, new StringReader(xml), xml.length() );
      longOrNull( ps, i++, id );

      // update prepared values
      Logging.instance().log( "chunk", 2, "UPDATE Definition" );
      try {
        ps.executeUpdate();
      } catch ( SQLException e ) {
        Logging.instance().log( "app", 0, "In " + definition.shortID() +
                                ": " + e.toString().trim() );
        Logging.instance().log("xaction", 1, "START rollback" );
        m_dbdriver.cancelPreparedStatement( "stmt.updt.definition" );
        m_dbdriver.rollback();
        Logging.instance().log("xaction", 1, "FINAL rollback" );
        return false;
      }
      Logging.instance().log("xaction", 1, "FINAL update definition: ID=" + id );

      // TODO: Drop all old LFNs
      deleteLFNsForDefinitionId(id);
    }

    // batch save LFNs from Derivations
    if ( definition instanceof Derivation ) {
      Derivation derivation = (Derivation) definition;
      Set alreadyKnown = new HashSet();

      // ordering MUST MATCH classes.LFN constants!
      PreparedStatement stmt[] = {
        m_dbdriver.getPreparedStatement("stmt.save.lfn_i"),
        m_dbdriver.getPreparedStatement("stmt.save.lfn_o"),
        m_dbdriver.getPreparedStatement("stmt.save.lfn_b") };
      int[] count = new int[ stmt.length ];
      for ( int ii=0; ii < count.length; ++ii ) count[ii] = 0;

      // walk the actual arguments and batch the LFNs of each scalar
      for ( Iterator i=derivation.iteratePass(); i.hasNext(); ) {
        Value value = ((Pass) i.next()).getValue();
        if ( value != null ) {
          switch ( value.getContainerType() ) {
          case Value.SCALAR:
            saveScalar( id, (Scalar) value, alreadyKnown, stmt, count );
            break;
          case Value.LIST:
            for ( Iterator j =
                    ((org.griphyn.vdl.classes.List) value).iterateScalar();
                  j.hasNext(); ) {
              saveScalar( id, (Scalar) j.next(), alreadyKnown, stmt, count );
            }
            break;
          default:
            throw new RuntimeException( "illegal container type" );
          }
        }
      }

      for ( int ii=0; ii < stmt.length; ++ii ) {
        // anything to do in this table?
        if ( count[ii] > 0 ) {
          // batch insert
          Logging.instance().log( "chunk", 2, "BATCH INSERT for " +
                                  count[ii] + ' ' + c_lfn_names[ii] );
          Logging.instance().log( "xaction", 1, "START batch-add " +
                                  count[ii] + ' ' + c_lfn_names[ii] );
          int[] update = stmt[ii].executeBatch();
          Logging.instance().log( "xaction", 1, "FINAL batch-add " +
                                  count[ii] + ' ' + c_lfn_names[ii] );
        }
      }
    }

    // commit the changes
    Logging.instance().log("xaction", 1, "START commit" );
    this.m_dbdriver.commit();
    Logging.instance().log("xaction", 1, "FINAL commit" );

    // done
    return true;
  }

  /**
   * Saves all logical filenames from a Scalar object. This is a helper
   * function to save a single definition.
   *
   * @param id is the definition id in the DEFINITION table
   * @param scalar is a Scalar instance of which the LFNs are to be saved.
   * @param already is a set of filenames that were already added during
   * this session
   * @param stmt is an array of the ids of the prepared statements for
   * the different tables.
   * @param count count the number of entries in a prepared statement.
   *
   * @see #saveDefinition( Definition, boolean )
   */
  private void saveScalar( long id, Scalar scalar, Set already,
                           PreparedStatement[] stmt, int[] count )
    throws SQLException
  {
    int result = 0;
    for ( Iterator i=scalar.iterateLeaf(); i.hasNext(); ) {
      Leaf leaf = (Leaf) i.next();
      // only interested in logical filenames, nothing else
      if ( leaf instanceof LFN ) {
        LFN lfn = (LFN) leaf;
        String name = lfn.getFilename();

        // already inserted previously?
        if ( already.contains(name) ) continue;
        else already.add(name);

        // adjust!
        int index = -1;
        int link = lfn.getLink();
        switch ( link ) {
        case LFN.INPUT:  index = 0; break;
        case LFN.OUTPUT: index = 1; break;
        case LFN.INOUT:  index = 2; break;
        default:
          throw new RuntimeException( "Illegal linkage " + link +
                                      " for " + name );
        }

        int n = 1;
        if ( m_dbdriver.preferString() )
          stmt[index].setString( n++, Long.toString(id) );
        else
          stmt[index].setLong( n++, id );
        stmt[index].setString( n++, name );
        Logging.instance().log( "chunk", 3, "adding LFN " +
                                LFN.toString(link) + ':' + name );
        stmt[index].addBatch();

        count[index]++;
      }
    }
  }

  //
  // higher level methods, allowing for wildcarding unless working on
  // a single Definition.
  //

  /**
   * Obtains the primary key id for a given definition. "Fake" definitions
   * are NOT permissible. This is an internal helper function.
 *
   * @param namespace is the specific namespace, null will be mapped to ""
   * @param name is the specific name, null will be mapped to ""
   * @param version is the specific version, null will be mapped to ""
   * @param type is the type identifier, -1 is not allowed.
   * @return the id of the definition, or null if not found.
   *
   * @see #getDefinitionId( String, String, String, int )
   */
  protected Long getSpecificDefinitionId( String namespace, String name,
                                          String version, int type )
    throws SQLException
  {
    Logging.instance().log("xaction", 1, "START select ID from DEFINITION" );
    Long result = null;

    // ps.resetPreparedStatement( "stmt.select.id" );
    int i = 1;
    PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.id" );
    ps.setInt( i++, type );
    ps.setString( i++, makeNotNull(name) );
    ps.setString( i++, makeNotNull(namespace) );
    ps.setString( i++, makeNotNull(version) );
    Logging.instance().log( "chunk", 2, "SELECT id FROM definition" );

    ResultSet rs = ps.executeQuery();
    Logging.instance().log("xaction", 1, "INTER select ID from DEFINITION" );

    if ( rs.next() ) result = new Long( rs.getLong(1) );
    else Logging.instance().log( "chunk", 0, "Definition not found" );

    rs.close();
    Logging.instance().log("xaction", 1, "FINAL select ID from DEFINITION" );
    return result;
  }

  /**
   * Obtains the primary key id for a given definition. "Fake" definitions
   * are permissible. This is an internal helper function.
   *
   * @param d is a definition specification.
   * @return the id of the definition, or null if not found.
   *
   * @see #getSpecificDefinitionId( String, String, String, int )
   * @see #getDefinitionId( String, String, String, int )
   */
  protected Long getDefinitionId( Definition d )
    throws SQLException
  {
    return getSpecificDefinitionId( d.getNamespace(), d.getName(),
                                    d.getVersion(), d.getType() );
  }

  /**
   * Obtains the list of primary key ids for matching definitions.
   * This method allows for wildcards in the usual fashion. Use null for
   * strings as wildcards, and -1 for the type wildcard. This method may
   * return an empty list, but it will not return null. This is an
   * internal helper function.
   *
   * @param namespace namespace, null to match any namespace
   * @param name name, null to match any name
   * @param version version, null to match any version
   * @param type definition type (TR or DV)
   * @return a possibly empty list containing all matching
   * definition ids as Longs.
   *
   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
   * @see org.griphyn.vdl.classes.Definition#DERIVATION
   * @see #getDefinitionId( Definition )
   */
  protected java.util.List getDefinitionId( String namespace, String name,
                                            String version, int type )
    throws SQLException
  {
    java.util.List result = new ArrayList();
    Logging.instance().log("xaction", 1, "START select IDs from DEFINITION" );

    java.util.List select = new ArrayList(1);
    select.add( new String("distinct id") );

    java.util.Map where = new TreeMap();
    if ( type != -1 ) where.put( "type", Integer.toString(type) );
    if ( namespace != null ) where.put( "namespace", namespace );
    if ( name != null ) where.put( "name", name );
    if ( version != null ) where.put( "version", version );

    ResultSet rs = m_dbdriver.select( select, "anno_definition", where, null );
    while ( rs.next() ) result.add( new Long( rs.getLong("id") ) );

    rs.close();
    Logging.instance().log("xaction", 1, "FINAL select IDs from DEFINITION" );
    return result;
  }

  /**
   * Obtains the list of primary key ids for matching definitions.
   * This method allows for wildcards in the usual fashion. Use null for
   * strings as wildcards, and -1 for the type wildcard. It also allows
   * the special characters '%' and '_' in strings. This method may
   * return an empty list, but it will not return null. This is an
   * internal helper function.
   *
   * @param namespace namespace, null to match any namespace
   * @param name name, null to match any name
   * @param version version, null to match any version
   * @param type definition type (TR or DV)
   * @return a possibly empty list containing all matching
   * definition ids as Longs.
   *
   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
   * @see org.griphyn.vdl.classes.Definition#DERIVATION
   * @see #getDefinitionId( Definition )
   */
  protected java.util.List getDefinitionIdEx( String namespace, String name,
                                              String version, int type )
    throws SQLException
  {
    java.util.List result = new ArrayList();
    Logging.instance().log("xaction", 1, "START select IDs from DEFINITION" );

    java.util.List select = new ArrayList(1);
    select.add( new String("distinct id") );

    java.util.Map where = new TreeMap();
    java.util.Map operator = new TreeMap();
    if ( type != -1 ) where.put( "type", Integer.toString(type) );
    if ( namespace != null ) {
      where.put( "namespace", namespace );
      operator.put( "namespace", "LIKE" );
    }
    if ( name != null ) {
      where.put( "name", name );
      operator.put( "name", "LIKE" );
    }
    if ( version != null ) {
      where.put( "version", version );
      operator.put( "version", "LIKE" );
    }

    ResultSet rs = m_dbdriver.select( select, "anno_definition",
                                      where, operator, null );
    while ( rs.next() ) result.add( new Long( rs.getLong("id") ) );

    rs.close();
    Logging.instance().log("xaction", 1, "FINAL select IDs from DEFINITION" );
    return result;
  }

  /**
   * Search the database for the existence of a definition.
   *
   * @param definition the definition object to search for
   * @return true, if the definition exists, false if not found
   */
  public boolean containsDefinition( Definition definition )
    throws SQLException
  {
    boolean result = false;
    try {
      result = ( getDefinitionId(definition) != null );
    } catch ( SQLException sql ) {
      // ignore
      this.m_dbdriver.clearWarnings();
    }
    return result;
  }

  /**
   * Deletes the joined annotations when a definition is being deleted.
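   *
   * <p>The removal is two-phased, following the pseudo-SQL in the method
   * body (phase one gathers ids first, because the type tables are keyed
   * by annotation id alone):
   * <pre>
   *   annolist := SELECT id FROM anno_tr|anno_dv|anno_call|anno_targ|anno_lfn
   *               WHERE did (or name) matches the definition
   *   DELETE FROM anno_bool|anno_int|anno_float|anno_date|anno_text
   *               WHERE id IN annolist
   *   DELETE FROM anno_tr|anno_dv|anno_call|anno_targ|anno_lfn
   *               WHERE id IN annolist
   * </pre>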
 *
   * @param id is the definition id to remove
   * @return list of all annotation ids that were removed
   */
  private java.util.List deleteAnnotationFromDefinition( long id )
    throws SQLException
  {
    PreparedStatement ps = null;
    ResultSet rs = null;
    ArrayList idlist = new ArrayList();
    boolean preferString = m_dbdriver.preferString();

    //
    // part 1: assemble all affected annotation ids
    //
    // fnlist   := collect list of filenames WHERE id=$id
    // annolist := SELECT distinct id FROM anno_$rest WHERE did=$id
    // annolist += SELECT distinct id FROM anno_lfn WHERE name IN $fnlist
    Logging.instance().log("xaction", 1, "START *huge* union" );
    ps = this.m_dbdriver.getPreparedStatement( "stmt.select.big" );
    for ( int i=1; i<=7; ++i ) {
      if ( preferString ) ps.setString( i, Long.toString(id) );
      else ps.setLong( i, id );
    }
    rs = ps.executeQuery();
    while ( rs.next() ) {
      idlist.add( new Long( rs.getLong(1) ) );
    }
    rs.close();
    Logging.instance().log("xaction", 1, "FINAL *huge* union" );

    //
    // part 2: remove all affected annotations
    //
    // DELETE anno_$type WHERE id IN $annolist
    // DELETE anno_$rest WHERE id IN $annolist
    Logging.instance().log("xaction", 1, "START delete annotations" );

    // list of all statements we need to access
    PreparedStatement list[] = {
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_bool"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_int"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_float"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_date"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_text"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_tr"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_dv"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_lfn"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_targ"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_call") };

    // prepare and batch all statements
    for ( Iterator i=idlist.iterator(); i.hasNext(); ) {
      id = ((Long) i.next()).longValue();
      for ( int j=0; j < list.length; ++j ) {
        if ( preferString ) list[j].setString( 1, Long.toString(id) );
        else list[j].setLong( 1, id );
        list[j].addBatch();
      }
    }

    // execute all batches
    for ( int j=0; j < list.length; ++j ) {
      int[] status = list[j].executeBatch();
    }
    Logging.instance().log("xaction", 1, "FINAL delete annotations" );

    return idlist;
  }

  /**
   * Deletes one or more definitions from the backend database. The key
   * triple parameters may be wildcards, expressed as null values.
   *
   * @param namespace namespace, null to match any namespace
   * @param name name, null to match any name
   * @param version version, null to match any version
   * @param type definition type (TR or DV), -1 to match either
   * @return a list of all definitions that were deleted.
   *
   * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION
   * @see org.griphyn.vdl.classes.Definition#DERIVATION
   */
  public java.util.List deleteDefinition( String namespace, String name,
                                          String version, int type )
    throws SQLException
  {
    java.util.List result = new ArrayList();
    Logging.instance().log("xaction", 1, "START delete definitions" );

    java.util.List idlist = getDefinitionId( namespace, name, version, type );
    // list of all definitions with id > 0, to be deleted

    // save old values, unless we come from saveDefinition, which defers
    // the commit and does not need the saved values
    if ( ! m_deferDeleteCommit ) {
      for ( Iterator i=idlist.iterator(); i.hasNext(); ) {
        Definition d = loadDefinition( ((Long) i.next()).longValue() );
        if ( d != null ) result.add(d);
      }
    }

    // remove all affected annotations by walking through them
    // yuk, this is probably extremely expensive
    for ( Iterator i=idlist.iterator(); i.hasNext(); ) {
      long id = ((Long) i.next()).longValue();
      deleteAnnotationFromDefinition(id);
    }

    // list of all statements we need to access
    PreparedStatement ps[] = {
      this.m_dbdriver.getPreparedStatement("stmt.delete.lfn_i"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.lfn_o"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.lfn_b"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.xml") };

    // prepare and batch all statements
    boolean preferString = m_dbdriver.preferString();
    for ( Iterator i=idlist.iterator(); i.hasNext(); ) {
      long id = ((Long) i.next()).longValue();
      for ( int j=0; j < ps.length; ++j ) {
        if ( preferString ) ps[j].setString( 1, Long.toString(id) );
        else ps[j].setLong( 1, id );
        ps[j].addBatch();
      }
    }

    // run the batches
    for ( int j=0; j < ps.length; ++j ) {
      int[] status = ps[j].executeBatch();
    }

    // commit here, unless saveDefinition defers the commit
    if ( ! m_deferDeleteCommit ) m_dbdriver.commit();

    Logging.instance().log("xaction", 1, "FINAL delete definitions" );
    return result;
  }

  /**
   * Deletes an annotation key from the key table that is appropriate
   * for the class of the annotated object.
   *
   * @param id is the annotation id to delete, or -1 for no such key.
   * @param kind defines the kind/class of the annotated object.
   * @return true, if the database was modified, false otherwise.
   * @exception SQLException, if something went wrong during database
   * access.
   */
  private boolean deleteAnnotationKey( long id, int kind )
    throws SQLException
  {
    // no such key, nothing to be deleted
    if ( id == -1 ) return false;

    Logging.instance().log("xaction", 1, "START delete anno_" );
    PreparedStatement ps = null;
    switch (kind) {
    case CLASS_FILENAME:
      ps = m_dbdriver.getPreparedStatement( "stmt.delete.anno_lfn" );
      break;
    case CLASS_TRANSFORMATION:
      ps = m_dbdriver.getPreparedStatement( "stmt.delete.anno_tr" );
      break;
    case CLASS_DERIVATION:
      ps = m_dbdriver.getPreparedStatement( "stmt.delete.anno_dv" );
      break;
    case CLASS_DECLARE:
      ps = m_dbdriver.getPreparedStatement( "stmt.delete.anno_targ" );
      break;
    case CLASS_CALL:
      ps = m_dbdriver.getPreparedStatement( "stmt.delete.anno_call" );
      break;
    default:
      throw new SQLException( "Don't know the class of object" );
    }

    if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(id) );
    else ps.setLong( 1, id );
    Logging.instance().log( "chunk", 2, "DELETE FROM anno_" );

    int rc = ps.executeUpdate();
    Logging.instance().log("xaction", 1, "FINAL delete anno_" );

    boolean ret = deleteAnnotationValue(id);
    return ( rc != 0 && ret );
  }

  /**
   * Deletes the value of an annotation from all type tables, given
   * only the annotation id.
   *
   * @param id is the annotation id for which to delete.
   * @return true, if the database was modified, false otherwise.
   * @exception SQLException, if something went wrong during database
   * access.
   */
  private boolean deleteAnnotationValue( long id )
    throws SQLException
  {
    int result = 0;
    PreparedStatement list[] = {
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_bool"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_int"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_float"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_date"),
      this.m_dbdriver.getPreparedStatement("stmt.delete.anno_text") };

    for ( int j=0; j < list.length; ++j ) {
      if ( m_dbdriver.preferString() )
        list[j].setString( 1, Long.toString(id) );
      else
        list[j].setLong( 1, id );
      result += list[j].executeUpdate();
    }
    return ( result > 0 );
  }

  /**
   * Deletes an annotation in a type table with the specified
   * annotation id. The table is determined from the type of the
   * annotational tuple.
   *
   * @param id is the annotation id for which to delete
   * @param annotation is the annotation which determines the type
   * @return true, if the database was modified, false otherwise.
   * @exception SQLException, if something went wrong during database
   * access.
   */
  private boolean deleteAnnotationValue( long id, Tuple annotation )
    throws SQLException
  {
    Logging.instance().log("xaction", 1, "START delete anno_" );
    PreparedStatement ps = null;
    if ( annotation instanceof TupleBoolean )
      ps = m_dbdriver.getPreparedStatement( "stmt.delete.anno_bool" );
    else if ( annotation instanceof TupleDate )
      ps = m_dbdriver.getPreparedStatement( "stmt.delete.anno_date" );
    else if ( annotation instanceof TupleFloat )
      ps = m_dbdriver.getPreparedStatement( "stmt.delete.anno_float" );
    else if ( annotation instanceof TupleInteger )
      ps = m_dbdriver.getPreparedStatement( "stmt.delete.anno_int" );
    else if ( annotation instanceof TupleString )
      ps = m_dbdriver.getPreparedStatement( "stmt.delete.anno_text" );
    else
      throw new SQLException( "Don't know the tuple type" );

    if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(id) );
    else ps.setLong( 1, id );
    Logging.instance().log( "chunk", 2, "DELETE FROM anno_" );

    int rc = ps.executeUpdate();
    Logging.instance().log("xaction", 1, "FINAL delete anno_" );
    return ( rc != 0 );
  }

  /**
   * Deletes a specific key in an annotated transformation.
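   *
   * <p>For illustration, removing one key from a transformation; the
   * FQDI and key values are invented:
   * <pre>
   *   boolean gone = schema.deleteAnnotationTransformation(
   *      "example::findrange:1.0", "author" );
   * </pre>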
* * @param fqdi is the FQDI of the transformation * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Transformation */ public boolean deleteAnnotationTransformation( String fqdi, String key ) throws SQLException, IllegalArgumentException { int kind = CLASS_TRANSFORMATION; // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.TRANSFORMATION ); if ( did == null ) throw new SQLException( "Unknown TR " + fqdi ); // obtain possible existing anno_tr id long id = getAnnotationIdTransformation( did.longValue(), key ); // no such key, if the id is -1, handled by finalizer return deleteAnnotationKey( id, kind ); } /** * Deletes a specific key in an annotated derivation. * * @param fqdi is the FQDI of the derivation * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Derivation */ public boolean deleteAnnotationDerivation( String fqdi, String key ) throws SQLException, IllegalArgumentException { int kind = CLASS_DERIVATION; // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.DERIVATION ); if ( did == null ) throw new SQLException( "Unknown DV " + fqdi ); // obtain possible existing anno_dv id long id = getAnnotationIdDerivation( did.longValue(), key ); // no such key, if the id does not exist return deleteAnnotationKey( id, kind ); } /** * Deletes a specific key in an annotated formal argument. * * @param fqdi is the FQDI of the transformation * @param farg is the name of the formal argument * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Declare */ public boolean deleteAnnotationDeclare( String fqdi, String farg, String key ) throws SQLException, IllegalArgumentException { int kind = CLASS_DECLARE; // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.TRANSFORMATION ); if ( did == null ) throw new SQLException( "Unknown TR " + fqdi ); // obtain possible existing anno_call id long id = getAnnotationIdDeclare( did.longValue(), farg, key ); // no such key, if the id does not exist return deleteAnnotationKey( id, kind ); } /** * Deletes a specific key for a call statement. * * @param fqdi is the FQDI of the transformation * @param index is the number of the call to annotate. * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Call */ public boolean deleteAnnotationCall( String fqdi, int index, String key ) throws SQLException, IllegalArgumentException { int kind = CLASS_CALL; // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.TRANSFORMATION ); if ( did == null ) throw new SQLException( "Unknown TR " + fqdi ); // obtain possible existing anno_call id long id = getAnnotationIdCall( did.longValue(), index, key ); // no such key, if the id does not exist return deleteAnnotationKey( id, kind ); } /** * Deletes a specific key in an annotated filename. * * @param filename is the name of the file that was annotated. 
* @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.LFN */ public boolean deleteAnnotationFilename( String filename, String key ) throws SQLException, IllegalArgumentException { int kind = CLASS_FILENAME; // obtain possible existing anno_lfn id long id = getAnnotationIdFilename( filename, key ); // no such key, if the id does not exist return deleteAnnotationKey( id, kind ); } /** * Inserts a tuple into the correct type-table for annotations. * * @param id is the annotation id for which to insert. * @param annotation is the annotation to insert. Only the type * and value will be taken, as the key was inserted elsewhere. * @return true, if the database was modified, false otherwise. * @exception SQLException if something during the database access * went awry. */ private boolean saveAnnotationValue( long id, Tuple annotation ) throws SQLException { Logging.instance().log("xaction", 1, "START save anno_" ); PreparedStatement ps = null; if ( annotation instanceof TupleBoolean ) { ps = m_dbdriver.getPreparedStatement( "stmt.save.anno_bool" ); ps.setBoolean( 2, ((Boolean) annotation.getValue()).booleanValue() ); } else if ( annotation instanceof TupleDate ) { ps = m_dbdriver.getPreparedStatement( "stmt.save.anno_date" ); ps.setTimestamp( 2, ((Timestamp) annotation.getValue()) ); } else if ( annotation instanceof TupleFloat ) { ps = m_dbdriver.getPreparedStatement( "stmt.save.anno_float" ); ps.setDouble( 2, ((Double) annotation.getValue()).doubleValue() ); } else if ( annotation instanceof TupleInteger ) { ps = m_dbdriver.getPreparedStatement( "stmt.save.anno_int" ); ps.setLong( 2, ((Long) annotation.getValue()).longValue() ); } else if ( annotation instanceof TupleString ) { ps = m_dbdriver.getPreparedStatement( "stmt.save.anno_text" ); // ps.setString( 2, ((String) annotation.getValue()) ); String value = (String) annotation.getValue(); ps.setCharacterStream( 2, new StringReader(value), value.length() ); } else throw new SQLException( "Don't know the tuple type" ); if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); Logging.instance().log( "chunk", 2, "INSERT INTO anno_" ); int rc = ps.executeUpdate(); Logging.instance().log("xaction", 1, "FINAL save anno_" ); return ( rc != 0 ); } /** * Annotates a transformation with a tuple. * * @param fqdi is the FQDI to annotate * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain. 
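   *
   * <p>For illustration, attaching a string-valued annotation; the
   * schema handle, FQDI, and tuple values are invented:
   * <pre>
   *   Tuple t = new TupleString( "author", "Jane Doe" );
   *   long id = schema.saveAnnotationTransformation(
   *      "example::findrange:1.0", t, true );
   * </pre>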
* @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.Transformation */ public long saveAnnotationTransformation( String fqdi, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException { // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.TRANSFORMATION ); if ( did == null ) throw new SQLException( "Unknown TR " + fqdi ); // obtain possible existing anno_tr id long id = getAnnotationIdTransformation( did.longValue(), annotation.getKey() ); // insert into anno_tr with new id, if id does not exist if ( id == -1 ) { // obtain new id id = m_dbdriver.sequence1( "anno_id_seq" ); Logging.instance().log("xaction", 1, "START save anno_tr" ); PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.save.anno_tr" ); int i = 1; longOrNull( ps, i++, id ); if ( m_dbdriver.preferString() ) ps.setString( i++, did.toString() ); else ps.setLong( i++, did.longValue() ); ps.setString( i++, makeNotNull( annotation.getKey() ) ); // save prepared values Logging.instance().log( "chunk", 2, "INSERT INTO anno_tr" ); try { int rc = ps.executeUpdate(); if ( id == -1 ) id = m_dbdriver.sequence2( ps, "anno_id_seq", 1 ); } catch ( SQLException e ) { Logging.instance().log( "app", 0, "While inserting into anno_tr: " + e.toString().trim() ); // rollback in saveInvocation() m_dbdriver.cancelPreparedStatement( "stmt.save.anno_tr" ); throw e; // re-throw } Logging.instance().log("xaction", 1, "FINAL save anno_tr: ID=" + id ); } else { // id does exist, nothing to do in anno_tr } // delete before insert if overwrite mode if ( overwrite ) deleteAnnotationValue( id, annotation ); return ( saveAnnotationValue( id, annotation ) ? id : -1 ); } /** * Annotates a derivation with a tuple. * * @param fqdi is the FQDI to annotate * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain. 
 * @return the insertion id, or -1, if the database was untouched
   * @see org.griphyn.vdl.classes.Derivation
   */
  public long saveAnnotationDerivation( String fqdi, Tuple annotation,
                                        boolean overwrite )
    throws SQLException, IllegalArgumentException
  {
    // split FQDI
    String[] names = Separator.split(fqdi);  // may throw IAE

    // obtain DID for FQDI
    Long did = getSpecificDefinitionId( names[0], names[1], names[2],
                                        Definition.DERIVATION );
    if ( did == null ) throw new SQLException( "Unknown DV " + fqdi );

    // obtain possible existing anno_dv id
    long id = getAnnotationIdDerivation( did.longValue(),
                                         annotation.getKey() );

    // insert into anno_dv with new id, if id does not exist
    if ( id == -1 ) {
      // obtain new id
      id = m_dbdriver.sequence1( "anno_id_seq" );

      Logging.instance().log("xaction", 1, "START save anno_dv" );
      PreparedStatement ps =
        m_dbdriver.getPreparedStatement( "stmt.save.anno_dv" );
      int i = 1;
      longOrNull( ps, i++, id );
      if ( m_dbdriver.preferString() ) ps.setString( i++, did.toString() );
      else ps.setLong( i++, did.longValue() );
      ps.setString( i++, makeNotNull( annotation.getKey() ) );

      // save prepared values
      Logging.instance().log( "chunk", 2, "INSERT INTO anno_dv" );
      try {
        int rc = ps.executeUpdate();
        if ( id == -1 ) id = m_dbdriver.sequence2( ps, "anno_id_seq", 1 );
      } catch ( SQLException e ) {
        Logging.instance().log( "app", 0, "While inserting into anno_dv: " +
                                e.toString().trim() );
        // rollback in saveInvocation()
        m_dbdriver.cancelPreparedStatement( "stmt.save.anno_dv" );
        throw e; // re-throw
      }
      Logging.instance().log("xaction", 1, "FINAL save anno_dv: ID=" + id );
    } else {
      // id does exist, nothing to do in anno_dv
    }

    // delete before insert if overwrite mode
    if ( overwrite ) deleteAnnotationValue( id, annotation );

    return ( saveAnnotationValue( id, annotation ) ? id : -1 );
  }

  /**
   * Annotates a transformation argument with a tuple.
   *
   * @param fqdi is the FQDI to annotate
   * @param formalname is the name of the formal argument to annotate.
   * @param annotation is the value to place
   * @param overwrite is a predicate on replace or maintain.
 * @return the insertion id, or -1, if the database was untouched
   * @see org.griphyn.vdl.classes.Declare
   */
  public long saveAnnotationDeclare( String fqdi, String formalname,
                                     Tuple annotation, boolean overwrite )
    throws SQLException, IllegalArgumentException
  {
    // split FQDI
    String[] names = Separator.split(fqdi);  // may throw IAE

    // obtain DID for FQDI
    Long did = getSpecificDefinitionId( names[0], names[1], names[2],
                                        Definition.TRANSFORMATION );
    if ( did == null ) throw new SQLException( "Unknown TR " + fqdi );

    // verify that the formal argument exists in the transformation
    Transformation tr = (Transformation) loadDefinition( did.longValue() );
    boolean found = false;
    for ( Iterator i=tr.iterateDeclare(); i.hasNext(); ) {
      String arg = ((Declare) i.next()).getName();
      if ( arg.equals(formalname) ) {
        found = true;
        break;
      }
    }
    if ( ! found )
      throw new SQLException( "Invalid argument " + formalname +
                              " for TR " + fqdi );

    // obtain possible existing anno_targ id
    long id = getAnnotationIdDeclare( did.longValue(), formalname,
                                      annotation.getKey() );

    // insert into anno_targ with new id, if id does not exist
    if ( id == -1 ) {
      // obtain new id
      id = m_dbdriver.sequence1( "anno_id_seq" );

      Logging.instance().log("xaction", 1, "START save anno_targ" );
      PreparedStatement ps =
        m_dbdriver.getPreparedStatement( "stmt.save.anno_targ" );
      int i = 1;
      longOrNull( ps, i++, id );
      if ( m_dbdriver.preferString() ) ps.setString( i++, did.toString() );
      else ps.setLong( i++, did.longValue() );
      ps.setString( i++, makeNotNull( formalname ) );
      ps.setString( i++, makeNotNull( annotation.getKey() ) );

      // save prepared values
      Logging.instance().log( "chunk", 2, "INSERT INTO anno_targ" );
      try {
        int rc = ps.executeUpdate();
        if ( id == -1 ) id = m_dbdriver.sequence2( ps, "anno_id_seq", 1 );
      } catch ( SQLException e ) {
        Logging.instance().log( "app", 0, "While inserting into anno_targ: " +
                                e.toString().trim() );
        // rollback in saveInvocation()
        m_dbdriver.cancelPreparedStatement( "stmt.save.anno_targ" );
        throw e; // re-throw
      }
      Logging.instance().log("xaction", 1, "FINAL save anno_targ: ID=" + id );
    } else {
      // id does exist, nothing to do in anno_targ
    }

    // delete before insert if overwrite mode
    if ( overwrite ) deleteAnnotationValue( id, annotation );

    return ( saveAnnotationValue( id, annotation ) ? id : -1 );
  }

  /**
   * Annotates a transformation call with a tuple.
   *
   * @param fqdi is the FQDI to annotate
   * @param index is the number of the call to annotate.
   * @param annotation is the value to place
   * @param overwrite is a predicate on replace or maintain.
 * @return the insertion id, or -1, if the database was untouched
   * @see org.griphyn.vdl.classes.Call
   */
  public long saveAnnotationCall( String fqdi, int index, Tuple annotation,
                                  boolean overwrite )
    throws SQLException, IllegalArgumentException
  {
    // split FQDI
    String[] names = Separator.split(fqdi);  // may throw IAE

    // obtain DID for FQDI
    Long did = getSpecificDefinitionId( names[0], names[1], names[2],
                                        Definition.TRANSFORMATION );
    if ( did == null ) throw new SQLException( "Unknown TR " + fqdi );

    // verify that the call position exists in the transformation
    Transformation tr = (Transformation) loadDefinition( did.longValue() );
    if ( index <= 0 || tr.getCallCount() < index )
      throw new SQLException( "Invalid position " + index +
                              " for TR " + fqdi );

    // obtain possible existing anno_call id
    long id = getAnnotationIdCall( did.longValue(), index,
                                   annotation.getKey() );

    // insert into anno_call with new id, if id does not exist
    if ( id == -1 ) {
      // obtain new id
      id = m_dbdriver.sequence1( "anno_id_seq" );

      Logging.instance().log("xaction", 1, "START save anno_call" );
      PreparedStatement ps =
        m_dbdriver.getPreparedStatement( "stmt.save.anno_call" );
      int i = 1;
      longOrNull( ps, i++, id );
      if ( m_dbdriver.preferString() ) {
        ps.setString( i++, did.toString() );
        ps.setString( i++, Integer.toString(index) );
      } else {
        ps.setLong( i++, did.longValue() );
        ps.setInt( i++, index );
      }
      ps.setString( i++, makeNotNull( annotation.getKey() ) );

      // save prepared values
      Logging.instance().log( "chunk", 2, "INSERT INTO anno_call" );
      try {
        int rc = ps.executeUpdate();
        if ( id == -1 ) id = m_dbdriver.sequence2( ps, "anno_id_seq", 1 );
      } catch ( SQLException e ) {
        Logging.instance().log( "app", 0, "While inserting into anno_call: " +
                                e.toString().trim() );
        // rollback in saveInvocation()
        m_dbdriver.cancelPreparedStatement( "stmt.save.anno_call" );
        throw e; // re-throw
      }
      Logging.instance().log("xaction", 1, "FINAL save anno_call: ID=" + id );
    } else {
      // id does exist, nothing to do in anno_call
    }

    // delete before insert if overwrite mode
    if ( overwrite ) deleteAnnotationValue( id, annotation );

    return ( saveAnnotationValue( id, annotation ) ? id : -1 );
  }

  /**
   * Annotates a logical filename with a tuple.
   *
   * @param filename is the logical filename to annotate
   * @param annotation is the value to place
   * @param overwrite is a predicate on replace or maintain.
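   *
   * <p>For illustration, annotating a logical filename with an integer
   * size; the values are invented:
   * <pre>
   *   long id = schema.saveAnnotationFilename( "results.dat",
   *      new TupleInteger( "size", 4096 ), true );
   * </pre>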
 * @return the insertion id, or -1, if the database was untouched
   * @see org.griphyn.vdl.classes.LFN
   */
  public long saveAnnotationFilename( String filename, Tuple annotation,
                                      boolean overwrite )
    throws SQLException, IllegalArgumentException
  {
    // obtain possible existing anno_lfn id
    long id = getAnnotationIdFilename( filename, annotation.getKey() );

    // insert into anno_lfn with new id, if id does not exist
    if ( id == -1 ) {
      // obtain new id
      id = m_dbdriver.sequence1( "anno_id_seq" );

      Logging.instance().log("xaction", 1, "START save anno_lfn" );
      PreparedStatement ps =
        m_dbdriver.getPreparedStatement( "stmt.save.anno_lfn" );
      int i = 1;
      longOrNull( ps, i++, id );
      ps.setString( i++, makeNotNull( filename ) );
      ps.setString( i++, makeNotNull( annotation.getKey() ) );

      // save prepared values
      Logging.instance().log( "chunk", 2, "INSERT INTO anno_lfn" );
      try {
        int rc = ps.executeUpdate();
        if ( id == -1 ) id = m_dbdriver.sequence2( ps, "anno_id_seq", 1 );
      } catch ( SQLException e ) {
        Logging.instance().log( "app", 0, "While inserting into anno_lfn: " +
                                e.toString().trim() );
        // rollback in saveInvocation()
        m_dbdriver.cancelPreparedStatement( "stmt.save.anno_lfn" );
        throw e; // re-throw
      }
      Logging.instance().log("xaction", 1, "FINAL save anno_lfn: ID=" + id );
    } else {
      // id does exist, nothing to do in anno_lfn
    }

    // delete before insert if overwrite mode
    if ( overwrite ) deleteAnnotationValue( id, annotation );

    return ( saveAnnotationValue( id, annotation ) ? id : -1 );
  }

  /**
   * Annotates any of the annotatable classes with the specified tuple.
   * This is an interface method to the various class-specific methods.
   *
   * @param primary is the primary object specifier for the class.
   * According to the type, this is either the FQDI, or the filename.
   * @param secondary is a helper argument for annotations to calls
   * and formal arguments, and should be null for all other classes.
   * For calls, the argument must be packed into {@link java.lang.Integer}.
   * @param kind defines the kind/class of object to annotate.
   * @param annotation is the value to place into the class.
   * @param overwrite is a predicate on replace or maintain.
   * @return the insertion id, or -1, if the database was untouched
   * @see #saveAnnotationTransformation( String, Tuple, boolean )
   * @see #saveAnnotationDerivation( String, Tuple, boolean )
   * @see #saveAnnotationCall( String, int, Tuple, boolean )
   * @see #saveAnnotationDeclare( String, String, Tuple, boolean )
   * @see #saveAnnotationFilename( String, Tuple, boolean )
   */
  public long saveAnnotation( String primary, Object secondary, int kind,
                              Tuple annotation, boolean overwrite )
    throws SQLException, IllegalArgumentException
  {
    long result = -1;
    switch ( kind ) {
    case CLASS_TRANSFORMATION:
      result = saveAnnotationTransformation( primary, annotation, overwrite );
      break;
    case CLASS_DERIVATION:
      result = saveAnnotationDerivation( primary, annotation, overwrite );
      break;
    case CLASS_CALL:
      // may throw ClassCastException
      result = saveAnnotationCall( primary, ((Integer) secondary).intValue(),
                                   annotation, overwrite );
      break;
    case CLASS_DECLARE:
      // may throw ClassCastException
      result = saveAnnotationDeclare( primary, ((String) secondary),
                                      annotation, overwrite );
      break;
    case CLASS_FILENAME:
      result = saveAnnotationFilename( primary, annotation, overwrite );
      break;
    default:
      throw new IllegalArgumentException( "The class kind=" + kind +
                                          " cannot be annotated" );
    }

    if ( result != -1 )
      if ( !
m_deferDeleteCommit ) m_dbdriver.commit(); return result; } /** * Obtains the value at a specific id from the boolean annotations. * * @param id is the annotation id * @param key is used to create the tuple * @return null if not found, or a valid tuple otherwise. */ private TupleBoolean loadAnnotationBoolean( long id, String key ) throws SQLException { TupleBoolean result = null; Logging.instance().log("xaction", 1, "START select anno_bool" ); PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.anno_bool" ); if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); Logging.instance().log( "chunk", 2, "SELECT value FROM anno_bool" ); ResultSet rs = ps.executeQuery(); if ( rs.next() ) { boolean value = rs.getBoolean(1); result = new TupleBoolean( key, value ); } rs.close(); Logging.instance().log("xaction", 1, "FINAL select anno_bool" ); return result; } /** * Obtains the value at a specific id from the integer annotations. * * @param id is the annotation id * @param key is used to create the tuple * @return null if not found, or a valid tuple otherwise. */ private TupleInteger loadAnnotationInteger( long id, String key ) throws SQLException { TupleInteger result = null; Logging.instance().log("xaction", 1, "START select anno_int" ); PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.anno_int" ); if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); Logging.instance().log( "chunk", 2, "SELECT value FROM anno_int" ); ResultSet rs = ps.executeQuery(); if ( rs.next() ) { long value = rs.getLong(1); result = new TupleInteger( key, value ); } rs.close(); Logging.instance().log("xaction", 1, "FINAL select anno_int" ); return result; } /** * Obtains the value at a specific id from the float annotations. * * @param id is the annotation id * @param key is used to create the tuple * @return null if not found, or a valid tuple otherwise. */ private TupleFloat loadAnnotationFloat( long id, String key ) throws SQLException { TupleFloat result = null; Logging.instance().log("xaction", 1, "START select anno_float" ); PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.anno_float" ); if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); Logging.instance().log( "chunk", 2, "SELECT value FROM anno_float" ); ResultSet rs = ps.executeQuery(); if ( rs.next() ) { double value = rs.getDouble(1); result = new TupleFloat( key, value ); } rs.close(); Logging.instance().log("xaction", 1, "FINAL select anno_float" ); return result; } /** * Obtains the value at a specific id from the date annotations. * * @param id is the annotation id * @param key is used to create the tuple * @return null if not found, or a valid tuple otherwise. */ private TupleDate loadAnnotationDate( long id, String key ) throws SQLException { TupleDate result = null; Logging.instance().log("xaction", 1, "START select anno_date" ); PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.anno_date" ); if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(id) ); else ps.setLong( 1, id ); Logging.instance().log( "chunk", 2, "SELECT value FROM anno_date" ); ResultSet rs = ps.executeQuery(); if ( rs.next() ) { java.sql.Timestamp value = rs.getTimestamp(1); result = new TupleDate( key, value ); } rs.close(); Logging.instance().log("xaction", 1, "FINAL select anno_date" ); return result; } /** * Obtains the value at a specific id from the string annotations. 
 *
   * @param id is the annotation id
   * @param key is used to create the tuple
   * @return null if not found, or a valid tuple otherwise.
   */
  private TupleString loadAnnotationString( long id, String key )
    throws SQLException
  {
    TupleString result = null;
    Logging.instance().log("xaction", 1, "START select anno_text" );

    PreparedStatement ps =
      m_dbdriver.getPreparedStatement( "stmt.select.anno_text" );
    if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(id) );
    else ps.setLong( 1, id );
    Logging.instance().log( "chunk", 2, "SELECT value FROM anno_text" );

    ResultSet rs = ps.executeQuery();
    if ( rs.next() ) {
      // drain the character stream, so that long values are not truncated
      Reader r = rs.getCharacterStream(1);
      StringBuffer temp = new StringBuffer(128);
      try {
        int ch;
        while ( (ch = r.read()) >= 0 ) temp.append( (char) ch );
      } catch ( IOException ioe ) {
        throw new SQLException(ioe.getMessage());
      }
      result = new TupleString( key, temp.toString() );
    }

    rs.close();
    Logging.instance().log("xaction", 1, "FINAL select anno_text" );
    return result;
  }

  /**
   * Tries to retrieve a tuple value from its annotation id by walking
   * over all type tables, most likely table first. This is an internal
   * function helping to keep common code common.
   *
   * @param id is the annotation id to search for
   * @param key is the key for tuple creation.
   * @return null, if the id was -1 (no such id), or if nothing
   * was found.
   */
  private Tuple loadAnnotationFinal( long id, String key )
    throws SQLException
  {
    Tuple result = null;
    if ( id != -1 ) {
      // order by likelihood
      result = loadAnnotationString( id, key );
      if ( result == null ) result = loadAnnotationInteger( id, key );
      if ( result == null ) result = loadAnnotationFloat( id, key );
      if ( result == null ) result = loadAnnotationDate( id, key );
      if ( result == null ) result = loadAnnotationBoolean( id, key );
    }
    return result;
  }

  /**
   * Obtains the value to a specific key in an annotated transformation.
   *
   * @param fqdi is the FQDI of the transformation
   * @param key is the key to search for
   * @return the annotated value, or null if not found.
   * @see org.griphyn.vdl.classes.Transformation
   */
  public Tuple loadAnnotationTransformation( String fqdi, String key )
    throws SQLException, IllegalArgumentException
  {
    // split FQDI
    String[] names = Separator.split(fqdi);  // may throw IAE

    // obtain DID for FQDI
    Long did = getSpecificDefinitionId( names[0], names[1], names[2],
                                        Definition.TRANSFORMATION );
    if ( did == null ) throw new SQLException( "Unknown TR " + fqdi );

    // obtain possible existing anno_tr id
    long id = getAnnotationIdTransformation( did.longValue(), key );

    // no such key, if the id is -1, handled by finalizer
    return loadAnnotationFinal( id, key );
  }

  /**
   * Obtains the value to a specific key in an annotated derivation.
   *
   * @param fqdi is the FQDI of the derivation
   * @param key is the key to search for
   * @return the annotated value, or null if not found.
   * @see org.griphyn.vdl.classes.Derivation
   */
  public Tuple loadAnnotationDerivation( String fqdi, String key )
    throws SQLException, IllegalArgumentException
  {
    // split FQDI
    String[] names = Separator.split(fqdi);  // may throw IAE

    // obtain DID for FQDI
    Long did = getSpecificDefinitionId( names[0], names[1], names[2],
                                        Definition.DERIVATION );
    if ( did == null ) throw new SQLException( "Unknown DV " + fqdi );

    // obtain possible existing anno_dv id
    long id = getAnnotationIdDerivation( did.longValue(), key );

    // no such key, if the id does not exist
    return loadAnnotationFinal( id, key );
  }

  /**
   * Obtains the value to a specific key in an annotated formal argument.
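   *
   * <p>For illustration, reading back one key of a formal argument;
   * the names are invented:
   * <pre>
   *   Tuple t = schema.loadAnnotationDeclare(
   *      "example::findrange:1.0", "arg", "unit" );
   *   if ( t != null ) System.out.println( t.getValue() );
   * </pre>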
* * @param fqdi is the FQDI of the transformation * @param farg is the name of the formal argument * @param key is the key to search for * @return the annotated value, or null if not found * @see org.griphyn.vdl.classes.Declare */ public Tuple loadAnnotationDeclare( String fqdi, String farg, String key ) throws SQLException, IllegalArgumentException { // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.TRANSFORMATION ); if ( did == null ) throw new SQLException( "Unknown TR " + fqdi ); // obtain possible existing anno_call id long id = getAnnotationIdDeclare( did.longValue(), farg, key ); // no such key, if the id does not exist return loadAnnotationFinal( id, key ); } /** * Obtains the value to a specific key for a call statement. * * @param fqdi is the FQDI of the transformation * @param index is the number of the call to annotate. * @param key is the key to search for * @return the annotated value, or null if not found * @see org.griphyn.vdl.classes.Call */ public Tuple loadAnnotationCall( String fqdi, int index, String key ) throws SQLException, IllegalArgumentException { // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.TRANSFORMATION ); if ( did == null ) throw new SQLException( "Unknown TR " + fqdi ); // obtain possible existing anno_call id long id = getAnnotationIdCall( did.longValue(), index, key ); // no such key, if the id does not exist return loadAnnotationFinal( id, key ); } /** * Obtains the value to a specific key in an annotated filename. * * @param filename is the name of the file that was annotated. * @param key is the key to search for * @return the annotated value, or null if not found. * @see org.griphyn.vdl.classes.LFN */ public Tuple loadAnnotationFilename( String filename, String key ) throws SQLException, IllegalArgumentException { // obtain possible existing anno_lfn id long id = getAnnotationIdFilename( filename, key ); // no such key, if the id does not exist return loadAnnotationFinal( id, key ); } /** * Retrieves a specific annotation from an annotatable classes with * the specified tuple. This is an interface method to the various * class-specific methods. * * @param primary is the primary object specifier for the class. * According to the type, this is either the FQDI, or the filename. * @param secondary is a helper argument for annotations to calls * and formal arguments, and should be null for all other classes. * For calls, the argument must be packed into {@link java.lang.Integer}. * @param kind defines the kind/class of object to annotate. * @param key is the key to look for. * @return null if not found, otherwise the annotation tuple. 
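   *
   * <p>For illustration, the call-position variant packs its secondary
   * argument into an {@link java.lang.Integer}; the values are invented:
   * <pre>
   *   Tuple t = schema.loadAnnotation( "example::findrange:1.0",
   *      new Integer(2), CLASS_CALL, "site" );
   * </pre>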
* @see #loadAnnotationTransformation( String, String ) * @see #loadAnnotationDerivation( String, String ) * @see #loadAnnotationCall( String, int, String ) * @see #loadAnnotationDeclare( String, String, String ) * @see #loadAnnotationFilename( String, String ) */ public Tuple loadAnnotation( String primary, Object secondary, int kind, String key ) throws SQLException, IllegalArgumentException { Tuple result = null; switch ( kind ) { case CLASS_TRANSFORMATION: result = loadAnnotationTransformation( primary, key ); break; case CLASS_DERIVATION: result = loadAnnotationDerivation( primary, key ); break; case CLASS_CALL: // may throw ClassCastException result = loadAnnotationCall( primary, ((Integer) secondary).intValue(), key ); break; case CLASS_DECLARE: // may throw ClassCastException result = loadAnnotationDeclare( primary, ((String) secondary), key ); break; case CLASS_FILENAME: result = loadAnnotationFilename( primary, key ); break; default: throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" ); } return result; } /** * Lists all annotations for a transformation. * * @param fqdi is the FQDI of the transformation * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.Transformation */ public java.util.List loadAnnotationTransformation( String fqdi ) throws SQLException, IllegalArgumentException { java.util.List result = new java.util.ArrayList(); // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.TRANSFORMATION ); if ( did == null ) throw new SQLException( "Unknown TR " + fqdi ); // obtain all anno_tr ids Logging.instance().log("xaction", 1, "START select anno_tr2" ); int i = 1; PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.anno_tr2" ); if ( m_dbdriver.preferString() ) ps.setString( i++, did.toString() ); else ps.setLong( i++, did.longValue() ); Logging.instance().log( "chunk", 2, "SELECT id FROM anno_tr" ); ResultSet rs = ps.executeQuery(); while ( rs.next() ) { Tuple temp = loadAnnotationFinal( rs.getLong(1), rs.getString(2) ); if ( temp != null ) result.add(temp); } rs.close(); Logging.instance().log("xaction", 1, "FINAL select anno_tr2" ); return result; } /** * Lists all annotations for a derivation. * * @param fqdi is the FQDI of the derivation * @return a list of tuples, which may be empty. 
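   *
   * <p>For illustration, listing every annotation of a derivation;
   * the FQDI is invented:
   * <pre>
   *   for ( Iterator i = schema.loadAnnotationDerivation(
   *           "example::run:1.0" ).iterator(); i.hasNext(); ) {
   *     Tuple t = (Tuple) i.next();
   *     System.out.println( t.getKey() + " " + t.getValue() );
   *   }
   * </pre>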
* @see org.griphyn.vdl.classes.Derivation */ public java.util.List loadAnnotationDerivation( String fqdi ) throws SQLException, IllegalArgumentException { java.util.List result = new java.util.ArrayList(); // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.DERIVATION ); if ( did == null ) throw new SQLException( "Unknown DV " + fqdi ); // obtain all anno_tr ids Logging.instance().log("xaction", 1, "START select anno_dv2" ); int i = 1; PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.anno_dv2" ); if ( m_dbdriver.preferString() ) ps.setString( i++, did.toString() ); else ps.setLong( i++, did.longValue() ); Logging.instance().log( "chunk", 2, "SELECT id FROM anno_dv" ); ResultSet rs = ps.executeQuery(); while ( rs.next() ) { Tuple temp = loadAnnotationFinal( rs.getLong(1), rs.getString(2) ); if ( temp != null ) result.add(temp); } rs.close(); Logging.instance().log("xaction", 1, "FINAL select anno_dv2" ); return result; } /** * Lists all annotations for a formal argument. * * @param fqdi is the FQDI of the transformation * @param farg is the name of the formal argument * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.Declare */ public java.util.List loadAnnotationDeclare( String fqdi, String farg ) throws SQLException, IllegalArgumentException { java.util.List result = new java.util.ArrayList(); // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.TRANSFORMATION ); if ( did == null ) throw new SQLException( "Unknown TR " + fqdi ); // obtain all anno_tr ids Logging.instance().log("xaction", 1, "START select anno_targ2" ); int i = 1; PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.anno_targ2" ); if ( m_dbdriver.preferString() ) ps.setString( i++, did.toString() ); else ps.setLong( i++, did.longValue() ); ps.setString( i++, farg ); Logging.instance().log( "chunk", 2, "SELECT id FROM anno_targ" ); ResultSet rs = ps.executeQuery(); while ( rs.next() ) { Tuple temp = loadAnnotationFinal( rs.getLong(1), rs.getString(2) ); if ( temp != null ) result.add(temp); } rs.close(); Logging.instance().log("xaction", 1, "FINAL select anno_targ2" ); return result; } /** * Lists all annotations for a call statement. * * @param fqdi is the FQDI of the transformation * @param index is the number of the call to annotate. * @return a list of tuples, which may be empty. 
* @see org.griphyn.vdl.classes.Call */ public java.util.List loadAnnotationCall( String fqdi, int index ) throws SQLException, IllegalArgumentException { java.util.List result = new java.util.ArrayList(); // split FQDI String[] names = Separator.split(fqdi); // may throw IAE // obtain DID for FQDI Long did = getSpecificDefinitionId( names[0], names[1], names[2], Definition.TRANSFORMATION ); if ( did == null ) throw new SQLException( "Unknown TR " + fqdi ); // obtain all anno_call ids Logging.instance().log("xaction", 1, "START select anno_call2" ); int i = 1; PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.anno_call2" ); if ( m_dbdriver.preferString() ) { ps.setString( i++, did.toString() ); ps.setString( i++, Integer.toString(index) ); } else { ps.setLong( i++, did.longValue() ); ps.setInt( i++, index ); } Logging.instance().log( "chunk", 2, "SELECT id FROM anno_call" ); ResultSet rs = ps.executeQuery(); while ( rs.next() ) { Tuple temp = loadAnnotationFinal( rs.getLong(1), rs.getString(2) ); if ( temp != null ) result.add(temp); } rs.close(); Logging.instance().log("xaction", 1, "FINAL select anno_call2" ); return result; } /** * Lists all annotations for a logical filename. * * @param filename is the logical filename. * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.LFN */ public java.util.List loadAnnotationFilename( String filename ) throws SQLException, IllegalArgumentException { java.util.List result = new java.util.ArrayList(); // obtain all anno_lfn ids Logging.instance().log("xaction", 1, "START select anno_lfn2" ); int i = 1; PreparedStatement ps = m_dbdriver.getPreparedStatement( "stmt.select.anno_lfn2" ); ps.setString( i++, filename ); Logging.instance().log( "chunk", 2, "SELECT id FROM anno_lfn" ); ResultSet rs = ps.executeQuery(); while ( rs.next() ) { Tuple temp = loadAnnotationFinal( rs.getLong(1), rs.getString(2) ); if ( temp != null ) result.add(temp); } rs.close(); Logging.instance().log("xaction", 1, "FINAL select anno_lfn2" ); return result; } /** * Retrieves all annotations from an annotatable class. This is an * interface method to the various class-specific methods. * * @param primary is the primary object specifier for the class. * According to the type, this is either the FQDI, or the filename. * @param secondary is a helper argument for annotations to calls * and formal arguments, and should be null for all other classes. * For calls, the argument must be packed into {@link java.lang.Integer}. * @param kind defines the kind/class of object to annotate. * * @return a list of tuples, which may be empty.
* @see #loadAnnotationTransformation( String ) * @see #loadAnnotationDerivation( String ) * @see #loadAnnotationCall( String, int ) * @see #loadAnnotationDeclare( String, String ) * @see #loadAnnotationFilename( String ) */ public java.util.List loadAnnotation( String primary, Object secondary, int kind ) throws SQLException, IllegalArgumentException { switch ( kind ) { case CLASS_TRANSFORMATION: return loadAnnotationTransformation( primary ); case CLASS_DERIVATION: return loadAnnotationDerivation( primary ); case CLASS_CALL: // may throw ClassCastException return loadAnnotationCall( primary, ((Integer) secondary).intValue() ); case CLASS_DECLARE: // may throw ClassCastException return loadAnnotationDeclare( primary, ((String) secondary) ); case CLASS_FILENAME: return loadAnnotationFilename( primary ); default: throw new IllegalArgumentException( "The class kind=" + kind + " cannot be annotated" ); } } /** * Search for LFNs or Definitions that have certain annotations. * * @param kind defines the kind/class of object annotated. * @param arg is used only for TR ARG and TR CALL. For the former * it is the name of the argument (String), for the latter the position of * the call (Integer). * @param tree stores the query tree to query the annotation * @return a list of LFNs if searching for filenames, otherwise a list of * definitions. * @exception SQLException if something goes wrong with the database. * @see org.griphyn.vdl.annotation.QueryTree */ public java.util.List searchAnnotation( int kind, Object arg, QueryTree tree) throws SQLException { java.util.List result = new java.util.ArrayList(); if ( tree == null) return result; String sql = tree.toSQL(kind, arg); if (sql == null || sql.equals("")) return result; // run the query generated from the annotation tree Logging.instance().log("xaction", 1, "START search annotation" ); // use backdoor, why don't we change this into some nicer name??? ResultSet rs = backdoor(sql); while ( rs.next() ) { if (kind == Annotation.CLASS_FILENAME) { String fn = rs.getString(1); result.add(fn); } else { Definition d = loadDefinition( rs.getLong(1) ); if ( d != null ) result.add(d); } } rs.close(); Logging.instance().log("xaction", 1, "FINAL search annotation" ); return result; } /** * A not very generic method to search annotations (and anything) in * the database. Selects any rows in one or more columns from one or * more tables restricted by some condition, possibly ordered.

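 * <p>A minimal usage sketch; the table and column names below are assumed
 * for illustration and are not prescribed by this method:
 * <pre>
 *   java.util.List columns = java.util.Arrays.asList( new String[]{ "id", "name" } );
 *   java.util.Map where = new java.util.HashMap();
 *   where.put( "name", "black" );
 *   ResultSet rs = schema.searchAnnotation( columns, "anno_tr", where, "id" );
 * </pre>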
* * WARNING: This is a method for internal use only. * * @param select is the ordered set of column names to select, or * simply a one-value list with an asterisk. * @param table is the name of the table to select from. * @param where is a collection of column names and values they must equal. * @param order is an optional ordering string. * @return something to search for results in. * @exception SQLException if something goes wrong with the database. * @see org.griphyn.vdl.dbdriver.DatabaseDriver#select( java.util.List, String, java.util.Map, String ) */ public ResultSet searchAnnotation( java.util.List select, String table, java.util.Map where, String order ) throws SQLException { return m_dbdriver.select( select, table, where, order ); } /** * Delete one or more definitions from the backend database. The key * triple parameters may be wildcards. Wildcards are expressed as * null values, or have special characters '%' and '_'. * * @param namespace namespace * @param name name * @param version version * @param type definition type (TR or DV) * @return a list of definitions that were deleted. * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public java.util.List deleteDefinitionEx( String namespace, String name, String version, int type ) throws SQLException { java.util.List result = new ArrayList(); Logging.instance().log("xaction", 1, "START delete definitions" ); java.util.List idlist = getDefinitionIdEx( namespace, name, version, type ); if ( idlist.size() == 0 ) return result; // postcondition: contains all IDs, count(id)>0, to be deleted // save old values if ( ! m_deferDeleteCommit ) { // we come from saveDefinition, thus we won't need saved values for ( Iterator i=idlist.iterator(); i.hasNext(); ) { Definition d = loadDefinition( ((Long) i.next()).longValue() ); if ( d != null ) result.add(d); } } // remove all affected annotations by walking through them // yuk, this is probably extremely expensive for ( Iterator i=idlist.iterator(); i.hasNext(); ) { long id = ((Long) i.next()).longValue(); deleteAnnotationFromDefinition(id); } // list of all statements we need to access PreparedStatement ps[] = { this.m_dbdriver.getPreparedStatement("stmt.delete.lfn_i"), this.m_dbdriver.getPreparedStatement("stmt.delete.lfn_o"), this.m_dbdriver.getPreparedStatement("stmt.delete.lfn_b"), this.m_dbdriver.getPreparedStatement("stmt.delete.xml") }; // prepare and batch all statements boolean preferString = m_dbdriver.preferString(); for ( Iterator i=idlist.iterator(); i.hasNext(); ) { long id = ((Long) i.next()).longValue(); for ( int j=0; j * * WARNING: This is a method for internal use only. * * @param query is an SQL query statement. * @return something to search for results in. * @exception SQLException if something goes wrong with the database. */ public ResultSet backdoor( String query ) throws SQLException { return m_dbdriver.backdoor( query ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/SingleFileSchema.java0000644000175000017500000001424111757531137025434 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification.
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import java.sql.*; import java.util.Iterator; import java.util.ListIterator; import java.util.Vector; import java.util.Enumeration; import java.util.ArrayList; import java.util.List; import java.io.*; import java.lang.reflect.*; import java.sql.SQLException; import edu.isi.pegasus.common.util.DynamicLoader; import org.griphyn.vdl.util.ChimeraProperties; import org.griphyn.vdl.util.FileHelper; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.util.LockFileLock; import org.griphyn.vdl.classes.*; import org.griphyn.vdl.parser.*; import org.griphyn.vdl.dbdriver.*; /** * This is a class that falls back not on a real database backend, but * rather on an existing Definitions data structure that is read from * file during construction (or rather, during open), and pushed back * into file at destruction (or rather, during close).

* While schemas in general should fall back onto drivers to perform * actions, it is rather difficult to create a JDBC interface to file * operations. Thus, the file operations are sneaked into this class.

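 * <p>A hypothetical properties setup, assuming the file.store and file.lock
 * keys read by the constructor below resolve under the VDC schema property
 * prefix (both values are examples only):
 * <pre>
 *   vds.db.vdc.schema.file.store = /home/user/vds.db
 *   vds.db.vdc.schema.file.lock  = LockFileLock
 * </pre>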
* This class is thought more for experimental use than production. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 4507 $ * @see org.griphyn.vdl.dbdriver * @see org.griphyn.vdl.classes.Definitions */ public class SingleFileSchema extends InMemorySchema { /** * Save the name of the database file, so we can dump our memory. */ private String m_filename; /** * Save the file locking helper, once dynaloaded. */ private FileHelper m_filehelper; /** * An instance of the VDLx XML parser. */ private org.griphyn.vdl.parser.VDLxParser m_parser; /** * Fakes a connect to the database. This class loads the memory * database during construction time from the specified file. * * @param hyphen_d is the CLI argument being passed, and ignored for now. */ public SingleFileSchema( String hyphen_d ) throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, SQLException, IOException { super(); // call minimalistic c'tor, no driver loading! ChimeraProperties props = ChimeraProperties.instance(); // Start a new set of definitions this.m_memory = new Definitions(); this.m_dbschemaprops = props.getDatabaseSchemaProperties( PROPERTY_PREFIX ); // obtain the schema location URL from the schema properties: // url is a list of strings representing schema locations. The // content exists in pairs, one of the namespace URI, one of the // location URL. String url = this.m_dbschemaprops.getProperty( "xml.url", props.getVDLSchemaLocation() ); Logging.instance().log( "dbschema", 3, "schema=" + url ); this.m_parser = new org.griphyn.vdl.parser.VDLxParser(url); Logging.instance().log( "dbschema", 3, "created reader" ); // obtain the file location from the schema properties File db = new File( props.getSysConfDir(), "vds.db" ); this.m_filename = this.m_dbschemaprops.getProperty( "file.store", db.getAbsolutePath() ); Logging.instance().log( "dbschema", 3, "filename=" + m_filename ); // Determine helper to provide locking functions String locker = m_dbschemaprops.getProperty( "file.lock", "LockFileLock" ); if ( locker.indexOf('.') == -1 ) locker = "org.griphyn.vdl.util." + locker; // dynamically load the file locking helper implementation Logging.instance().log( "dbschema", 3, "trying to load " + locker ); DynamicLoader dl = new DynamicLoader(locker); String arg[] = new String[1]; arg[0] = m_filename; m_filehelper = (FileHelper) dl.instantiate(arg); // FileHelper m_filehelper = new FileHelper2(this.m_filename); File file = m_filehelper.openReader(); if ( file == null ) { Logging.instance().log( "dbschema", 3, "openReader returned null" ); throw new SQLException( "Can't lock file " + this.m_filename ); } // Does database exist? try { if ( file.exists() ) { // file exists, read it unless empty if ( file.length() > 0 ) { // parse the complete file (database) this.m_parser.parse( new org.xml.sax.InputSource( new BufferedReader( new FileReader(file))), new NoHassleHandler(this.m_memory) ); Logging.instance().log( "app", 1, this.m_memory.getDefinitionCount() + " definitions loaded into main memory" ); } } } catch ( Exception e ) { throw new SQLException( e.getMessage() ); } finally { // always release locks try { m_filehelper.closeReader(file); } catch ( Exception e ) { throw new SQLException( e.getMessage() ); } } } /** * Disassociate from the database driver before finishing. In this * case, dump the memory database back to the file that was saved. * Mind that performing this action may throw NullPointerException * in later stages!
*/ public void close() throws SQLException { super.close(); // FileHelper m_filehelper = new FileHelper2(this.m_filename); File file = m_filehelper.openWriter(); if (file == null) { throw new SQLException("Unable to create file writer!"); } try { BufferedWriter bw = new BufferedWriter(new FileWriter(file)); this.m_memory.toXML(bw, ""); bw.flush(); bw.close(); } catch ( IOException e ) { throw new SQLException( e.getMessage() ); } finally { // always release locks try { m_filehelper.closeWriter(file); } catch ( Exception e ) { throw new SQLException( e.getMessage() ); } } } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/MyCallbackHandler.java0000644000175000017500000000422311757531137025571 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import org.griphyn.vdl.parser.*; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.classes.*; /** * This class adds a given Definition from the parser's callback into * the fresh in-memory storage. It is a streamlined version of the more * versatile {@link MemoryStorage} handler. End-users should not use * this class. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ class MyCallbackHandler implements FinalizerHandler { /** * This is a reference to the already established in-memory storage. */ private Definition m_memory; /** * The c'tor initializes the references to a single Definition. */ public MyCallbackHandler() { this.m_memory = null; } /** * Returns the value stored by the XML reader's callback function. * * @return a single Definition that was read, or null. */ public Definition getDefinition() { return this.m_memory; } /** * This method adds the given Definition to whatever storage is * implemented underneath. * * @param d is the Definition that is ready to be stored. * @return true, if new version was stored and database modified */ public boolean store( VDL d ) { if ( d instanceof Definition ) { this.m_memory = (Definition) d; Logging.instance().log( "chunk", 0, "found " + m_memory.shortID() ); return true; } else { Logging.instance().log( "chunk", 0, "not a definition: " + d.toString() ); return false; } } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/VDC.java0000644000175000017500000001425711757531137022715 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ package org.griphyn.vdl.dbschema; import java.sql.*; import org.griphyn.vdl.classes.Definition; /** * This common schema interface defines the schemas in which the * abstraction layers access the VDC. This layer is independent of the * implementing database, and does so by going via the database driver * class API. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * @see org.griphyn.vdl.dbschema.DatabaseSchema * @see org.griphyn.vdl.dbdriver */ public interface VDC extends Catalog { /** * Names the property key prefix employed for schemas dealing with the VDC. */ public static final String PROPERTY_PREFIX = "vds.db.vdc.schema"; // // taken from (old) DBManager // /** * Loads a single Definition from the backend database into a Java object. * This method does not allow wildcarding! * * @param namespace namespace, null will be converted into empty string * @param name name, null will be converted into empty string * @param version version, null will be converted into empty string * @param type type of the definition (TR or DV), must not be -1. * @return the Definition as specified, or null if not found. * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION * @see #saveDefinition( Definition, boolean ) * @see #containsDefinition( Definition ) * @see #searchDefinition( String, String, String, int ) */ public abstract Definition loadDefinition( String namespace, String name, String version, int type ) throws SQLException; /** * Saves a Definition, that is either a Transformation or Derivation, * into the backend database. * * @param definition is the new Definition to store. * @param overwrite true, if existing definitions will be overwritten by * new ones with the same primary (or secondary) key (-set), or false, * if a new definition will be rejected on key matches. * * @return true, if the backend database was changed, or * false, if the definition was not accepted into the backend. * * @see org.griphyn.vdl.classes.Definition * @see org.griphyn.vdl.classes.Transformation * @see org.griphyn.vdl.classes.Derivation * @see #loadDefinition( String, String, String, int ) * @see #deleteDefinition( String, String, String, int ) */ public abstract boolean saveDefinition( Definition definition, boolean overwrite ) throws SQLException; // // higher level methods, allowing for wildcarding as stated. // /** * Check with the backend database, if the given definition exists. * * @param definition is a Definition object to search for * @return true, if the Definition exists, false if not found */ public abstract boolean containsDefinition( Definition definition ) throws SQLException; /** * Deletes a specific Definition object from the database. No wildcard * matching will be done. "Fake" definitions are permissible, meaning * they just have the secondary key triple. * * @param definition is the definition specification to delete * @return true if something was deleted, false if nonexistent. * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public abstract boolean deleteDefinition( Definition definition ) throws SQLException; /** * Delete one or more definitions from the backend database. The key * triple parameters may be wildcards. Wildcards are expressed as * null values. * * @param namespace namespace * @param name name * @param version version * @param type definition type (TR or DV) * @return a list of definitions that were deleted.
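 * <p>For example (hypothetical), passing null for name and version deletes
 * every matching transformation in one namespace:
 * <pre>
 *   java.util.List gone = vdc.deleteDefinition( "test", null, null,
 *                                               Definition.TRANSFORMATION );
 * </pre>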
* * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public abstract java.util.List deleteDefinition( String namespace, String name, String version, int type ) throws SQLException; /** * Search the database for definitions by ns::name:version triple * and by type (either Transformation or Derivation). This version * of the search allows for jokers expressed as null values. * * @param namespace namespace, null to match any namespace * @param name name, null to match any name * @param version version, null to match any version * @param type type of definition, see below, or -1 as wildcard * @return a list of Definition items, which may be empty * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION * @see #loadDefinition( String, String, String, int ) */ public abstract java.util.List searchDefinition( String namespace, String name, String version, int type ) throws SQLException; /** * Searches the database for all derivations that contain a certain LFN. * The linkage is an additional constraint. This method does not allow * jokers. * * @param lfn the LFN name * @param link the linkage type of the LFN * @return a list of Definition items that match the criterion. * * @see org.griphyn.vdl.classes.LFN#NONE * @see org.griphyn.vdl.classes.LFN#INPUT * @see org.griphyn.vdl.classes.LFN#OUTPUT * @see org.griphyn.vdl.classes.LFN#INOUT */ public abstract java.util.List searchFilename( String lfn, int link ) throws SQLException; } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/Advanced.java0000644000175000017500000001041511757531137023776 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import java.sql.*; import org.griphyn.vdl.classes.Definition; /** * This common schema interface defines advanced search interfaces for * VDC. The advanced methods permit wildcard searches, partial * matches, and candidate list compilations that are not part of the * simpler {@link VDC} interface. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see org.griphyn.vdl.dbschema.DatabaseSchema * @see org.griphyn.vdl.dbdriver */ public interface Advanced extends VDC { // // higher level methods, allowing for partial matching // /** * Delete one or more definitions from the backend database. The key * triple parameters may be wildcards. Wildcards are expressed as * null values, or have special characters '%' and '_'. * * @param namespace namespace * @param name name * @param version version * @param type definition type (TR or DV) * @return a list of definitions that were deleted.
* * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public abstract java.util.List deleteDefinitionEx( String namespace, String name, String version, int type ) throws SQLException; /** * Searches the database for definitions by ns::name:version triple * and by type (either Transformation or Derivation). This version of * the search allows for jokers expressed as null values. * * @param namespace namespace, null to match any namespace * @param name name, null to match any name * @param version version, null to match any version * @param type type of definition, see below, or -1 as wildcard * @return a list of Definition items, which may be empty * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION * @see #loadDefinition( String, String, String, int ) */ public abstract java.util.List searchDefinitionEx( String namespace, String name, String version, int type ) throws SQLException; /** * Searches the database for all LFNs that match a certain pattern. * The linkage is an additional constraint. This method allows * joker characters such as '%' and '_'. * * @param lfn the LFN name * @param link the linkage type of the LFN * @return a list of filenames that match the criterion. * * @see org.griphyn.vdl.classes.LFN#NONE * @see org.griphyn.vdl.classes.LFN#INPUT * @see org.griphyn.vdl.classes.LFN#OUTPUT * @see org.griphyn.vdl.classes.LFN#INOUT */ public abstract java.util.List searchLFN( String lfn, int link ) throws SQLException; /** * Searches the database for a list of namespaces of the definitions, * sorted in ascending order. * * @param type type of definition, see below, or -1 for both * @return a list of namespaces * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public abstract java.util.List getNamespaceList( int type ) throws SQLException; /** * Searches the database for a list of fully-qualified names of * the definitions sorted in ascending order. * * @param type type of definition, see below, or -1 for both. * @return a list of FQDNs * * @see org.griphyn.vdl.classes.Definition#TRANSFORMATION * @see org.griphyn.vdl.classes.Definition#DERIVATION */ public abstract java.util.List getFQDNList( int type ) throws SQLException; } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/Catalog.java0000644000175000017500000000236111757531137023644 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; /** * This interface groups different but related catalog subinterfaces. *

* The interface will require implementing classes to provide a constructor * that takes one String as its only argument. The class may ignore the string * for now. *

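 * <p>For illustration only, a conforming (hypothetical) implementation could
 * thus be declared as:
 * <pre>
 *   public class MySchema extends DatabaseSchema implements Catalog {
 *     public MySchema( String arg ) { super(); }  // the string may be ignored
 *   }
 * </pre>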
* Subinterfaces are required to provide a constant PROPERTY_PREFIX. This * constant selects the pieces of the vds.db.catalog.schema * property space that correspond to the appropriate catalog. * * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ public interface Catalog { // empty } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/Annotation.java0000644000175000017500000003602211757531137024405 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import java.sql.*; import org.griphyn.vdl.classes.Definition; import org.griphyn.vdl.annotation.QueryTree; import org.griphyn.vdl.annotation.Tuple; /** * This common schema interface defines the schemas in which the * abstraction layers access the metadata associated with VDC elements. * This layer is independent of the implementing database, and does so * by going via the database driver class API. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * @see org.griphyn.vdl.dbschema.DatabaseSchema * @see org.griphyn.vdl.dbdriver */ public interface Annotation extends Catalog { /** * Annotations for transformations. */ public static final int CLASS_TRANSFORMATION = 0; /** * Annotations for derivations. */ public static final int CLASS_DERIVATION = 1; /** * Annotations for call statements. */ public static final int CLASS_CALL = 2; /** * Annotations for formal arguments. */ public static final int CLASS_DECLARE = 3; /** * Annotations for logical filenames. */ public static final int CLASS_FILENAME = 4; /** * Annotates any of the annotatable classes with the specified tuple. * This is an interface method to the various class-specific methods. * * @param primary is the primary object specifier for the class. * According to the type, this is either the FQDI, or the filename. * @param secondary is a helper argument for annotations to calls * and formal arguments, and should be null for all other classes. * For calls, the argument must be packed into {@link java.lang.Integer}. * @param kind defines the kind/class of object to annotate. * @param annotation is the value to place into the class. * @param overwrite is a predicate on replace or maintain. * @return the insertion id, or -1, if the database was untouched * @see #saveAnnotationTransformation( String, Tuple, boolean ) * @see #saveAnnotationDerivation( String, Tuple, boolean ) * @see #saveAnnotationCall( String, int, Tuple, boolean ) * @see #saveAnnotationDeclare( String, String, Tuple, boolean ) * @see #saveAnnotationFilename( String, Tuple, boolean ) */ public long saveAnnotation( String primary, Object secondary, int kind, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException; /** * Annotates a transformation with a tuple. * * @param fqdi is the FQDI to annotate * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain.
* @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.Transformation */ public long saveAnnotationTransformation( String fqdi, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException; /** * Annotates a derivation with a tuple. * * @param fqdi is the FQDI to annotate * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain. * @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.Derivation */ public long saveAnnotationDerivation( String fqdi, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException; /** * Annotates a transformation argument with a tuple. * * @param fqdi is the FQDI to annotate * @param formalname is the name of the formal argument to annotate. * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain. * @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.Declare */ public long saveAnnotationDeclare( String fqdi, String formalname, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException; /** * Annotates a transformation call with a tuple. * * @param fqdi is the FQDI to annotate * @param index is the number of the call to annotate. * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain. * @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.Call */ public long saveAnnotationCall( String fqdi, int index, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException; /** * Annotates a logical filename with a tuple. * * @param filename is the logical filename to annotate * @param annotation is the value to place * @param overwrite is a predicate on replace or maintain. * @return the insertion id, or -1, if the database was untouched * @see org.griphyn.vdl.classes.LFN */ public long saveAnnotationFilename( String filename, Tuple annotation, boolean overwrite ) throws SQLException, IllegalArgumentException; /** * Retrieves a specific annotation from an annotatable class. This is * an interface method to the various class-specific methods. * * @param primary is the primary object specifier for the class. * According to the type, this is either the FQDI, or the filename. * @param secondary is a helper argument for annotations to calls * and formal arguments, and should be null for all other classes. * For calls, the argument must be packed into {@link java.lang.Integer}. * @param kind defines the kind/class of object to annotate. * @param key is the key to look for. * @return null if not found, otherwise the annotation tuple. * @see #loadAnnotationTransformation( String, String ) * @see #loadAnnotationDerivation( String, String ) * @see #loadAnnotationCall( String, int, String ) * @see #loadAnnotationDeclare( String, String, String ) * @see #loadAnnotationFilename( String, String ) */ public Tuple loadAnnotation( String primary, Object secondary, int kind, String key ) throws SQLException, IllegalArgumentException; /** * Obtains the value to a specific key in an annotated transformation. * * @param fqdi is the FQDI of the transformation * @param key is the key to search for * @return the annotated value, or null if not found.
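 * <p>Hypothetical example, with FQDI and key assumed:
 * <pre>
 *   Tuple t = annotation.loadAnnotationTransformation( "test::align:1.0", "author" );
 *   if ( t == null ) System.err.println( "no such annotation" );
 * </pre>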
* @see org.griphyn.vdl.classes.Transformation */ public Tuple loadAnnotationTransformation( String fqdi, String key ) throws SQLException, IllegalArgumentException; /** * Obtains the value to a specific key in an annotated derivation. * * @param fqdi is the FQDI of the derivation * @param key is the key to search for * @return the annotated value, or null if not found. * @see org.griphyn.vdl.classes.Derivation */ public Tuple loadAnnotationDerivation( String fqdi, String key ) throws SQLException, IllegalArgumentException; /** * Obtains the value to a specific key in an annotated formal argument. * * @param fqdi is the FQDI of the transformation * @param farg is the name of the formal argument * @param key is the key to search for * @return the annotated value, or null if not found * @see org.griphyn.vdl.classes.Declare */ public Tuple loadAnnotationDeclare( String fqdi, String farg, String key ) throws SQLException, IllegalArgumentException; /** * Obtains the value to a specific key for a call statement. * * @param fqdi is the FQDI of the transformation * @param index is the number of the call to annotate. * @param key is the key to search for * @return the annotated value, or null if not found * @see org.griphyn.vdl.classes.Call */ public Tuple loadAnnotationCall( String fqdi, int index, String key ) throws SQLException, IllegalArgumentException; /** * Obtains the value to a specific key in an annotated filename. * * @param filename is the name of the file that was annotated. * @param key is the key to search for * @return the annotated value, or null if not found. * @see org.griphyn.vdl.classes.LFN */ public Tuple loadAnnotationFilename( String filename, String key ) throws SQLException, IllegalArgumentException; /** * Retrieves all annotations from an annotatable class. This is an * interface method to the various class-specific methods. * * @param primary is the primary object specifier for the class. * According to the type, this is either the FQDI, or the filename. * @param secondary is a helper argument for annotations to calls * and formal arguments, and should be null for all other classes. * For calls, the argument must be packed into {@link java.lang.Integer}. * @param kind defines the kind/class of object to annotate. * * @return a list of tuples, which may be empty. * @see #loadAnnotationTransformation( String ) * @see #loadAnnotationDerivation( String ) * @see #loadAnnotationCall( String, int ) * @see #loadAnnotationDeclare( String, String ) * @see #loadAnnotationFilename( String ) */ public java.util.List loadAnnotation( String primary, Object secondary, int kind ) throws SQLException, IllegalArgumentException; /** * Lists all annotations for a transformation. * * @param fqdi is the FQDI of the transformation * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.Transformation */ public java.util.List loadAnnotationTransformation( String fqdi ) throws SQLException, IllegalArgumentException; /** * Lists all annotations for a derivation. * * @param fqdi is the FQDI of the derivation * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.Derivation */ public java.util.List loadAnnotationDerivation( String fqdi ) throws SQLException, IllegalArgumentException; /** * Lists all annotations for a formal argument. * * @param fqdi is the FQDI of the transformation * @param farg is the name of the formal argument * @return a list of tuples, which may be empty.
* @see org.griphyn.vdl.classes.Declare */ public java.util.List loadAnnotationDeclare( String fqdi, String farg ) throws SQLException, IllegalArgumentException; /** * Lists all annotations for a call statement. * * @param fqdi is the FQDI of the transformation * @param index is the number of the call to annotate. * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.Call */ public java.util.List loadAnnotationCall( String fqdi, int index ) throws SQLException, IllegalArgumentException; /** * Lists all annotations for a logical filename. * * @param filename is the logical filename. * @return a list of tuples, which may be empty. * @see org.griphyn.vdl.classes.LFN */ public java.util.List loadAnnotationFilename( String filename ) throws SQLException, IllegalArgumentException; /** * Search for LFNs or Definitions that have certain annotations. * * @param kind defines the kind/class of object annotated. * @param arg is used only for TR ARG and TR CALL. For the former * it is the name of the argument (String), for the latter the position of * the call (Integer). * @param tree stores the query tree to query the annotation * @return a list of LFNs if searching for filenames, otherwise a list of * definitions. * @exception SQLException if something goes wrong with the database. * @see org.griphyn.vdl.annotation.QueryTree */ public abstract java.util.List searchAnnotation( int kind, Object arg, QueryTree tree) throws java.sql.SQLException; /** * Deletes an annotation with the specified key. * * @param primary is the primary object specifier for the class. * According to the type, this is either the FQDI, or the filename. * @param secondary is a helper argument for annotations to calls * and formal arguments, and should be null for all other classes. * For calls, the argument must be packed into {@link java.lang.Integer}. * @param kind defines the kind/class of object to annotate. * @param key is the annotation key. * @return true, if the database was modified, false otherwise. * @exception SQLException if something went wrong during database * access. */ public boolean deleteAnnotation( String primary, Object secondary, int kind, String key ) throws SQLException, IllegalArgumentException; /** * Deletes a specific key in an annotated transformation. * * @param fqdi is the FQDI of the transformation * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Transformation */ public boolean deleteAnnotationTransformation( String fqdi, String key ) throws SQLException, IllegalArgumentException; /** * Deletes a specific key in an annotated derivation. * * @param fqdi is the FQDI of the derivation * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Derivation */ public boolean deleteAnnotationDerivation( String fqdi, String key ) throws SQLException, IllegalArgumentException; /** * Deletes a specific key in an annotated formal argument. * * @param fqdi is the FQDI of the transformation * @param farg is the name of the formal argument * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Declare */ public boolean deleteAnnotationDeclare( String fqdi, String farg, String key ) throws SQLException, IllegalArgumentException; /** * Deletes a specific key for a call statement.
* * @param fqdi is the FQDI of the transformation * @param index is the number of the call to annotate. * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.Call */ public boolean deleteAnnotationCall( String fqdi, int index, String key ) throws SQLException, IllegalArgumentException; /** * Deletes a specific key in an annotated filename. * * @param filename is the name of the file that was annotated. * @param key is the key to search for * @return true, if the database was modified, false otherwise. * @see org.griphyn.vdl.classes.LFN */ public boolean deleteAnnotationFilename( String filename, String key ) throws SQLException, IllegalArgumentException; } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/WorkflowSchema.java0000644000175000017500000002044611757531137025231 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import java.sql.*; import java.util.*; import java.io.*; import java.lang.reflect.*; import org.griphyn.vdl.util.ChimeraProperties; import edu.isi.pegasus.common.util.Separator; import org.griphyn.vdl.workflow.*; import org.griphyn.vdl.util.Logging; /** * This class provides basic functionalities to interact with the * backend database for workflow records. Currently, only searches * that fill the workflow class are implemented. * * @author Jens-S. Vöckler * @author Mike Wilde * @version $Revision: 2079 $ */ public class WorkflowSchema extends DatabaseSchema implements WF { /** * Default constructor for access to the WF set of tables. * * @param dbDriverName is the database driver name */ public WorkflowSchema( String dbDriverName ) throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, SQLException, IOException { // load the driver from the properties super( dbDriverName, PROPERTY_PREFIX ); Logging.instance().log( "dbschema", 3, "done with parent schema c'tor" ); // Note: Does not rely on optional JDBC3 features this.m_dbdriver.insertPreparedStatement( "work.select.all", "SELECT * FROM wf_work" ); this.m_dbdriver.insertPreparedStatement( "work.select.mtime", "SELECT * FROM wf_work WHERE mtime >= ?" ); this.m_dbdriver.insertPreparedStatement( "work.select.sk", "SELECT * FROM wf_work WHERE basedir=? AND vogroup=? " + "AND workflow=? AND run=?" ); this.m_dbdriver.insertPreparedStatement( "job.select.all", "SELECT * FROM wf_jobstate" ); this.m_dbdriver.insertPreparedStatement( "job.select.mtime", "SELECT * FROM wf_jobstate WHERE wfid IN " + "( SELECT id FROM wf_work WHERE mtime >= ? )" ); this.m_dbdriver.insertPreparedStatement( "job.select.sk", "SELECT * FROM wf_jobstate WHERE wfid=? AND jobid=?" ); } /** * Converts a regular datum into an SQL timestamp. * @param date is a regular Java date * @return a SQL timestamp obtained from the Date. 
*/ protected java.sql.Timestamp toStamp( java.util.Date date ) { return new java.sql.Timestamp( date.getTime() ); } /** * Converts a SQL timestamp into a regular datum. * @param date is the SQL timestamp from the database * @return a regular Java date */ protected java.util.Date fromStamp( java.sql.Timestamp date ) { return new java.util.Date( date.getTime() ); } /** * Obtains all jobs that belong to a particular workflow. * * @param wfid is the workflow identifier for jobs. * @return a list of all jobs */ private java.util.List getAllJobs( long wfid ) throws SQLException { java.util.List result = new ArrayList(); Logging.instance().log("xaction", 1, "START load jobs for work" ); PreparedStatement ps = m_dbdriver.getPreparedStatement( "job.select.work" ); if ( m_dbdriver.preferString() ) ps.setString( 1, Long.toString(wfid) ); else ps.setLong( 1, wfid ); ResultSet rs = ps.executeQuery(); Logging.instance().log("xaction", 1, "INTER load jobs for work" ); while ( rs.next() ) { JobStateEntry j = new JobStateEntry( wfid, rs.getString("id") ); j.setState( rs.getString("state") ); j.setModificationTime( fromStamp(rs.getTimestamp("mtime")) ); j.setSite( rs.getString("site") ); result.add(j); } rs.close(); Logging.instance().log("xaction", 1, "FINAL load jobs for work" ); return result; } /** * Converts the output of a result set into a workflow. * * @param rs is the result set of a query, which must be valid. * @param withJobs if true, also add the jobs, if false, no jobs. * @return a workflow instance created from the result set. */ protected WorkEntry convertResultSet( ResultSet rs, boolean withJobs ) throws SQLException { WorkEntry result = new WorkEntry( rs.getLong("id"), rs.getString("basedir"), rs.getString("vogroup"), rs.getString("workflow"), rs.getString("run") ); result.setCreator( rs.getString("creator") ); result.setCreationTime( fromStamp(rs.getTimestamp("ctime")) ); result.setState( rs.getInt("state") ); result.setModificationTime( fromStamp(rs.getTimestamp("mtime")) ); if ( withJobs ) result.setJob( getAllJobs( rs.getLong("id") ) ); return result; } /** * Loads a single workflow from the backend database into a Java * object. The identification is based on the secondary key quadruple. * * @param basedir is the base directory * @param vogroup is the VO group identifier * @param label is the workflow label * @param run is the workflow run directory * @return the Workflow that was matched by the id, which may be null */ public WorkEntry getWorkflow( String basedir, String vogroup, String label, String run ) throws SQLException { WorkEntry result = null; Logging.instance().log("xaction", 1, "START load work sk" ); PreparedStatement ps = m_dbdriver.getPreparedStatement( "work.select.sk" ); int i = 1; ps.setString( i++, basedir ); ps.setString( i++, vogroup ); ps.setString( i++, label ); ps.setString( i++, run ); ResultSet rs = ps.executeQuery(); Logging.instance().log("xaction", 1, "INTER load work sk" ); if ( rs.next() ) { result = convertResultSet(rs,true); } else { Logging.instance().log( "wf", 0, "No workflows found" ); } rs.close(); Logging.instance().log("xaction", 1, "FINAL load work sk" ); return result; } /** * Loads all workflows that are fresh enough and returns a map of * workflows matching. The list is indexed by the primary key of * the WF table, which is the unique workflow id. * * @param mtime is the oldest last modification time. Use * null for all. * @return a map of workflows, indexed by their workflow id.
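 * <p>For example, to fetch only workflows touched within the last hour
 * (illustrative only):
 * <pre>
 *   java.util.Date cutoff = new java.util.Date( System.currentTimeMillis() - 3600000L );
 *   java.util.Map recent = schema.getWorkflows( cutoff );
 * </pre>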
*/ public java.util.Map getWorkflows( java.util.Date mtime ) throws SQLException { java.util.Map result = new TreeMap(); Logging.instance().log("xaction", 1, "START load all work" ); PreparedStatement ps = null; if ( mtime == null ) { ps = m_dbdriver.getPreparedStatement( "work.select.all" ); } else { ps = m_dbdriver.getPreparedStatement( "work.select.mtime" ); ps.setTimestamp( 1, toStamp(mtime) ); } ResultSet rs = ps.executeQuery(); Logging.instance().log("xaction", 1, "INTER load all work" ); while ( rs.next() ) { // insert workflows without job state result.put( new Long(rs.getLong("id")), convertResultSet(rs,false) ); } rs.close(); if ( result.size() > 0 ) { // now add job state Logging.instance().log( "xaction", 1, "START load all jobs" ); if ( mtime == null ) { ps = m_dbdriver.getPreparedStatement( "job.select.all" ); } else { ps = m_dbdriver.getPreparedStatement( "job.select.mtime" ); ps.setTimestamp( 1, toStamp(mtime) ); } rs = ps.executeQuery(); Logging.instance().log( "xaction", 1, "INTER load all jobs" ); while ( rs.next() ) { Long key = new Long( rs.getLong("wfid") ); JobStateEntry job = new JobStateEntry( rs.getLong("wfid"), rs.getString("jobid") ); job.setState( rs.getString("state") ); job.setModificationTime( fromStamp(rs.getTimestamp("mtime")) ); job.setSite( rs.getString("site") ); if ( result.containsKey(key) ) { ((WorkEntry) (result.get(key))).addJob(job); } } Logging.instance().log( "xaction", 1, "FINAL load all jobs" ); } else { Logging.instance().log( "wf", 0, "No workflows found" ); } Logging.instance().log("xaction", 1, "FINAL load all work" ); return result; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/XDC.java0000644000175000017500000000256111757531137022712 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import java.sql.*; import org.griphyn.vdl.classes.Definition; /** * This common schema interface defines advanced search interfaces for * VDC. The advanced methods required permit wildcard searches, partial * matches, and candidate list compilations that are not part of the * simpler {@link VDC} interface. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see org.griphyn.vdl.dbschema.DatabaseSchema * @see org.griphyn.vdl.dbdriver */ public interface XDC extends Advanced, Annotation { public abstract java.util.List searchDefinition( String xpath) throws SQLException; public abstract java.util.List searchElements( String xpath) throws SQLException; } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/PTC.java0000644000175000017500000000435111757531137022721 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import java.sql.SQLException; import edu.isi.pegasus.planner.invocation.InvocationRecord; import java.net.InetAddress; import java.util.Date; /** * This interface defines a common base for all database schemas that * supports the handling of the provenance tracking catalog. It exists * primarily for grouping purposes.

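 * <p>A hedged usage sketch of the probe-then-insert pattern these two methods
 * suggest; all variable names are assumed:
 * <pre>
 *   long id = ptc.getInvocationID( start, host, pid );
 *   if ( id == -1 ) ptc.saveInvocation( ivr );
 * </pre>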
* * For the moment, we are happy to be able to store things inside. The * rest, in the form of more required methods, comes later. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public interface PTC extends Catalog { /** * Names the property key prefix employed for schemas dealing with the PTC. */ public static final String PROPERTY_PREFIX = "pegasus.catalog.provenance.db.schema"; /** * Checks the existence of an invocation record in the database. * The information is based on the (start,host,pid) tuple, although * with private networks, cases may arise that have this tuple * identical, yet are different. * * @param start is the start time of the grid launcher * @param host is the address of the host it ran upon * @param pid is the process id of the grid launcher itself. * @return the id of the existing record, or -1 */ public long getInvocationID( Date start, InetAddress host, int pid ) throws SQLException; /** * Inserts an invocation record into the database. * * @param ivr is the invocation record to store. * @return true, if insertion was successful, false otherwise. */ public boolean saveInvocation( InvocationRecord ivr ) throws SQLException; } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/NXDInvSchema.java0000644000175000017500000001460211757531137024522 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import edu.isi.pegasus.planner.invocation.InvocationRecord; import java.sql.*; import java.util.List; import java.util.ArrayList; import java.util.Properties; import java.io.*; import java.lang.reflect.*; import java.net.InetAddress; import org.xmldb.api.base.*; import org.xmldb.api.modules.*; import org.xmldb.api.*; import javax.xml.transform.OutputKeys; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.DOMException; import org.xml.sax.SAXException; import org.xml.sax.SAXParseException; import org.xml.sax.InputSource; import org.griphyn.vdl.util.ChimeraProperties; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.parser.*; import edu.isi.pegasus.common.util.Separator; /** * This class provides basic functionalities to interact with the * backend database for invocation records, such as insertion, deletion, * and search. * * @author Jens-S. 
Vöckler * @author Yong Zhao * @version $Revision: 2587 $ */ public class NXDInvSchema extends DatabaseSchema implements PTC { private DocumentBuilderFactory m_factory; private DocumentBuilder m_builder; protected Collection m_db; protected Collection m_ptc; protected CollectionManagementService m_dbColService; protected CollectionManagementService m_ptcColService; protected XPathQueryService m_dbQrySvc; protected XPathQueryService m_ptcQrySvc; /** * Default constructor for the provenance tracking. * * @param dbDriverName is the database driver name */ public NXDInvSchema( String dbDriverName ) throws ClassNotFoundException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, SQLException, IOException, ParserConfigurationException { // load the driver from the properties super(); // call minimalistic c'tor, no driver loading! ChimeraProperties props = ChimeraProperties.instance(); m_dbschemaprops = props.getDatabaseSchemaProperties( PROPERTY_PREFIX ); // extract those properties specific to the database driver. // use default settings. String driverPrefix = null; String driverName = props.getDatabaseDriverName(driverPrefix); Properties driverprops = props.getDatabaseDriverProperties(driverPrefix); String url = props.getDatabaseURL(driverPrefix); try { m_factory = DocumentBuilderFactory.newInstance(); m_builder = m_factory.newDocumentBuilder(); Class cl = Class.forName(driverName); Database database = (Database) cl.newInstance(); DatabaseManager.registerDatabase(database); // get the collection m_db = DatabaseManager.getCollection(url + "/db"); m_dbColService = (CollectionManagementService) m_db.getService("CollectionManagementService", "1.0"); m_ptc = m_db.getChildCollection("ptc"); if(m_ptc == null) { // collection does not exist, create m_ptc = m_dbColService.createCollection("ptc"); } m_ptc.setProperty(OutputKeys.INDENT, "no"); m_ptcColService = (CollectionManagementService) m_ptc.getService("CollectionManagementService", "1.0"); m_dbQrySvc = (XPathQueryService) m_db.getService("XPathQueryService", "1.0"); m_ptcQrySvc = (XPathQueryService) m_ptc.getService("XPathQueryService", "1.0"); m_dbQrySvc.setProperty("indent", "no"); m_ptcQrySvc.setProperty("indent", "no"); } catch (XMLDBException e) { throw new SQLException (e.getMessage()); } } /** * Checks the existence of an invocation record in the database. * The information is based on the (start,host,pid) tuple, although * with private networks, cases may arise that have this tuple * identical, yet are different. * * @param start is the start time of the grid launcher * @param host is the address of the host it ran upon * @param pid is the process id of the grid launcher itself. * @return the id of the existing record, or -1 */ public long getInvocationID( java.util.Date start, InetAddress host, int pid ) throws SQLException { long result = -1; Logging.instance().log("xaction", 1, "START select invocation id" ); String xquery = "/invocation[@start='" + start + "']"; xquery += "[@host='" + host.getHostAddress() + "']"; xquery += "[@pid=" + pid + "]"; try { Logging.instance().log( "nxd", 2, xquery ); ResourceSet rs = m_dbQrySvc.query(xquery); ResourceIterator i = rs.getIterator(); if (i.hasMoreResources()) { result = 1; } else { result = -1; } } catch (XMLDBException e) { throw new SQLException (e.getMessage()); } Logging.instance().log("xaction", 1, "FINAL select invocation id" ); return result; } /** * Inserts an invocation record into the database. 
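 * <p>The record is serialized to XML and stored as a fresh XMLResource in the
 * ptc collection (see the method body below). A caller sketch, with the
 * record variable assumed:
 * <pre>
 *   boolean stored = ptc.saveInvocation( ivr );
 * </pre>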
* * @param ivr is the invocation record to store. * @return true, if insertion was successful, false otherwise. */ public boolean saveInvocation( InvocationRecord ivr ) throws SQLException { try { StringWriter sw = new StringWriter(); ivr.toXML(sw, "", null); // create new XMLResource; an id will be assigned to the new resource XMLResource document = (XMLResource)m_ptc.createResource(null, "XMLResource"); document.setContent(sw.toString()); m_ptc.storeResource(document); return true; } catch (Exception e) { throw new SQLException (e.getMessage()); } } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/WF.java0000644000175000017500000000426211757531137022610 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.dbschema; import java.sql.*; import org.griphyn.vdl.workflow.*; /** * This common schema interface defines the schemas in which the * abstraction layers access the WF database. This layer is independent * of the implementing database, and achieves this by going through the database * driver class API. * * @author Jens-S. Vöckler * @author Mike Wilde * @version $Revision: 50 $ * @see org.griphyn.vdl.dbschema.DatabaseSchema * @see org.griphyn.vdl.dbdriver */ public interface WF extends Catalog { /** * Names the property key prefix employed for schemas dealing with the WF database. */ public static final String PROPERTY_PREFIX = "vds.db.wf.schema"; /** * Determines the primary key of a workflow from the provided tuple * of secondary keys, and instantiates the corresponding workflow. * * @param basedir is the base directory * @param vogroup is the VO group identifier * @param label is the workflow label * @param run is the workflow run directory * @return the corresponding workflow entry, or null if not found. */ public abstract WorkEntry getWorkflow( String basedir, String vogroup, String label, String run ) throws SQLException; /** * Returns a set of all workflows that are younger than a cut-off time. * The result may be empty, if no matching workflows exist yet. * * @param mtime is the oldest permissible last modification time * @return a map of workflows, indexed by their primary key */ public abstract java.util.Map getWorkflows( java.util.Date mtime ) throws SQLException; } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/dbschema/package.html0000644000175000017500000000527011757531137023712 0ustar ryngerynge Provides the database schema encapsulation.

Package Specification

The classes in this package provide a database-independent abstraction of database access for a variety of catalogs.

To provide an alternative implementation for an existing catalog, you need to:

  1. extend the {@link org.griphyn.vdl.dbschema.DatabaseSchema} class.
  2. implement the appropriate {@link org.griphyn.vdl.dbschema.Catalog} child interface.
To provide an additional catalog implementation, you need to extend the {@link org.griphyn.vdl.dbschema.Catalog} interface, and then provide an implementation of it as in the steps above; a minimal sketch follows below.
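The sketch below shows both halves together. The names NewCatalog and NewSchema, the property prefix, and the lookup() method are hypothetical, chosen only for illustration; error handling is elided:

    // hypothetical child interface, mirroring PTC and WF
    public interface NewCatalog extends Catalog {
      // property key prefix, following the existing naming pattern
      public static final String PROPERTY_PREFIX = "pegasus.catalog.new.db.schema";

      // one example access method; real catalogs declare more
      public long lookup( String key ) throws java.sql.SQLException;
    }

    // hypothetical implementation, extending the common base class
    public class NewSchema extends DatabaseSchema implements NewCatalog {
      public NewSchema( String dbDriverName ) throws Exception {
        super(); // minimalistic c'tor, as NXDInvSchema does
      }

      public long lookup( String key ) throws java.sql.SQLException {
        return -1; // stub: nothing found
      }
    }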

Currently, several concrete catalogs are implemented in Chimera:

  • The Provenance Tracking Catalog {@link org.griphyn.vdl.dbschema.PTC}
  • The Virtual Data Catalog {@link org.griphyn.vdl.dbschema.VDC}
  • An extension of the VDC {@link org.griphyn.vdl.dbschema.Advanced}
  • Joining attributes to Virtual Data objects {@link org.griphyn.vdl.dbschema.Annotation}

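Which concrete schema class gets loaded is decided at run time from properties; for instance, ChimeraProperties resolves an unqualified value of pegasus.db.vdc.schema (default: SingleFileSchema) against this package. A minimal sketch of the lookup and connection, following the pattern of the WorkflowTest example elsewhere in this source tree (error handling elided):

    // e.g. "SingleFileSchema" resolves to org.griphyn.vdl.dbschema.SingleFileSchema
    String schemaName = ChimeraProperties.instance().getVDCSchemaName();
    DatabaseSchema dbschema = new Connect().connectDatabase( schemaName );
    // ... use the catalog interfaces implemented by dbschema ...
    dbschema.close();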
Related Documentation

For overviews, tutorials, examples, guides, and tool documentation, please see: @see org.griphyn.vdl.dbdriver @see org.griphyn.vdl.annotation pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/overview.html0000644000175000017500000000130711757531137022414 0ustar ryngerynge Provides Chimera.

Package Specification

Related Documentation

For overviews, tutorials, examples, guides, and tool documentation, please see: pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/workflow/0000755000175000017500000000000011757531667021541 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/workflow/WorkflowTest.java0000644000175000017500000000316411757531137025052 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.workflow; import org.griphyn.vdl.directive.*; import org.griphyn.vdl.util.*; import org.griphyn.vdl.dbschema.*; import java.sql.SQLException; import java.io.*; import java.util.*; /** * This class is used to show-case some elementary WF table stuff. * * @author Jens-S. Vöckler * @version $Revision: 50 $ */ public class WorkflowTest // extends Toolkit { public static void asdf( DatabaseSchema dbschema ) throws SQLException { WF workflow = (WF) dbschema; Map x = workflow.getWorkflows(null); for ( Iterator i=x.values().iterator(); i.hasNext(); ) { WorkEntry w = (WorkEntry) i.next(); System.out.println( w.toString() ); } } public static void main( String args[] ) throws Exception { // Connect the database. String schemaName = ChimeraProperties.instance().getWFSchemaName(); Connect connect = new Connect(); DatabaseSchema dbschema = connect.connectDatabase(schemaName); asdf(dbschema); dbschema.close(); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/workflow/Workflow.java0000644000175000017500000000176411757531137024216 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.workflow; import org.griphyn.vdl.Chimera; /** * This abstract class defines a common base for all workflow related * Java objects. This class is empty. It exists for grouping purposes. * * @author Jens-S. Vöckler * @author Mike Wilde * @version $Revision: 50 $ */ public interface Workflow { // empty class, existence just for grouping purposes } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/workflow/JobStateEntry.java0000644000175000017500000001301711757531137025133 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. 
* * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.workflow; import java.util.*; import edu.isi.pegasus.common.util.Currently; /** * This class is the container for a job state record.

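 * A small construction sketch (the workflow and job identifiers below are
 * illustrative only):
 * <pre>
 *   JobStateEntry job = new JobStateEntry( 1, "ID000001" );
 *   job.setState( "EXECUTE" );    // side effect: stamps the modification time
 *   String line = job.toString(); // wfid|id|state|mtime|site
 * </pre>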
* * When constructing a job from piece-meal, please note that setting the * job's state will automatically set the job's last modification time * to the current time. If this is not desired, you must set the * modification time after setting the state. However, none of the * constructors will set the modification time to the current time (yet). * * @author Jens-S. Vöckler * @author Mike Wilde * @version $Revision: 2079 $ */ public class JobStateEntry implements Workflow, Cloneable { /** * primary key: which workflow do we belong to. If -1 then unknown. */ private long m_wfid = -1; /** * primary key: unique job identifier within the workflow */ private String m_id; /** * the Condor state of the job. */ private String m_state; /** * the last modification time of the job state. */ private Date m_mtime; /** * the resource where the job ran, may be unspecified. */ private String m_site = null; /** * Creates and returns a copy of this object. * @return a new instance. */ public Object clone() { JobStateEntry result = new JobStateEntry( getWorkflowID(), getID() ); result.setState( getState() ); result.setModificationTime( getModificationTime() ); result.setSite( getSite() ); return result; } /** * Default constructor. */ public JobStateEntry() { // empty } /** * Constructs an otherwise empty instance. * * @param wfid is the workflow primary key */ public JobStateEntry( long wfid ) { m_wfid = wfid; } /** * Constructs an otherwise empty instance. * * @param wfid is the workflow primary key * @param jobid is the job identifier within the workflow */ public JobStateEntry( long wfid, String jobid ) { m_wfid = wfid; m_id = jobid; } /** * Constructs an otherwise empty instance. It will set the workflow identifier * to -1 to indicate no connection. * * @param jobid is the job identifier within the workflow */ public JobStateEntry( String jobid ) { m_wfid = -1; m_id = jobid; } /** * Accessor * * @see #setWorkflowID(long) * @return this job's workflow identifier. */ public long getWorkflowID() { return this.m_wfid; } /** * Accessor. * * @param wfid is the new workflow id as a positive number. * @see #getWorkflowID() */ public void setWorkflowID( long wfid ) { if ( wfid < 0 ) throw new RuntimeException("negative workflow id"); this.m_wfid = wfid; } /** * Accessor * * @see #setID(String) * @return the job identifier */ public String getID() { return this.m_id; } /** * Accessor. * * @param id is the new job id, must not be null. * @see #getID() */ public void setID( String id ) { if ( id == null ) throw new NullPointerException(); this.m_id = id; } /** * Accessor * * @see #setState(String) * @return the Condor job state string with some extensions. */ public String getState() { return this.m_state; } /** * Accessor. As a side effect, setting the job state will * set the modification time to the current time. * * @param state is the new Condor job state, must not be * null. * @see #getState() */ public void setState( String state ) { if ( state == null ) throw new NullPointerException(); this.m_state = state; this.m_mtime = new Date(); } /** * Accessor * * @see #setModificationTime(Date) * @return the last modification time of any of this job's state. */ public Date getModificationTime() { return this.m_mtime; } /** * Accessor. * * @param mtime is the new last modification time of this job, must * not be null. 
* @see #getModificationTime() */ public void setModificationTime( Date mtime ) { if ( mtime == null ) throw new NullPointerException(); this.m_mtime = mtime; } /** * Accessor * * @see #setSite(String) */ public String getSite() { return this.m_site; } /** * Accessor. * * @param site is the new remote site, may be null. * @see #getSite() */ public void setSite( String site ) { this.m_site = site; } /** * Constructs a simple line of all internal data points. Adjust * to your requirements - this is an example, only. * * @return a line containing all internal data. */ public String toString() { StringBuffer result = new StringBuffer(80); result.append( Long.toString(m_wfid) ).append('|'); result.append( m_id ).append('|'); result.append( m_state ).append('|'); result.append( Currently.iso8601(false,true,false,m_mtime) ).append('|'); result.append( m_site == null ? "NULL" : m_site ); return result.toString(); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/workflow/WorkEntry.java0000644000175000017500000002332111757531137024341 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.workflow; import edu.isi.pegasus.common.util.Currently; import java.util.*; /** * This class is the container for a Workflow record. The record itself * contains information about the job or jobs that ran. * * @author Jens-S. Vöckler * @author Mike Wilde * @version $Revision: 2079 $ */ public class WorkEntry implements Workflow, Cloneable { /** * primary key: unique workflow identifier. */ private long m_id = -1; /** * secondary key, part 1: base directory */ private String m_basedir; /** * secondary key, part 2: VO group */ private String m_vogroup; /** * secondary key, part 3: workflow label */ private String m_label; /** * secondary key, part 4: run directory */ private String m_run; /** * workflow creator as an account name. */ private String m_creator; /** * workflow creation time as time stamp. */ private Date m_ctime; /** * workflow state as integer to represent future, current or past * workflows. */ private int m_state; /** * last modification time of workflow state as time stamp. */ private Date m_mtime; /** * list of jobs associated with the workflow. */ private Map m_jobMap; /** * Creates and returns a copy of this object. * @return a new instance with a deep copy of everything. */ public Object clone() { WorkEntry result = new WorkEntry( getID(), getBaseDirectory(), getVOGroup(), getWorkflowLabel(), getRunDirectory() ); result.setCreator( getCreator() ); result.setCreationTime( getCreationTime() ); result.setState( getState() ); result.setModificationTime( getModificationTime() ); // now the tricky portion -- deep clone jobs for ( Iterator i=iterateJob(); i.hasNext(); ) { result.addJob( (JobStateEntry) ((JobStateEntry) i.next()).clone() ); } return result; } /** * Default constructor, should not be used. */ private WorkEntry() { m_jobMap = new TreeMap(); } /** * Constructs another empty class. 
* * @param id is the workflow primary key */ public WorkEntry( long id ) { m_jobMap = new TreeMap(); m_id = id; } /** * Constructs another empty class. * * @param id is the workflow primary key * @param basedir is the base directory * @param vogroup is the VO group identifier * @param label is the workflow label * @param run is the workflow run directory */ public WorkEntry( long id, String basedir, String vogroup, String label, String run ) { m_jobMap = new TreeMap(); m_id = id; m_basedir = basedir; m_vogroup = vogroup; m_label = label; m_run = run; } /** * Accessor * * @see #setID(long) */ public long getID() { return this.m_id; } /** * Accessor. * * @param id * @see #getID() */ public void setID( long id ) { this.m_id = id; } /** * Accessor * * @see #setBaseDirectory(String) */ public String getBaseDirectory() { return this.m_basedir; } /** * Accessor. * * @param basedir * @see #getBaseDirectory() */ public void setBaseDirectory( String basedir ) { this.m_basedir = basedir; } /** * Accessor * * @see #setVOGroup(String) */ public String getVOGroup() { return this.m_vogroup; } /** * Accessor. * * @param vogroup * @see #getVOGroup() */ public void setVOGroup( String vogroup ) { this.m_vogroup = vogroup; } /** * Accessor * * @see #setWorkflowLabel(String) */ public String getWorkflowLabel() { return this.m_label; } /** * Accessor. * * @param label * @see #getWorkflowLabel() */ public void setWorkflowLabel( String label ) { this.m_label = label; } /** * Accessor * * @see #setRunDirectory(String) */ public String getRunDirectory() { return this.m_run; } /** * Accessor. * * @param run * @see #getRunDirectory() */ public void setRunDirectory( String run ) { this.m_run = run; } /** * Accessor * * @see #setCreator(String) */ public String getCreator() { return this.m_creator; } /** * Accessor. * * @param creator * @see #getCreator() */ public void setCreator( String creator ) { this.m_creator = creator; } /** * Accessor * * @see #setCreationTime(Date) */ public Date getCreationTime() { return this.m_ctime; } /** * Accessor. * * @param ctime * @see #getCreationTime() */ public void setCreationTime( Date ctime ) { this.m_ctime = ctime; } /** * Accessor * * @see #setState(int) */ public int getState() { return this.m_state; } /** * Accessor. * * @param state * @see #getState() */ public void setState( int state ) { this.m_state = state; } /** * Accessor * * @see #setModificationTime(Date) */ public Date getModificationTime() { return this.m_mtime; } /** * Accessor. * * @param mtime * @see #getModificationTime() */ public void setModificationTime( Date mtime ) { this.m_mtime = mtime; } /** * Accessor: Adds a job to the bag of jobs. * * @param job is the job to add. * @see JobStateEntry */ public void addJob( JobStateEntry job ) { this.m_jobMap.put( job.getID(), job ); } /** * Accessor: Obtains an job by the job id. * * @param id is the job identifier in the workflow. * @return a reference to the job of this name. * @see JobStateEntry */ public JobStateEntry getJob( String id ) { return (JobStateEntry) this.m_jobMap.get(id); } /** * Accessor: Counts the number of jobs in this workflow. * * @return the number of jobs in the internal bag. */ public int getJobCount() { return this.m_jobMap.size(); } /** * Accessor: A list of all jobs known to this workflow. This list is * read-only. * * @return an array with JobStateEntry elements. 
* @see JobStateEntry */ public java.util.List getJobList() { return Collections.unmodifiableList( new ArrayList(this.m_jobMap.values())); } /** * Accessor: Obtains all jobs references by their id. The map is a * read-only map to avoid modifications outside the API. * * @return a map with all jobs known to this workflow * @see JobStateEntry */ public java.util.Map getJobMap() { return Collections.unmodifiableMap( this.m_jobMap ); } /** * Accessor: Provides an iterator over the jobs of this workflow. * @return an iterator to walk the JobStateEntry list with. * @see JobStateEntry */ public Iterator iterateJob() { return this.m_jobMap.values().iterator(); } /** * Accessor: Removes all jobs. Effectively empties the bag. */ public void removeAllJob() { this.m_jobMap.clear(); } /** * Accessor: Removes a specific job. * * @param name is the job identifier. * @return the object that was removed, or null, if not found. * @see JobStateEntry */ public JobStateEntry removeJob( String name ) { return (JobStateEntry) this.m_jobMap.remove(name); } /** * Accessor: Adds a new or overwrites an existing actual argument. * * @param job is a new complete job to enter. * @see JobStateEntry */ public void setJob( JobStateEntry job ) { this.m_jobMap.put( job.getID(), job ); } /** * Accessor: Replaces all jobs with the new collection of jobs. Note * that a collection can be anything in a list or set. * * @param jobs is a collection of jobs. * @see JobStateEntry */ public void setJob( Collection jobs ) { this.m_jobMap.clear(); for ( Iterator i=jobs.iterator(); i.hasNext(); ) { JobStateEntry j = (JobStateEntry) i.next(); this.m_jobMap.put(j.getID(),j); } } /** * Accessor: Replaces all job mappings with new job mappings. * * @param jobs is a map with job id to job mappings. * @see JobStateEntry */ public void setJob( Map jobs ) { this.m_jobMap.clear(); this.m_jobMap.putAll(jobs); } /** * Constructs a sensible line of all internal data points * @return a line containing all internal data. */ public String toString() { String newline = System.getProperty("line.separator","\r\n"); StringBuffer result = new StringBuffer( 80 * (1+m_jobMap.size()) ); // the workflow stuff result.append( getID() ).append('|'); result.append( getBaseDirectory() ).append('|'); result.append( getVOGroup() ).append('|'); result.append( getWorkflowLabel() ).append('|'); result.append( getRunDirectory() ).append('|'); result.append( getCreator() ).append('|');; result.append( Currently.iso8601(false,false,false,getCreationTime()) ); result.append('|'); result.append( Integer.toString(getState()) ).append('|');; result.append( Currently.iso8601(false,false,false,getModificationTime()) ); result.append(newline); // add all jobs with space ahead for ( Iterator i=iterateJob(); i.hasNext(); ) { JobStateEntry j = (JobStateEntry) i.next(); result.append(" ").append(j).append(newline); } return result.toString(); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/invocation/0000755000175000017500000000000011757531667022040 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/0000755000175000017500000000000011757531667020644 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/ChimeraProperties.java0000644000175000017500000003374511757531137025140 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. 
This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.util; import java.io.IOException; import java.io.File; import java.util.*; import edu.isi.pegasus.common.util.Currently; import edu.isi.pegasus.common.util.CommonProperties; import org.griphyn.vdl.util.Logging; /** * A Central Properties class that keeps track of all the properties * used by Chimera. All other classes access the methods in this class * to get the value of the property. It access the CommonProperties class * to read the property file. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 4507 $ * * @see org.griphyn.common.util.CommonProperties */ public class ChimeraProperties { /** * Default values for schema locations. */ public static final String VDL_SCHEMA_LOCATION = "http://www.griphyn.org/chimera/vdl-1.24.xsd"; public static final String DAX_SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/dax-4.0.xsd"; public static final String IVR_SCHEMA_LOCATION = "http://pegasus.isi.edu/schema/iv-2.1.xsd"; public static final String DB_ALL_PREFIX = "pegasus.catalog.*.db"; public static final String DBDRIVER_ALL_PREFIX = "pegasus.catalog.*.db.driver"; /** * Implements the Singleton access. */ private static ChimeraProperties m_instance = null; /** * The value of the PEGASUS_HOME environment variable. */ private String m_home; /** * The object holding all the properties pertaining * to the VDS system. */ private CommonProperties m_props; /** * To get a reference to the the object. */ public static ChimeraProperties instance() throws IOException, MissingResourceException { if (m_instance == null) { m_instance = new ChimeraProperties(); } return m_instance; } /** * Constructor that is called only once, when creating the * Singleton instance. */ private ChimeraProperties() throws IOException, MissingResourceException { m_props = getVDSPropertiesInstance(); m_home = m_props.getBinDir() + "/.."; } /** * Gets the handle to the property file. */ private CommonProperties getVDSPropertiesInstance() throws IOException, MissingResourceException { return CommonProperties.instance(); } /** * Set up logging */ public void setupLogging(Logging logger) { for ( Enumeration e = m_props.propertyNames(); e.hasMoreElements(); ) { String key = (String) e.nextElement(); if ( key.equals("pegasus.timestamp.format") ) { Currently c = new Currently( this.m_props.getProperty(key) ); logger.setDateFormat( c ); } else if ( key.startsWith("pegasus.log.") ) { String subkey = key.substring(8); logger.log( "default", 2, "found \"" + key + "\" -> " + subkey ); logger.register( subkey, this.m_props.getProperty(key) ); } else if ( key.startsWith( "pegasus.verbose" ) ) { int verbose = Integer.parseInt(this.m_props.getProperty(key)); logger.log( "default", 2, "verbosity mode = " + verbose ); logger.setVerbose(verbose); } } } /** * Accessor to $PEGASUS_HOME/etc. The files in this directory have a low * change frequency, are effectively read-only, they reside on a * per-machine basis, and they are valid usually for a single user. * * @return the "etc" directory of the VDS runtime system. 
*/ public File getSysConfDir() { return m_props.getSysConfDir(); } /** * Accessor: Obtains the root directory of the VDS/Chimera runtime * system. * * @return the root directory of the VDS runtime system, as initially * set from the system properties. */ public String getVDSHome() { return m_home; } /** * Accessor to $PEGASUS_HOME/share. The files in this directory have a low * change frequency, are effectively read-only, can be shared via a * networked FS, and they are valid for multiple users. * * @return the "share" directory of the VDS runtime system. */ public File getDataDir() { return m_props.getSharedStateDir(); } /** * Get the fully qualified class name of the VDC-implementing * database schema. If no properties are configured, it returns * the file-based VDC-schema implementation. * * @return the fully-qualified name of the class which implements * the VDC according to properties. * @see org.griphyn.vdl.dbschema.SingleFileSchema */ public String getVDCSchemaName() { // load the default schema name - default is to use the file based // schema. String schemaName = m_props.getProperty( "pegasus.db.vdc.schema", "SingleFileSchema" ); if ( schemaName.indexOf('.') == -1 ) schemaName = "org.griphyn.vdl.dbschema." + schemaName; // always returns something return schemaName; } /** * Obtains the fully qualified class name of the PTC-implementing * database schema. * * @return the fully-qualified name of the class which implements * the PTC according to properties, or null, if no * such class exists. */ public String getPTCSchemaName() { // load the default schema name - default is to use the file based // schema. //this should not have a default value because if this property is not set // the invocation records should not be populated to DB. String schemaName = m_props.getProperty( "pegasus.catalog.provenance"); if ( schemaName != null && schemaName.indexOf('.') == -1 ) schemaName = "org.griphyn.vdl.dbschema." + schemaName; // may return null return schemaName; } /** * Obtains the fully qualified class name of the WF-implementing * database schema. * * @return the fully-qualified name of the class which implements the * WF according to properties, or null, if no such class * exists. */ public String getWFSchemaName() { // load the default schema name String schemaName = m_props.getProperty( "pegasus.db.wf.schema" ); if ( schemaName != null && schemaName.indexOf('.') == -1 ) schemaName = "org.griphyn.vdl.dbschema." + schemaName; // may return null return schemaName; } /** * Gets the location the VDLx XML schema from properties, if available. * Please note that the schema location URL in the instance document * is only a hint, and may be overriden by the findings of this method. * * @return a location pointing to a definition document of the XML * schema that can read VDLx. Result may be null, if such a document * is unknown or unspecified. * @see org.griphyn.vdl.parser.VDLxParser#VDLxParser( String ) */ public String getVDLSchemaLocation() { // treat URI as File, yes, I know - I need the basename File uri = new File( VDL_SCHEMA_LOCATION ); File vdlx = // create a pointer to the default local position new File( this.m_props.getSysConfDir(), uri.getName() ); // Nota bene: pegasus.schema.vdl may be a networked URI... return m_props.getProperty( "pegasus.schema.vdl", vdlx.getAbsolutePath() ); } /** * Gets the location of the DAX XML schema from properties, if available. 
* Please note that the schema location URL in the instance document * is only a hint, and may be overridden by the findings of this method. * * @return a location pointing to a definition document of the XML * schema that can read DAX. Result may be null, if such a document * is unknown or unspecified. * @see org.griphyn.vdl.parser.DAXParser#DAXParser( String ) */ public String getDAXSchemaLocation() { // treat URI as File, yes, I know - I need the basename File uri = new File( DAX_SCHEMA_LOCATION ); File dax = // create a pointer to the default local position new File( m_props.getSysConfDir(), uri.getName() ); // Nota bene: pegasus.schema.dax may be a networked URI... return m_props.getProperty( "pegasus.schema.dax", dax.getAbsolutePath() ); } /** * Helps the load database to locate the invocation record XML schema, * if available. Please note that the schema location URL in the * instance document is only a hint, and may be overridden by the * findings of this method. * * @return a location pointing to a definition document of the XML * schema that can read invocation records. Result may be null, if such a document * is unknown or unspecified. * @see org.griphyn.vdl.parser.InvocationParser#InvocationParser( String ) */ public String getPTCSchemaLocation() { // treat URI as File, yes, I know - I need the basename File uri = new File( IVR_SCHEMA_LOCATION ); File ptc = // create a pointer to the default local position new File( m_props.getSysConfDir(), uri.getName() ); // Nota bene: pegasus.schema.ptc may be a networked URI... return m_props.getProperty( "pegasus.catalog.provenance", ptc.getAbsolutePath() ); } /** * Gets the rc.data file location, which is used by the shell planner. */ public String getRCLocation() { File rcFile = new File( m_props.getSysConfDir(), "rc.data" ); return m_props.getProperty( "pegasus.db.rc", rcFile.getAbsolutePath() ); } /** * Gets the tc.data file location, which is used by the shell planner. */ public String getTCLocation() { File tcFile = new File( m_props.getSysConfDir(), "tc.data" ); return m_props.getProperty( "pegasus.db.tc", tcFile.getAbsolutePath() ); } /** * Gets the name of the database schema from the properties. * * @param dbSchemaPrefix is the database schema key name in the * properties file, which happens to be the pointer to the class * to load. * @return the database schema name, result may be null, if such * property is not specified. */ public String getDatabaseSchemaName( String dbSchemaPrefix ) { return m_props.getProperty( dbSchemaPrefix ); } /** * Gets the name of the database driver from the properties. * A specific match is preferred over the wildcard match. * * @param dbDriverPrefix is the database driver key name in the * properties file, which happens to be the pointer to the class * to load. * @return the database driver name, result may be null, if such * property is not specified. */ public String getDatabaseDriverName( String dbDriverPrefix ) { return ( dbDriverPrefix == null ? m_props.getProperty( DBDRIVER_ALL_PREFIX ) : m_props.getProperty( dbDriverPrefix, m_props.getProperty(DBDRIVER_ALL_PREFIX) ) ); } /** * Gets the database URL from the properties file; the URL is a contact * string for the database. The URL contact string is removed from the * regular properties which are passed to the JDBC driver. * * @param dbDriverPrefix is the database driver key name. * @return the database url, result may be null, if the * driver URL is not specified. 
* @see #getDatabaseDriverProperties( String ) */ public String getDatabaseURL( String dbDriverPrefix ) { return ( dbDriverPrefix == null ? //pick pegasus.catalog.*.db.url m_props.getProperty( DB_ALL_PREFIX + ".url" ) : m_props.getProperty( dbDriverPrefix + ".url", //default value pegasus.catalog.*.db.url m_props.getProperty( DB_ALL_PREFIX + ".url" ) ) ); } /** * Extracts a specific property key subset from the known properties. * The prefix is removed from the keys in the resulting dictionary. * * @param prefix is the key prefix to filter the properties by. * @return a property dictionary matching the filter key. May be * an empty dictionary, if no prefix matches were found. */ public Properties matchingSubset( String prefix ) { return m_props.matchingSubset( prefix, false ); } /** * Obtains database driver specific properties. * * @param dbDriverPrefix is the database driver property key prefix * for which to obtain properties. * @return a property set filled with driver specific * properties. May be null, if no such properties are specified. */ public Properties getDatabaseDriverProperties( String dbDriverPrefix ) { Properties result = new Properties( matchingSubset(DB_ALL_PREFIX) ); if (dbDriverPrefix != null) result.putAll( matchingSubset( dbDriverPrefix ) ); result.remove("url"); // must not be passed to the JDBC driver return result; } /** * Obtains the database schema specific properties. * * @param dbSchemaPrefix is the database schema key name in the * properties file * @return a property set filled with schema specific * properties. May be null, if no such properties are specified. */ public Properties getDatabaseSchemaProperties( String dbSchemaPrefix ) { return matchingSubset( dbSchemaPrefix ); } /** * Gets the name of the replica catalog implementing class from the * properties. * * @param dbReplicaPrefix is the replica catalog class name in the * properties file. * @return the replica catalog implementing class name, result may be * null, if such property is not specified. */ public String getReplicaCatalogName( String dbReplicaPrefix ) { return m_props.getProperty( dbReplicaPrefix ); } /** * Obtains all properties to handle the experimental replica catalog * interface. * @param dbReplicaPrefix is the prefix for the replica catalog's * implementation configuration. * @return all properties, excluding the prefix itself, for the RC. */ public Properties getReplicaCatalogProperties( String dbReplicaPrefix ) { return matchingSubset( dbReplicaPrefix ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/FileHelper.java0000644000175000017500000001244711757531137023526 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.util; import org.griphyn.vdl.util.*; import java.util.*; import java.io.*; /** * This class allows access to a set of files. With the constructor, the * basename of the fileset is specified. 
From this basename, a set of 10 * files can be accessed and constructed, with suffixes ".0" to ".9". A * cursor file, suffix ".nr", points to the currently active file in the * ring.

* * In read mode, the cursor file gets locked temporarily while the stream * is opened. If no cursor file exists, it is assumed that the basename * is also the filename of the database. A lock file for the opened database * will be created.

* * In write mode, the cursor file stays locked until the writer is * closed again. Thus, parallel access by other writers or readers is * prohibited. The cursor is advanced when the stream is closed. The database stream * points to the next file beyond the cursor. If no cursor file existed, it * will point to suffix ".0".

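 * A typical read cycle, sketched for orientation (FileHelper itself is
 * abstract, so the concrete subclass below is hypothetical):
 * <pre>
 *   FileHelper helper = new SomeFileHelper( "vdc.db" ); // hypothetical subclass
 *   File db = helper.openReader();  // locks the cursor, picks the ring file
 *   if ( db != null ) {
 *     try {
 *       // ... read from db ...
 *     } finally {
 *       helper.closeReader( db );   // releases the lock resources
 *     }
 *   }
 * </pre>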
* * All access to the files must go through the respective open and close * methods provided by this class in order to guarantee proper locking! * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see java.io.File * @see LockHelper */ abstract public class FileHelper { /** * base name of the fileset to access. */ protected String m_database; /** * description of the cursor file for the given basename set. */ protected File m_number; /** * Primary ctor: obtain access to a database cycle via basename. * @param basename is the name of the database without digit suffix. */ public FileHelper( String basename ) { this.m_database = basename; this.m_number = new File( basename + ".nr" ); } /** * Reads the cursor file and obtains the current cycle position. * The contract requires you to hold the lock for the cursor file. * * @return the current cycle position, or -1 to indicate failure. */ protected int readCount() { int result = -1; try { LineNumberReader lnr = new LineNumberReader( new FileReader(this.m_number) ); result = Integer.parseInt(lnr.readLine()); lnr.close(); } catch ( IOException ioe ) { Logging.instance().log( "lock", 2, "unable to process " + this.m_number.getPath() + ": " + ioe.getMessage() ); result = -1; // make extra sure } catch ( NumberFormatException nfe ) { result = -1; // make extra sure } return result; } /** * Updates the cursor file with a new cycle position. The contract * requires you to hold the lock for the cursor file. * * @param number is the new cursor position. * @return true, if the file was updated all right, false, * if an error occurred during update. */ protected boolean writeCount( int number ) { boolean result = false; try { FileWriter fw = new FileWriter(this.m_number); fw.write( Integer.toString(number) + System.getProperty( "line.separator", "\r\n" ) ); fw.close(); result = true; } catch ( IOException ioe ) { Logging.instance().log( "lock", 2, "unable to update " + this.m_number.getPath() + ": " + ioe.getMessage() ); result = false; } return result; } /** * Opens a reader for the basename, adjusting the database cycles. * The access can be shared with other simultaneous readers. * @return a reader opened to the basename, or null for failure. */ abstract public File openReader(); /** * Closes a previously obtained reader, and releases internal * locking resources. The stream is only closed if the reader was * found in the internal state. * * @param r is the reader that was created by {@link #openReader()}. * @return true, if unlocking went smoothly, or false in the presence * of an error. The only error that can happen is to use a File * instance which was not returned by this instance. */ abstract public boolean closeReader( File r ); /** * Opens a writer for the basename, adjusting the database cycles. * The access is exclusive, and cannot be shared with readers or * writers. * * @return a writer opened for the basename, or null for failure. */ abstract public File openWriter(); /** * Closes a previously obtained writer, and releases internal * locking resources. Error conditions are either passing an unknown * instance, or the inability to update the cursor file. * * @param w is the instance that was returned by {@link #openWriter()}. * @return true, if the closing went smoothly, false in the presence * of an error. 
*/ abstract public boolean closeWriter( File w ); } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/LockFileSet.java0000644000175000017500000000555211757531137023652 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the copy. * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.util; import org.griphyn.vdl.util.Logging; import java.util.*; import java.io.*; /** * This class is a helper for the LockHelper. It maintains * the set of lock filenames that are currently locked. These files need to be * removed on exit. Thus, this class is implemented as a Singleton. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision $ * * @see LockHelper */ public class LockFileSet extends Thread { /** * This is the Singleton instance variable. */ private static LockFileSet m_instance; /** * The set of files that need to be erased on exit. */ private HashSet m_fileSet; /** * The c'tor is protected in order to enforce the Singleton interface. */ protected LockFileSet() { m_fileSet = new HashSet(); Runtime.getRuntime().addShutdownHook( this ); } /** * This is the only access to any LockFileSet object. It is a kind of * factory that produces just one instance for all. * * @return the one and only instance of a LockFileSet. Always. */ public static synchronized LockFileSet instance() { if ( m_instance == null ) m_instance = new LockFileSet(); return m_instance; } /** * This method should not be called directly. It will be invoked on exit * by the exit hook handler. */ public void run() { synchronized ( this.m_fileSet ) { for ( Iterator i=this.m_fileSet.iterator(); i.hasNext(); ) { ((File) i.next()).delete(); } this.m_fileSet = null; } } /** * Adds a file name to the set of lock filenames that need to be removed * on exit. * @param f is a File instance naming the lock file. */ public void add( File f ) { // Logging.instance().log( "lock", 2, "LFS add " + f.getPath() ); synchronized ( this.m_fileSet ) { this.m_fileSet.add(f); } } /** * Removes a filename from the set of lock filenames that get removed * on exit. * @param f is a File instance of a filename to remove from the list * of removable files (are you confused yet). */ public void remove( File f ) { // Logging.instance().log( "lock", 2, "LFS del " + f.getPath() ); synchronized ( this.m_fileSet ) { this.m_fileSet.remove(f); } } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/SequenceMapping.java0000644000175000017500000000676311757531137024573 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). 
* * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.util; import java.util.HashMap; /** * This class maps back and forth between a short identifier, and * some unique, abstract identifier that is generated by a sequence. * Instantiated with the default constructor, the generated * identifiers will satisfy NMTOKEN. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ public class SequenceMapping { /** * The forward mapping between the external identifier and * the internally generated sequence for the identifier. */ private HashMap m_forward; /** * The reverse mapping between the sequence-generated identifier, * and the external identifier. The reverse mapping is also * employed to rule out duplicates. */ private HashMap m_backward; /** * The sequence counter employed by this generator. Please note * that multiple instances of this class will produce the same * sequences! */ private SequenceGenerator m_sequence; /** * A prefix string, initialized to "ID" for the default c'tor. */ private String m_prefix; /** * C'tor: This will generate an instance that generates IDxxxxx * symbols. Please note that multiple instances of this class will * produce the same sequences! */ public SequenceMapping() { this.m_prefix = "ID"; this.m_forward = new HashMap(); this.m_backward = new HashMap(); this.m_sequence = new SequenceGenerator(); } /** * C'tor: Instances from this constructor will generate ${prefix}xxxxx. * Please note that multiple instances of this class with the same * prefix will produce the same sequences! * * @param prefix is a prefix to the generated sequence. */ public SequenceMapping( String prefix ) { this.m_prefix = new String(prefix); this.m_forward = new HashMap(); this.m_backward = new HashMap(); this.m_sequence = new SequenceGenerator(); } /** * Creates and caches a unique id for a given short identifier. * @param id is the external identifier which needs to be forward mapped. * @return a sequence-generated identifier to use instead. */ public String forward( String id ) { String result = (String) this.m_forward.get(id); if ( result == null ) { // need to create a new entry do { result = this.m_prefix + this.m_sequence.generate(); } while ( this.m_backward.containsKey(result) ); this.m_forward.put( id, result ); this.m_backward.put( result, id ); } Logging.instance().log( "map", 5, id + " <=> " + result ); return result; } /** * Obtains an existing mapping without producing a new one. May return * null if no mapping exists. * * @param shortid is the external identifier to check fwd mapping for * @return null, if no such mapping exists. */ public String get( String shortid ) { return (String) this.m_forward.get(shortid); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/DAX2DOT.java0000644000175000017500000002724011757531137022551 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. 
*/ package org.griphyn.vdl.util; import java.util.TreeMap; import java.util.Map; import java.util.Iterator; import java.io.*; import edu.isi.pegasus.common.util.Separator; import org.griphyn.vdl.dax.*; import org.griphyn.vdl.classes.LFN; /** * Convert a dag structure into GraphViz dot format. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ */ public class DAX2DOT { /** * Separator for strings. */ public static final String SEPARATOR = "/"; /** * Linefeed element for labels. */ public static final String LINEFEED = "\\n"; /** * height in inches? */ private double m_height; /** * width in inches? */ private double m_width; /** * predicate to show the derivation (DV) name. */ private boolean m_showDV; /** * predicate to show the transformation (TR) name. */ private boolean m_showTR; /** * Maintains namespace to color mappings. */ private Map m_color; /** * Maintains the color cycle. */ private int m_index; /** * Map of default colors to cycle through for * coloration of job nodes by TR namespace. */ private static final String c_color[] = { "#FFAAFF", "#FFFFAA", "#FFAAAA" }; /** * Constructor. */ public DAX2DOT() { m_height = 10; m_width = 8; m_showDV = false; m_showTR = true; m_color = new TreeMap(); m_index = 0; } /** * Convenience constructor sets the size of the graph. * * @param h is the height in inches * @param w is the width in inches */ public DAX2DOT(double h, double w) { m_height = h; m_width = w; m_showDV = false; m_showTR = true; m_color = new TreeMap(); m_index = 0; } /** * Sets the size of the graph. * * @param h is the height in inches * @param w is the width in inches * @see #getHeight() * @see #getWidth() */ public void setSize(double h, double w) { m_height = h; m_width = w; } /** * Determines the height of the graph. * @return height in inches * @see #setSize( double, double ) * @see #getWidth() */ public double getHeight() { return m_height; } /** * Determines the width of the graph. * @return width in inches * @see #setSize( double, double ) * @see #getHeight() */ public double getWidth() { return m_width; } /** * Determines, if DV identifiers are shown. * * @return true, if the DV identifier is shown * @see #setShowDV( boolean ) */ public boolean getShowDV() { return m_showDV; } /** * Sets the showing of derivation names. * * @param showDV is true to show derivation identifiers. * @see #getShowDV() */ public void setShowDV( boolean showDV ) { m_showDV = showDV; } /** * Determines, if TR identifiers are shown. * * @return true, if the TR identifier is shown * @see #setShowTR( boolean ) */ public boolean getShowTR() { return m_showTR; } /** * Sets the showing of transformation names. * * @param showTR is true to show transformation identifiers. 
* @see #getShowTR() */ public void setShowTR( boolean showTR ) { m_showTR = showTR; } /** * Generates GraphViz .dot format from the specified ADAG * @param adag is the ADAG instance * @return a string representing .dot format */ public String toDOT( ADAG adag ) throws IOException { // do not show files in the graph by default StringWriter sw = new StringWriter(); toDOT(adag, sw, false); return sw.toString(); } /** * Generates GraphViz .dot format from the specified ADAG * @param adag is the ADAG instance * @param showFiles if set to true, then display files in the graph * @return a string representing .dot format * @see #toDOT( ADAG, Writer, boolean ) * @see #toDOT( ADAG, Writer, boolean, String, String ) */ public String toDOT(ADAG adag, boolean showFiles) throws IOException { StringWriter sw = new StringWriter(); toDOT(adag, sw, showFiles); return sw.toString(); } /** * Generates GraphViz .dot format from the specified ADAG * @param adag is the ADAG instance * @param writer is the target to output the dot specification * @param showFiles if set to true, then display files in the graph * @see #toDOT( ADAG, Writer, boolean, String, String ) */ public void toDOT(ADAG adag, Writer writer, boolean showFiles) throws IOException { toDOT(adag, writer, showFiles, null, null); } /** * Prepares and prints the job node of the graph. The job's unique * identifier assigned in the DAX is taken as the job's identifier, * but the TR, ID, and DV are used as a label. * * @param w is the open file writer to print to * @param j is a Job element. * @param url is the job URL, which may be null. * @return the identifier for the job to connect the graph. */ private String showJob( Writer w, Job j, String url ) throws IOException { StringBuffer label = new StringBuffer(48); String id = j.getID(); String tr = Separator.combine(j.getNamespace(), j.getName(), j.getVersion()); label.append(id); if ( m_showTR && tr != null ) label.append(LINEFEED).append("TR ").append(tr); if ( m_showDV ) { String dv = Separator.combine(j.getDVNamespace(), j.getDVName(), j.getDVVersion()); if ( dv != null ) label.append(LINEFEED).append("DV ").append(dv); } // // Doug's wish: color by namespace // String color = null; String ns = j.getNamespace(); // may be null! if ( ns != null ) { if ( m_color.containsKey(ns) ) { // existing namespace, recycle color color = (String) m_color.get(ns); } else { // insert new color for new namespace color = c_color[m_index]; m_index = (m_index + 1) % c_color.length; m_color.put( ns, color ); } } // write output for job node w.write( " \"" ); w.write(id); w.write( "\" [label=\"" ); w.write( label.toString() ); if ( url != null ) { w.write( "\" URL=\"" ); w.write( url ); w.write( tr ); } if ( color != null ) { w.write( "\" color=\"" ); w.write( color ); } w.write( "\"]\n" ); return id; } /** * Prepares and prints the file node of the graph. The file's LFN * will be its unique identifier, and its label. * * @param w is the open file writer to print to * @param f is a Filename element. * @param url is the file URL, which may be null. * @return the identifier for the file to connect the graph. */ private String showFile( Writer w, Filename f, String url ) throws IOException { String lfn = f.getFilename(); // write output for filename node w.write( " \"" ); w.write(lfn); w.write( "\" [color=\"#88" ); w.write( ((f.getLink() & LFN.INPUT) > 0 ? "FF" : "AA" ) ); w.write( ((f.getLink() & LFN.OUTPUT) > 0 ? 
"FF" : "AA" ) ); if ( url != null ) { w.write( "\" URL=\"" ); w.write( url ); w.write( lfn ); } w.write( "\"]\n" ); return lfn; } /** * Generates GraphViz .dot format from the specified ADAG, also generates * the client side HTML map for nodes. * @param adag is the ADAG instance * @param writer is the target to output the dot specification * @param showFiles if set to true, then display files in the graph * @param jobURL is the base URL for jobs * @param fileURL is the base URL for files */ public void toDOT(ADAG adag, Writer writer, boolean showFiles, String jobURL, String fileURL ) throws IOException { this.m_index = 0; writer.write("digraph DAG {\n"); writer.write(" size=\"" + m_width + "," + m_height +"\"\n"); writer.write(" ratio = fill\n"); if ( showFiles ) { writer.write(" node[shape=parallelogram]\n"); for (Iterator i=adag.iterateFilename(); i.hasNext();) { Filename fn = (Filename) i.next(); String lfn = showFile( writer, fn, fileURL ); } writer.write(" node [shape=ellipse, color=orange, style=filled]\n"); for (Iterator i=adag.iterateJob(); i.hasNext();) { Job job = (Job) i.next(); String jid = showJob(writer,job,jobURL); for (Iterator j=job.iterateUses(); j.hasNext();) { Filename fn = (Filename)j.next(); String lfn = fn.getFilename(); // this covers in, out, and io (two arrows) if ( (fn.getLink() & LFN.INPUT) > 0 ) writer.write(" \"" + lfn + "\" -> \"" + jid + "\"\n"); if ( (fn.getLink() & LFN.OUTPUT) > 0 ) writer.write(" \"" + jid + "\" -> \"" + lfn + "\"\n"); } } } else { writer.write(" node [shape=ellipse, color=orange, style=filled]\n"); for (Iterator i=adag.iterateJob(); i.hasNext();) { Job job = (Job) i.next(); String jid = showJob(writer,job,jobURL); } for (Iterator c=adag.iterateChild(); c.hasNext();) { Child chld = (Child) c.next(); String ch = chld.getChild(); Job cjob = adag.getJob(ch); String cid = cjob.getID(); for (Iterator p=chld.iterateParent(); p.hasNext();) { String pr = (String) p.next(); Job pjob = adag.getJob(pr); String pid = pjob.getID(); writer.write(" \"" + pid + "\" -> \"" + cid + "\"\n"); } } } writer.write("}\n"); writer.flush(); } /** * Simple test */ public static void main(String[] args) throws IOException { ADAG adag = new ADAG(); Job A = new Job("ns1","trA",null,"ID000001"); Job B = new Job("ns2","trB",null,"ID000002"); Job C = new Job("ns3","trC",null,"ID000003"); Job D = new Job(null,"trD",null,"ID000004"); A.setDV("ns2","dvA",null); B.setDV("ns2","dvB",null); C.setDV("ns3","dvC",null); D.setDV("ns3","dvD",null); A.addUses( new Filename("f.1",LFN.INPUT) ); adag.addFilename("f.1",true,"true",false,LFN.XFER_MANDATORY); A.addUses( new Filename("f.2",LFN.OUTPUT) ); adag.addFilename("f.2",false,"true",false,LFN.XFER_MANDATORY); B.addUses( new Filename("f.2",LFN.INPUT) ); adag.addFilename("f.2",true,"true",false,LFN.XFER_MANDATORY); B.addUses( new Filename("f.3",LFN.OUTPUT) ); adag.addFilename("f.3",false,"true",false,LFN.XFER_MANDATORY); C.addUses( new Filename("f.2",LFN.INPUT) ); adag.addFilename("f.2",true,"true",false,LFN.XFER_MANDATORY); C.addUses( new Filename("f.4",LFN.OUTPUT) ); adag.addFilename("f.4",false,"true",false,LFN.XFER_MANDATORY); D.addUses( new Filename("f.3",LFN.INPUT) ); adag.addFilename("f.3",true,"true",false,LFN.XFER_MANDATORY); D.addUses( new Filename("f.4",LFN.INPUT) ); adag.addFilename("f.4",true,"true",false,LFN.XFER_MANDATORY); D.addUses( new Filename("f.5",LFN.OUTPUT) ); adag.addFilename("f.5",false,"true",false,LFN.XFER_MANDATORY); adag.addJob(A); adag.addJob(B); adag.addJob(C); adag.addJob(D); 
adag.addChild("ID000003","ID000001"); adag.addChild("ID000003","ID000002"); adag.addChild("ID000004","ID000003"); DAX2DOT d2d = new DAX2DOT(5, 5); d2d.setShowDV(true); String dot = d2d.toDOT(adag,true); System.out.println(dot); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/Chmod.java0000644000175000017500000001702711757531137022540 0ustar ryngerynge/* * Globus Toolkit Public License (GTPL) * * Copyright (c) 1999 University of Chicago and The University of * Southern California. All Rights Reserved. * * 1) The "Software", below, refers to the Globus Toolkit (in either * source-code, or binary form and accompanying documentation) and a * "work based on the Software" means a work based on either the * Software, on part of the Software, or on any derivative work of * the Software under copyright law: that is, a work containing all * or a portion of the Software either verbatim or with * modifications. Each licensee is addressed as "you" or "Licensee." * * 2) The University of Southern California and the University of * Chicago as Operator of Argonne National Laboratory are copyright * holders in the Software. The copyright holders and their third * party licensors hereby grant Licensee a royalty-free nonexclusive * license, subject to the limitations stated herein and * U.S. Government license rights. * * 3) A copy or copies of the Software may be given to others, if you * meet the following conditions: * * a) Copies in source code must include the copyright notice and * this license. * * b) Copies in binary form must include the copyright notice and * this license in the documentation and/or other materials * provided with the copy. * * 4) All advertising materials, journal articles and documentation * mentioning features derived from or use of the Software must * display the following acknowledgement: * * "This product includes software developed by and/or derived from * the Globus project (http://www.globus.org/)." * * In the event that the product being advertised includes an intact * Globus distribution (with copyright and license included) then * this clause is waived. * * 5) You are encouraged to package modifications to the Software * separately, as patches to the Software. * * 6) You may make modifications to the Software, however, if you * modify a copy or copies of the Software or any portion of it, * thus forming a work based on the Software, and give a copy or * copies of such work to others, either in source code or binary * form, you must meet the following conditions: * * a) The Software must carry prominent notices stating that you * changed specified portions of the Software. * * b) The Software must display the following acknowledgement: * * "This product includes software developed by and/or derived * from the Globus Project (http://www.globus.org/) to which the * U.S. Government retains certain rights." * * 7) You may incorporate the Software or a modified version of the * Software into a commercial product, if you meet the following * conditions: * * a) The commercial product or accompanying documentation must * display the following acknowledgment: * * "This product includes software developed by and/or derived * from the Globus Project (http://www.globus.org/) to which the * U.S. Government retains a paid-up, nonexclusive, irrevocable * worldwide license to reproduce, prepare derivative works, and * perform publicly and display publicly." 
* * b) The user of the commercial product must be given the following * notice: * * "[Commercial product] was prepared, in part, as an account of * work sponsored by an agency of the United States Government. * Neither the United States, nor the University of Chicago, nor * University of Southern California, nor any contributors to * the Globus Project or Globus Toolkit nor any of their employees, * makes any warranty express or implied, or assumes any legal * liability or responsibility for the accuracy, completeness, or * usefulness of any information, apparatus, product, or process * disclosed, or represents that its use would not infringe * privately owned rights. * * IN NO EVENT WILL THE UNITED STATES, THE UNIVERSITY OF CHICAGO * OR THE UNIVERSITY OF SOUTHERN CALIFORNIA OR ANY CONTRIBUTORS * TO THE GLOBUS PROJECT OR GLOBUS TOOLKIT BE LIABLE FOR ANY * DAMAGES, INCLUDING DIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL * DAMAGES RESULTING FROM EXERCISE OF THIS LICENSE AGREEMENT OR * THE USE OF THE [COMMERCIAL PRODUCT]." * * 8) LICENSEE AGREES THAT THE EXPORT OF GOODS AND/OR TECHNICAL DATA * FROM THE UNITED STATES MAY REQUIRE SOME FORM OF EXPORT CONTROL * LICENSE FROM THE U.S. GOVERNMENT AND THAT FAILURE TO OBTAIN SUCH * EXPORT CONTROL LICENSE MAY RESULT IN CRIMINAL LIABILITY UNDER U.S. * LAWS. * * 9) Portions of the Software resulted from work developed under a * U.S. Government contract and are subject to the following license: * the Government is granted for itself and others acting on its * behalf a paid-up, nonexclusive, irrevocable worldwide license in * this computer software to reproduce, prepare derivative works, and * perform publicly and display publicly. * * 10) The Software was prepared, in part, as an account of work * sponsored by an agency of the United States Government. Neither * the United States, nor the University of Chicago, nor The * University of Southern California, nor any contributors to the * Globus Project or Globus Toolkit, nor any of their employees, * makes any warranty express or implied, or assumes any legal * liability or responsibility for the accuracy, completeness, or * usefulness of any information, apparatus, product, or process * disclosed, or represents that its use would not infringe privately * owned rights. * * 11) IN NO EVENT WILL THE UNITED STATES, THE UNIVERSITY OF CHICAGO OR * THE UNIVERSITY OF SOUTHERN CALIFORNIA OR ANY CONTRIBUTORS TO THE * GLOBUS PROJECT OR GLOBUS TOOLKIT BE LIABLE FOR ANY DAMAGES, * INCLUDING DIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES * RESULTING FROM EXERCISE OF THIS LICENSE AGREEMENT OR THE USE OF * THE SOFTWARE. * * END OF LICENSE */ package org.griphyn.vdl.util; import java.io.*; /** * This class changes file permission modes in * Unix/Linux environment. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * */ public class Chmod{ /** * Changes file permission to be executable * @param filename is the name of the file to change permissions, * it allows wildcard. * @param dir is the directory where the file is located. 
* @return 0 if successful */ public static int changePermission( String filename, String dir ) throws IOException, InterruptedException { if ( System.getProperty("line.separator").equals("\n") && System.getProperty("file.separator").equals("/") && System.getProperty("path.separator").equals(":") ) { String[] cmd = new String[ 3 ]; cmd[0] = "/bin/sh"; cmd[1] = "-c"; cmd[2] = "chmod +x " + filename; Process p; if (dir == null) p = Runtime.getRuntime().exec( cmd, null, null ); else p = Runtime.getRuntime().exec( cmd, null, new File(dir) ); return p.waitFor(); } else { return -1; } } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/SequenceGenerator.java.new0000644000175000017500000002076111757531137025714 0ustar ryngerynge/* * Globus Toolkit Public License (GTPL) * * Copyright (c) 1999 University of Chicago and The University of * Southern California. All Rights Reserved. * * 1) The "Software", below, refers to the Globus Toolkit (in either * source-code, or binary form and accompanying documentation) and a * "work based on the Software" means a work based on either the * Software, on part of the Software, or on any derivative work of * the Software under copyright law: that is, a work containing all * or a portion of the Software either verbatim or with * modifications. Each licensee is addressed as "you" or "Licensee." * * 2) The University of Southern California and the University of * Chicago as Operator of Argonne National Laboratory are copyright * holders in the Software. The copyright holders and their third * party licensors hereby grant Licensee a royalty-free nonexclusive * license, subject to the limitations stated herein and * U.S. Government license rights. * * 3) A copy or copies of the Software may be given to others, if you * meet the following conditions: * * a) Copies in source code must include the copyright notice and * this license. * * b) Copies in binary form must include the copyright notice and * this license in the documentation and/or other materials * provided with the copy. * * 4) All advertising materials, journal articles and documentation * mentioning features derived from or use of the Software must * display the following acknowledgement: * * "This product includes software developed by and/or derived from * the Globus project (http://www.globus.org/)." * * In the event that the product being advertised includes an intact * Globus distribution (with copyright and license included) then * this clause is waived. * * 5) You are encouraged to package modifications to the Software * separately, as patches to the Software. * * 6) You may make modifications to the Software, however, if you * modify a copy or copies of the Software or any portion of it, * thus forming a work based on the Software, and give a copy or * copies of such work to others, either in source code or binary * form, you must meet the following conditions: * * a) The Software must carry prominent notices stating that you * changed specified portions of the Software. * * b) The Software must display the following acknowledgement: * * "This product includes software developed by and/or derived * from the Globus Project (http://www.globus.org/) to which the * U.S. Government retains certain rights." 
* * 7) You may incorporate the Software or a modified version of the * Software into a commercial product, if you meet the following * conditions: * * a) The commercial product or accompanying documentation must * display the following acknowledgment: * * "This product includes software developed by and/or derived * from the Globus Project (http://www.globus.org/) to which the * U.S. Government retains a paid-up, nonexclusive, irrevocable * worldwide license to reproduce, prepare derivative works, and * perform publicly and display publicly." * * b) The user of the commercial product must be given the following * notice: * * "[Commercial product] was prepared, in part, as an account of * work sponsored by an agency of the United States Government. * Neither the United States, nor the University of Chicago, nor * University of Southern California, nor any contributors to * the Globus Project or Globus Toolkit nor any of their employees, * makes any warranty express or implied, or assumes any legal * liability or responsibility for the accuracy, completeness, or * usefulness of any information, apparatus, product, or process * disclosed, or represents that its use would not infringe * privately owned rights. * * IN NO EVENT WILL THE UNITED STATES, THE UNIVERSITY OF CHICAGO * OR THE UNIVERSITY OF SOUTHERN CALIFORNIA OR ANY CONTRIBUTORS * TO THE GLOBUS PROJECT OR GLOBUS TOOLKIT BE LIABLE FOR ANY * DAMAGES, INCLUDING DIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL * DAMAGES RESULTING FROM EXERCISE OF THIS LICENSE AGREEMENT OR * THE USE OF THE [COMMERCIAL PRODUCT]." * * 8) LICENSEE AGREES THAT THE EXPORT OF GOODS AND/OR TECHNICAL DATA * FROM THE UNITED STATES MAY REQUIRE SOME FORM OF EXPORT CONTROL * LICENSE FROM THE U.S. GOVERNMENT AND THAT FAILURE TO OBTAIN SUCH * EXPORT CONTROL LICENSE MAY RESULT IN CRIMINAL LIABILITY UNDER U.S. * LAWS. * * 9) Portions of the Software resulted from work developed under a * U.S. Government contract and are subject to the following license: * the Government is granted for itself and others acting on its * behalf a paid-up, nonexclusive, irrevocable worldwide license in * this computer software to reproduce, prepare derivative works, and * perform publicly and display publicly. * * 10) The Software was prepared, in part, as an account of work * sponsored by an agency of the United States Government. Neither * the United States, nor the University of Chicago, nor The * University of Southern California, nor any contributors to the * Globus Project or Globus Toolkit, nor any of their employees, * makes any warranty express or implied, or assumes any legal * liability or responsibility for the accuracy, completeness, or * usefulness of any information, apparatus, product, or process * disclosed, or represents that its use would not infringe privately * owned rights. * * 11) IN NO EVENT WILL THE UNITED STATES, THE UNIVERSITY OF CHICAGO OR * THE UNIVERSITY OF SOUTHERN CALIFORNIA OR ANY CONTRIBUTORS TO THE * GLOBUS PROJECT OR GLOBUS TOOLKIT BE LIABLE FOR ANY DAMAGES, * INCLUDING DIRECT, INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES * RESULTING FROM EXERCISE OF THIS LICENSE AGREEMENT OR THE USE OF * THE SOFTWARE. * * END OF LICENSE */ package org.griphyn.vdl.util; import java.text.DecimalFormat; /** * Each instance of this class produces a row of sequence numbers. * By default, the sequence string has six digits. * * @author Jens-S. 
Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ public class SequenceGenerator { /** * The sequence counter employed by this generator. Please note * that multiple instances of this class will produce the same * sequences! */ private long m_count; /** * The formatting of the number. */ private DecimalFormat m_format; /** * C'tor: This will generate thirteen-digit sequence strings. * The sequence generator will be initialized from the current * time in milliseconds. */ public SequenceGenerator() { this.m_count = System.currentTimeMillis(); this.m_format = new DecimalFormat( "#############0000000000000" ); } /** * C'tor: Instances from this constructor will generate ${prefix}xxxxx. * Please note that multiple instances of this class with the same * prefix will produce the same sequences! The sequence generator will * be initialized to zero. * @param digits is the number of digits in the generator. * @exception IllegalArgumentException if the number of digits is negative */ public SequenceGenerator( int digits ) { this.m_count = 0; if ( digits < 0 ) throw new IllegalArgumentException( digits + " < 1" ); if ( digits == 0 ) this.m_format = null; else { // first half are optional digit markers, second half mandatory // zero-padded digits, e.g. "###000" for three digits StringBuffer pattern = new StringBuffer( digits << 1 ); pattern.setLength( digits << 1 ); for ( int i=0; i < digits; ++i ) { pattern.setCharAt( i, '#' ); pattern.setCharAt( digits+i, '0' ); } this.m_format = new DecimalFormat( pattern.toString() ); } } * * All access to the files must go through the respective open and close * methods provided by this class in order to guarantee proper locking! * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see java.io.File * @see LockHelper */ public class LockFileLock extends FileHelper { /** * file locking helper (could be static for all I care) */ private LockHelper m_lock; /** * maintainer of reads and writes for this instance. Parallel * instances might still try to access in parallel, which is the reason * lock files are employed. */ private int m_state = 0; private int m_refCount = 0; /** * state collector of the streams that are currently open. */ private HashMap m_streams; /** * Primary ctor: obtain access to a database cycle via basename. * @param basename is the name of the database without digit suffix. */ public LockFileLock( String basename ) { super( basename ); this.m_lock = new LockHelper(); this.m_streams = new HashMap(); } /** * Opens a reader for the basename, adjusting the database cycles. * The access can be shared with other simultaneous readers. * * @return a reader opened to the basename, or null for failure. * @see #closeReader( File ) */ public synchronized File openReader() { // check, if any writer is already open. Parallel readers are allowed. if ( this.m_state > 1 ) return null; int number = -1; if ( this.m_number.exists() ) { // if the number file does not exist, DON'T create it // FIXME: we still create a lock file for this (nonexisting) file // read which database is the current one if ( this.m_lock.lockFile( this.m_number.getPath() ) ) { number = readCount(); // keep locked until database is opened } } else { // if the number file does not exist, DO NOT create it (yet) Logging.instance().log( "lock", 2, "number file " + m_number.getPath() + " does not exist, ignoring lock" ); } // postcondition: number points to the original database to read from // database file File database = new File( number == -1 ? m_database : m_database + '.'
+ Integer.toString(number) ); // lock and open database File result = null; if ( this.m_lock.lockFile( database.getPath() ) ) { result = database; this.m_state |= 1; // mark reader as active this.m_refCount++; // and count readers this.m_streams.put( result, new Integer(number) ); } // release lock on number file in any case. Once it is opened, // assume that it is protected by the OS. this.m_lock.unlockFile( this.m_number.getPath() ); // exit condition: Only the database file is locked, or in case of // failure it is unlocked. The number file is always unlocked at this // stage. return result; } /** * Opens a writer for the basename, adjusting the database cycles * The access is exclusive, and cannot be shared with readers nor * writers. * * @return a writer opened for the basename, or null for failure. * @see #closeWriter( File ) */ public synchronized File openWriter() { // check, if any reader or a writer is already open if ( this.m_state > 0 ) return null; int number = -1; if ( ! this.m_number.exists() ) { // if the number file does not exist, DO NOT create it (yet) // FIXME: we still create a lock file for this (nonexisting) file number = 0; } else { // read which database is the current one if ( this.m_lock.lockFile( this.m_number.getPath() ) ) { number = readCount(); // keep file locked! } // generate next file number = (number + 1) % 10; } // postcondition: number is the new database to write to // database file File database = new File( this.m_database + '.' + Integer.toString(number) ); // lock and open database File result = null; if ( this.m_lock.lockFile( database.getPath() ) ) { result = database; this.m_state |= 2; // mark writer as active this.m_streams.put( result, new Integer(number) ); } if ( result == null ) { // failure, release lock on number file this.m_lock.unlockFile( this.m_number.getPath() ); } // exit condition: database file and number file are both locked, or // in case of failure: both unlocked. return result; } /** * Closes a previously obtained reader, and releases internal * locking resources. Only if the reader was found in the internal * state, any closing of the stream will be attempted. * * @param r is the reader that was created by {@link #openReader()}. * @return true, if unlocking went smoothly, or false in the presence * of an error. The only error that can happen it to use a File * instance which was not returned by this instance. * @see #openReader() */ public synchronized boolean closeReader( File r ) { boolean result = false; Integer number = (Integer) this.m_streams.get(r); if ( number != null ) { // deactivate reader refcount if ( --this.m_refCount == 0 ) this.m_state &= ~1; // remove lock from database file in any case this.m_streams.remove( r ); this.m_lock.unlockFile( r.getPath() ); // everything is smooth result = true; } return result; } /** * Closes a previously obtained writer, and releases internal * locking resources. Error conditions can be either a missing * instance that passed, or the inability to update the cursor file. * * @param w is the instance that was returned by {@link #openWriter()}. * @return true, if the closing went smoothly, false in the presence * of an error. * @see #openWriter() */ public synchronized boolean closeWriter( File w ) { boolean result = false; Integer number = (Integer) this.m_streams.get(w); if ( number != null ) { // Since the cursor could not be modified due to being locked, // we can update it now with the new version. NOW create it. 
result = writeCount(number.intValue()); // deactivate writer this.m_state &= ~2; // remove locks from database and cursor file. this.m_streams.remove( w ); this.m_lock.unlockFile( w.getPath() ); this.m_lock.unlockFile( this.m_number.getPath() ); } return result; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/LockHelper.java0000644000175000017500000001074211757531137023533 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.util; import org.griphyn.vdl.util.*; import java.util.*; import java.io.*; /** * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see java.io.File */ public class LockHelper { /** * The suffix to use for the regular lock file. */ public static final String LOCK_SUFFIX = ".lock"; /** * The initial timeout to use when retrying with exponential backoff. */ private long m_timeout; /** * The number of retries to attempt obtaining a lock. */ private int m_retries; /** * default ctor. */ public LockHelper() { this.m_timeout = 250; this.m_retries = 10; } /** * ctor. */ public LockHelper( long timeout, int retries ) { this.m_timeout = timeout; this.m_retries = retries; } /** * Creates a file-based lock file in an NFS secure fashion. Do not use * this function for locking, use {@link #lockFile(String)} instead. * One exception is to use this method for test-and-set locking. * * @param filename is the file to create a lockfile for * @return the representation of the lock file, or null * in case of error. */ public File createLock( String filename ) { // create local names from basename File result = null; File lock = new File( filename + LockHelper.LOCK_SUFFIX ); // exclusively create new file boolean created = false; try { created = lock.createNewFile(); } catch ( IOException ioe ) { Logging.instance().log( "lock", 0, "while creating lock " + lock.getPath() + ": " + ioe.getMessage() ); created = false; // make extra sure } // if the lock was created, continue if ( created ) { // postcondition: file was created Logging.instance().log( "lock", 2, "created lock " + lock.getPath() ); LockFileSet.instance().add( lock ); lock.setLastModified( System.currentTimeMillis() ); // force NFS result = lock; } else { // unable to rename file to lock file: lock exists Logging.instance().log( "lock", 1, "lock " + lock.getPath() + " already exists" ); } // may be null return result; } /** * Locks a file using an empty lockfile. This method repeatedly retries * to lock a file, and gives up after a few seconds. * * @param filename is the basename of the file to lock. * @return true, if the file was locked successfully, false otherwise. */ public boolean lockFile( String filename ) { long timeout = this.m_timeout; // retry a bounded number of times, sleeping with exponential backoff for ( int i=0; i < this.m_retries; ++i ) { // test-and-set: only the creator of the lock file obtains the lock if ( this.createLock(filename) != null ) return true; try { Thread.sleep( timeout ); } catch ( InterruptedException ie ) { // ignore interruptions, just retry } timeout <<= 1; } // unable to obtain a lock within the retry budget return false; } * * The logging mechanism works similar to syslog. There is an * arbitrary number of user-named queues, and a "default" queue. * Each queue has a level associated with it. The higher the level, * the less important the message.
If the message to be logged * exceeds the level, it will not be logged. Level 0 will always * be logged, if a queue exists for it.

* * Usage is simple. Each queue has to be registered before use. The * registration associates the output stream and the maximum debug level.

* * Each log line will be prefixed by a time stamp. The logging class * maintains internal state for each queue, if it requested a line feed * to be printed. Thus, you are able to construct a message in several * pieces, or a multi-line message by smuggling line feeds within the * message.

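 * A minimal usage sketch (the queue name "app", the level 2, and the
 * messages are made up for illustration, and are not part of the API):
 *
 *   Logging log = Logging.instance();
 *   log.register( "app", System.err, 2 );  // log levels 0..2 to stderr
 *   log.log( "app", 0, "level 0 is always printed" );
 *   log.log( "app", 3, "dropped: 3 exceeds the queue maximum of 2" );
 *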
* * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * */ public class Logging { /** * Keeper of the Singleton. */ private static Logging m_instance = null; /** * maintains the map with associated output streams. */ private Hashtable m_queues = null; /** * maintains the map with the maximum debug level per queue. */ private Hashtable m_levels = null; /** * maintains the line feed state for each queue. */ private Hashtable m_newline = null; /** * This is used to format the time stamp. */ private static Currently m_formatter = null; /** * This is the verbose option. Any queue will be protocolled up * to the verbose level, iff the level is 0 or above. Verbose * messages are dumped on the stream associated with "default", * which defaults to stderr. */ private int m_verbose = -1; /** * implement the Singleton pattern */ public static Logging instance() { if ( m_instance == null ) m_instance = new Logging(); return m_instance; } /** * Ctor. */ private Logging() { this.m_queues = new Hashtable(); this.m_levels = new Hashtable(); this.m_newline = new Hashtable(); // Logging.m_formatter = new Currently( "yyyy-MM-dd HH:mm:ss.SSSZZZZZ: " ); Logging.m_formatter = new Currently( "yyyyMMdd'T'HHmmss.SSS: " ); register( "default", System.err, 0 ); } /** * Accessor: Obtains the default timestamp format for all queues. * * @return the currently active timestamp prefix format. */ public static Currently getDateFormat() { return Logging.m_formatter; } /** * Accessor: Sets the default timestamp format for all queues. * * @param format is the new timestamp prefix format. */ public static void setDateFormat( Currently format ) { if ( format != null ) Logging.m_formatter = format; } /** * Registers a stream with a name to use for logging. The queue * will be set up for maximum logging, e.g. virtually all levels * for this queue are logged. * * @param handle is the queue identifier * @param out is the name of a file to append to. Special names are * stdout and stderr, which map to the * system's respective streams. * @see #register( String, OutputStream, int ) */ public void register( String handle, String out ) { if ( out.equals("stdout") ) { this.register( handle, System.out, Integer.MAX_VALUE ); } else if ( out.equals("stderr") ) { this.register( handle, System.err, Integer.MAX_VALUE ); } else { try { FileOutputStream fout = new FileOutputStream(out,true); this.register( handle, new BufferedOutputStream(fout), Integer.MAX_VALUE ); } catch ( FileNotFoundException e ) { log( "default", 0, "unable to append \"" + handle + "\" to " + out + ": " + e.getMessage() ); } catch ( SecurityException e ) { log( "default", 0, "unable to append \"" + handle + "\" to " + out + ": " + e.getMessage() ); } } } /** * Registers a stream with a name to use for logging. The queue * will be set up for maximum logging, e.g. virtually all levels * for this queue are logged. * * @param handle is the queue identifier * @param out is the new output stream * @see #register( String, OutputStream, int ) */ public void register( String handle, OutputStream out ) { this.register( handle, out, Integer.MAX_VALUE ); } /** * Registers a stream with a name to use for logging. The queue * will be set up to use the output stream. If there was another * stream previously registered, it will be closed! 
* * @param handle is the queue identifier * @param out is the output stream associated with the queue * @param level is the maximum debug level to put into the queue */ public void register( String handle, OutputStream out, int level ) { PrintWriter previous = (PrintWriter) m_queues.put( handle, new PrintWriter(out,true) ); // don't close System.out nor System.err. So, rely on Java to close // if ( previous != null ) previous.close(); m_levels.put( handle, new Integer(level) ); m_newline.put( handle, new Boolean(true) ); } /** * Determines the maximum level up to which messages on the given * queue are protocolled. The associated stream is unaffected. * * @param handle is the queue identifier * @return the maximum inclusive log level, or -1 for error * @see #setLevel( String, int ) */ public int getLevel( String handle ) { if ( isUnset(handle) ) return -1; Integer i = (Integer) m_levels.get(handle); return (i != null ? i.intValue() : -1); } /** * Set the maximum level up to which messages on the given queue are * protocolled. The associated stream is unaffected. * * @param handle is the queue identifier * @param level is the new maximum log level (non-negative integer) * @see #setLevel( String, int ) */ public void setLevel( String handle, int level ) { if ( isUnset(handle) ) return; if ( level < 0 ) return; m_levels.put( handle, new Integer(level) ); } /** * Obtains the current verbosity level. * @return -1 for no verbosity, or the level up to which messages are logged. * @see #setVerbose( int ) */ public int getVerbose() { return this.m_verbose; } /** * Sets the maximum verbosity. * @see #resetVerbose() */ public void setVerbose() { this.m_verbose = Integer.MAX_VALUE; } /** * Deactivates any verbosity. * @see #setVerbose() */ public void resetVerbose() { this.m_verbose = -1; } /** * Sets or resets the verbosity level. * @param max is the maximum inclusive level to which messages on any * queue should be logged. A value of -1 (or any negative value) will * deactivate verbosity mode. * @see #getVerbose() */ public void setVerbose( int max ) { this.m_verbose = max; } /** * Prints a message on a previously registered stream. * * @param handle is the symbolic queue handle. * @param level is a verbosity level. The higher the level, the * more debug like the message. Messages of level 0 will always * be printed. * @param msg is the message to put onto the stream. Please note * that this function will automatically add the line break. */ public void log( String handle, int level, String msg ) { this.log( handle, level, msg, true ); } /** * Checks if a queue is free to be set up. This is important for * initialization to setup default queues, but allow user overrides. * * @param handle names the queue to check for a stream. * @return true, if the queue is not yet connected. */ public boolean isUnset( String handle ) { // sanity check if ( handle == null ) return false; return ( this.m_queues.get(handle) == null ); } /** * Prints a message on a previously registered stream. * * @param handle is the symbolic queue handle. * @param level is a verbosity level. The higher the level, the * more debug like the message. Messages of level 0 will always * be printed. * @param msg is the message to put onto the stream. * @param newline is a boolean, which will call invoke the println * method. 
*/ public void log( String handle, int level, String msg, boolean newline ) { Integer maximum = (Integer) this.m_levels.get(handle); // do something, if verbosity if active boolean verbose = this.m_verbose >= 0 && level <= this.m_verbose; // do nothing, if we don't know about this level // do nothing, if the maximum level is below chosen debug level if ( verbose || ( maximum != null && ( level == 0 || level <= maximum.intValue() ) ) ) { // determine stream to dump message upon PrintWriter pw = (PrintWriter) this.m_queues.get( verbose ? "default" : handle); // if stream is known and without fault, dump message if ( pw != null && ! pw.checkError() ) { String prefix = new String(); // determine state of last message Boolean nl = (Boolean) this.m_newline.get(handle); // if last message had a newline attached, prefix with new timestamp if ( nl == null || nl.booleanValue() ) prefix += Logging.m_formatter.now() + '[' + handle + "] "; // print message if ( newline ) pw.println( prefix + msg ); else pw.print( prefix + msg ); // save new newline state for the stream. this.m_newline.put( handle, new Boolean(newline) ); } } } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/SequenceGenerator.java0000644000175000017500000000526311757531137025124 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.util; import java.text.DecimalFormat; /** * Each instance of this class produces a row of sequence numbers. * By default, the sequence string has six digits. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ */ public class SequenceGenerator { /** * The sequence counter employed by this generator. Please note * that multiple instances of this class will produce the same * sequences! */ private long m_count = 0; /** * The formatting of the number. */ private DecimalFormat m_format; /** * C'tor: This will generate six-digit sequence strings. */ public SequenceGenerator() { this.m_format = new DecimalFormat( "######000000" ); } /** * C'tor: Instances from this constructor will generate ${prefix}xxxxx. * Please note that multiple instances of this class with the same * prefix will produce the same sequences! * * @param digits are the number of digits to use. This value must be at * least one.
* @exception IllegalArgumentException if the number of digits is negative */ public SequenceGenerator( int digits ) { if ( digits < 0 ) throw new IllegalArgumentException( digits + " < 1" ); if ( digits == 0 ) this.m_format = null; else { // first half are optional digit markers, second half mandatory // zero-padded digits, e.g. "###000" for three digits StringBuffer pattern = new StringBuffer( digits << 1 ); pattern.setLength( digits << 1 ); for ( int i=0; i < digits; ++i ) { pattern.setCharAt( i, '#' ); pattern.setCharAt( digits+i, '0' ); } this.m_format = new DecimalFormat( pattern.toString() ); } } " ); w.write( newline ); // start nodes for ( Iterator i=dax.iterateJob(); i.hasNext(); ) { Job job = (Job) i.next(); w.write( " " ); w.write( newline ); } // start edges for ( Iterator c=dax.iterateChild(); c.hasNext();) { Child child = (Child) c.next(); String name = child.getChild(); for ( Iterator p=child.iterateParent(); p.hasNext(); ) { w.write( " " ); w.write( newline ); } } // done w.write(""); w.write( newline ); w.flush(); } /** * Converts a DAGMan .dag file into a Pseudo-DAX. * * @param dag is a File pointing to the DAG file * @return a Pseudo DAX, or null in case of error. * @throws IOException if reading the DAGMan file fails. */ public static ADAG DAGMan2DAX( File dag ) { // sanity check if ( dag == null ) return null; ADAG result = new ADAG(); return result; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/FcntlFileLock.java0000644000175000017500000001602611757531137024163 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.util; import org.griphyn.vdl.util.*; import java.util.*; import java.io.*; /** * This class implements file locking using system-provided lock * functions. On unix, these may most likely include POSIX fcntl locking * calls. While such locks may on some systems be NFS-safe, networked * file locking on Linux is notoriously broken, and cannot be assumed to * work as expected.

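 * A minimal usage sketch (the database base name is made up for
 * illustration):
 *
 *   FcntlFileLock lock = new FcntlFileLock( "/tmp/vds/textdb" );
 *   File db = lock.openReader();      // shared lock, null on failure
 *   if ( db != null ) {
 *     // ... read from db ...
 *     lock.closeReader( db );         // releases the fcntl locks
 *   }
 *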
* * All access to the files must go through the respective open and close * methods provided by this class in order to guarantee proper locking! * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see FcntlLock * @see java.io.File */ public class FcntlFileLock extends FileHelper { /** * File channel lock of database. */ private FcntlLock m_lock_db; /** * File channel lock of nr file. */ private FcntlLock m_lock_nr; /** * Number memory for updates on close. */ private int m_current; /** * Remembers, if this instance is already in use. */ private boolean m_active; /** * Primary ctor: obtain access to a database cycle via database. * @param database is the base name of the database without the * digit suffix. */ public FcntlFileLock( String database ) { super(database); this.m_lock_db = this.m_lock_nr = null; this.m_current = -1; this.m_active = false; } /** * Opens a reader for the database, adjusting the database cycles. * The access can be shared with other simultaneous readers. The * locks on number file and database are held shared. They are * released in the close operation. * * @return a reader opened to the database, or null for failure. * @see #closeReader( File ) */ public synchronized File openReader() { File result = null; // sanity check -- don't reopen without close if ( m_active ) return result; // lock number file shared // read contents of number file, if available int number = -1; if ( m_number.exists() ) { try { m_lock_nr = new FcntlLock( m_number, false, true ); } catch ( IOException e ) { Logging.instance().log( "lock", 0, "while creating lock on " + m_number.getPath() + ": " + e.getMessage() ); return result; } // read number from number file number = readCount(); } else { // if the number file does not exit, DO NOT create it (yet) Logging.instance().log( "lock", 2, "number file " + m_number.getPath() + " does not exist, ignoring lock" ); } // remember number m_current = number; // database file File database = new File( number == -1 ? m_database : m_database + '.' + Integer.toString(number) ); // lock and open database if ( database.exists() ) { try { m_lock_db = new FcntlLock( database, false, true ); } catch ( IOException e ) { Logging.instance().log( "lock", 0, "while creating lock on " + database.getPath() + ": " + e.getMessage() ); return result; } } else { Logging.instance().log( "lock", 1, "database file " + database.getPath() + " does not exist, ignoring" ); } result = database; // exit condition: Both, number file and database are locked using // a shared (read) lock. m_active = ( result != null ); return result; } /** * Opens a writer for the database, adjusting the database cycles * The access is exclusive, and cannot be shared with readers nor * writers. Both locks are exclusive, and held through the close * operation. * * @return a writer opened for the database, or null for failure. 
* @see #closeWriter( File ) */ public synchronized File openWriter() { File result = null; // sanity check -- don't reopen without close if ( m_active ) return result; // lock number file exclusively // read contents of number file, if available int number = -1; if ( m_number.exists() ) { try { m_lock_nr = new FcntlLock( m_number, true, false ); } catch ( IOException e ) { Logging.instance().log( "lock", 0, "while creating lock on " + m_number.getPath() + ": " + e.getMessage() ); return result; } // read number from number file number = readCount(); } else { // if the number file does not exit, DO NOT create it (yet) Logging.instance().log( "lock", 2, "number lock " + m_number.getPath() + " does not exist, ignoring" ); } // generate new file number to write to m_current = number = (number + 1) % 10; // generate database filename File database = new File( m_database + '.' + Integer.toString(number) ); // lock and open database for exclusive access try { m_lock_db = new FcntlLock( database, true, false ); } catch ( IOException e ) { Logging.instance().log( "lock", 0, "while creating lock on " + database.getPath() + ": " + e.getMessage() ); return result; } // valid result result = database; m_active = true; // exit condition: Both, database and number file are locked, using // an exclusive lock. return result; } /** * Closes a previously obtained reader, and releases internal locking * resources. * * @param r is the reader that was created by its open operation. * @return true, if unlocking went smoothly, or false in the presence * of an error. * @see #openReader() */ public synchronized boolean closeReader( File r ) { boolean result = false; // sanity check if ( ! m_active ) return false; result = true; // done with database if ( m_lock_db != null ) m_lock_db.done(); // done with nr file; however, it may not have existed if ( m_lock_nr != null ) m_lock_nr.done(); m_active = false; return result; } /** * Closes a previously obtained writer, and releases internal * locking resources. * * @param w is the instance that was returned by its open operation. * @return true, if the closing went smoothly, false in the presence * of an error. * @see #openWriter() */ public synchronized boolean closeWriter( File w ) { boolean result = false; // sanity check if ( ! m_active ) return result; // done with database m_lock_db.done(); // update number file result = writeCount(m_current); // release shared lock on number file if ( m_lock_nr != null ) m_lock_nr.done(); m_active = false; return result; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/util/VDLType.java0000644000175000017500000000516011757531137022770 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.util; import org.griphyn.vdl.classes.*; /** * This class returns pre-defined type values given the corresponding * strings defined in the XML schema. 
It is used by * VDLContentHandler to convert string-valued type in XML * document to number-valued type in java objects. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see org.griphyn.vdl.parser.VDLContentHandler * */ public class VDLType{ /** * Get the container type, it is either SCALAR or LIST * * @param container is the containerType string * @return the constant corresponded to the container type * * @see org.griphyn.vdl.classes.Value */ static public int getContainerType(String container){ if (container.equals("list")) return Value.LIST; else return Value.SCALAR; } /** * Get the linkage type of a LFN, which can be INPUT, OUTPUT, * INOUT and NONE. * * @param link is the linkType string * @return the constant corresponded to the string * * @see org.griphyn.vdl.classes.LFN */ static public int getLinkType(String link){ if (link.equals("input")) return LFN.INPUT; if (link.equals("output")) return LFN.OUTPUT; if (link.equals("inout")) return LFN.INOUT; return LFN.NONE; } /** * Get the constant for Condor universe * * @param universe is the string for condor universe * @return the constant corresponded to the string * * @see org.griphyn.vdl.classes.Executable */ static public int getUniverse(String universe){ if (universe.equals("standard")) return Executable.CONDOR_STANDARD; if (universe.equals("scheduler")) return Executable.CONDOR_SCHEDULER; if (universe.equals("globus")) return Executable.CONDOR_GLOBUS; if (universe.equals("pvm")) return Executable.CONDOR_PVM; if (universe.equals("mpi")) return Executable.CONDOR_MPI; return Executable.CONDOR_VANILLA; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/planner/0000755000175000017500000000000011757531667021326 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/planner/Graph.java0000644000175000017500000002007111757531137023222 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.planner; import java.io.*; import java.util.*; /** * This class is used to represent the graph form of a workflow. The * graph is represented using adjaceny lists. Each node is the name * of a job in the workflow. The arcs represent dependencies of the * jobs on one another. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision $ */ public class Graph implements Cloneable { /** * The adjacency list representing a graph. */ private Map m_adj; /** * Initializes internal objects to hold the graph. */ private void initialize() { m_adj = new HashMap(); } /** * Default constructor, call the initialize function */ public Graph() { initialize(); } /** * Clones the graph using a deep copy. */ public Object clone() { Graph result = new Graph(); for ( Iterator i = getVertices(); i.hasNext(); ) { String node = (String) i.next(); result.m_adj.put( new String(node), new ArrayList(neighbors(node)) ); } return result; } /** * Reads a stream representing the graph. The format of the stream is: * *

   *   # of vertices 
*   vertex adjacency_list_of_the_vertex
   * 
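 *
 * For illustration, a hypothetical four-vertex diamond graph with the
 * arcs A-B, A-C, B-D and C-D would be encoded as:
 *
 *   4
 *   A B C
 *   B D
 *   C D
 *   D
 *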
* * @param reader is an input file open for reading. */ public Graph( Reader reader ) { String line = null; StringTokenizer token = null; try { LineNumberReader lnr = new LineNumberReader(reader); line = lnr.readLine(); token = new StringTokenizer(line); if ( token.countTokens() != 1 ) throw new Error( "Bad format: number of vertices is first line" ); // number of nodes int n = Integer.parseInt( token.nextToken() ); initialize(); // read rest of graph for ( int u = 0; u < n; ++u ) { line = lnr.readLine(); token = new StringTokenizer(line); if ( token.countTokens() < 1 ) throw new Error( "line " + lnr.getLineNumber() + ": Please specify the vertex and neighbor list." ); // add node to graph String node = token.nextToken(); addVertex(node); // add arcs to graph while ( token.hasMoreTokens() ) { String w = token.nextToken(); ((List) m_adj.get(node)).add(new String(w)); } } } catch (IOException x) { throw new Error("Bad input stream"); } } /** * Provides an iterator over the vertices. * * @return an initialized iterator to walk all vertices. */ public Iterator getVertices() { return m_adj.keySet().iterator(); } /** * Adds a vertex to the adjacency list. The vortex's adjacency * list is initialized to an empty list. If the vortex already * exists, nothing will be done. * * @param node is the name of the vortex to add. * @see #removeVertex( String ) */ public void addVertex( String node ) { if ( ! m_adj.containsKey(node) ) { m_adj.put( new String(node), new ArrayList() ); } } /** * Removes a vortex from the graph. This is an expensive function, * because the vortex must also be removed from all adjacency lists. * * @param node is the name of the vortex to remove. * @see #addVertex( String ) */ public void removeVertex( String node ) { // remove entry for vortex from adjacency list m_adj.remove(node); // remove vortex from all other adjaceny lists for ( Iterator i = getVertices(); i.hasNext(); ) { List v = (List) m_adj.get( (String) i.next() ); v.remove(node); } } /** * Adds a directed edge between two nodes. * * @param v is the source node * @param w is the destination node * @see #removeArc( String, String ) */ public void addArc( String v, String w ) { // skip, if the arc already exists if ( isArc(v,w) ) return; // add arc ((List) m_adj.get(v)).add( new String(w) ); } /** * Removes a directed edge between two nodes. * * @param v is the source node * @param w is the destination node * @see #addArc( String, String ) */ public void removeArc(String v, String w) { ((List) m_adj.get(v)).remove(w); } /** * Predicate to check the existence of a directed edge * between from v to w. * * @param v is the source node * @param w is the destination node */ public boolean isArc( String v, String w ) { return ((List)m_adj.get(v)).contains(w); } /** * Counts the number of incoming arcs (edges) for a given node. * * @param v is the vortex name to count incoming edge for. * @return the number of incoming edges. * @see #outDegree( String ) */ public int inDegree( String v ) { int result = 0; // for all nodes, see if they have an edge to v for ( Iterator i = getVertices(); i.hasNext(); ) { String w = (String) i.next(); if ( isArc(w,v) ) result++; } return result; } /** * Counts the number of outgoing arcs (edges) for a given node. * * @param v is the vortex name to count outgoing edge for. * @return the number of outgoing edges. * @see #inDegree( String ) */ public int outDegree( String v ) { return ((List) m_adj.get(v)).size(); } /** * Determines the neighbors of a given node. 
This is effectively * a copy of the node's adjacency list. * * @param v is the node to determine the neighbors for * @return a copy of the node's adjacency list. */ public List neighbors(String v) { return new ArrayList( (List) m_adj.get(v) ); } /** * Counts the number of nodes (vertices) in a graph. * @return the number of vertices. */ public int order() { return m_adj.size(); } /** * Counts the number of directed edges (arcs) in the graph. * Undirected edges are counted as two directed edges. * * @return number of directed edges. */ public int size() { int result = 0; for ( Iterator i = m_adj.values().iterator(); i.hasNext(); ) result += ((List) i.next()).size(); return result; } /** * Constructs the reverse graph by inverting the direction of * every arc. * @return a new graph which is the reverse of the current one. */ public Graph reverseGraph() { String v = null; Graph result = new Graph(); // copy all nodes for ( Iterator i = getVertices(); i.hasNext(); ) { v = (String) i.next(); result.addVertex(v); } // copy all edges for ( Iterator i = getVertices(); i.hasNext(); ) { v = (String) i.next(); for ( Iterator j = ((List) m_adj.get(v)).iterator(); j.hasNext(); ) { result.addArc( (String) j.next(), v ); } } return result; } /** * Generates a simple string representation of the graph. The format * of the representation is the same as it is read from a stream. * * @return a complete graph as a single string * @see #Graph( Reader ) */ public String toString() { String newline = System.getProperty("line.separator", "\r\n"); StringBuffer result = new StringBuffer(256); // write order of graph (number of nodes) result.append( order() ).append(newline); // write nodes for ( Iterator i = getVertices(); i.hasNext(); ) { String v = (String) i.next(); result.append(v); // write adjacency list for node v for ( Iterator j = ((List) m_adj.get(v)).iterator(); j.hasNext(); ) { result.append(' ').append( (String) j.next() ); } result.append(newline); } // done return result.toString(); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/planner/Topology.java0000644000175000017500000001176511757531137024007 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.planner; import java.util.Map; import java.util.HashMap; import java.util.List; import java.util.ArrayList; import java.util.Iterator; import java.io.*; /** * Class which implements a topological sort of a graph. * * @author Jens-S. Vöckler * @author Yong Zhao * * @version $Revision: 50 $ * @see Graph */ public class Topology { /** * The graph representation. */ private Graph m_graph; /** * Hashtable to map vertex name to array index. */ private Map m_vertexMap; /** * Array to keep the name of each vertex. */ private String[] m_vertices; /** * Array to keep the in-degree of each vertex. */ private int[] m_inDeg; /** * Queue to keep 0 in-degree vertices. 
*/ private List m_inQueue; /** * Constructor, given a graph, construct internal objects * needed for topological sort. * * @param graph is a graph representation. */ public Topology( Graph graph ) { m_graph = (Graph) graph.clone(); init(); } /** * Initializes in-degree array and the corresponding 0 in-degree * queue. */ public void init() { // allocate memory for collections int n = m_graph.order(); m_vertexMap = new HashMap(); m_vertices = new String[n]; m_inDeg = new int[n]; m_inQueue = new ArrayList(); // map nodes int i = 0; for ( Iterator e=m_graph.getVertices(); e.hasNext(); ) { String v = (String) e.next(); m_vertices[i] = new String(v); m_vertexMap.put( new String(v), new Integer(i) ); m_inDeg[i] = m_graph.inDegree(v); ++i; } // put 0 in-degree vertices in the queue for ( i=0; i < n; ++i ) { if ( m_inDeg[i] == 0 ) m_inQueue.add( new Integer(i) ); } } /** * Sorts the graph topologically, repeatedly removing vertices that * have no incoming arcs left. * * @return the vertex names in topological order. */ public String[] sort() { String[] s = new String[ m_graph.order() ]; int cnt = 0; int i; while ( m_inQueue.size() > 0 ) { i = ((Integer)m_inQueue.remove(0)).intValue(); s[cnt++] = new String(m_vertices[i]); List neighbors = m_graph.neighbors( m_vertices[i] ); for ( int j=0; j < neighbors.size(); ++j ) { // decrement the in-degree of each neighbor, queueing it once // no incoming arcs remain int k = ((Integer) m_vertexMap.get( neighbors.get(j) )).intValue(); if ( --m_inDeg[k] == 0 ) m_inQueue.add( new Integer(k) ); } } return s; } } * * The scripts are assembled mostly from template files and substitutions. * The template files reside in $PEGASUS_HOME/share:

*
 *   template            purpose
 *   ------------------  -----------------------------
 *   sp-job-1.tmpl       start of job script
 *   sp-job-2.tmpl       unused
 *   sp-job-3.tmpl       final portion of job script
 *   sp-master-1.tmpl    start of master script
 *   sp-master-2.tmpl    intermediary of master script
 *   sp-master-3.tmpl    final portion of master script
 *   sp-master-job.tmpl  invocation of job from master
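 *
 * Pieced together from the methods below, the master script is thus
 * assembled as sp-master-1.tmpl once, followed by one instantiation of
 * sp-master-job.tmpl per job, sp-master-2.tmpl between stages, and
 * finally sp-master-3.tmpl; compare initializeControlScript(),
 * processJob(), intermediateControlScript() and finalizeControlScript().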
* * The following substitutions are available by default. Some substitutions * are only available during job generation:

*
 *   variable   purpose
 *   ---------  ----------------------------------------------
 *   DAXLABEL   user-given label of the workflow
 *   DV         Job: fully-qualified DV of job
 *   FILELIST   Job: name of file of output mappings
 *   HOME       JRE system property user.home
 *   JOBID      Job: the IDxxxxx of the current job
 *   JOBLOG     Job: the log file of the job
 *   JOBSCRIPT  Job: name of script file for job
 *   KICKSTART  if set, path to local kickstart
 *   LOGFILE    name of master log file
 *   NOW        start time stamp of processing (compile time)
 *   REGISTER   0 or 1 for replica registration
 *   TR         Job: fully-qualified TR of job
 *   USER       JRE system property user.name
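 *
 * For example, assuming the hypothetical values DAXLABEL=blackdiamond
 * and JOBID=ID000001, a template line
 *
 *   echo "job @@JOBID@@ of @@DAXLABEL@@" >> @@LOGFILE@@
 *
 * would be instantiated by copyFromTemplate() as
 *
 *   echo "job ID000001 of blackdiamond" >> blackdiamond.log
 *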
* * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2079 $ * */ public class Scriptor { /** * the directory to put the scripts */ private String m_dirName; /** * the dag structure */ private ADAG m_adag; /** * name of the dag */ private String m_dagName; /** * replica catalog */ private RCWrapper m_rc; /** * site catalog (optional) */ private SCWrapper m_sc; /** * transformation catalog */ private TCWrapper m_tc; /** * the hash that holds all the lfn->pfn mapping */ private HashMap m_filenameMap; /** * the name of the master log file. */ private String m_logFile; /** * whether to register output files */ private boolean m_register; /** * path to kickstart */ private String m_kickstart; /** * buffered writer for control script file */ private BufferedWriter m_master; /** * Stores the reference to the logger. */ private Logging m_log; /** * holds the location where templates reside. */ private File m_dataDir; /** * holds the mapping for permissable substitutions. */ private Map m_substitute = null; /** * a private copy of this environment's notion of a line separator. */ private final static String newline = System.getProperty( "line.separator", "\r\n" ); /** * Constructor * * @param dirName names the directory into which to produce the scripts. * @param adag is the DAX as a parsed data structure in memory. * @param rc is the replica catalog wrapper. * @param sc is the site catalog wrapper, may be null. * @param tc is the transformation catalog wrapper. * @param fnMap is a map containing all filesnames in the DAG. * @param dataDir is the location of $PEGASUS_HOME/share from properties. */ public Scriptor( String dirName, ADAG adag, RCWrapper rc, SCWrapper sc, TCWrapper tc, HashMap fnMap, File dataDir ) { m_dirName = dirName; m_adag = adag; m_dataDir = dataDir; // set dag name m_dagName = adag.getName(); if ( m_dagName == null ) m_dagName = m_dirName; m_rc = rc; m_sc = sc; m_tc = tc; m_filenameMap = fnMap; m_logFile = m_dagName + ".log"; m_register = true; m_kickstart = null; if ( m_sc != null ) { String kl = m_sc.getGridLaunch(); if ( kl != null ) { File k = new File(kl); if ( k.exists() ) m_kickstart = kl; } } m_log = Logging.instance(); // prepare substitutions m_substitute = new TreeMap(); m_substitute.put( "NOW", Currently.iso8601(false,true,false,new Date()) ); m_substitute.put( "DAXLABEL", m_dagName ); m_substitute.put( "USER", System.getProperty("user.name") ); m_substitute.put( "HOME", System.getProperty("user.home") ); m_substitute.put( "LOGFILE", m_logFile ); if ( m_kickstart != null ) m_substitute.put( "KICKSTART", m_kickstart ); m_substitute.put( "REGISTER", m_register ? "1" : "0" ); } /** * Sets the flag indicating whether to register output files. * @param b is a flag to set the registration state. * @see #getRegister() */ public void setRegister( boolean b ) { this.m_register = b; addSubstitution( "REGISTER", b ? "1" : "0" ); } /** * Gets the flag indicating whether to register output files. * @return true, if output files are going to be registered. * @see #setRegister(boolean) */ public boolean getRegister() { return this.m_register; } /** * Sets kickstart path, if the path is null, kickstart will not be used. * @param kickstart the path to invoke kickstart * @see #getKickstart() */ public void setKickstart(String kickstart) { m_kickstart = kickstart; if ( kickstart != null ) addSubstitution( "KICKSTART", kickstart ); else removeSubstitution( "KICKSTART" ); } /** * Gets the current kickstart path. The location may be null. 
* @return the path to kickstart, or null * @see #setKickstart( String ) */ public String getKickstart() { return this.m_kickstart; } /** * Inserts a substitution into the substitutable variables. * * @param key is the template variable name * @param value is the replacement * @return the previous setting, or null. * @see #getSubstitution( String ) */ public String addSubstitution( String key, String value ) { return (String) m_substitute.put(key,value); } /** * Obtains the setting of a substitutable variable. * @param key is the template variable name to query for. * @return the current setting, or null, if the * variable does not exist. * @see #addSubstitution( String, String ) */ public String getSubstitution( String key ) { String result = null; if ( m_substitute.containsKey(key) ) { result = (String) m_substitute.get(key); if ( result == null ) result = new String(); } return result; } /** * Removes a substition. * @param key is the template variable name to query for. * @return the current setting, or null, if the * variable does not exist. * @see #addSubstitution( String, String ) */ public String removeSubstitution( String key ) { return (String) m_substitute.remove(key); } /** * Writes the control script head, including functions for file * registration. * * @return the name of the control (master) script. * @throws IOException if writing to the master script somehow failes. */ public String initializeControlScript() throws IOException { // control script output filename String controlScript = m_dagName + ".sh"; File controlFile = new File( m_dirName, controlScript ); String fullPath = controlFile.getAbsolutePath(); // existence checks before overwriting if ( controlFile.exists() ) { m_log.log( "planner", 0, "Warning: Master file " + fullPath + " already exists, overwriting"); controlFile.delete(); } // open master for writing m_master = new BufferedWriter( new FileWriter(controlFile) ); // copy template while substituting m_log.log( "planner", 1, "writing control script header" ); copyFromTemplate( m_master, "sp-master-1.tmpl" ); // done return controlScript; } /** * Adds scripts between stages. * @exception IOException if adding to the master script fails for * some reason. */ public void intermediateControlScript() throws IOException { m_log.log( "planner", 1, "writing control script between stages" ); copyFromTemplate( m_master, "sp-master-2.tmpl" ); } /** * Write the control script tail to the control file. * @exception IOException if adding to the master script fails for * some reason. */ public void finalizeControlScript() throws IOException { m_log.log( "planner", 1, "writing control script tail" ); copyFromTemplate( m_master, "sp-master-3.tmpl" ); // close master m_master.flush(); m_master.close(); m_master = null; } /** * Converts a variable into the substituted value. Most of this is just * a hash lookup, but some are more dynamic. * * @param key is the variable to replace * @return the replacement string, which may be empty, never null. */ private String convertVariable( String key ) { if ( key.equals("NOW") ) { return Currently.iso8601(false,true,false,new Date()); } else { return getSubstitution(key); } } /** * Copies a template file into the open writer. During copy, * certain substitutions may take place. The substitutable variables * are dynamically adjusted from the main class. * * @param w is the writer open for writing. * @param tfn is the template base file name. * @throws IOException in case some io operation goes wrong. 
*/ public void copyFromTemplate( Writer w, String tfn ) throws IOException { // determine location File source = new File( m_dataDir, tfn ); if ( source.exists() ) { // template exists, use it LineNumberReader lnr = new LineNumberReader( new FileReader(source) ); String line, key, value; while ( (line = lnr.readLine()) != null ) { StringBuffer sb = new StringBuffer(line); // substitute all substitutables int circuitBreaker = 0; for ( int p1 = sb.indexOf("@@"); p1 != -1; p1 = sb.indexOf("@@") ) { // check for the closing marker before skipping past it int p2 = sb.indexOf( "@@", p1+2 ); if ( p2 == -1 ) throw new IOException( "unclosed @@var@@ element" ); p2 += 2; key = sb.substring( p1+2, p2-2 ); if ( (value = convertVariable(key)) == null ) { // does not exist m_log.log( "planner", 0, "Warning: " + source +":" + lnr.getLineNumber() + ": Requesting unknown substitution for " + key ); value = new String(); } else { // protocol substitution m_log.log( "planner", 3, "Substituting " + key + " => " + value ); } sb.replace( p1, p2, value ); if ( ++circuitBreaker > 32 ) { m_log.log( "planner", 0, "Warning: " + lnr.getLineNumber() + ": circuit breaker triggered" ); break; } } w.write(sb.toString()); w.write(newline); } // free file handle resource lnr.close(); } else { // template does not exist throw new IOException( "template " + tfn + " not found" ); } } /** * Processes each job in the adag. Also checks for input file * existence, if necessary. * * @param jobID is the DAX-unique job id to generate a script for. * @param checkInputFiles if set, checks in the filesystem for the * existence of all input files of the job. * @return the name of the job control script. * @throws IOException for failure to write any job related files. */ public String processJob( String jobID, boolean checkInputFiles ) throws IOException { Logging.instance().log( "planner", 0, "processing job: " + jobID ); // get the job reference from ADAG Job job = m_adag.getJob(jobID); // script file for this job String scriptBase = job.getName() + "_" + jobID; String scriptFile = scriptBase + ".sh"; // file to hold the output file list String outputList = scriptBase + ".lst"; File of = new File( m_dirName, outputList ); String outputFullPath = of.getAbsolutePath(); if ( of.exists() ) { m_log.log( "planner", 0, "Warning: output list file " + outputList + " already exists, overwriting"); of.delete(); } // add to substitutions - temporarily addSubstitution( "JOBSCRIPT", scriptFile ); addSubstitution( "FILELIST", outputList ); addSubstitution( "JOBID", jobID ); addSubstitution( "TR", Separator.combine( job.getNamespace(), job.getName(), job.getVersion()) ); addSubstitution( "DV", Separator.combine( job.getDVNamespace(), job.getDVName(), job.getDVVersion()) ); // create file with all mappings for just this job BufferedWriter obw = new BufferedWriter( new FileWriter(of) ); Map lfnMap = new HashMap(); // store mappings for job for ( Iterator i = job.iterateUses(); i.hasNext(); ) { Filename fn = (Filename) i.next(); String lfn = fn.getFilename(); // look up LFN in hash String pfn = (String) m_filenameMap.get(lfn); if ( pfn == null ) { // can't find the lfn in the filename list m_log.log( "planner", 0, "ERROR: LFN " + lfn + " is not in the " + "filename list, please check the DAX!" ); return null; } else { lfnMap.put(lfn, pfn); } // check if input files exist if ( checkInputFiles ) { if ( fn.getLink() == LFN.INPUT ) { if ( ! 
(new File(pfn)).canRead() ) { m_log.log( "planner", 0, "Warning: Unable to read LFN " + lfn ); } } } // write the output file list entry: LFN PFN [abs] if ( fn.getLink() == LFN.OUTPUT ) { obw.write( lfn + " " + pfn + newline ); } } // finish writing file of output files obw.flush(); obw.close(); // generate the script for this job boolean result = generateJobScript( job, scriptFile, outputList, lfnMap ); if ( result ) { // OK: now add script invocation to master m_log.log( "planner", 1, "adding job " + jobID + " to master script" ); copyFromTemplate( m_master, "sp-master-job.tmpl" ); } else { m_log.log( "planner", 0, "Warning: ignoring script " + scriptFile ); } // always clean up removeSubstitution( "JOBSCRIPT" ); removeSubstitution( "FILELIST" ); removeSubstitution( "JOBID" ); removeSubstitution( "TR" ); removeSubstitution( "DV" ); return ( result ? scriptFile : null ); } /** * Extracts all profiles contained within the job description. * * @param job is the job description from the DAX * @param lfnMap is the mapping to PFNs. * @return a map of maps. The outer map is indexed by the lower-cased * namespace identifier. The inner map is indexed by the key within * the particular namespace. An empty map is possible. */ private Map extractProfilesFromJob( Job job, Map lfnMap ) { Map result = new HashMap(); Map submap = null; for ( Iterator i = job.iterateProfile(); i.hasNext(); ) { org.griphyn.vdl.dax.Profile p = (org.griphyn.vdl.dax.Profile) i.next(); String ns = p.getNamespace().trim().toLowerCase(); String key = p.getKey().trim(); // recreate the vlaue StringBuffer sb = new StringBuffer(8); for ( Iterator j = p.iterateLeaf(); j.hasNext(); ) { Leaf l = (Leaf)j.next(); if ( l instanceof PseudoText ) { sb.append( ((PseudoText)l).getContent() ); } else { String lfn = ((Filename)l).getFilename(); sb.append( (String) lfnMap.get(lfn) ); } } String value = sb.toString().trim(); // insert at the right place into the result map if ( result.containsKey(ns) ) { submap = (Map) result.get(ns); } else { result.put( ns, (submap = new HashMap()) ); } submap.put( key, value ); } return result; } /** * Combines profiles from two map of maps, with regards to priority. * * @param high is the higher priority profile * @param low is the lower priority profile * @return a new map with the combination of the two profiles */ private Map combineProfiles( Map high, Map low ) { Set allKeys = new TreeSet( low.keySet() ); allKeys.addAll( high.keySet() ); Map result = new HashMap(); for ( Iterator i=allKeys.iterator(); i.hasNext(); ) { String key = (String) i.next(); boolean h = high.containsKey(key); boolean l = low.containsKey(key); if ( h && l ) { Map temp = new HashMap( (Map) low.get(key) ); temp.putAll( (Map) high.get(key) ); result.put( key, temp ); } else { if ( h ) result.put( key, high.get(key) ); else result.put( key, low.get(key) ); } } return result; } /** * Extracts the environment settings from the combined profiles. * * @param profiles is the combined profile map of maps * @return a string with combined profiles, or null, * if not applicable. 
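* <p>For example, a single env profile with key GLOBUS_LOCATION and
* value /usr/globus (hypothetical) is rendered as the shell fragment
* <pre>
*   GLOBUS_LOCATION='/usr/globus'; export GLOBUS_LOCATION
* </pre>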
*/ private String extractEnvironment( Map profiles ) { String result = null; if ( profiles.containsKey("env") ) { StringBuffer sb = new StringBuffer(); Map env = (Map) profiles.get("env"); for ( Iterator i=env.keySet().iterator(); i.hasNext(); ) { String key = (String) i.next(); String value = (String) env.get(key); sb.append(key).append("='").append(value); sb.append("'; export ").append(key).append(newline); } result = sb.toString(); } return result; } /** * Generates the script for each job. * * @param job is an ADAG job for which to generate the script. * @param scriptFile is the basename of the script for the job. * @param outputList is the name of a file containing output files. * @param lfnMap is a map of LFN to PFN. * @return true if all is well, false to signal an error */ private boolean generateJobScript( Job job, String scriptFile, String outputList, Map lfnMap ) throws IOException { String jobID = job.getID(); File f = new File( m_dirName, scriptFile ); String scriptFullPath = f.getAbsolutePath(); if ( f.exists() ) { m_log.log( "planner", 1, "Warning: Script file " + scriptFile + " already exists, overwriting" ); f.delete(); } // kickstart output file // String kickLog = scriptFullPath.substring(0,scriptFullPath.length()-3) + ".out"; String kickLog = scriptFile.substring( 0, scriptFile.length()-3 ) + ".out"; BufferedWriter bw = new BufferedWriter( new FileWriter(f) ); copyFromTemplate( bw, "sp-job-1.tmpl" ); // full definition name of this job's transformation String fqdn = Separator.combine( job.getNamespace(), job.getName(), job.getVersion()); // extract TR profiles Map tr_profiles = extractProfilesFromJob(job,lfnMap); // lookup job in TC List tc = m_tc.lookup( job.getNamespace(), job.getName(), job.getVersion(), "local" ); if ( tc == null || tc.size() == 0 ) { m_log.log( "planner", 0, "ERROR: Transformation " + fqdn + " on site \"local\" not found in TC" ); return false; } else if ( tc.size() > 1 ) { m_log.log( "planner", 0, "Warning: Found " + tc.size() + " matches for " + fqdn + " in TC, using first" ); } TransformationCatalogEntry tce = (TransformationCatalogEntry) tc.get(0); // extract SC profiles Map sc_profiles = ( m_sc == null ? new HashMap() : m_sc.getProfiles() ); // extract TC profiles Map tc_profiles = m_tc.getProfiles(tce); // combine profiles by priority Map temp = combineProfiles( tc_profiles, sc_profiles ); Map profiles = combineProfiles( temp, tr_profiles ); // pfnHint has been deprecated ! if ( profiles.containsKey("hints") ) { m_log.log( "planner", 0, "Warning: The hints profile namespace " + "has been deprecated, ignoring keys " + ((Map) profiles.get("hints")).keySet().toString() ); } // assemble environment variables from profile String executable = tce.getPhysicalTransformation(); String environment = extractEnvironment(profiles); // for web service boolean service = profiles.containsKey("ws"); String invokews = null; String wsenv = null; if ( service ) { // lookup special web service invocation executable tc = m_tc.lookup( null, "invokews", null, "local" ); if ( tc == null || tc.size() == 0 ) { // not found m_log.log( "planner", 0, "ERROR: Transformation invokews not found!" 
); return false; } else if ( tc.size() > 1 ) { m_log.log( "planner", 0, "Warning: Found " + tc.size() + " matches for invokews in TC, using first" ); } tce = (TransformationCatalogEntry) tc.get(0); invokews = tce.getPhysicalTransformation(); // combine profiles by priority temp = combineProfiles( m_tc.getProfiles(tce), sc_profiles ); // wsenv = extractEnvironment( combineProfiles( temp, tr_profiles ) ); wsenv = extractEnvironment( temp ); } // collect commandline arguments for invocation StringBuffer argument = new StringBuffer(); for ( Iterator i = job.iterateArgument(); i.hasNext(); ) { Leaf l = (Leaf) i.next(); if ( l instanceof PseudoText ) { argument.append( ((PseudoText)l).getContent() ); } else { String lfn = ((Filename)l).getFilename(); argument.append( lfnMap.get(lfn) ); } } StringBuffer ks_arg = null; if ( m_kickstart != null ) { ks_arg = new StringBuffer(80); ks_arg.append("-R local -l " ).append(kickLog); ks_arg.append(" -n \"").append( getSubstitution("TR") ); ks_arg.append("\" -N \"").append( getSubstitution("DV") ); ks_arg.append('"'); } // process stdin Filename fn = job.getStdin(); if ( fn != null ) { if ( m_kickstart != null ) { ks_arg.append(" -i ").append( (String) lfnMap.get(fn.getFilename()) ); } else { argument.append(" < ").append( (String) lfnMap.get(fn.getFilename()) ); } } // process stdout fn = job.getStdout(); if ( fn != null ) { if ( m_kickstart != null ) { ks_arg.append(" -o ").append( (String) lfnMap.get(fn.getFilename()) ); } else { argument.append(" > ").append( (String) lfnMap.get(fn.getFilename()) ); } } // process stderr fn = job.getStderr(); if ( fn != null ) { if ( m_kickstart != null ) { ks_arg.append(" -e ").append( (String) lfnMap.get(fn.getFilename()) ); } else { argument.append(" 2> ").append( (String) lfnMap.get(fn.getFilename()) ); } } // environment of job if ( environment != null ) { bw.write( "# regular job environment setup" + newline + environment + newline ); } if ( service ) { // // web service invocation // Map in = (Map) profiles.get("ws"); Map out = new HashMap( in.size() ); for ( Iterator i=in.keySet().iterator(); i.hasNext(); ) { String key = (String) i.next(); String value = (String) in.get(key); out.put( key.trim().toLowerCase(), value.trim() ); } // check that all required arguments are present if ( ! ( out.containsKey("porttype") && out.containsKey("operation") && out.containsKey("input") ) ) { m_log.log( "planner", 0, "ERROR: You must specify portType, operation, and input " + "for a web service invocation!" ); return false; } // extra environment for web service? 
if ( wsenv != null ) { bw.write( "# extra WS invocation environment" + newline + wsenv + newline ); } // invocation of web service bw.write( invokews + " -I " + out.get("input") ); if ( out.containsKey("output") ) bw.write( " -O " + out.get("output") ); // rest of invocation bw.write( " -p " + out.get("porttype") + " -o " + out.get("operation") + " " + executable + newline ); } else { // // call the executable with argument in the script // if ( m_kickstart != null ) bw.write( m_kickstart + " " + ks_arg.toString() + " " ); bw.write( executable + " " + argument + newline ); } copyFromTemplate( bw, "sp-job-3.tmpl" ); // done bw.flush(); bw.close(); return true; } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/planner/test.graph0000644000175000017500000000020211757531137023312 0ustar ryngerynge8 0 2 3 1 3 4 2 6 3 5 4 5 5 6 7 6 7 # 0 1 # / \ / \ # 2 3 4 # \ \ / # \ 5 # \ / \ # 6 7 pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/planner/DAX2Graph.java0000644000175000017500000000446311757531137023650 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.planner; import java.util.Iterator; import org.griphyn.vdl.dax.*; import org.griphyn.vdl.planner.Graph; /** * This class converts a given DAX into the internal representation of a * graph. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * * @see Graph */ public class DAX2Graph { /** * Converts a DAX into the internal representation of a graph. * * @param adag is the parsed DAX's internal representation. * @return our internal representation of a graph that we can sort on. */ public static Graph DAG2Graph( ADAG adag ) { Graph result = new Graph(); // add all nodes for ( Iterator i=adag.iterateJob(); i.hasNext(); ) result.addVertex( ((Job)i.next()).getID() ); // add all edges for ( Iterator i=adag.iterateChild(); i.hasNext(); ) { Child c = (Child) i.next(); String child = c.getChild(); for ( Iterator j=c.iterateParent(); j.hasNext(); ) { String parent = (String) j.next(); result.addArc( parent, child ); } } return result; } /** * Simple test program. */ public static void main(String[] args) { // construct a fake diamond DAG as DAX w/o any real transformations. ADAG adag = new ADAG(); Job A = new Job(); Job B = new Job(); Job C = new Job(); Job D = new Job(); A.setID("A"); B.setID("B"); C.setID("C"); D.setID("D"); adag.addJob(A); adag.addJob(B); adag.addJob(C); adag.addJob(D); adag.addChild("C","A"); adag.addChild("C","B"); adag.addChild("D","C"); // convert DAX into graph Graph g = DAG2Graph(adag); // show System.out.println( g.toString() ); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/planner/Wrapper.java0000644000175000017500000000201211757531137023574 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. 
This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.planner; /** * This interface categorizes the various catalog accessors. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 50 $ * */ public interface Wrapper { /** * Frees resources taken by the instance of the underlying catalog. This * method is safe to be called on failed or already closed catalogs. */ public void close(); } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/planner/RCWrapper.java0000644000175000017500000001154111757531137024030 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.planner; import edu.isi.pegasus.common.util.CommonProperties; import edu.isi.pegasus.planner.catalog.replica.ReplicaCatalogEntry; import edu.isi.pegasus.planner.catalog.replica.ReplicaFactory; import edu.isi.pegasus.planner.catalog.ReplicaCatalog; import java.io.*; import java.util.*; import java.lang.reflect.*; /** * This class wraps the shell planner's request into the new RC API. * * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2585 $ * */ public class RCWrapper implements Wrapper { /** * replica catalog API reference. */ private ReplicaCatalog m_rc; /** * Connects the interface with the replica catalog implementation. The * choice of backend is configured through properties. * * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. * @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class is not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. */ public RCWrapper() throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException, MissingResourceException { m_rc = ReplicaFactory.loadInstance( CommonProperties.instance() ); } /** * Connects the interface with the replica catalog implementation. The * choice of backend is configured through properties. * * @param props is an already instantiated version of the properties. * @exception ClassNotFoundException if the schema for the database * cannot be loaded. You might want to check your CLASSPATH, too. 
* @exception NoSuchMethodException if the schema's constructor interface * does not comply with the database driver API. * @exception InstantiationException if the schema class is an abstract * class instead of a concrete implementation. * @exception IllegalAccessException if the constructor for the schema * class it not publicly accessible to this package. * @exception InvocationTargetException if the constructor of the schema * throws an exception while being dynamically loaded. * */ public RCWrapper( CommonProperties props ) throws ClassNotFoundException, IOException, NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException { m_rc = ReplicaFactory.loadInstance( props ); } /** * Frees resources taken by the instance of the replica catalog. This * method is safe to be called on failed or already closed catalogs. */ public void close() { if ( m_rc != null ) { m_rc.close(); m_rc = null; } } /** * garbage collection. */ protected void finalize() { close(); } /** * Find the (first) physical filename for the logical file and * resource. * * @param pool is the pool, site or resource name. * @param lfn is the logical filename (LFN) to look up. * @return the physical entity, or null if not found. */ public String lookup( String pool, String lfn ) { // sanity check if ( m_rc == null ) return null; return m_rc.lookup( lfn, pool ); } /** * Obtains the name of the class implementing the replica catalog. * * @return class name of the replica catalog implementor. */ public String getName() { return ( m_rc == null ? null : m_rc.getClass().getName() ); } /** * Shows the contents of the catalog as one string. * * @return the string with the complete catalog contents. Warning, * this may be very large, slow, and memory expensive. */ public String toString() { // sanity check if ( m_rc == null ) return null; Map query = new TreeMap(); query.put( ReplicaCatalogEntry.RESOURCE_HANDLE, "local" ); Map reply = new TreeMap( m_rc.lookup(query) ); return reply.toString(); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/planner/SCWrapper.java0000644000175000017500000001254111757531137024032 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.planner; import java.io.*; import java.util.*; import org.griphyn.vdl.util.Logging; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.SiteInfo; import edu.isi.pegasus.planner.classes.Profile; import edu.isi.pegasus.planner.catalog.site.impl.old.classes.WorkDir; import edu.isi.pegasus.planner.catalog.site.impl.old.PoolMode; import edu.isi.pegasus.planner.catalog.site.impl.old.PoolInfoProvider; import edu.isi.pegasus.planner.common.PegasusProperties; /** * This class wraps the shell planner's request into the new site * catalog API. The site catalog is only queried for the contents of its * "local" special site. * * @author Jens-S. 
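* <p>A minimal usage sketch; the site handle and LFN are hypothetical,
* and exception handling is elided:
* <pre>
*   RCWrapper rc = new RCWrapper();            // backend chosen via properties
*   String pfn = rc.lookup( "local", "f.a" );  // first PFN for LFN "f.a"
*   rc.close();
* </pre>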
Vöckler * @author Yong Zhao * @version $Revision: 2572 $ * */ public class SCWrapper implements Wrapper { /** * site catalog API reference. */ private PoolInfoProvider m_sc = null; /** * Connects the interface with the site catalog implementation. The * choice of backend is configured through properties. */ public SCWrapper() { try { PegasusProperties p = PegasusProperties.nonSingletonInstance(); String poolClass = PoolMode.getImplementingClass( p.getPoolMode() ); m_sc = PoolMode.loadPoolInstance( poolClass, p.getPoolFile(), PoolMode.NON_SINGLETON_LOAD ); } catch ( Exception e ) { Logging.instance().log( "planner", 0, "Warning: While loading SC: " + e.getMessage() + ", ignoring" ); m_sc = null; } } /** * Frees resources taken by the instance of the site catalog. This * method is safe to be called on failed or already closed catalogs. */ public void close() { if ( m_sc != null ) m_sc = null; } /** * garbage collection. */ protected void finalize() { close(); } /** * Determines the working directory for the site "local". * * @return the working directory, or null, if * not available. */ public String getWorkingDirectory() { // sanity check if ( m_sc == null ) return null; String result = null; try { result = m_sc.getExecPoolWorkDir("local"); } catch ( NullPointerException npe ) { // noop } // sanitization if ( result != null && result.length() == 0 ) result = null; return result; } /** * Determines the path to the local installation of a grid launcher * for site "local". * * @return the path to the local kickstart, or null, if * not available. */ public String getGridLaunch() { // sanity check if ( m_sc == null ) return null; String result = null; try { SiteInfo siv = m_sc.getPoolEntry( "local", "vanilla" ); SiteInfo sit = m_sc.getPoolEntry( "local", "transfer" ); if ( siv != null ) { result = siv.getKickstartPath(); } else if ( sit != null ) { result = sit.getKickstartPath(); } } catch ( NullPointerException npe ) { // noop } // sanitization if ( result != null && result.length() == 0 ) result = null; return result; } /** * Gathers all profiles declared for pool local. * * @return a map of maps, the outer map indexed by the profile * namespace, and the inner map indexed by the key in the profile. * Returns null in case of error. */ public Map getProfiles() { Map result = new HashMap(); // sanity checks if ( m_sc == null ) return null; // ask site catalog List lop = m_sc.getPoolProfile("local"); // return empty maps now, if there are no profiles if ( lop == null || lop.size() == 0 ) return result; Map submap; for ( Iterator i=lop.iterator(); i.hasNext(); ) { edu.isi.pegasus.planner.classes.Profile p = ( edu.isi.pegasus.planner.classes.Profile) i.next(); String ns = p.getProfileNamespace().trim().toLowerCase(); String key = p.getProfileKey().trim(); String value = p.getProfileValue(); // insert at the right place into the result map if ( result.containsKey(ns) ) { submap = (Map) result.get(ns); } else { result.put( ns, (submap = new HashMap()) ); } submap.put( key, value ); } return result; } /** * Obtains the name of the class implementing the site catalog. * * @return class name of the site catalog implementor. */ public String getName() { return ( m_sc == null ? null : m_sc.getClass().getName() ); } /** * Shows the contents of the catalog as one string. Warning, this may * be very large, slow, and memory expensive. * * @return the string with the complete catalog contents. * @throws RuntimeException because the method is not implemented. 
*/ public String toString() { throw new RuntimeException("method not implemented"); } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/euryale/0000755000175000017500000000000011757531667021335 5ustar ryngeryngepegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/euryale/DAXParser.java0000644000175000017500000006311711757531137023771 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.euryale; import org.griphyn.vdl.dax.*; import org.griphyn.vdl.classes.LFN; import org.griphyn.vdl.toolkit.Toolkit; import org.griphyn.vdl.util.Logging; import org.griphyn.vdl.util.VDLType; import org.xml.sax.*; import org.xml.sax.helpers.DefaultHandler; import javax.xml.parsers.*; import java.io.*; import java.util.Iterator; import java.util.ArrayList; import java.util.List; import java.util.HashMap; import java.util.Map; import java.util.Stack; /** * This class uses the xerces SAX2 parser to validate and parse an DAX * document. This class extends the xerces DefaultHandler so that we * only need to override callbacks of interest. * * @author Kavitha Ranganathan * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 2575 $ */ public class DAXParser extends DefaultHandler { /** * class name of the SAX parser. */ private static String vendorParserClass = "org.apache.xerces.parsers.SAXParser"; /** * our own instance of the SAX parser. */ private XMLReader m_parser; /** * maintain the hierarchy for some debug printing */ private int m_depth; /** * collects the information for a section 2 job element. */ private Job m_job; /** * collects the leaves for a profile that is part of a job. */ private Profile m_profile; /** * maintains the currently viewed section 3 child element. */ private String m_child; /** * collects the parents associated with a particular child. */ private java.util.List m_parent; /** * maintains the indications which parent element to be used whenever * a filename tag is being encountered. */ private int m_tag; // possible values for m_tag static final int TAG_ADAG = 0; static final int TAG_ARGUMENT = 1; static final int TAG_PROFILE = 2; static final int TAG_OTHER = 3; /** * Keep the location within the document */ private Locator m_location; /** * A Hashmap to forward resolve namespaces that were encountered * during parsing. */ private Map m_forward; /** * A Hashmap to reverse resolve namespaces that were encountered * during parsing. */ private Map m_reverse; /** * Obtain our logger once for multiple uses. */ private Logging m_log; /** * Maintains the callback class to provide the information to. */ private Callback m_callback; /** * Sets a feature while capturing failed features right here. * * @param uri is the feature's URI to modify * @param flag is the new value to set. 
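* <p>For example, the constructor enables XML validation with
* <pre>
*   set( "http://xml.org/sax/features/validation", true );
* </pre>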
* @return true, if the feature could be set, false for an exception */ private boolean set( String uri, boolean flag ) { boolean result = false; try { this.m_parser.setFeature( uri, flag ); result = true; } catch ( SAXException se ) { Logging.instance().log( "default", 0, "Could not set parser feature " + se.getMessage() ); } return result; } /** * The class constructor initializes the Xerces parser, sets the * classes that hold the callback functions, and the features that * enable schema validation. * * @param schemaLocation is any URI pointing to the XML schema definition. */ public DAXParser( String schemaLocation ) { // member variables this.m_callback = null; this.m_child = null; this.m_parent = null; // parser related members this.m_forward = new HashMap(); this.m_reverse = new HashMap(); this.m_log = Logging.instance(); try { m_parser = (XMLReader) Class.forName(vendorParserClass).newInstance(); m_parser.setContentHandler(this); // m_parser.setErrorHandler(this); m_parser.setErrorHandler( new edu.isi.pegasus.planner.parser.XMLErrorHandler() ); set( "http://xml.org/sax/features/validation", true ); set( "http://apache.org/xml/features/validation/dynamic", true ); set( "http://apache.org/xml/features/validation/schema", true ); // time+memory consuming, see http://xml.apache.org/xerces2-j/features.html // set( "http://apache.org/xml/features/validation/schema-full-checking", true ); // Send XML Schema element default values via characters(). set( "http://apache.org/xml/features/validation/schema/element-default", true ); set( "http://apache.org/xml/features/validation/warn-on-duplicate-attdef", true ); // mysteriously, this one fails with recent Xerces // set( "http://apache.org/xml/features/validation/warn-on-undeclared-elemdef", true ); set( "http://apache.org/xml/features/warn-on-duplicate-entitydef", true ); // set the schema default location. if ( schemaLocation != null ) { setSchemaLocations( ADAG.SCHEMA_NAMESPACE + ' ' + schemaLocation ); m_log.log("app", 2, "will use " + schemaLocation ); } else { m_log.log("app", 2, "will use document schema hint" ); } } catch (ClassNotFoundException e) { m_log.log( "default", 0, "The SAXParser class was not found: " + e); } catch (InstantiationException e) { m_log.log( "default", 0, "The SAXParser class could not be instantiated: " + e); } catch (IllegalAccessException e) { m_log.log( "default", 0, "The SAXParser class could not be accessed: " + e); } } /** * Obtains the current instance to be used for callbacks. * * @return the current callback instance object, or null. * @see #setCallback( Callback ) */ public Callback getCallback() { return this.m_callback; } /** * Sets a new callback object to use for future callbacks. * * @param callback is the new callback object. * @see #getCallback() */ public void setCallback( Callback callback ) { this.m_callback = callback; } /** * Set the list of external real locations where the XML schema may be found. * Since this list can be determined at run-time through properties etc., we * expect this function to be called between instantiating the parser, and * using the parser. * * @param list is a list of strings representing schema locations. The content * exists in pairs, one of the namespace URI, one of the location URL. 
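* <p>For example, with a hypothetical local copy of the DAX schema:
* <pre>
*   parser.setSchemaLocations( ADAG.SCHEMA_NAMESPACE + ' ' + "/opt/vds/etc/dax.xsd" );
* </pre>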
*/ public void setSchemaLocations( String list ) { // schema location handling try { m_parser.setProperty( "http://apache.org/xml/properties/schema/external-schemaLocation", list ); } catch ( SAXException se ) { m_log.log( "default", 0, "The SAXParser reported an error: " + se ); } } /** * This function parses a DAX source (could be a document, a stream, * etc.), and creates java class instances that correspond to the DAX. * These will provided to the callback functions instead of being * collected here in memory. * * @param daxURI is the URI for the DAX source. * @return true for valid parsing, false if an error occurred. */ public boolean parse( String daxURI ) { boolean result = false; if ( m_callback == null ) { m_log.log( "default", 0, "Error: Programmer forgot to provide a callback" ); return result; } try { InputSource inputSource = new InputSource(daxURI); m_parser.parse( inputSource ); result = true; } catch (SAXException e) { m_log.log( "default", 0, "SAX Error: " + e ); } catch (IOException e) { m_log.log( "default", 0, "IO Error: " + e ); } return result; } /** * This function parses a DAX source (could be a document, a stream, * etc.), and creates java class instances that correspond to the DAX. * These will provided to the callback functions instead of being * collected here in memory. * * @param stream is an input stream for the DAX source. * @return true for valid parsing, false if an error occurred. */ public boolean parse( InputStream stream ) { boolean result = false; if ( m_callback == null ) { m_log.log( "default", 0, "Error: Programmer forgot to provide a callback" ); return result; } try { InputSource inputSource = new InputSource(stream); m_parser.parse(inputSource); result = true; } catch (SAXException e) { m_log.log( "default", 0, "SAX Error: " + e ); } catch (IOException e) { m_log.log( "default", 0, "IO Error: " + e ); } return result; } // // here starts the implementation to the Interface // /** * Obtains the document locator from the parser. The document location * can be used to print debug information, i.e the current location * (line, column) in the document. * * @param locator is the externally set current position */ public void setDocumentLocator( Locator locator ) { this.m_location = locator; } /** * This method specifies what to do when the parser is at the beginning * of the document. In this case, we simply print a message for debugging. */ public void startDocument() { m_depth = 0; m_log.log( "parser", 1, "*** start of document ***" ); } /** * The parser comes to the end of the document. */ public void endDocument() { m_log.log( "parser", 1, "*** end of document ***" ); } /** * There is a prefix or namespace defined, put the prefix and its URI * in the HashMap. We can get the URI when the prefix is used here after. * * @param prefix the Namespace prefix being declared. * @param uri the Namespace URI the prefix is mapped to. */ public void startPrefixMapping( java.lang.String prefix, java.lang.String uri ) throws SAXException { String p = prefix == null ? null : new String(prefix); String u = uri == null ? null : new String(uri); m_log.log( "parser", 2, "adding \"" + p + "\" <=> " + u ); if ( ! this.m_forward.containsKey(p) ) this.m_forward.put(p, new Stack()); ((Stack) this.m_forward.get(p)).push(u); if ( ! this.m_reverse.containsKey(u) ) this.m_reverse.put(u, new Stack()); ((Stack) this.m_reverse.get(u)).push(p); } /** * Out of the reach of the prefix, remove it from the HashMap. * * @param prefix is the prefix that was being mapped previously. 
*/ public void endPrefixMapping( java.lang.String prefix ) throws SAXException { String u = (String) ((Stack) this.m_forward.get(prefix)).pop(); String p = (String) ((Stack) this.m_reverse.get(u)).pop(); m_log.log( "parser", 2, "removed \"" + p + "\" <=> " + u ); } /** * Helper function to map prefixes correctly onto the elements. * * @param uri is the parser-returned URI that needs translation. * @return the correct prefix for the URI */ private String map( String uri ) { if ( uri == null || uri.length() == 0 ) return ""; Stack stack = (Stack) this.m_reverse.get(uri); String result = stack == null ? null : (String) stack.peek(); if ( result == null || result.length() == 0 ) return ""; else return result + ':'; } /** * This method defines the action to take when the parser begins to parse * an element. * * @param namespaceURI is the URI of the namespace for the element * @param localName is the element name without namespace * @param qName is the element name as it appears in the docment * @param atts has the names and values of all the attributes */ public void startElement( java.lang.String namespaceURI, java.lang.String localName, java.lang.String qName, Attributes atts ) throws SAXException { m_log.log( "parser", 3, "<" + map(namespaceURI) + localName + "> at " + m_location.getLineNumber() + ":" + m_location.getColumnNumber() ); // yup, one more element level m_depth++; java.util.List names = new java.util.ArrayList(); java.util.List values = new java.util.ArrayList(); for ( int i=0; i < atts.getLength(); ++i ) { String name = new String( atts.getLocalName(i) ); String value = new String( atts.getValue(i) ); m_log.log( "parser", 2, "attribute " + map(atts.getURI(i)) + name + "=\"" + value + "\"" ); names.add(name); values.add(value); } createElementObject( localName, names, values ); } /** * The parser is at the end of an element. Each successfully and * completely parsed Definition will trigger a callback to the * registered DefinitionHandler. * * @param namespaceURI is the URI of the namespace for the element * @param localName is the element name without namespace * @param qName is the element name as it appears in the docment */ public void endElement( java.lang.String namespaceURI, java.lang.String localName, java.lang.String qName ) throws SAXException { // that's it for this level m_depth--; m_log.log( "parser", 3, " at " + m_location.getLineNumber() + ":" + m_location.getColumnNumber() ); setElementRelation( localName ); } /** * This method is the callback function for characters in an element. * The element should be mixed-content. * * @param ch are the characters from the XML document * @param start is the start position into the array * @param length is the amount of valid data in the array */ public void characters( char[] ch, int start, int length ) throws SAXException { String message = new String( ch, start, length ); if ( message.length() > 0 ) { if ( message.trim().length() == 0 ) m_log.log( "parser", 3, "Characters: whitespace x " + length ); else m_log.log( "parser", 3, "Characters: \"" + message + "\"" ); elementCharacters(message); } } /** * Currently, ignorable whitespace will be ignored. * * @param ch are the characters from the XML document * @param start is the start position into the array * @param length is the amount of valid data in the array */ public void ignorableWhitespace( char[] ch, int start, int length ) throws SAXException { m_log.log( "parser", 3, "Ignoring " + length + " whitespaces" ); } /** * Receive a processing instruction. 
Currently, we are just printing * a debug message that we received a PI. * * @param target the processing instruction target * @param data the processing instruction data, or null if none was supplied. * The data does not include any whitespace separating it from the target. */ public void processingInstruction( java.lang.String target, java.lang.String data ) throws SAXException { m_log.log( "parser", 2, "processing instruction " + target + "=\"" + data + "\" was skipped!"); } /** * Receive a notification that an entity was skipped. Currently, we * are just printing a debug message to this fact. * * @param name The name of the skipped entity. If it is a parameter * entity, the name will begin with '%', and if it is the external DTD * subset, it will be the string "[dtd]". */ public void skippedEntity(java.lang.String name) throws SAXException { m_log.log( "parser", 2, "entity " + name + " was skipped!"); } // // =================================================== our own stuff === // /** * Small helper method to bundle repetitive parameters in a template * for reporting progress. * * @param subject is the name of the XML element that is being scrutinized. * @param name is then name of the element we are working with. * @param value is the attribute value. */ private void log( String subject, String name, String value ) { if ( value == null ) value = new String(); m_log.log( "parser", 3, subject + "." + name + "=\"" + value + "\"" ); } /** * Small helper method to bundle repetitive complaints in a template * for reporting progress. * * @param subject is the name of the XML element that is being scrutinized. * @param name is then name of the element we are working with. * @param value is the attribute value. */ private void complain( String subject, String name, String value ) { if ( value == null ) value = new String(); m_log.log( "default", 0, "ignoring " + subject + '@' + name + "=\"" + value + '"', true ); } /** * This method finds out what is the current element, creates the * java object that corresponds to the element, and sets the member * variables with the values of the attributes of the element. * * @param e is the name of the element * @param names is a list of attribute names, as strings. * @param values is a list of attribute values, to match the key list. 
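* <p>For example, a DAX fragment &lt;job id="ID000001" name="preprocess"&gt;
* arrives here as e="job" with names=["id","name"] and
* values=["ID000001","preprocess"], from which the current Job member
* variable is populated.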
*/ public void createElementObject( String e, java.util.List names, java.util.List values ) throws IllegalArgumentException { // invalid length if ( e == null || e.length() < 1 ) throw new IllegalArgumentException("illegal element length"); if ( e.equals("adag") ) { HashMap cbdata = new HashMap(); m_tag = TAG_ADAG; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get(i); String value = (String) values.get(i); if ( name.equals("name") ) { this.log( e, name, value ); cbdata.put( name, value ); } else if (name.equals("index")) { this.log( e, name, value ); cbdata.put( name, value ); } else if (name.equals("count")) { this.log( e, name, value ); cbdata.put( name, value ); } else if (name.equals("version")) { this.log( e, name, value ); cbdata.put( name, value ); } else if (name.equals("jobCount")) { this.log( e, name, value ); cbdata.put( name, value ); } else if (name.equals("fileCount")) { this.log( e, name, value ); cbdata.put( name, value ); } else if (name.equals("childCount")) { this.log( e, name, value ); cbdata.put( name, value ); } else if (name.equals("schemaLocation") ) { cbdata.put( name, value ); } else { this.complain( e, name, value ); } } m_callback.cb_document( cbdata ); return; } if ( e.equals("filename") || e.equals("stdin") || e.equals("stdout") || e.equals("stderr") || e.equals("uses") ) { Filename fn = new Filename(); for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get(i); String value = (String) values.get(i); if ( name.equals("file") ) { this.log( e, name, value ); fn.setFilename(value); } else if (name.equals("link")) { this.log( e, name, value ); fn.setLink( VDLType.getLinkType(value) ); } else if (name.equals("optional")) { this.log( e, name, value ); fn.setOptional( new Boolean(value).booleanValue() ); } else if (name.equals("dontRegister")) { this.log( e, name, value ); fn.setDontRegister( new Boolean(value).booleanValue() ); } //handle the register flag else if ( name.equals( "register" ) ){ fn.setRegister( new Boolean(value).booleanValue() ); } else if (name.equals("dontTransfer")) { // parse tri-state if ( value.equals("false") ) { this.log( e, name, value ); fn.setDontTransfer( LFN.XFER_MANDATORY ); } else if ( value.equals("true") ) { this.log( e, name, value ); fn.setDontTransfer( LFN.XFER_NOT ); } else if ( value.equals("optional") ) { this.log( e, name, value ); fn.setDontTransfer( LFN.XFER_OPTIONAL ); } else { this.complain( e, name, value ); } } //handle the transfer flag else if (name.equals("transfer")) { // parse tri-state if ( value.equals("false") ) { this.log( e, name, value ); fn.setTransfer( LFN.XFER_NOT ); } else if ( value.equals("true") ) { this.log( e, name, value ); fn.setTransfer( LFN.XFER_MANDATORY ); } else if ( value.equals("optional") ) { this.log( e, name, value ); fn.setTransfer( LFN.XFER_OPTIONAL ); } else { this.complain( e, name, value ); } } else if (name.equals("isTemporary")) { this.log( e, name, value ); boolean temp = (new Boolean(value)).booleanValue(); fn.setDontRegister( temp ); fn.setDontTransfer( temp ? 
LFN.XFER_NOT : LFN.XFER_MANDATORY ); } else if (name.equals("temporaryHint")) { this.log( e, name, value ); fn.setTemporary( value ); } else if (name.equals("varname")) { this.log( e, name, value ); fn.setVariable( value ); } else if (name.equals("type") ){ this.log( e, name, value ); fn.setType( LFN.typeInt( value )); } else { this.complain( e, name, value ); } } // for if (e.equals("filename")) { switch (m_tag) { case TAG_ADAG: m_callback.cb_filename( fn ); break; case TAG_PROFILE: m_profile.addLeaf(fn); break; case TAG_ARGUMENT: m_job.addArgument(fn); } } else { m_tag = TAG_OTHER; if ( e.equals("stdin") ) m_job.setStdin(fn); else if ( e.equals("stdout") ) m_job.setStdout(fn); else if ( e.equals("stderr") ) m_job.setStderr(fn); else if ( e.equals("uses") ) m_job.addUses(fn); } return; } if ( e.equals("job") ) { m_job = new Job(); for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get(i); String value = (String) values.get(i); if ( name.equals("name") ) { this.log( e, name, value ); m_job.setName(value); } else if ( name.equals("level") ) { this.log( e, name, value ); m_job.setLevel( Integer.parseInt(value) ); } else if (name.equals("namespace")) { this.log( e, name, value ); m_job.setNamespace( value ); } else if (name.equals("version")) { this.log( e, name, value ); m_job.setVersion(value); } else if (name.equals("compound")) { this.log( e, name, value ); m_job.setChain( value ); } else if (name.equals("id")) { this.log( e, name, value ); m_job.setID(value); } else if (name.equals("dv-namespace")) { this.log( e, name, value ); m_job.setDVNamespace( value ); } else if (name.equals("dv-name")) { this.log( e, name, value ); m_job.setDVName(value); } else if (name.equals("dv-version")) { this.log( e, name, value ); m_job.setDVVersion(value); } else { this.complain( e, name, value ); } } return; } if ( e.equals("child") ) { this.m_parent = new ArrayList(); for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get(i); String value = (String) values.get(i); if ( name.equals("ref") ) { this.log( e, name, value ); m_child = value; } else { this.complain( e, name, value ); } } return; } if ( e.equals("parent") ) { String parent = null; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get(i); String value = (String) values.get(i); if ( name.equals("ref") ) { this.log( e, name, value ); parent = value; } else { this.complain( e, name, value ); } } if ( parent != null ) m_parent.add(parent); return; } if ( e.equals("argument") ){ m_tag = TAG_ARGUMENT; return; } if ( e.equals("profile") ){ m_profile = new Profile(); m_tag = TAG_PROFILE; for ( int i=0; i < names.size(); ++i ) { String name = (String) names.get(i); String value = (String) values.get(i); if ( name.equals("namespace") ) { this.log( e, name, value ); m_profile.setNamespace(value); } else if (name.equals("key")) { this.log( e, name, value ); m_profile.setKey( value ); } else if (name.equals("origin")) { this.log( e, name, value ); m_profile.setOrigin( value ); } else { this.complain( e, name, value ); } } return; } // FIXME: shouldn't this be an exception? m_log.log( "filler", 0, "Error: No rules defined for element " + e ); } /** * This method sets the relations between the current java object * and its parent object according to the element hierarchy. * Usually it involves adding the object to the parent's child object * list. 
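* <p>For example, on a closing &lt;/job&gt; tag the assembled Job is
* handed to the registered callback via cb_job(), and the tag state
* reverts to TAG_ADAG.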
*/ public void setElementRelation(String elementName) { switch ( elementName.charAt(0) ) { case 'a': if ( elementName.equals("argument") ) { m_tag = TAG_OTHER; } else if ( elementName.equals("adag") ) { m_callback.cb_done(); } break; case 'c': if ( elementName.equals("child") ) { m_callback.cb_parents( m_child, m_parent ); } break; case 'j': if ( elementName.equals("job") ) { m_tag = TAG_ADAG; m_callback.cb_job(m_job); m_log.log( "filler", 3, "Adding job " + m_job.getID() ); } break; case 'p': if ( elementName.equals("profile") ) { m_job.addProfile(m_profile); m_tag = TAG_OTHER; } break; default: // m_log.log( "filler", 0, "Cannot guess parent for " + elementName ); break; } } /** * This method sets the content of the java object corresponding to * the element "text", which has mixed content. * @see org.griphyn.vdl.classes.Text */ public void elementCharacters(String elementChars) { PseudoText text = new PseudoText(elementChars); switch (m_tag) { case TAG_PROFILE: m_profile.addLeaf(text); this.log( "profile", "text", elementChars); break; case TAG_ARGUMENT: m_job.addArgument(text); this.log( "argument", "text", elementChars); } } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/euryale/VirtualFlatFileFactory.java0000644000175000017500000000406511757531137026562 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.euryale; import java.io.IOException; import java.io.File; /** * A Virtual Flat File Factory that does not do any existence checks while * creating a directory. The factory, is used to create remote paths without * checking for correctness. * * @author Karan Vahi * @version $Revision: 289 $ */ public class VirtualFlatFileFactory extends FlatFileFactory { /** * Constructor: Creates the directory and employs sanity checks. * @param directory is the place where files should be placed. * @throws IOException if the location is not a writable directory, * or cannot be created as such. */ public VirtualFlatFileFactory( File directory ) throws IOException { super( directory ); } /** * Constructor: Creates the directory and employs sanity checks. * * @param directory is the place where files should be placed. * @throws IOException if the location is not a writable directory, * or cannot be created as such. */ public VirtualFlatFileFactory( String directory )throws IOException { super( directory ); } /** * Checks the destination location for existence, if it can * be created, if it is writable etc. Does no check as it is * virtual. 
* * @param dir is the new base directory to optionally create * * @throws IOException */ protected void sanityCheck( File dir ) throws IOException{ } } pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/euryale/VirtualDecimalHashedFileFactory.java0000644000175000017500000001243111757531137030343 0ustar ryngerynge/* * This file or a portion of this file is licensed under the terms of * the Globus Toolkit Public License, found in file ../GTPL, or at * http://www.globus.org/toolkit/download/license.html. This notice must * appear in redistributions of this file, with or without modification. * * Redistributions of this Software, with or without modification, must * reproduce the GTPL in: (1) the Software, or (2) the Documentation or * some other similar material which is provided with the Software (if * any). * * Copyright 1999-2004 University of Chicago and The University of * Southern California. All rights reserved. */ package org.griphyn.vdl.euryale; import java.io.IOException; import java.io.File; /** * * A Virtual Hashed File Factory that does not do any existence checks while * creating a directory. The factory, is used to create remote paths without * checking for correctness. * * Additionally, it employs a decimal numbering scheme instead of hexadecimal * used for HashedFileFactory. * * @author Karan Vahi * @version $Revision: 289 $ */ public class VirtualDecimalHashedFileFactory extends HashedFileFactory { /** * Constructor: Creates the base directory and employs sanity checks. * @param baseDirectory is the place where the other dirs are created, * and where the DAG file resides. * @throws IOException if the location is not a writable directory, * or cannot be created as such. */ public VirtualDecimalHashedFileFactory( File baseDirectory ) throws IOException { super(baseDirectory); } /** * Constructor: Creates the directory and employs sanity checks. * @param baseDirectory is the place where the other dirs are created, * and where the DAG file resides. * @throws IOException if the location is not a writable directory, * or cannot be created as such. */ public VirtualDecimalHashedFileFactory( String baseDirectory ) throws IOException { super(baseDirectory); } /** * Constructor: Creates the base directory and employs sanity checks. * @param baseDirectory is the place where the other dirs are created, * and where the DAG file resides. * @param totalFiles is the number of files to support, and the number * of times, the virtual constructor is expected to be called. * @throws IOException if the location is not a writable directory, * or cannot be created as such. */ public VirtualDecimalHashedFileFactory( File baseDirectory, int totalFiles ) throws IOException { super(baseDirectory, totalFiles ); } /** * Constructor: Creates the directory and employs sanity checks. * @param baseDirectory is the place where the other dirs are created, * and where the DAG file resides. * @param totalFiles is the number of files to support, and the number * of times, the virtual constructor is expected to be called. * @throws IOException if the location is not a writable directory, * or cannot be created as such. */ public VirtualDecimalHashedFileFactory( String baseDirectory, int totalFiles ) throws IOException { super(baseDirectory, totalFiles); } /** * Resets the helper structures after changing layout parameters. You * will also need to call this function after you invoked the virtual * constructors, but want to change parameter pertaining to the * directory structure. 
The structured file count will also be reset! */ public void reset() { super.reset(); m_count = 0; mh_level = new int[ m_levels ]; //we are using decimal instead of hexa for this! mh_digits = (int) Math.ceil( Math.log(m_filesPerDirectory)/Math.log(10) ); mh_buffer = new StringBuffer(mh_digits); } /** * Converts the given integer into hexadecimal notation, using * the given number of digits, prefixing with zeros as necessary. * * @param number is the number to format. * @return a string of appropriate length, filled with leading zeros, * representing the number hexadecimally. */ public String format( int number ) { mh_buffer.delete( 0, mh_digits ); mh_buffer.append( Integer.toString(number) ); while ( mh_buffer.length() < mh_digits ) mh_buffer.insert( 0, '0' ); return mh_buffer.toString(); } /** * Checks the destination location for existence, if it can * be created, if it is writable etc. Does no check as it is * virtual. * * @param dir is the new base directory to optionally create */ protected void sanityCheck( File dir ) throws IOException{ } /** * Creates a directory for the hashed file directory structure on the * submit host. It only creates the File with correct path name, however * does not physically create the file. * * * @return the File structure to the created directory * * @throws IOException the exception. */ protected File createDirectory( ) throws IOException{ // create directory, as necessary File d = getBaseDirectory(); for ( int i=0; iDAXParser class and * the input file index. It parses all the DAX documents specified * in the commandline, creates the corresponding java objects, and * generates an XML document from these objects. It also prints * the input file list if the last definition in the document is * a derivation. * * @author Kavitha Ranganathan * @author Jens-S. Vöckler * @author Yong Zhao * @version $Revision: 289 $ * * @see DAXParser * @see org.griphyn.vdl.dax.ADAG */ public class DAXTest implements Callback { long m_callback[] = null; public DAXTest() { m_callback = new long[5]; } public void cb_document( java.util.Map attributes ) { m_callback[0]++; System.out.print( "free=" + Runtime.getRuntime().freeMemory() ); System.out.println( " document callback: " + attributes ); } public void cb_filename( Filename filename ) { m_callback[1]++; System.out.print( "free=" + Runtime.getRuntime().freeMemory() ); System.out.println( " filename callback: \"" + filename.getFilename() + "\"" ); } public void cb_job( Job job ) { m_callback[2]++; System.out.print( "free=" + Runtime.getRuntime().freeMemory() ); System.out.println( " job callback: " + job.getID() ); } public void cb_parents( String child, java.util.List parents ) { m_callback[3]++; System.out.print( "free=" + Runtime.getRuntime().freeMemory() ); System.out.println( " relationship callback: " + child + " " + parents ); } public void cb_done() { m_callback[4]++; System.out.print( "free=" + Runtime.getRuntime().freeMemory() ); System.out.println( " done callback" ); } private static String c_callback[] = { "documents", "filenames", "jobs", "children", "dones" }; protected void finalize() throws Throwable { if ( m_callback != null ) { System.out.print( "CALL STATS: " ); for ( int i=0; i 0 ) System.out.print( ", " ); System.out.print( c_callback[i] + "=" + m_callback[i] ); } System.out.println(); } } static public void main(String[] args) throws IOException { DAXTest me = new DAXTest(); if (args.length == 0) { System.err.println( "Usage: java " + me.getClass().getName() + " [dax] ..." 
            );
            return;
        }

        // connect debug stream
        // Logging.instance().register( "parser", System.err );
        Logging.instance().register( "app", System.err );

        DAXParser parser = new DAXParser(System.getProperty("vds.schema.dax"));
        parser.setCallback(me);

        for (int i = 0; i < args.length; i++) {
            if ( ! parser.parse(args[i]) ) {
                System.err.println( "Detected error while parsing XML, exiting\n" );
                System.exit(1);
            }
        }

        // done -- release references
        parser = null;
        me = null;
        System.gc();
    }
}
pegasus-wms_4.0.1+dfsg/src/org/griphyn/vdl/euryale/HashedFileFactory.java0000644000175000017500000003656011757531137025526 0ustar ryngerynge/*
 * This file or a portion of this file is licensed under the terms of
 * the Globus Toolkit Public License, found in file ../GTPL, or at
 * http://www.globus.org/toolkit/download/license.html. This notice must
 * appear in redistributions of this file, with or without modification.
 *
 * Redistributions of this Software, with or without modification, must
 * reproduce the GTPL in: (1) the Software, or (2) the Documentation or
 * some other similar material which is provided with the Software (if
 * any).
 *
 * Copyright 1999-2004 University of Chicago and The University of
 * Southern California. All rights reserved.
 */
package org.griphyn.vdl.euryale;

import edu.isi.pegasus.common.util.Separator;
import edu.isi.pegasus.common.util.Currently;
import java.io.*;
import java.util.*;
import java.text.*;

/**
 * This file factory generates a stream of submit files in a dynamically
 * determinable directory structure. By default, a 2-level subdirectory
 * structure is assumed, which should be able to accommodate about 500k
 * files.
 *
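 * A minimal usage sketch (hypothetical path and file count, added for
 * illustration; the single hashing level follows from the default
 * multiplicator 16 and offset 30):
 * <pre>
 *   HashedFileFactory factory = new HashedFileFactory( "/tmp/run", 1000 );
 *   // 1000*16+30 = 16030 estimated files, one level of 254-entry dirs
 *   File where = factory.createFile( "job_1.sub" ); // /tmp/run/00/job_1.sub
 * </pre>
 *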
 * <pre>
 * mult=16, offset=30, fpd=254: nr=15 => l=1
 * mult=16, offset=30, fpd=254: nr=4047 => l=2
 * mult=16, offset=30, fpd=254: nr=1028222 => l=3
 * </pre>
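 *
 * As a worked check of the second row (added for illustration): nr=4047
 * estimates 4047*16+30 = 64782 files; one integer division by 254 leaves
 * 255, which still exceeds 254, so a second directory level is needed.
 * For nr=4046 the division leaves exactly 254, which still fits one level.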
 *
 * With the given multiplicator, offset and files per directory, nr is
 * the smallest number of jobs at which a level change to l occurs.
 *
 * @author Kavitha Ranganathan
 * @author Jens-S. Vöckler
 * @author Yong Zhao
 * @version $Revision: 2079 $
 *
 * @see DAX2DAG
 */
public class HashedFileFactory extends FlatFileFactory {

    /**
     * Determines dynamically the number of directory levels required
     * to accommodate a certain number of files.
     *
     * <pre>
     *    levels = floor( log_fpd( tf * m + offset ) )
     * </pre>
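     *
     * Evaluated for the defaults and the advertised capacity (a worked
     * check, added for illustration): tf=500000, m=16, offset=30 gives
     * 500000*16+30 = 8000030 expected files, and
     * floor( log_254( 8000030 ) ) = 2, i.e. the default 2-level layout.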
     *
     * @param totalFiles is the total number of files estimated to generate
     * @param multiplicator is a corrective factor to account for files
     * that are created by the run-time system on the fly. For Euryale and
     * Pegasus it is safe to assume a multiplicator of at least 8.
     * @param filesPerDirectory is the optimal maximum number of directory
     * entries in any directory. The value of 254 for Linux ext2, and thus
     * ext3, is a safe bet.
     * @param offset is the number of (expected) files in the top level.
     * @return the number of directory levels necessary to accommodate the
     * given number of files.
     */
    public static int calculateLevels( int totalFiles, int multiplicator,
                                       int filesPerDirectory, int offset )
    {
        // total files to accommodate, including ones cropping up later
        long total = totalFiles * multiplicator + offset;

        // "count" the levels
        // return (int) Math.floor( Math.log(total) / Math.log(filesPerDirectory) );
        int levels = 0;
        while ( total > filesPerDirectory ) {
            ++levels;
            total /= filesPerDirectory;
        }
        return levels;
    }

    /**
     * Counts the number of times the structured virtual constructor was
     * called.
     * @see #getCount()
     */
    protected int m_count;

    /**
     * Contains the total number of directory levels. Defaults to a
     * reasonable level for hashing.
     */
    protected int m_levels = 2;

    /**
     * Number of entries per level. The number 254 is optimized for the
     * Linux VFS ext2, and consequently ext3, which works fastest if the
     * number of entries per directory, including dot and dotdot, does
     * not exceed 256.
     */
    protected int m_filesPerDirectory = 254;

    /**
     * Multiplicative factor to estimate the number of resulting leaf
     * filenames for each virtual constructor invocation. We assume that
     * Euryale produces ~12 files per submit file. It is better to err
     * on the larger side than to make the multiplicator too small.
     */
    protected int m_multiplicator = 16;

    /**
     * Offset of files expected to reside at the top level directory.
     * This is counted in addition to the directories being created.
     */
    protected int m_offset = 30;

    /**
     * Helping structure to avoid repeated memory requests. Stores the
     * directory number for each level.
     * @see #createFile( String )
     */
    protected int mh_level[];

    /**
     * Helping structure to avoid repeated memory requests. Stores the
     * digits necessary to create one level's directory name.
     * @see #format( int )
     */
    protected StringBuffer mh_buffer;

    /**
     * Helping structure to avoid repeated memory requests. Stores the
     * number of digits for hexadecimal formatting.
     * @see #createFile( String )
     */
    protected int mh_digits;

    /**
     * Resets the helper structures after changing layout parameters. You
     * will also need to call this function after you invoked the virtual
     * constructors, but want to change parameters pertaining to the
     * directory structure. The structured file count will also be reset!
     */
    public void reset() {
        super.reset();
        m_count = 0;
        mh_level = new int[ m_levels ];
        mh_digits = (int) Math.ceil( Math.log(m_filesPerDirectory)/Math.log(16) );
        mh_buffer = new StringBuffer(mh_digits);
    }

    /**
     * Constructor: Creates the base directory and employs sanity checks.
     * @param baseDirectory is the place where the other dirs are created,
     * and where the DAG file resides.
     * @throws IOException if the location is not a writable directory,
     * or cannot be created as such.
     */
    public HashedFileFactory( File baseDirectory )
        throws IOException
    {
        super(baseDirectory);
        reset();
    }

    /**
     * Constructor: Creates the directory and employs sanity checks.
     * @param baseDirectory is the place where the other dirs are created,
     * and where the DAG file resides.
     * @throws IOException if the location is not a writable directory,
     * or cannot be created as such.
     */
    public HashedFileFactory( String baseDirectory )
        throws IOException
    {
        super(baseDirectory);
        reset();
    }

    /**
     * Constructor: Creates the base directory and employs sanity checks.
     * @param baseDirectory is the place where the other dirs are created,
     * and where the DAG file resides.
     * @param totalFiles is the number of files to support, and thus the
     * number of times the virtual constructor is expected to be called.
     * @throws IOException if the location is not a writable directory,
     * or cannot be created as such.
     */
    public HashedFileFactory( File baseDirectory, int totalFiles )
        throws IOException
    {
        super(baseDirectory);
        m_levels = calculateLevels( totalFiles, m_multiplicator,
                                    m_filesPerDirectory, m_offset );
        reset();
    }

    /**
     * Constructor: Creates the directory and employs sanity checks.
     * @param baseDirectory is the place where the other dirs are created,
     * and where the DAG file resides.
     * @param totalFiles is the number of files to support, and thus the
     * number of times the virtual constructor is expected to be called.
     * @throws IOException if the location is not a writable directory,
     * or cannot be created as such.
     */
    public HashedFileFactory( String baseDirectory, int totalFiles )
        throws IOException
    {
        super(baseDirectory);
        m_levels = calculateLevels( totalFiles, m_multiplicator,
                                    m_filesPerDirectory, m_offset );
        reset();
    }

    /**
     * Converts the given integer into hexadecimal notation, using
     * the given number of digits, prefixing with zeros as necessary.
     *
     * @param number is the number to format.
     * @return a string of appropriate length, filled with leading zeros,
     * representing the number hexadecimally.
     */
    public String format( int number ) {
        mh_buffer.delete( 0, mh_digits );
        mh_buffer.append( Integer.toHexString(number).toUpperCase() );
        while ( mh_buffer.length() < mh_digits )
            mh_buffer.insert( 0, '0' );
        return mh_buffer.toString();
    }

    /**
     * Creates the next file with the given basename. This is the factory
     * standard virtual constructor. Once invoked, the directory structure
     * cannot be changed any more.
     *
     * @param basename is the filename to create. Don't specify dirs here.
     * @return a File structure which points to the new file. Nothing is
     * created through this method, and creation may still fail.
     * @see #getCount()
     */
    public File createFile( String basename )
        throws IOException
    {
        // calculate the directory which this goes into
        ////int estimate = m_count++ * m_multiplicator;
        int estimate = (m_count++ * m_multiplicator) + m_offset;
        for ( int i=m_levels-1; i>=0; --i ) {
            estimate /= m_filesPerDirectory;
            mh_level[i] = estimate % m_filesPerDirectory;
        }
        if ( estimate > m_filesPerDirectory )
            throw new RuntimeException( "ERROR! Wrap-around of generator." );

        // create the base directory if required
        File d = createDirectory( );

        // return position in new (or old) directory
        return new File( d, basename );
    }

    /**
     * Creates a directory for the hashed file directory structure on the
     * submit host.
     *
     * @return the File structure to the created directory
     *
     * @throws IOException the exception.
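     *
     * As a concrete illustration (numbers derived from the defaults
     * above, not part of the original comment): a createFile() call with
     * m_count = 4000 computes estimate = 4000*16+30 = 64030, hashes it
     * to mh_level = {0, 252}, and this method thus yields base/00/FC.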
     */
    protected File createDirectory( )
        throws IOException
    {
        // create directory, as necessary
        File d = getBaseDirectory();
        for ( int i=0; i<m_levels; ++i ) {
            d = new File( d, format(mh_level[i]) );
            if ( ! d.exists() && ! d.mkdir() )
                throw new IOException( "unable to create directory " + d.getPath() );
        }
        return d;
    }

    // accessors for the layout parameters, as exercised by the test
    // driver in main() below
    public int getCount() { return m_count; }
    public int getLevels() { return m_levels; }
    public int getFilesPerDirectory() { return m_filesPerDirectory; }
    public int getMultiplicator() { return m_multiplicator; }
    public int getOffset() { return m_offset; }

    /**
     * Recomputes the number of directory levels from an estimated total
     * file count, and resets the helper structures.
     */
    public void setLevelsFromTotals( int totalFiles ) {
        m_levels = calculateLevels( totalFiles, m_multiplicator,
                                    m_filesPerDirectory, m_offset );
        reset();
    }

    /**
     * Test driver. Without arguments, it searches for the smallest job
     * number nr at which the directory structure grows by another level,
     * printing one line per level change. The construction with
     * totalFiles=1 (hence zero initial levels) is an assumption that
     * reproduces the table in the class javadoc.
     */
    public static void main( String[] args )
        throws IOException
    {
        HashedFileFactory def =
            new HashedFileFactory( System.getProperty("java.io.tmpdir"), 1 );
        if ( args.length == 0 ) {
            int level = 0;
            while ( level < 3 ) {
                // estimate nr from the capacity of the current level count
                int nr = 1;
                for ( int n=def.getLevels(); n>0; n-- )
                    nr *= def.getFilesPerDirectory();
                nr -= def.getOffset();
                nr /= def.getMultiplicator();
                for ( int j=-2; j< Integer.MAX_VALUE; ++j ) {
                    int n = nr + j;
                    def.reset();
                    def.setLevelsFromTotals(n);
                    if ( level < def.getLevels() ) {
                        ++level;
                        System.out.println( "mult=" + def.getMultiplicator() +
                                            ", offset=" + def.getOffset() +
                                            ", fpd=" + def.getFilesPerDirectory() +
                                            ": nr=" + n + " => l=" + level );
                        break;
                    }
                }
            }
        } else {
            // arguments, assume numeric strings
            for ( int i=0; i